gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package io.sarnowski.aci;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Maps;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Strings.isNullOrEmpty;
/**
 * App Container (appc) application manifest, serialized to/from JSON via Jackson.
 *
 * <p>The mandatory identity fields ({@code name}, {@code version}, {@code os},
 * {@code arch}) are validated by the public constructor. The private no-arg
 * constructor exists solely for Jackson deserialization, so a deserialized
 * instance may carry {@code null} fields; {@link #equals(Object)} and
 * {@link #hashCode()} are therefore null-safe.</p>
 */
public class AppManifest {
    /** Version of the appc manifest specification this class implements. */
    private static final String MANIFEST_VERSION = "0.1.0";
    /** Fixed manifest kind discriminator. */
    private static final String MANIFEST_KIND = "AppManifest";

    @JsonProperty("acVersion")
    private String acVersion = MANIFEST_VERSION;

    @JsonProperty("acKind")
    private String acKind = MANIFEST_KIND;

    @JsonProperty("name")
    private String name;

    @JsonProperty("version")
    private String version;

    @JsonProperty("os")
    private String os;

    @JsonProperty("arch")
    private String arch;

    @JsonProperty("exec")
    private List<String> exec;

    @JsonProperty("user")
    private String user;

    @JsonProperty("group")
    private String group;

    @JsonProperty("eventHandlers")
    private List<EventHandler> eventHandlers;

    @JsonProperty("environment")
    private Map<String, String> environment;

    @JsonProperty("mountPoints")
    private List<MountPoint> mountPoints;

    @JsonProperty("ports")
    private List<Port> ports;

    @JsonProperty("isolators")
    private List<Isolator> isolators;

    @JsonProperty("annotations")
    private Map<String, String> annotations;

    /** For Jackson deserialization only; leaves identity fields null. */
    private AppManifest() {
    }

    /**
     * Creates a manifest with the mandatory identity fields.
     *
     * @param name    application name, must be non-empty
     * @param version application version, must be non-empty
     * @param os      target operating system (see {@link OS}), must be non-empty
     * @param arch    target architecture (see {@link Arch}), must be non-empty
     * @throws IllegalArgumentException if any argument is null or empty
     */
    public AppManifest(final String name, final String version, final String os, final String arch) {
        checkArgument(!isNullOrEmpty(name), "app manifest name required");
        checkArgument(!isNullOrEmpty(version), "app manifest version required");
        checkArgument(!isNullOrEmpty(os), "app manifest os required");
        checkArgument(!isNullOrEmpty(arch), "app manifest arch required");

        this.name = name;
        this.version = version;
        this.os = os;
        this.arch = arch;
    }

    public String getAcVersion() {
        return acVersion;
    }

    public String getAcKind() {
        return acKind;
    }

    public String getName() {
        return name;
    }

    public String getVersion() {
        return version;
    }

    public String getOs() {
        return os;
    }

    public String getArch() {
        return arch;
    }

    /** Returns the exec command list, lazily created so it is never null. */
    public List<String> getExec() {
        if (exec == null) {
            exec = new ArrayList<>();
        }
        return exec;
    }

    public String getUser() {
        return user;
    }

    public void setUser(final String user) {
        this.user = user;
    }

    public String getGroup() {
        return group;
    }

    public void setGroup(final String group) {
        this.group = group;
    }

    /** Returns the event handlers, lazily created so it is never null. */
    public List<EventHandler> getEventHandlers() {
        if (eventHandlers == null) {
            eventHandlers = new ArrayList<>();
        }
        return eventHandlers;
    }

    /** Returns the environment variables, lazily created so it is never null. */
    public Map<String, String> getEnvironment() {
        if (environment == null) {
            environment = Maps.newHashMap();
        }
        return environment;
    }

    /** Returns the mount points, lazily created so it is never null. */
    public List<MountPoint> getMountPoints() {
        if (mountPoints == null) {
            mountPoints = new ArrayList<>();
        }
        return mountPoints;
    }

    /** Returns the exposed ports, lazily created so it is never null. */
    public List<Port> getPorts() {
        if (ports == null) {
            ports = new ArrayList<>();
        }
        return ports;
    }

    /** Returns the isolators, lazily created so it is never null. */
    public List<Isolator> getIsolators() {
        if (isolators == null) {
            isolators = new ArrayList<>();
        }
        return isolators;
    }

    /** Returns the annotations, lazily created so it is never null. */
    public Map<String, String> getAnnotations() {
        if (annotations == null) {
            annotations = Maps.newHashMap();
        }
        return annotations;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        final AppManifest that = (AppManifest) o;
        // Identity is defined by the header fields only; the mutable collections
        // (exec, ports, ...) are deliberately excluded, consistent with hashCode().
        // Objects.equals keeps this null-safe for Jackson-created instances
        // (the previous field.equals(...) calls threw NPE on null fields).
        return Objects.equals(acVersion, that.acVersion)
                && Objects.equals(acKind, that.acKind)
                && Objects.equals(name, that.name)
                && Objects.equals(version, that.version)
                && Objects.equals(os, that.os)
                && Objects.equals(arch, that.arch);
    }

    @Override
    public int hashCode() {
        // Same fields as equals(); null-safe via Objects.hash.
        return Objects.hash(acVersion, acKind, name, version, os, arch);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("acVersion", acVersion)
                .add("acKind", acKind)
                .add("name", name)
                .add("version", version)
                .add("os", os)
                .add("arch", arch)
                .toString();
    }

    /** Well-known values for the {@code os} field. */
    public static final class OS {
        public static final String LINUX = "linux";
        // TODO more standards? (https://github.com/coreos/rocket/issues/234)
    }

    /** Well-known values for the {@code arch} field. */
    public static final class Arch {
        public static final String AMD64 = "amd64";
        public static final String I386 = "i386";
        // TODO more standards? (https://github.com/coreos/rocket/issues/234)
    }
}
| |
/*
Copyright 2010 Massachusetts General Hospital
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.sc.probro.servlets;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringReader;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.*;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.FileCleanerCleanup;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.FileCleaningTracker;
import org.apache.log4j.Logger;
import org.sc.obo.*;
import org.sc.probro.Broker;
import org.sc.probro.BrokerProperties;
import org.sc.probro.Ontology;
import org.sc.probro.UserCredentials;
import org.sc.probro.data.BrokerModel;
import org.sc.probro.data.DBModelException;
import org.sc.probro.exceptions.BadRequestException;
import org.sc.probro.exceptions.BrokerException;
import org.sc.probro.lucene.IndexCreator;
import org.sc.probro.sparql.BindingTable;
import org.sc.probro.sparql.OBOBuilder;
import org.sc.probro.sparql.OBOSparql;
import org.sc.probro.sparql.Prefixes;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.RDFNode;
/**
 * Servlet endpoints for building a broker ontology index.
 *
 * <p>GET returns the OBO text for the ontology named by the required
 * {@code obo} parameter, loaded via the SPARQL endpoint. POST accepts a
 * multipart form with an {@code ontology_name} field and an optional
 * {@code ontology_file} OBO upload, and creates the ontology in the broker;
 * when no file is uploaded the ontology is built from the SPARQL endpoint.</p>
 */
public class IndexCreatorServlet extends BrokerServlet {

    public static Logger Log = Logger.getLogger(IndexCreatorServlet.class);

    /** One megabyte in bytes; used for upload size limits. */
    public static final int MB = 1024 * 1024;

    private OBOSparql oboSparql;

    public IndexCreatorServlet(BrokerProperties ps) {
        super(ps);
        oboSparql = new OBOSparql(ps);
    }

    public void init() throws ServletException {
        super.init();
    }

    public void destroy() {
        super.destroy();
    }

    /**
     * Creates a disk-backed file item factory whose temporary files are cleaned
     * up by the container-wide {@link FileCleaningTracker}.
     *
     * @param context    servlet context holding the cleanup tracker
     * @param repository directory where items larger than 10&nbsp;MB spill to disk
     * @return a configured {@link DiskFileItemFactory}
     */
    public static DiskFileItemFactory newDiskFileItemFactory(ServletContext context, File repository) {
        FileCleaningTracker fileCleaningTracker = FileCleanerCleanup.getFileCleaningTracker(context);
        // The constructor sets the 10 MB in-memory threshold directly.
        DiskFileItemFactory factory = new DiskFileItemFactory(10 * MB, repository);
        factory.setFileCleaningTracker(fileCleaningTracker);
        return factory;
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            OBOBuilder builder = new OBOBuilder(oboSparql);
            String oboName = getRequiredParam(request, "obo", String.class);
            try {
                String oboText = builder.loadOBO(oboName);
                response.setStatus(HttpServletResponse.SC_OK);
                // Fix: bare "text" is not a valid MIME type; use "text/plain".
                response.setContentType("text/plain");
                PrintWriter writer = response.getWriter();
                writer.println(oboText);
            } catch (IOException e) {
                throw new BrokerException(e);
            }
        } catch (BrokerException e) {
            handleException(response, e);
        }
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            OBOParser parser = null;
            // NOTE(review): params is never read below — confirm decodedParams()
            // has no required side effects before removing this call.
            Map<String, String[]> params = decodedParams(request);
            UserCredentials creds = new UserCredentials();
            String ontologyName = null;

            if (!ServletFileUpload.isMultipartContent(request)) {
                throw new BrokerException(HttpServletResponse.SC_BAD_REQUEST, "No multipart form data.");
            }

            try {
                File repository = new File(System.getProperty("java.io.tmpdir"));
                // The factory already carries the 10 MB threshold from its
                // constructor, so the previous redundant setSizeThreshold()
                // call has been dropped.
                DiskFileItemFactory factory = newDiskFileItemFactory(getServletContext(), repository);

                ServletFileUpload upload = new ServletFileUpload(factory);
                upload.setSizeMax(50 * MB); // reject requests larger than 50 MB

                // Parse the multipart request and walk its parts.
                List<FileItem> items = upload.parseRequest(request);
                for (FileItem item : items) {
                    if (item.isFormField()) {
                        String formName = item.getFieldName();
                        String formValue = item.getString();
                        Log.info(String.format("%s=%s", formName, formValue));
                        if (formName.equals("ontology_name")) {
                            ontologyName = formValue;
                        }
                    } else {
                        String formName = item.getFieldName();
                        String fileName = item.getName();
                        String contentType = item.getContentType();
                        long sizeInBytes = item.getSize();
                        if (formName.equals("ontology_file")) {
                            Log.info(String.format("fileName=%s, contentType=%s, size=%d", fileName, contentType, sizeInBytes));
                            if (fileName.length() > 0) {
                                InputStream uploadedStream = item.getInputStream();
                                // NOTE(review): reads with the platform default
                                // charset — confirm uploads use that encoding.
                                BufferedReader reader = new BufferedReader(new InputStreamReader(uploadedStream));
                                try {
                                    parser = new OBOParser();
                                    parser.parse(reader);
                                } finally {
                                    // Close even when parse() throws, so the
                                    // upload's stream/temp file is not leaked.
                                    reader.close();
                                }
                            }
                        } else {
                            Log.warn(String.format("unknown file: %s", formName));
                        }
                    }
                }
            } catch (IOException e) {
                throw new BrokerException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
            } catch (FileUploadException e) {
                throw new BrokerException(e);
            }

            if (ontologyName == null) {
                throw new BadRequestException("No ontology_name field given.");
            }

            OBOOntology ontology = null;
            try {
                if (parser != null) {
                    // A file was uploaded; use its parsed contents.
                    Log.info("Retrieving OBO ontology from file.");
                    ontology = parser.getOntology();
                } else {
                    // No upload; fall back to the SPARQL endpoint.
                    Log.info("No OBO file uploaded, reading from Sparql endpoint instead.");
                    OBOBuilder builder = new OBOBuilder(oboSparql);
                    ontology = builder.buildOntology(ontologyName);
                }
            } catch (IOException e) {
                throw new BrokerException(e);
            }

            Broker broker = getBroker();
            try {
                Ontology ont = broker.createOntology(creds, ontologyName, ontology);
                response.setStatus(HttpServletResponse.SC_OK);
                // Fix: bare "text" is not a valid MIME type; use "text/plain".
                response.setContentType("text/plain");
                response.getWriter().print(ont.id);
            } finally {
                broker.close();
            }
        } catch (BrokerException e) {
            handleException(response, e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.registry.eureka;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.event.EventDispatcher;
import org.apache.dubbo.registry.client.DefaultServiceInstance;
import org.apache.dubbo.registry.client.ServiceDiscovery;
import org.apache.dubbo.registry.client.ServiceInstance;
import org.apache.dubbo.registry.client.event.ServiceInstancesChangedEvent;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.appinfo.EurekaInstanceConfig;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.config.ConfigurationManager;
import com.netflix.discovery.CacheRefreshedEvent;
import com.netflix.discovery.DefaultEurekaClientConfig;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.discovery.EurekaClient;
import com.netflix.discovery.EurekaClientConfig;
import com.netflix.discovery.EurekaEvent;
import com.netflix.discovery.shared.Application;
import com.netflix.discovery.shared.Applications;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import static java.util.Collections.emptyList;
import static org.apache.dubbo.common.constants.RegistryConstants.SUBSCRIBED_SERVICE_NAMES_KEY;
import static org.apache.dubbo.event.EventDispatcher.getDefaultExtension;
import static org.apache.dubbo.registry.client.ServiceDiscoveryRegistry.parseServices;
/**
* Eureka {@link ServiceDiscovery} implementation based on Eureka API
*/
/**
 * Eureka {@link ServiceDiscovery} implementation based on the Eureka API.
 *
 * <p>The Eureka client is created lazily on the first {@link #register(ServiceInstance)}
 * call; configuration is loaded into Eureka's {@link ConfigurationManager} from the
 * registry {@link URL} parameters during {@link #initialize(URL)}.</p>
 */
public class EurekaServiceDiscovery implements ServiceDiscovery {

    private final EventDispatcher eventDispatcher = getDefaultExtension();

    private ApplicationInfoManager applicationInfoManager;

    private EurekaClient eurekaClient;

    /** Service names to watch for instance changes (from the registry URL). */
    private Set<String> subscribedServices;

    /**
     * last apps hash code is used to identify the {@link Applications} is changed or not
     */
    private String lastAppsHashCode;

    @Override
    public void initialize(URL registryURL) throws Exception {
        Properties eurekaConfigProperties = buildEurekaConfigProperties(registryURL);
        initConfigurationManager(eurekaConfigProperties);
        initSubscribedServices(registryURL);
    }

    /**
     * Build the Properties whose {@link java.util.Map.Entry entries} are retrieved from
     * {@link URL#getParameters() the parameters of the specified URL}, which will be used in the Eureka's {@link ConfigurationManager}
     *
     * @param registryURL the {@link URL url} to connect Eureka
     * @return non-null
     */
    private Properties buildEurekaConfigProperties(URL registryURL) {
        Properties properties = new Properties();
        Map<String, String> parameters = registryURL.getParameters();
        // Defaults first, so URL parameters can override them.
        setDefaultProperties(registryURL, properties);
        parameters.entrySet().stream()
                .filter(this::filterEurekaProperty)
                .forEach(propertyEntry -> {
                    properties.setProperty(propertyEntry.getKey(), propertyEntry.getValue());
                });
        return properties;
    }

    /**
     * Initialize {@link #subscribedServices} property
     *
     * @param registryURL the {@link URL url} to connect Eureka
     */
    private void initSubscribedServices(URL registryURL) {
        this.subscribedServices = parseServices(registryURL.getParameter(SUBSCRIBED_SERVICE_NAMES_KEY));
    }

    /** Only URL parameters prefixed with "eureka." are forwarded to Eureka. */
    private boolean filterEurekaProperty(Map.Entry<String, String> propertyEntry) {
        String propertyName = propertyEntry.getKey();
        return propertyName.startsWith("eureka.");
    }

    private void setDefaultProperties(URL registryURL, Properties properties) {
        setDefaultServiceURL(registryURL, properties);
        setDefaultInitialInstanceInfoReplicationIntervalSeconds(properties);
    }

    /** Derives the default Eureka service URL from the registry URL's host and port. */
    private void setDefaultServiceURL(URL registryURL, Properties properties) {
        StringBuilder defaultServiceURLBuilder = new StringBuilder("http://")
                .append(registryURL.getHost())
                .append(":")
                .append(registryURL.getPort())
                .append("/eureka");
        properties.setProperty("eureka.serviceUrl.default", defaultServiceURLBuilder.toString());
    }

    /**
     * Set the default property for {@link EurekaClientConfig#getInitialInstanceInfoReplicationIntervalSeconds()}
     * which means do register immediately
     *
     * @param properties {@link Properties}
     */
    private void setDefaultInitialInstanceInfoReplicationIntervalSeconds(Properties properties) {
        properties.setProperty("eureka.appinfo.initial.replicate.time", "0");
    }

    /**
     * Initialize {@link ConfigurationManager}
     *
     * @param eurekaConfigProperties the Eureka's {@link ConfigurationManager}
     */
    private void initConfigurationManager(Properties eurekaConfigProperties) {
        ConfigurationManager.loadProperties(eurekaConfigProperties);
    }

    private void initApplicationInfoManager(ServiceInstance serviceInstance) {
        EurekaInstanceConfig eurekaInstanceConfig = buildEurekaInstanceConfig(serviceInstance);
        this.applicationInfoManager = new ApplicationInfoManager(eurekaInstanceConfig, (ApplicationInfoManager.OptionalArgs) null);
    }

    /**
     * Initialize {@link #eurekaClient} property lazily; subsequent calls are no-ops.
     *
     * @param serviceInstance {@link ServiceInstance}
     */
    private void initEurekaClient(ServiceInstance serviceInstance) {
        if (eurekaClient != null) {
            return;
        }
        initApplicationInfoManager(serviceInstance);
        EurekaClient eurekaClient = createEurekaClient();
        registerEurekaEventListener(eurekaClient);
        // set eurekaClient
        this.eurekaClient = eurekaClient;
    }

    private void registerEurekaEventListener(EurekaClient eurekaClient) {
        eurekaClient.registerEventListener(this::onEurekaEvent);
    }

    private void onEurekaEvent(EurekaEvent event) {
        if (event instanceof CacheRefreshedEvent) {
            onCacheRefreshedEvent((CacheRefreshedEvent) event);
        }
    }

    private void onCacheRefreshedEvent(CacheRefreshedEvent event) {
        synchronized (this) { // Make sure thread-safe in async execution
            Applications applications = eurekaClient.getApplications();
            String appsHashCode = applications.getAppsHashCode();
            if (!Objects.equals(lastAppsHashCode, appsHashCode)) { // Changed
                // Dispatch Events
                dispatchServiceInstancesChangedEvent();
                lastAppsHashCode = appsHashCode; // update current result
            }
        }
    }

    /** Fires a {@link ServiceInstancesChangedEvent} for every subscribed service. */
    private void dispatchServiceInstancesChangedEvent() {
        subscribedServices.forEach((serviceName) -> {
            eventDispatcher.dispatch(new ServiceInstancesChangedEvent(serviceName, getInstances(serviceName)));
        });
    }

    private EurekaClient createEurekaClient() {
        EurekaClientConfig eurekaClientConfig = new DefaultEurekaClientConfig();
        DiscoveryClient eurekaClient = new DiscoveryClient(applicationInfoManager, eurekaClientConfig);
        return eurekaClient;
    }

    @Override
    public void destroy() throws Exception {
        if (eurekaClient != null) {
            this.eurekaClient.shutdown();
        }
    }

    @Override
    public void register(ServiceInstance serviceInstance) throws RuntimeException {
        initEurekaClient(serviceInstance);
        setInstanceStatus(InstanceInfo.InstanceStatus.UP);
    }

    private void setInstanceStatus(InstanceInfo.InstanceStatus status) {
        if (applicationInfoManager != null) {
            this.applicationInfoManager.setInstanceStatus(status);
        }
    }

    @Override
    public void update(ServiceInstance serviceInstance) throws RuntimeException {
        setInstanceStatus(serviceInstance.isHealthy() ? InstanceInfo.InstanceStatus.UP :
                InstanceInfo.InstanceStatus.UNKNOWN);
    }

    @Override
    public void unregister(ServiceInstance serviceInstance) throws RuntimeException {
        // Fix: the original set UP here (copy-paste from register()), which kept
        // an unregistered instance routable; DOWN tells Eureka to stop serving it.
        setInstanceStatus(InstanceInfo.InstanceStatus.DOWN);
    }

    @Override
    public Set<String> getServices() {
        Applications applications = this.eurekaClient.getApplications();
        if (applications == null) {
            return Collections.emptySet();
        }
        List<Application> registered = applications.getRegisteredApplications();
        Set<String> names = new LinkedHashSet<>();
        for (Application app : registered) {
            // Skip applications with no live instances.
            if (app.getInstances().isEmpty()) {
                continue;
            }
            names.add(app.getName().toLowerCase());
        }
        return names;
    }

    @Override
    public List<ServiceInstance> getInstances(String serviceName) throws NullPointerException {
        Application application = this.eurekaClient.getApplication(serviceName);
        if (application == null) {
            return emptyList();
        }
        List<InstanceInfo> infos = application.getInstances();
        List<ServiceInstance> instances = new ArrayList<>();
        for (InstanceInfo info : infos) {
            instances.add(buildServiceInstance(info));
        }
        return instances;
    }

    /** Adapts a Eureka {@link InstanceInfo} to a Dubbo {@link ServiceInstance}. */
    private ServiceInstance buildServiceInstance(InstanceInfo instance) {
        DefaultServiceInstance serviceInstance = new DefaultServiceInstance(instance.getId(), instance.getAppName(),
                instance.getHostName(),
                // Prefer the secure port when it is enabled.
                instance.isPortEnabled(InstanceInfo.PortType.SECURE) ? instance.getSecurePort() : instance.getPort());
        serviceInstance.setMetadata(instance.getMetadata());
        return serviceInstance;
    }

    /** Adapts a Dubbo {@link ServiceInstance} to a Eureka instance config. */
    private EurekaInstanceConfig buildEurekaInstanceConfig(ServiceInstance serviceInstance) {
        ConfigurableEurekaInstanceConfig eurekaInstanceConfig = new ConfigurableEurekaInstanceConfig()
                .setInstanceId(serviceInstance.getId())
                .setAppname(serviceInstance.getServiceName())
                .setIpAddress(serviceInstance.getHost())
                .setNonSecurePort(serviceInstance.getPort())
                .setMetadataMap(serviceInstance.getMetadata());
        return eurekaInstanceConfig;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.time;
import java.text.DateFormat;
import java.text.FieldPosition;
import java.text.Format;
import java.text.ParseException;
import java.text.ParsePosition;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
/**
* <p>FastDateFormat is a fast and thread-safe version of
* {@link java.text.SimpleDateFormat}.</p>
*
* <p>To obtain an instance of FastDateFormat, use one of the static factory methods:
* {@link #getInstance(String, TimeZone, Locale)}, {@link #getDateInstance(int, TimeZone, Locale)},
* {@link #getTimeInstance(int, TimeZone, Locale)}, or {@link #getDateTimeInstance(int, int, TimeZone, Locale)}
* </p>
*
* <p>Since FastDateFormat is thread safe, you can use a static member instance:</p>
* <code>
* private static final FastDateFormat DATE_FORMATTER = FastDateFormat.getDateTimeInstance(FastDateFormat.LONG, FastDateFormat.SHORT);
* </code>
*
* <p>This class can be used as a direct replacement to
* {@code SimpleDateFormat} in most formatting and parsing situations.
* This class is especially useful in multi-threaded server environments.
* {@code SimpleDateFormat} is not thread-safe in any JDK version,
* nor will it be as Sun have closed the bug/RFE.
* </p>
*
* <p>All patterns are compatible with
* SimpleDateFormat (except time zones and some year patterns - see below).</p>
*
* <p>Since 3.2, FastDateFormat supports parsing as well as printing.</p>
*
* <p>Java 1.4 introduced a new pattern letter, {@code 'Z'}, to represent
* time zones in RFC822 format (eg. {@code +0800} or {@code -1100}).
* This pattern letter can be used here (on all JDK versions).</p>
*
* <p>In addition, the pattern {@code 'ZZ'} has been made to represent
* ISO 8601 full format time zones (eg. {@code +08:00} or {@code -11:00}).
* This introduces a minor incompatibility with Java 1.4, but at a gain of
* useful functionality.</p>
*
* <p>Javadoc cites for the year pattern: <i>For formatting, if the number of
* pattern letters is 2, the year is truncated to 2 digits; otherwise it is
* interpreted as a number.</i> Starting with Java 1.7 a pattern of 'Y' or
* 'YYY' will be formatted as '2003', while it was '03' in former Java
* versions. FastDateFormat implements the behavior of Java 7.</p>
*
* @since 2.0
*/
public class FastDateFormat extends Format implements DateParser, DatePrinter {
/**
* Required for serialization support.
*
* @see java.io.Serializable
*/
private static final long serialVersionUID = 2L;
/**
* FULL locale dependent date or time style.
*/
public static final int FULL = DateFormat.FULL;
/**
* LONG locale dependent date or time style.
*/
public static final int LONG = DateFormat.LONG;
/**
* MEDIUM locale dependent date or time style.
*/
public static final int MEDIUM = DateFormat.MEDIUM;
/**
* SHORT locale dependent date or time style.
*/
public static final int SHORT = DateFormat.SHORT;
private static final FormatCache<FastDateFormat> cache= new FormatCache<FastDateFormat>() {
@Override
protected FastDateFormat createInstance(final String pattern, final TimeZone timeZone, final Locale locale) {
return new FastDateFormat(pattern, timeZone, locale);
}
};
private final FastDatePrinter printer;
private final FastDateParser parser;
//-----------------------------------------------------------------------
/**
* <p>Gets a formatter instance using the default pattern in the
* default locale.</p>
*
* @return a date/time formatter
*/
public static FastDateFormat getInstance() {
return cache.getInstance();
}
/**
* <p>Gets a formatter instance using the specified pattern in the
* default locale.</p>
*
* @param pattern {@link java.text.SimpleDateFormat} compatible
* pattern
* @return a pattern based date/time formatter
* @throws IllegalArgumentException if pattern is invalid
*/
public static FastDateFormat getInstance(final String pattern) {
return cache.getInstance(pattern, null, null);
}
/**
* <p>Gets a formatter instance using the specified pattern and
* time zone.</p>
*
* @param pattern {@link java.text.SimpleDateFormat} compatible
* pattern
* @param timeZone optional time zone, overrides time zone of
* formatted date
* @return a pattern based date/time formatter
* @throws IllegalArgumentException if pattern is invalid
*/
public static FastDateFormat getInstance(final String pattern, final TimeZone timeZone) {
return cache.getInstance(pattern, timeZone, null);
}
/**
* <p>Gets a formatter instance using the specified pattern and
* locale.</p>
*
* @param pattern {@link java.text.SimpleDateFormat} compatible
* pattern
* @param locale optional locale, overrides system locale
* @return a pattern based date/time formatter
* @throws IllegalArgumentException if pattern is invalid
*/
public static FastDateFormat getInstance(final String pattern, final Locale locale) {
return cache.getInstance(pattern, null, locale);
}
/**
* <p>Gets a formatter instance using the specified pattern, time zone
* and locale.</p>
*
* @param pattern {@link java.text.SimpleDateFormat} compatible
* pattern
* @param timeZone optional time zone, overrides time zone of
* formatted date
* @param locale optional locale, overrides system locale
* @return a pattern based date/time formatter
* @throws IllegalArgumentException if pattern is invalid
* or {@code null}
*/
public static FastDateFormat getInstance(final String pattern, final TimeZone timeZone, final Locale locale) {
return cache.getInstance(pattern, timeZone, locale);
}
//-----------------------------------------------------------------------
/**
* <p>Gets a date formatter instance using the specified style in the
* default time zone and locale.</p>
*
* @param style date style: FULL, LONG, MEDIUM, or SHORT
* @return a localized standard date formatter
* @throws IllegalArgumentException if the Locale has no date
* pattern defined
* @since 2.1
*/
public static FastDateFormat getDateInstance(final int style) {
return cache.getDateInstance(style, null, null);
}
/**
* <p>Gets a date formatter instance using the specified style and
* locale in the default time zone.</p>
*
* @param style date style: FULL, LONG, MEDIUM, or SHORT
* @param locale optional locale, overrides system locale
* @return a localized standard date formatter
* @throws IllegalArgumentException if the Locale has no date
* pattern defined
* @since 2.1
*/
public static FastDateFormat getDateInstance(final int style, final Locale locale) {
return cache.getDateInstance(style, null, locale);
}
/**
* <p>Gets a date formatter instance using the specified style and
* time zone in the default locale.</p>
*
* @param style date style: FULL, LONG, MEDIUM, or SHORT
* @param timeZone optional time zone, overrides time zone of
* formatted date
* @return a localized standard date formatter
* @throws IllegalArgumentException if the Locale has no date
* pattern defined
* @since 2.1
*/
public static FastDateFormat getDateInstance(final int style, final TimeZone timeZone) {
return cache.getDateInstance(style, timeZone, null);
}
/**
* <p>Gets a date formatter instance using the specified style, time
* zone and locale.</p>
*
* @param style date style: FULL, LONG, MEDIUM, or SHORT
* @param timeZone optional time zone, overrides time zone of
* formatted date
* @param locale optional locale, overrides system locale
* @return a localized standard date formatter
* @throws IllegalArgumentException if the Locale has no date
* pattern defined
*/
public static FastDateFormat getDateInstance(final int style, final TimeZone timeZone, final Locale locale) {
return cache.getDateInstance(style, timeZone, locale);
}
//-----------------------------------------------------------------------
/**
* <p>Gets a time formatter instance using the specified style in the
* default time zone and locale.</p>
*
* @param style time style: FULL, LONG, MEDIUM, or SHORT
* @return a localized standard time formatter
* @throws IllegalArgumentException if the Locale has no time
* pattern defined
* @since 2.1
*/
public static FastDateFormat getTimeInstance(final int style) {
return cache.getTimeInstance(style, null, null);
}
/**
* <p>Gets a time formatter instance using the specified style and
* locale in the default time zone.</p>
*
* @param style time style: FULL, LONG, MEDIUM, or SHORT
* @param locale optional locale, overrides system locale
* @return a localized standard time formatter
* @throws IllegalArgumentException if the Locale has no time
* pattern defined
* @since 2.1
*/
public static FastDateFormat getTimeInstance(final int style, final Locale locale) {
return cache.getTimeInstance(style, null, locale);
}
/**
* <p>Gets a time formatter instance using the specified style and
* time zone in the default locale.</p>
*
* @param style time style: FULL, LONG, MEDIUM, or SHORT
* @param timeZone optional time zone, overrides time zone of
* formatted time
* @return a localized standard time formatter
* @throws IllegalArgumentException if the Locale has no time
* pattern defined
* @since 2.1
*/
public static FastDateFormat getTimeInstance(final int style, final TimeZone timeZone) {
return cache.getTimeInstance(style, timeZone, null);
}
/**
* <p>Gets a time formatter instance using the specified style, time
* zone and locale.</p>
*
* @param style time style: FULL, LONG, MEDIUM, or SHORT
* @param timeZone optional time zone, overrides time zone of
* formatted time
* @param locale optional locale, overrides system locale
* @return a localized standard time formatter
* @throws IllegalArgumentException if the Locale has no time
* pattern defined
*/
public static FastDateFormat getTimeInstance(final int style, final TimeZone timeZone, final Locale locale) {
return cache.getTimeInstance(style, timeZone, locale);
}
//-----------------------------------------------------------------------
/**
* <p>Gets a date/time formatter instance using the specified style
* in the default time zone and locale.</p>
*
* @param dateStyle date style: FULL, LONG, MEDIUM, or SHORT
* @param timeStyle time style: FULL, LONG, MEDIUM, or SHORT
* @return a localized standard date/time formatter
* @throws IllegalArgumentException if the Locale has no date/time
* pattern defined
* @since 2.1
*/
public static FastDateFormat getDateTimeInstance(final int dateStyle, final int timeStyle) {
return cache.getDateTimeInstance(dateStyle, timeStyle, null, null);
}
    /**
     * <p>Gets a date/time formatter instance using the specified style and
     * locale in the default time zone.</p>
     *
     * @param dateStyle  date style: FULL, LONG, MEDIUM, or SHORT
     * @param timeStyle  time style: FULL, LONG, MEDIUM, or SHORT
     * @param locale  optional locale, overrides system locale
     * @return a localized standard date/time formatter
     * @throws IllegalArgumentException if the Locale has no date/time
     *  pattern defined
     * @since 2.1
     */
    public static FastDateFormat getDateTimeInstance(final int dateStyle, final int timeStyle, final Locale locale) {
        // null time zone means "use the JVM default time zone".
        return cache.getDateTimeInstance(dateStyle, timeStyle, null, locale);
    }
/**
* <p>Gets a date/time formatter instance using the specified style and
* time zone in the default locale.</p>
*
* @param dateStyle date style: FULL, LONG, MEDIUM, or SHORT
* @param timeStyle time style: FULL, LONG, MEDIUM, or SHORT
* @param timeZone optional time zone, overrides time zone of
* formatted date
* @return a localized standard date/time formatter
* @throws IllegalArgumentException if the Locale has no date/time
* pattern defined
* @since 2.1
*/
public static FastDateFormat getDateTimeInstance(final int dateStyle, final int timeStyle, final TimeZone timeZone) {
return getDateTimeInstance(dateStyle, timeStyle, timeZone, null);
}
    /**
     * <p>Gets a date/time formatter instance using the specified style,
     * time zone and locale.</p>
     *
     * @param dateStyle  date style: FULL, LONG, MEDIUM, or SHORT
     * @param timeStyle  time style: FULL, LONG, MEDIUM, or SHORT
     * @param timeZone  optional time zone, overrides time zone of
     *  formatted date
     * @param locale  optional locale, overrides system locale
     * @return a localized standard date/time formatter
     * @throws IllegalArgumentException if the Locale has no date/time
     *  pattern defined
     */
    public static FastDateFormat getDateTimeInstance(
            final int dateStyle, final int timeStyle, final TimeZone timeZone, final Locale locale) {
        // Core factory: all other getDateTimeInstance overloads funnel into the cache like this.
        return cache.getDateTimeInstance(dateStyle, timeStyle, timeZone, locale);
    }
    // Constructor
    //-----------------------------------------------------------------------
    /**
     * <p>Constructs a new FastDateFormat.</p>
     *
     * @param pattern  {@link java.text.SimpleDateFormat} compatible pattern
     * @param timeZone  non-null time zone to use
     * @param locale  non-null locale to use
     * @throws NullPointerException if pattern, timeZone, or locale is null.
     */
    protected FastDateFormat(final String pattern, final TimeZone timeZone, final Locale locale) {
        // No explicit century pivot: 2-digit years default to "now - 80 years"
        // (see the 4-arg constructor).
        this(pattern, timeZone, locale, null);
    }
// Constructor
//-----------------------------------------------------------------------
/**
* <p>Constructs a new FastDateFormat.</p>
*
* @param pattern {@link java.text.SimpleDateFormat} compatible pattern
* @param timeZone non-null time zone to use
* @param locale non-null locale to use
* @param centuryStart The start of the 100 year period to use as the "default century" for 2 digit year parsing. If centuryStart is null, defaults to now - 80 years
* @throws NullPointerException if pattern, timeZone, or locale is null.
*/
protected FastDateFormat(final String pattern, final TimeZone timeZone, final Locale locale, final Date centuryStart) {
printer= new FastDatePrinter(pattern, timeZone, locale);
parser= new FastDateParser(pattern, timeZone, locale, centuryStart);
}
// Format methods
//-----------------------------------------------------------------------
/**
* <p>Formats a {@code Date}, {@code Calendar} or
* {@code Long} (milliseconds) object.</p>
* This method is an implementation of {@link Format#format(Object, StringBuffer, FieldPosition)}
*
* @param obj the object to format
* @param toAppendTo the buffer to append to
* @param pos the position - ignored
* @return the buffer passed in
*/
@Override
public StringBuffer format(final Object obj, final StringBuffer toAppendTo, final FieldPosition pos) {
return toAppendTo.append(printer.format(obj));
}
    /**
     * <p>Formats a millisecond {@code long} value.</p>
     *
     * @param millis  the millisecond value to format
     * @return the formatted string
     * @since 2.1
     */
    @Override
    public String format(final long millis) {
        // All formatting work is delegated to the immutable printer.
        return printer.format(millis);
    }
    /**
     * <p>Formats a {@code Date} object using a {@code GregorianCalendar}.</p>
     *
     * @param date  the date to format
     * @return the formatted string
     */
    @Override
    public String format(final Date date) {
        // All formatting work is delegated to the immutable printer.
        return printer.format(date);
    }
    /**
     * <p>Formats a {@code Calendar} object.</p>
     *
     * @param calendar  the calendar to format
     * @return the formatted string
     */
    @Override
    public String format(final Calendar calendar) {
        // All formatting work is delegated to the immutable printer.
        return printer.format(calendar);
    }
    /**
     * <p>Formats a millisecond {@code long} value into the
     * supplied {@code StringBuffer}.</p>
     *
     * @param millis  the millisecond value to format
     * @param buf  the buffer to format into
     * @return the specified string buffer
     * @since 2.1
     * @deprecated Use {@link #format(long, Appendable)}.
     */
    @Deprecated
    @Override
    public StringBuffer format(final long millis, final StringBuffer buf) {
        return printer.format(millis, buf);
    }
    /**
     * <p>Formats a {@code Date} object into the
     * supplied {@code StringBuffer} using a {@code GregorianCalendar}.</p>
     *
     * @param date  the date to format
     * @param buf  the buffer to format into
     * @return the specified string buffer
     * @deprecated Use {@link #format(Date, Appendable)}.
     */
    @Deprecated
    @Override
    public StringBuffer format(final Date date, final StringBuffer buf) {
        return printer.format(date, buf);
    }
    /**
     * <p>Formats a {@code Calendar} object into the
     * supplied {@code StringBuffer}.</p>
     *
     * @param calendar  the calendar to format
     * @param buf  the buffer to format into
     * @return the specified string buffer
     * @deprecated Use {@link #format(Calendar, Appendable)}.
     */
    @Deprecated
    @Override
    public StringBuffer format(final Calendar calendar, final StringBuffer buf) {
        return printer.format(calendar, buf);
    }
    /**
     * <p>Formats a millisecond {@code long} value into the
     * supplied {@code Appendable}.</p>
     *
     * @param millis  the millisecond value to format
     * @param buf  the buffer to format into
     * @param <B> the Appendable class type, usually StringBuilder or StringBuffer
     * @return the specified buffer
     * @since 3.5
     */
    @Override
    public <B extends Appendable> B format(final long millis, final B buf) {
        return printer.format(millis, buf);
    }
    /**
     * <p>Formats a {@code Date} object into the
     * supplied {@code Appendable} using a {@code GregorianCalendar}.</p>
     *
     * @param date  the date to format
     * @param buf  the buffer to format into
     * @param <B> the Appendable class type, usually StringBuilder or StringBuffer
     * @return the specified buffer
     * @since 3.5
     */
    @Override
    public <B extends Appendable> B format(final Date date, final B buf) {
        return printer.format(date, buf);
    }
    /**
     * <p>Formats a {@code Calendar} object into the
     * supplied {@code Appendable}.</p>
     *
     * @param calendar  the calendar to format
     * @param buf  the buffer to format into
     * @param <B> the Appendable class type, usually StringBuilder or StringBuffer
     * @return the specified buffer
     * @since 3.5
     */
    @Override
    public <B extends Appendable> B format(final Calendar calendar, final B buf) {
        return printer.format(calendar, buf);
    }
    // Parsing
    //-----------------------------------------------------------------------
    /**
     * Parses the entire text into a {@code Date}.
     *
     * @param source  the text to parse, not null
     * @return the parsed date
     * @throws ParseException if the text cannot be parsed
     * @see DateParser#parse(java.lang.String)
     */
    @Override
    public Date parse(final String source) throws ParseException {
        return parser.parse(source);
    }
    /**
     * Parses text starting at the given position into a {@code Date};
     * on failure returns {@code null} and leaves the error index in {@code pos}.
     *
     * @param source  the text to parse, not null
     * @param pos  the parse position, updated as parsing proceeds
     * @return the parsed date, or null on failure
     * @see DateParser#parse(java.lang.String, java.text.ParsePosition)
     */
    @Override
    public Date parse(final String source, final ParsePosition pos) {
        return parser.parse(source, pos);
    }
    /**
     * Parses text into a date {@code Object}, as required by {@link java.text.Format}.
     *
     * @param source  the text to parse, not null
     * @param pos  the parse position, updated as parsing proceeds
     * @return the parsed object, or null on failure
     * @see java.text.Format#parseObject(java.lang.String, java.text.ParsePosition)
     */
    @Override
    public Object parseObject(final String source, final ParsePosition pos) {
        return parser.parseObject(source, pos);
    }
    // Accessors
    //-----------------------------------------------------------------------
    /**
     * <p>Gets the pattern used by this formatter.</p>
     *
     * @return the pattern, {@link java.text.SimpleDateFormat} compatible
     */
    @Override
    public String getPattern() {
        // The printer and parser share one pattern, so either is authoritative.
        return printer.getPattern();
    }
    /**
     * <p>Gets the time zone used by this formatter.</p>
     *
     * <p>This zone is always used for {@code Date} formatting. </p>
     *
     * @return the time zone
     */
    @Override
    public TimeZone getTimeZone() {
        return printer.getTimeZone();
    }
    /**
     * <p>Gets the locale used by this formatter.</p>
     *
     * @return the locale
     */
    @Override
    public Locale getLocale() {
        return printer.getLocale();
    }
    /**
     * <p>Gets an estimate for the maximum string length that the
     * formatter will produce.</p>
     *
     * <p>The actual formatted length will almost always be less than or
     * equal to this amount.</p>
     *
     * @return the maximum formatted length
     */
    public int getMaxLengthEstimate() {
        // Useful for pre-sizing buffers before calling format(..., buf).
        return printer.getMaxLengthEstimate();
    }
// Basics
//-----------------------------------------------------------------------
/**
* <p>Compares two objects for equality.</p>
*
* @param obj the object to compare to
* @return {@code true} if equal
*/
@Override
public boolean equals(final Object obj) {
if (obj instanceof FastDateFormat == false) {
return false;
}
final FastDateFormat other = (FastDateFormat) obj;
// no need to check parser, as it has same invariants as printer
return printer.equals(other.printer);
}
    /**
     * <p>Returns a hashcode compatible with equals.</p>
     *
     * @return a hashcode compatible with equals
     */
    @Override
    public int hashCode() {
        // equals() compares only the printer, so hashing only the printer
        // keeps the equals/hashCode contract intact.
        return printer.hashCode();
    }
/**
* <p>Gets a debugging string version of this formatter.</p>
*
* @return a debugging string
*/
@Override
public String toString() {
return "FastDateFormat[" + printer.getPattern() + "," + printer.getLocale() + "," + printer.getTimeZone().getID() + "]";
}
    /**
     * <p>Performs the formatting by applying the rules to the
     * specified calendar.</p>
     *
     * @param calendar  the calendar to format
     * @param buf  the buffer to format into
     * @return the specified string buffer
     * @deprecated Use {@link #format(Calendar, Appendable)}.
     */
    @Deprecated
    protected StringBuffer applyRules(final Calendar calendar, final StringBuffer buf) {
        return printer.applyRules(calendar, buf);
    }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1.stub;
import static com.google.cloud.dialogflow.cx.v3beta1.WebhooksClient.ListWebhooksPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest;
import com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest;
import com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse;
import com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest;
import com.google.cloud.dialogflow.cx.v3beta1.Webhook;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link WebhooksStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (dialogflow.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getWebhook to 30 seconds:
*
* <pre>{@code
* WebhooksStubSettings.Builder webhooksSettingsBuilder = WebhooksStubSettings.newBuilder();
* webhooksSettingsBuilder
* .getWebhookSettings()
* .setRetrySettings(
* webhooksSettingsBuilder
* .getWebhookSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* WebhooksStubSettings webhooksSettings = webhooksSettingsBuilder.build();
* }</pre>
*/
@BetaApi
@Generated("by gapic-generator-java")
public class WebhooksStubSettings extends StubSettings<WebhooksStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/dialogflow")
          .build();

  // Immutable per-RPC call settings, frozen from the Builder in the constructor.
  private final PagedCallSettings<
          ListWebhooksRequest, ListWebhooksResponse, ListWebhooksPagedResponse>
      listWebhooksSettings;
  private final UnaryCallSettings<GetWebhookRequest, Webhook> getWebhookSettings;
  private final UnaryCallSettings<CreateWebhookRequest, Webhook> createWebhookSettings;
  private final UnaryCallSettings<UpdateWebhookRequest, Webhook> updateWebhookSettings;
  private final UnaryCallSettings<DeleteWebhookRequest, Empty> deleteWebhookSettings;

  // Describes how the paged listWebhooks RPC iterates: how page tokens / page
  // sizes are injected into requests and extracted from responses.
  private static final PagedListDescriptor<ListWebhooksRequest, ListWebhooksResponse, Webhook>
      LIST_WEBHOOKS_PAGE_STR_DESC =
          new PagedListDescriptor<ListWebhooksRequest, ListWebhooksResponse, Webhook>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListWebhooksRequest injectToken(ListWebhooksRequest payload, String token) {
              return ListWebhooksRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListWebhooksRequest injectPageSize(ListWebhooksRequest payload, int pageSize) {
              return ListWebhooksRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListWebhooksRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListWebhooksResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Webhook> extractResources(ListWebhooksResponse payload) {
              // Defensive null guard; protobuf repeated-field getters normally
              // never return null, but an empty list is safe either way.
              return payload.getWebhooksList() == null
                  ? ImmutableList.<Webhook>of()
                  : payload.getWebhooksList();
            }
          };

  // Bridges the unary listWebhooks callable and the page descriptor above into
  // the user-facing ListWebhooksPagedResponse.
  private static final PagedListResponseFactory<
          ListWebhooksRequest, ListWebhooksResponse, ListWebhooksPagedResponse>
      LIST_WEBHOOKS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListWebhooksRequest, ListWebhooksResponse, ListWebhooksPagedResponse>() {
            @Override
            public ApiFuture<ListWebhooksPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListWebhooksRequest, ListWebhooksResponse> callable,
                ListWebhooksRequest request,
                ApiCallContext context,
                ApiFuture<ListWebhooksResponse> futureResponse) {
              PageContext<ListWebhooksRequest, ListWebhooksResponse, Webhook> pageContext =
                  PageContext.create(callable, LIST_WEBHOOKS_PAGE_STR_DESC, request, context);
              return ListWebhooksPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to listWebhooks. */
  public PagedCallSettings<ListWebhooksRequest, ListWebhooksResponse, ListWebhooksPagedResponse>
      listWebhooksSettings() {
    return listWebhooksSettings;
  }

  /** Returns the object with the settings used for calls to getWebhook. */
  public UnaryCallSettings<GetWebhookRequest, Webhook> getWebhookSettings() {
    return getWebhookSettings;
  }

  /** Returns the object with the settings used for calls to createWebhook. */
  public UnaryCallSettings<CreateWebhookRequest, Webhook> createWebhookSettings() {
    return createWebhookSettings;
  }

  /** Returns the object with the settings used for calls to updateWebhook. */
  public UnaryCallSettings<UpdateWebhookRequest, Webhook> updateWebhookSettings() {
    return updateWebhookSettings;
  }

  /** Returns the object with the settings used for calls to deleteWebhook. */
  public UnaryCallSettings<DeleteWebhookRequest, Empty> deleteWebhookSettings() {
    return deleteWebhookSettings;
  }

  /** Creates the transport-specific stub; gRPC is the only supported transport. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public WebhooksStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcWebhooksStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "dialogflow.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dialogflow.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(WebhooksStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected WebhooksStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    // Freeze every per-method builder into its immutable settings counterpart.
    listWebhooksSettings = settingsBuilder.listWebhooksSettings().build();
    getWebhookSettings = settingsBuilder.getWebhookSettings().build();
    createWebhookSettings = settingsBuilder.createWebhookSettings().build();
    updateWebhookSettings = settingsBuilder.updateWebhookSettings().build();
    deleteWebhookSettings = settingsBuilder.deleteWebhookSettings().build();
  }

  /** Builder for WebhooksStubSettings. */
  public static class Builder extends StubSettings.Builder<WebhooksStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListWebhooksRequest, ListWebhooksResponse, ListWebhooksPagedResponse>
        listWebhooksSettings;
    private final UnaryCallSettings.Builder<GetWebhookRequest, Webhook> getWebhookSettings;
    private final UnaryCallSettings.Builder<CreateWebhookRequest, Webhook> createWebhookSettings;
    private final UnaryCallSettings.Builder<UpdateWebhookRequest, Webhook> updateWebhookSettings;
    private final UnaryCallSettings.Builder<DeleteWebhookRequest, Empty> deleteWebhookSettings;

    // Under "retry_policy_0" only transient UNAVAILABLE errors are retried.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      // Exponential backoff: 100ms initial delay, x1.3 multiplier, capped at
      // 60s per retry; 60s RPC timeout and 60s total deadline.
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      listWebhooksSettings = PagedCallSettings.newBuilder(LIST_WEBHOOKS_PAGE_STR_FACT);
      getWebhookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createWebhookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateWebhookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteWebhookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listWebhooksSettings,
              getWebhookSettings,
              createWebhookSettings,
              updateWebhookSettings,
              deleteWebhookSettings);
      initDefaults(this);
    }

    protected Builder(WebhooksStubSettings settings) {
      super(settings);

      listWebhooksSettings = settings.listWebhooksSettings.toBuilder();
      getWebhookSettings = settings.getWebhookSettings.toBuilder();
      createWebhookSettings = settings.createWebhookSettings.toBuilder();
      updateWebhookSettings = settings.updateWebhookSettings.toBuilder();
      deleteWebhookSettings = settings.deleteWebhookSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listWebhooksSettings,
              getWebhookSettings,
              createWebhookSettings,
              updateWebhookSettings,
              deleteWebhookSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder initDefaults(Builder builder) {
      // Apply the shared "retry_policy_0" codes and backoff to every method.
      builder
          .listWebhooksSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getWebhookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createWebhookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .updateWebhookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deleteWebhookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to listWebhooks. */
    public PagedCallSettings.Builder<
            ListWebhooksRequest, ListWebhooksResponse, ListWebhooksPagedResponse>
        listWebhooksSettings() {
      return listWebhooksSettings;
    }

    /** Returns the builder for the settings used for calls to getWebhook. */
    public UnaryCallSettings.Builder<GetWebhookRequest, Webhook> getWebhookSettings() {
      return getWebhookSettings;
    }

    /** Returns the builder for the settings used for calls to createWebhook. */
    public UnaryCallSettings.Builder<CreateWebhookRequest, Webhook> createWebhookSettings() {
      return createWebhookSettings;
    }

    /** Returns the builder for the settings used for calls to updateWebhook. */
    public UnaryCallSettings.Builder<UpdateWebhookRequest, Webhook> updateWebhookSettings() {
      return updateWebhookSettings;
    }

    /** Returns the builder for the settings used for calls to deleteWebhook. */
    public UnaryCallSettings.Builder<DeleteWebhookRequest, Empty> deleteWebhookSettings() {
      return deleteWebhookSettings;
    }

    @Override
    public WebhooksStubSettings build() throws IOException {
      return new WebhooksStubSettings(this);
    }
  }
}
| |
package me.ialistannen.bukkitutil.commandsystem.implementation;
import me.ialistannen.bukkitutil.commandsystem.base.AbstractCommandNode;
import me.ialistannen.bukkitutil.commandsystem.base.CommandResultType;
import me.ialistannen.bukkitutil.commandsystem.base.CommandRoot;
import me.ialistannen.bukkitutil.commandsystem.base.CommandTree;
import me.ialistannen.bukkitutil.commandsystem.base.HelpCommandAnnotation;
import me.ialistannen.bukkitutil.commandsystem.util.CommandSystemUtil;
import me.ialistannen.bukkitutil.other.Pager;
import me.ialistannen.bukkitutil.other.Pager.Options;
import me.ialistannen.bukkitutil.other.Pager.PagerFilterable;
import me.ialistannen.bukkitutil.other.Pager.SearchMode;
import me.ialistannen.languageSystem.MessageProvider;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import org.bukkit.permissions.Permissible;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static me.ialistannen.bukkitutil.commandsystem.util.CommandSystemUtil.color;
/**
* The default help command
 * <p><b>Needs a few keys:</b>
 * <ul>
 * <li>KEY + "_permission"</li>
 * <li>KEY + "_name"</li>
 * <li>KEY + "_keyword"</li>
 * <li>KEY + "_description"</li>
 * <li>KEY + "_usage"</li>
 * <li>KEY + "_pattern"</li>
 * <li>KEY + "_not_found"
 * <ul>
 * <li>{0} ==> What the user entered</li>
 * </ul>
 * </li>
 * </ul>
* <p>
* And all the keys from the {@link Pager.Page#send(CommandSender, MessageProvider)} method
*/
@HelpCommandAnnotation
public class DefaultHelpCommand extends AbstractCommandNode {
private final CommandTree tree;
private final String KEY;
/**
* Please see {@link DefaultHelpCommand} for the needed language keys
*
* @param language The language
* @param tree The command tree. Queried for all the children.
* @param key The Base key. Default is "command_help"
*
* @see AbstractCommandNode#AbstractCommandNode(MessageProvider)
*/
@SuppressWarnings("unused")
public DefaultHelpCommand(@Nonnull MessageProvider language, @Nonnull CommandTree tree,
String key) {
super(language, key);
this.tree = tree;
this.KEY = key;
}
@Override
public boolean isForbidden(Permissible permissible) {
return !permissible.hasPermission(language.tr(KEY + "_permission"));
}
    @Override
    public boolean isNotAble(CommandSender sender) {
        // Help is usable by every sender type (console and players alike).
        return false;
    }
@Override
public List<String> tabComplete(CommandSender sender, String alias, List<String> wholeUserChat,
int indexRelativeToYou) {
return Arrays.asList("--depth=", "--page=", "--entriesPerPage=", "--showUsage=false",
"--search=", "--searchRegEx=");
}
@Override
public CommandResultType execute(CommandSender sender, String[] args) {
AtomicInteger page = new AtomicInteger(0);
AtomicInteger depth = new AtomicInteger(2);
AtomicInteger entriesPerPage = new AtomicInteger(10);
AtomicBoolean showUsage = new AtomicBoolean(false);
StringBuilder searchFilter = new StringBuilder();
AtomicBoolean searchUsingRegEx = new AtomicBoolean(false);
Arrays.stream(args)
.filter(s -> s.matches("--depth=\\d{1,9}"))
.forEach(s -> {
s = s.replace("--depth=", "");
depth.set(Integer.parseInt(s));
});
Arrays.stream(args)
.filter(s -> s.toLowerCase().matches("--page=\\d{1,9}"))
.forEach(s -> {
s = s.replace("--page=", "");
page.set(Integer.parseInt(s) - 1);
});
Arrays.stream(args)
.filter(s -> s.toLowerCase().matches("--entriesperpage=\\d{1,9}"))
.forEach(s -> {
s = s.replace("--entriesPerPage=", "");
entriesPerPage.set(Integer.parseInt(s));
});
Arrays.stream(args)
.filter(s -> s.toLowerCase().matches("--showusage=(true|false)"))
.forEach(s -> {
s = s.replace("--showUsage=", "");
showUsage.set(Boolean.parseBoolean(s));
});
Arrays.stream(args)
.filter(s -> s.matches("--search=.+"))
.forEach(s -> {
s = s.replace("--search=", "");
searchFilter.append(s.trim());
});
Arrays.stream(args)
.filter(s -> s.matches("--searchRegEx=.+"))
.forEach(s -> {
s = s.replace("--searchRegEx=", "");
if (searchFilter.length() != 0) {
searchFilter.delete(0, searchFilter.length());
}
searchFilter.append(s.trim());
searchUsingRegEx.set(true);
});
Options options = Options.builder()
.setEntriesPerPage(entriesPerPage.get())
.setPageIndex(page.get())
.setSearchPattern(searchFilter.toString())
.setSearchModes(searchUsingRegEx.get()
? SearchMode.REGEX_FIND
: SearchMode.CONTAINS).build();
if (args.length > 0) {
AbstractCommandNode.FindCommandResult result = tree.find(new ArrayDeque<>(Arrays.asList(args)), sender);
if (result.getResult() == CommandResultType.SUCCESSFUL) {
List<PagerFilterable> entries =
getCommandFilterable(language, tree, showUsage.get(), result.getCommandNode(), depth.get(), 0);
Pager.getPageFromFilterable(options, entries)
.send(sender, language);
}
else {
sender.sendMessage(language.tr(KEY + "_not_found",
Arrays.stream(args).collect(Collectors.joining(" "))));
}
return CommandResultType.SUCCESSFUL;
}
List<PagerFilterable> entries =
getCommandFilterable(language, tree, showUsage.get(), tree.getRoot(), depth.get(), 0);
Pager.getPageFromFilterable(options, entries)
.send(sender, language);
return CommandResultType.SUCCESSFUL;
}
/**
* Sends help for one command
*
* @param maxDepth The maximum depth. Index based. 0 ==> Just this command, 1 ==> Command and children
* @param counter The current counter. Just supply 0. Used for recursion.
*/
private static List<PagerFilterable> getCommandFilterable(MessageProvider language, CommandTree tree,
boolean withUsage, AbstractCommandNode node,
int maxDepth, int counter) {
List<PagerFilterable> list = new ArrayList<>();
if (!(node instanceof CommandRoot)) {
PagerFilterable filterable = new CommandFilterable(node, withUsage, tree.getChildren(node).size(),
language, counter);
list.add(filterable);
}
else {
counter--;
}
if (counter >= maxDepth) {
return list;
}
for (AbstractCommandNode commandNode : tree.getChildren(node)) {
list.addAll(getCommandFilterable(language, tree, withUsage, commandNode, maxDepth, counter + 1));
}
return list;
}
/**
 * A single help entry for a command, rendered to colored lines and filterable by the pager.
 */
private static class CommandFilterable implements PagerFilterable {

    private AbstractCommandNode node;
    private boolean showUsage;
    private String childrenAmount;
    private MessageProvider language;
    private int depth;
    private List<String> allLines;

    CommandFilterable(AbstractCommandNode node, boolean showUsage, int childrenAmount,
                      MessageProvider language, int depth) {
        this.node = node;
        this.showUsage = showUsage;
        // An amount of zero is rendered as the empty string
        this.childrenAmount = childrenAmount == 0 ? "" : Integer.toString(childrenAmount);
        this.language = language;
        this.depth = depth;
        calculateAllLines();
    }

    @Override
    public boolean accepts(Options options) {
        // Match against exactly what is shown to the user, minus color codes
        return allLines.stream().anyMatch(line -> options.matchesPattern(strip(line)));
    }

    /**
     * @param coloredString The String to strip the colors from
     *
     * @return The uncolored String
     */
    private static String strip(String coloredString) {
        return ChatColor.stripColor(coloredString);
    }

    /**
     * Renders this entry to its display lines: format the command, split on the
     * {@code <newline>} marker, then prepend the per-depth prefix and padding.
     */
    private void calculateAllLines() {
        String formatKey = showUsage
                ? "command_help_format_with_usage"
                : "command_help_format_without_usage";
        String defaultFormat = showUsage
                ? "&3{0}&9: &7{1} &7<&6{2}&7><newline> &cUsage: {3}"
                : "&3{0}&9: &7{1} &7<&6{2}&7>";
        String formatted = color(language.trOrDefault(formatKey, defaultFormat,
                node.getName(), node.getDescription(), childrenAmount, node.getUsage()));

        String prefixKey = depth == 0
                ? "command_help_top_level_prefix"
                : "command_help_sub_level_prefix";
        String padding = CommandSystemUtil.repeat(language.trOrDefault("command_help_padding_char", " "), depth);

        List<String> lines = new ArrayList<>();
        for (String part : formatted.split("<newline>")) {
            String line = padding + color(language.trOrDefault(prefixKey, "")) + part;
            if (!line.isEmpty()) {
                lines.add(line);
            }
        }
        allLines = lines;
    }

    @Override
    public @NotNull List<String> getAllLines() {
        return allLines;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // Identity of an entry is its command node alone
        return o instanceof CommandFilterable
                && Objects.equals(node, ((CommandFilterable) o).node);
    }

    @Override
    public int hashCode() {
        return Objects.hash(node);
    }

    @Override
    public String toString() {
        return "CommandFilterable{" +
                "node=" + node.getName() +
                ", showUsage=" + showUsage +
                ", childrenAmount='" + childrenAmount + '\'' +
                ", depth=" + depth +
                ", allLines=" + getAllLines() +
                '}';
    }
}
}
| |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.screens.archetype.mgmt.client.table.presenters;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import elemental2.promise.Promise;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.guvnor.common.services.project.model.GAV;
import org.jboss.errai.security.shared.api.identity.User;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.screens.archetype.mgmt.client.modal.AddArchetypeModalPresenter;
import org.kie.workbench.common.screens.archetype.mgmt.client.table.config.ArchetypeTableConfiguration;
import org.kie.workbench.common.screens.archetype.mgmt.client.table.item.model.ArchetypeItem;
import org.kie.workbench.common.screens.archetype.mgmt.client.table.presenters.impl.GlobalArchetypeTablePresenter;
import org.kie.workbench.common.screens.archetype.mgmt.shared.events.ArchetypeListUpdatedEvent;
import org.kie.workbench.common.screens.archetype.mgmt.shared.model.Archetype;
import org.kie.workbench.common.screens.archetype.mgmt.shared.model.ArchetypeListOperation;
import org.kie.workbench.common.screens.archetype.mgmt.shared.model.ArchetypeStatus;
import org.kie.workbench.common.screens.archetype.mgmt.shared.model.PaginatedArchetypeList;
import org.kie.workbench.common.screens.archetype.mgmt.shared.preferences.ArchetypePreferences;
import org.kie.workbench.common.screens.archetype.mgmt.shared.services.ArchetypeService;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.uberfire.client.promise.Promises;
import org.uberfire.ext.widgets.common.client.common.BusyIndicatorView;
import org.uberfire.mocks.CallerMock;
import org.uberfire.preferences.shared.PreferenceScope;
import org.uberfire.preferences.shared.PreferenceScopeFactory;
import org.uberfire.preferences.shared.impl.PreferenceScopeResolutionStrategyInfo;
import org.uberfire.promise.SyncPromises;
import org.uberfire.security.authz.AuthorizationManager;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
/**
 * Unit tests for {@code AbstractArchetypeTablePresenter}, exercised through its
 * {@code GlobalArchetypeTablePresenter} subclass. All collaborators are Mockito mocks
 * and the presenter itself is a spy so abstract-class behavior can be stubbed per test.
 * Private presenter state (e.g. {@code setup}, {@code currentPage}, {@code totalPages})
 * is written directly via reflection in {@link #setPresenterPrivateField(String, Object)}.
 */
@RunWith(MockitoJUnitRunner.class)
public class AbstractArchetypeTablePresenterTest {

    private AbstractArchetypeTablePresenter presenter;

    @Mock
    private AbstractArchetypeTablePresenter.View view;

    @Mock
    private AbstractArchetypeTablePresenter.ArchetypeListPresenter archetypeListPresenter;

    @Mock
    private BusyIndicatorView busyIndicatorView;

    @Mock
    private TranslationService ts;

    @Mock
    private AddArchetypeModalPresenter addArchetypeModalPresenter;

    @Mock
    private ArchetypePreferences archetypePreferences;

    @Mock
    private ArchetypeService archetypeService;

    @Mock
    private PreferenceScopeFactory preferenceScopeFactory;

    // Not a mock: a real SyncPromises so promise chains resolve inline within each test.
    private Promises promises;

    @Mock
    private AuthorizationManager authorizationManager;

    @Mock
    private User user;

    @Before
    public void setup() {
        promises = new SyncPromises();
        // Spy so individual tests can stub protected/abstract behavior (e.g. loadList).
        presenter = spy(new GlobalArchetypeTablePresenter(view,
                                                          archetypeListPresenter,
                                                          busyIndicatorView,
                                                          ts,
                                                          addArchetypeModalPresenter,
                                                          archetypePreferences,
                                                          new CallerMock<>(archetypeService),
                                                          preferenceScopeFactory,
                                                          promises,
                                                          authorizationManager,
                                                          user));
    }

    @Test
    public void onArchetypeListUpdatedEventWhenIsNotSetupTest() {
        setPresenterPrivateField("setup", false);

        presenter.onArchetypeListUpdatedEvent(new ArchetypeListUpdatedEvent(ArchetypeListOperation.ADD));

        // Events arriving before setup must not trigger a service call.
        verify(archetypeService, never()).list(any(),
                                               any(),
                                               any());
    }

    @Test
    public void onArchetypeListUpdatedEventWhenIsSetupTest() {
        setPresenterPrivateField("setup", true);
        doReturn(promises.resolve()).when(presenter).loadList(false);

        presenter.onArchetypeListUpdatedEvent(new ArchetypeListUpdatedEvent(ArchetypeListOperation.ADD));

        verify(presenter).loadList(false);
    }

    @Test
    public void showBusyIndicatorTest() {
        final String msg = "Loading";

        presenter.showBusyIndicator(msg);

        verify(busyIndicatorView).showBusyIndicator(msg);
    }

    @Test
    public void hideBusyIndicatorTest() {
        presenter.hideBusyIndicator();

        verify(busyIndicatorView).hideBusyIndicator();
    }

    @Test
    public void isEmptyShouldBeTrueWhenIsNotSetupTest() {
        setPresenterPrivateField("setup", false);

        assertTrue(presenter.isEmpty());
    }

    @Test
    public void isEmptyWhenTrueTest() {
        setPresenterPrivateField("setup", true);
        doReturn(Collections.emptyList()).when(archetypeListPresenter).getObjectsList();

        assertTrue(presenter.isEmpty());
    }

    @Test
    public void resetTest() {
        setPresenterPrivateField("setup", true);

        presenter.reset();

        assertFalse(presenter.isSetup());
    }

    @Test
    public void isEmptyWhenFalseTest() {
        setPresenterPrivateField("setup", true);
        doReturn(Collections.singletonList(mock(ArchetypeItem.class))).when(archetypeListPresenter).getObjectsList();

        assertFalse(presenter.isEmpty());
    }

    @Test
    public void setupWhenIsNotRefreshTest() {
        doReturn(promises.resolve()).when(presenter).loadList(false);

        presenter.setup(false, () -> {
        });

        // A first-time setup initializes the view before loading.
        verify(view).init(presenter);
        verify(presenter).loadList(false);
    }

    @Test
    public void setupWhenIsRefreshTest() {
        doReturn(promises.resolve()).when(presenter).loadList(true);

        presenter.setup(true, () -> {
        });

        // A refresh re-loads the list but must not re-initialize the view.
        verify(view, never()).init(presenter);
        verify(presenter).loadList(true);
    }

    @Test
    public void setCurrentPageTest() {
        setPresenterPrivateField("totalPages", 10);
        doNothing().when(presenter).updateList();

        presenter.setCurrentPage(5);

        assertEquals(5, presenter.currentPage);
        verify(presenter).updateList();
    }

    @Test
    public void setCurrentPageOutRangeTest() {
        setPresenterPrivateField("currentPage", 10);
        setPresenterPrivateField("totalPages", 10);

        presenter.setCurrentPage(50);

        // Out-of-range requests leave the current page untouched.
        assertEquals(10, presenter.currentPage);
    }

    @Test
    public void goToPreviousPageTest() {
        setPresenterPrivateField("currentPage", 5);
        setPresenterPrivateField("totalPages", 10);
        doNothing().when(presenter).updateList();

        presenter.goToPreviousPage();

        assertEquals(4, presenter.currentPage);
        verify(presenter).updateList();
    }

    @Test
    public void goToPreviousPageDoNothingTest() {
        setPresenterPrivateField("currentPage", 1);
        setPresenterPrivateField("totalPages", 10);

        presenter.goToPreviousPage();

        // Already at the first page: no navigation, no view update.
        assertEquals(1, presenter.currentPage);
        verify(view, never()).setCurrentPage(anyInt());
        verify(presenter, never()).updateList();
    }

    @Test
    public void goToNextPageTest() {
        setPresenterPrivateField("currentPage", 1);
        setPresenterPrivateField("totalPages", 10);
        doNothing().when(presenter).updateList();

        presenter.goToNextPage();

        assertEquals(2, presenter.currentPage);
        verify(presenter).updateList();
    }

    @Test
    public void goToNextPageDoNothingTest() {
        setPresenterPrivateField("currentPage", 10);
        setPresenterPrivateField("totalPages", 10);

        presenter.goToNextPage();

        // Already at the last page: no navigation, no view update.
        assertEquals(10, presenter.currentPage);
        verify(view, never()).setCurrentPage(anyInt());
        verify(presenter, never()).updateList();
    }

    @Test
    public void goToFirstPageTest() {
        doNothing().when(presenter).updateList();

        presenter.goToFirstPage();

        assertEquals(1, presenter.currentPage);
    }

    @Test
    public void goToLastPageTest() {
        setPresenterPrivateField("totalPages", 10);
        doNothing().when(presenter).updateList();

        presenter.goToLastPage();

        assertEquals(10, presenter.currentPage);
    }

    @Test
    public void searchTest() {
        final String searchFilter = "keyword";
        doNothing().when(presenter).updateList();

        presenter.search(searchFilter);

        // Searching stores the filter and resets pagination to the first page.
        assertEquals(searchFilter, presenter.searchFilter);
        assertEquals(1, presenter.currentPage);
    }

    @Test
    public void addArchetypeWhenIsNotAllowedTest() {
        doReturn(false).when(presenter).canMakeChanges();

        presenter.addArchetype();

        verify(addArchetypeModalPresenter, never()).show();
    }

    @Test
    public void addArchetypeWhenIsAllowedTest() {
        doReturn(true).when(presenter).canMakeChanges();

        presenter.addArchetype();

        verify(addArchetypeModalPresenter).show();
    }

    @Test
    public void getIncludedWhenEmptyTest() {
        doReturn(Collections.emptyMap()).when(archetypePreferences).getArchetypeSelectionMap();

        final List<String> result = presenter.getIncluded();

        assertEquals(0, result.size());
    }

    @Test
    public void getIncludedWhenAllEnabledTest() {
        final Map<String, Boolean> selectionMap = new HashMap<>();
        selectionMap.put("archetype 1", true);
        selectionMap.put("archetype 2", true);
        selectionMap.put("archetype 3", true);
        doReturn(selectionMap).when(archetypePreferences).getArchetypeSelectionMap();

        final List<String> result = presenter.getIncluded();

        assertEquals(3, result.size());
    }

    @Test
    public void getIncludedWhenAllDisabledTest() {
        final Map<String, Boolean> selectionMap = new HashMap<>();
        selectionMap.put("archetype 1", false);
        selectionMap.put("archetype 2", false);
        selectionMap.put("archetype 3", false);
        doReturn(selectionMap).when(archetypePreferences).getArchetypeSelectionMap();

        final List<String> result = presenter.getIncluded();

        assertEquals(0, result.size());
    }

    @Test
    public void getIncludedMixedTest() {
        final Map<String, Boolean> selectionMap = new HashMap<>();
        selectionMap.put("archetype 1", true);
        selectionMap.put("archetype 2", false);
        selectionMap.put("archetype 3", true);
        doReturn(selectionMap).when(archetypePreferences).getArchetypeSelectionMap();

        final List<String> result = presenter.getIncluded();

        // Only the entries mapped to true count as included.
        assertEquals(2, result.size());
    }

    @Test
    public void isShowIncludeColumnWhenIsTrueTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder()
                .withIncludeColumn()
                .build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowIncludeColumn();

        assertTrue(result);
    }

    @Test
    public void isShowIncludeColumnWhenIsFalseTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder().build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowIncludeColumn();

        assertFalse(result);
    }

    @Test
    public void isShowStatusColumnWhenIsTrueTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder()
                .withStatusColumn()
                .build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowStatusColumn();

        assertTrue(result);
    }

    @Test
    public void isShowStatusColumnWhenIsFalseTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder().build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowStatusColumn();

        assertFalse(result);
    }

    @Test
    public void isShowDeleteActionWhenIsTrueTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder()
                .withDeleteAction()
                .build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowDeleteAction();

        assertTrue(result);
    }

    @Test
    public void isShowDeleteActionWhenIsFalseTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder().build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowDeleteAction();

        assertFalse(result);
    }

    @Test
    public void isShowValidateActionWhenIsTrueTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder()
                .withValidateAction()
                .build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowValidateAction();

        assertTrue(result);
    }

    @Test
    public void isShowValidateActionWhenIsFalseTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder().build();
        doReturn(config).when(presenter).getConfiguration();

        final boolean result = presenter.isShowValidateAction();

        assertFalse(result);
    }

    @Test
    public void setSelectedWhenKeyIsNotPresentTest() {
        final Map<String, Boolean> selectionMap = new HashMap<>();
        selectionMap.put("archetype 1", true);
        selectionMap.put("archetype 2", false);
        doReturn(selectionMap).when(archetypePreferences).getArchetypeSelectionMap();

        // createArchetypeItem() uses the alias "myArchetype", which is absent from the map.
        presenter.setSelected(createArchetypeItem(),
                              true);

        verify(view, never()).setSelectionCounter(anyString());
    }

    @Test
    public void setSelectedWhenKeyIsPresentTest() {
        final Runnable callback = () -> {
        };
        setPresenterPrivateField("onChangedCallback", callback);

        final Map<String, Boolean> selectionMap = new HashMap<>();
        selectionMap.put("myArchetype", true);
        doReturn(selectionMap).when(archetypePreferences).getArchetypeSelectionMap();

        presenter.setSelected(createArchetypeItem(),
                              true);

        verify(view).setSelectionCounter(Mockito.<String>any());
    }

    @Test
    public void updateSelectionCounterTest() {
        doReturn(Collections.nCopies(1, "archetype")).when(presenter).getIncluded();

        final Map<String, Boolean> selectionMap = new HashMap<>();
        selectionMap.put("archetype 1", true);
        selectionMap.put("archetype 2", false);
        doReturn(selectionMap).when(archetypePreferences).getArchetypeSelectionMap();

        presenter.updateSelectionCounter();

        verify(view).setSelectionCounter(Mockito.<String>any());
    }

    @Test
    public void loadPreferencesSuccessCallbackTest() {
        final PaginatedArchetypeList paginatedArchetypeList = mock(PaginatedArchetypeList.class);
        doNothing().when(presenter).finishLoadList(paginatedArchetypeList);

        presenter.loadPreferencesSuccessCallback(paginatedArchetypeList,
                                                 mock(Promise.PromiseExecutorCallbackFn.ResolveCallbackFn.class))
                .execute(archetypePreferences);

        verify(presenter).finishLoadList(paginatedArchetypeList);
    }

    @Test
    public void loadPreferencesErrorCallbackTest() {
        presenter.loadPreferencesErrorCallback(mock(Promise.PromiseExecutorCallbackFn.RejectCallbackFn.class))
                .execute(mock(Throwable.class));

        verify(busyIndicatorView).hideBusyIndicator();
    }

    @Test
    public void savePreferencesByStrategyInfoWhenAllowedTest() {
        doReturn(true).when(presenter).canMakeChanges();

        presenter.savePreferences(mock(PreferenceScopeResolutionStrategyInfo.class),
                                  true).catch_(i -> {
            Assert.fail("Promise should've been resolved!");
            return promises.resolve();
        });

        verify(archetypePreferences).save(any(PreferenceScopeResolutionStrategyInfo.class),
                                          any(),
                                          any());
    }

    @Test
    public void savePreferencesByStrategyInfoWhenNotAllowedTest() {
        doReturn(false).when(presenter).canMakeChanges();

        presenter.savePreferences(mock(PreferenceScopeResolutionStrategyInfo.class),
                                  true).catch_(i -> {
            Assert.fail("Promise should've been resolved!");
            return promises.resolve();
        });

        verify(archetypePreferences, never()).save(any(PreferenceScopeResolutionStrategyInfo.class),
                                                   any(),
                                                   any());
    }

    @Test
    public void savePreferencesByScopeWhenIsAllowedTest() {
        doReturn(true).when(presenter).canMakeChanges();

        presenter.savePreferences(mock(PreferenceScope.class),
                                  true).catch_(i -> {
            Assert.fail("Promise should've been resolved!");
            return promises.resolve();
        });

        verify(archetypePreferences).save(any(PreferenceScope.class),
                                          any(),
                                          any());
    }

    @Test
    public void savePreferencesByScopeWhenIsNotAllowedTest() {
        doReturn(false).when(presenter).canMakeChanges();

        presenter.savePreferences(mock(PreferenceScope.class),
                                  true).catch_(i -> {
            Assert.fail("Promise should've been resolved!");
            return promises.resolve();
        });

        verify(archetypePreferences, never()).save(any(PreferenceScope.class),
                                                   any(),
                                                   any());
    }

    @Test
    public void updateListTest() {
        doReturn(promises.resolve()).when(presenter).loadList(true);

        presenter.updateList();

        verify(presenter).loadList(true);
    }

    @Test
    public void finishLoadListTest() {
        final List<Archetype> archetypes = new ArrayList<>();
        archetypes.add(createArchetypeWithAlias("archetype 1"));
        archetypes.add(createArchetypeWithAlias("archetype 2"));
        archetypes.add(createArchetypeWithAlias("archetype 3"));

        final Map<String, Boolean> selectionMap = new HashMap<>();
        selectionMap.put("archetype 1", true);
        selectionMap.put("archetype 2", true);
        selectionMap.put("archetype 3", true);

        final PaginatedArchetypeList paginatedArchetypeList =
                new PaginatedArchetypeList(archetypes,
                                           0,
                                           10,
                                           3);
        doReturn(selectionMap).when(archetypePreferences).getArchetypeSelectionMap();
        doReturn("archetype 1").when(archetypePreferences).getDefaultSelection();

        presenter.finishLoadList(paginatedArchetypeList);

        verify(archetypeListPresenter).setup(any(),
                                             any(),
                                             any());
        verify(view).setSelectionCounter(Mockito.<String>any());
        verify(busyIndicatorView).hideBusyIndicator();
        verify(view).show(true);
    }

    @Test
    public void configureViewWhenEmptyTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder()
                .withAddAction()
                .withIncludeColumn()
                .withStatusColumn()
                .build();
        doReturn(config).when(presenter).getConfiguration();

        final PaginatedArchetypeList paginatedArchetypeList = new PaginatedArchetypeList(Collections.emptyList(),
                                                                                         0,
                                                                                         0,
                                                                                         0);

        presenter.configureView(paginatedArchetypeList);

        // No results and no search filter: show the empty state.
        verify(view).showAddAction(true);
        verify(view).showIncludeHeader(true);
        verify(view).showStatusHeader(true);
        verify(view).showSelectionCounter(true);
        verify(view).showNoResults(false);
        verify(view).showPagination(false);
        verify(view).showToolbar(false);
        verify(view).showEmpty(true);
    }

    @Test
    public void configureViewWhenSearchEmptyTest() {
        setPresenterPrivateField("searchFilter", "keyword");

        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder()
                .withAddAction()
                .withIncludeColumn()
                .withStatusColumn()
                .build();
        doReturn(config).when(presenter).getConfiguration();

        final PaginatedArchetypeList paginatedArchetypeList = new PaginatedArchetypeList(Collections.emptyList(),
                                                                                         0,
                                                                                         0,
                                                                                         0);

        presenter.configureView(paginatedArchetypeList);

        // No results but an active search filter: show "no results" instead of the empty state.
        verify(view).showAddAction(true);
        verify(view).showIncludeHeader(true);
        verify(view).showStatusHeader(true);
        verify(view).showSelectionCounter(true);
        verify(view).showNoResults(true);
        verify(view).showPagination(false);
        verify(view).showToolbar(true);
        verify(view).showEmpty(false);
    }

    @Test
    public void configureViewWhenPopulatedTest() {
        final ArchetypeTableConfiguration config = new ArchetypeTableConfiguration.Builder()
                .withAddAction()
                .withIncludeColumn()
                .withStatusColumn()
                .build();
        doReturn(config).when(presenter).getConfiguration();

        final PaginatedArchetypeList paginatedArchetypeList =
                new PaginatedArchetypeList(Collections.nCopies(10, mock(Archetype.class)),
                                           0,
                                           10,
                                           10);

        presenter.configureView(paginatedArchetypeList);

        verify(view).showAddAction(true);
        verify(view).showIncludeHeader(true);
        verify(view).showStatusHeader(true);
        verify(view).showSelectionCounter(true);
        verify(view).showNoResults(false);
        verify(view).showPagination(true);
        verify(view).showToolbar(true);
        verify(view).showEmpty(false);
    }

    @Test
    public void getPreferencesTest() {
        setPresenterPrivateField("archetypePreferences", archetypePreferences);
        assertEquals(archetypePreferences, presenter.getPreferences());
    }

    @Test
    public void getViewTest() {
        setPresenterPrivateField("view", view);
        assertEquals(view, presenter.getView());
    }

    @Test
    public void setupCountersWhenEmptyListTest() {
        setPresenterPrivateField("currentPage", 10);

        presenter.setupCounters(0);

        assertEquals(0, presenter.currentPage);
        assertEquals(0, presenter.totalPages);
        verify(view).setPageIndicator(Mockito.<String>any());
        verify(view).setTotalPages(Mockito.<String>any());
        verify(view).setCurrentPage(0);
        verify(view).enablePreviousButton(false);
        verify(view).enableNextButton(false);
        verify(view).enableFirstButton(false);
        verify(view).enableLastButton(false);
    }

    @Test
    public void setupCountersWhenPopulatedListTest() {
        setPresenterPrivateField("currentPage", 1);

        // 100 items across 10-item pages ==> 10 total pages.
        presenter.setupCounters(100);

        assertEquals(1, presenter.currentPage);
        assertEquals(10, presenter.totalPages);
        verify(view).setPageIndicator(Mockito.<String>any());
        verify(view).setTotalPages(Mockito.<String>any());
        verify(view).setCurrentPage(1);
        verify(view).enablePreviousButton(false);
        verify(view).enableNextButton(true);
        verify(view).enableFirstButton(false);
        verify(view).enableLastButton(true);
    }

    @Test
    public void setupCountersWhenInLastPageListTest() {
        setPresenterPrivateField("currentPage", 10);

        presenter.setupCounters(100);

        assertEquals(10, presenter.currentPage);
        assertEquals(10, presenter.totalPages);
        verify(view).enablePreviousButton(true);
        verify(view).enableNextButton(false);
        verify(view).enableFirstButton(true);
        verify(view).enableLastButton(false);
    }

    @Test
    public void setupCountersWhenInFirstPageListTest() {
        setPresenterPrivateField("currentPage", 1);

        presenter.setupCounters(100);

        assertEquals(1, presenter.currentPage);
        assertEquals(10, presenter.totalPages);
        verify(view).enablePreviousButton(false);
        verify(view).enableNextButton(true);
        verify(view).enableFirstButton(false);
        verify(view).enableLastButton(true);
    }

    @Test
    public void setupCountersWhenInMiddlePageListTest() {
        setPresenterPrivateField("currentPage", 5);

        presenter.setupCounters(100);

        assertEquals(5, presenter.currentPage);
        assertEquals(10, presenter.totalPages);
        verify(view).enablePreviousButton(true);
        verify(view).enableNextButton(true);
        verify(view).enableFirstButton(true);
        verify(view).enableLastButton(true);
    }

    // Builds an ArchetypeItem with the fixed alias "myArchetype", selected and included.
    private ArchetypeItem createArchetypeItem() {
        final Archetype archetype = createArchetypeWithAlias("myArchetype");
        return new ArchetypeItem(archetype,
                                 true,
                                 true);
    }

    // Builds a valid Archetype with the given alias and a mocked GAV.
    private Archetype createArchetypeWithAlias(final String alias) {
        return new Archetype(alias,
                             mock(GAV.class),
                             new Date(),
                             ArchetypeStatus.VALID);
    }

    /**
     * Writes a private field of the presenter via reflection; fails the test when the
     * field does not exist or cannot be written.
     */
    private void setPresenterPrivateField(final String fieldName, final Object value) {
        try {
            FieldUtils.writeField(AbstractArchetypeTablePresenter.class.getDeclaredField(fieldName), presenter, value, true);
        } catch (IllegalAccessException | NoSuchFieldException e) {
            Assert.fail();
        }
    }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source.dash;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import android.util.Pair;
import android.util.SparseIntArray;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.SeekParameters;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.CompositeSequenceableLoaderFactory;
import com.google.android.exoplayer2.source.EmptySampleStream;
import com.google.android.exoplayer2.source.MediaPeriod;
import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.source.SequenceableLoader;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.chunk.ChunkSampleStream;
import com.google.android.exoplayer2.source.chunk.ChunkSampleStream.EmbeddedSampleStream;
import com.google.android.exoplayer2.source.dash.PlayerEmsgHandler.PlayerEmsgCallback;
import com.google.android.exoplayer2.source.dash.PlayerEmsgHandler.PlayerTrackEmsgHandler;
import com.google.android.exoplayer2.source.dash.manifest.AdaptationSet;
import com.google.android.exoplayer2.source.dash.manifest.DashManifest;
import com.google.android.exoplayer2.source.dash.manifest.Descriptor;
import com.google.android.exoplayer2.source.dash.manifest.EventStream;
import com.google.android.exoplayer2.source.dash.manifest.Period;
import com.google.android.exoplayer2.source.dash.manifest.Representation;
import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;
import com.google.android.exoplayer2.upstream.LoaderErrorThrower;
import com.google.android.exoplayer2.upstream.TransferListener;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** A DASH {@link MediaPeriod}. */
/* package */ final class DashMediaPeriod
implements MediaPeriod,
SequenceableLoader.Callback<ChunkSampleStream<DashChunkSource>>,
ChunkSampleStream.ReleaseCallback<DashChunkSource> {
private static final Pattern CEA608_SERVICE_DESCRIPTOR_REGEX = Pattern.compile("CC([1-4])=(.+)");
/* package */ final int id;
private final DashChunkSource.Factory chunkSourceFactory;
private final @Nullable TransferListener transferListener;
private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;
private final long elapsedRealtimeOffsetMs;
private final LoaderErrorThrower manifestLoaderErrorThrower;
private final Allocator allocator;
private final TrackGroupArray trackGroups;
private final TrackGroupInfo[] trackGroupInfos;
private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory;
private final PlayerEmsgHandler playerEmsgHandler;
private final IdentityHashMap<ChunkSampleStream<DashChunkSource>, PlayerTrackEmsgHandler>
trackEmsgHandlerBySampleStream;
private final EventDispatcher eventDispatcher;
private @Nullable Callback callback;
private ChunkSampleStream<DashChunkSource>[] sampleStreams;
private EventSampleStream[] eventSampleStreams;
private SequenceableLoader compositeSequenceableLoader;
private DashManifest manifest;
private int periodIndex;
private List<EventStream> eventStreams;
private boolean notifiedReadingStarted;
/**
 * Creates a DASH media period for the period at {@code periodIndex} of {@code manifest}.
 *
 * @param id An identifier for this period, exposed via the public {@code id} field.
 * @param manifest The initial DASH manifest.
 * @param periodIndex The index of this period in the manifest.
 * @param chunkSourceFactory Factory used to create chunk sources for selected tracks.
 * @param transferListener Optional listener to be notified of data transfers; may be null.
 * @param loadErrorHandlingPolicy Policy that decides how load errors are handled.
 * @param eventDispatcher Dispatcher for media source events.
 * @param elapsedRealtimeOffsetMs Offset in milliseconds relating elapsed realtime to media
 *     time — NOTE(review): sign convention not visible here; confirm against callers.
 * @param manifestLoaderErrorThrower Throws pending manifest load errors on request.
 * @param allocator Allocator from which sample data buffers are obtained.
 * @param compositeSequenceableLoaderFactory Factory for the loader that composes all
 *     sample streams of this period.
 * @param playerEmsgCallback Callback passed to the {@link PlayerEmsgHandler}.
 */
public DashMediaPeriod(
    int id,
    DashManifest manifest,
    int periodIndex,
    DashChunkSource.Factory chunkSourceFactory,
    @Nullable TransferListener transferListener,
    LoadErrorHandlingPolicy loadErrorHandlingPolicy,
    EventDispatcher eventDispatcher,
    long elapsedRealtimeOffsetMs,
    LoaderErrorThrower manifestLoaderErrorThrower,
    Allocator allocator,
    CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory,
    PlayerEmsgCallback playerEmsgCallback) {
  this.id = id;
  this.manifest = manifest;
  this.periodIndex = periodIndex;
  this.chunkSourceFactory = chunkSourceFactory;
  this.transferListener = transferListener;
  this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
  this.eventDispatcher = eventDispatcher;
  this.elapsedRealtimeOffsetMs = elapsedRealtimeOffsetMs;
  this.manifestLoaderErrorThrower = manifestLoaderErrorThrower;
  this.allocator = allocator;
  this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory;
  playerEmsgHandler = new PlayerEmsgHandler(manifest, playerEmsgCallback, allocator);
  // Sample streams start empty; they are created later when tracks are selected.
  sampleStreams = newSampleStreamArray(0);
  eventSampleStreams = new EventSampleStream[0];
  trackEmsgHandlerBySampleStream = new IdentityHashMap<>();
  compositeSequenceableLoader =
      compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(sampleStreams);
  // Track groups are derived up front from the period's adaptation sets and event streams.
  Period period = manifest.getPeriod(periodIndex);
  eventStreams = period.eventStreams;
  Pair<TrackGroupArray, TrackGroupInfo[]> result = buildTrackGroups(period.adaptationSets,
      eventStreams);
  trackGroups = result.first;
  trackGroupInfos = result.second;
  eventDispatcher.mediaPeriodCreated();
}
/**
 * Updates the {@link DashManifest} and the index of this period in the manifest, propagating
 * the update to active chunk sources and re-binding event sample streams.
 *
 * @param manifest The updated manifest.
 * @param periodIndex The new index of this period in the updated manifest.
 */
public void updateManifest(DashManifest manifest, int periodIndex) {
  this.manifest = manifest;
  this.periodIndex = periodIndex;
  playerEmsgHandler.updateManifest(manifest);
  // NOTE(review): sampleStreams is initialized to an empty array in the constructor, so this
  // null check looks defensive rather than required — confirm before relying on it.
  if (sampleStreams != null) {
    for (ChunkSampleStream<DashChunkSource> sampleStream : sampleStreams) {
      // Each active chunk source must see the new manifest and period index.
      sampleStream.getChunkSource().updateManifest(manifest, periodIndex);
    }
    callback.onContinueLoadingRequested(this);
  }
  eventStreams = manifest.getPeriod(periodIndex).eventStreams;
  // Re-bind each existing event sample stream to its updated event stream, matching by id.
  for (EventSampleStream eventSampleStream : eventSampleStreams) {
    for (EventStream eventStream : eventStreams) {
      if (eventStream.id().equals(eventSampleStream.eventStreamId())) {
        int lastPeriodIndex = manifest.getPeriodCount() - 1;
        // New events may still be appended only while the manifest is dynamic and this is
        // the last period of the manifest.
        eventSampleStream.updateEventStream(
            eventStream,
            /* eventStreamAppendable= */ manifest.dynamic && periodIndex == lastPeriodIndex);
        break;
      }
    }
  }
}
/** Releases the period and all of its sample streams. */
public void release() {
  playerEmsgHandler.release();
  for (ChunkSampleStream<DashChunkSource> sampleStream : sampleStreams) {
    // this is passed as the ReleaseCallback; see onSampleStreamReleased.
    sampleStream.release(this);
  }
  callback = null;
  eventDispatcher.mediaPeriodReleased();
}
// ChunkSampleStream.ReleaseCallback implementation.
/** Releases the stream's associated emsg handler once the stream itself has been released. */
@Override
public synchronized void onSampleStreamReleased(ChunkSampleStream<DashChunkSource> stream) {
  // Synchronized because the map is also written under a lock in buildSampleStream.
  PlayerTrackEmsgHandler trackEmsgHandler = trackEmsgHandlerBySampleStream.remove(stream);
  if (trackEmsgHandler != null) {
    trackEmsgHandler.release();
  }
}
// MediaPeriod implementation.
@Override
public void prepare(Callback callback, long positionUs) {
  this.callback = callback;
  // Preparation is immediate: track groups were already built in the constructor.
  callback.onPrepared(this);
}
@Override
public void maybeThrowPrepareError() throws IOException {
  // Surface any fatal manifest load error, since playback depends on the manifest.
  manifestLoaderErrorThrower.maybeThrowError();
}
@Override
public TrackGroupArray getTrackGroups() {
  // Built once in the constructor from the period's adaptation sets and event streams.
  return trackGroups;
}
/**
 * Maps the given track selections to {@link StreamKey}s identifying the selected
 * representations within this period. Only primary track groups produce keys; embedded and
 * manifest-event tracks are skipped.
 */
@Override
public List<StreamKey> getStreamKeys(List<TrackSelection> trackSelections) {
  List<AdaptationSet> manifestAdaptationSets = manifest.getPeriod(periodIndex).adaptationSets;
  List<StreamKey> streamKeys = new ArrayList<>();
  for (TrackSelection trackSelection : trackSelections) {
    int trackGroupIndex = trackGroups.indexOf(trackSelection.getTrackGroup());
    TrackGroupInfo trackGroupInfo = trackGroupInfos[trackGroupIndex];
    if (trackGroupInfo.trackGroupCategory != TrackGroupInfo.CATEGORY_PRIMARY) {
      // Ignore non-primary tracks.
      continue;
    }
    // A primary group concatenates the representations of several adaptation sets
    // (see buildPrimaryAndEmbeddedTrackGroupInfos), so track indices within the group
    // are offsets into that concatenation.
    int[] adaptationSetIndices = trackGroupInfo.adaptationSetIndices;
    int[] trackIndices = new int[trackSelection.length()];
    for (int i = 0; i < trackSelection.length(); i++) {
      trackIndices[i] = trackSelection.getIndexInTrackGroup(i);
    }
    // Sort so a single forward sweep can attribute each index to its adaptation set.
    Arrays.sort(trackIndices);
    int currentAdaptationSetIndex = 0;
    int totalTracksInPreviousAdaptationSets = 0;
    int tracksInCurrentAdaptationSet =
        manifestAdaptationSets.get(adaptationSetIndices[0]).representations.size();
    for (int i = 0; i < trackIndices.length; i++) {
      // Advance until the current adaptation set contains this track index.
      while (trackIndices[i]
          >= totalTracksInPreviousAdaptationSets + tracksInCurrentAdaptationSet) {
        currentAdaptationSetIndex++;
        totalTracksInPreviousAdaptationSets += tracksInCurrentAdaptationSet;
        tracksInCurrentAdaptationSet =
            manifestAdaptationSets
                .get(adaptationSetIndices[currentAdaptationSetIndex])
                .representations
                .size();
      }
      streamKeys.add(
          new StreamKey(
              periodIndex,
              adaptationSetIndices[currentAdaptationSetIndex],
              trackIndices[i] - totalTracksInPreviousAdaptationSets));
    }
  }
  return streamKeys;
}
/**
 * Applies a new track selection: releases disabled streams, cleans up orphaned embedded
 * streams, creates streams for new selections, then rebuilds the flat stream arrays and the
 * composite loader from the result.
 */
@Override
public long selectTracks(TrackSelection[] selections, boolean[] mayRetainStreamFlags,
    SampleStream[] streams, boolean[] streamResetFlags, long positionUs) {
  int[] streamIndexToTrackGroupIndex = getStreamIndexToTrackGroupIndex(selections);
  releaseDisabledStreams(selections, mayRetainStreamFlags, streams);
  releaseOrphanEmbeddedStreams(selections, streams, streamIndexToTrackGroupIndex);
  selectNewStreams(
      selections, streams, streamResetFlags, positionUs, streamIndexToTrackGroupIndex);
  // Rebuild the flat stream lists from the (possibly updated) streams array.
  ArrayList<ChunkSampleStream<DashChunkSource>> sampleStreamList = new ArrayList<>();
  ArrayList<EventSampleStream> eventSampleStreamList = new ArrayList<>();
  for (SampleStream sampleStream : streams) {
    if (sampleStream instanceof ChunkSampleStream) {
      @SuppressWarnings("unchecked")
      ChunkSampleStream<DashChunkSource> stream =
          (ChunkSampleStream<DashChunkSource>) sampleStream;
      sampleStreamList.add(stream);
    } else if (sampleStream instanceof EventSampleStream) {
      eventSampleStreamList.add((EventSampleStream) sampleStream);
    }
  }
  sampleStreams = newSampleStreamArray(sampleStreamList.size());
  sampleStreamList.toArray(sampleStreams);
  eventSampleStreams = new EventSampleStream[eventSampleStreamList.size()];
  eventSampleStreamList.toArray(eventSampleStreams);
  // The composite loader spans only the primary (chunk) sample streams.
  compositeSequenceableLoader =
      compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(sampleStreams);
  return positionUs;
}
@Override
public void discardBuffer(long positionUs, boolean toKeyframe) {
  // Forward the discard request to every primary sample stream.
  for (int i = 0; i < sampleStreams.length; i++) {
    sampleStreams[i].discardBuffer(positionUs, toKeyframe);
  }
}
@Override
public void reevaluateBuffer(long positionUs) {
  // Delegate to the composite loader, which spans all primary sample streams.
  compositeSequenceableLoader.reevaluateBuffer(positionUs);
}
@Override
public boolean continueLoading(long positionUs) {
  // Delegated; the composite loader decides which stream should load next.
  return compositeSequenceableLoader.continueLoading(positionUs);
}
@Override
public long getNextLoadPositionUs() {
  // Delegated to the composite loader over all primary sample streams.
  return compositeSequenceableLoader.getNextLoadPositionUs();
}
@Override
public long readDiscontinuity() {
  // The first call doubles as the "reading started" signal; notify exactly once.
  if (!notifiedReadingStarted) {
    eventDispatcher.readingStarted();
    notifiedReadingStarted = true;
  }
  // This period never reports an initial discontinuity.
  return C.TIME_UNSET;
}
@Override
public long getBufferedPositionUs() {
  // Delegated to the composite loader over all primary sample streams.
  return compositeSequenceableLoader.getBufferedPositionUs();
}
@Override
public long seekToUs(long positionUs) {
  // Seek every primary stream, then every manifest-event stream, to the requested position.
  for (int i = 0; i < sampleStreams.length; i++) {
    sampleStreams[i].seekToUs(positionUs);
  }
  for (int i = 0; i < eventSampleStreams.length; i++) {
    eventSampleStreams[i].seekToUs(positionUs);
  }
  return positionUs;
}
@Override
public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) {
  // Only the video stream constrains seek adjustment; use the first video stream found.
  for (ChunkSampleStream<DashChunkSource> sampleStream : sampleStreams) {
    if (sampleStream.primaryTrackType == C.TRACK_TYPE_VIDEO) {
      return sampleStream.getAdjustedSeekPositionUs(positionUs, seekParameters);
    }
  }
  // No video stream: the requested position can be used as-is.
  return positionUs;
}
// SequenceableLoader.Callback implementation.
@Override
public void onContinueLoadingRequested(ChunkSampleStream<DashChunkSource> sampleStream) {
  // Bubble the request from a child stream up to this period's own callback.
  callback.onContinueLoadingRequested(this);
}
// Internal methods.
/** Maps each selection index to its track group index, or {@link C#INDEX_UNSET} if unselected. */
private int[] getStreamIndexToTrackGroupIndex(TrackSelection[] selections) {
  int[] result = new int[selections.length];
  for (int i = 0; i < selections.length; i++) {
    TrackSelection selection = selections[i];
    result[i] =
        selection == null ? C.INDEX_UNSET : trackGroups.indexOf(selection.getTrackGroup());
  }
  return result;
}
/** Releases streams whose selection was removed or that may not be retained, nulling them out. */
private void releaseDisabledStreams(
    TrackSelection[] selections, boolean[] mayRetainStreamFlags, SampleStream[] streams) {
  for (int i = 0; i < selections.length; i++) {
    if (selections[i] == null || !mayRetainStreamFlags[i]) {
      if (streams[i] instanceof ChunkSampleStream) {
        @SuppressWarnings("unchecked")
        ChunkSampleStream<DashChunkSource> stream =
            (ChunkSampleStream<DashChunkSource>) streams[i];
        stream.release(this);
      } else if (streams[i] instanceof EmbeddedSampleStream) {
        ((EmbeddedSampleStream) streams[i]).release();
      }
      streams[i] = null;
    }
  }
}
/**
 * Releases embedded/empty streams whose association with a primary stream is no longer valid
 * after the current selection.
 */
private void releaseOrphanEmbeddedStreams(
    TrackSelection[] selections, SampleStream[] streams, int[] streamIndexToTrackGroupIndex) {
  for (int i = 0; i < selections.length; i++) {
    if (streams[i] instanceof EmptySampleStream || streams[i] instanceof EmbeddedSampleStream) {
      // We need to release an embedded stream if the corresponding primary stream is released.
      int primaryStreamIndex = getPrimaryStreamIndex(i, streamIndexToTrackGroupIndex);
      boolean mayRetainStream;
      if (primaryStreamIndex == C.INDEX_UNSET) {
        // If the corresponding primary stream is not selected, we may retain an existing
        // EmptySampleStream.
        mayRetainStream = streams[i] instanceof EmptySampleStream;
      } else {
        // If the corresponding primary stream is selected, we may retain the embedded stream if
        // the stream's parent still matches.
        mayRetainStream =
            (streams[i] instanceof EmbeddedSampleStream)
                && ((EmbeddedSampleStream) streams[i]).parent == streams[primaryStreamIndex];
      }
      if (!mayRetainStream) {
        if (streams[i] instanceof EmbeddedSampleStream) {
          ((EmbeddedSampleStream) streams[i]).release();
        }
        streams[i] = null;
      }
    }
  }
}
/**
 * Creates sample streams for selections that do not yet have one. Primary and manifest-event
 * streams are created in a first pass; embedded streams are derived from their primary stream
 * in a second pass.
 */
private void selectNewStreams(
    TrackSelection[] selections,
    SampleStream[] streams,
    boolean[] streamResetFlags,
    long positionUs,
    int[] streamIndexToTrackGroupIndex) {
  // Create newly selected primary and event streams.
  for (int i = 0; i < selections.length; i++) {
    if (streams[i] == null && selections[i] != null) {
      streamResetFlags[i] = true;
      int trackGroupIndex = streamIndexToTrackGroupIndex[i];
      TrackGroupInfo trackGroupInfo = trackGroupInfos[trackGroupIndex];
      if (trackGroupInfo.trackGroupCategory == TrackGroupInfo.CATEGORY_PRIMARY) {
        streams[i] = buildSampleStream(trackGroupInfo, selections[i], positionUs);
      } else if (trackGroupInfo.trackGroupCategory == TrackGroupInfo.CATEGORY_MANIFEST_EVENTS) {
        EventStream eventStream = eventStreams.get(trackGroupInfo.eventStreamGroupIndex);
        Format format = selections[i].getTrackGroup().getFormat(0);
        streams[i] = new EventSampleStream(eventStream, format, manifest.dynamic);
      }
    }
  }
  // Create newly selected embedded streams from the corresponding primary stream. Note that this
  // second pass is needed because the primary stream may not have been created yet in a first
  // pass if the index of the primary stream is greater than the index of the embedded stream.
  for (int i = 0; i < selections.length; i++) {
    if (streams[i] == null && selections[i] != null) {
      int trackGroupIndex = streamIndexToTrackGroupIndex[i];
      TrackGroupInfo trackGroupInfo = trackGroupInfos[trackGroupIndex];
      if (trackGroupInfo.trackGroupCategory == TrackGroupInfo.CATEGORY_EMBEDDED) {
        int primaryStreamIndex = getPrimaryStreamIndex(i, streamIndexToTrackGroupIndex);
        if (primaryStreamIndex == C.INDEX_UNSET) {
          // If an embedded track is selected without the corresponding primary track, create an
          // empty sample stream instead.
          streams[i] = new EmptySampleStream();
        } else {
          streams[i] =
              ((ChunkSampleStream) streams[primaryStreamIndex])
                  .selectEmbeddedTrack(positionUs, trackGroupInfo.trackType);
        }
      }
    }
  }
}
/**
 * Returns the index within {@code streamIndexToTrackGroupIndex} of the primary stream that the
 * given embedded stream belongs to, or {@link C#INDEX_UNSET} if there is none.
 */
private int getPrimaryStreamIndex(int embeddedStreamIndex, int[] streamIndexToTrackGroupIndex) {
  int embeddedGroupIndex = streamIndexToTrackGroupIndex[embeddedStreamIndex];
  if (embeddedGroupIndex == C.INDEX_UNSET) {
    return C.INDEX_UNSET;
  }
  int wantedGroupIndex = trackGroupInfos[embeddedGroupIndex].primaryTrackGroupIndex;
  for (int streamIndex = 0; streamIndex < streamIndexToTrackGroupIndex.length; streamIndex++) {
    int groupIndex = streamIndexToTrackGroupIndex[streamIndex];
    if (groupIndex == wantedGroupIndex
        && trackGroupInfos[groupIndex].trackGroupCategory
            == TrackGroupInfo.CATEGORY_PRIMARY) {
      return streamIndex;
    }
  }
  return C.INDEX_UNSET;
}
/**
 * Builds the track groups for a period: one group per set of switchable adaptation sets, plus
 * embedded emsg/CEA-608 groups and one group per manifest event stream. Returns the groups
 * together with the per-group metadata needed to map selections back to the manifest.
 */
private static Pair<TrackGroupArray, TrackGroupInfo[]> buildTrackGroups(
    List<AdaptationSet> adaptationSets, List<EventStream> eventStreams) {
  int[][] groupedAdaptationSetIndices = getGroupedAdaptationSetIndices(adaptationSets);
  int primaryGroupCount = groupedAdaptationSetIndices.length;
  // Output arrays filled by identifyEmbeddedTracks.
  boolean[] primaryGroupHasEventMessageTrackFlags = new boolean[primaryGroupCount];
  Format[][] primaryGroupCea608TrackFormats = new Format[primaryGroupCount][];
  int totalEmbeddedTrackGroupCount =
      identifyEmbeddedTracks(
          primaryGroupCount,
          adaptationSets,
          groupedAdaptationSetIndices,
          primaryGroupHasEventMessageTrackFlags,
          primaryGroupCea608TrackFormats);
  int totalGroupCount = primaryGroupCount + totalEmbeddedTrackGroupCount + eventStreams.size();
  TrackGroup[] trackGroups = new TrackGroup[totalGroupCount];
  TrackGroupInfo[] trackGroupInfos = new TrackGroupInfo[totalGroupCount];
  int trackGroupCount =
      buildPrimaryAndEmbeddedTrackGroupInfos(
          adaptationSets,
          groupedAdaptationSetIndices,
          primaryGroupCount,
          primaryGroupHasEventMessageTrackFlags,
          primaryGroupCea608TrackFormats,
          trackGroups,
          trackGroupInfos);
  // Manifest event tracks occupy the remaining slots.
  buildManifestEventTrackGroupInfos(eventStreams, trackGroups, trackGroupInfos, trackGroupCount);
  return Pair.create(new TrackGroupArray(trackGroups), trackGroupInfos);
}
/**
 * Partitions the period's adaptation sets into groups of sets declaring
 * adaptation-set-switching compatibility with each other, returning the adaptation set indices
 * of each group.
 */
private static int[][] getGroupedAdaptationSetIndices(List<AdaptationSet> adaptationSets) {
  int adaptationSetCount = adaptationSets.size();
  SparseIntArray idToIndexMap = new SparseIntArray(adaptationSetCount);
  for (int i = 0; i < adaptationSetCount; i++) {
    idToIndexMap.put(adaptationSets.get(i).id, i);
  }
  int[][] groupedAdaptationSetIndices = new int[adaptationSetCount][];
  boolean[] adaptationSetUsedFlags = new boolean[adaptationSetCount];
  int groupCount = 0;
  for (int i = 0; i < adaptationSetCount; i++) {
    if (adaptationSetUsedFlags[i]) {
      // This adaptation set has already been included in a group.
      continue;
    }
    adaptationSetUsedFlags[i] = true;
    Descriptor adaptationSetSwitchingProperty = findAdaptationSetSwitchingProperty(
        adaptationSets.get(i).supplementalProperties);
    if (adaptationSetSwitchingProperty == null) {
      // No switching declared: the adaptation set forms a group on its own.
      groupedAdaptationSetIndices[groupCount++] = new int[] {i};
    } else {
      // The property value lists, comma separated, the ids of the other adaptation sets
      // belonging to the same group.
      String[] extraAdaptationSetIds = Util.split(adaptationSetSwitchingProperty.value, ",");
      int[] adaptationSetIndices = new int[1 + extraAdaptationSetIds.length];
      adaptationSetIndices[0] = i;
      int outputIndex = 1;
      for (int j = 0; j < extraAdaptationSetIds.length; j++) {
        int extraIndex =
            idToIndexMap.get(
                Integer.parseInt(extraAdaptationSetIds[j]), /* valueIfKeyNotFound= */ -1);
        if (extraIndex != -1) {
          // Ids referencing adaptation sets absent from this period are ignored.
          adaptationSetUsedFlags[extraIndex] = true;
          adaptationSetIndices[outputIndex] = extraIndex;
          outputIndex++;
        }
      }
      if (outputIndex < adaptationSetIndices.length) {
        adaptationSetIndices = Arrays.copyOf(adaptationSetIndices, outputIndex);
      }
      groupedAdaptationSetIndices[groupCount++] = adaptationSetIndices;
    }
  }
  // Trim if merging reduced the number of groups below the number of adaptation sets.
  return groupCount < adaptationSetCount
      ? Arrays.copyOf(groupedAdaptationSetIndices, groupCount) : groupedAdaptationSetIndices;
}
/**
 * Iterates through list of primary track groups and identifies embedded tracks.
 *
 * @param primaryGroupCount The number of primary track groups.
 * @param adaptationSets The list of {@link AdaptationSet} of the current DASH period.
 * @param groupedAdaptationSetIndices The indices of {@link AdaptationSet} that belongs to the
 *     same primary group, grouped in primary track groups order.
 * @param primaryGroupHasEventMessageTrackFlags An output array to be filled with flags indicating
 *     whether each of the primary track groups contains an embedded event message track.
 * @param primaryGroupCea608TrackFormats An output array to be filled with track formats for
 *     CEA-608 tracks embedded in each of the primary track groups.
 * @return Total number of embedded track groups.
 */
private static int identifyEmbeddedTracks(
    int primaryGroupCount,
    List<AdaptationSet> adaptationSets,
    int[][] groupedAdaptationSetIndices,
    boolean[] primaryGroupHasEventMessageTrackFlags,
    Format[][] primaryGroupCea608TrackFormats) {
  int numEmbeddedTrackGroups = 0;
  for (int i = 0; i < primaryGroupCount; i++) {
    // Each primary group contributes at most one embedded emsg group...
    if (hasEventMessageTrack(adaptationSets, groupedAdaptationSetIndices[i])) {
      primaryGroupHasEventMessageTrackFlags[i] = true;
      numEmbeddedTrackGroups++;
    }
    // ...and at most one embedded CEA-608 group.
    primaryGroupCea608TrackFormats[i] =
        getCea608TrackFormats(adaptationSets, groupedAdaptationSetIndices[i]);
    if (primaryGroupCea608TrackFormats[i].length != 0) {
      numEmbeddedTrackGroups++;
    }
  }
  return numEmbeddedTrackGroups;
}
/**
 * Fills {@code trackGroups} and {@code trackGroupInfos} with the primary track groups and their
 * embedded emsg/CEA-608 groups, and returns the number of entries written.
 */
private static int buildPrimaryAndEmbeddedTrackGroupInfos(
    List<AdaptationSet> adaptationSets,
    int[][] groupedAdaptationSetIndices,
    int primaryGroupCount,
    boolean[] primaryGroupHasEventMessageTrackFlags,
    Format[][] primaryGroupCea608TrackFormats,
    TrackGroup[] trackGroups,
    TrackGroupInfo[] trackGroupInfos) {
  int trackGroupCount = 0;
  for (int i = 0; i < primaryGroupCount; i++) {
    int[] adaptationSetIndices = groupedAdaptationSetIndices[i];
    // A primary group concatenates the representations of all its adaptation sets.
    List<Representation> representations = new ArrayList<>();
    for (int adaptationSetIndex : adaptationSetIndices) {
      representations.addAll(adaptationSets.get(adaptationSetIndex).representations);
    }
    Format[] formats = new Format[representations.size()];
    for (int j = 0; j < formats.length; j++) {
      formats[j] = representations.get(j).format;
    }
    AdaptationSet firstAdaptationSet = adaptationSets.get(adaptationSetIndices[0]);
    // Reserve consecutive indices: primary group first, then its embedded groups (if any).
    int primaryTrackGroupIndex = trackGroupCount++;
    int eventMessageTrackGroupIndex =
        primaryGroupHasEventMessageTrackFlags[i] ? trackGroupCount++ : C.INDEX_UNSET;
    int cea608TrackGroupIndex =
        primaryGroupCea608TrackFormats[i].length != 0 ? trackGroupCount++ : C.INDEX_UNSET;
    trackGroups[primaryTrackGroupIndex] = new TrackGroup(formats);
    trackGroupInfos[primaryTrackGroupIndex] =
        TrackGroupInfo.primaryTrack(
            firstAdaptationSet.type,
            adaptationSetIndices,
            primaryTrackGroupIndex,
            eventMessageTrackGroupIndex,
            cea608TrackGroupIndex);
    if (eventMessageTrackGroupIndex != C.INDEX_UNSET) {
      Format format = Format.createSampleFormat(firstAdaptationSet.id + ":emsg",
          MimeTypes.APPLICATION_EMSG, null, Format.NO_VALUE, null);
      trackGroups[eventMessageTrackGroupIndex] = new TrackGroup(format);
      trackGroupInfos[eventMessageTrackGroupIndex] =
          TrackGroupInfo.embeddedEmsgTrack(adaptationSetIndices, primaryTrackGroupIndex);
    }
    if (cea608TrackGroupIndex != C.INDEX_UNSET) {
      trackGroups[cea608TrackGroupIndex] = new TrackGroup(primaryGroupCea608TrackFormats[i]);
      trackGroupInfos[cea608TrackGroupIndex] =
          TrackGroupInfo.embeddedCea608Track(adaptationSetIndices, primaryTrackGroupIndex);
    }
  }
  return trackGroupCount;
}
/** Appends one track group per manifest {@link EventStream}, starting at the given offset. */
private static void buildManifestEventTrackGroupInfos(List<EventStream> eventStreams,
    TrackGroup[] trackGroups, TrackGroupInfo[] trackGroupInfos, int existingTrackGroupCount) {
  int groupIndex = existingTrackGroupCount;
  for (int eventStreamIndex = 0; eventStreamIndex < eventStreams.size(); eventStreamIndex++) {
    EventStream eventStream = eventStreams.get(eventStreamIndex);
    Format format =
        Format.createSampleFormat(
            eventStream.id(), MimeTypes.APPLICATION_EMSG, null, Format.NO_VALUE, null);
    trackGroups[groupIndex] = new TrackGroup(format);
    trackGroupInfos[groupIndex] = TrackGroupInfo.mpdEventTrack(eventStreamIndex);
    groupIndex++;
  }
}
/**
 * Builds a {@link ChunkSampleStream} for the given primary track group, wiring up any embedded
 * emsg and CEA-608 tracks the group carries.
 */
private ChunkSampleStream<DashChunkSource> buildSampleStream(TrackGroupInfo trackGroupInfo,
    TrackSelection selection, long positionUs) {
  int embeddedTrackCount = 0;
  boolean enableEventMessageTrack =
      trackGroupInfo.embeddedEventMessageTrackGroupIndex != C.INDEX_UNSET;
  TrackGroup embeddedEventMessageTrackGroup = null;
  if (enableEventMessageTrack) {
    embeddedEventMessageTrackGroup =
        trackGroups.get(trackGroupInfo.embeddedEventMessageTrackGroupIndex);
    embeddedTrackCount++;
  }
  boolean enableCea608Tracks = trackGroupInfo.embeddedCea608TrackGroupIndex != C.INDEX_UNSET;
  TrackGroup embeddedCea608TrackGroup = null;
  if (enableCea608Tracks) {
    embeddedCea608TrackGroup = trackGroups.get(trackGroupInfo.embeddedCea608TrackGroupIndex);
    embeddedTrackCount += embeddedCea608TrackGroup.length;
  }
  // Collect formats and types of all embedded tracks: emsg first, then CEA-608.
  Format[] embeddedTrackFormats = new Format[embeddedTrackCount];
  int[] embeddedTrackTypes = new int[embeddedTrackCount];
  embeddedTrackCount = 0;
  if (enableEventMessageTrack) {
    embeddedTrackFormats[embeddedTrackCount] = embeddedEventMessageTrackGroup.getFormat(0);
    embeddedTrackTypes[embeddedTrackCount] = C.TRACK_TYPE_METADATA;
    embeddedTrackCount++;
  }
  List<Format> embeddedCea608TrackFormats = new ArrayList<>();
  if (enableCea608Tracks) {
    for (int i = 0; i < embeddedCea608TrackGroup.length; i++) {
      embeddedTrackFormats[embeddedTrackCount] = embeddedCea608TrackGroup.getFormat(i);
      embeddedTrackTypes[embeddedTrackCount] = C.TRACK_TYPE_TEXT;
      embeddedCea608TrackFormats.add(embeddedTrackFormats[embeddedTrackCount]);
      embeddedTrackCount++;
    }
  }
  // Per-track emsg handling is only created for dynamic manifests with an emsg track.
  PlayerTrackEmsgHandler trackPlayerEmsgHandler =
      manifest.dynamic && enableEventMessageTrack
          ? playerEmsgHandler.newPlayerTrackEmsgHandler()
          : null;
  DashChunkSource chunkSource =
      chunkSourceFactory.createDashChunkSource(
          manifestLoaderErrorThrower,
          manifest,
          periodIndex,
          trackGroupInfo.adaptationSetIndices,
          selection,
          trackGroupInfo.trackType,
          elapsedRealtimeOffsetMs,
          enableEventMessageTrack,
          embeddedCea608TrackFormats,
          trackPlayerEmsgHandler,
          transferListener);
  ChunkSampleStream<DashChunkSource> stream =
      new ChunkSampleStream<>(
          trackGroupInfo.trackType,
          embeddedTrackTypes,
          embeddedTrackFormats,
          chunkSource,
          this,
          allocator,
          positionUs,
          loadErrorHandlingPolicy,
          eventDispatcher);
  synchronized (this) {
    // The map is also accessed on the loading thread so synchronize access.
    trackEmsgHandlerBySampleStream.put(stream, trackPlayerEmsgHandler);
  }
  return stream;
}
/**
 * Returns the adaptation-set-switching supplemental property from {@code descriptors}, or null
 * if none is present.
 */
private static Descriptor findAdaptationSetSwitchingProperty(List<Descriptor> descriptors) {
  for (Descriptor descriptor : descriptors) {
    if ("urn:mpeg:dash:adaptation-set-switching:2016".equals(descriptor.schemeIdUri)) {
      return descriptor;
    }
  }
  return null;
}
/**
 * Returns whether any representation in the given adaptation sets declares an in-band event
 * stream.
 */
private static boolean hasEventMessageTrack(List<AdaptationSet> adaptationSets,
    int[] adaptationSetIndices) {
  for (int adaptationSetIndex : adaptationSetIndices) {
    List<Representation> representations =
        adaptationSets.get(adaptationSetIndex).representations;
    for (Representation representation : representations) {
      if (!representation.inbandEventStreams.isEmpty()) {
        return true;
      }
    }
  }
  return false;
}
/**
 * Returns the formats of the CEA-608 caption tracks declared by the accessibility descriptors
 * of the given adaptation sets, or an empty array if none are declared.
 *
 * <p>Only the first matching SCTE-214 CEA-608 descriptor is used. If the descriptor carries no
 * service information, or any service entry cannot be parsed, a single track with unknown
 * language/channel is assumed.
 */
private static Format[] getCea608TrackFormats(
    List<AdaptationSet> adaptationSets, int[] adaptationSetIndices) {
  for (int i : adaptationSetIndices) {
    AdaptationSet adaptationSet = adaptationSets.get(i);
    // Reuse the adaptation set fetched above rather than performing a redundant second lookup.
    List<Descriptor> descriptors = adaptationSet.accessibilityDescriptors;
    for (int j = 0; j < descriptors.size(); j++) {
      Descriptor descriptor = descriptors.get(j);
      if ("urn:scte:dash:cc:cea-608:2015".equals(descriptor.schemeIdUri)) {
        String value = descriptor.value;
        if (value == null) {
          // There are embedded CEA-608 tracks, but service information is not declared.
          return new Format[] {buildCea608TrackFormat(adaptationSet.id)};
        }
        String[] services = Util.split(value, ";");
        Format[] formats = new Format[services.length];
        for (int k = 0; k < services.length; k++) {
          Matcher matcher = CEA608_SERVICE_DESCRIPTOR_REGEX.matcher(services[k]);
          if (!matcher.matches()) {
            // If we can't parse service information for all services, assume a single track.
            return new Format[] {buildCea608TrackFormat(adaptationSet.id)};
          }
          formats[k] =
              buildCea608TrackFormat(
                  adaptationSet.id,
                  /* language= */ matcher.group(2),
                  /* accessibilityChannel= */ Integer.parseInt(matcher.group(1)));
        }
        return formats;
      }
    }
  }
  return new Format[0];
}
/** Builds a CEA-608 track format with no declared language or accessibility channel. */
private static Format buildCea608TrackFormat(int adaptationSetId) {
  return buildCea608TrackFormat(
      adaptationSetId, /* language= */ null, /* accessibilityChannel= */ Format.NO_VALUE);
}
/**
 * Builds a CEA-608 text track format.
 *
 * @param adaptationSetId The id of the adaptation set carrying the captions.
 * @param language The caption language, or null if not declared.
 * @param accessibilityChannel The CEA-608 channel, or {@link Format#NO_VALUE} if not declared.
 */
private static Format buildCea608TrackFormat(
    int adaptationSetId, String language, int accessibilityChannel) {
  return Format.createTextSampleFormat(
      // The channel is appended to the id to keep ids unique across channels.
      adaptationSetId
          + ":cea608"
          + (accessibilityChannel != Format.NO_VALUE ? ":" + accessibilityChannel : ""),
      MimeTypes.APPLICATION_CEA608,
      /* codecs= */ null,
      /* bitrate= */ Format.NO_VALUE,
      /* selectionFlags= */ 0,
      language,
      accessibilityChannel,
      /* drmInitData= */ null,
      Format.OFFSET_SAMPLE_RELATIVE,
      /* initializationData= */ null);
}
/** Creates a typed stream array; generic array creation requires the unchecked cast. */
@SuppressWarnings("unchecked")
private static ChunkSampleStream<DashChunkSource>[] newSampleStreamArray(int length) {
  return new ChunkSampleStream[length];
}
/** Metadata describing one track group of the period and how it maps back onto the manifest. */
private static final class TrackGroupInfo {
  /** Category of a track group; one of the CATEGORY_ constants below. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({CATEGORY_PRIMARY, CATEGORY_EMBEDDED, CATEGORY_MANIFEST_EVENTS})
  public @interface TrackGroupCategory {}
  /**
   * A normal track group that has its samples drawn from the stream.
   * For example: a video Track Group or an audio Track Group.
   */
  private static final int CATEGORY_PRIMARY = 0;
  /**
   * A track group whose samples are embedded within one of the primary streams. For example: an
   * EMSG track has its sample embedded in emsg atoms in one of the primary streams.
   */
  private static final int CATEGORY_EMBEDDED = 1;
  /**
   * A track group that has its samples listed explicitly in the DASH manifest file.
   * For example: an EventStream track has its sample (Events) included directly in the DASH
   * manifest file.
   */
  private static final int CATEGORY_MANIFEST_EVENTS = 2;
  // Indices of the adaptation sets backing this group (empty for manifest event tracks).
  public final int[] adaptationSetIndices;
  public final int trackType;
  @TrackGroupCategory public final int trackGroupCategory;
  // Index into the period's event streams; -1 unless this is a manifest event track.
  public final int eventStreamGroupIndex;
  // Index of the owning primary track group; -1 for manifest event tracks.
  public final int primaryTrackGroupIndex;
  // C.INDEX_UNSET when the primary group carries no embedded emsg / CEA-608 group.
  public final int embeddedEventMessageTrackGroupIndex;
  public final int embeddedCea608TrackGroupIndex;
  /** Creates info for a primary track group. */
  public static TrackGroupInfo primaryTrack(
      int trackType,
      int[] adaptationSetIndices,
      int primaryTrackGroupIndex,
      int embeddedEventMessageTrackGroupIndex,
      int embeddedCea608TrackGroupIndex) {
    return new TrackGroupInfo(
        trackType,
        CATEGORY_PRIMARY,
        adaptationSetIndices,
        primaryTrackGroupIndex,
        embeddedEventMessageTrackGroupIndex,
        embeddedCea608TrackGroupIndex,
        /* eventStreamGroupIndex= */ -1);
  }
  /** Creates info for an emsg track embedded in the given primary group. */
  public static TrackGroupInfo embeddedEmsgTrack(int[] adaptationSetIndices,
      int primaryTrackGroupIndex) {
    return new TrackGroupInfo(
        C.TRACK_TYPE_METADATA,
        CATEGORY_EMBEDDED,
        adaptationSetIndices,
        primaryTrackGroupIndex,
        C.INDEX_UNSET,
        C.INDEX_UNSET,
        /* eventStreamGroupIndex= */ -1);
  }
  /** Creates info for a CEA-608 track embedded in the given primary group. */
  public static TrackGroupInfo embeddedCea608Track(int[] adaptationSetIndices,
      int primaryTrackGroupIndex) {
    return new TrackGroupInfo(
        C.TRACK_TYPE_TEXT,
        CATEGORY_EMBEDDED,
        adaptationSetIndices,
        primaryTrackGroupIndex,
        C.INDEX_UNSET,
        C.INDEX_UNSET,
        /* eventStreamGroupIndex= */ -1);
  }
  /** Creates info for a track backed by a manifest EventStream element. */
  public static TrackGroupInfo mpdEventTrack(int eventStreamIndex) {
    return new TrackGroupInfo(
        C.TRACK_TYPE_METADATA,
        CATEGORY_MANIFEST_EVENTS,
        new int[0],
        /* primaryTrackGroupIndex= */ -1,
        C.INDEX_UNSET,
        C.INDEX_UNSET,
        eventStreamIndex);
  }
  private TrackGroupInfo(
      int trackType,
      @TrackGroupCategory int trackGroupCategory,
      int[] adaptationSetIndices,
      int primaryTrackGroupIndex,
      int embeddedEventMessageTrackGroupIndex,
      int embeddedCea608TrackGroupIndex,
      int eventStreamGroupIndex) {
    this.trackType = trackType;
    this.adaptationSetIndices = adaptationSetIndices;
    this.trackGroupCategory = trackGroupCategory;
    this.primaryTrackGroupIndex = primaryTrackGroupIndex;
    this.embeddedEventMessageTrackGroupIndex = embeddedEventMessageTrackGroupIndex;
    this.embeddedCea608TrackGroupIndex = embeddedCea608TrackGroupIndex;
    this.eventStreamGroupIndex = eventStreamGroupIndex;
  }
}
}
| |
package com.github.davidmoten.rtree;
import static com.github.davidmoten.rtree.geometry.Geometries.rectangle;
import static java.util.Optional.of;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import com.github.davidmoten.guavamini.Lists;
import com.github.davidmoten.guavamini.annotations.VisibleForTesting;
import com.github.davidmoten.rtree.geometry.Circle;
import com.github.davidmoten.rtree.geometry.Geometry;
import com.github.davidmoten.rtree.geometry.HasGeometry;
import com.github.davidmoten.rtree.geometry.Intersects;
import com.github.davidmoten.rtree.geometry.Line;
import com.github.davidmoten.rtree.geometry.Point;
import com.github.davidmoten.rtree.geometry.Rectangle;
import com.github.davidmoten.rtree.internal.Comparators;
import com.github.davidmoten.rtree.internal.NodeAndEntries;
import com.github.davidmoten.rtree.internal.operators.OperatorBoundedPriorityQueue;
import rx.Observable;
import rx.functions.Func1;
import rx.functions.Func2;
/**
* Immutable in-memory 2D R-Tree with configurable splitter heuristic.
*
* @param <T>
* the entry value type
* @param <S>
* the entry geometry type
*/
public final class RTree<T, S extends Geometry> {
// Degenerate rectangle at the origin with zero area.
public static final Rectangle ZERO_RECTANGLE = rectangle(0, 0, 0, 0);
// Absent for an empty tree.
private final Optional<? extends Node<T, S>> root;
// Tree options (presumably selector/splitter/min-max children — see Builder; confirm).
private final Context<T, S> context;
/**
 * Benchmarks show that this is a good choice for up to O(10,000) entries when
 * using Quadratic splitter (Guttman).
 */
public static final int MAX_CHILDREN_DEFAULT_GUTTMAN = 4;
/**
 * Benchmarks show that this is the sweet spot for up to O(10,000) entries when
 * using R*-tree heuristics.
 */
public static final int MAX_CHILDREN_DEFAULT_STAR = 4;
/**
 * Current size in Entries of the RTree.
 */
private final int size;
// Folds an entry's minimum bounding rectangle into a running union. Seeds from the first
// entry's mbr when the accumulator is empty; presumably used to compute the tree's overall
// mbr via a fold — confirm against later usage in this file.
private static final Func2<Optional<Rectangle>, Entry<Object, Geometry>, Optional<Rectangle>> RECTANGLE_ACCUMULATOR =
    (rectangle, entry) ->
        rectangle.map(value -> Optional.of(value.add(entry.geometry().mbr())))
            .orElseGet(() -> Optional.of(entry.geometry().mbr()));
/**
 * Constructor.
 *
 * @param root
 *            the root node of the tree if present
 * @param size
 *            the number of entries in the tree
 * @param context
 *            options for the R-tree
 */
private RTree(Optional<? extends Node<T, S>> root, int size, Context<T, S> context) {
    this.root = root;
    this.size = size;
    this.context = context;
}
// Creates an empty tree. NOTE(review): passes a null context — presumably a real Context is
// installed before any operation needing it; confirm downstream handling.
private RTree() {
    this(Optional.empty(), 0, null);
}
/**
 * Constructor. Wraps the given root node in a present {@link Optional}.
 *
 * @param root
 *            the root node of the R-tree
 * @param size
 *            the number of entries in the tree
 * @param context
 *            options for the R-tree
 */
private RTree(Node<T, S> root, int size, Context<T, S> context) {
    this(of(root), size, context);
}
// Package-private factory used internally to build trees with an explicit root/size/context.
static <T, S extends Geometry> RTree<T, S> create(Optional<? extends Node<T, S>> root, int size,
        Context<T, S> context) {
    return new RTree<T, S>(root, size, context);
}
/**
 * Returns a new Builder instance for {@link RTree}. Defaults to
 * maxChildren=128, minChildren=64, splitter=QuadraticSplitter.
 * NOTE(review): the MAX_CHILDREN_DEFAULT_* constants above suggest much smaller
 * defaults; confirm which values setDefaultCapacity() actually applies.
 *
 * @param <T>
 *            the value type of the entries in the tree
 * @param <S>
 *            the geometry type of the entries in the tree
 * @return a new RTree instance
 */
public static <T, S extends Geometry> RTree<T, S> create() {
    return new Builder().create();
}
/**
 * Construct an Rtree through STR bulk loading. Default to maxChildren=128,
 * minChildren=64 and fill nodes by a factor of 0.7.
 * NOTE(review): the MAX_CHILDREN_DEFAULT_* constants above suggest much smaller
 * defaults; confirm against Builder.setDefaultCapacity().
 *
 * @param entries
 *            entries to add to the R-tree
 *
 * @param <T>
 *            the value type of the entries in the tree
 * @param <S>
 *            the geometry type of the entries in the tree
 * @return a new RTree instance
 */
public static <T, S extends Geometry> RTree<T, S> create(List<Entry<T, S>> entries) {
    return new Builder().create(entries);
}
/**
 * The tree is scanned for depth and the depth returned. This involves recursing
 * down to the leaf level of the tree to get the current depth. Should be
 * <code>log(n)</code> in complexity.
 *
 * @return depth of the R-tree (0 for an empty tree)
 */
public int calculateDepth() {
    return calculateDepth(root);
}
// An absent root means an empty tree, whose depth is zero.
private static <T, S extends Geometry> int calculateDepth(Optional<? extends Node<T, S>> root) {
    if (!root.isPresent()) {
        return 0;
    }
    return calculateDepth(root.get(), 0);
}
// Descends the first-child chain until a leaf is reached; only child(0) is inspected at
// each level.
private static <T, S extends Geometry> int calculateDepth(Node<T, S> node, int depth) {
    Node<T, S> current = node;
    int level = depth;
    while (!(current instanceof Leaf)) {
        current = ((NonLeaf<T, S>) current).child(0);
        level++;
    }
    return level + 1;
}
/**
 * When the number of children in an R-tree node drops below this number the
 * node is deleted and the children are added on to the R-tree again.
 *
 * @param minChildren
 *            less than this number of children in a node triggers a node
 *            deletion and redistribution of its members
 * @return builder
 */
public static Builder minChildren(int minChildren) {
    // Static shortcut for new Builder().minChildren(minChildren).
    return new Builder().minChildren(minChildren);
}
/**
 * Sets the max number of children in an R-tree node.
 *
 * @param maxChildren
 *            max number of children in an R-tree node
 * @return builder
 */
public static Builder maxChildren(int maxChildren) {
    // Static shortcut for new Builder().maxChildren(maxChildren).
    return new Builder().maxChildren(maxChildren);
}
/**
 * Sets the {@link Splitter} to use when maxChildren is reached.
 *
 * @param splitter
 *            the splitter algorithm to use
 * @return builder
 */
public static Builder splitter(Splitter splitter) {
    // Static shortcut for new Builder().splitter(splitter).
    return new Builder().splitter(splitter);
}
/**
 * Sets the node {@link Selector} which decides which branches to follow when
 * inserting or searching.
 *
 * @param selector
 *            determines which branches to follow when inserting or searching
 * @return builder
 */
public static Builder selector(Selector selector) {
    // Static shortcut for new Builder().selector(selector).
    return new Builder().selector(selector);
}
/**
 * Sets the splitter to {@link SplitterRStar} and selector to
 * {@link SelectorRStar} and defaults to minChildren=10.
 *
 * @return builder
 */
public static Builder star() {
    // Static shortcut for new Builder().star().
    return new Builder().star();
}
/**
* RTree Builder.
*/
public static class Builder {
/**
 * According to http://dbs.mathematik.uni-marburg.de/publications/myPapers
 * /1990/BKSS90.pdf (R*-tree paper), best filling ratio is 0.4 for both
 * quadratic split and R*-tree split.
 */
private static final double DEFAULT_FILLING_FACTOR = 0.4;
// Fill ratio used for STR bulk loading (fraction of maxChildren filled per node).
private static final double DEFAULT_LOADING_FACTOR = 0.7;
// Empty means "not set"; concrete defaults are presumably chosen in
// setDefaultCapacity() when create() is called — confirm.
private Optional<Integer> maxChildren = Optional.empty();
private Optional<Integer> minChildren = Optional.empty();
private Splitter splitter = new SplitterQuadratic();
private Selector selector = new SelectorMinimalAreaIncrease();
private double loadingFactor;
// True once star() has been called; switches to R*-tree heuristics.
private boolean star = false;
private Factory<Object, Geometry> factory = Factories.defaultFactory();
// Private: obtain instances via the static shortcut methods on RTree.
private Builder() {
    loadingFactor = DEFAULT_LOADING_FACTOR;
}
/**
 * The factor is used as the fill ratio during bulk loading.
 *
 * @param factor
 *            loading factor
 * @return this
 */
public Builder loadingFactor(double factor) {
    // Presumably only consulted by the bulk-loading create(entries) path — confirm.
    this.loadingFactor = factor;
    return this;
}
/**
* When the number of children in an R-tree node drops below this number the
* node is deleted and the children are added on to the R-tree again.
*
* @param minChildren
* less than this number of children in a node triggers a
* redistribution of its children.
* @return builder
*/
public Builder minChildren(int minChildren) {
this.minChildren = of(minChildren);
return this;
}
/**
* Sets the max number of children in an R-tree node.
*
* @param maxChildren
* max number of children in R-tree node.
* @return builder
*/
public Builder maxChildren(int maxChildren) {
this.maxChildren = of(maxChildren);
return this;
}
/**
* Sets the {@link Splitter} to use when maxChildren is reached.
*
* @param splitter
* node splitting method to use
* @return builder
*/
public Builder splitter(Splitter splitter) {
this.splitter = splitter;
return this;
}
/**
* Sets the node {@link Selector} which decides which branches to follow when
* inserting or searching.
*
* @param selector
* selects the branch to follow when inserting or searching
* @return builder
*/
public Builder selector(Selector selector) {
this.selector = selector;
return this;
}
/**
* Sets the splitter to {@link SplitterRStar} and selector to
* {@link SelectorRStar} and defaults to minChildren=10.
*
* @return builder
*/
public Builder star() {
selector = new SelectorRStar();
splitter = new SplitterRStar();
star = true;
return this;
}
@SuppressWarnings("unchecked")
public Builder factory(Factory<?, ? extends Geometry> factory) {
// TODO could change the signature of Builder to have types to
// support this method but would be breaking change for existing
// clients
this.factory = (Factory<Object, Geometry>) factory;
return this;
}
/**
* Builds the {@link RTree}.
*
* @param <T>
* value type
* @param <S>
* geometry type
* @return RTree
*/
@SuppressWarnings("unchecked")
public <T, S extends Geometry> RTree<T, S> create() {
setDefaultCapacity();
return new RTree<T, S>(Optional.<Node<T, S>>empty(), 0,
new Context<T, S>(minChildren.get(), maxChildren.get(), selector, splitter,
(Factory<T, S>) factory));
}
/**
* Create an RTree by bulk loading, using the STR method. STR: a simple and
* efficient algorithm for R-tree packing
* http://ieeexplore.ieee.org/abstract/document/582015/
* <p>
* Note: this method mutates the input entries, the internal order of the List
* may be changed.
* </p>
*
* @param entries
* entries to be added to the r-tree
* @return a loaded RTree
*/
@SuppressWarnings("unchecked")
public <T, S extends Geometry> RTree<T, S> create(List<Entry<T, S>> entries) {
setDefaultCapacity();
Context<T, S> context = new Context<T, S>(minChildren.get(), maxChildren.get(),
selector, splitter, (Factory<T, S>) factory);
return packingSTR(entries, true, entries.size(), context);
}
private void setDefaultCapacity() {
if (!maxChildren.isPresent()) {
if (star) {
maxChildren = Optional.of(MAX_CHILDREN_DEFAULT_STAR);
} else {
maxChildren = Optional.of(MAX_CHILDREN_DEFAULT_GUTTMAN);
}
}
if (!minChildren.isPresent()) {
minChildren = Optional.of((int) Math.round(maxChildren.get() * DEFAULT_FILLING_FACTOR));
}
}
@SuppressWarnings("unchecked")
private <T, S extends Geometry> RTree<T, S> packingSTR(List<? extends HasGeometry> objects,
boolean isLeaf, int size, Context<T, S> context) {
int capacity = (int) Math.round(maxChildren.get() * loadingFactor);
int nodeCount = (int) Math.ceil(1.0 * objects.size() / capacity);
if (nodeCount == 0) {
return create();
} else if (nodeCount == 1) {
Node<T, S> root;
if (isLeaf) {
root = context.factory().createLeaf((List<Entry<T, S>>) objects, context);
} else {
root = context.factory().createNonLeaf((List<Node<T, S>>) objects, context);
}
return new RTree<T, S>(of(root), size, context);
}
int nodePerSlice = (int) Math.ceil(Math.sqrt(nodeCount));
int sliceCapacity = nodePerSlice * capacity;
int sliceCount = (int) Math.ceil(1.0 * objects.size() / sliceCapacity);
Collections.sort(objects, new MidComparator((short) 0));
List<Node<T, S>> nodes = new ArrayList<Node<T, S>>(nodeCount);
for (int s = 0; s < sliceCount; s++) {
@SuppressWarnings("rawtypes")
List slice = objects.subList(s * sliceCapacity,
Math.min((s + 1) * sliceCapacity, objects.size()));
Collections.sort(slice, new MidComparator((short) 1));
for (int i = 0; i < slice.size(); i += capacity) {
if (isLeaf) {
List<Entry<T, S>> entries = slice.subList(i,
Math.min(slice.size(), i + capacity));
Node<T, S> leaf = context.factory().createLeaf(entries, context);
nodes.add(leaf);
} else {
List<Node<T, S>> children = slice.subList(i,
Math.min(slice.size(), i + capacity));
Node<T, S> nonleaf = context.factory().createNonLeaf(children, context);
nodes.add(nonleaf);
}
}
}
return packingSTR(nodes, false, size, context);
}
private static final class MidComparator implements Comparator<HasGeometry> {
private final short dimension; // leave space for multiple dimensions, 0 for x, 1 for y,
// ...
public MidComparator(short dim) {
dimension = dim;
}
@Override
public int compare(HasGeometry o1, HasGeometry o2) {
return Double.compare(mid(o1), mid(o2));
}
private double mid(HasGeometry o) {
Rectangle mbr = o.geometry().mbr();
if (dimension == 0)
return (mbr.x1() + mbr.x2()) / 2;
else
return (mbr.y1() + mbr.y2()) / 2;
}
}
}
/**
 * Returns an immutable copy of the RTree with the given entry added.
 *
 * @param entry
 *            item to add to the R-tree.
 * @return a new immutable R-tree including the new entry
 */
@SuppressWarnings("unchecked")
public RTree<T, S> add(Entry<? extends T, ? extends S> entry) {
    if (!root.isPresent()) {
        // empty tree: the new root is a leaf holding just this entry
        Leaf<T, S> leaf = context.factory()
                .createLeaf(Lists.newArrayList((Entry<T, S>) entry), context);
        return new RTree<T, S>(leaf, size + 1, context);
    }
    List<Node<T, S>> nodes = root.get().add(entry);
    final Node<T, S> node;
    if (nodes.size() == 1) {
        node = nodes.get(0);
    } else {
        // the root was split; wrap the pieces in a new non-leaf root
        node = context.factory().createNonLeaf(nodes, context);
    }
    return new RTree<T, S>(node, size + 1, context);
}
/**
 * Returns an immutable copy of the RTree with an entry added that is built
 * from the given value and geometry.
 *
 * @param value
 *            the value of the {@link Entry} to be added
 * @param geometry
 *            the geometry of the {@link Entry} to be added
 * @return a new immutable R-tree including the new entry
 */
public RTree<T, S> add(T value, S geometry) {
    Entry<T, S> entry = context.factory().createEntry(value, geometry);
    return add(entry);
}
/**
 * Returns an immutable RTree containing the current entries plus all the
 * entries supplied as a parameter.
 *
 * @param entries
 *            entries to add
 * @return R-tree with entries added
 */
public RTree<T, S> add(Iterable<Entry<T, S>> entries) {
    RTree<T, S> result = this;
    for (Entry<T, S> e : entries) {
        result = result.add(e);
    }
    return result;
}
/**
 * Returns the Observable sequence of trees created by progressively adding
 * entries.
 *
 * @param entries
 *            the entries to add
 * @return a sequence of trees
 */
public Observable<RTree<T, S>> add(Observable<Entry<T, S>> entries) {
    return entries.scan(this, new Func2<RTree<T, S>, Entry<T, S>, RTree<T, S>>() {
        @Override
        public RTree<T, S> call(RTree<T, S> tree, Entry<T, S> entry) {
            return tree.add(entry);
        }
    });
}
/**
 * Returns the Observable sequence of trees created by progressively deleting
 * entries.
 *
 * @param entries
 *            the entries to delete
 * @param all
 *            if true delete all matching otherwise just first matching
 * @return a sequence of trees
 */
public Observable<RTree<T, S>> delete(Observable<Entry<T, S>> entries, final boolean all) {
    // lambda for consistency with add(Observable) and the other Func1 usages
    // in this class (replaces a verbose anonymous Func2)
    return entries.scan(this, (tree, entry) -> tree.delete(entry, all));
}
/**
 * Returns a new R-tree with the given entries deleted. If <code>all</code> is
 * false deletes at most one matching occurrence of each entry; if true deletes
 * every matching occurrence.
 *
 * @param entries
 *            entries to delete
 * @param all
 *            if false deletes one if exists else deletes all
 * @return R-tree with entries deleted
 */
public RTree<T, S> delete(Iterable<Entry<T, S>> entries, boolean all) {
    RTree<T, S> result = this;
    for (Entry<T, S> e : entries) {
        result = result.delete(e, all);
    }
    return result;
}
/**
 * Returns a new R-tree with the given entries deleted, removing at most one
 * matching occurrence per entry.
 *
 * @param entries
 *            entries to delete
 * @return R-tree with entries deleted up to one matching occurrence per entry
 */
public RTree<T, S> delete(Iterable<Entry<T, S>> entries) {
    RTree<T, S> result = this;
    for (Entry<T, S> e : entries) {
        result = result.delete(e);
    }
    return result;
}
/**
 * If <code>all</code> is false deletes one entry matching the given value and
 * Geometry. If <code>all</code> is true deletes all entries matching the given
 * value and geometry. This method has no effect if the entry is not present.
 * The entry must match on both value and geometry to be deleted.
 *
 * @param value
 *            the value of the {@link Entry} to be deleted
 * @param geometry
 *            the geometry of the {@link Entry} to be deleted
 * @param all
 *            if false deletes one if exists else deletes all
 * @return a new immutable R-tree without one or many instances of the specified
 *         entry if it exists otherwise returns the original RTree object
 */
public RTree<T, S> delete(T value, S geometry, boolean all) {
    Entry<T, S> entry = context.factory().createEntry(value, geometry);
    return delete(entry, all);
}
/**
 * Deletes at most one entry matching the given value and geometry. This method
 * has no effect if the entry is not present. The entry must match on both value
 * and geometry to be deleted.
 *
 * @param value
 *            the value to be matched for deletion
 * @param geometry
 *            the geometry to be matched for deletion
 * @return an immutable RTree without one entry (if found) matching the given
 *         value and geometry
 */
public RTree<T, S> delete(T value, S geometry) {
    Entry<T, S> entry = context.factory().createEntry(value, geometry);
    return delete(entry, false);
}
/**
 * Deletes one or all matching entries depending on the value of
 * <code>all</code>. If multiple copies of the entry are in the R-tree only one
 * will be deleted if all is false otherwise all matching entries will be
 * deleted. The entry must match on both value and geometry to be deleted.
 *
 * @param entry
 *            the {@link Entry} to be deleted
 * @param all
 *            if true deletes all matches otherwise deletes first found
 * @return a new immutable R-tree without one instance of the specified entry
 */
public RTree<T, S> delete(Entry<? extends T, ? extends S> entry, boolean all) {
    if (!root.isPresent()) {
        // nothing to delete from an empty tree
        return this;
    }
    NodeAndEntries<T, S> result = root.get().delete(entry, all);
    if (result.node().isPresent() && result.node().get() == root.get()) {
        // root unchanged means nothing was deleted
        return this;
    }
    // entries orphaned by node removal are re-added to the shrunken tree
    int newSize = size - result.countDeleted() - result.entriesToAdd().size();
    return new RTree<T, S>(result.node(), newSize, context).add(result.entriesToAdd());
}
/**
 * Deletes one entry if it exists, returning an immutable copy of the RTree
 * without that entry. If multiple copies of the entry are present only one is
 * removed. The entry must match on both value and geometry to be deleted.
 *
 * @param entry
 *            the {@link Entry} to be deleted
 * @return a new immutable R-tree without one instance of the specified entry
 */
public RTree<T, S> delete(Entry<? extends T, ? extends S> entry) {
    // delegate with all=false: remove at most a single occurrence
    return delete(entry, false);
}
/**
 * <p>
 * Returns an Observable sequence of {@link Entry} that satisfy the given
 * condition. Note that this method is well-behaved only if:
 *
 * <p>
 * {@code condition(g)} being true for a {@link Geometry} g implies
 * {@code condition(r)} is true for the minimum bounding rectangles of its
 * ancestor nodes.
 *
 * <p>
 * {@code distance(g) < D} is an example of such a condition.
 *
 * @param condition
 *            return Entries whose geometry satisfies the given condition
 * @return sequence of matching entries
 */
@VisibleForTesting
Observable<Entry<T, S>> search(Func1<? super Geometry, Boolean> condition) {
    if (root.isPresent()) {
        return Observable.unsafeCreate(new OnSubscribeSearch<>(root.get(), condition));
    } else {
        // empty tree: nothing can match
        return Observable.empty();
    }
}
/**
 * Returns a predicate function that reports whether a {@link Geometry}
 * intersects with a given rectangle.
 *
 * @param r
 *            the rectangle to check intersection with
 * @return whether the geometry and the rectangle intersect
 */
public static Func1<Geometry, Boolean> intersects(final Rectangle r) {
    return new Func1<Geometry, Boolean>() {
        @Override
        public Boolean call(Geometry g) {
            return g.intersects(r);
        }
    };
}
/**
 * The always-true predicate. See {@link RTree#entries()} for example use.
 */
private static final Func1<Geometry, Boolean> ALWAYS_TRUE = g -> true;
/**
 * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree
 * whose minimum bounding rectangle intersects with the given rectangle.
 *
 * @param r
 *            rectangle to check intersection with the entry mbr
 * @return entries that intersect with the rectangle r
 */
public Observable<Entry<T, S>> search(final Rectangle r) {
    Func1<Geometry, Boolean> condition = intersects(r);
    return search(condition);
}
/**
 * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree
 * whose minimum bounding rectangle intersects with the given point.
 *
 * @param p
 *            point to check intersection with the entry mbr
 * @return entries that intersect with the point p
 */
public Observable<Entry<T, S>> search(final Point p) {
    Rectangle mbr = p.mbr();
    return search(mbr);
}
/**
 * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree
 * whose geometry intersects with the given circle.
 *
 * @param circle
 *            circle to check intersection with
 * @return entries whose geometry intersects with the circle
 */
public Observable<Entry<T, S>> search(Circle circle) {
    return search(circle, Intersects.geometryIntersectsCircle);
}
/**
 * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree
 * whose geometry intersects with the given line.
 *
 * @param line
 *            line to check intersection with
 * @return entries whose geometry intersects with the line
 */
public Observable<Entry<T, S>> search(Line line) {
    return search(line, Intersects.geometryIntersectsLine);
}
/**
 * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree
 * whose minimum bounding rectangles are strictly less than maxDistance from the
 * given rectangle.
 *
 * @param r
 *            rectangle to measure distance from
 * @param maxDistance
 *            entries returned must be within this distance from rectangle r
 * @return the sequence of matching entries
 */
public Observable<Entry<T, S>> search(final Rectangle r, final double maxDistance) {
    return search(new Func1<Geometry, Boolean>() {
        @Override
        public Boolean call(Geometry g) {
            return g.distance(r) < maxDistance;
        }
    });
}
/**
 * Returns the intersections with the given (arbitrary) geometry using an
 * intersection function to filter the search results returned from a search of
 * the mbr of <code>g</code>.
 *
 * @param <R>
 *            type of geometry being searched for intersection with
 * @param g
 *            geometry being searched for intersection with
 * @param intersects
 *            function to determine if the two geometries intersect
 * @return a sequence of entries that intersect with g
 */
public <R extends Geometry> Observable<Entry<T, S>> search(final R g,
        final Func2<? super S, ? super R, Boolean> intersects) {
    // coarse candidate search on the mbr, then exact intersection filtering
    Observable<Entry<T, S>> candidates = search(g.mbr());
    return candidates.filter(candidate -> intersects.call(candidate.geometry(), g));
}
/**
 * Returns all entries strictly less than <code>maxDistance</code> from the
 * given geometry. Because the geometry may be of an arbitrary type it is
 * necessary to also pass a distance function.
 *
 * @param <R>
 *            type of the geometry being searched for
 * @param g
 *            geometry to search for entries within maxDistance of
 * @param maxDistance
 *            strict max distance that entries must be from g
 * @param distance
 *            function to calculate the distance between geometries of type S
 *            and R.
 * @return entries strictly less than maxDistance from g
 */
public <R extends Geometry> Observable<Entry<T, S>> search(final R g, final double maxDistance,
        final Func2<? super S, ? super R, Double> distance) {
    // coarse phase: mbr distance; fine phase: exact distance function
    return search(geom -> geom.distance(g.mbr()) < maxDistance)
            .filter(candidate -> distance.call(candidate.geometry(), g) < maxDistance);
}
/**
 * Returns an {@link Observable} sequence of all {@link Entry}s in the R-tree
 * whose minimum bounding rectangles are strictly less than maxDistance from
 * the given point (delegates to the rectangle overload).
 *
 * @param p
 *            point to measure distance from
 * @param maxDistance
 *            entries returned must be strictly less than this distance from
 *            point p
 * @return the sequence of matching entries
 */
public Observable<Entry<T, S>> search(final Point p, final double maxDistance) {
    return search(p.mbr(), maxDistance);
}
/**
 * Returns the nearest k entries (k=maxCount) to the given rectangle where the
 * entries are strictly less than a given maximum distance from the rectangle.
 *
 * @param r
 *            rectangle
 * @param maxDistance
 *            max distance of returned entries from the rectangle
 * @param maxCount
 *            max number of entries to return
 * @return nearest entries to maxCount, in ascending order of distance
 */
public Observable<Entry<T, S>> nearest(final Rectangle r, final double maxDistance,
        int maxCount) {
    // bounded priority queue keeps only the maxCount closest candidates
    return search(r, maxDistance).lift(
            new OperatorBoundedPriorityQueue<Entry<T, S>>(maxCount,
                    Comparators.<T, S>ascendingDistance(r)));
}
/**
 * Returns the nearest k entries (k=maxCount) to the given point where the
 * entries are strictly less than a given maximum distance from the point.
 *
 * @param p
 *            point
 * @param maxDistance
 *            max distance of returned entries from the point
 * @param maxCount
 *            max number of entries to return
 * @return nearest entries to maxCount, in ascending order of distance
 */
public Observable<Entry<T, S>> nearest(final Point p, final double maxDistance, int maxCount) {
    Rectangle mbr = p.mbr();
    return nearest(mbr, maxDistance, maxCount);
}
/**
 * Returns all entries in the tree as an {@link Observable} sequence.
 *
 * @return all entries in the R-tree
 */
public Observable<Entry<T, S>> entries() {
    // every entry matches the always-true condition
    return search(ALWAYS_TRUE);
}
/**
 * Returns a {@link Visualizer} for an image of given width and height and
 * restricted to the given view of the coordinates. The points in the view are
 * scaled to match the aspect ratio defined by the width and height.
 *
 * @param width
 *            of the image in pixels
 * @param height
 *            of the image in pixels
 * @param view
 *            using the coordinate system of the entries
 * @return visualizer
 */
@SuppressWarnings("unchecked")
public Visualizer visualize(int width, int height, Rectangle view) {
    RTree<?, Geometry> tree = (RTree<?, Geometry>) this;
    return new Visualizer(tree, width, height, view);
}
/**
 * Returns a {@link Visualizer} for an image of given width and height and
 * restricted to the smallest view that fully contains the coordinates. The
 * points in the view are scaled to match the aspect ratio defined by the width
 * and height.
 *
 * @param width
 *            of the image in pixels
 * @param height
 *            of the image in pixels
 * @return visualizer
 */
public Visualizer visualize(int width, int height) {
    Rectangle view = calculateMaxView(this);
    return visualize(width, height, view);
}
// Computes the minimum bounding rectangle covering all entry geometries of
// the tree; returns ZERO_RECTANGLE when the tree has no entries.
private Rectangle calculateMaxView(RTree<T, S> tree) {
    @SuppressWarnings("unchecked")
    // double cast through the raw wildcard form is required because
    // RECTANGLE_ACCUMULATOR is declared without this instance's T, S
    Func2<Optional<Rectangle>, Entry<T, S>, Optional<Rectangle>> ra = //
    (Func2<Optional<Rectangle>, Entry<T, S>, Optional<Rectangle>>) //
    (Func2<?,?,?>) //
    RECTANGLE_ACCUMULATOR;
    return tree.entries()
            .reduce(Optional.empty(), ra)
            .toBlocking().single()
            .orElse(ZERO_RECTANGLE);
}
/**
 * Returns the root node of the R-tree, or an empty {@link Optional} if the
 * tree has no entries.
 *
 * @return the optional root node
 */
public Optional<? extends Node<T, S>> root() {
    return root;
}
/**
 * If the RTree has no entries returns {@link Optional#empty()} otherwise
 * returns the minimum bounding rectangle of all entries in the RTree.
 *
 * @return minimum bounding rectangle of all entries in RTree
 */
public Optional<Rectangle> mbr() {
    return root.map(r -> r.geometry().mbr());
}
/**
 * Returns true if and only if the R-tree contains no entries.
 *
 * @return is R-tree empty
 */
public boolean isEmpty() {
    return size() == 0;
}
/**
 * Returns the number of entries in the RTree.
 *
 * <p>The count is maintained incrementally by add/delete so this is O(1).
 *
 * @return the number of entries
 */
public int size() {
    return size;
}
/**
 * Returns the {@link Context} containing the configuration of the RTree at
 * the time of instantiation.
 *
 * @return the configuration of the RTree at the time of instantiation
 */
public Context<T, S> context() {
    return context;
}
/**
 * Returns a human readable form of the RTree. Here's an example:
 *
 * <pre>
 * mbr=Rectangle [x1=10.0, y1=4.0, x2=62.0, y2=85.0]
 *   mbr=Rectangle [x1=28.0, y1=4.0, x2=34.0, y2=85.0]
 *     entry=Entry [value=2, geometry=Point [x=29.0, y=4.0]]
 *     entry=Entry [value=1, geometry=Point [x=28.0, y=19.0]]
 *     entry=Entry [value=4, geometry=Point [x=34.0, y=85.0]]
 *   mbr=Rectangle [x1=10.0, y1=45.0, x2=62.0, y2=63.0]
 *     entry=Entry [value=5, geometry=Point [x=62.0, y=45.0]]
 *     entry=Entry [value=3, geometry=Point [x=10.0, y=63.0]]
 * </pre>
 *
 * @return a string representation of the RTree
 */
public String asString() {
    // empty tree renders as the empty string
    return root.isPresent() ? asString(root.get(), "") : "";
}
/** Indent appended per tree level in {@link #asString()} output. */
private static final String MARGIN_INCREMENT = "  ";

// Recursively renders a node and its descendants, one line per node/entry,
// indenting by MARGIN_INCREMENT for each level.
private String asString(Node<T, S> node, String margin) {
    StringBuilder b = new StringBuilder();
    b.append(margin).append("mbr=").append(node.geometry()).append('\n');
    if (node instanceof NonLeaf) {
        NonLeaf<T, S> nonLeaf = (NonLeaf<T, S>) node;
        for (int i = 0; i < nonLeaf.count(); i++) {
            b.append(asString(nonLeaf.child(i), margin + MARGIN_INCREMENT));
        }
    } else {
        Leaf<T, S> leaf = (Leaf<T, S>) node;
        for (Entry<T, S> entry : leaf.entries()) {
            b.append(margin).append(MARGIN_INCREMENT).append("entry=").append(entry)
                    .append('\n');
        }
    }
    return b.toString();
}
}
| |
/*
Copyright 2010-2017 BusinessCode GmbH, Germany
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.businesscode.util;
import static de.businesscode.bcdui.wrs.load.WrsDataWriter.WRS_XML_NAMESPACE;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.namespace.QName;
import javax.xml.soap.Detail;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPBody;
import javax.xml.soap.SOAPConstants;
import javax.xml.soap.SOAPElement;
import javax.xml.soap.SOAPException;
import javax.xml.soap.SOAPFault;
import javax.xml.soap.SOAPMessage;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.transform.dom.DOMResult;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.w3c.dom.Document;
import de.businesscode.bcdui.toolbox.ServletUtils;
import de.businesscode.bcdui.web.filters.RequestLifeCycleFilter;
import de.businesscode.bcdui.wrs.save.exc.WrsValidationException;
import de.businesscode.util.jdbc.SQLDetailException;
/**
 * <p>
 * Utility class to create a {@link SOAPFault}
 * </p>
 * <p>
 * Due to security concerns the server should never reveal exception internals to the client,
 * rather respond with a general message. Generally, the {@link RequestLifeCycleFilter} does
 * so in case a request failed to process due to a thrown exception.
 * </p>
 * <p>However, sometimes you want to respond with a business-exception to the client (yet yielding
 * a positive http response status) so it knows what went wrong and why. In such a case
 * you can handle a business exception in your servlet implementation and respond with a
 * SOAPFault using this class.
 * </p>
 */
public class SOAPFaultMessage {
  private SOAPMessage message;
  private static final Logger logger = LogManager.getLogger(SOAPFaultMessage.class);

  /**
   * Convenience method to write a SOAP message response. The responseOutputStream must not be opened yet.
   * It captures all Exceptions happening during that and writes them to log. The http response status is NOT set
   * by this method. The response content-type is set to 'text/xml'.
   * @param resp
   * @param requestDocument - optional request document to serialize into SOAPFault
   * @param requestURL - optional URL to appear in SOAPFault
   * @param faultException - optional exception caused this fault, must not reveal security relevant facts
   * @param faultMessage - optional literal message provided as FaultReason in SOAPFault must not reveal security relevant facts
   * @return true if the SOAPFault was written, false if it could not be produced and an error log was created instead
   */
  static public boolean writeSOAPFaultToHTTPResponse(HttpServletResponse resp, Document requestDocument, String requestURL, Throwable faultException, String faultMessage)
  {
    try {
      resp.setContentType("text/xml");
      return writeSOAPFault(resp.getOutputStream(), requestDocument, requestURL, faultException, faultMessage);
    } catch( Exception e ) {
      // parameterized logging keeps the stack trace of the write failure;
      // faultException is the original business cause for context only
      logger.error("Failed to write SOAPFault for {} (original cause: {})", requestURL, faultException, e);
      return false;
    }
  }

  /**
   * Convenience method to write a SOAP message response. The responseOutputStream must not be opened yet.
   * It captures all Exceptions happening during that and writes them to log. The http response status is NOT set
   * by this method. The response content-type is set to 'text/xml'.
   * @param request
   * @param response
   * @param faultException - optional exception caused this fault, must not reveal security relevant facts
   * @param faultMessage - must not reveal security relevant facts
   * @return true if the SOAPFault was written, false if it could not be produced and an error log was created instead
   */
  static public boolean writeSOAPFaultToHTTPResponse(HttpServletRequest request, HttpServletResponse response, Exception faultException, String faultMessage) {
    return writeSOAPFaultToHTTPResponse(response, (Document) request.getAttribute("guiStatusDoc"), ServletUtils.getInstance().reconstructURL(request), faultException, faultMessage);
  }

  /**
   * Convenience method to write a SOAP message into a stream. It captures all Exceptions happening during that and writes them to log
   * @param os
   * @param requestDocument
   * @param requestURL
   * @param faultException
   * @param faultMessage
   * @return true if the SOAPFault was written, false if it could not be produced and an error log was created instead
   */
  static private boolean writeSOAPFault(OutputStream os, Document requestDocument, String requestURL, Throwable faultException, String faultMessage) {
    try {
      SOAPFaultMessage sFM = new SOAPFaultMessage(requestDocument, requestURL, faultException, faultMessage);
      sFM.writeTo(os);
    } catch( Exception e ) {
      // The fault document itself could not be produced/serialized; log the
      // production failure (e) together with the original fault context.
      logger.error("Could not produce SOAPFault (faultMessage=" + faultMessage + ", faultException=" + faultException + ")", e);
      return false;
    }
    return true;
  }

  /**
   * SOAPFaultMessage
   *
   * @param requestDocument - optional request document to serialize into SOAPFault
   * @param requestURL - optional URL to appear in SOAPFault
   * @param faultException - optional exception caused this fault
   * @param message - optional literal message provided as FaultReason in SOAPFault
   */
  public SOAPFaultMessage(Document requestDocument, String requestURL, Throwable faultException, String message) throws SOAPException {
    this.message = createMessage(requestDocument, requestURL, faultException, message);
  }

  /**
   * @return the message
   */
  public SOAPMessage getMessage() {
    return message;
  }

  /**
   * @param out
   * @throws SOAPException
   * @throws IOException
   * @see javax.xml.soap.SOAPMessage#writeTo(java.io.OutputStream)
   */
  public void writeTo(OutputStream out) throws SOAPException, IOException {
    getMessage().writeTo(out);
  }

  /**
   * @param requestDocument - optional request document to serialize into SOAPFault
   * @param requestURL - optional URL to appear in SOAPFault
   * @param faultException - optional exception caused this fault
   * @param faultMessage - optional literal message provided as FaultReason in SOAPFault
   * @return the newly created message
   * @throws SOAPException
   */
  private SOAPMessage createMessage(Document requestDocument, String requestURL, Throwable faultException, String faultMessage) throws SOAPException {
    SOAPMessage message = MessageFactory.newInstance(SOAPConstants.SOAP_1_2_PROTOCOL).createMessage();
    SOAPBody body = message.getSOAPBody();
    //
    // write fault part
    SOAPFault fault = body.addFault();
    fault.setFaultCode(SOAPConstants.SOAP_SENDER_FAULT); // TODO analyze exception and send the right code. Later.
    // faultMessage takes precedence
    if(faultMessage != null){
      fault.addFaultReasonText(faultMessage, Locale.ENGLISH);
    } else if(faultException != null){
      fault.addFaultReasonText(faultException.getMessage(), Locale.ENGLISH);
    }
    //
    // write body part
    Detail detailNode = fault.addDetail();
    detailNode.addNamespaceDeclaration("", WRS_XML_NAMESPACE);
    //
    if (faultException != null) {
      SOAPElement exceptionsNode = detailNode.addChildElement("Exception", "");
      writeExceptionAsSoapDetails(exceptionsNode, faultException);
    }
    if (requestDocument != null) {
      detailNode.appendChild(detailNode.getOwnerDocument().adoptNode(requestDocument.getDocumentElement()));
    }
    if (requestURL != null) {
      detailNode.addChildElement("Url", "").addTextNode(requestURL);
    }
    return message;
  }

  /**
   * Send the exception details. We send just root and cause exceptions without the full stack trace.
   *
   * @param rootElement
   * @param exception - may also be of type {@link WrsValidationException}, which is properly serialized and can be
   *          processed by client
   * @throws SOAPException
   */
  private void writeExceptionAsSoapDetails(SOAPElement rootElement, Throwable exception) throws SOAPException {
    if (exception != null) {
      SOAPElement element = rootElement.addChildElement("Cause", "");
      element.addAttribute(new QName("class"), exception.getClass().getName());
      // skip SQL details
      if (exception.getMessage() != null && ! (exception instanceof SQLDetailException) ) {
        element.addTextNode(exception.getMessage());
      }
      // serialize WrsValidationException
      if(exception instanceof WrsValidationException) {
        WrsValidationException wrsExc = (WrsValidationException)exception;
        XMLStreamWriter sw = null;
        try {
          sw = XMLOutputFactory.newInstance().createXMLStreamWriter(new DOMResult(rootElement));
          wrsExc.getValidationResult().serializeTo(sw);
          sw.flush();
        } catch (Exception e) {
          logger.error("serialization of WrsValidationException failed", e);
          rootElement.setTextContent("WrsValidationException serialization failed.");
        } finally {
          if(sw != null) {
            try {
              sw.close();
            } catch (Exception ignored) {
              // best-effort close; the fault content was already written
            }
          }
        }
      }
      // recurse into the cause chain (terminates when getCause() is null)
      writeExceptionAsSoapDetails(rootElement, exception.getCause());
    }
  }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.file;
import alluxio.Configuration;
import alluxio.PropertyKey;
import alluxio.Seekable;
import alluxio.annotation.PublicApi;
import alluxio.client.BoundedStream;
import alluxio.client.PositionedReadable;
import alluxio.client.block.AlluxioBlockStore;
import alluxio.client.block.stream.BlockInStream;
import alluxio.client.file.options.InStreamOptions;
import alluxio.exception.PreconditionMessage;
import alluxio.exception.status.DeadlineExceededException;
import alluxio.exception.status.UnavailableException;
import alluxio.network.netty.NettyRPC;
import alluxio.network.netty.NettyRPCContext;
import alluxio.proto.dataserver.Protocol;
import alluxio.retry.CountingRetry;
import alluxio.util.proto.ProtoMessage;
import alluxio.wire.WorkerNetAddress;
import com.google.common.base.Preconditions;
import io.netty.channel.Channel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.concurrent.NotThreadSafe;
/**
* A streaming API to read a file. This API represents a file as a stream of bytes and provides a
* collection of {@link #read} methods to access this stream of bytes. In addition, one can seek
* into a given offset of the stream to read.
*
* This class wraps the block in stream for each of the blocks in the file and abstracts the
* switching between streams. The backing streams can read from Alluxio space in the local machine,
* remote machines, or the under storage system.
*
* The internal bookkeeping works as follows:
*
* 1. {@link #updateStream()} is a potentially expensive operation and is responsible for
* creating new BlockInStreams and updating {@link #mBlockInStream}. After calling this method,
* {@link #mBlockInStream} is ready to serve reads from the current {@link #mPosition}.
* 2. {@link #mPosition} can become out of sync with {@link #mBlockInStream} when seek or skip is
* called. When this happens, {@link #mBlockInStream} is set to null and no effort is made to
* sync between the two until {@link #updateStream()} is called.
* 3. {@link #updateStream()} is only called when followed by a read request. Thus, if a
* {@link #mBlockInStream} is created, it is guaranteed we read at least one byte from it.
*/
@PublicApi
@NotThreadSafe
public class FileInStream extends InputStream implements BoundedStream, PositionedReadable,
    Seekable {
  private static final Logger LOG = LoggerFactory.getLogger(FileInStream.class);

  /** Max number of retry attempts (against different workers) before a read gives up. */
  private static final int MAX_WORKERS_TO_RETRY =
      Configuration.getInt(PropertyKey.USER_BLOCK_WORKER_CLIENT_READ_RETRY);

  /** File status snapshot taken at stream-creation time. */
  private final URIStatus mStatus;
  /** Options used to open each underlying block stream. */
  private final InStreamOptions mOptions;
  /** Block store used to create block-level input streams. */
  private final AlluxioBlockStore mBlockStore;
  /** Client context, also used to reach workers for async-cache requests. */
  private final FileSystemContext mContext;

  /* Convenience values derived from mStatus, use these instead of querying mStatus. */
  /** Length of the file in bytes. */
  private final long mLength;
  /** Block size in bytes. */
  private final long mBlockSize;

  /* Underlying stream and associated bookkeeping. */
  /** Current offset in the file. */
  private long mPosition;
  /** Underlying block stream, null if a position change has invalidated the previous stream. */
  private BlockInStream mBlockInStream;

  /** A map of worker addresses to the most recent epoch time when client fails to read from it. */
  private Map<WorkerNetAddress, Long> mFailedWorkers = new HashMap<>();

  /**
   * Creates a new file input stream.
   *
   * @param status the file status
   * @param options options governing how block streams are opened
   * @param context the file system context used to create the block store and reach workers
   */
  protected FileInStream(URIStatus status, InStreamOptions options, FileSystemContext context) {
    mStatus = status;
    mOptions = options;
    mBlockStore = AlluxioBlockStore.create(context);
    mContext = context;
    mLength = mStatus.getLength();
    mBlockSize = mStatus.getBlockSizeBytes();
    mPosition = 0;
    mBlockInStream = null;
  }

  /* Input Stream methods */

  /**
   * Reads a single byte, retrying against other workers on retryable failures.
   *
   * @return the byte read, or -1 at end of file
   * @throws IOException the last retryable exception if all retry attempts are exhausted
   */
  @Override
  public int read() throws IOException {
    if (mPosition == mLength) { // at end of file
      return -1;
    }
    CountingRetry retry = new CountingRetry(MAX_WORKERS_TO_RETRY);
    IOException lastException = null;
    while (retry.attempt()) {
      // Ensure mBlockInStream is positioned at mPosition before reading.
      updateStream();
      try {
        int result = mBlockInStream.read();
        if (result != -1) {
          mPosition++;
        }
        return result;
      } catch (UnavailableException | DeadlineExceededException | ConnectException e) {
        // Record the failed worker and drop the stream; the next attempt will create a new
        // stream, avoiding recently-failed workers.
        lastException = e;
        handleRetryableException(mBlockInStream, e);
        mBlockInStream = null;
      }
    }
    throw lastException;
  }

  @Override
  public int read(byte[] b) throws IOException {
    return read(b, 0, b.length);
  }

  /**
   * Reads up to {@code len} bytes into {@code b} starting at {@code off}. May cross block
   * boundaries; the retry budget is reset after every successful partial read.
   *
   * @return the number of bytes read, 0 if {@code len} is 0, or -1 at end of file
   * @throws IOException the last retryable exception if the retry budget is exhausted without
   *         any successful read in between
   */
  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    Preconditions.checkArgument(b != null, PreconditionMessage.ERR_READ_BUFFER_NULL);
    Preconditions.checkArgument(off >= 0 && len >= 0 && len + off <= b.length,
        PreconditionMessage.ERR_BUFFER_STATE.toString(), b.length, off, len);
    if (len == 0) {
      return 0;
    }
    if (mPosition == mLength) { // at end of file
      return -1;
    }
    int bytesLeft = len;
    int currentOffset = off;
    CountingRetry retry = new CountingRetry(MAX_WORKERS_TO_RETRY);
    IOException lastException = null;
    while (bytesLeft > 0 && mPosition != mLength && retry.attempt()) {
      updateStream();
      try {
        int bytesRead = mBlockInStream.read(b, currentOffset, bytesLeft);
        if (bytesRead > 0) {
          bytesLeft -= bytesRead;
          currentOffset += bytesRead;
          mPosition += bytesRead;
        }
        // A successful read clears the failure state so transient errors on different
        // blocks/workers don't accumulate toward the retry limit.
        retry.reset();
        lastException = null;
      } catch (UnavailableException | ConnectException | DeadlineExceededException e) {
        lastException = e;
        handleRetryableException(mBlockInStream, e);
        mBlockInStream = null;
      }
    }
    if (lastException != null) {
      throw lastException;
    }
    return len - bytesLeft;
  }

  /**
   * Skips forward by up to {@code n} bytes (clamped at end of file) by seeking.
   *
   * @return the number of bytes actually skipped; 0 if {@code n <= 0}
   */
  @Override
  public long skip(long n) throws IOException {
    if (n <= 0) {
      return 0;
    }
    long toSkip = Math.min(n, mLength - mPosition);
    seek(mPosition + toSkip);
    return toSkip;
  }

  /** Closes the current underlying block stream, if any (possibly triggering async caching). */
  @Override
  public void close() throws IOException {
    closeBlockInStream(mBlockInStream);
  }

  /* Bounded Stream methods */

  /** @return the number of bytes remaining between the current position and end of file */
  @Override
  public long remaining() {
    return mLength - mPosition;
  }

  /* Positioned Readable methods */
  @Override
  public int positionedRead(long pos, byte[] b, int off, int len) throws IOException {
    return positionedReadInternal(pos, b, off, len);
  }

  /**
   * Reads up to {@code len} bytes at file offset {@code pos} without moving {@link #mPosition}.
   * Opens a fresh block stream per block touched and closes it after each read.
   *
   * @return the number of bytes read, or -1 if {@code pos} is out of range
   */
  private int positionedReadInternal(long pos, byte[] b, int off, int len) throws IOException {
    if (pos < 0 || pos >= mLength) {
      return -1;
    }
    int lenCopy = len;
    CountingRetry retry = new CountingRetry(MAX_WORKERS_TO_RETRY);
    IOException lastException = null;
    while (len > 0 && retry.attempt()) {
      if (pos >= mLength) {
        break;
      }
      long blockId = mStatus.getBlockIds().get(Math.toIntExact(pos / mBlockSize));
      BlockInStream stream = mBlockStore.getInStream(blockId, mOptions, mFailedWorkers);
      try {
        long offset = pos % mBlockSize;
        // Read at most to the end of the current block in this iteration.
        int bytesRead =
            stream.positionedRead(offset, b, off, (int) Math.min(mBlockSize - offset, len));
        Preconditions.checkState(bytesRead > 0, "No data is read before EOF");
        pos += bytesRead;
        off += bytesRead;
        len -= bytesRead;
        retry.reset();
        lastException = null;
      } catch (UnavailableException | DeadlineExceededException | ConnectException e) {
        lastException = e;
        // handleRetryableException closes the failed stream; null it out so the finally
        // block's close becomes a no-op.
        handleRetryableException(stream, e);
        stream = null;
      } finally {
        closeBlockInStream(stream);
      }
    }
    if (lastException != null) {
      throw lastException;
    }
    return lenCopy - len;
  }

  /* Seekable methods */
  @Override
  public long getPos() {
    return mPosition;
  }

  /**
   * Moves the stream position to {@code pos}. If the target is within the currently open block
   * stream the seek is serviced by that stream; otherwise the stream is closed and lazily
   * re-created on the next read via {@link #updateStream()}.
   *
   * @param pos the new position; must be in {@code [0, mLength]}
   */
  @Override
  public void seek(long pos) throws IOException {
    if (mPosition == pos) {
      return;
    }
    Preconditions.checkArgument(pos >= 0, PreconditionMessage.ERR_SEEK_NEGATIVE.toString(), pos);
    Preconditions.checkArgument(pos <= mLength,
        PreconditionMessage.ERR_SEEK_PAST_END_OF_FILE.toString(), pos);
    if (mBlockInStream == null) { // no current stream open, advance position
      mPosition = pos;
      return;
    }
    long delta = pos - mPosition;
    if (delta <= mBlockInStream.remaining() && delta >= -mBlockInStream.getPos()) { // within block
      mBlockInStream.seek(mBlockInStream.getPos() + delta);
    } else { // close the underlying stream as the new position is no longer in bounds
      closeBlockInStream(mBlockInStream);
    }
    mPosition += delta;
  }

  /**
   * Initializes the underlying block stream if necessary. This method must be called before
   * reading from mBlockInStream.
   */
  private void updateStream() throws IOException {
    if (mBlockInStream != null && mBlockInStream.remaining() > 0) { // can still read from stream
      return;
    }
    if (mBlockInStream != null && mBlockInStream.remaining() == 0) { // current stream is done
      closeBlockInStream(mBlockInStream);
    }
    /* Create a new stream to read from mPosition. */
    // Calculate block id.
    long blockId = mStatus.getBlockIds().get(Math.toIntExact(mPosition / mBlockSize));
    // Create stream
    mBlockInStream = mBlockStore.getInStream(blockId, mOptions, mFailedWorkers);
    // Set the stream to the correct position.
    long offset = mPosition % mBlockSize;
    mBlockInStream.seek(offset);
  }

  /**
   * Closes the given block stream and, when appropriate, fires a best-effort async cache
   * request so the block gets cached in Alluxio. No caching request is sent for blocks that
   * were read from the LOCAL source, or when the read type does not enable caching. Failures
   * of the cache request are logged and swallowed — they must not fail the read path.
   *
   * @param stream the stream to close; may be null, in which case this is a no-op
   */
  private void closeBlockInStream(BlockInStream stream) throws IOException {
    if (stream != null) {
      // Get relevant information from the stream.
      WorkerNetAddress dataSource = stream.getAddress();
      long blockId = stream.getId();
      BlockInStream.BlockInStreamSource blockSource = stream.getSource();
      stream.close();
      // TODO(calvin): we should be able to do a close check instead of using null
      if (stream == mBlockInStream) { // if stream is instance variable, set to null
        mBlockInStream = null;
      }
      if (blockSource == BlockInStream.BlockInStreamSource.LOCAL) {
        return;
      }
      // Send an async cache request to a worker based on read type and passive cache options.
      boolean cache = mOptions.getOptions().getReadType().isCache();
      boolean passiveCache = Configuration.getBoolean(PropertyKey.USER_FILE_PASSIVE_CACHE_ENABLED);
      long channelTimeout = Configuration.getMs(PropertyKey.USER_NETWORK_NETTY_TIMEOUT_MS);
      if (cache) {
        WorkerNetAddress worker;
        if (passiveCache && mContext.hasLocalWorker()) { // send request to local worker
          worker = mContext.getLocalWorker();
        } else { // send request to data source
          worker = dataSource;
        }
        try {
          // Construct the async cache request
          long blockLength = mOptions.getBlockInfo(blockId).getLength();
          Protocol.AsyncCacheRequest request =
              Protocol.AsyncCacheRequest.newBuilder().setBlockId(blockId).setLength(blockLength)
                  .setOpenUfsBlockOptions(mOptions.getOpenUfsBlockOptions(blockId))
                  .setSourceHost(dataSource.getHost()).setSourcePort(dataSource.getDataPort())
                  .build();
          Channel channel = mContext.acquireNettyChannel(worker);
          try {
            NettyRPCContext rpcContext =
                NettyRPCContext.defaults().setChannel(channel).setTimeout(channelTimeout);
            // Fire-and-forget: the client does not wait for the cache operation to complete.
            NettyRPC.fireAndForget(rpcContext, new ProtoMessage(request));
          } finally {
            mContext.releaseNettyChannel(worker, channel);
          }
        } catch (Exception e) {
          LOG.warn("Failed to complete async cache request for block {} at worker {}: {}", blockId,
              worker, e.getMessage());
        }
      }
    }
  }

  /**
   * Records a retryable read failure: logs it, best-effort closes the failed stream, and marks
   * the worker as failed so subsequent stream creation avoids it.
   *
   * @param stream the stream that failed; must not be null
   * @param e the exception that triggered the retry
   */
  private void handleRetryableException(BlockInStream stream, IOException e) {
    WorkerNetAddress workerAddress = stream.getAddress();
    LOG.warn("Failed to read block {} from worker {}, will retry: {}",
        stream.getId(), workerAddress, e.getMessage());
    try {
      stream.close();
    } catch (Exception ex) {
      // Do not throw doing a best effort close
      LOG.warn("Failed to close input stream for block {}: {}", stream.getId(), ex.getMessage());
    }
    mFailedWorkers.put(workerAddress, System.currentTimeMillis());
  }
}
| |
package com.bazaarvoice.emodb.sor.condition.impl;
import com.bazaarvoice.emodb.sor.condition.Condition;
import com.bazaarvoice.emodb.sor.condition.ConditionVisitor;
import com.bazaarvoice.emodb.sor.condition.Conditions;
import com.bazaarvoice.emodb.sor.condition.LikeCondition;
import com.bazaarvoice.emodb.sor.delta.deser.DeltaJson;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.Writer;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
/**
 * Implementation of {@link LikeCondition}. The static {@link #create(String)} factory parses a
 * "like" expression (where {@code *} is a wildcard and {@code \} escapes the following
 * character) and returns the most efficient specialized matcher for its shape: exact match,
 * any-string, prefix, suffix, prefix+suffix, contains, or the general multi-wildcard case.
 */
abstract public class LikeConditionImpl extends AbstractCondition implements LikeCondition {

    /** The original, unmodified condition expression (escapes and wildcards intact). */
    private final String _condition;

    /**
     * Creates a like condition from an arbitrary value; only String values are supported.
     *
     * @throws IllegalArgumentException if {@code value} is not a String
     */
    public static LikeConditionImpl create(Object value) {
        checkArgument(value instanceof String, "Like expression only supports strings");
        return create(value.toString());
    }

    /**
     * Parses {@code condition} and returns a specialized implementation.
     *
     * @throws IllegalArgumentException if the expression ends with an unterminated escape
     */
    public static LikeConditionImpl create(final String condition) {
        requireNonNull(condition, "Like expression cannot be null");

        // Optimize for the most common case where an expression contains a single wildcard.
        int firstWildcard = -1;
        List<Integer> remainingWildcards = null;

        // "unescaped" is progressively rewritten: escape characters are removed and redundant
        // consecutive wildcards are collapsed, while wildcard positions are recorded.
        String unescaped = condition;
        int length = unescaped.length();

        int i = 0;
        while (i < length) {
            switch (unescaped.charAt(i)) {
                case '\\':
                    if (i == length-1) {
                        throw new IllegalArgumentException("Invalid terminal escape character at position " + i);
                    }
                    // Remove the escape character and preserve the following character.
                    // For example, "abc\\*def" becomes "abc*def" and evaluation of the string
                    // continues at the first character after the '*' ('d').
                    unescaped = unescaped.substring(0, i) + unescaped.substring(i+1);
                    length -= 1;
                    break;
                case '*':
                    // Record the index of the wildcard
                    if (firstWildcard == -1) {
                        firstWildcard = i;
                    } else {
                        if (remainingWildcards == null) {
                            remainingWildcards = Lists.newArrayListWithCapacity(3);
                        }
                        remainingWildcards.add(i);
                    }
                    // Consecutive wildcards are redundant. If there are any remove them now.
                    int endConsecWilds = i+1;
                    while (endConsecWilds != length && unescaped.charAt(endConsecWilds) == '*') {
                        endConsecWilds += 1;
                    }
                    if (endConsecWilds != i+1) {
                        unescaped = unescaped.substring(0, i+1) + unescaped.substring(endConsecWilds);
                        length -= endConsecWilds - i - 1;
                    }
                    break;
            }
            i += 1;
        }

        if (firstWildcard == -1) {
            // There were no wildcards. Ideally the caller should use a simple equality condition. We'll
            // optimize by returning a predicate which performs a simple equality check.
            return new ExactMatch(condition, unescaped);
        }

        if (length == 1) {
            // The entire string was nothing but wildcards. Ideally the caller should use "is(string)" instead.
            return AnyString.getInstance(condition);
        }

        if (remainingWildcards == null) {
            // Simple case where there is exactly one wildcard in the expression
            if (firstWildcard == 0) {
                // Suffix case, such as "*:testcustomer"
                return new EndsWith(condition, unescaped.substring(1));
            } else if (firstWildcard == length-1) {
                // Prefix case, such as "review:*"
                return new StartsWith(condition, unescaped.substring(0, firstWildcard));
            } else {
                // Surrounds case, such as "source:*:testcustomer"
                return new Surrounds(condition, unescaped.substring(0, firstWildcard), unescaped.substring(firstWildcard+1));
            }
        }

        // Multiple wildcards. The final optimization is the contains case, such as "*review*"
        if (firstWildcard == 0 && remainingWildcards.size() == 1 && remainingWildcards.get(0) == length-1) {
            return new Contains(condition, unescaped.substring(1, length-1));
        }

        // Break the string up into constant substrings separated by wildcards. Notice that if an expression
        // starts with a wildcard then the first substring will be the empty string, "". This is intentional since
        // the empty string will match the beginning of all input strings. The same logic applies if the
        // expression ends with a wildcard.
        List<String> substrings = Lists.newArrayListWithCapacity(remainingWildcards.size() + 2);
        substrings.add(unescaped.substring(0, firstWildcard));
        for (int nextWildcard : remainingWildcards) {
            substrings.add(unescaped.substring(firstWildcard+1, nextWildcard));
            firstWildcard = nextWildcard;
        }
        substrings.add(unescaped.substring(firstWildcard+1));

        return new Complex(condition, substrings);
    }

    protected LikeConditionImpl(String condition) {
        _condition = condition;
    }

    @Override
    public <T, V> V visit(ConditionVisitor<T, V> visitor, @Nullable T context) {
        return visitor.visit(this, context);
    }

    @Override
    public void appendTo(Appendable buf) throws IOException {
        // Use a writer so the condition string can be correctly converted to json using DeltaJson.
        Writer out = CharStreams.asWriter(buf);
        out.write("like(");
        DeltaJson.write(out, _condition);
        out.write(")");
    }

    @Override
    public String getCondition() {
        return _condition;
    }

    @Override
    public boolean overlaps(LikeCondition condition) {
        // If either condition is a constant then the other condition must contain the condition's string to overlap.
        // For example, "door" overlaps "d*r"
        if (!hasWildcards()) {
            return condition.matches(getCondition());
        } else if (!condition.hasWildcards()) {
            return matches(condition.getCondition());
        }

        // Any internal wildcards surrounded by constants can match any other internal values, so determining overlap
        // only depends on the prefixes and suffixes.
        String prefix = getPrefix();
        String otherPrefix = condition.getPrefix();
        String suffix = getSuffix();
        String otherSuffix = condition.getSuffix();

        return (prefix == null || otherPrefix == null || prefix.startsWith(otherPrefix) || otherPrefix.startsWith(prefix)) &&
                (suffix == null || otherSuffix == null || suffix.endsWith(otherSuffix) || otherSuffix.endsWith(suffix));
    }

    @Override
    public boolean isSubsetOf(LikeCondition condition) {
        // This condition is a subset of the other condition if this condition, with all wildcards replaced with
        // unique characters, matches the other condition.
        // NOTE: this relies on '\u0000' not occurring in realistic condition strings.
        String testString = substituteWildcardsWith("\u0000");
        return condition.matches(testString);
    }

    /**
     * Default implementation returns null, subclasses with a prefix must override.
     */
    @Override
    public String getPrefix() {
        return null;
    }

    /**
     * Default implementation returns null, subclasses with a suffix must override.
     */
    @Override
    public String getSuffix() {
        return null;
    }

    /**
     * Default implementation returns true, the one subclass where this is false, {@link ExactMatch}, overrides.
     */
    @Override
    public boolean hasWildcards() {
        return true;
    }

    /**
     * Returns this condition with all wildcards substituted with the provided string.
     */
    abstract protected String substituteWildcardsWith(String substitute);

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // Test against LikeConditionImpl, not the LikeCondition interface: the cast below would
        // throw ClassCastException for any foreign LikeCondition implementation.
        if (!(o instanceof LikeConditionImpl)) {
            return false;
        }
        LikeConditionImpl that = (LikeConditionImpl) o;
        return _condition.equals(that._condition);
    }

    @Override
    public int hashCode() {
        return _condition.hashCode();
    }

    /**
     * Returns a simpler equivalent representation of this same condition if one exists. For example,
     * <code>like("constant_string")</code> can be reduced to the equality condition "constant_string".
     * By default the base class returns itself; subclasses can override as appropriate.
     */
    public Condition simplify() {
        return this;
    }

    /** Implementation for exactly matching a string, such as "review:client" */
    public static class ExactMatch extends LikeConditionImpl {
        private final String _expression;

        private ExactMatch(String condition, String expression) {
            super(condition);
            _expression = expression;
        }

        @Override
        public boolean matches(String input) {
            return _expression.equals(input);
        }

        @Override
        public Condition simplify() {
            return Conditions.equal(_expression);
        }

        @Override
        public boolean hasWildcards() {
            return false;
        }

        @Override
        protected String substituteWildcardsWith(String substitute) {
            return _expression;
        }
    }

    /** Implementation for matching all strings, such as "*" */
    public static class AnyString extends LikeConditionImpl {
        // Made final: a mutable public static singleton field invites accidental reassignment.
        public static final AnyString _defaultInstance = new AnyString("*");

        private static AnyString getInstance(String condition) {
            // Most frequently the condition that spawned this instance is a simple single wildcard character,
            // "*". If this is the case then reuse the default singleton. Otherwise create a new instance
            // to preserve the original condition.
            if ("*".equals(condition)) {
                return _defaultInstance;
            }
            return new AnyString(condition);
        }

        private AnyString(String condition) {
            super(condition);
        }

        @Override
        public boolean matches(String input) {
            return true;
        }

        @Override
        public Condition simplify() {
            return Conditions.isString();
        }

        @Override
        protected String substituteWildcardsWith(String substitute) {
            return substitute;
        }
    }

    /** Implementation for matching a prefix, such as "review:*" */
    public static class StartsWith extends LikeConditionImpl {
        private final String _prefix;

        private StartsWith(String condition, String prefix) {
            super(condition);
            _prefix = prefix;
        }

        @Override
        public boolean matches(String input) {
            return input.startsWith(_prefix);
        }

        @Override
        public String getPrefix() {
            return _prefix;
        }

        @Override
        protected String substituteWildcardsWith(String substitute) {
            return _prefix + substitute;
        }
    }

    /** Implementation for matching a suffix, such as "*:client" */
    public static class EndsWith extends LikeConditionImpl {
        private final String _suffix;

        private EndsWith(String condition, String suffix) {
            super(condition);
            _suffix = suffix;
        }

        @Override
        public boolean matches(String input) {
            return input.endsWith(_suffix);
        }

        @Override
        public String getSuffix() {
            return _suffix;
        }

        @Override
        protected String substituteWildcardsWith(String substitute) {
            return substitute + _suffix;
        }
    }

    /** Implementation for matching surrounded wildcard, such as "group:*:client" */
    public static class Surrounds extends LikeConditionImpl {
        private final String _prefix;
        private final String _suffix;
        // Minimum input length required so the prefix and suffix cannot overlap in the input.
        private final int _minLength;

        private Surrounds(String condition, String prefix, String suffix) {
            super(condition);
            _prefix = prefix;
            _suffix = suffix;
            _minLength = _prefix.length() + _suffix.length();
        }

        @Override
        public boolean matches(String input) {
            return input.length() >= _minLength &&
                    input.startsWith(_prefix) &&
                    input.endsWith(_suffix);
        }

        @Override
        public String getPrefix() {
            return _prefix;
        }

        @Override
        public String getSuffix() {
            return _suffix;
        }

        @Override
        protected String substituteWildcardsWith(String substitute) {
            return _prefix + substitute + _suffix;
        }
    }

    /** Implementation for matching a contained expression, such as "*client*" */
    public static class Contains extends LikeConditionImpl {
        private final String _expression;

        private Contains(String condition, String expression) {
            super(condition);
            _expression = expression;
        }

        @Override
        public boolean matches(String input) {
            return input.contains(_expression);
        }

        @Override
        protected String substituteWildcardsWith(String substitute) {
            return substitute + _expression + substitute;
        }
    }

    /**
     * Implementation for matching complex expressions with multiple wildcards that doesn't match
     * any of the previous more efficient computations.
     */
    public static class Complex extends LikeConditionImpl {
        private final String _prefix;
        private final String _suffix;
        private final List<String> _innerSubstrings;
        // Sum of all constant substring lengths; any matching input must be at least this long.
        private final int _minLength;

        private Complex(String condition, List<String> substrings) {
            super(condition);
            int length = substrings.size();
            _prefix = substrings.get(0);
            _suffix = substrings.get(length-1);
            _innerSubstrings = ImmutableList.copyOf(substrings.subList(1, length-1));

            int minLength = 0;
            for (String substring : substrings) {
                minLength += substring.length();
            }
            _minLength = minLength;
        }

        @Override
        public boolean matches(String input) {
            // Fastest initial checks are whether the total string is at least as long as all substrings
            // followed by a prefix and suffix check
            if (input.length() < _minLength || !input.startsWith(_prefix) || !input.endsWith(_suffix)) {
                return false;
            }

            // Ensure each inner string appears in-order non-overlapping within the input string starting
            // after the prefix.
            int idx = _prefix.length();
            for (String substring : _innerSubstrings) {
                if ((idx = input.indexOf(substring, idx)) == -1) {
                    return false;
                }
                idx += substring.length();
            }

            // Ensure the final inner string terminated before the suffix
            return idx <= input.length() - _suffix.length();
        }

        @Override
        public String getPrefix() {
            return _prefix.length() != 0 ? _prefix : null;
        }

        @Override
        public String getSuffix() {
            return _suffix.length() != 0 ? _suffix : null;
        }

        @Override
        protected String substituteWildcardsWith(String substitute) {
            return _prefix + substitute +
                    Joiner.on(substitute).join(_innerSubstrings) +
                    substitute + _suffix;
        }

        /**
         * Of all of the "like" condition variants the complex implementation is slightly more expensive to compute.
         */
        @Override
        public int weight() {
            return 2;
        }
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.emitter.ambari.metrics;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import java.util.Collections;
import java.util.List;
/**
 * Immutable configuration for the Ambari metrics emitter, deserialized by Jackson via the
 * annotated constructor. Optional settings fall back to the documented defaults when absent.
 */
public class AmbariMetricsEmitterConfig
{
  private static final int DEFAULT_BATCH_SIZE = 100;
  private static final Long DEFAULT_FLUSH_PERIOD_MILLIS = (long) (60 * 1000); // flush every one minute
  private static final long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
  private static final String DEFAULT_PROTOCOL = "http";

  @JsonProperty
  private final String hostname;
  @JsonProperty
  private final int port;
  @JsonProperty
  private final String protocol;
  @JsonProperty
  private final String trustStorePath;
  @JsonProperty
  private final String trustStoreType;
  @JsonProperty
  private final String trustStorePassword;
  @JsonProperty
  private final int batchSize;
  @JsonProperty
  private final long flushPeriod;
  @JsonProperty
  private final int maxQueueSize;
  @JsonProperty("eventConverter")
  private final DruidToTimelineMetricConverter druidToTimelineEventConverter;
  @JsonProperty
  private final List<String> alertEmitters;
  @JsonProperty
  private final long emitWaitTime;
  //waiting up to the specified wait time if necessary for an event to become available.
  @JsonProperty
  private final long waitForEventTime;

  /**
   * Jackson creator. {@code hostname}, {@code port} and {@code eventConverter} are required;
   * every other property is optional and defaulted when null.
   */
  @JsonCreator
  public AmbariMetricsEmitterConfig(
      @JsonProperty("hostname") String hostname,
      @JsonProperty("port") Integer port,
      @JsonProperty("protocol") String protocol,
      @JsonProperty("trustStorePath") String trustStorePath,
      @JsonProperty("trustStoreType") String trustStoretype,
      @JsonProperty("trustStorePassword") String trustStorePassword,
      @JsonProperty("batchSize") Integer batchSize,
      @JsonProperty("flushPeriod") Long flushPeriod,
      @JsonProperty("maxQueueSize") Integer maxQueueSize,
      @JsonProperty("eventConverter") DruidToTimelineMetricConverter druidToTimelineEventConverter,
      @JsonProperty("alertEmitters") List<String> alertEmitters,
      @JsonProperty("emitWaitTime") Long emitWaitTime,
      @JsonProperty("waitForEventTime") Long waitForEventTime
  )
  {
    this.hostname = Preconditions.checkNotNull(hostname, "hostname can not be null");
    this.port = Preconditions.checkNotNull(port, "port can not be null");
    this.druidToTimelineEventConverter = Preconditions.checkNotNull(
        druidToTimelineEventConverter,
        "Event converter can not be null"
    );
    // Optional settings: substitute defaults for anything the user left unset.
    this.protocol = (protocol == null) ? DEFAULT_PROTOCOL : protocol;
    this.trustStorePath = trustStorePath;
    this.trustStoreType = trustStoretype;
    this.trustStorePassword = trustStorePassword;
    this.batchSize = (batchSize == null) ? DEFAULT_BATCH_SIZE : batchSize;
    this.flushPeriod = (flushPeriod == null) ? DEFAULT_FLUSH_PERIOD_MILLIS : flushPeriod;
    this.maxQueueSize = (maxQueueSize == null) ? Integer.MAX_VALUE : maxQueueSize;
    this.alertEmitters = (alertEmitters == null) ? Collections.<String>emptyList() : alertEmitters;
    this.emitWaitTime = (emitWaitTime == null) ? 0 : emitWaitTime;
    this.waitForEventTime = (waitForEventTime == null) ? DEFAULT_GET_TIMEOUT : waitForEventTime;
  }

  /** @return the metrics collector hostname (required) */
  @JsonProperty
  public String getHostname()
  {
    return hostname;
  }

  /** @return the metrics collector port (required) */
  @JsonProperty
  public int getPort()
  {
    return port;
  }

  /** @return the transport protocol, defaulting to "http" */
  @JsonProperty
  public String getProtocol()
  {
    return protocol;
  }

  /** @return the trust store path, or null if unset */
  @JsonProperty
  public String getTrustStorePath()
  {
    return trustStorePath;
  }

  /** @return the trust store type, or null if unset */
  @JsonProperty
  public String getTrustStoreType()
  {
    return trustStoreType;
  }

  /** @return the trust store password, or null if unset */
  @JsonProperty
  public String getTrustStorePassword()
  {
    return trustStorePassword;
  }

  /** @return the number of events sent per batch */
  @JsonProperty
  public int getBatchSize()
  {
    return batchSize;
  }

  /** @return the maximum number of queued events */
  @JsonProperty
  public int getMaxQueueSize()
  {
    return maxQueueSize;
  }

  /** @return the flush period in milliseconds */
  @JsonProperty
  public long getFlushPeriod()
  {
    return flushPeriod;
  }

  /** @return the converter from Druid events to timeline metrics */
  @JsonProperty
  public DruidToTimelineMetricConverter getDruidToTimelineEventConverter()
  {
    return druidToTimelineEventConverter;
  }

  /** @return the list of alert emitter names, never null */
  @JsonProperty
  public List<String> getAlertEmitters()
  {
    return alertEmitters;
  }

  /** @return how long to wait when emitting, in milliseconds */
  @JsonProperty
  public long getEmitWaitTime()
  {
    return emitWaitTime;
  }

  /** @return how long to wait for an event to become available, in milliseconds */
  @JsonProperty
  public long getWaitForEventTime()
  {
    return waitForEventTime;
  }

  /** Null-safe equality check for possibly-null reference fields. */
  private static boolean equalOrBothNull(Object a, Object b)
  {
    return (a == null) ? (b == null) : a.equals(b);
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    AmbariMetricsEmitterConfig that = (AmbariMetricsEmitterConfig) o;
    return port == that.port
        && batchSize == that.batchSize
        && flushPeriod == that.flushPeriod
        && maxQueueSize == that.maxQueueSize
        && emitWaitTime == that.emitWaitTime
        && waitForEventTime == that.waitForEventTime
        && hostname.equals(that.hostname)
        && protocol.equals(that.protocol)
        && equalOrBothNull(trustStorePath, that.trustStorePath)
        && equalOrBothNull(trustStoreType, that.trustStoreType)
        && equalOrBothNull(trustStorePassword, that.trustStorePassword)
        && druidToTimelineEventConverter.equals(that.druidToTimelineEventConverter)
        && alertEmitters.equals(that.alertEmitters);
  }

  @Override
  public int hashCode()
  {
    // Field order and the 31-multiplier chain are preserved so hash values stay stable.
    int result = hostname.hashCode();
    result = 31 * result + port;
    result = 31 * result + protocol.hashCode();
    result = 31 * result + (trustStorePath != null ? trustStorePath.hashCode() : 0);
    result = 31 * result + (trustStoreType != null ? trustStoreType.hashCode() : 0);
    result = 31 * result + (trustStorePassword != null ? trustStorePassword.hashCode() : 0);
    result = 31 * result + batchSize;
    // Long.hashCode(x) == (int) (x ^ (x >>> 32)), identical to the previous manual fold.
    result = 31 * result + Long.hashCode(flushPeriod);
    result = 31 * result + maxQueueSize;
    result = 31 * result + druidToTimelineEventConverter.hashCode();
    result = 31 * result + alertEmitters.hashCode();
    result = 31 * result + Long.hashCode(emitWaitTime);
    result = 31 * result + Long.hashCode(waitForEventTime);
    return result;
  }
}
| |
package com.hubspot.singularity;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.inject.name.Names.named;
import java.io.IOException;
import java.net.SocketException;
import java.util.Collections;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ScheduledExecutorService;
import javax.inject.Inject;
import javax.inject.Provider;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.leader.LeaderLatch;
import org.apache.curator.framework.recipes.leader.LeaderLatchListener;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.jets3t.service.S3Service;
import org.jets3t.service.S3ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.security.AWSCredentials;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import com.google.inject.Singleton;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.mesos.client.MesosClient;
import com.hubspot.singularity.config.CustomExecutorConfiguration;
import com.hubspot.singularity.config.HistoryPurgingConfiguration;
import com.hubspot.singularity.config.MesosConfiguration;
import com.hubspot.singularity.config.S3Configuration;
import com.hubspot.singularity.config.S3GroupOverrideConfiguration;
import com.hubspot.singularity.config.SMTPConfiguration;
import com.hubspot.singularity.config.SentryConfiguration;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.config.ZooKeeperConfiguration;
import com.hubspot.singularity.guice.DropwizardObjectMapperProvider;
import com.hubspot.singularity.hooks.LoadBalancerClient;
import com.hubspot.singularity.hooks.LoadBalancerClientImpl;
import com.hubspot.singularity.hooks.SingularityWebhookPoller;
import com.hubspot.singularity.hooks.SingularityWebhookSender;
import com.hubspot.singularity.sentry.NotifyingExceptionMapper;
import com.hubspot.singularity.sentry.SingularityExceptionNotifier;
import com.hubspot.singularity.sentry.SingularityExceptionNotifierManaged;
import com.hubspot.singularity.smtp.JadeTemplateLoader;
import com.hubspot.singularity.smtp.MailTemplateHelpers;
import com.hubspot.singularity.smtp.SingularityMailRecordCleaner;
import com.hubspot.singularity.smtp.SingularityMailer;
import com.hubspot.singularity.smtp.SingularitySmtpSender;
import com.ning.http.client.AsyncHttpClient;
import de.neuland.jade4j.parser.Parser;
import de.neuland.jade4j.parser.node.Node;
import de.neuland.jade4j.template.JadeTemplate;
import io.dropwizard.jetty.HttpConnectorFactory;
import io.dropwizard.server.SimpleServerFactory;
public class SingularityMainModule implements Module {
public static final String HOSTNAME_PROPERTY = "singularity.hostname";
public static final String HTTP_PORT_PROPERTY = "singularity.http.port";
public static final String TASK_TEMPLATE = "task.template";
public static final String REQUEST_IN_COOLDOWN_TEMPLATE = "request.in.cooldown.template";
public static final String REQUEST_MODIFIED_TEMPLATE = "request.modified.template";
public static final String RATE_LIMITED_TEMPLATE = "rate.limited.template";
public static final String SERVER_ID_PROPERTY = "singularity.server.id";
public static final String HOST_ADDRESS_PROPERTY = "singularity.host.address";
public static final String HTTP_HOST_AND_PORT = "http.host.and.port";
public static final String SINGULARITY_URI_BASE = "_singularity_uri_base";
public static final String HEALTHCHECK_THREADPOOL_NAME = "_healthcheck_threadpool";
public static final Named HEALTHCHECK_THREADPOOL_NAMED = Names.named(HEALTHCHECK_THREADPOOL_NAME);
public static final String NEW_TASK_THREADPOOL_NAME = "_new_task_threadpool";
public static final Named NEW_TASK_THREADPOOL_NAMED = Names.named(NEW_TASK_THREADPOOL_NAME);
private final SingularityConfiguration configuration;
public SingularityMainModule(final SingularityConfiguration configuration) {
this.configuration = configuration;
}
@Override
public void configure(Binder binder) {
binder.bind(HostAndPort.class).annotatedWith(named(HTTP_HOST_AND_PORT)).toProvider(SingularityHostAndPortProvider.class).in(Scopes.SINGLETON);
binder.bind(LeaderLatch.class).to(SingularityLeaderLatch.class).in(Scopes.SINGLETON);
binder.bind(CuratorFramework.class).toProvider(SingularityCuratorProvider.class).in(Scopes.SINGLETON);
Multibinder<ConnectionStateListener> connectionStateListeners = Multibinder.newSetBinder(binder, ConnectionStateListener.class);
connectionStateListeners.addBinding().to(SingularityAbort.class).in(Scopes.SINGLETON);
Multibinder<LeaderLatchListener> leaderLatchListeners = Multibinder.newSetBinder(binder, LeaderLatchListener.class);
leaderLatchListeners.addBinding().to(SingularityLeaderController.class).in(Scopes.SINGLETON);
binder.bind(SingularityDriverManager.class).in(Scopes.SINGLETON);
binder.bind(SingularityLeaderController.class).in(Scopes.SINGLETON);
binder.bind(SingularityMailer.class).in(Scopes.SINGLETON);
binder.bind(SingularitySmtpSender.class).in(Scopes.SINGLETON);
binder.bind(MailTemplateHelpers.class).in(Scopes.SINGLETON);
binder.bind(SingularityExceptionNotifier.class).in(Scopes.SINGLETON);
binder.bind(LoadBalancerClient.class).to(LoadBalancerClientImpl.class).in(Scopes.SINGLETON);
binder.bind(SingularityMailRecordCleaner.class).in(Scopes.SINGLETON);
binder.bind(SingularityWebhookPoller.class).in(Scopes.SINGLETON);
binder.bind(MesosClient.class).in(Scopes.SINGLETON);
binder.bind(SingularityAbort.class).in(Scopes.SINGLETON);
binder.bind(SingularityExceptionNotifierManaged.class).in(Scopes.SINGLETON);
binder.bind(SingularityWebhookSender.class).in(Scopes.SINGLETON);
binder.bind(NotifyingExceptionMapper.class).in(Scopes.SINGLETON);
binder.bind(ObjectMapper.class).toProvider(DropwizardObjectMapperProvider.class).in(Scopes.SINGLETON);
binder.bind(AsyncHttpClient.class).to(SingularityHttpClient.class).in(Scopes.SINGLETON);
binder.bind(ServerProvider.class).in(Scopes.SINGLETON);
binder.bind(SingularityDropwizardHealthcheck.class).in(Scopes.SINGLETON);
binder.bindConstant().annotatedWith(Names.named(SERVER_ID_PROPERTY)).to(UUID.randomUUID().toString());
binder.bind(SingularityManagedScheduledExecutorServiceFactory.class).in(Scopes.SINGLETON);
binder.bind(ScheduledExecutorService.class).annotatedWith(HEALTHCHECK_THREADPOOL_NAMED).toProvider(new SingularityManagedScheduledExecutorServiceProvider(configuration.getHealthcheckStartThreads(),
configuration.getThreadpoolShutdownDelayInSeconds(),
"healthcheck")).in(Scopes.SINGLETON);
binder.bind(ScheduledExecutorService.class).annotatedWith(NEW_TASK_THREADPOOL_NAMED).toProvider(new SingularityManagedScheduledExecutorServiceProvider(configuration.getCheckNewTasksScheduledThreads(),
configuration.getThreadpoolShutdownDelayInSeconds(),
"check-new-task")).in(Scopes.SINGLETON);
try {
binder.bindConstant().annotatedWith(Names.named(HOST_ADDRESS_PROPERTY)).to(JavaUtils.getHostAddress());
} catch (SocketException e) {
throw Throwables.propagate(e);
}
}
public static class SingularityHostAndPortProvider implements Provider<HostAndPort> {
private final String hostname;
private final int httpPort;
@Inject
SingularityHostAndPortProvider(final SingularityConfiguration configuration, @Named(HOST_ADDRESS_PROPERTY) String hostAddress) {
checkNotNull(configuration, "configuration is null");
this.hostname = configuration.getHostname().or(JavaUtils.getHostName().or(hostAddress));
SimpleServerFactory simpleServerFactory = (SimpleServerFactory) configuration.getServerFactory();
HttpConnectorFactory httpFactory = (HttpConnectorFactory) simpleServerFactory.getConnector();
this.httpPort = httpFactory.getPort();
}
@Override
public HostAndPort get() {
return HostAndPort.fromParts(hostname, httpPort);
}
}
@Provides
@Named(SINGULARITY_URI_BASE)
String getSingularityUriBase(final SingularityConfiguration configuration) {
final String singularityUiPrefix = configuration.getUiConfiguration().getBaseUrl().or(((SimpleServerFactory) configuration.getServerFactory()).getApplicationContextPath());
return (singularityUiPrefix.endsWith("/")) ? singularityUiPrefix.substring(0, singularityUiPrefix.length() - 1) : singularityUiPrefix;
}
@Provides
@Singleton
public ZooKeeperConfiguration zooKeeperConfiguration(final SingularityConfiguration config) {
return config.getZooKeeperConfiguration();
}
@Provides
@Singleton
public Optional<SentryConfiguration> sentryConfiguration(final SingularityConfiguration config) {
return config.getSentryConfiguration();
}
@Provides
@Singleton
public Optional<S3Service> s3Service(Optional<S3Configuration> config) throws S3ServiceException {
if (!config.isPresent()) {
return Optional.absent();
}
return Optional.<S3Service>of(new RestS3Service(new AWSCredentials(config.get().getS3AccessKey(), config.get().getS3SecretKey())));
}
@Provides
@Singleton
public Map<String, S3Service> s3ServiceGroupOverrides(Optional<S3Configuration> config) throws S3ServiceException {
if (!config.isPresent() || config.get().getGroupOverrides().isEmpty()) {
return Collections.emptyMap();
}
final ImmutableMap.Builder<String, S3Service> s3ServiceBuilder = ImmutableMap.builder();
for (Map.Entry<String, S3GroupOverrideConfiguration> entry : config.get().getGroupOverrides().entrySet()) {
s3ServiceBuilder.put(entry.getKey(), new RestS3Service(new AWSCredentials(entry.getValue().getS3AccessKey(), entry.getValue().getS3SecretKey())));
}
return s3ServiceBuilder.build();
}
@Provides
@Singleton
public MesosConfiguration mesosConfiguration(final SingularityConfiguration config) {
return config.getMesosConfiguration();
}
@Provides
@Singleton
public CustomExecutorConfiguration customExecutorConfiguration(final SingularityConfiguration config) {
return config.getCustomExecutorConfiguration();
}
@Provides
@Singleton
public Optional<SMTPConfiguration> smtpConfiguration(final SingularityConfiguration config) {
return config.getSmtpConfiguration();
}
@Provides
@Singleton
public Optional<S3Configuration> s3Configuration(final SingularityConfiguration config) {
return config.getS3Configuration();
}
@Provides
@Singleton
public HistoryPurgingConfiguration historyPurgingConfiguration(final SingularityConfiguration config) {
return config.getHistoryPurgingConfiguration();
}
private JadeTemplate getJadeTemplate(String name) throws IOException {
Parser parser = new Parser("templates/" + name, JadeTemplateLoader.JADE_LOADER);
Node root = parser.parse();
final JadeTemplate jadeTemplate = new JadeTemplate();
jadeTemplate.setTemplateLoader(JadeTemplateLoader.JADE_LOADER);
jadeTemplate.setRootNode(root);
return jadeTemplate;
}
@Provides
@Singleton
@Named(TASK_TEMPLATE)
public JadeTemplate getTaskTemplate() throws IOException {
return getJadeTemplate("task.jade");
}
@Provides
@Singleton
@Named(REQUEST_IN_COOLDOWN_TEMPLATE)
public JadeTemplate getRequestPausedTemplate() throws IOException {
return getJadeTemplate("request_in_cooldown.jade");
}
@Provides
@Singleton
@Named(REQUEST_MODIFIED_TEMPLATE)
public JadeTemplate getRequestModifiedTemplate() throws IOException {
return getJadeTemplate("request_modified.jade");
}
@Provides
@Singleton
@Named(RATE_LIMITED_TEMPLATE)
public JadeTemplate getRateLimitedTemplate() throws IOException {
return getJadeTemplate("rate_limited.jade");
}
}
| |
/*******************************************************************************
* Copyright 2011-2014 Sergey Tarasevich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.nostra13.universalimageloader.core;
import android.graphics.Bitmap;
import android.os.Handler;
import com.nostra13.universalimageloader.core.assist.FailReason;
import com.nostra13.universalimageloader.core.assist.FailReason.FailType;
import com.nostra13.universalimageloader.core.assist.ImageScaleType;
import com.nostra13.universalimageloader.core.assist.ImageSize;
import com.nostra13.universalimageloader.core.assist.LoadedFrom;
import com.nostra13.universalimageloader.core.assist.ViewScaleType;
import com.nostra13.universalimageloader.core.decode.ImageDecoder;
import com.nostra13.universalimageloader.core.decode.ImageDecodingInfo;
import com.nostra13.universalimageloader.core.download.ImageDownloader;
import com.nostra13.universalimageloader.core.download.ImageDownloader.Scheme;
import com.nostra13.universalimageloader.core.imageaware.ImageAware;
import com.nostra13.universalimageloader.core.listener.ImageLoadingListener;
import com.nostra13.universalimageloader.core.listener.ImageLoadingProgressListener;
import com.nostra13.universalimageloader.utils.IoUtils;
import com.nostra13.universalimageloader.utils.L;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;
/**
* Presents load'n'display image task. Used to load image from Internet or file system, decode it to {@link Bitmap}, and
* display it in {@link ImageAware} using {@link DisplayBitmapTask}.
*
* @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
* @see ImageLoaderConfiguration
* @see ImageLoadingInfo
* @since 1.3.1
*/
/**
 * Presents load'n'display image task. Used to load image from Internet or file system, decode it to {@link Bitmap}, and
 * display it in {@link ImageAware} using {@link DisplayBitmapTask}.
 *
 * @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
 * @see ImageLoaderConfiguration
 * @see ImageLoadingInfo
 * @since 1.3.1
 */
final class LoadAndDisplayImageTask implements Runnable, IoUtils.CopyListener {

    private static final String LOG_WAITING_FOR_RESUME = "ImageLoader is paused. Waiting... [%s]";
    private static final String LOG_RESUME_AFTER_PAUSE = ".. Resume loading [%s]";
    private static final String LOG_DELAY_BEFORE_LOADING = "Delay %d ms before loading... [%s]";
    private static final String LOG_START_DISPLAY_IMAGE_TASK = "Start display image task [%s]";
    private static final String LOG_WAITING_FOR_IMAGE_LOADED = "Image already is loading. Waiting... [%s]";
    private static final String LOG_GET_IMAGE_FROM_MEMORY_CACHE_AFTER_WAITING = "...Get cached bitmap from memory after waiting. [%s]";
    private static final String LOG_LOAD_IMAGE_FROM_NETWORK = "Load image from network [%s]";
    private static final String LOG_LOAD_IMAGE_FROM_DISK_CACHE = "Load image from disk cache [%s]";
    private static final String LOG_RESIZE_CACHED_IMAGE_FILE = "Resize image in disk cache [%s]";
    private static final String LOG_PREPROCESS_IMAGE = "PreProcess image before caching in memory [%s]";
    private static final String LOG_POSTPROCESS_IMAGE = "PostProcess image before displaying [%s]";
    private static final String LOG_CACHE_IMAGE_IN_MEMORY = "Cache image in memory [%s]";
    private static final String LOG_CACHE_IMAGE_ON_DISK = "Cache image on disk [%s]";
    private static final String LOG_PROCESS_IMAGE_BEFORE_CACHE_ON_DISK = "Process image before cache on disk [%s]";
    private static final String LOG_TASK_CANCELLED_IMAGEAWARE_REUSED = "ImageAware is reused for another image. Task is cancelled. [%s]";
    private static final String LOG_TASK_CANCELLED_IMAGEAWARE_COLLECTED = "ImageAware was collected by GC. Task is cancelled. [%s]";
    private static final String LOG_TASK_INTERRUPTED = "Task was interrupted [%s]";

    private static final String ERROR_NO_IMAGE_STREAM = "No stream for image [%s]";
    private static final String ERROR_PRE_PROCESSOR_NULL = "Pre-processor returned null [%s]";
    private static final String ERROR_POST_PROCESSOR_NULL = "Post-processor returned null [%s]";
    private static final String ERROR_PROCESSOR_FOR_DISK_CACHE_NULL = "Bitmap processor for disk cache returned null [%s]";

    private final ImageLoaderEngine engine;
    private final ImageLoadingInfo imageLoadingInfo;
    private final Handler handler;

    // Helper references
    private final ImageLoaderConfiguration configuration;
    private final ImageDownloader downloader;
    private final ImageDownloader networkDeniedDownloader;
    private final ImageDownloader slowNetworkDownloader;
    private final ImageDecoder decoder;
    final String uri;
    private final String memoryCacheKey;
    final ImageAware imageAware;
    private final ImageSize targetSize;
    final DisplayImageOptions options;
    final ImageLoadingListener listener;
    final ImageLoadingProgressListener progressListener;
    private final boolean syncLoading;

    // State vars
    private LoadedFrom loadedFrom = LoadedFrom.NETWORK;

    /**
     * Unpacks the loading info and engine configuration into local final fields
     * so the hot path does not chase references repeatedly.
     */
    public LoadAndDisplayImageTask(ImageLoaderEngine engine, ImageLoadingInfo imageLoadingInfo, Handler handler) {
        this.engine = engine;
        this.imageLoadingInfo = imageLoadingInfo;
        this.handler = handler;

        configuration = engine.configuration;
        downloader = configuration.downloader;
        networkDeniedDownloader = configuration.networkDeniedDownloader;
        slowNetworkDownloader = configuration.slowNetworkDownloader;
        decoder = configuration.decoder;
        uri = imageLoadingInfo.uri;
        memoryCacheKey = imageLoadingInfo.memoryCacheKey;
        imageAware = imageLoadingInfo.imageAware;
        targetSize = imageLoadingInfo.targetSize;
        options = imageLoadingInfo.options;
        listener = imageLoadingInfo.listener;
        progressListener = imageLoadingInfo.progressListener;
        syncLoading = options.isSyncLoading();
    }

    /**
     * Full load/decode/display pipeline: honor engine pause and per-task delay,
     * serialize loads of the same URI via the shared per-URI lock, then check
     * memory cache -> load bitmap -> pre/post process -> post a display task.
     * Any {@link TaskCancelledException} along the way aborts with a cancel event.
     */
    @Override
    public void run() {
        if (waitIfPaused()) return;
        if (delayIfNeed()) return;

        // One lock per URI: concurrent tasks for the same image wait here so
        // the image is downloaded/decoded only once.
        ReentrantLock loadFromUriLock = imageLoadingInfo.loadFromUriLock;
        L.d(LOG_START_DISPLAY_IMAGE_TASK, memoryCacheKey);
        if (loadFromUriLock.isLocked()) {
            L.d(LOG_WAITING_FOR_IMAGE_LOADED, memoryCacheKey);
        }

        loadFromUriLock.lock();
        Bitmap bmp;
        try {
            checkTaskNotActual();

            bmp = configuration.memoryCache.get(memoryCacheKey);
            if (bmp == null || bmp.isRecycled()) {
                bmp = tryLoadBitmap();
                if (bmp == null) return; // listener callback already was fired

                checkTaskNotActual();
                checkTaskInterrupted();

                if (options.shouldPreProcess()) {
                    L.d(LOG_PREPROCESS_IMAGE, memoryCacheKey);
                    bmp = options.getPreProcessor().process(bmp);
                    if (bmp == null) {
                        L.e(ERROR_PRE_PROCESSOR_NULL, memoryCacheKey);
                    }
                }

                if (bmp != null && options.isCacheInMemory()) {
                    L.d(LOG_CACHE_IMAGE_IN_MEMORY, memoryCacheKey);
                    configuration.memoryCache.put(memoryCacheKey, bmp);
                }
            } else {
                // Another task finished the load while we waited on the lock.
                loadedFrom = LoadedFrom.MEMORY_CACHE;
                L.d(LOG_GET_IMAGE_FROM_MEMORY_CACHE_AFTER_WAITING, memoryCacheKey);
            }

            if (bmp != null && options.shouldPostProcess()) {
                L.d(LOG_POSTPROCESS_IMAGE, memoryCacheKey);
                bmp = options.getPostProcessor().process(bmp);
                if (bmp == null) {
                    L.e(ERROR_POST_PROCESSOR_NULL, memoryCacheKey);
                }
            }
            checkTaskNotActual();
            checkTaskInterrupted();
        } catch (TaskCancelledException e) {
            fireCancelEvent();
            return;
        } finally {
            loadFromUriLock.unlock();
        }

        DisplayBitmapTask displayBitmapTask = new DisplayBitmapTask(bmp, imageLoadingInfo, engine, loadedFrom);
        runTask(displayBitmapTask, syncLoading, handler, engine);
    }

    /** @return <b>true</b> - if task should be interrupted; <b>false</b> - otherwise */
    private boolean waitIfPaused() {
        AtomicBoolean pause = engine.getPause();
        if (pause.get()) {
            // Double-check under the pause lock before waiting so a resume that
            // raced the first check is not missed.
            synchronized (engine.getPauseLock()) {
                if (pause.get()) {
                    L.d(LOG_WAITING_FOR_RESUME, memoryCacheKey);
                    try {
                        engine.getPauseLock().wait();
                    } catch (InterruptedException e) {
                        L.e(LOG_TASK_INTERRUPTED, memoryCacheKey);
                        return true;
                    }
                    L.d(LOG_RESUME_AFTER_PAUSE, memoryCacheKey);
                }
            }
        }
        return isTaskNotActual();
    }

    /** @return <b>true</b> - if task should be interrupted; <b>false</b> - otherwise */
    private boolean delayIfNeed() {
        if (options.shouldDelayBeforeLoading()) {
            L.d(LOG_DELAY_BEFORE_LOADING, options.getDelayBeforeLoading(), memoryCacheKey);
            try {
                Thread.sleep(options.getDelayBeforeLoading());
            } catch (InterruptedException e) {
                L.e(LOG_TASK_INTERRUPTED, memoryCacheKey);
                return true;
            }
            // The view may have been reused/collected while we slept.
            return isTaskNotActual();
        }
        return false;
    }

    /**
     * Tries the disk cache first, then the network (optionally caching the
     * downloaded image to disk before decoding it from there). On any failure a
     * fail event is fired and <b>null</b> is returned; cancellation propagates
     * as {@link TaskCancelledException}.
     */
    private Bitmap tryLoadBitmap() throws TaskCancelledException {
        Bitmap bitmap = null;
        try {
            File imageFile = configuration.diskCache.get(memoryCacheKey);
            if (imageFile != null && imageFile.exists()) {
                L.d(LOG_LOAD_IMAGE_FROM_DISK_CACHE, memoryCacheKey);
                loadedFrom = LoadedFrom.DISC_CACHE;

                checkTaskNotActual();
                bitmap = decodeImage(Scheme.FILE.wrap(imageFile.getAbsolutePath()));
            }
            if (bitmap == null || bitmap.getWidth() <= 0 || bitmap.getHeight() <= 0) {
                L.d(LOG_LOAD_IMAGE_FROM_NETWORK, memoryCacheKey);
                loadedFrom = LoadedFrom.NETWORK;

                String imageUriForDecoding = memoryCacheKey;
                if (options.isCacheOnDisk() && tryCacheImageOnDisk()) {
                    // Decode from the freshly-written disk cache file so the
                    // (possibly resized) cached version is what gets displayed.
                    imageFile = configuration.diskCache.get(memoryCacheKey);
                    if (imageFile != null) {
                        imageUriForDecoding = Scheme.FILE.wrap(imageFile.getAbsolutePath());
                    }
                }

                checkTaskNotActual();
                bitmap = decodeImage(imageUriForDecoding);

                if (bitmap == null || bitmap.getWidth() <= 0 || bitmap.getHeight() <= 0) {
                    fireFailEvent(FailType.DECODING_ERROR, null);
                }
            }
        } catch (IllegalStateException e) {
            // Thrown by the network-denied downloader when network use is forbidden.
            fireFailEvent(FailType.NETWORK_DENIED, null);
        } catch (TaskCancelledException e) {
            throw e;
        } catch (IOException e) {
            L.e(e);
            fireFailEvent(FailType.IO_ERROR, e);
        } catch (OutOfMemoryError e) {
            L.e(e);
            fireFailEvent(FailType.OUT_OF_MEMORY, e);
        } catch (Throwable e) {
            L.e(e);
            fireFailEvent(FailType.UNKNOWN, e);
        }
        return bitmap;
    }

    /** Decodes the given URI into a bitmap using the configured decoder and the current downloader. */
    private Bitmap decodeImage(String imageUri) throws IOException {
        ViewScaleType viewScaleType = imageAware.getScaleType();
        ImageDecodingInfo decodingInfo = new ImageDecodingInfo(memoryCacheKey, imageUri, uri, targetSize, viewScaleType,
                getDownloader(), options);
        return decoder.decode(decodingInfo);
    }

    /** @return <b>true</b> - if image was downloaded successfully; <b>false</b> - otherwise */
    private boolean tryCacheImageOnDisk() throws TaskCancelledException {
        L.d(LOG_CACHE_IMAGE_ON_DISK, memoryCacheKey);

        boolean loaded;
        try {
            loaded = downloadImage();
            if (loaded) {
                int width = configuration.maxImageWidthForDiskCache;
                int height = configuration.maxImageHeightForDiskCache;
                if (width > 0 || height > 0) {
                    L.d(LOG_RESIZE_CACHED_IMAGE_FILE, memoryCacheKey);
                    resizeAndSaveImage(width, height); // TODO : process boolean result
                }
            }
        } catch (IOException e) {
            // Best-effort: a failed disk write just means decoding falls back
            // to the original URI.
            L.e(e);
            loaded = false;
        }
        return loaded;
    }

    /**
     * Streams the image from the downloader into the disk cache.
     * {@code this} is passed as copy listener so progress events fire.
     */
    private boolean downloadImage() throws IOException {
        InputStream is = getDownloader().getStream(uri, options.getExtraForDownloader());
        if (is == null) {
            L.e(ERROR_NO_IMAGE_STREAM, memoryCacheKey);
            return false;
        } else {
            try {
                return configuration.diskCache.save(memoryCacheKey, is, this);
            } finally {
                IoUtils.closeSilently(is);
            }
        }
    }

    /** Decodes image file into Bitmap, resize it and save it back */
    private boolean resizeAndSaveImage(int maxWidth, int maxHeight) throws IOException {
        // Decode image file, compress and re-save it
        boolean saved = false;
        File targetFile = configuration.diskCache.get(memoryCacheKey);
        if (targetFile != null && targetFile.exists()) {
            ImageSize targetImageSize = new ImageSize(maxWidth, maxHeight);
            DisplayImageOptions specialOptions = new DisplayImageOptions.Builder().cloneFrom(options)
                    .imageScaleType(ImageScaleType.IN_SAMPLE_INT).build();
            ImageDecodingInfo decodingInfo = new ImageDecodingInfo(memoryCacheKey,
                    Scheme.FILE.wrap(targetFile.getAbsolutePath()), uri, targetImageSize, ViewScaleType.FIT_INSIDE,
                    getDownloader(), specialOptions);
            Bitmap bmp = decoder.decode(decodingInfo);
            if (bmp != null && configuration.processorForDiskCache != null) {
                L.d(LOG_PROCESS_IMAGE_BEFORE_CACHE_ON_DISK, memoryCacheKey);
                bmp = configuration.processorForDiskCache.process(bmp);
                if (bmp == null) {
                    L.e(ERROR_PROCESSOR_FOR_DISK_CACHE_NULL, memoryCacheKey);
                }
            }
            if (bmp != null) {
                saved = configuration.diskCache.save(memoryCacheKey, bmp);
                bmp.recycle();
            }
        }
        return saved;
    }

    @Override
    public boolean onBytesCopied(int current, int total) {
        // Synchronous loads cannot be cancelled mid-copy, so always continue.
        return syncLoading || fireProgressEvent(current, total);
    }

    /** @return <b>true</b> - if loading should be continued; <b>false</b> - if loading should be interrupted */
    private boolean fireProgressEvent(final int current, final int total) {
        if (isTaskInterrupted() || isTaskNotActual()) return false;
        if (progressListener != null) {
            Runnable r = new Runnable() {
                @Override
                public void run() {
                    progressListener.onProgressUpdate(uri, imageAware.getWrappedView(), current, total);
                }
            };
            runTask(r, false, handler, engine);
        }
        return true;
    }

    /** Posts the failure callback (and optional failure drawable) unless the task is already dead. */
    private void fireFailEvent(final FailType failType, final Throwable failCause) {
        if (syncLoading || isTaskInterrupted() || isTaskNotActual()) return;
        Runnable r = new Runnable() {
            @Override
            public void run() {
                if (options.shouldShowImageOnFail()) {
                    imageAware.setImageDrawable(options.getImageOnFail(configuration.resources));
                }
                listener.onLoadingFailed(uri, imageAware.getWrappedView(), new FailReason(failType, failCause));
            }
        };
        runTask(r, false, handler, engine);
    }

    /** Posts the cancellation callback unless loading is synchronous or the thread was interrupted. */
    private void fireCancelEvent() {
        if (syncLoading || isTaskInterrupted()) return;
        Runnable r = new Runnable() {
            @Override
            public void run() {
                listener.onLoadingCancelled(uri, imageAware.getWrappedView());
            }
        };
        runTask(r, false, handler, engine);
    }

    /** Picks the downloader matching the engine's current network state. */
    private ImageDownloader getDownloader() {
        ImageDownloader d;
        if (engine.isNetworkDenied()) {
            d = networkDeniedDownloader;
        } else if (engine.isSlowNetwork()) {
            d = slowNetworkDownloader;
        } else {
            d = downloader;
        }
        return d;
    }

    /**
     * @throws TaskCancelledException if task is not actual (target ImageAware is collected by GC or the image URI of
     *                                this task doesn't match to image URI which is actual for current ImageAware at
     *                                this moment)
     */
    private void checkTaskNotActual() throws TaskCancelledException {
        checkViewCollected();
        checkViewReused();
    }

    /**
     * @return <b>true</b> - if task is not actual (target ImageAware is collected by GC or the image URI of this task
     * doesn't match to image URI which is actual for current ImageAware at this moment)); <b>false</b> - otherwise
     */
    private boolean isTaskNotActual() {
        return isViewCollected() || isViewReused();
    }

    /** @throws TaskCancelledException if target ImageAware is collected */
    private void checkViewCollected() throws TaskCancelledException {
        if (isViewCollected()) {
            throw new TaskCancelledException();
        }
    }

    /** @return <b>true</b> - if target ImageAware is collected by GC; <b>false</b> - otherwise */
    private boolean isViewCollected() {
        if (imageAware.isCollected()) {
            L.d(LOG_TASK_CANCELLED_IMAGEAWARE_COLLECTED, memoryCacheKey);
            return true;
        }
        return false;
    }

    /** @throws TaskCancelledException if target ImageAware is collected by GC */
    private void checkViewReused() throws TaskCancelledException {
        if (isViewReused()) {
            throw new TaskCancelledException();
        }
    }

    /** @return <b>true</b> - if current ImageAware is reused for displaying another image; <b>false</b> - otherwise */
    private boolean isViewReused() {
        String currentCacheKey = engine.getLoadingUriForView(imageAware);
        // Check whether memory cache key (image URI) for current ImageAware is actual.
        // If ImageAware is reused for another task then current task should be cancelled.
        boolean imageAwareWasReused = !memoryCacheKey.equals(currentCacheKey);
        if (imageAwareWasReused) {
            L.d(LOG_TASK_CANCELLED_IMAGEAWARE_REUSED, memoryCacheKey);
            return true;
        }
        return false;
    }

    /** @throws TaskCancelledException if current task was interrupted */
    private void checkTaskInterrupted() throws TaskCancelledException {
        if (isTaskInterrupted()) {
            throw new TaskCancelledException();
        }
    }

    /** @return <b>true</b> - if current task was interrupted; <b>false</b> - otherwise */
    private boolean isTaskInterrupted() {
        // Note: Thread.interrupted() clears the interrupt flag.
        if (Thread.interrupted()) {
            L.d(LOG_TASK_INTERRUPTED, memoryCacheKey);
            return true;
        }
        return false;
    }

    String getLoadingUri() {
        return uri;
    }

    /** Runs synchronously, via the engine's callback executor, or on the given handler. */
    static void runTask(Runnable r, boolean sync, Handler handler, ImageLoaderEngine engine) {
        if (sync) {
            r.run();
        } else if (handler == null) {
            engine.fireCallback(r);
        } else {
            handler.post(r);
        }
    }

    /**
     * Exceptions for case when task is cancelled (thread is interrupted, image view is reused for another task, view is
     * collected by GC).
     *
     * @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
     * @since 1.9.1
     */
    // Made static: the exception never touches the enclosing task, and a
    // non-static member class would pin the whole task instance for the
    // exception's lifetime (Effective Java: prefer static member classes).
    static class TaskCancelledException extends Exception {
        private static final long serialVersionUID = 1L;
    }
}
| |
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.orm.jpa;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.sql.DataSource;
import org.springframework.core.task.AsyncTaskExecutor;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.persistenceunit.PersistenceUnitManager;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
/**
* Convenient builder for JPA EntityManagerFactory instances. Collects common
* configuration when constructed and then allows you to create one or more
* {@link LocalContainerEntityManagerFactoryBean} through a fluent builder pattern. The
* most common options are covered in the builder, but you can always manipulate the
* product of the builder if you need more control, before returning it from a
* {@code @Bean} definition.
*
* @author Dave Syer
* @author Phillip Webb
* @author Stephane Nicoll
* @since 1.3.0
*/
public class EntityManagerFactoryBuilder {
private final JpaVendorAdapter jpaVendorAdapter;
private final PersistenceUnitManager persistenceUnitManager;
private final Map<String, Object> jpaProperties;
private final URL persistenceUnitRootLocation;
private AsyncTaskExecutor bootstrapExecutor;
private EntityManagerFactoryBeanCallback callback;
/**
* Create a new instance passing in the common pieces that will be shared if multiple
* EntityManagerFactory instances are created.
* @param jpaVendorAdapter a vendor adapter
* @param jpaProperties the JPA properties to be passed to the persistence provider
* @param persistenceUnitManager optional source of persistence unit information (can
* be null)
*/
public EntityManagerFactoryBuilder(JpaVendorAdapter jpaVendorAdapter,
Map<String, ?> jpaProperties, PersistenceUnitManager persistenceUnitManager) {
this(jpaVendorAdapter, jpaProperties, persistenceUnitManager, null);
}
/**
* Create a new instance passing in the common pieces that will be shared if multiple
* EntityManagerFactory instances are created.
* @param jpaVendorAdapter a vendor adapter
* @param jpaProperties the JPA properties to be passed to the persistence provider
* @param persistenceUnitManager optional source of persistence unit information (can
* be null)
* @param persistenceUnitRootLocation the persistence unit root location to use as a
* fallback (can be null)
* @since 1.4.1
*/
public EntityManagerFactoryBuilder(JpaVendorAdapter jpaVendorAdapter,
Map<String, ?> jpaProperties, PersistenceUnitManager persistenceUnitManager,
URL persistenceUnitRootLocation) {
this.jpaVendorAdapter = jpaVendorAdapter;
this.persistenceUnitManager = persistenceUnitManager;
this.jpaProperties = new LinkedHashMap<>(jpaProperties);
this.persistenceUnitRootLocation = persistenceUnitRootLocation;
}
public Builder dataSource(DataSource dataSource) {
return new Builder(dataSource);
}
/**
* Configure the bootstrap executor to be used by the
* {@link LocalContainerEntityManagerFactoryBean}.
* @param bootstrapExecutor the executor
* @since 2.1.0
*/
public void setBootstrapExecutor(AsyncTaskExecutor bootstrapExecutor) {
this.bootstrapExecutor = bootstrapExecutor;
}
/**
* An optional callback for new entity manager factory beans.
* @param callback the entity manager factory bean callback
*/
public void setCallback(EntityManagerFactoryBeanCallback callback) {
this.callback = callback;
}
/**
* A fluent builder for a LocalContainerEntityManagerFactoryBean.
*/
public final class Builder {
private DataSource dataSource;
private String[] packagesToScan;
private String persistenceUnit;
private Map<String, Object> properties = new HashMap<>();
private String[] mappingResources;
private boolean jta;
private Builder(DataSource dataSource) {
this.dataSource = dataSource;
}
/**
* The names of packages to scan for {@code @Entity} annotations.
* @param packagesToScan packages to scan
* @return the builder for fluent usage
*/
public Builder packages(String... packagesToScan) {
this.packagesToScan = packagesToScan;
return this;
}
/**
* The classes whose packages should be scanned for {@code @Entity} annotations.
* @param basePackageClasses the classes to use
* @return the builder for fluent usage
*/
public Builder packages(Class<?>... basePackageClasses) {
Set<String> packages = new HashSet<>();
for (Class<?> type : basePackageClasses) {
packages.add(ClassUtils.getPackageName(type));
}
this.packagesToScan = StringUtils.toStringArray(packages);
return this;
}
/**
* The name of the persistence unit. If only building one EntityManagerFactory you
* can omit this, but if there are more than one in the same application you
* should give them distinct names.
* @param persistenceUnit the name of the persistence unit
* @return the builder for fluent usage
*/
public Builder persistenceUnit(String persistenceUnit) {
this.persistenceUnit = persistenceUnit;
return this;
}
/**
* Generic properties for standard JPA or vendor-specific configuration. These
* properties override any values provided in the constructor.
* @param properties the properties to use
* @return the builder for fluent usage
*/
public Builder properties(Map<String, ?> properties) {
this.properties.putAll(properties);
return this;
}
/**
* The mapping resources (equivalent to {@code <mapping-file>} entries in
* {@code persistence.xml}) for the persistence unit.
* <p>
* Note that mapping resources must be relative to the classpath root, e.g.
* "META-INF/mappings.xml" or "com/mycompany/repository/mappings.xml", so that
* they can be loaded through {@code ClassLoader.getResource()}.
* @param mappingResources the mapping resources to use
* @return the builder for fluent usage
*/
public Builder mappingResources(String... mappingResources) {
this.mappingResources = mappingResources;
return this;
}
/**
* Configure if using a JTA {@link DataSource}, i.e. if
* {@link LocalContainerEntityManagerFactoryBean#setDataSource(DataSource)
* setDataSource} or
* {@link LocalContainerEntityManagerFactoryBean#setJtaDataSource(DataSource)
* setJtaDataSource} should be called on the
* {@link LocalContainerEntityManagerFactoryBean}.
* @param jta if the data source is JTA
* @return the builder for fluent usage
*/
public Builder jta(boolean jta) {
this.jta = jta;
return this;
}
/**
 * Assembles and configures a new
 * {@link LocalContainerEntityManagerFactoryBean} from the state collected
 * by this builder and by the enclosing {@code EntityManagerFactoryBuilder}.
 *
 * @return the configured factory bean (not yet initialized by a container)
 */
public LocalContainerEntityManagerFactoryBean build() {
    LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
    // Outer-builder settings are optional; only apply them when present.
    if (EntityManagerFactoryBuilder.this.persistenceUnitManager != null) {
        entityManagerFactoryBean.setPersistenceUnitManager(
                EntityManagerFactoryBuilder.this.persistenceUnitManager);
    }
    if (this.persistenceUnit != null) {
        entityManagerFactoryBean.setPersistenceUnitName(this.persistenceUnit);
    }
    entityManagerFactoryBean.setJpaVendorAdapter(
            EntityManagerFactoryBuilder.this.jpaVendorAdapter);
    // Wire the data source either as a JTA or a plain data source,
    // depending on the jta(boolean) builder setting.
    if (this.jta) {
        entityManagerFactoryBean.setJtaDataSource(this.dataSource);
    }
    else {
        entityManagerFactoryBean.setDataSource(this.dataSource);
    }
    entityManagerFactoryBean.setPackagesToScan(this.packagesToScan);
    // Builder-level properties are applied after the outer JPA properties,
    // so builder entries override duplicated keys.
    entityManagerFactoryBean.getJpaPropertyMap()
            .putAll(EntityManagerFactoryBuilder.this.jpaProperties);
    entityManagerFactoryBean.getJpaPropertyMap().putAll(this.properties);
    if (!ObjectUtils.isEmpty(this.mappingResources)) {
        entityManagerFactoryBean.setMappingResources(this.mappingResources);
    }
    URL rootLocation = EntityManagerFactoryBuilder.this.persistenceUnitRootLocation;
    if (rootLocation != null) {
        entityManagerFactoryBean
                .setPersistenceUnitRootLocation(rootLocation.toString());
    }
    if (EntityManagerFactoryBuilder.this.bootstrapExecutor != null) {
        entityManagerFactoryBean.setBootstrapExecutor(
                EntityManagerFactoryBuilder.this.bootstrapExecutor);
    }
    // Give the registered callback (if any) a chance to customize the bean
    // before it is handed back to the caller.
    if (EntityManagerFactoryBuilder.this.callback != null) {
        EntityManagerFactoryBuilder.this.callback
                .execute(entityManagerFactoryBean);
    }
    return entityManagerFactoryBean;
}
}
/**
 * A callback for new entity manager factory beans created by a Builder.
 */
@FunctionalInterface
public interface EntityManagerFactoryBeanCallback {

    /**
     * Invoked with each newly built factory bean, allowing further
     * customization before {@code build()} returns it to the caller.
     *
     * @param factory the freshly configured factory bean
     */
    void execute(LocalContainerEntityManagerFactoryBean factory);

}
}
| |
package controller;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.List;
import java.util.logging.Logger;
import codeprocessor.CodeProcessor;
import codeprocessor.Filter;
import codeprocessor.GraphBuilder;
import dfa.framework.AnalysisLoader;
import dfa.framework.DFAExecution;
import dfa.framework.DFAFactory;
import dfa.framework.DFAPrecalcController;
import dfa.framework.LatticeElement;
import dfa.framework.SimpleBlockGraph;
import dfa.framework.StaticAnalysisLoader;
import dfa.framework.Worklist;
import dfa.framework.WorklistManager;
import gui.visualgraph.VisualGraphPanel;
import gui.ControlPanelState;
import gui.GenericBox;
import gui.MessageBox;
import gui.MethodSelectionBox;
import gui.Option;
import gui.ProgramFrame;
import gui.visualgraph.GraphUIController;
/**
 * Central unit, that is responsible for the communication between the GUI and
 * the remaining packages of the program.
 *
 * @author Anika Nietzer
 */
public class Controller {

    private static final String ABORT_PRECALC_MESSAGE = "Do you want to stop the precalculation? You can also show intermediate results if the analysis state allows this.";
    private static final String ABORT_MESSAGE = "This leads to a complete deletion of the graph and the calculation. Would you like to continue?";
    private static final String EXCEPTION_TITLE = "Exception caused by analysis calculation";

    // Milliseconds to wait for the precalculation thread to terminate
    // gracefully before falling back to the deprecated Thread.stop().
    private static final int WAIT_FOR_STOP = 500;

    // path for release, load analyses dynamically
    private static final String DYNAMIC_ANALYSES_PATH = findJarPath();

    // Output directory for program files, placed in the user's home folder.
    private static final String PROGRAM_OUTPUT_PATH = System.getProperty("user.home")
            + System.getProperty("file.separator") + "visualDfa" + System.getProperty("file.separator");

    private OptionFileParser fileParser;
    private ProgramFrame programFrame;
    private DFAExecution<? extends LatticeElement> dfaExecution;
    private GraphUIController graphUIController;
    private VisualGraphPanel visualGraphPanel;
    private AnalysisLoader analysisLoader;
    private WorklistManager worklistManager;
    // Thread running the DFA precalculation; may be force-stopped in stopAnalysis().
    private Thread precalcThread;
    private DFAPrecalcController precalcController;
    // Flag polled by the AutoplayDriver (via shouldContinue()) to decide
    // whether the animation keeps playing.
    private boolean shouldContinue = false;

    /**
     * Determines the directory containing the jar this class was loaded from;
     * used as the location from which analyses are loaded.
     *
     * @return absolute path of the code source's parent directory
     */
    private static String findJarPath() {
        String path = Controller.class.getProtectionDomain().getCodeSource().getLocation().getPath();
        String decodedPath = null;
        try {
            decodedPath = URLDecoder.decode(path, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is always supported by the JVM, so this is not expected.
            // NOTE(review): if it did occur, decodedPath would stay null and
            // the File constructor below would throw — confirm acceptable.
            e.printStackTrace();
        }
        File sourceDirectory = (new File(decodedPath)).getParentFile();
        return sourceDirectory.getAbsolutePath();
    }

    /**
     * Creates a new {@code Controller} and loads the available analyses with an
     * instance of {@code AnalysisLoader}. Creates a {@code VisualGraphPanel}
     * and a {@code GraphUIController}.
     */
    public Controller() {
        try {
            this.analysisLoader = new StaticAnalysisLoader(DYNAMIC_ANALYSES_PATH);
            // TODO note: use this line instead of the line above to load analyses dynamically
            // this.analysisLoader = new AnalysisLoader(DYNAMIC_ANALYSES_PATH);
        } catch (IllegalStateException e) {
            // NOTE(review): if construction fails, analysisLoader stays null
            // and the call below throws a NullPointerException — verify.
            e.printStackTrace();
        }
        this.analysisLoader.loadAnalyses(Logger.getAnonymousLogger());
        this.worklistManager = WorklistManager.getInstance();
        this.visualGraphPanel = new VisualGraphPanel();
        this.graphUIController = new GraphUIController(visualGraphPanel);
    }

    /**
     * Defines the code that is written in the editor of the input panel at
     * program start.
     */
    public void setDefaultCode() {
        //@formatter:off
        String codeExample =
                "public class Example {" + System.lineSeparator()
                + " public void helloWorld(boolean print, int x) {" + System.lineSeparator()
                + " if (print) {" + System.lineSeparator()
                + " System.out.println(\"Hello World!\");" + System.lineSeparator()
                + " while (x < 10) {" + System.lineSeparator()
                + " x = x + 1;" + System.lineSeparator()
                + " if (x == 5) {" + System.lineSeparator()
                + " int y = 5;" + System.lineSeparator()
                + " x = y * 3;" + System.lineSeparator()
                + " }" + System.lineSeparator()
                + " }" + System.lineSeparator()
                + " } else {" + System.lineSeparator()
                + " x = 0;" + System.lineSeparator()
                + " }" + System.lineSeparator()
                + " }" + System.lineSeparator()
                + "}";
        //@formatter:on
        this.programFrame.getInputPanel().setCode(codeExample);
    }

    /**
     * Leads to the calculation of the next block by the DFAFramework and
     * ensures that the {@code GraphUIController} updates the
     * {@code VisualGraphPanel}.
     *
     * @return whether there was a next block to show or not
     */
    public boolean nextBlock() {
        boolean hasNextBlock = this.dfaExecution.nextBlockStep();
        if (hasNextBlock) {
            this.refreshProgramFrame();
        } else {
            // No further block: jump to the very last elementary step instead.
            this.dfaExecution.setCurrentElementaryStep(this.dfaExecution.getTotalElementarySteps() - 1);
            this.refreshProgramFrame();
        }
        return hasNextBlock;
    }

    /**
     * Leads to the calculation of the next line by the DFAFramework and ensures
     * that the {@code GraphUIController} updates the {@code VisualGraphPanel}.
     *
     * @return whether there was a next line to show or not
     */
    public boolean nextLine() {
        boolean hasNextLine = this.dfaExecution.nextElementaryStep();
        if (hasNextLine) {
            this.refreshProgramFrame();
        }
        return hasNextLine;
    }

    /**
     * Leads to the calculation of the previous line by the DFAFramework and
     * ensures that the {@code GraphUIController} updates the
     * {@code VisualGraphPanel}.
     *
     * @return whether there was a previous line to show or not
     */
    public boolean previousLine() {
        boolean hasPreviousLine = this.dfaExecution.previousElementaryStep();
        if (hasPreviousLine) {
            this.refreshProgramFrame();
        }
        return hasPreviousLine;
    }

    /**
     * Leads to the calculation of the previous block by the DFAFramework and
     * ensures that the {@code GraphUIController} updates the
     * {@code VisualGraphPanel}.
     *
     * @return whether there was a previous block to show or not
     */
    public boolean previousBlock() {
        int currentStep = this.dfaExecution.getCurrentElementaryStep();
        // Re-setting the current block step appears to jump to the start of
        // the current block; if that did not move the elementary step (we were
        // already at the block start), step back one whole block.
        // NOTE(review): semantics depend on DFAExecution — confirm.
        this.dfaExecution.setCurrentBlockStep(this.dfaExecution.getCurrentBlockStep());
        if (currentStep == this.dfaExecution.getCurrentElementaryStep()) {
            this.dfaExecution.previousBlockStep();
        }
        this.refreshProgramFrame();
        return true;
    }

    /**
     * Refreshes the graph display and synchronizes the control-panel slider
     * with the current elementary step of the analysis.
     */
    private void refreshProgramFrame() {
        try {
            this.graphUIController.refresh();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }
        this.programFrame.getControlPanel().setSliderStep(this.dfaExecution.getCurrentElementaryStep());
    }

    /**
     * Leads to the calculation of a given step in the analysis from the
     * DFAFramework and ensures that the {@code GraphUIController} updates the
     * {@code VisualGraphPanel}.
     *
     * @param step
     *            step to show in the animation
     */
    public void jumpToStep(int step) {
        try {
            this.dfaExecution.setCurrentElementaryStep(step);
            this.graphUIController.refresh();
        } catch (IndexOutOfBoundsException e) {
            e.printStackTrace();
        }
    }

    /**
     * Advances the analysis elementary-step-wise until a breakpoint is hit or
     * the second-to-last step is reached; performs one final step afterwards
     * when no breakpoint was found.
     */
    private void searchBreakPosition() {
        while (this.dfaExecution.getCurrentElementaryStep() < this.dfaExecution.getTotalElementarySteps() - 1) {
            this.dfaExecution.nextElementaryStep();
            if (this.dfaExecution.isAtBreakpoint()) {
                return;
            }
        }
        this.dfaExecution.nextElementaryStep();
        return;
    }

    /**
     * Creates a new {@code AutoplayDriver} to replay the different steps of the
     * analysis if a delay bigger than zero is selected or jumps to the last
     * step of the analysis if the chosen delay is zero.
     */
    public void play() {
        // Already at the last step: nothing to replay.
        if (this.dfaExecution.getTotalElementarySteps() - 1 == this.dfaExecution.getCurrentElementaryStep()) {
            return;
        }
        if (getDelay() == 0) {
            // Zero delay: skip animation and jump to the next breakpoint or end.
            searchBreakPosition();
            this.programFrame.getControlPanel().setSliderStep(this.dfaExecution.getCurrentElementaryStep());
            this.graphUIController.refresh();
            return;
        }
        AutoplayDriver autoplayDriver = null;
        try {
            autoplayDriver = new AutoplayDriver(this);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        }
        visibilityPlaying();
        this.shouldContinue = true;
        Thread autoplayThread = new Thread(autoplayDriver);
        try {
            autoplayThread.start();
        } catch (IllegalThreadStateException e) {
            e.printStackTrace();
        }
    }

    /**
     * Method that return the chosen delay selected in the delaySlider of the
     * {@code ControlPanel}.
     *
     * @return delay, the user has set
     */
    public int getDelay() {
        // Slider value is in seconds; convert to milliseconds.
        return (int) (this.programFrame.getControlPanel().getDelay() * 1000);
    }

    /**
     * Method that checks if the Thread of the {@code AutoplayDriver} should
     * continue or not. The {@code AutoplayDriver} should stop if the animation
     * has reached a breakpoint or if the user paused it.
     *
     * @return {@code true} if the autoplay should continue, {@code false}
     *         otherwise
     */
    public boolean shouldContinue() {
        if (this.dfaExecution == null) {
            // Analysis was deleted while playing: fall back to input state.
            visibilityInput();
            return false;
        }
        if (this.dfaExecution.isAtBreakpoint()) {
            visibilityWorking();
            return false;
        } else {
            return this.shouldContinue;
        }
    }

    /**
     * Stops the {@code AutoplayDriver}.
     */
    public void pause() {
        this.shouldContinue = false;
        visibilityWorking();
    }

    /**
     * Creates a new {@code CodeProcessor} to process the input of the user and
     * creates a {@code SimpleBlockGraph} of the chosen method. Precalculates
     * the steps of the analysis with the {@code DFAPrecalculator} and an
     * instance of {@code DFAPrecalcController}. The {@code GraphUIController}
     * is invoked to display the CFG. The {@code ControlPanel}, the
     * {@code StatePanel} and the {@code VisualGraphPanel} are activated and the
     * {@code InputPanel} is deactivated.
     *
     * @param methodSignature
     *            name of method that should be analysed
     * @return the used {@code DFAPrecalcController}
     */
    public DFAPrecalcController startAnalysis(String methodSignature) {
        // Collect information
        visibilityPrecalculating();
        String analysisName = programFrame.getInputPanel().getAnalysis();
        String worklistName = programFrame.getInputPanel().getWorklist();
        String code = programFrame.getInputPanel().getCode();
        // Process code with instance of {@code CodeProcessor}
        CodeProcessor processor = new CodeProcessor(code);
        if (!processor.wasSuccessful()) {
            new MessageBox(programFrame, "Compilation Error", processor.getErrorMessage());
            visibilityInput();
            return null;
        }
        String packageName = processor.getPath();
        String className = processor.getClassName();
        // build Graph with {@code GraphBuilder}
        GraphBuilder graphBuilder = new GraphBuilder(packageName, className);
        Filter filter = new Filter();
        List<String> methodList = graphBuilder.getMethods(filter);
        // A single method needs no selection dialog.
        if (methodList.size() == 1) {
            methodSignature = methodList.get(0);
        }
        if (methodSignature == null) {
            MethodSelectionBox selectionBox = new MethodSelectionBox(programFrame, methodList);
            if (selectionBox.getOption() == Option.CANCEL_OPTION) {
                visibilityInput();
                return null;
            }
            methodSignature = selectionBox.getSelectedMethod();
        }
        SimpleBlockGraph blockGraph = graphBuilder.buildGraph(methodSignature);
        this.precalcController = new DFAPrecalcController();
        DFAPrecalculator precalculator = null;
        try {
            Worklist worklist = this.worklistManager.getWorklist(worklistName, blockGraph);
            DFAFactory<? extends LatticeElement> dfaFactory = analysisLoader.getDFAFactory(analysisName);
            precalculator = new DFAPrecalculator(dfaFactory, worklist, blockGraph, this.precalcController, this);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        }
        this.precalcThread = new Thread(precalculator);
        try {
            this.precalcThread.start();
        } catch (IllegalThreadStateException e) {
            e.printStackTrace();
        }
        // NOTE(review): visibilityPrecalculating() was already invoked at the
        // top of this method — verify the second call is intentional.
        visibilityPrecalculating();
        return this.precalcController;
    }

    /**
     * Method that is invoked when the precalcution is terminated. It sets the
     * {@code DFAExecution} and controls the setting of the {@code ControlPanel}
     * . The {@code GraphUIController} is started.
     */
    public void completedAnalysis() {
        this.dfaExecution = this.precalcController.getResult();
        this.dfaExecution.setCurrentElementaryStep(0);
        this.programFrame.getControlPanel().setTotalSteps(this.dfaExecution.getTotalElementarySteps());
        this.programFrame.getControlPanel().setSliderStep(0);
        this.graphUIController.start(this.dfaExecution);
        this.visualGraphPanel.setJumpToAction(true);
        this.graphUIController.refresh();
        visibilityWorking();
    }

    /**
     * Deletes the current {@code DFAExecution} and the content of the
     * {@code VisualGraphPanel} through the {@code GraphUIController}. The
     * {@code ControlPanel}, the {@code StatePanel} and the
     * {@code VisualGraphPanel} are deactivated and the {@code InputPanel} is
     * activated. First the Thread in that the precalculation is running is
     * stopped with the {@code DFAPrecalcController}. If that does not work the
     * deprecated method {@code Thread.stop} is needed to deal with infinite
     * loops in the precalculation. The used Dataflow Analysis can be
     * implemented by the user and the correctness of theses analyses can not be
     * assumed.
     */
    @SuppressWarnings("deprecation")
    public void stopAnalysis() {
        if (precalcController == null) {
            visibilityInput();
            return;
        }
        if (precalcController.getPrecalcState() == DFAPrecalcController.PrecalcState.CALCULATING
                || precalcController.getPrecalcState() == DFAPrecalcController.PrecalcState.PAUSED) {
            // Precalculation still running: ask whether to abort or to show
            // intermediate results.
            GenericBox closeBox = new GenericBox(this.programFrame, "Stop Calculation", ABORT_PRECALC_MESSAGE, "Yes",
                    "No", "Intermediate Results", false, Option.NO_OPTION);
            // NOTE(review): Option.NO_OPTION is treated as confirmation to
            // abort here — verify the GenericBox button/option mapping.
            if (closeBox.getOption() == Option.NO_OPTION) {
                if (!(precalcController.getPrecalcState() == DFAPrecalcController.PrecalcState.CALCULATING)) {
                    visibilityInput();
                    return;
                }
                this.precalcController.stopPrecalc();
                // Give the precalculation thread a grace period to stop on its own.
                synchronized (this) {
                    try {
                        wait(WAIT_FOR_STOP);
                    } catch (InterruptedException e1) {
                        e1.printStackTrace();
                    }
                }
                // Last resort for runaway (possibly user-written) analyses.
                if (this.precalcThread.isAlive()) {
                    this.precalcThread.stop();
                }
                visibilityInput();
            } else if ((closeBox.getOption() == Option.YES_OPTION)) {
                // shows an intermediate result if possible
                if (!(precalcController.getPrecalcState() == DFAPrecalcController.PrecalcState.CALCULATING)) {
                    return;
                }
                this.precalcController.stopPrecalc();
                synchronized (this) {
                    try {
                        wait(WAIT_FOR_STOP);
                    } catch (InterruptedException e1) {
                        e1.printStackTrace();
                    }
                }
                if (this.precalcThread.isAlive()) {
                    this.precalcThread.stop();
                    visibilityInput();
                }
            }
        } else if (this.fileParser.shouldShowBox()) {
            // Calculation finished: optionally confirm deletion of the result.
            GenericBox closeBox = new GenericBox(this.programFrame, "Stop", ABORT_MESSAGE, "Yes", "No", null, true,
                    Option.NO_OPTION);
            if (!closeBox.showAgain()) {
                // Persist the "don't ask again" choice.
                this.fileParser.setShowBox(false);
            }
            if (closeBox.getOption() == Option.NO_OPTION) {
                return;
            }
        }
        this.graphUIController.stop();
        this.programFrame.getStatePanelOpen().reset();
        this.dfaExecution = null;
        visibilityInput();
    }

    /** Switches the GUI into the state shown while the analysis is precalculated. */
    protected void visibilityPrecalculating() {
        this.visualGraphPanel.setActivated(false);
        this.programFrame.getInputPanel().setActivated(false);
        this.programFrame.getControlPanel().setActivated(ControlPanelState.PRECALCULATING);
        this.programFrame.getStatePanelOpen().setActivated(false);
    }

    /** Switches the GUI into the state shown while the autoplay animation runs. */
    protected void visibilityPlaying() {
        this.visualGraphPanel.setActivated(false);
        this.programFrame.getInputPanel().setActivated(false);
        this.programFrame.getControlPanel().setActivated(ControlPanelState.PLAYING);
        this.programFrame.getStatePanelOpen().setActivated(true);
    }

    /** Switches the GUI into the code-input state (no analysis shown). */
    protected void visibilityInput() {
        this.visualGraphPanel.setActivated(false);
        this.programFrame.getInputPanel().setActivated(true);
        this.programFrame.getControlPanel().setActivated(ControlPanelState.DEACTIVATED);
        this.programFrame.getStatePanelOpen().setActivated(false);
    }

    /** Switches the GUI into the interactive state for stepping through results. */
    protected void visibilityWorking() {
        this.visualGraphPanel.setActivated(true);
        this.programFrame.getInputPanel().setActivated(false);
        this.programFrame.getControlPanel().setActivated(ControlPanelState.ACTIVATED);
        this.programFrame.getStatePanelOpen().setActivated(true);
    }

    /**
     * Method that shows the message of the exception occured during a DFA
     * precalculation in a message box.
     *
     * @param message
     *            message of the exception
     */
    public void createExceptionBox(String message) {
        new MessageBox(this.programFrame, EXCEPTION_TITLE, message);
    }

    /**
     * Sets the {@code ProgramFrame} to programFrame.
     *
     * @param programFrame
     *            programFrame that should be set
     */
    public void setProgramFrame(ProgramFrame programFrame) {
        if (programFrame == null) {
            // NOTE(review): an IllegalArgumentException would be the more
            // conventional type for a bad argument — kept for compatibility.
            throw new IllegalStateException("programFrame must not be null");
        }
        this.programFrame = programFrame;
        this.visualGraphPanel.setParentFrame(this.programFrame);
        this.graphUIController.setStatePanel(this.programFrame.getStatePanelOpen());
        visibilityInput();
    }

    /**
     * Returns a list of the names of the analyses that were found during
     * program start by the {@code AnalysisLoader}.
     *
     * @return list of names of the found analyses
     */
    public List<String> getAnalyses() {
        List<String> analyses = null;
        try {
            analyses = this.analysisLoader.getAnalysesNames();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }
        return analyses;
    }

    /**
     * Returns a list of names of the {@code Worklist} currently available in
     * the {@code WorklistManager}.
     *
     * @return a list of names of the available {@code Worklist}s
     */
    public List<String> getWorklists() {
        return this.worklistManager.getWorklistNames();
    }

    /**
     * Returns the {@code VisualGraphPanel}.
     *
     * @return the instance of {@code VisualGraphPanel}
     */
    public VisualGraphPanel getVisualGraphPanel() {
        if (this.visualGraphPanel == null) {
            throw new IllegalStateException("visualGraphPanel must not be null");
        }
        return this.visualGraphPanel;
    }

    /**
     * Returns the program output path.
     *
     * @return the path were output files of the programs are stored
     */
    public static String getProgramOutputPath() {
        return Controller.PROGRAM_OUTPUT_PATH;
    }

    /**
     * Creates a new {@code OptionFileParser} that reads out the option File or
     * creates one if it not exists.
     */
    public void parseOptionFile() {
        this.fileParser = new OptionFileParser(Controller.PROGRAM_OUTPUT_PATH, this.programFrame);
    }

    /**
     * Return the {@code DFAExecution} for testing purposes
     *
     * @return the DFAExecution
     */
    public DFAExecution<? extends LatticeElement> getDFAExecution() {
        return this.dfaExecution;
    }
}
| |
package org.nybatis.core.db.datasource.driver;
import org.nybatis.core.util.StringUtil;
import org.nybatis.core.validation.Validator;
/**
 * Database driver attributes: paging/count SQL decoration, ping query and
 * driver-matching pattern for a particular database product.
 *
 * @author nayasis@gmail.com
 * @since 2015-10-29
 */
public class DatabaseAttribute {

    // Placeholder parameter names substituted into paging SQL fragments.
    public static final String PAGE_PARAM_START  = "NybatisPagebuilder.START";
    public static final String PAGE_PARAM_END    = "NybatisPagebuilder.END";
    public static final String PAGE_PARAM_OFFSET = "NybatisPagebuilder.OFFSET";
    public static final String PAGE_PARAM_COUNT  = "NybatisPagebuilder.COUNT";

    // NOTE: constant name keeps its historical spelling ("UNKOWN") because it
    // is part of the public API.
    public static final String DATABASE_UNKOWN = "unknown";

    private String  database                 = DATABASE_UNKOWN;
    private String  patternToMatchClassName  = "";
    private boolean enableToGetParameterType = true;
    private boolean enableToDoLobPrefetch    = true;

    // SQL fragments wrapped around a statement to page it (MySQL-style default).
    private String pageSqlPre  = "";
    private String pageSqlPost = String.format( "\nLIMIT #{%s}, #{%s}", PAGE_PARAM_START, PAGE_PARAM_END );

    // SQL fragments wrapped around a statement to count its rows.
    private String countSqlPre  = "SELECT COUNT(1) AS CNT FROM (\n";
    private String countSqlPost = "\n) NybatisCountQuery";

    // Lightweight query used to validate a connection.
    private String pingQuery = "SELECT 1";

    /**
     * Driver Attributes
     */
    public DatabaseAttribute() {}

    /**
     * Driver Attributes
     *
     * @see DatabaseName
     * @param database Driver name (oracle, mysql, maria, sqlite, h2, hsqldb, mssql, postgresql, sybase, db2, odbc ...)
     * @param patternToMatchClassName Driver name pattern to match with Database name. It must be regular expression.
     */
    public DatabaseAttribute( String database, String patternToMatchClassName ) {
        setDatabase( database );
        setPatternToMatchClassName( patternToMatchClassName );
    }

    /**
     * Driver Attributes
     *
     * @see DatabaseName
     * @param database Driver name (oracle, mysql, maria, sqlite, h2, hsqldb, mssql, postgresql, sybase, db2, odbc ...)
     */
    public DatabaseAttribute( DatabaseName database ) {
        setDatabase( database.name );
        setPatternToMatchClassName( database.driverNamePattern );
    }

    /**
     * get database type
     *
     * @see DatabaseName
     * @return database type (oracle, mysql, maria, sqlite, h2, hsqldb, mssql, postgresql, sybase, db2, odbc ...)
     */
    public String getDatabase() {
        return database;
    }

    /**
     * set database type
     *
     * @param database database type name
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute setDatabase( String database ) {
        this.database = StringUtil.compressSpaceOrEnter( database );
        return this;
    }

    /**
     * get regular expression used to match a driver's class name
     *
     * @return pattern to match with class name
     */
    public String getPatternToMatchClassName() {
        return patternToMatchClassName;
    }

    /**
     * set regular expression used to match a driver's class name
     *
     * @param pattern regular expression
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute setPatternToMatchClassName( String pattern ) {
        this.patternToMatchClassName = StringUtil.compressSpaceOrEnter( pattern );
        return this;
    }

    /**
     * Check whether connection class name is matched with the configured pattern.
     *
     * @param driverName driver name
     * @return if matched with pattern, return true
     */
    public boolean isMatched( String driverName ) {
        return Validator.isFound( driverName, patternToMatchClassName );
    }

    /** @return whether parameter types may be queried from the driver */
    public boolean enableToGetParameterType() {
        return enableToGetParameterType;
    }

    /**
     * @param enable whether parameter types may be queried from the driver
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute enableToGetParameterType( boolean enable ) {
        this.enableToGetParameterType = enable;
        return this;
    }

    /** @return whether LOB prefetch is enabled for the driver */
    public boolean enableToDoLobPrefetch() {
        return enableToDoLobPrefetch;
    }

    /**
     * @param enable whether LOB prefetch is enabled for the driver
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute enableToDoLobPrefetch( boolean enable ) {
        this.enableToDoLobPrefetch = enable;
        return this;
    }

    /** @return SQL fragment placed before a statement to page it */
    public String getPageSqlPre() {
        return pageSqlPre;
    }

    /**
     * Set SQL fragment placed before a statement to page it. Case-insensitive
     * placeholders {@code #{start}}, {@code #{end}}, {@code #{offset}} and
     * {@code #{count}} are rewritten to the internal page parameter names.
     *
     * @param sql SQL fragment
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute setPageSqlPre( String sql ) {
        pageSqlPre = StringUtil.compressSpaceOrEnter( sql ) + " ";
        pageSqlPre = pageSqlPre
                .replaceAll( "(?i)#\\{start\\}",  "#{" + PAGE_PARAM_START  + "}" )
                .replaceAll( "(?i)#\\{end\\}",    "#{" + PAGE_PARAM_END    + "}" )
                .replaceAll( "(?i)#\\{offset\\}", "#{" + PAGE_PARAM_OFFSET + "}" )
                .replaceAll( "(?i)#\\{count\\}",  "#{" + PAGE_PARAM_COUNT  + "}" )
        ;
        return this;
    }

    /** @return SQL fragment placed after a statement to page it */
    public String getPageSqlPost() {
        return pageSqlPost;
    }

    /**
     * Set SQL fragment placed after a statement to page it. Placeholders are
     * rewritten exactly as in {@link #setPageSqlPre(String)}.
     *
     * @param sql SQL fragment
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute setPageSqlPost( String sql ) {
        pageSqlPost = StringUtil.compressSpaceOrEnter( sql ) + " ";
        pageSqlPost = pageSqlPost
                .replaceAll( "(?i)#\\{start\\}",  "#{" + PAGE_PARAM_START  + "}" )
                .replaceAll( "(?i)#\\{end\\}",    "#{" + PAGE_PARAM_END    + "}" )
                .replaceAll( "(?i)#\\{offset\\}", "#{" + PAGE_PARAM_OFFSET + "}" )
                .replaceAll( "(?i)#\\{count\\}",  "#{" + PAGE_PARAM_COUNT  + "}" )
        ;
        return this;
    }

    /** @return SQL fragment placed before a statement to count its rows */
    public String getCountSqlPre() {
        return countSqlPre;
    }

    /**
     * @param sql SQL fragment placed before a statement to count its rows
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute setCountSqlPre( String sql ) {
        this.countSqlPre = StringUtil.compressSpaceOrEnter( sql ) + " ";
        return this;
    }

    /** @return SQL fragment placed after a statement to count its rows */
    public String getCountSqlPost() {
        return countSqlPost;
    }

    /**
     * @param sql SQL fragment placed after a statement to count its rows
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute setCountSqlPost( String sql ) {
        this.countSqlPost = StringUtil.compressSpaceOrEnter( sql ) + " ";
        return this;
    }

    /** @return lightweight query used to validate a connection */
    public String getPingQuery() {
        return pingQuery;
    }

    /**
     * @param query lightweight query used to validate a connection
     * @return self instance, for fluent usage
     */
    public DatabaseAttribute setPingQuery( String query ) {
        pingQuery = StringUtil.compressSpaceOrEnter( query );
        return this;
    }

    @Override
    public String toString() {
        return String.format(
                "Database\n" +
                "    - name : [%s]\n" +
                "    - Pattern to match with class name: [%s]\n" +
                "Page Sql\n" +
                "    - pre :\n%s\n" +
                "    - post:\n%s\n" +
                "Ping Query:\n" +
                "    - %s"
                ,
                getDatabase(),
                patternToMatchClassName,
                getPageSqlPre(),
                getPageSqlPost(),
                getPingQuery()
        );
    }

    /**
     * Create a field-by-field copy of this instance.
     * <p>
     * Implemented by direct field copy (not {@code super.clone()}) because the
     * class intentionally does not implement {@link Cloneable}; all fields are
     * immutable {@code String}/{@code boolean} values, so a shallow copy is a
     * full copy.
     *
     * @return copied instance
     */
    @Override
    public DatabaseAttribute clone() {
        DatabaseAttribute attribute = new DatabaseAttribute();
        attribute.database                 = database;
        attribute.patternToMatchClassName  = patternToMatchClassName;
        attribute.enableToGetParameterType = enableToGetParameterType;
        attribute.enableToDoLobPrefetch    = enableToDoLobPrefetch;
        attribute.pageSqlPre               = pageSqlPre;
        attribute.pageSqlPost              = pageSqlPost;
        attribute.countSqlPre              = countSqlPre;
        attribute.countSqlPost             = countSqlPost;
        attribute.pingQuery                = pingQuery;
        return attribute;
    }
}
| |
package jenkins.plugins.logstash;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Objects;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import hudson.util.Secret;
import org.kohsuke.stapler.StaplerRequest;
import com.cloudbees.syslog.MessageFormat;
import org.apache.commons.lang.time.FastDateFormat;
import org.apache.http.client.utils.URIBuilder;
import hudson.Extension;
import hudson.init.InitMilestone;
import hudson.init.Initializer;
import jenkins.model.GlobalConfiguration;
import jenkins.plugins.logstash.LogstashInstallation.Descriptor;
import jenkins.plugins.logstash.configuration.ElasticSearch;
import jenkins.plugins.logstash.configuration.LogstashIndexer;
import jenkins.plugins.logstash.configuration.RabbitMq;
import jenkins.plugins.logstash.configuration.Redis;
import jenkins.plugins.logstash.configuration.Syslog;
import jenkins.plugins.logstash.persistence.LogstashIndexerDao;
import jenkins.plugins.logstash.persistence.LogstashIndexerDao.IndexerType;
import net.sf.json.JSONObject;
@Extension
public class LogstashConfiguration extends GlobalConfiguration
{
private static final Logger LOGGER = Logger.getLogger(LogstashConfiguration.class.getName());
private static final FastDateFormat MILLIS_FORMATTER = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
private static final FastDateFormat LEGACY_FORMATTER = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ssZ");
private LogstashIndexer<?> logstashIndexer;
private Boolean enabled;
private boolean dataMigrated = false;
private boolean enableGlobally = false;
private boolean milliSecondTimestamps = true;
private transient LogstashIndexer<?> activeIndexer;
// a flag indicating if we're currently in the configure method.
private transient boolean configuring = false;
public LogstashConfiguration()
{
load();
if (enabled == null)
{
if (logstashIndexer == null)
{
enabled = false;
}
else
{
enabled = true;
}
}
activeIndexer = logstashIndexer;
}
public boolean isEnabled()
{
return enabled == null ? false: enabled;
}
public void setEnabled(boolean enabled)
{
this.enabled = enabled;
}
public boolean isEnableGlobally()
{
return enableGlobally;
}
public void setEnableGlobally(boolean enableGlobally)
{
this.enableGlobally = enableGlobally;
}
public boolean isMilliSecondTimestamps()
{
return milliSecondTimestamps;
}
public void setMilliSecondTimestamps(boolean milliSecondTimestamps)
{
this.milliSecondTimestamps = milliSecondTimestamps;
}
public FastDateFormat getDateFormatter()
{
if (milliSecondTimestamps)
{
return MILLIS_FORMATTER;
}
else
{
return LEGACY_FORMATTER;
}
}
/**
* Returns the current logstash indexer configuration.
*
* @return configuration instance
*/
public LogstashIndexer<?> getLogstashIndexer()
{
return logstashIndexer;
}
public void setLogstashIndexer(LogstashIndexer<?> logstashIndexer)
{
this.logstashIndexer = logstashIndexer;
if (!configuring && !Objects.equals(logstashIndexer, activeIndexer))
{
activeIndexer = logstashIndexer;
}
}
/**
* Returns the actual instance of the logstash dao.
* @return dao instance
*/
@CheckForNull
public LogstashIndexerDao getIndexerInstance()
{
if (activeIndexer != null)
{
return activeIndexer.getInstance();
}
return null;
}
public List<?> getIndexerTypes()
{
return LogstashIndexer.all();
}
@SuppressWarnings("deprecation")
@Initializer(after = InitMilestone.JOB_LOADED)
public void migrateData()
{
if (!dataMigrated)
{
Descriptor descriptor = LogstashInstallation.getLogstashDescriptor();
if (descriptor.getType() != null)
{
IndexerType type = descriptor.getType();
switch (type)
{
case REDIS:
LOGGER.log(Level.INFO, "Migrating logstash configuration for Redis");
Redis redis = new Redis();
redis.setHost(descriptor.getHost());
redis.setPort(descriptor.getPort());
redis.setKey(descriptor.getKey());
redis.setPassword(Secret.fromString(descriptor.getPassword()));
logstashIndexer = redis;
enabled = true;
break;
case ELASTICSEARCH:
LOGGER.log(Level.INFO, "Migrating logstash configuration for Elastic Search");
URI uri;
try
{
uri = (new URIBuilder(descriptor.getHost()))
.setPort(descriptor.getPort())
.setPath("/" + descriptor.getKey()).build();
ElasticSearch es = new ElasticSearch();
es.setUri(uri);
es.setUsername(descriptor.getUsername());
es.setPassword(Secret.fromString(descriptor.getPassword()));
logstashIndexer = es;
enabled = true;
}
catch (URISyntaxException e)
{
enabled = false;
LOGGER.log(Level.INFO, "Migrating logstash configuration for Elastic Search failed: " + e.toString());
}
break;
case RABBIT_MQ:
LOGGER.log(Level.INFO, "Migrating logstash configuration for RabbitMQ");
RabbitMq rabbitMq = new RabbitMq("");
rabbitMq.setHost(descriptor.getHost());
rabbitMq.setPort(descriptor.getPort());
rabbitMq.setQueue(descriptor.getKey());
rabbitMq.setUsername(descriptor.getUsername());
rabbitMq.setPassword(Secret.fromString(descriptor.getPassword()));
logstashIndexer = rabbitMq;
enabled = true;
break;
case SYSLOG:
LOGGER.log(Level.INFO, "Migrating logstash configuration for SYSLOG");
Syslog syslog = new Syslog();
syslog.setHost(descriptor.getHost());
syslog.setPort(descriptor.getPort());
syslog.setSyslogProtocol(descriptor.getSyslogProtocol());
switch (descriptor.getSyslogFormat())
{
case RFC3164:
syslog.setMessageFormat(MessageFormat.RFC_3164);
break;
case RFC5424:
syslog.setMessageFormat(MessageFormat.RFC_5424);
break;
default:
syslog.setMessageFormat(MessageFormat.RFC_3164);
break;
}
logstashIndexer = syslog;
enabled = true;
break;
default:
LOGGER.log(Level.INFO, "unknown logstash Indexer type: " + type);
enabled = false;
break;
}
milliSecondTimestamps = false;
activeIndexer = logstashIndexer;
}
dataMigrated = true;
save();
}
}
@Override
public boolean configure(StaplerRequest staplerRequest, JSONObject json) throws FormException
{
// When not enabling the plugin we just save the enabled state
// without binding the JSON and then return. This avoids problems with missing configuration
// like URLs which can't be parsed when empty, which would lead to errors in the UI.
Boolean e = json.getBoolean("enabled");
if (!e)
{
enabled = false;
save();
return true;
}
configuring = true;
// when we bind the stapler request we get a new instance of logstashIndexer.
// logstashIndexer is holder for the dao instance.
// To avoid that we get a new dao instance in case there was no change in configuration
// we compare it to the currently active configuration.
try
{
staplerRequest.bindJSON(this, json);
try {
// validate
logstashIndexer.validate();
} catch (Exception ex) {
// You are here which means user is trying to save invalid indexer configuration.
// Exception will be thrown here so that it gets displayed on UI.
// But before that revert back to original configuration (in-memory)
// so that when user refreshes the configuration page, last saved settings will be displayed again.
logstashIndexer = activeIndexer;
throw new IllegalArgumentException(ex);
}
if (!Objects.equals(logstashIndexer, activeIndexer))
{
activeIndexer = logstashIndexer;
}
save();
return true;
}
finally
{
configuring = false;
}
}
/**
 * Returns the singleton instance registered in Jenkins' global
 * configuration extension list, or {@code null} if it is not registered.
 */
public static LogstashConfiguration getInstance()
{
    return GlobalConfiguration.all().get(LogstashConfiguration.class);
}
}
| |
/*
* Copyright 2015 Daniel Dittmar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package dan.dit.whatsthat.util.general;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.util.Base64;
import android.util.Log;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.RSAKeyGenParameterSpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Arrays;
import javax.crypto.Cipher;
import dan.dit.whatsthat.util.image.ExternalStorage;
/**
* Non high security cryptography helper class to asymmetrically
* encrypt and decrypt strings. Offers a public key to encrypt messages
* that can only be read by the developers or people that can break 1024 bit codes.<br>
* Offers also a way to generate a key pair and reading these keys from a file in external storage.
* This is especially important as the secret key must not be included in the source code!
* Created by daniel on 04.08.15.
*/
public class SimpleCrypto {
private static final String PUBLIC_KEY_FILE = "dev_key_public.txt";
private static final String PRIVATE_KEY_FILE = "dev_key_private.txt";
private static final String DEVELOPER_PUBLIC_KEY_ENCODED = "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCYT5vZ5Wof4Hh3hgNjVVAd13bUrPnqyiHXqCRT\n" +
"zvEUVPAnokpr+Uw2Ft2YFPSw9J4USHrqWqVdumiABJameWx6MuvNPUU4yNd/xWd3UYpCMwJHaJm3\n" +
"WP481XbUk5qU5JZWAPPZGHYBEm5FXA1kC5L8jfT41+F1ca2R0dA7S3GXEQIDAQAB";
private static Key DEVELOPER_PUBLIC_KEY;
private SimpleCrypto() {}
private static String encodeToString(byte[] data) {
return Base64.encodeToString(data, Base64.DEFAULT);
}
private static byte[] encodedToBytes(String encoded) {
return Base64.decode(encoded, Base64.DEFAULT);
}
/**
* Saves the given key pair to external storage in two separate files writing
* only the key encoded by Base64 with default settings. No descriptive or other metadata
* is provided to and into the file.
* @param pair The key pair to save to files.
*/
public static void saveKeyPair(@NonNull KeyPair pair) {
DEVELOPER_PUBLIC_KEY = pair.getPublic();
FileWriter writer = null;
try {
writer = new FileWriter(new File(ExternalStorage.getExternalStoragePathIfMounted(null) + "/" + PUBLIC_KEY_FILE));
writer.write(encodeToString(pair.getPublic().getEncoded()));
} catch (IOException e) {
Log.e("HomeStuff", "Error during writing public key." + e);
} finally {
if (writer != null) {
try {
writer.close();
} catch (IOException ioe) {
Log.e("HomeStuff", "Error closing file writer while writing public key. " + ioe);
}
}
}
try {
writer = new FileWriter(new File(ExternalStorage.getExternalStoragePathIfMounted(null) + "/" + PRIVATE_KEY_FILE));
writer.write(encodeToString(pair.getPrivate().getEncoded()));
} catch (IOException e) {
Log.e("HomeStuff", "Error during writing private key." + e);
} finally {
if (writer != null) {
try {
writer.close();
} catch (IOException ioe) {
Log.e("HomeStuff", "Error closing file writer while writing private key. " + ioe);
}
}
}
}
/**
* Retrieves the fixed developer public key. The key is initialized the first
* time this method is invoked and future invocations will return the same key.
* This method will return a static key but can be altered to read the key from the file
* if needed for debugging.
* @return The developer's public key. Can be null on error.
*/
public static synchronized Key getDeveloperPublicKey() {
if (DEVELOPER_PUBLIC_KEY != null) {
return DEVELOPER_PUBLIC_KEY;
}
String keyEncoded = DEVELOPER_PUBLIC_KEY_ENCODED;
if (TextUtils.isEmpty(keyEncoded)) {
// attempt to read key from file
StringBuilder builder = new StringBuilder();
FileReader reader = null;
try {
reader = new FileReader(new File(ExternalStorage.getExternalStoragePathIfMounted(null) + "/" + PUBLIC_KEY_FILE));
char[] buffer = new char[64];
int read;
while ((read = reader.read(buffer)) > 0) {
builder.append(buffer, 0, read);
}
} catch (Exception e) {
Log.e("HomeStuff", "Error trying to read public key file: " + e);
return null;
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException ioe) {
Log.e("HomeStuff", "Error closing file reader when reading public key file.");
}
}
}
keyEncoded = builder.toString();
}
X509EncodedKeySpec x509KeySpec = new X509EncodedKeySpec(SimpleCrypto.encodedToBytes(keyEncoded));
try {
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
DEVELOPER_PUBLIC_KEY = keyFactory.generatePublic(x509KeySpec);
return DEVELOPER_PUBLIC_KEY;
} catch (Exception e) {
Log.e("HomeStuff", "Error with keyfactory when decoding public key" + e);
return null;
}
}
/**
* Returns the developer's private key by reading it from file.
* The key is not kept in memory, but this is not top secret so pieces
* can and will be leaked into memory.
* @return The developer's private key read from file or null on error.
*/
public static Key getDeveloperPrivateKey() {
StringBuilder builder = new StringBuilder();
FileReader reader = null;
try {
reader = new FileReader(new File(ExternalStorage.getExternalStoragePathIfMounted(null) + "/" + PRIVATE_KEY_FILE));
char[] buffer = new char[64];
int read;
while ((read = reader.read(buffer)) > 0) {
builder.append(buffer, 0, read);
}
Arrays.fill(buffer, '\0');
} catch (Exception e) {
Log.e("HomeStuff", "Error trying to read private key file: " + e);
return null;
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException ioe) {
Log.e("HomeStuff", "Error closing file reader when reading private key file.");
}
}
}
byte[] data = SimpleCrypto.encodedToBytes(builder.toString());
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(data);
try {
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
Key privateKey = keyFactory.generatePrivate(keySpec);
Arrays.fill(data, (byte) 0);
return privateKey;
} catch (Exception e) {
Log.e("HomeStuff", "Error with keyfactory when decoding private key" + e);
return null;
}
}
/**
* Generates a new 2014-bit RSA key pair.
* @return A new RSA key pair or null on error.
*/
public static KeyPair generateKeyPair() {
// Generate key pair for 1024-bit RSA encryption and decryption
try {
SecureRandom random = new SecureRandom();
RSAKeyGenParameterSpec spec = new RSAKeyGenParameterSpec(1024, RSAKeyGenParameterSpec.F4);
KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
generator.initialize(spec, random);
return generator.genKeyPair();
} catch (Exception e) {
Log.e("HomeStuff", "RSA key pair error" + e);
return null;
}
}
/**
* Encrypts the given data string with the given public key, returning
* the encrypted encoded data in Base64.
* @param publicKey The public key used for the RSA encryption.
* @param data The data to encrypt.
* @return The encrypted data or null on error or illegal parameter.
*/
public static String encrypt(Key publicKey, String data) {
if (publicKey == null || TextUtils.isEmpty(data)) {
return null;
}
// Encode the original data with RSA public key
byte[] encodedBytes;
try {
Cipher c = Cipher.getInstance("RSA/ECB/PKCS1Padding");
c.init(Cipher.ENCRYPT_MODE, publicKey);
encodedBytes = c.doFinal(data.getBytes());
return encodeToString(encodedBytes);
} catch (Exception e) {
Log.e("HomeStuff", "RSA encryption error");
return null;
}
}
/**
* Decrypts the given encrypted encoded data in Base64 using the given
* private key.
* @param privateKey The private RSA key belonging to the key pair with
* the public key used for encrypting the given encrypted data.
* @param encrypted The encrypted data to decrypt.
* @return The decrypted data or null on error or if illegal parameter were given.
*/
public static String decrypt(Key privateKey, String encrypted) {
if (privateKey == null || TextUtils.isEmpty(encrypted)) {
return null;
}
// Decode the encoded data with RSA private key
byte[] decodedBytes;
try {
Cipher c = Cipher.getInstance("RSA/ECB/PKCS1Padding");
c.init(Cipher.DECRYPT_MODE, privateKey);
decodedBytes = c.doFinal(encodedToBytes(encrypted));
return new String(decodedBytes);
} catch (Exception e) {
Log.e("HomeStuff", "RSA decryption error");
return null;
}
}
}
| |
/**
* This file is part of the JCROM project.
* Copyright (C) 2008-2015 - All rights reserved.
* Authors: Olafur Gauti Gudmundsson, Nicolas Dos Santos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jcrom.util.io;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
/**
* General IO stream manipulation utilities.
*
* <p>This class provides static utility methods for input/output operations.</p>
*/
public final class IOUtils {
    /**
     * The Unix directory separator character.
     */
    public static final char DIR_SEPARATOR_UNIX = '/';
    /**
     * The Windows directory separator character.
     */
    public static final char DIR_SEPARATOR_WINDOWS = '\\';
    /**
     * The system directory separator character.
     */
    public static final char DIR_SEPARATOR = File.separatorChar;
    /**
     * The Unix line separator string.
     */
    public static final String LINE_SEPARATOR_UNIX = "\n";
    /**
     * The Windows line separator string.
     */
    public static final String LINE_SEPARATOR_WINDOWS = "\r\n";
    /**
     * The system line separator string.
     */
    public static final String LINE_SEPARATOR;
    static {
        // Determined via PrintWriter.println() instead of reading the
        // "line.separator" system property, which a SecurityManager may deny.
        StringWriter buf = new StringWriter(4);
        PrintWriter out = new PrintWriter(buf);
        out.println();
        LINE_SEPARATOR = buf.toString();
        out.close();
    }
    /**
     * The default buffer size to use for
     * {@link #copyLarge(InputStream, OutputStream)}
     * and
     * {@link #copyLarge(Reader, Writer)}
     */
    private static final int DEFAULT_BUFFER_SIZE = 1024 * 4;

    /** Static utility class; never instantiated. */
    private IOUtils() {
    }

    /**
     * Unconditionally close an <code>InputStream</code>.
     * <p>
     * Equivalent to {@link InputStream#close()}, except any exceptions will be ignored.
     * This is typically used in finally blocks.
     *
     * @param input the InputStream to close, may be null or already closed
     */
    public static void closeQuietly(InputStream input) {
        closeQuietly((Closeable) input);
    }

    /**
     * Unconditionally close a <code>Closeable</code>.
     * <p>
     * Equivalent to {@link Closeable#close()}, except any exceptions will be ignored.
     * This is typically used in finally blocks.
     *
     * @param closeable the object to close, may be null or already closed
     * @since Commons IO 2.0
     */
    public static void closeQuietly(Closeable closeable) {
        try {
            if (closeable != null) {
                closeable.close();
            }
        } catch (IOException ignored) {
            // deliberately swallowed: close failures are uninteresting here
        }
    }

    /**
     * Get the contents of an <code>InputStream</code> as a <code>byte[]</code>.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedInputStream</code>.
     *
     * @param input the <code>InputStream</code> to read from
     * @return the requested byte array
     * @throws NullPointerException if the input is null
     * @throws IOException if an I/O error occurs
     */
    public static byte[] toByteArray(InputStream input) throws IOException {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        copy(input, output);
        return output.toByteArray();
    }

    /**
     * Get contents of an <code>InputStream</code> as a <code>byte[]</code>.
     * Use this method instead of <code>toByteArray(InputStream)</code>
     * when <code>InputStream</code> size is known.
     * <b>NOTE:</b> the method checks that the length can safely be cast to an int without truncation
     * before using {@link IOUtils#toByteArray(java.io.InputStream, int)} to read into the byte array.
     * (Arrays can have no more than Integer.MAX_VALUE entries anyway)
     *
     * @param input the <code>InputStream</code> to read from
     * @param size the size of <code>InputStream</code>
     * @return the requested byte array
     * @throws IOException if an I/O error occurs or <code>InputStream</code> size differ from parameter size
     * @throws IllegalArgumentException if size is less than zero or size is greater than Integer.MAX_VALUE
     * @see IOUtils#toByteArray(java.io.InputStream, int)
     * @since Commons IO 2.1
     */
    public static byte[] toByteArray(InputStream input, long size) throws IOException {
        if (size > Integer.MAX_VALUE) {
            throw new IllegalArgumentException("Size cannot be greater than Integer max value: " + size);
        }
        // Negative sizes are rejected by the int overload.
        return toByteArray(input, (int) size);
    }

    /**
     * Get the contents of an <code>InputStream</code> as a <code>byte[]</code>.
     * Use this method instead of <code>toByteArray(InputStream)</code>
     * when <code>InputStream</code> size is known.
     * Note: only a stream that is SHORTER than <code>size</code> is detected
     * and reported; extra unread bytes remain in the stream.
     *
     * @param input the <code>InputStream</code> to read from
     * @param size the size of <code>InputStream</code>
     * @return the requested byte array
     * @throws IOException if an I/O error occurs or <code>InputStream</code> size differ from parameter size
     * @throws IllegalArgumentException if size is less than zero
     * @since Commons IO 2.1
     */
    public static byte[] toByteArray(InputStream input, int size) throws IOException {
        if (size < 0) {
            throw new IllegalArgumentException("Size must be equal or greater than zero: " + size);
        }
        if (size == 0) {
            return new byte[0];
        }
        byte[] data = new byte[size];
        int offset = 0;
        int read;
        // Loop because a single read() may legally return fewer bytes than requested.
        while (offset < size && (read = input.read(data, offset, size - offset)) != -1) {
            offset += read;
        }
        if (offset != size) {
            // Fixed message wording (was: "Unexpected readed size. ... excepted: ...").
            throw new IOException("Unexpected read size. current: " + offset + ", expected: " + size);
        }
        return data;
    }

    /**
     * Get the contents of an <code>InputStream</code> as a String
     * using the specified character encoding.
     * <p>
     * Character encoding names can be found at
     * <a href="http://www.iana.org/assignments/character-sets">IANA</a>.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedInputStream</code>.
     *
     * @param input the <code>InputStream</code> to read from
     * @param encoding the encoding to use, null means platform default
     * @return the requested String
     * @throws NullPointerException if the input is null
     * @throws IOException if an I/O error occurs
     */
    public static String toString(InputStream input, String encoding) throws IOException {
        StringWriter sw = new StringWriter();
        copy(input, sw, encoding);
        return sw.toString();
    }

    /**
     * Copy bytes from an <code>InputStream</code> to an
     * <code>OutputStream</code>.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedInputStream</code>.
     * <p>
     * Large streams (over 2GB) will return a bytes copied value of
     * <code>-1</code> after the copy has completed since the correct
     * number of bytes cannot be returned as an int. For large streams
     * use the <code>copyLarge(InputStream, OutputStream)</code> method.
     *
     * @param input the <code>InputStream</code> to read from
     * @param output the <code>OutputStream</code> to write to
     * @return the number of bytes copied, or -1 if &gt; Integer.MAX_VALUE
     * @throws NullPointerException if the input or output is null
     * @throws IOException if an I/O error occurs
     * @since Commons IO 1.1
     */
    public static int copy(InputStream input, OutputStream output) throws IOException {
        long count = copyLarge(input, output);
        if (count > Integer.MAX_VALUE) {
            return -1;
        }
        return (int) count;
    }

    /**
     * Copy bytes from a large (over 2GB) <code>InputStream</code> to an
     * <code>OutputStream</code>.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedInputStream</code>.
     *
     * @param input the <code>InputStream</code> to read from
     * @param output the <code>OutputStream</code> to write to
     * @return the number of bytes copied
     * @throws NullPointerException if the input or output is null
     * @throws IOException if an I/O error occurs
     * @since Commons IO 1.3
     */
    public static long copyLarge(InputStream input, OutputStream output) throws IOException {
        byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
        long count = 0;
        int n;
        while (-1 != (n = input.read(buffer))) {
            output.write(buffer, 0, n);
            count += n;
        }
        return count;
    }

    /**
     * Copy bytes from an <code>InputStream</code> to chars on a
     * <code>Writer</code> using the default character encoding of the platform.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedInputStream</code>.
     * <p>
     * This method uses {@link InputStreamReader}.
     *
     * @param input the <code>InputStream</code> to read from
     * @param output the <code>Writer</code> to write to
     * @throws NullPointerException if the input or output is null
     * @throws IOException if an I/O error occurs
     * @since Commons IO 1.1
     */
    public static void copy(InputStream input, Writer output) throws IOException {
        InputStreamReader in = new InputStreamReader(input);
        copy(in, output);
    }

    /**
     * Copy bytes from an <code>InputStream</code> to chars on a
     * <code>Writer</code> using the specified character encoding.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedInputStream</code>.
     * <p>
     * Character encoding names can be found at
     * <a href="http://www.iana.org/assignments/character-sets">IANA</a>.
     * <p>
     * This method uses {@link InputStreamReader}.
     *
     * @param input the <code>InputStream</code> to read from
     * @param output the <code>Writer</code> to write to
     * @param encoding the encoding to use, null means platform default
     * @throws NullPointerException if the input or output is null
     * @throws IOException if an I/O error occurs
     * @since Commons IO 1.1
     */
    public static void copy(InputStream input, Writer output, String encoding) throws IOException {
        if (encoding == null) {
            copy(input, output);
        } else {
            InputStreamReader in = new InputStreamReader(input, encoding);
            copy(in, output);
        }
    }

    /**
     * Copy chars from a <code>Reader</code> to a <code>Writer</code>.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedReader</code>.
     * <p>
     * Large streams (over 2GB) will return a chars copied value of
     * <code>-1</code> after the copy has completed since the correct
     * number of chars cannot be returned as an int. For large streams
     * use the <code>copyLarge(Reader, Writer)</code> method.
     *
     * @param input the <code>Reader</code> to read from
     * @param output the <code>Writer</code> to write to
     * @return the number of characters copied, or -1 if &gt; Integer.MAX_VALUE
     * @throws NullPointerException if the input or output is null
     * @throws IOException if an I/O error occurs
     * @since Commons IO 1.1
     */
    public static int copy(Reader input, Writer output) throws IOException {
        long count = copyLarge(input, output);
        if (count > Integer.MAX_VALUE) {
            return -1;
        }
        return (int) count;
    }

    /**
     * Copy chars from a large (over 2GB) <code>Reader</code> to a <code>Writer</code>.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedReader</code>.
     *
     * @param input the <code>Reader</code> to read from
     * @param output the <code>Writer</code> to write to
     * @return the number of characters copied
     * @throws NullPointerException if the input or output is null
     * @throws IOException if an I/O error occurs
     * @since Commons IO 1.3
     */
    public static long copyLarge(Reader input, Writer output) throws IOException {
        char[] buffer = new char[DEFAULT_BUFFER_SIZE];
        long count = 0;
        int n;
        while (-1 != (n = input.read(buffer))) {
            output.write(buffer, 0, n);
            count += n;
        }
        return count;
    }

    /**
     * Copy chars from a <code>Reader</code> to bytes on an
     * <code>OutputStream</code> using the default character encoding of the
     * platform, and calling flush.
     * <p>
     * This method buffers the input internally, so there is no need to use a
     * <code>BufferedReader</code>.
     * <p>
     * Due to the implementation of OutputStreamWriter, this method performs a
     * flush.
     * <p>
     * This method uses {@link OutputStreamWriter}.
     *
     * @param input the <code>Reader</code> to read from
     * @param output the <code>OutputStream</code> to write to
     * @throws NullPointerException if the input or output is null
     * @throws IOException if an I/O error occurs
     * @since Commons IO 1.1
     */
    public static void copy(Reader input, OutputStream output) throws IOException {
        OutputStreamWriter out = new OutputStreamWriter(output);
        copy(input, out);
        // XXX Unless anyone is planning on rewriting OutputStreamWriter, we
        // have to flush here.
        out.flush();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.nio.tests.java.nio;
import dalvik.annotation.TestLevel;
import dalvik.annotation.TestTargetNew;
import dalvik.annotation.TestTargetClass;
import dalvik.annotation.AndroidOnly;
import java.nio.BufferOverflowException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.InvalidMarkException;
/**
* Tests java.nio.FloatBuffer
*
*/
@TestTargetClass(java.nio.FloatBuffer.class)
public abstract class FloatBufferTest extends AbstractBufferTest {
protected static final int SMALL_TEST_LENGTH = 5;
protected static final int BUFFER_LENGTH = 20;
protected FloatBuffer buf;
protected void setUp() throws Exception {
    // Fresh heap buffer per test, pre-filled with test data pattern 1
    // (loadTestData1 is presumably defined further down this class — not
    // visible in this chunk).
    capacity = BUFFER_LENGTH;
    buf = FloatBuffer.allocate(BUFFER_LENGTH);
    loadTestData1(buf);
    // Expose the buffer to the shared cases in AbstractBufferTest.
    baseBuf = buf;
}
protected void tearDown() throws Exception {
    // Drop references so every test starts from a fresh setUp() state.
    buf = null;
    baseBuf = null;
}
/*
 * Test for the static method FloatBuffer.allocate(int capacity). Covers the
 * following use cases: 1. verifying the properties of the allocated buffer,
 * and 2. expecting an IllegalArgumentException for a negative capacity.
 */
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "allocate",
        args = {int.class}
)
public void test_AllocateI() {
    // A freshly allocated buffer must start at position 0, be backed by an
    // array with zero offset, and have limit == capacity == requested size.
    FloatBuffer allocated = FloatBuffer.allocate(20);
    assertEquals(0, allocated.position());
    assertNotNull(allocated.array());
    assertEquals(0, allocated.arrayOffset());
    assertEquals(20, allocated.limit());
    assertEquals(20, allocated.capacity());
    // A zero-capacity allocation is legal and yields an empty buffer.
    allocated = FloatBuffer.allocate(0);
    assertEquals(0, allocated.position());
    assertNotNull(allocated.array());
    assertEquals(0, allocated.arrayOffset());
    assertEquals(0, allocated.limit());
    assertEquals(0, allocated.capacity());
    // A negative capacity must be rejected.
    try {
        allocated = FloatBuffer.allocate(-20);
        fail("allocate method does not throws expected exception");
    } catch (IllegalArgumentException expected) {
        // expected
    }
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "Verifies boundary values.",
        method = "put",
        args = {float.class}
)
public void testNaNs() {
    // +Infinity, -Infinity and a NaN: storing any of these through a
    // ByteBuffer must preserve the exact bit pattern (no NaN normalization).
    int[] patterns = new int[] { 0x7f800000, 0xff800000, 0x7fc00000 };
    for (int pattern : patterns) {
        float value = Float.intBitsToFloat(pattern);
        // Sanity check: raw bits survive the int -> float -> int round trip.
        assertTrue(pattern == Float.floatToRawIntBits(value));
        // Write the float into a buffer and read it back.
        ByteBuffer storage = ByteBuffer.allocate(8);
        storage.putFloat(value);
        float roundTripped = storage.getFloat(0);
        // The retrieved value must carry the identical bit sequence.
        assertTrue(pattern == Float.floatToRawIntBits(roundTripped));
    }
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "array",
        args = {}
)
public void testArray() {
    float[] backing = buf.array();
    int offset = buf.arrayOffset();
    int capacity = buf.capacity();
    // Initially the backing array must mirror the buffer content.
    assertContentEquals(buf, backing, offset, capacity);
    // Writes through the array must be visible through the buffer ...
    loadTestData1(backing, offset, capacity);
    assertContentEquals(buf, backing, offset, capacity);
    loadTestData2(backing, offset, capacity);
    assertContentEquals(buf, backing, offset, capacity);
    // ... and writes through the buffer must be visible through the array.
    loadTestData1(buf);
    assertContentEquals(buf, backing, offset, capacity);
    loadTestData2(buf);
    assertContentEquals(buf, backing, offset, capacity);
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "arrayOffset",
        args = {}
)
public void testArrayOffset() {
    float[] backing = buf.array();
    // Fill the backing array directly with ascending values.
    for (int i = 0; i < buf.capacity(); i++) {
        backing[i] = i;
    }
    int offset = buf.arrayOffset();
    assertContentEquals(buf, backing, offset, buf.capacity());
    // A wrapped view with a nonzero offset shares the same backing array, so
    // writes relative to its own arrayOffset must show up in buf as well.
    FloatBuffer wrapped = FloatBuffer.wrap(backing, 3, backing.length - 3);
    loadTestData1(backing, wrapped.arrayOffset(), wrapped.capacity());
    assertContentEquals(buf, backing, offset, buf.capacity());
    loadTestData2(backing, wrapped.arrayOffset(), wrapped.capacity());
    assertContentEquals(buf, backing, offset, buf.capacity());
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "asReadOnlyBuffer",
        args = {}
)
public void testAsReadOnlyBuffer() {
    buf.clear();
    buf.mark();
    buf.position(buf.limit());
    // The read-only view must be a distinct object mirroring buf's state.
    FloatBuffer ro = buf.asReadOnlyBuffer();
    assertNotSame(buf, ro);
    assertTrue(ro.isReadOnly());
    assertEquals(buf.position(), ro.position());
    assertEquals(buf.limit(), ro.limit());
    assertEquals(buf.isDirect(), ro.isDirect());
    assertEquals(buf.order(), ro.order());
    assertContentEquals(buf, ro);
    // Position, mark and limit of the view are independent of buf.
    ro.reset();
    assertEquals(ro.position(), 0);
    ro.clear();
    assertEquals(buf.position(), buf.limit());
    buf.reset();
    assertEquals(buf.position(), 0);
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "compact",
        args = {}
)
@AndroidOnly("Fails on RI. See comment below")
public void testCompact() {
    // case: buffer is full
    // With the entire content "remaining", compact() leaves position at
    // capacity, restores limit to capacity and discards the mark.
    buf.clear();
    buf.mark();
    loadTestData1(buf);
    FloatBuffer ret = buf.compact();
    assertSame(ret, buf); // compact() returns this
    assertEquals(buf.position(), buf.capacity());
    assertEquals(buf.limit(), buf.capacity());
    assertContentLikeTestData1(buf, 0, 0.0f, buf.capacity());
    try {
        // The mark must have been invalidated by compact().
        buf.reset();
        fail("Should throw Exception");
    } catch (InvalidMarkException e) {
        // expected
    }
    // case: buffer is empty
    buf.position(0);
    buf.limit(0);
    buf.mark();
    ret = buf.compact();
    assertSame(ret, buf);
    assertEquals(buf.position(), 0);
    assertEquals(buf.limit(), buf.capacity());
    assertContentLikeTestData1(buf, 0, 0.0f, buf.capacity());
    try {
        // Fails on RI. Spec doesn't specify the behavior if
        // actually nothing to be done by compact(). So RI doesn't reset
        // mark position
        buf.reset();
        fail("Should throw Exception");
    } catch (InvalidMarkException e) {
        // expected
    }
    // case: normal
    // Compacting [position=1, limit=5) shifts the 4 remaining elements to
    // the front of the buffer and leaves position just past them.
    assertTrue(buf.capacity() > 5);
    buf.position(1);
    buf.limit(5);
    buf.mark();
    ret = buf.compact();
    assertSame(ret, buf);
    assertEquals(buf.position(), 4);
    assertEquals(buf.limit(), buf.capacity());
    assertContentLikeTestData1(buf, 0, 1.0f, 4);
    try {
        buf.reset();
        fail("Should throw Exception");
    } catch (InvalidMarkException e) {
        // expected
    }
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "compareTo",
        args = {java.nio.FloatBuffer.class}
)
public void testCompareTo() {
    // Comparing against null must fail fast.
    try {
        buf.compareTo(null);
        fail("Should throw NPE");
    } catch (NullPointerException e) {
        // expected
    }
    // compare to self
    assertEquals(0, buf.compareTo(buf));
    // normal cases
    assertTrue(buf.capacity() > 5);
    buf.clear();
    FloatBuffer other = FloatBuffer.allocate(buf.capacity());
    loadTestData1(other);
    // Same remaining content -> equal in both directions.
    assertEquals(0, buf.compareTo(other));
    assertEquals(0, other.compareTo(buf));
    // Moving a position changes the remaining sequence and thus the ordering.
    buf.position(1);
    assertTrue(buf.compareTo(other) > 0);
    assertTrue(other.compareTo(buf) < 0);
    other.position(2);
    assertTrue(buf.compareTo(other) < 0);
    assertTrue(other.compareTo(buf) > 0);
    // Shorter remaining sequence (a prefix of the other) compares smaller.
    buf.position(2);
    other.limit(5);
    assertTrue(buf.compareTo(other) > 0);
    assertTrue(other.compareTo(buf) < 0);
    // NaN handling: two single-NaN buffers must compare equal.
    FloatBuffer fbuffer1 = FloatBuffer.wrap(new float[] { Float.NaN });
    FloatBuffer fbuffer2 = FloatBuffer.wrap(new float[] { Float.NaN });
    FloatBuffer fbuffer3 = FloatBuffer.wrap(new float[] { 42f });
    assertEquals("Failed equal comparison with NaN entry", 0, fbuffer1
            .compareTo(fbuffer2));
    // NOTE(review): both directions below expect 1, which contradicts a
    // Float.compare-based ordering (where 42f < NaN). This encodes the
    // behavior of the implementation under test — confirm before changing.
    assertEquals("Failed greater than comparison with NaN entry", 1, fbuffer3
            .compareTo(fbuffer1));
    assertEquals("Failed greater than comparison with NaN entry", 1, fbuffer1
            .compareTo(fbuffer3));
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "duplicate",
        args = {}
)
public void testDuplicate() {
    buf.clear();
    buf.mark();
    buf.position(buf.limit());
    // The duplicate must be a distinct object mirroring buf's current state.
    FloatBuffer dup = buf.duplicate();
    assertNotSame(buf, dup);
    assertEquals(buf.position(), dup.position());
    assertEquals(buf.limit(), dup.limit());
    assertEquals(buf.isReadOnly(), dup.isReadOnly());
    assertEquals(buf.isDirect(), dup.isDirect());
    assertEquals(buf.order(), dup.order());
    assertContentEquals(buf, dup);
    // Position, mark and limit of the duplicate are independent of buf.
    dup.reset();
    assertEquals(dup.position(), 0);
    dup.clear();
    assertEquals(buf.position(), buf.limit());
    buf.reset();
    assertEquals(buf.position(), 0);
    // Both views share the same content storage.
    if (!dup.isReadOnly()) {
        loadTestData1(buf);
        assertContentEquals(buf, dup);
        loadTestData2(dup);
        assertContentEquals(buf, dup);
    }
}
@TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "equals",
        args = {java.lang.Object.class}
)
public void testEquals() {
    // A buffer equals itself and any view with identical remaining content.
    assertTrue(buf.equals(buf));
    FloatBuffer ro = buf.asReadOnlyBuffer();
    assertTrue(buf.equals(ro));
    FloatBuffer dup = buf.duplicate();
    assertTrue(buf.equals(dup));
    // Objects of unrelated types are never equal.
    assertFalse(buf.equals(Boolean.TRUE));
    assertTrue(buf.capacity() > 5);
    // Different positions -> different remaining content -> not equal.
    buf.limit(buf.capacity()).position(0);
    ro.limit(ro.capacity()).position(1);
    assertFalse(buf.equals(ro));
    // Different limits -> different remaining content -> not equal.
    buf.limit(buf.capacity() - 1).position(0);
    dup.limit(dup.capacity()).position(0);
    assertFalse(buf.equals(dup));
}
/*
* Class under test for float get()
*/
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "",
method = "get",
args = {}
)
public void testGet() {
buf.clear();
for (int i = 0; i < buf.capacity(); i++) {
assertEquals(buf.position(), i);
assertEquals(buf.get(), buf.get(i), 0.01);
}
try {
buf.get();
fail("Should throw Exception");
} catch (BufferUnderflowException e) {
// expected
}
}
    /*
     * Class under test for java.nio.FloatBuffer get(float[])
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "get",
        args = {float[].class}
    )
    // Bulk relative get into a one-element array: each call must copy the
    // element at the current position and advance by one.
    public void testGetfloatArray() {
        float array[] = new float[1];
        buf.clear();
        for (int i = 0; i < buf.capacity(); i++) {
            assertEquals(buf.position(), i);
            FloatBuffer ret = buf.get(array);
            assertEquals(array[0], buf.get(i), 0.01);
            assertSame(ret, buf);     // bulk get returns the buffer itself
        }
        buf.get(new float[0]);        // zero-length get is legal even when exhausted
        try {
            buf.get(array);
            fail("Should throw Exception");
        } catch (BufferUnderflowException e) {
            // expected
        }
        try {
            buf.get((float[])null);
            fail("Should throw Exception");
        } catch (NullPointerException e) {
            // expected
        }
    }
    /*
     * Class under test for java.nio.FloatBuffer get(float[], int, int)
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "get",
        args = {float[].class, int.class, int.class}
    )
    // Bulk get with offset/length: checks the documented failure modes
    // (underflow when length exceeds remaining, IndexOutOfBounds for bad
    // offset/length combinations, NPE for a null array), that position is
    // untouched on failure, and finally one full successful transfer.
    public void testGetfloatArrayintint() {
        buf.clear();
        float array[] = new float[buf.capacity()];
        try {
            buf.get(new float[buf.capacity() + 1], 0, buf.capacity() + 1);
            fail("Should throw Exception");
        } catch (BufferUnderflowException e) {
            // expected
        }
        assertEquals(buf.position(), 0);  // a failed get must not move the position
        try {
            buf.get(array, -1, array.length);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        buf.get(array, array.length, 0);  // zero-length at the end offset is legal
        try {
            buf.get(array, array.length + 1, 1);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        assertEquals(buf.position(), 0);
        try {
            buf.get(array, 2, -1);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            // NOTE(review): with a null array AND a bad length, NPE is asserted,
            // i.e. the argument-check order is treated as part of the contract.
            buf.get((float[])null, 2, -1);
            fail("Should throw Exception");
        } catch (NullPointerException e) {
            // expected
        }
        try {
            buf.get(array, 2, array.length);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            buf.get(array, 1, Integer.MAX_VALUE);   // offset + length overflows int
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            buf.get(array, Integer.MAX_VALUE, 1);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        assertEquals(buf.position(), 0);
        buf.clear();
        FloatBuffer ret = buf.get(array, 0, array.length);
        assertEquals(buf.position(), buf.capacity());
        assertContentEquals(buf, array, 0, array.length);
        assertSame(ret, buf);             // bulk get returns the buffer itself
    }
/*
* Class under test for float get(int)
*/
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "",
method = "get",
args = {int.class}
)
public void testGetint() {
buf.clear();
for (int i = 0; i < buf.capacity(); i++) {
assertEquals(buf.position(), i);
assertEquals(buf.get(), buf.get(i), 0.01);
}
try {
buf.get(-1);
fail("Should throw Exception");
} catch (IndexOutOfBoundsException e) {
// expected
}
try {
buf.get(buf.limit());
fail("Should throw Exception");
} catch (IndexOutOfBoundsException e) {
// expected
}
}
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "",
method = "hasArray",
args = {}
)
public void testHasArray() {
if (buf.hasArray()) {
assertNotNull(buf.array());
} else {
try {
buf.array();
fail("Should throw Exception");
} catch (UnsupportedOperationException e) {
// expected
// Note:can not tell when to catch
// UnsupportedOperationException or
// ReadOnlyBufferException, so catch all.
}
}
}
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "",
method = "hashCode",
args = {}
)
public void testHashCode() {
buf.clear();
FloatBuffer readonly = buf.asReadOnlyBuffer();
FloatBuffer duplicate = buf.duplicate();
assertTrue(buf.hashCode() == readonly.hashCode());
assertTrue(buf.capacity() > 5);
duplicate.position(buf.capacity() / 2);
assertTrue(buf.hashCode() != duplicate.hashCode());
}
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "Doesn't verify direct buffer.",
method = "isDirect",
args = {}
)
public void testIsDirect() {
assertFalse(buf.isDirect());
}
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "Abstract method.",
method = "isReadOnly",
args = {}
)
public void testIsReadOnly() {
assertFalse(buf.isReadOnly());
}
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "",
method = "order",
args = {}
)
public void testOrder() {
buf.order();
if (buf.hasArray()) {
assertEquals(ByteOrder.nativeOrder(), buf.order());
}
}
    /*
     * Class under test for java.nio.FloatBuffer put(float)
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "Doesn't verify ReadOnlyBufferException.",
        method = "put",
        args = {float.class}
    )
    // Relative put: writes at the current position, advances by one, and
    // returns the buffer itself; writing past the limit must overflow.
    public void testPutfloat() {
        buf.clear();
        for (int i = 0; i < buf.capacity(); i++) {
            assertEquals(buf.position(), i);
            FloatBuffer ret = buf.put((float) i);
            assertEquals(buf.get(i), (float) i, 0.0);
            assertSame(ret, buf);
        }
        // buffer is now full
        try {
            buf.put(0);
            fail("Should throw Exception");
        } catch (BufferOverflowException e) {
            // expected
        }
    }
    /*
     * Class under test for java.nio.FloatBuffer put(float[])
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "Doesn't verify ReadOnlyBufferException.",
        method = "put",
        args = {float[].class}
    )
    // Bulk relative put from a one-element array: each call copies the element
    // at the current position and advances by one; overflows when full and
    // NPEs on a null source.
    public void testPutfloatArray() {
        float array[] = new float[1];
        buf.clear();
        for (int i = 0; i < buf.capacity(); i++) {
            assertEquals(buf.position(), i);
            array[0] = (float) i;
            FloatBuffer ret = buf.put(array);
            assertEquals(buf.get(i), (float) i, 0.0);
            assertSame(ret, buf);     // bulk put returns the buffer itself
        }
        try {
            buf.put(array);
            fail("Should throw Exception");
        } catch (BufferOverflowException e) {
            // expected
        }
        try {
            buf.position(buf.limit());
            buf.put((float[])null);
            fail("Should throw Exception");
        } catch (NullPointerException e) {
            // expected
        }
    }
    /*
     * Class under test for java.nio.FloatBuffer put(float[], int, int)
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "Doesn't verify ReadOnlyBufferException.",
        method = "put",
        args = {float[].class, int.class, int.class}
    )
    // Bulk put with offset/length: checks the documented failure modes
    // (overflow when length exceeds remaining, IndexOutOfBounds for bad
    // offset/length combinations, NPE for a null array), that position is
    // untouched on failure, and finally one full successful transfer.
    public void testPutfloatArrayintint() {
        buf.clear();
        float array[] = new float[buf.capacity()];
        try {
            buf.put(new float[buf.capacity() + 1], 0, buf.capacity() + 1);
            fail("Should throw Exception");
        } catch (BufferOverflowException e) {
            // expected
        }
        assertEquals(buf.position(), 0);  // a failed put must not move the position
        try {
            buf.put(array, -1, array.length);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            buf.put(array, array.length + 1, 0);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        buf.put(array, array.length, 0);  // zero-length at the end offset is legal
        assertEquals(buf.position(), 0);
        try {
            buf.put(array, 0, -1);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            // NOTE(review): with a null array AND a bad length, NPE is asserted,
            // i.e. the argument-check order is treated as part of the contract.
            buf.put((float[])null, 0, -1);
            fail("Should throw Exception");
        } catch (NullPointerException e) {
            // expected
        }
        try {
            buf.put(array, 2, array.length);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            buf.put(array, Integer.MAX_VALUE, 1);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            buf.put(array, 1, Integer.MAX_VALUE);   // offset + length overflows int
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        assertEquals(buf.position(), 0);
        loadTestData2(array, 0, array.length);
        FloatBuffer ret = buf.put(array, 0, array.length);
        assertEquals(buf.position(), buf.capacity());
        assertContentEquals(buf, array, 0, array.length);
        assertSame(ret, buf);             // bulk put returns the buffer itself
    }
    /*
     * Class under test for java.nio.FloatBuffer put(java.nio.FloatBuffer)
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "Doesn't verify ReadOnlyBufferException.",
        method = "put",
        args = {java.nio.FloatBuffer.class}
    )
    // Buffer-to-buffer put: rejects putting a buffer into itself, overflows
    // when the source has more remaining elements than fit, NPEs on null, and
    // on success drains the source into this buffer.
    public void testPutFloatBuffer() {
        FloatBuffer other = FloatBuffer.allocate(buf.capacity());
        try {
            buf.put(buf);                // a buffer may not be put into itself
            fail("Should throw Exception");
        } catch (IllegalArgumentException e) {
            // expected
        }
        try {
            buf.put(FloatBuffer.allocate(buf.capacity() + 1));
            fail("Should throw Exception");
        } catch (BufferOverflowException e) {
            // expected
        }
        try {
            buf.flip();
            buf.put((FloatBuffer)null);
            fail("Should throw Exception");
        } catch (NullPointerException e) {
            // expected
        }
        buf.clear();
        loadTestData2(other);
        other.clear();
        buf.clear();                     // NOTE(review): redundant with the clear() above; harmless
        FloatBuffer ret = buf.put(other);
        assertEquals(other.position(), other.capacity());  // source fully consumed
        assertEquals(buf.position(), buf.capacity());
        assertContentEquals(other, buf);
        assertSame(ret, buf);
    }
    /*
     * Class under test for java.nio.FloatBuffer put(int, float)
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "Doesn't verify ReadOnlyBufferException.",
        method = "put",
        args = {int.class, float.class}
    )
    // Absolute put: writes at the given index without moving the position;
    // indices outside [0, limit) raise IndexOutOfBoundsException.
    public void testPutintfloat() {
        buf.clear();
        for (int i = 0; i < buf.capacity(); i++) {
            assertEquals(buf.position(), 0);  // absolute put leaves the position alone
            FloatBuffer ret = buf.put(i, (float) i);
            assertEquals(buf.get(i), (float) i, 0.0);
            assertSame(ret, buf);
        }
        try {
            buf.put(-1, 0);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            buf.put(buf.limit(), 0);
            fail("Should throw Exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "slice",
        args = {}
    )
    // slice() must cover exactly the remaining elements, start at position 0
    // with no mark defined, and share content with the original buffer.
    public void testSlice() {
        assertTrue(buf.capacity() > 5);
        buf.position(1);
        buf.limit(buf.capacity() - 1);
        FloatBuffer slice = buf.slice();
        assertEquals(buf.isReadOnly(), slice.isReadOnly());
        assertEquals(buf.isDirect(), slice.isDirect());
        assertEquals(buf.order(), slice.order());
        assertEquals(slice.position(), 0);
        assertEquals(slice.limit(), buf.remaining());
        assertEquals(slice.capacity(), buf.remaining());
        // a fresh slice has no mark, so reset() must fail
        try {
            slice.reset();
            fail("Should throw Exception");
        } catch (InvalidMarkException e) {
            // expected
        }
        // slice share the same content with buf
        if (!slice.isReadOnly()) {
            loadTestData1(slice);
            assertContentLikeTestData1(buf, 1, 0, slice.capacity());
            buf.put(2, 500);   // buf index 2 aliases slice index 1 (slice starts at 1)
            assertEquals(slice.get(1), 500, 0.0);
        }
    }
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "",
method = "toString",
args = {}
)
public void testToString() {
String str = buf.toString();
assertTrue(str.indexOf("Float") >= 0 || str.indexOf("float") >= 0);
assertTrue(str.indexOf("" + buf.position()) >= 0);
assertTrue(str.indexOf("" + buf.limit()) >= 0);
assertTrue(str.indexOf("" + buf.capacity()) >= 0);
}
    /*
     * test for method static FloatBuffer wrap(float[] array) test covers
     * following use cases: 1. case for check FloatBuffer buf2 properties 2. case
     * for check equal between buf2 and float array[] 3. case for check that buf2
     * depends on array[]
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "wrap",
        args = {float[].class}
    )
    public void test_Wrap$S() {
        float array[] = new float[BUFFER_LENGTH];
        loadTestData1(array, 0, BUFFER_LENGTH);
        FloatBuffer buf2 = FloatBuffer.wrap(array);
        // case: FloatBuffer buf2 properties is satisfy the conditions
        // specification
        assertEquals(buf2.capacity(), array.length);
        assertEquals(buf2.limit(), array.length);
        assertEquals(buf2.position(), 0);
        // case: FloatBuffer buf2 is equal to float array[]
        assertContentEquals(buf2, array, 0, array.length);
        // case: writes to array[] must show through the wrapping buffer
        // NOTE(review): length here is buf.capacity(), presumably equal to
        // BUFFER_LENGTH — confirm against the fixture setup.
        loadTestData2(array, 0, buf.capacity());
        assertContentEquals(buf2, array, 0, array.length);
    }
    /*
     * test for method static FloatBuffer wrap(float[] array, int offset, int
     * length) test covers following use cases: 1. case for check FloatBuffer
     * buf2 properties 2. case for check equal between buf2 and float array[] 3.
     * case for check that buf2 depends on array[] 4. case expected
     * IndexOutOfBoundsException
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        notes = "",
        method = "wrap",
        args = {float[].class, int.class, int.class}
    )
    public void test_Wrap$SII() {
        float array[] = new float[BUFFER_LENGTH];
        int offset = 5;
        int length = BUFFER_LENGTH - offset;
        loadTestData1(array, 0, BUFFER_LENGTH);
        FloatBuffer buf2 = FloatBuffer.wrap(array, offset, length);
        // case: FloatBuffer buf2 properties is satisfy the conditions
        // specification: capacity covers the whole array, position/limit
        // reflect the requested window
        assertEquals(buf2.capacity(), array.length);
        assertEquals(buf2.position(), offset);
        assertEquals(buf2.limit(), offset + length);
        assertEquals(buf2.arrayOffset(), 0);
        // case: FloatBuffer buf2 is equal to float array[]
        assertContentEquals(buf2, array, 0, array.length);
        // case: writes to array[] must show through the wrapping buffer
        // NOTE(review): length here is buf.capacity(), presumably equal to
        // BUFFER_LENGTH — confirm against the fixture setup.
        loadTestData2(array, 0, buf.capacity());
        assertContentEquals(buf2, array, 0, array.length);
        // case: expected IndexOutOfBoundsException (offset + length > array.length)
        try {
            offset = 7;
            buf2 = FloatBuffer.wrap(array, offset, length);
            fail("wrap method does not throws expected exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }
void loadTestData1(float array[], int offset, int length) {
for (int i = 0; i < length; i++) {
array[offset + i] = (float) i;
}
}
void loadTestData2(float array[], int offset, int length) {
for (int i = 0; i < length; i++) {
array[offset + i] = (float) length - i;
}
}
void loadTestData1(FloatBuffer buf) {
buf.clear();
for (int i = 0; i < buf.capacity(); i++) {
buf.put(i, (float) i);
}
}
void loadTestData2(FloatBuffer buf) {
buf.clear();
for (int i = 0; i < buf.capacity(); i++) {
buf.put(i, (float) buf.capacity() - i);
}
}
void assertContentEquals(FloatBuffer buf, float array[],
int offset, int length) {
for (int i = 0; i < length; i++) {
assertEquals(buf.get(i), array[offset + i], 0.01);
}
}
void assertContentEquals(FloatBuffer buf, FloatBuffer other) {
assertEquals(buf.capacity(), other.capacity());
for (int i = 0; i < buf.capacity(); i++) {
assertEquals(buf.get(i), other.get(i), 0.01);
}
}
void assertContentLikeTestData1(FloatBuffer buf,
int startIndex, float startValue, int length) {
float value = startValue;
for (int i = 0; i < length; i++) {
assertEquals(buf.get(startIndex + i), value, 0.01);
value = value + 1.0f;
}
}
}
| |
package org.yeastrc.xlink.www.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.slf4j.LoggerFactory; import org.slf4j.Logger;
import org.yeastrc.xlink.db.DBConnectionFactory;
import org.yeastrc.xlink.www.dto.DataPageSavedViewDTO;
/**
* DAO for data_page_saved_view_tbl table
*
*/
/**
 * DAO for data_page_saved_view_tbl table
 *
 * Not thread-bound: each call opens (and closes) its own connection, except
 * for the save(item, dbConnection) overload, where the caller owns the
 * connection.
 */
public class DataPageSavedViewDAO {

    private static final Logger log = LoggerFactory.getLogger( DataPageSavedViewDAO.class );

    // private constructor
    private DataPageSavedViewDAO() { }

    /**
     * @return newly created instance
     */
    public static DataPageSavedViewDAO getInstance() {
        return new DataPageSavedViewDAO();
    }

    /**
     * Get the record that is the default view for the given project search id
     * and page name.
     *
     * @param projectSearchId matched against single_project_search_id__default_view
     * @param pageName matched against page_name
     * @return null if not found
     * @throws Exception on any database error; the error is logged before rethrow
     */
    public DataPageSavedViewDTO getForProjectSearchIdPageName( int projectSearchId, String pageName ) throws Exception {
        DataPageSavedViewDTO returnItem = null;
        final String sql = "SELECT * FROM data_page_saved_view_tbl WHERE single_project_search_id__default_view = ? AND page_name = ?";
        //  try-with-resources guarantees the database handles are closed,
        //  matching the style already used by getNumericFieldsById(...)
        try ( Connection conn = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL );
                PreparedStatement pstmt = conn.prepareStatement( sql ) ) {
            pstmt.setInt( 1, projectSearchId );
            pstmt.setString( 2, pageName );
            try ( ResultSet rs = pstmt.executeQuery() ) {
                if ( rs.next() ) {
                    returnItem = populateResultObject( rs );
                }
            }
        } catch ( Exception e ) {
            String msg = "Failed to select DataPageSavedViewDTO, projectSearchId: " + projectSearchId + ", sql: " + sql;
            log.error( msg, e );
            throw e;
        }
        return returnItem;
    }

    /**
     * Copy the current result-set row into a new DTO.
     *
     * @param rs positioned on the row to read
     * @return populated DataPageSavedViewDTO
     * @throws SQLException
     */
    private DataPageSavedViewDTO populateResultObject(ResultSet rs) throws SQLException {
        DataPageSavedViewDTO returnItem = new DataPageSavedViewDTO();
        //  BUG FIX: "id" was previously never copied out of the result set,
        //  leaving the DTO id at its default (0) even though the query is
        //  "SELECT *" and the DTO has a setId(...) (used by save(...)).
        returnItem.setId( rs.getInt( "id" ) );
        returnItem.setProjectId( rs.getInt( "project_id" ) );
        returnItem.setPageName( rs.getString( "page_name" ) );
        returnItem.setLabel( rs.getString( "label" ) );
        returnItem.setUrlStartAtPageName( rs.getString( "url_start_at_page_name" ) );
        returnItem.setPageQueryJSONString( rs.getString( "page_query_json_string" ) );
        returnItem.setAuthUserIdCreated( rs.getInt( "auth_user_id_created_record" ) );
        returnItem.setAuthUserIdLastUpdated( rs.getInt( "auth_user_id_last_updated_record" ) );
        returnItem.setDateCreated( rs.getDate( "date_record_created" ) );
        returnItem.setDateLastUpdated( rs.getDate( "date_record_last_updated" ) );
        return returnItem;
    }

    /**
     * Return the numeric fields for id
     *
     * @param id
     * @return null if not found, only the numeric fields
     * @throws SQLException
     */
    public DataPageSavedViewDTO getNumericFieldsById( int id ) throws Exception {
        DataPageSavedViewDTO result = null;
        final String querySQL = "SELECT project_id, auth_user_id_created_record, auth_user_id_last_updated_record FROM data_page_saved_view_tbl WHERE id = ? ";
        try ( Connection dbConnection = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL );
                PreparedStatement preparedStatement = dbConnection.prepareStatement( querySQL ) ) {
            preparedStatement.setInt( 1, id );
            try ( ResultSet rs = preparedStatement.executeQuery() ) {
                if ( rs.next() ) {
                    result = new DataPageSavedViewDTO();
                    result.setId( id );
                    result.setProjectId( rs.getInt( "project_id" ) );
                    result.setAuthUserIdCreated( rs.getInt( "auth_user_id_created_record" ) );
                    result.setAuthUserIdLastUpdated( rs.getInt( "auth_user_id_last_updated_record" ) );
                }
            }
        } catch ( RuntimeException | SQLException e ) {
            //  multi-catch replaces the two previous identical catch blocks
            String msg = "SQL: " + querySQL;
            log.error( msg, e );
            throw e;
        }
        return result;
    }

    /**
     * Insert the item using a connection from the factory.
     *
     * @param item updated in place: id is set to the generated key
     * @throws Exception
     */
    public void save( DataPageSavedViewDTO item ) throws Exception {
        try ( Connection dbConnection = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL ) ) {
            save( item, dbConnection );
        }
    }

    ///////

    private static final String INSERT_SQL =
            "INSERT INTO data_page_saved_view_tbl "
            + " ( project_id, page_name, "
            + " label, url_start_at_page_name, page_query_json_string,"
            + " auth_user_id_created_record, auth_user_id_last_updated_record ) "
            + " VALUES ( ?, ?, ?, ?, ?, ?, ? )";

    /**
     * Insert the item using the caller's connection (which is NOT closed here).
     *
     * @param item updated in place: id is set to the generated key
     * @throws Exception if the insert fails or no generated key is returned
     */
    public void save( DataPageSavedViewDTO item, Connection dbConnection ) throws Exception {
        final String sql = INSERT_SQL;
        try ( PreparedStatement pstmt = dbConnection.prepareStatement( sql, Statement.RETURN_GENERATED_KEYS ) ) {
            int counter = 0;
            counter++;
            pstmt.setInt( counter, item.getProjectId() );
            counter++;
            pstmt.setString( counter, item.getPageName() );
            counter++;
            pstmt.setString( counter, item.getLabel() );
            counter++;
            pstmt.setString( counter, item.getUrlStartAtPageName() );
            counter++;
            pstmt.setString( counter, item.getPageQueryJSONString() );
            counter++;
            pstmt.setInt( counter, item.getAuthUserIdCreated() );
            counter++;
            pstmt.setInt( counter, item.getAuthUserIdLastUpdated() );
            pstmt.executeUpdate();
            try ( ResultSet rs = pstmt.getGeneratedKeys() ) {
                if ( rs.next() ) {
                    item.setId( rs.getInt( 1 ) );
                } else {
                    String msg = "Failed to insert DataPageSavedViewDTO, generated key not found.";
                    log.error( msg );
                    throw new Exception( msg );
                }
            }
        } catch ( Exception e ) {
            String msg = "Failed to insert DataPageSavedViewDTO, sql: " + sql;
            log.error( msg, e );
            throw e;
        }
    }

    /**
     * Update the label (and the last-updated user) of an existing record.
     *
     * @param label new label value
     * @param userId written to auth_user_id_last_updated_record
     * @param id primary key of the record to update
     * @throws Exception on any database error; the error is logged before rethrow
     */
    public void updateLabel( String label, int userId, int id ) throws Exception {
        final String UPDATE_SQL = "UPDATE data_page_saved_view_tbl SET label = ?, auth_user_id_last_updated_record = ? WHERE id = ?";
        try ( Connection dbConnection = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL );
                PreparedStatement preparedStatement = dbConnection.prepareStatement( UPDATE_SQL ) ) {
            int counter = 0;
            counter++;
            preparedStatement.setString( counter, label );
            counter++;
            preparedStatement.setInt( counter, userId );
            counter++;
            preparedStatement.setInt( counter, id );
            preparedStatement.executeUpdate();
        } catch ( Exception e ) {
            String msg = "label: " + label + ", id: " + id + ", SQL: " + UPDATE_SQL;
            log.error( msg, e );
            throw e;
        }
    }

    /**
     * Delete the record with the given primary key.
     *
     * @param id primary key of the record to delete
     * @throws Exception on any database error; the error is logged before rethrow
     */
    public void delete( int id ) throws Exception {
        final String DELETE_SQL = "DELETE FROM data_page_saved_view_tbl WHERE id = ?";
        try ( Connection dbConnection = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL );
                PreparedStatement preparedStatement = dbConnection.prepareStatement( DELETE_SQL ) ) {
            int counter = 0;
            counter++;
            preparedStatement.setInt( counter, id );
            preparedStatement.executeUpdate();
        } catch ( Exception e ) {
            String msg = "id: " + id + ", SQL: " + DELETE_SQL;
            log.error( msg, e );
            throw e;
        }
    }
}
| |
package multirefactor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import recoder.abstraction.ClassType;
import recoder.abstraction.Method;
import recoder.abstraction.Type;
import recoder.convenience.AbstractTreeWalker;
import recoder.convenience.ForestWalker;
import recoder.convenience.TreeWalker;
import recoder.java.CompilationUnit;
import recoder.java.ProgramElement;
import recoder.java.declaration.ClassDeclaration;
import recoder.java.declaration.FieldDeclaration;
import recoder.java.declaration.FieldSpecification;
import recoder.java.declaration.InterfaceDeclaration;
import recoder.java.declaration.MemberDeclaration;
import recoder.java.declaration.MethodDeclaration;
import recoder.java.declaration.TypeDeclaration;
import recoder.java.declaration.VariableDeclaration;
import recoder.java.declaration.modifier.Private;
import recoder.java.declaration.modifier.Protected;
import recoder.java.declaration.modifier.Public;
import recoder.java.declaration.modifier.VisibilityModifier;
import recoder.java.reference.MethodReference;
import recoder.java.reference.TypeReference;
import recoder.kit.MethodKit;
import recoder.service.CrossReferenceSourceInfo;
import recoder.service.SourceInfo;
import refactorings.Refactoring;
// Calculates various software metrics from the source code input.
// Contains implementations of the full QMOOD suite and 2 metrics from the CK suite.
public class Metrics
{
    // Compilation units (source files) over which the metrics are computed.
    private List<CompilationUnit> units;
    // NOTE(review): within this chunk only elementScores is ever assigned
    // (in the constructor); affectedClasses and elementDiversity are
    // presumably used by methods outside this view — confirm before changing.
    private ArrayList<String> affectedClasses;
    private HashMap<String, Integer> elementDiversity;
    private HashMap<String, Integer> elementScores;
    // Creates a metrics calculator over the given compilation units.
    // The units list is stored by reference (not copied); affectedClasses and
    // elementDiversity are intentionally left null here — TODO confirm they
    // are initialized elsewhere before use.
    public Metrics(List<CompilationUnit> units)
    {
        this.units = units;
        this.elementScores = new HashMap<String, Integer>();
    }
// Amount of classes in the project.
// Includes both ordinary classes and interfaces.
public int classDesignSize()
{
int classCounter = 0;
ForestWalker tw = new ForestWalker(this.units);
while (tw.next(TypeDeclaration.class))
{
TypeDeclaration td = (TypeDeclaration) tw.getProgramElement();
if ((td.getName() != null) && ((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration)))
classCounter++;
}
return classCounter;
}
    // Amount of distinct class hierarchies in the project.
    // Excludes classes from external libraries.
    public int numberOfHierarchies()
    {
        SourceInfo si = this.units.get(0).getFactory().getServiceConfiguration().getSourceInfo();
        // Full names of hierarchy roots: classes whose superclass is NOT a
        // project TypeDeclaration (i.e. a library type such as java.lang.Object)
        // but that have at least one subtype within the project.
        Set<String> baseTypes = new HashSet<String>();
        for (int i = 0; i < this.units.size(); i++)
        {
            for (TypeDeclaration td : getAllTypes(this.units.get(i)))
            {
                if (td.isOrdinaryClass())
                {
                    // Prevents "Zero Service" outputs logged to the console.
                    if (td.getProgramModelInfo() == null)
                        td.getFactory().getServiceConfiguration().getChangeHistory().updateModel();
                    // NOTE(review): getSupertypes().get(0) is assumed to be the
                    // superclass — confirm the ordering guarantee in the recoder API.
                    if (!(td.getSupertypes().get(0) instanceof TypeDeclaration) && (si.getSubtypes(td).size() > 0))
                        baseTypes.add(td.getFullName());
                }
            }
        }
        return baseTypes.size();
    }
    // Average amount of classes away from the root per class.
    // Excludes classes from external libraries.
    public float averageNumberOfAncestors()
    {
        int classCounter = 0;
        int superTypeCounter = 0;
        for (int i = 0; i < this.units.size(); i++)
        {
            for (ClassType ct : getAllTypes(this.units.get(i)))
            {
                if (ct.isOrdinaryClass())
                {
                    // Prevents "Zero Service" outputs logged to the console.
                    if (ct.getProgramModelInfo() == null)
                        ((ClassDeclaration) ct).getFactory().getServiceConfiguration().getChangeHistory().updateModel();
                    classCounter++;
                    // Walk up the superclass chain while it stays inside the
                    // project (library supertypes are not TypeDeclarations).
                    while (ct.getSupertypes().get(0) instanceof TypeDeclaration)
                    {
                        superTypeCounter++;
                        ct = ct.getSupertypes().get(0);
                    }
                }
            }
        }
        // NaN when the project has no ordinary classes (0 / 0).
        return (float) superTypeCounter / (float) classCounter;
    }
// Average ratio of the amount of private, package or
// protected attributes in a class to the overall amount per class.
public float dataAccessMetric()
{
int counter, nonPublicCounter;
int classCounter = 0;
float dataAccessMetric = 0;
for (int i = 0; i < this.units.size(); i++)
{
for (TypeDeclaration td : getAllTypes(this.units.get(i)))
{
if ((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration))
{
counter = 0;
nonPublicCounter = 0;
classCounter++;
for (MemberDeclaration md : td.getMembers())
{
if (md instanceof FieldDeclaration)
{
counter++;
if (!(((FieldDeclaration) md).getVisibilityModifier() instanceof Public))
nonPublicCounter++;
}
}
if (counter > 0)
dataAccessMetric += (float) nonPublicCounter / (float) counter;
}
}
}
return dataAccessMetric / (float) classCounter;
}
    // Average number of other distinct classes each class depends on per class.
    // Only includes user defined classes from the project.
    public float directClassCoupling()
    {
        int couplingCounter = 0;
        int classCounter = 0;
        Set<String> distinctTypes;
        SourceInfo si = this.units.get(0).getFactory().getServiceConfiguration().getSourceInfo();
        for (int i = 0; i < this.units.size(); i++)
        {
            for (TypeDeclaration td : getAllTypes(this.units.get(i)))
            {
                if ((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration))
                {
                    classCounter++;
                    // Collect full names so each coupled type is counted once.
                    distinctTypes = new HashSet<String>();
                    // Prevents "Zero Service" outputs logged to the console.
                    if (td.getProgramModelInfo() == null)
                        td.getFactory().getServiceConfiguration().getChangeHistory().updateModel();
                    // Couplings via method signatures (parameter/return types).
                    for (MemberDeclaration md : td.getMembers())
                        if (md instanceof MethodDeclaration)
                            for (Type t : ((MethodDeclaration) md).getSignature())
                                if ((t != null) && ((t instanceof ClassDeclaration) || (t instanceof InterfaceDeclaration)))
                                    distinctTypes.add(t.getFullName());
                    // Couplings via type references inside field declarations.
                    for (FieldSpecification fs : td.getFieldsInScope())
                    {
                        TreeWalker tw = new TreeWalker(fs);
                        while (tw.next(TypeReference.class))
                        {
                            Type t = si.getType(tw.getProgramElement());
                            if ((t != null) && ((t instanceof ClassDeclaration) || (t instanceof InterfaceDeclaration)))
                                distinctTypes.add(t.getFullName());
                        }
                    }
                    couplingCounter += distinctTypes.size();
                }
            }
        }
        return (float) couplingCounter / (float) classCounter;
    }
    // Average cohesion among methods ratio per class.
    // Ratio gets the accumulation of the amount of distinct parameter types for each method
    // over the maximum possible amount of distinct parameter types across all the methods.
    // Denominator is calculated by multiplying the amount of methods by the amount of
    // distinct parameter types in all of the methods of the class.
    public float cohesionAmongMethods()
    {
        int methodCounter, cohesionCounter;
        int classCounter = 0;
        float cohesionAmongMethods = 0;
        ArrayList<String> types;       // parameter type names of one method (with duplicates)
        ArrayList<String> allTypes;    // parameter type names across all methods (with duplicates)
        Set<String> distinctTypes;
        for (int i = 0; i < this.units.size(); i++)
        {
            for (TypeDeclaration td : getAllTypes(this.units.get(i)))
            {
                if ((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration))
                {
                    classCounter++;
                    methodCounter = 0;
                    cohesionCounter = 0;
                    allTypes = new ArrayList<String>();
                    // Prevents "Zero Service" outputs logged to the console.
                    if (td.getProgramModelInfo() == null)
                        td.getFactory().getServiceConfiguration().getChangeHistory().updateModel();
                    for (MemberDeclaration md : td.getMembers())
                    {
                        if (md instanceof MethodDeclaration)
                        {
                            methodCounter++;
                            types = new ArrayList<String>();
                            for (Type t : ((MethodDeclaration) md).getSignature())
                            {
                                types.add(t.getFullName());
                                allTypes.add(t.getFullName());
                            }
                            // Numerator: sum of per-method distinct parameter types.
                            distinctTypes = new HashSet<String>(types);
                            cohesionCounter += distinctTypes.size();
                        }
                    }
                    // Denominator: methods * distinct parameter types class-wide.
                    distinctTypes = new HashSet<String>(allTypes);
                    if ((methodCounter * distinctTypes.size()) > 0)
                        cohesionAmongMethods += (float) cohesionCounter / (float) (methodCounter * distinctTypes.size());
                }
            }
        }
        return cohesionAmongMethods / (float) classCounter;
    }
// Average amount of user defined attributes declared per class.
// Only counts classes defined in the project.
public float aggregation()
{
int counter = 0;
int classCounter = 0;
SourceInfo si = this.units.get(0).getFactory().getServiceConfiguration().getSourceInfo();
for (int i = 0; i < this.units.size(); i++)
{
for (TypeDeclaration td : getAllTypes(this.units.get(i)))
{
if ((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration))
{
classCounter++;
for (FieldSpecification f : td.getFieldsInScope())
{
Type t = si.getType(f);
if ((t != null) && ((t instanceof ClassDeclaration) || (t instanceof InterfaceDeclaration)))
counter++;
}
}
}
}
return (float) counter / (float) classCounter;
}
// Average functional abstraction ratio per class.
// Ratio gets the amount of inherited methods accessible within a class
// (methods declared in a super class of the current class that are public
// or protected, or are package visible and share the class's package) over the overall
// amount of methods accessible (inherited and declared within the class) to the class.
// Excludes methods inherited from external library classes.
public float functionalAbstraction()
{
    int counter, inheritedCounter;
    int classCounter = 0;
    float functionalAbstraction = 0;
    for (int i = 0; i < this.units.size(); i++)
    {
        for (TypeDeclaration td : getAllTypes(this.units.get(i)))
        {
            if ((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration))
            {
                classCounter++;
                inheritedCounter = 0;
                counter = 0;
                // Methods declared directly within the class itself.
                for (MemberDeclaration md : td.getMembers())
                    if (md instanceof MethodDeclaration)
                        counter++;
                // Prevents "Zero Service" outputs logged to the console.
                if (td.getProgramModelInfo() == null)
                    td.getFactory().getServiceConfiguration().getChangeHistory().updateModel();
                // Climb the superclass chain while it stays inside the project;
                // external library supertypes are not TypeDeclarations and stop the walk.
                TypeDeclaration superType = td;
                while (superType.getSupertypes().get(0) instanceof TypeDeclaration)
                {
                    superType = (TypeDeclaration) superType.getSupertypes().get(0);
                    // BUG FIX: inherited methods are declared in the SUPER class,
                    // so iterate the super type's members. The original iterated
                    // td.getMembers() here, re-counting the class's own methods
                    // once per superclass level instead of the inherited ones.
                    for (MemberDeclaration md : superType.getMembers())
                        if (md instanceof MethodDeclaration)
                            if ((md.isPublic()) || (md.isProtected()) || (!(md.isPrivate()) && (td.getPackage().equals(superType.getPackage()))))
                                inheritedCounter++;
                }
                // Accessible total = declared + inherited.
                counter += inheritedCounter;
                if (counter > 0)
                    functionalAbstraction += (float) inheritedCounter / (float) counter;
            }
        }
    }
    return functionalAbstraction / (float) classCounter;
}
// Average amount of polymorphic methods
// (methods that are redefined/overwritten) per class.
// Abstract method declarations and constructors are included.
public float numberOfPolymorphicMethods()
{
    int polymorphicCounter = 0;
    int amountOfClasses = 0;
    SourceInfo sourceInfo = this.units.get(0).getFactory().getServiceConfiguration().getSourceInfo();
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            // Prevents "Zero Service" outputs logged to the console.
            if (type.getProgramModelInfo() == null)
                type.getFactory().getServiceConfiguration().getChangeHistory().updateModel();
            amountOfClasses++;
            for (MemberDeclaration member : type.getMembers())
            {
                if (!(member instanceof MethodDeclaration))
                    continue;
                // A method is polymorphic when at least one subtype redefines it.
                if (!MethodKit.getRedefiningMethods((CrossReferenceSourceInfo) sourceInfo, (Method) member).isEmpty())
                    polymorphicCounter++;
            }
        }
    }
    return (float) polymorphicCounter / (float) amountOfClasses;
}
// Average amount of public methods per class.
public float classInterfaceSize()
{
    int publicMethodCounter = 0;
    int amountOfClasses = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            amountOfClasses++;
            for (MemberDeclaration member : type.getMembers())
            {
                // Only methods with an explicit public modifier count.
                if ((member instanceof MethodDeclaration)
                        && (((MethodDeclaration) member).getVisibilityModifier() instanceof Public))
                    publicMethodCounter++;
            }
        }
    }
    return (float) publicMethodCounter / (float) amountOfClasses;
}
// Average amount of methods per class.
public float numberOfMethods()
{
    int amountOfClasses = 0;
    int methodTotal = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            amountOfClasses++;
            for (MemberDeclaration member : type.getMembers())
                if (member instanceof MethodDeclaration)
                    methodTotal++;
        }
    }
    return (float) methodTotal / (float) amountOfClasses;
}
// Average amount of complexity of all methods per class.
// The complexity is calculated using the amount of lines of code per method.
public float weightedMethodsPerClass()
{
    int amountOfClasses = 0;
    int complexityTotal = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            amountOfClasses++;
            for (MemberDeclaration member : type.getMembers())
            {
                // A method's weight is its inclusive span in source lines.
                if (member instanceof MethodDeclaration)
                    complexityTotal += member.getEndPosition().getLine() - member.getStartPosition().getLine() + 1;
            }
        }
    }
    return (float) complexityTotal / (float) amountOfClasses;
}
// Average amount of direct child classes per class.
// Only includes ordinary classes within the project.
public float numberOfChildren()
{
    int childCounter = 0;
    int amountOfClasses = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!type.isOrdinaryClass())
                continue;
            // Prevents "Zero Service" outputs logged to the console.
            if (type.getProgramModelInfo() == null)
                type.getFactory().getServiceConfiguration().getChangeHistory().updateModel();
            amountOfClasses++;
            // Each class whose direct superclass is declared inside the project
            // contributes one child to that superclass's tally.
            if (type.getSupertypes().get(0) instanceof TypeDeclaration)
                childCounter++;
        }
    }
    return (float) childCounter / (float) amountOfClasses;
}
// Ratio of the amount of interfaces over the overall amount of classes.
public float abstractness()
{
    int classCounter = 0;
    int interfaceCounter = 0;
    ForestWalker walker = new ForestWalker(this.units);
    while (walker.next(TypeDeclaration.class))
    {
        TypeDeclaration type = (TypeDeclaration) walker.getProgramElement();
        // Anonymous types (null name) are excluded from the count.
        if (type.getName() == null)
            continue;
        if (type instanceof InterfaceDeclaration)
        {
            classCounter++;
            interfaceCounter++;
        }
        else if (type instanceof ClassDeclaration)
            classCounter++;
    }
    return (float) interfaceCounter / (float) classCounter;
}
// Average ratio of abstract elements over abstract
// and potentially abstract elements per class.
// Variables can't be abstract.
public float abstractRatio()
{
    int classCounter = 0;
    float abstractAmount = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            classCounter++;
            // The type itself plus each of its methods could be abstract.
            int potentiallyAbstract = 1;
            int abstractCount = type.isAbstract() ? 1 : 0;
            for (MemberDeclaration member : type.getMembers())
            {
                if (member instanceof MethodDeclaration)
                {
                    potentiallyAbstract++;
                    if (((MethodDeclaration) member).isAbstract())
                        abstractCount++;
                }
            }
            abstractAmount += (float) abstractCount / (float) potentiallyAbstract;
        }
    }
    return abstractAmount / (float) classCounter;
}
// Average ratio of static elements over static
// and potentially static elements per class.
// Of the variable declarations, only a field can be static.
public float staticRatio()
{
    int classCounter = 0;
    float staticAmount = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            classCounter++;
            // The type itself plus each method and field could be static.
            int potentiallyStatic = 1;
            int staticCount = type.isStatic() ? 1 : 0;
            for (MemberDeclaration member : type.getMembers())
            {
                if ((member instanceof MethodDeclaration) || (member instanceof FieldDeclaration))
                {
                    potentiallyStatic++;
                    if (member.isStatic())
                        staticCount++;
                }
            }
            staticAmount += (float) staticCount / (float) potentiallyStatic;
        }
    }
    return staticAmount / (float) classCounter;
}
// Average ratio of final elements over final
// and potentially final elements per class.
public float finalRatio()
{
    int classCounter = 0;
    float finalAmount = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            classCounter++;
            // The type itself, its methods, its fields, and every variable
            // declared inside a method could all be final.
            int potentiallyFinal = 1;
            int finalCount = type.isFinal() ? 1 : 0;
            for (MemberDeclaration member : type.getMembers())
            {
                if (member instanceof MethodDeclaration)
                {
                    potentiallyFinal++;
                    if (((MethodDeclaration) member).isFinal())
                        finalCount++;
                    // Variables declared within the method body.
                    TreeWalker methodWalker = new TreeWalker(member);
                    while (methodWalker.next(VariableDeclaration.class))
                    {
                        potentiallyFinal++;
                        if (((VariableDeclaration) methodWalker.getProgramElement()).isFinal())
                            finalCount++;
                    }
                }
                else if (member instanceof FieldDeclaration)
                {
                    potentiallyFinal++;
                    if (((FieldDeclaration) member).isFinal())
                        finalCount++;
                }
            }
            finalAmount += (float) finalCount / (float) potentiallyFinal;
        }
    }
    return finalAmount / (float) classCounter;
}
// Average ratio of constant elements over constant
// and potentially constant elements per class.
// Of the variable declarations, only a field can be constant.
public float constantRatio()
{
    int classCounter = 0;
    float constantAmount = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            classCounter++;
            // "Constant" means both static and final.
            int potentiallyConstant = 1;
            int constantCount = (type.isStatic() && type.isFinal()) ? 1 : 0;
            for (MemberDeclaration member : type.getMembers())
            {
                if (member instanceof MethodDeclaration)
                {
                    potentiallyConstant++;
                    if (member.isStatic() && ((MethodDeclaration) member).isFinal())
                        constantCount++;
                }
                else if (member instanceof FieldDeclaration)
                {
                    potentiallyConstant++;
                    if (member.isStatic() && ((FieldDeclaration) member).isFinal())
                        constantCount++;
                }
            }
            constantAmount += (float) constantCount / (float) potentiallyConstant;
        }
    }
    return constantAmount / (float) classCounter;
}
// Ratio of amount of classes in the project that are
// declared inside other classes over the amount of classes.
public float innerClassRatio()
{
    int innerClassCounter = 0;
    int classCounter = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            classCounter++;
            // A non-null containing class type means the type is nested.
            if (type.getContainingClassType() != null)
                innerClassCounter++;
        }
    }
    return (float) innerClassCounter / (float) classCounter;
}
// Average referenced inherited methods ratio per class.
// Ratio gets the accumulation of the amount of inherited external methods accessed within
// the methods of a class (methods declared in a super class of the current class) over
// the overall distinct amount of external methods accessed in the methods of the class.
public float referencedMethodsRatio()
{
    int classCounter = 0;
    float referencedMethodsRatio = 0;
    SourceInfo sourceInfo = this.units.get(0).getFactory().getServiceConfiguration().getSourceInfo();
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            classCounter++;
            // Distinct project-declared methods referenced from this class's
            // methods that live outside the class itself.
            ArrayList<MethodDeclaration> externalMethods = new ArrayList<MethodDeclaration>();
            for (MemberDeclaration member : type.getMembers())
            {
                if (!(member instanceof MethodDeclaration))
                    continue;
                TreeWalker walker = new TreeWalker(member);
                while (walker.next(MethodReference.class))
                {
                    Method referenced = sourceInfo.getMethod((MethodReference) walker.getProgramElement());
                    if (referenced instanceof MethodDeclaration)
                    {
                        MethodDeclaration declaration = (MethodDeclaration) referenced;
                        if (!(externalMethods.contains(declaration))
                                && !(declaration.getContainingClassType().equals(type)))
                            externalMethods.add(declaration);
                    }
                }
            }
            // Of those, count the ones declared in one of the class's supertypes.
            int inheritedCount = 0;
            for (MethodDeclaration declaration : externalMethods)
                if (type.getAllSupertypes().contains(declaration.getContainingClassType()))
                    inheritedCount++;
            if (!externalMethods.isEmpty())
                referencedMethodsRatio += (float) inheritedCount / (float) externalMethods.size();
        }
    }
    return referencedMethodsRatio / (float) classCounter;
}
// Average visibility ratio per class.
// Ratio calculates the accumulative visibility value among
// type, method and variable declarations over the amount of
// declarations, where a higher value means more visibility.
public float visibilityRatio()
{
    int classCounter = 0;
    float visibility = 0;
    for (CompilationUnit unit : this.units)
    {
        for (TypeDeclaration type : getAllTypes(unit))
        {
            if (!((type instanceof ClassDeclaration) || (type instanceof InterfaceDeclaration)))
                continue;
            classCounter++;
            // The type's own modifier counts as the first declaration.
            int declarationCount = 1;
            float visibilityTotal = identifier(type.getVisibilityModifier());
            for (MemberDeclaration member : type.getMembers())
            {
                if (member instanceof MethodDeclaration)
                {
                    declarationCount++;
                    visibilityTotal += identifier(((MethodDeclaration) member).getVisibilityModifier());
                }
                else if (member instanceof FieldDeclaration)
                {
                    declarationCount++;
                    visibilityTotal += identifier(((FieldDeclaration) member).getVisibilityModifier());
                }
            }
            visibility += visibilityTotal / (float) declarationCount;
        }
    }
    return visibility / (float) classCounter;
}
// Amount of lines of code in the project.
public int linesOfCode()
{
    int total = 0;
    // The end position of a compilation unit marks its last source line.
    for (CompilationUnit unit : this.units)
        total += unit.getEndPosition().getLine();
    return total;
}
// Amount of java source files in the project.
// One compilation unit corresponds to one source file.
public int numberOfFiles()
{
    return this.units.size();
}
// Instances of priority classes (most important classes determined
// by the user) affected by the refactorings of a solution.
public int priorityNotNormalised(ArrayList<String> priorityClasses)
{
    int priorityAmount = 0;
    for (String affected : this.affectedClasses)
    {
        // Count each affected class at most once, no matter how
        // many priority entries it matches.
        for (String priority : priorityClasses)
        {
            if (affected.endsWith(priority))
            {
                priorityAmount++;
                break;
            }
        }
    }
    return priorityAmount;
}
// Instances of priority classes affected by the refactorings of a solution. This
// override also incorporates a list of non priority classes (classes where
// modifications are undesirable). The instances of non priority classes are also
// calculated and then taken away from the priority classes amount to give an overall value.
public int priorityNotNormalised(ArrayList<String> priorityClasses, ArrayList<String> nonPriorityClasses)
{
    int priorityAmount = priorityNotNormalised(priorityClasses);
    int nonPriorityAmount = 0;
    for (String affected : this.affectedClasses)
    {
        // Each affected class counts at most once against the score.
        for (String nonPriority : nonPriorityClasses)
        {
            if (affected.endsWith(nonPriority))
            {
                nonPriorityAmount++;
                break;
            }
        }
    }
    return priorityAmount - nonPriorityAmount;
}
// Priority objective updated to be normalised as a ratio between 0 and 1. The original
// score is divided by the highest value it could be i.e. the overall amount of affected classes.
public float priority(ArrayList<String> priorityClasses)
{
    // The raw count is computed by the non-normalised variant, which
    // performs exactly the same matching loop.
    return (float) priorityNotNormalised(priorityClasses) / (float) this.affectedClasses.size();
}
// Objective updated to be normalised as a ratio between -1 and 1.
// The non priority score is normalised the same way as the priority
// score. Then the non priority score is taken away from the priority score.
public float priority(ArrayList<String> priorityClasses, ArrayList<String> nonPriorityClasses)
{
    float priorityAmount = priority(priorityClasses);
    int nonPriorityMatches = 0;
    for (String affected : this.affectedClasses)
    {
        // Each affected class counts at most once.
        for (String nonPriority : nonPriorityClasses)
        {
            if (affected.endsWith(nonPriority))
            {
                nonPriorityMatches++;
                break;
            }
        }
    }
    float nonPriorityAmount = (float) nonPriorityMatches / (float) this.affectedClasses.size();
    return priorityAmount - nonPriorityAmount;
}
// Diversity of refactorings in refactoring solution. This is calculated by
// finding the average amount of refactorings per refactored element, and then
// dividing the amount of distinct refactored elements by this average. In the method
// this calculation is a little more streamlined. Average = refactoring count / elements.
// Therefore elements / average = elements * (elements / refactoring count).
// The metric is calculated by finding elements squared over refactoring count.
public float diversityNotNormalised()
{
    int refactoringCount = 0;
    for (Integer perElement : this.elementDiversity.values())
        refactoringCount += perElement;
    int distinctElements = this.elementDiversity.size();
    return (float) (distinctElements * distinctElements) / (float) refactoringCount;
}
// Diversity objective updated to be normalised as a ratio between 0 and 1.
// The original score is divided by the highest value it could be i.e. the
// amount of distinct elements divided by 1. Again, this calculation is
// rearranged to improve efficiency. (elements / average) / elements =
// 1 / average => (1 / (refactoring count / elements)) => elements / refactoring count.
public float diversity()
{
    int refactoringCount = 0;
    for (Integer perElement : this.elementDiversity.values())
        refactoringCount += perElement;
    return (float) this.elementDiversity.size() / (float) refactoringCount;
}
// Element recentness in refactoring solution. This is calculated by
// finding how far back the element appeared amongst the previous versions of the code,
// denoted with an integer. The older the element is, the larger its corresponding value.
// This value is calculated or extracted for each relevant element in the refactoring
// solution, and an accumulative value is calculated to give an overall measure of recentness.
public int elementRecentnessNotNormalised(ArrayList<List<CompilationUnit>> previousUnits)
{
    int numerator = 0;
    for (Entry<String, Integer> e : this.elementDiversity.entrySet())
    {
        String key = e.getKey();
        // How many refactorings touched this element.
        int value = e.getValue();
        // Start at the maximum score: element present only in the current version.
        int amount = previousUnits.size();
        if (this.elementScores.containsKey(key))
        {
            // Score was already computed on an earlier call; reuse it.
            amount = this.elementScores.get(key);
        }
        else
        {
            // Decode the element key into an element type and a searchable name.
            // NOTE(review): the encoding is inferred from this parsing — no ':'
            // looks like a class file path, ':' at index 1 a field, a key ending
            // in ':' a method, otherwise a variable after the last ':'. Confirm
            // against the code that populates elementDiversity.
            String name;
            int elementType;
            if (!(key.contains(":")))
            {
                // Class: strip the leading (Windows-style) file path.
                elementType = 1;
                name = key.substring(key.lastIndexOf('\\') + 1);
            }
            else if (key.charAt(1) == ':')
            {
                // Field: drop the two-character prefix.
                elementType = 3;
                name = key.substring(2);
            }
            else if (key.endsWith(":"))
            {
                // Method: name sits between the surrounding colons.
                elementType = 2;
                name = key.substring(1, key.length() - 1);
            }
            else
            {
                // Variable: name follows the last colon.
                elementType = 4;
                name = key.substring(key.lastIndexOf(':') + 1);
            }
            // Walk backwards from the most recent previous version; every version
            // that still contains the element lowers its recentness score by one.
            for (int i = previousUnits.size() - 1; i >= 0; i--)
            {
                ForestWalker tw = new ForestWalker(previousUnits.get(i));
                // Assume the element is absent until it is found in this version.
                boolean breakout = true;
                if (elementType == 1)
                {
                    while (tw.next(TypeDeclaration.class))
                    {
                        TypeDeclaration td = (TypeDeclaration) tw.getProgramElement();
                        if (((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration)) &&
                            (td.getName() != null) && (td.getName().equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                else if (elementType == 2)
                {
                    while (tw.next(MethodDeclaration.class))
                    {
                        MethodDeclaration md = (MethodDeclaration) tw.getProgramElement();
                        if ((md.getName() != null) && (Refactoring.getMethodName(md).equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                else if (elementType == 3)
                {
                    while (tw.next(FieldDeclaration.class))
                    {
                        FieldDeclaration fd = (FieldDeclaration) tw.getProgramElement();
                        if ((fd.toString() != null) && (fd.toString().equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                else if (elementType == 4)
                {
                    while (tw.next(VariableDeclaration.class))
                    {
                        VariableDeclaration vd = (VariableDeclaration) tw.getProgramElement();
                        if ((vd.toString() != null) && (vd.toString().equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                // Element missing from this version: it first appeared afterwards.
                if (breakout)
                    break;
                amount--;
            }
            // Memoise so the expensive version walk runs at most once per element.
            this.elementScores.put(key, amount);
        }
        // Weight the element's age by the number of refactorings applied to it.
        numerator += (amount * value);
    }
    return numerator;
}
// Element recentness objective updated to be normalised as a ratio between 0 and 1.
// The original score is divided by the amount of elements to get an average value per element.
// This is then divided by the highest value it could be i.e. the accumulative value that represents if
// every element were only found in the current version of the project, in order to get a normalised ratio.
public float elementRecentness(ArrayList<List<CompilationUnit>> previousUnits)
{
    int numerator = 0;
    // Total refactoring count across all elements, used for the average.
    int elements = 0;
    for (Entry<String, Integer> e : this.elementDiversity.entrySet())
    {
        String key = e.getKey();
        // How many refactorings touched this element.
        int value = e.getValue();
        // Start at the maximum score: element present only in the current version.
        int amount = previousUnits.size();
        if (this.elementScores.containsKey(key))
        {
            // Score was already computed on an earlier call; reuse it.
            amount = this.elementScores.get(key);
        }
        else
        {
            // Decode the element key into an element type and a searchable name.
            // NOTE(review): encoding inferred from this parsing (same scheme as
            // elementRecentnessNotNormalised) — confirm against the code that
            // populates elementDiversity.
            String name;
            int elementType;
            if (!(key.contains(":")))
            {
                // Class: strip the leading (Windows-style) file path.
                elementType = 1;
                name = key.substring(key.lastIndexOf('\\') + 1);
            }
            else if (key.charAt(1) == ':')
            {
                // Field: drop the two-character prefix.
                elementType = 3;
                name = key.substring(2);
            }
            else if (key.endsWith(":"))
            {
                // Method: name sits between the surrounding colons.
                elementType = 2;
                name = key.substring(1, key.length() - 1);
            }
            else
            {
                // Variable: name follows the last colon.
                elementType = 4;
                name = key.substring(key.lastIndexOf(':') + 1);
            }
            // Walk backwards from the most recent previous version; every version
            // that still contains the element lowers its recentness score by one.
            for (int i = previousUnits.size() - 1; i >= 0; i--)
            {
                ForestWalker tw = new ForestWalker(previousUnits.get(i));
                // Assume the element is absent until it is found in this version.
                boolean breakout = true;
                if (elementType == 1)
                {
                    while (tw.next(TypeDeclaration.class))
                    {
                        TypeDeclaration td = (TypeDeclaration) tw.getProgramElement();
                        if (((td instanceof ClassDeclaration) || (td instanceof InterfaceDeclaration)) &&
                            (td.getName() != null) && (td.getName().equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                else if (elementType == 2)
                {
                    while (tw.next(MethodDeclaration.class))
                    {
                        MethodDeclaration md = (MethodDeclaration) tw.getProgramElement();
                        if ((md.getName() != null) && (Refactoring.getMethodName(md).equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                else if (elementType == 3)
                {
                    while (tw.next(FieldDeclaration.class))
                    {
                        FieldDeclaration fd = (FieldDeclaration) tw.getProgramElement();
                        if ((fd.toString() != null) && (fd.toString().equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                else if (elementType == 4)
                {
                    while (tw.next(VariableDeclaration.class))
                    {
                        VariableDeclaration vd = (VariableDeclaration) tw.getProgramElement();
                        if ((vd.toString() != null) && (vd.toString().equals(name)))
                        {
                            breakout = false;
                            break;
                        }
                    }
                }
                // Element missing from this version: it first appeared afterwards.
                if (breakout)
                    break;
                amount--;
            }
            // Memoise so the expensive version walk runs at most once per element.
            this.elementScores.put(key, amount);
        }
        // Weight the element's age by the number of refactorings applied to it.
        numerator += (amount * value);
        elements += value;
    }
    // Average age per refactoring, then normalise by the maximum possible age.
    float answer = (float) numerator / (float) elements;
    return answer / previousUnits.size();
}
// Returns a value to represent the visibility of a modifier, where a
// higher value means more visible:
// public = 1, protected = 2/3, package (no modifier) = 1/3, private = 0.
private float identifier(VisibilityModifier vm)
{
    if (vm instanceof Public)
        return 1;
    else if (vm instanceof Protected)
        // BUG FIX: "(float) (2 / 3)" performed integer division and always
        // evaluated to 0, collapsing protected down to private's score.
        return 2f / 3f;
    else if (vm instanceof Private)
        return 0;
    else
        // No explicit modifier: package-private. "(float) (1 / 3)" was also
        // integer division and evaluated to 0.
        return 1f / 3f;
}
// Returns all the types in a compilation unit including nested types.
private ArrayList<TypeDeclaration> getAllTypes(CompilationUnit cu)
{
    ArrayList<TypeDeclaration> result = new ArrayList<TypeDeclaration>();
    AbstractTreeWalker walker = new TreeWalker(cu);
    while (walker.next(TypeDeclaration.class))
    {
        TypeDeclaration type = (TypeDeclaration) walker.getProgramElement();
        // Anonymous types have no name and are excluded.
        if (type.getName() != null)
            result.add(type);
    }
    return result;
}
// Replaces the compilation units this metric model operates on.
public void setUnits(List<CompilationUnit> units)
{
    this.units = units;
}
// Replaces the list of classes affected by the refactoring solution,
// used by the priority objectives.
public void setAffectedClasses(ArrayList<String> affectedClasses)
{
    this.affectedClasses = affectedClasses;
}
// Replaces the element-to-refactoring-count map used by the
// diversity and recentness objectives.
public void setElementDiversity(HashMap<String, Integer> elementDiversity)
{
    this.elementDiversity = elementDiversity;
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.core.service;
import org.wso2.carbon.device.mgt.common.Device;
import org.wso2.carbon.device.mgt.common.DeviceIdentifier;
import org.wso2.carbon.device.mgt.common.DeviceManagementException;
import org.wso2.carbon.device.mgt.common.EnrolmentInfo;
import org.wso2.carbon.device.mgt.common.FeatureManager;
import org.wso2.carbon.device.mgt.common.InvalidDeviceException;
import org.wso2.carbon.device.mgt.common.MonitoringOperation;
import org.wso2.carbon.device.mgt.common.PaginationRequest;
import org.wso2.carbon.device.mgt.common.PaginationResult;
import org.wso2.carbon.device.mgt.common.configuration.mgt.PlatformConfiguration;
import org.wso2.carbon.device.mgt.common.license.mgt.License;
import org.wso2.carbon.device.mgt.common.operation.mgt.Activity;
import org.wso2.carbon.device.mgt.common.operation.mgt.Operation;
import org.wso2.carbon.device.mgt.common.operation.mgt.OperationManagementException;
import org.wso2.carbon.device.mgt.common.policy.mgt.PolicyMonitoringManager;
import org.wso2.carbon.device.mgt.common.push.notification.NotificationStrategy;
import org.wso2.carbon.device.mgt.core.DeviceManagementPluginRepository;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
/**
* Proxy class for all Device Management related operations that take the corresponding plugin type in
* and resolve the appropriate plugin implementation
*/
public interface DeviceManagementProviderService {
/**
* Method to retrieve all the devices of a given device type.
*
* @param deviceType Device-type of the required devices
* @return List of devices of given device-type.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* devices.
*/
List<Device> getAllDevices(String deviceType) throws DeviceManagementException;
/**
* Method to retrieve all the devices registered in the system.
*
* @return List of registered devices.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* devices.
*/
List<Device> getAllDevices() throws DeviceManagementException;
/**
* Method to retrieve all the devices with pagination support.
*
* @param request PaginationRequest object holding the data for pagination
* @return PaginationResult - Result including the required parameters necessary to do pagination.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* devices.
*/
PaginationResult getDevicesByType(PaginationRequest request) throws DeviceManagementException;
/**
* Method to retrieve all the devices with pagination support.
*
* @param request PaginationRequest object holding the data for pagination
* @return PaginationResult - Result including the required parameters necessary to do pagination.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* devices.
*/
PaginationResult getAllDevices(PaginationRequest request) throws DeviceManagementException;
void sendEnrolmentInvitation(String templateName, EmailMetaInfo metaInfo) throws DeviceManagementException;
void sendRegistrationEmail(EmailMetaInfo metaInfo) throws DeviceManagementException;
FeatureManager getFeatureManager(String deviceType) throws DeviceManagementException;
/**
* Proxy method to get the tenant configuration of a given platform.
*
* @param deviceType Device platform
* @return Tenant configuration settings of the particular tenant and platform.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* configuration.
*/
PlatformConfiguration getConfiguration(String deviceType) throws DeviceManagementException;
/**
* Method to get the list of devices owned by an user with paging information.
*
* @param request PaginationRequest object holding the data for pagination
* @return List of devices owned by a particular user along with the required parameters necessary to do pagination.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* device list
*/
PaginationResult getDevicesOfUser(PaginationRequest request) throws DeviceManagementException;
/**
* Method to get the list of devices filtered by the ownership with paging information.
*
* @param request PaginationRequest object holding the data for pagination
* @return List of devices owned by a particular user along with the required parameters necessary to do pagination.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* device list
*/
PaginationResult getDevicesByOwnership(PaginationRequest request) throws DeviceManagementException;
/**
* Method to get the list of devices owned by an user.
*
* @param userName Username of the user
* @return List of devices owned by a particular user
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* device list
*/
List<Device> getDevicesOfUser(String userName) throws DeviceManagementException;
/**
* This method returns the list of device owned by a user of given device type.
*
* @param userName user name.
* @param deviceType device type name
* @return
* @throws DeviceManagementException
*/
List<Device> getDevicesOfUser(String userName, String deviceType) throws DeviceManagementException;
/**
* Method to get the list of devices owned by users of a particular user-role.
*
* @param roleName Role name of the users
* @return List of devices owned by users of a particular role
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* device list
*/
List<Device> getAllDevicesOfRole(String roleName) throws DeviceManagementException;
/**
* Method to get the device count of user.
*
* @return device count
* @throws DeviceManagementException If some unusual behaviour is observed while counting
* the devices
*/
int getDeviceCount(String username) throws DeviceManagementException;
/**
* Method to get the count of all types of devices.
*
* @return device count
* @throws DeviceManagementException If some unusual behaviour is observed while counting
* the devices
*/
int getDeviceCount() throws DeviceManagementException;
/**
* Method to get the list of devices that matches with the given device name.
*
* @param deviceName name of the device
* @return List of devices that matches with the given device name.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* device list
*/
List<Device> getDevicesByNameAndType(String deviceName, String type, int offset, int limit) throws DeviceManagementException;
/**
* This method is used to retrieve list of devices that matches with the given device name with paging information.
*
* @param request PaginationRequest object holding the data for pagination
* @return List of devices in given status along with the required parameters necessary to do pagination.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* device list
*/
PaginationResult getDevicesByName(PaginationRequest request) throws DeviceManagementException;
void updateDeviceEnrolmentInfo(Device device, EnrolmentInfo.Status active) throws DeviceManagementException;
/**
* This method is used to retrieve list of devices based on the device status.
*
* @param status Device status
* @return List of devices
* @throws DeviceManagementException
*/
List<Device> getDevicesByStatus(EnrolmentInfo.Status status) throws DeviceManagementException;
/**
* This method is used to retrieve list of devices based on the device status with paging information.
*
* @param request PaginationRequest object holding the data for pagination
* @return List of devices in given status along with the required parameters necessary to do pagination.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the
* device list
*/
PaginationResult getDevicesByStatus(PaginationRequest request) throws DeviceManagementException;
/**
* This method is used to check whether the device is enrolled with the give user.
*
* @param deviceId identifier of the device that needs to be checked against the user.
* @param user username of the device owner.
* @return true if the user owns the device else will return false.
* @throws DeviceManagementException If some unusual behaviour is observed while fetching the device.
*/
boolean isEnrolled(DeviceIdentifier deviceId, String user) throws DeviceManagementException;
/**
* This method is used to get notification strategy for given device type
*
* @param deviceType Device type
* @return Notification Strategy for device type
* @throws DeviceManagementException
*/
NotificationStrategy getNotificationStrategyByDeviceType(String deviceType) throws DeviceManagementException;

/** Returns the license registered for the given device type in the given language. */
License getLicense(String deviceType, String languageCode) throws DeviceManagementException;

/** Stores the given license against the given device type. */
void addLicense(String deviceType, License license) throws DeviceManagementException;

/** Updates an existing enrolment with the data in {@code device}; boolean presumably signals success — confirm with implementation. */
boolean modifyEnrollment(Device device) throws DeviceManagementException;

/** Enrolls the given device; boolean presumably signals success — confirm with implementation. */
boolean enrollDevice(Device device) throws DeviceManagementException;

/** Returns the platform-wide configuration. */
PlatformConfiguration getConfiguration() throws DeviceManagementException;

/** Persists the given platform configuration. */
boolean saveConfiguration(PlatformConfiguration configuration) throws DeviceManagementException;

/** Removes the enrolment of the identified device. */
boolean disenrollDevice(DeviceIdentifier deviceId) throws DeviceManagementException;

/** Returns whether the identified device is enrolled. */
boolean isEnrolled(DeviceIdentifier deviceId) throws DeviceManagementException;

/** Returns whether the identified device is marked active. */
boolean isActive(DeviceIdentifier deviceId) throws DeviceManagementException;

/** Sets the active status of the identified device. */
boolean setActive(DeviceIdentifier deviceId, boolean status) throws DeviceManagementException;

/**
 * Returns the device of specified id.
 *
 * @param deviceId device Id
 * @return Device; returns null when the device is not available.
 * @throws DeviceManagementException on errors while looking up the device
 */
Device getDevice(DeviceIdentifier deviceId) throws DeviceManagementException;

/** Returns the device of the specified id; {@code since} presumably restricts to data updated after that point — TODO confirm. */
Device getDevice(DeviceIdentifier deviceId, Date since) throws DeviceManagementException;

/** Returns matching devices keyed by an integer (presumably tenant id — verify against implementation). */
HashMap<Integer, Device> getTenantedDevice(DeviceIdentifier deviceIdentifier) throws DeviceManagementException;

/** Returns the device of the specified id that is in the given enrolment status. */
Device getDevice(DeviceIdentifier deviceId, EnrolmentInfo.Status status) throws DeviceManagementException;

/** Returns the names of all available device types. */
List<String> getAvailableDeviceTypes() throws DeviceManagementException;

/** Updates the stored information of the identified device with the data in {@code device}. */
boolean updateDeviceInfo(DeviceIdentifier deviceIdentifier, Device device) throws DeviceManagementException;

/** Sets the ownership type of the identified device (valid values not visible here — confirm with implementation). */
boolean setOwnership(DeviceIdentifier deviceId, String ownershipType) throws DeviceManagementException;

/** Returns whether the identified device can be claimed. */
boolean isClaimable(DeviceIdentifier deviceId) throws DeviceManagementException;

/** Sets the enrolment status of the identified device for the given owner. */
boolean setStatus(DeviceIdentifier deviceId, String currentOwner,
                  EnrolmentInfo.Status status) throws DeviceManagementException;

/** Notifies the given operation to the listed devices. */
void notifyOperationToDevices(Operation operation,
                              List<DeviceIdentifier> deviceIds) throws DeviceManagementException;

/** Adds an operation of the given type targeting the listed devices and returns the resulting activity. */
Activity addOperation(String type, Operation operation,
                      List<DeviceIdentifier> devices) throws OperationManagementException, InvalidDeviceException;

/** Returns all operations recorded for the identified device. */
List<? extends Operation> getOperations(DeviceIdentifier deviceId) throws OperationManagementException;

/** Returns a page of operations for the identified device. */
PaginationResult getOperations(DeviceIdentifier deviceId,
                               PaginationRequest request) throws OperationManagementException;

/** Returns the operations still pending for the identified device. */
List<? extends Operation> getPendingOperations(
        DeviceIdentifier deviceId) throws OperationManagementException;

/** Returns the next pending operation for the identified device. */
Operation getNextPendingOperation(DeviceIdentifier deviceId) throws OperationManagementException;

/** Persists the updated state of the given operation for the identified device. */
void updateOperation(DeviceIdentifier deviceId, Operation operation) throws OperationManagementException;

/** Deletes the operation with the given type and id. */
void deleteOperation(String type, int operationId) throws OperationManagementException;

/** Returns the operation with the given id as applied to the identified device. */
Operation getOperationByDeviceAndOperationId(DeviceIdentifier deviceId, int operationId)
        throws OperationManagementException;

/** Returns the operations of the identified device that are in the given status. */
List<? extends Operation> getOperationsByDeviceAndStatus(DeviceIdentifier identifier,
                                                         Operation.Status status)
        throws OperationManagementException, DeviceManagementException;

/** Returns the operation with the given type and id. */
Operation getOperation(String type, int operationId) throws OperationManagementException;

/** Returns the activity with the given activity id. */
Activity getOperationByActivityId(String activity) throws OperationManagementException;

/** Returns activities updated after the given timestamp. */
List<Activity> getActivitiesUpdatedAfter(long timestamp) throws OperationManagementException;

/** Returns a limit/offset page of activities updated after the given timestamp. */
List<Activity> getActivitiesUpdatedAfter(long timestamp, int limit, int offset) throws OperationManagementException;

/** Returns the number of activities updated after the given timestamp. */
int getActivityCountUpdatedAfter(long timestamp) throws OperationManagementException;

/** Returns the monitoring operations configured for the given device type. */
List<MonitoringOperation> getMonitoringOperationList(String deviceType);

/** Returns the monitoring frequency configured for the given device type. */
int getDeviceMonitoringFrequency(String deviceType);

/** Returns whether monitoring is enabled for the given device type. */
boolean isDeviceMonitoringEnabled(String deviceType);

/** Returns the policy monitoring manager for the given device type. */
PolicyMonitoringManager getPolicyMonitoringManager(String deviceType);

/** Returns the repository of registered device-management plugins. */
DeviceManagementPluginRepository getPluginRepository();

/**
 * Change device status.
 *
 * @param deviceIdentifier {@link DeviceIdentifier} object
 * @param newStatus        New status of the device
 * @return Whether status is changed or not
 * @throws DeviceManagementException on errors while trying to change device status
 */
boolean changeDeviceStatus(DeviceIdentifier deviceIdentifier, EnrolmentInfo.Status newStatus)
        throws DeviceManagementException;
}
| |
/*
* Copyright 2002-2006,2009 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opensymphony.xwork2.config.entities;
import com.opensymphony.xwork2.util.location.Located;
import com.opensymphony.xwork2.util.location.Location;
import com.opensymphony.xwork2.util.logging.Logger;
import com.opensymphony.xwork2.util.logging.LoggerFactory;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Configuration for Package.
* <p/>
* In the xml configuration file this is defined as the <code>package</code> tag.
*
* @author Rainer Hermanns
* @version $Revision: 1215225 $
*/
public class PackageConfig extends Located implements Comparable, Serializable, InterceptorLocator {

    private static final Logger LOG = LoggerFactory.getLogger(PackageConfig.class);

    // All maps/lists use insertion-ordered implementations; after Builder.build()
    // they are wrapped unmodifiable (see Builder.embalmTarget()).
    protected Map<String, ActionConfig> actionConfigs;
    protected Map<String, ResultConfig> globalResultConfigs;
    // Values are either InterceptorConfig or InterceptorStackConfig, hence Object.
    protected Map<String, Object> interceptorConfigs;
    protected Map<String, ResultTypeConfig> resultTypeConfigs;
    protected List<ExceptionMappingConfig> globalExceptionMappingConfigs;
    protected List<PackageConfig> parents;
    protected String defaultInterceptorRef;
    protected String defaultActionRef;
    protected String defaultResultType;
    protected String defaultClassRef;
    protected String name;
    protected String namespace = "";
    protected boolean isAbstract = false;
    protected boolean needsRefresh;

    /** Creates a named, empty package configuration (used by {@link Builder}). */
    protected PackageConfig(String name) {
        this.name = name;
        actionConfigs = new LinkedHashMap<String, ActionConfig>();
        globalResultConfigs = new LinkedHashMap<String, ResultConfig>();
        interceptorConfigs = new LinkedHashMap<String, Object>();
        resultTypeConfigs = new LinkedHashMap<String, ResultTypeConfig>();
        globalExceptionMappingConfigs = new ArrayList<ExceptionMappingConfig>();
        parents = new ArrayList<PackageConfig>();
    }

    /** Copy constructor; makes fresh (mutable) copies of all collections of {@code orig}. */
    protected PackageConfig(PackageConfig orig) {
        this.defaultInterceptorRef = orig.defaultInterceptorRef;
        this.defaultActionRef = orig.defaultActionRef;
        this.defaultResultType = orig.defaultResultType;
        this.defaultClassRef = orig.defaultClassRef;
        this.name = orig.name;
        this.namespace = orig.namespace;
        this.isAbstract = orig.isAbstract;
        this.needsRefresh = orig.needsRefresh;
        this.actionConfigs = new LinkedHashMap<String, ActionConfig>(orig.actionConfigs);
        this.globalResultConfigs = new LinkedHashMap<String, ResultConfig>(orig.globalResultConfigs);
        this.interceptorConfigs = new LinkedHashMap<String, Object>(orig.interceptorConfigs);
        this.resultTypeConfigs = new LinkedHashMap<String, ResultTypeConfig>(orig.resultTypeConfigs);
        this.globalExceptionMappingConfigs = new ArrayList<ExceptionMappingConfig>(orig.globalExceptionMappingConfigs);
        this.parents = new ArrayList<PackageConfig>(orig.parents);
    }

    /** Returns whether this package is abstract (i.e. exists only to be extended). */
    public boolean isAbstract() {
        return isAbstract;
    }

    /** Returns the ActionConfigs declared directly in this package (not inherited ones). */
    public Map<String, ActionConfig> getActionConfigs() {
        return actionConfigs;
    }

    /**
     * returns the Map of all the ActionConfigs available in the current package.
     * ActionConfigs defined in ancestor packages will be included in this Map,
     * with this package's own entries overriding same-named parent entries.
     *
     * @return a Map of ActionConfig Objects with the action name as the key
     * @see ActionConfig
     */
    public Map<String, ActionConfig> getAllActionConfigs() {
        Map<String, ActionConfig> retMap = new LinkedHashMap<String, ActionConfig>();
        if (!parents.isEmpty()) {
            for (PackageConfig parent : parents) {
                retMap.putAll(parent.getAllActionConfigs());
            }
        }
        retMap.putAll(getActionConfigs());
        return retMap;
    }

    /**
     * returns the Map of all the global ResultConfigs available in the current package.
     * Global ResultConfigs defined in ancestor packages will be included in this Map.
     *
     * @return a Map of Result Objects with the result name as the key
     * @see ResultConfig
     */
    public Map<String, ResultConfig> getAllGlobalResults() {
        Map<String, ResultConfig> retMap = new LinkedHashMap<String, ResultConfig>();
        if (!parents.isEmpty()) {
            for (PackageConfig parentConfig : parents) {
                retMap.putAll(parentConfig.getAllGlobalResults());
            }
        }
        retMap.putAll(getGlobalResultConfigs());
        return retMap;
    }

    /**
     * returns the Map of all InterceptorConfigs and InterceptorStackConfigs available in the current package.
     * InterceptorConfigs defined in ancestor packages will be included in this Map.
     *
     * @return a Map of InterceptorConfig and InterceptorStackConfig Objects with the ref-name as the key
     * @see InterceptorConfig
     * @see InterceptorStackConfig
     */
    public Map<String, Object> getAllInterceptorConfigs() {
        Map<String, Object> retMap = new LinkedHashMap<String, Object>();
        if (!parents.isEmpty()) {
            for (PackageConfig parentContext : parents) {
                retMap.putAll(parentContext.getAllInterceptorConfigs());
            }
        }
        retMap.putAll(getInterceptorConfigs());
        return retMap;
    }

    /**
     * returns the Map of all the ResultTypeConfigs available in the current package.
     * ResultTypeConfigs defined in ancestor packages will be included in this Map.
     *
     * @return a Map of ResultTypeConfig Objects with the result type name as the key
     * @see ResultTypeConfig
     */
    public Map<String, ResultTypeConfig> getAllResultTypeConfigs() {
        Map<String, ResultTypeConfig> retMap = new LinkedHashMap<String, ResultTypeConfig>();
        if (!parents.isEmpty()) {
            for (PackageConfig parentContext : parents) {
                retMap.putAll(parentContext.getAllResultTypeConfigs());
            }
        }
        retMap.putAll(getResultTypeConfigs());
        return retMap;
    }

    /**
     * returns the List of all the ExceptionMappingConfigs available in the current package.
     * ExceptionMappingConfigs defined in ancestor packages will be included in this list
     * (ancestors first, then this package's own).
     *
     * @see ExceptionMappingConfig
     */
    public List<ExceptionMappingConfig> getAllExceptionMappingConfigs() {
        List<ExceptionMappingConfig> allExceptionMappings = new ArrayList<ExceptionMappingConfig>();
        if (!parents.isEmpty()) {
            for (PackageConfig parentContext : parents) {
                allExceptionMappings.addAll(parentContext.getAllExceptionMappingConfigs());
            }
        }
        allExceptionMappings.addAll(getGlobalExceptionMappingConfigs());
        return allExceptionMappings;
    }

    /** Returns this package's own default interceptor-ref name (no parent lookup). */
    public String getDefaultInterceptorRef() {
        return defaultInterceptorRef;
    }

    /** Returns this package's own default action-ref name (no parent lookup). */
    public String getDefaultActionRef() {
        return defaultActionRef;
    }

    /**
     * Returns the default class ref, falling back to the first parent
     * (in parent order) that has one when this package has none.
     */
    public String getDefaultClassRef() {
        if ((defaultClassRef == null) && !parents.isEmpty()) {
            for (PackageConfig parent : parents) {
                String parentDefault = parent.getDefaultClassRef();
                if (parentDefault != null) {
                    return parentDefault;
                }
            }
        }
        return defaultClassRef;
    }

    /**
     * Returns the default result type for this package (no parent lookup).
     */
    public String getDefaultResultType() {
        return defaultResultType;
    }

    /**
     * gets the default interceptor-ref name. If this is not set on this PackageConfig, it searches the parent
     * PackageConfigs in order until it finds one.
     */
    public String getFullDefaultInterceptorRef() {
        if ((defaultInterceptorRef == null) && !parents.isEmpty()) {
            for (PackageConfig parent : parents) {
                String parentDefault = parent.getFullDefaultInterceptorRef();
                if (parentDefault != null) {
                    return parentDefault;
                }
            }
        }
        return defaultInterceptorRef;
    }

    /**
     * gets the default action-ref name. If this is not set on this PackageConfig, it searches the parent
     * PackageConfigs in order until it finds one.
     */
    public String getFullDefaultActionRef() {
        if ((defaultActionRef == null) && !parents.isEmpty()) {
            for (PackageConfig parent : parents) {
                String parentDefault = parent.getFullDefaultActionRef();
                if (parentDefault != null) {
                    return parentDefault;
                }
            }
        }
        return defaultActionRef;
    }

    /**
     * Returns the default result type for this package.
     * <p/>
     * If there is no default result type, but this package has parents - we will try to
     * look up the default result type of a parent.
     */
    public String getFullDefaultResultType() {
        if ((defaultResultType == null) && !parents.isEmpty()) {
            for (PackageConfig parent : parents) {
                String parentDefault = parent.getFullDefaultResultType();
                if (parentDefault != null) {
                    return parentDefault;
                }
            }
        }
        return defaultResultType;
    }

    /**
     * gets the global ResultConfigs local to this package
     *
     * @return a Map of ResultConfig objects keyed by result name
     * @see ResultConfig
     */
    public Map<String, ResultConfig> getGlobalResultConfigs() {
        return globalResultConfigs;
    }

    /**
     * gets the InterceptorConfigs and InterceptorStackConfigs local to this package
     *
     * @return a Map of InterceptorConfig and InterceptorStackConfig objects keyed by ref-name
     * @see InterceptorConfig
     * @see InterceptorStackConfig
     */
    public Map<String, Object> getInterceptorConfigs() {
        return interceptorConfigs;
    }

    /** Returns the package name. */
    public String getName() {
        return name;
    }

    /** Returns the package namespace; never null (empty string by default). */
    public String getNamespace() {
        return namespace;
    }

    /** Returns a defensive copy of the parent packages. */
    public List<PackageConfig> getParents() {
        return new ArrayList<PackageConfig>(parents);
    }

    /**
     * gets the ResultTypeConfigs local to this package
     *
     * @return a Map of ResultTypeConfig objects keyed by result name
     * @see ResultTypeConfig
     */
    public Map<String, ResultTypeConfig> getResultTypeConfigs() {
        return resultTypeConfigs;
    }

    /** Returns whether this package config should be reloaded/refreshed. */
    public boolean isNeedsRefresh() {
        return needsRefresh;
    }

    /**
     * gets the ExceptionMappingConfigs local to this package
     *
     * @see ExceptionMappingConfig
     */
    public List<ExceptionMappingConfig> getGlobalExceptionMappingConfigs() {
        return globalExceptionMappingConfigs;
    }

    /**
     * NOTE(review): defaultInterceptorRef, defaultActionRef and needsRefresh are
     * excluded from both equals() and hashCode() — presumably intentional, but
     * confirm before relying on equality for caching. Also note compareTo()
     * below uses only namespace+name, so it is NOT consistent with equals().
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof PackageConfig)) {
            return false;
        }
        final PackageConfig packageConfig = (PackageConfig) o;
        if (isAbstract != packageConfig.isAbstract) {
            return false;
        }
        if ((actionConfigs != null) ? (!actionConfigs.equals(packageConfig.actionConfigs)) : (packageConfig.actionConfigs != null)) {
            return false;
        }
        if ((defaultResultType != null) ? (!defaultResultType.equals(packageConfig.defaultResultType)) : (packageConfig.defaultResultType != null)) {
            return false;
        }
        if ((defaultClassRef != null) ? (!defaultClassRef.equals(packageConfig.defaultClassRef)) : (packageConfig.defaultClassRef != null)) {
            return false;
        }
        if ((globalResultConfigs != null) ? (!globalResultConfigs.equals(packageConfig.globalResultConfigs)) : (packageConfig.globalResultConfigs != null)) {
            return false;
        }
        if ((interceptorConfigs != null) ? (!interceptorConfigs.equals(packageConfig.interceptorConfigs)) : (packageConfig.interceptorConfigs != null)) {
            return false;
        }
        if ((name != null) ? (!name.equals(packageConfig.name)) : (packageConfig.name != null)) {
            return false;
        }
        if ((namespace != null) ? (!namespace.equals(packageConfig.namespace)) : (packageConfig.namespace != null)) {
            return false;
        }
        if ((parents != null) ? (!parents.equals(packageConfig.parents)) : (packageConfig.parents != null)) {
            return false;
        }
        if ((resultTypeConfigs != null) ? (!resultTypeConfigs.equals(packageConfig.resultTypeConfigs)) : (packageConfig.resultTypeConfigs != null)) {
            return false;
        }
        if ((globalExceptionMappingConfigs != null) ? (!globalExceptionMappingConfigs.equals(packageConfig.globalExceptionMappingConfigs)) : (packageConfig.globalExceptionMappingConfigs != null)) {
            return false;
        }
        return true;
    }

    /** Hash over the same fields as equals() (null-safe, 29-based accumulation). */
    @Override
    public int hashCode() {
        int result;
        result = ((name != null) ? name.hashCode() : 0);
        result = (29 * result) + ((parents != null) ? parents.hashCode() : 0);
        result = (29 * result) + ((actionConfigs != null) ? actionConfigs.hashCode() : 0);
        result = (29 * result) + ((globalResultConfigs != null) ? globalResultConfigs.hashCode() : 0);
        result = (29 * result) + ((interceptorConfigs != null) ? interceptorConfigs.hashCode() : 0);
        result = (29 * result) + ((resultTypeConfigs != null) ? resultTypeConfigs.hashCode() : 0);
        result = (29 * result) + ((globalExceptionMappingConfigs != null) ? globalExceptionMappingConfigs.hashCode() : 0);
        result = (29 * result) + ((defaultResultType != null) ? defaultResultType.hashCode() : 0);
        result = (29 * result) + ((defaultClassRef != null) ? defaultClassRef.hashCode() : 0);
        result = (29 * result) + ((namespace != null) ? namespace.hashCode() : 0);
        result = (29 * result) + (isAbstract ? 1 : 0);
        return result;
    }

    @Override
    public String toString() {
        return "{PackageConfig Name:" + name + " namespace:" + namespace + " parents:" + parents + "}";
    }

    /** Orders packages lexicographically by "namespace!name". Not consistent with equals(). */
    public int compareTo(Object o) {
        PackageConfig other = (PackageConfig) o;
        String full = namespace + "!" + name;
        String otherFull = other.namespace + "!" + other.name;
        // note, this isn't perfect (could come from different parents), but it is "good enough"
        return full.compareTo(otherFull);
    }

    /** Resolves an interceptor (or stack) by ref-name, including inherited ones. */
    public Object getInterceptorConfig(String name) {
        return getAllInterceptorConfigs().get(name);
    }

    /**
     * The builder for this object. An instance of this object is the only way to construct a new instance. The
     * purpose is to enforce the immutability of the object. The methods are structured in a way to support chaining.
     * After setting any values you need, call the {@link #build()} method to create the object.
     */
    public static class Builder implements InterceptorLocator {

        // The PackageConfig being assembled; made unmodifiable by embalmTarget() on build().
        protected PackageConfig target;
        private boolean strictDMI;

        public Builder(String name) {
            target = new PackageConfig(name);
        }

        public Builder(PackageConfig config) {
            target = new PackageConfig(config);
        }

        public Builder name(String name) {
            target.name = name;
            return this;
        }

        public Builder isAbstract(boolean isAbstract) {
            target.isAbstract = isAbstract;
            return this;
        }

        public Builder defaultInterceptorRef(String name) {
            target.defaultInterceptorRef = name;
            return this;
        }

        public Builder defaultActionRef(String name) {
            target.defaultActionRef = name;
            return this;
        }

        public Builder defaultClassRef(String defaultClassRef) {
            target.defaultClassRef = defaultClassRef;
            return this;
        }

        /**
         * sets the default Result type for this package
         *
         * @param defaultResultType
         */
        public Builder defaultResultType(String defaultResultType) {
            target.defaultResultType = defaultResultType;
            return this;
        }

        /** Sets the namespace; null is normalized to the empty string. */
        public Builder namespace(String namespace) {
            if (namespace == null) {
                target.namespace = "";
            } else {
                target.namespace = namespace;
            }
            return this;
        }

        public Builder needsRefresh(boolean needsRefresh) {
            target.needsRefresh = needsRefresh;
            return this;
        }

        public Builder addActionConfig(String name, ActionConfig action) {
            target.actionConfigs.put(name, action);
            return this;
        }

        public Builder addParents(List<PackageConfig> parents) {
            for (PackageConfig config : parents) {
                addParent(config);
            }
            return this;
        }

        public Builder addGlobalResultConfig(ResultConfig resultConfig) {
            target.globalResultConfigs.put(resultConfig.getName(), resultConfig);
            return this;
        }

        public Builder addGlobalResultConfigs(Map<String, ResultConfig> resultConfigs) {
            target.globalResultConfigs.putAll(resultConfigs);
            return this;
        }

        public Builder addExceptionMappingConfig(ExceptionMappingConfig exceptionMappingConfig) {
            target.globalExceptionMappingConfigs.add(exceptionMappingConfig);
            return this;
        }

        public Builder addGlobalExceptionMappingConfigs(List<ExceptionMappingConfig> exceptionMappingConfigs) {
            target.globalExceptionMappingConfigs.addAll(exceptionMappingConfigs);
            return this;
        }

        public Builder addInterceptorConfig(InterceptorConfig config) {
            target.interceptorConfigs.put(config.getName(), config);
            return this;
        }

        public Builder addInterceptorStackConfig(InterceptorStackConfig config) {
            target.interceptorConfigs.put(config.getName(), config);
            return this;
        }

        /** Prepends: later-added parents take earlier positions in the parents list. */
        public Builder addParent(PackageConfig parent) {
            target.parents.add(0, parent);
            return this;
        }

        public Builder addResultTypeConfig(ResultTypeConfig config) {
            target.resultTypeConfigs.put(config.getName(), config);
            return this;
        }

        public Builder location(Location loc) {
            target.location = loc;
            return this;
        }

        public boolean isNeedsRefresh() {
            return target.needsRefresh;
        }

        public String getDefaultClassRef() {
            return target.defaultClassRef;
        }

        public String getName() {
            return target.name;
        }

        public String getNamespace() {
            return target.namespace;
        }

        public String getFullDefaultResultType() {
            return target.getFullDefaultResultType();
        }

        public ResultTypeConfig getResultType(String type) {
            return target.getAllResultTypeConfigs().get(type);
        }

        public Object getInterceptorConfig(String name) {
            return target.getAllInterceptorConfigs().get(name);
        }

        public Builder strictMethodInvocation(boolean strict) {
            strictDMI = strict;
            return this;
        }

        public boolean isStrictMethodInvocation() {
            return strictDMI;
        }

        /**
         * Freezes and returns the built PackageConfig, then resets this builder to
         * a mutable copy of it so the builder can keep being used afterwards.
         */
        public PackageConfig build() {
            embalmTarget();
            PackageConfig result = target;
            target = new PackageConfig(result);
            return result;
        }

        /** Wraps all of target's collections unmodifiable, making it effectively immutable. */
        protected void embalmTarget() {
            target.actionConfigs = Collections.unmodifiableMap(target.actionConfigs);
            target.globalResultConfigs = Collections.unmodifiableMap(target.globalResultConfigs);
            target.interceptorConfigs = Collections.unmodifiableMap(target.interceptorConfigs);
            target.resultTypeConfigs = Collections.unmodifiableMap(target.resultTypeConfigs);
            target.globalExceptionMappingConfigs = Collections.unmodifiableList(target.globalExceptionMappingConfigs);
            target.parents = Collections.unmodifiableList(target.parents);
        }

        @Override
        public String toString() {
            return "[BUILDER] " + target.toString();
        }
    }
}
| |
package com.intellij.ide.util.treeView;
import com.intellij.ui.treeStructure.Tree;
import javax.annotation.Nonnull;
import consulo.disposer.Disposer;
import consulo.ui.annotation.RequiredUIAccess;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import java.util.ArrayList;
public abstract class AbstractTreeBuilderCrashTest extends BaseTreeTestCase {

    private AbstractTreeStructure myStructure;
    private DefaultTreeModel myTreeModel;
    private CachedNode myRoot;

    // NOTE(review): parameter name "yeild" is a typo for "yield" — kept as-is here
    // since a doc-only pass must not rename identifiers.
    protected AbstractTreeBuilderCrashTest(boolean yeild, boolean bg) {
        super(yeild, bg);
    }

    /** Element moves folder1 -> folder2; folder1 is refreshed before folder2. */
    public void testElementMovedButNodeIsStillInStructure() throws Exception {
        assertNodeMove(new Runnable() {
            @Override
            public void run() {
                getBuilder().addSubtreeToUpdateByElement(myRoot.getChild("folder1"));
                getBuilder().addSubtreeToUpdateByElement(myRoot.getChild("folder2"));
            }
        });
    }

    /** Same move as above, but the subtrees are refreshed in the opposite order. */
    public void testElementMovedButNodeIsStillInStructure2() throws Exception {
        assertNodeMove(new Runnable() {
            @Override
            public void run() {
                getBuilder().addSubtreeToUpdateByElement(myRoot.getChild("folder2"));
                getBuilder().addSubtreeToUpdateByElement(myRoot.getChild("folder1"));
            }
        });
    }

    /**
     * Builds root/{folder1/file, folder2}, selects "file", moves it from folder1 to
     * folder2 in the structure, runs {@code update}, and asserts the selection
     * followed the element to its new parent.
     */
    private void assertNodeMove(final Runnable update) throws Exception {
        myRoot = new CachedNode("root");
        final CachedNode folder1 = myRoot.addChild("folder1");
        final CachedNode file = folder1.addChild("file");
        final CachedNode folder2 = myRoot.addChild("folder2");
        initTree(myRoot);
        getBuilder().updateFromRoot();
        getBuilder().select(file, null);
        assertTree(
                "-root\n" +
                " -folder1\n" +
                " [file]\n" +
                " folder2\n");
        // Move the element in the underlying structure without telling the builder yet.
        folder1.myChildren.remove(file);
        folder2.addChild("file");
        update.run();
        assertTree(
                "-root\n" +
                " folder1\n" +
                " -folder2\n" +
                " [file]\n");
    }

    // NOTE(review): "Childen" is a typo for "Children"; renaming would change the
    // test method name, so it is preserved here.
    public void testNewUniqueChilden() throws Exception {
        // Flipping changes[0] swaps node1's identity (same equality string semantics
        // differ via the 4-arg ctor) and its children set.
        final boolean[] changes = new boolean[1];
        final Node root = new Node("root", null) {
            @Override
            Node[] getChildren() {
                if (changes[0]) {
                    return new Node[] {new Node("node1", this, "node1id", "changedNode1") {
                        @Override
                        Node[] getChildren() {
                            return new Node[] {new Node("newNode11", this, "node11id"), new Node("node13", this, "node13id")};
                        }
                    }, new Node("node2", this, "node2id")};
                } else {
                    return new Node[] {new Node("node1", this, "node1id") {
                        @Override
                        Node[] getChildren() {
                            return new Node[] {new Node("node11", this, "node11id"), new Node("node12", this, "node12id")};
                        }
                    }, new Node("node2", this, "node2id")};
                }
            }
        };
        initTree(root);
        updateFromRoot();
        assertTree("-root\n" +
                " -node1\n" +
                " node11\n" +
                " node12\n" +
                " node2\n");
        // Collapse and re-expand must not change the rendered tree.
        collapsePath(new TreePath(myTreeModel.getRoot()));
        getBuilder().expand(root, null);
        assertTree("-root\n" +
                " -node1\n" +
                " node11\n" +
                " node12\n" +
                " node2\n");
        updateFromRoot();
        assertTree("-root\n" +
                " -node1\n" +
                " node11\n" +
                " node12\n" +
                " node2\n");
        // After the structure change, node1's children must be fully replaced.
        changes[0] = true;
        updateFromRoot();
        assertTree("-root\n" +
                " -node1\n" +
                " newNode11\n" +
                " node13\n" +
                " node2\n");
    }

    /**
     * Wires a BaseStructure over {@code root}, a Swing tree/model, and a builder
     * that auto-expands everything and never disposes nodes on collapse.
     */
    private void initTree(final Node root) throws Exception {
        myStructure = new BaseStructure() {
            @Override
            public Object getRootElement() {
                return root;
            }
            @Override
            public Object[] doGetChildElements(final Object element) {
                return ((Node)element).getChildren();
            }
            @Override
            public Object getParentElement(final Object element) {
                return ((Node)element).getParent();
            }
            @Override
            @Nonnull
            public NodeDescriptor doCreateDescriptor(final Object element, final NodeDescriptor parentDescriptor) {
                // Elements double as their own descriptors (Node extends NodeDescriptor).
                return (NodeDescriptor)element;
            }
        };
        myTreeModel = new DefaultTreeModel(new DefaultMutableTreeNode());
        myTree = new Tree(myTreeModel);
        myTree.setRootVisible(false);
        initBuilder(new BaseTreeBuilder(myTree, myTreeModel, myStructure, AlphaComparator.INSTANCE) {
            @Override
            protected boolean isAutoExpandNode(final NodeDescriptor nodeDescriptor) {
                return true;
            }
            @Override
            protected boolean isDisposeOnCollapsing(final NodeDescriptor nodeDescriptor) {
                return false;
            }
        });
        Disposer.register(getRootDisposable(), getBuilder());
        showTree();
    }

    /**
     * Test node that is simultaneously the structure element and its descriptor.
     * Equality is by {@code myEqualityString}; hashCode is by {@code myId}.
     * NOTE(review): equals/hashCode use different fields — consistent only when
     * id == equalityString, which the tests deliberately violate in places.
     */
    class Node extends NodeDescriptor {
        Node myParent;
        String myId;
        String myEqualityString;
        String myComment;

        Node(String id, final Node parent) {
            this(id, parent, id, null);
        }

        Node(String id, final Node parent, String equalityString) {
            this(id, parent, equalityString, null);
        }

        Node(String id, final Node parent, String equalityString, String comment) {
            super(null, parent);
            myParent = parent;
            myId = id;
            myEqualityString = equalityString;
            myComment = comment;
        }

        Node getParent() {
            return myParent;
        }

        Node[] getChildren() {
            return new Node[0];
        }

        @RequiredUIAccess
        @Override
        public boolean update() {
            return false;
        }

        @Override
        public Object getElement() {
            return this;
        }

        @Override
        public String toString() {
            return myId;
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) return true;
            if (!(o instanceof Node)) return false;
            final Node node = (Node)o;
            if (!myEqualityString.equals(node.myEqualityString)) return false;
            return true;
        }

        @Override
        public int hashCode() {
            return myId.hashCode();
        }
    }

    /** Node whose children are stored in a mutable list (so tests can move elements). */
    class CachedNode extends Node {
        private final ArrayList<CachedNode> myChildren = new ArrayList<CachedNode>();

        CachedNode(final String id) {
            super(id, null);
        }

        CachedNode addChild(String id) {
            final CachedNode node = new CachedNode(id);
            myChildren.add(node);
            node.myParent = this;
            return node;
        }

        CachedNode getChild(String id) {
            for (CachedNode each : myChildren) {
                if (id.equals(each.myId)) return each;
            }
            return null;
        }

        @Override
        final Node[] getChildren() {
            return myChildren.toArray(new Node[myChildren.size()]);
        }
    }

    /** Concrete variant: no yielding, no background processing. */
    public abstract static class NoYieldNoBackground extends AbstractTreeBuilderCrashTest {
        public NoYieldNoBackground() {
            super(false, false);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.db.marshal;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.apache.cassandra.serializers.MarshalException;
import org.junit.Test;
import static org.junit.Assert.fail;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.composites.*;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.utils.*;
public class DynamicCompositeTypeTest extends SchemaLoader
{
    private static final String cfName = "StandardDynamicComposite";
    // Comparator under test: DynamicCompositeType with one-byte aliases for
    // bytes ('b'), reversed bytes ('B'), time UUID ('t'), reversed time UUID ('T').
    private static final DynamicCompositeType comparator;
    static
    {
        Map<Byte, AbstractType<?>> aliases = new HashMap<Byte, AbstractType<?>>();
        aliases.put((byte)'b', BytesType.instance);
        aliases.put((byte)'B', ReversedType.getInstance(BytesType.instance));
        aliases.put((byte)'t', TimeUUIDType.instance);
        aliases.put((byte)'T', ReversedType.getInstance(TimeUUIDType.instance));
        comparator = DynamicCompositeType.getInstance(aliases);
    }

    // Shared time UUIDs, generated sequentially at class load (presumably
    // time-ordered, so uuids[0] < uuids[1] < uuids[2] under TimeUUID comparison).
    private static final int UUID_COUNT = 3;
    private static final UUID[] uuids = new UUID[UUID_COUNT];
    static
    {
        for (int i = 0; i < UUID_COUNT; ++i)
            uuids[i] = UUIDGen.getTimeUUID();
    }
@Test
public void testEndOfComponent()
{
ByteBuffer[] cnames = {
createDynamicCompositeKey("test1", uuids[0], -1, false),
createDynamicCompositeKey("test1", uuids[1], 24, false),
createDynamicCompositeKey("test1", uuids[1], 42, false),
createDynamicCompositeKey("test1", uuids[1], 83, false),
createDynamicCompositeKey("test1", uuids[2], -1, false),
createDynamicCompositeKey("test1", uuids[2], 42, false),
};
ByteBuffer start = createDynamicCompositeKey("test1", uuids[1], -1, false);
ByteBuffer stop = createDynamicCompositeKey("test1", uuids[1], -1, true);
for (int i = 0; i < 1; ++i)
{
assert comparator.compare(start, cnames[i]) > 0;
assert comparator.compare(stop, cnames[i]) > 0;
}
for (int i = 1; i < 4; ++i)
{
assert comparator.compare(start, cnames[i]) < 0;
assert comparator.compare(stop, cnames[i]) > 0;
}
for (int i = 4; i < cnames.length; ++i)
{
assert comparator.compare(start, cnames[i]) < 0;
assert comparator.compare(stop, cnames[i]) < 0;
}
}
@Test
public void testGetString()
{
String test1Hex = ByteBufferUtil.bytesToHex(ByteBufferUtil.bytes("test1"));
ByteBuffer key = createDynamicCompositeKey("test1", uuids[1], 42, false);
assert comparator.getString(key).equals("b@" + test1Hex + ":t@" + uuids[1] + ":IntegerType@42");
key = createDynamicCompositeKey("test1", uuids[1], -1, true);
assert comparator.getString(key).equals("b@" + test1Hex + ":t@" + uuids[1] + ":!");
}
@Test
public void testFromString()
{
String test1Hex = ByteBufferUtil.bytesToHex(ByteBufferUtil.bytes("test1"));
ByteBuffer key = createDynamicCompositeKey("test1", uuids[1], 42, false);
assert key.equals(comparator.fromString("b@" + test1Hex + ":t@" + uuids[1] + ":IntegerType@42"));
key = createDynamicCompositeKey("test1", uuids[1], -1, true);
assert key.equals(comparator.fromString("b@" + test1Hex + ":t@" + uuids[1] + ":!"));
}
    /**
     * Exercises validate(): well-formed keys pass; a buffer whose position has
     * been advanced, a component whose declared length is wrong, and a non-time
     * UUID in a TimeUUID slot must all be rejected with MarshalException.
     */
    @Test
    public void testValidate()
    {
        // Full key: string + time-uuid + integer components.
        ByteBuffer key = createDynamicCompositeKey("test1", uuids[1], 42, false);
        comparator.validate(key);
        // Key with only the leading string component.
        key = createDynamicCompositeKey("test1", null, -1, false);
        comparator.validate(key);
        key = createDynamicCompositeKey("test1", uuids[2], -1, true);
        comparator.validate(key);
        key.get(); // make sure we're not aligned anymore
        try
        {
            comparator.validate(key);
            fail("Should not validate");
        }
        catch (MarshalException e) {}
        // Hand-built key whose trailing TimeUUID component declares 14 bytes of
        // payload (TimeUUID must be 16 or 0 bytes) and supplies none.
        key = ByteBuffer.allocate(5 + "test1".length() + 5 + 14);
        key.putShort((short) (0x8000 | 'b'));
        key.putShort((short) "test1".length());
        key.put(ByteBufferUtil.bytes("test1"));
        key.put((byte) 0);
        key.putShort((short) (0x8000 | 't'));
        key.putShort((short) 14);
        key.rewind();
        try
        {
            comparator.validate(key);
            fail("Should not validate");
        }
        catch (MarshalException e)
        {
            assert e.toString().contains("should be 16 or 0 bytes");
        }
        // A random (version 4) UUID is not a valid TimeUUID (version 1) value.
        key = createDynamicCompositeKey("test1", UUID.randomUUID(), 42, false);
        try
        {
            comparator.validate(key);
            fail("Should not validate");
        }
        catch (MarshalException e)
        {
            assert e.toString().contains("Invalid version for TimeUUID type");
        }
    }
    /**
     * End-to-end check: columns inserted out of order under the dynamic
     * composite comparator come back from the store sorted cname1..cname5.
     */
    @Test
    public void testFullRound() throws Exception
    {
        Keyspace keyspace = Keyspace.open("Keyspace1");
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(cfName);
        // Built in expected sort order; inserted below in scrambled order.
        ByteBuffer cname1 = createDynamicCompositeKey("test1", null, -1, false);
        ByteBuffer cname2 = createDynamicCompositeKey("test1", uuids[0], 24, false);
        ByteBuffer cname3 = createDynamicCompositeKey("test1", uuids[0], 42, false);
        ByteBuffer cname4 = createDynamicCompositeKey("test2", uuids[0], -1, false);
        ByteBuffer cname5 = createDynamicCompositeKey("test2", uuids[1], 42, false);
        ByteBuffer key = ByteBufferUtil.bytes("k");
        Mutation rm = new Mutation("Keyspace1", key);
        addColumn(rm, cname5);
        addColumn(rm, cname1);
        addColumn(rm, cname4);
        addColumn(rm, cname2);
        addColumn(rm, cname3);
        rm.apply();
        ColumnFamily cf = cfs.getColumnFamily(QueryFilter.getIdentityFilter(Util.dk("k"), cfName, System.currentTimeMillis()));
        Iterator<Cell> iter = cf.getSortedColumns().iterator();
        // Read-back must be in comparator order regardless of insertion order.
        assert iter.next().name().toByteBuffer().equals(cname1);
        assert iter.next().name().toByteBuffer().equals(cname2);
        assert iter.next().name().toByteBuffer().equals(cname3);
        assert iter.next().name().toByteBuffer().equals(cname4);
        assert iter.next().name().toByteBuffer().equals(cname5);
    }
    /**
     * Same end-to-end check as testFullRound but with reversed component aliases
     * (last createDynamicCompositeKey arg = true), so the expected read-back
     * order is inverted per component.
     */
    @Test
    public void testFullRoundReversed() throws Exception
    {
        Keyspace keyspace = Keyspace.open("Keyspace1");
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(cfName);
        ByteBuffer cname1 = createDynamicCompositeKey("test1", null, -1, false, true);
        ByteBuffer cname2 = createDynamicCompositeKey("test1", uuids[0], 24, false, true);
        ByteBuffer cname3 = createDynamicCompositeKey("test1", uuids[0], 42, false, true);
        ByteBuffer cname4 = createDynamicCompositeKey("test2", uuids[0], -1, false, true);
        ByteBuffer cname5 = createDynamicCompositeKey("test2", uuids[1], 42, false, true);
        ByteBuffer key = ByteBufferUtil.bytes("kr");
        Mutation rm = new Mutation("Keyspace1", key);
        addColumn(rm, cname5);
        addColumn(rm, cname1);
        addColumn(rm, cname4);
        addColumn(rm, cname2);
        addColumn(rm, cname3);
        rm.apply();
        ColumnFamily cf = cfs.getColumnFamily(QueryFilter.getIdentityFilter(Util.dk("kr"), cfName, System.currentTimeMillis()));
        Iterator<Cell> iter = cf.getSortedColumns().iterator();
        assert iter.next().name().toByteBuffer().equals(cname5);
        assert iter.next().name().toByteBuffer().equals(cname4);
        assert iter.next().name().toByteBuffer().equals(cname1); // null UUID < reversed value
        assert iter.next().name().toByteBuffer().equals(cname3);
        assert iter.next().name().toByteBuffer().equals(cname2);
    }
@Test
public void testUncomparableColumns()
{
    // Components of different types ('b' vs 't' aliases) cannot be
    // value-compared: the comparator must still order them deterministically
    // (bytes before uuid here) rather than throw.
    ByteBuffer bytesName = ByteBuffer.allocate(2 + 2 + 4 + 1);
    bytesName.putShort((short) (0x8000 | 'b'));
    bytesName.putShort((short) 4);
    bytesName.put(new byte[4]);
    bytesName.put((byte) 0);
    bytesName.rewind();

    ByteBuffer uuidName = ByteBuffer.allocate(2 + 2 + 16 + 1);
    uuidName.putShort((short) (0x8000 | 't'));
    uuidName.putShort((short) 16);
    uuidName.put(UUIDGen.decompose(uuids[0]));
    uuidName.put((byte) 0);
    uuidName.rewind();

    try
    {
        int cmp = comparator.compare(bytesName, uuidName);
        assert cmp == -1 : "Expecting bytes to sort before uuid, but got " + cmp;
    }
    catch (Exception e)
    {
        fail("Shouldn't throw exception");
    }
}
@Test
public void testUncomparableReversedColumns()
{
    // Same as testUncomparableColumns but with the uppercase (reversed)
    // aliases 'T' and 'B'; comparing in the opposite argument order must
    // yield the mirrored result without throwing.
    ByteBuffer uuidName = ByteBuffer.allocate(2 + 2 + 16 + 1);
    uuidName.putShort((short) (0x8000 | 'T'));
    uuidName.putShort((short) 16);
    uuidName.put(UUIDGen.decompose(uuids[0]));
    uuidName.put((byte) 0);
    uuidName.rewind();

    ByteBuffer bytesName = ByteBuffer.allocate(2 + 2 + 4 + 1);
    bytesName.putShort((short) (0x8000 | 'B'));
    bytesName.putShort((short) 4);
    bytesName.put(new byte[4]);
    bytesName.put((byte) 0);
    bytesName.rewind();

    try
    {
        int cmp = comparator.compare(uuidName, bytesName);
        assert cmp == 1 : "Expecting bytes to sort before uuid, but got " + cmp;
    }
    catch (Exception e)
    {
        fail("Shouldn't throw exception");
    }
}
/**
 * Verifies DynamicCompositeType schema-compatibility rules: adding aliases is
 * compatible, removing or retyping an existing alias is not.
 *
 * Fix: this method was missing the {@code @Test} annotation that every sibling
 * test method in this class carries, so JUnit 4 silently skipped it.
 */
@Test
public void testCompatibility() throws Exception
{
    // Identical and alias-superset definitions are compatible.
    assert TypeParser.parse("DynamicCompositeType()").isCompatibleWith(TypeParser.parse("DynamicCompositeType()"));
    assert TypeParser.parse("DynamicCompositeType(a => IntegerType)").isCompatibleWith(TypeParser.parse("DynamicCompositeType()"));
    assert TypeParser.parse("DynamicCompositeType(b => BytesType, a => IntegerType)").isCompatibleWith(TypeParser.parse("DynamicCompositeType(a => IntegerType)"));
    // Changing an alias' type or dropping an alias breaks compatibility.
    assert !TypeParser.parse("DynamicCompositeType(a => BytesType)").isCompatibleWith(TypeParser.parse("DynamicCompositeType(a => AsciiType)"));
    assert !TypeParser.parse("DynamicCompositeType(a => BytesType)").isCompatibleWith(TypeParser.parse("DynamicCompositeType(a => BytesType, b => AsciiType)"));
}
/**
 * Adds a cell named {@code cname} with an empty value and timestamp 0 to the
 * given mutation, under this test's column family ({@code cfName}).
 */
private void addColumn(Mutation rm, ByteBuffer cname)
{
    rm.add(cfName, CellNames.simpleDense(cname), ByteBufferUtil.EMPTY_BYTE_BUFFER, 0);
}
/**
 * Convenience overload: builds a non-reversed dynamic-composite name.
 */
private ByteBuffer createDynamicCompositeKey(String s, UUID uuid, int i, boolean lastIsOne)
{
    final boolean reversed = false;
    return createDynamicCompositeKey(s, uuid, i, lastIsOne, reversed);
}
/**
 * Serializes up to three components into a DynamicCompositeType-encoded name:
 * a string (alias 'b'), a TimeUUID (alias 't'), and an integer written with its
 * full comparator class name ("IntegerType" / "ReversedType(IntegerType)").
 *
 * Trailing components are omitted: {@code uuid == null} stops after the string,
 * {@code i == -1} stops after the UUID. {@code lastIsOne} sets the
 * end-of-component byte of the last written component to 1 instead of 0.
 * NOTE(review): assumes {@code s} is non-null — ByteBufferUtil.bytes(s) is
 * called before the null check, so a null s would NPE; callers here never pass null.
 */
private ByteBuffer createDynamicCompositeKey(String s, UUID uuid, int i, boolean lastIsOne,
                                             final boolean reversed)
{
    String intType = (reversed ? "ReversedType(IntegerType)" : "IntegerType");
    ByteBuffer bytes = ByteBufferUtil.bytes(s);
    int totalSize = 0;
    if (s != null)
    {
        // Aliased component: 2 (alias header) + 2 (length) + value + 1 (end-of-component byte).
        totalSize += 2 + 2 + bytes.remaining() + 1;
        if (uuid != null)
        {
            // Aliased TimeUUID component: fixed 16-byte value.
            totalSize += 2 + 2 + 16 + 1;
            if (i != -1)
            {
                // Fully-named component: 2 (name length) + name + 2 (value length) + 1 (value byte) + 1 (EOC).
                totalSize += 2 + intType.length() + 2 + 1 + 1;
            }
        }
    }
    ByteBuffer bb = ByteBuffer.allocate(totalSize);
    if (s != null)
    {
        // 0x8000 marks the two-byte alias form; uppercase alias means reversed.
        bb.putShort((short)(0x8000 | (reversed ? 'B' : 'b')));
        bb.putShort((short) bytes.remaining());
        bb.put(bytes);
        // End-of-component byte: 1 only when this is the last component and lastIsOne is set.
        bb.put(uuid == null && lastIsOne ? (byte)1 : (byte)0);
        if (uuid != null)
        {
            bb.putShort((short)(0x8000 | (reversed ? 'T' : 't')));
            bb.putShort((short) 16);
            bb.put(UUIDGen.decompose(uuid));
            bb.put(i == -1 && lastIsOne ? (byte)1 : (byte)0);
            if (i != -1)
            {
                // No 0x8000 bit: this component spells out the comparator class name.
                bb.putShort((short) intType.length());
                bb.put(ByteBufferUtil.bytes(intType));
                // We are putting a byte only because our test use ints that fit in a byte *and* IntegerType.fromString() will
                // return something compatible (i.e, putting a full int here would break 'fromStringTest')
                bb.putShort((short) 1);
                bb.put((byte)i);
                bb.put(lastIsOne ? (byte)1 : (byte)0);
            }
        }
    }
    bb.rewind();
    return bb;
}
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.KeyType;
import org.wso2.developerstudio.eclipse.gmf.esb.NamedEndpoint;
import org.wso2.developerstudio.eclipse.gmf.esb.NamedEndpointInputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.NamedEndpointOutputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Named Endpoint</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.NamedEndpointImpl#getInputConnector <em>Input Connector</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.NamedEndpointImpl#getOutputConnector <em>Output Connector</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.NamedEndpointImpl#getName <em>Name</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.NamedEndpointImpl#getReferringEndpointType <em>Referring Endpoint Type</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.NamedEndpointImpl#getDynamicReferenceKey <em>Dynamic Reference Key</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.NamedEndpointImpl#getStaticReferenceKey <em>Static Reference Key</em>}</li>
* </ul>
*
* @generated
*/
public class NamedEndpointImpl extends AbstractEndPointImpl implements NamedEndpoint {
    // NOTE(review): this is EMF-generated code; members marked @generated are
    // overwritten on model regeneration, so only @generated NOT members may be
    // hand-edited. Comments here are additions only — code is unchanged.
    /**
     * The cached value of the '{@link #getInputConnector() <em>Input Connector</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getInputConnector()
     * @generated
     * @ordered
     */
    protected NamedEndpointInputConnector inputConnector;

    /**
     * The cached value of the '{@link #getOutputConnector() <em>Output Connector</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOutputConnector()
     * @generated
     * @ordered
     */
    protected NamedEndpointOutputConnector outputConnector;

    /**
     * The default value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected static final String NAME_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected String name = NAME_EDEFAULT;

    /**
     * The default value of the '{@link #getReferringEndpointType() <em>Referring Endpoint Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getReferringEndpointType()
     * @generated
     * @ordered
     */
    protected static final KeyType REFERRING_ENDPOINT_TYPE_EDEFAULT = KeyType.STATIC;

    /**
     * The cached value of the '{@link #getReferringEndpointType() <em>Referring Endpoint Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getReferringEndpointType()
     * @generated
     * @ordered
     */
    protected KeyType referringEndpointType = REFERRING_ENDPOINT_TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getDynamicReferenceKey() <em>Dynamic Reference Key</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getDynamicReferenceKey()
     * @generated
     * @ordered
     */
    protected NamespacedProperty dynamicReferenceKey;

    /**
     * The cached value of the '{@link #getStaticReferenceKey() <em>Static Reference Key</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getStaticReferenceKey()
     * @generated
     * @ordered
     */
    protected RegistryKeyProperty staticReferenceKey;

    /**
     * <!-- begin-user-doc -->
     * Hand-written constructor (hence {@code @generated NOT}): pre-populates the
     * dynamic (XPath expression) and static (registry key) reference keys with
     * default placeholder values so the endpoint is editable immediately.
     * <!-- end-user-doc -->
     *
     * @generated NOT
     */
    protected NamedEndpointImpl() {
        super();
        NamespacedProperty dynamicKey = EsbFactoryImpl.eINSTANCE.createNamespacedProperty();
        dynamicKey.setPrettyName("Endpoint Expression");
        dynamicKey.setPropertyName("expression");
        dynamicKey.setPropertyValue("/default/expression");
        setDynamicReferenceKey(dynamicKey);
        RegistryKeyProperty staticKey = EsbFactoryImpl.eINSTANCE.createRegistryKeyProperty();
        staticKey.setKeyName("Endpoint Key");
        staticKey.setPrettyName("Endpoint Key");
        staticKey.setKeyValue("");
        setStaticReferenceKey(staticKey);
    }

    /**
     * <!-- begin-user-doc -->
     * Binds this implementation to its metamodel class for reflective EMF access.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return EsbPackage.Literals.NAMED_ENDPOINT;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NamedEndpointInputConnector getInputConnector() {
        return inputConnector;
    }

    /**
     * <!-- begin-user-doc -->
     * Swaps the containment reference without inverse bookkeeping; used by
     * {@link #setInputConnector} and {@link #eInverseRemove}.
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetInputConnector(NamedEndpointInputConnector newInputConnector,
            NotificationChain msgs) {
        NamedEndpointInputConnector oldInputConnector = inputConnector;
        inputConnector = newInputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR, oldInputConnector, newInputConnector);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * Containment setter: detaches the old connector, attaches the new one,
     * then dispatches the accumulated notifications in one chain.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setInputConnector(NamedEndpointInputConnector newInputConnector) {
        if (newInputConnector != inputConnector) {
            NotificationChain msgs = null;
            if (inputConnector != null)
                msgs = ((InternalEObject)inputConnector).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR, null, msgs);
            if (newInputConnector != null)
                msgs = ((InternalEObject)newInputConnector).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR, null, msgs);
            msgs = basicSetInputConnector(newInputConnector, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR, newInputConnector, newInputConnector));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NamedEndpointOutputConnector getOutputConnector() {
        return outputConnector;
    }

    /**
     * <!-- begin-user-doc -->
     * See {@link #basicSetInputConnector}: same pattern for the output connector.
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOutputConnector(NamedEndpointOutputConnector newOutputConnector,
            NotificationChain msgs) {
        NamedEndpointOutputConnector oldOutputConnector = outputConnector;
        outputConnector = newOutputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR, oldOutputConnector, newOutputConnector);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOutputConnector(NamedEndpointOutputConnector newOutputConnector) {
        if (newOutputConnector != outputConnector) {
            NotificationChain msgs = null;
            if (outputConnector != null)
                msgs = ((InternalEObject)outputConnector).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR, null, msgs);
            if (newOutputConnector != null)
                msgs = ((InternalEObject)newOutputConnector).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR, null, msgs);
            msgs = basicSetOutputConnector(newOutputConnector, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR, newOutputConnector, newOutputConnector));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getName() {
        return name;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setName(String newName) {
        String oldName = name;
        name = newName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__NAME, oldName, name));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public KeyType getReferringEndpointType() {
        return referringEndpointType;
    }

    /**
     * <!-- begin-user-doc -->
     * A null argument resets to the default (STATIC) rather than storing null.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setReferringEndpointType(KeyType newReferringEndpointType) {
        KeyType oldReferringEndpointType = referringEndpointType;
        referringEndpointType = newReferringEndpointType == null ? REFERRING_ENDPOINT_TYPE_EDEFAULT : newReferringEndpointType;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__REFERRING_ENDPOINT_TYPE, oldReferringEndpointType, referringEndpointType));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NamespacedProperty getDynamicReferenceKey() {
        return dynamicReferenceKey;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetDynamicReferenceKey(NamespacedProperty newDynamicReferenceKey,
            NotificationChain msgs) {
        NamespacedProperty oldDynamicReferenceKey = dynamicReferenceKey;
        dynamicReferenceKey = newDynamicReferenceKey;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY, oldDynamicReferenceKey, newDynamicReferenceKey);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setDynamicReferenceKey(NamespacedProperty newDynamicReferenceKey) {
        if (newDynamicReferenceKey != dynamicReferenceKey) {
            NotificationChain msgs = null;
            if (dynamicReferenceKey != null)
                msgs = ((InternalEObject)dynamicReferenceKey).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY, null, msgs);
            if (newDynamicReferenceKey != null)
                msgs = ((InternalEObject)newDynamicReferenceKey).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY, null, msgs);
            msgs = basicSetDynamicReferenceKey(newDynamicReferenceKey, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY, newDynamicReferenceKey, newDynamicReferenceKey));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public RegistryKeyProperty getStaticReferenceKey() {
        return staticReferenceKey;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetStaticReferenceKey(RegistryKeyProperty newStaticReferenceKey,
            NotificationChain msgs) {
        RegistryKeyProperty oldStaticReferenceKey = staticReferenceKey;
        staticReferenceKey = newStaticReferenceKey;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY, oldStaticReferenceKey, newStaticReferenceKey);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setStaticReferenceKey(RegistryKeyProperty newStaticReferenceKey) {
        if (newStaticReferenceKey != staticReferenceKey) {
            NotificationChain msgs = null;
            if (staticReferenceKey != null)
                msgs = ((InternalEObject)staticReferenceKey).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY, null, msgs);
            if (newStaticReferenceKey != null)
                msgs = ((InternalEObject)newStaticReferenceKey).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY, null, msgs);
            msgs = basicSetStaticReferenceKey(newStaticReferenceKey, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY, newStaticReferenceKey, newStaticReferenceKey));
    }

    /**
     * <!-- begin-user-doc -->
     * Reflective containment removal: clears whichever containment feature the
     * removed object occupied, delegating unknown features to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR:
                return basicSetInputConnector(null, msgs);
            case EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR:
                return basicSetOutputConnector(null, msgs);
            case EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY:
                return basicSetDynamicReferenceKey(null, msgs);
            case EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY:
                return basicSetStaticReferenceKey(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * Reflective feature read, dispatching on the feature ID.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR:
                return getInputConnector();
            case EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR:
                return getOutputConnector();
            case EsbPackage.NAMED_ENDPOINT__NAME:
                return getName();
            case EsbPackage.NAMED_ENDPOINT__REFERRING_ENDPOINT_TYPE:
                return getReferringEndpointType();
            case EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY:
                return getDynamicReferenceKey();
            case EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY:
                return getStaticReferenceKey();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * Reflective feature write, dispatching on the feature ID.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR:
                setInputConnector((NamedEndpointInputConnector)newValue);
                return;
            case EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR:
                setOutputConnector((NamedEndpointOutputConnector)newValue);
                return;
            case EsbPackage.NAMED_ENDPOINT__NAME:
                setName((String)newValue);
                return;
            case EsbPackage.NAMED_ENDPOINT__REFERRING_ENDPOINT_TYPE:
                setReferringEndpointType((KeyType)newValue);
                return;
            case EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY:
                setDynamicReferenceKey((NamespacedProperty)newValue);
                return;
            case EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY:
                setStaticReferenceKey((RegistryKeyProperty)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * Reflectively resets a feature to its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR:
                setInputConnector((NamedEndpointInputConnector)null);
                return;
            case EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR:
                setOutputConnector((NamedEndpointOutputConnector)null);
                return;
            case EsbPackage.NAMED_ENDPOINT__NAME:
                setName(NAME_EDEFAULT);
                return;
            case EsbPackage.NAMED_ENDPOINT__REFERRING_ENDPOINT_TYPE:
                setReferringEndpointType(REFERRING_ENDPOINT_TYPE_EDEFAULT);
                return;
            case EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY:
                setDynamicReferenceKey((NamespacedProperty)null);
                return;
            case EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY:
                setStaticReferenceKey((RegistryKeyProperty)null);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * Reports whether a feature differs from its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case EsbPackage.NAMED_ENDPOINT__INPUT_CONNECTOR:
                return inputConnector != null;
            case EsbPackage.NAMED_ENDPOINT__OUTPUT_CONNECTOR:
                return outputConnector != null;
            case EsbPackage.NAMED_ENDPOINT__NAME:
                return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
            case EsbPackage.NAMED_ENDPOINT__REFERRING_ENDPOINT_TYPE:
                return referringEndpointType != REFERRING_ENDPOINT_TYPE_EDEFAULT;
            case EsbPackage.NAMED_ENDPOINT__DYNAMIC_REFERENCE_KEY:
                return dynamicReferenceKey != null;
            case EsbPackage.NAMED_ENDPOINT__STATIC_REFERENCE_KEY:
                return staticReferenceKey != null;
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * Debug representation including only the simple attributes.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (name: ");
        result.append(name);
        result.append(", referringEndpointType: ");
        result.append(referringEndpointType);
        result.append(')');
        return result.toString();
    }

} // NamedEndpointImpl
| |
package sagan.team;
import java.util.Objects;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;

import org.hibernate.annotations.Type;
import org.springframework.util.StringUtils;
@Entity
public class MemberProfile {

    /** Null-object sentinel: returned by lookups instead of {@code null}. */
    public static final MemberProfile NOT_FOUND = new MemberProfile();

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @Column(nullable = true)
    private String name;

    @Column(nullable = true)
    private String jobTitle;

    @Column(nullable = true)
    private String location;

    @Column(nullable = true)
    @Type(type = "text")
    private String bio;

    @Column(nullable = true)
    private String avatarUrl;

    @Column(nullable = true)
    private String gravatarEmail;

    @Column(nullable = true)
    private String githubUsername;

    // Only non-nullable column besides the id; uniquely identifies the member.
    @Column(nullable = false)
    private String username;

    @Column(nullable = true)
    private String speakerdeckUsername;

    @Column(nullable = true)
    private String twitterUsername;

    @Column(nullable = true)
    private String gplusId;

    @Column(nullable = true)
    private String lanyrdUsername;

    @Column(nullable = true)
    private Long githubId;

    @Column
    private GeoLocation geoLocation;

    @Column
    @Type(type = "text")
    private String videoEmbeds;

    // When true the profile is excluded from public team pages.
    @Column
    private boolean hidden;

    public MemberProfile() {
    }

    /** For unit testing purposes */
    MemberProfile(Long id) {
        this.id = id;
    }

    public Long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Returns the display name, falling back to the username when unset. */
    public String getFullName() {
        return name == null ? getUsername() : name;
    }

    public void setJobTitle(String jobTitle) {
        this.jobTitle = jobTitle;
    }

    public String getJobTitle() {
        return jobTitle;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public String getBio() {
        return bio;
    }

    public void setBio(String bio) {
        this.bio = bio;
    }

    public String getAvatarUrl() {
        return avatarUrl;
    }

    public void setAvatarUrl(String avatarUrl) {
        this.avatarUrl = avatarUrl;
    }

    public String getGithubUsername() {
        return githubUsername;
    }

    public void setGithubUsername(String githubUsername) {
        this.githubUsername = githubUsername;
    }

    public boolean hasGithubUsername() {
        return !StringUtils.isEmpty(githubUsername);
    }

    public Long getGithubId() {
        return githubId;
    }

    public void setGithubId(Long githubId) {
        this.githubId = githubId;
    }

    public String getGravatarEmail() {
        return gravatarEmail;
    }

    public void setGravatarEmail(String gravatarEmail) {
        this.gravatarEmail = gravatarEmail;
    }

    public String getSpeakerdeckUsername() {
        return speakerdeckUsername;
    }

    public void setSpeakerdeckUsername(String speakerdeckUsername) {
        this.speakerdeckUsername = speakerdeckUsername;
    }

    public boolean hasSpeakerdeckUsername() {
        return !StringUtils.isEmpty(speakerdeckUsername);
    }

    public String getTwitterUsername() {
        return twitterUsername;
    }

    public void setTwitterUsername(String twitterUsername) {
        this.twitterUsername = twitterUsername;
    }

    public boolean hasTwitterUsername() {
        return !StringUtils.isEmpty(twitterUsername);
    }

    public String getLanyrdUsername() {
        return lanyrdUsername;
    }

    public void setLanyrdUsername(String lanyrdUsername) {
        this.lanyrdUsername = lanyrdUsername;
    }

    public boolean hasLanyrdUsername() {
        return !StringUtils.isEmpty(lanyrdUsername);
    }

    public String getGplusId() {
        return gplusId;
    }

    public void setGplusId(String gplusId) {
        this.gplusId = gplusId;
    }

    public boolean hasGplusId() {
        return !StringUtils.isEmpty(gplusId);
    }

    /** Returns the Twitter profile link, or null when no username is set. */
    public Link getTwitterLink() {
        if (StringUtils.isEmpty(getTwitterUsername())) {
            return null;
        }
        // NOTE(review): this is the only http:// (not https://) link builder
        // in the class — confirm whether that is intentional before changing.
        return new Link(String.format("http://twitter.com/%s", getTwitterUsername()), "@" + getTwitterUsername());
    }

    /** Returns the Speaker Deck profile link, or null when no username is set. */
    public Link getSpeakerdeckLink() {
        if (StringUtils.isEmpty(getSpeakerdeckUsername())) {
            return null;
        }
        String pathAndHost = String.format("speakerdeck.com/%s", getSpeakerdeckUsername());
        return new Link("https://" + pathAndHost, pathAndHost);
    }

    /** Returns the GitHub profile link, or null when no username is set. */
    public Link getGithubLink() {
        if (StringUtils.isEmpty(getGithubUsername())) {
            return null;
        }
        String pathAndHost = String.format("github.com/%s", getGithubUsername());
        return new Link("https://" + pathAndHost, pathAndHost);
    }

    /** Returns the Lanyrd profile link, or null when no username is set. */
    public Link getLanyrdLink() {
        if (StringUtils.isEmpty(getLanyrdUsername())) {
            return null;
        }
        String pathAndHost = String.format("lanyrd.com/profile/%s", getLanyrdUsername());
        return new Link("https://" + pathAndHost, pathAndHost);
    }

    /** Returns the Google+ profile link, or null when no id is set. */
    public Link getGplusLink() {
        if (StringUtils.isEmpty(getGplusId())) {
            return null;
        }
        String pathAndHost = String.format("plus.google.com/%s", getGplusId());
        return new Link("https://" + pathAndHost, pathAndHost);
    }

    public GeoLocation getGeoLocation() {
        return geoLocation;
    }

    public void setGeoLocation(GeoLocation geoLocation) {
        this.geoLocation = geoLocation;
    }

    /** Returns the map marker for this member, or null when no geo location is set. */
    public TeamLocation getTeamLocation() {
        if (geoLocation == null)
            return null;
        return new TeamLocation(name, geoLocation.getLatitude(), geoLocation.getLongitude(), getId());
    }

    public String getVideoEmbeds() {
        return videoEmbeds;
    }

    public void setVideoEmbeds(String videoEmbeds) {
        this.videoEmbeds = videoEmbeds;
    }

    public boolean isHidden() {
        return hidden;
    }

    public void setHidden(boolean hidden) {
        this.hidden = hidden;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    /**
     * Field-by-field equality over all persistent state.
     * Rewritten with {@link Objects#equals} — same contract as the previous
     * hand-rolled chain, but null-safe for {@code username} as well (the old
     * code threw NPE when comparing a profile whose username was not yet set).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        MemberProfile that = (MemberProfile) o;
        return hidden == that.hidden
                && Objects.equals(id, that.id)
                && Objects.equals(name, that.name)
                && Objects.equals(jobTitle, that.jobTitle)
                && Objects.equals(location, that.location)
                && Objects.equals(bio, that.bio)
                && Objects.equals(avatarUrl, that.avatarUrl)
                && Objects.equals(gravatarEmail, that.gravatarEmail)
                && Objects.equals(githubUsername, that.githubUsername)
                && Objects.equals(githubId, that.githubId)
                && Objects.equals(username, that.username)
                && Objects.equals(speakerdeckUsername, that.speakerdeckUsername)
                && Objects.equals(twitterUsername, that.twitterUsername)
                && Objects.equals(lanyrdUsername, that.lanyrdUsername)
                && Objects.equals(gplusId, that.gplusId)
                && Objects.equals(geoLocation, that.geoLocation)
                && Objects.equals(videoEmbeds, that.videoEmbeds);
    }

    /**
     * Hash over the same fields as {@link #equals(Object)}. Exact hash values
     * differ from the previous manual 31-multiplier implementation; only the
     * equals/hashCode contract is preserved (do not persist hash values).
     */
    @Override
    public int hashCode() {
        return Objects.hash(id, name, jobTitle, location, bio, avatarUrl, gravatarEmail,
                githubUsername, username, speakerdeckUsername, twitterUsername,
                lanyrdUsername, githubId, gplusId, geoLocation, videoEmbeds, hidden);
    }
}
| |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.commands.expressions.types.dtable;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.api.definition.v1_1.DecisionRule;
import org.kie.workbench.common.dmn.api.definition.v1_1.DecisionTable;
import org.kie.workbench.common.dmn.api.definition.v1_1.InputClause;
import org.kie.workbench.common.dmn.api.definition.v1_1.UnaryTests;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableUIModelMapper;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableUIModelMapperHelper;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DescriptionColumn;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.InputClauseColumn;
import org.kie.workbench.common.dmn.client.widgets.grid.controls.list.ListSelectorView;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridData;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.command.CanvasCommandResultBuilder;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.command.Command;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandResultBuilder;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.uberfire.ext.wires.core.grids.client.model.GridData;
import org.uberfire.ext.wires.core.grids.client.widget.grid.columns.RowNumberColumn;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
/**
 * Unit tests for {@code DeleteInputClauseCommand}: removing an input clause (and the
 * corresponding {@code UnaryTests} cell from every rule) of a DMN decision table.
 * Covers both the graph (model) command and the canvas (UI) command, including undo.
 */
@RunWith(MockitoJUnitRunner.class)
public class DeleteInputClauseCommandTest {

    @Mock
    private RowNumberColumn uiRowNumberColumn;

    @Mock
    private InputClauseColumn uiInputClauseColumn;

    @Mock
    private DescriptionColumn uiDescriptionColumn;

    @Mock
    private ListSelectorView.Presenter listSelector;

    @Mock
    private AbstractCanvasHandler canvasHandler;

    @Mock
    private GraphCommandExecutionContext graphCommandExecutionContext;

    @Mock
    private org.uberfire.mvp.Command canvasOperation;

    // Real (non-mock) model objects the command under test mutates.
    private DecisionTable dtable;

    private InputClause inputClause;

    private GridData uiModel;

    private DecisionTableUIModelMapper uiModelMapper;

    private DeleteInputClauseCommand command;

    @Before
    public void setup() {
        // Decision table with a single input clause, mirrored by a three-column
        // UI model: row number, the input clause column, description.
        this.dtable = new DecisionTable();
        this.inputClause = new InputClause();
        this.dtable.getInput().add(inputClause);
        this.uiModel = new DMNGridData();
        this.uiModel.appendColumn(uiRowNumberColumn);
        this.uiModel.appendColumn(uiInputClauseColumn);
        this.uiModel.appendColumn(uiDescriptionColumn);
        this.uiModelMapper = new DecisionTableUIModelMapper(() -> uiModel,
                                                           () -> Optional.of(dtable),
                                                           listSelector);
        // Default command targets the first column after the row-number column,
        // i.e. the single input clause column appended above.
        makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
        doReturn(0).when(uiRowNumberColumn).getIndex();
        doReturn(1).when(uiInputClauseColumn).getIndex();
        doReturn(2).when(uiDescriptionColumn).getIndex();
    }

    // Spy so interactions such as updateParentInformation() can be verified.
    private void makeCommand(final int uiColumnIndex) {
        this.command = spy(new DeleteInputClauseCommand(dtable,
                                                        uiModel,
                                                        uiColumnIndex,
                                                        uiModelMapper,
                                                        canvasOperation));
    }

    @Test
    public void testGraphCommandAllow() throws Exception {
        final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     graphCommand.allow(graphCommandExecutionContext));
    }

    @Test
    public void testGraphCommandCheck() throws Exception {
        // NOTE(review): this asserts allow(), byte-identical to testGraphCommandAllow
        // above; presumably intended to exercise a different check path — confirm intent.
        final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     graphCommand.allow(graphCommandExecutionContext));
    }

    @Test
    public void testGraphCommandExecuteRemoveMiddle() {
        // Three input clauses (first, inputClause, last); the command is aimed at
        // the middle one. Each rule carries one UnaryTests value per clause.
        final InputClause firstInput = mock(InputClause.class);
        final InputClause lastInput = mock(InputClause.class);
        dtable.getInput().add(0, firstInput);
        dtable.getInput().add(lastInput);
        final UnaryTests inputOneValue = mock(UnaryTests.class);
        final UnaryTests inputTwoValue = mock(UnaryTests.class);
        final UnaryTests inputThreeValue = mock(UnaryTests.class);
        final DecisionRule rule = new DecisionRule();
        rule.getInputEntry().add(inputOneValue);
        rule.getInputEntry().add(inputTwoValue);
        rule.getInputEntry().add(inputThreeValue);
        dtable.getRule().add(rule);
        // Re-target the command at the middle (second) input clause column.
        makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT + 1);
        final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     graphCommand.execute(graphCommandExecutionContext));
        // The middle clause and the middle cell of the rule must be gone;
        // neighbours keep their relative order.
        assertEquals(2,
                     dtable.getInput().size());
        assertEquals(firstInput,
                     dtable.getInput().get(0));
        assertEquals(lastInput,
                     dtable.getInput().get(1));
        assertEquals(2,
                     dtable.getRule().get(0).getInputEntry().size());
        assertEquals(inputOneValue,
                     dtable.getRule().get(0).getInputEntry().get(0));
        assertEquals(inputThreeValue,
                     dtable.getRule().get(0).getInputEntry().get(1));
    }

    @Test
    public void testGraphCommandExecute() {
        final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     graphCommand.execute(graphCommandExecutionContext));
        // The only input clause was deleted.
        assertEquals(0,
                     dtable.getInput().size());
    }

    @Test
    public void testGraphCommandExecuteAndThenUndo() {
        final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
        graphCommand.execute(graphCommandExecutionContext);
        assertEquals(0,
                     dtable.getInput().size());
        // Undo must restore the original clause instance, not a copy.
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     graphCommand.undo(graphCommandExecutionContext));
        assertEquals(1,
                     dtable.getInput().size());
        assertEquals(inputClause,
                     dtable.getInput().get(0));
    }

    @Test
    public void testCanvasCommandAllow() throws Exception {
        final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     canvasCommand.allow(canvasHandler));
    }

    @Test
    public void testCanvasCommandExecute() throws Exception {
        final Command<AbstractCanvasHandler, CanvasViolation> canvasAddRuleCommand = command.newCanvasCommand(canvasHandler);
        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     canvasAddRuleCommand.execute(canvasHandler));
        // The input clause column is removed from the UI model and the canvas
        // refresh callback plus parent bookkeeping are triggered.
        assertThat(uiModel.getColumns()).containsOnly(uiRowNumberColumn,
                                                      uiDescriptionColumn);
        verify(canvasOperation).execute();
        verify(command).updateParentInformation();
    }

    @Test
    public void testCanvasCommandExecuteAndThenUndo() throws Exception {
        final Command<AbstractCanvasHandler, CanvasViolation> canvasAddRuleCommand = command.newCanvasCommand(canvasHandler);
        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     canvasAddRuleCommand.execute(canvasHandler));
        assertThat(uiModel.getColumns()).containsOnly(uiRowNumberColumn,
                                                      uiDescriptionColumn);
        // Reset so the verify() calls below observe only the undo interactions.
        reset(canvasOperation, command);
        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     canvasAddRuleCommand.undo(canvasHandler));
        // Undo reinstates the column in its original position.
        assertThat(uiModel.getColumns()).containsOnly(uiRowNumberColumn,
                                                      uiInputClauseColumn,
                                                      uiDescriptionColumn);
        verify(canvasOperation).execute();
        verify(command).updateParentInformation();
    }
}
| |
/*
* IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Copyright 2004 Klaus Bartz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.util.helper;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import com.izforge.izpack.api.adaptator.IXMLElement;
import com.izforge.izpack.api.adaptator.IXMLParser;
import com.izforge.izpack.api.adaptator.impl.XMLParser;
import com.izforge.izpack.api.exception.InstallerException;
import com.izforge.izpack.api.exception.ResourceNotFoundException;
import com.izforge.izpack.api.resource.Resources;
import com.izforge.izpack.api.substitutor.VariableSubstitutor;
/**
 * This class contains some helper methods to simplify handling of xml specification files.
 *
 * @author Klaus Bartz
 */
public class SpecHelper
{

    public static final String YES = "yes";

    public static final String NO = "no";

    // Resource name of the last successfully read specification; used in
    // parse-error messages. Null until readSpec(String[, ...]) succeeds.
    private String specFilename;

    // Root element of the parsed specification; null until a spec was read.
    private IXMLElement spec;

    // Whether a specification was successfully located and parsed.
    private boolean _haveSpec;

    /**
     * The resources.
     */
    private final Resources resources;

    private static final String PACK_KEY = "pack";

    private static final String PACK_NAME = "name";

    /**
     * Constructs a <tt>SpecHelper</tt>.
     *
     * @param resources the resources
     */
    public SpecHelper(Resources resources)
    {
        super();
        this.resources = resources;
    }

    /*--------------------------------------------------------------------------*/
    /**
     * Reads the XML specification given by the file name. The result is stored in spec.
     *
     * @param specFileName resource name of the specification to read
     * @throws Exception for any problems in reading the specification
     */
    /*--------------------------------------------------------------------------*/
    public void readSpec(String specFileName) throws Exception
    {
        readSpec(specFileName, null);
    }

    /*--------------------------------------------------------------------------*/
    /**
     * Reads the XML specification given by the file name. The result is stored in spec.
     * A missing resource is not an error: {@link #haveSpec()} simply returns false.
     *
     * @param specFileName resource name of the specification to read
     * @param substitutor  optional variable substitutor applied to the raw stream; may be null
     * @throws Exception for any problems in reading the specification
     */
    /*--------------------------------------------------------------------------*/
    public void readSpec(String specFileName, VariableSubstitutor substitutor) throws Exception
    {
        // Open an input stream; any failure to locate the resource means
        // "no specification", not a hard error.
        InputStream input;
        try
        {
            input = getResource(specFileName);
        }
        catch (Exception exception)
        {
            _haveSpec = false;
            return;
        }
        if (input == null)
        {
            _haveSpec = false;
            return;
        }
        try
        {
            readSpec(input, substitutor);
        }
        finally
        {
            // FIX: close in finally so the stream is not leaked when parsing
            // (or substitution) throws; previously close() was skipped on error.
            input.close();
        }
        this.specFilename = specFileName;
    }

    /*--------------------------------------------------------------------------*/
    /**
     * Reads the XML specification given by the input stream. The result is stored in spec.
     *
     * @param input stream to read the specification from; not closed by this method
     * @throws Exception for any problems in reading the specification
     */
    /*--------------------------------------------------------------------------*/
    public void readSpec(InputStream input) throws Exception
    {
        readSpec(input, null);
    }

    /*--------------------------------------------------------------------------*/
    /**
     * Reads the XML specification given by the input stream. The result is stored in spec.
     *
     * @param input       stream to read the specification from; not closed by this method
     * @param substitutor optional variable substitutor applied first; may be null
     * @throws Exception for any problems in reading the specification
     */
    /*--------------------------------------------------------------------------*/
    public void readSpec(InputStream input, VariableSubstitutor substitutor) throws Exception
    {
        // first try to substitute the variables
        if (substitutor != null)
        {
            input = substituteVariables(input, substitutor);
        }
        // initialize the parser
        IXMLParser parser = new XMLParser();
        // get the data
        spec = parser.parse(input);
        _haveSpec = true;
    }

    /**
     * Gets the stream to a resource.
     *
     * @param res The resource id.
     * @return The resource value, null if not found
     */
    public InputStream getResource(String res)
    {
        try
        {
            return resources.getInputStream(res);
        }
        catch (ResourceNotFoundException exception)
        {
            return null;
        }
    }

    /**
     * Returns a XML element which represents the pack for the given name.
     *
     * @param packDestName name of the pack which should be returned
     * @return a XML element which represents the pack for the given name, or null if not found
     */
    public IXMLElement getPackForName(String packDestName)
    {
        List<IXMLElement> packs = getSpec().getChildrenNamed(PACK_KEY);
        if (packs == null)
        {
            return (null);
        }
        for (IXMLElement pack : packs)
        {
            String packName = pack.getAttribute(PACK_NAME);
            // FIX: compare with the known non-null argument so a <pack> element
            // without a "name" attribute is skipped instead of throwing an NPE.
            if (packDestName.equals(packName))
            {
                return (pack);
            }
        }
        return (null);
    }

    /**
     * Create parse error with consistent messages. Includes file name and line # of parent. It is
     * an error for 'parent' to be null.
     *
     * @param parent  The element in which the error occured
     * @param message Brief message explaining error
     * @throws InstallerException always; carries the formatted location and message
     */
    public void parseError(IXMLElement parent, String message) throws InstallerException
    {
        throw new InstallerException(specFilename + ":" + parent.getLineNr() + ": " + message);
    }

    /**
     * Returns true if a specification exist, else false.
     *
     * @return true if a specification exist, else false
     */
    public boolean haveSpec()
    {
        return _haveSpec;
    }

    /**
     * Returns the specification.
     *
     * @return the specification
     */
    public IXMLElement getSpec()
    {
        return spec;
    }

    /**
     * Sets the specification to the given XML element.
     *
     * @param element the new specification root
     */
    public void setSpec(IXMLElement element)
    {
        spec = element;
    }

    /**
     * Returns a list with all leafs of the tree which is described with childdef.
     *
     * @param root     the IXMLElement which is the current root for the search
     * @param childdef a String array which describes the tree; the last element contains the leaf
     *                 name
     * @return a list of XMLElements of all leafs found under root, or null if none
     */
    public List<IXMLElement> getAllSubChildren(IXMLElement root, String[] childdef)
    {
        return (getSubChildren(root, childdef, 0));
    }

    /**
     * Returns a list with all leafs of the tree which is described with childdef beginning at the
     * given depth.
     *
     * @param root     the IXMLElement which is the current root for the search
     * @param childdef a String array which describes the tree; the last element contains the leaf
     *                 name
     * @param depth    depth to start in childdef
     * @return a list of XMLElements of all leafs found under root, or null if none
     */
    private List<IXMLElement> getSubChildren(IXMLElement root, String[] childdef, int depth)
    {
        List<IXMLElement> retval = null;
        List<IXMLElement> children = root != null ? root.getChildrenNamed(childdef[depth]) : null;
        if (children == null)
        {
            return (null);
        }
        if (depth < childdef.length - 1)
        {
            // Not yet at leaf level: recurse one level down for every child and
            // accumulate the leaf elements found.
            for (IXMLElement child : children)
            {
                List<IXMLElement> childLeafs = getSubChildren(child, childdef, depth + 1);
                if (childLeafs != null)
                {
                    if (retval == null)
                    {
                        retval = new ArrayList<IXMLElement>();
                    }
                    retval.addAll(childLeafs);
                }
            }
        }
        else
        {
            // childdef[depth] is the leaf name itself.
            return (children);
        }
        return (retval);
    }

    /**
     * Creates a temp file into which the substitutor writes the substituted contents of input;
     * closes it and (re)opens it as FileInputStream. The file will be deleted on exit.
     *
     * @param input       the opened input stream which contents should be substituted
     * @param substitutor substitutor which should substitute the contents of input
     * @return a file input stream of the created temporary file
     * @throws Exception for any I/O or substitution problem
     */
    public InputStream substituteVariables(InputStream input, VariableSubstitutor substitutor)
            throws Exception
    {
        File tempFile = File.createTempFile("izpacksubs", "");
        FileOutputStream fos = null;
        tempFile.deleteOnExit();
        try
        {
            fos = new FileOutputStream(tempFile);
            substitutor.substitute(input, fos, null, "UTF-8");
        }
        finally
        {
            if (fos != null)
            {
                fos.close();
            }
        }
        return new FileInputStream(tempFile);
    }

    /**
     * Returns whether the value to the given attribute is "yes" or not. If the attribute does not
     * exist, or the value is not "yes" and not "no", the default value is returned.
     *
     * @param element      the XML element which contains the attribute
     * @param attribute    the name of the attribute
     * @param defaultValue the default value
     * @return whether the value to the given attribute is "yes" or not
     */
    public boolean isAttributeYes(IXMLElement element, String attribute, boolean defaultValue)
    {
        String value = element.getAttribute(attribute, (defaultValue ? YES : NO));
        if (value.equalsIgnoreCase(YES))
        {
            return true;
        }
        if (value.equalsIgnoreCase(NO))
        {
            return false;
        }
        return defaultValue;
    }

    /**
     * Returns the attribute for the given attribute name. If no attribute exist, an
     * InstallerException with a detail message is thrown.
     *
     * @param element  XML element which should contain the attribute
     * @param attrName key of the attribute
     * @return the attribute as string
     * @throws InstallerException if the attribute is missing
     */
    public String getRequiredAttribute(IXMLElement element, String attrName)
            throws InstallerException
    {
        String attr = element.getAttribute(attrName);
        if (attr == null)
        {
            // parseError always throws, so the return below is only reached
            // when the attribute is present.
            parseError(element, "<" + element.getName() + "> requires attribute '" + attrName
                    + "'.");
        }
        return (attr);
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* User: anna
* Date: 11-Nov-2008
*/
package org.jetbrains.idea.eclipse.conversion;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Function;
import gnu.trove.THashMap;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.eclipse.ConversionException;
import org.jetbrains.idea.eclipse.EclipseModuleManager;
import org.jetbrains.idea.eclipse.EclipseXml;
import org.jetbrains.idea.eclipse.IdeaXml;
import org.jetbrains.idea.eclipse.config.EclipseModuleManagerImpl;
import java.util.Arrays;
import java.util.Map;
/**
 * Serializes an IDEA module's dependency model back into an Eclipse {@code .classpath}
 * document. Entries read from the previous version of the file are remembered
 * (myOldEntries) and re-used so attributes Eclipse maintains, but IDEA does not
 * model, survive a round trip.
 */
public class EclipseClasspathWriter {
    public static final Logger LOG = Logger.getInstance(EclipseClasspathWriter.class);

    // Keyed by kind + path (all JRE-container paths collapse to a single key,
    // see getJREKey) -> the original <classpathentry> element from the old file.
    private final Map<String, Element> myOldEntries = new THashMap<String, Element>();

    /**
     * Builds the full <classpath> element for the given module model.
     *
     * @param oldRoot previously read <classpath> root, or null on first write;
     *                its entries are cached for attribute preservation
     * @param model   the module's root model to serialize
     * @return the new <classpath> element
     */
    @NotNull
    public Element writeClasspath(@Nullable Element oldRoot, @NotNull ModuleRootModel model) {
        Element classpathElement = new Element(EclipseXml.CLASSPATH_TAG);
        if (oldRoot != null) {
            for (Element oldChild : oldRoot.getChildren(EclipseXml.CLASSPATHENTRY_TAG)) {
                String oldKind = oldChild.getAttributeValue(EclipseXml.KIND_ATTR);
                String oldPath = oldChild.getAttributeValue(EclipseXml.PATH_ATTR);
                myOldEntries.put(oldKind + getJREKey(oldPath), oldChild);
            }
        }
        // One classpath entry per order entry (sources, modules, libraries, JDK).
        for (OrderEntry orderEntry : model.getOrderEntries()) {
            createClasspathEntry(orderEntry, classpathElement, model);
        }
        // Determine the "output" entry: Eclipse linked variable path wins,
        // otherwise the compiler output collapsed relative to the content root.
        String outputPath = "bin";
        final String compilerOutputUrl = model.getModuleExtension(CompilerModuleExtension.class).getCompilerOutputUrl();
        final EclipseModuleManager eclipseModuleManager = EclipseModuleManagerImpl.getInstance(model.getModule());
        final String linkedPath = eclipseModuleManager.getEclipseLinkedVarPath(compilerOutputUrl);
        if (linkedPath != null) {
            outputPath = linkedPath;
        }
        else {
            VirtualFile contentRoot = EPathUtil.getContentRoot(model);
            VirtualFile output = model.getModuleExtension(CompilerModuleExtension.class).getCompilerOutputPath();
            if (contentRoot != null && output != null && VfsUtilCore.isAncestor(contentRoot, output, false)) {
                outputPath = EPathUtil.collapse2EclipsePath(output.getUrl(), model);
            }
            else if (output == null && compilerOutputUrl != null) {
                outputPath = EPathUtil.collapse2EclipsePath(compilerOutputUrl, model);
            }
        }
        // Re-emit "con" entries IDEA could not interpret, at their recorded position.
        for (String support : eclipseModuleManager.getUsedCons()) {
            addOrderEntry(EclipseXml.CON_KIND, support, classpathElement, eclipseModuleManager.getSrcPlace(support));
        }
        setAttributeIfAbsent(addOrderEntry(EclipseXml.OUTPUT_KIND, outputPath, classpathElement), EclipseXml.PATH_ATTR, EclipseXml.BIN_DIR);
        return classpathElement;
    }

    /**
     * Converts a single IDEA order entry into the matching Eclipse classpathentry.
     * Handles, in order: module sources, inter-module dependencies, libraries
     * (module-level and project/global), and JDK entries.
     *
     * @throws ConversionException for an order-entry type that cannot be mapped
     */
    private void createClasspathEntry(@NotNull OrderEntry entry, @NotNull Element classpathRoot, @NotNull final ModuleRootModel model) throws ConversionException {
        EclipseModuleManager eclipseModuleManager = EclipseModuleManagerImpl.getInstance(entry.getOwnerModule());
        if (entry instanceof ModuleSourceOrderEntry) {
            // Source folders: one "src" entry per folder, optionally placed at
            // the index recorded when the classpath was originally read.
            boolean shouldPlaceSeparately = eclipseModuleManager.isExpectedModuleSourcePlace(Arrays.binarySearch(model.getOrderEntries(), entry));
            for (ContentEntry contentEntry : model.getContentEntries()) {
                VirtualFile contentRoot = contentEntry.getFile();
                for (SourceFolder sourceFolder : contentEntry.getSourceFolders()) {
                    String srcUrl = sourceFolder.getUrl();
                    String relativePath = EPathUtil.collapse2EclipsePath(srcUrl, model);
                    if (!Comparing.equal(contentRoot, EPathUtil.getContentRoot(model))) {
                        // Folder lives outside the primary content root: prefer
                        // the Eclipse linked-resource variable path if one exists.
                        String linkedPath = EclipseModuleManagerImpl.getInstance(entry.getOwnerModule()).getEclipseLinkedSrcVariablePath(srcUrl);
                        if (linkedPath != null) {
                            relativePath = linkedPath;
                        }
                    }
                    int index = eclipseModuleManager.getSrcPlace(srcUrl);
                    addOrderEntry(EclipseXml.SRC_KIND, relativePath, classpathRoot, shouldPlaceSeparately && index != -1 ? index : -1);
                }
            }
        }
        else if (entry instanceof ModuleOrderEntry) {
            // Module dependency: Eclipse models it as a "src" entry whose path is
            // "/<module name>".
            Element orderEntry = addOrderEntry(EclipseXml.SRC_KIND, '/' + ((ModuleOrderEntry)entry).getModuleName(), classpathRoot);
            setAttributeIfAbsent(orderEntry, EclipseXml.COMBINEACCESSRULES_ATTR, EclipseXml.FALSE_VALUE);
            setExported(orderEntry, ((ExportableOrderEntry)entry));
        }
        else if (entry instanceof LibraryOrderEntry) {
            final LibraryOrderEntry libraryOrderEntry = (LibraryOrderEntry)entry;
            final String libraryName = libraryOrderEntry.getLibraryName();
            if (libraryOrderEntry.isModuleLevel()) {
                final String[] files = libraryOrderEntry.getRootUrls(OrderRootType.CLASSES);
                if (files.length > 0) {
                    if (libraryName != null &&
                        libraryName.contains(IdeaXml.JUNIT) &&
                        Comparing.strEqual(files[0], EclipseClasspathReader.getJunitClsUrl(libraryName.contains("4")))) {
                        // A JUnit library IDEA created from the bundled jar maps
                        // back to Eclipse's JUnit container.
                        final Element orderEntry =
                                addOrderEntry(EclipseXml.CON_KIND, EclipseXml.JUNIT_CONTAINER + "/" + libraryName.substring(IdeaXml.JUNIT.length()),
                                              classpathRoot);
                        setExported(orderEntry, libraryOrderEntry);
                    }
                    else {
                        // Resolution order for the classes root: known Eclipse
                        // variable path -> linked variable path -> try to create
                        // a variable path for a newly added library -> plain path.
                        boolean newVarLibrary = false;
                        boolean link = false;
                        String eclipseVariablePath = eclipseModuleManager.getEclipseVariablePath(files[0]);
                        if (eclipseVariablePath == null) {
                            eclipseVariablePath = eclipseModuleManager.getEclipseLinkedVarPath(files[0]);
                            link = eclipseVariablePath != null;
                        }
                        if (eclipseVariablePath == null && !eclipseModuleManager.isEclipseLibUrl(files[0])) { //new library was added
                            newVarLibrary = true;
                            eclipseVariablePath = EPathUtil.collapse2EclipseVariabledPath(libraryOrderEntry, OrderRootType.CLASSES);
                        }
                        Element orderEntry;
                        if (eclipseVariablePath != null) {
                            orderEntry = addOrderEntry(link ? EclipseXml.LIB_KIND : EclipseXml.VAR_KIND, eclipseVariablePath, classpathRoot);
                        }
                        else {
                            LOG.assertTrue(!StringUtil.isEmptyOrSpaces(files[0]), "Library: " + libraryName);
                            orderEntry = addOrderEntry(EclipseXml.LIB_KIND, EPathUtil.collapse2EclipsePath(files[0], model), classpathRoot);
                        }
                        // Mirror the same variable-path resolution for the source
                        // attachment (sourcepath attribute).
                        final String srcRelativePath;
                        String eclipseSrcVariablePath = null;
                        boolean addSrcRoots = true;
                        String[] srcFiles = libraryOrderEntry.getRootUrls(OrderRootType.SOURCES);
                        if (srcFiles.length == 0) {
                            srcRelativePath = null;
                        }
                        else {
                            final String srcFile = srcFiles[0];
                            srcRelativePath = EPathUtil.collapse2EclipsePath(srcFile, model);
                            if (eclipseVariablePath != null) {
                                eclipseSrcVariablePath = eclipseModuleManager.getEclipseSrcVariablePath(srcFile);
                                if (eclipseSrcVariablePath == null) {
                                    eclipseSrcVariablePath = eclipseModuleManager.getEclipseLinkedSrcVariablePath(srcFile);
                                }
                                if (eclipseSrcVariablePath == null) {
                                    eclipseSrcVariablePath = EPathUtil.collapse2EclipseVariabledPath(libraryOrderEntry, OrderRootType.SOURCES);
                                    if (eclipseSrcVariablePath != null) {
                                        eclipseSrcVariablePath = "/" + eclipseSrcVariablePath;
                                    }
                                    else {
                                        if (newVarLibrary) { //new library which cannot be replaced with vars
                                            // Fall back from "var" to a plain "lib"
                                            // entry: detach the one added above and
                                            // re-add with the collapsed path.
                                            orderEntry.detach();
                                            orderEntry = addOrderEntry(EclipseXml.LIB_KIND, EPathUtil.collapse2EclipsePath(files[0], model), classpathRoot);
                                        }
                                        else {
                                            LOG.info("Added root " + srcRelativePath + " (in existing var library) can't be replaced with any variable; src roots placed in .eml only");
                                            addSrcRoots = false;
                                        }
                                    }
                                }
                            }
                        }
                        setOrRemoveAttribute(orderEntry, EclipseXml.SOURCEPATH_ATTR, addSrcRoots ? (eclipseSrcVariablePath != null ? eclipseSrcVariablePath : srcRelativePath) : null);
                        EJavadocUtil.setupJavadocAttributes(orderEntry, libraryOrderEntry, model);
                        final String[] nativeRoots = libraryOrderEntry.getUrls(NativeLibraryOrderRootType.getInstance());
                        if (nativeRoots.length > 0) {
                            EJavadocUtil.setupAttributes(orderEntry, new Function<String, String>() {
                                @Override
                                public String fun(String nativeRoot) {
                                    return EPathUtil.collapse2EclipsePath(nativeRoot, model);
                                }
                            }, EclipseXml.DLL_LINK, nativeRoots);
                        }
                        setExported(orderEntry, libraryOrderEntry);
                    }
                }
            }
            else {
                // Project/global-level library: emitted as a container ("con") entry.
                Element orderEntry;
                if (eclipseModuleManager.getUnknownCons().contains(libraryName)) {
                    orderEntry = addOrderEntry(EclipseXml.CON_KIND, libraryName, classpathRoot);
                }
                else if (Comparing.strEqual(libraryName, IdeaXml.ECLIPSE_LIBRARY)) {
                    orderEntry = addOrderEntry(EclipseXml.CON_KIND, EclipseXml.ECLIPSE_PLATFORM, classpathRoot);
                }
                else {
                    orderEntry = addOrderEntry(EclipseXml.CON_KIND, EclipseXml.USER_LIBRARY + '/' + libraryName, classpathRoot);
                }
                setExported(orderEntry, libraryOrderEntry);
            }
        }
        else if (entry instanceof JdkOrderEntry) {
            if (entry instanceof InheritedJdkOrderEntry) {
                // Inherited project JDK: only written when the JDK was explicitly
                // configured on the Eclipse side.
                if (!EclipseModuleManagerImpl.getInstance(entry.getOwnerModule()).isForceConfigureJDK()) {
                    addOrderEntry(EclipseXml.CON_KIND, EclipseXml.JRE_CONTAINER, classpathRoot);
                }
            }
            else {
                // Module-specific JDK: JRE container, optionally qualified with
                // the SDK type and name.
                final Sdk jdk = ((JdkOrderEntry)entry).getJdk();
                String jdkLink;
                if (jdk == null) {
                    jdkLink = EclipseXml.JRE_CONTAINER;
                }
                else {
                    jdkLink = EclipseXml.JRE_CONTAINER;
                    if (jdk.getSdkType() instanceof JavaSdkType) {
                        jdkLink += EclipseXml.JAVA_SDK_TYPE;
                    }
                    jdkLink += '/' + jdk.getName();
                }
                addOrderEntry(EclipseXml.CON_KIND, jdkLink, classpathRoot);
            }
        }
        else {
            throw new ConversionException("Unknown EclipseProjectModel.ClasspathEntry: " + entry.getClass());
        }
    }

    private Element addOrderEntry(String kind, String path, Element classpathRoot) {
        return addOrderEntry(kind, path, classpathRoot, -1);
    }

    /**
     * Adds a <classpathentry> for kind/path, preferring a clone of the matching
     * element from the previously read file (preserves foreign attributes).
     *
     * @param index insertion position within classpathRoot, or -1 to append
     * @return the element that was added (clone or freshly created)
     */
    private Element addOrderEntry(@NotNull String kind, String path, Element classpathRoot, int index) {
        Element element = myOldEntries.get(kind + getJREKey(path));
        if (element != null) {
            Element clonedElement = element.clone();
            if (index == -1 || index >= classpathRoot.getContentSize()) {
                classpathRoot.addContent(clonedElement);
            }
            else {
                classpathRoot.addContent(index, clonedElement);
            }
            return clonedElement;
        }
        Element orderEntry = new Element(EclipseXml.CLASSPATHENTRY_TAG);
        orderEntry.setAttribute(EclipseXml.KIND_ATTR, kind);
        if (path != null) {
            orderEntry.setAttribute(EclipseXml.PATH_ATTR, path);
        }
        if (index == -1 || index >= classpathRoot.getContentSize()) {
            classpathRoot.addContent(orderEntry);
        }
        else {
            classpathRoot.addContent(index, orderEntry);
        }
        return orderEntry;
    }

    // Any JRE-container path (regardless of its version suffix) maps to a single
    // cache key so a versioned container still matches the old entry.
    private static String getJREKey(String path) {
        return path.startsWith(EclipseXml.JRE_CONTAINER) ? EclipseXml.JRE_CONTAINER : path;
    }

    // Writes exported="true" or removes the attribute entirely (Eclipse default).
    private static void setExported(Element orderEntry, ExportableOrderEntry dependency) {
        setOrRemoveAttribute(orderEntry, EclipseXml.EXPORTED_ATTR, dependency.isExported() ? EclipseXml.TRUE_VALUE : null);
    }

    private static void setOrRemoveAttribute(@NotNull Element element, @NotNull String name, @Nullable String value) {
        if (value == null) {
            element.removeAttribute(name);
        }
        else {
            element.setAttribute(name, value);
        }
    }

    // Sets the attribute only if the (possibly cloned) element lacks it, so
    // values preserved from the old file are not overwritten.
    private static void setAttributeIfAbsent(@NotNull Element element, String name, String value) {
        if (element.getAttribute(name) == null) {
            element.setAttribute(name, value);
        }
    }
}
| |
package ca.uhn.fhir.jpa.sched;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.util.AopTestUtils;
import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Integration tests for the Quartz-backed scheduler service: verifies that
 * scheduled local jobs fire repeatedly, survive a stop/create/start cycle,
 * and that long-running jobs are not executed concurrently.
 */
@ContextConfiguration(classes = SchedulerServiceImplTest.TestConfiguration.class)
@ExtendWith(SpringExtension.class)
@DirtiesContext
public class SchedulerServiceImplTest {

    private static final Logger ourLog = LoggerFactory.getLogger(SchedulerServiceImplTest.class);

    /**
     * Delay (ms) each job execution sleeps for. Written by the test thread and
     * read by Quartz worker threads, so it must be volatile for visibility.
     * (FIX: was a plain static field — updates were not guaranteed visible to
     * the scheduler threads, making the tests flaky.)
     */
    private static volatile long ourTaskDelay;

    @Autowired
    private ISchedulerService mySvc;

    @BeforeEach
    public void before() {
        ourTaskDelay = 0;
    }

    @Test
    public void testScheduleTask() {
        ScheduledJobDefinition def = new ScheduledJobDefinition()
                .setId(CountingJob.class.getName())
                .setJobClass(CountingJob.class);
        // Fire every 100ms, then give the scheduler ~1s to run.
        mySvc.scheduleLocalJob(100, def);
        sleepAtLeast(1000);
        ourLog.info("Fired {} times", CountingJob.ourCount);
        assertThat(CountingJob.ourCount, greaterThan(3));
        assertThat(CountingJob.ourCount, lessThan(20));
    }

    @Test
    public void testStopAndStartService() throws SchedulerException {
        ScheduledJobDefinition def = new ScheduledJobDefinition()
                .setId(CountingJob.class.getName())
                .setJobClass(CountingJob.class);
        // Unwrap the Spring AOP proxy to drive the service lifecycle directly.
        BaseSchedulerServiceImpl svc = AopTestUtils.getTargetObject(mySvc);
        svc.stop();
        svc.create();
        svc.start();
        // Jobs scheduled after a restart must still fire.
        mySvc.scheduleLocalJob(100, def);
        sleepAtLeast(1000);
        ourLog.info("Fired {} times", CountingJob.ourCount);
        await().until(() -> CountingJob.ourCount, greaterThan(3));
        assertThat(CountingJob.ourCount, lessThan(50));
    }

    @Test
    public void testScheduleTaskLongRunningDoesntRunConcurrently() {
        ScheduledJobDefinition def = new ScheduledJobDefinition()
                .setId(CountingJob.class.getName())
                .setJobClass(CountingJob.class);
        // Each execution sleeps 500ms while the trigger fires every 100ms;
        // @DisallowConcurrentExecution must keep the count low.
        ourTaskDelay = 500;
        mySvc.scheduleLocalJob(100, def);
        sleepAtLeast(1000);
        ourLog.info("Fired {} times", CountingJob.ourCount);
        await().until(() -> CountingJob.ourCount, greaterThanOrEqualTo(1));
        assertThat(CountingJob.ourCount, lessThan(5));
    }

    @Test
    public void testIntervalJob() {
        ScheduledJobDefinition def = new ScheduledJobDefinition()
                .setId(CountingIntervalJob.class.getName())
                .setJobClass(CountingIntervalJob.class);
        ourTaskDelay = 500;
        mySvc.scheduleLocalJob(100, def);
        sleepAtLeast(2000);
        ourLog.info("Fired {} times", CountingIntervalJob.ourCount);
        await().until(() -> CountingIntervalJob.ourCount, greaterThanOrEqualTo(2));
        assertThat(CountingIntervalJob.ourCount, lessThan(6));
    }

    @AfterEach
    public void after() throws SchedulerException {
        // Reset shared counters and clear all jobs so tests stay independent.
        CountingJob.ourCount = 0;
        CountingIntervalJob.ourCount = 0;
        mySvc.purgeAllScheduledJobsForUnitTest();
    }

    @DisallowConcurrentExecution
    public static class CountingJob implements Job, ApplicationContextAware {

        // Incremented on Quartz worker threads and read/reset on the test thread;
        // volatile guarantees visibility (FIX: was non-volatile and racy).
        // @DisallowConcurrentExecution serializes executions, so the plain ++
        // never races with another increment.
        private static volatile int ourCount;

        // Verifies that the scheduler's job factory autowires Spring beans.
        @Autowired
        @Qualifier("stringBean")
        private String myStringBean;

        private ApplicationContext myAppCtx;

        @Override
        public void execute(JobExecutionContext theContext) {
            if (!"String beans are good.".equals(myStringBean)) {
                fail("Did not autowire stringBean correctly, found: " + myStringBean);
            }
            if (myAppCtx == null) {
                fail("Did not populate appctx");
            }
            if (ourTaskDelay > 0) {
                ourLog.info("Job has fired, going to sleep for {}ms", ourTaskDelay);
                sleepAtLeast(ourTaskDelay);
                ourLog.info("Done sleeping");
            } else {
                ourLog.info("Job has fired...");
            }
            ourCount++;
        }

        @Override
        public void setApplicationContext(ApplicationContext theAppCtx) throws BeansException {
            myAppCtx = theAppCtx;
        }
    }

    public static class CountingIntervalJob implements HapiJob {

        // volatile for cross-thread visibility (FIX: was non-volatile).
        // NOTE(review): unlike CountingJob there is no @DisallowConcurrentExecution
        // here, so overlapping executions could in principle lose an increment;
        // the assertions above only need a lower bound, so this is tolerated.
        private static volatile int ourCount;

        @Autowired
        @Qualifier("stringBean")
        private String myStringBean;

        private ApplicationContext myAppCtx;

        @Override
        public void execute(JobExecutionContext theContext) {
            ourLog.info("Job has fired, going to sleep for {}ms", ourTaskDelay);
            sleepAtLeast(ourTaskDelay);
            ourCount++;
        }
    }

    @Configuration
    public static class TestConfiguration {

        @Bean
        public ISchedulerService schedulerService() {
            return new HapiSchedulerServiceImpl();
        }

        @Bean
        public String stringBean() {
            return "String beans are good.";
        }

        // Job factory that autowires Spring beans into Quartz job instances.
        @Bean
        public AutowiringSpringBeanJobFactory springBeanJobFactory() {
            return new AutowiringSpringBeanJobFactory();
        }
    }
}
| |
/**
*
*/
package org.vaadin.netbeans.code.generator;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Name;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeMirror;
import org.netbeans.api.java.project.JavaProjectConstants;
import org.netbeans.api.java.queries.UnitTestForSourceQuery;
import org.netbeans.api.java.source.ClassIndex.SearchKind;
import org.netbeans.api.java.source.ClassIndex.SearchScope;
import org.netbeans.api.java.source.CompilationController;
import org.netbeans.api.java.source.CompilationInfo;
import org.netbeans.api.java.source.ElementHandle;
import org.netbeans.api.java.source.JavaSource;
import org.netbeans.api.java.source.JavaSource.Phase;
import org.netbeans.api.java.source.Task;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectUtils;
import org.netbeans.api.project.SourceGroup;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.filesystems.URLMapper;
import org.openide.loaders.DataFolder;
import org.openide.loaders.DataObject;
import org.vaadin.netbeans.ui.wizard.NewWidgetWizardIterator;
/**
* @author denis
*/
public final class JavaUtils {

    public static final String JAVA_SUFFIX = ".java"; // NOI18N

    public static final String VAADIN_SERVLET_CONFIGURATION = "com.vaadin.annotations.VaadinServletConfiguration";//NOI18N

    public static final String SERVLET_ANNOTATION = "javax.servlet.annotation.WebServlet"; // NOI18N

    public static final String WIDGETSET = "widgetset"; // NOI18N

    public static final String INIT_PARAMS = "initParams"; // NOI18N

    public static final String VALUE = "value"; // NOI18N

    public static final String NAME = "name"; // NOI18N

    public static final String UI = "ui"; // NOI18N

    public static final String WEB_INIT_PARAM = "javax.servlet.annotation.WebInitParam"; // NOI18N

    public static final String VAADIN_UI_FQN = "com.vaadin.ui.UI"; // NOI18N

    public static final String VAADIN_SERVLET = "com.vaadin.server.VaadinServlet"; // NOI18N

    /** Element kinds that represent declared types. */
    public static final Set<ElementKind> TYPE_KINDS = EnumSet.of(
            ElementKind.CLASS, ElementKind.INTERFACE, ElementKind.ENUM,
            ElementKind.ANNOTATION_TYPE);

    private JavaUtils() {
        // Utility class: not instantiable.
    }

    /**
     * Returns a file name (without suffix) not yet present in {@code folder},
     * starting from {@code namePrefix} and appending 1, 2, ... until free.
     *
     * @param folder folder to probe for existing files
     * @param namePrefix desired base name
     * @param suffix file suffix appended when probing (e.g. ".java")
     * @return a base name such that {@code name + suffix} does not exist yet
     */
    public static String getFreeName( FileObject folder, String namePrefix,
            String suffix )
    {
        String name = namePrefix;
        String fileName = name + suffix;
        int i = 1;
        while (folder.getFileObject(fileName) != null) {
            name = namePrefix + i;
            fileName = name + suffix;
            i++;
        }
        return name;
    }

    /**
     * Returns the fully qualified name of the (single) top level type in the
     * given Java file, or {@code null} when it cannot be resolved.
     */
    public static String getFqn( FileObject javaClassFile ) throws IOException {
        JavaModelElement element = getModelElement(javaClassFile);
        return element == null ? null : element.getFqn();
    }

    /**
     * Resolves the top level type declared in {@code javaClassFile} into a
     * {@link JavaModelElement} (simple name + FQN). When the file declares
     * several top level types a warning is logged and the first one is used.
     *
     * @return the resolved element, or {@code null} when no Java source is
     *         associated with the file or it declares no top level type
     */
    public static JavaModelElement getModelElement( FileObject javaClassFile )
            throws IOException
    {
        JavaSource javaSource = JavaSource.forFileObject(javaClassFile);
        if (javaSource == null) {
            Logger.getLogger(JavaUtils.class.getName()).log(Level.WARNING,
                    "Java source is null for fileObject: {0}",
                    javaClassFile.getPath());
            return null;
        }
        // Single-element array so the anonymous task can publish its result.
        final JavaModelElement[] element = new JavaModelElement[1];
        javaSource.runUserActionTask(new Task<CompilationController>() {

            @Override
            public void run( CompilationController controller )
                    throws Exception
            {
                controller.toPhase(Phase.ELEMENTS_RESOLVED);
                List<? extends TypeElement> topLevelElements = controller
                        .getTopLevelElements();
                if (topLevelElements.size() != 1) {
                    Logger.getLogger(JavaUtils.class.getName()).log(
                            Level.WARNING,
                            "Found {0} top level elements in the file: {1}",
                            new Object[] { topLevelElements.size(),
                                    controller.getFileObject().getPath() });
                }
                if (topLevelElements.size() > 0) {
                    String fqn = topLevelElements.get(0).getQualifiedName()
                            .toString();
                    String name = topLevelElements.get(0).getSimpleName()
                            .toString();
                    element[0] = new JavaModelElement(name, fqn);
                }
            }
        }, true);
        return element[0];
    }

    /**
     * @deprecated misspelled name kept for backward compatibility; use
     *             {@link #getModelElement(FileObject)} instead.
     */
    @Deprecated
    public static JavaModelElement getModlelElement( FileObject javaClassFile )
            throws IOException
    {
        return getModelElement(javaClassFile);
    }

    /**
     * Instantiates the registered template {@code template} in
     * {@code targetFolder} under {@code targetName} (template's own name when
     * {@code null}), passing {@code params} to the templating engine.
     */
    public static DataObject createDataObjectFromTemplate( String template,
            FileObject targetFolder, String targetName,
            Map<String, String> params ) throws IOException
    {
        assert template != null;
        assert targetFolder != null;

        String name = targetName;
        FileObject templateFileObject = FileUtil.getConfigFile(template);
        if (targetName == null) {
            name = templateFileObject.getName();
        }
        else {
            assert targetName.trim().length() > 0;
        }
        DataObject templateDataObject = DataObject.find(templateFileObject);
        DataFolder dataFolder = DataFolder.findFolder(targetFolder);
        if (params != null) {
            // NOTE(review): params are applied both as file attributes and as
            // createFromTemplate arguments — presumably for compatibility with
            // templates reading either source; confirm before removing one.
            for (Entry<String, String> entry : params.entrySet()) {
                templateFileObject.setAttribute(entry.getKey(),
                        entry.getValue());
            }
        }
        return templateDataObject.createFromTemplate(dataFolder, name, params);
    }

    /** Returns the project's non-test Java source groups. */
    public static SourceGroup[] getJavaSourceGroups( Project project ) {
        return getSourceGroups(project, JavaProjectConstants.SOURCES_TYPE_JAVA);
    }

    /** Returns the project's non-test resources source groups. */
    public static SourceGroup[] getResourcesSourceGroups( Project project ) {
        return getSourceGroups(project,
                JavaProjectConstants.SOURCES_TYPE_RESOURCES);
    }

    /**
     * Returns the source groups of the given type, with test roots filtered
     * out.
     */
    public static SourceGroup[] getSourceGroups( Project project,
            String sourcesType )
    {
        SourceGroup[] sourceGroups = ProjectUtils.getSources(project)
                .getSourceGroups(sourcesType);
        Set<FileObject> testRoots = getTestRoots(project, sourceGroups);
        List<SourceGroup> list = new ArrayList<>(sourceGroups.length);
        for (SourceGroup sourceGroup : sourceGroups) {
            if (!testRoots.contains(sourceGroup.getRootFolder())) {
                list.add(sourceGroup);
            }
        }
        return list.toArray(new SourceGroup[0]);
    }

    /** Returns the unit-test roots of the project's Java source groups. */
    public static Set<FileObject> getTestRoots( Project project ) {
        return getTestRoots(project, JavaProjectConstants.SOURCES_TYPE_JAVA);
    }

    /** Returns the unit-test roots of the source groups of the given type. */
    public static Set<FileObject> getTestRoots( Project project,
            String sourcesType )
    {
        SourceGroup[] sourceGroups = ProjectUtils.getSources(project)
                .getSourceGroups(sourcesType);
        return getTestRoots(project, sourceGroups);
    }

    /**
     * Finds all types annotated with {@code searchedTypeName} reachable from
     * the compilation's classpath (source + dependencies).
     *
     * <p>Types merely enclosing an annotated inner type are excluded: the
     * class index reports the top level class for inner-type references, so
     * each candidate is re-checked for the annotation itself.</p>
     *
     * @throws InterruptedException when the class index scan is interrupted
     */
    public static List<TypeElement> findAnnotatedElements(
            final String searchedTypeName, CompilationInfo controller )
            throws InterruptedException
    {
        TypeElement searchedType = controller.getElements().getTypeElement(
                searchedTypeName);
        if (searchedType == null) {
            Logger.getLogger(JavaUtils.class.getName()).log(Level.FINE,
                    "Annotation type {0} is not found", searchedTypeName); // NOI18N
            return Collections.emptyList();
        }
        ElementHandle<TypeElement> searchedTypeHandle = ElementHandle
                .create(searchedType);
        final Set<ElementHandle<TypeElement>> elementHandles = controller
                .getClasspathInfo()
                .getClassIndex()
                .getElements(
                        searchedTypeHandle,
                        EnumSet.of(SearchKind.TYPE_REFERENCES),
                        EnumSet.of(SearchScope.SOURCE, SearchScope.DEPENDENCIES));
        if (elementHandles == null) {
            // Per ClassIndex contract, null means the scan was canceled.
            throw new InterruptedException(
                    "ClassIndex.getElements() was interrupted"); // NOI18N
        }
        List<TypeElement> result = new ArrayList<>(elementHandles.size());
        for (ElementHandle<TypeElement> elementHandle : elementHandles) {
            Logger.getLogger(JavaUtils.class.getName()).log(Level.FINE,
                    "found element {0}", elementHandle.getQualifiedName()); // NOI18N
            TypeElement typeElement = elementHandle.resolve(controller);
            /*
             * Top level class is returned in the result in case if inner type
             * has annotation. Don't include it.
             */
            if (typeElement == null
                    || getAnnotation(typeElement, searchedTypeName) == null)
            {
                continue;
            }
            result.add(typeElement);
        }
        return result;
    }

    /**
     * Returns the mirror of the annotation with the given FQN on the type, or
     * {@code null} when absent.
     */
    public static AnnotationMirror getAnnotation( TypeElement type,
            String annotationFqn )
    {
        // FIX: delegate to the Element overload — the lookup logic was
        // duplicated (and the parameter name misspelled) here.
        return getAnnotation((Element) type, annotationFqn);
    }

    /**
     * Returns the string form of the annotation member {@code method}, or
     * {@code null} when the member is not explicitly set.
     */
    public static String getValue( AnnotationMirror annotation, String method )
    {
        AnnotationValue annotationValue = getAnnotationValue(annotation, method);
        return annotationValue == null ? null : annotationValue.getValue()
                .toString();
    }

    /**
     * Returns the explicitly set value of the annotation member
     * {@code method}, or {@code null}. Defaulted members are not reported by
     * {@code getElementValues()} and therefore yield {@code null}.
     */
    public static AnnotationValue getAnnotationValue(
            AnnotationMirror annotation, String method )
    {
        Map<? extends ExecutableElement, ? extends AnnotationValue> map = annotation
                .getElementValues();
        for (Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : map
                .entrySet())
        {
            ExecutableElement executableElement = entry.getKey();
            if (executableElement.getSimpleName().contentEquals(method)) {
                return entry.getValue();
            }
        }
        return null;
    }

    /**
     * Returns the annotation member {@code method} as a list, or {@code null}
     * when it is unset or not an array value.
     */
    public static List<?> getArrayValue( AnnotationMirror annotation,
            String method )
    {
        AnnotationValue annotationValue = getAnnotationValue(annotation, method);
        if (annotationValue == null) {
            return null;
        }
        Object value = annotationValue.getValue();
        return value instanceof List<?> ? (List<?>) value : null;
    }

    /**
     * Extracts the value of the {@code @WebInitParam} named {@code paramName}
     * from the servlet annotation's {@code initParams} member, or
     * {@code null}.
     */
    public static String getWebInitParamValue( AnnotationMirror annotation,
            String paramName )
    {
        String value = null;
        List<?> params = JavaUtils.getArrayValue(annotation,
                JavaUtils.INIT_PARAMS);
        if (params == null) {
            return null;
        }
        for (Object param : params) {
            if (param instanceof AnnotationMirror) {
                AnnotationMirror mirror = (AnnotationMirror) param;
                String name = JavaUtils.getValue(mirror, JavaUtils.NAME);
                if (paramName.equals(name)) {
                    value = JavaUtils.getValue(mirror, JavaUtils.VALUE);
                    break;
                }
            }
        }
        return value;
    }

    /** Returns the "widgetset" init parameter of a servlet annotation. */
    public static String getWidgetsetWebInit( AnnotationMirror annotation ) {
        return getWebInitParamValue(annotation, JavaUtils.WIDGETSET);
    }

    /**
     * Returns the mirror of the annotation with the given FQN on the element,
     * or {@code null} when absent.
     */
    public static AnnotationMirror getAnnotation( Element element,
            String annotationFqn )
    {
        List<? extends AnnotationMirror> annotations = element
                .getAnnotationMirrors();
        for (AnnotationMirror annotation : annotations) {
            Element annotationElement = annotation.getAnnotationType()
                    .asElement();
            if (annotationElement instanceof TypeElement) {
                Name fqn = ((TypeElement) annotationElement).getQualifiedName();
                if (fqn.contentEquals(annotationFqn)) {
                    return annotation;
                }
            }
        }
        return null;
    }

    /**
     * Returns {@code true} when the element carries any annotation whose FQN
     * is among {@code annotationFqns}.
     */
    public static boolean hasAnnotation( Element element,
            String... annotationFqns )
    {
        List<? extends AnnotationMirror> annotations = element
                .getAnnotationMirrors();
        for (AnnotationMirror annotation : annotations) {
            Element annotationElement = annotation.getAnnotationType()
                    .asElement();
            if (annotationElement instanceof TypeElement) {
                String fqn = ((TypeElement) annotationElement)
                        .getQualifiedName().toString();
                for (String annotationFqn : annotationFqns) {
                    if (fqn.equals(annotationFqn)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Returns all (transitive) subclasses of {@code typeElement} found via
     * the class index; the type itself is not included.
     *
     * @throws InterruptedException when the class index scan is interrupted
     */
    public static Set<TypeElement> getSubclasses( TypeElement typeElement,
            CompilationInfo info ) throws InterruptedException
    {
        if (typeElement == null) {
            return Collections.emptySet();
        }
        return discoverHierarchy(typeElement, EnumSet.of(ElementKind.CLASS),
                info);
    }

    /** Returns all transitive supertypes (classes and interfaces) of a type. */
    public static Collection<? extends TypeMirror> getSupertypes(
            TypeMirror type, CompilationInfo info )
    {
        Collection<TypeMirror> result = new LinkedList<>();
        List<TypeMirror> walkThrough = new LinkedList<>();
        walkThrough.add(type);
        Set<Element> visited = new HashSet<>();
        collectSupertypes(walkThrough, result, visited, info);
        return result;
    }

    /**
     * Breadth-first walk over direct supertypes, de-duplicated by element so
     * shared supertypes in a diamond hierarchy are collected once.
     */
    private static void collectSupertypes( List<TypeMirror> types,
            Collection<TypeMirror> superTypes, Set<Element> visited,
            CompilationInfo info )
    {
        while (!types.isEmpty()) {
            TypeMirror typeMirror = types.remove(0);
            List<? extends TypeMirror> directSupertypes = info.getTypes()
                    .directSupertypes(typeMirror);
            for (TypeMirror directSupertype : directSupertypes) {
                // FIX: the element was previously also computed (and
                // discarded) for the type being expanded — dead store removed.
                Element typeElement = info.getTypes().asElement(directSupertype);
                if (!visited.contains(typeElement)) {
                    types.add(directSupertype);
                    superTypes.add(directSupertype);
                    visited.add(typeElement);
                }
            }
        }
    }

    /** Collects the test roots of all the given source groups. */
    private static Set<FileObject> getTestRoots( Project project,
            SourceGroup[] sourceGroups )
    {
        Set<FileObject> result = new HashSet<>();
        for (SourceGroup sourceGroup : sourceGroups) {
            result.addAll(getTestRoots(sourceGroup));
        }
        return result;
    }

    /** Returns the resolvable unit-test roots of a single source group. */
    private static Set<FileObject> getTestRoots( SourceGroup group ) {
        final URL[] rootURLs = UnitTestForSourceQuery.findUnitTests(group
                .getRootFolder());
        if (rootURLs.length == 0) {
            return Collections.emptySet();
        }
        List<FileObject> sourceRoots = getFileObjects(rootURLs);
        if (sourceRoots.isEmpty()) {
            return Collections.emptySet();
        }
        return new HashSet<>(sourceRoots);
    }

    /** Maps URLs to FileObjects, logging and skipping unresolvable ones. */
    private static List<FileObject> getFileObjects( URL[] urls ) {
        List<FileObject> result = new ArrayList<>(urls.length);
        for (URL url : urls) {
            FileObject sourceRoot = URLMapper.findFileObject(url);
            if (sourceRoot != null) {
                result.add(sourceRoot);
            }
            else {
                Logger.getLogger(NewWidgetWizardIterator.class.getName()).log(
                        Level.INFO,
                        "No FileObject found for the following URL: {0}", url);
            }
        }
        return result;
    }

    /**
     * Transitively discovers subtypes/implementors of {@code typeElement}
     * restricted to the requested (type) kinds; the root itself is removed
     * from the result.
     */
    private static Set<TypeElement> discoverHierarchy( TypeElement typeElement,
            Set<ElementKind> kinds, CompilationInfo info )
            throws InterruptedException
    {
        Set<TypeElement> result = new HashSet<>();
        result.add(typeElement);
        Set<TypeElement> toProcess = new HashSet<>();
        toProcess.add(typeElement);
        Set<ElementKind> requiredKinds = EnumSet.copyOf(kinds);
        requiredKinds.retainAll(TYPE_KINDS);
        while (!toProcess.isEmpty()) {
            TypeElement element = toProcess.iterator().next();
            toProcess.remove(element);
            Set<TypeElement> subTypes = doDiscoverHierarchy(element,
                    requiredKinds, info);
            for (TypeElement subType : subTypes) {
                // FIX: only enqueue types not seen before; the previous code
                // re-enqueued already-processed types, redoing index queries
                // for shared subtypes in diamond-shaped hierarchies.
                if (result.add(subType)) {
                    toProcess.add(subType);
                }
            }
        }
        result.remove(typeElement);
        return result;
    }

    /** One class-index step: the direct implementors of a single type. */
    private static Set<TypeElement> doDiscoverHierarchy(
            TypeElement typeElement, Set<ElementKind> kinds,
            CompilationInfo info ) throws InterruptedException
    {
        Set<TypeElement> result = new HashSet<>();
        ElementHandle<TypeElement> handle = ElementHandle.create(typeElement);
        final Set<ElementHandle<TypeElement>> handles = info
                .getClasspathInfo()
                .getClassIndex()
                .getElements(
                        handle,
                        EnumSet.of(SearchKind.IMPLEMENTORS),
                        EnumSet.of(SearchScope.SOURCE, SearchScope.DEPENDENCIES));
        if (handles == null) {
            // Per ClassIndex contract, null means the scan was canceled.
            throw new InterruptedException(
                    "ClassIndex.getElements() was interrupted"); // NOI18N
        }
        for (ElementHandle<TypeElement> elementHandle : handles) {
            // FIX: the qualified name was previously passed as the log
            // *message* at INFO; log it as a parameter at FINE, consistent
            // with findAnnotatedElements().
            Logger.getLogger(JavaUtils.class.getName()).log(Level.FINE,
                    "found element {0}", elementHandle.getQualifiedName()); // NOI18N
            TypeElement derivedElement = elementHandle.resolve(info);
            if (derivedElement == null
                    || !kinds.contains(derivedElement.getKind()))
            {
                continue;
            }
            result.add(derivedElement);
        }
        return result;
    }

    /** Immutable pair of a type's simple name and fully qualified name. */
    public static class JavaModelElement {

        private final String myName;

        private final String myFqn;

        public JavaModelElement( String name, String fqn ) {
            myName = name;
            myFqn = fqn;
        }

        /** Fully qualified name of the type. */
        public String getFqn() {
            return myFqn;
        }

        /** Simple name of the type. */
        public String getName() {
            return myName;
        }
    }
}
| |
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.provisioning.dao;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.application.common.IdentityApplicationManagementException;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.persistence.JDBCPersistenceManager;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationManagementUtil;
import org.wso2.carbon.identity.provisioning.IdentityProvisioningConstants;
import org.wso2.carbon.identity.provisioning.ProvisionedIdentifier;
import org.wso2.carbon.identity.provisioning.ProvisioningEntity;
import org.wso2.carbon.idp.mgt.util.IdPManagementConstants;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
public class ProvisioningManagementDAO {

    private static final Log log = LogFactory.getLog(ProvisioningManagementDAO.class);

    /**
     * Persists the provisioned identifier of the given entity against the
     * provisioning configuration of the given identity provider / connector.
     *
     * @param identityProviderName name of the identity provider
     * @param connectorType provisioning connector type
     * @param provisioningEntity entity whose identifier is stored
     * @param tenantId tenant the entry belongs to
     * @throws IdentityApplicationManagementException on any persistence error
     */
    public void addProvisioningEntity(String identityProviderName, String connectorType,
                                      ProvisioningEntity provisioningEntity, int tenantId)
            throws IdentityApplicationManagementException {
        Connection dbConnection = null;
        PreparedStatement prepStmt = null;
        try {
            dbConnection = JDBCPersistenceManager.getInstance().getDBConnection();
            // id of the identity provider
            int idpId = getIdentityProviderIdentifier(dbConnection, identityProviderName, tenantId);
            // id of the provisioning configuration
            int provisioningConfigId = getProvisioningConfigurationIdentifier(dbConnection, idpId,
                    connectorType);
            // Columns: PROVISIONING_CONFIG_ID, ENTITY_TYPE, ENTITY_LOCAL_USERSTORE,
            // ENTITY_NAME, ENTITY_VALUE, TENANT_ID
            String sqlStmt = IdentityProvisioningConstants.SQLQueries.ADD_PROVISIONING_ENTITY_SQL;
            prepStmt = dbConnection.prepareStatement(sqlStmt);
            prepStmt.setInt(1, provisioningConfigId);
            prepStmt.setString(2, provisioningEntity.getEntityType().toString());
            prepStmt.setString(3, UserCoreUtil.extractDomainFromName(provisioningEntity.getEntityName()));
            prepStmt.setString(4, UserCoreUtil.removeDomainFromName(provisioningEntity.getEntityName()));
            prepStmt.setString(5, provisioningEntity.getIdentifier().getIdentifier());
            prepStmt.setInt(6, tenantId);
            prepStmt.execute();
            dbConnection.commit();
        } catch (SQLException e) {
            IdentityApplicationManagementUtil.rollBack(dbConnection);
            String msg = "Error occurred while adding Provisioning entity for tenant " + tenantId;
            throw new IdentityApplicationManagementException(msg, e);
        } finally {
            // FIX: the statement was previously leaked — only the connection
            // was closed.
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
            IdentityApplicationManagementUtil.closeConnection(dbConnection);
        }
    }

    /**
     * Deletes the stored provisioning entity entry for the given identity
     * provider / connector combination.
     *
     * @param identityProviderName name of the identity provider
     * @param connectorType provisioning connector type
     * @param provisioningEntity entity whose entry is removed
     * @param tenantId tenant the entry belongs to
     * @throws IdentityApplicationManagementException on any persistence error
     */
    public void deleteProvisioningEntity(String identityProviderName, String connectorType,
                                         ProvisioningEntity provisioningEntity, int tenantId)
            throws IdentityApplicationManagementException {
        Connection dbConnection = null;
        PreparedStatement prepStmt = null;
        try {
            dbConnection = JDBCPersistenceManager.getInstance().getDBConnection();
            // id of the identity provider
            int idpId = getIdentityProviderIdentifier(dbConnection, identityProviderName, tenantId);
            // id of the provisioning configuration
            int provisioningConfigId = getProvisioningConfigurationIdentifier(dbConnection, idpId,
                    connectorType);
            // Columns: PROVISIONING_CONFIG_ID, ENTITY_TYPE, ENTITY_LOCAL_USERSTORE,
            // ENTITY_NAME, TENANT_ID
            String sqlStmt = IdentityProvisioningConstants.SQLQueries.DELETE_PROVISIONING_ENTITY_SQL;
            prepStmt = dbConnection.prepareStatement(sqlStmt);
            prepStmt.setInt(1, provisioningConfigId);
            prepStmt.setString(2, provisioningEntity.getEntityType().toString());
            prepStmt.setString(3, UserCoreUtil.extractDomainFromName(provisioningEntity.getEntityName()));
            prepStmt.setString(4, UserCoreUtil.removeDomainFromName(provisioningEntity.getEntityName()));
            prepStmt.setInt(5, tenantId);
            prepStmt.execute();
            dbConnection.commit();
        } catch (SQLException e) {
            IdentityApplicationManagementUtil.rollBack(dbConnection);
            String msg = "Error occurred while deleting Provisioning entity for tenant " + tenantId;
            throw new IdentityApplicationManagementException(msg, e);
        } finally {
            // FIX: close the statement as well as the connection.
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
            IdentityApplicationManagementUtil.closeConnection(dbConnection);
        }
    }

    /**
     * Looks up the stored provisioned identifier for the given entity.
     *
     * @param identityProviderName name of the identity provider
     * @param connectorType provisioning connector type
     * @param provisioningEntity entity whose identifier is looked up
     * @param tenantId tenant the entry belongs to
     * @return the identifier, or {@code null} when no entry exists
     * @throws IdentityApplicationManagementException on any persistence error
     */
    public ProvisionedIdentifier getProvisionedIdentifier(String identityProviderName, String connectorType,
                                                          ProvisioningEntity provisioningEntity, int tenantId)
            throws IdentityApplicationManagementException {
        Connection dbConnection = null;
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        try {
            dbConnection = JDBCPersistenceManager.getInstance().getDBConnection();
            // id of the identity provider
            int idpId = getIdentityProviderIdentifier(dbConnection, identityProviderName, tenantId);
            // id of the provisioning configuration
            int provisioningConfigId = getProvisioningConfigurationIdentifier(dbConnection, idpId,
                    connectorType);
            // Columns: PROVISIONING_CONFIG_ID, ENTITY_TYPE, ENTITY_LOCAL_USERSTORE,
            // ENTITY_NAME, TENANT_ID
            String sqlStmt = IdentityProvisioningConstants.SQLQueries.GET_PROVISIONING_ENTITY_SQL;
            prepStmt = dbConnection.prepareStatement(sqlStmt);
            prepStmt.setInt(1, provisioningConfigId);
            prepStmt.setString(2, provisioningEntity.getEntityType().toString());
            prepStmt.setString(3, UserCoreUtil.extractDomainFromName(provisioningEntity.getEntityName()));
            prepStmt.setString(4, UserCoreUtil.removeDomainFromName(provisioningEntity.getEntityName()));
            prepStmt.setInt(5, tenantId);
            rs = prepStmt.executeQuery();
            if (rs.next()) {
                String entityId = rs.getString(1);
                ProvisionedIdentifier provisionedIdentifier = new ProvisionedIdentifier();
                provisionedIdentifier.setIdentifier(entityId);
                return provisionedIdentifier;
            } else {
                return null;
            }
        } catch (SQLException e) {
            IdentityApplicationManagementUtil.rollBack(dbConnection);
            // FIX: message said "adding"; this is a read.
            String msg = "Error occurred while retrieving Provisioning entity for tenant " + tenantId;
            throw new IdentityApplicationManagementException(msg, e);
        } finally {
            // FIX: result set and statement were previously leaked.
            IdentityApplicationManagementUtil.closeResultSet(rs);
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
            IdentityApplicationManagementUtil.closeConnection(dbConnection);
        }
    }

    /**
     * Updates the identity provider row identified by
     * {@code currentIdentityProvider}'s name with the values from
     * {@code newIdentityProvider}.
     *
     * <p>FIX: the original implementation ran a follow-up
     * {@code GET_IDP_BY_NAME_SQL} query whose result was stored in an unused
     * local; that dead round trip has been removed. The update statement is
     * now also closed (it was previously leaked on the exception path).</p>
     *
     * @throws IdentityApplicationManagementException when the provider does
     *         not exist or on any persistence error
     */
    public void updateProvisionedIdentifier(IdentityProvider newIdentityProvider,
                                            IdentityProvider currentIdentityProvider, int tenantId)
            throws IdentityApplicationManagementException {
        Connection dbConnection = null;
        PreparedStatement prepStmt = null;
        try {
            dbConnection = JDBCPersistenceManager.getInstance().getDBConnection();
            int idPId = getIdentityProviderIdByName(dbConnection,
                    newIdentityProvider.getIdentityProviderName(),
                    tenantId);
            if (idPId <= 0) {
                String msg = "Trying to update non-existent Identity Provider for tenant " +
                        tenantId;
                throw new IdentityApplicationManagementException(msg);
            }
            // Columns: SP_IDP_NAME, SP_IDP_PRIMARY, SP_IDP_HOME_REALM_ID,
            // SP_IDP_THUMBPRINT, SP_IDP_TOKEN_EP_ALIAS,
            // SP_IDP_INBOUND_PROVISIONING_ENABLED,
            // SP_IDP_INBOUND_PROVISIONING_USER_STORE_ID, SP_IDP_USER_CLAIM_URI,
            // SP_IDP_ROLE_CLAIM_URI, SP_IDP_DEFAULT_AUTHENTICATOR_NAME,
            // SP_IDP_DEFAULT_PRO_CONNECTOR_NAME
            String sqlStmt = IdPManagementConstants.SQLQueries.UPDATE_IDP_SQL;
            prepStmt = dbConnection.prepareStatement(sqlStmt);
            prepStmt.setString(1, newIdentityProvider.getIdentityProviderName());
            prepStmt.setString(2, newIdentityProvider.isPrimary() ? "1" : "0");
            prepStmt.setString(3, newIdentityProvider.getHomeRealmId());
            prepStmt.setBinaryStream(4, setBlobValue(newIdentityProvider.getCertificate()));
            prepStmt.setString(5, newIdentityProvider.getAlias());
            if (newIdentityProvider.getJustInTimeProvisioningConfig() != null &&
                    newIdentityProvider.getJustInTimeProvisioningConfig().isProvisioningEnabled()) {
                prepStmt.setString(6, "1");
                prepStmt.setString(7, newIdentityProvider.getJustInTimeProvisioningConfig()
                        .getProvisioningUserStore());
            } else {
                prepStmt.setString(6, "0");
                prepStmt.setString(7, null);
            }
            if (newIdentityProvider.getClaimConfig() != null) {
                prepStmt.setString(8, newIdentityProvider.getClaimConfig().getUserClaimURI());
                prepStmt.setString(9, newIdentityProvider.getClaimConfig().getRoleClaimURI());
            } else {
                prepStmt.setString(8, null);
                prepStmt.setString(9, null);
            }
            // A default authenticator is optional.
            if (newIdentityProvider.getDefaultAuthenticatorConfig() != null &&
                    newIdentityProvider.getDefaultAuthenticatorConfig().getName() != null) {
                prepStmt.setString(10, newIdentityProvider.getDefaultAuthenticatorConfig()
                        .getName());
            } else {
                prepStmt.setString(10, null);
            }
            // A default provisioning connector is optional.
            if (newIdentityProvider.getDefaultProvisioningConnectorConfig() != null &&
                    newIdentityProvider.getDefaultProvisioningConnectorConfig().getName() != null) {
                prepStmt.setString(11, newIdentityProvider.getDefaultProvisioningConnectorConfig()
                        .getName());
            } else {
                prepStmt.setString(11, null);
            }
            prepStmt.setString(12, newIdentityProvider.getIdentityProviderDescription());
            prepStmt.setInt(13, tenantId);
            prepStmt.setString(14, currentIdentityProvider.getIdentityProviderName());
            prepStmt.executeUpdate();
            dbConnection.commit();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
            IdentityApplicationManagementUtil.rollBack(dbConnection);
            String msg = "Error occurred while updating Identity Provider information for tenant " +
                    tenantId;
            throw new IdentityApplicationManagementException(msg, e);
        } finally {
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
            IdentityApplicationManagementUtil.closeConnection(dbConnection);
        }
    }

    /**
     * Not implemented: the body was empty in the original code and is kept as
     * a no-op so existing callers keep working.
     * TODO(review): implement deletion of provisioned identifiers for the IdP,
     * or remove the method if it is genuinely unused.
     *
     * @param idPName name of the identity provider
     * @param tenantId tenant id
     * @param tenantDomain tenant domain
     * @throws IdentityApplicationManagementException never thrown currently
     */
    public void deleteProvisionedIdentifier(String idPName, int tenantId, String tenantDomain)
            throws IdentityApplicationManagementException {
        // Intentionally a no-op (see TODO above); the unused local connection
        // declaration was removed.
    }

    /**
     * Deletes the identity provider row for the given tenant and name using
     * the caller-supplied connection (no commit is performed here).
     *
     * @throws SQLException on any JDBC error
     */
    private void deleteIdP(Connection conn, int tenantId, String idPName) throws SQLException {
        PreparedStatement prepStmt = null;
        String sqlStmt = IdPManagementConstants.SQLQueries.DELETE_IDP_SQL;
        try {
            prepStmt = conn.prepareStatement(sqlStmt);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, idPName);
            prepStmt.executeUpdate();
        } finally {
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
        }
    }

    /**
     * Returns the row id of the identity provider with the given name, or 0
     * when it does not exist. Opens (and closes) its own connection when
     * {@code dbConnection} is {@code null}; a caller-supplied connection is
     * left open.
     *
     * @return the row id, or 0 when no matching row exists
     */
    private int getIdentityProviderIdByName(Connection dbConnection, String idpName, int tenantId)
            throws SQLException,
            IdentityApplicationManagementException {
        boolean dbConnInitialized = true;
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        try {
            if (dbConnection == null) {
                dbConnection = JDBCPersistenceManager.getInstance().getDBConnection();
            } else {
                dbConnInitialized = false;
            }
            String sqlStmt = IdPManagementConstants.SQLQueries.GET_IDP_ROW_ID_SQL;
            prepStmt = dbConnection.prepareStatement(sqlStmt);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, idpName);
            rs = prepStmt.executeQuery();
            if (rs.next()) {
                return rs.getInt(1);
            }
        } finally {
            // FIX: close the result set before its statement (previous order
            // closed the statement first).
            IdentityApplicationManagementUtil.closeResultSet(rs);
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
            if (dbConnInitialized) {
                IdentityApplicationManagementUtil.closeConnection(dbConnection);
            }
        }
        return 0;
    }

    /**
     * Returns the id of the identity provider with the given name.
     *
     * @return the identity provider id
     * @throws IdentityApplicationManagementException when no provider with
     *         that name exists for the tenant
     */
    private int getIdentityProviderIdentifier(Connection dbConnection, String idPName, int tenantId)
            throws SQLException,
            IdentityApplicationManagementException {
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        try {
            String sqlStmt = IdentityProvisioningConstants.SQLQueries.GET_IDP_ID_BY_NAME_SQL;
            prepStmt = dbConnection.prepareStatement(sqlStmt);
            prepStmt.setInt(1, tenantId);
            prepStmt.setString(2, idPName);
            rs = prepStmt.executeQuery();
            if (rs.next()) {
                return rs.getInt(1);
            } else {
                throw new IdentityApplicationManagementException("Invalid Identity Provider Name " +
                        idPName);
            }
        } finally {
            IdentityApplicationManagementUtil.closeResultSet(rs);
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
        }
    }

    /**
     * Returns the id of the provisioning configuration of the given connector
     * type for the given identity provider id.
     *
     * @return the provisioning configuration id
     * @throws IdentityApplicationManagementException when no configuration
     *         exists for that connector type
     */
    private int getProvisioningConfigurationIdentifier(Connection dbConnection, int idPId,
                                                       String connectorType) throws SQLException,
            IdentityApplicationManagementException {
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        try {
            String sqlStmt = IdentityProvisioningConstants.SQLQueries.GET_IDP_PROVISIONING_CONFIG_ID_SQL;
            prepStmt = dbConnection.prepareStatement(sqlStmt);
            prepStmt.setInt(1, idPId);
            prepStmt.setString(2, connectorType);
            rs = prepStmt.executeQuery();
            if (rs.next()) {
                return rs.getInt(1);
            } else {
                throw new IdentityApplicationManagementException("Invalid connector type " +
                        connectorType);
            }
        } finally {
            IdentityApplicationManagementUtil.closeResultSet(rs);
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
        }
    }

    /**
     * Wraps a string as an {@link InputStream} suitable for a BLOB parameter.
     *
     * <p>NOTE(review): {@code getBytes()} uses the platform default charset,
     * exactly as the original code did — confirm whether UTF-8 should be
     * mandated here before changing it.</p>
     *
     * @return the stream, or {@code null} when {@code value} is {@code null}
     */
    private InputStream setBlobValue(String value) throws SQLException {
        if (value != null) {
            return new ByteArrayInputStream(value.getBytes());
        }
        return null;
    }

    /**
     * Lists the names of service providers whose provisioning connectors
     * reference the given identity provider.
     *
     * @param idPName identity provider name
     * @param tenantDomain tenant domain; the super tenant uses a dedicated
     *        query keyed by {@code SUPER_TENANT_ID}
     * @return the (possibly empty) list of service provider names
     * @throws IdentityApplicationManagementException on any persistence error
     */
    public List<String> getSPNamesOfProvisioningConnectorsByIDP(String idPName, String tenantDomain)
            throws IdentityApplicationManagementException {
        Connection dbConnection = null;
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        List<String> spNames = new ArrayList<String>();
        try {
            dbConnection = JDBCPersistenceManager.getInstance().getDBConnection();
            String sqlStmt = null;
            if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
                sqlStmt = IdentityProvisioningConstants.SQLQueries.GET_SP_NAMES_OF_SUPER_TENANT_PROV_CONNECTORS_BY_IDP;
                prepStmt = dbConnection.prepareStatement(sqlStmt);
                prepStmt.setString(1, idPName);
                prepStmt.setInt(2, MultitenantConstants.SUPER_TENANT_ID);
            } else {
                sqlStmt = IdentityProvisioningConstants.SQLQueries.GET_SP_NAMES_OF_PROVISIONING_CONNECTORS_BY_IDP;
                prepStmt = dbConnection.prepareStatement(sqlStmt);
                prepStmt.setString(1, idPName);
                prepStmt.setString(2, tenantDomain);
            }
            rs = prepStmt.executeQuery();
            while (rs.next()) {
                spNames.add(rs.getString(1));
            }
        } catch (SQLException e) {
            String msg = "Error occurred while retrieving SP names of provisioning connectors by IDP name";
            throw new IdentityApplicationManagementException(msg, e);
        } finally {
            // Close in acquisition-reverse order, consistently with the rest
            // of this class.
            IdentityApplicationManagementUtil.closeResultSet(rs);
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
            IdentityApplicationManagementUtil.closeConnection(dbConnection);
        }
        return spNames;
    }
}
| |
/*
* Copyright 2014, The Sporting Exchange Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.exemel.disco.netutil.nio;
import uk.co.exemel.disco.netutil.nio.message.*;
import uk.co.exemel.disco.util.jmx.Exportable;
import uk.co.exemel.disco.util.jmx.JMXControl;
import org.apache.mina.common.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jmx.export.annotation.ManagedAttribute;
import org.springframework.jmx.export.annotation.ManagedResource;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicLong;
import static uk.co.exemel.disco.netutil.nio.NioLogger.LoggingLevel.PROTOCOL;
import static uk.co.exemel.disco.netutil.nio.NioLogger.LoggingLevel.SESSION;
/**
 * MINA protocol filter implementing version 1 of the Disco wire protocol:
 * the CONNECT/ACCEPT/REJECT application-version handshake, keep-alive
 * heart-beating driven by MINA idle events, and orderly DISCONNECT handling.
 * Exported as a JMX MBean ("DiSCO.socket.transport:name=wireProtocol") so
 * operators can monitor heartbeat/session counters and toggle acceptance of
 * new connections via {@link #setEnabled(boolean)}.
 */
@ManagedResource
public class DiscoProtocol1 extends IoFilterAdapter implements Exportable, IDiscoProtocol {
    private static final Logger LOG = LoggerFactory.getLogger(DiscoProtocol1.class);
    /** Stateless keep-alive message instance shared across all sessions. */
    private static final KeepAliveMessage KEEP_ALIVE = new KeepAliveMessage();
    private final NioLogger nioLogger;
    /** The single protocol/application version this endpoint speaks. */
    private final byte applicationVersion;
    /** True when this endpoint accepts handshakes (server side); never reassigned. */
    private final boolean isServer;
    /** Gate for new connections; when false, CONNECTs are rejected as unavailable. Toggled via JMX. */
    private volatile boolean isEnabled = false;
    /** Writer-idle time before a KEEP_ALIVE is sent (seconds, per MINA setIdleTime). */
    private final int interval;
    /** Reader-idle time after which the session is considered dead and closed (seconds). */
    private final int timeout;
    private final AtomicLong heartbeatsMissed = new AtomicLong();
    private final AtomicLong heartbeatsSent = new AtomicLong();
    private final AtomicLong sessionsCreated = new AtomicLong();
    // volatile: written on the IO thread in sessionCreated(), read by JMX threads.
    private volatile String lastSessionFrom = null;
    /**
     * @param server             true for the accepting (server) side, false for the initiating (client) side
     * @param nioLogger          protocol/session logger; also supplies the JMXControl used for export
     * @param applicationVersion the single protocol version this endpoint supports
     * @param keepAliveInterval  writer-idle time before a keep-alive is sent
     * @param keepAliveTimeout   reader-idle time before the session is closed
     */
    public DiscoProtocol1(boolean server, NioLogger nioLogger, byte applicationVersion, int keepAliveInterval, int keepAliveTimeout) {
        this.isServer = server;
        this.nioLogger = nioLogger;
        this.applicationVersion = applicationVersion;
        this.interval = keepAliveInterval;
        this.timeout = keepAliveTimeout;
        export(nioLogger.getJmxControl());
    }
    /** Sends a DISCONNECT to the peer and closes the session once the write has completed. */
    public void closeSession(IoSession ioSession) {
        WriteFuture future = ioSession.write(new DisconnectMessage());
        future.addListener(new IoFutureListener() {
            @Override
            public void operationComplete(IoFuture future) {
                future.getSession().close();
            }
        });
    }
    /**
     * Client side: starts the handshake by storing a ClientHandshake attribute and
     * sending CONNECT with our supported version. Servers wait for the peer's CONNECT.
     */
    @Override
    public void sessionOpened(NextFilter nextFilter, IoSession session) throws Exception {
        if (!isServer) {
            ClientHandshake clientHandshake = new ClientHandshake();
            session.setAttribute(ClientHandshake.HANDSHAKE, clientHandshake);
            session.write(new ConnectMessage(new byte[] {applicationVersion}));
        }
        super.sessionOpened(nextFilter,session);
    }
    /**
     * Writer-idle means we have been quiet: send a keep-alive. Any other idle status
     * (reader-idle) means the peer has been quiet past the timeout: close the session.
     * The event is always propagated to the next filter, even if close/write throws.
     */
    @Override
    public void sessionIdle(NextFilter nextFilter, IoSession session, IdleStatus status) throws Exception {
        try {
            if (status == IdleStatus.WRITER_IDLE) {
                nioLogger.log(PROTOCOL, session, "DiscoProtocolCodecFilter: sending KEEP_ALIVE");
                session.write(KEEP_ALIVE);
                heartbeatsSent.incrementAndGet();
            } else {
                nioLogger.log(PROTOCOL, session, "DiscoProtocolCodecFilter: KEEP_ALIVE timeout closing session");
                session.close();
                heartbeatsMissed.incrementAndGet();
            }
        } finally {
            nextFilter.sessionIdle(session, status);
        }
    }
    /** Configures the idle timers that drive {@link #sessionIdle} and records JMX stats. */
    @Override
    public void sessionCreated(NextFilter nextFilter, IoSession session) throws Exception {
        session.setIdleTime(IdleStatus.READER_IDLE, timeout);
        session.setIdleTime(IdleStatus.WRITER_IDLE, interval);
        nextFilter.sessionCreated(session);
        nioLogger.log(SESSION, session, "DiscoProtocolCodecFilter: Created session at %s from %s", session.getCreationTime(), session.getRemoteAddress());
        sessionsCreated.incrementAndGet();
        lastSessionFrom = session.getRemoteAddress().toString();
    }
    /**
     * Handles protocol control messages in-line; application payloads (byte[])
     * are passed up the filter chain untouched. Messages of any other type are
     * silently dropped.
     */
    @Override
    public void messageReceived(NextFilter nextFilter, IoSession session, Object message) throws Exception {
        if (message instanceof byte[]) {
            super.messageReceived(nextFilter,session,message);
        }
        else if (message instanceof ProtocolMessage) {
            ProtocolMessage protocolMessage = (ProtocolMessage) message;
            switch (protocolMessage.getProtocolMessageType()) {
                case CONNECT:
                    if (isEnabled()) {
                        ConnectMessage connectMessage = (ConnectMessage) protocolMessage;
                        //As a server, ensure that we support the version the client is expecting us to communicate with
                        // NOTE(review): Arrays.binarySearch requires a sorted array; this is fine for the
                        // single-element arrays this class sends, but verify if clients ever offer multiple versions.
                        if (Arrays.binarySearch(connectMessage.getApplicationVersions(), applicationVersion) >= 0) {
                            nioLogger.log(PROTOCOL, session, "DiscoProtocolDecoder: ACCEPTing connection request with version %s", applicationVersion);
                            session.write(new AcceptMessage(applicationVersion));
                        } else {
                            //we don't speak your language. goodbye
                            nioLogger.log(PROTOCOL, session, "DiscoProtocolDecoder: REJECTing connection request with versions %s", getAsString(connectMessage.getApplicationVersions()));
                            LOG.info("REJECTing connection request from session "+session.getRemoteAddress()+" with versions "+ getAsString(connectMessage.getApplicationVersions()));
                            session.write(new RejectMessage(RejectMessageReason.INCOMPATIBLE_VERSION,new byte[] {applicationVersion}));
                        }
                    }
                    else {
                        // Endpoint disabled (e.g. draining): refuse service.
                        // Fix: previously logged session.getReadMessages() (a message count) where the
                        // remote address was intended — cf. the INCOMPATIBLE_VERSION branch above.
                        nioLogger.log(PROTOCOL, session, "REJECTing connection request from session %s as service unavailable", session.getRemoteAddress());
                        LOG.info("REJECTing connection request from session " + session.getRemoteAddress() + " as service unavailable");
                        session.write(new RejectMessage(RejectMessageReason.SERVER_UNAVAILABLE, new byte[] {applicationVersion}));
                    }
                    break;
                case ACCEPT:
                    //Client Side - server has accepted our connection request
                    AcceptMessage acceptMessage = (AcceptMessage) protocolMessage;
                    if (acceptMessage.getAcceptedVersion() != applicationVersion) {
                        session.close();
                        throw new IllegalStateException("Protocol version mismatch - client version is "+applicationVersion+", server has accepted "+acceptMessage.getAcceptedVersion());
                    }
                    nioLogger.log(PROTOCOL, session, "DiscoProtocolDecoder: ACCEPT received for with version %s", acceptMessage.getAcceptedVersion());
                    ((ClientHandshake)session.getAttribute(ClientHandshake.HANDSHAKE)).accept();
                    break;
                case REJECT:
                    //Client Side - server has said foxtrot oscar
                    RejectMessage rejectMessage = (RejectMessage) protocolMessage;
                    nioLogger.log(PROTOCOL, session, "DiscoProtocolDecoder: REJECT received: versions accepted are %s", getAsString(rejectMessage.getAcceptableVersions()));
                    ClientHandshake handshake = (ClientHandshake)session.getAttribute(ClientHandshake.HANDSHAKE);
                    if (handshake != null) {
                        handshake.reject();
                    }
                    break;
                case KEEP_ALIVE:
                    //Both sides keep alive received, which is ignored
                    nioLogger.log(PROTOCOL, session, "DiscoProtocolDecoder: KEEP_ALIVE received");
                    break;
                case DISCONNECT:
                    //Client Side - server doesn't love us anymore
                    session.setAttribute(ProtocolMessage.ProtocolMessageType.DISCONNECT.name());
                    session.close();
                    break;
                default:
                    LOG.error("Unknown message type "+protocolMessage.getProtocolMessageType()+" - Ignoring");
            }
        }
    }
    /** Renders a version array as e.g. {@code {1,2,3}} for log messages. */
    private String getAsString(byte[] versions) {
        StringBuilder sb = new StringBuilder("{");
        boolean first = true;
        for (byte b: versions) {
            if (first) {
                first = false;
            } else {
                sb.append(",");
            }
            sb.append(b);
        }
        sb.append("}");
        return sb.toString();
    }
    /** JMX hook: enables/disables acceptance of new CONNECT requests. */
    @ManagedAttribute
    public void setEnabled(boolean healthy) {
        this.isEnabled = healthy;
    }
    @ManagedAttribute
    public boolean isEnabled() {
        return this.isEnabled;
    }
    /**
     * Exports this service as an MBean, if the JMXControl is available
     */
    @Override
    public void export(JMXControl jmxControl) {
        if (jmxControl != null) {
            jmxControl.registerMBean("DiSCO.socket.transport:name=wireProtocol", this);
        }
    }
    @ManagedAttribute
    public int getInterval() {
        return interval;
    }
    @ManagedAttribute
    public int getTimeout() {
        return timeout;
    }
    @ManagedAttribute
    public long getHeartbeatsMissed() {
        return heartbeatsMissed.get();
    }
    @ManagedAttribute
    public long getHeartbeatsSent() {
        return heartbeatsSent.get();
    }
    @ManagedAttribute
    public long getSessionsCreated() {
        return sessionsCreated.get();
    }
    @ManagedAttribute
    public String getLastSessionFrom() {
        return lastSessionFrom;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* User: anna
* Date: 11-Nov-2008
*/
package org.jetbrains.idea.eclipse.conversion;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.eclipse.EclipseXml;
import org.jetbrains.idea.eclipse.IdeaXml;
import org.jetbrains.idea.eclipse.config.EclipseModuleManager;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
 * Writes an IDEA module's dependencies ({@link ModuleRootModel}) back into an
 * Eclipse {@code .classpath} XML tree. When the previously saved root is
 * supplied, matching old {@code <classpathentry>} elements are cloned and
 * reused so that attributes Eclipse wrote (but IDEA does not model) survive
 * the round trip.
 */
public class EclipseClasspathWriter {
  private static final Logger LOG = Logger.getInstance("#" + EclipseClasspathWriter.class.getName());
  private final ModuleRootModel myModel;
  // "kind + path" -> old <classpathentry> element. JRE-container paths collapse
  // to a single key (see getJREKey) so a changed JDK still reuses the old entry.
  private final Map<String, Element> myOldEntries = new HashMap<String, Element>();
  public EclipseClasspathWriter(final ModuleRootModel model) {
    myModel = model;
  }
  /**
   * Emits one classpath entry per order entry of the module, then the final
   * "output" entry pointing at the compiler output directory.
   *
   * @param classpathElement element the entries are appended to
   * @param oldRoot          previously saved .classpath root used to preserve
   *                         unknown attributes; may be null
   * @throws ConversionException if an order entry of an unsupported type is met
   */
  public void writeClasspath(Element classpathElement, @Nullable Element oldRoot) throws ConversionException {
    if (oldRoot != null) {
      // Index the pre-existing entries so addOrderEntry can clone them back.
      for (Object o : oldRoot.getChildren(EclipseXml.CLASSPATHENTRY_TAG)) {
        final Element oldChild = (Element)o;
        final String oldKind = oldChild.getAttributeValue(EclipseXml.KIND_ATTR);
        final String oldPath = oldChild.getAttributeValue(EclipseXml.PATH_ATTR);
        myOldEntries.put(oldKind + getJREKey(oldPath), oldChild);
      }
    }
    for (OrderEntry orderEntry : myModel.getOrderEntries()) {
      createClasspathEntry(orderEntry, classpathElement);
    }
    // Output entry: default "bin", unless the output is a linked path variable,
    // lies under the content root, or is only known by URL.
    @NonNls String outputPath = "bin";
    final String compilerOutputUrl = myModel.getModuleExtension(CompilerModuleExtension.class).getCompilerOutputUrl();
    final String linkedPath = EclipseModuleManager.getInstance(myModel.getModule()).getEclipseLinkedVarPath(compilerOutputUrl);
    if (linkedPath != null) {
      outputPath = linkedPath;
    } else {
      final VirtualFile contentRoot = EPathUtil.getContentRoot(myModel);
      final VirtualFile output = myModel.getModuleExtension(CompilerModuleExtension.class).getCompilerOutputPath();
      if (contentRoot != null && output != null && VfsUtil.isAncestor(contentRoot, output, false)) {
        outputPath = EPathUtil.collapse2EclipsePath(output.getUrl(), myModel);
      }
      else if (output == null && compilerOutputUrl != null) {
        // Output directory does not exist (yet); fall back to the configured URL.
        outputPath = EPathUtil.collapse2EclipsePath(compilerOutputUrl, myModel);
      }
    }
    final Element orderEntry = addOrderEntry(EclipseXml.OUTPUT_KIND, outputPath, classpathElement);
    setAttributeIfAbsent(orderEntry, EclipseXml.PATH_ATTR, EclipseXml.BIN_DIR);
  }
  /**
   * Translates a single IDEA order entry into its Eclipse classpathentry form.
   * Handles, in order: module source folders, inter-module dependencies,
   * libraries (module-level and project/global), and JDK entries.
   *
   * @throws ConversionException for order-entry types this writer cannot express
   */
  private void createClasspathEntry(OrderEntry entry, Element classpathRoot) throws ConversionException {
    final EclipseModuleManager eclipseModuleManager = EclipseModuleManager.getInstance(entry.getOwnerModule());
    if (entry instanceof ModuleSourceOrderEntry) {
      // NOTE(review): binarySearch assumes getOrderEntries() is ordered consistently
      // with how indices were recorded — presumably guaranteed by the model; confirm.
      final boolean shouldPlaceSeparately =
        eclipseModuleManager.isExpectedModuleSourcePlace(Arrays.binarySearch(myModel.getOrderEntries(), entry));
      final ContentEntry[] entries = myModel.getContentEntries();
      for (final ContentEntry contentEntry : entries) {
        final VirtualFile contentRoot = contentEntry.getFile();
        for (SourceFolder sourceFolder : contentEntry.getSourceFolders()) {
          final String srcUrl = sourceFolder.getUrl();
          String relativePath = EPathUtil.collapse2EclipsePath(srcUrl, myModel);
          if (contentRoot != EPathUtil.getContentRoot(myModel)) {
            // Source folder outside the primary content root: use its linked
            // variable path when one is registered.
            final String linkedPath = EclipseModuleManager.getInstance(entry.getOwnerModule()).getEclipseLinkedSrcVariablePath(srcUrl);
            if (linkedPath != null) {
              relativePath = linkedPath;
            }
          }
          // Restore the entry's original position in the file when it was recorded.
          final Integer idx = eclipseModuleManager.getSrcPlace(srcUrl);
          addOrderEntry(EclipseXml.SRC_KIND, relativePath, classpathRoot, shouldPlaceSeparately && idx != null ? idx.intValue() : -1);
        }
      }
    }
    else if (entry instanceof ModuleOrderEntry) {
      // Module dependency: "src" kind with a leading slash distinguishes it from a source folder.
      Element orderEntry = addOrderEntry(EclipseXml.SRC_KIND, "/" + ((ModuleOrderEntry)entry).getModuleName(), classpathRoot);
      setAttributeIfAbsent(orderEntry, EclipseXml.COMBINEACCESSRULES_ATTR, EclipseXml.FALSE_VALUE);
      setExported(orderEntry, ((ExportableOrderEntry)entry));
    }
    else if (entry instanceof LibraryOrderEntry) {
      final LibraryOrderEntry libraryOrderEntry = (LibraryOrderEntry)entry;
      final String libraryName = libraryOrderEntry.getLibraryName();
      if (libraryOrderEntry.isModuleLevel()) {
        // Only the first classes root is written to .classpath; presumably any
        // further roots are kept in the .eml file only — confirm against reader.
        final String[] files = libraryOrderEntry.getRootUrls(OrderRootType.CLASSES);
        if (files.length > 0) {
          if (libraryName != null &&
              libraryName.contains(IdeaXml.JUNIT) &&
              Comparing.strEqual(files[0], EclipseClasspathReader.getJunitClsUrl(libraryName.contains("4")))) {
            // The bundled JUnit is expressed as an Eclipse JUnit container.
            final Element orderEntry =
              addOrderEntry(EclipseXml.CON_KIND, EclipseXml.JUNIT_CONTAINER + "/" + libraryName.substring(IdeaXml.JUNIT.length()),
                            classpathRoot);
            setExported(orderEntry, libraryOrderEntry);
          }
          else {
            boolean newVarLibrary = false;
            String eclipseVariablePath = eclipseModuleManager.getEclipseVariablePath(files[0]);
            if (eclipseVariablePath == null && !eclipseModuleManager.isEclipseLibUrl(files[0])) { //new library was added
              newVarLibrary = true;
              eclipseVariablePath = EPathUtil.collapse2EclipseVariabledPath(libraryOrderEntry, OrderRootType.CLASSES);
            }
            Element orderEntry;
            if (eclipseVariablePath != null) {
              // Path could be expressed through an Eclipse classpath variable.
              orderEntry = addOrderEntry(EclipseXml.VAR_KIND, eclipseVariablePath, classpathRoot);
            }
            else {
              orderEntry = addOrderEntry(EclipseXml.LIB_KIND, EPathUtil.collapse2EclipsePath(files[0], myModel), classpathRoot);
            }
            // Source attachment: prefer a variable path when the classes entry is
            // variable-based; fall back to a plain path, or drop to .eml only.
            final String srcRelativePath;
            String eclipseSrcVariablePath = null;
            boolean addSrcRoots = true;
            final String[] srcFiles = libraryOrderEntry.getRootUrls(OrderRootType.SOURCES);
            if (srcFiles.length == 0) {
              srcRelativePath = null;
            }
            else {
              final String srcFile = srcFiles[0];
              srcRelativePath = EPathUtil.collapse2EclipsePath(srcFile, myModel);
              if (eclipseVariablePath != null) {
                eclipseSrcVariablePath = eclipseModuleManager.getEclipseSrcVariablePath(srcFile);
                if (eclipseSrcVariablePath == null) {
                  eclipseSrcVariablePath = EPathUtil.collapse2EclipseVariabledPath(libraryOrderEntry, OrderRootType.SOURCES);
                  if (eclipseSrcVariablePath != null) {
                    eclipseSrcVariablePath = "/" + eclipseSrcVariablePath;
                  } else {
                    if (newVarLibrary) { //new library which cannot be replaced with vars
                      // Undo the var entry and re-add as a plain lib entry instead.
                      orderEntry.detach();
                      orderEntry = addOrderEntry(EclipseXml.LIB_KIND, EPathUtil.collapse2EclipsePath(files[0], myModel), classpathRoot);
                    }
                    else {
                      LOG.info("Added root " + srcRelativePath + " (in existing var library) can't be replaced with any variable; src roots placed in .eml only");
                      addSrcRoots = false;
                    }
                  }
                }
              }
            }
            setOrRemoveAttribute(orderEntry, EclipseXml.SOURCEPATH_ATTR, addSrcRoots ? (eclipseSrcVariablePath != null ? eclipseSrcVariablePath : srcRelativePath) : null);
            EJavadocUtil.setupJavadocAttributes(orderEntry, libraryOrderEntry, myModel);
            setExported(orderEntry, libraryOrderEntry);
          }
        }
      }
      else {
        // Project/global library: expressed as a container entry.
        final Element orderEntry;
        if (eclipseModuleManager.getUnknownCons().contains(libraryName)) {
          // Container originally read from Eclipse that IDEA does not model; write back verbatim.
          orderEntry = addOrderEntry(EclipseXml.CON_KIND, libraryName, classpathRoot);
        } else if (Comparing.strEqual(libraryName, IdeaXml.ECLIPSE_LIBRARY)) {
          orderEntry = addOrderEntry(EclipseXml.CON_KIND, EclipseXml.ECLIPSE_PLATFORM, classpathRoot);
        }
        else {
          orderEntry = addOrderEntry(EclipseXml.CON_KIND, EclipseXml.USER_LIBRARY + "/" + libraryName, classpathRoot);
        }
        setExported(orderEntry, libraryOrderEntry);
      }
    }
    else if (entry instanceof JdkOrderEntry) {
      if (entry instanceof InheritedJdkOrderEntry) {
        // Project-inherited JDK maps to the default JRE container, unless the
        // module was flagged to configure its JDK explicitly (then omit it).
        if (!EclipseModuleManager.getInstance(entry.getOwnerModule()).isForceConfigureJDK()) {
          addOrderEntry(EclipseXml.CON_KIND, EclipseXml.JRE_CONTAINER, classpathRoot);
        }
      }
      else {
        final Sdk jdk = ((JdkOrderEntry)entry).getJdk();
        String jdkLink;
        if (jdk == null) {
          jdkLink = EclipseXml.JRE_CONTAINER;
        }
        else {
          // Named JDK: JRE container plus SDK-type segment (for Java SDKs) plus the JDK name.
          jdkLink = EclipseXml.JRE_CONTAINER;
          if (jdk.getSdkType() instanceof JavaSdkType) {
            jdkLink += EclipseXml.JAVA_SDK_TYPE;
          }
          jdkLink += "/" + jdk.getName();
        }
        addOrderEntry(EclipseXml.CON_KIND, jdkLink, classpathRoot);
      }
    }
    else {
      throw new ConversionException("Unknown EclipseProjectModel.ClasspathEntry: " + entry.getClass());
    }
  }
  /** Appends an entry at the end of the classpath (no explicit position). */
  private Element addOrderEntry(String kind, String path, Element classpathRoot) {
    return addOrderEntry(kind, path, classpathRoot, -1);
  }
  /**
   * Appends (or inserts at {@code idx}, when valid) a classpathentry element.
   * An old element with the same kind+path key is cloned so unknown attributes
   * are preserved; otherwise a fresh element is created.
   */
  private Element addOrderEntry(String kind, String path, Element classpathRoot, int idx) {
    final Element element = myOldEntries.get(kind + getJREKey(path));
    if (element != null){
      final Element clonedElement = (Element)element.clone();
      if (idx == -1 || idx >= classpathRoot.getContentSize()) {
        classpathRoot.addContent(clonedElement);
      } else {
        classpathRoot.addContent(idx, clonedElement);
      }
      return clonedElement;
    }
    Element orderEntry = new Element(EclipseXml.CLASSPATHENTRY_TAG);
    orderEntry.setAttribute(EclipseXml.KIND_ATTR, kind);
    if (path != null) {
      orderEntry.setAttribute(EclipseXml.PATH_ATTR, path);
    }
    if (idx == -1) {
      classpathRoot.addContent(orderEntry);
    } else {
      classpathRoot.addContent(idx, orderEntry);
    }
    return orderEntry;
  }
  // Collapses every JRE container path to one key so a JDK change still matches
  // the old entry in myOldEntries.
  private static String getJREKey(String path) {
    return path.startsWith(EclipseXml.JRE_CONTAINER) ? EclipseXml.JRE_CONTAINER : path;
  }
  /** Sets exported="true" or removes the attribute, mirroring the IDEA flag. */
  private static void setExported(Element orderEntry, ExportableOrderEntry dependency) {
    setOrRemoveAttribute(orderEntry, EclipseXml.EXPORTED_ATTR, dependency.isExported() ? EclipseXml.TRUE_VALUE : null);
  }
  // A null value removes the attribute rather than writing an empty one.
  private static void setOrRemoveAttribute(Element element, String name, String value) {
    if (value != null) {
      element.setAttribute(name, value);
    }
    else {
      element.removeAttribute(name);
    }
  }
  // Only fills the attribute when the (possibly cloned) element lacks it,
  // so values carried over from the old .classpath win.
  private static void setAttributeIfAbsent(Element element, String name, String value) {
    if (element.getAttribute(name) == null) {
      element.setAttribute(name, value);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.partition;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.runtime.event.AbstractEvent;
import org.apache.flink.runtime.io.network.buffer.Buffer;
import org.apache.flink.runtime.io.network.buffer.BufferBuilder;
import org.apache.flink.runtime.io.network.buffer.BufferConsumer;
import org.apache.flink.runtime.io.network.buffer.BufferProvider;
import org.apache.flink.runtime.io.network.util.TestConsumerCallback;
import org.apache.flink.runtime.io.network.util.TestPooledBufferProvider;
import org.apache.flink.runtime.io.network.util.TestProducerSource;
import org.apache.flink.runtime.io.network.util.TestSubpartitionConsumer;
import org.apache.flink.runtime.io.network.util.TestSubpartitionProducer;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import static org.apache.flink.runtime.io.network.buffer.BufferBuilderTestUtils.createFilledBufferConsumer;
import static org.apache.flink.util.FutureUtil.waitForAll;
import static org.apache.flink.util.Preconditions.checkState;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Tests for {@link PipelinedSubpartition}.
*
* @see PipelinedSubpartitionWithReadViewTest
*/
public class PipelinedSubpartitionTest extends SubpartitionTestBase {

	/** Executor service for concurrent produce/consume tests. */
	private static final ExecutorService executorService = Executors.newCachedThreadPool();

	@AfterClass
	public static void shutdownExecutorService() throws Exception {
		executorService.shutdownNow();
	}

	/** Creates a pipelined subpartition under a mocked parent result partition. */
	@Override
	PipelinedSubpartition createSubpartition() {
		final ResultPartition parent = mock(ResultPartition.class);
		return new PipelinedSubpartition(0, parent);
	}

	/** A pipelined subpartition supports exactly one read view; a second request must fail. */
	@Test
	public void testIllegalReadViewRequest() throws Exception {
		final PipelinedSubpartition subpartition = createSubpartition();
		// Successful request
		assertNotNull(subpartition.createReadView(new NoOpBufferAvailablityListener()));
		try {
			subpartition.createReadView(new NoOpBufferAvailablityListener());
			fail("Did not throw expected exception after duplicate notifyNonEmpty view request.");
		} catch (IllegalStateException expected) {
		}
	}

	/**
	 * Verifies that the isReleased() check of the view checks the parent
	 * subpartition.
	 */
	@Test
	public void testIsReleasedChecksParent() {
		PipelinedSubpartition subpartition = mock(PipelinedSubpartition.class);
		PipelinedSubpartitionView reader = new PipelinedSubpartitionView(
			subpartition, mock(BufferAvailabilityListener.class));
		// Mock interactions are order-sensitive: each view call must delegate once.
		assertFalse(reader.isReleased());
		verify(subpartition, times(1)).isReleased();
		when(subpartition.isReleased()).thenReturn(true);
		assertTrue(reader.isReleased());
		verify(subpartition, times(2)).isReleased();
	}

	@Test
	public void testConcurrentFastProduceAndFastConsume() throws Exception {
		testProduceConsume(false, false);
	}

	@Test
	public void testConcurrentFastProduceAndSlowConsume() throws Exception {
		testProduceConsume(false, true);
	}

	@Test
	public void testConcurrentSlowProduceAndFastConsume() throws Exception {
		testProduceConsume(true, false);
	}

	@Test
	public void testConcurrentSlowProduceAndSlowConsume() throws Exception {
		testProduceConsume(true, true);
	}

	/**
	 * Runs a producer and a consumer concurrently against one subpartition.
	 * The producer fills each buffer with consecutive ints; the consumer
	 * verifies it sees exactly that sequence, buffer by buffer.
	 *
	 * @param isSlowProducer whether the producer should artificially delay
	 * @param isSlowConsumer whether the consumer should artificially delay
	 */
	private void testProduceConsume(boolean isSlowProducer, boolean isSlowConsumer) throws Exception {
		// Config
		final int producerBufferPoolSize = 8;
		final int producerNumberOfBuffersToProduce = 128;
		// Producer behaviour
		final TestProducerSource producerSource = new TestProducerSource() {
			private BufferProvider bufferProvider = new TestPooledBufferProvider(producerBufferPoolSize);
			private int numberOfBuffers;
			@Override
			public BufferConsumerAndChannel getNextBufferConsumer() throws Exception {
				if (numberOfBuffers == producerNumberOfBuffersToProduce) {
					// Signals end of production.
					return null;
				}
				final BufferBuilder bufferBuilder = bufferProvider.requestBufferBuilderBlocking();
				int segmentSize = bufferBuilder.getMaxCapacity();
				MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(segmentSize);
				// Continue the global int sequence where the previous buffer left off.
				int next = numberOfBuffers * (segmentSize / Integer.BYTES);
				for (int i = 0; i < segmentSize; i += 4) {
					segment.putInt(i, next);
					next++;
				}
				// The whole segment must fit into the buffer in one append.
				checkState(bufferBuilder.appendAndCommit(ByteBuffer.wrap(segment.getArray())) == segmentSize);
				bufferBuilder.finish();
				numberOfBuffers++;
				return new BufferConsumerAndChannel(bufferBuilder.createBufferConsumer(), 0);
			}
		};
		// Consumer behaviour
		final TestConsumerCallback consumerCallback = new TestConsumerCallback() {
			private int numberOfBuffers;
			@Override
			public void onBuffer(Buffer buffer) {
				final MemorySegment segment = buffer.getMemorySegment();
				assertEquals(segment.size(), buffer.getSize());
				// Expect the same consecutive int sequence the producer wrote.
				int expected = numberOfBuffers * (segment.size() / 4);
				for (int i = 0; i < segment.size(); i += 4) {
					assertEquals(expected, segment.getInt(i));
					expected++;
				}
				numberOfBuffers++;
				buffer.recycleBuffer();
			}
			@Override
			public void onEvent(AbstractEvent event) {
				// Nothing to do in this test
			}
		};
		final PipelinedSubpartition subpartition = createSubpartition();
		TestSubpartitionConsumer consumer = new TestSubpartitionConsumer(isSlowConsumer, consumerCallback);
		final PipelinedSubpartitionView view = subpartition.createReadView(consumer);
		consumer.setSubpartitionView(view);
		Future<Boolean> producerResult = executorService.submit(
			new TestSubpartitionProducer(subpartition, isSlowProducer, producerSource));
		Future<Boolean> consumerResult = executorService.submit(consumer);
		// Wait for producer and consumer to finish (or fail) within the timeout.
		waitForAll(60_000L, producerResult, consumerResult);
	}

	/**
	 * Tests cleanup of {@link PipelinedSubpartition#release()} with no read view attached.
	 */
	@Test
	public void testCleanupReleasedPartitionNoView() throws Exception {
		testCleanupReleasedPartition(false);
	}

	/**
	 * Tests cleanup of {@link PipelinedSubpartition#release()} with a read view attached.
	 */
	@Test
	public void testCleanupReleasedPartitionWithView() throws Exception {
		testCleanupReleasedPartition(true);
	}

	/**
	 * Tests cleanup of {@link PipelinedSubpartition#release()}.
	 *
	 * @param createView
	 * 		whether the partition should have a view attached to it (<tt>true</tt>) or not (<tt>false</tt>)
	 */
	private void testCleanupReleasedPartition(boolean createView) throws Exception {
		PipelinedSubpartition partition = createSubpartition();
		BufferConsumer buffer1 = createFilledBufferConsumer(4096);
		BufferConsumer buffer2 = createFilledBufferConsumer(4096);
		boolean buffer1Recycled;
		boolean buffer2Recycled;
		try {
			partition.add(buffer1);
			partition.add(buffer2);
			// create the read view first
			ResultSubpartitionView view = null;
			if (createView) {
				view = partition.createReadView(new NoOpBufferAvailablityListener());
			}
			partition.release();
			assertTrue(partition.isReleased());
			if (createView) {
				assertTrue(view.isReleased());
			}
			assertTrue(buffer1.isRecycled());
		} finally {
			// Close buffers ourselves if release() failed to recycle them, then
			// report the failure after cleanup (buffer2 is only checked here).
			buffer1Recycled = buffer1.isRecycled();
			if (!buffer1Recycled) {
				buffer1.close();
			}
			buffer2Recycled = buffer2.isRecycled();
			if (!buffer2Recycled) {
				buffer2.close();
			}
		}
		if (!buffer1Recycled) {
			Assert.fail("buffer 1 not recycled");
		}
		if (!buffer2Recycled) {
			Assert.fail("buffer 2 not recycled");
		}
		assertEquals(2, partition.getTotalNumberOfBuffers());
		assertEquals(0, partition.getTotalNumberOfBytes()); // buffer data is never consumed
	}
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.agent.service;
import com.thoughtworks.go.agent.common.ssl.GoAgentServerHttpClient;
import com.thoughtworks.go.agent.common.ssl.GoAgentServerHttpClientBuilder;
import com.thoughtworks.go.config.AgentAutoRegistrationProperties;
import com.thoughtworks.go.config.AgentRegistry;
import com.thoughtworks.go.config.GuidService;
import com.thoughtworks.go.security.KeyStoreManager;
import com.thoughtworks.go.security.Registration;
import com.thoughtworks.go.security.RegistrationJSONizer;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.SystemUtil;
import com.thoughtworks.go.util.URLService;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.NullInputStream;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import static com.thoughtworks.go.security.CertificateUtil.md5Fingerprint;
import static com.thoughtworks.go.security.SelfSignedCertificateX509TrustManager.CRUISE_SERVER;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static org.apache.http.HttpStatus.SC_ACCEPTED;
@Service
public class SslInfrastructureService {
private static final String CHAIN_ALIAS = "agent";
private static final Logger LOGGER = Logger.getLogger(SslInfrastructureService.class);
private static final int REGISTER_RETRY_INTERVAL = 5000;
private final RemoteRegistrationRequester remoteRegistrationRequester;
private final KeyStoreManager keyStoreManager;
private final GoAgentServerHttpClient httpClient;
private transient boolean registered = false;
    /**
     * Production constructor: builds a {@link RemoteRegistrationRequester} against the
     * server's agent registration URL and delegates to the injectable constructor.
     */
    @Autowired
    public SslInfrastructureService(URLService urlService, GoAgentServerHttpClient httpClient, AgentRegistry agentRegistry) throws Exception {
        this(new RemoteRegistrationRequester(urlService.getAgentRegistrationURL(), agentRegistry, httpClient), httpClient);
    }
// For mocking out remote call
SslInfrastructureService(RemoteRegistrationRequester requester, GoAgentServerHttpClient httpClient)
throws Exception {
this.remoteRegistrationRequester = requester;
this.httpClient = httpClient;
this.keyStoreManager = new KeyStoreManager();
this.keyStoreManager.preload(GoAgentServerHttpClientBuilder.AGENT_CERTIFICATE_FILE, httpClientBuilder().keystorePassword());
}
private GoAgentServerHttpClientBuilder httpClientBuilder() {
return new GoAgentServerHttpClientBuilder(new SystemEnvironment());
}
    /**
     * (Re)initialises the SSL keystores/truststores on disk and resets the HTTP
     * client so it picks up the new material.
     *
     * @throws IOException if the keystore files cannot be initialised
     */
    public void createSslInfrastructure() throws IOException {
        httpClientBuilder().initialize();
        httpClient.reset();
    }
public void registerIfNecessary(AgentAutoRegistrationProperties agentAutoRegistrationProperties) throws Exception {
registered = keyStoreManager.hasCertificates(CHAIN_ALIAS, GoAgentServerHttpClientBuilder.AGENT_CERTIFICATE_FILE,
httpClientBuilder().keystorePassword()) && GuidService.guidPresent();
if (!registered) {
LOGGER.info("[Agent Registration] Starting to register agent.");
register(agentAutoRegistrationProperties);
createSslInfrastructure();
registered = true;
LOGGER.info("[Agent Registration] Successfully registered agent.");
}
}
    /** @return true once this agent holds a valid registration (see {@link #registerIfNecessary}). */
    public boolean isRegistered() {
        return registered;
    }
private void register(AgentAutoRegistrationProperties agentAutoRegistrationProperties) throws Exception {
String hostName = SystemUtil.getLocalhostNameOrRandomNameIfNotFound();
Registration keyEntry = Registration.createNullPrivateKeyEntry();
while (!keyEntry.isValid()) {
try {
keyEntry = remoteRegistrationRequester.requestRegistration(hostName, agentAutoRegistrationProperties);
} catch (Exception e) {
LOGGER.error("[Agent Registration] There was a problem registering with the go server.", e);
throw e;
}
if ((!keyEntry.isValid())) {
try {
LOGGER.debug("[Agent Registration] Retrieved agent key from Go server is not valid.");
Thread.sleep(REGISTER_RETRY_INTERVAL);
} catch (InterruptedException e) {
// Ok
}
}
}
LOGGER.info("[Agent Registration] Retrieved registration from Go server.");
storeChainIntoAgentStore(keyEntry);
agentAutoRegistrationProperties.scrubRegistrationProperties();
}
    /**
     * Persists the received registration (certificate chain and key) into the agent
     * keystore, logging the certificate fingerprint and validity start.
     * Any failure is fatal for registration and is converted to a runtime bomb.
     */
    private void storeChainIntoAgentStore(Registration keyEntry) {
        try {
            keyStoreManager.storeCertificate(CHAIN_ALIAS, GoAgentServerHttpClientBuilder.AGENT_CERTIFICATE_FILE, httpClientBuilder().keystorePassword(), keyEntry);
            LOGGER.info(String.format("[Agent Registration] Stored registration for cert with hash code: %s not valid before: %s", md5Fingerprint(keyEntry.getFirstCertificate()),
                    keyEntry.getCertificateNotBeforeDate()));
        } catch (Exception e) {
            throw bomb("Couldn't save agent key into store", e);
        }
    }
    /**
     * Discards the agent's current certificate material: resets the HTTP client,
     * then removes the agent cert and the trusted server cert from their stores.
     * If removal fails, the keystore files are deleted outright as a last resort.
     */
    public void invalidateAgentCertificate() {
        try {
            httpClient.reset();
            keyStoreManager.deleteEntry(CHAIN_ALIAS, GoAgentServerHttpClientBuilder.AGENT_CERTIFICATE_FILE, httpClientBuilder().keystorePassword());
            keyStoreManager.deleteEntry(CRUISE_SERVER, GoAgentServerHttpClientBuilder.AGENT_TRUST_FILE, httpClientBuilder().keystorePassword());
        } catch (Exception e) {
            LOGGER.fatal("[Agent Registration] Error while deleting key from key store", e);
            deleteKeyStores();
        }
    }
    // Best-effort removal of both key store files; any failure to delete is ignored
    // (deleteQuietly never throws), since this is the last-resort cleanup path.
    private void deleteKeyStores() {
        FileUtils.deleteQuietly(GoAgentServerHttpClientBuilder.AGENT_CERTIFICATE_FILE);
        FileUtils.deleteQuietly(GoAgentServerHttpClientBuilder.AGENT_TRUST_FILE);
    }
public static class RemoteRegistrationRequester {
private final AgentRegistry agentRegistry;
private String serverUrl;
private GoAgentServerHttpClient httpClient;
public RemoteRegistrationRequester(String serverUrl, AgentRegistry agentRegistry, GoAgentServerHttpClient httpClient) {
this.serverUrl = serverUrl;
this.httpClient = httpClient;
this.agentRegistry = agentRegistry;
}
protected Registration requestRegistration(String agentHostName, AgentAutoRegistrationProperties agentAutoRegisterProperties) throws IOException, ClassNotFoundException {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("[Agent Registration] Using URL %s to register.", serverUrl));
}
HttpRequestBase postMethod = (HttpRequestBase) RequestBuilder.post(serverUrl)
.addParameter("hostname", agentHostName)
.addParameter("uuid", agentRegistry.uuid())
.addParameter("location", SystemUtil.currentWorkingDirectory())
.addParameter("usablespace", String.valueOf(AgentRuntimeInfo.usableSpace(SystemUtil.currentWorkingDirectory())))
.addParameter("operatingSystem", new SystemEnvironment().getOperatingSystemCompleteName())
.addParameter("agentAutoRegisterKey", agentAutoRegisterProperties.agentAutoRegisterKey())
.addParameter("agentAutoRegisterResources", agentAutoRegisterProperties.agentAutoRegisterResources())
.addParameter("agentAutoRegisterEnvironments", agentAutoRegisterProperties.agentAutoRegisterEnvironments())
.addParameter("agentAutoRegisterHostname", agentAutoRegisterProperties.agentAutoRegisterHostname())
.addParameter("elasticAgentId", agentAutoRegisterProperties.agentAutoRegisterElasticAgentId())
.addParameter("elasticPluginId", agentAutoRegisterProperties.agentAutoRegisterElasticPluginId())
.build();
try {
CloseableHttpResponse response = httpClient.execute(postMethod);
if (getStatusCode(response) == SC_ACCEPTED) {
LOGGER.debug("The server has accepted the registration request.");
return Registration.createNullPrivateKeyEntry();
}
try (InputStream is = response.getEntity() == null ? new NullInputStream(0) : response.getEntity().getContent()) {
String responseBody = IOUtils.toString(is, StandardCharsets.UTF_8);
if (getStatusCode(response) == 200) {
LOGGER.info("This agent is now approved by the server.");
return readResponse(responseBody);
} else {
LOGGER.warn(String.format("The server sent a response that we could not understand. The HTTP status was %s. The response body was:\n%s", response.getStatusLine(), responseBody));
return Registration.createNullPrivateKeyEntry();
}
}
} finally {
postMethod.releaseConnection();
}
}
protected Registration readResponse(String responseBody) {
return RegistrationJSONizer.fromJson(responseBody);
}
protected int getStatusCode(CloseableHttpResponse response) {
return response.getStatusLine().getStatusCode();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.IOException;
import java.io.OutputStream;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.function.Supplier;
import java.util.stream.IntStream;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.filter.ClusteringIndexSliceFilter;
import org.apache.cassandra.db.filter.ColumnFilter;
import org.apache.cassandra.db.filter.DataLimits;
import org.apache.cassandra.db.filter.RowFilter;
import org.apache.cassandra.db.marshal.AsciiType;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.ReversedType;
import org.apache.cassandra.db.marshal.SetType;
import org.apache.cassandra.db.partitions.*;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.rows.RowIterator;
import org.apache.cassandra.db.rows.Unfiltered;
import org.apache.cassandra.db.rows.DeserializationHelper;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import org.apache.cassandra.db.rows.UnfilteredRowIterators;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.DataInputBuffer;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.io.util.WrappedDataOutputStreamPlus;
import org.apache.cassandra.locator.EndpointsForToken;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.locator.ReplicaUtils;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.metrics.ClearableHistogram;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.net.Verb;
import org.apache.cassandra.repair.consistent.LocalSessionAccessor;
import org.apache.cassandra.schema.CachingParams;
import org.apache.cassandra.schema.KeyspaceMetadata;
import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.schema.Schema;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.schema.TableParams;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.streaming.PreviewKind;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.UUIDGen;
import static org.apache.cassandra.utils.ByteBufferUtil.EMPTY_BYTE_BUFFER;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class ReadCommandTest
{
    // Keyspace and table names used by the tests below; each CFn is dedicated to a
    // specific group of tests so their data and metrics do not interfere.
    private static final String KEYSPACE = "ReadCommandTest";
    private static final String CF1 = "Standard1";
    private static final String CF2 = "Standard2";
    private static final String CF3 = "Standard3";
    private static final String CF4 = "Standard4";
    private static final String CF5 = "Standard5";
    private static final String CF6 = "Standard6";
    private static final String CF7 = "Counter7";
    private static final String CF8 = "Standard8";
    private static final String CF9 = "Standard9";

    // Fixed coordinator address for repaired-data-tracking tests.
    private static final InetAddressAndPort REPAIR_COORDINATOR;
    static {
    try
    {
        REPAIR_COORDINATOR = InetAddressAndPort.getByName("10.0.0.1");
    }
    catch (UnknownHostException e)
    {
        // Cannot happen for a literal IPv4 address; treat as a test-setup failure.
        throw new AssertionError(e);
    }
    }
    /**
     * Builds the keyspace and all tables used by this test class before any test runs.
     * Tables differ in column count, caching, counter support and clustering order to
     * exercise the various ReadCommand code paths.
     */
    @BeforeClass
    public static void defineSchema() throws ConfigurationException
    {
        DatabaseDescriptor.daemonInitialization();

        // CF1: the standard SchemaLoader table (single "val" column).
        TableMetadata.Builder metadata1 = SchemaLoader.standardCFMD(KEYSPACE, CF1);

        // CF2: two regular columns — used by the slice/names abort and tracking tests.
        TableMetadata.Builder metadata2 =
            TableMetadata.builder(KEYSPACE, CF2)
                         .addPartitionKeyColumn("key", BytesType.instance)
                         .addClusteringColumn("col", AsciiType.instance)
                         .addRegularColumn("a", AsciiType.instance)
                         .addRegularColumn("b", AsciiType.instance);

        // CF3-CF5: six regular columns each; CF3 for group-merge, CF4/CF5 for the
        // tombstone-counting tests (separate tables keep their metrics independent).
        TableMetadata.Builder metadata3 =
            TableMetadata.builder(KEYSPACE, CF3)
                         .addPartitionKeyColumn("key", BytesType.instance)
                         .addClusteringColumn("col", AsciiType.instance)
                         .addRegularColumn("a", AsciiType.instance)
                         .addRegularColumn("b", AsciiType.instance)
                         .addRegularColumn("c", AsciiType.instance)
                         .addRegularColumn("d", AsciiType.instance)
                         .addRegularColumn("e", AsciiType.instance)
                         .addRegularColumn("f", AsciiType.instance);

        TableMetadata.Builder metadata4 =
            TableMetadata.builder(KEYSPACE, CF4)
                         .addPartitionKeyColumn("key", BytesType.instance)
                         .addClusteringColumn("col", AsciiType.instance)
                         .addRegularColumn("a", AsciiType.instance)
                         .addRegularColumn("b", AsciiType.instance)
                         .addRegularColumn("c", AsciiType.instance)
                         .addRegularColumn("d", AsciiType.instance)
                         .addRegularColumn("e", AsciiType.instance)
                         .addRegularColumn("f", AsciiType.instance);

        TableMetadata.Builder metadata5 =
            TableMetadata.builder(KEYSPACE, CF5)
                         .addPartitionKeyColumn("key", BytesType.instance)
                         .addClusteringColumn("col", AsciiType.instance)
                         .addRegularColumn("a", AsciiType.instance)
                         .addRegularColumn("b", AsciiType.instance)
                         .addRegularColumn("c", AsciiType.instance)
                         .addRegularColumn("d", AsciiType.instance)
                         .addRegularColumn("e", AsciiType.instance)
                         .addRegularColumn("f", AsciiType.instance);

        // CF6: has a static column and full caching enabled.
        TableMetadata.Builder metadata6 =
            TableMetadata.builder(KEYSPACE, CF6)
                         .addPartitionKeyColumn("key", BytesType.instance)
                         .addStaticColumn("s", AsciiType.instance)
                         .addClusteringColumn("col", AsciiType.instance)
                         .addRegularColumn("a", AsciiType.instance)
                         .addRegularColumn("b", AsciiType.instance)
                         .caching(CachingParams.CACHE_EVERYTHING);

        // CF7: counter table, used by the legacy-counter digest test.
        TableMetadata.Builder metadata7 =
            TableMetadata.builder(KEYSPACE, CF7)
                         .flags(EnumSet.of(TableMetadata.Flag.COUNTER, TableMetadata.Flag.COMPOUND))
                         .addPartitionKeyColumn("key", BytesType.instance)
                         .addClusteringColumn("col", AsciiType.instance)
                         .addRegularColumn("c", CounterColumnType.instance);

        // CF8: includes a multi-cell set column, used by the tombstone-purging test.
        TableMetadata.Builder metadata8 =
            TableMetadata.builder(KEYSPACE, CF8)
                         .addPartitionKeyColumn("key", BytesType.instance)
                         .addClusteringColumn("col", AsciiType.instance)
                         .addRegularColumn("a", AsciiType.instance)
                         .addRegularColumn("b", AsciiType.instance)
                         .addRegularColumn("c", SetType.getInstance(AsciiType.instance, true));

        // CF9: int keys with reversed clustering order, used by the overread metrics test.
        TableMetadata.Builder metadata9 =
            TableMetadata.builder(KEYSPACE, CF9)
                         .addPartitionKeyColumn("key", Int32Type.instance)
                         .addClusteringColumn("col", ReversedType.getInstance(Int32Type.instance))
                         .addRegularColumn("a", AsciiType.instance);

        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE,
                                    KeyspaceParams.simple(1),
                                    metadata1,
                                    metadata2,
                                    metadata3,
                                    metadata4,
                                    metadata5,
                                    metadata6,
                                    metadata7,
                                    metadata8,
                                    metadata9);
        LocalSessionAccessor.startup();
    }
@Test
public void testPartitionRangeAbort() throws Exception
{
ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF1);
new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key1"))
.clustering("Column1")
.add("val", ByteBufferUtil.bytes("abcd"))
.build()
.apply();
cfs.forceBlockingFlush();
new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key2"))
.clustering("Column1")
.add("val", ByteBufferUtil.bytes("abcd"))
.build()
.apply();
ReadCommand readCommand = Util.cmd(cfs).build();
assertEquals(2, Util.getAll(readCommand).size());
readCommand.abort();
assertEquals(0, Util.getAll(readCommand).size());
}
@Test
public void testSinglePartitionSliceAbort()
{
ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
cfs.truncateBlocking();
new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
.clustering("cc")
.add("a", ByteBufferUtil.bytes("abcd"))
.build()
.apply();
cfs.forceBlockingFlush();
new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
.clustering("dd")
.add("a", ByteBufferUtil.bytes("abcd"))
.build()
.apply();
ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).build();
List<FilteredPartition> partitions = Util.getAll(readCommand);
assertEquals(1, partitions.size());
assertEquals(2, partitions.get(0).rowCount());
readCommand.abort();
assertEquals(0, Util.getAll(readCommand).size());
}
@Test
public void testSinglePartitionNamesAbort()
{
ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
cfs.truncateBlocking();
new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
.clustering("cc")
.add("a", ByteBufferUtil.bytes("abcd"))
.build()
.apply();
cfs.forceBlockingFlush();
new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
.clustering("dd")
.add("a", ByteBufferUtil.bytes("abcd"))
.build()
.apply();
ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).includeRow("cc").includeRow("dd").build();
List<FilteredPartition> partitions = Util.getAll(readCommand);
assertEquals(1, partitions.size());
assertEquals(2, partitions.get(0).rowCount());
readCommand.abort();
assertEquals(0, Util.getAll(readCommand).size());
}
    /**
     * Serializes the results of several SinglePartitionReadCommand groups, deserializes
     * them again, merges the resulting partition iterators and verifies that rows appear
     * in the expected order with deletions applied.
     */
    @Test
    public void testSinglePartitionGroupMerge() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF3);

        // Each inner group is written, flushed and queried in isolation (the table is
        // truncated between groups); the serialized query results are merged afterwards.
        String[][][] groups = new String[][][] {
            new String[][] {
                new String[] { "1", "key1", "aa", "a" }, // "1" indicates to create the data, "-1" to delete the row
                new String[] { "1", "key2", "bb", "b" },
                new String[] { "1", "key3", "cc", "c" }
            },
            new String[][] {
                new String[] { "1", "key3", "dd", "d" },
                new String[] { "1", "key2", "ee", "e" },
                new String[] { "1", "key1", "ff", "f" }
            },
            new String[][] {
                new String[] { "1", "key6", "aa", "a" },
                new String[] { "1", "key5", "bb", "b" },
                new String[] { "1", "key4", "cc", "c" }
            },
            new String[][] {
                new String[] { "-1", "key6", "aa", "a" },
                new String[] { "-1", "key2", "bb", "b" }
            }
        };

        // Given the data above, when the keys are sorted and the deletions removed, we should
        // get these clustering rows in this order
        String[] expectedRows = new String[] { "aa", "ff", "ee", "cc", "dd", "cc", "bb"};

        List<ByteBuffer> buffers = new ArrayList<>(groups.length);
        int nowInSeconds = FBUtilities.nowInSeconds();
        ColumnFilter columnFilter = ColumnFilter.allRegularColumnsBuilder(cfs.metadata(), false).build();
        RowFilter rowFilter = RowFilter.create();
        // Full-range slice so every clustering row of each partition is returned.
        Slice slice = Slice.make(BufferClusteringBound.BOTTOM, BufferClusteringBound.TOP);
        ClusteringIndexSliceFilter sliceFilter = new ClusteringIndexSliceFilter(Slices.with(cfs.metadata().comparator, slice), false);

        for (String[][] group : groups)
        {
            cfs.truncateBlocking();

            List<SinglePartitionReadCommand> commands = new ArrayList<>(group.length);

            for (String[] data : group)
            {
                if (data[0].equals("1"))
                {
                    new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes(data[1]))
                    .clustering(data[2])
                    .add(data[3], ByteBufferUtil.bytes("blah"))
                    .build()
                    .apply();
                }
                else
                {
                    RowUpdateBuilder.deleteRow(cfs.metadata(), FBUtilities.timestampMicros(), ByteBufferUtil.bytes(data[1]), data[2]).apply();
                }
                commands.add(SinglePartitionReadCommand.create(cfs.metadata(), nowInSeconds, columnFilter, rowFilter, DataLimits.NONE, Util.dk(data[1]), sliceFilter));
            }

            cfs.forceBlockingFlush();

            // Execute the group locally and capture its serialized (intra-node) form.
            ReadQuery query = new SinglePartitionReadCommand.Group(commands, DataLimits.NONE);

            try (ReadExecutionController executionController = query.executionController();
                 UnfilteredPartitionIterator iter = query.executeLocally(executionController);
                 DataOutputBuffer buffer = new DataOutputBuffer())
            {
                UnfilteredPartitionIterators.serializerForIntraNode().serialize(iter,
                                                                                columnFilter,
                                                                                buffer,
                                                                                MessagingService.current_version);
                buffers.add(buffer.buffer());
            }
        }

        // deserialize, merge and check the results are all there
        List<UnfilteredPartitionIterator> iterators = new ArrayList<>();

        for (ByteBuffer buffer : buffers)
        {
            try (DataInputBuffer in = new DataInputBuffer(buffer, true))
            {
                iterators.add(UnfilteredPartitionIterators.serializerForIntraNode().deserialize(in,
                                                                                                MessagingService.current_version,
                                                                                                cfs.metadata(),
                                                                                                columnFilter,
                                                                                                DeserializationHelper.Flag.LOCAL));
            }
        }

        // No-op merge listener: we only care about the merged output, not per-row callbacks.
        UnfilteredPartitionIterators.MergeListener listener =
            new UnfilteredPartitionIterators.MergeListener()
            {
                public UnfilteredRowIterators.MergeListener getRowMergeListener(DecoratedKey partitionKey, List<UnfilteredRowIterator> versions)
                {
                    return null;
                }

                public void close()
                {
                }
            };

        try (PartitionIterator partitionIterator = UnfilteredPartitionIterators.filter(UnfilteredPartitionIterators.merge(iterators, listener), nowInSeconds))
        {
            int i = 0;
            int numPartitions = 0;
            while (partitionIterator.hasNext())
            {
                numPartitions++;
                try(RowIterator rowIterator = partitionIterator.next())
                {
                    while (rowIterator.hasNext())
                    {
                        Row row = rowIterator.next();
                        assertEquals("col=" + expectedRows[i++], row.clustering().toString(cfs.metadata()));
                        //System.out.print(row.toString(cfs.metadata, true));
                    }
                }
            }

            // key2 and key6 were (partially) deleted; five partitions survive the merge.
            assertEquals(5, numPartitions);
            assertEquals(expectedRows.length, i);
        }
    }
@Test
public void testSerializer() throws IOException
{
ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
new RowUpdateBuilder(cfs.metadata.get(), 0, ByteBufferUtil.bytes("key"))
.clustering("dd")
.add("a", ByteBufferUtil.bytes("abcd"))
.build()
.apply();
ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).includeRow("dd").build();
int messagingVersion = MessagingService.current_version;
FakeOutputStream out = new FakeOutputStream();
Tracing.instance.newSession(Tracing.TraceType.QUERY);
Message<ReadCommand> messageOut = Message.out(Verb.READ_REQ, readCommand);
long size = messageOut.serializedSize(messagingVersion);
Message.serializer.serialize(messageOut, new WrappedDataOutputStreamPlus(out), messagingVersion);
Assert.assertEquals(size, out.count);
}
static class FakeOutputStream extends OutputStream
{
long count;
public void write(int b) throws IOException
{
count++;
}
}
    /**
     * Writes several groups of rows including row deletions, reads each group back, and
     * asserts the maximum value recorded by the tombstoneScannedHistogram metric.
     */
    @Test
    public void testCountDeletedRows() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF4);

        // Each inner group is written, flushed and read in isolation (table truncated
        // between groups). The final group deletes 5 rows of key2 plus 1 of key6.
        String[][][] groups = new String[][][] {
                new String[][] {
                        new String[] { "1", "key1", "aa", "a" }, // "1" indicates to create the data, "-1" to delete the
                                                                 // row
                        new String[] { "1", "key2", "bb", "b" },
                        new String[] { "1", "key3", "cc", "c" }
                },
                new String[][] {
                        new String[] { "1", "key3", "dd", "d" },
                        new String[] { "1", "key2", "ee", "e" },
                        new String[] { "1", "key1", "ff", "f" }
                },
                new String[][] {
                        new String[] { "1", "key6", "aa", "a" },
                        new String[] { "1", "key5", "bb", "b" },
                        new String[] { "1", "key4", "cc", "c" }
                },
                new String[][] {
                        new String[] { "1", "key2", "aa", "a" },
                        new String[] { "1", "key2", "cc", "c" },
                        new String[] { "1", "key2", "dd", "d" }
                },
                new String[][] {
                        new String[] { "-1", "key6", "aa", "a" },
                        new String[] { "-1", "key2", "bb", "b" },
                        new String[] { "-1", "key2", "ee", "e" },
                        new String[] { "-1", "key2", "aa", "a" },
                        new String[] { "-1", "key2", "cc", "c" },
                        new String[] { "-1", "key2", "dd", "d" }
                }
        };

        List<ByteBuffer> buffers = new ArrayList<>(groups.length);
        int nowInSeconds = FBUtilities.nowInSeconds();
        ColumnFilter columnFilter = ColumnFilter.allRegularColumnsBuilder(cfs.metadata(), false).build();
        RowFilter rowFilter = RowFilter.create();
        // Full-range slice so every clustering row is visited (and counted).
        Slice slice = Slice.make(BufferClusteringBound.BOTTOM, BufferClusteringBound.TOP);
        ClusteringIndexSliceFilter sliceFilter = new ClusteringIndexSliceFilter(
                Slices.with(cfs.metadata().comparator, slice), false);

        for (String[][] group : groups)
        {
            cfs.truncateBlocking();

            List<SinglePartitionReadCommand> commands = new ArrayList<>(group.length);

            for (String[] data : group)
            {
                if (data[0].equals("1"))
                {
                    new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes(data[1]))
                            .clustering(data[2])
                            .add(data[3], ByteBufferUtil.bytes("blah"))
                            .build()
                            .apply();
                }
                else
                {
                    RowUpdateBuilder.deleteRow(cfs.metadata(), FBUtilities.timestampMicros(),
                            ByteBufferUtil.bytes(data[1]), data[2]).apply();
                }
                commands.add(SinglePartitionReadCommand.create(cfs.metadata(), nowInSeconds, columnFilter, rowFilter,
                        DataLimits.NONE, Util.dk(data[1]), sliceFilter));
            }

            cfs.forceBlockingFlush();

            ReadQuery query = new SinglePartitionReadCommand.Group(commands, DataLimits.NONE);

            try (ReadExecutionController executionController = query.executionController();
                    UnfilteredPartitionIterator iter = query.executeLocally(executionController);
                    DataOutputBuffer buffer = new DataOutputBuffer())
            {
                UnfilteredPartitionIterators.serializerForIntraNode().serialize(iter,
                        columnFilter,
                        buffer,
                        MessagingService.current_version);
                buffers.add(buffer.buffer());
            }
        }

        // The last group's read of key2 scans its 5 row tombstones — the histogram max.
        assertEquals(5, cfs.metric.tombstoneScannedHistogram.cf.getSnapshot().getMax());
    }
    /**
     * Same structure as testCountDeletedRows but with no deletions; verifies the
     * tombstoneScannedHistogram metric stays at its baseline.
     */
    @Test
    public void testCountWithNoDeletedRow() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF5);

        String[][][] groups = new String[][][] {
                new String[][] {
                        new String[] { "1", "key1", "aa", "a" }, // "1" indicates to create the data, "-1" to delete the
                                                                 // row
                        new String[] { "1", "key2", "bb", "b" },
                        new String[] { "1", "key3", "cc", "c" }
                },
                new String[][] {
                        new String[] { "1", "key3", "dd", "d" },
                        new String[] { "1", "key2", "ee", "e" },
                        new String[] { "1", "key1", "ff", "f" }
                },
                new String[][] {
                        new String[] { "1", "key6", "aa", "a" },
                        new String[] { "1", "key5", "bb", "b" },
                        new String[] { "1", "key4", "cc", "c" }
                }
        };

        List<ByteBuffer> buffers = new ArrayList<>(groups.length);
        int nowInSeconds = FBUtilities.nowInSeconds();
        ColumnFilter columnFilter = ColumnFilter.allRegularColumnsBuilder(cfs.metadata(), false).build();
        RowFilter rowFilter = RowFilter.create();
        // Full-range slice so every clustering row is visited.
        Slice slice = Slice.make(BufferClusteringBound.BOTTOM, BufferClusteringBound.TOP);
        ClusteringIndexSliceFilter sliceFilter = new ClusteringIndexSliceFilter(
                Slices.with(cfs.metadata().comparator, slice), false);

        for (String[][] group : groups)
        {
            cfs.truncateBlocking();

            List<SinglePartitionReadCommand> commands = new ArrayList<>(group.length);

            for (String[] data : group)
            {
                if (data[0].equals("1"))
                {
                    new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes(data[1]))
                            .clustering(data[2])
                            .add(data[3], ByteBufferUtil.bytes("blah"))
                            .build()
                            .apply();
                }
                else
                {
                    // Unreachable for this data set (no "-1" markers); kept for symmetry
                    // with testCountDeletedRows.
                    RowUpdateBuilder.deleteRow(cfs.metadata(), FBUtilities.timestampMicros(),
                            ByteBufferUtil.bytes(data[1]), data[2]).apply();
                }
                commands.add(SinglePartitionReadCommand.create(cfs.metadata(), nowInSeconds, columnFilter, rowFilter,
                        DataLimits.NONE, Util.dk(data[1]), sliceFilter));
            }

            cfs.forceBlockingFlush();

            ReadQuery query = new SinglePartitionReadCommand.Group(commands, DataLimits.NONE);

            try (ReadExecutionController executionController = query.executionController();
                    UnfilteredPartitionIterator iter = query.executeLocally(executionController);
                    DataOutputBuffer buffer = new DataOutputBuffer())
            {
                UnfilteredPartitionIterators.serializerForIntraNode().serialize(iter,
                        columnFilter,
                        buffer,
                        MessagingService.current_version);
                buffers.add(buffer.buffer());
            }
        }

        // No row tombstones were written, so the histogram max stays at 1.
        assertEquals(1, cfs.metric.tombstoneScannedHistogram.cf.getSnapshot().getMax());
    }
@Test
public void testSinglePartitionSliceRepairedDataTracking() throws Exception
{
ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).build();
testRepairedDataTracking(cfs, readCommand);
}
@Test
public void testPartitionRangeRepairedDataTracking() throws Exception
{
ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
ReadCommand readCommand = Util.cmd(cfs).build();
testRepairedDataTracking(cfs, readCommand);
}
@Test
public void testSinglePartitionNamesRepairedDataTracking() throws Exception
{
ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).includeRow("cc").includeRow("dd").build();
testRepairedDataTracking(cfs, readCommand);
}
    /**
     * When repaired-data tracking is enabled, names queries must not use the
     * timestamp-ordered sstable optimisation: every sstable must be read.
     */
    @Test
    public void testSinglePartitionNamesSkipsOptimisationsIfTrackingRepairedData()
    {
        // when tracking, the optimizations of querying sstables in timestamp order and
        // returning once all requested columns are not available so just assert that
        // all sstables are read when performing such queries
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
        cfs.truncateBlocking();
        cfs.disableAutoCompaction();

        // Two sstables containing the same row, the second with a newer timestamp.
        new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
                .clustering("dd")
                .add("a", ByteBufferUtil.bytes("abcd"))
                .build()
                .apply();

        cfs.forceBlockingFlush();

        new RowUpdateBuilder(cfs.metadata(), 1, ByteBufferUtil.bytes("key"))
                .clustering("dd")
                .add("a", ByteBufferUtil.bytes("wxyz"))
                .build()
                .apply();

        cfs.forceBlockingFlush();
        List<SSTableReader> sstables = new ArrayList<>(cfs.getLiveSSTables());
        assertEquals(2, sstables.size());
        // sstables.get(0) is the newest after this sort.
        sstables.sort(SSTableReader.maxTimestampDescending);

        ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).includeRow("dd").columns("a").build();

        assertEquals(0, readCount(sstables.get(0)));
        assertEquals(0, readCount(sstables.get(1)));
        // With tracking enabled, both sstables must be read.
        ReadCommand withTracking = readCommand.copy();
        Util.getAll(withTracking, withTracking.executionController(true));
        assertEquals(1, readCount(sstables.get(0)));
        assertEquals(1, readCount(sstables.get(1)));

        // same command without tracking touches only the table with the higher timestamp
        Util.getAll(readCommand.copy());
        assertEquals(2, readCount(sstables.get(0)));
        assertEquals(1, readCount(sstables.get(1)));
    }
    /**
     * Verifies that legacy (pre-2.1) counter shard contexts are excluded from the
     * repaired-data digest, while modern counter cells are included.
     */
    @Test
    public void dontIncludeLegacyCounterContextInDigest()
    {
        // Serializations of a CounterContext containing legacy (pre-2.1) shards
        // can legitimately differ across replicas. For this reason, the context
        // bytes are omitted from the repaired digest if they contain legacy shards.
        // This clearly has a tradeoff with the efficacy of the digest, without doing
        // so false positive digest mismatches will be reported for scenarios where
        // there is nothing that can be done to "fix" the replicas
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF7);
        cfs.truncateBlocking();
        cfs.disableAutoCompaction();

        // insert a row with the counter column having value 0, in a legacy shard.
        new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
                .clustering("aa")
                .addLegacyCounterCell("c", 0L)
                .build()
                .apply();
        cfs.forceBlockingFlush();
        // Mark the sstable repaired so the read contributes to the repaired digest.
        cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));

        // execute a read and capture the digest
        ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).build();
        ByteBuffer digestWithLegacyCounter0 = performReadAndVerifyRepairedInfo(readCommand, 1, 1, true);
        assertNotEquals(EMPTY_BYTE_BUFFER, digestWithLegacyCounter0);

        // truncate, then re-insert the same partition, but this time with a legacy
        // shard having the value 1. The repaired digest should match the previous, as
        // the values (context) are not included, only the cell metadata (ttl, timestamp, etc)
        cfs.truncateBlocking();
        new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
                .clustering("aa")
                .addLegacyCounterCell("c", 1L)
                .build()
                .apply();
        cfs.forceBlockingFlush();
        cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));

        ByteBuffer digestWithLegacyCounter1 = performReadAndVerifyRepairedInfo(readCommand, 1, 1, true);
        assertEquals(digestWithLegacyCounter0, digestWithLegacyCounter1);

        // truncate, then re-insert the same partition, but this time with a non-legacy
        // counter cell present. The repaired digest should not match the previous ones
        // as this time the value (context) is included.
        cfs.truncateBlocking();
        new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
                .clustering("aa")
                .add("c", 1L)
                .build()
                .apply();
        cfs.forceBlockingFlush();
        cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));

        ByteBuffer digestWithCounterCell = performReadAndVerifyRepairedInfo(readCommand, 1, 1, true);
        assertNotEquals(EMPTY_BYTE_BUFFER, digestWithCounterCell);
        assertNotEquals(digestWithLegacyCounter0, digestWithCounterCell);
        assertNotEquals(digestWithLegacyCounter1, digestWithCounterCell);
    }
    /**
     * Writes a single partition containing a single row and reads using a partition read. The single
     * row includes 1 live simple column, 1 simple tombstone and 1 complex column with a complex
     * deletion and a live cell. The repaired data digests generated by executing the same query
     * before and after the tombstones become eligible for purging should not match each other.
     * Also, neither digest should be empty as the partition is not made empty by the purging.
     */
    @Test
    public void purgeGCableTombstonesBeforeCalculatingDigest()
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF8);
        cfs.truncateBlocking();
        cfs.disableAutoCompaction();
        // Long grace period: tombstones are initially NOT purgeable.
        setGCGrace(cfs, 600);

        DecoratedKey[] keys = new DecoratedKey[] { Util.dk("key0"), Util.dk("key1"), Util.dk("key2"), Util.dk("key3") };
        int nowInSec = FBUtilities.nowInSeconds();

        // A simple tombstone
        new RowUpdateBuilder(cfs.metadata(), 0, keys[0]).clustering("cc").delete("a").build().apply();

        // Collection with an associated complex deletion
        PartitionUpdate.SimpleBuilder builder = PartitionUpdate.simpleBuilder(cfs.metadata(), keys[1]).timestamp(0);
        builder.row("cc").add("c", ImmutableSet.of("element1", "element2"));
        builder.buildAsMutation().apply();

        // RangeTombstone and a row (not covered by the RT). The row contains a regular tombstone which will not be
        // purged. This is to prevent the partition from being fully purged and removed from the final results
        new RowUpdateBuilder(cfs.metadata(), nowInSec, 0L, keys[2]).addRangeTombstone("aa", "bb").build().apply();
        new RowUpdateBuilder(cfs.metadata(), nowInSec+ 1000, 1000L, keys[2]).clustering("cc").delete("a").build().apply();

        // Partition with 2 rows, one fully deleted
        new RowUpdateBuilder(cfs.metadata.get(), 0, keys[3]).clustering("bb").add("a", ByteBufferUtil.bytes("a")).delete("b").build().apply();
        RowUpdateBuilder.deleteRow(cfs.metadata(), 0, keys[3], "cc").apply();
        cfs.forceBlockingFlush();
        // Mark the sstables repaired so reads contribute to the repaired digest.
        cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));

        Map<DecoratedKey, ByteBuffer> digestsWithTombstones = new HashMap<>();
        //Tombstones are not yet purgable
        for (DecoratedKey key : keys)
        {
            ReadCommand cmd = Util.cmd(cfs, key).withNowInSeconds(nowInSec).build();
            try (ReadExecutionController controller = cmd.executionController(true))
            {
                Partition partition = Util.getOnlyPartitionUnfiltered(cmd, controller);
                assertFalse(partition.isEmpty());
                partition.unfilteredIterator().forEachRemaining(u -> {
                    // must be either a RT, or a row containing some kind of deletion
                    assertTrue(u.isRangeTombstoneMarker() || ((Row) u).hasDeletion(cmd.nowInSec()));
                });
                ByteBuffer digestWithTombstones = controller.getRepairedDataDigest();
                // None should generate an empty digest
                assertDigestsDiffer(EMPTY_BYTE_BUFFER, digestWithTombstones);
                digestsWithTombstones.put(key, digestWithTombstones);
            }
        }

        // Make tombstones eligible for purging and re-run cmd with an incremented nowInSec
        setGCGrace(cfs, 0);

        //Tombstones are now purgable, so won't be in the read results and produce different digests
        for (DecoratedKey key : keys)
        {
            ReadCommand cmd = Util.cmd(cfs, key).withNowInSeconds(nowInSec + 60).build();
            try (ReadExecutionController controller = cmd.executionController(true))
            {
                Partition partition = Util.getOnlyPartitionUnfiltered(cmd, controller);
                assertFalse(partition.isEmpty());
                partition.unfilteredIterator().forEachRemaining(u -> {
                    // After purging, only rows without any deletions should remain.
                    // The one exception is "key2:cc" which has a regular column tombstone which is not
                    // eligible for purging. This is to prevent the partition from being fully purged
                    // when its RT is removed.
                    assertTrue(u.isRow());
                    Row r = (Row) u;
                    assertTrue(!r.hasDeletion(cmd.nowInSec())
                               || (key.equals(keys[2]) && r.clustering()
                                                           .bufferAt(0)
                                                           .equals(AsciiType.instance.fromString("cc"))));
                });
                ByteBuffer digestWithoutTombstones = controller.getRepairedDataDigest();
                // not an empty digest
                assertDigestsDiffer(EMPTY_BYTE_BUFFER, digestWithoutTombstones);
                // should not match the pre-purge digest
                assertDigestsDiffer(digestsWithTombstones.get(key), digestWithoutTombstones);
            }
        }
    }
@Test
public void testRepairedDataOverreadMetrics()
{
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF9);
    cfs.truncateBlocking();
    cfs.disableAutoCompaction();
    // Disable caching for this table. TableMetadata is immutable, so the result of
    // withSwapped() must be published through Schema.instance.load() for the change
    // to take effect (the previous code discarded the new metadata instance, leaving
    // the caching params untouched). Mirrors setGCGrace() below.
    TableParams noCaching = cfs.metadata().params.unbuild()
                                                 .caching(CachingParams.CACHE_NOTHING)
                                                 .build();
    KeyspaceMetadata keyspaceMetadata = Schema.instance.getKeyspaceMetadata(cfs.metadata().keyspace);
    Schema.instance.load(
        keyspaceMetadata.withSwapped(
            keyspaceMetadata.tables.withSwapped(
                cfs.metadata().withSwapped(noCaching))));
    // Insert and repair
    insert(cfs, IntStream.range(0, 10), () -> IntStream.range(0, 10));
    cfs.forceBlockingFlush();
    cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));
    // Insert and leave unrepaired
    insert(cfs, IntStream.range(0, 10), () -> IntStream.range(10, 20));
    // Single partition reads
    int limit = 5;
    ReadCommand cmd = Util.cmd(cfs, ByteBufferUtil.bytes(0)).withLimit(limit).build();
    assertEquals(0, getAndResetOverreadCount(cfs));
    // No overreads if not tracking
    readAndCheckRowCount(Collections.singletonList(Util.getOnlyPartition(cmd)), limit);
    assertEquals(0, getAndResetOverreadCount(cfs));
    // Overread up to (limit - 1) if tracking is enabled
    cmd = cmd.copy();
    readAndCheckRowCount(Collections.singletonList(Util.getOnlyPartition(cmd, true)), limit);
    // overread count is always < limit as the first read is counted during merging (and so is expected)
    assertEquals(limit - 1, getAndResetOverreadCount(cfs));
    // if limit already requires reading all repaired data, no overreads should be recorded
    limit = 20;
    cmd = Util.cmd(cfs, ByteBufferUtil.bytes(0)).withLimit(limit).build();
    readAndCheckRowCount(Collections.singletonList(Util.getOnlyPartition(cmd)), limit);
    assertEquals(0, getAndResetOverreadCount(cfs));
    // Range reads
    limit = 5;
    cmd = Util.cmd(cfs).withLimit(limit).build();
    assertEquals(0, getAndResetOverreadCount(cfs));
    // No overreads if not tracking
    readAndCheckRowCount(Util.getAll(cmd), limit);
    assertEquals(0, getAndResetOverreadCount(cfs));
    // Overread up to (limit - 1) if tracking is enabled
    cmd = cmd.copy();
    readAndCheckRowCount(Util.getAll(cmd, cmd.executionController(true)), limit);
    assertEquals(limit - 1, getAndResetOverreadCount(cfs));
    // if limit already requires reading all repaired data, no overreads should be recorded
    limit = 100;
    cmd = Util.cmd(cfs).withLimit(limit).build();
    readAndCheckRowCount(Util.getAll(cmd), limit);
    assertEquals(0, getAndResetOverreadCount(cfs));
}
/**
 * Publishes a new gc_grace_seconds value for the table.
 * TableMetadata is immutable, so the updated params are swapped into the
 * keyspace metadata and re-loaded via the Schema singleton.
 */
private void setGCGrace(ColumnFamilyStore cfs, int gcGrace)
{
    TableParams updatedParams = cfs.metadata().params.unbuild().gcGraceSeconds(gcGrace).build();
    KeyspaceMetadata ksm = Schema.instance.getKeyspaceMetadata(cfs.metadata().keyspace);
    KeyspaceMetadata updatedKsm = ksm.withSwapped(ksm.tables.withSwapped(cfs.metadata().withSwapped(updatedParams)));
    Schema.instance.load(updatedKsm);
}
/**
 * Returns the max of the repaired-data-tracking overread-rows histogram, then
 * clears the histogram so each assertion starts from a clean slate.
 */
private long getAndResetOverreadCount(ColumnFamilyStore cfs)
{
    long overreadRows = cfs.metric.repairedDataTrackingOverreadRows.cf.getSnapshot().getMax();
    // wipe the histogram so subsequent comparisons & asserts are not polluted by this read
    ((ClearableHistogram) cfs.metric.repairedDataTrackingOverreadRows.cf).clear();
    return overreadRows;
}
/**
 * Consumes every unfiltered row of every supplied partition and asserts that the
 * total row count matches the expected value. Each partition must be non-empty.
 */
private void readAndCheckRowCount(Iterable<FilteredPartition> partitions, int expected)
{
    int total = 0;
    for (Partition partition : partitions)
    {
        assertFalse(partition.isEmpty());
        try (UnfilteredRowIterator rows = partition.unfilteredIterator())
        {
            // drain the iterator, counting every unfiltered
            for (; rows.hasNext(); rows.next())
                total++;
        }
    }
    assertEquals(expected, total);
}
/**
 * Writes one row per (partitionId, rowId) pair with a fixed value in column "a".
 * The row id supplier is invoked afresh for each partition because an IntStream
 * can only be consumed once.
 */
private void insert(ColumnFamilyStore cfs, IntStream partitionIds, Supplier<IntStream> rowIds)
{
    partitionIds.forEach(partitionId -> {
        ByteBuffer pk = ByteBufferUtil.bytes(partitionId);
        rowIds.get().forEach(clustering ->
            new RowUpdateBuilder(cfs.metadata(), 0, pk).clustering(clustering)
                                                       .add("a", ByteBufferUtil.bytes("abcd"))
                                                       .build()
                                                       .apply());
    });
}
/** Asserts the two digests are not byte-wise equal (unsigned comparison). */
private void assertDigestsDiffer(ByteBuffer b0, ByteBuffer b1)
{
    int comparison = ByteBufferUtil.compareUnsigned(b0, b1);
    assertTrue(comparison != 0);
}
@Test
public void partitionReadFullyPurged() throws Exception
{
    // exercise the fully-purged-partition scenario via a single partition read
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    fullyPurgedPartitionCreatesEmptyDigest(cfs, Util.cmd(cfs, Util.dk("key")).build());
}
@Test
public void rangeReadFullyPurged() throws Exception
{
    // exercise the fully-purged-partition scenario via a partition range read
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    fullyPurgedPartitionCreatesEmptyDigest(cfs, Util.cmd(cfs).build());
}
/**
 * Writes a single partition containing only a single row deletion and reads with either a range or
 * partition query. Before the row deletion is eligible for purging, it should appear in the query
 * results and cause a non-empty repaired data digest to be generated. Repeating the query after
 * the row deletion is eligible for purging, both the result set and the repaired data digest should
 * be empty.
 */
private void fullyPurgedPartitionCreatesEmptyDigest(ColumnFamilyStore cfs, ReadCommand command)
{
    cfs.truncateBlocking();
    cfs.disableAutoCompaction();
    // long gc_grace so the tombstones written below are initially NOT purgeable
    setGCGrace(cfs, 600);
    // Partition with a fully deleted static row and a single, fully deleted regular row
    RowUpdateBuilder.deleteRow(cfs.metadata(), 0, ByteBufferUtil.bytes("key")).apply();
    RowUpdateBuilder.deleteRow(cfs.metadata(), 0, ByteBufferUtil.bytes("key"), "cc").apply();
    cfs.forceBlockingFlush();
    // mark the flushed sstable repaired so its contents feed the repaired data digest
    cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));
    try (ReadExecutionController controller = command.executionController(true))
    {
        List<ImmutableBTreePartition> partitions = Util.getAllUnfiltered(command, controller);
        assertEquals(1, partitions.size());
        // the unpurged tombstones must contribute a non-empty digest
        ByteBuffer digestWithTombstones = controller.getRepairedDataDigest();
        assertTrue(ByteBufferUtil.compareUnsigned(EMPTY_BYTE_BUFFER, digestWithTombstones) != 0);
        // Make tombstones eligible for purging and re-run cmd with an incremented nowInSec
        setGCGrace(cfs, 0);
    }
    // rebuild an equivalent command (preserving range vs single-partition shape) with a
    // later nowInSec so the tombstones are now past gc_grace and purgeable
    AbstractReadCommandBuilder builder = command instanceof PartitionRangeReadCommand
                                         ? Util.cmd(cfs)
                                         : Util.cmd(cfs, Util.dk("key"));
    builder.withNowInSeconds(command.nowInSec() + 60);
    command = builder.build();
    try (ReadExecutionController controller = command.executionController(true))
    {
        // everything was purgeable, so both the result set and the digest are now empty
        List<ImmutableBTreePartition> partitions = Util.getAllUnfiltered(command, controller);
        assertTrue(partitions.isEmpty());
        ByteBuffer digestWithoutTombstones = controller.getRepairedDataDigest();
        assertEquals(0, ByteBufferUtil.compareUnsigned(EMPTY_BYTE_BUFFER, digestWithoutTombstones));
    }
}
/**
 * Verifies that during range reads which include multiple partitions, fully purged partitions
 * have no material effect on the calculated digest. This test writes two sstables, each containing
 * a single partition; the first is live and the second fully deleted and eligible for purging.
 * Initially, only the sstable containing the live partition is marked repaired, while a range read
 * which covers both partitions is performed to generate a digest. Then the sstable containing the
 * purged partition is also marked repaired and the query reexecuted. The digests produced by both
 * queries should match as the digest calculation should exclude the fully purged partition.
 */
@Test
public void mixedPurgedAndNonPurgedPartitions()
{
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    cfs.truncateBlocking();
    cfs.disableAutoCompaction();
    // gc_grace = 0 so deleted data becomes purgeable immediately
    setGCGrace(cfs, 0);
    // nowInSec 60s in the future guarantees the tombstones written below are past gc_grace
    ReadCommand command = Util.cmd(cfs).withNowInSeconds(FBUtilities.nowInSeconds() + 60).build();
    // Live partition in a repaired sstable, so included in the digest calculation
    new RowUpdateBuilder(cfs.metadata.get(), 0, ByteBufferUtil.bytes("key-0")).clustering("cc").add("a", ByteBufferUtil.bytes("a")).build().apply();
    cfs.forceBlockingFlush();
    cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));
    // Fully deleted partition (static and regular rows) in an unrepaired sstable, so not included in the initial digest
    RowUpdateBuilder.deleteRow(cfs.metadata(), 0, ByteBufferUtil.bytes("key-1")).apply();
    RowUpdateBuilder.deleteRow(cfs.metadata(), 0, ByteBufferUtil.bytes("key-1"), "cc").apply();
    cfs.forceBlockingFlush();
    ByteBuffer digestWithoutPurgedPartition = null;
    try (ReadExecutionController controller = command.executionController(true))
    {
        // only the live partition is returned and only repaired data feeds the digest
        List<ImmutableBTreePartition> partitions = Util.getAllUnfiltered(command, controller);
        assertEquals(1, partitions.size());
        digestWithoutPurgedPartition = controller.getRepairedDataDigest();
        assertTrue(ByteBufferUtil.compareUnsigned(EMPTY_BYTE_BUFFER, digestWithoutPurgedPartition) != 0);
    }
    // mark the sstable containing the purged partition as repaired, so both partitions are now
    // read during in the digest calculation. Because the purged partition is entirely
    // discarded, the resultant digest should match the earlier one.
    cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));
    command = Util.cmd(cfs).withNowInSeconds(command.nowInSec()).build();
    try (ReadExecutionController controller = command.executionController(true))
    {
        List<ImmutableBTreePartition> partitions = Util.getAllUnfiltered(command, controller);
        assertEquals(1, partitions.size());
        ByteBuffer digestWithPurgedPartition = controller.getRepairedDataDigest();
        assertEquals(0, ByteBufferUtil.compareUnsigned(digestWithPurgedPartition, digestWithoutPurgedPartition));
    }
}
@Test
public void purgingConsidersRepairedDataOnly()
{
    // 2 sstables, first is repaired and contains data that is all purgeable
    // the second is unrepaired and contains non-purgable data. Even though
    // the partition itself is not fully purged, the repaired data digest
    // should be empty as there was no non-purgeable, repaired data read.
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    cfs.truncateBlocking();
    cfs.disableAutoCompaction();
    // gc_grace = 0 so the tombstones written below are purgeable immediately
    setGCGrace(cfs, 0);
    // Partition with a fully deleted static row and a single, fully deleted row which will be fully purged
    DecoratedKey key = Util.dk("key");
    RowUpdateBuilder.deleteRow(cfs.metadata(), 0, key).apply();
    RowUpdateBuilder.deleteRow(cfs.metadata(), 0, key, "cc").apply();
    cfs.forceBlockingFlush();
    cfs.getLiveSSTables().forEach(sstable -> mutateRepaired(cfs, sstable, 111, null));
    // newer, live row in an unrepaired sstable (timestamp 1 > 0 supersedes the tombstone)
    new RowUpdateBuilder(cfs.metadata(), 1, key).clustering("cc").add("a", ByteBufferUtil.bytes("a")).build().apply();
    cfs.forceBlockingFlush();
    int nowInSec = FBUtilities.nowInSeconds() + 10;
    ReadCommand cmd = Util.cmd(cfs, key).withNowInSeconds(nowInSec).build();
    try (ReadExecutionController controller = cmd.executionController(true))
    {
        Partition partition = Util.getOnlyPartitionUnfiltered(cmd, controller);
        assertFalse(partition.isEmpty());
        // check that the only remaining content is the single live row from the
        // unrepaired sstable - the repaired tombstones have all been purged
        try (UnfilteredRowIterator rows = partition.unfilteredIterator())
        {
            assertFalse(rows.isEmpty());
            Unfiltered unfiltered = rows.next();
            assertFalse(rows.hasNext());
            assertTrue(unfiltered.isRow());
            assertFalse(((Row) unfiltered).hasDeletion(nowInSec));
        }
        // no non-purgeable repaired data was read, so the digest must be empty
        assertEquals(EMPTY_BYTE_BUFFER, controller.getRepairedDataDigest());
    }
}
/** Returns the number of reads recorded by this sstable's read meter. */
private long readCount(SSTableReader sstable)
{
    return sstable.getReadMeter().count();
}
/**
 * Verifies that reads which track repaired data bypass the row cache: digests must be
 * built from the underlying sstables/memtables, never from cached rows.
 */
@Test
public void skipRowCacheIfTrackingRepairedData()
{
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    cfs.truncateBlocking();
    cfs.disableAutoCompaction();
    new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
    .clustering("cc")
    .add("a", ByteBufferUtil.bytes("abcd"))
    .build()
    .apply();
    cfs.forceBlockingFlush();
    ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).build();
    assertTrue(cfs.isRowCacheEnabled());
    // warm the cache
    assertFalse(Util.getAll(readCommand).isEmpty());
    long cacheHits = cfs.metric.rowCacheHit.getCount();
    // a plain (non-tracking) read should be served from the row cache
    Util.getAll(readCommand);
    assertTrue(cfs.metric.rowCacheHit.getCount() > cacheHits);
    cacheHits = cfs.metric.rowCacheHit.getCount();
    ReadCommand withRepairedInfo = readCommand.copy();
    try (ReadExecutionController controller = withRepairedInfo.executionController(true))
    {
        // with repaired-data tracking enabled the cache must be skipped, so the hit count stays put
        Util.getAll(withRepairedInfo, controller);
        assertEquals(cacheHits, cfs.metric.rowCacheHit.getCount());
    }
}
@Test (expected = IllegalArgumentException.class)
public void copyFullAsTransientTest()
{
    // copying a command as a transient-only query must reject a full replica
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    Util.cmd(cfs, Util.dk("key"))
        .build()
        .copyAsTransientQuery(ReplicaUtils.full(FBUtilities.getBroadcastAddressAndPort()));
}
@Test (expected = IllegalArgumentException.class)
public void copyTransientAsDigestQuery()
{
    // copying a command as a digest query must reject a transient replica
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    Util.cmd(cfs, Util.dk("key"))
        .build()
        .copyAsDigestQuery(ReplicaUtils.trans(FBUtilities.getBroadcastAddressAndPort()));
}
@Test (expected = IllegalArgumentException.class)
public void copyMultipleFullAsTransientTest()
{
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    DecoratedKey key = Util.dk("key");
    Token token = key.getToken();
    // the actual address is irrelevant; the mixed transient/full replica set is what must be rejected
    InetAddressAndPort addr = FBUtilities.getBroadcastAddressAndPort();
    EndpointsForToken mixedReplicas = EndpointsForToken.of(token,
                                                           ReplicaUtils.trans(addr, token),
                                                           ReplicaUtils.full(addr, token));
    Util.cmd(cfs, key).build().copyAsTransientQuery(mixedReplicas);
}
@Test (expected = IllegalArgumentException.class)
public void copyMultipleTransientAsDigestQuery()
{
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF6);
    DecoratedKey key = Util.dk("key");
    Token token = key.getToken();
    // the actual address is irrelevant; the mixed transient/full replica set is what must be rejected
    InetAddressAndPort addr = FBUtilities.getBroadcastAddressAndPort();
    EndpointsForToken mixedReplicas = EndpointsForToken.of(token,
                                                           ReplicaUtils.trans(addr, token),
                                                           ReplicaUtils.full(addr, token));
    Util.cmd(cfs, key).build().copyAsDigestQuery(mixedReplicas);
}
/**
 * Checks that a single partition read renders to the expected CQL SELECT string.
 */
@Test
public void testToCQLString()
{
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
    DecoratedKey key = Util.dk("key");
    ReadCommand readCommand = Util.cmd(cfs, key).build();
    // JUnit convention: expected value first, actual second. The original call had
    // them reversed, which yields a misleading failure message on mismatch.
    String expected = String.format("SELECT * FROM \"ReadCommandTest\".\"Standard2\" WHERE key = 0x%s ALLOW FILTERING",
                                    ByteBufferUtil.bytesToHex(key.getKey()));
    assertEquals(expected, readCommand.toCQLString());
}
/**
 * Shared driver for repaired-data tracking tests: writes two sstables, then walks the
 * table through a sequence of repaired/pending-repair states, asserting after each
 * transition that the repaired data digest changes and that its conclusive flag is
 * reported correctly (any pending session makes the digest inconclusive).
 */
private void testRepairedDataTracking(ColumnFamilyStore cfs, ReadCommand readCommand)
{
    cfs.truncateBlocking();
    cfs.disableAutoCompaction();
    new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes("key"))
    .clustering("cc")
    .add("a", ByteBufferUtil.bytes("abcd"))
    .build()
    .apply();
    cfs.forceBlockingFlush();
    new RowUpdateBuilder(cfs.metadata(), 1, ByteBufferUtil.bytes("key"))
    .clustering("dd")
    .add("a", ByteBufferUtil.bytes("abcd"))
    .build()
    .apply();
    cfs.forceBlockingFlush();
    List<SSTableReader> sstables = new ArrayList<>(cfs.getLiveSSTables());
    assertEquals(2, sstables.size());
    // both sstables start out unrepaired with no pending repair session
    sstables.forEach(sstable -> assertFalse(sstable.isRepaired() || sstable.isPendingRepair()));
    SSTableReader sstable1 = sstables.get(0);
    SSTableReader sstable2 = sstables.get(1);
    int numPartitions = 1;
    int rowsPerPartition = 2;
    // Capture all the digest versions as we mutate the table's repaired status. Each time
    // we make a change, we expect a different digest.
    Set<ByteBuffer> digests = new HashSet<>();
    // first time round, nothing has been marked repaired so we expect digest to be an empty buffer and to be marked conclusive
    ByteBuffer digest = performReadAndVerifyRepairedInfo(readCommand, numPartitions, rowsPerPartition, true);
    assertEquals(EMPTY_BYTE_BUFFER, digest);
    digests.add(digest);
    // add a pending repair session to table1, digest should remain the same but now we expect it to be marked inconclusive
    UUID session1 = UUIDGen.getTimeUUID();
    mutateRepaired(cfs, sstable1, ActiveRepairService.UNREPAIRED_SSTABLE, session1);
    digests.add(performReadAndVerifyRepairedInfo(readCommand, numPartitions, rowsPerPartition, false));
    assertEquals(1, digests.size());
    // add a different pending session to table2, digest should remain the same and still consider it inconclusive
    UUID session2 = UUIDGen.getTimeUUID();
    mutateRepaired(cfs, sstable2, ActiveRepairService.UNREPAIRED_SSTABLE, session2);
    digests.add(performReadAndVerifyRepairedInfo(readCommand, numPartitions, rowsPerPartition, false));
    assertEquals(1, digests.size());
    // mark one table repaired
    mutateRepaired(cfs, sstable1, 111, null);
    // this time, digest should not be empty, session2 still means that the result is inconclusive
    digests.add(performReadAndVerifyRepairedInfo(readCommand, numPartitions, rowsPerPartition, false));
    assertEquals(2, digests.size());
    // mark the second table repaired
    mutateRepaired(cfs, sstable2, 222, null);
    // digest should be updated again and as there are no longer any pending sessions, it should be considered conclusive
    digests.add(performReadAndVerifyRepairedInfo(readCommand, numPartitions, rowsPerPartition, true));
    assertEquals(3, digests.size());
    // insert a partition tombstone into the memtable, then re-check the repaired info.
    // This is to ensure that the optimisations which skip reading from sstables, when a
    // newer partition tombstone has already been encountered, cause the digest to be
    // marked as inconclusive.
    // the exception to this case is for partition range reads, where we always read
    // and generate digests for all sstables, so we only test this path for single partition reads
    if (readCommand.isLimitedToOnePartition())
    {
        new Mutation(PartitionUpdate.simpleBuilder(cfs.metadata(), ByteBufferUtil.bytes("key"))
        .delete()
        .build()).apply();
        digest = performReadAndVerifyRepairedInfo(readCommand, 0, rowsPerPartition, false);
        assertEquals(EMPTY_BYTE_BUFFER, digest);
        // now flush so we have an unrepaired table with the deletion and repeat the check
        cfs.forceBlockingFlush();
        digest = performReadAndVerifyRepairedInfo(readCommand, 0, rowsPerPartition, false);
        assertEquals(EMPTY_BYTE_BUFFER, digest);
    }
}
/**
 * Rewrites the sstable's repair metadata (repairedAt timestamp and optional pending
 * repair session) and reloads it, then registers a minimal parent repair session when
 * a pending session id is supplied so session-expiry checks do not reject it.
 */
private void mutateRepaired(ColumnFamilyStore cfs, SSTableReader sstable, long repairedAt, UUID pendingSession)
{
    try
    {
        sstable.descriptor.getMetadataSerializer().mutateRepairMetadata(sstable.descriptor, repairedAt, pendingSession, false);
        // pick up the on-disk metadata change in the in-memory reader
        sstable.reloadSSTableMetadata();
    }
    catch (IOException e)
    {
        e.printStackTrace();
        fail("Caught IOException when mutating sstable metadata");
    }
    if (pendingSession != null)
    {
        // setup a minimal repair session. This is necessary because we
        // check for sessions which have exceeded timeout and been purged
        Range<Token> range = new Range<>(cfs.metadata().partitioner.getMinimumToken(),
                                         cfs.metadata().partitioner.getRandomToken());
        ActiveRepairService.instance.registerParentRepairSession(pendingSession,
                                                                 REPAIR_COORDINATOR,
                                                                 Lists.newArrayList(cfs),
                                                                 Sets.newHashSet(range),
                                                                 true,
                                                                 repairedAt,
                                                                 true,
                                                                 PreviewKind.NONE);
        LocalSessionAccessor.prepareUnsafe(pendingSession, null, Sets.newHashSet(REPAIR_COORDINATOR));
    }
}
/**
 * Runs the command 10 times with repaired-data tracking enabled, asserting on every
 * iteration that the partition/row counts match, the digest is stable across runs,
 * and the conclusive flag matches expectations.
 *
 * @return the (single) digest produced, so callers can compare it across repaired-status changes
 */
private ByteBuffer performReadAndVerifyRepairedInfo(ReadCommand command,
                                                    int expectedPartitions,
                                                    int expectedRowsPerPartition,
                                                    boolean expectConclusive)
{
    // perform equivalent read command multiple times and assert that
    // the repaired data info is always consistent. Return the digest
    // so we can verify that it changes when the repaired status of
    // the queried tables does.
    Set<ByteBuffer> digests = new HashSet<>();
    for (int i = 0; i < 10; i++)
    {
        // each iteration needs a fresh copy, as a command can only be executed once
        ReadCommand withRepairedInfo = command.copy();
        try (ReadExecutionController controller = withRepairedInfo.executionController(true))
        {
            List<FilteredPartition> partitions = Util.getAll(withRepairedInfo, controller);
            assertEquals(expectedPartitions, partitions.size());
            partitions.forEach(p -> assertEquals(expectedRowsPerPartition, p.rowCount()));
            ByteBuffer digest = controller.getRepairedDataDigest();
            digests.add(digest);
            // still exactly one distinct digest => deterministic across reads
            assertEquals(1, digests.size());
            assertEquals(expectConclusive, controller.isRepairedDataDigestConclusive());
        }
    }
    return digests.iterator().next();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import javax.jms.ConnectionFactory;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.TypeConversionException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.support.TypeConverterSupport;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;
import static org.apache.camel.component.jms.JmsConstants.JMS_MESSAGE_TYPE;
/**
 * Tests that the jmsMessageType endpoint option and the JMS_MESSAGE_TYPE header
 * force the outgoing JMS message to the requested type (Text, Bytes, Map or Object).
 */
public class JmsMessageTypeTest extends CamelTestSupport {
    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();
        ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
        camelContext.addComponent("jms", jmsComponentAutoAcknowledge(connectionFactory));
        // MyFooBean doubles as a custom type converter: register it so a MyFooBean body
        // can be converted to byte[], String and Map when a JMS message type is forced
        camelContext.getTypeConverterRegistry().addTypeConverter(byte[].class, MyFooBean.class, new MyFooBean());
        camelContext.getTypeConverterRegistry().addTypeConverter(String.class, MyFooBean.class, new MyFooBean());
        camelContext.getTypeConverterRegistry().addTypeConverter(Map.class, MyFooBean.class, new MyFooBean());
        return camelContext;
    }
    @Test
    public void testHeaderTextType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        // we use Text type then it should be a String
        mock.message(0).body().isInstanceOf(String.class);
        // we send an object and force it to use Text type via the JMS_MESSAGE_TYPE header
        template.sendBodyAndHeader("direct:foo", new MyFooBean("World"), JMS_MESSAGE_TYPE, "Text");
        assertMockEndpointsSatisfied();
    }
    @Test
    public void testConvertTextType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        // we use Text type then it should be a String
        mock.message(0).body().isInstanceOf(String.class);
        // we send an object and force it to use Text type via the endpoint option
        template.sendBody("direct:text", new MyFooBean("World"));
        assertMockEndpointsSatisfied();
    }
    @Test
    public void testTextType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        // we use Text type then it should be a String
        mock.message(0).body().isInstanceOf(String.class);
        // we send a string and force it to use Text type
        template.sendBody("direct:text", "Hello World");
        assertMockEndpointsSatisfied();
    }
    @Test
    public void testHeaderBytesType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Bye World".getBytes());
        mock.message(0).body().isInstanceOf(byte[].class);
        // we send an object and force it to use Bytes type via the JMS_MESSAGE_TYPE header
        template.sendBodyAndHeader("direct:foo", new MyFooBean("World"), JMS_MESSAGE_TYPE, "Bytes");
        assertMockEndpointsSatisfied();
    }
    @Test
    public void testConvertBytesType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Bye World".getBytes());
        mock.message(0).body().isInstanceOf(byte[].class);
        // we send an object and force it to use Bytes type via the endpoint option
        template.sendBody("direct:bytes", new MyFooBean("World"));
        assertMockEndpointsSatisfied();
    }
    @Test
    public void testBytesType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Bye World".getBytes());
        mock.message(0).body().isInstanceOf(byte[].class);
        // we send a string and force it to use Bytes type
        template.sendBody("direct:bytes", "Bye World");
        assertMockEndpointsSatisfied();
    }
    @Test
    public void testHeaderMapType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Map.class);
        // we send an object and force it to use Map type via the JMS_MESSAGE_TYPE header
        template.sendBodyAndHeader("direct:foo", new MyFooBean("Claus"), JMS_MESSAGE_TYPE, "Map");
        assertMockEndpointsSatisfied();
        assertEquals("Claus", mock.getExchanges().get(0).getIn().getBody(Map.class).get("name"));
    }
    @Test
    public void testConvertMapType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Map.class);
        // we send an object and force it to use Map type via the endpoint option
        template.sendBody("direct:map", new MyFooBean("Claus"));
        assertMockEndpointsSatisfied();
        assertEquals("Claus", mock.getExchanges().get(0).getIn().getBody(Map.class).get("name"));
    }
    @Test
    public void testMapType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Map.class);
        Map<String, Object> body = new HashMap<>();
        body.put("name", "Claus");
        // we send a Map object and force it to use Map type
        template.sendBody("direct:map", body);
        assertMockEndpointsSatisfied();
        assertEquals("Claus", mock.getExchanges().get(0).getIn().getBody(Map.class).get("name"));
    }
    @Test
    public void testHeaderObjectType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        // we use Object type then it should be a MyFooBean object
        mock.message(0).body().isInstanceOf(MyFooBean.class);
        // we send an object and force it to use Object type via the JMS_MESSAGE_TYPE header
        template.sendBodyAndHeader("direct:foo", new MyFooBean("James"), JMS_MESSAGE_TYPE, "Object");
        assertMockEndpointsSatisfied();
        assertEquals("James", mock.getExchanges().get(0).getIn().getBody(MyFooBean.class).getName());
    }
    @Test
    public void testObjectType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        // we use Object type then it should be a MyFooBean object
        mock.message(0).body().isInstanceOf(MyFooBean.class);
        // we send an object and force it to use Object type via the endpoint option
        template.sendBody("direct:object", new MyFooBean("James"));
        assertMockEndpointsSatisfied();
        assertEquals("James", mock.getExchanges().get(0).getIn().getBody(MyFooBean.class).getName());
    }
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // each direct:* route forces a specific jmsMessageType on the JMS endpoint;
                // direct:foo leaves the type to be selected via the JMS_MESSAGE_TYPE header
                from("direct:text").to("jms:queue:foo?jmsMessageType=Text");
                from("direct:bytes").to("jms:queue:foo?jmsMessageType=Bytes");
                from("direct:map").to("jms:queue:foo?jmsMessageType=Map");
                from("direct:object").to("jms:queue:foo?jmsMessageType=Object");
                from("direct:foo").to("jms:queue:foo");
                from("jms:queue:foo").to("mock:result");
            }
        };
    }
    /**
     * Test bean that is both a message body (Serializable, so it can travel as a JMS
     * ObjectMessage) and a Camel type converter producing the String/byte[]/Map
     * representations asserted by the tests above.
     */
    public static final class MyFooBean extends TypeConverterSupport implements Serializable {
        private static final long serialVersionUID = 1L;
        private String name;
        private MyFooBean() {
        }
        private MyFooBean(String name) {
            this.name = name;
        }
        public String getName() {
            return name;
        }
        @Override
        @SuppressWarnings("unchecked")
        public <T> T convertTo(Class<T> type, Exchange exchange, Object value) throws TypeConversionException {
            if (type.isAssignableFrom(String.class)) {
                return (T) ("Hello " + ((MyFooBean)value).getName());
            }
            if (type.isAssignableFrom(byte[].class)) {
                return (T) ("Bye " + ((MyFooBean)value).getName()).getBytes();
            }
            if (type.isAssignableFrom(Map.class)) {
                Map<String, Object> map = new HashMap<>();
                map.put("name", ((MyFooBean)value).getName());
                return (T) map;
            }
            // null signals to Camel that this converter cannot handle the requested type
            return null;
        }
    }
}
| |
package act.inject.param;
/*-
* #%L
* ACT Framework
* %%
* Copyright (C) 2014 - 2017 ActFramework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import act.Act;
import act.app.ActionContext;
import act.app.App;
import act.cli.CliContext;
import act.cli.Optional;
import act.cli.Required;
import act.cli.meta.CommandMethodMetaInfo;
import act.cli.util.CommandLineParser;
import act.inject.DefaultValue;
import act.util.ActContext;
import org.osgl.$;
import org.osgl.exception.UnexpectedException;
import org.osgl.http.H;
import org.osgl.inject.BeanSpec;
import org.osgl.inject.util.AnnotationUtil;
import org.osgl.mvc.annotation.Resolve;
import org.osgl.util.S;
import org.osgl.util.StringValueResolver;
import java.lang.annotation.Annotation;
import java.lang.reflect.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* Responsible for loading param value for {@link ActionContext}
*/
public class CliContextParamLoader extends ParamValueLoaderService {
// Meta info of the command method currently being processed; thread-local because
// parsing contexts for different commands may be built on different threads.
// NOTE(review): `transient` on a static field has no effect (statics are never
// serialized) - consider dropping the modifier.
private final static transient ThreadLocal<CommandMethodMetaInfo> methodMetaInfoHolder = new ThreadLocal<CommandMethodMetaInfo>();
// Caches the option loaders computed per command method so they are built only once.
private ConcurrentMap<Method, List<OptionLoader>> optionLoaderRegistry = new ConcurrentHashMap<Method, List<OptionLoader>>();
CliContextParamLoader(App app) {
    super(app);
}
/**
 * Builds the parsing context for a CLI command method: registers bean and param
 * loaders for the commander class/method and returns the finished context.
 *
 * Note the ordering: ParsingContextBuilder.start() must run before loaders are
 * created, and finish() harvests whatever they registered in between.
 */
public CliContext.ParsingContext buildParsingContext(Class commander, Method method, CommandMethodMetaInfo methodMetaInfo) {
    CliContext.ParsingContextBuilder.start();
    ensureOptionLoaders(method, methodMetaInfo);
    // stash the meta info for downstream lookups during loader resolution
    // NOTE(review): the holder is never cleared here - presumably removed elsewhere; TODO confirm
    methodMetaInfoHolder.set(methodMetaInfo);
    if (!Modifier.isStatic(method.getModifiers())) {
        // instance command methods need a loader for the commander bean itself
        ParamValueLoader loader = findBeanLoader(commander);
        classRegistry.putIfAbsent(commander, loader);
    }
    // collects whether any param carries validation constraints
    $.Var<Boolean> boolBag = $.var();
    // create a pseudo ctx as we do not have one here
    // the ctx is just a way to pass the method info
    ActContext ctx = new ActContext.Base<ActContext.Base>(Act.app()) {
        @Override
        public Base accept(H.Format fmt) {
            return null;
        }
        @Override
        public H.Format accept() {
            return null;
        }
        @Override
        public String methodPath() {
            return null;
        }
        @Override
        public Set<String> paramKeys() {
            return null;
        }
        @Override
        public String paramVal(String key) {
            return null;
        }
        @Override
        public String[] paramVals(String key) {
            return new String[0];
        }
    };
    ctx.currentMethod(method);
    ParamValueLoader[] loaders = findMethodParamLoaders(method, commander, ctx, boolBag);
    methodRegistry.putIfAbsent(method, loaders);
    methodValidationConstraintLookup.put(method, boolBag.get());
    return CliContext.ParsingContextBuilder.finish();
}
/**
 * Binds option values from the parsed command line into the CLI context before
 * param loading proper takes place.
 *
 * Special case: when the command declares exactly one required option, a bare
 * command line argument may be used to supply that option's value.
 */
public void preParseOptions(Method method, CommandMethodMetaInfo methodMetaInfo, CliContext context) {
    List<OptionLoader> optionLoaders = ensureOptionLoaders(method, methodMetaInfo);
    CommandLineParser parser = context.commandLine();
    boolean consumedArgumentAsOption = false;
    if (1 == optionLoaders.size()) {
        OptionLoader soleLoader = optionLoaders.get(0);
        if (soleLoader.required) {
            String argumentValue = parser.argumentAsOption();
            if (null != argumentValue) {
                consumedArgumentAsOption = true;
                context.parsingContext().foundRequired(soleLoader.requiredGroup);
                context.param(soleLoader.bindName, argumentValue);
            }
        }
    }
    if (!consumedArgumentAsOption) {
        for (OptionLoader loader : optionLoaders) {
            String value = parser.getString(loader.lead1, loader.lead2);
            if (S.notBlank(value)) {
                // tick off the required group before binding so missing-option
                // detection below sees it as satisfied
                if (loader.required) {
                    context.parsingContext().foundRequired(loader.requiredGroup);
                }
                context.param(loader.bindName, value);
            }
        }
    }
    context.parsingContext().raiseExceptionIfThereAreMissingOptions(context);
}
/**
 * Chooses the CLI-specific param loader for a bean spec: an OptionLoader when the
 * param is annotated @Required/@Optional, otherwise a positional argument loader
 * (var-arg flavour for arrays).
 */
@Override
protected ParamValueLoader findContextSpecificLoader(
        String bindName,
        BeanSpec spec
) {
    boolean isArray = spec.isArray();
    // default value only applies to the plain argument loader below;
    // OptionLoader handles its own defaults
    String defVal = null;
    DefaultValue defaultValue = spec.getAnnotation(DefaultValue.class);
    if (null != defaultValue) {
        defVal = defaultValue.value();
    }
    StringValueResolver resolver = findResolver(spec, isArray);
    Required required = spec.getAnnotation(Required.class);
    // @Optional is only consulted when @Required is absent; @Required wins if both present
    Optional optional = null == required ? spec.getAnnotation(Optional.class) : null;
    if (null != required) {
        return new OptionLoader(bindName, required, resolver, spec);
    } else if (null != optional) {
        return new OptionLoader(bindName, optional, resolver, spec);
    }
    return isArray ? new CliVarArgumentLoader(spec.rawType().getComponentType(), resolver) : new CliArgumentLoader(resolver, defVal);
}
private StringValueResolver findResolver(BeanSpec spec, boolean isArray) {
    // An annotation-declared resolver takes precedence over the implicit one.
    StringValueResolver annotated = findAnnotatedResolver(spec);
    if (null != annotated) {
        return annotated;
    }
    return findImplicitResolver(spec, isArray);
}
private StringValueResolver findAnnotatedResolver(BeanSpec spec) {
    // A direct @Resolve beats a @Resolve carried by a tagging annotation.
    StringValueResolver direct = findDirectAnnotatedResolver(spec);
    if (null != direct) {
        return direct;
    }
    return findIndirectAnnotatedResolver(spec);
}
private StringValueResolver findDirectAnnotatedResolver(BeanSpec spec) {
    // Honour an @Resolve annotation placed directly on the spec: return the
    // first declared resolver whose target type fits the spec's raw type.
    Resolve resolve = spec.getAnnotation(Resolve.class);
    if (null == resolve) {
        return null;
    }
    Class<?> rawType = spec.rawType();
    for (Class<? extends StringValueResolver> resolverClass : resolve.value()) {
        StringValueResolver candidate = injector.get(resolverClass);
        Class<?> targetType = candidate.targetType();
        boolean matches = rawType.isAssignableFrom(targetType);
        if (!matches) {
            // Retry against the wrapper class when the raw type is primitive.
            Class<?> wrapped = $.wrapperClassOf(rawType);
            if (rawType != wrapped) {
                matches = wrapped.isAssignableFrom(targetType);
            }
        }
        if (matches) {
            return candidate;
        }
    }
    return null;
}
private StringValueResolver findIndirectAnnotatedResolver(BeanSpec spec) {
    // Scan every annotation on the spec for one that is itself tagged with
    // @Resolve, and return the first declared resolver that fits the raw type.
    Class<?> rawType = spec.rawType();
    for (Annotation a : spec.allAnnotations()) {
        Resolve resolve = AnnotationUtil.tagAnnotation(a, Resolve.class);
        if (null == resolve) {
            continue;
        }
        for (Class<? extends StringValueResolver> resolverClass : resolve.value()) {
            StringValueResolver candidate = injector.get(resolverClass);
            // Forward the tagging annotation's attributes to the resolver.
            candidate.attributes($.evaluate(a));
            Class<?> targetType = candidate.targetType();
            boolean matches = rawType.isAssignableFrom(targetType);
            if (!matches) {
                // Retry against the wrapper class for primitive raw types.
                Class<?> wrapped = $.wrapperClassOf(rawType);
                if (rawType != wrapped) {
                    matches = wrapped.isAssignableFrom(targetType);
                }
            }
            if (matches) {
                return candidate;
            }
        }
    }
    return null;
}
// Fall-back resolver lookup when no annotation declares one.
// For scalar types, delegates to the resolver manager; for arrays, builds an
// adapter that resolves the string into an ArrayList of component values and
// then copies it into a correctly-typed (possibly primitive) array.
// Returns null when no resolver can be found for a non-array type.
private StringValueResolver findImplicitResolver(final BeanSpec spec, boolean isArray) {
    StringValueResolver resolver = resolverManager.resolver(spec.rawType(), spec);
    if (null != resolver) {
        return resolver;
    } else if (isArray) {
        final BeanSpec compSpec = spec.componentSpec();
        // Resolves e.g. "a,b,c" into an ArrayList of component-typed values.
        final StringValueResolver<ArrayList> colResolver = resolverManager.collectionResolver(ArrayList.class, compSpec.rawType(), S.COMMON_SEP);
        final boolean isPrimitive = $.isPrimitive(compSpec.rawType());
        return new StringValueResolver() {
            @Override
            public Object resolve(String s) {
                List list = colResolver.resolve(s);
                int size = list.size();
                final Class<?> compType = compSpec.rawType();
                // Reflectively allocate the target array so primitive
                // component types (int[], double[], ...) are supported.
                Object array = Array.newInstance(compType, size);
                for (int i = 0; i < size; ++i) {
                    Object item = list.get(i);
                    if (isPrimitive) {
                        // Unbox element-by-element into the primitive array.
                        if (boolean.class == compType) {
                            Array.setBoolean(array, i, ((Boolean)item).booleanValue());
                        } else if (byte.class == compType) {
                            Array.setByte(array, i, ((Byte) item).byteValue());
                        } else if (char.class == compType) {
                            Array.setChar(array, i, ((Character) item).charValue());
                        } else if (double.class == compType) {
                            Array.setDouble(array, i, ((Double) item).doubleValue());
                        } else if (float.class == compType) {
                            Array.setFloat(array, i, ((Float) item).floatValue());
                        } else if (int.class == compType) {
                            Array.setInt(array, i, ((Integer) item).intValue());
                        } else if (long.class == compType) {
                            Array.setLong(array, i, ((Long) item).longValue());
                        } else if (short.class == compType) {
                            Array.setShort(array, i, ((Short) item).shortValue());
                        } else {
                            // All eight primitive types are handled above.
                            throw new UnexpectedException("Unknown primitive type");
                        }
                    } else {
                        Array.set(array, i, item);
                    }
                }
                return array;
            }
        };
    } else {
        return null;
    }
}
@Override
protected String paramName(int i) {
    // Delegate to the parameter meta data of the method currently processed
    // (held in a thread/context-local holder populated elsewhere).
    return methodMetaInfoHolder.get().param(i).name();
}
private List<OptionLoader> ensureOptionLoaders(Method method, CommandMethodMetaInfo methodMetaInfo) {
    // Lazily compute and cache the option loaders for a command method.
    List<OptionLoader> cached = optionLoaderRegistry.get(method);
    if (null != cached) {
        return cached;
    }
    List<OptionLoader> computed = findOptionLoaders(method, methodMetaInfo);
    optionLoaderRegistry.put(method, computed);
    return computed;
}
private List<OptionLoader> findOptionLoaders(Method method, CommandMethodMetaInfo methodMetaInfo) {
    // Collect option loaders declared on the method parameters first,
    // followed by those declared on the declaring class' fields.
    List<OptionLoader> result = new ArrayList<OptionLoader>();
    findParamOptionLoaders(method, methodMetaInfo, result);
    findFieldOptionLoaders(method.getDeclaringClass(), result);
    return result;
}
private void findFieldOptionLoaders(Class c, List<OptionLoader> optionLoaders) {
    // No field injection for a provided host
    if (injector.isProvided(c)) {
        return;
    }
    for (Field field : $.fieldsOf(c, true)) {
        Annotation[] fieldAnnotations = field.getAnnotations();
        String bindName = bindName(fieldAnnotations, field.getName());
        BeanSpec spec = BeanSpec.of(field.getGenericType(), fieldAnnotations, bindName, injector);
        // Provided beans come straight from the injector; anything else may
        // resolve to an option loader.
        ParamValueLoader loader;
        if (injector.isProvided(spec)) {
            loader = ProvidedValueLoader.get(spec, injector);
        } else {
            loader = findContextSpecificLoader(bindName, spec);
        }
        if (loader instanceof OptionLoader) {
            optionLoaders.add((OptionLoader) loader);
        }
    }
}
// Inspects every parameter of the command method and collects those whose
// loader resolves to an OptionLoader (i.e. annotated @Required/@Optional).
private void findParamOptionLoaders(Method m, CommandMethodMetaInfo methodMetaInfo, List<OptionLoader> optionLoaders) {
    Type[] types = m.getGenericParameterTypes();
    int len = types.length;
    if (len == 0) {
        return;
    }
    Annotation[][] allAnnotations = m.getParameterAnnotations();
    // NOTE(review): parameters are visited in reverse declaration order, so
    // option loaders are collected last-to-first — presumably deliberate;
    // confirm before relying on the resulting list order.
    for (int i = len - 1; i >= 0; --i) {
        Type type = types[i];
        Annotation[] annotations = allAnnotations[i];
        BeanSpec spec = BeanSpec.of(type, annotations, null, injector);
        // Fall back to the compiled parameter name when no explicit bind
        // name annotation is present.
        String bindName = tryFindBindName(annotations, spec.name());
        if (null == bindName) {
            bindName = methodMetaInfo.param(i).name();
        }
        ParamValueLoader loader = findContextSpecificLoader(bindName, spec);
        if (loader instanceof OptionLoader) {
            optionLoaders.add((OptionLoader) loader);
        } else if (!$.isSimpleType(spec.rawType())) {
            // NOTE(review): intentionally empty? Complex non-option params
            // are silently ignored here — looks like leftover scaffolding.
        }
    }
}
}
| |
/*
* (C) 2001 by Argonne National Laboratory
* See COPYRIGHT in top-level directory.
*/
/*
* @author Anthony Chan
*/
package viewer.convertor;
import java.awt.Color;
import java.awt.Insets;
import java.awt.Dimension;
import java.awt.Component;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JPanel;
import javax.swing.JLabel;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JTextField;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JProgressBar;
import javax.swing.ImageIcon;
import javax.swing.AbstractButton;
import javax.swing.BorderFactory;
import javax.swing.border.Border;
import javax.swing.border.TitledBorder;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
// import java.util.Properties;
import java.io.File;
// import java.io.InputStream;
// import java.io.InputStreamReader;
// import java.io.BufferedReader;
// import java.io.IOException;
import java.net.URL;
import logformat.slog2.input.InputLog;
import viewer.common.Const;
import viewer.common.Dialogs;
import viewer.common.Routines;
import viewer.common.CustomCursor;
import viewer.common.ActableTextField;
import viewer.common.LogFileChooser;
import viewer.common.RuntimeExecCommand;
public class ConvertorPanel extends JPanel
implements WaitingContainer
{
private static final long serialVersionUID = 11200L;
// Convertor selection and input/output file specification widgets.
private JComboBox cmd_pulldown;
private ActableTextField cmd_infile;
private JButton infile_btn;
private JTextField cmd_outfile;
private JButton outfile_btn;
// Process console output and conversion status indicators.
private AdvancingTextArea cmd_textarea;
private JTextField cmd_outfile_size;
private JProgressBar cmd_progress;
// JVM / jar invocation settings shown in the lower split pane.
private JTextField cmd_option4jvm;
private JTextField cmd_option4jar;
private JTextField cmd_path2jvm;
private ActableTextField cmd_path2jardir;
private JTextField cmd_path2tracelib;
private JSplitPane cmd_splitter;
// Action buttons along the bottom of the panel; the close buttons may be
// null depending on how the panel was constructed.
private JButton cmd_start_btn;
private JButton cmd_stop_btn;
private JButton cmd_help_btn;
private JButton cmd_close4ok_btn;
private JButton cmd_close4cancel_btn;
// Hosting window, used as the parent of popup dialogs.
private Window top_window;
private LogFileChooser file_chooser;
// private String file_sep, path_sep;
// Error message recorded during initComponents(), reported in init().
private String err_msg;
// Background worker running the convertor process; null while idle.
private SwingProcessWorker logconv_worker;
// Builds the panel.  When in_file_chooser is non-null the panel reuses that
// chooser and shows an extra "OK" close button; otherwise it creates its own.
public ConvertorPanel( LogFileChooser in_file_chooser )
{
    super();
    this.initComponents( in_file_chooser != null );
    this.initAllTextFields();
    if ( in_file_chooser != null )
        file_chooser = in_file_chooser;
    else
        file_chooser = new LogFileChooser( false );
    // Wire up all user interactions (must run after initComponents()).
    cmd_pulldown.addActionListener( new PulldownListener() );
    cmd_infile.addActionListener( new LogNameListener() );
    infile_btn.addActionListener( new InputFileSelectorListener() );
    outfile_btn.addActionListener( new OutputFileSelectorListener() );
    cmd_path2jardir.addActionListener( new JarDirectoryListener() );
    cmd_start_btn.addActionListener( new StartConvertorListener() );
    cmd_stop_btn.addActionListener( new StopConvertorListener() );
    cmd_help_btn.addActionListener( new HelpConvertorListener() );
    // Start in the idle state: normal cursor, Convert enabled, Stop disabled.
    this.finalizeWaiting();
    logconv_worker = null;
}
public void init( String filename )
{
    // Locate the hosting window (for dialogs) and collapse the lower pane.
    top_window = SwingUtilities.windowForComponent( this );
    cmd_splitter.setDividerLocation( 1.0d );
    boolean has_filename = ( filename != null && filename.length() > 0 );
    if ( has_filename ) {
        cmd_infile.setText( filename );
        // Triggers LogNameListener to derive the default output name.
        cmd_infile.fireActionPerformed();
        cmd_pulldown.setSelectedItem(
            ConvertorConst.getDefaultConvertor( filename ) );
        if ( cmd_close4ok_btn != null ) {
            cmd_close4ok_btn.setEnabled( false );
        }
        if ( cmd_close4cancel_btn != null ) {
            cmd_close4cancel_btn.setEnabled( true );
        }
    }
    // Surface any error recorded while the components were being built.
    if ( err_msg != null ) {
        Dialogs.error( top_window, err_msg );
    }
}
private URL getURL( String filename )
{
    // Resolve the resource (e.g. a toolbar icon) via this class' classpath;
    // returns null when the resource cannot be found.
    return getClass().getResource( filename );
}
// Builds the entire panel: an upper pane (convertor pulldown, input/output
// file rows, process console, file-size/progress status), a lower pane
// (JVM and jar invocation settings), joined by a vertical splitter, plus a
// row of action buttons.  When has_close4ok_btn is true an extra "OK"
// close button is appended to the button row.
//
// Fixes vs. previous revision: the declaration comments of pfld_pref_sz and
// pbar_pref_sz were swapped relative to their actual uses, and the err_msg
// string misspelled "Manual" as "Manuel".
private void initComponents( boolean has_close4ok_btn )
{
    Border raised_border, etched_border;
    raised_border = BorderFactory.createRaisedBevelBorder();
    etched_border = BorderFactory.createEtchedBorder();
    // Setup all relevant Dimension of various components
    Dimension row_pref_sz;   // for typical row JPanel
    Dimension lbl_pref_sz;   // for all JLabel
    // Dimension fld_pref_sz;   // for all JTextField
    Dimension pfld_pref_sz;  // for JTextField of Output File Size
    Dimension pbar_pref_sz;  // for JProgressBar
    row_pref_sz  = new Dimension( 410, 30 );
    lbl_pref_sz  = new Dimension( 130, 26 );
    // fld_pref_sz = new Dimension( row_pref_sz.width - lbl_pref_sz.width,
    //                              lbl_pref_sz.height );
    pfld_pref_sz = new Dimension( lbl_pref_sz.width,
                                  2 * lbl_pref_sz.height );
    pbar_pref_sz = new Dimension( row_pref_sz.width,
                                  pfld_pref_sz.height );
    super.setLayout( new BoxLayout( this, BoxLayout.Y_AXIS ) );

    Color thumb_color, pulldown_bg_color;
    thumb_color = UIManager.getColor( "ScrollBar.thumb" );
    pulldown_bg_color = Routines.getSlightBrighterColor( thumb_color );

    JLabel label;
    Insets btn_insets;
    URL    icon_URL;

    // ---- Upper pane --------------------------------------------------
    JPanel upper_panel = new JPanel();
    upper_panel.setAlignmentX( Component.CENTER_ALIGNMENT );
    upper_panel.setLayout( new BoxLayout( upper_panel,
                                          BoxLayout.Y_AXIS ) );
    upper_panel.add( Box.createVerticalStrut( 4 ) );

    // Row 1: convertor selection pulldown.
    JPanel cmd_name_panel = new JPanel();
    cmd_name_panel.setAlignmentX( Component.CENTER_ALIGNMENT );
    cmd_name_panel.setLayout( new BoxLayout( cmd_name_panel,
                                             BoxLayout.X_AXIS ) );
    cmd_name_panel.add( Box.createHorizontalStrut( 5 ) );
    cmd_pulldown = new JComboBox();
    cmd_pulldown.setForeground( Color.yellow );
    cmd_pulldown.setBackground( pulldown_bg_color );
    cmd_pulldown.setToolTipText( " Logfile Convertor's Name " );
    cmd_pulldown.addItem( ConvertorConst.CLOG2_TO_SLOG2 );
    cmd_pulldown.addItem( ConvertorConst.CLOG_TO_SLOG2 );
    cmd_pulldown.addItem( ConvertorConst.RLOG_TO_SLOG2 );
    cmd_pulldown.addItem( ConvertorConst.UTE_TO_SLOG2 );
    cmd_pulldown.addItem( ConvertorConst.TXT_TO_SLOG2 );
    cmd_pulldown.setBorder( raised_border );
    cmd_pulldown.setEditable( false );
    cmd_pulldown.setAlignmentX( Component.CENTER_ALIGNMENT );
    cmd_name_panel.add( cmd_pulldown );
    cmd_name_panel.add( Box.createHorizontalStrut( 5 ) );
    Routines.setShortJComponentSizes( cmd_name_panel,
                                      row_pref_sz );
    upper_panel.add( cmd_name_panel );
    upper_panel.add( Box.createVerticalStrut( 4 ) );

    btn_insets = new Insets( 1, 1, 1, 1 );

    // Row 2: input file text field + browse button.
    JPanel cmd_infile_panel = new JPanel();
    cmd_infile_panel.setAlignmentX( Component.CENTER_ALIGNMENT );
    cmd_infile_panel.setLayout( new BoxLayout( cmd_infile_panel,
                                               BoxLayout.X_AXIS ) );
    label = new JLabel( " Input File Spec. : " );
    label.setToolTipText(
          "File Specification of the Input Trace File." );
    Routines.setShortJComponentSizes( label, lbl_pref_sz );
    cmd_infile_panel.add( label );
    cmd_infile = new ActableTextField();
    cmd_infile_panel.add( cmd_infile );
    icon_URL = getURL( Const.IMG_PATH + "Open24.gif" );
    infile_btn = null;
    if ( icon_URL != null )
        infile_btn = new JButton( new ImageIcon( icon_URL ) );
    else
        infile_btn = new JButton( "Browse" );
    infile_btn.setToolTipText( "Select a new Input Logfile" );
    infile_btn.setMargin( btn_insets );
    cmd_infile_panel.add( infile_btn );
    Routines.setShortJComponentSizes( cmd_infile_panel,
                                      row_pref_sz );
    upper_panel.add( cmd_infile_panel );
    upper_panel.add( Box.createVerticalStrut( 4 ) );

    // Row 3: output file text field + browse button.
    JPanel cmd_outfile_panel = new JPanel();
    cmd_outfile_panel.setAlignmentX( Component.CENTER_ALIGNMENT );
    cmd_outfile_panel.setLayout( new BoxLayout( cmd_outfile_panel,
                                                BoxLayout.X_AXIS ) );
    label = new JLabel( " Output File Name : " );
    label.setToolTipText( "File Name of the SLOG-2 File" );
    Routines.setShortJComponentSizes( label, lbl_pref_sz );
    cmd_outfile_panel.add( label );
    cmd_outfile = new JTextField();
    cmd_outfile_panel.add( cmd_outfile );
    icon_URL = getURL( Const.IMG_PATH + "Open24.gif" );
    outfile_btn = null;
    if ( icon_URL != null )
        outfile_btn = new JButton( new ImageIcon( icon_URL ) );
    else
        outfile_btn = new JButton( "Browse" );
    outfile_btn.setToolTipText( "Select a new Output Logfile" );
    outfile_btn.setMargin( btn_insets );
    cmd_outfile_panel.add( outfile_btn );
    Routines.setShortJComponentSizes( cmd_outfile_panel,
                                      row_pref_sz );
    upper_panel.add( cmd_outfile_panel );
    upper_panel.add( Box.createVerticalStrut( 4 ) );

    // Row 4: scrollable console echoing the convertor process output.
    cmd_textarea = new AdvancingTextArea();
    cmd_textarea.setColumns( 50 );
    cmd_textarea.setRows( 5 );
    cmd_textarea.setEditable( false );
    cmd_textarea.setLineWrap( false );
    JScrollPane scroller = new JScrollPane( cmd_textarea );
    scroller.setAlignmentX( Component.CENTER_ALIGNMENT );
    upper_panel.add( scroller );
    upper_panel.add( Box.createVerticalStrut( 4 ) );

    // Row 5: output file size read-out + output/input size-ratio progress.
    JPanel cmd_outfile_status_panel = new JPanel();
    cmd_outfile_status_panel.setAlignmentX(
                             Component.CENTER_ALIGNMENT );
    cmd_outfile_status_panel.setLayout(
                             new BoxLayout( cmd_outfile_status_panel,
                                            BoxLayout.X_AXIS ) );
    JPanel cmd_outfile_size_panel = new JPanel();
    cmd_outfile_size_panel.setAlignmentY(
                           Component.CENTER_ALIGNMENT );
    cmd_outfile_size_panel.setLayout(
                           new BoxLayout( cmd_outfile_size_panel,
                                          BoxLayout.X_AXIS ) );
    cmd_outfile_size_panel.setBorder(
                           new TitledBorder( etched_border,
                                             " Output File Size ") );
    cmd_outfile_size = new JTextField();
    cmd_outfile_size.setEditable( false );
    cmd_outfile_size_panel.add( cmd_outfile_size );
    Routines.setShortJComponentSizes( cmd_outfile_size_panel,
                                      pfld_pref_sz );
    cmd_outfile_status_panel.add( cmd_outfile_size_panel );
    JPanel cmd_progress_panel = new JPanel();
    cmd_progress_panel.setAlignmentY(
                       Component.CENTER_ALIGNMENT );
    cmd_progress_panel.setLayout(
                       new BoxLayout( cmd_progress_panel,
                                      BoxLayout.X_AXIS ) );
    cmd_progress_panel.setBorder(
                       new TitledBorder( etched_border,
                           " Output to Input Logfile Size Ratio " ) );
    cmd_progress = new JProgressBar();
    cmd_progress.setStringPainted( true );
    cmd_progress_panel.add( cmd_progress );
    Routines.setShortJComponentSizes( cmd_progress_panel,
                                      pbar_pref_sz );
    cmd_outfile_status_panel.add( cmd_progress_panel );
    upper_panel.add( cmd_outfile_status_panel );

    // ---- Lower pane: JVM / jar invocation settings -------------------
    row_pref_sz = new Dimension( 410, 27 );
    lbl_pref_sz = new Dimension( 130, 25 );
    // fld_pref_sz = new Dimension( row_pref_sz.width - lbl_pref_sz.width,
    //                              lbl_pref_sz.height );
    JPanel lower_panel = new JPanel();
    lower_panel.setAlignmentX( Component.CENTER_ALIGNMENT );
    lower_panel.setLayout( new BoxLayout( lower_panel,
                                          BoxLayout.Y_AXIS ) );
    lower_panel.add( Box.createVerticalStrut( 4 ) );

    JPanel cmd_path2jvm_panel = new JPanel();
    cmd_path2jvm_panel.setAlignmentX(
                       Component.CENTER_ALIGNMENT );
    cmd_path2jvm_panel.setLayout(
                       new BoxLayout( cmd_path2jvm_panel,
                                      BoxLayout.X_AXIS ) );
    label = new JLabel( " JVM Path : " );
    label.setToolTipText(
          "Full Pathname of the Java Virtual Machine." );
    Routines.setShortJComponentSizes( label, lbl_pref_sz );
    cmd_path2jvm_panel.add( label );
    cmd_path2jvm = new JTextField();
    // Routines.setShortJComponentSizes( cmd_path2jvm,
    //                                   fld_pref_sz );
    cmd_path2jvm_panel.add( cmd_path2jvm );
    Routines.setShortJComponentSizes( cmd_path2jvm_panel,
                                      row_pref_sz );
    lower_panel.add( cmd_path2jvm_panel );
    lower_panel.add( Box.createVerticalGlue() );
    lower_panel.add( Box.createVerticalStrut( 4 ) );

    JPanel cmd_option4jvm_panel = new JPanel();
    cmd_option4jvm_panel.setAlignmentX(
                         Component.CENTER_ALIGNMENT );
    cmd_option4jvm_panel.setLayout(
                         new BoxLayout( cmd_option4jvm_panel,
                                        BoxLayout.X_AXIS ) );
    label = new JLabel( " JVM Option : " );
    label.setToolTipText(
          "Option to the Java Virtual Machine." );
    Routines.setShortJComponentSizes( label, lbl_pref_sz );
    cmd_option4jvm_panel.add( label );
    cmd_option4jvm = new JTextField();
    // Routines.setShortJComponentSizes( cmd_option4jvm,
    //                                   fld_pref_sz );
    cmd_option4jvm_panel.add( cmd_option4jvm );
    Routines.setShortJComponentSizes( cmd_option4jvm_panel,
                                      row_pref_sz );
    lower_panel.add( cmd_option4jvm_panel );
    lower_panel.add( Box.createVerticalGlue() );
    lower_panel.add( Box.createVerticalStrut( 4 ) );

    JPanel cmd_path2jardir_panel = new JPanel();
    cmd_path2jardir_panel.setAlignmentX(
                          Component.CENTER_ALIGNMENT );
    cmd_path2jardir_panel.setLayout(
                          new BoxLayout( cmd_path2jardir_panel,
                                         BoxLayout.X_AXIS ) );
    label = new JLabel( " JAR Directory : " );
    label.setToolTipText( "Directory of the .jar files." );
    Routines.setShortJComponentSizes( label, lbl_pref_sz );
    cmd_path2jardir_panel.add( label );
    cmd_path2jardir = new ActableTextField();
    // Routines.setShortJComponentSizes( cmd_path2jardir,
    //                                   fld_pref_sz );
    cmd_path2jardir_panel.add( cmd_path2jardir );
    Routines.setShortJComponentSizes( cmd_path2jardir_panel,
                                      row_pref_sz );
    lower_panel.add( cmd_path2jardir_panel );
    lower_panel.add( Box.createVerticalGlue() );
    lower_panel.add( Box.createVerticalStrut( 4 ) );

    JPanel cmd_option4jar_panel = new JPanel();
    cmd_option4jar_panel.setAlignmentX(
                         Component.CENTER_ALIGNMENT );
    cmd_option4jar_panel.setLayout(
                         new BoxLayout( cmd_option4jar_panel,
                                        BoxLayout.X_AXIS ) );
    label = new JLabel( " JAR Option : " );
    label.setToolTipText( "Option to the selected Convertor." );
    Routines.setShortJComponentSizes( label, lbl_pref_sz );
    cmd_option4jar_panel.add( label );
    cmd_option4jar = new JTextField();
    // Routines.setShortJComponentSizes( cmd_option4jar,
    //                                   fld_pref_sz );
    cmd_option4jar_panel.add( cmd_option4jar );
    Routines.setShortJComponentSizes( cmd_option4jar_panel,
                                      row_pref_sz );
    lower_panel.add( cmd_option4jar_panel );
    lower_panel.add( Box.createVerticalGlue() );
    lower_panel.add( Box.createVerticalStrut( 4 ) );

    JPanel cmd_path2tracelib_panel = new JPanel();
    cmd_path2tracelib_panel.setAlignmentX(
                            Component.CENTER_ALIGNMENT );
    cmd_path2tracelib_panel.setLayout(
                            new BoxLayout( cmd_path2tracelib_panel,
                                           BoxLayout.X_AXIS ) );
    label = new JLabel( " TraceLibrary Path : " );
    label.setToolTipText(
          "Trace Input Library path of the selected Convertor" );
    Routines.setShortJComponentSizes( label, lbl_pref_sz );
    cmd_path2tracelib_panel.add( label );
    cmd_path2tracelib = new JTextField();
    // Routines.setShortJComponentSizes( cmd_path2tracelib,
    //                                   fld_pref_sz );
    cmd_path2tracelib_panel.add( cmd_path2tracelib );
    Routines.setShortJComponentSizes( cmd_path2tracelib_panel,
                                      row_pref_sz );
    lower_panel.add( cmd_path2tracelib_panel );
    lower_panel.add( Box.createVerticalStrut( 4 ) );

    // ---- Splitter joining the two panes ------------------------------
    cmd_splitter = new JSplitPane( JSplitPane.VERTICAL_SPLIT, true,
                                   upper_panel, lower_panel );
    cmd_splitter.setAlignmentX( Component.CENTER_ALIGNMENT );
    cmd_splitter.setOneTouchExpandable( true );
    err_msg = null;
    try {
        // setResizeWeight() only exists in J2SDK 1.3+; record (don't throw)
        // an error message for older runtimes and report it later in init().
        cmd_splitter.setResizeWeight( 1.0d );
    } catch ( NoSuchMethodError err ) {
        err_msg =
          "Method JSplitPane.setResizeWeight() cannot be found.\n"
        + "This indicates you are running an older Java2 RunTime,\n"
        + "like the one in J2SDK 1.2.2 or older. If this is the case,\n"
        + "some features in Convertor window may not work correctly,\n"
        + "For instance, resize of the window may not resize upper \n"
        + "TextArea. Manual movement of splitter is needed.\n";
    }
    super.add( cmd_splitter );
    super.add( Box.createVerticalStrut( 4 ) );

    // ---- Bottom button row: Convert / Stop / Usage / Cancel [/ OK] ---
    JPanel cmd_button_panel = new JPanel();
    cmd_button_panel.setLayout( new BoxLayout( cmd_button_panel,
                                               BoxLayout.X_AXIS ) );
    cmd_button_panel.setAlignmentX( Component.CENTER_ALIGNMENT );
    cmd_button_panel.add( Box.createHorizontalGlue() );
    btn_insets = new Insets( 2, 4, 2, 4 );
    cmd_start_btn = new JButton( "Convert" );
    icon_URL = getURL( Const.IMG_PATH + "Convert24.gif" );
    if ( icon_URL != null ) {
        cmd_start_btn.setIcon( new ImageIcon( icon_URL ) );
        cmd_start_btn.setVerticalTextPosition(
                      AbstractButton.CENTER );
        cmd_start_btn.setHorizontalTextPosition(
                      AbstractButton.RIGHT );
        cmd_start_btn.setMargin( btn_insets );
    }
    cmd_start_btn.setToolTipText(
                  "Proceed with the selected logfile conversion." );
    cmd_button_panel.add( cmd_start_btn );
    cmd_button_panel.add( Box.createHorizontalGlue() );
    cmd_stop_btn = new JButton( " Stop " );
    icon_URL = getURL( Const.IMG_PATH + "Stop24.gif" );
    if ( icon_URL != null ) {
        cmd_stop_btn.setIcon( new ImageIcon( icon_URL ) );
        cmd_stop_btn.setVerticalTextPosition(
                     AbstractButton.CENTER );
        cmd_stop_btn.setHorizontalTextPosition(
                     AbstractButton.RIGHT );
        // cmd_stop_btn.setMargin( btn_insets );
    }
    cmd_stop_btn.setToolTipText(
                 "Stop the ongoing logfile conversion." );
    cmd_button_panel.add( cmd_stop_btn );
    cmd_button_panel.add( Box.createHorizontalGlue() );
    cmd_help_btn = new JButton( " Usage " );
    icon_URL = getURL( Const.IMG_PATH + "About24.gif" );
    if ( icon_URL != null ) {
        cmd_help_btn.setIcon( new ImageIcon( icon_URL ) );
        cmd_help_btn.setVerticalTextPosition(
                     AbstractButton.CENTER );
        cmd_help_btn.setHorizontalTextPosition(
                     AbstractButton.RIGHT );
        // cmd_help_btn.setMargin( btn_insets );
    }
    cmd_help_btn.setToolTipText(
                 "Usage information of the selected logfile convertor." );
    cmd_button_panel.add( cmd_help_btn );
    cmd_button_panel.add( Box.createHorizontalGlue() );
    cmd_close4cancel_btn = new JButton( "Cancel" );
    icon_URL = getURL( Const.IMG_PATH + "ConvertCancel24.gif" );
    if ( icon_URL != null ) {
        cmd_close4cancel_btn.setIcon( new ImageIcon( icon_URL ) );
        cmd_close4cancel_btn.setVerticalTextPosition(
                             AbstractButton.CENTER );
        cmd_close4cancel_btn.setHorizontalTextPosition(
                             AbstractButton.RIGHT );
        // cmd_close4cancel_btn.setMargin( btn_insets );
    }
    cmd_close4cancel_btn.setToolTipText( "Close this panel." );
    cmd_button_panel.add( cmd_close4cancel_btn );
    cmd_button_panel.add( Box.createHorizontalGlue() );
    cmd_close4ok_btn = null;
    if ( has_close4ok_btn ) {
        cmd_close4ok_btn = new JButton( "OK" );
        icon_URL = getURL( Const.IMG_PATH + "ConvertOk24.gif" );
        if ( icon_URL != null ) {
            cmd_close4ok_btn.setIcon( new ImageIcon( icon_URL ) );
            cmd_close4ok_btn.setVerticalTextPosition(
                             AbstractButton.CENTER );
            cmd_close4ok_btn.setHorizontalTextPosition(
                             AbstractButton.RIGHT );
            // cmd_close4ok_btn.setMargin( btn_insets );
        }
        cmd_close4ok_btn.setToolTipText(
                         "Display the last converted SLOG2 logfile "
                       + "and Exit this dialog box." );
        cmd_button_panel.add( cmd_close4ok_btn );
        cmd_button_panel.add( Box.createHorizontalGlue() );
    }
    super.add( cmd_button_panel );
}
// Seeds the JVM path, jar directory and JVM option text fields with their
// platform defaults.  Called once from the constructor, after the fields
// have been created by initComponents().
private void initAllTextFields()
{
    String path2jardir;
    String option4jvm;
    ConvertorConst.initializeSystemProperties();
    // set the path to JVM
    cmd_path2jvm.setText( ConvertorConst.getDefaultPathToJVM() );
    // set the path to all the jar files
    path2jardir = ConvertorConst.getDefaultPathToJarDir();
    cmd_path2jardir.setText( path2jardir );
    // set the JVM option
    option4jvm = null;
    try {
        option4jvm = cmd_option4jvm.getText();
    } catch ( NullPointerException err ) {}
    // BUGFIX: the original code had a stray ';' terminating this 'if',
    // which made the condition a no-op so the default heap options always
    // clobbered any pre-existing JVM option.  Only apply the default when
    // the field is still empty.
    if ( option4jvm == null || option4jvm.length() <= 0 )
        cmd_option4jvm.setText( "-Xms32m -Xmx64m" );
}
// Pops up the shared file chooser and returns the chosen file's path, or
// null when the user cancels (an info dialog is shown) or nothing is picked.
// Remembers the chosen file's directory for the next invocation.
public String selectLogFile()
{
    int istat = file_chooser.showOpenDialog( top_window );
    if ( istat != LogFileChooser.APPROVE_OPTION ) {
        Dialogs.info( top_window, "No file chosen", null );
        return null;
    }
    File selected_file = file_chooser.getSelectedFile();
    if ( selected_file == null ) {
        return null;
    }
    File selected_dir = selected_file.getParentFile();
    if ( selected_dir != null ) {
        file_chooser.setCurrentDirectory( selected_dir );
    }
    return selected_file.getPath();
}
// Runs "<jvm> <jvm-options> [-Djava.library.path=...] -jar <convertor.jar> -h"
// for the currently selected convertor and pipes the process' stdout and
// stderr into the console text area.  Blocks the calling thread until the
// process exits.
private void printSelectedConvertorHelp()
{
    String convertor;
    String path2jardir;
    String path2tracelib;
    String jar_path;
    RuntimeExecCommand exec_cmd;
    File jar_file;
    Runtime runtime;
    Process proc;
    InputStreamThread proc_err_task, proc_out_task;
    convertor = (String) cmd_pulldown.getSelectedItem();
    // Set the path to the jar file
    path2jardir = cmd_path2jardir.getText();
    jar_path = ConvertorConst.getDefaultJarPath( path2jardir, convertor );
    jar_file = new File( jar_path );
    if ( ! jar_file.exists() ) {
        Dialogs.error( top_window, jar_path + " does not exist!" );
        return;
    }
    if ( ! jar_file.canRead() ) {
        Dialogs.error( top_window, jar_path + " is NOT readable!\n" );
        return;
    }
    // Assemble the command line piece by piece.
    exec_cmd = new RuntimeExecCommand();
    exec_cmd.addWholeString( cmd_path2jvm.getText() );
    exec_cmd.addTokenizedString( cmd_option4jvm.getText() );
    path2tracelib = cmd_path2tracelib.getText();
    if ( path2tracelib != null && path2tracelib.length() > 0 )
        exec_cmd.addWholeString( "-Djava.library.path=" + path2tracelib );
    exec_cmd.addWholeString( "-jar" );
    exec_cmd.addWholeString( jar_path );
    exec_cmd.addWholeString( "-h" );
    cmd_textarea.append( "Executing " + exec_cmd.toString() + "...." );
    runtime = Runtime.getRuntime();
    try {
        proc = runtime.exec( exec_cmd.toStringArray() );
        // Drain stderr and stdout on background threads so the process
        // cannot block on a full pipe.
        proc_err_task = new InputStreamThread( proc.getErrorStream(),
                                               "Error", cmd_textarea );
        proc_out_task = new InputStreamThread( proc.getInputStream(),
                                               "Output", cmd_textarea );
        proc_err_task.start();
        proc_out_task.start();
        // Block THIS thread till process returns!
        int proc_istatus = proc.waitFor();
        // Clean up InputStreamThread's when the process is done.
        proc_err_task.stopRunning();
        proc_err_task = null;
        proc_out_task.stopRunning();
        proc_out_task = null;
        cmd_textarea.append( "\n> Ending with exit status "
                           + proc_istatus + "\n" );
    } catch ( Throwable err ) {
        // NOTE(review): failures (including InterruptedException) only go
        // to the console via printStackTrace(), never to the UI.
        err.printStackTrace();
    }
}
// Validates the input/output file selections, builds the convertor command
// line, and launches it on a SwingProcessWorker that streams output into the
// console and drives the progress bar.  Returns the started worker, or null
// when validation fails or the user aborts (an explanatory dialog is shown
// in each failure case).
private SwingProcessWorker convertSelectedLogFile()
{
    String convertor;
    String path2jardir, path2tracelib;
    String infile_name, outfile_name, jar_path;
    String option4jar;
    File infile, outfile, jar_file;
    InputLog slog_ins;
    RuntimeExecCommand exec_cmd;
    SwingProcessWorker conv_worker;
    ProgressAction conv_progress;
    // Check the validity of the Input File
    infile_name = cmd_infile.getText();
    infile = new File( infile_name );
    if ( ! infile.exists() ) {
        Dialogs.error( top_window,
                       infile_name + " does not exist!\n"
                     + "No conversion will take place." );
        return null;
    }
    if ( infile.isDirectory() ) {
        Dialogs.error( top_window,
                       infile_name + " is a directory!\n"
                     + "No conversion will take place." );
        return null;
    }
    if ( ! infile.canRead() ) {
        Dialogs.error( top_window,
                       "File " + infile_name + " is NOT readable!\n"
                     + "No conversion will take place." );
        return null;
    }
    // Probe whether the input is already a SLOG-2 file; any failure to open
    // it as one simply means it is not, so exceptions are swallowed here.
    // NOTE(review): the InputLog opened below is never closed — possible
    // file-handle leak; confirm InputLog's close semantics.
    slog_ins = null;
    try {
        slog_ins = new InputLog( infile_name );
    } catch ( NullPointerException nperr ) {
        slog_ins = null;
    } catch ( Exception err ) {
        slog_ins = null;
    }
    if ( slog_ins != null && slog_ins.isSLOG2() ) {
        Dialogs.error( top_window,
                       infile_name + " is already a SLOG-2 file!\n"
                     + "No conversion will take place." );
        cmd_outfile.setText( infile_name );
        return null;
    }
    // Check the validity of the Output File
    outfile_name = cmd_outfile.getText();
    outfile = new File( outfile_name );
    if ( outfile.exists() ) {
        if ( outfile.isDirectory() ) {
            Dialogs.error( top_window,
                           outfile_name + " is a directory!\n"
                         + "No conversion will take place." );
            return null;
        }
        if ( ! outfile.canWrite() ) {
            Dialogs.error( top_window,
                           "File " + outfile_name + " cannot be written!\n"
                         + "No conversion will take place." );
            return null;
        }
        // Ask before clobbering an existing output file.
        if ( ! Dialogs.confirm( top_window,
                                outfile_name + " already exists! "
                              + "Do you want to overwrite it ?" ) ) {
            Dialogs.info( top_window,
                          "Please change the output filename "
                        + "and restart the conversion again.",
                          null );
            return null;
        }
        outfile.delete();
    }
    convertor = (String) cmd_pulldown.getSelectedItem();
    // Set the path to the jar file
    path2jardir = cmd_path2jardir.getText();
    jar_path = ConvertorConst.getDefaultJarPath( path2jardir, convertor );
    jar_file = new File( jar_path );
    // NOTE(review): unlike printSelectedConvertorHelp(), readability of the
    // jar is not checked here — only its existence.
    if ( ! jar_file.exists() ) {
        Dialogs.error( top_window, jar_path + " does not exist!" );
        return null;
    }
    // Assemble: <jvm> <jvm-opts> [-Djava.library.path=...] -jar <jar>
    //           [<jar-opts>] -o <outfile> <infile>
    exec_cmd = new RuntimeExecCommand();
    exec_cmd.addWholeString( cmd_path2jvm.getText() );
    exec_cmd.addTokenizedString( cmd_option4jvm.getText() );
    path2tracelib = cmd_path2tracelib.getText();
    if ( path2tracelib != null && path2tracelib.length() > 0 )
        exec_cmd.addWholeString( "-Djava.library.path=" + path2tracelib );
    exec_cmd.addWholeString( "-jar" );
    exec_cmd.addWholeString( jar_path );
    option4jar = cmd_option4jar.getText();
    if ( option4jar != null && option4jar.length() > 0 )
        exec_cmd.addTokenizedString( option4jar );
    exec_cmd.addWholeString( "-o" );
    exec_cmd.addWholeString( outfile_name );
    exec_cmd.addWholeString( infile_name );
    /*
       Start a SwingWorker thread to execute the process:
       Prepare a progress action for the JProgressBar for the SwingWorker
    */
    conv_progress = new ProgressAction( cmd_outfile_size, cmd_progress );
    conv_progress.initialize( infile, outfile );
    conv_worker = new SwingProcessWorker( this, cmd_textarea );
    conv_worker.initialize( exec_cmd.toStringArray(), conv_progress );
    conv_worker.start();
    return conv_worker;
}
// Puts every action button into the state matching whether a conversion is
// currently running: Stop is live only while converting; Convert, Usage and
// Cancel only while idle; OK only after a conversion that ended normally.
private void resetAllButtons( boolean isConvertingLogfile )
{
    boolean is_idle = !isConvertingLogfile;
    cmd_start_btn.setEnabled( is_idle );
    cmd_stop_btn.setEnabled( isConvertingLogfile );
    cmd_help_btn.setEnabled( is_idle );
    if ( cmd_close4cancel_btn != null ) {
        cmd_close4cancel_btn.setEnabled( is_idle );
    }
    if ( cmd_close4ok_btn != null ) {
        // logconv_worker == null means the process was never started or was
        // stopped manually by the user.
        boolean conversion_succeeded =
            ( logconv_worker != null && logconv_worker.isEndedNormally() );
        cmd_close4ok_btn.setEnabled( conversion_succeeded && is_idle );
    }
}
// Interface for WaitingContainer (used by SwingProcessWorker):
// entering the busy state — wait cursor over the splitter, buttons set
// for an in-flight conversion.
public void initializeWaiting()
{
    Routines.setComponentAndChildrenCursors( cmd_splitter,
                                             CustomCursor.Wait );
    resetAllButtons( true );
}
// Interface for WaitingContainer (used by SwingProcessWorker):
// leaving the busy state — buttons back to idle, normal cursor restored.
public void finalizeWaiting()
{
    resetAllButtons( false );
    Routines.setComponentAndChildrenCursors( cmd_splitter,
                                             CustomCursor.Normal );
}
// Registers an external listener on the OK button; silently ignored when
// the panel was built without one or the listener is null.
public void addActionListenerForOkayButton( ActionListener action )
{
    if ( cmd_close4ok_btn != null && action != null ) {
        cmd_close4ok_btn.addActionListener( action );
    }
}
// Registers an external listener on the Cancel button; silently ignored
// when the button is absent or the listener is null.
public void addActionListenerForCancelButton( ActionListener action )
{
    if ( cmd_close4cancel_btn != null && action != null ) {
        cmd_close4cancel_btn.addActionListener( action );
    }
}
// Name of the SLOG-2 output file as currently shown in the UI text field.
public String getOutputSLOG2Name()
{
    return cmd_outfile.getText();
}
// Whenever the input file name is committed, derive and display the
// convertor's default SLOG-2 output name for it.
private class LogNameListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        String in_name = cmd_infile.getText();
        String out_name = ConvertorConst.getDefaultSLOG2Name( in_name );
        cmd_outfile.setText( out_name );
    }
}
// Browse button for the input file: let the user pick a logfile, install
// it in the input field, then echo the selected convertor's usage text.
private class InputFileSelectorListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        String filename = selectLogFile();
        if ( filename == null || filename.length() == 0 )
            return;
        cmd_infile.setText( filename );
        printSelectedConvertorHelp();
    }
}
// Browse button for the output file: install the user's pick in the
// output field.
private class OutputFileSelectorListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        String filename = selectLogFile();
        if ( filename == null || filename.length() == 0 )
            return;
        cmd_outfile.setText( filename );
    }
}
// When a different convertor is chosen, reset the trace-library path to
// that convertor's default and echo its usage text in the console.
private class PulldownListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        String convertor = (String) cmd_pulldown.getSelectedItem();
        String path2jardir = ConvertorConst.getDefaultPathToJarDir();
        String tracelib_path =
            ConvertorConst.getDefaultTraceLibPath( convertor, path2jardir );
        cmd_path2tracelib.setText( tracelib_path );
        printSelectedConvertorHelp();
    }
}
// Keeps the TraceLibrary Path field in sync with the JAR Directory field.
// If the user has customized the trace-library path, asks for confirmation
// before overwriting it with the recomputed default.
private class JarDirectoryListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        String convertor, path2jardir;
        String def_path2tracelib, cur_path2tracelib, new_path2tracelib;
        convertor = (String) cmd_pulldown.getSelectedItem();
        cur_path2tracelib = cmd_path2tracelib.getText();
        if ( cur_path2tracelib == null )
            cur_path2tracelib = "";
        // Check if the TraceLibrary Path has been updated,
        // i.e. already synchronized with JAR Directory.
        path2jardir = cmd_path2jardir.getText();
        new_path2tracelib = ConvertorConst.getDefaultTraceLibPath(
                            convertor, path2jardir );
        if ( new_path2tracelib == null )
            new_path2tracelib = "";
        if ( cur_path2tracelib.equals( new_path2tracelib ) )
            return;   // already in sync: nothing to do
        // Check if path2tracelib is different from the default
        // (computed from the built-in default JAR directory, not the one
        // currently typed into the field).
        path2jardir = ConvertorConst.getDefaultPathToJarDir();
        def_path2tracelib = ConvertorConst.getDefaultTraceLibPath(
                            convertor, path2jardir );
        if ( def_path2tracelib == null )
            def_path2tracelib = "";
        if ( ! cur_path2tracelib.equals( def_path2tracelib ) ) {
            // The user edited the trace-library path by hand: confirm
            // before clobbering their customization.
            if ( ! Dialogs.confirm( top_window,
                    "TraceLibrary Path has been modified "
                  + "from the original default value.\n"
                  + "Should it be updated by the new "
                  + "default value based on your modified "
                  + "JAR Directory ?" ) ) {
                return;
            }
        }
        // Update the default TraceLibPaths with the modified cmd_path2jar.
        cmd_path2tracelib.setText( new_path2tracelib );
    }
}
// Starts the conversion of the currently selected logfile and keeps a
// reference to the worker so it can later be stopped or inspected.
private class StartConvertorListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        logconv_worker = convertSelectedLogFile();
    }
}
// Stops a running logfile conversion on user request.
private class StopConvertorListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        // Set logconv_worker = null when the conversion is stopped manually
        // so resetAllButtons() can set close4ok button accordingly.
        if ( logconv_worker != null ) {
            logconv_worker.finished();
            logconv_worker = null;
        }
    }
}
// Shows help for the currently selected convertor.
private class HelpConvertorListener implements ActionListener
{
    public void actionPerformed( ActionEvent evt )
    {
        // First re-sync the trace-library path with the JAR directory.
        // NOTE(review): fireActionPerformed() is protected on standard Swing
        // text fields — presumably cmd_path2jardir is a custom component
        // that exposes it; confirm against its declaration.
        cmd_path2jardir.fireActionPerformed(); // call JarDirectoryListener
        printSelectedConvertorHelp();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ftpserver.clienttests;
import java.io.ByteArrayInputStream;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPConnectionClosedException;
import org.apache.commons.net.ftp.FTPReply;
import org.apache.ftpserver.ftplet.FtpStatistics;
/**
*
 * @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
/**
 * Tests of the FTP login sequence against the embedded test server:
 * USER/PASS handling, account (ACCT), re-login, REIN, anonymous login,
 * login statistics and the per-user connection limit.
 */
public class LoginTest extends ClientTestTemplate {
    private static final String UNKNOWN_USERNAME = "foo";

    private static final String UNKNOWN_PASSWORD = "bar";

    public void testLogin() throws Exception {
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
    }

    public void testCommandWithoutLogin() throws Exception {
        // A transfer command issued before login must be rejected.
        assertFalse(client.storeFile("foo", new ByteArrayInputStream("foo"
                .getBytes())));
    }

    public void testLoginNoUser() throws Exception {
        assertFalse(client.login(null, null));
    }

    public void testLoginDisabledUser() throws Exception {
        assertFalse(client.login("testuser4", "password"));
    }

    public void testLoginWithAccount() throws Exception {
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
        assertTrue(FTPReply.isPositiveCompletion(client.acct("FOO")));
    }

    public void testLoginWithEmptyPassword() throws Exception {
        assertTrue(FTPReply.isPositiveIntermediate(client.user(ADMIN_USERNAME)));
        // Empty PASS for a user that requires a password => 530 Not logged in.
        assertEquals(530, client.sendCommand("PASS"));
    }

    public void testLoginWithEmptyCorrectPassword() throws Exception {
        // "testuser3" accepts an empty password.
        assertTrue(FTPReply.isPositiveIntermediate(client.user("testuser3")));
        assertTrue(FTPReply.isPositiveCompletion(client.sendCommand("PASS")));
    }

    public void testLoginIncorrectPassword() throws Exception {
        assertFalse(client.login(ADMIN_USERNAME, UNKNOWN_PASSWORD));
    }

    public void testReLogin() throws Exception {
        // A failed login must not block a subsequent correct one.
        assertFalse(client.login(ADMIN_USERNAME, UNKNOWN_PASSWORD));
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
    }

    public void testDoubleLoginSameUser() throws Exception {
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
    }

    public void testDoubleLoginDifferentUser() throws Exception {
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
        assertFalse("Login with different user not allowed", client.login(
                TESTUSER1_USERNAME, TESTUSER_PASSWORD));
    }

    public void testREIN() throws Exception {
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
        assertTrue(FTPReply.isPositiveCompletion(client.rein()));
        // After REIN a different user may log in on the same connection.
        assertTrue(client.login(TESTUSER1_USERNAME, TESTUSER_PASSWORD));
    }

    public void testReLoginWithOnlyPass() throws Exception {
        assertFalse(client.login(ADMIN_USERNAME, UNKNOWN_PASSWORD));
        int reply = client.pass(ADMIN_PASSWORD);
        assertTrue(FTPReply.isNegativePermanent(reply));
    }

    public void testOnlyPass() throws Exception {
        // PASS without a preceding USER must be rejected permanently.
        int reply = client.pass(ADMIN_PASSWORD);
        assertTrue(FTPReply.isNegativePermanent(reply));
    }

    public void testLoginThenPass() throws Exception {
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
        int reply = client.pass(ADMIN_PASSWORD);
        assertTrue(FTPReply.isPositiveCompletion(reply));
    }

    public void testLoginAnon() throws Exception {
        assertTrue(client.login(ANONYMOUS_USERNAME, ANONYMOUS_PASSWORD));
    }

    public void testLoginUnknownUser() throws Exception {
        assertFalse(client.login(UNKNOWN_USERNAME, UNKNOWN_PASSWORD));
    }

    public void testLoginCount() throws Exception {
        FtpStatistics stats = server.getServerContext().getFtpStatistics();
        assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));
        assertEquals(1, stats.getCurrentLoginNumber());
        client.rein();
        client.logout();
        assertEquals(0, stats.getCurrentLoginNumber());
    }

    /*
     * public void testLoginWithMaxConnectionsPerIp() throws Exception {
     * String[] ips = getHostAddresses();
     *
     * if(ips.length > 1) { FTPClient client2 = new FTPClient();
     * client2.connect(ips[0], port); FTPClient client3 = new FTPClient();
     * client3.connect(ips[0], port); FTPClient client4 = new FTPClient();
     * client4.connect(ips[1], port); FTPClient client5 = new FTPClient();
     * client5.connect(ips[1], port); FTPClient client6 = new FTPClient();
     * client6.connect(ips[1], port);
     *
     * assertTrue(client2.login(TESTUSER2_USERNAME, TESTUSER_PASSWORD));
     * assertTrue(client3.login(TESTUSER2_USERNAME, TESTUSER_PASSWORD));
     * assertTrue(client4.login(TESTUSER2_USERNAME, TESTUSER_PASSWORD));
     * assertTrue(client5.login(TESTUSER2_USERNAME, TESTUSER_PASSWORD));
     *
     * try{ assertTrue(client6.login(TESTUSER2_USERNAME, TESTUSER_PASSWORD));
     * fail("Must throw FTPConnectionClosedException"); }
     * catch(FTPConnectionClosedException e) { // expected } } else { // ignore
     * test } }
     */

    /*
     * public void testLoginWithMaxConnectionsMulti() throws Exception { for(int
     * i = 0; i<50; i++) { testLoginWithMaxConnections(); } }
     */

    public void testLoginWithMaxConnections() throws Exception {
        FTPClient client1 = new FTPClient();
        FTPClient client2 = new FTPClient();
        FTPClient client3 = new FTPClient();
        FTPClient client4 = new FTPClient();

        try {
            client1.connect("localhost", getListenerPort());
            client2.connect("localhost", getListenerPort());
            client3.connect("localhost", getListenerPort());
            client4.connect("localhost", getListenerPort());

            assertTrue(client1.login(TESTUSER1_USERNAME, TESTUSER_PASSWORD));
            assertTrue(client2.login(TESTUSER1_USERNAME, TESTUSER_PASSWORD));
            assertTrue(client3.login(TESTUSER1_USERNAME, TESTUSER_PASSWORD));

            try {
                // The fourth login exceeds the limit; commons-net surfaces the
                // server-side disconnect as FTPConnectionClosedException.
                assertTrue(client4.login(TESTUSER1_USERNAME, TESTUSER_PASSWORD));
                // BUG FIX: the reply code must be read from client4 (the
                // connection that exceeded the limit), not from the inherited
                // "client" field. Only reached if login() returns normally.
                assertEquals(421, client4.getReplyCode());
                fail("Must throw FTPConnectionClosedException");
            } catch (FTPConnectionClosedException e) {
                // expected
            }
        } finally {
            closeQuietly(client1);
            closeQuietly(client2);
            closeQuietly(client3);
            closeQuietly(client4);
        }
    }

    /** Best-effort cleanup: logs out and disconnects, ignoring any failure. */
    private void closeQuietly(FTPClient client) {
        try {
            client.logout();
        } catch (Exception e) {
            // ignore
        }
        try {
            client.disconnect();
        } catch (Exception e) {
            // ignore
        }
    }
}
| |
/*******************************************************************************
* Copyright 2015 Junichi Tatemura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.nec.strudel.session.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import com.nec.strudel.session.LocalParam;
import com.nec.strudel.session.Param;
import com.nec.strudel.session.ParamBuilder;
import com.nec.strudel.session.StateParam;
import com.nec.strudel.util.RandomSelector;
public class ParamBuilderImpl implements ParamBuilder {
private final State state;
private final Param param;
public ParamBuilderImpl(State state, Param param) {
this.state = state;
this.param = param;
}
@Override
public ParamBuilder use(StateParam paramName) {
param.put(paramName, state.get(paramName));
return this;
}
@Override
public ParamBuilder use(LocalParam dst, StateParam src) {
param.put(dst, state.get(src));
return this;
}
@Override
public boolean defined(StateParam paramName) {
return state.get(paramName) != null;
}
@SuppressWarnings("unchecked")
@Nullable
@Override
public <T> T get(StateParam paramName) {
return (T) state.get(paramName);
}
@SuppressWarnings("unchecked")
@Override
public <T> List<T> getList(StateParam paramName) {
Object obj = state.get(paramName);
if (obj == null) {
return new ArrayList<T>();
} else if (obj instanceof List) {
return (List<T>) obj;
} else if (obj instanceof Collection) {
return new ArrayList<T>((Collection<T>) obj);
} else {
List<T> list = new ArrayList<T>();
list.add((T) obj);
return list;
}
}
@Override
public ParamBuilder set(LocalParam paramName, Object value) {
param.put(paramName, value);
return this;
}
@Override
public ParamBuilder randomInt(LocalParam name,
StateParam minName, StateParam maxName) {
param.put(name, getRandomInt(minName, maxName));
return this;
}
/**
* Defines a random integer ID
*
* @param name
* the parameter to be defined
* @param minName
* the minimum ID
* @param sizeName
* the size of (consecutive) ID numbers.
* @param excludeName
* the ID to be excluded
* @return this
*/
@Override
public ParamBuilder randomIntId(LocalParam name,
StateParam minName, StateParam sizeName,
StateParam excludeName) {
param.put(name, getRandomIntId(minName, sizeName, excludeName));
return this;
}
/**
* Defines a random integer ID
*
* @param name
* the parameter to be defined
* @param minName
* the minimum ID
* @param sizeName
* the size of (consecutive) ID numbers.
* @param excludeName
* the ID to be excluded
* @return this
*/
@Override
public ParamBuilder randomIntId(LocalParam name,
StateParam minName, StateParam sizeName) {
param.put(name, getRandomIntId(minName, sizeName));
return this;
}
@Override
public int getRandomIntId(StateParam minName, StateParam sizeName,
StateParam excludeName) {
int exclude = getInt(excludeName);
int min = getInt(minName);
int size = getInt(sizeName);
int max = min + size;
if (size == 1 && min == exclude) {
throw new RuntimeException(
"invalid randomInt:[" + min
+ "," + max + ") excluding " + exclude);
} else if (size <= 0) {
throw new RuntimeException(
"invalid randomInt:[" + min
+ "," + max + ")");
}
int value = 0;
RandomSelector<Integer> ints = RandomSelector.create(
min, max);
do {
value = ints.next(state.getRandom());
} while (exclude == value);
return value;
}
@Override
public int getRandomIntId(StateParam minName, StateParam sizeName) {
int min = getInt(minName);
int size = getInt(sizeName);
int max = min + size;
if (size <= 0) {
throw new RuntimeException(
"invalid randomInt:[" + min
+ "," + max + ")");
}
return getRandomInt(min, max);
}
@Override
public Set<Integer> getRandomIntIdSet(StateParam countName,
StateParam minName, StateParam sizeName) {
int count = getInt(countName);
int min = getInt(minName);
int size = getInt(sizeName);
int max = min + size;
if (size < count) {
throw new RuntimeException(
"invalid randomIntSet("
+ count + "):[" + min
+ "," + max + ")");
}
Set<Integer> set = new HashSet<Integer>();
if (count > size / 2) {
for (int i = min; i < max; i++) {
set.add(i);
}
while (set.size() > count) {
set.remove(getRandomInt(min, max));
}
} else {
while (set.size() < count) {
set.add(getRandomInt(min, max));
}
}
return set;
}
@Override
public ParamBuilder randomDouble(LocalParam name,
StateParam minName, StateParam maxName) {
param.put(name, getRandomDouble(minName, maxName));
return this;
}
@Override
public ParamBuilder randomAlphaString(LocalParam paramName, int length) {
param.put(paramName, getRandomAlphaString(length));
return this;
}
@Override
public ParamBuilder randomAlphaString(LocalParam paramName,
StateParam lengthParam) {
return randomAlphaString(paramName, getInt(lengthParam));
}
@Override
public int getInt(StateParam pname) {
return state.getInt(pname);
}
@Override
public double getDouble(StateParam name) {
return state.getDouble(name);
}
@Override
public int getRandomInt(StateParam minName, StateParam maxName) {
return getRandomInt(getInt(minName), getInt(maxName));
}
@Override
public int getRandomInt(int min, int max) {
return RandomSelector.create(min, max).next(state.getRandom());
}
@Override
public int getRandomInt(int max) {
return state.getRandom().nextInt(max);
}
@Override
public double getRandomDouble(StateParam minName, StateParam maxName) {
return getRandomDouble(getDouble(minName), getDouble(maxName));
}
@Override
public double getRandomDouble(double min, double max) {
return RandomSelector.createDouble(min, max).next(state.getRandom());
}
@Override
public double getRandomDouble() {
return state.getRandom().nextDouble();
}
@Override
public String getRandomAlphaString(int length) {
return RandomSelector.createAlphaString(length).next(state.getRandom());
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.io;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.IntObjectCache;
import com.intellij.util.io.storage.AbstractStorage;
import org.jetbrains.annotations.NotNull;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* @author Eugene Zhuravlev
* Date: Dec 19, 2007
*/
public class PersistentMapTest extends PersistentMapTestBase {
  // Basic CRUD semantics: put/get/remove/overwrite, and the key set that
  // getAllKeysWithExistingMapping() must keep in step with every mutation.
  public void testMap() throws IOException {
    myMap.put("AAA", "AAA_VALUE");

    assertEquals("AAA_VALUE", myMap.get("AAA"));
    assertNull(myMap.get("BBB"));
    assertEquals(new HashSet<>(Arrays.asList("AAA")), new HashSet<>(myMap.getAllKeysWithExistingMapping()));

    myMap.put("BBB", "BBB_VALUE");
    assertEquals("BBB_VALUE", myMap.get("BBB"));
    assertEquals(new HashSet<>(Arrays.asList("AAA", "BBB")), new HashSet<>(myMap.getAllKeysWithExistingMapping()));

    myMap.put("AAA", "ANOTHER_AAA_VALUE");
    assertEquals("ANOTHER_AAA_VALUE", myMap.get("AAA"));
    assertEquals(new HashSet<>(Arrays.asList("AAA", "BBB")), new HashSet<>(myMap.getAllKeysWithExistingMapping()));

    myMap.remove("AAA");
    assertNull(myMap.get("AAA"));
    assertEquals("BBB_VALUE", myMap.get("BBB"));
    assertEquals(new HashSet<>(Arrays.asList("BBB")), new HashSet<>(myMap.getAllKeysWithExistingMapping()));

    myMap.remove("BBB");
    assertNull(myMap.get("AAA"));
    assertNull(myMap.get("BBB"));
    assertEquals(new HashSet<>(), new HashSet<>(myMap.getAllKeysWithExistingMapping()));

    myMap.put("AAA", "FINAL_AAA_VALUE");
    assertEquals("FINAL_AAA_VALUE", myMap.get("AAA"));
    assertNull(myMap.get("BBB"));
    assertEquals(new HashSet<>(Arrays.asList("AAA")), new HashSet<>(myMap.getAllKeysWithExistingMapping()));
  }

  // Data must survive close/reopen after every single write, and isDirty()
  // must flip on writes and be cleared by close().
  public void testOpeningClosing() throws IOException {
    List<String> strings = new ArrayList<>(2000);
    for (int i = 0; i < 2000; ++i) {
      strings.add(createRandomString());
    }
    // Phase 1: write one entry per open/close cycle.
    for (int i = 0; i < 2000; ++i) {
      final String key = strings.get(i);
      myMap.put(key, key + "_value");
      myMap.close();
      myMap = new PersistentHashMap<>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE);
    }
    // Phase 2: re-read each value and overwrite it (a write marks dirty).
    for (int i = 0; i < 2000; ++i) {
      final String key = strings.get(i);
      final String value = key + "_value";
      assertEquals(value, myMap.get(key));

      myMap.put(key, value);
      assertTrue(myMap.isDirty());

      myMap.close();
      myMap = new PersistentHashMap<>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE);
    }
    // Phase 3: a freshly opened map with no writes must not be dirty.
    for (int i = 0; i < 2000; ++i) {
      assertTrue(!myMap.isDirty());
      myMap.close();
      myMap = new PersistentHashMap<>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE);
    }
    final String randomKey = createRandomString();
    myMap.put(randomKey, randomKey + "_value");
    assertTrue(myMap.isDirty());
  }

  // Compaction must not lose the single stored mapping.
  public void testPutCompactGet() throws IOException {
    myMap.put("a", "b");
    myMap.compact();
    assertEquals("b", myMap.get("a"));
  }

  // All mappings must be identical before and after compact(), including
  // across a close/reopen boundary.
  public void testOpeningWithCompact() throws IOException {
    final int stringsCount = 5/*1000000*/;
    Set<String> strings = new HashSet<>(stringsCount);
    for (int i = 0; i < stringsCount; ++i) {
      final String key = createRandomString();
      strings.add(key);
      myMap.put(key, key + "_value");
    }
    myMap.close();
    myMap = new PersistentHashMap<>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE);

    { // before compact
      final Collection<String> allKeys = new HashSet<>(myMap.getAllKeysWithExistingMapping());
      assertEquals(strings, allKeys);
      for (String key : allKeys) {
        final String val = myMap.get(key);
        assertEquals(key + "_value", val);
      }
    }
    myMap.compact();
    { // after compact
      final Collection<String> allKeys = new HashSet<>(myMap.getAllKeysWithExistingMapping());
      assertEquals(strings, allKeys);
      for (String key : allKeys) {
        final String val = myMap.get(key);
        assertEquals(key + "_value", val);
      }
    }
  }

  // The garbage counter must persist across close/reopen and drop to zero
  // after compact().
  public void testGarbageSizeUpdatedAfterCompact() throws IOException {
    final int stringsCount = 5/*1000000*/;
    Set<String> strings = new HashSet<>(stringsCount);
    for (int i = 0; i < stringsCount; ++i) {
      final String key = createRandomString();
      strings.add(key);
      myMap.put(key, key + "_value");
    }

    // create some garbage
    for (String string : strings) {
      myMap.remove(string);
    }

    strings.clear();

    for (int i = 0; i < stringsCount; ++i) {
      final String key = createRandomString();
      strings.add(key);
      myMap.put(key, key + "_value");
    }

    myMap.close();
    final int garbageSizeOnClose = myMap.getGarbageSize();

    myMap = new PersistentHashMap<>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE);
    final int garbageSizeOnOpen = myMap.getGarbageSize();

    // Garbage accounting must survive the reopen unchanged.
    assertEquals(garbageSizeOnClose, garbageSizeOnOpen);

    { // before compact
      final Collection<String> allKeys = new HashSet<>(myMap.getAllKeysWithExistingMapping());
      assertEquals(strings, allKeys);
      for (String key : allKeys) {
        final String val = myMap.get(key);
        assertEquals(key + "_value", val);
      }
    }

    myMap.compact();

    assertEquals(0, myMap.getGarbageSize());

    myMap.close();
    myMap = new PersistentHashMap<>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE);
    final int garbageSizeAfterCompact = myMap.getGarbageSize();
    assertEquals(0, garbageSizeAfterCompact);

    { // after compact
      final Collection<String> allKeys = new HashSet<>(myMap.getAllKeysWithExistingMapping());
      assertEquals(strings, allKeys);
      for (String key : allKeys) {
        final String val = myMap.get(key);
        assertEquals(key + "_value", val);
      }
    }
  }

  // Same as testOpeningWithCompact but with inline integer keys, using a
  // separately created map rather than the fixture's myMap.
  public void testOpeningWithCompact2() throws IOException {
    File file = FileUtil.createTempFile("persistent", "map");
    PersistentHashMap<Integer, String> map = new PersistentHashMap<>(file, new IntInlineKeyDescriptor(), EnumeratorStringDescriptor.INSTANCE);
    try {
      final int stringsCount = 5/*1000000*/;

      Map<Integer, String> testMapping = new LinkedHashMap<>(stringsCount);
      for (int i = 0; i < stringsCount; ++i) {
        final String key = createRandomString();
        String value = key + "_value";
        testMapping.put(i, value);
        map.put(i, value);
      }
      map.close();
      map = new PersistentHashMap<>(file, new IntInlineKeyDescriptor(), EnumeratorStringDescriptor.INSTANCE);

      { // before compact
        final Collection<Integer> allKeys = new HashSet<>(map.getAllKeysWithExistingMapping());
        assertEquals(new HashSet<>(testMapping.keySet()), allKeys);
        for (Integer key : allKeys) {
          final String val = map.get(key);
          assertEquals(testMapping.get(key), val);
        }
      }
      map.compact();
      { // after compact
        final Collection<Integer> allKeys = new HashSet<>(map.getAllKeysWithExistingMapping());
        assertEquals(new HashSet<>(testMapping.keySet()), allKeys);
        for (Integer key : allKeys) {
          final String val = map.get(key);
          assertEquals(testMapping.get(key), val);
        }
      }
    }
    finally {
      clearMap(file, map);
    }
  }

  // Timed enumerate/put/remove/compact workload. The eviction listener on
  // the bounded cache cross-checks map contents for each evicted key.
  public void testPerformance() throws IOException {
    final IntObjectCache<String> stringCache = new IntObjectCache<>(2000);
    final IntObjectCache.DeletedPairsListener listener = (key, mapKey) -> {
      try {
        final String _mapKey = (String)mapKey;
        // The enumerated id of the evicted key must match the cache key.
        assertEquals(myMap.enumerate(_mapKey), key);

        final String expectedMapValue = _mapKey == null ? null : _mapKey + "_value";
        final String actual = myMap.get(_mapKey);
        assertEquals(expectedMapValue, actual);

        myMap.remove(_mapKey);

        assertNull(myMap.get(_mapKey));
      }
      catch (IOException e) {
        throw new RuntimeException(e);
      }
    };

    PlatformTestUtil.startPerformanceTest("put/remove", 5000, () -> {
      try {
        stringCache.addDeletedPairsListener(listener);
        for (int i = 0; i < 100000; ++i) {
          final String string = createRandomString();
          final int id = myMap.enumerate(string);
          stringCache.put(id, string);
          myMap.put(string, string + "_value");
        }
        stringCache.removeDeletedPairsListener(listener);
        for (String key : stringCache) {
          myMap.remove(key);
        }
        stringCache.removeAll();
        myMap.compact();
      }
      catch (IOException e) {
        throw new RuntimeException(e);
      }
    }).ioBound().assertTiming();
    myMap.close();
    LOG.debug(String.format("File size = %d bytes\n", myFile.length()));
    LOG.debug(String.format("Data file size = %d bytes\n",
                            new File(myDataFile.getParentFile(), myDataFile.getName() + AbstractStorage.DATA_EXTENSION).length()));
  }

  // Timed put/get/remove workload; the gets and the first remove loop use
  // fresh random strings, i.e. mostly-missing keys.
  public void testPerformance1() throws IOException {
    final List<String> strings = new ArrayList<>(2000);
    for (int i = 0; i < 100000; ++i) {
      strings.add(createRandomString());
    }

    PlatformTestUtil.startPerformanceTest("put/remove", 1500, () -> {
      for (int i = 0; i < 100000; ++i) {
        final String string = strings.get(i);
        myMap.put(string, string);
      }

      for (int i = 0; i < 100000; ++i) {
        final String string = createRandomString();
        myMap.get(string);
      }

      for (int i = 0; i < 100000; ++i) {
        final String string = createRandomString();
        myMap.remove(string);
      }

      for (String string : strings) {
        myMap.remove(string);
      }
    }).assertTiming();
    myMap.close();
    LOG.debug(String.format("File size = %d bytes\n", myFile.length()));
    LOG.debug(String.format("Data file size = %d bytes\n",
                            new File(myDataFile.getParentFile(), myDataFile.getName() + AbstractStorage.DATA_EXTENSION).length()));
  }

  // A read-only map must reject every mutating operation with
  // IncorrectOperationException while reads keep working.
  public void testReadonlyMap() throws IOException {
    myMap.put("AAA", "AAA_VALUE");
    myMap.close();
    myMap = new PersistentHashMap<String, String>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE) {
      @Override
      protected boolean isReadOnly() {
        return true;
      }
    };
    try {
      myMap.compact();
      fail();
    } catch (IncorrectOperationException ignore) {}
    try {
      myMap.put("AAA", "AAA_VALUE2");
      fail();
    } catch (IncorrectOperationException ignore) {}
    assertEquals("AAA_VALUE", myMap.get("AAA"));
    assertNull(myMap.get("BBB"));
    assertEquals(new HashSet<>(Arrays.asList("AAA")), new HashSet<>(myMap.getAllKeysWithExistingMapping()));
    try {
      myMap.remove("AAA");
      fail();
    } catch (IncorrectOperationException ignore) {}
    try {
      myMap.appendData("AAA", out -> out.writeUTF("BAR"));
      fail();
    } catch (IncorrectOperationException ignore) {}
  }

  // With compression disabled, a single small mapping produces a data file
  // of a known fixed size (55 bytes). The thread-local option is restored
  // in the finally block.
  public void testCreatePersistentMapWithoutCompression() throws IOException {
    clearMap(myFile, myMap);
    Boolean compressionFlag = PersistentHashMapValueStorage.CreationTimeOptions.DO_COMPRESSION.get();
    try {
      PersistentHashMapValueStorage.CreationTimeOptions.DO_COMPRESSION.set(Boolean.FALSE);
      myMap = new PersistentHashMap<>(myFile, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE);
      myMap.put("Foo", "Bar");
      assertTrue(myMap.containsMapping("Foo"));
      myMap.close();
      assertEquals(55,PersistentHashMap.getDataFile(myFile).length());
    }
    finally {
      PersistentHashMapValueStorage.CreationTimeOptions.DO_COMPRESSION.set(compressionFlag);
    }
  }

  // An IOException raised inside a key or value descriptor must mark the
  // map as corrupted on get, put, and appendData.
  public void testFailedReadWriteSetsCorruptedFlag() throws IOException {
    EnumeratorStringDescriptor throwingException = new EnumeratorStringDescriptor() {
      @Override
      public void save(@NotNull DataOutput storage, @NotNull String value) throws IOException {
        throw new IOException("test");
      }

      @Override
      public String read(@NotNull DataInput storage) throws IOException {
        throw new IOException("test");
      }
    };

    PersistentMapPerformanceTest.MapConstructor<String, String> mapConstructorWithBrokenKeyDescriptor =
      (file) -> IOUtil.openCleanOrResetBroken(
        () -> new PersistentHashMap<>(file, throwingException, EnumeratorStringDescriptor.INSTANCE), file);

    PersistentMapPerformanceTest.MapConstructor<String, String> mapConstructorWithBrokenValueDescriptor =
      (file) -> IOUtil.openCleanOrResetBroken(
        () -> new PersistentHashMap<>(file, EnumeratorStringDescriptor.INSTANCE, throwingException), file);

    runIteration(mapConstructorWithBrokenKeyDescriptor);
    runIteration(mapConstructorWithBrokenValueDescriptor);
  }

  // Shared body of the corruption test: seed a healthy map, reopen it with
  // the broken descriptor, and verify each failing operation sets the
  // corrupted flag.
  private void runIteration(PersistentMapPerformanceTest.MapConstructor<String, String> brokenMapDescritor) throws IOException {
    String key = "AAA";
    String value = "AAA_VALUE";
    PersistentMapPerformanceTest.MapConstructor<String, String> defaultMapConstructor =
      (file) -> IOUtil.openCleanOrResetBroken(
        () -> new PersistentHashMap<>(file, EnumeratorStringDescriptor.INSTANCE, EnumeratorStringDescriptor.INSTANCE), file);

    createInitializedMap(key, value, defaultMapConstructor);
    myMap = brokenMapDescritor.createMap(myFile);
    try {
      myMap.get(key);
      fail();
    } catch (IOException ignore) {
      assertTrue(myMap.isCorrupted());
    }

    createInitializedMap(key, value, defaultMapConstructor);
    myMap = brokenMapDescritor.createMap(myFile);
    try {
      myMap.put(key, value + value);
      fail();
    } catch (IOException ignore) {
      assertTrue(myMap.isCorrupted());
    }

    createInitializedMap(key, value, defaultMapConstructor);
    myMap = brokenMapDescritor.createMap(myFile);
    try {
      myMap.appendData(key, new PersistentHashMap.ValueDataAppender() {
        @Override
        public void append(DataOutput out) throws IOException {
          throw new IOException();
        }
      });
      fail();
    } catch (IOException ignore) {
      assertTrue(myMap.isCorrupted());
    }
  }

  // Close the fixture map, swallowing close-time errors (a corrupted map
  // may throw on close).
  private void closeMapSilently() {
    try {
      myMap.close();
    } catch (IOException ignore) {}
  }

  // Replace the fixture map with a freshly built one containing exactly
  // one mapping, then close it so a broken constructor can reopen it.
  private void createInitializedMap(String key,
                                    String value,
                                    PersistentMapPerformanceTest.MapConstructor<String, String> defaultMapConstructor)
    throws IOException {
    closeMapSilently();
    myMap = defaultMapConstructor.createMap(myFile);
    myMap.put(key, value);
    closeMapSilently();
  }
}
| |
package com.example.android.project1;
import android.content.ContentResolver;
import android.content.res.Configuration;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import com.example.android.project1.database.MoviesContract;
import com.example.android.project1.database.MoviesDBHelper;
import com.example.android.project1.model.MovieInfo;
import com.example.android.project1.model.SortType;
import com.example.android.project1.utils.MovieDBService;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import okhttp3.OkHttpClient;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
import static java.sql.Types.NULL;
public class MainActivity extends AppCompatActivity {
private static final String BASE_URL_THEMOVIEDB = "http://api.themoviedb.org/3/";
private static final String LOG_TAG = "MainActivity";
private static final String SAVED_LAYOUT_MANAGER = "layout-manager-state";
private static final String SAVED_SORT_TYPE = "sort-type-state";
private RecyclerView recyclerView;
private GridLayoutManager layoutManager;
private MyRecyclerViewAdapter myAdapter;
private SortType sortType = SortType.POPULARITY;
private MoviesDBHelper dbHelper;
private Parcelable mLayoutManagerSavedState = null;
/**
 * Loads the user's favorite movies from the local database off the UI
 * thread and publishes them to the RecyclerView adapter.
 */
private class MoviesLoadTask extends AsyncTask<Void, Void, Cursor> {
    @Override
    protected Cursor doInBackground(Void... voids) {
        // Worker thread: query all favorites.
        return dbHelper.getAllFavoriteMovies();
    }

    @Override
    protected void onPostExecute(Cursor cursor) {
        // UI thread: translate rows into MovieInfo objects for the adapter.
        List<MovieInfo> movieInfoResults = new ArrayList<>();
        if (cursor != null) {
            try {
                if (cursor.getCount() > 0) {
                    while (cursor.moveToNext()) {
                        int id = cursor.getInt(cursor.getColumnIndex(MoviesContract.MoviesEntry.ID));
                        String title = cursor.getString(cursor.getColumnIndex(MoviesContract.MoviesEntry.TITLE));
                        String description = cursor.getString(cursor.getColumnIndex(MoviesContract.MoviesEntry.DESCRIPTION));
                        String imageUrl = cursor.getString(cursor.getColumnIndex(MoviesContract.MoviesEntry.IMAGEURL));
                        String backdropUrl = cursor.getString(cursor.getColumnIndex(MoviesContract.MoviesEntry.BACKDROPURL));
                        long releaseDate = cursor.getLong(cursor.getColumnIndex(MoviesContract.MoviesEntry.RELEASEDATE));
                        double userRating = cursor.getDouble(cursor.getColumnIndex(MoviesContract.MoviesEntry.USERRATING));
                        // RELEASEDATE is stored as epoch millis (read via getLong,
                        // wrapped in java.util.Date) — TODO confirm against the writer.
                        MovieInfo movieInfo = new MovieInfo(id, title, imageUrl, description, backdropUrl, userRating, new Date(releaseDate));
                        movieInfoResults.add(movieInfo);
                    }
                } else {
                    // TODO: show an empty state
                }
            } finally {
                // BUG FIX: the Cursor was never closed, leaking the underlying
                // database resources on every load.
                cursor.close();
            }
        } else {
            // TODO: show an empty state
        }
        myAdapter.setMovieList(movieInfoResults);
    }
}
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    // Grid of movie posters; the column count comes from resources, so it
    // can differ per configuration (e.g. orientation / screen size).
    recyclerView= (RecyclerView) findViewById(R.id.rv_main);
    recyclerView.setHasFixedSize(true);
    int galleryColumns = getResources().getInteger(R.integer.gallery_columns);
    layoutManager = new GridLayoutManager(this, galleryColumns);
    recyclerView.setLayoutManager(layoutManager);
    myAdapter = new MyRecyclerViewAdapter();
    recyclerView.setAdapter(myAdapter);
    dbHelper = new MoviesDBHelper(this);
    // Initial fetch using the default sort (POPULARITY).
    getMovies();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putSerializable(SAVED_SORT_TYPE, sortType);
outState.putParcelable(SAVED_LAYOUT_MANAGER, layoutManager.onSaveInstanceState());
super.onSaveInstanceState(outState);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
sortType=(SortType) savedInstanceState.get(SAVED_SORT_TYPE);
mLayoutManagerSavedState = savedInstanceState.getParcelable(SAVED_LAYOUT_MANAGER);
getMovies();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle item selection
switch (item.getItemId()) {
case R.id.action_order_popularity:
sortType = SortType.POPULARITY;
break;
case R.id.action_order_top_rated:
sortType = SortType.TOP_RATED;
break;
case R.id.action_order_favorites:
sortType = SortType.FAVORITES;
break;
default:
return super.onOptionsItemSelected(item);
}
item.setChecked(true);
getMovies();
return true;
}
/**
* Restores scroll position after configuration change.
* <p>
* <b>NOTE:</b> Must be called after adapter has been set.
*/
private void restorePosition() {
if (mLayoutManagerSavedState != null) {
layoutManager.onRestoreInstanceState(mLayoutManagerSavedState);
mLayoutManagerSavedState = null;
}
}
private void getMovies()
{
switch(sortType)
{
case FAVORITES:
getFavoriteMovies();
default:
getPopularMovies();
}
}
private void getPopularMovies() {
OkHttpClient okHttpClient = new OkHttpClient.Builder()
.build();
Retrofit retrofit = new Retrofit.Builder()
.baseUrl(BASE_URL_THEMOVIEDB)
.addConverterFactory(GsonConverterFactory.create()).client(okHttpClient)
.build();
MovieDBService movieDBService = retrofit.create(MovieDBService.class);
final Call<MovieInfo.MovieResult> call;
call = movieDBService.getMovies(sortType, 1, BuildConfig.API_KEY);
call.enqueue(new Callback<MovieInfo.MovieResult>() {
@Override
public void onResponse(Call<MovieInfo.MovieResult> call, Response<MovieInfo.MovieResult> response) {
if (response.isSuccessful()) {
myAdapter.setMovieList(response.body().getResults());
restorePosition();
}
}
@Override
public void onFailure(Call<MovieInfo.MovieResult> call, Throwable t) {
Log.e(LOG_TAG,"Error: ", t);
}
});
}
private void getFavoriteMovies() {
new GetFavoritesMoviesTask().execute();
}
public class GetFavoritesMoviesTask extends AsyncTask<Void, Void, ArrayList<MovieInfo>> {
@Override
protected ArrayList<MovieInfo> doInBackground(Void... params) {
ContentResolver contentResolver = getContentResolver();
Cursor cursor = contentResolver.query(MoviesContract.CONTENT_URI,
null, null, null, null);
ArrayList<MovieInfo> favoritesMovies = new ArrayList<MovieInfo>();
if(cursor != null && cursor.moveToFirst()) {
do {
int id = cursor.getInt(cursor.getColumnIndex(MoviesContract.MoviesEntry.ID));
String title = cursor.getString(
cursor.getColumnIndex(MoviesContract.MoviesEntry.TITLE));
String imageUrl = cursor.getString(
cursor.getColumnIndex(MoviesContract.MoviesEntry.IMAGEURL));
String description = cursor.getString(
cursor.getColumnIndex(MoviesContract.MoviesEntry.DESCRIPTION));
String backdropUrl = cursor.getString(
cursor.getColumnIndex(MoviesContract.MoviesEntry.BACKDROPURL));
double userRating = cursor.getDouble(
cursor.getColumnIndex(MoviesContract.MoviesEntry.USERRATING));
long releaseDate = cursor.getLong(
cursor.getColumnIndex(MoviesContract.MoviesEntry.RELEASEDATE));
MovieInfo movie = new MovieInfo(id, title, imageUrl, description, backdropUrl, userRating, new Date(releaseDate));
favoritesMovies.add(movie);
} while (cursor.moveToNext());
restorePosition();
}
if(cursor != null) {
cursor.close();
}
return favoritesMovies;
}
@Override
protected void onPostExecute(ArrayList<MovieInfo> movies) {
myAdapter.setMovieList(movies);
}
}
}
| |
/*L
* Copyright SAIC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cabio/LICENSE.txt for details.
*/
package gov.nih.nci.caBIOApp.ui;
import gov.nih.nci.caBIO.search.Datasource;
import gov.nih.nci.caBIO.search.RowIndex;
import gov.nih.nci.caBIO.search.SelectionNode;
import gov.nih.nci.caBIO.search.SelectionNodeImpl;
import gov.nih.nci.caBIOApp.report.CaBIOTable;
import gov.nih.nci.caBIOApp.report.Table;
import gov.nih.nci.caBIOApp.sod.SODUtils;
import gov.nih.nci.caBIOApp.sod.SearchableObject;
import gov.nih.nci.caBIOApp.util.MessageLog;
import gov.nih.nci.common.search.SearchCriteria;
import gov.nih.nci.common.util.COREUtilities;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
 * Describes how the results of a {@link QueryDesign} are presented as a
 * report: which columns appear, how they are titled and ordered, and how
 * rows from an external "merge" table are joined to search results via a
 * designated merge column.
 * <p>
 * Column layout is tracked as an ordered list of {@link ColumnSpecification}s;
 * the merge table holds the user-supplied values that drive the merge.
 */
public class ReportDesign {

    private QueryDesign _qd = null;
    private String _id = null;
    private String _label = null;
    // NOTE(review): _name and _refreshCache appear unused in this class — confirm before removing.
    private String _name = null;
    private Table _mergeTable = null;
    // One ColumnSpecification per report column, ordered by new column number.
    private List _colSpecs = new ArrayList();
    private boolean _refreshCache = true;
    // Rebuilt by buildRowIndicesAndSearchParams(); one RowIndex per distinct merge value.
    private List _rowIndices = null;
    // Sequence used to assign unique indices to RowIndex instances.
    private int _idxSeq = 0;
    // Row numbers of merge-table rows whose merge cell was null or blank.
    private List _nullCellRowNums = new ArrayList();
    // Maps node ids to SearchCriteriaNodes used as filters when building the selection tree.
    private HashMap _filters = new HashMap();

    // private SearchableObjectsDescription _sod = null;

    /**
     * Builds a report design for the given query. If the query has a merge
     * criterion node, the merge table is cloned from it and a column
     * specification is pre-populated for every column of that table;
     * otherwise a minimal 1x1 placeholder table is used.
     *
     * @param id unique id for this report design
     * @param label display label for this report design
     * @param qd the query this report is based on
     */
    public ReportDesign(String id, String label, QueryDesign qd) {
        try {
            _qd = qd;
            /*
             * _id = qd.getId() + "_" + System.currentTimeMillis(); _label =
             * qd.getLabel() + " - Report " + DateFormat.getTimeInstance(
             * DateFormat.SHORT ).format( new Date() );
             */
            _id = id;
            _label = label;
            SearchCriteriaNode mergeNode = _qd.getMergeCriterionNode();
            if (mergeNode == null) {
                /*
                 * User doesn't want to merge. So, don't need to prepopulate
                 * merge table.
                 */
                MessageLog.printInfo("ReportDesign( QueryDesign ): NOT merging");
                _mergeTable = new CaBIOTable(1, 1);
            }
            else {
                /*
                 * User plans to merge.
                 */
                MessageLog.printInfo("ReportDesign( QueryDesign ): IS merging");
                _mergeTable = (Table) mergeNode.getTable().clone();
                setupColSpecs(_mergeTable, mergeNode);
            }
        }
        catch (RuntimeException ex) {
            // Log for diagnostics, then let the caller see the original failure.
            MessageLog.printStackTrace(ex);
            throw ex;
        }
    }

    /**
     * Creates one ColumnSpecification per column of the given merge table.
     * The merge column's spec is fully mapped to the object/attribute taken
     * from the criterion node; every other column gets a plain, title-only
     * spec keyed by its column index.
     */
    private void setupColSpecs(Table table, SearchCriteriaNode node) {
        int mergeCol = node.getSourceColumnNumber();
        MessageLog.printInfo("ReportDesign.setupColSpecs(): mergeCol = "
                + mergeCol);
        int colCount = table.getColumnCount();
        for (int i = 0; i < colCount; i++) {
            ColumnSpecification colSpec = new ColumnSpecification();
            if (i != mergeCol) {
                colSpec.setId(Integer.toString(i));
                String title = table.getColumnName(i);
                if (title == null) {
                    // Fall back to a generated, 1-based column title.
                    title = "COLUMN #" + Integer.toString(i + 1);
                }
                colSpec.setOldColumnTitle(title);
                colSpec.setNewColumnTitle(title);
            }
            else {
                // colSpec.setId( node.getObjectName() +
                // "." + node.getPropertyName() );
                colSpec.setId(node.getId());
                colSpec.setPath(node.getId());
                MessageLog.printInfo("The merge colSpec has id "
                        + colSpec.getId());
                CriterionValue val = (CriterionValue) node.getUserObject();
                colSpec.setOldColumnTitle(val.getObjectLabel() + " "
                        + val.getPropertyLabel());
                colSpec.setNewColumnTitle(val.getObjectLabel() + " "
                        + val.getPropertyLabel());
                colSpec.setIsMapped(true);
                colSpec.setIsMergeColumn(true);
                colSpec.setObjectName(val.getObjectName());
                colSpec.setObjectLabel(val.getObjectLabel());
                colSpec.setAttributeName(val.getPropertyName());
                colSpec.setAttributeLabel(val.getPropertyLabel());
            }
            // Initially old and new positions coincide.
            colSpec.setOldColumnNumber(i);
            colSpec.setNewColumnNumber(i);
            _colSpecs.add(colSpec);
        }
    }

    /**
     * Activates (creating if necessary) the ColumnSpecification for the
     * given search id, deactivating all others. The id is a dotted path of
     * the form "association...attribute", optionally prefixed by the root
     * bean name (the prefix is stripped). If no spec exists for the id, a
     * new one is created and appended as the right-most column.
     *
     * @param searchId dotted path identifying an object attribute
     * @return a _copy_ of the (now active) ColumnSpecification, never the
     *         internal instance
     * @throws InvalidSpecificationException if the path cannot be resolved
     *             to a searchable object
     */
    public ColumnSpecification selectColumn(String searchId)
            throws InvalidSpecificationException {
        ColumnSpecification theSpec = null;
        MessageLog.printInfo("ReportDesign.selectColumn(): searchId = "
                + searchId);
        SODUtils sod = SODUtils.getInstance();
        // Strip the root bean-name prefix, if present.
        if (searchId.startsWith(sod.getBeanName(_qd.getObjectName()) + ".")) {
            searchId = searchId.substring(searchId.indexOf(".") + 1);
        }
        // deactivate all colSpecs
        for (Iterator i = _colSpecs.iterator(); i.hasNext();) {
            ColumnSpecification aColSpec = (ColumnSpecification) i.next();
            MessageLog.printInfo("COLSPEC_ID: " + aColSpec.getId());
            if (aColSpec.isActive()) {
                aColSpec.setActive(false);
            }
        }
        theSpec = getColumnSpecification(searchId);
        // if it doesn't exist, create a new one
        if (theSpec == null) {
            String theClassName = null;
            String theAttName = null;
            if (searchId.indexOf(".") != -1) {
                // Dotted path: last element is the attribute, the rest names the bean.
                theAttName = searchId.substring(searchId.lastIndexOf(".") + 1);
                String rest = searchId.substring(0, searchId.lastIndexOf("."));
                String beanName = sod.getBeanNameFromPath(sod.getBeanName(_qd.getObjectName())
                        + "." + rest);
                SearchableObject so = sod.getSearchableObject(beanName);
                if (so == null) {
                    throw new InvalidSpecificationException(
                            "couldn't find searchable object for: " + beanName);
                }
                theClassName = so.getClassname();
            }
            else {
                // Bare attribute on the query's root object.
                theClassName = _qd.getObjectName();
                theAttName = searchId;
            }
            theSpec = new ColumnSpecification(theClassName, theAttName);
            theSpec.setId(searchId);
            theSpec.setPath(searchId);
            // Append as the new right-most column.
            theSpec.setNewColumnNumber(getMaxColumnNumber() + 1);
            _colSpecs.add(theSpec);
        }
        ColumnSpecification copy = null;
        if (theSpec != null) {
            theSpec.setActive(true);
            copy = new ColumnSpecification(theSpec);
        }
        // return a copy, not the real thing
        return copy;
    }

    /**
     * Replaces the stored ColumnSpecification that has the same id as the
     * given (edited) copy and, if its column position changed, moves it to
     * the new position.
     *
     * @param aColSpec edited copy of a previously selected specification
     * @throws InvalidSpecificationException if the spec is null, no spec
     *             with that id exists, or the target position is invalid
     */
    public void updateColumn(ColumnSpecification aColSpec)
            throws InvalidSpecificationException {
        if (aColSpec == null) {
            throw new InvalidSpecificationException("specification is null");
        }
        ColumnSpecification theColSpec = getColumnSpecification(aColSpec.getId());
        if (theColSpec == null) {
            throw new InvalidSpecificationException(
                    "Couldn't find ColumnSpecification for id: "
                            + aColSpec.getId());
        }
        int oldSpecNewColNum = theColSpec.getNewColumnNumber();
        int newSpecNewColNum = aColSpec.getNewColumnNumber();
        /*
         * MessageLog.printInfo( "ReportDesign.updateColumn(): " +
         * "theColSpec.getId() = " + theColSpec.getId() +
         * ", theColSpec.getNewColumnNumber() = " + oldSpecNewColNum +
         * ", aColSpec.getId() = " + aColSpec.getId() +
         * ", aColSpec.getNewColumnNumber() = " + newSpecNewColNum );
         * MessageLog.printInfo(
         * "ReportDesign.updateColumn(): size before calling _colSpecs.set: " +
         * _colSpecs.size() );
         */
        // Replace oldSpec with new
        // _colSpecs.set( oldSpecNewColNum, aColSpec );
        // NOTE(review): relies on ColumnSpecification.equals matching the copy
        // to the stored instance; if indexOf returns -1 this throws — confirm.
        int idx = _colSpecs.indexOf(aColSpec);
        _colSpecs.set(idx, aColSpec);
        /*
         * MessageLog.printInfo(
         * "ReportDesign.updateColumn(): size after calling _colSpecs.set: " +
         * _colSpecs.size() );
         */
        // Determine if the positions of this spec has changed.
        if (oldSpecNewColNum != newSpecNewColNum) {
            // position has changed
            moveColumnSpecification(oldSpecNewColNum, newSpecNewColNum);
        }
    }

    /**
     * Builds a fresh table sized to the current column specs, copying in
     * values for every unmapped column and for the merge column. Mapped,
     * non-merge ("new") columns are left empty to be filled from search
     * results.
     *
     * @throws InvalidSpecificationException if a spec is in an inconsistent
     *             state (neither unmapped, merge, nor new)
     */
    public Table getMergeTable() throws InvalidSpecificationException {
        Table newMergeTable = new CaBIOTable(_mergeTable.getRowCount(),
                _colSpecs.size());
        for (Iterator i = _colSpecs.iterator(); i.hasNext();) {
            ColumnSpecification aSpec = (ColumnSpecification) i.next();
            if (!aSpec.isMapped() || aSpec.isMergeColumn()) {
                // copy the values of this column into the final table
                copyColumnValues(_mergeTable, newMergeTable,
                        aSpec.getOldColumnNumber(), aSpec.getNewColumnNumber());
            }
            else if (aSpec.isNewColumn()) {
                // do nothing
            }
            else {
                throw new InvalidSpecificationException("ColumnSpecification "
                        + aSpec.getId()
                        + " is neither unmapped, nor merge, nor new.");
            }
        }
        return newMergeTable;
    }

    /**
     * Returns the row indices for the merge, rebuilding them on every call.
     *
     * @throws InvalidCriterionException if the query criteria are invalid
     */
    public List getRowIndices() throws InvalidCriterionException {
        MessageLog.printInfo("ReportDesign.getRowIndices()");
        // if( _refreshCache ){
        buildRowIndicesAndSearchParams();
        // }
        return _rowIndices;
    }

    /**
     * Returns the class names of every object that participates in the
     * report join, with the query's main object name guaranteed first.
     */
    public List getJoinObjectNames() {
        Set objNames = new HashSet();
        for (Iterator i = _colSpecs.iterator(); i.hasNext();) {
            ColumnSpecification spec = (ColumnSpecification) i.next();
            if (spec.isMapped()) {
                objNames.add(spec.getObjectName());
            }
        }
        StringBuffer sb = new StringBuffer();
        sb.append("ReportDesign.getJoinObjectNames(): join objects: ");
        String mainObjName = _qd.getRootSearchCriteriaNode().getObjectName();
        List retVals = null;
        retVals = new ArrayList();
        // ensure that main object name is first
        sb.append("\n" + mainObjName);
        retVals.add(mainObjName);
        for (Iterator i = objNames.iterator(); i.hasNext();) {
            String name = (String) i.next();
            if (!mainObjName.equals(name)) {
                sb.append("\n" + name);
                retVals.add(name);
            }
        }
        MessageLog.printInfo(sb.toString());
        return retVals;
    }

    /**
     * Rebuilds {@link #_rowIndices}: when merging, one RowIndex (with its
     * own SearchCriteria) is created per distinct non-blank value in the
     * merge column; every row sharing that value is registered on the same
     * index. Rows with a null/blank merge cell are recorded in
     * {@link #_nullCellRowNums} instead. When not merging, the list stays
     * empty.
     */
    private void buildRowIndicesAndSearchParams()
            throws InvalidCriterionException {
        MessageLog.printInfo("ReportDesign.buildRowIndicesAndSearchParams()");
        _rowIndices = new ArrayList();
        // generate the common SearchCriteria
        // SearchCriteria commonCriteria = _qd.toSearchCriteria();
        // get the merge criterion node
        SearchCriteriaNode mergeNode = _qd.getMergeCriterionNode();
        if (mergeNode == null) {
            // then we aren't merging
            MessageLog.printInfo("ReportDesign: we are NOT merging.");
            // RowIndex idx = new RowIndex();
            // idx.setIndex( 1 );
            // idx.setSearchCriteria(commonCriteria);
            // _rowIndices.add( idx );
        }
        else {
            // we are merging
            MessageLog.printInfo("ReportDesign: we are merging.");
            // build row indices and search params
            HashMap valToIdx = new HashMap();
            // int mergeColNum = getMergeColumnNumber();
            int mergeColNum = getMergeColumnSpecification().getOldColumnNumber();
            int rowCount = _mergeTable.getRowCount();
            int colCount = _mergeTable.getColumnCount();
            MessageLog.printInfo("ReportDesign: mergeColNum = " + mergeColNum
                    + ", rowCount = " + rowCount + ", colCount = " + colCount);
            for (int rowNum = 0; rowNum < rowCount; rowNum++) {
                // look for RowIndex with this value
                String val = _mergeTable.getStringValueAt(rowNum, mergeColNum);
                if (val != null && val.trim().length() > 0) {
                    RowIndex idx = (RowIndex) valToIdx.get(val);
                    if (idx == null) {
                        MessageLog.printInfo("ReportDesign: row " + rowNum
                                + " with value " + val + " first encountered.");
                        // create a rowIndex and searchParam
                        idx = new RowIndex();
                        idx.setIndex(_idxSeq++);
                        idx.setDatasource((Datasource) mergeNode.getTable());
                        // idx.setSearchCriteria( _qd.toSearchCriteria( val ) );
                        idx.setCriteria(_qd.toSearchCriteria(val));
                        _rowIndices.add(idx);
                        valToIdx.put(val, idx);
                    }
                    else {
                        MessageLog.printInfo("ReportDesign: row " + rowNum
                                + " has duplicate value: " + val);
                    }
                    // add this rowNum to found RowIndex
                    idx.addRowNumber(new Integer(rowNum));
                }
                else {
                    MessageLog.printInfo("ReportDesign: val at row " + rowNum
                            + " is null.");
                    _nullCellRowNums.add(new Integer(rowNum));
                }// -- end if( val != null...
            }// -- end for( int rowNum...
        }// -- end else: we are merging
    }// -- end build RowIndicesAndSearchParams

    /** Returns the current (new) position of the merge column. */
    public int getMergeColumnNumber() {
        return getMergeColumnSpecification().getNewColumnNumber();
    }

    /**
     * Returns the ColumnSpecification flagged as the merge column.
     *
     * @throws RuntimeException if no merge column spec exists
     */
    public ColumnSpecification getMergeColumnSpecification() {
        MessageLog.printInfo("ReportDesign.getMergeColumnSpecification()");
        ColumnSpecification mSpec = null;
        for (Iterator i = _colSpecs.iterator(); i.hasNext();) {
            ColumnSpecification spec = (ColumnSpecification) i.next();
            if (spec.isMergeColumn()) {
                mSpec = spec;
            }
        }
        if (mSpec == null) {
            throw new RuntimeException("Couldn't find merge colSpec");
        }
        else {
            MessageLog.printInfo("...found it.");
        }
        return mSpec;
    }

    /**
     * Returns the largest valid column index (size - 1). Specs are kept
     * ordered, so the last slot is always the maximum.
     */
    public int getMaxColumnNumber() {
        int max = 0;
        /*
         * for( Iterator i = _colSpecs.iterator(); i.hasNext(); ){ int tst =
         * ((ColumnSpecification)i.next()).getNewColumnNumber(); if( tst > max
         * ){ max = tst; } }
         */
        max = _colSpecs.size() - 1;
        return max;
    }

    /** Returns the stored spec with the given id, or null if none matches. */
    private ColumnSpecification getColumnSpecification(String id) {
        ColumnSpecification theSpec = null;
        for (Iterator i = _colSpecs.iterator(); i.hasNext();) {
            ColumnSpecification aSpec = (ColumnSpecification) i.next();
            /*
             * MessageLog.printInfo(
             * "ReportDesign.getColumnSpecification(): comparing " + id +
             * ", with " + aSpec.getId() );
             */
            if (aSpec.getId().equals(id)) {
                theSpec = aSpec;
                break;
            }
        }
        return theSpec;
    }

    /**
     * Removes the given column spec and renumbers the specs to its right so
     * that column numbers stay contiguous.
     *
     * @throws InvalidSpecificationException if the spec isn't found
     */
    public void removeColumn(ColumnSpecification colSpec)
            throws InvalidSpecificationException {
        int numSpecs = _colSpecs.size();
        if (numSpecs == 1) {
            _colSpecs.clear();
        }
        else {
            int idx = _colSpecs.indexOf(colSpec);
            if (idx == -1) {
                throw new InvalidSpecificationException(
                        "couldn't find spec with id: " + colSpec.getId());
            }
            _colSpecs.remove(idx);
            // Shift the remaining specs left.
            for (int i = idx; i < _colSpecs.size(); i++) {
                ColumnSpecification cs = (ColumnSpecification) _colSpecs.get(i);
                cs.setNewColumnNumber(i);
            }
        }
    }

    /**
     * Moves a spec from one column position to another: first the physical
     * move (remove + re-insert in the list), then the logical move
     * (renumbering every spec displaced by the insertion).
     *
     * @throws InvalidSpecificationException if the target position is out of range
     */
    private void moveColumnSpecification(int oldNewColNum, int newNewColNum)
            throws InvalidSpecificationException {
        MessageLog.printInfo("ReportDesign.moveColumnSpecification(): "
                + "oldNewColNum = " + oldNewColNum + ", newNewColNum = "
                + newNewColNum);
        // validate the move
        if (newNewColNum == _colSpecs.size() || newNewColNum < 0) {
            throw new InvalidSpecificationException("invalid column number: "
                    + newNewColNum);
        }
        // if not moving, do nothing - return
        if (newNewColNum == oldNewColNum) {
            return;
        }
        // determine direction
        boolean toRight = true;
        if (newNewColNum < oldNewColNum) {
            toRight = false;
        }
        // MessageLog.printInfo( "MOVING TO " + ( toRight ? "RIGHT" : "LEFT" )
        // );
        // do the physical move
        ColumnSpecification theSpec = (ColumnSpecification) _colSpecs.get(oldNewColNum);
        // MessageLog.printInfo( "SIZE BEFORE REMOVING: " + _colSpecs.size() );
        _colSpecs.remove(oldNewColNum);
        // MessageLog.printInfo( "SIZE AFTER REMOVING: " + _colSpecs.size() );
        _colSpecs.add(newNewColNum, theSpec);
        // MessageLog.printInfo( "SIZE AFTER ADDING: " + _colSpecs.size() );
        // do the logical move: walk back toward the vacated slot, renumbering
        // every displaced spec to its new list index.
        boolean done = false;
        int colNum = 0;
        int idx = newNewColNum;
        do {
            if (toRight) {
                idx--;
            }
            else {// toLeft
                idx++;
            }
            if (idx == -1 || idx == _colSpecs.size()) {
                done = true;
            }
            if (!done && idx != newNewColNum) {
                ColumnSpecification cs = (ColumnSpecification) _colSpecs.get(idx);
                MessageLog.printInfo("changing col num of cs " + cs.getId()
                        + " from " + cs.getNewColumnNumber() + " to " + idx);
                cs.setNewColumnNumber(idx);
            }
        } while (!done);
    }// -- end moveColumnSpecification

    /** Copies every row of one column from srcTable into destTable. */
    private void copyColumnValues(Table srcTable, Table destTable,
            int oldColNum, int newColNum) {
        int rowCount = srcTable.getRowCount();
        for (int i = 0; i < rowCount; i++) {
            destTable.setStringValueAt(srcTable.getStringValueAt(i, oldColNum),
                    i, newColNum);
        }
    }

    /** Returns defensive copies of all column specs (internal list stays private). */
    public List getColumnSpecifications() throws InvalidSpecificationException {
        List copiedSpecs = new ArrayList();
        for (Iterator i = _colSpecs.iterator(); i.hasNext();) {
            copiedSpecs.add(new ColumnSpecification(
                    (ColumnSpecification) i.next()));
        }
        return copiedSpecs;
    }

    /** Returns the SearchCriteria shared by all rows, derived from the query. */
    public SearchCriteria getCommonCriteria() throws InvalidCriterionException {
        return _qd.toSearchCriteria();
    }

    public void setId(String s) {
        _id = s;
    }

    public String getId() {
        return _id;
    }

    public void setLabel(String s) {
        _label = s;
    }

    public String getLabel() {
        return _label;
    }

    public QueryDesign getQueryDesign() {
        return _qd;
    }

    /** Row numbers whose merge cell was blank on the last row-index build. */
    public List getNullCellRowNums() {
        return _nullCellRowNums;
    }

    /**
     * Builds the selection tree used to fetch report data: a root node for
     * the query's object plus one node per dotted path of every mapped
     * column spec, inserted via {@link #insertIntoTree}.
     */
    public SelectionNode getSelectionTree() throws Exception {
        MessageLog.printInfo("ReportDesign.getSelectionTree()");
        // This is here until I implement filter construction in the GUI
        SearchCriteriaNode mergeCrit = (SearchCriteriaNode) _qd.getMergeCriterionNode();
        if (mergeCrit != null) {
            SearchCriteriaNode mergeCritParent = (SearchCriteriaNode) mergeCrit.getParent();
            // MessageLog.printInfo( "...caching merge filter under: " +
            // mergeCritParent.getId() );
            _filters.put(mergeCritParent.getId(), mergeCritParent);
        }
        // Create the root node
        String basePath = SODUtils.getInstance().getShortName(
                _qd.getObjectName());
        SelectionNode tree = new SelectionNodeImpl(basePath,
                _qd.toSearchCriteria(), new ArrayList());
        for (Iterator i = _colSpecs.iterator(); i.hasNext();) {
            ColumnSpecification colSpec = (ColumnSpecification) i.next();
            if (colSpec.isMapped()) {
                String csPath = colSpec.getPath();
                /*
                 * int pidx = csPath.indexOf( "." ); if( pidx != -1 ){ String fe
                 * = csPath.substring( 0, pidx ); if( basePath.equals( fe ) ){
                 * csPath = csPath.substring( pidx + 1 ); } }
                 */
                if (!csPath.startsWith(basePath)) {
                    csPath = basePath + "." + csPath;
                }
                MessageLog.printInfo("...csPath = " + csPath);
                insertIntoTree(tree, csPath);
            }
        }
        return tree;
    }

    /**
     * Recursively inserts a dotted attribute path into the selection tree.
     * The leading path element matching the parent's bean name is stripped;
     * a bare attribute is added to the parent node, while a longer path
     * creates (or reuses) a child node for its first element — attaching a
     * cached filter if one exists, otherwise a blank SearchCriteria built
     * by reflection — and recurses with the remainder.
     */
    protected void insertIntoTree(SelectionNode parent, String path)
            throws Exception {
        MessageLog.printInfo("ReportDesign.insertIntoTree(): parent.getClassname() = "
                + parent.getClassname() + ", path = " + path);
        SODUtils sod = SODUtils.getInstance();
        String pbn = sod.getBeanName(parent.getClassname());
        if (pbn.endsWith("Impl")) {
            pbn = pbn.substring(0, pbn.indexOf("Impl"));
        }
        if (path.startsWith(pbn + ".")) {
            path = path.substring(path.indexOf(".") + 1);
        }
        int idx = path.indexOf(".");
        if (idx == -1) {
            // Then we need to add an attribute to the parent node.
            List atts = parent.getAttributes();
            atts.add(path);
        }
        else {
            String firstElement = path.substring(0, idx);
            String restOfPath = path.substring(idx + 1);
            // See if child node for firstElement already exists
            SelectionNode theChild = null;
            for (Enumeration children = parent.children(); children.hasMoreElements();) {
                SelectionNode aChild = (SelectionNode) children.nextElement();
                if (aChild.getPathName().endsWith(firstElement)) {
                    theChild = aChild;
                    break;
                }
            }
            if (theChild == null) {
                // It doesn't exist, so create it.
                String newPath = parent.getPathName() + "." + firstElement;
                // See if there is a special filter for this.
                SearchCriteria filter = null;
                // MessageLog.printInfo( "...looking for filter under newPath: "
                // + newPath );
                SearchCriteriaNode f = (SearchCriteriaNode) _filters.get(newPath);
                if (f != null) {
                    // MessageLog.printInfo( "...found special filter" );
                    filter = _qd.toSearchCriteria(f, null);
                }
                else {
                    // There isn't, so just use a blank one.
                    // MessageLog.printInfo( "...creating blank filter" );
                    /*
                     * filter = (SearchCriteria)Class.forName(
                     * "gov.nih.nci.caBIO.bean." + firstElement +
                     * "SearchCriteria" ).newInstance();
                     */
                    String beanName = sod.getBeanNameFromPath(sod.getBeanName(parent.getClassname())
                            + "." + firstElement);
                    String scClassName = COREUtilities.getSCPackageName(beanName)
                            + "." + beanName + "SearchCriteria";
                    filter = (SearchCriteria) Class.forName(scClassName).newInstance();
                }
                theChild = new SelectionNodeImpl(newPath, filter,
                        new ArrayList());
                parent.insert(theChild, parent.getChildCount());
            }
            insertIntoTree(theChild, restOfPath);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.tserver.compaction.strategies;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.accumulo.core.compaction.CompactionSettings;
import org.apache.accumulo.core.conf.ConfigurationCopy;
import org.apache.accumulo.core.file.FileSKVIterator;
import org.apache.accumulo.core.metadata.schema.DataFileValue;
import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
import org.apache.accumulo.server.fs.FileRef;
import org.apache.accumulo.tserver.compaction.CompactionPlan;
import org.apache.accumulo.tserver.compaction.CompactionStrategy;
import org.apache.accumulo.tserver.compaction.MajorCompactionRequest;
import org.apache.accumulo.tserver.compaction.WriteParameters;
import org.apache.hadoop.fs.Path;
/**
 * A {@link CompactionStrategy} whose behavior is driven entirely by options:
 * per-file selection tests (size thresholds, file name/path regexes, missing
 * sample data) plus output write parameters. A compaction is proposed only
 * when at least {@code minFiles} files pass all configured tests.
 */
public class ConfigurableCompactionStrategy extends CompactionStrategy {

  /** Per-file predicate deciding whether a file should be compacted. */
  private interface Test {
    boolean shouldCompact(Entry<FileRef,DataFileValue> file, MajorCompactionRequest request);
  }

  /** Selects files that contain no sample data for the table's sampler configuration. */
  private static class NoSampleTest implements Test {
    @Override
    public boolean shouldCompact(Entry<FileRef,DataFileValue> file, MajorCompactionRequest request) {
      try (FileSKVIterator reader = request.openReader(file.getKey())) {
        SamplerConfigurationImpl sc = SamplerConfigurationImpl.newSamplerConfig(new ConfigurationCopy(request.getTableProperties()));
        if (sc == null) {
          // Sampling is not configured for the table, so there is nothing to regenerate.
          return false;
        }
        return reader.getSample(sc) == null;
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }

  /** Compares a file's size against a configured threshold (direction set by subclass). */
  private abstract static class FileSizeTest implements Test {
    private final long esize;

    private FileSizeTest(String s) {
      this.esize = Long.parseLong(s);
    }

    @Override
    public boolean shouldCompact(Entry<FileRef,DataFileValue> file, MajorCompactionRequest request) {
      return shouldCompact(file.getValue().getSize(), esize);
    }

    public abstract boolean shouldCompact(long fsize, long esize);
  }

  /** Matches a configured regex against some textual form of the file's path. */
  private abstract static class PatternPathTest implements Test {
    private final Pattern pattern;

    private PatternPathTest(String p) {
      this.pattern = Pattern.compile(p);
    }

    @Override
    public boolean shouldCompact(Entry<FileRef,DataFileValue> file, MajorCompactionRequest request) {
      return pattern.matcher(getInput(file.getKey().path())).matches();
    }

    public abstract String getInput(Path path);
  }

  private final List<Test> tests = new ArrayList<>();
  // NOTE(review): never reassigned, so tests always combine with AND semantics.
  private boolean andTest = true;
  private int minFiles = 1;
  private final WriteParameters writeParams = new WriteParameters();

  /**
   * Parses the strategy options into file-selection tests and output write
   * parameters.
   *
   * @throws IllegalArgumentException if an option key is unknown
   */
  @Override
  public void init(Map<String,String> options) {
    Set<Entry<String,String>> es = options.entrySet();
    for (Entry<String,String> entry : es) {
      switch (CompactionSettings.valueOf(entry.getKey())) {
        case SF_NO_SAMPLE:
          tests.add(new NoSampleTest());
          break;
        case SF_LT_ESIZE_OPT:
          tests.add(new FileSizeTest(entry.getValue()) {
            @Override
            public boolean shouldCompact(long fsize, long esize) {
              return fsize < esize;
            }
          });
          break;
        case SF_GT_ESIZE_OPT:
          tests.add(new FileSizeTest(entry.getValue()) {
            @Override
            public boolean shouldCompact(long fsize, long esize) {
              return fsize > esize;
            }
          });
          break;
        case SF_NAME_RE_OPT:
          tests.add(new PatternPathTest(entry.getValue()) {
            @Override
            public String getInput(Path path) {
              return path.getName();
            }
          });
          break;
        case SF_PATH_RE_OPT:
          tests.add(new PatternPathTest(entry.getValue()) {
            @Override
            public String getInput(Path path) {
              return path.toString();
            }
          });
          break;
        case MIN_FILES_OPT:
          minFiles = Integer.parseInt(entry.getValue());
          break;
        case OUTPUT_COMPRESSION_OPT:
          writeParams.setCompressType(entry.getValue());
          break;
        case OUTPUT_BLOCK_SIZE_OPT:
          writeParams.setBlockSize(Long.parseLong(entry.getValue()));
          break;
        case OUTPUT_INDEX_BLOCK_SIZE_OPT:
          writeParams.setIndexBlockSize(Long.parseLong(entry.getValue()));
          break;
        case OUTPUT_HDFS_BLOCK_SIZE_OPT:
          writeParams.setHdfsBlockSize(Long.parseLong(entry.getValue()));
          break;
        case OUTPUT_REPLICATION_OPT:
          writeParams.setReplication(Integer.parseInt(entry.getValue()));
          break;
        default:
          throw new IllegalArgumentException("Unknown option " + entry.getKey());
      }
    }
  }

  /**
   * Returns the subset of the request's files that pass the configured
   * tests (all of them when {@code andTest}, any of them otherwise). With
   * no tests configured, every file is selected.
   */
  private List<FileRef> getFilesToCompact(MajorCompactionRequest request) {
    List<FileRef> filesToCompact = new ArrayList<>();
    for (Entry<FileRef,DataFileValue> entry : request.getFiles().entrySet()) {
      boolean compact = false;
      for (Test test : tests) {
        if (andTest) {
          compact = test.shouldCompact(entry, request);
          if (!compact)
            break;
        } else {
          compact |= test.shouldCompact(entry, request);
        }
      }
      if (compact || tests.isEmpty())
        filesToCompact.add(entry.getKey());
    }
    return filesToCompact;
  }

  @Override
  public boolean shouldCompact(MajorCompactionRequest request) throws IOException {
    return getFilesToCompact(request).size() >= minFiles;
  }

  /**
   * Builds a plan compacting every selected file with the configured write
   * parameters, or returns null when fewer than {@code minFiles} qualify.
   */
  @Override
  public CompactionPlan getCompactionPlan(MajorCompactionRequest request) throws IOException {
    List<FileRef> filesToCompact = getFilesToCompact(request);
    if (filesToCompact.size() >= minFiles) {
      CompactionPlan plan = new CompactionPlan();
      plan.inputFiles.addAll(filesToCompact);
      plan.writeParameters = writeParams;
      return plan;
    }
    return null;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.schema;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.Objects;
import javax.annotation.Nullable;
import com.google.common.base.MoreObjects;
import com.google.common.collect.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.auth.DataResource;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.service.reads.SpeculativeRetryPolicy;
import org.apache.cassandra.utils.AbstractIterator;
import org.github.jamm.Unmetered;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static com.google.common.collect.Iterables.any;
import static com.google.common.collect.Iterables.transform;
import static org.apache.cassandra.schema.IndexMetadata.isNameValid;
@Unmetered
public final class TableMetadata
{
private static final Logger logger = LoggerFactory.getLogger(TableMetadata.class);
private static final ImmutableSet<Flag> DEFAULT_CQL_FLAGS = ImmutableSet.of(Flag.COMPOUND);
private static final ImmutableSet<Flag> DEPRECATED_CS_FLAGS = ImmutableSet.of(Flag.DENSE, Flag.SUPER);
public static final String COMPACT_STORAGE_HALT_MESSAGE =
"Compact Tables are not allowed in Cassandra starting with 4.0 version. " +
"Use `ALTER ... DROP COMPACT STORAGE` command supplied in 3.x/3.11 Cassandra " +
"in order to migrate off Compact Storage.";
private static final String COMPACT_STORAGE_DEPRECATION_MESSAGE =
"Incorrect set of flags is was detected in table {}.{}: '{}'. \n" +
"Starting with version 4.0, '{}' flags are deprecated and every table has to have COMPOUND flag. \n" +
"Forcing the following set of flags: '{}'";
/**
 * Table storage flags. A CQL-compatible table carries the COMPOUND flag and
 * neither of the legacy compact-storage flags (DENSE, SUPER).
 */
public enum Flag
{
    SUPER, COUNTER, DENSE, COMPOUND;

    /** Returns true iff the flag set describes a CQL-compatible (non compact-storage) table. */
    public static boolean isCQLCompatible(Set<Flag> flags)
    {
        boolean legacyCompact = flags.contains(Flag.DENSE) || flags.contains(Flag.SUPER);
        return flags.contains(Flag.COMPOUND) && !legacyCompact;
    }

    /** Parses case-insensitive flag names into a set of Flag values. */
    public static Set<Flag> fromStringSet(Set<String> strings)
    {
        Set<Flag> parsed = new HashSet<>();
        for (String name : strings)
            parsed.add(Flag.valueOf(name.toUpperCase()));
        return parsed;
    }

    /** Serializes flags as their lower-case names. */
    public static Set<String> toStringSet(Set<Flag> flags)
    {
        Set<String> names = new HashSet<>();
        for (Flag flag : flags)
            names.add(flag.toString().toLowerCase());
        return names;
    }
}
/**
 * Broad category of the table: a user table (REGULAR), a secondary-index backing
 * table (INDEX), a materialized view (VIEW), or a virtual table (VIRTUAL).
 */
public enum Kind
{
    REGULAR, INDEX, VIEW, VIRTUAL
}
public final String keyspace;
public final String name;
public final TableId id;
public final IPartitioner partitioner;
public final Kind kind;
public final TableParams params;
public final ImmutableSet<Flag> flags;
@Nullable
private final String indexName; // derived from table name
/*
* All CQL3 columns definition are stored in the columns map.
* On top of that, we keep separated collection of each kind of definition, to
* 1) allow easy access to each kind and
* 2) for the partition key and clustering key ones, those list are ordered by the "component index" of the elements.
*/
public final ImmutableMap<ByteBuffer, DroppedColumn> droppedColumns;
final ImmutableMap<ByteBuffer, ColumnMetadata> columns;
private final ImmutableList<ColumnMetadata> partitionKeyColumns;
private final ImmutableList<ColumnMetadata> clusteringColumns;
private final RegularAndStaticColumns regularAndStaticColumns;
public final Indexes indexes;
public final Triggers triggers;
// derived automatically from flags and columns
public final AbstractType<?> partitionKeyType;
public final ClusteringComparator comparator;
/*
* For dense tables, this alias the single non-PK column the table contains (since it can only have one). We keep
* that as convenience to access that column more easily (but we could replace calls by regularAndStaticColumns().iterator().next()
* for those tables in practice).
*/
public final ColumnMetadata compactValueColumn;
// performance hacks; TODO see if all are really necessary
public final DataResource resource;
// Builds the immutable metadata from a Builder; also derives the index name,
// partition key type, clustering comparator and compact value column.
private TableMetadata(Builder builder)
{
    // Normalize legacy compact-storage flag sets: strip DENSE/SUPER, force COMPOUND,
    // and log the substitution (COMPACT_STORAGE_DEPRECATION_MESSAGE).
    if (!Flag.isCQLCompatible(builder.flags))
    {
        flags = ImmutableSet.copyOf(Sets.union(Sets.difference(builder.flags, DEPRECATED_CS_FLAGS), DEFAULT_CQL_FLAGS));
        logger.warn(COMPACT_STORAGE_DEPRECATION_MESSAGE, builder.keyspace, builder.name, builder.flags, DEPRECATED_CS_FLAGS, flags);
    }
    else
    {
        flags = Sets.immutableEnumSet(builder.flags);
    }
    keyspace = builder.keyspace;
    name = builder.name;
    id = builder.id;
    partitioner = builder.partitioner;
    kind = builder.kind;
    params = builder.params.build();
    // Index tables are named "<base>.<index>"; keep only the part after the dot.
    indexName = kind == Kind.INDEX ? name.substring(name.indexOf('.') + 1) : null;
    droppedColumns = ImmutableMap.copyOf(builder.droppedColumns);
    // Sort key columns into their canonical order (ColumnMetadata is Comparable)
    // before snapshotting them into immutable lists.
    Collections.sort(builder.partitionKeyColumns);
    partitionKeyColumns = ImmutableList.copyOf(builder.partitionKeyColumns);
    Collections.sort(builder.clusteringColumns);
    clusteringColumns = ImmutableList.copyOf(builder.clusteringColumns);
    regularAndStaticColumns = RegularAndStaticColumns.builder().addAll(builder.regularAndStaticColumns).build();
    columns = ImmutableMap.copyOf(builder.columns);
    indexes = builder.indexes;
    triggers = builder.triggers;
    // Single key column: use its type directly; otherwise wrap in a CompositeType.
    partitionKeyType = partitionKeyColumns.size() == 1
                     ? partitionKeyColumns.get(0).type
                     : CompositeType.getInstance(transform(partitionKeyColumns, t -> t.type));
    comparator = new ClusteringComparator(transform(clusteringColumns, c -> c.type));
    // Compact tables expose a single non-PK value column; null for CQL tables.
    compactValueColumn = isCompactTable()
                       ? CompactTables.getCompactValueColumn(regularAndStaticColumns, isSuper())
                       : null;
    resource = DataResource.table(keyspace, name);
}
public static Builder builder(String keyspace, String table)
{
return new Builder(keyspace, table);
}
public static Builder builder(String keyspace, String table, TableId id)
{
return new Builder(keyspace, table, id);
}
/**
 * Returns a Builder pre-populated with this table's metadata, used to produce a
 * modified copy (see the withSwapped / withUpdatedUserType methods).
 */
public Builder unbuild()
{
    return builder(keyspace, name, id)
           .partitioner(partitioner)
           .kind(kind)
           .params(params)
           .flags(flags)
           .addColumns(columns())
           .droppedColumns(droppedColumns)
           .indexes(indexes)
           .triggers(triggers);
}
public boolean isIndex()
{
return kind == Kind.INDEX;
}
public TableMetadata withSwapped(TableParams params)
{
return unbuild().params(params).build();
}
public TableMetadata withSwapped(Triggers triggers)
{
return unbuild().triggers(triggers).build();
}
public TableMetadata withSwapped(Indexes indexes)
{
return unbuild().indexes(indexes).build();
}
public boolean isView()
{
return kind == Kind.VIEW;
}
public boolean isVirtual()
{
return kind == Kind.VIRTUAL;
}
public Optional<String> indexName()
{
return Optional.ofNullable(indexName);
}
/*
 * We call a CF "dense" when each component of the comparator is a clustering column,
 * i.e. no component is used to store regular column names. In other words, non-composite
 * static "thrift" and CQL3 CFs are *not* dense.
 */
public boolean isDense()
{
    return flags.contains(Flag.DENSE);
}

/** Whether the table uses a compound (multi-component) comparator. */
public boolean isCompound()
{
    return flags.contains(Flag.COMPOUND);
}

/** Whether this is a legacy super-column table. */
public boolean isSuper()
{
    return flags.contains(Flag.SUPER);
}

/** Whether this is a counter table. */
public boolean isCounter()
{
    return flags.contains(Flag.COUNTER);
}

/** A standard CQL table: compound and neither dense nor super. */
public boolean isCQLTable()
{
    return !isSuper() && !isDense() && isCompound();
}

/** Any table that is not a standard CQL table, i.e. legacy compact storage. */
public boolean isCompactTable()
{
    return !isCQLTable();
}

/** A legacy "static compact" table: neither super, dense, nor compound. */
public boolean isStaticCompactTable()
{
    return !isSuper() && !isDense() && !isCompound();
}
public ImmutableCollection<ColumnMetadata> columns()
{
return columns.values();
}
public Iterable<ColumnMetadata> primaryKeyColumns()
{
return Iterables.concat(partitionKeyColumns, clusteringColumns);
}
public ImmutableList<ColumnMetadata> partitionKeyColumns()
{
return partitionKeyColumns;
}
public ImmutableList<ColumnMetadata> clusteringColumns()
{
return clusteringColumns;
}
public RegularAndStaticColumns regularAndStaticColumns()
{
return regularAndStaticColumns;
}
public Columns regularColumns()
{
return regularAndStaticColumns.regulars;
}
public Columns staticColumns()
{
return regularAndStaticColumns.statics;
}
/*
 * An iterator over all column definitions in the order of a SELECT *: partition key
 * columns, then clustering columns, then the remaining columns in "select order".
 * For backward compatibility this "hides" the clustering/regular columns of
 * non-CQL3 non-dense tables.
 */
public Iterator<ColumnMetadata> allColumnsInSelectOrder()
{
    final boolean isStaticCompactTable = isStaticCompactTable();
    // NOTE(review): presumably this detects a compact table whose value column has an
    // empty name so it can be hidden entirely — confirm against CompactTables.
    final boolean noNonPkColumns = isCompactTable() && CompactTables.hasEmptyCompactValue(this);
    return new AbstractIterator<ColumnMetadata>()
    {
        private final Iterator<ColumnMetadata> partitionKeyIter = partitionKeyColumns.iterator();
        // Static compact tables do not expose their clustering columns.
        private final Iterator<ColumnMetadata> clusteringIter =
            isStaticCompactTable ? Collections.emptyIterator() : clusteringColumns.iterator();
        private final Iterator<ColumnMetadata> otherColumns =
            noNonPkColumns
            ? Collections.emptyIterator()
            : (isStaticCompactTable ? staticColumns().selectOrderIterator()
                                    : regularAndStaticColumns.selectOrderIterator());
        protected ColumnMetadata computeNext()
        {
            // Drain the three iterators in order: partition key, clustering, the rest.
            if (partitionKeyIter.hasNext())
                return partitionKeyIter.next();
            if (clusteringIter.hasNext())
                return clusteringIter.next();
            return otherColumns.hasNext() ? otherColumns.next() : endOfData();
        }
    };
}
/**
* Returns the ColumnMetadata for {@code name}.
*/
public ColumnMetadata getColumn(ColumnIdentifier name)
{
return columns.get(name.bytes);
}
/*
* In general it is preferable to work with ColumnIdentifier to make it
* clear that we are talking about a CQL column, not a cell name, but there
* is a few cases where all we have is a ByteBuffer (when dealing with IndexExpression
* for instance) so...
*/
public ColumnMetadata getColumn(ByteBuffer name)
{
return columns.get(name);
}
public ColumnMetadata getDroppedColumn(ByteBuffer name)
{
DroppedColumn dropped = droppedColumns.get(name);
return dropped == null ? null : dropped.column;
}
/**
 * Returns a "fake" ColumnMetadata corresponding to the dropped column {@code name},
 * or {@code null} if there is no such dropped column.
 *
 * @param name     the column name
 * @param isStatic whether the column was a static column, if known; when true and the
 *                 recorded drop is non-static, a synthetic static definition is returned
 */
public ColumnMetadata getDroppedColumn(ByteBuffer name, boolean isStatic)
{
    DroppedColumn entry = droppedColumns.get(name);
    if (entry == null)
        return null;
    ColumnMetadata recorded = entry.column;
    return (isStatic && !recorded.isStatic())
         ? ColumnMetadata.staticColumn(this, name, recorded.type)
         : recorded;
}
public boolean hasStaticColumns()
{
return !staticColumns().isEmpty();
}
/**
 * Validates this table's metadata, throwing ConfigurationException (via except()) on
 * invalid keyspace/table names, counter/non-counter column mixes, a missing partition
 * key, a compact table without clustering columns, or indexes on super tables.
 */
public void validate()
{
    if (!isNameValid(keyspace))
        except("Keyspace name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", SchemaConstants.NAME_LENGTH, keyspace);
    if (!isNameValid(name))
        except("Table name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", SchemaConstants.NAME_LENGTH, name);
    params.validate();
    if (partitionKeyColumns.stream().anyMatch(c -> c.type.isCounter()))
        except("PRIMARY KEY columns cannot contain counters");
    // Mixing counter with non counter columns is not supported (#2614)
    if (isCounter())
    {
        for (ColumnMetadata column : regularAndStaticColumns)
            if (!(column.type.isCounter()) && !CompactTables.isSuperColumnMapColumn(column))
                except("Cannot have a non counter column (\"%s\") in a counter table", column.name);
    }
    else
    {
        for (ColumnMetadata column : regularAndStaticColumns)
            if (column.type.isCounter())
                except("Cannot have a counter column (\"%s\") in a non counter column table", column.name);
    }
    // All tables should have a partition key
    if (partitionKeyColumns.isEmpty())
        except("Missing partition keys for table %s", toString());
    // A compact table should always have a clustering
    if (isCompactTable() && clusteringColumns.isEmpty())
        except("For table %s, isDense=%b, isCompound=%b, clustering=%s", toString(), isDense(), isCompound(), clusteringColumns);
    if (!indexes.isEmpty() && isSuper())
        except("Secondary indexes are not supported on super column families");
    indexes.validate(this);
}
void validateCompatibility(TableMetadata previous)
{
if (isIndex())
return;
if (!previous.keyspace.equals(keyspace))
except("Keyspace mismatch (found %s; expected %s)", keyspace, previous.keyspace);
if (!previous.name.equals(name))
except("Table mismatch (found %s; expected %s)", name, previous.name);
if (!previous.id.equals(id))
except("Table ID mismatch (found %s; expected %s)", id, previous.id);
if (!previous.flags.equals(flags))
except("Table type mismatch (found %s; expected %s)", flags, previous.flags);
if (previous.partitionKeyColumns.size() != partitionKeyColumns.size())
{
except("Partition keys of different length (found %s; expected %s)",
partitionKeyColumns.size(),
previous.partitionKeyColumns.size());
}
for (int i = 0; i < partitionKeyColumns.size(); i++)
{
if (!partitionKeyColumns.get(i).type.isCompatibleWith(previous.partitionKeyColumns.get(i).type))
{
except("Partition key column mismatch (found %s; expected %s)",
partitionKeyColumns.get(i).type,
previous.partitionKeyColumns.get(i).type);
}
}
if (previous.clusteringColumns.size() != clusteringColumns.size())
{
except("Clustering columns of different length (found %s; expected %s)",
clusteringColumns.size(),
previous.clusteringColumns.size());
}
for (int i = 0; i < clusteringColumns.size(); i++)
{
if (!clusteringColumns.get(i).type.isCompatibleWith(previous.clusteringColumns.get(i).type))
{
except("Clustering column mismatch (found %s; expected %s)",
clusteringColumns.get(i).type,
previous.clusteringColumns.get(i).type);
}
}
for (ColumnMetadata previousColumn : previous.regularAndStaticColumns)
{
ColumnMetadata column = getColumn(previousColumn.name);
if (column != null && !column.type.isCompatibleWith(previousColumn.type))
except("Column mismatch (found %s; expected %s)", column, previousColumn);
}
}
public ClusteringComparator partitionKeyAsClusteringComparator()
{
return new ClusteringComparator(partitionKeyColumns.stream().map(c -> c.type).collect(toList()));
}
/**
* The type to use to compare column names in "static compact"
* tables or superColum ones.
* <p>
* This exists because for historical reasons, "static compact" tables as
* well as super column ones can have non-UTF8 column names.
* <p>
* This method should only be called for superColumn tables and "static
* compact" ones. For any other table, all column names are UTF8.
*/
AbstractType<?> staticCompactOrSuperTableColumnNameType()
{
if (isSuper())
{
assert compactValueColumn != null && compactValueColumn.type instanceof MapType;
return ((MapType) compactValueColumn.type).nameComparator();
}
assert isStaticCompactTable();
return clusteringColumns.get(0).type;
}
public AbstractType<?> columnDefinitionNameComparator(ColumnMetadata.Kind kind)
{
return (isSuper() && kind == ColumnMetadata.Kind.REGULAR) || (isStaticCompactTable() && kind == ColumnMetadata.Kind.STATIC)
? staticCompactOrSuperTableColumnNameType()
: UTF8Type.instance;
}
/**
* Generate a table name for an index corresponding to the given column.
* This is NOT the same as the index's name! This is only used in sstable filenames and is not exposed to users.
*
* @param info A definition of the column with index
*
* @return name of the index table
*/
public String indexTableName(IndexMetadata info)
{
// TODO simplify this when info.index_name is guaranteed to be set
return name + Directories.SECONDARY_INDEX_NAME_SEPARATOR + info.name;
}
/**
* @return true if the change as made impacts queries/updates on the table,
* e.g. any columns or indexes were added, removed, or altered; otherwise, false is returned.
* Used to determine whether prepared statements against this table need to be re-prepared.
*/
boolean changeAffectsPreparedStatements(TableMetadata updated)
{
return !partitionKeyColumns.equals(updated.partitionKeyColumns)
|| !clusteringColumns.equals(updated.clusteringColumns)
|| !regularAndStaticColumns.equals(updated.regularAndStaticColumns)
|| !indexes.equals(updated.indexes)
|| params.defaultTimeToLive != updated.params.defaultTimeToLive
|| params.gcGraceSeconds != updated.params.gcGraceSeconds;
}
/**
* There is a couple of places in the code where we need a TableMetadata object and don't have one readily available
* and know that only the keyspace and name matter. This creates such "fake" metadata. Use only if you know what
* you're doing.
*/
public static TableMetadata minimal(String keyspace, String name)
{
return TableMetadata.builder(keyspace, name)
.addPartitionKeyColumn("key", BytesType.instance)
.build();
}
public TableMetadata updateIndexTableMetadata(TableParams baseTableParams)
{
TableParams.Builder builder = baseTableParams.unbuild().gcGraceSeconds(0);
// Depends on parent's cache setting, turn on its index table's cache.
// Row caching is never enabled; see CASSANDRA-5732
builder.caching(baseTableParams.caching.cacheKeys() ? CachingParams.CACHE_KEYS : CachingParams.CACHE_NOTHING);
return unbuild().params(builder.build()).build();
}
boolean referencesUserType(ByteBuffer name)
{
return any(columns(), c -> c.type.referencesUserType(name));
}
public TableMetadata withUpdatedUserType(UserType udt)
{
if (!referencesUserType(udt.name))
return this;
Builder builder = unbuild();
columns().forEach(c -> builder.alterColumnType(c.name, c.type.withUpdatedUserType(udt)));
return builder.build();
}
/** Throws a ConfigurationException whose message is prefixed with "keyspace.table: ". */
private void except(String format, Object... args)
{
    throw new ConfigurationException(keyspace + "." + name + ": " + format(format, args));
}
@Override
public boolean equals(Object o)
{
if (this == o)
return true;
if (!(o instanceof TableMetadata))
return false;
TableMetadata tm = (TableMetadata) o;
return equalsWithoutColumns(tm) && columns.equals(tm.columns);
}
private boolean equalsWithoutColumns(TableMetadata tm)
{
return keyspace.equals(tm.keyspace)
&& name.equals(tm.name)
&& id.equals(tm.id)
&& partitioner.equals(tm.partitioner)
&& kind == tm.kind
&& params.equals(tm.params)
&& flags.equals(tm.flags)
&& droppedColumns.equals(tm.droppedColumns)
&& indexes.equals(tm.indexes)
&& triggers.equals(tm.triggers);
}
Optional<Difference> compare(TableMetadata other)
{
return equalsWithoutColumns(other)
? compareColumns(other.columns)
: Optional.of(Difference.SHALLOW);
}
/**
 * Compares this table's columns with {@code other}: SHALLOW if the column name sets
 * differ or any column differs shallowly; DEEP if the sets match but at least one
 * column differs deeply; empty if everything is identical.
 */
private Optional<Difference> compareColumns(Map<ByteBuffer, ColumnMetadata> other)
{
    if (!columns.keySet().equals(other.keySet()))
        return Optional.of(Difference.SHALLOW);
    boolean differsDeeply = false;
    for (Map.Entry<ByteBuffer, ColumnMetadata> entry : columns.entrySet())
    {
        ColumnMetadata thisColumn = entry.getValue();
        ColumnMetadata thatColumn = other.get(entry.getKey());
        Optional<Difference> difference = thisColumn.compare(thatColumn);
        if (difference.isPresent())
        {
            switch (difference.get())
            {
                case SHALLOW:
                    // A single shallow difference makes the whole comparison shallow.
                    return difference;
                case DEEP:
                    // Remember the deep difference but keep scanning for a shallow one.
                    differsDeeply = true;
            }
        }
    }
    return differsDeeply ? Optional.of(Difference.DEEP) : Optional.empty();
}
@Override
public int hashCode()
{
return Objects.hash(keyspace, name, id, partitioner, kind, params, flags, columns, droppedColumns, indexes, triggers);
}
@Override
public String toString()
{
return String.format("%s.%s", ColumnIdentifier.maybeQuote(keyspace), ColumnIdentifier.maybeQuote(name));
}
public String toDebugString()
{
return MoreObjects.toStringHelper(this)
.add("keyspace", keyspace)
.add("table", name)
.add("id", id)
.add("partitioner", partitioner)
.add("kind", kind)
.add("params", params)
.add("flags", flags)
.add("columns", columns())
.add("droppedColumns", droppedColumns.values())
.add("indexes", indexes)
.add("triggers", triggers)
.toString();
}
public static final class Builder
{
final String keyspace;
final String name;
private TableId id;
private IPartitioner partitioner;
private Kind kind = Kind.REGULAR;
private TableParams.Builder params = TableParams.builder();
// Setting compound as default as "normal" CQL tables are compound and that's what we want by default
private Set<Flag> flags = EnumSet.of(Flag.COMPOUND);
private Triggers triggers = Triggers.none();
private Indexes indexes = Indexes.none();
private final Map<ByteBuffer, DroppedColumn> droppedColumns = new HashMap<>();
private final Map<ByteBuffer, ColumnMetadata> columns = new HashMap<>();
private final List<ColumnMetadata> partitionKeyColumns = new ArrayList<>();
private final List<ColumnMetadata> clusteringColumns = new ArrayList<>();
private final List<ColumnMetadata> regularAndStaticColumns = new ArrayList<>();
private Builder(String keyspace, String name, TableId id)
{
this.keyspace = keyspace;
this.name = name;
this.id = id;
}
private Builder(String keyspace, String name)
{
this.keyspace = keyspace;
this.name = name;
}
public TableMetadata build()
{
if (partitioner == null)
partitioner = DatabaseDescriptor.getPartitioner();
if (id == null)
id = TableId.generate();
return new TableMetadata(this);
}
public Builder id(TableId val)
{
id = val;
return this;
}
public Builder partitioner(IPartitioner val)
{
partitioner = val;
return this;
}
public Builder kind(Kind val)
{
kind = val;
return this;
}
public Builder params(TableParams val)
{
params = val.unbuild();
return this;
}
public Builder bloomFilterFpChance(double val)
{
params.bloomFilterFpChance(val);
return this;
}
public Builder caching(CachingParams val)
{
params.caching(val);
return this;
}
public Builder comment(String val)
{
params.comment(val);
return this;
}
public Builder compaction(CompactionParams val)
{
params.compaction(val);
return this;
}
public Builder compression(CompressionParams val)
{
params.compression(val);
return this;
}
public Builder defaultTimeToLive(int val)
{
params.defaultTimeToLive(val);
return this;
}
public Builder gcGraceSeconds(int val)
{
params.gcGraceSeconds(val);
return this;
}
public Builder maxIndexInterval(int val)
{
params.maxIndexInterval(val);
return this;
}
public Builder memtableFlushPeriod(int val)
{
params.memtableFlushPeriodInMs(val);
return this;
}
public Builder minIndexInterval(int val)
{
params.minIndexInterval(val);
return this;
}
public Builder crcCheckChance(double val)
{
params.crcCheckChance(val);
return this;
}
public Builder speculativeRetry(SpeculativeRetryPolicy val)
{
params.speculativeRetry(val);
return this;
}
public Builder speculativeWriteThreshold(SpeculativeRetryPolicy val)
{
params.speculativeWriteThreshold(val);
return this;
}
public Builder extensions(Map<String, ByteBuffer> val)
{
params.extensions(val);
return this;
}
public Builder flags(Set<Flag> val)
{
flags = val;
return this;
}
public Builder isSuper(boolean val)
{
return flag(Flag.SUPER, val);
}
public Builder isCounter(boolean val)
{
return flag(Flag.COUNTER, val);
}
public Builder isDense(boolean val)
{
return flag(Flag.DENSE, val);
}
public Builder isCompound(boolean val)
{
return flag(Flag.COMPOUND, val);
}
private Builder flag(Flag flag, boolean set)
{
if (set) flags.add(flag); else flags.remove(flag);
return this;
}
public Builder triggers(Triggers val)
{
triggers = val;
return this;
}
public Builder indexes(Indexes val)
{
indexes = val;
return this;
}
public Builder addPartitionKeyColumn(String name, AbstractType type)
{
return addPartitionKeyColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addPartitionKeyColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, partitionKeyColumns.size(), ColumnMetadata.Kind.PARTITION_KEY));
}
public Builder addClusteringColumn(String name, AbstractType type)
{
return addClusteringColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addClusteringColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, clusteringColumns.size(), ColumnMetadata.Kind.CLUSTERING));
}
public Builder addRegularColumn(String name, AbstractType type)
{
return addRegularColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addRegularColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, ColumnMetadata.NO_POSITION, ColumnMetadata.Kind.REGULAR));
}
public Builder addStaticColumn(String name, AbstractType type)
{
return addStaticColumn(ColumnIdentifier.getInterned(name, false), type);
}
public Builder addStaticColumn(ColumnIdentifier name, AbstractType type)
{
return addColumn(new ColumnMetadata(keyspace, this.name, name, type, ColumnMetadata.NO_POSITION, ColumnMetadata.Kind.STATIC));
}
/**
 * Adds a column definition, routing it into the collection matching its kind.
 * Key columns are re-sorted after insertion (ColumnMetadata is Comparable).
 *
 * @param column the column to add
 * @return this builder
 * @throws IllegalArgumentException if a column with the same name already exists
 */
public Builder addColumn(ColumnMetadata column)
{
    if (columns.containsKey(column.name.bytes))
        throw new IllegalArgumentException("Column " + column.name + " already exists in " + keyspace + '.' + name);
    switch (column.kind)
    {
        case PARTITION_KEY:
            partitionKeyColumns.add(column);
            Collections.sort(partitionKeyColumns);
            break;
        case CLUSTERING:
            // Clustering columns must have a comparable type.
            column.type.checkComparable();
            clusteringColumns.add(column);
            Collections.sort(clusteringColumns);
            break;
        default:
            regularAndStaticColumns.add(column);
    }
    columns.put(column.name.bytes, column);
    return this;
}
Builder addColumns(Iterable<ColumnMetadata> columns)
{
columns.forEach(this::addColumn);
return this;
}
public Builder droppedColumns(Map<ByteBuffer, DroppedColumn> droppedColumns)
{
this.droppedColumns.clear();
this.droppedColumns.putAll(droppedColumns);
return this;
}
/**
 * Records a deprecated column for a system table, with Long.MAX_VALUE as the drop
 * timestamp.
 */
public Builder recordDeprecatedSystemColumn(String name, AbstractType<?> type)
{
    // The removal timestamp is deliberately fast-and-loose (Long.MAX_VALUE), so make
    // sure this is not misused for a non-system table.
    assert SchemaConstants.isLocalSystemKeyspace(keyspace);
    recordColumnDrop(ColumnMetadata.regularColumn(keyspace, this.name, name, type), Long.MAX_VALUE);
    return this;
}
public Builder recordColumnDrop(ColumnMetadata column, long timeMicros)
{
droppedColumns.put(column.name.bytes, new DroppedColumn(column.withNewType(column.type.expandUserTypes()), timeMicros));
return this;
}
public Iterable<ColumnMetadata> columns()
{
return columns.values();
}
public Set<String> columnNames()
{
return columns.values().stream().map(c -> c.name.toString()).collect(toSet());
}
public ColumnMetadata getColumn(ColumnIdentifier identifier)
{
return columns.get(identifier.bytes);
}
public ColumnMetadata getColumn(ByteBuffer name)
{
return columns.get(name);
}
public boolean hasRegularColumns()
{
return regularAndStaticColumns.stream().anyMatch(ColumnMetadata::isRegular);
}
/*
* The following methods all assume a Builder with valid set of partition key, clustering, regular and static columns.
*/
/**
 * Removes a regular or static column.
 *
 * @param identifier the column to remove
 * @return this builder
 * @throws IllegalArgumentException if the column does not exist or is part of the primary key
 */
public Builder removeRegularOrStaticColumn(ColumnIdentifier identifier)
{
    ColumnMetadata column = columns.get(identifier.bytes);
    // Split the original bare check so each failure mode gets a diagnostic message.
    if (column == null)
        throw new IllegalArgumentException("Unknown column " + identifier + " in " + keyspace + '.' + name);
    if (column.isPrimaryKeyColumn())
        throw new IllegalArgumentException("Cannot remove primary key column " + identifier + " from " + keyspace + '.' + name);
    columns.remove(identifier.bytes);
    regularAndStaticColumns.remove(column);
    return this;
}
/**
 * Renames a partition key or clustering column from {@code from} to {@code to},
 * keeping its position. Throws IllegalArgumentException if the target name is already
 * taken, or the source column is missing or not part of the primary key.
 */
public Builder renamePrimaryKeyColumn(ColumnIdentifier from, ColumnIdentifier to)
{
    if (columns.containsKey(to.bytes))
        throw new IllegalArgumentException();
    ColumnMetadata column = columns.get(from.bytes);
    if (column == null || !column.isPrimaryKeyColumn())
        throw new IllegalArgumentException();
    ColumnMetadata newColumn = column.withNewName(to);
    // Replace in-place in the appropriate key list so the column keeps its position.
    if (column.isPartitionKey())
        partitionKeyColumns.set(column.position(), newColumn);
    else
        clusteringColumns.set(column.position(), newColumn);
    columns.remove(from.bytes);
    columns.put(to.bytes, newColumn);
    return this;
}
/**
 * Replaces the type of column {@code name} with {@code type}, updating whichever
 * internal collection holds the column. Throws IllegalArgumentException for an
 * unknown column.
 */
Builder alterColumnType(ColumnIdentifier name, AbstractType<?> type)
{
    ColumnMetadata column = columns.get(name.bytes);
    if (column == null)
        throw new IllegalArgumentException();
    ColumnMetadata newColumn = column.withNewType(type);
    switch (column.kind)
    {
        case PARTITION_KEY:
            partitionKeyColumns.set(column.position(), newColumn);
            break;
        case CLUSTERING:
            clusteringColumns.set(column.position(), newColumn);
            break;
        case REGULAR:
        case STATIC:
            // Regular/static columns have no positional slot: swap the instances.
            regularAndStaticColumns.remove(column);
            regularAndStaticColumns.add(newColumn);
            break;
    }
    columns.put(column.name.bytes, newColumn);
    return this;
}
}
/**
* A table with strict liveness filters/ignores rows without PK liveness info,
* effectively tying the row liveness to its primary key liveness.
*
* Currently this is only used by views with normal base column as PK column
* so updates to other columns do not make the row live when the base column
* is not live. See CASSANDRA-11500.
*
* TODO: does not belong here, should be gone
*/
public boolean enforceStrictLiveness()
{
return isView() && Keyspace.open(keyspace).viewManager.getByName(name).enforceStrictLiveness();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.hadoop.integration.cascading;
import java.util.Properties;
import org.elasticsearch.hadoop.cascading.EsTap;
import org.elasticsearch.hadoop.cfg.ConfigurationOptions;
import org.elasticsearch.hadoop.util.TestSettings;
import org.elasticsearch.hadoop.util.TestUtils;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import cascading.flow.local.LocalFlowConnector;
import cascading.pipe.Pipe;
import cascading.scheme.local.TextLine;
import cascading.tap.Tap;
import cascading.tap.local.FileTap;
import cascading.tuple.Fields;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class AbstractCascadingLocalJsonSaveTest {
/** Writes pre-rendered JSON documents directly to ES (es.input.json enabled). */
@Test
public void testWriteToES() throws Exception {
    Properties properties = new TestSettings().getProperties();
    properties.put(ConfigurationOptions.ES_INPUT_JSON, "true");

    Tap in = sourceTap();
    Tap out = new EsTap("json-cascading-local/artists");
    Pipe pipe = new Pipe("copy");

    build(properties, in, out, pipe);
}
// Expects failure: index auto-creation is disabled and the target index does not exist.
@Test(expected = Exception.class)
public void testIndexAutoCreateDisabled() throws Exception {
    Properties properties = new TestSettings().getProperties();
    properties.put(ConfigurationOptions.ES_INDEX_AUTO_CREATE, "false");
    properties.put(ConfigurationOptions.ES_INPUT_JSON, "true");
    Tap in = sourceTap();
    Tap out = new EsTap("json-cascading-local/non-existing", new Fields("line"));
    Pipe pipe = new Pipe("copy");
    build(properties, in, out, pipe);
}

// Target resource contains a {number} pattern resolved per document.
@Test
public void testIndexPattern() throws Exception {
    Properties properties = new TestSettings().getProperties();
    properties.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    Tap in = sourceTap();
    Tap out = new EsTap("json-cascading-local/pattern-{number}", new Fields("line"));
    Pipe pipe = new Pipe("copy");
    build(properties, in, out, pipe);
}

// Resource pattern with a date format applied to the @timestamp field.
@Test
public void testIndexPatternWithFormat() throws Exception {
    Properties properties = new TestSettings().getProperties();
    properties.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    Tap in = sourceTap();
    Tap out = new EsTap("json-cascading-local/pattern-format-{@timestamp:YYYY-MM-dd}", new Fields("line"));
    Pipe pipe = new Pipe("copy");
    build(properties, in, out, pipe);
}
// Writes documents with an explicit document id taken from the "number" field.
@Test
public void testUpdate() throws Exception {
    // local file-system source
    Tap in = sourceTap();
    Tap out = new EsTap("json-cascading-local/createwithid", new Fields("line"));
    Properties props = new TestSettings().getProperties();
    props.put(ConfigurationOptions.ES_MAPPING_ID, "number");
    props.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    Pipe pipe = new Pipe("copy");
    build(props, in, out, pipe);
}

// Update-only write driven by a groovy script; relies on ids created by an earlier
// test (methods run in name order, see @FixMethodOrder on the class).
@Test
public void testUpdateOnlyScript() throws Exception {
    Properties properties = new TestSettings().getProperties();
    properties.put(ConfigurationOptions.ES_WRITE_OPERATION, "update");
    properties.put(ConfigurationOptions.ES_MAPPING_ID, "number");
    properties.put(ConfigurationOptions.ES_INDEX_AUTO_CREATE, "yes");
    properties.put(ConfigurationOptions.ES_UPDATE_RETRY_ON_CONFLICT, "3");
    properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT, "counter = 3");
    properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");
    properties.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    Tap in = sourceTap();
    // use an existing id to allow the update to succeed
    Tap out = new EsTap("json-cascading-local/createwithid", new Fields("line"));
    Pipe pipe = new Pipe("copy");
    build(properties, in, out, pipe);
}
@Test
public void testUpdateOnlyParamScript() throws Exception {
Properties properties = new TestSettings().getProperties();
properties.put(ConfigurationOptions.ES_WRITE_OPERATION, "update");
properties.put(ConfigurationOptions.ES_MAPPING_ID, "number");
properties.put(ConfigurationOptions.ES_INDEX_AUTO_CREATE, "yes");
properties.put(ConfigurationOptions.ES_UPDATE_RETRY_ON_CONFLICT, "3");
properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT, "counter = param1; anothercounter = param2");
properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");
properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT_PARAMS, " param1:<1>, param2:number ");
properties.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
Tap in = sourceTap();
// use an existing id to allow the update to succeed
Tap out = new EsTap("json-cascading-local/createwithid", new Fields("line"));
Pipe pipe = new Pipe("copy");
build(properties, in, out, pipe);
}
@Test
public void testUpdateOnlyParamJsonScript() throws Exception {
Properties properties = new TestSettings().getProperties();
properties.put(ConfigurationOptions.ES_WRITE_OPERATION, "update");
properties.put(ConfigurationOptions.ES_MAPPING_ID, "number");
properties.put(ConfigurationOptions.ES_UPDATE_RETRY_ON_CONFLICT, "3");
properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT, "counter = param1; anothercounter = param2");
properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");
properties.put(ConfigurationOptions.ES_UPDATE_SCRIPT_PARAMS_JSON, "{ \"param1\":1, \"param2\":2}");
properties.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
Tap in = sourceTap();
// use an existing id to allow the update to succeed
Tap out = new EsTap("json-cascading-local/createwithid", new Fields("line"));
Pipe pipe = new Pipe("copy");
build(properties, in, out, pipe);
}
@Test
public void testUpsert() throws Exception {
    // Plain upsert: document is created when absent, replaced when present.
    Properties props = new TestSettings().getProperties();
    props.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    props.put(ConfigurationOptions.ES_WRITE_OPERATION, "upsert");
    props.put(ConfigurationOptions.ES_MAPPING_ID, "number");
    Tap input = sourceTap();
    Tap output = new EsTap("json-cascading-local/upsert", new Fields("line"));
    build(props, input, output, new Pipe("copy"));
}
@Test
public void testUpsertScript() throws Exception {
    // Upsert that runs a parameterless script on existing documents.
    Properties props = new TestSettings().getProperties();
    props.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    props.put(ConfigurationOptions.ES_WRITE_OPERATION, "upsert");
    props.put(ConfigurationOptions.ES_MAPPING_ID, "number");
    props.put(ConfigurationOptions.ES_UPDATE_SCRIPT, "counter = 1");
    Tap input = sourceTap();
    // Write against an index that already holds these ids so the update can succeed.
    Tap output = new EsTap("json-cascading-local/upsert-script", new Fields("line"));
    build(props, input, output, new Pipe("copy"));
}
@Test
public void testUpsertParamScript() throws Exception {
    // Upsert whose script parameters are taken from the field-reference syntax.
    Properties props = new TestSettings().getProperties();
    props.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    props.put(ConfigurationOptions.ES_WRITE_OPERATION, "upsert");
    props.put(ConfigurationOptions.ES_MAPPING_ID, "number");
    props.put(ConfigurationOptions.ES_UPDATE_SCRIPT, "counter += param1; anothercounter += param2");
    props.put(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");
    props.put(ConfigurationOptions.ES_UPDATE_SCRIPT_PARAMS, " param1:<1>, param2:number ");
    Tap input = sourceTap();
    // Write against an index that already holds these ids so the update can succeed.
    Tap output = new EsTap("json-cascading-local/upsert-param-script", new Fields("line"));
    build(props, input, output, new Pipe("copy"));
}
@Test
public void testUpsertParamJsonScript() throws Exception {
    // Upsert whose script parameters are supplied as an inline JSON document.
    Properties props = new TestSettings().getProperties();
    props.put(ConfigurationOptions.ES_INPUT_JSON, "yes");
    props.put(ConfigurationOptions.ES_WRITE_OPERATION, "upsert");
    props.put(ConfigurationOptions.ES_MAPPING_ID, "number");
    props.put(ConfigurationOptions.ES_UPDATE_SCRIPT, "ctx._source.counter += param1; ctx._source.anothercounter += param2");
    props.put(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");
    props.put(ConfigurationOptions.ES_UPDATE_SCRIPT_PARAMS_JSON, "{ \"param1\":1, \"param2\":2}");
    Tap input = sourceTap();
    // Write against an index that already holds these ids so the update can succeed.
    Tap output = new EsTap("json-cascading-local/upsert-script-json-script", new Fields("line"));
    build(props, input, output, new Pipe("copy"));
}
/**
 * Wires the source tap, sink tap and pipe into a local flow, wraps it with the
 * stats proxy and runs it to completion.
 */
private void build(Properties props, Tap in, Tap out, Pipe pipe) {
    LocalFlowConnector connector = new LocalFlowConnector(props);
    StatsUtils.proxy(connector.connect(in, out, pipe)).complete();
}
/**
 * @return a tap over the sample artists JSON fixture, read one line per tuple.
 */
private Tap sourceTap() {
    TextLine lineScheme = new TextLine(new Fields("line"));
    return new FileTap(lineScheme, TestUtils.sampleArtistsJson());
}
}
| |
package nl.victronenergy.activities;
import nl.victronenergy.R;
import nl.victronenergy.models.UserResponse;
import nl.victronenergy.util.AnalyticsConstants;
import nl.victronenergy.util.Constants;
import nl.victronenergy.util.Constants.LOADER_ID;
import nl.victronenergy.util.Constants.POST;
import nl.victronenergy.util.Constants.RESPONSE_CODE;
import nl.victronenergy.util.Constants.WEBAPP;
import nl.victronenergy.util.UserUtils;
import nl.victronenergy.util.webservice.JsonParserHelper;
import nl.victronenergy.util.webservice.RestResponse;
import nl.victronenergy.util.webservice.WebserviceAsync;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.LoaderManager.LoaderCallbacks;
import android.support.v4.content.Loader;
import android.support.v7.app.ActionBarActivity;
import android.text.TextUtils;
import android.util.Patterns;
import android.view.ContextThemeWrapper;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;
/**
 * Login screen for authentication.
 * <p>
 * Offers e-mail/password sign-in, a demo login, a "forgot password" link and a
 * privacy-policy link. The web-service call runs through a {@link WebserviceAsync}
 * loader; the result comes back via {@link LoaderCallbacks#onLoadFinished}.
 *
 * @author M2Mobi
 */
public class ActivityLogin extends ActionBarActivity implements OnClickListener, LoaderCallbacks<RestResponse>, OnEditorActionListener {
    private static final String LOG_TAG = "ActivityLogin";

    // Start time of the current login attempt, intended for analytics timing.
    // NOTE(review): written in callLoginLoader() but never read — the timing
    // report it was meant for appears to be missing. Confirm intent before removing.
    private long mLoginStartTime;

    private EditText mEditTextEmail;
    private EditText mEditTextPassword;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Hide the action bar on non-phone (tablet) layouts.
        // (The previous comment claimed this forced portrait orientation; it does not.)
        if (!getResources().getBoolean(R.bool.is_phone)) {
            getSupportActionBar().hide();
        }
        setContentView(R.layout.activity_login);
        initView();
    }

    /**
     * Initialize the view: wire up click/IME listeners and prefill saved credentials.
     */
    private void initView() {
        // Hide actionbar title
        getSupportActionBar().setDisplayShowTitleEnabled(false);
        mEditTextEmail = (EditText) findViewById(R.id.edittext_email);
        mEditTextPassword = (EditText) findViewById(R.id.edittext_password);
        findViewById(R.id.button_demo).setOnClickListener(this);
        findViewById(R.id.button_sign_in).setOnClickListener(this);
        findViewById(R.id.button_privacy_policy).setOnClickListener(this);
        mEditTextPassword.setOnEditorActionListener(this);
        findViewById(R.id.textview_forgot_password).setOnClickListener(this);
        // If there is a username saved, make sure to prefill the fields
        if (!TextUtils.isEmpty(UserUtils.getUsername(this)) && !TextUtils.isEmpty(UserUtils.getPassword(this))) {
            mEditTextEmail.setText(UserUtils.getUsername(this));
            mEditTextPassword.setText(UserUtils.getPassword(this));
        }
    }

    /**
     * Persists the session id and the credentials that were used, then opens the
     * site summary screen and finishes this activity.
     *
     * @param pUserResponse
     *            Successful login response containing the session id.
     */
    private void openSiteList(UserResponse pUserResponse) {
        // Save the sessionId and the credentials so they can be prefilled next time
        UserUtils.saveSessionID(this, pUserResponse.data.user.sessionId);
        UserUtils.saveUsername(this, mEditTextEmail.getText().toString());
        UserUtils.savePassword(this, mEditTextPassword.getText().toString());
        Intent intentMain = new Intent(this, ActivitySiteSummary.class);
        intentMain.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
        startActivity(intentMain);
        finish();
    }

    /**
     * Validates the entered e-mail address and password, then starts the login loader.
     * Shows a dialog and aborts when either field is invalid.
     */
    private void processLogin() {
        // Check if the user entered a valid email address
        String email = mEditTextEmail.getText().toString();
        if (!isEmailAddressValid(email)) {
            showDialogWithMessage(getString(R.string.invalid_email_address));
            return;
        }
        // Check if the user entered a password
        String pass = mEditTextPassword.getText().toString();
        if (TextUtils.isEmpty(pass)) {
            showDialogWithMessage(getString(R.string.invalid_password));
            return;
        }
        callLoginLoader(email, pass);
    }

    /**
     * Clear focus on EditText view and close soft keyboard
     */
    private void hideKeyboard() {
        InputMethodManager imm = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
        imm.hideSoftInputFromWindow(mEditTextPassword.getWindowToken(), 0);
        mEditTextPassword.clearFocus();
    }

    /**
     * Call the loader to try login
     *
     * @param email
     *            The emailaddress you want to login with
     * @param password
     *            The password you want to login with
     */
    private void callLoginLoader(String email, String password) {
        // Keep track of the start login time for analytics
        mLoginStartTime = System.currentTimeMillis();
        // Disable buttons while the request is in flight
        setLoginButtonsDisabled(true);
        Bundle params = new Bundle();
        params.putString(POST.URI, Constants.WEBSERVICE_USER_LOGIN);
        params.putString(POST.EMAIL, email);
        params.putString(POST.PASSWORD, password);
        params.putString(POST.DEVICE_TYPE, getString(R.string.device_type));
        // initLoader only creates the loader once; restart on subsequent attempts
        if (getSupportLoaderManager().getLoader(LOADER_ID.LOGIN) == null) {
            getSupportLoaderManager().initLoader(LOADER_ID.LOGIN, params, this);
        } else {
            getSupportLoaderManager().restartLoader(LOADER_ID.LOGIN, params, this);
        }
    }

    /**
     * Parse the login response and, on success, continue to the site list.
     *
     * @param pRestResponse
     *            The restresponse that should contain the userinfo json
     */
    private void parseLoginResponse(RestResponse pRestResponse) {
        // parseJsonAndShowError surfaces any error dialog itself and returns null on failure
        UserResponse userResponse = JsonParserHelper.getInstance().parseJsonAndShowError(this, pRestResponse, UserResponse.class);
        if (userResponse != null && userResponse.status != null) {
            if (userResponse.status.code == RESPONSE_CODE.RESPONSE_OK) {
                openSiteList(userResponse);
            }
        }
    }

    /**
     * Enables/disables the login buttons and toggles the progress indicator.
     *
     * @param disabled
     *            True if the buttons should be disabled, false if the buttons should be enabled
     */
    private void setLoginButtonsDisabled(boolean disabled) {
        findViewById(R.id.button_demo).setEnabled(!disabled);
        findViewById(R.id.button_sign_in).setEnabled(!disabled);
        findViewById(R.id.layout_login_progress).setVisibility(disabled ? View.VISIBLE : View.GONE);
    }

    @Override
    public void onClick(View view) {
        switch (view.getId()) {
        case R.id.button_demo:
            // Demo login uses fixed credentials
            callLoginLoader(Constants.DEMO_EMAIL, Constants.DEMO_PASSWORD);
            break;
        case R.id.button_sign_in:
            processLogin();
            break;
        case R.id.textview_forgot_password:
            String forgotPasswordURL = WEBAPP.FORGOT_PASSWORD_URL;
            Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(forgotPasswordURL));
            startActivity(browserIntent);
            break;
        case R.id.button_privacy_policy:
            Intent privacyIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.victronenergy.com/privacy-policy"));
            startActivity(privacyIntent);
            break;
        }
    }

    /**
     * Shows a simple dialog with the given message and an OK button.
     *
     * @param pMessage
     *            The message to display
     */
    private void showDialogWithMessage(String pMessage) {
        new AlertDialog.Builder(new ContextThemeWrapper(this, R.style.AlertDialogCustom)).setTitle(getString(R.string.app_name_complete))
                .setMessage(pMessage).setIcon(R.drawable.ic_launcher_base)
                .setNeutralButton(getString(android.R.string.ok), new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                }).show();
    }

    /**
     * Checks if the email address is valid.
     *
     * @param email
     *            The email address that needs to be checked
     * @return True if the email address is valid
     */
    private boolean isEmailAddressValid(String email) {
        return Patterns.EMAIL_ADDRESS.matcher(email).matches();
    }

    @Override
    public Loader<RestResponse> onCreateLoader(int loaderId, Bundle params) {
        WebserviceAsync loader = WebserviceAsync.newInstance(ActivityLogin.this);
        loader.setParams(params);
        return loader;
    }

    @Override
    public void onLoadFinished(Loader<RestResponse> loader, RestResponse response) {
        switch (loader.getId()) {
        case LOADER_ID.LOGIN:
            parseLoginResponse(response);
            break;
        }
        // Re-enable the UI regardless of the outcome
        setLoginButtonsDisabled(false);
    }

    @Override
    public void onLoaderReset(Loader<RestResponse> loader) {
        // Do nothing
    }

    @Override
    public boolean onEditorAction(TextView pTextView, int pActionId, KeyEvent pKeyEvent) {
        // Pressing "Done" on the password field submits the form
        if (pActionId == EditorInfo.IME_ACTION_DONE) {
            hideKeyboard();
            processLogin();
            return true;
        }
        return false;
    }
}
| |
package ues.casosAcad.prime.beans;
import com.uesocc.entities.casosAcad.TipoPaso;
import com.uesocc.facades.casosAcad.TipoPasoFacadeLocal;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.inject.Named;
import javax.faces.view.ViewScoped;
import org.primefaces.event.SelectEvent;
import org.primefaces.model.LazyDataModel;
import org.primefaces.model.SortOrder;
@Named(value = "frmTipoPasoPrime")
@ViewScoped
public class FrmTipoPasoPrime implements Serializable {

    /** Filterable columns, checked in the same precedence order the original used. */
    private static final String[] CAMPOS_FILTRABLES = {"idTipoPaso", "nombre", "descripcion", "activo"};

    @EJB
    private TipoPasoFacadeLocal ejbTipoPaso;

    private LazyDataModel<TipoPaso> modelo;   // lazy model backing the PrimeFaces data table
    private TipoPaso registro;                // record currently selected / being edited
    private boolean btnadd = false;           // "add" button enabled state
    private boolean btnedit = false;          // "edit" button enabled state
    private boolean btnremove = false;        // "remove" button enabled state
    private boolean frmcrud = false;          // CRUD form visibility flag
    private boolean frmcrudsts = true;        // CRUD form disabled flag

    /** Helper that queues JSF feedback messages (validation, success, errors). */
    MensajesFormularios mensaje = new MensajesFormularios();

    /*-------Setter and Getter ----------*/
    public TipoPasoFacadeLocal getEjbTipoPaso() {
        return ejbTipoPaso;
    }

    public void setEjbTipoPaso(TipoPasoFacadeLocal ejbTipoPaso) {
        this.ejbTipoPaso = ejbTipoPaso;
    }

    public LazyDataModel<TipoPaso> getModelo() {
        return modelo;
    }

    public void setModelo(LazyDataModel<TipoPaso> modelo) {
        this.modelo = modelo;
    }

    public TipoPaso getRegistro() {
        return registro;
    }

    public void setRegistro(TipoPaso registro) {
        this.registro = registro;
    }
    /*-------- End Setter and Getter -----------*/

    public FrmTipoPasoPrime() {
    }

    /**
     * Loads every record at once; superseded by the lazy model built in {@code inicio()}.
     *
     * @return all records, or an empty list on failure
     */
    @Deprecated
    public List<TipoPaso> obtenerTodos() {
        List<TipoPaso> salida = new ArrayList<>();
        try {
            if (ejbTipoPaso != null) {
                salida = ejbTipoPaso.findAll();
            }
        } catch (Exception e) {
            Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
        }
        return salida;
    }

    /**
     * Bean initialization: creates an empty working record and the lazy data model
     * that pages and filters through the EJB facade.
     */
    @PostConstruct
    private void inicio() {
        registro = new TipoPaso();
        try {
            modelo = new LazyDataModel<TipoPaso>() {
                @Override
                public Object getRowKey(TipoPaso object) {
                    if (object != null) {
                        return object.getIdTipoPaso();
                    }
                    return null;
                }

                @Override
                public TipoPaso getRowData(String rowKey) {
                    // Resolve a row key back to the entity within the currently loaded page.
                    if (rowKey != null && !rowKey.isEmpty() && this.getWrappedData() != null) {
                        try {
                            Integer buscado = Integer.valueOf(rowKey);
                            for (TipoPaso thi : (List<TipoPaso>) getWrappedData()) {
                                if (thi.getIdTipoPaso().compareTo(buscado) == 0) {
                                    return thi;
                                }
                            }
                        } catch (Exception e) {
                            Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
                        }
                    }
                    return null;
                }

                @Override
                public List<TipoPaso> load(int first, int pageSize, String sortField, SortOrder sortOrder, Map<String, Object> filters) {
                    // No filters: plain paged query over the whole table.
                    if (filters == null || filters.isEmpty()) {
                        List<TipoPaso> salida = new ArrayList<>();
                        try {
                            if (ejbTipoPaso != null) {
                                this.setRowCount(ejbTipoPaso.count());
                                salida = ejbTipoPaso.findRange(first, pageSize);
                            }
                        } catch (Exception e) {
                            // Was silently swallowed before; log so query failures are visible.
                            Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
                        }
                        return salida;
                    }
                    // Filtered: apply the first supported filter key present, keeping the
                    // original precedence idTipoPaso > nombre > descripcion > activo.
                    List<TipoPaso> salida = null;
                    try {
                        for (String campo : CAMPOS_FILTRABLES) {
                            if (filters.containsKey(campo)) {
                                salida = ejbTipoPaso.findBy(campo, filters.get(campo).toString(), first, pageSize);
                                this.setRowCount(salida.size());
                                break;
                            }
                        }
                    } catch (Exception ex) {
                        Logger.getLogger(getClass().getName()).log(Level.SEVERE, ex.getMessage(), ex);
                    } finally {
                        if (salida == null) {
                            salida = new ArrayList<>();
                        }
                    }
                    return salida;
                }
            };
        } catch (Exception e) {
            Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
        }
    }

    /**
     * Resets the form to "create new record" mode.
     */
    public void nuevo() {
        this.registro = new TipoPaso();
        setBtnadd(true);
        setBtnedit(false);
        setBtnremove(false);
        setFrmcrudsts(false);
    }

    /**
     * @return true when the current record has both a non-empty name and description.
     *         Null-safe: the original called isEmpty() before its null check, which
     *         could throw a NullPointerException outside the try block.
     */
    private boolean datosCompletos() {
        return registro != null
                && registro.getNombre() != null && !registro.getNombre().isEmpty()
                && registro.getDescripcion() != null && !registro.getDescripcion().isEmpty();
    }

    /**
     * Persists the current record after validating required fields.
     */
    public void crearRegistro() {
        if (datosCompletos()) {
            try {
                if (this.ejbTipoPaso != null) {
                    this.ejbTipoPaso.create(registro);
                    nuevo();
                    mensaje.msgCreadoExito();
                }
            } catch (Exception e) {
                Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
            }
        } else {
            mensaje.msgFaltanCampos();
        }
    }

    /**
     * Deletes the currently selected record.
     */
    public void eliminar() {
        try {
            if (this.ejbTipoPaso != null && registro != null) {
                ejbTipoPaso.remove(this.registro);
                nuevo();
                mensaje.msgEliminacion();
            }
        } catch (Exception e) {
            Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
        }
    }

    /**
     * Updates the currently selected record after validating required fields.
     */
    public void editarRegistro() {
        if (datosCompletos()) {
            try {
                if (this.ejbTipoPaso != null) {
                    this.ejbTipoPaso.edit(registro);
                    nuevo();
                    mensaje.msgModificacion();
                }
            } catch (Exception e) {
                Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
            }
        } else {
            mensaje.msgFaltanCampos();
        }
    }

    /**
     * Row-selection listener: loads the picked record and switches to edit/remove mode.
     */
    public void cambiarSeleccion(SelectEvent e) {
        this.registro = (TipoPaso) e.getObject();
        setBtnedit(true);
        setBtnremove(true);
        setBtnadd(false);
        setFrmcrudsts(false);
    }

    /**
     * @return the frmcrudsts
     */
    public boolean isFrmcrudsts() {
        return frmcrudsts;
    }

    /**
     * @param frmcrudsts the frmcrudsts to set
     */
    public void setFrmcrudsts(boolean frmcrudsts) {
        this.frmcrudsts = frmcrudsts;
    }

    /**
     * @return the btnedit
     */
    public boolean isBtnedit() {
        return btnedit;
    }

    /**
     * @return the btnremove
     */
    public boolean isBtnremove() {
        return btnremove;
    }

    /**
     * @return the frmcrud
     */
    public boolean isFrmcrud() {
        return frmcrud;
    }

    /**
     * @param frmcrud the frmcrud to set
     */
    public void setFrmcrud(boolean frmcrud) {
        this.frmcrud = frmcrud;
    }

    /**
     * @return the btnadd
     */
    public boolean isBtnadd() {
        return btnadd;
    }

    /**
     * @param btnadd the btnadd to set
     */
    public void setBtnadd(boolean btnadd) {
        this.btnadd = btnadd;
    }

    /**
     * @param btnedit the btnedit to set
     */
    public void setBtnedit(boolean btnedit) {
        this.btnedit = btnedit;
    }

    /**
     * @param btnremove the btnremove to set
     */
    public void setBtnremove(boolean btnremove) {
        this.btnremove = btnremove;
    }
}
// @Deprecated
// public void guardarRegitro() {
// try {
// if (this.registro != null && this.ejbTipoPaso != null) {
// if (this.ejbTipoPaso.creator(registro)) {
// this.btnadd = !this.btnadd;
// }
// }
// } catch (Exception e) {
// Logger.getLogger(getClass().getName()).log(Level.SEVERE, e.getMessage(), e);
// }
// }
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devcoin.core;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferInputStream;
import org.jboss.netty.buffer.ChannelBufferOutputStream;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.*;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.handler.codec.replay.ReplayingDecoder;
import org.jboss.netty.handler.codec.replay.VoidEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Date;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static org.jboss.netty.channel.Channels.write;
// TODO: Remove this class and refactor the way we build Netty pipelines.
/**
 * <p>A {@code TCPNetworkConnection} is used for connecting to a Bitcoin node over the standard TCP/IP protocol.<p>
 *
 * <p>{@link TCPNetworkConnection#getHandler()} is part of a Netty Pipeline, downstream of other pipeline stages.</p>
 *
 */
public class TCPNetworkConnection implements NetworkConnection {
    private static final Logger log = LoggerFactory.getLogger(TCPNetworkConnection.class);

    // The IP address to which we are connecting.
    private InetAddress remoteIp;
    private final NetworkParameters params;
    // The version message the remote peer announced during the handshake; null until received.
    private VersionMessage versionMessage;

    private BitcoinSerializer serializer = null;

    // The version message we announce to the remote peer.
    private VersionMessage myVersionMessage;
    // Set by NetworkHandler.channelConnected once the TCP connection is up.
    private Channel channel;
    private NetworkHandler handler;

    // For ping nonces.
    private Random random = new Random();

    /**
     * Construct a network connection with the given params and version. If you use this constructor you need to set
     * up the Netty pipelines and infrastructure yourself. If all you have is an IP address and port, use the static
     * connectTo method.
     *
     * @param params Defines which network to connect to and details of the protocol.
     * @param ver The VersionMessage to announce to the other side of the connection.
     */
    public TCPNetworkConnection(NetworkParameters params, VersionMessage ver) {
        this.params = params;
        this.myVersionMessage = ver;
        this.serializer = new BitcoinSerializer(this.params);
        this.handler = new NetworkHandler();
    }

    // Some members that are used for convenience APIs. If the app only uses PeerGroup then these won't be used.
    private static NioClientSocketChannelFactory channelFactory;
    private SettableFuture<TCPNetworkConnection> handshakeFuture;

    /**
     * Returns a future for a TCPNetworkConnection that is connected and version negotiated to the given remote address.
     * Behind the scenes this method sets up a thread pool and a Netty pipeline that uses it. The equivalent Netty code
     * is quite complex so use this method if you aren't writing a complex app. The future completes once version
     * handshaking is done, use .get() on the response to wait for it.
     *
     * @param params The network parameters to use (production or testnet)
     * @param address IP address and port to use
     * @param connectTimeoutMsec How long to wait before giving up and setting the future to failure.
     * @param peer If not null, this peer will be added to the pipeline.
     */
    public static ListenableFuture<TCPNetworkConnection> connectTo(NetworkParameters params, InetSocketAddress address,
                                                                   int connectTimeoutMsec, @Nullable Peer peer) {
        // Lazily create the shared channel factory; synchronized on the class so
        // concurrent first calls don't create two thread pools.
        synchronized (TCPNetworkConnection.class) {
            if (channelFactory == null) {
                ExecutorService bossExecutor = Executors.newCachedThreadPool();
                ExecutorService workerExecutor = Executors.newCachedThreadPool();
                channelFactory = new NioClientSocketChannelFactory(bossExecutor, workerExecutor);
            }
        }
        // Run the connection in the thread pool and wait for it to complete.
        ClientBootstrap clientBootstrap = new ClientBootstrap(channelFactory);
        ChannelPipeline pipeline = Channels.pipeline();
        final TCPNetworkConnection conn = new TCPNetworkConnection(params, new VersionMessage(params, 0));
        conn.handshakeFuture = SettableFuture.create();
        conn.setRemoteAddress(address);
        // The codec stage must come before the (optional) peer stage so the peer sees decoded Messages.
        pipeline.addLast("codec", conn.getHandler());
        if (peer != null) pipeline.addLast("peer", peer.getHandler());
        clientBootstrap.setPipeline(pipeline);
        clientBootstrap.setOption("connectTimeoutMillis", connectTimeoutMsec);
        ChannelFuture socketFuture = clientBootstrap.connect(address);
        // Once the socket is either connected on the TCP level, or failed ...
        socketFuture.addListener(new ChannelFutureListener() {
            public void operationComplete(ChannelFuture channelFuture) throws Exception {
                // Check if it failed ...
                if (channelFuture.isDone() && !channelFuture.isSuccess()) {
                    // And complete the returned future with an exception.
                    conn.handshakeFuture.setException(channelFuture.getCause());
                }
                // Otherwise the handshakeFuture will be marked as completed once we did ver/verack exchange.
            }
        });
        return conn.handshakeFuture;
    }

    /** Sends the given message down the channel; serialization happens in {@link NetworkHandler#handleDownstream}. */
    public void writeMessage(Message message) throws IOException {
        write(channel, message);
    }

    // Handles the first message of the handshake: records the peer's version, replies
    // with a verack, rejects peers without a usable block chain and completes handshakeFuture.
    private void onVersionMessage(Message m) throws IOException, ProtocolException {
        if (!(m instanceof VersionMessage)) {
            // Bad peers might not follow the protocol. This has been seen in the wild (issue 81).
            log.info("First message received was not a version message but rather " + m);
            return;
        }
        versionMessage = (VersionMessage) m;
        // Switch to the new protocol version.
        int peerVersion = versionMessage.clientVersion;
        log.info("Connected to {}: version={}, subVer='{}', services=0x{}, time={}, blocks={}", new Object[] {
                getPeerAddress().getAddr().getHostAddress(),
                peerVersion,
                versionMessage.subVer,
                versionMessage.localServices,
                new Date(versionMessage.time * 1000),
                versionMessage.bestHeight
        });
        // Now it's our turn ...
        // Send an ACK message stating we accept the peers protocol version.
        write(channel, new VersionAck());
        // devcoinj is a client mode implementation. That means there's not much point in us talking to other client
        // mode nodes because we can't download the data from them we need to find/verify transactions. Some bogus
        // implementations claim to have a block chain in their services field but then report a height of zero, filter
        // them out here.
        if (!versionMessage.hasBlockChain() ||
                (!params.allowEmptyPeerChain() && versionMessage.bestHeight <= 0)) {
            // Shut down the channel
            throw new ProtocolException("Peer does not have a copy of the block chain.");
        }
        // Handshake is done!
        // handshakeFuture is only set when this connection was made via connectTo(); guard for the
        // case where the caller wired the pipeline manually.
        if (handshakeFuture != null)
            handshakeFuture.set(this);
    }

    /** Sends a ping; includes a random nonce only for peers whose protocol version supports it. */
    public void ping() throws IOException {
        // pong/nonce messages were added to any protocol version greater than 60000
        if (versionMessage.clientVersion > 60000) {
            write(channel, new Ping(random.nextLong()));
        }
        else
            write(channel, new Ping());
    }

    @Override
    public String toString() {
        return "[" + remoteIp.getHostAddress() + "]:" + params.getPort();
    }

    public class NetworkHandler extends ReplayingDecoder<VoidEnum> implements ChannelDownstreamHandler {
        @Override
        public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
            super.channelConnected(ctx, e);
            channel = e.getChannel();
            // The version message does not use checksumming, until Feb 2012 when it magically does.
            // Announce ourselves. This has to come first to connect to clients beyond v0.30.20.2 which wait to hear
            // from us until they send their version message back.
            log.info("Announcing to {} as: {}", channel.getRemoteAddress(), myVersionMessage.subVer);
            write(channel, myVersionMessage);
            // When connecting, the remote peer sends us a version message with various bits of
            // useful data in it. We need to know the peer protocol version before we can talk to it.
        }

        // Attempt to decode a Bitcoin message passing upstream in the channel.
        //
        // By extending ReplayingDecoder, reading past the end of buffer will throw a special Error
        // causing the channel to read more and retry.
        //
        // On VMs/systems where exception handling is slow, this will impact performance. On the
        // other hand, implementing a FrameDecoder will increase code complexity due to having
        // to implement retries ourselves.
        //
        // TODO: consider using a decoder state and checkpoint() if performance is an issue.
        @Override
        protected Object decode(ChannelHandlerContext ctx, Channel chan,
                                ChannelBuffer buffer, VoidEnum state) throws Exception {
            Message message = serializer.deserialize(new ChannelBufferInputStream(buffer));
            // Intercept the version message here so the handshake completes regardless of
            // what later pipeline stages do with it.
            if (message instanceof VersionMessage)
                onVersionMessage(message);
            return message;
        }

        /** Serialize outgoing Bitcoin messages passing downstream in the channel. */
        public void handleDownstream(ChannelHandlerContext ctx, ChannelEvent evt) throws Exception {
            // Only message events are serialized; pass everything else through untouched.
            if (!(evt instanceof MessageEvent)) {
                ctx.sendDownstream(evt);
                return;
            }
            MessageEvent e = (MessageEvent) evt;
            Message message = (Message)e.getMessage();
            ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
            serializer.serialize(message, new ChannelBufferOutputStream(buffer));
            write(ctx, e.getFuture(), buffer, e.getRemoteAddress());
        }

        public TCPNetworkConnection getOwnerObject() {
            return TCPNetworkConnection.this;
        }
    }

    /** Returns the Netty Pipeline stage handling Bitcoin serialization for this connection. */
    public NetworkHandler getHandler() {
        return handler;
    }

    public VersionMessage getVersionMessage() {
        return versionMessage;
    }

    public PeerAddress getPeerAddress() {
        return new PeerAddress(remoteIp, params.getPort());
    }

    public void close() {
        channel.close();
    }

    public void setRemoteAddress(SocketAddress address) {
        // Non-inet addresses (e.g. in tests) simply leave remoteIp null.
        if (address instanceof InetSocketAddress)
            remoteIp = ((InetSocketAddress)address).getAddress();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.util;
public abstract class AbstractConcurrentDoubleKeyMap<K1,K2,V> extends AbstractConcurrentMapBase {
public AbstractConcurrentDoubleKeyMap(Object segmentInfo) {
super(segmentInfo);
}
static <K1,K2> int hash(K1 key1, K2 key2) {
int h = 31*key1.hashCode() + key2.hashCode();
h += ~(h << 9);
h ^= (h >>> 14);
h += (h << 4);
h ^= (h >>> 10);
return h;
}
public V get(K1 key1, K2 key2) {
int hash = hash(key1, key2);
return segmentFor(hash).get(key1, key2, hash);
}
public Entry<K1,K2,V> getOrPut(K1 key1, K2 key2, V value) {
int hash = hash(key1,key2);
return segmentFor(hash).getOrPut(key1, key2, hash, value);
}
public void put(K1 key1, K2 key2, V value) {
int hash = hash(key1, key2);
segmentFor(hash).put(key1, key2, hash).setValue(value);
}
public void remove(K1 key1, K2 key2) {
int hash = hash(key1, key2);
segmentFor(hash).remove(key1, key2, hash);
}
public final Segment<K1,K2,V> segmentFor(int hash) {
return (Segment<K1,K2,V>) segments[(hash >>> segmentShift) & segmentMask];
}
abstract static class Segment<K1,K2,V> extends AbstractConcurrentMapBase.Segment {
Segment(int initialCapacity) {
super(initialCapacity);
}
V get(K1 key1, K2 key2, int hash) {
Object[] tab = table;
Object o = tab[hash & (tab.length - 1)];
if (o != null) {
if (o instanceof Entry) {
Entry<K1,K2,V> e = (Entry<K1,K2,V>) o;
if (e.isEqual(key1,key2,hash)) {
return e.getValue();
}
}
else {
Object arr [] = (Object[]) o;
for (int i = 0; i != arr.length; ++i) {
Entry<K1,K2,V> e = (Entry<K1,K2,V>) arr [i];
if (e != null && e.isEqual(key1, key2, hash))
return e.getValue();
}
}
}
return null;
}
Entry<K1,K2,V> getOrPut(K1 key1, K2 key2, int hash, V value) {
Object[] tab = table;
Object o = tab[hash & (tab.length - 1)];
if (o != null) {
if (o instanceof Entry) {
Entry<K1,K2,V> e = (Entry<K1,K2,V>) o;
if (e.isEqual(key1,key2,hash)) {
return e;
}
}
else {
Object arr [] = (Object[]) o;
for (int i = 0; i != arr.length; ++i) {
Entry<K1,K2,V> e = (Entry<K1,K2,V>) arr [i];
if (e != null && e.isEqual(key1, key2, hash))
return e;
}
}
}
final Entry<K1,K2,V> kvEntry = put(key1, key2, hash);
kvEntry.setValue(value);
return kvEntry;
}
Entry<K1,K2,V> put(K1 key1, K2 key2, int hash) {
lock();
try {
int c = count;
if (c++ > threshold) {
rehash();
}
Object[] tab = table;
final int index = hash & (tab.length - 1);
final Object o = tab[index];
if (o != null) {
if (o instanceof Entry) {
final Entry<K1,K2,V> e = (Entry<K1,K2,V>) o;
if (e.isEqual(key1,key2,hash)) {
return e;
}
final Object[] arr = new Object[2];
final Entry<K1,K2,V> res = createEntry(key1, key2, hash);
arr [0] = res;
arr [1] = e;
tab[index] = arr;
count = c; // write-volatile
return res;
}
else {
Object arr [] = (Object[]) o;
for (int i = 0; i != arr.length; ++i) {
Entry<K1,K2,V> e = (Entry<K1,K2,V>) arr [i];
if (e != null && e.isEqual(key1, key2, hash)) {
return e;
}
}
final Object[] newArr = new Object[arr.length+1];
final Entry<K1,K2,V> res = createEntry(key1,key2, hash);
arr [0] = res;
System.arraycopy(arr, 0, newArr, 1, arr.length);
tab[index] = arr;
count = c; // write-volatile
return res;
}
}
final Entry<K1,K2,V> res = createEntry(key1, key2, hash);
tab[index] = res;
count = c; // write-volatile
return res;
} finally {
unlock();
}
}
public void remove(K1 key1, K2 key2, int hash) {
lock();
try {
int c = count-1;
final Object[] tab = table;
final int index = hash & (tab.length - 1);
Object o = tab[index];
if (o != null) {
if (o instanceof Entry) {
if (((Entry<K1,K2,V>)o).isEqual(key1, key2, hash)) {
tab[index] = null;
count = c;
}
}
else {
Object arr [] = (Object[]) o;
for (int i = 0; i < arr.length; i++) {
Entry<K1,K2,V> e = (Entry<K1,K2,V>) arr[i];
if (e != null && e.isEqual(key1, key2, hash)) {
arr [i] = null;
count = c;
break;
}
}
}
}
}
finally {
unlock();
}
}
protected abstract Entry<K1,K2,V> createEntry(K1 key1, K2 key2, int hash);
}
    /**
     * A map entry addressed by a pair of keys. Extends the single-value
     * base entry with a pair-aware identity test.
     */
    interface Entry<K1, K2, V> extends AbstractConcurrentMapBase.Entry<V>{
        /**
         * Returns true if this entry represents the given key pair; the
         * precomputed hash is passed in so implementations can reject
         * non-matches cheaply before comparing keys.
         */
        boolean isEqual(K1 key1, K2 key2, int hash);
    }
}
| |
/*
* Copyright 2015 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.dom.webgl;
import org.teavm.dom.canvas.ImageData;
import org.teavm.dom.html.HTMLCanvasElement;
import org.teavm.dom.html.HTMLImageElement;
import org.teavm.dom.typedarrays.ArrayBuffer;
import org.teavm.dom.typedarrays.ArrayBufferView;
import org.teavm.dom.typedarrays.Float32Array;
import org.teavm.dom.typedarrays.Int32Array;
import org.teavm.jso.*;
/**
 * Java mirror of the WebGL 1.0 {@code WebGLRenderingContext} DOM interface:
 * GLenum constants plus the drawing, shader, buffer, texture and framebuffer
 * entry points. Constant values and method signatures follow the Khronos
 * WebGL 1.0 IDL.
 *
 * @author Alexey Andreev
 */
public interface WebGLRenderingContext extends JSObject {
    /* ClearBufferMask */
    int DEPTH_BUFFER_BIT = 0x00000100;
    int STENCIL_BUFFER_BIT = 0x00000400;
    int COLOR_BUFFER_BIT = 0x00004000;
    /* BeginMode (primitive types for drawArrays/drawElements) */
    int POINTS = 0x0000;
    int LINES = 0x0001;
    int LINE_LOOP = 0x0002;
    int LINE_STRIP = 0x0003;
    int TRIANGLES = 0x0004;
    int TRIANGLE_STRIP = 0x0005;
    int TRIANGLE_FAN = 0x0006;
    /* Blending factors (blendFunc/blendFuncSeparate) */
    int ZERO = 0;
    int ONE = 1;
    int SRC_COLOR = 0x0300;
    int ONE_MINUS_SRC_COLOR = 0x0301;
    int SRC_ALPHA = 0x0302;
    int ONE_MINUS_SRC_ALPHA = 0x0303;
    int DST_ALPHA = 0x0304;
    int ONE_MINUS_DST_ALPHA = 0x0305;
    int DST_COLOR = 0x0306;
    int ONE_MINUS_DST_COLOR = 0x0307;
    int SRC_ALPHA_SATURATE = 0x0308;
    /* Blend equations and blend-state queries */
    int FUNC_ADD = 0x8006;
    int BLEND_EQUATION = 0x8009;
    int BLEND_EQUATION_RGB = 0x8009;
    int BLEND_EQUATION_ALPHA = 0x883D;
    int FUNC_SUBTRACT = 0x800A;
    int FUNC_REVERSE_SUBTRACT = 0x800B;
    int BLEND_DST_RGB = 0x80C8;
    int BLEND_SRC_RGB = 0x80C9;
    int BLEND_DST_ALPHA = 0x80CA;
    int BLEND_SRC_ALPHA = 0x80CB;
    /* Separate blend functions (constant-color factors) */
    int CONSTANT_COLOR = 0x8001;
    int ONE_MINUS_CONSTANT_COLOR = 0x8002;
    int CONSTANT_ALPHA = 0x8003;
    int ONE_MINUS_CONSTANT_ALPHA = 0x8004;
    int BLEND_COLOR = 0x8005;
    /* Buffer objects */
    int ARRAY_BUFFER = 0x8892;
    int ELEMENT_ARRAY_BUFFER = 0x8893;
    int ARRAY_BUFFER_BINDING = 0x8894;
    int ELEMENT_ARRAY_BUFFER_BINDING = 0x8895;
    int STREAM_DRAW = 0x88E0;
    int STATIC_DRAW = 0x88E4;
    int DYNAMIC_DRAW = 0x88E8;
    int BUFFER_SIZE = 0x8764;
    int BUFFER_USAGE = 0x8765;
    int CURRENT_VERTEX_ATTRIB = 0x8626;
    /* CullFaceMode */
    int FRONT = 0x0404;
    int BACK = 0x0405;
    int FRONT_AND_BACK = 0x0408;
    /* EnableCap (capabilities for enable/disable/isEnabled) */
    int CULL_FACE = 0x0B44;
    int BLEND = 0x0BE2;
    int DITHER = 0x0BD0;
    int STENCIL_TEST = 0x0B90;
    int DEPTH_TEST = 0x0B71;
    int SCISSOR_TEST = 0x0C11;
    int POLYGON_OFFSET_FILL = 0x8037;
    int SAMPLE_ALPHA_TO_COVERAGE = 0x809E;
    int SAMPLE_COVERAGE = 0x80A0;
    /* ErrorCode (returned by getError) */
    int NO_ERROR = 0;
    int INVALID_ENUM = 0x0500;
    int INVALID_VALUE = 0x0501;
    int INVALID_OPERATION = 0x0502;
    int OUT_OF_MEMORY = 0x0505;
    /* FrontFaceDirection */
    int CW = 0x0900;
    int CCW = 0x0901;
    /* GetPName (state queries for getParameter) */
    int LINE_WIDTH = 0x0B21;
    int ALIASED_POINT_SIZE_RANGE = 0x846D;
    int ALIASED_LINE_WIDTH_RANGE = 0x846E;
    int CULL_FACE_MODE = 0x0B45;
    int FRONT_FACE = 0x0B46;
    int DEPTH_RANGE = 0x0B70;
    int DEPTH_WRITEMASK = 0x0B72;
    int DEPTH_CLEAR_VALUE = 0x0B73;
    int DEPTH_FUNC = 0x0B74;
    int STENCIL_CLEAR_VALUE = 0x0B91;
    int STENCIL_FUNC = 0x0B92;
    int STENCIL_FAIL = 0x0B94;
    int STENCIL_PASS_DEPTH_FAIL = 0x0B95;
    int STENCIL_PASS_DEPTH_PASS = 0x0B96;
    int STENCIL_REF = 0x0B97;
    int STENCIL_VALUE_MASK = 0x0B93;
    int STENCIL_WRITEMASK = 0x0B98;
    int STENCIL_BACK_FUNC = 0x8800;
    int STENCIL_BACK_FAIL = 0x8801;
    int STENCIL_BACK_PASS_DEPTH_FAIL = 0x8802;
    int STENCIL_BACK_PASS_DEPTH_PASS = 0x8803;
    int STENCIL_BACK_REF = 0x8CA3;
    int STENCIL_BACK_VALUE_MASK = 0x8CA4;
    int STENCIL_BACK_WRITEMASK = 0x8CA5;
    int VIEWPORT = 0x0BA2;
    int SCISSOR_BOX = 0x0C10;
    int COLOR_CLEAR_VALUE = 0x0C22;
    int COLOR_WRITEMASK = 0x0C23;
    int UNPACK_ALIGNMENT = 0x0CF5;
    int PACK_ALIGNMENT = 0x0D05;
    int MAX_TEXTURE_SIZE = 0x0D33;
    int MAX_VIEWPORT_DIMS = 0x0D3A;
    int SUBPIXEL_BITS = 0x0D50;
    int RED_BITS = 0x0D52;
    int GREEN_BITS = 0x0D53;
    int BLUE_BITS = 0x0D54;
    int ALPHA_BITS = 0x0D55;
    int DEPTH_BITS = 0x0D56;
    int STENCIL_BITS = 0x0D57;
    int POLYGON_OFFSET_UNITS = 0x2A00;
    int POLYGON_OFFSET_FACTOR = 0x8038;
    int TEXTURE_BINDING_2D = 0x8069;
    int SAMPLE_BUFFERS = 0x80A8;
    int SAMPLES = 0x80A9;
    int SAMPLE_COVERAGE_VALUE = 0x80AA;
    int SAMPLE_COVERAGE_INVERT = 0x80AB;
    int COMPRESSED_TEXTURE_FORMATS = 0x86A3;
    /* HintMode / HintTarget */
    int DONT_CARE = 0x1100;
    int FASTEST = 0x1101;
    int NICEST = 0x1102;
    int GENERATE_MIPMAP_HINT = 0x8192;
    /* DataType (vertex attribute / element index types) */
    int BYTE = 0x1400;
    int UNSIGNED_BYTE = 0x1401;
    int SHORT = 0x1402;
    int UNSIGNED_SHORT = 0x1403;
    int INT = 0x1404;
    int UNSIGNED_INT = 0x1405;
    int FLOAT = 0x1406;
    /* PixelFormat */
    int DEPTH_COMPONENT = 0x1902;
    int ALPHA = 0x1906;
    int RGB = 0x1907;
    int RGBA = 0x1908;
    int LUMINANCE = 0x1909;
    int LUMINANCE_ALPHA = 0x190A;
    /* PixelType (packed texel formats) */
    int UNSIGNED_SHORT_4_4_4_4 = 0x8033;
    int UNSIGNED_SHORT_5_5_5_1 = 0x8034;
    int UNSIGNED_SHORT_5_6_5 = 0x8363;
    /* Shaders and programs */
    int FRAGMENT_SHADER = 0x8B30;
    int VERTEX_SHADER = 0x8B31;
    int MAX_VERTEX_ATTRIBS = 0x8869;
    int MAX_VERTEX_UNIFORM_VECTORS = 0x8DFB;
    int MAX_VARYING_VECTORS = 0x8DFC;
    int MAX_COMBINED_TEXTURE_IMAGE_UNITS = 0x8B4D;
    int MAX_VERTEX_TEXTURE_IMAGE_UNITS = 0x8B4C;
    int MAX_TEXTURE_IMAGE_UNITS = 0x8872;
    int MAX_FRAGMENT_UNIFORM_VECTORS = 0x8DFD;
    int SHADER_TYPE = 0x8B4F;
    int DELETE_STATUS = 0x8B80;
    int LINK_STATUS = 0x8B82;
    int VALIDATE_STATUS = 0x8B83;
    int ATTACHED_SHADERS = 0x8B85;
    int ACTIVE_UNIFORMS = 0x8B86;
    int ACTIVE_ATTRIBUTES = 0x8B89;
    int SHADING_LANGUAGE_VERSION = 0x8B8C;
    int CURRENT_PROGRAM = 0x8B8D;
    /* StencilFunction / depth compare functions */
    int NEVER = 0x0200;
    int LESS = 0x0201;
    int EQUAL = 0x0202;
    int LEQUAL = 0x0203;
    int GREATER = 0x0204;
    int NOTEQUAL = 0x0205;
    int GEQUAL = 0x0206;
    int ALWAYS = 0x0207;
    /* StencilOp */
    int KEEP = 0x1E00;
    int REPLACE = 0x1E01;
    int INCR = 0x1E02;
    int DECR = 0x1E03;
    int INVERT = 0x150A;
    int INCR_WRAP = 0x8507;
    int DECR_WRAP = 0x8508;
    /* StringName (for getParameter string queries) */
    int VENDOR = 0x1F00;
    int RENDERER = 0x1F01;
    int VERSION = 0x1F02;
    /* TextureMagFilter / TextureMinFilter */
    int NEAREST = 0x2600;
    int LINEAR = 0x2601;
    int NEAREST_MIPMAP_NEAREST = 0x2700;
    int LINEAR_MIPMAP_NEAREST = 0x2701;
    int NEAREST_MIPMAP_LINEAR = 0x2702;
    int LINEAR_MIPMAP_LINEAR = 0x2703;
    /* TextureParameterName */
    int TEXTURE_MAG_FILTER = 0x2800;
    int TEXTURE_MIN_FILTER = 0x2801;
    int TEXTURE_WRAP_S = 0x2802;
    int TEXTURE_WRAP_T = 0x2803;
    /* TextureTarget */
    int TEXTURE_2D = 0x0DE1;
    int TEXTURE = 0x1702;
    int TEXTURE_CUBE_MAP = 0x8513;
    int TEXTURE_BINDING_CUBE_MAP = 0x8514;
    int TEXTURE_CUBE_MAP_POSITIVE_X = 0x8515;
    int TEXTURE_CUBE_MAP_NEGATIVE_X = 0x8516;
    int TEXTURE_CUBE_MAP_POSITIVE_Y = 0x8517;
    int TEXTURE_CUBE_MAP_NEGATIVE_Y = 0x8518;
    int TEXTURE_CUBE_MAP_POSITIVE_Z = 0x8519;
    int TEXTURE_CUBE_MAP_NEGATIVE_Z = 0x851A;
    int MAX_CUBE_MAP_TEXTURE_SIZE = 0x851C;
    /* TextureUnit */
    int TEXTURE0 = 0x84C0;
    int TEXTURE1 = 0x84C1;
    int TEXTURE2 = 0x84C2;
    int TEXTURE3 = 0x84C3;
    int TEXTURE4 = 0x84C4;
    int TEXTURE5 = 0x84C5;
    int TEXTURE6 = 0x84C6;
    int TEXTURE7 = 0x84C7;
    int TEXTURE8 = 0x84C8;
    int TEXTURE9 = 0x84C9;
    int TEXTURE10 = 0x84CA;
    int TEXTURE11 = 0x84CB;
    int TEXTURE12 = 0x84CC;
    int TEXTURE13 = 0x84CD;
    int TEXTURE14 = 0x84CE;
    int TEXTURE15 = 0x84CF;
    int TEXTURE16 = 0x84D0;
    int TEXTURE17 = 0x84D1;
    int TEXTURE18 = 0x84D2;
    int TEXTURE19 = 0x84D3;
    int TEXTURE20 = 0x84D4;
    int TEXTURE21 = 0x84D5;
    int TEXTURE22 = 0x84D6;
    int TEXTURE23 = 0x84D7;
    int TEXTURE24 = 0x84D8;
    int TEXTURE25 = 0x84D9;
    int TEXTURE26 = 0x84DA;
    int TEXTURE27 = 0x84DB;
    int TEXTURE28 = 0x84DC;
    int TEXTURE29 = 0x84DD;
    int TEXTURE30 = 0x84DE;
    int TEXTURE31 = 0x84DF;
    int ACTIVE_TEXTURE = 0x84E0;
    /* TextureWrapMode */
    int REPEAT = 0x2901;
    int CLAMP_TO_EDGE = 0x812F;
    int MIRRORED_REPEAT = 0x8370;
    /* Uniform types (returned by getActiveUniform/getActiveAttrib) */
    int FLOAT_VEC2 = 0x8B50;
    int FLOAT_VEC3 = 0x8B51;
    int FLOAT_VEC4 = 0x8B52;
    int INT_VEC2 = 0x8B53;
    int INT_VEC3 = 0x8B54;
    int INT_VEC4 = 0x8B55;
    int BOOL = 0x8B56;
    int BOOL_VEC2 = 0x8B57;
    int BOOL_VEC3 = 0x8B58;
    int BOOL_VEC4 = 0x8B59;
    int FLOAT_MAT2 = 0x8B5A;
    int FLOAT_MAT3 = 0x8B5B;
    int FLOAT_MAT4 = 0x8B5C;
    int SAMPLER_2D = 0x8B5E;
    int SAMPLER_CUBE = 0x8B60;
    /* Vertex Arrays */
    int VERTEX_ATTRIB_ARRAY_ENABLED = 0x8622;
    int VERTEX_ATTRIB_ARRAY_SIZE = 0x8623;
    int VERTEX_ATTRIB_ARRAY_STRIDE = 0x8624;
    int VERTEX_ATTRIB_ARRAY_TYPE = 0x8625;
    int VERTEX_ATTRIB_ARRAY_NORMALIZED = 0x886A;
    int VERTEX_ATTRIB_ARRAY_POINTER = 0x8645;
    int VERTEX_ATTRIB_ARRAY_BUFFER_BINDING = 0x889F;
    /* Shader Source */
    int COMPILE_STATUS = 0x8B81;
    /* Shader Precision-Specified Types */
    int LOW_FLOAT = 0x8DF0;
    int MEDIUM_FLOAT = 0x8DF1;
    int HIGH_FLOAT = 0x8DF2;
    int LOW_INT = 0x8DF3;
    int MEDIUM_INT = 0x8DF4;
    int HIGH_INT = 0x8DF5;
    /* Framebuffer Object. */
    int FRAMEBUFFER = 0x8D40;
    int RENDERBUFFER = 0x8D41;
    int RGBA4 = 0x8056;
    int RGB5_A1 = 0x8057;
    int RGB565 = 0x8D62;
    int DEPTH_COMPONENT16 = 0x81A5;
    int STENCIL_INDEX = 0x1901;
    int STENCIL_INDEX8 = 0x8D48;
    int DEPTH_STENCIL = 0x84F9;
    int RENDERBUFFER_WIDTH = 0x8D42;
    int RENDERBUFFER_HEIGHT = 0x8D43;
    int RENDERBUFFER_INTERNAL_FORMAT = 0x8D44;
    int RENDERBUFFER_RED_SIZE = 0x8D50;
    int RENDERBUFFER_GREEN_SIZE = 0x8D51;
    int RENDERBUFFER_BLUE_SIZE = 0x8D52;
    int RENDERBUFFER_ALPHA_SIZE = 0x8D53;
    int RENDERBUFFER_DEPTH_SIZE = 0x8D54;
    int RENDERBUFFER_STENCIL_SIZE = 0x8D55;
    int FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE = 0x8CD0;
    int FRAMEBUFFER_ATTACHMENT_OBJECT_NAME = 0x8CD1;
    int FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL = 0x8CD2;
    int FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE = 0x8CD3;
    int COLOR_ATTACHMENT0 = 0x8CE0;
    int DEPTH_ATTACHMENT = 0x8D00;
    int STENCIL_ATTACHMENT = 0x8D20;
    int DEPTH_STENCIL_ATTACHMENT = 0x821A;
    int NONE = 0;
    int FRAMEBUFFER_COMPLETE = 0x8CD5;
    int FRAMEBUFFER_INCOMPLETE_ATTACHMENT = 0x8CD6;
    int FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT = 0x8CD7;
    int FRAMEBUFFER_INCOMPLETE_DIMENSIONS = 0x8CD9;
    int FRAMEBUFFER_UNSUPPORTED = 0x8CDD;
    int FRAMEBUFFER_BINDING = 0x8CA6;
    int RENDERBUFFER_BINDING = 0x8CA7;
    int MAX_RENDERBUFFER_SIZE = 0x84E8;
    int INVALID_FRAMEBUFFER_OPERATION = 0x0506;
    /* WebGL-specific enums (not present in desktop OpenGL ES) */
    int UNPACK_FLIP_Y_WEBGL = 0x9240;
    int UNPACK_PREMULTIPLY_ALPHA_WEBGL = 0x9241;
    int CONTEXT_LOST_WEBGL = 0x9242;
    int UNPACK_COLORSPACE_CONVERSION_WEBGL = 0x9243;
    int BROWSER_DEFAULT_WEBGL = 0x9244;

    /* Context state */
    @JSProperty
    HTMLCanvasElement getCanvas();
    @JSProperty
    int getDrawingBufferWidth();
    @JSProperty
    int getDrawingBufferHeight();
    WebGLContextAttributes getContextAttributes();
    boolean isContextLost();
    JSStringArrayReader getSupportedExtensions();
    // @JSMethod overloads below map a second Java signature onto the same
    // underlying JavaScript method under a different Java name.
    @JSMethod("getSupportedExtensions")
    String[] getSupportedExtensionArray();
    JSObject getExtension(String name);
    /* GL entry points, in the order of the WebGL 1.0 IDL */
    void activeTexture(int texture);
    void attachShader(WebGLProgram program, WebGLShader shader);
    void bindAttribLocation(WebGLProgram program, int index, String name);
    void bindBuffer(int target, WebGLBuffer buffer);
    void bindFramebuffer(int target, WebGLFramebuffer framebuffer);
    void bindRenderbuffer(int target, WebGLRenderbuffer renderbuffer);
    void bindTexture(int target, WebGLTexture texture);
    void blendColor(float red, float green, float blue, float alpha);
    void blendEquation(int mode);
    void blendEquationSeparate(int modeRGB, int modeAlpha);
    void blendFunc(int sfactor, int dfactor);
    void blendFuncSeparate(int srcRGB, int dstRGB, int srcAlpha, int dstAlpha);
    void bufferData(int target, int size, int usage);
    void bufferData(int target, ArrayBufferView data, int usage);
    void bufferData(int target, ArrayBuffer data, int usage);
    void bufferSubData(int target, int offset, ArrayBufferView data);
    void bufferSubData(int target, int offset, ArrayBuffer data);
    int checkFramebufferStatus(int target);
    void clear(int mask);
    void clearColor(float red, float green, float blue, float alpha);
    void clearDepth(float depth);
    void clearStencil(int s);
    void colorMask(boolean red, boolean green, boolean blue, boolean alpha);
    void compileShader(WebGLShader shader);
    void compressedTexImage2D(int target, int level, int internalformat, int width, int height, int border,
            ArrayBufferView data);
    void compressedTexSubImage2D(int target, int level, int xoffset, int yoffset, int width, int height, int format,
            ArrayBufferView data);
    void copyTexImage2D(int target, int level, int internalformat, int x, int y, int width, int height, int border);
    void copyTexSubImage2D(int target, int level, int xoffset, int yoffset, int x, int y, int width, int height);
    WebGLBuffer createBuffer();
    WebGLFramebuffer createFramebuffer();
    WebGLProgram createProgram();
    WebGLRenderbuffer createRenderbuffer();
    WebGLShader createShader(int type);
    WebGLTexture createTexture();
    void cullFace(int mode);
    void deleteBuffer(WebGLBuffer buffer);
    void deleteFramebuffer(WebGLFramebuffer framebuffer);
    void deleteProgram(WebGLProgram program);
    void deleteRenderbuffer(WebGLRenderbuffer renderbuffer);
    void deleteShader(WebGLShader shader);
    void deleteTexture(WebGLTexture texture);
    void depthFunc(int func);
    void depthMask(boolean flag);
    void depthRange(float zNear, float zFar);
    void detachShader(WebGLProgram program, WebGLShader shader);
    void disable(int cap);
    void disableVertexAttribArray(int index);
    void drawArrays(int mode, int first, int count);
    void drawElements(int mode, int count, int type, int offset);
    void enable(int cap);
    void enableVertexAttribArray(int index);
    void finish();
    void flush();
    void framebufferRenderbuffer(int target, int attachment, int renderbuffertarget, WebGLRenderbuffer renderbuffer);
    void framebufferTexture2D(int target, int attachment, int textarget, WebGLTexture texture, int level);
    void frontFace(int mode);
    void generateMipmap(int target);
    WebGLActiveInfo getActiveAttrib(WebGLProgram program, int index);
    WebGLActiveInfo getActiveUniform(WebGLProgram program, int index);
    JSArrayReader<WebGLShader> getAttachedShaders(WebGLProgram program);
    @JSMethod("getAttachedShaders")
    WebGLShader[] getAttachedShadersArray(WebGLProgram program);
    int getAttribLocation(WebGLProgram program, String name);
    JSObject getBufferParameter(int target, int pname);
    // getParameter returns different JS types depending on pname; the typed
    // variants below let callers pick the expected Java return type.
    JSObject getParameter(int pname);
    @JSMethod("getParameter")
    int getParameteri(int pname);
    @JSMethod("getParameter")
    String getParameterString(int pname);
    @JSMethod("getParameter")
    float getParameterf(int pname);
    int getError();
    JSObject getFramebufferAttachmentParameter(int target, int attachment, int pname);
    JSObject getProgramParameter(WebGLProgram program, int pname);
    @JSMethod("getProgramParameter")
    boolean getProgramParameterb(WebGLProgram program, int pname);
    @JSMethod("getProgramParameter")
    int getProgramParameteri(WebGLProgram program, int pname);
    String getProgramInfoLog(WebGLProgram program);
    JSObject getRenderbufferParameter(int target, int pname);
    JSObject getShaderParameter(WebGLShader shader, int pname);
    @JSMethod("getShaderParameter")
    boolean getShaderParameterb(WebGLShader shader, int pname);
    @JSMethod("getShaderParameter")
    int getShaderParameteri(WebGLShader shader, int pname);
    WebGLShaderPrecisionFormat getShaderPrecisionFormat(int shadertype, int precisiontype);
    String getShaderInfoLog(WebGLShader shader);
    String getShaderSource(WebGLShader shader);
    JSObject getTexParameter(int target, int pname);
    JSObject getUniform(WebGLProgram program, WebGLUniformLocation location);
    WebGLUniformLocation getUniformLocation(WebGLProgram program, String name);
    JSObject getVertexAttrib(int index, int pname);
    int getVertexAttribOffset(int index, int pname);
    void hint(int target, int mode);
    boolean isBuffer(WebGLBuffer buffer);
    boolean isEnabled(int cap);
    boolean isFramebuffer(WebGLFramebuffer framebuffer);
    boolean isProgram(WebGLProgram program);
    boolean isRenderbuffer(WebGLRenderbuffer renderbuffer);
    boolean isShader(WebGLShader shader);
    boolean isTexture(WebGLTexture texture);
    void lineWidth(float width);
    void linkProgram(WebGLProgram program);
    void pixelStorei(int pname, int param);
    void polygonOffset(float factor, float units);
    void readPixels(int x, int y, int width, int height, int format, int type, ArrayBufferView pixels);
    void renderbufferStorage(int target, int internalformat, int width, int height);
    void sampleCoverage(float value, boolean invert);
    void scissor(int x, int y, int width, int height);
    void shaderSource(WebGLShader shader, String source);
    void stencilFunc(int func, int ref, int mask);
    void stencilFuncSeparate(int face, int func, int ref, int mask);
    void stencilMask(int mask);
    void stencilMaskSeparate(int face, int mask);
    void stencilOp(int fail, int zfail, int zpass);
    void stencilOpSeparate(int face, int fail, int zfail, int zpass);
    void texImage2D(int target, int level, int internalformat, int width, int height, int border, int format,
            int type, ArrayBufferView pixels);
    void texImage2D(int target, int level, int internalformat, int format, int type, ImageData pixels);
    void texImage2D(int target, int level, int internalformat, int format, int type, HTMLImageElement image);
    void texImage2D(int target, int level, int internalformat, int format, int type, HTMLCanvasElement canvas);
    //void texImage2D(int target, int level, int internalformat, int format, int type, HTMLVideoElement video);
    void texParameterf(int target, int pname, float param);
    void texParameteri(int target, int pname, int param);
    void texSubImage2D(int target, int level, int xoffset, int yoffset, int width, int height, int format, int type,
            ArrayBufferView pixels);
    void texSubImage2D(int target, int level, int xoffset, int yoffset, int format, int type, ImageData pixels);
    void texSubImage2D(int target, int level, int xoffset, int yoffset, int format, int type, HTMLImageElement image);
    void texSubImage2D(int target, int level, int xoffset, int yoffset, int format, int type, HTMLCanvasElement canvas);
    //void texSubImage2D(int target, int level, int xoffset, int yoffset, int format, int type, HTMLVideoElement video);
    void uniform1f(WebGLUniformLocation location, float x);
    void uniform1fv(WebGLUniformLocation location, Float32Array v);
    void uniform1fv(WebGLUniformLocation location, JSDoubleArrayReader v);
    void uniform1fv(WebGLUniformLocation location, float[] v);
    void uniform1i(WebGLUniformLocation location, int x);
    void uniform1iv(WebGLUniformLocation location, Int32Array v);
    void uniform1iv(WebGLUniformLocation location, JSIntArrayReader v);
    void uniform1iv(WebGLUniformLocation location, int[] v);
    void uniform2f(WebGLUniformLocation location, float x, float y);
    void uniform2fv(WebGLUniformLocation location, Float32Array v);
    void uniform2fv(WebGLUniformLocation location, JSDoubleArrayReader v);
    void uniform2fv(WebGLUniformLocation location, float[] v);
    void uniform2i(WebGLUniformLocation location, int x, int y);
    void uniform2iv(WebGLUniformLocation location, Int32Array v);
    void uniform2iv(WebGLUniformLocation location, JSIntArrayReader v);
    void uniform2iv(WebGLUniformLocation location, int[] v);
    void uniform3f(WebGLUniformLocation location, float x, float y, float z);
    void uniform3fv(WebGLUniformLocation location, Float32Array v);
    void uniform3fv(WebGLUniformLocation location, JSDoubleArrayReader v);
    void uniform3fv(WebGLUniformLocation location, float[] v);
    void uniform3i(WebGLUniformLocation location, int x, int y, int z);
    void uniform3iv(WebGLUniformLocation location, Int32Array v);
    void uniform3iv(WebGLUniformLocation location, JSIntArrayReader v);
    void uniform3iv(WebGLUniformLocation location, int[] v);
    void uniform4f(WebGLUniformLocation location, float x, float y, float z, float w);
    void uniform4fv(WebGLUniformLocation location, Float32Array v);
    void uniform4fv(WebGLUniformLocation location, JSDoubleArrayReader v);
    void uniform4fv(WebGLUniformLocation location, float[] v);
    void uniform4i(WebGLUniformLocation location, int x, int y, int z, int w);
    void uniform4iv(WebGLUniformLocation location, Int32Array v);
    void uniform4iv(WebGLUniformLocation location, JSIntArrayReader v);
    void uniform4iv(WebGLUniformLocation location, int[] v);
    void uniformMatrix2fv(WebGLUniformLocation location, boolean transpose, Float32Array value);
    void uniformMatrix2fv(WebGLUniformLocation location, boolean transpose, JSDoubleArrayReader value);
    void uniformMatrix2fv(WebGLUniformLocation location, boolean transpose, float[] value);
    void uniformMatrix3fv(WebGLUniformLocation location, boolean transpose, Float32Array value);
    void uniformMatrix3fv(WebGLUniformLocation location, boolean transpose, JSDoubleArrayReader value);
    void uniformMatrix3fv(WebGLUniformLocation location, boolean transpose, float[] value);
    void uniformMatrix4fv(WebGLUniformLocation location, boolean transpose, Float32Array value);
    void uniformMatrix4fv(WebGLUniformLocation location, boolean transpose, JSDoubleArrayReader value);
    void uniformMatrix4fv(WebGLUniformLocation location, boolean transpose, float[] value);
    void useProgram(WebGLProgram program);
    void validateProgram(WebGLProgram program);
    void vertexAttrib1f(int indx, float x);
    void vertexAttrib1fv(int indx, Float32Array values);
    void vertexAttrib1fv(int indx, JSDoubleArrayReader values);
    void vertexAttrib1fv(int indx, float[] values);
    void vertexAttrib2f(int indx, float x, float y);
    void vertexAttrib2fv(int indx, Float32Array values);
    void vertexAttrib2fv(int indx, JSDoubleArrayReader values);
    void vertexAttrib2fv(int indx, float[] values);
    void vertexAttrib3f(int indx, float x, float y, float z);
    void vertexAttrib3fv(int indx, Float32Array values);
    void vertexAttrib3fv(int indx, JSDoubleArrayReader values);
    void vertexAttrib3fv(int indx, float[] values);
    void vertexAttrib4f(int indx, float x, float y, float z, float w);
    void vertexAttrib4fv(int indx, Float32Array values);
    void vertexAttrib4fv(int indx, JSDoubleArrayReader values);
    void vertexAttrib4fv(int indx, float[] values);
    void vertexAttribPointer(int indx, int size, int type, boolean normalized, int stride, int offset);
    void viewport(int x, int y, int width, int height);
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2012-2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.vmextractor;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.GeneralFilter;
import org.sleuthkit.autopsy.casemodule.ImageDSProcessor;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
/**
* An ingest module that extracts virtual machine files and adds them to a case
* as data sources.
*/
@NbBundle.Messages({"# {0} - output directory name", "VMExtractorIngestModule.cannotCreateOutputDir.message=Unable to create output directory: {0}."
})
final class VMExtractorIngestModule extends DataSourceIngestModuleAdapter {
private static final Logger logger = Logger.getLogger(VMExtractorIngestModule.class.getName());
private IngestJobContext context;
private Path ingestJobOutputDir;
private String parentDeviceId;
private String parentTimeZone;
private final HashMap<String, String> imageFolderToOutputFolder = new HashMap<>();
private int folderId = 0;
@Messages({"# {0} - data source name", "deviceIdQueryErrMsg=Data source {0} missing Device ID"})
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
long dataSourceObjId = context.getDataSource().getId();
try {
Case currentCase = Case.getCurrentCase();
SleuthkitCase caseDb = currentCase.getSleuthkitCase();
DataSource dataSource = caseDb.getDataSource(dataSourceObjId);
parentDeviceId = dataSource.getDeviceId();
parentTimeZone = dataSource.getTimeZone();
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss");
String timeStamp = dateFormat.format(Calendar.getInstance().getTime());
String ingestJobOutputDirName = context.getDataSource().getName() + "_" + context.getDataSource().getId() + "_" + timeStamp;
ingestJobOutputDirName = ingestJobOutputDirName.replace(':', '_');
ingestJobOutputDir = Paths.get(Case.getCurrentCase().getModuleDirectory(), VMExtractorIngestModuleFactory.getModuleName(), ingestJobOutputDirName);
// create module output folder to write extracted virtual machine files to
Files.createDirectories(ingestJobOutputDir);
} catch (IOException | SecurityException | UnsupportedOperationException ex) {
throw new IngestModule.IngestModuleException(Bundle.VMExtractorIngestModule_cannotCreateOutputDir_message(ex.getLocalizedMessage()), ex);
} catch (TskDataException | TskCoreException ex) {
throw new IngestModule.IngestModuleException(Bundle.deviceIdQueryErrMsg(context.getDataSource().getName()), ex);
}
}
    /**
     * Processes a data source: finds supported virtual machine files,
     * extracts them to per-source-folder output directories on disk, and
     * then queues each extracted VM image for ingest as a new data source.
     * Honors ingest cancellation between files and between folders.
     *
     * @param dataSource  The data source being ingested.
     * @param progressBar Progress reporting handle for this module.
     *
     * @return ERROR if the initial case-database query fails; OK otherwise
     *         (per-file extraction/ingest failures are logged and notified
     *         but do not fail the job).
     */
    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        String outputFolderForThisVM;
        List<AbstractFile> vmFiles;
        // Configure and start progress bar - looking for VM files
        progressBar.progress(NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.searchingImage.message"));
        // Not sure how long it will take for search to complete.
        progressBar.switchToIndeterminate();
        logger.log(Level.INFO, "Looking for virtual machine files in data source {0}", dataSource.getName()); //NON-NLS
        try {
            // look for all VM files
            vmFiles = findVirtualMachineFiles(dataSource);
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Error querying case database", ex); //NON-NLS
            return ProcessResult.ERROR;
        }
        if (vmFiles.isEmpty()) {
            // no VM files found
            logger.log(Level.INFO, "No virtual machine files found in data source {0}", dataSource.getName()); //NON-NLS
            return ProcessResult.OK;
        }
        // display progress for saving each VM file to disk
        progressBar.switchToDeterminate(vmFiles.size());
        progressBar.progress(NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.exportingToDisk.message"));
        int numFilesSaved = 0;
        // Phase 1: copy every VM file out of the image, grouping files that
        // share a parent folder into the same numbered output folder.
        for (AbstractFile vmFile : vmFiles) {
            if (context.dataSourceIngestIsCancelled()) {
                break;
            }
            logger.log(Level.INFO, "Saving virtual machine file {0} to disk", vmFile.getName()); //NON-NLS
            // get vmFolderPathInsideTheImage to the folder where VM is located
            String vmFolderPathInsideTheImage = vmFile.getParentPath();
            // check if the vmFolderPathInsideTheImage is already in hashmap
            if (imageFolderToOutputFolder.containsKey(vmFolderPathInsideTheImage)) {
                // if it is then we have already created output folder to write out all VM files in this parent folder
                outputFolderForThisVM = imageFolderToOutputFolder.get(vmFolderPathInsideTheImage);
            } else {
                // if not - create output folder to write out VM files (can use any unique ID or number for folder name)
                folderId++;
                outputFolderForThisVM = Paths.get(ingestJobOutputDir.toString(), Integer.toString(folderId)).toString();
                // add vmFolderPathInsideTheImage to hashmap
                imageFolderToOutputFolder.put(vmFolderPathInsideTheImage, outputFolderForThisVM);
            }
            // write the vm file to output folder
            try {
                writeVirtualMachineToDisk(vmFile, outputFolderForThisVM);
            } catch (Exception ex) {
                // NOTE(review): deliberately broad catch — a single failed
                // extraction is reported but must not abort the remaining
                // files; consider narrowing to the concrete thrown types.
                logger.log(Level.SEVERE, "Failed to write virtual machine file " + vmFile.getName() + " to folder " + outputFolderForThisVM, ex); //NON-NLS
                MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.msgNotify.failedExtractVM.title.txt"),
                        NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.msgNotify.failedExtractVM.msg.txt", vmFile.getName()));
            }
            // Update progress bar
            numFilesSaved++;
            progressBar.progress(NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.exportingToDisk.message"), numFilesSaved);
        }
        logger.log(Level.INFO, "Finished saving virtual machine files to disk"); //NON-NLS
        // update progress bar
        progressBar.switchToDeterminate(imageFolderToOutputFolder.size());
        progressBar.progress(NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.queuingIngestJobs.message"));
        // this is for progress bar purposes because at this point we only know in advance how many job folders to ingest, not how many data sources.
        int numJobsQueued = 0;
        // Phase 2: start processing output folders after we are done writing out all vm files
        for (String folder : imageFolderToOutputFolder.values()) {
            if (context.dataSourceIngestIsCancelled()) {
                break;
            }
            List<String> vmFilesToIngest = VirtualMachineFinder.identifyVirtualMachines(Paths.get(folder));
            for (String file : vmFilesToIngest) {
                try {
                    logger.log(Level.INFO, "Ingesting virtual machine file {0} in folder {1}", new Object[]{file, folder}); //NON-NLS
                    // for extracted virtual machines there is no manifest XML file to read data source ID from so use parent data source ID.
                    // ingest the data sources
                    ingestVirtualMachineImage(Paths.get(folder, file));
                    logger.log(Level.INFO, "Ingest complete for virtual machine file {0} in folder {1}", new Object[]{file, folder}); //NON-NLS
                } catch (InterruptedException ex) {
                    logger.log(Level.INFO, "Interrupted while ingesting virtual machine file " + file + " in folder " + folder, ex); //NON-NLS
                } catch (IOException ex) {
                    logger.log(Level.SEVERE, "Failed to ingest virtual machine file " + file + " in folder " + folder, ex); //NON-NLS
                    MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.msgNotify.failedIngestVM.title.txt"),
                            NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.msgNotify.failedIngestVM.msg.txt", file));
                }
            }
            // Update progress bar
            numJobsQueued++;
            progressBar.progress(NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.queuingIngestJobs.message"), numJobsQueued);
        }
        logger.log(Level.INFO, "VMExtractorIngestModule completed processing of data source {0}", dataSource.getName()); //NON-NLS
        return ProcessResult.OK;
    }
/**
 * Scans a data source for files whose names end in one of the supported
 * virtual machine image extensions.
 *
 * @param dataSource The data source to search.
 *
 * @return All matching files; empty if the data source contains none.
 *
 * @throws TskCoreException if there is a problem querying the case
 *                          database.
 */
private static List<AbstractFile> findVirtualMachineFiles(Content dataSource) throws TskCoreException {
    List<AbstractFile> matches = new ArrayList<>();
    for (String extension : GeneralFilter.VIRTUAL_MACHINE_EXTS) {
        // Build a SQL LIKE pattern such as "%.vmdk" to match any file with this extension.
        String likePattern = "%".concat(extension); //NON-NLS
        matches.addAll(Case.getCurrentCase().getServices().getFileManager().findFiles(dataSource, likePattern));
    }
    return matches;
}
/**
 * Writes out an abstract file to a specified output folder.
 *
 * @param vmFile                Abstract file to write to disk.
 * @param outputFolderForThisVM Absolute path to output folder.
 *
 * @throws IOException if the output folder cannot be created or the file
 *                     cannot be written.
 */
private void writeVirtualMachineToDisk(AbstractFile vmFile, String outputFolderForThisVM) throws IOException {
    // TODO: check available disk space first? See IngestMonitor.getFreeSpace()
    /*
     * Make sure the output folder exists. mkdirs() reports failure via its
     * return value rather than an exception, so check it explicitly here
     * instead of failing later with a confusing error when the file is
     * written. isDirectory() is rechecked to tolerate the race where
     * another thread created the folder between the two calls.
     */
    File destinationFolder = Paths.get(outputFolderForThisVM).toFile();
    if (!destinationFolder.mkdirs() && !destinationFolder.isDirectory()) {
        throw new IOException("Failed to create output directory " + outputFolderForThisVM); //NON-NLS
    }
    /*
     * Write the virtual machine file to disk, honoring data source ingest
     * cancellation requests.
     */
    File localFile = Paths.get(outputFolderForThisVM, vmFile.getName()).toFile();
    ContentUtils.writeToFile(vmFile, localFile, context::dataSourceIngestIsCancelled);
}
/**
 * Add a virtual machine file to the case as a data source and analyze it
 * with the ingest modules.
 *
 * @param vmFile A virtual machine file.
 *
 * @throws InterruptedException if the wait for the data source processor
 *                              callback is interrupted.
 * @throws IOException          declared for callers; no direct throw is
 *                              visible in this method body — presumably
 *                              kept for interface symmetry, TODO confirm.
 */
private void ingestVirtualMachineImage(Path vmFile) throws InterruptedException, IOException {
    /*
     * Try to add the virtual machine file to the case as a data source.
     */
    UUID taskId = UUID.randomUUID();
    Case.getCurrentCase().notifyAddingDataSource(taskId);
    ImageDSProcessor dataSourceProcessor = new ImageDSProcessor();
    AddDataSourceCallback dspCallback = new AddDataSourceCallback(vmFile);
    synchronized (this) {
        dataSourceProcessor.run(parentDeviceId, vmFile.toString(), parentTimeZone, false, new AddDataSourceProgressMonitor(), dspCallback);
        /*
         * Block the ingest thread until the data source processor finishes;
         * AddDataSourceCallback.done() calls notify() on this object.
         * NOTE(review): wait() is not guarded by a condition loop, so a
         * spurious wakeup would let this thread proceed before the data
         * source processor is actually done — confirm whether a done-flag
         * guard is needed here.
         */
        this.wait();
    }
    /*
     * If the image was added, analyze it with the ingest modules for this
     * ingest context.
     */
    if (!dspCallback.vmDataSources.isEmpty()) {
        // Only the first content object is announced; see the matching
        // behavior in AddDataSourceCallback.done().
        Case.getCurrentCase().notifyDataSourceAdded(dspCallback.vmDataSources.get(0), taskId);
        List<Content> dataSourceContent = new ArrayList<>(dspCallback.vmDataSources);
        // Reuse the ingest settings of the execution context this module is running in.
        IngestJobSettings ingestJobSettings = new IngestJobSettings(context.getExecutionContext());
        for (String warning : ingestJobSettings.getWarnings()) {
            logger.log(Level.WARNING, String.format("Ingest job settings warning for virtual machine file %s : %s", vmFile.toString(), warning)); //NON-NLS
        }
        IngestServices.getInstance().postMessage(IngestMessage.createMessage(IngestMessage.MessageType.INFO,
                VMExtractorIngestModuleFactory.getModuleName(),
                NbBundle.getMessage(this.getClass(), "VMExtractorIngestModule.addedVirtualMachineImage.message", vmFile.toString())));
        IngestManager.getInstance().queueIngestJob(dataSourceContent, ingestJobSettings);
    } else {
        // Nothing was added; tell the case so listeners can clean up.
        Case.getCurrentCase().notifyFailedAddingDataSource(taskId);
    }
}
/**
 * A do nothing data source processor progress monitor. Progress for this
 * module is reported through the module-level progress bar instead, so
 * every callback below is intentionally empty.
 */
private static final class AddDataSourceProgressMonitor implements DataSourceProcessorProgressMonitor {

    @Override
    public void setIndeterminate(final boolean indeterminate) {
        // Intentionally a no-op.
    }

    @Override
    public void setProgress(final int progress) {
        // Intentionally a no-op.
    }

    @Override
    public void setProgressText(final String text) {
        // Intentionally a no-op.
    }
}
/**
 * A callback for the data source processor that captures the content
 * objects for the data source and unblocks the ingest thread.
 */
private final class AddDataSourceCallback extends DataSourceProcessorCallback {

    // The virtual machine file being added; used only for log context.
    private final Path vmFile;
    // Content objects captured from the processor; at most one is added
    // (see done()). Read by ingestVirtualMachineImage() after the wait.
    private final List<Content> vmDataSources;

    /**
     * Constructs a callback for the data source processor.
     *
     * @param vmFile The virtual machine file to be added as a data source.
     */
    private AddDataSourceCallback(Path vmFile) {
        this.vmFile = vmFile;
        vmDataSources = new ArrayList<>();
    }

    /**
     * Records the processor result and wakes the ingest thread blocked in
     * ingestVirtualMachineImage().
     *
     * @param result  Overall outcome; CRITICAL_ERRORS raises log severity.
     * @param errList Error strings from the processor, logged one by one.
     * @param content Content objects produced; only the first is kept.
     */
    @Override
    public void done(DataSourceProcessorCallback.DataSourceProcessorResult result, List<String> errList, List<Content> content) {
        for (String error : errList) {
            String logMessage = String.format("Data source processor error for virtual machine file %s: %s", vmFile.toString(), error); //NON-NLS
            // Critical errors are SEVERE; anything else is only a warning.
            if (DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS == result) {
                logger.log(Level.SEVERE, logMessage);
            } else {
                logger.log(Level.WARNING, logMessage);
            }
        }
        /*
         * Save a reference to the content object so it can be used to
         * create a new ingest job.
         */
        if (!content.isEmpty()) {
            vmDataSources.add(content.get(0));
        }
        /*
         * Unblock the ingest thread. The lock object must match the one
         * ingestVirtualMachineImage() waits on (the module instance).
         */
        synchronized (VMExtractorIngestModule.this) {
            VMExtractorIngestModule.this.notify();
        }
    }

    /**
     * EDT variant of the callback; simply delegates to done().
     */
    @Override
    public void doneEDT(DataSourceProcessorResult result, List<String> errList, List<Content> newContents) {
        done(result, errList, newContents);
    }
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/appengine/v1/appengine.proto
package com.google.appengine.v1;
/**
* <pre>
* Request message for `Services.DeleteService`.
* </pre>
*
* Protobuf type {@code google.appengine.v1.DeleteServiceRequest}
*/
public final class DeleteServiceRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.appengine.v1.DeleteServiceRequest)
DeleteServiceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteServiceRequest.newBuilder() to construct.
private DeleteServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteServiceRequest() {
name_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Parsing constructor: decodes a DeleteServiceRequest from a protobuf
 * wire-format stream, collecting unrecognized fields into unknownFields.
 * Generated code — do not hand-edit the logic.
 */
private DeleteServiceRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  // Unused for this message (no optional-tracking needed); emitted by the generator.
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // Note: `default` appears before `case 10`; Java switch dispatch is
      // unaffected by case ordering, so this is harmless.
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        default: {
          // Unknown tag: preserve it in unknownFields; stop if it cannot be parsed.
          if (!parseUnknownFieldProto3(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // Tag 10 = field 1 (name), wire type 2 (length-delimited string).
          java.lang.String s = input.readStringRequireUtf8();

          name_ = s;
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    // Wrap plain I/O failures so callers see a protobuf parse failure.
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always freeze whatever was parsed, even on error paths.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.appengine.v1.AppengineProto.internal_static_google_appengine_v1_DeleteServiceRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.appengine.v1.AppengineProto.internal_static_google_appengine_v1_DeleteServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.appengine.v1.DeleteServiceRequest.class, com.google.appengine.v1.DeleteServiceRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
 * <pre>
 * Name of the resource requested. Example: `apps/myapp/services/default`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @return the name field, decoding from UTF-8 bytes on first access.
 */
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    // Already decoded (or set as a String); return the cached value.
    return (java.lang.String) ref;
  } else {
    // Field still holds the raw ByteString from parsing; decode once and
    // cache the String back into name_ for subsequent calls.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
* <pre>
* Name of the resource requested. Example: `apps/myapp/services/default`.
* </pre>
*
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.appengine.v1.DeleteServiceRequest)) {
    // Defer to the superclass for non-matching types (generated convention).
    return super.equals(obj);
  }
  com.google.appengine.v1.DeleteServiceRequest other = (com.google.appengine.v1.DeleteServiceRequest) obj;

  // Messages are equal iff the name field and any unknown fields match.
  boolean result = true;
  result = result && getName()
      .equals(other.getName());
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.appengine.v1.DeleteServiceRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.appengine.v1.DeleteServiceRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.appengine.v1.DeleteServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.appengine.v1.DeleteServiceRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for `Services.DeleteService`.
* </pre>
*
* Protobuf type {@code google.appengine.v1.DeleteServiceRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.appengine.v1.DeleteServiceRequest)
com.google.appengine.v1.DeleteServiceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.appengine.v1.AppengineProto.internal_static_google_appengine_v1_DeleteServiceRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.appengine.v1.AppengineProto.internal_static_google_appengine_v1_DeleteServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.appengine.v1.DeleteServiceRequest.class, com.google.appengine.v1.DeleteServiceRequest.Builder.class);
}
// Construct using com.google.appengine.v1.DeleteServiceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
name_ = "";
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.appengine.v1.AppengineProto.internal_static_google_appengine_v1_DeleteServiceRequest_descriptor;
}
public com.google.appengine.v1.DeleteServiceRequest getDefaultInstanceForType() {
return com.google.appengine.v1.DeleteServiceRequest.getDefaultInstance();
}
public com.google.appengine.v1.DeleteServiceRequest build() {
com.google.appengine.v1.DeleteServiceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.appengine.v1.DeleteServiceRequest buildPartial() {
com.google.appengine.v1.DeleteServiceRequest result = new com.google.appengine.v1.DeleteServiceRequest(this);
result.name_ = name_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.appengine.v1.DeleteServiceRequest) {
return mergeFrom((com.google.appengine.v1.DeleteServiceRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.appengine.v1.DeleteServiceRequest other) {
if (other == com.google.appengine.v1.DeleteServiceRequest.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.appengine.v1.DeleteServiceRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.appengine.v1.DeleteServiceRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
* <pre>
* Name of the resource requested. Example: `apps/myapp/services/default`.
* </pre>
*
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Name of the resource requested. Example: `apps/myapp/services/default`.
* </pre>
*
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Name of the resource requested. Example: `apps/myapp/services/default`.
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
* <pre>
* Name of the resource requested. Example: `apps/myapp/services/default`.
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <pre>
* Name of the resource requested. Example: `apps/myapp/services/default`.
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.appengine.v1.DeleteServiceRequest)
}
// @@protoc_insertion_point(class_scope:google.appengine.v1.DeleteServiceRequest)
private static final com.google.appengine.v1.DeleteServiceRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.appengine.v1.DeleteServiceRequest();
}
public static com.google.appengine.v1.DeleteServiceRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DeleteServiceRequest>
PARSER = new com.google.protobuf.AbstractParser<DeleteServiceRequest>() {
public DeleteServiceRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DeleteServiceRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<DeleteServiceRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteServiceRequest> getParserForType() {
return PARSER;
}
public com.google.appengine.v1.DeleteServiceRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
@Generated("by GAPIC")
@BetaApi
/**
* Request object for method compute.regionInstanceGroupManagers.insert. Creates a managed instance
* group using the information that you specify in the request. After the group is created,
* instances in the group are created using the specified instance template. This operation is
* marked as DONE when the group is created even if the instances in the group have not yet been
* created. You must separately verify the status of the individual instances with the
* listmanagedinstances method.
*
* <p>A regional managed instance group can contain up to 2000 instances.
*/
public final class InsertRegionInstanceGroupManagerHttpRequest implements ApiMessage {
private final String access_token;
private final String callback;
private final String fields;
private final InstanceGroupManager instanceGroupManagerResource;
private final String key;
private final String prettyPrint;
private final String quotaUser;
private final String region;
private final String requestId;
private final String userIp;
private InsertRegionInstanceGroupManagerHttpRequest() {
this.access_token = null;
this.callback = null;
this.fields = null;
this.instanceGroupManagerResource = null;
this.key = null;
this.prettyPrint = null;
this.quotaUser = null;
this.region = null;
this.requestId = null;
this.userIp = null;
}
private InsertRegionInstanceGroupManagerHttpRequest(
String access_token,
String callback,
String fields,
InstanceGroupManager instanceGroupManagerResource,
String key,
String prettyPrint,
String quotaUser,
String region,
String requestId,
String userIp) {
this.access_token = access_token;
this.callback = callback;
this.fields = fields;
this.instanceGroupManagerResource = instanceGroupManagerResource;
this.key = key;
this.prettyPrint = prettyPrint;
this.quotaUser = quotaUser;
this.region = region;
this.requestId = requestId;
this.userIp = userIp;
}
@Override
public Object getFieldValue(String fieldName) {
  // Look up a request field by its wire name; unknown (or null) names map
  // to null, matching the ApiMessage contract.
  if (fieldName == null) {
    return null;
  }
  switch (fieldName) {
    case "access_token":
      return access_token;
    case "callback":
      return callback;
    case "fields":
      return fields;
    case "instanceGroupManagerResource":
      return instanceGroupManagerResource;
    case "key":
      return key;
    case "prettyPrint":
      return prettyPrint;
    case "quotaUser":
      return quotaUser;
    case "region":
      return region;
    case "requestId":
      return requestId;
    case "userIp":
      return userIp;
    default:
      return null;
  }
}
@Nullable
@Override
public InstanceGroupManager getApiMessageRequestBody() {
return instanceGroupManagerResource;
}
/**
 * The fields that should be serialized (even if they have empty values). If the containing
 * message object has a non-null fieldmask, then all the fields in the field mask (and only those
 * fields in the field mask) will be serialized. If the containing object does not have a
 * fieldmask, then only non-empty fields will be serialized.
 *
 * <p>Always {@code null} for this request type (no field mask support).
 */
@Nullable
@Override
public List<String> getFieldMask() {
  return null;
}
/** OAuth 2.0 token for the current user. */
public String getAccessToken() {
return access_token;
}
/** Name of the JavaScript callback function that handles the response. */
public String getCallback() {
return callback;
}
/** Selector specifying a subset of fields to include in the response. */
public String getFields() {
return fields;
}
/**
* An Instance Group Manager resource. (== resource_for beta.instanceGroupManagers ==) (==
* resource_for v1.instanceGroupManagers ==) (== resource_for beta.regionInstanceGroupManagers ==)
* (== resource_for v1.regionInstanceGroupManagers ==)
*/
public InstanceGroupManager getInstanceGroupManagerResource() {
return instanceGroupManagerResource;
}
/** API key. Required unless you provide an OAuth 2.0 token. */
public String getKey() {
return key;
}
/** Returns response with indentations and line breaks. */
public String getPrettyPrint() {
return prettyPrint;
}
/** Alternative to userIp. */
public String getQuotaUser() {
return quotaUser;
}
/**
* Name of the region scoping this request. It must have the format
* `{project}/regions/{region}/instanceGroupManagers`. \`{region}\` must start with a letter, and
* contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), * underscores
* (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent * signs (\`%\`). It must
* be between 3 and 255 characters in length, and it * must not start with \`"goog"\`.
*/
public String getRegion() {
return region;
}
/**
* An optional request ID to identify requests. Specify a unique request ID so that if you must
* retry your request, the server will know to ignore the request if it has already been
* completed.
*
* <p>For example, consider a situation where you make an initial request and the request times
* out. If you make the request again with the same request ID, the server can check if original
* operation with the same request ID was received, and if so, will ignore the second request.
* This prevents clients from accidentally creating duplicate commitments.
*
* <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
* (00000000-0000-0000-0000-000000000000).
*/
public String getRequestId() {
return requestId;
}
/** IP address of the end user for whom the API call is being made. */
public String getUserIp() {
return userIp;
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(InsertRegionInstanceGroupManagerHttpRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
public static InsertRegionInstanceGroupManagerHttpRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final InsertRegionInstanceGroupManagerHttpRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new InsertRegionInstanceGroupManagerHttpRequest();
}
public static class Builder {
private String access_token;
private String callback;
private String fields;
private InstanceGroupManager instanceGroupManagerResource;
private String key;
private String prettyPrint;
private String quotaUser;
private String region;
private String requestId;
private String userIp;
Builder() {}
public Builder mergeFrom(InsertRegionInstanceGroupManagerHttpRequest other) {
if (other == InsertRegionInstanceGroupManagerHttpRequest.getDefaultInstance()) return this;
if (other.getAccessToken() != null) {
this.access_token = other.access_token;
}
if (other.getCallback() != null) {
this.callback = other.callback;
}
if (other.getFields() != null) {
this.fields = other.fields;
}
if (other.getInstanceGroupManagerResource() != null) {
this.instanceGroupManagerResource = other.instanceGroupManagerResource;
}
if (other.getKey() != null) {
this.key = other.key;
}
if (other.getPrettyPrint() != null) {
this.prettyPrint = other.prettyPrint;
}
if (other.getQuotaUser() != null) {
this.quotaUser = other.quotaUser;
}
if (other.getRegion() != null) {
this.region = other.region;
}
if (other.getRequestId() != null) {
this.requestId = other.requestId;
}
if (other.getUserIp() != null) {
this.userIp = other.userIp;
}
return this;
}
Builder(InsertRegionInstanceGroupManagerHttpRequest source) {
this.access_token = source.access_token;
this.callback = source.callback;
this.fields = source.fields;
this.instanceGroupManagerResource = source.instanceGroupManagerResource;
this.key = source.key;
this.prettyPrint = source.prettyPrint;
this.quotaUser = source.quotaUser;
this.region = source.region;
this.requestId = source.requestId;
this.userIp = source.userIp;
}
/**
 * OAuth 2.0 token for the current user.
 *
 * <p>NOTE(review): the backing field is named {@code access_token} (snake_case), presumably to
 * mirror the wire/query-parameter name emitted by the code generator — TODO confirm.
 */
public String getAccessToken() {
  return access_token;
}
/** OAuth 2.0 token for the current user. */
public Builder setAccessToken(String access_token) {
  this.access_token = access_token;
  return this;
}
/** Name of the JavaScript callback function that handles the response. */
public String getCallback() {
  return callback;
}
/** Name of the JavaScript callback function that handles the response. */
public Builder setCallback(String callback) {
  this.callback = callback;
  return this;
}
/** Selector specifying a subset of fields to include in the response. */
public String getFields() {
  return fields;
}
/** Selector specifying a subset of fields to include in the response. */
public Builder setFields(String fields) {
  this.fields = fields;
  return this;
}
/**
 * An Instance Group Manager resource. (== resource_for beta.instanceGroupManagers ==) (==
 * resource_for v1.instanceGroupManagers ==) (== resource_for beta.regionInstanceGroupManagers
 * ==) (== resource_for v1.regionInstanceGroupManagers ==)
 */
public InstanceGroupManager getInstanceGroupManagerResource() {
  return instanceGroupManagerResource;
}
/**
 * An Instance Group Manager resource. (== resource_for beta.instanceGroupManagers ==) (==
 * resource_for v1.instanceGroupManagers ==) (== resource_for beta.regionInstanceGroupManagers
 * ==) (== resource_for v1.regionInstanceGroupManagers ==)
 */
public Builder setInstanceGroupManagerResource(
    InstanceGroupManager instanceGroupManagerResource) {
  this.instanceGroupManagerResource = instanceGroupManagerResource;
  return this;
}
/** API key. Required unless you provide an OAuth 2.0 token. */
public String getKey() {
  return key;
}
/** API key. Required unless you provide an OAuth 2.0 token. */
public Builder setKey(String key) {
  this.key = key;
  return this;
}
/** Returns response with indentations and line breaks. */
public String getPrettyPrint() {
  return prettyPrint;
}
/** Returns response with indentations and line breaks. */
public Builder setPrettyPrint(String prettyPrint) {
  this.prettyPrint = prettyPrint;
  return this;
}
/** Alternative to userIp. */
public String getQuotaUser() {
  return quotaUser;
}
/** Alternative to userIp. */
public Builder setQuotaUser(String quotaUser) {
  this.quotaUser = quotaUser;
  return this;
}
/**
 * Name of the region scoping this request. It must have the format
 * `{project}/regions/{region}/instanceGroupManagers`. \`{region}\` must start with a letter,
 * and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`),
 * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent signs
 * (\`%\`). It must be between 3 and 255 characters in length, and it must not start with
 * \`"goog"\`.
 */
public String getRegion() {
  return region;
}
/**
 * Name of the region scoping this request. It must have the format
 * `{project}/regions/{region}/instanceGroupManagers`. \`{region}\` must start with a letter,
 * and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`),
 * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent signs
 * (\`%\`). It must be between 3 and 255 characters in length, and it must not start with
 * \`"goog"\`.
 */
public Builder setRegion(String region) {
  this.region = region;
  return this;
}
/**
 * An optional request ID to identify requests. Specify a unique request ID so that if you must
 * retry your request, the server will know to ignore the request if it has already been
 * completed.
 *
 * <p>For example, consider a situation where you make an initial request and the request times
 * out. If you make the request again with the same request ID, the server can check if original
 * operation with the same request ID was received, and if so, will ignore the second request.
 * This prevents clients from accidentally creating duplicate commitments.
 *
 * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
 * (00000000-0000-0000-0000-000000000000).
 */
public String getRequestId() {
  return requestId;
}
/**
 * An optional request ID to identify requests. Specify a unique request ID so that if you must
 * retry your request, the server will know to ignore the request if it has already been
 * completed.
 *
 * <p>For example, consider a situation where you make an initial request and the request times
 * out. If you make the request again with the same request ID, the server can check if original
 * operation with the same request ID was received, and if so, will ignore the second request.
 * This prevents clients from accidentally creating duplicate commitments.
 *
 * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
 * (00000000-0000-0000-0000-000000000000).
 */
public Builder setRequestId(String requestId) {
  this.requestId = requestId;
  return this;
}
/** IP address of the end user for whom the API call is being made. */
public String getUserIp() {
  return userIp;
}
/** IP address of the end user for whom the API call is being made. */
public Builder setUserIp(String userIp) {
  this.userIp = userIp;
  return this;
}
/**
 * Builds the request from this builder's current state.
 *
 * @return a new {@code InsertRegionInstanceGroupManagerHttpRequest}
 * @throws IllegalStateException if a required property (currently only {@code region}) is unset
 */
public InsertRegionInstanceGroupManagerHttpRequest build() {
  // Accumulate the names of any unset required properties before failing.
  StringBuilder missing = new StringBuilder();
  if (region == null) {
    missing.append(" region");
  }
  if (missing.length() > 0) {
    throw new IllegalStateException("Missing required properties:" + missing);
  }
  return new InsertRegionInstanceGroupManagerHttpRequest(
      access_token,
      callback,
      fields,
      instanceGroupManagerResource,
      key,
      prettyPrint,
      quotaUser,
      region,
      requestId,
      userIp);
}
/**
 * Returns a new {@code Builder} pre-populated with this builder's current field values.
 *
 * <p>This is a plain copy method; it deliberately does not call {@code super.clone()}.
 */
public Builder clone() {
  return new Builder()
      .setAccessToken(this.access_token)
      .setCallback(this.callback)
      .setFields(this.fields)
      .setInstanceGroupManagerResource(this.instanceGroupManagerResource)
      .setKey(this.key)
      .setPrettyPrint(this.prettyPrint)
      .setQuotaUser(this.quotaUser)
      .setRegion(this.region)
      .setRequestId(this.requestId)
      .setUserIp(this.userIp);
}
}
/** Debug representation listing every request field; {@code null} fields print as "null". */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("InsertRegionInstanceGroupManagerHttpRequest{");
  sb.append("access_token=").append(access_token);
  sb.append(", ").append("callback=").append(callback);
  sb.append(", ").append("fields=").append(fields);
  sb.append(", ").append("instanceGroupManagerResource=").append(instanceGroupManagerResource);
  sb.append(", ").append("key=").append(key);
  sb.append(", ").append("prettyPrint=").append(prettyPrint);
  sb.append(", ").append("quotaUser=").append(quotaUser);
  sb.append(", ").append("region=").append(region);
  sb.append(", ").append("requestId=").append(requestId);
  sb.append(", ").append("userIp=").append(userIp);
  return sb.append("}").toString();
}
/**
 * Two requests are equal when all ten fields are (null-safely) equal.
 * Comparison goes through the other instance's getters, matching the generated style.
 */
@Override
public boolean equals(Object o) {
  if (o == this) {
    return true;
  }
  if (!(o instanceof InsertRegionInstanceGroupManagerHttpRequest)) {
    // Also covers o == null: instanceof is false for null.
    return false;
  }
  InsertRegionInstanceGroupManagerHttpRequest that =
      (InsertRegionInstanceGroupManagerHttpRequest) o;
  return Objects.equals(this.access_token, that.getAccessToken())
      && Objects.equals(this.callback, that.getCallback())
      && Objects.equals(this.fields, that.getFields())
      && Objects.equals(
          this.instanceGroupManagerResource, that.getInstanceGroupManagerResource())
      && Objects.equals(this.key, that.getKey())
      && Objects.equals(this.prettyPrint, that.getPrettyPrint())
      && Objects.equals(this.quotaUser, that.getQuotaUser())
      && Objects.equals(this.region, that.getRegion())
      && Objects.equals(this.requestId, that.getRequestId())
      && Objects.equals(this.userIp, that.getUserIp());
}
/**
 * Hash code consistent with {@link #equals(Object)}: computed from the same ten fields,
 * in the same order.
 */
@Override
public int hashCode() {
  return Objects.hash(
      access_token,
      callback,
      fields,
      instanceGroupManagerResource,
      key,
      prettyPrint,
      quotaUser,
      region,
      requestId,
      userIp);
}
}
| |
/**
*/
package substationStandard.LNNodes.LNGroupP.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import substationStandard.Dataclasses.ACD;
import substationStandard.Dataclasses.ACT;
import substationStandard.Dataclasses.ASG;
import substationStandard.Dataclasses.CSD;
import substationStandard.Dataclasses.CURVE;
import substationStandard.Dataclasses.ING;
import substationStandard.LNNodes.LNGroupP.LNGroupPPackage;
import substationStandard.LNNodes.LNGroupP.PTOV;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>PTOV</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getStr <em>Str</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getOp <em>Op</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getTmVst <em>Tm Vst</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getTmVCrv <em>Tm VCrv</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getStrVal <em>Str Val</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getTmMult <em>Tm Mult</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getMinOpTmms <em>Min Op Tmms</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getMaxOpTmms <em>Max Op Tmms</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getOpDlTmms <em>Op Dl Tmms</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupP.impl.PTOVImpl#getRsDlTmms <em>Rs Dl Tmms</em>}</li>
* </ul>
*
* @generated
*/
public class PTOVImpl extends GroupPImpl implements PTOV {
	/**
	 * The cached value of the '{@link #getStr() <em>Str</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStr()
	 * @generated
	 * @ordered
	 */
	protected ACD str;
	/**
	 * The cached value of the '{@link #getOp() <em>Op</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getOp()
	 * @generated
	 * @ordered
	 */
	protected ACT op;
	/**
	 * The cached value of the '{@link #getTmVst() <em>Tm Vst</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTmVst()
	 * @generated
	 * @ordered
	 */
	protected CSD tmVst;
	/**
	 * The cached value of the '{@link #getTmVCrv() <em>Tm VCrv</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTmVCrv()
	 * @generated
	 * @ordered
	 */
	protected CURVE tmVCrv;
	/**
	 * The cached value of the '{@link #getStrVal() <em>Str Val</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStrVal()
	 * @generated
	 * @ordered
	 */
	protected ASG strVal;
	/**
	 * The cached value of the '{@link #getTmMult() <em>Tm Mult</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTmMult()
	 * @generated
	 * @ordered
	 */
	protected ASG tmMult;
	/**
	 * The cached value of the '{@link #getMinOpTmms() <em>Min Op Tmms</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMinOpTmms()
	 * @generated
	 * @ordered
	 */
	protected ING minOpTmms;
	/**
	 * The cached value of the '{@link #getMaxOpTmms() <em>Max Op Tmms</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMaxOpTmms()
	 * @generated
	 * @ordered
	 */
	protected ING maxOpTmms;
	/**
	 * The cached value of the '{@link #getOpDlTmms() <em>Op Dl Tmms</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getOpDlTmms()
	 * @generated
	 * @ordered
	 */
	protected ING opDlTmms;
	/**
	 * The cached value of the '{@link #getRsDlTmms() <em>Rs Dl Tmms</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getRsDlTmms()
	 * @generated
	 * @ordered
	 */
	protected ING rsDlTmms;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected PTOVImpl() {
		super();
	}
	/**
	 * Identifies this class's EMF metamodel class (the PTOV EClass literal).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return LNGroupPPackage.Literals.PTOV;
	}
	// NOTE: all reference getters below follow the standard EMF lazy proxy-resolution
	// pattern: if the cached value is an unresolved proxy, resolve it, cache the result,
	// and fire a RESOLVE notification when the resolved object differs from the proxy.
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACD getStr() {
		if (str != null && str.eIsProxy()) {
			InternalEObject oldStr = (InternalEObject)str;
			str = (ACD)eResolveProxy(oldStr);
			if (str != oldStr) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__STR, oldStr, str));
			}
		}
		return str;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACD basicGetStr() {
		return str;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStr(ACD newStr) {
		ACD oldStr = str;
		str = newStr;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__STR, oldStr, str));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACT getOp() {
		if (op != null && op.eIsProxy()) {
			InternalEObject oldOp = (InternalEObject)op;
			op = (ACT)eResolveProxy(oldOp);
			if (op != oldOp) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__OP, oldOp, op));
			}
		}
		return op;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACT basicGetOp() {
		return op;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setOp(ACT newOp) {
		ACT oldOp = op;
		op = newOp;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__OP, oldOp, op));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CSD getTmVst() {
		if (tmVst != null && tmVst.eIsProxy()) {
			InternalEObject oldTmVst = (InternalEObject)tmVst;
			tmVst = (CSD)eResolveProxy(oldTmVst);
			if (tmVst != oldTmVst) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__TM_VST, oldTmVst, tmVst));
			}
		}
		return tmVst;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CSD basicGetTmVst() {
		return tmVst;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTmVst(CSD newTmVst) {
		CSD oldTmVst = tmVst;
		tmVst = newTmVst;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__TM_VST, oldTmVst, tmVst));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CURVE getTmVCrv() {
		if (tmVCrv != null && tmVCrv.eIsProxy()) {
			InternalEObject oldTmVCrv = (InternalEObject)tmVCrv;
			tmVCrv = (CURVE)eResolveProxy(oldTmVCrv);
			if (tmVCrv != oldTmVCrv) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__TM_VCRV, oldTmVCrv, tmVCrv));
			}
		}
		return tmVCrv;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CURVE basicGetTmVCrv() {
		return tmVCrv;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTmVCrv(CURVE newTmVCrv) {
		CURVE oldTmVCrv = tmVCrv;
		tmVCrv = newTmVCrv;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__TM_VCRV, oldTmVCrv, tmVCrv));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ASG getStrVal() {
		if (strVal != null && strVal.eIsProxy()) {
			InternalEObject oldStrVal = (InternalEObject)strVal;
			strVal = (ASG)eResolveProxy(oldStrVal);
			if (strVal != oldStrVal) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__STR_VAL, oldStrVal, strVal));
			}
		}
		return strVal;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ASG basicGetStrVal() {
		return strVal;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStrVal(ASG newStrVal) {
		ASG oldStrVal = strVal;
		strVal = newStrVal;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__STR_VAL, oldStrVal, strVal));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ASG getTmMult() {
		if (tmMult != null && tmMult.eIsProxy()) {
			InternalEObject oldTmMult = (InternalEObject)tmMult;
			tmMult = (ASG)eResolveProxy(oldTmMult);
			if (tmMult != oldTmMult) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__TM_MULT, oldTmMult, tmMult));
			}
		}
		return tmMult;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ASG basicGetTmMult() {
		return tmMult;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTmMult(ASG newTmMult) {
		ASG oldTmMult = tmMult;
		tmMult = newTmMult;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__TM_MULT, oldTmMult, tmMult));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING getMinOpTmms() {
		if (minOpTmms != null && minOpTmms.eIsProxy()) {
			InternalEObject oldMinOpTmms = (InternalEObject)minOpTmms;
			minOpTmms = (ING)eResolveProxy(oldMinOpTmms);
			if (minOpTmms != oldMinOpTmms) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__MIN_OP_TMMS, oldMinOpTmms, minOpTmms));
			}
		}
		return minOpTmms;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING basicGetMinOpTmms() {
		return minOpTmms;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMinOpTmms(ING newMinOpTmms) {
		ING oldMinOpTmms = minOpTmms;
		minOpTmms = newMinOpTmms;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__MIN_OP_TMMS, oldMinOpTmms, minOpTmms));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING getMaxOpTmms() {
		if (maxOpTmms != null && maxOpTmms.eIsProxy()) {
			InternalEObject oldMaxOpTmms = (InternalEObject)maxOpTmms;
			maxOpTmms = (ING)eResolveProxy(oldMaxOpTmms);
			if (maxOpTmms != oldMaxOpTmms) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__MAX_OP_TMMS, oldMaxOpTmms, maxOpTmms));
			}
		}
		return maxOpTmms;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING basicGetMaxOpTmms() {
		return maxOpTmms;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMaxOpTmms(ING newMaxOpTmms) {
		ING oldMaxOpTmms = maxOpTmms;
		maxOpTmms = newMaxOpTmms;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__MAX_OP_TMMS, oldMaxOpTmms, maxOpTmms));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING getOpDlTmms() {
		if (opDlTmms != null && opDlTmms.eIsProxy()) {
			InternalEObject oldOpDlTmms = (InternalEObject)opDlTmms;
			opDlTmms = (ING)eResolveProxy(oldOpDlTmms);
			if (opDlTmms != oldOpDlTmms) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__OP_DL_TMMS, oldOpDlTmms, opDlTmms));
			}
		}
		return opDlTmms;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING basicGetOpDlTmms() {
		return opDlTmms;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setOpDlTmms(ING newOpDlTmms) {
		ING oldOpDlTmms = opDlTmms;
		opDlTmms = newOpDlTmms;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__OP_DL_TMMS, oldOpDlTmms, opDlTmms));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING getRsDlTmms() {
		if (rsDlTmms != null && rsDlTmms.eIsProxy()) {
			InternalEObject oldRsDlTmms = (InternalEObject)rsDlTmms;
			rsDlTmms = (ING)eResolveProxy(oldRsDlTmms);
			if (rsDlTmms != oldRsDlTmms) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PTOV__RS_DL_TMMS, oldRsDlTmms, rsDlTmms));
			}
		}
		return rsDlTmms;
	}
	/**
	 * Returns the raw cached value without resolving proxies.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING basicGetRsDlTmms() {
		return rsDlTmms;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRsDlTmms(ING newRsDlTmms) {
		ING oldRsDlTmms = rsDlTmms;
		rsDlTmms = newRsDlTmms;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PTOV__RS_DL_TMMS, oldRsDlTmms, rsDlTmms));
	}
	/**
	 * Reflective feature read used by the EMF framework; {@code resolve} selects between
	 * the proxy-resolving getter and the raw cached value.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case LNGroupPPackage.PTOV__STR:
				if (resolve) return getStr();
				return basicGetStr();
			case LNGroupPPackage.PTOV__OP:
				if (resolve) return getOp();
				return basicGetOp();
			case LNGroupPPackage.PTOV__TM_VST:
				if (resolve) return getTmVst();
				return basicGetTmVst();
			case LNGroupPPackage.PTOV__TM_VCRV:
				if (resolve) return getTmVCrv();
				return basicGetTmVCrv();
			case LNGroupPPackage.PTOV__STR_VAL:
				if (resolve) return getStrVal();
				return basicGetStrVal();
			case LNGroupPPackage.PTOV__TM_MULT:
				if (resolve) return getTmMult();
				return basicGetTmMult();
			case LNGroupPPackage.PTOV__MIN_OP_TMMS:
				if (resolve) return getMinOpTmms();
				return basicGetMinOpTmms();
			case LNGroupPPackage.PTOV__MAX_OP_TMMS:
				if (resolve) return getMaxOpTmms();
				return basicGetMaxOpTmms();
			case LNGroupPPackage.PTOV__OP_DL_TMMS:
				if (resolve) return getOpDlTmms();
				return basicGetOpDlTmms();
			case LNGroupPPackage.PTOV__RS_DL_TMMS:
				if (resolve) return getRsDlTmms();
				return basicGetRsDlTmms();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * Reflective feature write used by the EMF framework; delegates to the typed setters.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case LNGroupPPackage.PTOV__STR:
				setStr((ACD)newValue);
				return;
			case LNGroupPPackage.PTOV__OP:
				setOp((ACT)newValue);
				return;
			case LNGroupPPackage.PTOV__TM_VST:
				setTmVst((CSD)newValue);
				return;
			case LNGroupPPackage.PTOV__TM_VCRV:
				setTmVCrv((CURVE)newValue);
				return;
			case LNGroupPPackage.PTOV__STR_VAL:
				setStrVal((ASG)newValue);
				return;
			case LNGroupPPackage.PTOV__TM_MULT:
				setTmMult((ASG)newValue);
				return;
			case LNGroupPPackage.PTOV__MIN_OP_TMMS:
				setMinOpTmms((ING)newValue);
				return;
			case LNGroupPPackage.PTOV__MAX_OP_TMMS:
				setMaxOpTmms((ING)newValue);
				return;
			case LNGroupPPackage.PTOV__OP_DL_TMMS:
				setOpDlTmms((ING)newValue);
				return;
			case LNGroupPPackage.PTOV__RS_DL_TMMS:
				setRsDlTmms((ING)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * Reflective feature reset used by the EMF framework; clears the feature to null.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case LNGroupPPackage.PTOV__STR:
				setStr((ACD)null);
				return;
			case LNGroupPPackage.PTOV__OP:
				setOp((ACT)null);
				return;
			case LNGroupPPackage.PTOV__TM_VST:
				setTmVst((CSD)null);
				return;
			case LNGroupPPackage.PTOV__TM_VCRV:
				setTmVCrv((CURVE)null);
				return;
			case LNGroupPPackage.PTOV__STR_VAL:
				setStrVal((ASG)null);
				return;
			case LNGroupPPackage.PTOV__TM_MULT:
				setTmMult((ASG)null);
				return;
			case LNGroupPPackage.PTOV__MIN_OP_TMMS:
				setMinOpTmms((ING)null);
				return;
			case LNGroupPPackage.PTOV__MAX_OP_TMMS:
				setMaxOpTmms((ING)null);
				return;
			case LNGroupPPackage.PTOV__OP_DL_TMMS:
				setOpDlTmms((ING)null);
				return;
			case LNGroupPPackage.PTOV__RS_DL_TMMS:
				setRsDlTmms((ING)null);
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * Reflective "is set" check used by the EMF framework; a reference is set when non-null.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case LNGroupPPackage.PTOV__STR:
				return str != null;
			case LNGroupPPackage.PTOV__OP:
				return op != null;
			case LNGroupPPackage.PTOV__TM_VST:
				return tmVst != null;
			case LNGroupPPackage.PTOV__TM_VCRV:
				return tmVCrv != null;
			case LNGroupPPackage.PTOV__STR_VAL:
				return strVal != null;
			case LNGroupPPackage.PTOV__TM_MULT:
				return tmMult != null;
			case LNGroupPPackage.PTOV__MIN_OP_TMMS:
				return minOpTmms != null;
			case LNGroupPPackage.PTOV__MAX_OP_TMMS:
				return maxOpTmms != null;
			case LNGroupPPackage.PTOV__OP_DL_TMMS:
				return opDlTmms != null;
			case LNGroupPPackage.PTOV__RS_DL_TMMS:
				return rsDlTmms != null;
		}
		return super.eIsSet(featureID);
	}
} //PTOVImpl
| |
package objets;
import interfaces.ImageListener;
import interfaces.Localise;
import interfaces.Sauvegardable;
import io.IO;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import javax.swing.AbstractButton;
import javax.swing.Box;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSpinner;
import javax.swing.JSplitPane;
import javax.swing.SpinnerNumberModel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import layouts.LayoutLignes;
import listeners.ChangeObjetListener;
import listeners.RemoveListener;
import map.objets.Bloc;
import map.objets.Destructible;
import map.objets.Objet;
import map.objets.ObjetVide;
import map.objets.TypeObjet;
import physique.forme.Forme;
import ressources.SpriteObjets;
import vision.Orientation;
import composants.ChangeurImage;
import composants.SelecteurImage;
import composants.panel.PanelImage;
import composants.styles.Bouton;
import composants.styles.ScrollPaneTransparent;
import divers.Outil;
import exceptions.ExceptionJeu;
/**
 * Swing panel displaying a map object ({@code Objet}), either read-only or editable.
 * In editable mode it exposes spinners for position/size, combo boxes for type and
 * orientation, an image chooser and a delete button; in read-only mode it shows a
 * static preview and textual property values.
 */
public class InterfaceObjet extends PanelImage implements Sauvegardable, ActionListener, ImageListener, ChangeObjetListener, ChangeListener {
    private static final long serialVersionUID = 1L;
    // true -> editing widgets are built; false -> read-only preview
    private final boolean editable;
    private final JComponent autres;   // right-hand list of property rows
    private final PanelImage image;    // read-only preview (null in editable mode)
    private final RemoveListener<Objet> l;   // notified when the user deletes the object
    private final JLabel nom;          // title label showing the object's name
    private JSpinner largeur, hauteur, x, y;   // size/position editors (editable mode only)
    private SelecteurImage select;     // sprite chooser (editable mode only)
    private JComboBox<String> type, orientation;
    private AbstractButton suppr;      // delete button (editable mode only)
    private Objet objet;               // object currently displayed; swapped in change()
    /**
     * Builds the panel for {@code objet}.
     *
     * @param editable whether editing widgets should be created
     * @param objet    the object to display
     * @param l        callback invoked when the user presses the delete button
     */
    public InterfaceObjet(boolean editable, Objet objet, RemoveListener<Objet> l) {
        super("fond/parchemin.jpg");
        this.l = l;
        this.objet = objet;
        this.editable = editable;
        setLayout(new BorderLayout());
        add(nom = Outil.getTexte(objet.getNom(), false), BorderLayout.NORTH);
        JSplitPane jsp = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
        jsp.setDividerSize(4);
        autres = new JPanel(new LayoutLignes());
        if(editable) {
            // Left side: image chooser bound to the object's sprite id.
            select = new ChangeurImage(objet.getID());
            select.setOpaque(false);
            jsp.setLeftComponent(select);
            type = new JComboBox<String>(TypeObjet.noms());
            add(type, BorderLayout.SOUTH);
            orientation = new JComboBox<String>(Orientation.noms());
            suppr = new Bouton("Supprimer").large();
            // Spinner bounds: size capped at two map units, position allowed one unit off-map.
            largeur = new JSpinner(new SpinnerNumberModel(objet.getLargeur(), 0, Localise.UNITE.width * 2, 5));
            hauteur = new JSpinner(new SpinnerNumberModel(objet.getHauteur(), 0, Localise.UNITE.width * 2, 5));
            x = new JSpinner(new SpinnerNumberModel(objet.getX(), -Localise.UNITE.width, Integer.MAX_VALUE, 10));
            y = new JSpinner(new SpinnerNumberModel(objet.getY(), -Localise.UNITE.height, Integer.MAX_VALUE, 10));
            largeur.addChangeListener(this);
            hauteur.addChangeListener(this);
            x.addChangeListener(this);
            y.addChangeListener(this);
            type.addActionListener(this);
            select.addImageListener(this);
            suppr.addActionListener(this);
            orientation.addActionListener(this);
            image = null;
        } else {
            // Read-only: static image preview plus the type name as a caption.
            image = new PanelImage();
            jsp.setLeftComponent(image);
            add(Outil.getTexte("(" + objet.getType().getNom() + ")", false), BorderLayout.SOUTH);
        }
        Component c = new ScrollPaneTransparent(autres);
        c.setMinimumSize(new Dimension(200, 200));
        jsp.setRightComponent(c);
        add(jsp, BorderLayout.CENTER);
        actualise();
    }
    /**
     * Rebuilds the property panel from the current {@code objet}: re-registers this panel
     * as change listener, refreshes the title, and repopulates the rows.
     */
    private void actualise() {
        objet.addChangeObjetListener(this);
        nom.setText(objet.getNom());
        autres.removeAll();
        if(editable) {
            try {
                select.setImage(objet.getImage());
            } catch(IndexOutOfBoundsException err) {}  // NOTE(review): silently ignored — presumably "no image for this id"; confirm
            autres.add(Outil.creerPanel("Position X", x));
            autres.add(Outil.creerPanel("Position Y", y));
            autres.add(Outil.creerPanel("Largeur", largeur));
            autres.add(Outil.creerPanel("Hauteur", hauteur));
            // NOTE(review): setSelectedItem is immediately overridden by setSelectedIndex;
            // the first call looks redundant — confirm which of name/id should win.
            orientation.setSelectedItem(objet.getForme().getOrientation().getNom());
            orientation.setSelectedIndex(objet.getForme().getOrientation().getID());
            largeur.setValue(objet.getLargeur());
            hauteur.setValue(objet.getHauteur());
        } else {
            autres.add(Outil.creerPanel("Coordonnees", "{" + objet.getX() + ", " + objet.getY() + "}"));
            autres.add(Outil.creerPanel("Largeur", objet.getLargeur() + "cm"));
            autres.add(Outil.creerPanel("Hauteur", objet.getHauteur() + "cm"));
            image.setImage(objet.getImage());
            image.tailleImage();
            // NOTE(review): the constructor already added a SOUTH label in read-only mode;
            // adding another here on every actualise() accumulates children in the container
            // (BorderLayout only lays out the latest) — looks like a leak, confirm intent.
            add(Outil.getTexte("(" + objet.getType().getNom() + ")", false), BorderLayout.SOUTH);
        }
        // Let the object append its own type-specific rows.
        objet.construireInterface(autres, editable);
        if(editable) {
            autres.add(Outil.creerPanel("Orientation", orientation));
            autres.add(Box.createRigidArea(new Dimension(20, 20)));
            autres.add(suppr);
            // Sync the type combo without re-triggering actionPerformed (listener detached
            // around the programmatic selection change).
            if(objet.getType().getID() != type.getSelectedIndex()) {
                type.removeActionListener(this);
                type.setSelectedIndex(objet.getType().getID());
                type.addActionListener(this);
            }
        } else {
            autres.add(Outil.creerPanel("Orientation", objet.getForme().getOrientation().getNom()));
        }
    }
    /** Delegates persistence to the displayed object. */
    @Override
    public IO sauvegarder(IO io) {
        return objet.sauvegarder(io);
    }
    /** Handles the type combo (object conversion), delete button and orientation combo. */
    @Override
    public void actionPerformed(ActionEvent e) {
        if(e.getSource() == type)
            objet.notifyChangeObjetListener(convertir(TypeObjet.get(type.getSelectedIndex())));
        else if(e.getSource() == suppr)
            l.remove(objet);
        else if(e.getSource() == orientation)
            objet.getForme().setOrientation(Orientation.get(orientation.getSelectedItem().toString()));
    }
    /** Image chooser callback: stores the edited sprite back into the shared sprite bank. */
    @Override
    public void change(int id, BufferedImage image) {
        SpriteObjets.getInstance().setImage(id, image);
    }
    /** Object replacement callback: swap to the new object and rebuild the panel. */
    @Override
    public void change(Objet ancien, Objet nouveau) {
        if(ancien == objet) {
            ancien.removeChangeObjetListener(this);
            objet = nouveau;
            actualise();
        }
    }
    /** Creates an object of the requested type carrying over the current object's shape. */
    private Objet convertir(TypeObjet typeObjet) {
        Forme f = objet.getForme();
        Objet o = creer(typeObjet);
        o.setForme(f);
        return o;
    }
    /** Instantiates a fresh object of the given type from the current object's properties. */
    private Objet creer(TypeObjet typeObjet) {
        switch(typeObjet) {
        case VIDE:
            return new ObjetVide(objet.getMap(), objet.getContaineurImages(), objet.getFond());
        case BLOC:
            // Preserve durability when converting from another destructible object.
            int resistance = 10, degats = 0;
            if(objet instanceof Destructible) {
                resistance = ((Destructible) objet).getVitalite();
                degats = ((Destructible) objet).getVie();
            }
            return new Bloc(objet.getMap(), objet.getContaineurImages(), objet.getFond(),
                    objet.getForme().dupliquer(), objet.getID(), resistance, degats);
        default: throw new IllegalArgumentException(typeObjet + " n'a pas ete defini");
        }
    }
    /**
     * Spinner callback: push the edited value into the object; on failure, roll the
     * spinner back to the object's current value.
     * NOTE(review): width/height catch broad {@code Exception} while x/y catch only
     * {@code ExceptionJeu} — confirm whether the asymmetry is intentional.
     */
    @Override
    public void stateChanged(ChangeEvent e) {
        if(e.getSource() == largeur) try {
            objet.setLargeur((Integer) largeur.getValue());
        } catch(Exception e2) {
            largeur.setValue(objet.getLargeur());
        } else if(e.getSource() == hauteur) try {
            objet.setHauteur((Integer) hauteur.getValue());
        } catch(Exception e2) {
            hauteur.setValue(objet.getHauteur());
        } else if(e.getSource() == x) try {
            objet.setX((Integer) x.getValue());
        } catch(ExceptionJeu e1) {
            x.setValue(objet.getX());
        } else if(e.getSource() == y) try {
            objet.setY((Integer) y.getValue());
        } catch(ExceptionJeu e1) {
            y.setValue(objet.getY());
        }
    }
}
| |
package com.github.davidmoten.fsm.runtime.rx;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import org.reactivestreams.Subscriber;
import com.github.davidmoten.fsm.runtime.Action3;
import com.github.davidmoten.fsm.runtime.CancelTimedSignal;
import com.github.davidmoten.fsm.runtime.Clock;
import com.github.davidmoten.fsm.runtime.EntityBehaviour;
import com.github.davidmoten.fsm.runtime.EntityState;
import com.github.davidmoten.fsm.runtime.EntityStateMachine;
import com.github.davidmoten.fsm.runtime.Event;
import com.github.davidmoten.fsm.runtime.ObjectState;
import com.github.davidmoten.fsm.runtime.Search;
import com.github.davidmoten.fsm.runtime.Signal;
import com.github.davidmoten.guavamini.Preconditions;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Emitter;
import io.reactivex.Flowable;
import io.reactivex.FlowableTransformer;
import io.reactivex.Scheduler;
import io.reactivex.Scheduler.Worker;
import io.reactivex.disposables.Disposable;
import io.reactivex.flowables.GroupedFlowable;
import io.reactivex.functions.BiConsumer;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.internal.functions.Functions;
import io.reactivex.schedulers.Schedulers;
import io.reactivex.subjects.PublishSubject;
public final class Processor<Id> {
// Creates the EntityBehaviour for a given entity class.
private final Function<Class<?>, EntityBehaviour<?, Id>> behaviourFactory;
// Channel through which callers inject signals into the processing pipeline.
private final PublishSubject<Signal<?, Id>> subject;
// Scheduler used for delayed/timed signal delivery.
private final Scheduler signalScheduler;
// Scheduler on which state-machine transitions are processed.
private final Scheduler processingScheduler;
// Live state machine per (class, id) pair.
private final Map<ClassId<?, Id>, EntityStateMachine<?, Id>> stateMachines = new ConcurrentHashMap<>();
// Subscriptions for scheduled (timed) signals, keyed by sender/receiver pair,
// so they can be cancelled.
private final Map<ClassIdPair<Id>, Disposable> subscriptions = new ConcurrentHashMap<>();
// Externally supplied signal source merged into the pipeline.
private final Flowable<Signal<?, Id>> signals;
// Hook applied to each per-entity grouped stream.
private final Function<GroupedFlowable<ClassId<?, Id>, EntityStateMachine<?, Id>>, Flowable<EntityStateMachine<?, Id>>> entityTransform;
// Hook applied to the signal stream before grouping by entity.
private final FlowableTransformer<Signal<?, Id>, Signal<?, Id>> preGroupBy;
private final Function<Consumer<Object>, Map<ClassId<?, Id>, Object>> mapFactory; // nullable
// Clock backed by signalScheduler, handed to signallers.
private final Clock signallerClock;
// Callbacks run around each state transition.
private final Action3<? super EntityStateMachine<?, Id>, ? super Event<?>, ? super EntityState<?>> preTransitionAction;
private final Consumer<? super EntityStateMachine<?, Id>> postTransitionAction;
// Lookup facility given to behaviours; delegates to getStateMachine (defined later
// in this class, outside this excerpt).
private final Search<Id> search = new Search<Id>() {
    @Override
    public <T> Optional<T> search(Class<T> cls, Id id) {
        return getStateMachine(cls, id).get();
    }
};
/**
 * Creates a {@code Processor}. Private: instances are created via the static factory
 * methods and {@link Builder}.
 *
 * <p>All arguments are required (non-null) except {@code mapFactory}, which may be null.
 *
 * @throws NullPointerException if any mandatory argument is null
 */
private Processor(Function<Class<?>, EntityBehaviour<?, Id>> behaviourFactory,
        Scheduler processingScheduler, Scheduler signalScheduler,
        Flowable<Signal<?, Id>> signals,
        Function<GroupedFlowable<ClassId<?, Id>, EntityStateMachine<?, Id>>, Flowable<EntityStateMachine<?, Id>>> entityTransform,
        FlowableTransformer<Signal<?, Id>, Signal<?, Id>> preGroupBy,
        Function<Consumer<Object>, Map<ClassId<?, Id>, Object>> mapFactory,
        Action3<? super EntityStateMachine<?, Id>, ? super Event<?>, ? super EntityState<?>> preTransitionAction,
        Consumer<? super EntityStateMachine<?, Id>> postTransitionAction) {
    Preconditions.checkNotNull(behaviourFactory);
    // Fix: processingScheduler was the only mandatory argument not null-checked; a null
    // would otherwise only surface later, far from the offending call site.
    Preconditions.checkNotNull(processingScheduler);
    Preconditions.checkNotNull(signalScheduler);
    Preconditions.checkNotNull(signals);
    Preconditions.checkNotNull(entityTransform);
    Preconditions.checkNotNull(preGroupBy);
    Preconditions.checkNotNull(preTransitionAction);
    Preconditions.checkNotNull(postTransitionAction);
    // mapFactory is nullable
    this.behaviourFactory = behaviourFactory;
    this.signalScheduler = signalScheduler;
    this.processingScheduler = processingScheduler;
    this.subject = PublishSubject.create();
    this.signals = signals;
    this.entityTransform = entityTransform;
    this.preGroupBy = preGroupBy;
    this.mapFactory = mapFactory;
    this.signallerClock = Clock.from(signalScheduler);
    this.preTransitionAction = preTransitionAction;
    this.postTransitionAction = postTransitionAction;
}
    /** Builder entry point: sets the factory used to create a behaviour per entity class. */
    public static <Id> Builder<Id> behaviourFactory(
            Function<Class<?>, EntityBehaviour<?, Id>> behaviourFactory) {
        return new Builder<Id>().behaviourFactory(behaviourFactory);
    }

    /** Builder entry point: registers the behaviour for a single entity class. */
    public static <T, Id> Builder<Id> behaviour(Class<T> cls, EntityBehaviour<T, Id> behaviour) {
        return new Builder<Id>().behaviour(cls, behaviour);
    }

    /** Builder entry point: sets the scheduler used for delayed (timed) signals. */
    public static <Id> Builder<Id> signalScheduler(Scheduler signalScheduler) {
        return new Builder<Id>().signalScheduler(signalScheduler);
    }

    /** Builder entry point: sets the scheduler on which signals are processed. */
    public static <Id> Builder<Id> processingScheduler(Scheduler processingScheduler) {
        return new Builder<Id>().processingScheduler(processingScheduler);
    }
    /**
     * Builder for {@link Processor}. Obtain an instance via the static factory
     * methods on {@code Processor}. Either a {@code behaviourFactory} or one or
     * more {@code behaviour(cls, behaviour)} registrations must be supplied
     * (but not both) before calling {@link #build()}.
     */
    public static class Builder<Id> {

        private Function<Class<?>, EntityBehaviour<?, Id>> behaviourFactory;
        // defaults below are used when the corresponding setter is not called
        private Scheduler signalScheduler = Schedulers.computation();
        private Scheduler processingScheduler = Schedulers.trampoline();
        private Flowable<Signal<?, Id>> signals = Flowable.empty();
        private Function<GroupedFlowable<ClassId<?, Id>, EntityStateMachine<?, Id>>, Flowable<EntityStateMachine<?, Id>>> entityTransform = g -> g;
        private FlowableTransformer<Signal<?, Id>, Signal<?, Id>> preGroupBy = x -> x;
        private Function<Consumer<Object>, Map<ClassId<?, Id>, Object>> mapFactory; // nullable
        // no-op pre/post transition callbacks by default
        private Action3<? super EntityStateMachine<?, Id>, ? super Event<?>, ? super EntityState<?>> preTransitionAction = (
                x, y, z) -> {
        };
        private Consumer<? super EntityStateMachine<?, Id>> postTransitionAction = x -> {
        };
        // per-class behaviours registered via behaviour(cls, behaviour)
        private final Map<Class<?>, EntityBehaviour<?, Id>> behaviours = new HashMap<>();

        private Builder() {
        }

        /** Registers the behaviour for {@code cls}; mutually exclusive with behaviourFactory. */
        public <T> Builder<Id> behaviour(Class<T> cls, EntityBehaviour<T, Id> behaviour) {
            behaviours.put(cls, behaviour);
            return this;
        }

        /** Sets the behaviour factory; mutually exclusive with behaviour(cls, behaviour). */
        public Builder<Id> behaviourFactory(
                Function<Class<?>, EntityBehaviour<?, Id>> behaviourFactory) {
            this.behaviourFactory = behaviourFactory;
            return this;
        }

        /** Sets the scheduler for delayed signals (default: computation). */
        public Builder<Id> signalScheduler(Scheduler signalScheduler) {
            this.signalScheduler = signalScheduler;
            return this;
        }

        /** Sets the scheduler on which signals are processed (default: trampoline). */
        public Builder<Id> processingScheduler(Scheduler processingScheduler) {
            this.processingScheduler = processingScheduler;
            return this;
        }

        /** Sets an external source of signals merged with those passed to signal(...). */
        public Builder<Id> signals(Flowable<Signal<?, Id>> signals) {
            this.signals = signals;
            return this;
        }

        /** Sets a transform applied to each per-entity stream of state machines. */
        public Builder<Id> entityTransform(
                Function<GroupedFlowable<ClassId<?, Id>, EntityStateMachine<?, Id>>, Flowable<EntityStateMachine<?, Id>>> entityTransform) {
            this.entityTransform = entityTransform;
            return this;
        }

        /** Sets a transformer applied to the merged signal stream before grouping. */
        public Builder<Id> preGroupBy(
                FlowableTransformer<Signal<?, Id>, Signal<?, Id>> preGroupBy) {
            this.preGroupBy = preGroupBy;
            return this;
        }

        /** Sets a custom map supplier for groupBy state (e.g. an evicting map). */
        public Builder<Id> mapFactory(
                Function<Consumer<Object>, Map<ClassId<?, Id>, Object>> mapFactory) {
            this.mapFactory = mapFactory;
            return this;
        }

        /** Sets a callback invoked before each transition. */
        public Builder<Id> preTransition(
                Action3<? super EntityStateMachine<?, Id>, ? super Event<?>, ? super EntityState<?>> action) {
            this.preTransitionAction = action;
            return this;
        }

        /** Sets a callback invoked after each transition. */
        public Builder<Id> postTransition(Consumer<? super EntityStateMachine<?, Id>> action) {
            this.postTransitionAction = action;
            return this;
        }

        /**
         * Builds the Processor. Exactly one of behaviourFactory or behaviour
         * registrations must have been supplied.
         */
        public Processor<Id> build() {
            Preconditions.checkArgument(behaviourFactory != null || !behaviours.isEmpty(),
                    "one of behaviourFactory or multiple calls to behaviour must be made (behaviour must be specified)");
            Preconditions.checkArgument(behaviourFactory == null || behaviours.isEmpty(),
                    "cannot specify both behaviourFactory and behaviour");
            if (!behaviours.isEmpty()) {
                // derive a factory from the registered per-class behaviours
                behaviourFactory = cls -> behaviours.get(cls);
            }
            return new Processor<Id>(behaviourFactory, processingScheduler, signalScheduler,
                    signals, entityTransform, preGroupBy, mapFactory, preTransitionAction,
                    postTransitionAction);
        }
    }
    /**
     * Returns the stream of entity state machines, one emission per processed
     * signal. Signals pushed via {@code signal(...)} are merged with the
     * configured external {@code signals} source, passed through
     * {@code preGroupBy}, grouped by (class, id) and processed per entity on
     * {@code processingScheduler}. As a side effect the latest machine for each
     * (class, id) is recorded in {@code stateMachines}.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public Flowable<EntityStateMachine<?, Id>> flowable() {
        return Flowable.defer(() -> {
            // worker used for delayed signals; disposed when the stream is cancelled
            Worker worker = signalScheduler.createWorker();
            Flowable<Signal<?, Id>> o0 = subject //
                    .toSerialized() //
                    .toFlowable(BackpressureStrategy.BUFFER) //
                    .mergeWith(signals) //
                    .doOnCancel(() -> worker.dispose()) //
                    .compose(preGroupBy);
            Flowable<GroupedFlowable<ClassId<?, Id>, Signal<?, Id>>> o;
            if (mapFactory != null) {
                // groupBy overload taking a custom map supplier for group state
                o = o0.groupBy(signal -> new ClassId(signal.cls(),
                        signal.id()), x -> x, true, 16, mapFactory);
            } else {
                o = o0.groupBy(signal -> new ClassId(signal.cls(), signal.id()),
                        Functions.identity());
            }
            return o.flatMap(g -> {
                Flowable<EntityStateMachine<?, Id>> obs = g //
                        .flatMap(processSignalsToSelfAndSendSignalsToOthers(worker, g.getKey())) //
                        // record the latest machine for this (class, id)
                        .doOnNext(m -> stateMachines.put(g.getKey(), m)) //
                        .subscribeOn(processingScheduler); //
                Flowable<EntityStateMachine<?, Id>> res = entityTransform
                        .apply(grouped(g.getKey(), obs));
                return res;
            });
        });
    }
    /** Wraps {@code o} as a GroupedFlowable carrying {@code key} (so entityTransform sees the key). */
    private static <K, T> GroupedFlowable<K, T> grouped(K key, final Flowable<T> o) {
        return new GroupedFlowable<K, T>(key) {
            @Override
            protected void subscribeActual(Subscriber<? super T> s) {
                o.subscribe(s);
            }
        };
    }
    /**
     * Returns a function that processes one incoming signal for the entity
     * identified by {@code classId}, applying all resulting transitions before
     * emitting the produced state machines downstream.
     */
    private Function<? super Signal<?, Id>, Flowable<EntityStateMachine<?, Id>>> processSignalsToSelfAndSendSignalsToOthers(
            Worker worker, ClassId<?, Id> classId) {
        return signal -> process(classId, signal.event(), worker) //
                // collect first so the whole transition cascade completes before emission
                .toList() //
                .toFlowable().flatMapIterable(Functions.identity());
    }
    /** Queues of signals produced while handling one event: to the entity itself and to others. */
    private static final class Signals<Id> {
        final Deque<Event<?>> signalsToSelf = new ArrayDeque<>();
        final Deque<Signal<?, Id>> signalsToOther = new ArrayDeque<>();
    }
    /** Applies {@code event} to the machine for {@code classId}, emitting one machine per transition. */
    private Flowable<EntityStateMachine<?, Id>> process(ClassId<?, Id> classId, Event<?> event,
            Worker worker) {
        EntityStateMachine<?, Id> machine = getStateMachine(classId.cls(), classId.id());
        // the handler serves as both the initial-state Callable and the generator BiConsumer
        TransitionHandler handler = new TransitionHandler(classId, event, worker, machine);
        return Flowable.generate(handler, handler);
    }
    /**
     * Combines the state creation and transition operations into the one class.
     * Implements the transition rules of an Executable UML state machine in
     * that for an entry procedure all signals to self are emitted before
     * signals to others. Signals to self are actioned synchronously but signals
     * to others may be actioned asynchronously.
     */
    private final class TransitionHandler implements Callable<Signals<Id>>,
            BiConsumer<Signals<Id>, Emitter<EntityStateMachine<?, Id>>> {

        // note has access to surrounding classes' state because is not
        // static
        private final Event<?> event;
        private final ClassId<?, Id> classId;
        private final Worker worker;
        // mutable: replaced with the new machine after each transition
        EntityStateMachine<?, Id> machine;

        TransitionHandler(ClassId<?, Id> classId, Event<?> event, Worker worker,
                EntityStateMachine<?, Id> machine) {
            this.classId = classId;
            this.event = event;
            this.worker = worker;
            this.machine = machine;
        }

        @Override
        public Signals<Id> call() throws Exception {
            // generate initial state: seed the queue with the triggering event
            Signals<Id> signals = new Signals<>();
            signals.signalsToSelf.offerFirst(event);
            return signals;
        }

        @Override
        public void accept(Signals<Id> signals, Emitter<EntityStateMachine<?, Id>> observer)
                throws Exception {
            // drain signals-to-self one per generator step; only once they are
            // exhausted are signals to other entities dispatched
            @SuppressWarnings("unchecked")
            Event<Object> event = (Event<Object>) signals.signalsToSelf.pollLast();
            if (event != null) {
                applySignalToSelf(signals, observer, event);
            } else {
                applySignalsToOthers(classId, worker, signals);
                observer.onComplete();
            }
        }

        /**
         * Applies one event to the machine, emits the resulting machine and
         * queues any signals (to self and to others) the transition produced.
         */
        @SuppressWarnings("unchecked")
        private <T> void applySignalToSelf(Signals<Id> signals,
                Emitter<? super EntityStateMachine<?, Id>> observer, Event<T> event)
                throws Exception {
            // run the entry procedure if a transition occurs
            // and record signals to self and to others
            machine = machine.signal((Event<Object>) event);
            postTransitionAction.accept(machine);
            // downstream synchronously updates the stateMachines
            observer.onNext(machine);
            List<Event<? super T>> list = (List<Event<? super T>>) (List<?>) machine
                    .signalsToSelf();
            // reverse order so that list.get(0) is polled first (FIFO semantics)
            for (int i = list.size() - 1; i >= 0; i--) {
                signals.signalsToSelf.offerLast(list.get(i));
            }
            for (Signal<?, ?> signal : machine.signalsToOther()) {
                signals.signalsToOther.offerLast((Signal<?, Id>) signal);
            }
        }

        /**
         * Dispatches queued signals to other entities: immediate and overdue
         * signals go straight to the subject, cancellations remove pending
         * scheduled signals, and future-dated signals are scheduled.
         */
        private void applySignalsToOthers(ClassId<?, Id> cid, Worker worker, Signals<Id> signals) {
            Signal<?, Id> signal;
            while ((signal = signals.signalsToOther.pollFirst()) != null) {
                Signal<?, Id> s = signal;
                if (signal.isImmediate()) {
                    subject.onNext(signal);
                } else if (signal.event() instanceof CancelTimedSignal) {
                    cancel(signal);
                } else {
                    long delayMs = signal.time().get() - worker.now(TimeUnit.MILLISECONDS);
                    if (delayMs <= 0) {
                        // scheduled time already passed; deliver now
                        subject.onNext(signal);
                    } else {
                        scheduleSignal(cid, worker, signal, s, delayMs);
                    }
                }
            }
        }

        /** Cancels a previously scheduled signal identified by its (from, to) pair. */
        private void cancel(Signal<?, Id> signal) {
            @SuppressWarnings("unchecked")
            CancelTimedSignal<Id> s = ((CancelTimedSignal<Id>) signal.event());
            @SuppressWarnings({ "unchecked", "rawtypes" })
            Disposable sub = subscriptions
                    .remove(new ClassIdPair<Id>(new ClassId(s.fromClass(), s.fromId()),
                            new ClassId(signal.cls(), signal.id())));
            if (sub != null) {
                sub.dispose();
            }
        }

        /** Schedules {@code s} for later delivery and records it so it can be cancelled. */
        private void scheduleSignal(ClassId<?, Id> from, Worker worker, Signal<?, Id> signal,
                Signal<?, Id> s, long delayMs) {
            // record pairwise signal so we can cancel it if
            // desired
            @SuppressWarnings({ "unchecked", "rawtypes" })
            ClassIdPair<Id> idPair = new ClassIdPair<Id>(from,
                    new ClassId(signal.cls(), signal.id()));
            long t1 = signalScheduler.now(TimeUnit.MILLISECONDS);
            Disposable subscription = worker.schedule(() -> {
                subject.onNext(s.now());
            } , delayMs, TimeUnit.MILLISECONDS);
            long t2 = signalScheduler.now(TimeUnit.MILLISECONDS);
            // clean up the registry roughly when the signal fires; (t2 - t1)
            // compensates for time spent scheduling the first task
            worker.schedule(() -> {
                subscriptions.remove(idPair);
            } , delayMs - (t2 - t1), TimeUnit.MILLISECONDS);
            Disposable previous = subscriptions.put(idPair, subscription);
            if (previous != null) {
                // a newer schedule for the same pair replaces any earlier one
                previous.dispose();
            }
        }
    }
    /**
     * Returns the current state machine for (cls, id), creating and caching one
     * from the behaviour factory on first use.
     */
    @SuppressWarnings({ "unchecked" })
    private <T> EntityStateMachine<T, Id> getStateMachine(Class<T> cls, Id id) {
        return (EntityStateMachine<T, Id>) stateMachines //
                .computeIfAbsent(new ClassId<T, Id>(cls, id), clsId -> {
                    try {
                        return (EntityStateMachine<T, Id>) behaviourFactory.apply(cls) //
                                .create(id) //
                                .withSearch(search) //
                                .withClock(signallerClock) //
                                .withPreTransition(preTransitionAction);
                    } catch (Exception e) {
                        // the functional interface declares a checked exception; rethrow unchecked
                        throw new RuntimeException(e);
                    }
                });
    }
public <T> Optional<T> getObject(Class<T> cls, Id id) {
try {
return getStateMachine(cls, id).get();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
    /** Submits {@code signal} for asynchronous processing. */
    public void signal(Signal<?, Id> signal) {
        subject.onNext(signal);
    }

    /** Submits {@code event} to the entity identified by (cls, id). */
    public <T> void signal(Class<T> cls, Id id, Event<? super T> event) {
        subject.onNext(Signal.create(cls, id, event));
    }

    /** Submits {@code event} to the entity identified by {@code cid}. */
    public <T> void signal(ClassId<T, Id> cid, Event<? super T> event) {
        signal(cid.cls(), cid.id(), event);
    }
    /**
     * Returns the last-emitted state for (cls, id), or null if no signal has
     * been processed for that entity yet (this does not create a machine).
     */
    @SuppressWarnings("unchecked")
    public <T> ObjectState<T> get(Class<T> cls, Id id) {
        return (EntityStateMachine<T, Id>) stateMachines.get(new ClassId<T, Id>(cls, id));
    }

    /** Completes the internal signal subject; no further signals will be accepted. */
    public void onCompleted() {
        subject.onComplete();
    }
public void cancelSignal(Class<?> fromClass, Id fromId, Class<?> toClass, Id toId) {
@SuppressWarnings({ "unchecked", "rawtypes" })
Disposable subscription = subscriptions.remove(
new ClassIdPair<Id>(new ClassId(fromClass, fromId), new ClassId(toClass, toId)));
if (subscription != null) {
subscription.dispose();
}
}
    /** Cancels a pending scheduled signal from the entity (cls, id) to itself. */
    public void cancelSignalToSelf(Class<?> cls, Id id) {
        cancelSignal(cls, id, cls, id);
    }

    /** Cancels a pending scheduled signal from the entity {@code cid} to itself. */
    public void cancelSignalToSelf(ClassId<?, Id> cid) {
        cancelSignalToSelf(cid.cls(), cid.id());
    }
}
| |
/*
* Copyright (c) 2013 Allogy Interactive.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.allogy.app.ui;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.PopupWindow;
import com.allogy.app.R;
/**
 * This class does most of the work of wrapping the {@link PopupWindow} so it's
 * simpler to use. Edited by Lorensius. W. L. T
 *
 * @author qberticus
 */
public class CustomPopupWindow {
	// view the popup is positioned relative to
	protected final View anchor;
	protected final PopupWindow window;
	// content view; must be set via setContentView(...) before showing
	private View root;
	private Drawable background = null;
	protected final WindowManager windowManager;

	/**
	 * Create a QuickAction
	 *
	 * @param anchor the view that the QuickAction will be displaying 'from'
	 */
	public CustomPopupWindow(View anchor) {
		this.anchor = anchor;
		this.window = new PopupWindow(anchor.getContext());

		// when a touch event happens outside of the window
		// make the window go away
		window.setTouchInterceptor(new OnTouchListener() {
			@Override
			public boolean onTouch(View v, MotionEvent event) {
				if (event.getAction() == MotionEvent.ACTION_OUTSIDE) {
					CustomPopupWindow.this.window.dismiss();
					return true;
				}
				return false;
			}
		});

		windowManager =
				(WindowManager) anchor.getContext().getSystemService(
						Context.WINDOW_SERVICE);
		onCreate();
	}

	/**
	 * Anything you want to have happen when created. Probably should create a
	 * view and setup the event listeners on child views.
	 */
	protected void onCreate() {
	}

	/**
	 * In case there is stuff to do right before displaying.
	 */
	protected void onShow() {
	}

	/**
	 * Prepares the popup for display: validates the content view, applies the
	 * background and sets touch/focus behaviour.
	 *
	 * @throws IllegalStateException if {@link #setContentView} was not called
	 */
	protected void preShow() {
		if (root == null) {
			throw new IllegalStateException(
					"setContentView was not called with a view to display.");
		}
		onShow();

		if (background == null) {
			// empty drawable so outside-touch dismissal works with WRAP_CONTENT sizing
			window.setBackgroundDrawable(new BitmapDrawable());
		} else {
			window.setBackgroundDrawable(background);
		}

		// if using PopupWindow#setBackgroundDrawable these are the only values of the
		// width and height that make it work
		// otherwise you need to set the background of the root viewgroup
		// and set the popupwindow background to an empty BitmapDrawable
		window.setWidth(WindowManager.LayoutParams.WRAP_CONTENT);
		window.setHeight(WindowManager.LayoutParams.WRAP_CONTENT);
		window.setTouchable(true);
		window.setFocusable(true);
		window.setOutsideTouchable(true);

		window.setContentView(root);
	}

	/** Sets the drawable used as the popup background (null means empty bitmap). */
	public void setBackgroundDrawable(Drawable background) {
		this.background = background;
	}

	/**
	 * Sets the content view. Probably should be called from {@link #onCreate()}.
	 *
	 * @param root the view the popup will display
	 */
	public void setContentView(View root) {
		this.root = root;
		window.setContentView(root);
	}

	/**
	 * Will inflate and set the view from a resource id
	 *
	 * @param layoutResID layout resource to inflate as the popup content
	 */
	public void setContentView(int layoutResID) {
		LayoutInflater inflator =
				(LayoutInflater) anchor.getContext().getSystemService(
						Context.LAYOUT_INFLATER_SERVICE);
		setContentView(inflator.inflate(layoutResID, null));
	}

	/**
	 * If you want to do anything when {@link #dismiss()} is called
	 *
	 * @param listener dismiss callback forwarded to the underlying PopupWindow
	 */
	public void setOnDismissListener(PopupWindow.OnDismissListener listener) {
		window.setOnDismissListener(listener);
	}

	/**
	 * Displays like a popdown menu from the anchor view
	 */
	public void showDropDown() {
		showDropDown(0, 0);
	}

	/**
	 * Displays like a popdown menu from the anchor view.
	 *
	 * @param xOffset offset in X direction
	 * @param yOffset offset in Y direction
	 */
	public void showDropDown(int xOffset, int yOffset) {
		preShow();

		window.setAnimationStyle(R.style.Animations_PopDownMenu_Left);

		window.showAsDropDown(anchor, xOffset, yOffset);
	}

	/**
	 * Displays like a QuickAction from the anchor view.
	 */
	public void showLikeQuickAction() {
		showLikeQuickAction(0, 0);
	}

	/**
	 * Displays like a QuickAction from the anchor view: horizontally centered
	 * on screen, above the anchor when it fits, otherwise below it.
	 *
	 * @param xOffset offset in the X direction
	 * @param yOffset offset in the Y direction
	 */
	public void showLikeQuickAction(int xOffset, int yOffset) {
		preShow();

		window.setAnimationStyle(R.style.Animations_PopUpMenu_Center);

		int[] location = new int[2];
		anchor.getLocationOnScreen(location);

		Rect anchorRect =
				new Rect(location[0], location[1], location[0] + anchor.getWidth(),
						location[1] + anchor.getHeight());

		root.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT,
				LayoutParams.WRAP_CONTENT));
		// NOTE(review): LayoutParams.WRAP_CONTENT is passed where measure()
		// expects MeasureSpec values — confirm the measured sizes are correct
		root.measure(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);

		int rootWidth = root.getMeasuredWidth();
		int rootHeight = root.getMeasuredHeight();

		// NOTE(review): Display#getWidth() is deprecated on newer APIs — verify target API level
		int screenWidth = windowManager.getDefaultDisplay().getWidth();
		// int screenHeight = windowManager.getDefaultDisplay().getHeight();

		int xPos = ((screenWidth - rootWidth) / 2) + xOffset;
		int yPos = anchorRect.top - rootHeight + yOffset;

		// display on bottom when there is not enough room above the anchor
		if (rootHeight > anchorRect.top) {
			yPos = anchorRect.bottom + yOffset;

			window.setAnimationStyle(R.style.Animations_PopDownMenu_Center);
		}

		window.showAtLocation(anchor, Gravity.NO_GRAVITY, xPos, yPos);
	}

	/** Dismisses the popup window. */
	public void dismiss() {
		window.dismiss();
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.presto.RowPagesBuilder;
import com.facebook.presto.operator.NestedLoopBuildOperator.NestedLoopBuildOperatorFactory;
import com.facebook.presto.operator.NestedLoopJoinOperator.NestedLoopJoinOperatorFactory;
import com.facebook.presto.operator.NestedLoopJoinOperator.NestedLoopPageBuilder;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.TestingTaskContext;
import com.google.common.collect.ImmutableList;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.operator.OperatorAssertion.assertOperatorEquals;
import static com.facebook.presto.operator.ValuesOperator.ValuesOperatorFactory;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.MaterializedResult.resultBuilder;
import static com.google.common.collect.Iterables.concat;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
@Test(singleThreaded = true)
public class TestNestedLoopJoinOperator
{
private ExecutorService executor;
private ScheduledExecutorService scheduledExecutor;
@BeforeClass
public void setUp()
{
executor = newCachedThreadPool(daemonThreadsNamed("test-executor-%s"));
scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s"));
}
@AfterClass(alwaysRun = true)
public void tearDown()
{
executor.shutdownNow();
scheduledExecutor.shutdownNow();
}
@Test
public void testNestedLoopJoin()
{
TaskContext taskContext = createTaskContext();
// build
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(VARCHAR, BIGINT, BIGINT))
.addSequencePage(3, 20, 30, 40);
// probe
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(VARCHAR, BIGINT, BIGINT));
List<Page> probeInput = probePages
.addSequencePage(2, 0, 1000, 2000)
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probePages.getTypes(), buildPages.getTypes()))
.row("0", 1000L, 2000L, "20", 30L, 40L)
.row("0", 1000L, 2000L, "21", 31L, 41L)
.row("0", 1000L, 2000L, "22", 32L, 42L)
.row("1", 1001L, 2001L, "20", 30L, 40L)
.row("1", 1001L, 2001L, "21", 31L, 41L)
.row("1", 1001L, 2001L, "22", 32L, 42L)
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
// Test probe pages has more rows
buildPages = rowPagesBuilder(ImmutableList.of(VARCHAR, BIGINT, BIGINT))
.addSequencePage(2, 20, 30, 40);
joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// probe
probePages = rowPagesBuilder(ImmutableList.of(VARCHAR, BIGINT, BIGINT));
probeInput = probePages
.addSequencePage(3, 0, 1000, 2000)
.build();
// expected
expected = resultBuilder(taskContext.getSession(), concat(probePages.getTypes(), buildPages.getTypes()))
.row("0", 1000L, 2000L, "20", 30L, 40L)
.row("1", 1001L, 2001L, "20", 30L, 40L)
.row("2", 1002L, 2002L, "20", 30L, 40L)
.row("0", 1000L, 2000L, "21", 31L, 41L)
.row("1", 1001L, 2001L, "21", 31L, 41L)
.row("2", 1002L, 2002L, "21", 31L, 41L)
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testCrossJoinWithNullProbe()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.row("a")
.row("b");
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.row("A")
.row((String) null)
.row((String) null)
.row("A")
.row("B")
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.row("A", "a")
.row(null, "a")
.row(null, "a")
.row("A", "a")
.row("B", "a")
.row("A", "b")
.row(null, "b")
.row(null, "b")
.row("A", "b")
.row("B", "b")
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testCrossJoinWithNullBuild()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.row("a")
.row((String) null)
.row((String) null)
.row("a")
.row("b");
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.row("A")
.row("B")
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.row("A", "a")
.row("A", null)
.row("A", null)
.row("A", "a")
.row("A", "b")
.row("B", "a")
.row("B", null)
.row("B", null)
.row("B", "a")
.row("B", "b")
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testCrossJoinWithNullOnBothSides()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.row("a")
.row((String) null)
.row("b")
.row("c")
.row((String) null);
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.row("A")
.row("B")
.row((String) null)
.row("C")
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.row("A", "a")
.row("A", null)
.row("A", "b")
.row("A", "c")
.row("A", null)
.row("B", "a")
.row("B", null)
.row("B", "b")
.row("B", "c")
.row("B", null)
.row(null, "a")
.row(null, null)
.row(null, "b")
.row(null, "c")
.row(null, null)
.row("C", "a")
.row("C", null)
.row("C", "b")
.row("C", "c")
.row("C", null)
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testBuildMultiplePages()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.row("a")
.pageBreak()
.row((String) null)
.row("b")
.row("c")
.pageBreak()
.row("d");
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.row("A")
.row("B")
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.row("A", "a")
.row("B", "a")
.row("A", null)
.row("A", "b")
.row("A", "c")
.row("B", null)
.row("B", "b")
.row("B", "c")
.row("A", "d")
.row("B", "d")
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testProbeMultiplePages()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.row("A")
.row("B");
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.row("a")
.pageBreak()
.row((String) null)
.row("b")
.row("c")
.pageBreak()
.row("d")
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.row("a", "A")
.row("a", "B")
.row(null, "A")
.row("b", "A")
.row("c", "A")
.row(null, "B")
.row("b", "B")
.row("c", "B")
.row("d", "A")
.row("d", "B")
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testProbeAndBuildMultiplePages()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.row("A")
.row("B")
.pageBreak()
.row("C");
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.row("a")
.pageBreak()
.row((String) null)
.row("b")
.row("c")
.pageBreak()
.row("d")
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.row("a", "A")
.row("a", "B")
.row("a", "C")
.row(null, "A")
.row("b", "A")
.row("c", "A")
.row(null, "B")
.row("b", "B")
.row("c", "B")
.row(null, "C")
.row("b", "C")
.row("c", "C")
.row("d", "A")
.row("d", "B")
.row("d", "C")
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testEmptyProbePage()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.row("A")
.row("B")
.pageBreak()
.row("C");
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.pageBreak()
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testEmptyBuildPage()
{
TaskContext taskContext = createTaskContext();
// build
List<Type> buildTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder buildPages = rowPagesBuilder(buildTypes)
.pageBreak();
// probe
List<Type> probeTypes = ImmutableList.of(VARCHAR);
RowPagesBuilder probePages = rowPagesBuilder(probeTypes);
List<Page> probeInput = probePages
.row("A")
.row("B")
.pageBreak()
.build();
NestedLoopJoinOperatorFactory joinOperatorFactory = newJoinOperatorFactoryWithCompletedBuild(taskContext, buildPages);
// expected
MaterializedResult expected = resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
.build();
assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected);
}
@Test
public void testCount()
{
// normal case
Page buildPage = new Page(100);
Page probePage = new Page(45);
NestedLoopPageBuilder resultPageBuilder = new NestedLoopPageBuilder(probePage, buildPage);
assertTrue(resultPageBuilder.hasNext(), "There should be at least one page.");
long result = 0;
while (resultPageBuilder.hasNext()) {
result += resultPageBuilder.next().getPositionCount();
}
assertEquals(result, 4500);
// force the product to be bigger than Integer.MAX_VALUE
buildPage = new Page(Integer.MAX_VALUE - 10);
resultPageBuilder = new NestedLoopPageBuilder(probePage, buildPage);
result = 0;
while (resultPageBuilder.hasNext()) {
result += resultPageBuilder.next().getPositionCount();
}
assertEquals((Integer.MAX_VALUE - 10) * 45L, result);
}
private TaskContext createTaskContext()
{
return TestingTaskContext.createTaskContext(executor, scheduledExecutor, TEST_SESSION);
}
/**
 * Creates a {@link NestedLoopJoinOperatorFactory} whose build side has already been
 * fully consumed, so callers can exercise just the probe side of the join.
 */
private static NestedLoopJoinOperatorFactory newJoinOperatorFactoryWithCompletedBuild(TaskContext taskContext, RowPagesBuilder buildPages)
{
    DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();

    // Feed the prepared build pages through a values operator into the build operator.
    ValuesOperatorFactory valuesOperatorFactory = new ValuesOperatorFactory(0, new PlanNodeId("test"), buildPages.build());
    JoinBridgeManager<NestedLoopJoinBridge> nestedLoopJoinBridgeManager = new JoinBridgeManager<>(
            false,
            PipelineExecutionStrategy.UNGROUPED_EXECUTION,
            PipelineExecutionStrategy.UNGROUPED_EXECUTION,
            lifespan -> new NestedLoopJoinPagesSupplier(),
            buildPages.getTypes());
    // Both build and join factories share the same bridge manager so the probe side
    // can see the pages collected by the build side.
    NestedLoopBuildOperatorFactory nestedLoopBuildOperatorFactory = new NestedLoopBuildOperatorFactory(1, new PlanNodeId("test"), nestedLoopJoinBridgeManager);
    NestedLoopJoinOperatorFactory joinOperatorFactory = new NestedLoopJoinOperatorFactory(3, new PlanNodeId("test"), nestedLoopJoinBridgeManager);

    Operator valuesOperator = valuesOperatorFactory.createOperator(driverContext);
    Operator nestedLoopBuildOperator = nestedLoopBuildOperatorFactory.createOperator(driverContext);
    Driver driver = Driver.createDriver(driverContext,
            valuesOperator,
            nestedLoopBuildOperator);
    // No further operators will be created from these factories.
    valuesOperatorFactory.noMoreOperators();
    nestedLoopBuildOperatorFactory.noMoreOperators();

    // Drive the build pipeline while the build operator is not blocked.
    // NOTE(review): this assumes the build operator's isBlocked() future stops being
    // done once the build has fully completed, which ends the loop — confirm against
    // NestedLoopBuildOperator semantics.
    while (nestedLoopBuildOperator.isBlocked().isDone()) {
        driver.process();
    }

    return joinOperatorFactory;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.apache.camel.BeanScope;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Expression;
import org.apache.camel.ExpressionFactory;
import org.apache.camel.Message;
import org.apache.camel.Predicate;
import org.apache.camel.support.ExpressionAdapter;
import org.apache.camel.support.ExpressionToPredicateAdapter;
import org.apache.camel.support.builder.Namespaces;
/**
* Represents an expression clause within the DSL which when the expression is complete the clause continues to another
* part of the DSL
*/
public class ExpressionClause<T> implements Expression, Predicate {
private ExpressionClauseSupport<T> delegate;
private volatile Expression expr;
public ExpressionClause(T result) {
this.delegate = new ExpressionClauseSupport<>(result);
}
// Helper expressions
// -------------------------------------------------------------------------
/**
* Specify an {@link Expression} instance
*/
public T expression(Expression expression) {
return delegate.expression(expression);
}
/**
* Specify the constant expression value. <b>Important:</b> this is a fixed constant value that is only set once
* during starting up the route, do not use this if you want dynamic values during routing.
*/
public T constant(Object value) {
return delegate.constant(value);
}
/**
* An expression of the exchange
*/
public T exchange() {
return delegate.exchange();
}
/**
* A functional expression of the exchange
*/
public T exchange(final Function<Exchange, Object> function) {
return delegate.expression(new ExpressionAdapter() {
public Object evaluate(Exchange exchange) {
return function.apply(exchange);
}
});
}
/**
* An expression of an inbound message
*/
public T message() {
return inMessage();
}
/**
* A functional expression of an inbound message
*/
public T message(final Function<Message, Object> function) {
return inMessage(function);
}
/**
* An expression of an inbound message
*/
public T inMessage() {
return delegate.inMessage();
}
/**
* A functional expression of an inbound message
*/
public T inMessage(final Function<Message, Object> function) {
return delegate.expression(new ExpressionAdapter() {
public Object evaluate(Exchange exchange) {
return function.apply(exchange.getIn());
}
});
}
/**
* An expression of an inbound message body
*/
public T body() {
return delegate.body();
}
/**
* A functional expression of an inbound message body
*/
public T body(final Function<Object, Object> function) {
return delegate.expression(new ExpressionAdapter() {
public Object evaluate(Exchange exchange) {
return function.apply(exchange.getIn().getBody());
}
});
}
/**
* A functional expression of an inbound message body and headers
*/
public T body(final BiFunction<Object, Map<String, Object>, Object> function) {
return delegate.expression(new ExpressionAdapter() {
public Object evaluate(Exchange exchange) {
return function.apply(exchange.getIn().getBody(), exchange.getIn().getHeaders());
}
});
}
/**
* An expression of an inbound message body converted to the expected type
*/
public T body(Class<?> expectedType) {
return delegate.body(expectedType);
}
/**
* A functional expression of an inbound message body converted to the expected type
*/
public <B> T body(Class<B> expectedType, final Function<B, Object> function) {
return delegate.expression(new ExpressionAdapter() {
public Object evaluate(Exchange exchange) {
return function.apply(exchange.getIn().getBody(expectedType));
}
});
}
/**
* A functional expression of an inbound message body converted to the expected type and headers
*/
public <B> T body(Class<B> expectedType, final BiFunction<B, Map<String, Object>, Object> function) {
return delegate.expression(new ExpressionAdapter() {
public Object evaluate(Exchange exchange) {
return function.apply(exchange.getIn().getBody(expectedType), exchange.getIn().getHeaders());
}
});
}
/**
* An expression of an inbound message header of the given name
*/
public T header(String name) {
return delegate.header(name);
}
/**
* An expression of the inbound headers
*/
public T headers() {
return delegate.headers();
}
/**
* An expression of an exchange property of the given name
*/
public T exchangeProperty(String name) {
return delegate.exchangeProperty(name);
}
/**
* An expression of the exchange properties
*/
public T exchangeProperties() {
return delegate.exchangeProperties();
}
// Languages
// -------------------------------------------------------------------------
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param bean the name of the bean looked up the registry
* @return the builder to continue processing the DSL
*/
public T method(String bean) {
return delegate.method(bean);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param instance the instance of the bean
* @return the builder to continue processing the DSL
*/
public T method(Object instance) {
return delegate.method(instance);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param beanType the Class of the bean which we want to invoke
* @return the builder to continue processing the DSL
*/
public T method(Class<?> beanType) {
return delegate.method(beanType);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param bean the name of the bean looked up the registry
* @param method the name of the method to invoke on the bean
* @return the builder to continue processing the DSL
*/
public T method(String bean, String method) {
return delegate.method(bean, method);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param bean the name of the bean looked up the registry
* @param scope the scope of the bean
* @return the builder to continue processing the DSL
*/
public T method(String bean, BeanScope scope) {
return delegate.method(bean, scope);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param bean the name of the bean looked up the registry
* @param method the name of the method to invoke on the bean
* @param scope the scope of the bean
* @return the builder to continue processing the DSL
*/
public T method(String bean, String method, BeanScope scope) {
return delegate.method(bean, method, scope);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param instance the instance of the bean
* @param method the name of the method to invoke on the bean
* @return the builder to continue processing the DSL
*/
public T method(Object instance, String method) {
return delegate.method(instance, method);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param beanType the Class of the bean which we want to invoke
* @param method the name of the method to invoke on the bean
* @return the builder to continue processing the DSL
*/
public T method(Class<?> beanType, String method) {
return delegate.method(beanType, method);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param beanType the Class of the bean which we want to invoke
* @param scope the scope of the bean
* @return the builder to continue processing the DSL
*/
public T method(Class<?> beanType, BeanScope scope) {
return delegate.method(beanType, scope);
}
/**
* Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a> which
* basically means the bean is invoked to determine the expression value.
*
* @param beanType the Class of the bean which we want to invoke
* @param method the name of the method to invoke on the bean
* @param scope the scope of the bean
* @return the builder to continue processing the DSL
*/
public T method(Class<?> beanType, String method, BeanScope scope) {
return delegate.method(beanType, method, scope);
}
/**
* Evaluates a <a href="http://camel.apache.org/groovy.html">Groovy expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T groovy(String text) {
return delegate.groovy(text);
}
/**
* Returns a JOOR expression value builder
*/
public T joor(String value) {
return delegate.joor(value);
}
/**
* Returns a JOOR expression value builder
*/
public T joor(String value, Class<?> resultType) {
return delegate.joor(value, resultType);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T jsonpath(String text) {
return delegate.jsonpath(text);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a>
*
* @param text the expression to be evaluated
* @param suppressExceptions whether to suppress exceptions such as PathNotFoundException
* @return the builder to continue processing the DSL
*/
public T jsonpath(String text, boolean suppressExceptions) {
return delegate.jsonpath(text, suppressExceptions);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a>
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @return the builder to continue processing the DSL
*/
public T jsonpath(String text, Class<?> resultType) {
return delegate.jsonpath(text, resultType);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a>
*
* @param text the expression to be evaluated
* @param suppressExceptions whether to suppress exceptions such as PathNotFoundException
* @param resultType the return type expected by the expression
* @return the builder to continue processing the DSL
*/
public T jsonpath(String text, boolean suppressExceptions, Class<?> resultType) {
return delegate.jsonpath(text, suppressExceptions, resultType);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a>
*
* @param text the expression to be evaluated
* @param suppressExceptions whether to suppress exceptions such as PathNotFoundException
* @param resultType the return type expected by the expression
* @param headerName the name of the header to apply the expression to
* @return the builder to continue processing the DSL
*/
public T jsonpath(String text, boolean suppressExceptions, Class<?> resultType, String headerName) {
return delegate.jsonpath(text, suppressExceptions, true, resultType, headerName);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a> with writeAsString enabled.
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T jsonpathWriteAsString(String text) {
return delegate.jsonpathWriteAsString(text);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a> with writeAsString enabled.
*
* @param text the expression to be evaluated
* @param suppressExceptions whether to suppress exceptions such as PathNotFoundException
* @return the builder to continue processing the DSL
*/
public T jsonpathWriteAsString(String text, boolean suppressExceptions) {
return delegate.jsonpathWriteAsString(text, suppressExceptions);
}
/**
* Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a> with writeAsString enabled.
*
* @param text the expression to be evaluated
* @param suppressExceptions whether to suppress exceptions such as PathNotFoundException
* @param headerName the name of the header to apply the expression to
* @return the builder to continue processing the DSL
*/
public T jsonpathWriteAsString(String text, boolean suppressExceptions, String headerName) {
return delegate.jsonpathWriteAsString(text, suppressExceptions, true, headerName);
}
/**
* Evaluates an <a href="http://camel.apache.org/ognl.html">OGNL expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T ognl(String text) {
return delegate.ognl(text);
}
/**
* Evaluates a <a href="http://camel.apache.org/mvel.html">MVEL expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T mvel(String text) {
return delegate.mvel(text);
}
/**
* Evaluates a <a href="http://camel.apache.org/ref-language.html">Ref expression</a>
*
* @param ref refers to the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T ref(String ref) {
return delegate.ref(ref);
}
/**
* Evaluates a <a href="http://camel.apache.org/spel.html">SpEL expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T spel(String text) {
return delegate.spel(text);
}
/**
* Evaluates a <a href="http://camel.apache.org/simple.html">Simple expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T simple(String text) {
return delegate.simple(text);
}
/**
* Evaluates a <a href="http://camel.apache.org/simple.html">Simple expression</a>
*
* @param text the expression to be evaluated
* @param resultType the result type
* @return the builder to continue processing the DSL
*/
public T simple(String text, Class<?> resultType) {
return delegate.simple(text, resultType);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @return the builder to continue processing the DSL
*/
public T tokenize(String token) {
return delegate.tokenize(token);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param regex whether the token is a regular expression or not
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, boolean regex) {
return tokenize(token, regex, false);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param regex whether the token is a regular expression or not
* @param skipFirst whether to skip the first element
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, boolean regex, boolean skipFirst) {
return delegate.tokenize(token, null, regex, skipFirst);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param regex whether the token is a regular expression or not
* @param group to group by the given number
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, boolean regex, int group) {
return tokenize(token, regex, group, false);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param regex whether the token is a regular expression or not
* @param group to group by the given number
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, boolean regex, String group) {
return tokenize(token, regex, group, false);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param regex whether the token is a regular expression or not
* @param group to group by the given number
* @param skipFirst whether to skip the first element
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, boolean regex, int group, boolean skipFirst) {
return delegate.tokenize(token, null, regex, group, skipFirst);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param regex whether the token is a regular expression or not
* @param group to group by the given number
* @param skipFirst whether to skip the first element
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, boolean regex, String group, boolean skipFirst) {
return delegate.tokenize(token, null, regex, group, skipFirst);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param regex whether the token is a regular expression or not
* @param group to group by the given number
* @param skipFirst whether to skip the first element
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, boolean regex, int group, String groupDelimiter, boolean skipFirst) {
return delegate.tokenize(token, null, regex, "" + group, groupDelimiter, skipFirst);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param group to group by the given number
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, int group) {
return delegate.tokenize(token, group);
}
/**
* Evaluates a token expression on the message body
*
* @param token the token
* @param group to group by the given number
* @param skipFirst whether to skip the first element
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, int group, boolean skipFirst) {
return delegate.tokenize(token, group, skipFirst);
}
/**
* Evaluates a token expression on the given header
*
* @param token the token
* @param headerName name of header to tokenize
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, String headerName) {
return delegate.tokenize(token, headerName);
}
/**
* Evaluates a token expression on the given header
*
* @param token the token
* @param headerName name of header to tokenize
* @param regex whether the token is a regular expression or not
* @return the builder to continue processing the DSL
*/
public T tokenize(String token, String headerName, boolean regex) {
return delegate.tokenize(token, headerName, regex);
}
/**
* Evaluates a token pair expression on the message body.
* <p/>
* Tokens is not included.
*
* @param startToken the start token
* @param endToken the end token
* @return the builder to continue processing the DSL
*/
public T tokenizePair(String startToken, String endToken) {
return tokenizePair(startToken, endToken, false);
}
/**
* Evaluates a token pair expression on the message body
*
* @param startToken the start token
* @param endToken the end token
* @param includeTokens whether to include tokens
* @return the builder to continue processing the DSL
*/
public T tokenizePair(String startToken, String endToken, boolean includeTokens) {
return delegate.tokenizePair(startToken, endToken, includeTokens);
}
/**
* Evaluates a XML token expression on the message body with XML content
*
* @param tagName the tag name of the child nodes to tokenize
* @return the builder to continue processing the DSL
*/
public T tokenizeXML(String tagName) {
return tokenizeXML(tagName, null);
}
/**
* Evaluates a XML token expression on the message body with XML content
*
* @param tagName the tag name of the child nodes to tokenize
* @param group to group by the given number
* @return the builder to continue processing the DSL
*/
public T tokenizeXML(String tagName, int group) {
return tokenizeXML(tagName, null, group);
}
/**
* Evaluates a token pair expression on the message body with XML content
*
* @param tagName the tag name of the child nodes to tokenize
* @param inheritNamespaceTagName parent or root tag name that contains namespace(s) to inherit
* @return the builder to continue processing the DSL
*/
public T tokenizeXML(String tagName, String inheritNamespaceTagName) {
return tokenizeXML(tagName, inheritNamespaceTagName, 0);
}
/**
* Evaluates a token pair expression on the message body with XML content
*
* @param tagName the tag name of the child nodes to tokenize
* @param inheritNamespaceTagName parent or root tag name that contains namespace(s) to inherit
* @param group to group by the given number
* @return the builder to continue processing the DSL
*/
public T tokenizeXML(String tagName, String inheritNamespaceTagName, int group) {
return delegate.tokenizeXMLPair(tagName, inheritNamespaceTagName, group);
}
public T xtokenize(String path, Namespaces namespaces) {
return xtokenize(path, 'i', namespaces);
}
public T xtokenize(String path, char mode, Namespaces namespaces) {
return xtokenize(path, mode, namespaces, 0);
}
public T xtokenize(String path, char mode, Namespaces namespaces, int group) {
return delegate.xtokenize(path, mode, namespaces, group);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T xpath(String text) {
return delegate.xpath(text);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> on the supplied header name's
* contents
*
* @param text the expression to be evaluated
* @param headerName the name of the header to apply the expression to
* @return the builder to continue processing the DSL
*/
public T xpath(String text, String headerName) {
return delegate.xpath(text, headerName);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> with the specified result type
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @return the builder to continue processing the DSL
*/
public T xpath(String text, Class<?> resultType) {
return delegate.xpath(text, resultType);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> with the specified result type on
* the supplied header name's contents
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param headerName the name of the header to apply the expression to
* @return the builder to continue processing the DSL
*/
public T xpath(String text, Class<?> resultType, String headerName) {
return delegate.xpath(text, resultType, headerName);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> with the specified result type and
* set of namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xpath(String text, Class<?> resultType, Namespaces namespaces) {
return delegate.xpath(text, resultType, namespaces);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> with the specified result type and
* set of namespace prefixes and URIs on the supplied header name's contents
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param headerName the name of the header to apply the expression to
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xpath(String text, Class<?> resultType, Namespaces namespaces, String headerName) {
return delegate.xpath(text, resultType, namespaces, headerName);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> with the specified result type and
* set of namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xpath(String text, Class<?> resultType, Map<String, String> namespaces) {
return delegate.xpath(text, resultType, namespaces);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> with the specified set of
* namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xpath(String text, Namespaces namespaces) {
return delegate.xpath(text, namespaces);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> with the specified set of
* namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xpath(String text, Map<String, String> namespaces) {
return delegate.xpath(text, namespaces);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a>
*
* @param text the expression to be evaluated
* @return the builder to continue processing the DSL
*/
public T xquery(String text) {
return delegate.xquery(text);
}
/**
* Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a> on the supplied header name's
* contents
*
* @param text the expression to be evaluated
* @param headerName the name of the header to apply the expression to
* @return the builder to continue processing the DSL
*/
public T xquery(String text, String headerName) {
return delegate.xquery(text, headerName);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a> with the specified result type
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @return the builder to continue processing the DSL
*/
public T xquery(String text, Class<?> resultType) {
return delegate.xquery(text, resultType);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a> with the specified result type
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param headerName the name of the header to apply the expression to
* @return the builder to continue processing the DSL
*/
public T xquery(String text, Class<?> resultType, String headerName) {
return delegate.xquery(text, resultType, headerName);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a> with the specified result type
* and set of namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xquery(String text, Class<?> resultType, Namespaces namespaces) {
return delegate.xquery(text, resultType, namespaces);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a> with the specified result type
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param headerName the name of the header to apply the expression to
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xquery(String text, Class<?> resultType, Namespaces namespaces, String headerName) {
return delegate.xquery(text, resultType, namespaces, headerName);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a> with the specified result type
* and set of namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param resultType the return type expected by the expression
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xquery(String text, Class<?> resultType, Map<String, String> namespaces) {
return delegate.xquery(text, resultType, namespaces);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a> with the specified set of
* namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xquery(String text, Namespaces namespaces) {
return delegate.xquery(text, namespaces);
}
/**
* Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a> with the specified set of
* namespace prefixes and URIs
*
* @param text the expression to be evaluated
* @param namespaces the namespace prefix and URIs to use
* @return the builder to continue processing the DSL
*/
public T xquery(String text, Map<String, String> namespaces) {
return delegate.xquery(text, namespaces);
}
/**
* Evaluates a given language name with the expression text
*
* @param language the name of the language
* @param expression the expression in the given language
* @return the builder to continue processing the DSL
*/
public T language(String language, String expression) {
return delegate.language(language, expression);
}
// Properties
// -------------------------------------------------------------------------
public Expression getExpressionValue() {
return delegate.getExpressionValue();
}
public ExpressionFactory getExpressionType() {
return delegate.getExpressionType();
}
@Override
public void init(CamelContext context) {
    // Lazily create and initialize the expression exactly once, using
    // double-checked locking around the cached 'expr' field. Falls back to
    // creating the expression from the delegate's expression type when no
    // fixed expression value was configured.
    // NOTE(review): double-checked locking is only safe if 'expr' is declared
    // volatile -- the field declaration is outside this chunk; confirm it.
    if (expr == null) {
        synchronized (this) {
            if (expr == null) {
                Expression newExpression = getExpressionValue();
                if (newExpression == null) {
                    newExpression = delegate.getExpressionType().createExpression(context);
                }
                newExpression.init(context);
                expr = newExpression;
            }
        }
    }
}
/**
 * Evaluates the lazily-initialized expression against the given exchange.
 * Triggers {@code init} on first use.
 *
 * @param exchange the exchange to evaluate against
 * @param type the desired result type
 * @return the evaluation result converted to the requested type
 */
@Override
public <T> T evaluate(Exchange exchange, Class<T> type) {
    // Note: this method-level <T> shadows the class-level type parameter T.
    init(exchange.getContext());
    return expr.evaluate(exchange, type);
}
/**
 * Evaluates the lazily-initialized expression as a predicate against the given exchange.
 *
 * @param exchange the exchange to evaluate against
 * @return the boolean outcome of adapting the expression to a predicate
 */
@Override
public boolean matches(Exchange exchange) {
    init(exchange.getContext());
    // A fresh adapter is created per call; the underlying expression itself is cached in 'expr'.
    return new ExpressionToPredicateAdapter(expr).matches(exchange);
}
}
| |
// Copyright 2016 Xiaomi, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.xiaomi.linden.core;
import java.io.IOException;
import java.util.List;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.junit.Assert;
import org.junit.Test;
import com.xiaomi.linden.bql.BQLCompiler;
import com.xiaomi.linden.thrift.common.LindenDocument;
import com.xiaomi.linden.thrift.common.LindenField;
import com.xiaomi.linden.thrift.common.LindenFieldSchema;
import com.xiaomi.linden.thrift.common.LindenResult;
import com.xiaomi.linden.thrift.common.LindenSchema;
import com.xiaomi.linden.thrift.common.LindenSearchRequest;
import com.xiaomi.linden.thrift.common.LindenType;
/**
 * Tests Linden dynamic fields: per-document fields that are not declared in the
 * schema but are supplied under the "_dynamic" key with an explicit "_type"
 * (or "_tokenize") marker.
 */
public class TestLindenDynamicField extends TestLindenCoreBase {

    public LindenSchema schema;

    // One indexed document exercising every supported dynamic type (string, long,
    // float, int, double) plus a tokenized dynamic string field.
    public String jsonStr = "{\n"
        + "  \"id\": \"1\",\n"
        + "  \"name\": \"appstore-search\",\n"
        + "  \"level\": \"info\",\n"
        + "  \"log\": \"search result is empty\",\n"
        + "  \"host\": \"xiaomi-search01.bj\",\n"
        + "  \"shard\": \"1\",\n"
        + "  \"_dynamic\": [\n"
        + "    {\n"
        + "      \"mgroup\": \"misearch\",\n"
        + "      \"_type\": \"string\"\n"
        + "    },\n"
        + "    {\n"
        + "      \"cost\": 30,\n"
        + "      \"_type\": \"long\"\n"
        + "    },\n"
        + "    {\n"
        + "      \"num\": 7.7,\n"
        + "      \"_type\": \"float\"\n"
        + "    },\n"
        + "    {\n"
        + "      \"count\": 3,\n"
        + "      \"_type\": \"int\"\n"
        + "    },\n"
        + "    {\n"
        + "      \"val\": \"10.0\",\n"
        + "      \"_type\": \"double\"\n"
        + "    },\n"
        + "    {\n"
        + "      \"text\": \"this is a tokenized string field\",\n"
        + "      \"_tokenize\": \"true\"\n"
        + "    }\n"
        + "  ]\n"
        + "}";

    /**
     * Indexes the test document and prepares the BQL compiler.
     * Failures propagate so a broken setup fails the test immediately instead of
     * surfacing later as confusing NullPointerExceptions.
     */
    public TestLindenDynamicField() throws Exception {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("type", "index");
        jsonObject.put("content", JSONObject.parseObject(jsonStr));
        handleRequest(jsonObject.toString());
        lindenCore.commit();
        lindenCore.refresh();
        bqlCompiler = new BQLCompiler(lindenConfig.getSchema());
    }

    /**
     * Declares only the static part of the schema; the "_dynamic" fields above are
     * intentionally absent so the dynamic-field code path is exercised.
     */
    @Override
    public void init() {
        schema = new LindenSchema().setId("id");
        schema.addToFields(
            new LindenFieldSchema("name", LindenType.STRING).setIndexed(true).setTokenized(true)
                .setStored(true));
        schema.addToFields(
            new LindenFieldSchema("level", LindenType.STRING).setIndexed(true).setStored(true));
        schema.addToFields(
            new LindenFieldSchema("log", LindenType.STRING).setIndexed(true).setTokenized(true)
                .setStored(true));
        schema.addToFields(
            new LindenFieldSchema("host", LindenType.STRING).setIndexed(true).setStored(true));
        schema.addToFields(
            new LindenFieldSchema("shard", LindenType.INTEGER).setIndexed(true).setStored(true));
        lindenConfig.setSchema(schema);
    }

    /**
     * Dynamic fields must be materialized by the document builder as indexed,
     * stored fields with the declared type.
     */
    @Test
    public void testBuildField() throws IOException {
        LindenDocument lindenDocument = LindenDocumentBuilder.build(schema, JSON.parseObject(jsonStr));
        List<LindenField> fields = lindenDocument.getFields();
        Assert.assertEquals(true, fields.contains(new LindenField(new LindenFieldSchema()
                                                                      .setName("mgroup")
                                                                      .setType(LindenType.STRING)
                                                                      .setIndexed(true)
                                                                      .setStored(true),
                                                                  "misearch")));
        Assert.assertEquals(true, fields.contains(new LindenField(new LindenFieldSchema()
                                                                      .setName("cost")
                                                                      .setType(LindenType.LONG)
                                                                      .setIndexed(true)
                                                                      .setStored(true),
                                                                  "30")));
    }

    /**
     * End-to-end queries over the dynamic fields: exclusive/inclusive range
     * filters per type, sorting by a dynamic field, and selecting a tokenized
     * dynamic field with and without a type suffix.
     */
    @Test
    public void testDynamicField() throws IOException {
        // Exclusive lower bound {30 ...] must NOT match cost == 30.
        String
            bql =
            "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
            " query is 'name:appstore' where query is 'cost.long:{30 TO 340]' " +
            " source ";
        LindenSearchRequest request = bqlCompiler.compile(bql).getSearchRequest();
        LindenResult result = lindenCore.search(request);
        Assert.assertEquals(0, result.getHitsSize());

        // Same exclusive range expressed inside the query clause.
        bql = "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
              " query is 'name:appstore cost.long:{30 TO 340]' " +
              " source ";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        JSONObject hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals(30L, hit0Source.getLongValue("cost"));

        // Inclusive range [30 ...] must match cost == 30.
        bql = "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
              " query is 'name:appstore' where query is 'cost.long:[30 TO 340]' " +
              " source ";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals(30L, hit0Source.getLongValue("cost"));

        // Required clauses (+) with the exclusive range still exclude the document.
        bql = "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
              " query is '+name:appstore +cost.long:{30 TO 340]' " +
              " source ";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        Assert.assertEquals(0, result.getHitsSize());

        // Float-typed dynamic field range.
        bql = "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
              " query is 'name:appstore' where query is 'num.float:[3 TO 340]' " +
              " source ";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals(7.7f, hit0Source.getFloatValue("num"), 0.01);

        // Int-typed dynamic field range.
        bql = "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
              " query is 'name:appstore' where query is 'count.int:[3 TO 340]' " +
              " source ";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals(3, hit0Source.getIntValue("count"));

        // Double-typed dynamic field range (source value is the string "10.0").
        bql = "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
              " query is 'name:appstore' where query is 'val.double:[3 TO 340]' " +
              " source ";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals(10.0, hit0Source.getDoubleValue("val"), 0.01);

        // Sorting by a dynamic double field; the selected field is returned as text.
        bql = "select id,name,level,log,host,shard,cost.long,mgroup,num.float,count.int,val.double from linden by " +
              " query is 'name:appstore' order by val.double source";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals(10.0, hit0Source.getDoubleValue("val"), 0.01);
        Assert.assertEquals("10.0", result.getHits().get(0).getFields().get("val"));

        // Tokenized dynamic field, selected with an explicit .string suffix.
        bql = "select text.string from linden by query is 'name:appstore' source";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals("this is a tokenized string field", hit0Source.getString("text"));

        // Same field selected without a suffix.
        bql = "select text from linden by query is 'name:appstore' source";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals("this is a tokenized string field", hit0Source.getString("text"));

        // The tokenized field itself is searchable.
        bql = "select text from linden by query is 'text:field' source";
        request = bqlCompiler.compile(bql).getSearchRequest();
        result = lindenCore.search(request);
        hit0Source = JSON.parseObject(result.hits.get(0).getSource());
        Assert.assertEquals("1", hit0Source.getString("id"));
        Assert.assertEquals("this is a tokenized string field", hit0Source.getString("text"));
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package cerl.gui.utilities;
import cerl.gui.forms.HelpFileDisplay;
import cerl.gui.forms.MarkovChainMatrix;
import cerl.gui.standard.utilities.FileUtility;
import cerl.gui.standard.utilities.HelpFile;
import cerl.gui.standard.utilities.Instruction;
import cerl.gui.standard.utilities.Result;
import static cerl.gui.utilities.DigPopFileTypeEnum.Census_Enumerations;
import static cerl.gui.utilities.DigPopFileTypeEnum.Household_Micro_Data;
import static cerl.gui.utilities.DigPopFileTypeEnum.Population_Micro_Data;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.Optional;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
*
* DigPopGUIUtilityClass is the Custom Utility class for the DigPopGUI application.
* @author ajohnson
*/
/**
 * DigPopGUIUtilityClass is the Custom Utility class for the DigPopGUI application.
 * All members are static helpers for reading/writing the DigPop save file, the
 * census/survey .csv files, and the land-use .asc file.
 *
 * @author ajohnson
 */
public class DigPopGUIUtilityClass {

    /**
     * Path to the help file stored in the resource package.
     */
    private static final String HELP_FILE_PATH = "/cerl/gui/resources/HelpText.xml";

    // Index of the first data column in each supported .csv layout; columns
    // before this index hold identifiers/metadata and are skipped.
    private static final int FIRST_COLUMN_FOR_CENSUS_ENUMERATIONS_FILE = 9;
    private static final int FIRST_COLUMN_FOR_HOUSEHOLD_ENUMERATIONS_FILE = 8;
    private static final int FIRST_COLUMN_FOR_POPULATION_ENUMERATIONS_FILE = 6;

    /**
     * Splits on commas that are outside double-quoted sections. Compiled once
     * because it is applied to every line of every .csv file this class reads.
     */
    private static final Pattern CSV_SPLIT_PATTERN =
            Pattern.compile(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)");

    /**
     * Calls FileUtility to read in the applications help file and stores into
     * the HelpFile object.
     * @return Loaded HelpFile Object
     */
    private static HelpFile getDefaultHelpFile() {
        URL url = DigPopGUIUtilityClass.class.getResource(HELP_FILE_PATH);
        Result result = FileUtility.ParseXMLFileIntoSpecifiedObjectFromURL(url, HelpFile.class);
        return (HelpFile) result.getValue();
    }

    /**
     * Loads the default help file GUI by the specified screen name.
     * @param screenName - the screen to load the help for
     */
    public static void loadDefaultHelpGUIByScreenName(String screenName) {
        HelpFile newHelpFile = getDefaultHelpFile();
        new HelpFileDisplay(newHelpFile.getSelectedScreenByName(screenName), newHelpFile).setVisible(true);
    }

    /**
     * Loads the Help Screen for a specific item on a screen
     * @param screenName - the screen to view help for
     * @param instructionName - the specific item to view help for
     */
    public static void loadDefaultHelpGUIByScreenInstructionName(String screenName, String instructionName) {
        HelpFile newHelpFile = getDefaultHelpFile();
        Instruction selectedInstruction
                = newHelpFile.getSelectedScreenByName(screenName)
                        .getSelectedInstructionByName(instructionName);
        new HelpFileDisplay(selectedInstruction, newHelpFile).setVisible(true);
    }

    /**
     * Saves the DigPop Object, stamping it with the current save time first.
     * @param information - the current DigPop object
     * @param path - the FilePath where the file should be saved
     * @return the save Result, whose value is the (time-stamped) DigPop object
     */
    public static Result saveDigPopGUIInformationSaveFile(
            DigPopGUIInformation information,
            String path) {
        information.setLastSaveDate(Calendar.getInstance().getTime().toString());
        Result result = FileUtility.ParseObjectToXML(information, path, DigPopGUIInformation.class);
        result.setValue(information);
        return result;
    }

    /**
     * Reads the DigPop Object from the save file
     * @param path - the file path of the DigPop object to load
     * @return the parse Result, whose value is the loaded DigPop object
     */
    public static Result readInDigPopGUIInformationSaveFile(String path) {
        return FileUtility.ParseXMLFileIntoSpecifiedObject(path, DigPopGUIInformation.class);
    }

    /**
     * Gets the census and survey classes that were already loaded in a previous run.
     * Any of the three paths may be null/empty, in which case that file is skipped.
     * @param censusEnumerationsFilePath - the file path of the census enumerations .csv file
     * @param populationMicroDataFilePath - the file path of the population microdata .csv file
     * @param householdMicroDataFilePath - the file path of the household microdata .csv file
     * @return a Result whose value is a CensusSurveyClasses aggregating all loaded classes
     */
    public static Result getLoadedCensusSurveyClasses(
            String censusEnumerationsFilePath,
            String populationMicroDataFilePath,
            String householdMicroDataFilePath) {
        Result result = new Result();
        CensusSurveyClasses returnObject = new CensusSurveyClasses();

        if (censusEnumerationsFilePath != null && !censusEnumerationsFilePath.equals("")) {
            result = DigPopGUIUtilityClass.getClassNamesFromCSVFile(censusEnumerationsFilePath, Census_Enumerations);
            if (result.isSuccess()) {
                CensusSurveyClasses results = (CensusSurveyClasses) result.getValue();
                returnObject.setCensusClasses(results.getCensusClasses());
            }
        }

        if (populationMicroDataFilePath != null && !populationMicroDataFilePath.equals("")) {
            result = DigPopGUIUtilityClass.getClassNamesFromCSVFile(populationMicroDataFilePath, Population_Micro_Data);
            if (result.isSuccess()) {
                CensusSurveyClasses results = (CensusSurveyClasses) result.getValue();
                returnObject.setPopulationMicroDataClasses(results.getPopulationMicroDataClasses());
            }
        }

        if (householdMicroDataFilePath != null && !householdMicroDataFilePath.equals("")) {
            result = DigPopGUIUtilityClass.getClassNamesFromCSVFile(householdMicroDataFilePath, Household_Micro_Data);
            if (result.isSuccess()) {
                CensusSurveyClasses results = (CensusSurveyClasses) result.getValue();
                returnObject.setHouseholdMicroDataClasses(results.getHouseholdMicroDataClasses());
            }
        }

        result.setValue(returnObject);
        return result;
    }

    /**
     * Gets the census or survey class names from the .csv file
     * @param filePath - the file path of the .csv file to read
     * @param digPopFileType - the DigPop type, either Census_Enumerations, Population_Micro_Data or Household_Micro_Data
     * @return a Result whose value is a CensusSurveyClasses holding the parsed classes
     */
    public static Result getClassNamesFromCSVFile(
            String filePath,
            DigPopFileTypeEnum digPopFileType) {
        Result result = new Result();
        CensusSurveyClasses returnObject = new CensusSurveyClasses();

        // Each file type skips a different number of leading metadata columns.
        switch (digPopFileType) {
            case Census_Enumerations:
                result = readClassNamesFromFirstLine(filePath, FIRST_COLUMN_FOR_CENSUS_ENUMERATIONS_FILE, Census_Enumerations);
                if (result.isSuccess()) {
                    returnObject.setCensusClasses((ArrayList<Class>) result.getValue());
                }
                break;
            case Population_Micro_Data:
                result = readClassNamesFromFirstLine(filePath, FIRST_COLUMN_FOR_POPULATION_ENUMERATIONS_FILE, Population_Micro_Data);
                if (result.isSuccess()) {
                    returnObject.setPopulationMicroDataClasses((ArrayList<Class>) result.getValue());
                }
                break;
            case Household_Micro_Data:
                result = readClassNamesFromFirstLine(filePath, FIRST_COLUMN_FOR_HOUSEHOLD_ENUMERATIONS_FILE, Household_Micro_Data);
                if (result.isSuccess()) {
                    returnObject.setHouseholdMicroDataClasses((ArrayList<Class>) result.getValue());
                }
                break;
        }

        result.setValue(returnObject);
        return result;
    }

    /**
     * Reads the census classes from the header (first) line of the .csv file.
     * @param filePath - the file path of the file to read in
     * @param columnIndent - the number of columns to ignore at the front of the file
     * @param digPopFileType - the type of DigPop File Type provided
     * @return a Result whose value is the ArrayList of found classes
     */
    private static Result readClassNamesFromFirstLine(
            String filePath,
            int columnIndent,
            DigPopFileTypeEnum digPopFileType) {
        Result result = new Result();
        ArrayList<Class> foundClasses = new ArrayList<>();
        String line;
        try (BufferedReader br = new BufferedReader(new FileReader(filePath))) {
            int classIDCounter = 1;
            if ((line = br.readLine()) != null) {
                // Split on commas, honoring quoted sections.
                String[] lineInfo = CSV_SPLIT_PATTERN.split(line, -1);
                for (int count = columnIndent; count < lineInfo.length; count++) {
                    Class newClass = new Class(lineInfo[count], count, false, classIDCounter, digPopFileType);
                    foundClasses.add(newClass);
                    classIDCounter++;
                }
            }
            result.setSuccess(true);
        } catch (IOException ex) {
            result.setErrorMessage(
                    "readClassNamesFromFirstLine",
                    ex.getMessage());
            result.setSuccess(false);
        }
        result.setValue(foundClasses);
        return result;
    }

    /**
     * Gets the survey data column values from the .csv file, counting how many
     * times each distinct value appears (the header row is skipped).
     * @param filePath - the path of the file to read
     * @param columnNumber - the zero-based index of the column to read
     * @return a Result whose value is the list of distinct SurveyColumnValues
     */
    public static Result getSurveyDataColumnValues(String filePath, int columnNumber) {
        Result result = new Result();
        List<SurveyColumnValue> columnValues = new ArrayList<>();
        String line;
        int lineCounter = 0;
        try (BufferedReader br = new BufferedReader(new FileReader(filePath))) {
            while ((line = br.readLine()) != null) {
                if (lineCounter >= 1) {
                    // Split on commas, honoring quoted sections.
                    String[] lineInfo = CSV_SPLIT_PATTERN.split(line, -1);
                    int value = Integer.parseInt(lineInfo[columnNumber]);
                    Optional<SurveyColumnValue> foundFromStream =
                            columnValues.stream().filter(c -> c.getValue() == value).findFirst();
                    if (!foundFromStream.isPresent()) {
                        columnValues.add(new SurveyColumnValue(lineCounter, value, false, 1));
                    } else {
                        foundFromStream.get().addOneToNumberOfTimesUsed();
                    }
                }
                lineCounter++;
            }
            result.setSuccess(true);
        } catch (IOException ex) {
            result.setErrorMessage(
                    "getSurveyDataColumnValues",
                    ex.getMessage());
            result.setSuccess(false);
        }
        result.setValue(columnValues);
        return result;
    }

    /**
     * Gets the values of the selected census classes and adds them to the column total
     * (mutates the supplied classes; the header row is skipped).
     * @param filePath - the file path for the census enumerations .csv file
     * @param censusClasses - the selected classes
     * @return a Result whose value is the same list with accumulated totals
     */
    public static Result getSelectedCensusColumnValues(String filePath, List<Class> censusClasses) {
        Result result = new Result();
        String line;
        int lineCounter = 0;
        try (BufferedReader br = new BufferedReader(new FileReader(filePath))) {
            while ((line = br.readLine()) != null) {
                if (lineCounter >= 1) {
                    // Split on commas, honoring quoted sections.
                    String[] lineInfo = CSV_SPLIT_PATTERN.split(line, -1);
                    censusClasses.forEach((c) ->
                            c.addToClassTotal(Long.parseLong(lineInfo[c.getColumnNumber()])));
                }
                lineCounter++;
            }
            result.setSuccess(true);
        } catch (IOException ex) {
            result.setErrorMessage(
                    "getSelectedCensusColumnValues",
                    ex.getMessage());
            result.setSuccess(false);
        }
        result.setValue(censusClasses);
        return result;
    }

    /**
     * Gets the classes from the land use .asc file. The first 6 lines are treated
     * as header metadata; data tokens are split on single spaces and de-duplicated.
     * @param filePath - the file path of the land use .asc file
     * @return a Result whose value is the distinct class tokens (null if no data lines)
     * @throws IOException declared for compatibility; read errors are reported via the Result
     */
    public static Result getClassesFromLandUseASCFile(
            String filePath) throws IOException {
        Result result = new Result(true);
        List<String> classes = null;
        int counter = 0;
        try (FileInputStream inputStream = new FileInputStream(filePath);
             Scanner sc = new Scanner(inputStream, "UTF-8")) {
            while (sc.hasNextLine()) {
                String line = sc.nextLine();
                if (counter >= 6) {
                    String[] lineInfo = line.split(" ");
                    if (classes == null) {
                        classes = Arrays.stream(lineInfo).distinct().collect(Collectors.toList());
                    } else {
                        classes.addAll(Arrays.asList(lineInfo));
                        classes = classes.stream().distinct().collect(Collectors.toList());
                    }
                }
                counter++;
            }
            // Scanner swallows read errors; surface any buffered one here.
            if (sc.ioException() != null) {
                throw sc.ioException();
            }
        } catch (IOException ex) {
            // FileNotFoundException is an IOException, so one catch covers both.
            result.setErrorMessage(
                    "getClassesFromLandUseASCFile",
                    ex.getMessage());
            result.setSuccess(false);
        }
        result.setValue(classes);
        return result;
    }

    /**
     * Create a new census enumeration file: the header row gains one
     * "name_percentage%" column per detail, every data row gains the derived value.
     * @param oldFilePath - the original census enumeration file's path
     * @param newFilePath - the path for the new census enumeration file
     * @param newDetailsToAdd - the ArrayList for each of the new columns and related values to add
     * @return success or failure
     * @throws IOException declared for compatibility; read errors are reported via the Result
     */
    public static Result outputNewCensusFile(
            String oldFilePath,
            String newFilePath,
            ArrayList<NewCensusColumnDetails> newDetailsToAdd) throws IOException {
        Result result = new Result(true);
        ArrayList<String> outputLines = new ArrayList<>();
        int counter = 1;
        try (FileInputStream inputStream = new FileInputStream(oldFilePath);
             Scanner sc = new Scanner(inputStream, "UTF-8")) {
            while (sc.hasNextLine()) {
                String line = sc.nextLine();
                if (counter == 1) {
                    // Header row: append one labeled column per new detail.
                    for (NewCensusColumnDetails newInfo : newDetailsToAdd) {
                        line = line + ", " + newInfo.getNewColumnHeader() + "_" + newInfo.getRandomPercentage() + "%";
                    }
                } else {
                    String[] lineInfo = CSV_SPLIT_PATTERN.split(line, -1);
                    for (NewCensusColumnDetails newInfo : newDetailsToAdd) {
                        int oldValue = 0;
                        // NOTE(review): each iteration overwrites oldValue, so only the LAST
                        // lookup column contributes to newValue. If the intent was to sum
                        // the columns, this should accumulate -- confirm before changing.
                        for (int oldColumnNumber : newInfo.getOldValueLookUpColumns()) {
                            oldValue = Integer.parseInt(lineInfo[oldColumnNumber]);
                        }
                        int newValue = (int) (oldValue * newInfo.getRandomPercentage());
                        line = line + ", " + newValue;
                    }
                }
                outputLines.add(line);
                counter++;
            }
            // Scanner swallows read errors; surface any buffered one here.
            if (sc.ioException() != null) {
                throw sc.ioException();
            }
        } catch (IOException ex) {
            result.setErrorMessage(
                    "outputNewCensusFile",
                    ex.getMessage());
            result.setSuccess(false);
        }
        if (result.isSuccess()) {
            result = FileUtility.WriteNewTextFileFromArrayOfLines(newFilePath, outputLines);
        }
        return result;
    }

    /**
     * Creates a new census .csv file per Markov chain, per run. Stops early if a
     * run reports failure.
     * @param markovChains - the list of all Markov Chains created
     * @param numberOfRuns - the number of runs from the Run File step
     * @param censusEnumerationFullPath - the filepath of the census enumeration file
     * @param fileDirectory - the directory where all files for this run are saved
     * @return the Result of the last file written
     */
    public static Result CreateNewCensusCSVFiles(
            ArrayList<MarkovChain> markovChains,
            int numberOfRuns,
            String censusEnumerationFullPath,
            String fileDirectory) {
        Result result = new Result(true);
        int counter = 1;
        String onlyFilename = (new File(censusEnumerationFullPath)).getName();
        while (counter <= numberOfRuns && result.isSuccess()) {
            for (MarkovChain markovChain : markovChains) {
                // Use the platform separator so the output path works on any OS.
                String newFileName = String.format(
                        "%s%s%s_Run_%d_%s",
                        fileDirectory,
                        File.separator,
                        markovChain.getMarkovName(),
                        counter,
                        onlyFilename);
                try {
                    result = DigPopGUIUtilityClass.outputNewCensusFile(
                            censusEnumerationFullPath,
                            newFileName,
                            markovChain.getNewCensusColumnDetails());
                } catch (IOException ex) {
                    Logger.getLogger(DigPopGUIUtilityClass.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
            counter++;
        }
        return result;
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNameIdentifierOwner;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.util.ArrayFactory;
import com.intellij.util.Processor;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.psi.stubs.PyClassStub;
import com.jetbrains.python.psi.types.PyClassLikeType;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Map;
/**
 * Represents a class declaration in source.
 */
public interface PyClass extends PsiNameIdentifierOwner, PyStatement, NameDefiner, PyDocStringOwner, StubBasedPsiElement<PyClassStub>,
                                 ScopeOwner, PyDecoratable, PyTypedElement, PyQualifiedNameOwner, PyStatementListContainer, PyWithAncestors {

  // Shared factory used to allocate PyClass arrays of a requested size.
  ArrayFactory<PyClass> ARRAY_FACTORY = new ArrayFactory<PyClass>() {
    @NotNull
    @Override
    public PyClass[] create(int count) {
      return new PyClass[count];
    }
  };

  /**
   * Returns the AST node for the class name identifier, or null if absent.
   */
  @Nullable
  ASTNode getNameNode();

  /**
   * Returns only those ancestors from the hierarchy, that are resolved to PyClass PSI elements.
   *
   * @param context type eval context (pass null to use loose, but better provide one)
   * @see #getAncestorTypes(TypeEvalContext) for the full list of ancestors.
   */
  @NotNull
  List<PyClass> getAncestorClasses(@Nullable TypeEvalContext context);

  /**
   * Returns types of expressions in the super classes list.
   * <p/>
   * If no super classes are specified, returns the type of the implicit super class for old- and new-style classes.
   *
   * @param context the type evaluation context to resolve the super class expressions with
   * @see #getAncestorTypes(TypeEvalContext) for the full list of ancestors.
   */
  @NotNull
  List<PyClassLikeType> getSuperClassTypes(@NotNull TypeEvalContext context);

  /**
   * Returns only those super classes for expressions from the super classes list, that are resolved to PyClass PSI elements.
   * <p/>
   * If no super classes are specified, returns the implicit super class for old- and new-style classes.
   *
   * @see #getSuperClassTypes(TypeEvalContext) for the full list of super classes.
   * @see #getAncestorTypes(TypeEvalContext) for the full list of ancestors.
   * @param context type eval context (pass null to use loose, but better provide one)
   */
  @NotNull
  PyClass[] getSuperClasses(@Nullable TypeEvalContext context);

  /**
   * Returns a PSI element for the super classes list.
   * <p/>
   * Operates at the AST level.
   */
  @Nullable
  PyArgumentList getSuperClassExpressionList();

  /**
   * Returns PSI elements for the expressions in the super classes list.
   * <p/>
   * Operates at the AST level.
   */
  @NotNull
  PyExpression[] getSuperClassExpressions();

  /**
   * Collects methods defined in the class.
   * <p/>
   * This method does not access AST if underlying PSI is stub based.
   *
   * @return class methods
   */
  @NotNull
  PyFunction[] getMethods();

  /**
   * Get class properties.
   *
   * @return Map [property_name] = [{@link com.jetbrains.python.psi.Property}]
   */
  @NotNull
  Map<String, Property> getProperties();

  /**
   * Finds a method with given name.
   *
   * @param name what to look for
   * @param inherited true: search in superclasses; false: only look for methods defined in this class.
   * @param context the type evaluation context used to resolve superclasses when searching inherited methods
   * @return the matching method, or null if none was found
   */
  @Nullable
  PyFunction findMethodByName(@Nullable @NonNls final String name, boolean inherited, TypeEvalContext context);

  /**
   * Finds either __init__ or __new__, whichever is defined for given class.
   * If __init__ is defined, it is found first. This mimics the way initialization methods
   * are searched for and called by Python when a constructor call is made.
   * Since __new__ only makes sense for new-style classes, an old-style class never finds it with this method.
   *
   * @param inherited true: search in superclasses, too.
   * @param context type eval context (presumably used to resolve superclasses; null falls back to a loose context -- confirm against implementation)
   * @return a method that would be called first when an instance of this class is instantiated.
   */
  @Nullable
  PyFunction findInitOrNew(boolean inherited, @Nullable TypeEvalContext context);

  /**
   * Finds a property with the specified name in the class or one of its ancestors.
   *
   * @param name of the property
   * @param inherited true: search in ancestor classes, too; false: only this class.
   * @param context type eval (null to use loose context, but you better provide one)
   * @return descriptor of property accessors, or null if such property does not exist.
   */
  @Nullable
  Property findProperty(@NotNull String name, boolean inherited, @Nullable TypeEvalContext context);

  /**
   * Apply a processor to every method, looking at superclasses in method resolution order as needed.
   * Consider using {@link PyClassLikeType#visitMembers(Processor, boolean, TypeEvalContext)}
   *
   * @param processor what to apply
   * @param inherited true: search in superclasses, too.
   * @param context loose context will be used if no context provided
   * @see PyClassLikeType#visitMembers(Processor, boolean, TypeEvalContext)
   */
  boolean visitMethods(Processor<PyFunction> processor, boolean inherited, @Nullable TypeEvalContext context);

  /**
   * Applies a processor to class-level attributes, optionally including inherited ones.
   * Consider using {@link PyClassLikeType#visitMembers(Processor, boolean, TypeEvalContext)}
   *
   * @see PyClassLikeType#visitMembers(Processor, boolean, TypeEvalContext)
   */
  boolean visitClassAttributes(Processor<PyTargetExpression> processor, boolean inherited, TypeEvalContext context);

  /**
   * Effectively collects assignments inside the class body.
   * <p/>
   * This method does not access AST if underlying PSI is stub based.
   * Note that only <strong>own</strong> attrs are fetched, not parent attrs.
   * If you need parent attributes, consider using {@link #getClassAttributesInherited(TypeEvalContext)}
   *
   * @see #getClassAttributesInherited(TypeEvalContext)
   */
  List<PyTargetExpression> getClassAttributes();

  /**
   * Returns all class attributes this class class contains, including inherited one.
   * Process may be heavy, depending or your context.
   *
   * @param context context to use for this process
   * @return list of attrs.
   *
   * TODO: Replace it and {@link #getClassAttributes()} with a single getClassAttributes(@NotNull TypeEvalContext context, boolean inherited)
   */
  @NotNull
  List<PyTargetExpression> getClassAttributesInherited(@NotNull TypeEvalContext context);

  /**
   * Finds a class-level attribute by name, optionally searching ancestors.
   */
  @Nullable
  PyTargetExpression findClassAttribute(@NotNull String name, boolean inherited, TypeEvalContext context);

  /**
   * Effectively collects assignments to attributes of {@code self} in {@code __init__}, {@code __new__} and
   * other methods defined in the class.
   * <p/>
   * This method does not access AST if underlying PSI is stub based.
   */
  List<PyTargetExpression> getInstanceAttributes();

  /**
   * Finds an instance attribute (an assignment to {@code self.<name>}) by name,
   * optionally searching ancestors.
   */
  @Nullable
  PyTargetExpression findInstanceAttribute(String name, boolean inherited);

  /**
   * Returns the classes declared directly inside this class body.
   */
  PyClass[] getNestedClasses();

  /**
   * Finds a nested class by name, optionally searching ancestors.
   */
  @Nullable
  PyClass findNestedClass(String name, boolean inherited);

  /**
   * @param context the type evaluation context used to resolve the ancestor chain
   * @return true if the class is new-style and descends from 'object'.
   */
  boolean isNewStyleClass(TypeEvalContext context);

  /**
   * Scan properties in order of definition, until processor returns true for one of them.
   *
   * @param processor to check properties
   * @param inherited whether inherited properties need to be scanned, too
   * @return a property that processor accepted, or null.
   */
  @Nullable
  Property scanProperties(Processor<Property> processor, boolean inherited);

  /**
   * Non-recursively searches for a property for which the given function is a getter, setter or deleter.
   *
   * @param callable the function which may be an accessor
   * @return the property, or null
   */
  @Nullable
  Property findPropertyByCallable(PyCallable callable);

  /**
   * @param parent the candidate superclass to test against
   * @return True iff this and parent are the same or parent is one of our superclasses.
   */
  boolean isSubclass(PyClass parent, @Nullable TypeEvalContext context);

  /**
   * Checks whether this class is (or descends from) the class with the given qualified name.
   */
  boolean isSubclass(@NotNull String superClassQName, @Nullable TypeEvalContext context);

  /**
   * Returns the aggregated list of names defined in __slots__ attributes of the class and its ancestors.
   *
   * @param context (will be used default if null)
   */
  @Nullable
  List<String> getSlots(@Nullable TypeEvalContext context);

  /**
   * Returns the list of names in the class' __slots__ attribute, or null if the class
   * does not define such an attribute.
   *
   * @return the list of names or null.
   */
  @Nullable
  List<String> getOwnSlots();

  /**
   * Returns the class docstring text, or null if the class has no docstring.
   */
  @Nullable
  String getDocStringValue();

  /**
   * Feeds class-level declarations to the given scope processor.
   */
  boolean processClassLevelDeclarations(@NotNull PsiScopeProcessor processor);

  /**
   * Feeds instance-level declarations to the given scope processor.
   */
  boolean processInstanceLevelDeclarations(@NotNull PsiScopeProcessor processor, @Nullable PsiElement location);

  //TODO: Add "addMetaClass" or move methods out of here

  /**
   * Returns the type representing the metaclass of the class if it is explicitly set, null otherwise.
   * <p/>
   * The metaclass might be defined outside the class in case of Python 2 file-level __metaclass__ attributes.
   */
  @Nullable
  PyType getMetaClassType(@NotNull TypeEvalContext context);

  /**
   * Returns the expression that defines the metaclass of the class.
   * <p/>
   * Operates at the AST level.
   */
  @Nullable
  PyExpression getMetaClassExpression();

  /**
   * @param context eval context
   * @return {@link com.jetbrains.python.psi.types.PyType} casted if it has right type
   */
  @Nullable
  PyClassLikeType getType(@NotNull TypeEvalContext context);
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.util.AsciiString;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
/**
* Utilities for the integration tests.
*/
/**
 * Utilities for the HTTP/2 integration tests.
 */
final class Http2TestUtil {
    // Single shared generator: java.util.Random is thread-safe, and reusing one instance
    // avoids allocating and re-seeding a new Random on every randomBytes() call.
    private static final Random RANDOM = new Random();

    /**
     * Interface that allows for running an operation that throws a {@link Http2Exception}.
     */
    interface Http2Runnable {
        void run() throws Http2Exception;
    }

    /**
     * Runs the given operation within the event loop thread of the given {@link Channel}.
     * A thrown {@link Http2Exception} is wrapped in a {@link RuntimeException} so it surfaces
     * from the event loop task instead of being silently dropped.
     */
    static void runInChannel(Channel channel, final Http2Runnable runnable) {
        channel.eventLoop().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    runnable.run();
                } catch (Http2Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }

    /**
     * Returns a 100-byte array filled with random data.
     */
    public static byte[] randomBytes() {
        return randomBytes(100);
    }

    /**
     * Returns a byte array of the given size filled with random data.
     *
     * @param size the number of random bytes to produce
     */
    public static byte[] randomBytes(int size) {
        byte[] data = new byte[size];
        RANDOM.nextBytes(data);
        return data;
    }

    /**
     * Returns an {@link AsciiString} that wraps a randomly-filled byte array.
     */
    public static AsciiString randomString() {
        return new AsciiString(randomBytes());
    }

    /**
     * Identity conversion; lets test call sites uniformly obtain a {@link CharSequence} from a String.
     */
    public static CharSequence of(String s) {
        return s;
    }

    private Http2TestUtil() {
        // Utility class: no instances.
    }

    /**
     * A {@link ByteToMessageDecoder} that feeds inbound bytes through a {@link DefaultHttp2FrameReader}
     * and forwards each decoded frame to the given {@link Http2FrameListener}, counting down the
     * supplied latch once per frame so tests can await frame delivery.
     */
    static class FrameAdapter extends ByteToMessageDecoder {
        private final Http2Connection connection; // may be null; then streams are never created/tracked
        private final Http2FrameListener listener;
        private final DefaultHttp2FrameReader reader;
        private final CountDownLatch latch;

        FrameAdapter(Http2FrameListener listener, CountDownLatch latch) {
            this(null, listener, latch);
        }

        FrameAdapter(Http2Connection connection, Http2FrameListener listener, CountDownLatch latch) {
            this(connection, new DefaultHttp2FrameReader(false), listener, latch);
        }

        FrameAdapter(Http2Connection connection, DefaultHttp2FrameReader reader, Http2FrameListener listener,
                     CountDownLatch latch) {
            this.connection = connection;
            this.listener = listener;
            this.reader = reader;
            this.latch = latch;
        }

        private Http2Stream getOrCreateStream(int streamId, boolean halfClosed) throws Http2Exception {
            return getOrCreateStream(connection, streamId, halfClosed);
        }

        /**
         * Looks up the stream for {@code streamId}, creating it if it does not exist yet.
         * Returns {@code null} when no connection was supplied.
         */
        public static Http2Stream getOrCreateStream(Http2Connection connection, int streamId, boolean halfClosed)
                throws Http2Exception {
            if (connection != null) {
                Http2Stream stream = connection.stream(streamId);
                if (stream == null) {
                    // Pick the endpoint whose stream-id parity matches (even ids are server-created,
                    // odd ids client-created) so createStream() succeeds.
                    if (connection.isServer() && streamId % 2 == 0 || !connection.isServer() && streamId % 2 != 0) {
                        stream = connection.local().createStream(streamId, halfClosed);
                    } else {
                        stream = connection.remote().createStream(streamId, halfClosed);
                    }
                }
                return stream;
            }
            return null;
        }

        private void closeStream(Http2Stream stream) {
            closeStream(stream, false);
        }

        // Overridable hook; dataRead is unused here but lets subclasses distinguish DATA-triggered closes.
        protected void closeStream(Http2Stream stream, boolean dataRead) {
            if (stream != null) {
                stream.close();
            }
        }

        @Override
        protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
            // Every callback forwards to the wrapped listener and counts down the latch once.
            reader.readFrame(ctx, in, new Http2FrameListener() {
                @Override
                public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
                        boolean endOfStream) throws Http2Exception {
                    Http2Stream stream = getOrCreateStream(streamId, endOfStream);
                    int processed = listener.onDataRead(ctx, streamId, data, padding, endOfStream);
                    if (endOfStream) {
                        closeStream(stream, true);
                    }
                    latch.countDown();
                    return processed;
                }

                @Override
                public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding,
                        boolean endStream) throws Http2Exception {
                    Http2Stream stream = getOrCreateStream(streamId, endStream);
                    listener.onHeadersRead(ctx, streamId, headers, padding, endStream);
                    if (endStream) {
                        closeStream(stream);
                    }
                    latch.countDown();
                }

                @Override
                public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers,
                        int streamDependency, short weight, boolean exclusive, int padding, boolean endStream)
                        throws Http2Exception {
                    Http2Stream stream = getOrCreateStream(streamId, endStream);
                    if (stream != null) {
                        stream.setPriority(streamDependency, weight, exclusive);
                    }
                    listener.onHeadersRead(ctx, streamId, headers, streamDependency, weight, exclusive, padding,
                            endStream);
                    if (endStream) {
                        closeStream(stream);
                    }
                    latch.countDown();
                }

                @Override
                public void onPriorityRead(ChannelHandlerContext ctx, int streamId, int streamDependency, short weight,
                        boolean exclusive) throws Http2Exception {
                    Http2Stream stream = getOrCreateStream(streamId, false);
                    if (stream != null) {
                        stream.setPriority(streamDependency, weight, exclusive);
                    }
                    listener.onPriorityRead(ctx, streamId, streamDependency, weight, exclusive);
                    latch.countDown();
                }

                @Override
                public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode)
                        throws Http2Exception {
                    Http2Stream stream = getOrCreateStream(streamId, false);
                    listener.onRstStreamRead(ctx, streamId, errorCode);
                    closeStream(stream);
                    latch.countDown();
                }

                @Override
                public void onSettingsAckRead(ChannelHandlerContext ctx) throws Http2Exception {
                    listener.onSettingsAckRead(ctx);
                    latch.countDown();
                }

                @Override
                public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings) throws Http2Exception {
                    listener.onSettingsRead(ctx, settings);
                    latch.countDown();
                }

                @Override
                public void onPingRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
                    listener.onPingRead(ctx, data);
                    latch.countDown();
                }

                @Override
                public void onPingAckRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
                    listener.onPingAckRead(ctx, data);
                    latch.countDown();
                }

                @Override
                public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId,
                        Http2Headers headers, int padding) throws Http2Exception {
                    getOrCreateStream(promisedStreamId, false);
                    listener.onPushPromiseRead(ctx, streamId, promisedStreamId, headers, padding);
                    latch.countDown();
                }

                @Override
                public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData)
                        throws Http2Exception {
                    listener.onGoAwayRead(ctx, lastStreamId, errorCode, debugData);
                    latch.countDown();
                }

                @Override
                public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement)
                        throws Http2Exception {
                    getOrCreateStream(streamId, false);
                    listener.onWindowUpdateRead(ctx, streamId, windowSizeIncrement);
                    latch.countDown();
                }

                @Override
                public void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId, Http2Flags flags,
                        ByteBuf payload) throws Http2Exception {
                    listener.onUnknownFrame(ctx, frameType, streamId, flags, payload);
                    latch.countDown();
                }
            });
        }
    }

    /**
     * A decorator around a {@link Http2FrameListener} that counts down the latch so that we can await the completion of
     * the request.
     */
    static class FrameCountDown implements Http2FrameListener {
        private final Http2FrameListener listener;
        private final CountDownLatch messageLatch;     // counted down once per non-SETTINGS-ACK frame
        private final CountDownLatch settingsAckLatch;
        private final CountDownLatch dataLatch;        // counted down once per DATA byte; may be null
        private final CountDownLatch trailersLatch;    // counted down on end-of-stream headers; may be null
        private final CountDownLatch goAwayLatch;

        FrameCountDown(Http2FrameListener listener, CountDownLatch settingsAckLatch, CountDownLatch messageLatch) {
            this(listener, settingsAckLatch, messageLatch, null, null);
        }

        FrameCountDown(Http2FrameListener listener, CountDownLatch settingsAckLatch, CountDownLatch messageLatch,
                       CountDownLatch dataLatch, CountDownLatch trailersLatch) {
            // Without a dedicated GOAWAY latch, GOAWAY frames count against the message latch.
            this(listener, settingsAckLatch, messageLatch, dataLatch, trailersLatch, messageLatch);
        }

        FrameCountDown(Http2FrameListener listener, CountDownLatch settingsAckLatch, CountDownLatch messageLatch,
                       CountDownLatch dataLatch, CountDownLatch trailersLatch, CountDownLatch goAwayLatch) {
            this.listener = listener;
            this.messageLatch = messageLatch;
            this.settingsAckLatch = settingsAckLatch;
            this.dataLatch = dataLatch;
            this.trailersLatch = trailersLatch;
            this.goAwayLatch = goAwayLatch;
        }

        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream)
                throws Http2Exception {
            // Capture the size before delegating: the listener may consume the buffer.
            int numBytes = data.readableBytes();
            int processed = listener.onDataRead(ctx, streamId, data, padding, endOfStream);
            messageLatch.countDown();
            if (dataLatch != null) {
                for (int i = 0; i < numBytes; ++i) {
                    dataLatch.countDown();
                }
            }
            return processed;
        }

        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding,
                boolean endStream) throws Http2Exception {
            listener.onHeadersRead(ctx, streamId, headers, padding, endStream);
            messageLatch.countDown();
            if (trailersLatch != null && endStream) {
                trailersLatch.countDown();
            }
        }

        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency,
                short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
            listener.onHeadersRead(ctx, streamId, headers, streamDependency, weight, exclusive, padding, endStream);
            messageLatch.countDown();
            if (trailersLatch != null && endStream) {
                trailersLatch.countDown();
            }
        }

        @Override
        public void onPriorityRead(ChannelHandlerContext ctx, int streamId, int streamDependency, short weight,
                boolean exclusive) throws Http2Exception {
            listener.onPriorityRead(ctx, streamId, streamDependency, weight, exclusive);
            messageLatch.countDown();
        }

        @Override
        public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode) throws Http2Exception {
            listener.onRstStreamRead(ctx, streamId, errorCode);
            messageLatch.countDown();
        }

        @Override
        public void onSettingsAckRead(ChannelHandlerContext ctx) throws Http2Exception {
            listener.onSettingsAckRead(ctx);
            settingsAckLatch.countDown();
        }

        @Override
        public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings) throws Http2Exception {
            listener.onSettingsRead(ctx, settings);
            messageLatch.countDown();
        }

        @Override
        public void onPingRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
            listener.onPingRead(ctx, data);
            messageLatch.countDown();
        }

        @Override
        public void onPingAckRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
            listener.onPingAckRead(ctx, data);
            messageLatch.countDown();
        }

        @Override
        public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId,
                Http2Headers headers, int padding) throws Http2Exception {
            listener.onPushPromiseRead(ctx, streamId, promisedStreamId, headers, padding);
            messageLatch.countDown();
        }

        @Override
        public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData)
                throws Http2Exception {
            listener.onGoAwayRead(ctx, lastStreamId, errorCode, debugData);
            goAwayLatch.countDown();
        }

        @Override
        public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement)
                throws Http2Exception {
            listener.onWindowUpdateRead(ctx, streamId, windowSizeIncrement);
            messageLatch.countDown();
        }

        @Override
        public void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId, Http2Flags flags,
                ByteBuf payload) throws Http2Exception {
            listener.onUnknownFrame(ctx, frameType, streamId, flags, payload);
            messageLatch.countDown();
        }
    }
}
| |
/**
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.grobid.service;
import java.io.InputStream;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import javax.ws.rs.core.UriInfo;
import org.grobid.core.factory.AbstractEngineFactory;
import org.grobid.service.process.GrobidRestProcessAdmin;
import org.grobid.service.process.GrobidRestProcessFiles;
import org.grobid.service.process.GrobidRestProcessGeneric;
import org.grobid.service.process.GrobidRestProcessString;
import org.grobid.service.util.GrobidServiceProperties;
import org.grobid.service.util.ZipUtils;
import org.grobid.service.util.GrobidRestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sun.jersey.multipart.FormDataParam;
import com.sun.jersey.spi.resource.Singleton;
/**
* RESTful service for the GROBID system.
*
* @author FloZi, Damien, Patrice
*
*/
@Singleton
@Path(GrobidPathes.PATH_GROBID)
public class GrobidRestService implements GrobidPathes {
/**
 * The class Logger.
 */
private static final Logger LOGGER = LoggerFactory.getLogger(GrobidRestService.class);
// Names of the form/query parameters used by the REST endpoints below.
private static final String NAMES = "names";
private static final String DATE = "date";
private static final String AFFILIATIONS = "affiliations";
private static final String CITATION = "citations"; // note: constant is singular, the wire name is plural
private static final String TEXT = "text";
private static final String SHA1 = "sha1";
private static final String XML = "xml";
private static final String INPUT = "input";
// Eagerly initializes the GROBID engine and loads the service properties when the
// (singleton) servlet is created, so the first request does not pay the startup cost.
public GrobidRestService() {
LOGGER.info("Initiating Servlet GrobidRestService");
AbstractEngineFactory.fullInit();
// Forces the properties singleton to load now; its return value is intentionally discarded.
GrobidServiceProperties.getInstance();
LOGGER.info("Initiating of Servlet GrobidRestService finished.");
}
/**
 * Health-check endpoint; simple delegate to the generic processor.
 *
 * @see org.grobid.service.process.GrobidRestProcessGeneric#isAlive()
 */
@Path(GrobidPathes.PATH_IS_ALIVE)
@Produces(MediaType.TEXT_PLAIN)
@GET
public Response isAlive() {
return GrobidRestProcessGeneric.isAlive();
}
/**
 * Returns the HTML description page of the service.
 *
 * @see org.grobid.service.process.GrobidRestProcessGeneric#getDescription_html(UriInfo)
 */
@Produces(MediaType.TEXT_HTML)
@GET
@Path("grobid")
public Response getDescription_html(@Context UriInfo uriInfo) {
return GrobidRestProcessGeneric.getDescription_html(uriInfo);
}
/**
 * Admin page, POST variant (sha1 token sent as a form parameter).
 *
 * @see org.grobid.service.process.GrobidRestProcessAdmin#getAdminParams(String)
 */
@Path(PATH_ADMIN)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_HTML)
@POST
public Response getAdmin_htmlPost(@FormParam(SHA1) String sha1) {
return GrobidRestProcessAdmin.getAdminParams(sha1);
}
/**
 * Admin page, GET variant (sha1 token sent as a query parameter).
 *
 * @see org.grobid.service.process.GrobidRestProcessAdmin#getAdminParams(String)
 */
@Path(PATH_ADMIN)
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_HTML)
@GET
public Response getAdmin_htmlGet(@QueryParam(SHA1) String sha1) {
return GrobidRestProcessAdmin.getAdminParams(sha1);
}
/**
 * Processes the header of the uploaded document (POST, XML output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessHeaderDocument
 */
@Path(PATH_HEADER)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processHeaderDocument_post(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate
        ) throws Exception {
    // Consolidation is enabled only by the literal form value "1"; null-safe comparison.
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessFiles.processStatelessHeaderDocument(inputStream, doConsolidate, false);
}

/**
 * Processes the header of the uploaded document (PUT, XML output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessHeaderDocument
 */
@Path(PATH_HEADER)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@PUT
public Response processStatelessHeaderDocument(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate
        ) {
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessFiles.processStatelessHeaderDocument(inputStream, doConsolidate, false);
}

/**
 * Processes the header of the uploaded document (POST, HTML-flavoured output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessHeaderDocument
 */
@Path(PATH_HEADER_HTML)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processHeaderDocument_postHTML(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate) {
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessFiles.processStatelessHeaderDocument(inputStream, doConsolidate, true);
}

/**
 * Processes the header of the uploaded document (PUT, HTML-flavoured output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessHeaderDocument
 */
@Path(PATH_HEADER_HTML)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@PUT
public Response processStatelessHeaderDocumentHTML(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate) {
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessFiles.processStatelessHeaderDocument(inputStream, doConsolidate, true);
}
/**
 * Processes the full text of the uploaded document (POST, XML output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessFulltextDocument
 */
@Path(PATH_FULL_TEXT)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processFulltextDocument_post(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate,
        @DefaultValue("-1") @FormDataParam("start") int startPage,
        @DefaultValue("-1") @FormDataParam("end") int endPage,
        @FormDataParam("generateIDs") String generateIDs) {
    // Both flags are enabled only by the literal form value "1"; null-safe comparisons.
    final boolean doConsolidate = "1".equals(consolidate);
    final boolean addIds = "1".equals(generateIDs);
    return GrobidRestProcessFiles.processStatelessFulltextDocument(inputStream,
            doConsolidate, false, startPage, endPage, addIds);
}

/**
 * Processes the full text of the uploaded document (PUT, XML output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessFulltextDocument
 */
@Path(PATH_FULL_TEXT)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@PUT
public Response processStatelessFulltextDocument(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate,
        @DefaultValue("-1") @FormDataParam("start") int startPage,
        @DefaultValue("-1") @FormDataParam("end") int endPage,
        @FormDataParam("generateIDs") String generateIDs) {
    final boolean doConsolidate = "1".equals(consolidate);
    final boolean addIds = "1".equals(generateIDs);
    return GrobidRestProcessFiles.processStatelessFulltextDocument(inputStream,
            doConsolidate, false, startPage, endPage, addIds);
}

/**
 * Processes the full text of the uploaded document and returns the result plus assets as a zip (POST).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessFulltextAssetDocument
 */
@Path(PATH_FULL_TEXT_ASSET)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/zip")
@POST
public Response processFulltextAssetDocument_post(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate,
        @DefaultValue("-1") @FormDataParam("start") int startPage,
        @DefaultValue("-1") @FormDataParam("end") int endPage,
        @FormDataParam("generateIDs") String generateIDs) {
    final boolean doConsolidate = "1".equals(consolidate);
    final boolean addIds = "1".equals(generateIDs);
    return GrobidRestProcessFiles.processStatelessFulltextAssetDocument(inputStream,
            doConsolidate, startPage, endPage, addIds);
}

/**
 * Processes the full text of the uploaded document and returns the result plus assets as a zip (PUT).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessFulltextAssetDocument
 */
@Path(PATH_FULL_TEXT_ASSET)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/zip")
@PUT
public Response processStatelessFulltextAssetDocument(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate,
        @DefaultValue("-1") @FormDataParam("start") int startPage,
        @DefaultValue("-1") @FormDataParam("end") int endPage,
        @FormDataParam("generateIDs") String generateIDs) {
    final boolean doConsolidate = "1".equals(consolidate);
    final boolean addIds = "1".equals(generateIDs);
    return GrobidRestProcessFiles.processStatelessFulltextAssetDocument(inputStream,
            doConsolidate, startPage, endPage, addIds);
}

/**
 * Processes the full text of the uploaded document (POST, HTML-flavoured output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessFulltextDocument
 */
@Path(PATH_FULL_TEXT_HTML)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processFulltextDocument_postHTML(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate,
        @DefaultValue("-1") @FormDataParam("start") int startPage,
        @DefaultValue("-1") @FormDataParam("end") int endPage,
        @FormDataParam("generateIDs") String generateIDs) {
    final boolean doConsolidate = "1".equals(consolidate);
    final boolean addIds = "1".equals(generateIDs);
    return GrobidRestProcessFiles.processStatelessFulltextDocument(inputStream,
            doConsolidate, true, startPage, endPage, addIds);
}

/**
 * Processes the full text of the uploaded document (PUT, HTML-flavoured output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessFulltextDocument
 */
@Path(PATH_FULL_TEXT_HTML)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@PUT
public Response processStatelessFulltextDocumentHTML(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate,
        @DefaultValue("-1") @FormDataParam("start") int startPage,
        @DefaultValue("-1") @FormDataParam("end") int endPage,
        @FormDataParam("generateIDs") String generateIDs) {
    final boolean doConsolidate = "1".equals(consolidate);
    final boolean addIds = "1".equals(generateIDs);
    return GrobidRestProcessFiles.processStatelessFulltextDocument(inputStream,
            doConsolidate, true, startPage, endPage, addIds);
}
/**
 * Processes patent citations from a TEI document (POST, streamed XML output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processCitationPatentTEI
 */
@Path(PATH_CITATION_PATENT_TEI)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public StreamingOutput processCitationPatentTEI(@FormDataParam(INPUT) InputStream pInputStream,
        @FormDataParam("consolidate") String consolidate) throws Exception {
    // Consolidation is enabled only by the literal form value "1"; null-safe comparison.
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessFiles.processCitationPatentTEI(pInputStream, doConsolidate);
}

/**
 * Processes patent citations from an ST.36 document; the upload is decompressed before processing.
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processCitationPatentST36
 */
@Path(PATH_CITATION_PATENT_ST36)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processCitationPatentST36(@FormDataParam(INPUT) InputStream pInputStream,
        @FormDataParam("consolidate") String consolidate) throws Exception {
    final boolean doConsolidate = "1".equals(consolidate);
    final InputStream decompressed = ZipUtils.decompressStream(pInputStream);
    return GrobidRestProcessFiles.processCitationPatentST36(decompressed, doConsolidate);
}

/**
 * Processes patent citations from a PDF document (POST, XML output).
 *
 * @see org.grobid.service.process.GrobidRestProcessFiles#processCitationPatentPDF
 */
@Path(PATH_CITATION_PATENT_PDF)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processCitationPatentPDF(@FormDataParam(INPUT) InputStream pInputStream,
        @FormDataParam("consolidate") String consolidate) throws Exception {
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessFiles.processCitationPatentPDF(pInputStream, doConsolidate);
}

/**
 * Processes patent citations from raw text sent as a form parameter.
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processCitationPatentTXT
 */
@Path(PATH_CITATION_PATENT_TXT)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processCitationPatentTXT_post(@FormParam(TEXT) String text,
        @FormParam("consolidate") String consolidate) {
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessString.processCitationPatentTXT(text, doConsolidate);
}
// The POST and PUT variants below are intentionally identical delegates to the same processor.
/**
 * Parses a raw date string (POST variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processDate(String)
 */
@Path(PATH_DATE)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@POST
public Response processDate_post(@FormParam(DATE) String date) {
return GrobidRestProcessString.processDate(date);
}
/**
 * Parses a raw date string (PUT variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processDate(String)
 */
@Path(PATH_DATE)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@PUT
public Response processDate(@FormParam(DATE) String date) {
return GrobidRestProcessString.processDate(date);
}
/**
 * Parses raw person names as found in a document header (POST variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processNamesHeader(String)
 */
@Path(PATH_HEADER_NAMES)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@POST
public Response processNamesHeader_post(@FormParam(NAMES) String names) {
return GrobidRestProcessString.processNamesHeader(names);
}
/**
 * Parses raw person names as found in a document header (PUT variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processNamesHeader(String)
 */
@Path(PATH_HEADER_NAMES)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@PUT
public Response processNamesHeader(@FormParam(NAMES) String names) {
return GrobidRestProcessString.processNamesHeader(names);
}
/**
 * Parses raw person names as found in a citation (POST variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processNamesCitation(String)
 */
@Path(PATH_CITE_NAMES)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@POST
public Response processNamesCitation_post(@FormParam(NAMES) String names) {
return GrobidRestProcessString.processNamesCitation(names);
}
/**
 * Parses raw person names as found in a citation (PUT variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processNamesCitation(String)
 */
@Path(PATH_CITE_NAMES)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@PUT
public Response processNamesCitation(@FormParam(NAMES) String names) {
return GrobidRestProcessString.processNamesCitation(names);
}
/**
 * Parses a raw affiliation string (POST variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processAffiliations(String)
 */
@Path(PATH_AFFILIATION)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@POST
public Response processAffiliations_post(@FormParam(AFFILIATIONS) String affiliations) {
return GrobidRestProcessString.processAffiliations(affiliations);
}
/**
 * Parses a raw affiliation string (PUT variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processAffiliations(String)
 */
@Path(PATH_AFFILIATION)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@PUT
public Response processAffiliations(@FormParam(AFFILIATIONS) String affiliations) {
    // Parameter renamed from "affiliation" to match the POST variant; the HTTP binding is
    // driven solely by the @FormParam annotation, so this is a purely internal change.
    return GrobidRestProcessString.processAffiliations(affiliations);
}
/**
 * Parses a raw bibliographical citation string (POST variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processCitation
 */
@Path(PATH_CITATION)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processCitation_post(@FormParam(CITATION) String citation,
        @FormParam("consolidate") String consolidate) {
    // Consolidation is enabled only by the literal form value "1"; null-safe comparison.
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessString.processCitation(citation, doConsolidate);
}

/**
 * Parses a raw bibliographical citation string (PUT variant).
 *
 * @see org.grobid.service.process.GrobidRestProcessString#processCitation
 */
@Path(PATH_CITATION)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.APPLICATION_XML)
@PUT
public Response processCitation(@FormParam(CITATION) String citation,
        @FormParam("consolidate") String consolidate) {
    final boolean doConsolidate = "1".equals(consolidate);
    return GrobidRestProcessString.processCitation(citation, doConsolidate);
}
/**
* @see org.grobid.service.process.GrobidRestProcessAdmin#processSHA1(String)
*/
@Path(PATH_SHA1)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@POST
public Response processSHA1Post(@FormParam(SHA1) String sha1) {
return GrobidRestProcessAdmin.processSHA1(sha1);
}
/**
* @see org.grobid.service.process.GrobidRestProcessAdmin#processSHA1(String)
*/
@Path(PATH_SHA1)
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@GET
public Response processSHA1Get(@QueryParam(SHA1) String sha1) {
return GrobidRestProcessAdmin.processSHA1(sha1);
}
/**
* @see org.grobid.service.process.GrobidRestProcessAdmin#getAllPropertiesValues(String)
*/
@Path(PATH_ALL_PROPS)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@POST
public Response getAllPropertiesValuesPost(@FormParam(SHA1) String sha1) {
return GrobidRestProcessAdmin.getAllPropertiesValues(sha1);
}
/**
* @see org.grobid.service.process.GrobidRestProcessAdmin#getAllPropertiesValues(String)
*/
@Path(PATH_ALL_PROPS)
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@GET
public Response getAllPropertiesValuesGet(@QueryParam(SHA1) String sha1) {
return GrobidRestProcessAdmin.getAllPropertiesValues(sha1);
}
/**
 * Admin endpoint (POST, form-encoded variant): applies a property change described by
 * the supplied XML payload.
 *
 * @param xml the change description read from the {@code xml} form field
 * @return the plain-text response produced by the admin processor
 * @see org.grobid.service.process.GrobidRestProcessAdmin#changePropertyValue(String)
 */
@Path(PATH_CHANGE_PROPERTY_VALUE)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.TEXT_PLAIN)
@POST
public Response changePropertyValuePost(@FormParam(XML) String xml) {
return GrobidRestProcessAdmin.changePropertyValue(xml);
}
/**
 * Admin endpoint (GET variant): applies a property change described by the XML value
 * in the {@code xml} query parameter.
 *
 * NOTE(review): exposing a state-changing operation over GET is unconventional —
 * GET requests are expected to be safe/idempotent; confirm this is intentional.
 *
 * @param xml the change description read from the {@code xml} query parameter
 * @return the plain-text response produced by the admin processor
 * @see org.grobid.service.process.GrobidRestProcessAdmin#changePropertyValue(String)
 */
@Path(PATH_CHANGE_PROPERTY_VALUE)
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@GET
public Response changePropertyValueGet(@QueryParam(XML) String xml) {
return GrobidRestProcessAdmin.changePropertyValue(xml);
}
/**
 * POST variant of the stateless bibliographical-references extraction endpoint.
 *
 * @param inputStream the uploaded document, read from the multipart {@code input} part
 * @param consolidate "1" to enable consolidation of extracted references; any other
 *        value (or absence) disables it
 * @return the response produced by the shared references processor
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessReferencesDocument(InputStream, boolean)
 */
@Path(PATH_REFERENCES)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@POST
public Response processReferencesDocument_post(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate) {
    // Null-safe: true only when the client explicitly sent "1".
    final boolean withConsolidation = "1".equals(consolidate);
    return GrobidRestProcessFiles.processStatelessReferencesDocument(inputStream, withConsolidation);
}
/**
 * PUT variant of the stateless bibliographical-references extraction endpoint.
 *
 * @param inputStream the uploaded document, read from the multipart {@code input} part
 * @param consolidate "1" to enable consolidation of extracted references; any other
 *        value (or absence) disables it
 * @return the response produced by the shared references processor
 * @see org.grobid.service.process.GrobidRestProcessFiles#processStatelessReferencesDocument(InputStream, boolean)
 */
@Path(PATH_REFERENCES)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
@PUT
public Response processStatelessReferencesDocument(@FormDataParam(INPUT) InputStream inputStream,
        @FormDataParam("consolidate") String consolidate) {
    // Null-safe: true only when the client explicitly sent "1".
    final boolean withConsolidation = "1".equals(consolidate);
    return GrobidRestProcessFiles.processStatelessReferencesDocument(inputStream, withConsolidation);
}
/**
 * Annotates the uploaded PDF according to the requested annotation type.
 *
 * @param inputStream the uploaded PDF, read from the multipart {@code input} part
 * @param fileName the original file name, read from the multipart {@code fileName} part
 * @param type numeric annotation type: 0 = CITATION, 1 = BLOCK, 2 = FIGURE;
 *        any other value yields a null annotation type (passed through to the delegate)
 * @return the annotated PDF response produced by the files processor
 * @see org.grobid.service.process.GrobidRestProcessFiles#processPDFAnnotation(InputStream, GrobidRestUtils.Annotation)
 */
@Path(PATH_PDF_ANNOTATION)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces("application/pdf")
@POST
public Response processAnnotatePDF(@FormDataParam(INPUT) InputStream inputStream,
        // BUG FIX: fileName was bound to the "type" part (duplicate binding with the
        // parameter below), so it always received the numeric type value instead of
        // the client-supplied file name. Bind it to its own "fileName" part.
        @FormDataParam("fileName") String fileName,
        @FormDataParam("type") int type) {
    GrobidRestUtils.Annotation annotType = null;
    switch (type) {
        case 0:
            annotType = GrobidRestUtils.Annotation.CITATION;
            break;
        case 1:
            annotType = GrobidRestUtils.Annotation.BLOCK;
            break;
        case 2:
            annotType = GrobidRestUtils.Annotation.FIGURE;
            break;
        default:
            // Unknown codes fall through with a null type, matching the original behavior.
            break;
    }
    return GrobidRestProcessFiles.processPDFAnnotation(inputStream, fileName, annotType);
}
}
| |
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.rest.change;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.acceptance.GitUtil.pushHead;
import static com.google.gerrit.server.group.SystemGroupBackend.REGISTERED_USERS;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.GerritConfig;
import com.google.gerrit.acceptance.GitUtil;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.acceptance.testsuite.request.RequestScopeOperations;
import com.google.gerrit.common.data.LabelFunction;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.extensions.api.changes.MoveInput;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.projects.BranchInput;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.MethodNotAllowedException;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.server.group.SystemGroupBackend;
import com.google.gerrit.server.project.testing.Util;
import com.google.inject.Inject;
import java.util.Arrays;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Test;
@NoHttpd
/**
 * Acceptance tests for the "move change" REST endpoint: moving an open change to a
 * different branch of the same project, covering short/full ref names, custom messages,
 * conflict cases (same branch, duplicate change key, merged change, merge commits),
 * permission checks, patch-set locks, vote migration, and the config kill switch.
 */
public class MoveChangeIT extends AbstractDaemonTest {
  @Inject private RequestScopeOperations requestScopeOperations;

  @Test
  public void moveChangeWithShortRef() throws Exception {
    // Move change to a different branch using short ref name
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    move(r.getChangeId(), newBranch.getShortName());
    assertThat(r.getChange().change().getDest()).isEqualTo(newBranch);
  }

  @Test
  public void moveChangeWithFullRef() throws Exception {
    // Move change to a different branch using full ref name
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    move(r.getChangeId(), newBranch.get());
    assertThat(r.getChange().change().getDest()).isEqualTo(newBranch);
  }

  @Test
  public void moveChangeWithMessage() throws Exception {
    // Provide a message using --message flag
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    String moveMessage = "Moving for the move test";
    move(r.getChangeId(), newBranch.get(), moveMessage);
    assertThat(r.getChange().change().getDest()).isEqualTo(newBranch);
    // The move appends a change message: a generated header, a blank line, then the
    // caller-supplied text.
    StringBuilder expectedMessage = new StringBuilder();
    expectedMessage.append("Change destination moved from master to moveTest");
    expectedMessage.append("\n\n");
    expectedMessage.append(moveMessage);
    assertThat(r.getChange().messages().get(1).getMessage()).isEqualTo(expectedMessage.toString());
  }

  @Test
  public void moveChangeToSameRefAsCurrent() throws Exception {
    // Move change to the branch same as change's destination
    PushOneCommit.Result r = createChange();
    exception.expect(ResourceConflictException.class);
    exception.expectMessage("Change is already destined for the specified branch");
    move(r.getChangeId(), r.getChange().change().getDest().get());
  }

  @Test
  public void moveChangeToSameChangeId() throws Exception {
    // Move change to a branch with existing change with same change ID
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    int changeNum = r.getChange().change().getChangeId();
    createChange(newBranch.get(), r.getChangeId());
    exception.expect(ResourceConflictException.class);
    exception.expectMessage(
        "Destination "
            + newBranch.getShortName()
            + " has a different change with same change key "
            + r.getChangeId());
    move(changeNum, newBranch.get());
  }

  @Test
  public void moveChangeToNonExistentRef() throws Exception {
    // Move change to a non-existing branch
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch =
        new Branch.NameKey(r.getChange().change().getProject(), "does_not_exist");
    exception.expect(ResourceConflictException.class);
    exception.expectMessage("Destination " + newBranch.get() + " not found in the project");
    move(r.getChangeId(), newBranch.get());
  }

  @Test
  public void moveClosedChange() throws Exception {
    // Move a change which is not open
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    merge(r);
    exception.expect(ResourceConflictException.class);
    exception.expectMessage("Change is merged");
    move(r.getChangeId(), newBranch.get());
  }

  @Test
  public void moveMergeCommitChange() throws Exception {
    // Move a change which has a merge commit as the current PS
    // Create a merge commit and push for review
    PushOneCommit.Result r1 = createChange();
    PushOneCommit.Result r2 = createChange();
    TestRepository<?>.CommitBuilder commitBuilder =
        testRepo.branch("HEAD").commit().insertChangeId();
    commitBuilder
        .parent(r1.getCommit())
        .parent(r2.getCommit())
        .message("Move change Merge Commit")
        .author(admin.newIdent())
        .committer(new PersonIdent(admin.newIdent(), testRepo.getDate()));
    RevCommit c = commitBuilder.create();
    pushHead(testRepo, "refs/for/master", false, false);
    // Try to move the merge commit to another branch
    Branch.NameKey newBranch = new Branch.NameKey(r1.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    exception.expect(ResourceConflictException.class);
    exception.expectMessage("Merge commit cannot be moved");
    move(GitUtil.getChangeId(testRepo, c).get(), newBranch.get());
  }

  @Test
  public void moveChangeToBranchWithoutUploadPerms() throws Exception {
    // Move change to a destination where user doesn't have upload permissions
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch =
        new Branch.NameKey(r.getChange().change().getProject(), "blocked_branch");
    createBranch(newBranch);
    block(
        "refs/for/" + newBranch.get(),
        Permission.PUSH,
        systemGroupBackend.getGroup(REGISTERED_USERS).getUUID());
    exception.expect(AuthException.class);
    exception.expectMessage("move not permitted");
    move(r.getChangeId(), newBranch.get());
  }

  @Test
  public void moveChangeFromBranchWithoutAbandonPerms() throws Exception {
    // Move change for which user does not have abandon permissions
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    block(
        r.getChange().change().getDest().get(),
        Permission.ABANDON,
        systemGroupBackend.getGroup(REGISTERED_USERS).getUUID());
    // Switch to a non-admin user so the block above actually applies.
    requestScopeOperations.setApiUser(user.id());
    exception.expect(AuthException.class);
    exception.expectMessage("move not permitted");
    move(r.getChangeId(), newBranch.get());
  }

  @Test
  public void moveChangeToBranchThatContainsCurrentCommit() throws Exception {
    // Move change to a branch for which current PS revision is reachable from
    // tip
    // Create a change
    PushOneCommit.Result r = createChange();
    int changeNum = r.getChange().change().getChangeId();
    // Create a branch with that same commit
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    BranchInput bi = new BranchInput();
    bi.revision = r.getCommit().name();
    gApi.projects().name(newBranch.getParentKey().get()).branch(newBranch.get()).create(bi);
    // Try to move the change to the branch with the same commit
    exception.expect(ResourceConflictException.class);
    exception.expectMessage(
        "Current patchset revision is reachable from tip of " + newBranch.get());
    move(changeNum, newBranch.get());
  }

  @Test
  public void moveChangeWithCurrentPatchSetLocked() throws Exception {
    // Move change that is locked
    PushOneCommit.Result r = createChange();
    Branch.NameKey newBranch = new Branch.NameKey(r.getChange().change().getProject(), "moveTest");
    createBranch(newBranch);
    // Define a Patch-Set-Lock label and allow registered users to vote on it.
    try (ProjectConfigUpdate u = updateProject(project)) {
      LabelType patchSetLock = Util.patchSetLock();
      u.getConfig().getLabelSections().put(patchSetLock.getName(), patchSetLock);
      AccountGroup.UUID registeredUsers = systemGroupBackend.getGroup(REGISTERED_USERS).getUUID();
      Util.allow(
          u.getConfig(),
          Permission.forLabel(patchSetLock.getName()),
          0,
          1,
          registeredUsers,
          "refs/heads/*");
      u.save();
    }
    grant(project, "refs/heads/*", Permission.LABEL + "Patch-Set-Lock");
    // Locking the patch set must make the subsequent move fail.
    revision(r).review(new ReviewInput().label("Patch-Set-Lock", 1));
    exception.expect(ResourceConflictException.class);
    exception.expectMessage(
        String.format("The current patch set of change %s is locked", r.getChange().getId()));
    move(r.getChangeId(), newBranch.get());
  }

  @Test
  public void moveChangeOnlyKeepVetoVotes() throws Exception {
    // A vote for a label will be kept after moving if the label's function is *WithBlock and the
    // vote holds the minimum value.
    createBranch(new Branch.NameKey(project, "foo"));
    String codeReviewLabel = "Code-Review"; // 'Code-Review' uses 'MaxWithBlock' function.
    String testLabelA = "Label-A";
    String testLabelB = "Label-B";
    String testLabelC = "Label-C";
    configLabel(testLabelA, LabelFunction.ANY_WITH_BLOCK);
    configLabel(testLabelB, LabelFunction.MAX_NO_BLOCK);
    configLabel(testLabelC, LabelFunction.NO_BLOCK);
    AccountGroup.UUID registered = SystemGroupBackend.REGISTERED_USERS;
    try (ProjectConfigUpdate u = updateProject(project)) {
      Util.allow(
          u.getConfig(), Permission.forLabel(testLabelA), -1, +1, registered, "refs/heads/*");
      Util.allow(
          u.getConfig(), Permission.forLabel(testLabelB), -1, +1, registered, "refs/heads/*");
      Util.allow(
          u.getConfig(), Permission.forLabel(testLabelC), -1, +1, registered, "refs/heads/*");
      u.save();
    }
    String changeId = createChange().getChangeId();
    gApi.changes().id(changeId).current().review(ReviewInput.reject());
    amendChange(changeId);
    ReviewInput input = new ReviewInput();
    input.label(testLabelA, -1);
    input.label(testLabelB, -1);
    input.label(testLabelC, -1);
    gApi.changes().id(changeId).current().review(input);
    assertThat(gApi.changes().id(changeId).current().reviewer(admin.email()).votes().keySet())
        .containsExactly(codeReviewLabel, testLabelA, testLabelB, testLabelC);
    assertThat(gApi.changes().id(changeId).current().reviewer(admin.email()).votes().values())
        .containsExactly((short) -2, (short) -1, (short) -1, (short) -1);
    // Move the change to the 'foo' branch.
    assertThat(gApi.changes().id(changeId).get().branch).isEqualTo("master");
    move(changeId, "foo");
    assertThat(gApi.changes().id(changeId).get().branch).isEqualTo("foo");
    // 'Code-Review -2' and 'Label-A -1' will be kept.
    assertThat(gApi.changes().id(changeId).current().reviewer(admin.email()).votes().values())
        .containsExactly((short) -2, (short) -1, (short) 0, (short) 0);
    // Move the change back to 'master'.
    move(changeId, "master");
    assertThat(gApi.changes().id(changeId).get().branch).isEqualTo("master");
    assertThat(gApi.changes().id(changeId).current().reviewer(admin.email()).votes().values())
        .containsExactly((short) -2, (short) -1, (short) 0, (short) 0);
  }

  @Test
  public void moveToBranchWithoutLabel() throws Exception {
    createBranch(new Branch.NameKey(project, "foo"));
    String testLabelA = "Label-A";
    // Label-A only exists on refs/heads/master, not on the move target 'foo'.
    configLabel(testLabelA, LabelFunction.MAX_WITH_BLOCK, Arrays.asList("refs/heads/master"));
    AccountGroup.UUID registered = SystemGroupBackend.REGISTERED_USERS;
    try (ProjectConfigUpdate u = updateProject(project)) {
      Util.allow(
          u.getConfig(), Permission.forLabel(testLabelA), -1, +1, registered, "refs/heads/master");
      u.save();
    }
    String changeId = createChange().getChangeId();
    ReviewInput input = new ReviewInput();
    input.label(testLabelA, -1);
    gApi.changes().id(changeId).current().review(input);
    assertThat(gApi.changes().id(changeId).current().reviewer(admin.email()).votes().keySet())
        .containsExactly(testLabelA);
    assertThat(gApi.changes().id(changeId).current().reviewer(admin.email()).votes().values())
        .containsExactly((short) -1);
    move(changeId, "foo");
    // TODO(dpursehouse): Assert about state of labels after move
  }

  @Test
  public void moveNoDestinationBranchSpecified() throws Exception {
    PushOneCommit.Result r = createChange();
    exception.expect(BadRequestException.class);
    exception.expectMessage("destination branch is required");
    move(r.getChangeId(), null);
  }

  @Test
  @GerritConfig(name = "change.move", value = "false")
  public void moveCanBeDisabledByConfig() throws Exception {
    PushOneCommit.Result r = createChange();
    exception.expect(MethodNotAllowedException.class);
    exception.expectMessage("move changes endpoint is disabled");
    move(r.getChangeId(), null);
  }

  // Moves the change identified by its numeric id to the given destination branch.
  private void move(int changeNum, String destination) throws RestApiException {
    gApi.changes().id(changeNum).move(destination);
  }

  // Moves the change identified by its Change-Id string to the given destination branch.
  private void move(String changeId, String destination) throws RestApiException {
    gApi.changes().id(changeId).move(destination);
  }

  // Moves the change with an explicit MoveInput so a custom change message can be attached.
  private void move(String changeId, String destination, String message) throws RestApiException {
    MoveInput in = new MoveInput();
    in.destinationBranch = destination;
    in.message = message;
    gApi.changes().id(changeId).move(in);
  }

  // Creates a change on the given branch reusing an existing Change-Id (used to provoke
  // duplicate-change-key conflicts) and asserts the push succeeded.
  private PushOneCommit.Result createChange(String branch, String changeId) throws Exception {
    PushOneCommit push = pushFactory.create(admin.newIdent(), testRepo, changeId);
    PushOneCommit.Result result = push.to("refs/for/" + branch);
    result.assertOkStatus();
    return result;
  }
}
| |
/**
* Copyright (c) 2011 Source Auditor Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.spdx.rdfparser.license;
import java.util.List;
import org.spdx.html.InvalidLicenseTemplateException;
import org.spdx.licenseTemplate.LicenseTemplateRuleException;
import org.spdx.licenseTemplate.SpdxLicenseTemplateHelper;
import org.spdx.rdfparser.IModelContainer;
import org.spdx.rdfparser.InvalidSPDXAnalysisException;
import org.spdx.rdfparser.SpdxRdfConstants;
import org.spdx.rdfparser.model.IRdfModel;
import org.apache.jena.graph.Node;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Resource;
/**
* Listed license for SPDX as listed at spdx.org/licenses
* @author Gary O'Neall
*
*/
public class SpdxListedLicense extends License {
    // Lazily-built HTML renderings of the license text / standard header; null until
    // first requested (see getLicenseTextHtml()/getLicenseHeaderHtml()).
    private String licenseTextHtml = null;
    private String licenseHeaderHtml = null;
    // License list version in which this license was first deprecated; null if not deprecated.
    private String deprecatedVersion = null;

    /**
     * @param name License name
     * @param id License ID
     * @param text License text
     * @param sourceUrl Optional URLs that reference this license
     * @param comments Optional comments
     * @param standardLicenseHeader Optional license header
     * @param template Optional template
     * @param osiApproved True if this is an OSI Approved license
     * @param fsfLibre true if FSF describes the license as free / libre, false if FSF describes the license as not free / libre, null if FSF does not reference the license
     * @param licenseTextHtml HTML version for the license text
     * @param isDeprecated True if this license has been designated as deprecated by the SPDX legal team
     * @param deprecatedVersion License list version when this license was first deprecated (null if not deprecated)
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(String name, String id, String text, String[] sourceUrl, String comments,
            String standardLicenseHeader, String template, boolean osiApproved, Boolean fsfLibre,
            String licenseTextHtml, boolean isDeprecated, String deprecatedVersion) throws InvalidSPDXAnalysisException {
        super(name, id, text, sourceUrl, comments, standardLicenseHeader, template, osiApproved, fsfLibre);
        this.licenseTextHtml = licenseTextHtml;
        this.setDeprecated(isDeprecated);
        this.deprecatedVersion = deprecatedVersion;
    }

    /**
     * Convenience constructor: not FSF-libre, not deprecated.
     * @param name License name
     * @param id License ID
     * @param text License text
     * @param sourceUrl Optional URLs that reference this license
     * @param comments Optional comments
     * @param standardLicenseHeader Optional license header
     * @param template Optional template
     * @param osiApproved True if this is an OSI Approved license
     * @param licenseTextHtml HTML version for the license text
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(String name, String id, String text, String[] sourceUrl, String comments,
            String standardLicenseHeader, String template, boolean osiApproved, String licenseTextHtml) throws InvalidSPDXAnalysisException {
        this(name, id, text, sourceUrl, comments, standardLicenseHeader, template, osiApproved, false, licenseTextHtml, false, null);
    }

    /**
     * Convenience constructor: no pre-rendered HTML, not deprecated.
     * @param name License name
     * @param id License ID
     * @param text License text
     * @param sourceUrl Optional URLs that reference this license
     * @param comments Optional comments
     * @param standardLicenseHeader Optional license header
     * @param template Optional template
     * @param osiApproved True if this is an OSI Approved license
     * @param fsfLibre True if the license is listed by the Free Software Foundation as free / libre
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(String name, String id, String text, String[] sourceUrl, String comments,
            String standardLicenseHeader, String template, boolean osiApproved, boolean fsfLibre) throws InvalidSPDXAnalysisException {
        this(name, id, text, sourceUrl, comments, standardLicenseHeader, template, osiApproved, fsfLibre, null, false, null);
    }

    /**
     * Convenience constructor: not FSF-libre, no pre-rendered HTML, not deprecated.
     * @param name License name
     * @param id License ID
     * @param text License text
     * @param sourceUrl Optional URLs that reference this license
     * @param comments Optional comments
     * @param standardLicenseHeader Optional license header
     * @param template Optional template
     * @param osiApproved True if this is an OSI Approved license
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(String name, String id, String text, String[] sourceUrl, String comments,
            String standardLicenseHeader, String template, boolean osiApproved) throws InvalidSPDXAnalysisException {
        this(name, id, text, sourceUrl, comments, standardLicenseHeader, template, osiApproved, false, null, false, null);
    }

    /**
     * Constructs an SPDX License from the licenseNode
     * @param modelContainer container which includes the license
     * @param licenseNode RDF graph node representing the SPDX License
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(IModelContainer modelContainer, Node licenseNode) throws InvalidSPDXAnalysisException {
        super(modelContainer, licenseNode);
    }

    /**
     * @param name License name
     * @param id License ID
     * @param text License text
     * @param sourceUrl Optional URLs that reference this license
     * @param comments Optional comments
     * @param standardLicenseHeader Optional license header
     * @param template Optional template
     * @param licenseHeaderTemplate optional template for the standard license header
     * @param osiApproved True if this is an OSI Approved license
     * @param fsfLibre True if the license is listed by the Free Software Foundation as free / libre
     * @param licenseTextHtml HTML version for the license text
     * @param licenseHeaderHtml HTML version for the standard license header
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(String name, String id, String text, String[] sourceUrl, String comments,
            String standardLicenseHeader, String template, String licenseHeaderTemplate, boolean osiApproved, boolean fsfLibre, String licenseTextHtml,
            String licenseHeaderHtml) throws InvalidSPDXAnalysisException {
        super(name, id, text, sourceUrl, comments, standardLicenseHeader, template, licenseHeaderTemplate, osiApproved, fsfLibre);
        this.licenseTextHtml = licenseTextHtml;
        this.licenseHeaderHtml = licenseHeaderHtml;
    }

    /**
     * @param name License name
     * @param id License ID
     * @param text License text
     * @param sourceUrl Optional URLs that reference this license
     * @param comments Optional comments
     * @param standardLicenseHeader Optional license header
     * @param template Optional template
     * @param licenseHeaderTemplate optional template for the standard license header
     * @param osiApproved True if this is an OSI Approved license
     * @param fsfLibre True if the license is listed by the Free Software Foundation as free / libre
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(String name, String id, String text, String[] sourceUrl, String comments,
            String standardLicenseHeader, String template, String licenseHeaderTemplate, boolean osiApproved, Boolean fsfLibre) throws InvalidSPDXAnalysisException {
        super(name, id, text, sourceUrl, comments, standardLicenseHeader, template, licenseHeaderTemplate, osiApproved, fsfLibre);
    }

    /**
     * @param name License name
     * @param id License ID
     * @param text License text
     * @param sourceUrl Optional URLs that reference this license
     * @param sourceUrlDetails Optional URLs that reference this license
     * @param comments Optional comments
     * @param standardLicenseHeader Optional license header
     * @param template Optional template
     * @param licenseHeaderTemplate optional template for the standard license header
     * @param osiApproved True if this is an OSI Approved license
     * @param fsfLibre True if the license is listed by the Free Software Foundation as free / libre
     * @throws InvalidSPDXAnalysisException
     */
    public SpdxListedLicense(String name, String id, String text, String[] sourceUrl, CrossRef[] sourceUrlDetails, String comments,
            String standardLicenseHeader, String template, String licenseHeaderTemplate, boolean osiApproved, Boolean fsfLibre) throws InvalidSPDXAnalysisException {
        super(name, id, text, sourceUrl, sourceUrlDetails, comments, standardLicenseHeader, template, licenseHeaderTemplate, osiApproved, fsfLibre);
    }

    /**
     * Verifies the license in addition to the superclass checks: flags IDs that are
     * not on the SPDX listed-license registry and warns about deprecated licenses.
     * @return list of human-readable verification problems (empty if none)
     */
    @Override
    public List<String> verify() {
        List<String> retval = super.verify();
        if (!LicenseInfoFactory.isSpdxListedLicenseID(this.getLicenseId())) {
            retval.add("License "+this.getLicenseId()+" is not a listed license at spdx.org/licenses");
        }
        if (this.isDeprecated()) {
            retval.add(this.licenseId + " is deprecated.");
        }
        return retval;
    }

    /**
     * Creates a standard license URI by appending the standard license ID to the URL hosting the SPDX licenses
     * @param id Standard License ID
     * @return the full URI for the listed license
     */
    private String createStdLicenseUri(String id) {
        return SpdxRdfConstants.STANDARD_LICENSE_URL + "/" + id;
    }

    /* (non-Javadoc)
     * @see org.spdx.rdfparser.model.RdfModelObject#getType(org.apache.jena.rdf.model.Model)
     */
    @Override
    public Resource getType(Model model) {
        return model.createResource(SpdxRdfConstants.SPDX_NAMESPACE+SpdxRdfConstants.CLASS_SPDX_LICENSE);
    }

    /* (non-Javadoc)
     * @see org.spdx.rdfparser.model.RdfModelObject#getUri(org.spdx.rdfparser.IModelContainer)
     */
    @Override
    public String getUri(IModelContainer modelContainer)
            throws InvalidSPDXAnalysisException {
        return this.createStdLicenseUri(this.licenseId);
    }

    /* (non-Javadoc)
     * @see org.spdx.rdfparser.model.IRdfModel#equivalent(org.spdx.rdfparser.model.IRdfModel)
     */
    @Override
    public boolean equivalent(IRdfModel compare) {
        if (!(compare instanceof SpdxListedLicense)) {
            return false;
        }
        // For a listed license, if the ID's equal, it is considered equivalent
        SpdxListedLicense sCompare = (SpdxListedLicense)compare;
        if (this.licenseId == null) {
            return sCompare.getLicenseId() == null;
        } else if (sCompare.getLicenseId() == null) {
            return false;
        } else {
            // IDs are compared case-insensitively.
            return this.licenseId.equalsIgnoreCase(sCompare.getLicenseId());
        }
    }

    /**
     * Returns the HTML rendering of the license text, generating and caching it on
     * first call: rendered from the license template when one is present, otherwise
     * HTML-escaped from the plain license text.
     * @return HTML fragment containing the License Text
     * @throws InvalidLicenseTemplateException if the stored template cannot be parsed
     */
    public String getLicenseTextHtml() throws InvalidLicenseTemplateException {
        if (licenseTextHtml == null) {
            // Format the HTML using the text and template
            String templateText = this.getStandardLicenseTemplate();
            if (templateText != null && !templateText.trim().isEmpty()) {
                try {
                    licenseTextHtml = SpdxLicenseTemplateHelper.templateTextToHtml(templateText);
                } catch(LicenseTemplateRuleException ex) {
                    throw new InvalidLicenseTemplateException("Invalid license expression found in license text for license "+getName()+":"+ex.getMessage());
                }
            } else {
                licenseTextHtml = SpdxLicenseTemplateHelper.formatEscapeHTML(this.getLicenseText());
            }
        }
        return licenseTextHtml;
    }

    /**
     * Set the licenseTextHtml
     * @param licenseTextHtml HTML fragment representing the license text
     */
    public void setLicenseTextHtml(String licenseTextHtml) {
        this.licenseTextHtml = licenseTextHtml;
    }

    /**
     * Returns the HTML rendering of the standard license header, generating and
     * caching it on first call: rendered from the header template when one is present,
     * empty when no header exists, otherwise HTML-escaped from the plain header text.
     * @return HTML fragment containing the License standard header text
     * @throws InvalidLicenseTemplateException if the stored header template cannot be parsed
     */
    public String getLicenseHeaderHtml() throws InvalidLicenseTemplateException {
        if (licenseHeaderHtml == null) {
            // Format the HTML using the text and template
            String templateText = this.getStandardLicenseHeaderTemplate();
            if (templateText != null && !templateText.trim().isEmpty()) {
                try {
                    licenseHeaderHtml = SpdxLicenseTemplateHelper.templateTextToHtml(templateText);
                } catch(LicenseTemplateRuleException ex) {
                    throw new InvalidLicenseTemplateException("Invalid license expression found in standard license header for license "+getName()+":"+ex.getMessage());
                }
            } else if (this.getStandardLicenseHeader() == null) {
                licenseHeaderHtml = "";
            } else {
                licenseHeaderHtml = SpdxLicenseTemplateHelper.formatEscapeHTML(this.getStandardLicenseHeader());
            }
        }
        return licenseHeaderHtml;
    }

    /**
     * Set the licenseHeaderTemplateHtml
     * @param licenseHeaderHtml HTML fragment representing the license standard header text
     */
    public void setLicenseHeaderHtml(String licenseHeaderHtml) {
        this.licenseHeaderHtml = licenseHeaderHtml;
    }

    /**
     * @return the deprecatedVersion
     */
    public String getDeprecatedVersion() {
        return deprecatedVersion;
    }

    /**
     * @param deprecatedVersion the deprecatedVersion to set
     */
    public void setDeprecatedVersion(String deprecatedVersion) {
        this.deprecatedVersion = deprecatedVersion;
    }
}
| |
/*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage.api;
import sage.*;
/**
*
* @author Narflex
*/
/**
 * Registers the "SeriesInfo" family of scripting API calls with the reflection
 * function table. Each call pops its arguments off the Catbert stack in reverse
 * declaration order and returns a stdlib or Wizard-object result.
 *
 * Convention used throughout: when the SeriesInfo argument resolves to null the
 * call returns a harmless default ("" / null / false / 0) instead of throwing.
 */
public class SeriesInfoAPI
{
  // Utility holder: all functionality is registered via init(); never instantiated.
  private SeriesInfoAPI() {}
  public static void init(Catbert.ReflectionFunctionTable rft)
  {
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetAllSeriesInfo")
    {
      /**
       * Returns a list of all of the SeriesInfo which is information about television series
       * @return a list of all of the SeriesInfo
       * @since 5.1
       *
       * @declaration public SeriesInfo[] GetAllSeriesInfo();
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        return Wizard.getInstance().getAllSeriesInfo();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesTitle", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the title for the specified SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return the title for the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public String GetSeriesTitle(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getTitle();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesDescription", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the description for the specified SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return the description for the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public String GetSeriesDescription(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getDescription();
      }});
    // NOTE(review): the next two calls are registered under the "Show" group even though they
    // operate on SeriesInfo objects; presumably intentional for historical/STV compatibility —
    // confirm before changing the group name.
    rft.put(new PredefinedJEPFunction("Show", "GetSeriesCategory", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the category for the specified SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return the category for the SeriesInfo
       * @since V7.0
       *
       * @declaration public String GetSeriesCategory(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getCategory();
      }});
    rft.put(new PredefinedJEPFunction("Show", "GetSeriesSubCategory", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the subcategory for the specified SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return the subcategory for the SeriesInfo
       * @since V7.0
       *
       * @declaration public String GetSeriesSubCategory(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getSubCategory();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesHistory", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the history description for the specified SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return the history description for the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public String GetSeriesHistory(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getHistory();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesPremiereDate", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns a String describing the premiere date for the specified SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return a String describing the premiere date for the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public String GetSeriesPremiereDate(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getPremiereDate();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesFinaleDate", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns a String describing the finale date for the specified SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return a String describing the finale date for the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public String GetSeriesFinaleDate(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getFinaleDate();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesNetwork", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the name of the network the specified SeriesInfo airs on
       * @param SeriesInfo the SeriesInfo object
       * @return the name of the network the specified SeriesInfo airs on
       * @since 5.1
       *
       * @declaration public String GetSeriesNetwork(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getNetwork();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesDayOfWeek", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the name of the day of the week the specified SeriesInfo airs on
       * @param SeriesInfo the SeriesInfo object
       * @return the name of the day of the week the specified SeriesInfo airs on
       * @since 5.1
       *
       * @declaration public String GetSeriesDayOfWeek(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getAirDow();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesHourAndMinuteTimeslot", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the hour/minute timeslot that the specified SeriesInfo airs at
       * @param SeriesInfo the SeriesInfo object
       * @return the hour/minute timeslot that the specified SeriesInfo airs at
       * @since 5.1
       *
       * @declaration public String GetSeriesHourAndMinuteTimeslot(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getAirHrMin();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "HasSeriesImage", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns true if the specified SeriesInfo has a corresponding image for it
       * @param SeriesInfo the SeriesInfo object
       * @return true if the specified SeriesInfo has a corresponding image for it
       * @since 5.1
       *
       * @declaration public boolean HasSeriesImage(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null) return Boolean.FALSE;
        return si.hasImage() ? Boolean.TRUE : Boolean.FALSE;
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesImage", -1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the image that corresponds to this SeriesInfo if there is one
       * @param SeriesInfo the SeriesInfo object
       * @return the image that corresponds to this SeriesInfo if there is one
       * @since 5.1
       *
       * @declaration public MetaImage GetSeriesImage(SeriesInfo SeriesInfo);
       */
      /**
       * Returns the image that corresponds to this SeriesInfo if there is one
       * @param SeriesInfo the SeriesInfo object
       * @param Thumb true if a thumbnail is preferred, false if a full size image is
       * @return the image that corresponds to this SeriesInfo if there is one
       * @since 8.0
       *
       * @declaration public MetaImage GetSeriesImage(SeriesInfo SeriesInfo, boolean Thumb);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        // Variable arity: the optional Thumb flag is on top of the stack when present.
        boolean thumb = false;
        if (curNumberOfParameters == 2)
        {
          thumb = evalBool(stack.pop());
        }
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null) return null;
        String imageURL = si.getImageURL(thumb);
        if (imageURL == null || imageURL.length() == 0)
          return null;
        else
          return MetaImage.getMetaImage(imageURL, stack.getUIComponent());
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesImageURL", -1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the URL of the image that corresponds to this SeriesInfo if there is one
       * @param SeriesInfo the SeriesInfo object
       * @return the URL of the image that corresponds to this SeriesInfo if there is one
       * @since 8.0
       *
       * @declaration public String GetSeriesImageURL(SeriesInfo SeriesInfo);
       */
      /**
       * Returns the image URL that corresponds to this SeriesInfo if there is one
       * @param SeriesInfo the SeriesInfo object
       * @param Thumb true if a thumbnail is preferred, false if a full size image is
       * @return the image URL that corresponds to this SeriesInfo if there is one
       * @since 8.0
       *
       * @declaration public String GetSeriesImageURL(SeriesInfo SeriesInfo, boolean Thumb);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        // Variable arity: the optional Thumb flag is on top of the stack when present.
        boolean thumb = false;
        if (curNumberOfParameters == 2)
        {
          thumb = evalBool(stack.pop());
        }
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null) return null;
        String imageURL = si.getImageURL(thumb);
        if (imageURL == null || imageURL.length() == 0)
          return null;
        else
          return imageURL;
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesImageCount", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the number of images available that correspond to this SeriesInfo
       * @param SeriesInfo the SeriesInfo object
       * @return the number of images available that correspond to this SeriesInfo
       * @since 8.0
       *
       * @declaration public int GetSeriesImageCount(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        // Integer.valueOf instead of the deprecated boxing constructor.
        if (si == null) return Integer.valueOf(0);
        return Integer.valueOf(si.getImageCount());
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesImageAtIndex", new String[] { "SeriesInfo", "Index", "Thumb" })
    {
      /**
       * Returns the image that corresponds to this SeriesInfo at the specified index (when there are multiple images)
       * @param SeriesInfo the SeriesInfo object
       * @param Index the 0-based index of which image to return
       * @param Thumb true if a thumbnail is preferred, false if a full size image is
       * @return the image that corresponds to this SeriesInfo at the specified index
       * @since 8.0
       *
       * @declaration public MetaImage GetSeriesImageAtIndex(SeriesInfo SeriesInfo, int Index, boolean Thumb);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        boolean thumb = evalBool(stack.pop());
        int idx = getInt(stack);
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null) return null;
        String imageURL = si.getImageURL(idx, thumb);
        if (imageURL == null || imageURL.length() == 0)
          return null;
        else
          return MetaImage.getMetaImage(imageURL, stack.getUIComponent());
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesImageURLAtIndex", new String[] { "SeriesInfo", "Index", "Thumb" })
    {
      /**
       * Returns the image URL that corresponds to this SeriesInfo at the specified index (when there are multiple images)
       * @param SeriesInfo the SeriesInfo object
       * @param Index the 0-based index of which image to return
       * @param Thumb true if a thumbnail is preferred, false if a full size image is
       * @return the image URL that corresponds to this SeriesInfo at the specified index
       * @since 8.0
       *
       * @declaration public String GetSeriesImageURLAtIndex(SeriesInfo SeriesInfo, int Index, boolean Thumb);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        boolean thumb = evalBool(stack.pop());
        int idx = getInt(stack);
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null) return null;
        String imageURL = si.getImageURL(idx, thumb);
        if (imageURL == null || imageURL.length() == 0)
          return null;
        else
          return imageURL;
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "HasSeriesActorImage", new String[] { "SeriesInfo", "Person" })
    {
      /**
       * Returns true if the specified SeriesInfo has a corresponding image for it for the specified Person in it
       * @param SeriesInfo the SeriesInfo object
       * @param Person the Person to check for an image
       * @return true if the specified SeriesInfo has a corresponding image for it for the specified Person in it
       * @since 8.0
       *
       * @declaration public boolean HasSeriesActorImage(SeriesInfo SeriesInfo, Person Person);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        Person p = getPerson(stack);
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null || p == null) return Boolean.FALSE;
        return si.hasActorInCharacterImage(p) ? Boolean.TRUE : Boolean.FALSE;
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesActorImage", new String[] { "SeriesInfo", "Person", "Thumb" })
    {
      /**
       * Returns an image of the specified Person in their role in the specified Series
       * @param SeriesInfo the SeriesInfo object
       * @param Person the Person to check for an image
       * @param Thumb true if a thumbnail is preferred, false if a full size image is
       * @return an image of the specified Person in their role in the specified Series
       * @since 8.0
       *
       * @declaration public MetaImage GetSeriesActorImage(SeriesInfo SeriesInfo, Person Person, boolean Thumb);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        boolean thumb = evalBool(stack.pop());
        Person p = getPerson(stack);
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null || p == null) return null;
        String imageURL = si.getActorInCharacterImageURL(p, thumb);
        if (imageURL == null || imageURL.length() == 0)
          return null;
        else
          return MetaImage.getMetaImage(imageURL, stack.getUIComponent());
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesActorImageURL", new String[] { "SeriesInfo", "Person", "Thumb" })
    {
      /**
       * Returns an image URL of the specified Person in their role in the specified Series
       * @param SeriesInfo the SeriesInfo object
       * @param Person the Person to check for an image
       * @param Thumb true if a thumbnail is preferred, false if a full size image is
       * @return an image URL of the specified Person in their role in the specified Series
       * @since 8.0
       *
       * @declaration public String GetSeriesActorImageURL(SeriesInfo SeriesInfo, Person Person, boolean Thumb);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        boolean thumb = evalBool(stack.pop());
        Person p = getPerson(stack);
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null || p == null) return null;
        String imageURL = si.getActorInCharacterImageURL(p, thumb);
        if (imageURL == null || imageURL.length() == 0)
          return null;
        else
          return imageURL;
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetNumberOfCharactersInSeries", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the number of characters that we have information on for the specified series
       * @param SeriesInfo the SeriesInfo object
       * @return the number of characters that we have information on for the specified series
       * @since 5.1
       *
       * @declaration public int GetNumberOfCharactersInSeries(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        // Guard against null like the other SeriesInfo calls do (previously threw NPE).
        return Integer.valueOf(si == null ? 0 : si.getNumberOfCharacters());
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesActor", 2, new String[] { "SeriesInfo", "Index" })
    {
      /**
       * Returns the name of the actor/actress for the specfied index in the specified SeriesInfo. The range
       * for the index is from 0 to one less than the value of {@link #GetNumberOfCharactersInSeries GetNumberOfCharactersInSeries()}
       * @param SeriesInfo the SeriesInfo object
       * @param Index the 0-based index of the actor to retrieve
       * @return the Person object of the actor/actress for the specfied index in the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public Person GetSeriesActor(SeriesInfo SeriesInfo, int Index);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        int idx = getInt(stack);
        SeriesInfo si = getSeriesInfo(stack);
        // NOTE(review): returns the empty string (not null) for a null SeriesInfo even though
        // the declared return type is Person; preserved for compatibility with existing STVs.
        return si == null ? "" : si.getPersonObj(idx);
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesActorList", new String[] { "SeriesInfo" })
    {
      /**
       * Returns a list of the names of the actors/actresses in the specified SeriesInfo.
       * @param SeriesInfo the SeriesInfo object
       * @return a list of the Persons of the actors/actresses in the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public Person[] GetSeriesActorList(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? null : si.getPersonObjList();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesCharacter", 2, new String[] { "SeriesInfo", "Index" })
    {
      /**
       * Returns the name of the character for the specfied index in the specified SeriesInfo. The range
       * for the index is from 0 to one less than the value of {@link #GetNumberOfCharactersInSeries GetNumberOfCharactersInSeries()}
       * @param SeriesInfo the SeriesInfo object
       * @param Index the 0-based index of the actor to retrieve
       * @return the name of the character for the specfied index in the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public String GetSeriesCharacter(SeriesInfo SeriesInfo, int Index);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        int idx = getInt(stack);
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : si.getCharacter(idx);
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesCharacterList", new String[] { "SeriesInfo" })
    {
      /**
       * Returns a list of the names of the characters in the specified SeriesInfo.
       * @param SeriesInfo the SeriesInfo object
       * @return a list of the names of the characters in the specified SeriesInfo
       * @since 5.1
       *
       * @declaration public String[] GetSeriesCharacterList(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? null : si.getCharacterList();
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesCharacterForActor", new String[] { "SeriesInfo", "Actor" })
    {
      /**
       * Returns the name of the character that the corresponding actor plays in this series
       * @param SeriesInfo the SeriesInfo object
       * @param Actor the actor
       * @return the name of the character that the corresponding actor plays in this series, the empty string if there's no correlation
       * @since 7.0
       *
       * @declaration public String GetSeriesCharacterForActor(SeriesInfo SeriesInfo, String Actor);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        // Actor may arrive either as a Person object or as a plain name String.
        Object obj = stack.pop();
        SeriesInfo si = getSeriesInfo(stack);
        if (si == null || obj == null) return "";
        if (obj instanceof Person)
          return si.getCharacterForActor((Person) obj);
        else
          return si.guessCharacterForActor(obj.toString());
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesID", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns the Series ID of the specified SeriesInfo
       * NOTE: V8.0 IDs are not backwards compatible with prior versions
       * @param SeriesInfo the SeriesInfo object
       * @return the Series ID of the specified SeriesInfo object (currently an integer, represented as a String for future expansion)
       * @since 7.0
       *
       * @declaration public String GetSeriesID(SeriesInfo SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        SeriesInfo si = getSeriesInfo(stack);
        return si == null ? "" : Integer.toString(si.getShowcardID());
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesInfoForID", 1, new String[] { "SeriesID" })
    {
      /**
       * Returns the SeriesInfo object for the specified Series ID
       * NOTE: V8.0 IDs are not backwards compatible with prior versions
       * @param SeriesID the ID of the desired SeriesInfo object
       * @return the SeriesInfo object with the specified ID, or null if it does not exist
       * @since 7.0
       *
       * @declaration public SeriesInfo GetSeriesInfoForID(String SeriesID);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        String s = getString(stack);
        try
        {
          return Wizard.getInstance().getSeriesInfoForShowcardID(Integer.parseInt(s));
        }
        catch (NumberFormatException nfe)
        {
          // Non-numeric IDs simply have no match.
          return null;
        }
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "AddSeriesInfo", new String[] { "SeriesID", "Title", "Network", "Description", "History", "PremiereDate", "FinaleDate",
        "AirDOW", "AirHrMin", "ImageURL", "People", "Characters" }, true)
    {
      /**
       * Call this to add a SeriesInfo object to the database. If a SeriesInfo with this seriesID is already present, it will be updated
       * to this information. You can use null or String[0] for any fields you don't want to specify.
       * @param SeriesID the ID of the series, this should match the prefix of corresponding ShowIDs w/out the last 4 digits for proper linkage (i.e. the SeriesID for EP1234567890 would be 123456)
       * @param Title the title of the series
       * @param Network the network that airs the series
       * @param Description a description of this series
       * @param History a historical description of the series
       * @param PremiereDate a String representation of the date the series premiered
       * @param FinaleDate a String representation of the date the series ended
       * @param AirDOW a String representation of the day of the week the series airs
       * @param AirHrMin a String representation of the time the series airs
       * @param ImageURL a URL that links to an image for this series
       * @param People names of people/actors in this show
       * @param Characters must be same length as people array, should give the character names the corresponding people have in the series
       * @return the newly added SeriesInfo object, or the updated object if another SeriesInfo object already existed with the same SeriesID
       * @since 7.0
       *
       * @declaration public SeriesInfo AddSeriesInfo(int SeriesID, String Title, String Network, String Description, String History, String PremiereDate, String FinaleDate, String AirDOW, String AirHrMin, String ImageURL, String[] People, String[] Characters);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        // Arguments are popped in reverse of the declared order.
        String[] chars = getStringList(stack);
        String[] peeps = getStringList(stack);
        String url = getString(stack);
        String airHrMin = getString(stack);
        String airDow = getString(stack);
        String finaleDate = getString(stack);
        String premiereDate = getString(stack);
        String history = getString(stack);
        String desc = getString(stack);
        String net = getString(stack);
        String title = getString(stack);
        int serId = getInt(stack);
        return Wizard.getInstance().addSeriesInfo(serId, title, net, desc, history, premiereDate, finaleDate, airDow, airHrMin, url, peeps, chars);
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "GetSeriesInfoProperty", 2, new String[] { "SeriesInfo", "PropertyName" })
    {
      /**
       * Returns a property value for a specified SeriesInfo object. This must have been set using SetSeriesInfoProperty.
       * Returns the empty string when the property is undefined.
       * @param SeriesInfo the SeriesInfo object
       * @param PropertyName the name of the property
       * @return the property value for the specified SeriesInfo, or the empty string if it is not defined
       * @since 7.0
       *
       * @declaration public String GetSeriesInfoProperty(SeriesInfo SeriesInfo, String PropertyName);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        String prop = getString(stack);
        SeriesInfo si = getSeriesInfo(stack);
        // Guard against null like the other SeriesInfo calls do (previously threw NPE).
        return si == null ? "" : si.getProperty(prop);
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "SetSeriesInfoProperty", 3, new String[] { "SeriesInfo", "PropertyName", "PropertyValue" }, true)
    {
      /**
       * Sets a property for this SeriesInfo object. This can be any name/value combination (but the name cannot be null). If the value is null;
       * then the specified property will be removed from this SeriesInfo object. This only impacts the return values from GetSeriesInfoProperty and has no other side effects.
       * @param SeriesInfo the SeriesInfo object
       * @param PropertyName the name of the property
       * @param PropertyValue the value of the property
       * @since 7.0
       *
       * @declaration public void SetSeriesInfoProperty(SeriesInfo SeriesInfo, String PropertyName, String PropertyValue);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        String propV = getString(stack);
        String propN = getString(stack);
        SeriesInfo si = getSeriesInfo(stack);
        // Guard against null like the other SeriesInfo calls do (previously threw NPE).
        if (si != null)
          si.setProperty(propN, propV);
        return null;
      }});
    rft.put(new PredefinedJEPFunction("SeriesInfo", "IsSeriesInfoObject", 1, new String[] { "SeriesInfo" })
    {
      /**
       * Returns true if the argument is a SeriesInfo object. Automatic type conversion is NOT done in this call.
       * @param SeriesInfo the object to test
       * @return true if the argument is an SeriesInfo object
       * @since 7.1
       *
       * @declaration public boolean IsSeriesInfoObject(Object SeriesInfo);
       */
      public Object runSafely(Catbert.FastStack stack) throws Exception{
        Object o = stack.pop();
        // Unwrap VFS nodes so testing a MediaNode wrapping a SeriesInfo still returns true.
        if (o instanceof sage.vfs.MediaNode)
          o = ((sage.vfs.MediaNode) o).getDataObject();
        return Boolean.valueOf(o instanceof SeriesInfo);
      }});
    /*
    rft.put(new PredefinedJEPFunction("SeriesInfo", "", 1, new String[] { "SeriesInfo" })
    {public Object runSafely(Catbert.FastStack stack) throws Exception{
      return getSeriesInfo(stack).;
    }});
     */
  }
}
| |
/**
* Copyright (C) 2013
* by 52 North Initiative for Geospatial Open Source Software GmbH
*
* Contact: Andreas Wytzisk
* 52 North Initiative for Geospatial Open Source Software GmbH
* Martin-Luther-King-Weg 24
* 48155 Muenster, Germany
* info@52north.org
*
* This program is free software; you can redistribute and/or modify it under
* the terms of the GNU General Public License version 2 as published by the
* Free Software Foundation.
*
* This program is distributed WITHOUT ANY WARRANTY; even without the implied
* WARRANTY OF MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* this program (see gnu-gpl v2.txt). If not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA or
* visit the Free Software Foundation web page, http://www.fsf.org.
*/
package org.n52.sos.decode;
import java.math.BigDecimal;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import net.opengis.om.x20.NamedValuePropertyType;
import net.opengis.om.x20.NamedValueType;
import net.opengis.om.x20.OMObservationType;
import net.opengis.om.x20.TimeObjectPropertyType;
import org.apache.xmlbeans.XmlBoolean;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlInteger;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlString;
import org.apache.xmlbeans.impl.values.XmlAnyTypeImpl;
import org.n52.sos.exception.CodedException;
import org.n52.sos.exception.ows.InvalidParameterValueException;
import org.n52.sos.exception.ows.MissingParameterValueException;
import org.n52.sos.exception.ows.OwsExceptionCode;
import org.n52.sos.exception.ows.concrete.UnsupportedDecoderInputException;
import org.n52.sos.ogc.gml.AbstractFeature;
import org.n52.sos.ogc.gml.CodeWithAuthority;
import org.n52.sos.ogc.gml.GmlMeasureType;
import org.n52.sos.ogc.gml.ReferenceType;
import org.n52.sos.ogc.gml.time.Time;
import org.n52.sos.ogc.gml.time.Time.NilReason;
import org.n52.sos.ogc.gml.time.Time.TimeIndeterminateValue;
import org.n52.sos.ogc.gml.time.TimeInstant;
import org.n52.sos.ogc.gml.time.TimePeriod;
import org.n52.sos.ogc.om.AbstractPhenomenon;
import org.n52.sos.ogc.om.NamedValue;
import org.n52.sos.ogc.om.ObservationValue;
import org.n52.sos.ogc.om.OmConstants;
import org.n52.sos.ogc.om.OmObservableProperty;
import org.n52.sos.ogc.om.OmObservation;
import org.n52.sos.ogc.om.OmObservationConstellation;
import org.n52.sos.ogc.om.SingleObservationValue;
import org.n52.sos.ogc.om.values.BooleanValue;
import org.n52.sos.ogc.om.values.CategoryValue;
import org.n52.sos.ogc.om.values.CountValue;
import org.n52.sos.ogc.om.values.GeometryValue;
import org.n52.sos.ogc.om.values.NilTemplateValue;
import org.n52.sos.ogc.om.values.QuantityValue;
import org.n52.sos.ogc.om.values.SweDataArrayValue;
import org.n52.sos.ogc.om.values.TextValue;
import org.n52.sos.ogc.ows.OwsExceptionReport;
import org.n52.sos.ogc.sensorML.SensorML;
import org.n52.sos.ogc.sos.ConformanceClasses;
import org.n52.sos.ogc.sos.Sos2Constants;
import org.n52.sos.ogc.sos.SosProcedureDescription;
import org.n52.sos.ogc.swe.SweDataArray;
import org.n52.sos.service.ServiceConstants.SupportedTypeKey;
import org.n52.sos.util.CodingHelper;
import org.n52.sos.util.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.vividsolutions.jts.geom.Geometry;
/**
* @since 4.0.0
*
*/
public class OmDecoderv20 implements Decoder<Object, Object> {
// Shared SLF4J logger for this decoder.
private static final Logger LOGGER = LoggerFactory.getLogger(OmDecoderv20.class);
// XML element types in the O&M 2.0 namespace this decoder registers for:
// full observations plus single and array om:NamedValue parameters.
private static final Set<DecoderKey> DECODER_KEYS = CodingHelper.decoderKeysForElements(OmConstants.NS_OM_2,
    OMObservationType.class, NamedValuePropertyType.class, NamedValuePropertyType[].class);
// Observation types this decoder advertises support for (geometry, category,
// count, measurement, text, truth and SWE array observations).
private static final Map<SupportedTypeKey, Set<String>> SUPPORTED_TYPES = ImmutableMap.of(
    SupportedTypeKey.ObservationType, (Set<String>) ImmutableSet.of(OmConstants.OBS_TYPE_GEOMETRY_OBSERVATION,
        OmConstants.OBS_TYPE_CATEGORY_OBSERVATION, OmConstants.OBS_TYPE_COUNT_OBSERVATION,
        OmConstants.OBS_TYPE_MEASUREMENT, OmConstants.OBS_TYPE_TEXT_OBSERVATION,
        OmConstants.OBS_TYPE_TRUTH_OBSERVATION, OmConstants.OBS_TYPE_SWE_ARRAY_OBSERVATION));
// OGC conformance classes implemented here; the geometry-observation class is
// deliberately commented out (not claimed).
private static final Set<String> CONFORMANCE_CLASSES = ImmutableSet.of(ConformanceClasses.OM_V2_MEASUREMENT,
    ConformanceClasses.OM_V2_CATEGORY_OBSERVATION, ConformanceClasses.OM_V2_COUNT_OBSERVATION,
    ConformanceClasses.OM_V2_TRUTH_OBSERVATION,
    // ConformanceClasses.OM_V2_GEOMETRY_OBSERVATION,
    ConformanceClasses.OM_V2_TEXT_OBSERVATION);
public OmDecoderv20() {
LOGGER.debug("Decoder for the following keys initialized successfully: {}!", Joiner.on(", ")
.join(DECODER_KEYS));
}
/**
 * @return an unmodifiable view of the decoder keys this decoder handles
 */
@Override
public Set<DecoderKey> getDecoderKeyTypes() {
    final Set<DecoderKey> keys = DECODER_KEYS;
    return Collections.unmodifiableSet(keys);
}
/**
 * @return an unmodifiable view of the observation types this decoder supports,
 *         keyed by {@link SupportedTypeKey}
 */
@Override
public Map<SupportedTypeKey, Set<String>> getSupportedTypes() {
    final Map<SupportedTypeKey, Set<String>> types = SUPPORTED_TYPES;
    return Collections.unmodifiableMap(types);
}
/**
 * @return an unmodifiable view of the OGC conformance classes implemented by
 *         this decoder
 */
@Override
public Set<String> getConformanceClasses() {
    final Set<String> classes = CONFORMANCE_CLASSES;
    return Collections.unmodifiableSet(classes);
}
/**
 * Dispatches to the matching parse method based on the concrete XmlBeans type
 * of the input. The three supported types are mutually exclusive, so the order
 * of the checks does not affect the result.
 *
 * @param object the XmlBeans object to decode
 * @return the decoded internal representation
 * @throws OwsExceptionReport if decoding fails
 * @throws UnsupportedDecoderInputException if the input type is not supported
 */
@Override
public Object decode(Object object) throws OwsExceptionReport {
    // Document validation intentionally disabled:
    // XmlHelper.validateDocument((XmlObject) object);
    if (object instanceof NamedValuePropertyType) {
        return parseNamedValueType((NamedValuePropertyType) object);
    }
    if (object instanceof NamedValuePropertyType[]) {
        return parseNamedValueTypeArray((NamedValuePropertyType[]) object);
    }
    if (object instanceof OMObservationType) {
        return parseOmObservation((OMObservationType) object);
    }
    throw new UnsupportedDecoderInputException(this, object);
}
/**
 * Converts an XmlBeans {@code OMObservationType} into the internal
 * {@code OmObservation} model: identifier, observation constellation,
 * result/valid time, om:parameter values, observation value and feature of
 * interest.
 *
 * @param omObservation the XmlBeans observation to convert
 * @return the populated internal observation
 * @throws OwsExceptionReport if any nested element fails to decode
 */
private OmObservation parseOmObservation(OMObservationType omObservation) throws OwsExceptionReport {
    // Cache of decoded features keyed by identifier. NOTE(review): created fresh per
    // call, so de-duplication only applies within this single observation — confirm
    // whether a cache shared across observations was intended.
    Map<String, AbstractFeature> featureMap = Maps.newHashMap();
    OmObservation sosObservation = new OmObservation();
    sosObservation.setIdentifier(getIdentifier(omObservation));
    OmObservationConstellation observationConstallation = getObservationConstellation(omObservation);
    sosObservation.setObservationConstellation(observationConstallation);
    sosObservation.setResultTime(getResultTime(omObservation));
    sosObservation.setValidTime(getValidTime(omObservation));
    // om:parameter elements are optional.
    if (omObservation.getParameterArray() != null) {
        sosObservation.setParameter(parseNamedValueTypeArray(omObservation.getParameterArray()));
    }
    sosObservation.setValue(getObservationValue(omObservation));
    try {
        Object decodeXmlElement = CodingHelper.decodeXmlElement(omObservation.getFeatureOfInterest());
        if (decodeXmlElement instanceof AbstractFeature) {
            AbstractFeature featureOfInterest = (AbstractFeature) decodeXmlElement;
            observationConstallation.setFeatureOfInterest(checkFeatureWithMap(featureOfInterest, featureMap));
        }
    } catch (OwsExceptionReport e) {
        // For result-template observations (phenomenon time is nil with reason
        // "template") a feature decoding failure is re-thrown as the matching
        // InvalidParameterValue/MissingParameterValue exception; otherwise the
        // original report is propagated unchanged.
        // NOTE(review): this condition mixes sosObservation.getPhenomenonTime() with
        // sosObservation.getValue().getPhenomenonTime() — presumably the former
        // delegates to the latter; confirm they are equivalent.
        if (sosObservation.getValue() != null && sosObservation.getValue().getPhenomenonTime() != null
                && sosObservation.getPhenomenonTime().isSetNilReason()
                && sosObservation.getValue().getPhenomenonTime().getNilReason().equals(NilReason.template)) {
            for (CodedException exception : e.getExceptions()) {
                if (exception.getCode().equals(OwsExceptionCode.InvalidParameterValue)) {
                    throw new InvalidParameterValueException().at(exception.getLocator()).withMessage(
                            exception.getMessage());
                } else if (exception.getCode().equals(OwsExceptionCode.MissingParameterValue)) {
                    throw new MissingParameterValueException(exception.getLocator());
                }
            }
        }
        throw e;
    }
    // TODO: later for spatial filtering profile
    // omObservation.getParameterArray();
    return sosObservation;
}
/**
 * Decodes an array of om:NamedValue property elements.
 *
 * @param namedValuePropertyArray the XmlBeans property elements to decode
 * @return the decoded parameters as an (unordered) set
 * @throws OwsExceptionReport if any element fails to decode
 */
private Set<NamedValue<?>> parseNamedValueTypeArray(NamedValuePropertyType[] namedValuePropertyArray)
        throws OwsExceptionReport {
    final Set<NamedValue<?>> decoded = Sets.newHashSet();
    for (int i = 0; i < namedValuePropertyArray.length; ++i) {
        decoded.add(parseNamedValueType(namedValuePropertyArray[i]));
    }
    return decoded;
}
/**
 * Decodes a single om:NamedValue property. An inline om:NamedValue yields a
 * fully decoded name/value pair; an xlink:href-only property yields a
 * {@code NamedValue} whose name is a {@code ReferenceType} (with optional
 * title) and whose value is left unset.
 *
 * @param namedValueProperty the XmlBeans property element to decode
 * @return the decoded named value
 * @throws OwsExceptionReport if decoding of the name or value fails
 * @throws UnsupportedDecoderInputException if the property carries neither an
 *         inline value nor an href
 */
private NamedValue<?> parseNamedValueType(NamedValuePropertyType namedValueProperty) throws OwsExceptionReport {
    if (namedValueProperty.isSetNamedValue()) {
        final NamedValueType xbNamedValue = namedValueProperty.getNamedValue();
        // Decode the value first, then attach the decoded gml:ReferenceType name.
        final NamedValue<?> decoded = parseNamedValueValue(xbNamedValue.getValue());
        decoded.setName((ReferenceType) CodingHelper.decodeXmlObject(xbNamedValue.getName()));
        return decoded;
    }
    if (namedValueProperty.isSetHref()) {
        final ReferenceType name = new ReferenceType(namedValueProperty.getHref());
        if (namedValueProperty.isSetTitle()) {
            name.setTitle(namedValueProperty.getTitle());
        }
        final NamedValue<ReferenceType> byReference = new NamedValue<ReferenceType>();
        byReference.setName(name);
        return byReference;
    }
    throw new UnsupportedDecoderInputException(this, namedValueProperty);
}
/**
 * Decodes the value element of an om:NamedValue into a typed SOS named value.
 * Values typed as xs:anyType are first re-parsed so the concrete schema type
 * becomes visible to the decoder lookup.
 *
 * @param xmlObject the value element to decode
 * @return a {@code NamedValue} parameterized to match the decoded value type
 * @throws OwsExceptionReport if the decoded value type is not supported
 */
private NamedValue<?> parseNamedValueValue(XmlObject xmlObject) throws OwsExceptionReport {
    if (xmlObject.schemaType() == XmlAnyTypeImpl.type) {
        try {
            // Re-parse the raw XML text to recover the concrete schema type.
            xmlObject = XmlObject.Factory.parse(xmlObject.xmlText().trim());
        } catch (XmlException e) {
            // Best effort: fall through with the original object; decoding
            // below will throw if it is still unsupported.
            LOGGER.error("Error while parsing NamedValueValue", e);
        }
    }
    Object value = CodingHelper.decodeXmlObject(xmlObject);
    // NOTE: the GeometryValue check must precede the raw Geometry check,
    // since only the latter needs wrapping.
    if (value instanceof BooleanValue) {
        NamedValue<Boolean> namedValue = new NamedValue<Boolean>();
        namedValue.setValue((BooleanValue) value);
        return namedValue;
    } else if (value instanceof CategoryValue) {
        NamedValue<String> namedValue = new NamedValue<String>();
        namedValue.setValue((CategoryValue) value);
        return namedValue;
    } else if (value instanceof CountValue) {
        NamedValue<Integer> namedValue = new NamedValue<Integer>();
        namedValue.setValue((CountValue) value);
        return namedValue;
    } else if (value instanceof GeometryValue) {
        NamedValue<Geometry> namedValue = new NamedValue<Geometry>();
        namedValue.setValue((GeometryValue) value);
        return namedValue;
    } else if (value instanceof QuantityValue) {
        NamedValue<BigDecimal> namedValue = new NamedValue<BigDecimal>();
        namedValue.setValue((QuantityValue) value);
        return namedValue;
    } else if (value instanceof TextValue) {
        NamedValue<String> namedValue = new NamedValue<String>();
        namedValue.setValue((TextValue) value);
        return namedValue;
    } else if (value instanceof Geometry) {
        // Raw JTS geometry: wrap it into a GeometryValue first.
        NamedValue<Geometry> namedValue = new NamedValue<Geometry>();
        namedValue.setValue(new GeometryValue((Geometry) value));
        return namedValue;
    } else {
        throw new UnsupportedDecoderInputException(this, xmlObject);
    }
}
/**
 * Extracts the observation's gml:identifier, if present and decodable.
 *
 * @param omObservation the XMLBeans observation
 * @return the decoded identifier, or {@code null} if absent or of an
 *         unexpected type
 * @throws OwsExceptionReport if decoding the identifier element fails
 */
private CodeWithAuthority getIdentifier(OMObservationType omObservation) throws OwsExceptionReport {
    if (omObservation.getIdentifier() == null) {
        return null;
    }
    final Object decoded = CodingHelper.decodeXmlObject(omObservation.getIdentifier());
    return decoded instanceof CodeWithAuthority ? (CodeWithAuthority) decoded : null;
}
/**
 * Builds the observation constellation (observation type, procedure and
 * observed property) from the decoded observation.
 *
 * @param omObservation the XMLBeans observation
 * @return the assembled constellation
 * @throws OwsExceptionReport if any component cannot be decoded
 */
private OmObservationConstellation getObservationConstellation(OMObservationType omObservation)
        throws OwsExceptionReport {
    final OmObservationConstellation constellation = new OmObservationConstellation();
    constellation.setObservationType(getObservationType(omObservation));
    constellation.setProcedure(createProcedure(getProcedure(omObservation)));
    constellation.setObservableProperty(getObservableProperty(omObservation));
    return constellation;
}
/** Returns the xlink:href of om:type, or {@code null} when the element is absent. */
private String getObservationType(OMObservationType omObservation) {
    return omObservation.getType() == null ? null : omObservation.getType().getHref();
}
/** Returns the xlink:href of om:procedure, or {@code null} when the element is absent. */
private String getProcedure(OMObservationType omObservation) {
    return omObservation.getProcedure() == null ? null : omObservation.getProcedure().getHref();
}
/**
 * Wraps the om:observedProperty href into an observable property, or returns
 * {@code null} when the element is absent.
 */
private AbstractPhenomenon getObservableProperty(OMObservationType omObservation) {
    if (omObservation.getObservedProperty() == null) {
        return null;
    }
    return new OmObservableProperty(omObservation.getObservedProperty().getHref());
}
/**
 * Decodes om:phenomenonTime. Three encodings are accepted: a document-internal
 * reference ("#id" href), a nilReason of "template" (used by template
 * observations for InsertResultTemplate), or a concrete gml time object.
 *
 * @param omObservation the XMLBeans observation
 * @return the decoded phenomenon time
 * @throws OwsExceptionReport if the phenomenon time encoding is not supported
 */
private Time getPhenomenonTime(OMObservationType omObservation) throws OwsExceptionReport {
    TimeObjectPropertyType phenomenonTime = omObservation.getPhenomenonTime();
    if (phenomenonTime.isSetHref() && phenomenonTime.getHref().startsWith(Constants.NUMBER_SIGN_STRING)) {
        // Document-internal reference: carry the href as the gml:id so the
        // referenced time can be resolved later.
        TimeInstant timeInstant = new TimeInstant();
        timeInstant.setGmlId(phenomenonTime.getHref());
        return timeInstant;
    } else if (phenomenonTime.isSetNilReason() && phenomenonTime.getNilReason() instanceof String
            && ((String) phenomenonTime.getNilReason()).equals(TimeIndeterminateValue.template.name())) {
        // Template observation: time is indeterminate ("template").
        TimeInstant timeInstant = new TimeInstant();
        timeInstant.setIndeterminateValue(TimeIndeterminateValue.getEnumForString((String) phenomenonTime
                .getNilReason()));
        return timeInstant;
    } else if (phenomenonTime.isSetAbstractTimeObject()) {
        Object decodedObject = CodingHelper.decodeXmlObject(phenomenonTime.getAbstractTimeObject());
        if (decodedObject instanceof Time) {
            return (Time) decodedObject;
        }
        // FIXME else
    }
    throw new InvalidParameterValueException().at(Sos2Constants.InsertObservationParams.observation).withMessage(
            "The requested phenomenonTime type is not supported by this service!");
}
/**
 * Decodes om:resultTime. Accepted encodings: a reference href (document
 * internal "#..." hrefs are mapped to the phenomenon time reference), a
 * nilReason of "template", or a concrete gml:TimeInstant.
 *
 * @param omObservation the XMLBeans observation
 * @return the decoded result time
 * @throws OwsExceptionReport if the result time encoding is not supported
 */
private TimeInstant getResultTime(OMObservationType omObservation) throws OwsExceptionReport {
    if (omObservation.getResultTime().isSetHref()) {
        TimeInstant timeInstant = new TimeInstant();
        timeInstant.setGmlId(omObservation.getResultTime().getHref());
        if (omObservation.getResultTime().getHref().charAt(0) == Constants.NUMBER_SIGN_CHAR) {
            // document internal link
            // TODO parse linked element
            timeInstant.setReference(Sos2Constants.EN_PHENOMENON_TIME);
        } else {
            timeInstant.setReference(omObservation.getResultTime().getHref());
        }
        return timeInstant;
    } else if (omObservation.getResultTime().isSetNilReason()
            && omObservation.getResultTime().getNilReason() instanceof String
            && NilReason.template.equals(NilReason.getEnumForString((String) omObservation.getResultTime()
                    .getNilReason()))) {
        // Template observation: keep the nil reason on the instant.
        TimeInstant timeInstant = new TimeInstant();
        timeInstant
                .setNilReason(NilReason.getEnumForString((String) omObservation.getResultTime().getNilReason()));
        return timeInstant;
    } else if (omObservation.getResultTime().isSetTimeInstant()) {
        Object decodedObject = CodingHelper.decodeXmlObject(omObservation.getResultTime().getTimeInstant());
        if (decodedObject instanceof TimeInstant) {
            return (TimeInstant) decodedObject;
        }
        // Decoder returned an unexpected type.
        throw new InvalidParameterValueException().at(Sos2Constants.InsertObservationParams.observation)
                .withMessage("The requested resultTime type is not supported by this service!");
    } else {
        throw new InvalidParameterValueException().at(Sos2Constants.InsertObservationParams.observation)
                .withMessage("The requested resultTime type is not supported by this service!");
    }
}
/**
 * Decodes the optional om:validTime into a time period.
 *
 * @param omObservation the XMLBeans observation
 * @return the decoded valid time, or {@code null} when not set
 * @throws OwsExceptionReport if a valid time is set but cannot be decoded
 *         to a {@code TimePeriod}
 */
private TimePeriod getValidTime(OMObservationType omObservation) throws OwsExceptionReport {
    if (!omObservation.isSetValidTime()) {
        return null;
    }
    final Object decoded = CodingHelper.decodeXmlObject(omObservation.getValidTime().getTimePeriod());
    if (decoded instanceof TimePeriod) {
        return (TimePeriod) decoded;
    }
    throw new InvalidParameterValueException().at(Sos2Constants.InsertObservationParams.observation)
            .withMessage("The requested validTime type is not supported by this service!");
}
/**
 * Decodes the observation's value together with its phenomenon time. An empty
 * result element combined with a "template" nil reason on the phenomenon time
 * yields a nil-template value (InsertResultTemplate template observation);
 * otherwise the result element is decoded normally.
 *
 * @param omObservation the XMLBeans observation
 * @return the decoded observation value with phenomenon time attached
 * @throws OwsExceptionReport if the time or result cannot be decoded
 */
private ObservationValue<?> getObservationValue(OMObservationType omObservation) throws OwsExceptionReport {
    Time phenomenonTime = getPhenomenonTime(omObservation);
    ObservationValue<?> observationValue;
    if (!omObservation.getResult().getDomNode().hasChildNodes() && phenomenonTime.isSetNilReason()
            && phenomenonTime.getNilReason().equals(NilReason.template)) {
        observationValue = new SingleObservationValue<String>(new NilTemplateValue());
    } else {
        observationValue = getResult(omObservation);
    }
    observationValue.setPhenomenonTime(phenomenonTime);
    return observationValue;
}
/**
 * Decodes the om:result element into an observation value. Plain XML schema
 * result types (boolean, integer, string) are mapped directly; an empty
 * xs:anyType result marks an InsertResultTemplate template observation; any
 * other content is delegated to the registered decoders.
 *
 * @param omObservation the XMLBeans observation to read the result from
 * @return the decoded observation value
 * @throws OwsExceptionReport if the result type is not supported
 */
private ObservationValue<?> getResult(OMObservationType omObservation) throws OwsExceptionReport {
    XmlObject xbResult = omObservation.getResult();
    if (xbResult.schemaType() == XmlAnyTypeImpl.type) {
        // Template observation for InsertResultTemplate operation
        if (!xbResult.getDomNode().hasChildNodes()) {
            return new SingleObservationValue<String>(new NilTemplateValue());
        } else {
            try {
                // Re-parse the raw XML so the concrete schema type is visible.
                xbResult = XmlObject.Factory.parse(xbResult.xmlText().trim());
            } catch (XmlException e) {
                // Fixed log message: previously said "NamedValueValue"
                // (copy/paste from parseNamedValueValue) although this
                // parses the om:result element.
                LOGGER.error("Error while parsing result", e);
            }
        }
    }
    // TruthObservation
    if (xbResult.schemaType() == XmlBoolean.type) {
        XmlBoolean xbBoolean = (XmlBoolean) xbResult;
        BooleanValue booleanValue = new BooleanValue(xbBoolean.getBooleanValue());
        return new SingleObservationValue<Boolean>(booleanValue);
    }
    // CountObservation
    else if (xbResult.schemaType() == XmlInteger.type) {
        XmlInteger xbInteger = (XmlInteger) xbResult;
        CountValue countValue = new CountValue(Integer.parseInt(xbInteger.getBigIntegerValue().toString()));
        return new SingleObservationValue<Integer>(countValue);
    }
    // TextObservation
    else if (xbResult.schemaType() == XmlString.type) {
        XmlString xbString = (XmlString) xbResult;
        TextValue stringValue = new TextValue(xbString.getStringValue());
        return new SingleObservationValue<String>(stringValue);
    }
    // result elements with other encoding like SWE_ARRAY_OBSERVATION
    else {
        Object decodedObject = CodingHelper.decodeXmlObject(xbResult);
        if (decodedObject instanceof ObservationValue) {
            // Parameterized cast instead of the former raw-type cast.
            return (ObservationValue<?>) decodedObject;
        } else if (decodedObject instanceof GmlMeasureType) {
            // Measurement: value plus unit of measure.
            SingleObservationValue<BigDecimal> result = new SingleObservationValue<BigDecimal>();
            GmlMeasureType measureType = (GmlMeasureType) decodedObject;
            QuantityValue quantityValue = new QuantityValue(measureType.getValue(), measureType.getUnit());
            result.setValue(quantityValue);
            return result;
        } else if (decodedObject instanceof ReferenceType) {
            // CategoryObservation encoded as a reference.
            SingleObservationValue<String> result = new SingleObservationValue<String>();
            result.setValue(new CategoryValue(((ReferenceType) decodedObject).getHref()));
            return result;
        } else if (decodedObject instanceof Geometry) {
            SingleObservationValue<Geometry> result = new SingleObservationValue<Geometry>();
            result.setValue(new GeometryValue((Geometry) decodedObject));
            return result;
        } else if (decodedObject instanceof SweDataArray) {
            SweDataArrayValue value = new SweDataArrayValue();
            value.setValue((SweDataArray) decodedObject);
            SingleObservationValue<SweDataArray> result = new SingleObservationValue<SweDataArray>();
            result.setValue(value);
            return result;
        }
        throw new InvalidParameterValueException().at(Sos2Constants.InsertObservationParams.observation)
                .withMessage("The requested result type is not supported by this service!");
    }
}
/**
 * De-duplicates features by gml:id: returns the already-registered feature
 * for the id if one exists, otherwise registers and returns the given one.
 * Features without a gml:id are passed through unchanged.
 *
 * @param featureOfInterest the candidate feature
 * @param featureMap registry of features keyed by gml:id (may be updated)
 * @return the canonical feature instance for this gml:id
 */
private AbstractFeature checkFeatureWithMap(AbstractFeature featureOfInterest,
        Map<String, AbstractFeature> featureMap) {
    final String gmlId = featureOfInterest.getGmlId();
    if (gmlId == null || gmlId.isEmpty()) {
        return featureOfInterest;
    }
    if (featureMap.containsKey(gmlId)) {
        return featureMap.get(gmlId);
    }
    featureMap.put(gmlId, featureOfInterest);
    return featureOfInterest;
}
/** Wraps a procedure identifier in a minimal SensorML procedure description. */
private SosProcedureDescription createProcedure(String procedureIdentifier) {
    final SensorML description = new SensorML();
    description.setIdentifier(procedureIdentifier);
    return description;
}
}
| |
package org.hisp.dhis.system.grid;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hisp.dhis.common.Grid;
import org.hisp.dhis.common.GridHeader;
import org.junit.Before;
import org.junit.Test;
/**
* @author Lars Helge Overland
* @version $Id$
*/
public class GridTest
{
    // gridA: 4x3 grid with headers; columns A/B are visible meta columns,
    // column C is hidden and not meta. gridB: single-row grid, no headers.
    private Grid gridA;
    private Grid gridB;
    private GridHeader headerA;
    private GridHeader headerB;
    private GridHeader headerC;

    @Before
    public void setUp()
    {
        gridA = new ListGrid();
        gridB = new ListGrid();
        // GridHeader( name, column, type, hidden, meta )
        headerA = new GridHeader( "ColA", "colA", String.class.getName(), false, true );
        headerB = new GridHeader( "ColB", "colB", String.class.getName(), false, true );
        headerC = new GridHeader( "ColC", "colC", String.class.getName(), true, false );
        gridA.addHeader( headerA );
        gridA.addHeader( headerB );
        gridA.addHeader( headerC );
        // Cell values encode their position: row digit then column digit.
        gridA.addRow();
        gridA.addValue( 11 );
        gridA.addValue( 12 );
        gridA.addValue( 13 );
        gridA.addRow();
        gridA.addValue( 21 );
        gridA.addValue( 22 );
        gridA.addValue( 23 );
        gridA.addRow();
        gridA.addValue( 31 );
        gridA.addValue( 32 );
        gridA.addValue( 33 );
        gridA.addRow();
        gridA.addValue( 41 );
        gridA.addValue( 42 );
        gridA.addValue( 43 );
        gridB.addRow();
        gridB.addValue( 11 );
        gridB.addValue( 12 );
        gridB.addValue( 13 );
    }

    @Test
    public void testAddGrid()
    {
        gridA.addRows( gridB );
        assertEquals( 5, gridA.getHeight() );
    }

    @Test
    public void testSubstituteMetaData()
    {
        Map<Object, Object> metaData = new HashMap<>();
        metaData.put( 11, "Eleven" );
        metaData.put( 12, "Twelve" );
        metaData.put( 21, "TwentyOne" );
        metaData.put( 22, "TwentyTwo" );
        assertEquals( 11, gridA.getValue( 0, 0 ) );
        assertEquals( 12, gridA.getValue( 0, 1 ) );
        assertEquals( 21, gridA.getValue( 1, 0 ) );
        assertEquals( 22, gridA.getValue( 1, 1 ) );
        gridA.substituteMetaData( metaData );
        assertEquals( "Eleven", gridA.getValue( 0, 0 ) );
        assertEquals( "Twelve", gridA.getValue( 0, 1 ) );
        assertEquals( "TwentyOne", gridA.getValue( 1, 0 ) );
        assertEquals( "TwentyTwo", gridA.getValue( 1, 1 ) );
    }

    @Test
    public void testSubstituteMetaDataForIndex()
    {
        Map<Object, Object> metaData = new HashMap<>();
        metaData.put( 11, "Eleven" );
        metaData.put( 12, "Twelve" );
        metaData.put( 21, "TwentyOne" );
        metaData.put( 22, "TwentyTwo" );
        assertEquals( 11, gridA.getValue( 0, 0 ) );
        assertEquals( 12, gridA.getValue( 0, 1 ) );
        assertEquals( 21, gridA.getValue( 1, 0 ) );
        assertEquals( 22, gridA.getValue( 1, 1 ) );
        // Substitution limited to column index 1 only.
        gridA.substituteMetaData( 1, metaData );
        assertEquals( 11, gridA.getValue( 0, 0 ) );
        assertEquals( "Twelve", gridA.getValue( 0, 1 ) );
        assertEquals( 21, gridA.getValue( 1, 0 ) );
        assertEquals( "TwentyTwo", gridA.getValue( 1, 1 ) );
    }

    @Test
    public void testGetHeight()
    {
        assertEquals( 4, gridA.getHeight() );
    }

    @Test
    public void testGetWidth()
    {
        assertEquals( 3, gridA.getWidth() );
    }

    @Test
    public void testGetRow()
    {
        List<Object> rowA = gridA.getRow( 0 );
        assertTrue( rowA.size() == 3 );
        assertTrue( rowA.contains( 11 ) );
        assertTrue( rowA.contains( 12 ) );
        assertTrue( rowA.contains( 13 ) );
        List<Object> rowB = gridA.getRow( 1 );
        assertTrue( rowB.size() == 3 );
        assertTrue( rowB.contains( 21 ) );
        assertTrue( rowB.contains( 22 ) );
        assertTrue( rowB.contains( 23 ) );
    }

    @Test
    public void testGetHeaders()
    {
        assertEquals( 3, gridA.getHeaders().size() );
    }

    // Renamed from "tetsGetVisibleHeaders" (typo in the original).
    @Test
    public void testGetVisibleHeaders()
    {
        assertEquals( 2, gridA.getVisibleHeaders().size() );
        assertTrue( gridA.getVisibleHeaders().contains( headerA ) );
        assertTrue( gridA.getVisibleHeaders().contains( headerB ) );
    }

    @Test
    public void testGetRows()
    {
        assertEquals( 4, gridA.getRows().size() );
        assertEquals( 3, gridA.getWidth() );
    }

    // Renamed from "testGetGetVisibleRows" (duplicated "Get" in the original).
    @Test
    public void testGetVisibleRows()
    {
        // Hidden column C is excluded from visible rows.
        assertEquals( 4, gridA.getVisibleRows().size() );
        assertEquals( 2, gridA.getVisibleRows().get( 0 ).size() );
        assertEquals( 2, gridA.getVisibleRows().get( 1 ).size() );
        assertEquals( 2, gridA.getVisibleRows().get( 2 ).size() );
        assertEquals( 2, gridA.getVisibleRows().get( 3 ).size() );
    }

    @Test
    public void testGetColumn()
    {
        List<Object> column1 = gridA.getColumn( 1 );
        assertEquals( 4, column1.size() );
        assertTrue( column1.contains( 12 ) );
        assertTrue( column1.contains( 22 ) );
        assertTrue( column1.contains( 32 ) );
        assertTrue( column1.contains( 42 ) );
        List<Object> column2 = gridA.getColumn( 2 );
        assertEquals( 4, column2.size() );
        assertTrue( column2.contains( 13 ) );
        assertTrue( column2.contains( 23 ) );
        assertTrue( column2.contains( 33 ) );
        assertTrue( column2.contains( 43 ) );
    }

    @Test
    public void testAddColumn()
    {
        List<Object> columnValues = new ArrayList<>();
        columnValues.add( 14 );
        columnValues.add( 24 );
        columnValues.add( 34 );
        columnValues.add( 44 );
        gridA.addColumn( columnValues );
        List<Object> column3 = gridA.getColumn( 3 );
        assertEquals( 4, column3.size() );
        assertTrue( column3.contains( 14 ) );
        assertTrue( column3.contains( 24 ) );
        assertTrue( column3.contains( 34 ) );
        assertTrue( column3.contains( 44 ) );
        List<Object> row2 = gridA.getRow( 1 );
        assertEquals( 4, row2.size() );
        assertTrue( row2.contains( 21 ) );
        assertTrue( row2.contains( 22 ) );
        assertTrue( row2.contains( 23 ) );
        assertTrue( row2.contains( 24 ) );
    }

    @Test
    public void testRemoveColumn()
    {
        assertEquals( 3, gridA.getWidth() );
        gridA.removeColumn( 2 );
        assertEquals( 2, gridA.getWidth() );
    }

    @Test
    public void testRemoveColumnByHeader()
    {
        assertEquals( 3, gridA.getWidth() );
        gridA.removeColumn( headerB );
        assertEquals( 2, gridA.getWidth() );
    }

    @Test
    public void testRemoveCurrentWriteRow()
    {
        assertEquals( 4, gridA.getRows().size() );
        gridA.addRow();
        gridA.addValue( 51 );
        gridA.addValue( 52 );
        gridA.addValue( 53 );
        assertEquals( 5, gridA.getRows().size() );
        gridA.removeCurrentWriteRow();
        assertEquals( 4, gridA.getRows().size() );
        // Adding again after removal must work normally.
        gridA.addRow();
        gridA.addValue( 51 );
        gridA.addValue( 52 );
        gridA.addValue( 53 );
        assertEquals( 5, gridA.getRows().size() );
    }

    @Test
    public void testLimit()
    {
        assertEquals( 4, gridA.getRows().size() );
        gridA.limitGrid( 2 );
        assertEquals( 2, gridA.getRows().size() );
        List<Object> rowA = gridA.getRow( 0 );
        assertTrue( rowA.contains( 11 ) );
        List<Object> rowB = gridA.getRow( 1 );
        assertTrue( rowB.contains( 21 ) );
        // A limit of zero is a no-op.
        gridA.limitGrid( 0 );
        assertEquals( 2, gridA.getRows().size() );
    }

    @Test
    public void testLimitShortList()
    {
        // Limits at or above the current size leave the grid unchanged.
        assertEquals( 4, gridA.getRows().size() );
        gridA.limitGrid( 6 );
        assertEquals( 4, gridA.getRows().size() );
        gridA.limitGrid( 4 );
        assertEquals( 4, gridA.getRows().size() );
    }

    @Test
    public void testLimits()
    {
        assertEquals( 4, gridA.getRows().size() );
        gridA.limitGrid( 1, 3 );
        assertEquals( 2, gridA.getRows().size() );
        List<Object> rowA = gridA.getRow( 0 );
        assertTrue( rowA.contains( 21 ) );
        List<Object> rowB = gridA.getRow( 1 );
        assertTrue( rowB.contains( 31 ) );
    }

    @Test
    public void testSortA()
    {
        // Descending sort on string column 2.
        Grid grid = new ListGrid();
        grid.addRow().addValue( 1 ).addValue( "a" );
        grid.addRow().addValue( 2 ).addValue( "b" );
        grid.addRow().addValue( 3 ).addValue( "c" );
        grid.sortGrid( 2, 1 );
        List<Object> row1 = grid.getRow( 0 );
        assertTrue( row1.contains( "c" ) );
        List<Object> row2 = grid.getRow( 1 );
        assertTrue( row2.contains( "b" ) );
        List<Object> row3 = grid.getRow( 2 );
        assertTrue( row3.contains( "a" ) );
    }

    @Test
    public void testSortB()
    {
        // Ascending sort (order -1) on numeric column 1.
        Grid grid = new ListGrid();
        grid.addRow().addValue( 3 ).addValue( "a" );
        grid.addRow().addValue( 2 ).addValue( "b" );
        grid.addRow().addValue( 1 ).addValue( "c" );
        grid.sortGrid( 1, -1 );
        List<Object> row1 = grid.getRow( 0 );
        assertTrue( row1.contains( 1 ) );
        List<Object> row2 = grid.getRow( 1 );
        assertTrue( row2.contains( 2 ) );
        List<Object> row3 = grid.getRow( 2 );
        assertTrue( row3.contains( 3 ) );
    }

    @Test
    public void testSortC()
    {
        Grid grid = new ListGrid();
        grid.addRow().addValue( 1 ).addValue( "c" );
        grid.addRow().addValue( 3 ).addValue( "a" );
        grid.addRow().addValue( 2 ).addValue( "b" );
        grid.sortGrid( 1, 1 );
        List<Object> row1 = grid.getRow( 0 );
        assertTrue( row1.contains( 3 ) );
        List<Object> row2 = grid.getRow( 1 );
        assertTrue( row2.contains( 2 ) );
        List<Object> row3 = grid.getRow( 2 );
        assertTrue( row3.contains( 1 ) );
    }

    @Test
    public void testSortD()
    {
        // Descending sort on a double-valued column.
        Grid grid = new ListGrid();
        grid.addRow().addValue( "a" ).addValue( "a" ).addValue( 5.2 );
        grid.addRow().addValue( "b" ).addValue( "b" ).addValue( 0.0 );
        grid.addRow().addValue( "c" ).addValue( "c" ).addValue( 108.1 );
        grid.addRow().addValue( "d" ).addValue( "d" ).addValue( 45.0 );
        grid.addRow().addValue( "e" ).addValue( "e" ).addValue( 4043.9 );
        grid.addRow().addValue( "f" ).addValue( "f" ).addValue( 0.1 );
        grid = grid.sortGrid( 3, 1 );
        List<Object> row1 = grid.getRow( 0 );
        assertTrue( row1.contains( 4043.9 ) );
        List<Object> row2 = grid.getRow( 1 );
        assertTrue( row2.contains( 108.1 ) );
        List<Object> row3 = grid.getRow( 2 );
        assertTrue( row3.contains( 45.0 ) );
        List<Object> row4 = grid.getRow( 3 );
        assertTrue( row4.contains( 5.2 ) );
        List<Object> row5 = grid.getRow( 4 );
        assertTrue( row5.contains( 0.1 ) );
        List<Object> row6 = grid.getRow( 5 );
        assertTrue( row6.contains( 0.0 ) );
    }

    @Test
    public void testSortE()
    {
        // Null values sort last in descending order.
        Grid grid = new ListGrid();
        grid.addRow().addValue( "two" ).addValue( 2 );
        grid.addRow().addValue( "null" ).addValue( null );
        grid.addRow().addValue( "three" ).addValue( 3 );
        grid.sortGrid( 2, 1 );
        List<Object> row1 = grid.getRow( 0 );
        assertTrue( row1.contains( "three" ) );
        List<Object> row2 = grid.getRow( 1 );
        assertTrue( row2.contains( "two" ) );
        List<Object> row3 = grid.getRow( 2 );
        assertTrue( row3.contains( "null" ) );
    }

    @Test
    public void testSortF()
    {
        // Null values sort first in ascending order.
        Grid grid = new ListGrid();
        grid.addRow().addValue( "two" ).addValue( 2 );
        grid.addRow().addValue( "null" ).addValue( null );
        grid.addRow().addValue( "one" ).addValue( 1 );
        grid.sortGrid( 2, -1 );
        List<Object> row1 = grid.getRow( 0 );
        assertTrue( row1.contains( "null" ) );
        List<Object> row2 = grid.getRow( 1 );
        assertTrue( row2.contains( "one" ) );
        List<Object> row3 = grid.getRow( 2 );
        assertTrue( row3.contains( "two" ) );
    }

    @Test
    public void testGridRowComparator()
    {
        List<List<Object>> lists = new ArrayList<>();
        List<Object> l1 = getList( "b", "b", 50 );
        List<Object> l2 = getList( "c", "c", 400 );
        List<Object> l3 = getList( "a", "a", 6 );
        lists.add( l1 );
        lists.add( l2 );
        lists.add( l3 );
        Comparator<List<Object>> comparator = new ListGrid.GridRowComparator( 2, -1 );
        Collections.sort( lists, comparator );
        assertEquals( l3, lists.get( 0 ) );
        assertEquals( l1, lists.get( 1 ) );
        assertEquals( l2, lists.get( 2 ) );
    }

    @Test
    public void testAddRegressionColumn()
    {
        gridA = new ListGrid();
        gridA.addRow();
        gridA.addValue( 10.0 );
        gridA.addRow();
        gridA.addValue( 50.0 );
        gridA.addRow();
        gridA.addValue( 20.0 );
        gridA.addRow();
        gridA.addValue( 60.0 );
        gridA.addRegressionColumn( 0, true );
        List<Object> column = gridA.getColumn( 1 );
        assertTrue( column.size() == 4 );
        assertTrue( column.contains( 17.0 ) );
        assertTrue( column.contains( 29.0 ) );
        assertTrue( column.contains( 41.0 ) );
        assertTrue( column.contains( 53.0 ) );
    }

    @Test
    public void testAddCumulativeColumn()
    {
        gridA = new ListGrid();
        gridA.addRow();
        gridA.addValue( 10.0 );
        gridA.addRow();
        gridA.addValue( 50.0 );
        gridA.addRow();
        gridA.addValue( 20.0 );
        gridA.addRow();
        gridA.addValue( 60.0 );
        gridA.addCumulativeColumn( 0, true );
        List<Object> column = gridA.getColumn( 1 );
        assertTrue( column.size() == 4 );
        assertTrue( column.contains( 10.0 ) );
        assertTrue( column.contains( 60.0 ) );
        assertTrue( column.contains( 80.0 ) );
        assertTrue( column.contains( 140.0 ) );
    }

    @Test
    public void testGetMetaColumnIndexes()
    {
        // Headers A and B are flagged as meta columns.
        List<Integer> expected = new ArrayList<>();
        expected.add( 0 );
        expected.add( 1 );
        assertEquals( expected, gridA.getMetaColumnIndexes() );
    }

    @Test
    public void testGetUniqueValues()
    {
        // Duplicate of row 1 added; unique values must collapse it.
        gridA.addRow();
        gridA.addValue( 11 );
        gridA.addValue( 12 );
        gridA.addValue( 13 );
        Set<Object> expected = new HashSet<>();
        expected.add( 12 );
        expected.add( 22 );
        expected.add( 32 );
        expected.add( 42 );
        assertEquals( expected, gridA.getUniqueValues( "ColB" ) );
    }

    @Test
    public void testGetAsMap()
    {
        // Keys are the leading columns joined by "-", value is column 2.
        Map<String, Integer> map = gridA.getAsMap( 2, "-" );
        assertEquals( 4, map.size() );
        assertEquals( Integer.valueOf( 13 ), map.get( "11-12" ) );
        assertEquals( Integer.valueOf( 23 ), map.get( "21-22" ) );
        assertEquals( Integer.valueOf( 33 ), map.get( "31-32" ) );
        assertEquals( Integer.valueOf( 43 ), map.get( "41-42" ) );
    }

    @Test
    public void testJRDataSource() throws Exception
    {
        // Grid acts as a JasperReports data source; fields map to columns.
        assertTrue( gridA.next() );
        assertEquals( 11, gridA.getFieldValue( new MockJRField( "colA" ) ) );
        assertEquals( 12, gridA.getFieldValue( new MockJRField( "colB" ) ) );
        assertEquals( 13, gridA.getFieldValue( new MockJRField( "colC" ) ) );
        assertTrue( gridA.next() );
        assertEquals( 21, gridA.getFieldValue( new MockJRField( "colA" ) ) );
        assertEquals( 22, gridA.getFieldValue( new MockJRField( "colB" ) ) );
        assertEquals( 23, gridA.getFieldValue( new MockJRField( "colC" ) ) );
        assertTrue( gridA.next() );
        assertEquals( 31, gridA.getFieldValue( new MockJRField( "colA" ) ) );
        assertEquals( 32, gridA.getFieldValue( new MockJRField( "colB" ) ) );
        assertEquals( 33, gridA.getFieldValue( new MockJRField( "colC" ) ) );
        assertTrue( gridA.next() );
        assertEquals( 41, gridA.getFieldValue( new MockJRField( "colA" ) ) );
        assertEquals( 42, gridA.getFieldValue( new MockJRField( "colB" ) ) );
        assertEquals( 43, gridA.getFieldValue( new MockJRField( "colC" ) ) );
        assertFalse( gridA.next() );
    }

    /** Collects the given items into a mutable list. */
    private static List<Object> getList( Object... items )
    {
        List<Object> list = new ArrayList<>();
        for ( Object item : items )
        {
            list.add( item );
        }
        return list;
    }
}
| |
/*------------------------------------------------------------------------
* (The MIT License)
*
* Copyright (c) 2008-2011 Rhomobile, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* http://rhomobile.com
*------------------------------------------------------------------------*/
package com.rhomobile.rhodes.mainview;
import java.util.ArrayList;
import java.util.Iterator;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.io.InputStream;
import java.util.Map;
import java.util.Vector;
import java.util.List;
import com.rhomobile.rhodes.R;
import com.rhomobile.rhodes.Logger;
import com.rhomobile.rhodes.RhoConf;
import com.rhomobile.rhodes.RhodesActivity;
import com.rhomobile.rhodes.RhodesAppOptions;
import com.rhomobile.rhodes.RhodesService;
import com.rhomobile.rhodes.extmanager.IRhoWebView;
import com.rhomobile.rhodes.file.RhoFileApi;
import com.rhomobile.rhodes.mainview.MainView;
import com.rhomobile.rhodes.nativeview.IRhoCustomView;
import com.rhomobile.rhodes.nativeview.RhoNativeViewManager;
import com.rhomobile.rhodes.util.ContextFactory;
import com.rhomobile.rhodes.util.PerformOnUiThread;
import com.rhomobile.rhodes.util.Utils;
import android.app.ActionBar.LayoutParams;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Message;
import android.util.DisplayMetrics;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.webkit.WebView;
import android.widget.AbsoluteLayout;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
public class SimpleMainView implements MainView {
// Log tag for this class.
private final static String TAG = "SimpleMainView";
// Shorthand for the standard Android layout size constants.
private static final int WRAP_CONTENT = ViewGroup.LayoutParams.WRAP_CONTENT;
private static final int FILL_PARENT = ViewGroup.LayoutParams.MATCH_PARENT;
private class ActionBack implements View.OnClickListener {
public void onClick(View v) {
goBack();//back(0);
}
};
/**
 * Fully transparent overlay view. In addWebViewToMainView it is stacked on
 * top of the WebView (when the com.rho.development Development extension is
 * present) so multi-finger taps can be observed while all touch events are
 * still forwarded to the WebView underneath.
 */
public static class TransparencyViewForCaptureTouchEvents extends View {
    public TransparencyViewForCaptureTouchEvents(Context context) {
        super(context);
    }
}
/**
 * Adds the web view's container to the main layout. If the optional
 * com.rho.development Development extension is on the classpath and the
 * platform web view is an Android {@link WebView}, additionally installs a
 * transparent overlay that detects 3-finger taps (RhodesActivity.onTripleTap)
 * and 4-finger taps (RhodesActivity.onQuadroTap) while forwarding every touch
 * event to the web view.
 *
 * @param webView the web view wrapper to insert
 * @param index child position within the main layout
 * @param params layout parameters for the container
 */
private void addWebViewToMainView(IRhoWebView webView, int index, LinearLayout.LayoutParams params) {
    view.addView(webView.getContainerView(), index, params);
    Utils.platformLog("Development Extra Simple View WV", "$$$$$ setup $$$$$");
    Context activity = ContextFactory.getUiContext();
    View al = webView.getView();
    if (al instanceof WebView) {
        // we have android WebView
        // check for exist developer extension
        try {
            // Existence probe only; the loaded class itself is not used.
            Class<?> dev_class = Class.forName("com.rho.development.Development");
            // we have Development extension !
            // check for already add transparency view
            ViewGroup cv = webView.getContainerView();
            int child_count = cv.getChildCount();
            int i;
            boolean hasTransparencyView = false;
            for (i = 0; i < child_count; i++) {
                View v = cv.getChildAt(i);
                if (v instanceof TransparencyViewForCaptureTouchEvents) {
                    hasTransparencyView = true;
                }
            }
            if (!hasTransparencyView) {
                final View dst = al;
                View transView = new TransparencyViewForCaptureTouchEvents(activity);
                transView.setClickable(true);
                transView.setBackgroundColor(Color.TRANSPARENT);
                transView.setOnTouchListener(new View.OnTouchListener() {
                    // Target view that receives the forwarded touch events.
                    private View mDST = dst;
                    // Highest pointer count seen during the current gesture.
                    private int mCurrentMax = 0;
                    @Override
                    public boolean onTouch(View v, MotionEvent event) {
                        //Utils.platformLog("Development Extra Simple View TV", "$$$$$ TOUCH EVENT TV ["+MotionEvent.actionToString(event.getAction())+", "+String.valueOf(event.getPointerCount())+"] $$$$$");
                        if ((event.getActionMasked() == MotionEvent.ACTION_UP) || (event.getActionMasked() == MotionEvent.ACTION_POINTER_UP)) {
                            //Utils.platformLog("Development Extra Simple View", "$$$$$ UP TV ["+String.valueOf(event.getPointerCount())+"] $$$$$");
                            // Fire only when the gesture peaked at exactly
                            // 3 (or 4) pointers and is now releasing.
                            if ((event.getPointerCount() == 3) && (mCurrentMax == 3)) {
                                //Utils.platformLog("Development Extra Simple View", "$$$$$ TRIPLE TAP TV $$$$$");
                                RhodesActivity.onTripleTap();
                            }
                            if ((event.getPointerCount() == 4) && (mCurrentMax == 4)) {
                                //Utils.platformLog("Development Extra Simple View", "$$$$$ QUADRO TAP TV $$$$$");
                                RhodesActivity.onQuadroTap();
                            }
                            if (event.getPointerCount() == 1) {
                                // Last finger released: gesture finished.
                                mCurrentMax = 0;
                            }
                        }
                        else if (event.getAction() == MotionEvent.ACTION_CANCEL) {
                            mCurrentMax = 0;
                        }
                        else if ((event.getActionMasked() == MotionEvent.ACTION_DOWN) || (event.getActionMasked() == MotionEvent.ACTION_POINTER_DOWN)) {
                            if (event.getPointerCount() > mCurrentMax) {
                                mCurrentMax = event.getPointerCount();
                            }
                        }
                        // Always pass the event through to the web view.
                        mDST.dispatchTouchEvent(event);
                        return false;
                    }
                });
                cv.addView(transView, new AbsoluteLayout.LayoutParams(FILL_PARENT, FILL_PARENT, 0, 0));
                cv.bringChildToFront(transView);
            }
        } catch (ClassNotFoundException e) {
            // Development extension not installed - nothing to do.
            //e.printStackTrace();
        }
    }
}
/** Detaches the current web view's container from the main layout. */
private void removeWebViewFromMainView() {
    view.removeView(webView.getContainerView());
}
public class MyView extends LinearLayout {
public MyView(Context ctx) {
super(ctx);
}
protected void onSizeChanged (int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
StringBuilder msg = new StringBuilder();
msg.append(" Main Window :: onSizeChanged() old [ ");
msg.append(w);
msg.append(" x ");
msg.append(h);
msg.append(" ] new [ ");
msg.append(oldw);
msg.append(" x ");
msg.append(oldh);
msg.append(" ]");
Utils.platformLog("SimpleMainView.View", msg.toString());
}
}
private class ActionForward implements View.OnClickListener {
public void onClick(View v) {
forward(0);
}
};
private class ActionHome implements View.OnClickListener {
public void onClick(View v) {
navigate(RhodesAppOptions.getStartUrl(), 0);
}
};
private class ActionOptions implements View.OnClickListener {
public void onClick(View v) {
navigate(RhodesAppOptions.getOptionsUrl(), 0);
}
};
private class ActionRefresh implements View.OnClickListener {
public void onClick(View v) {
reload(0);
}
};
private class ActionExit implements View.OnClickListener {
public void onClick(View v) {
restoreWebView();
RhodesService.exit();
}
};
private class ActionCustom implements View.OnClickListener {
private String url;
public ActionCustom(String u) {
url = u;
}
public void onClick(View v) {
PerformOnUiThread.exec(new Runnable() {
public void run() {
RhodesService.loadUrl(ActionCustom.this.url);
}
});
}
};
    // Root vertical layout: [navBar (optional)] [web or custom view] [toolBar (optional)].
    private LinearLayout view;
    // Embedded browser; null after detachWebView().
    private IRhoWebView webView;
    // Native view currently replacing the web view, or null when the web view is shown.
    private IRhoCustomView mRhoCustomView = null;
    // Navigation bar added via addNavBar(), null when absent.
    private LinearLayout navBar = null;
    // Bottom toolbar created in init(), may be emptied but is never null afterwards.
    private LinearLayout toolBar = null;
    // Custom toolbar/button background color (ARGB); only used when the enable flag is set.
    private int mCustomBackgroundColor = 0;
    private boolean mCustomBackgroundColorEnable = false;
    // Returns the root layout of this main view.
    public View getView() {
        return view;
    }
    // Single-tab view: the tab index is ignored; there is only one web view.
    @Override
    public IRhoWebView getWebView(int tab_index) {
        return webView;
    }
    // Single-tab view: any native web view maps to the one wrapped web view.
    @Override
    public IRhoWebView getWebView(Object nativeWebView) {
        return webView;
    }
    // Single-tab view: everything lives on tab 0.
    @Override
    public int getWebViewTab(Object nativeWebView) {
        return 0;
    }
    /**
     * Replaces the web view area with a native custom view, or clears any
     * active custom view when {@code customView} is null. The nav bar and
     * toolbar keep their top/bottom positions around the content.
     */
    public void setCustomView(IRhoCustomView customView) {
        // Tear down any previously installed custom view and re-attach the web view first.
        restoreWebView();
        if (customView != null) {
            removeWebViewFromMainView();
            mRhoCustomView = customView;
            // Detach both bars so the custom view can be inserted between them...
            if (navBar != null) {
                view.removeView(navBar);
            }
            if (toolBar != null) {
                view.removeView(toolBar);
            }
            // ...then rebuild the stack: navBar, custom view (weight 1 fills the
            // remaining height), toolBar.
            int index = 0;
            if (navBar != null) {
                view.addView(navBar, index);
                index++;
            }
            view.addView(customView.getContainerView(), index, new LinearLayout.LayoutParams(FILL_PARENT, 0, 1));
            index++;
            if (toolBar != null) {
                view.addView(toolBar, index);
            }
        } else {
            // restoreWebView() above already detached/destroyed the old custom view.
            mRhoCustomView = null;
        }
    }
public void restoreWebView() {
if (mRhoCustomView != null) {
view.removeView(mRhoCustomView.getContainerView());
mRhoCustomView.destroyView();
mRhoCustomView = null;
if (navBar != null) {
view.removeView(navBar);
}
if (toolBar != null) {
view.removeView(toolBar);
}
int index = 0;
if (navBar != null) {
view.addView(navBar, index);
index++;
}
addWebViewToMainView(webView, index, new LinearLayout.LayoutParams(FILL_PARENT, 0, 1));
index++;
if (toolBar != null) {
view.addView(toolBar, index);
}
}
if (mRhoCustomView != null) {
mRhoCustomView.destroyView();
mRhoCustomView = null;
}
}
    /**
     * Scans {@code _url} for "protocol:" prefixes that map to registered native
     * views. A "call_stay_native:" prefix is stripped and the remainder is
     * returned for loading in the current (native) view. If a protocol matches
     * a native view type, that view is installed and asked to navigate; an
     * empty string is returned to suppress web navigation. Otherwise the web
     * view is restored and the URL returned unchanged.
     */
    private String processForNativeView(String _url) {
        Logger.T(TAG, "processForNativiewView: " + _url); // NOTE(review): typo in log text predates this review
        String url = _url;
        String callback_prefix = "call_stay_native";
        // find protocol:navto pairs
        int last = -1;
        int cur = url.indexOf(":", last+1);
        while (cur > 0) {
            // Candidate protocol is everything between the previous separator and this ':'.
            String protocol = url.substring(last+1, cur);
            String navto = url.substring(cur+1, url.length());
            if (callback_prefix.equals(protocol)) {
                // navigate but still in native view
                String cleared_url = url.substring(callback_prefix.length()+1, url.length());
                return cleared_url;
            }
            // check protocol for nativeView
            IRhoCustomView nvf = RhoNativeViewManager.getNativeViewByType(protocol);
            if (nvf != null) {
                // Only swap the custom view if the active one is of a different type.
                if (mRhoCustomView != null) {
                    if ( !protocol.equals(mRhoCustomView.getViewType()) ) {
                        setCustomView(nvf);
                    }
                }
                else {
                    setCustomView(nvf);
                }
                if (mRhoCustomView != null) {
                    mRhoCustomView.navigate(navto);
                    return "";
                }
            }
            last = cur;
            // Advance the cursor to whichever of the next ':' or '/' comes first
            // (falling back to the other when one is absent); loop exits when
            // neither is found (cur becomes -1).
            int c1 = url.indexOf(":", last+1);
            int c2 = url.indexOf("/", last+1);
            if ((c1 < c2)) {
                if (c1 <= 0) {
                    cur = c2;
                }
                else {
                    cur = c1;
                }
            }
            else {
                if (c2 <= 0) {
                    cur = c1;
                }
                else {
                    cur = c2;
                }
            }
        }
        // No native-view protocol matched: make sure the web view is showing.
        restoreWebView();
        return url;
    }
public IRhoWebView detachWebView() {
restoreWebView();
IRhoWebView v = null;
if (webView != null) {
removeWebViewFromMainView();
v = webView;
webView = null;
}
return v;
}
    // Releases the underlying web view's resources, if one is still attached.
    public void destroy() {
        if (webView != null) {
            webView.destroy();
        }
    }
private View createButton(Map<Object,Object> hash) {
Context ctx = RhodesActivity.getContext();
Object actionObj = hash.get("action");
if (actionObj == null || !(actionObj instanceof String))
throw new IllegalArgumentException("'action' should be String");
String action = (String)actionObj;
if (action.length() == 0)
throw new IllegalArgumentException("'action' should not be empty");
Drawable icon = null;
String label = null;
View.OnClickListener onClick = null;
if (action.equalsIgnoreCase("back")) {
icon = ctx.getResources().getDrawable(R.drawable.back);
onClick = new ActionBack();
}
else if (action.equalsIgnoreCase("forward")) {
if (RhodesService.isJQTouch_mode()) {
return null;
}
icon = ctx.getResources().getDrawable(R.drawable.next);
onClick = new ActionForward();
}
else if (action.equalsIgnoreCase("home")) {
icon = ctx.getResources().getDrawable(R.drawable.home);
onClick = new ActionHome();
}
else if (action.equalsIgnoreCase("options")) {
icon = ctx.getResources().getDrawable(R.drawable.options);
onClick = new ActionOptions();
}
else if (action.equalsIgnoreCase("refresh")) {
icon = ctx.getResources().getDrawable(R.drawable.refresh);
onClick = new ActionRefresh();
}
else if (action.equalsIgnoreCase("close") || action.equalsIgnoreCase("exit")) {
icon = ctx.getResources().getDrawable(R.drawable.exit);
onClick = new ActionExit();
}
else if (action.equalsIgnoreCase("separator"))
return null;
DisplayMetrics metrics = new DisplayMetrics();
WindowManager wm = (WindowManager)ctx.getSystemService(Context.WINDOW_SERVICE);
wm.getDefaultDisplay().getMetrics(metrics);
Object iconObj = hash.get("icon");
if (iconObj != null) {
if (!(iconObj instanceof String))
throw new IllegalArgumentException("'icon' should be String");
InputStream is = null;
String iconPath = (String)iconObj;
is = RhoFileApi.open(iconPath);
if (is == null) {
iconPath = RhoFileApi.normalizePath(iconPath);
is = RhoFileApi.open(iconPath);
}
if (is == null) {
iconPath = "apps/" + (String)iconObj;
iconPath = RhoFileApi.normalizePath(iconPath);
is = RhoFileApi.open(iconPath);
}
if (is == null) {
throw new IllegalArgumentException("Can't find icon file: " + iconPath);
}
Bitmap bitmap = BitmapFactory.decodeStream(is);
if (bitmap == null)
throw new IllegalArgumentException("Can't find icon: " + iconPath);
bitmap.setDensity(DisplayMetrics.DENSITY_MEDIUM);
icon = new BitmapDrawable(bitmap);
}
if (icon == null) {
Object labelObj = hash.get("label");
if (labelObj == null || !(labelObj instanceof String))
throw new IllegalArgumentException("'label' should be String");
label = (String)labelObj;
}
if (icon == null && label == null)
throw new IllegalArgumentException("One of 'icon' or 'label' should be specified");
if (onClick == null)
onClick = new ActionCustom(action);
View button;
if (icon != null) {
ImageButton btn = new ImageButton(ctx);
btn.setImageDrawable(icon);
button = btn;
if (mCustomBackgroundColorEnable) {
Drawable d = btn.getBackground();
if (d != null) {
d.setColorFilter(mCustomBackgroundColor, android.graphics.PorterDuff.Mode.SRC_OVER);
}
else {
btn.setBackgroundColor(mCustomBackgroundColor);
}
}
}
else {
Button btn = new Button(ctx);
btn.setText(label);
if (mCustomBackgroundColorEnable) {
btn.setBackgroundColor(mCustomBackgroundColor);
int gray = (((mCustomBackgroundColor & 0xFF0000) >> 16) + ((mCustomBackgroundColor & 0xFF00) >> 8) + ((mCustomBackgroundColor & 0xFF)))/3;
if (gray > 128) {
btn.setTextColor(0xFF000000);
}
else {
btn.setTextColor(0xFFFFFFFF);
}
}
button = btn;
}
button.setOnClickListener(onClick);
return button;
}
@SuppressWarnings("unchecked")
private void setupToolbar(LinearLayout tool_bar, Object params, Object options) {
Context ctx = RhodesActivity.getContext();
mCustomBackgroundColorEnable = false;
List<Object> buttons = null;
if (params != null) {
if (params instanceof List<?>) {
buttons = (List<Object>)params;
}
if ((options != null) && (options instanceof Map<?,?>))
{
Map<Object,Object> settings = (Map<Object,Object>)options;
Object colorObj = settings.get("color");
if (colorObj != null && (colorObj instanceof Map<?,?>)) {
Map<Object,Object> color = (Map<Object,Object>)colorObj;
Object redObj = color.get("red");
Object greenObj = color.get("green");
Object blueObj = color.get("blue");
if (redObj != null && greenObj != null && blueObj != null &&
(redObj instanceof Integer) && (greenObj instanceof Integer) && (blueObj instanceof Integer)) {
try {
int red = ((Integer)redObj).intValue() ;//Integer.parseInt((String)redObj);
int green = ((Integer)greenObj).intValue() ;//Integer.parseInt((String)greenObj);
int blue = ((Integer)blueObj).intValue() ;//Integer.parseInt((String)blueObj);
mCustomBackgroundColor = ((red & 0xFF ) << 16) | ((green & 0xFF ) << 8) | ((blue & 0xFF )) | 0xFF000000;
mCustomBackgroundColorEnable = true;
tool_bar.setBackgroundColor(Color.rgb(red, green, blue));
}
catch (NumberFormatException e) {
// Do nothing here
}
}
}
Object bkgObj = settings.get("backgroundColor");
if ((bkgObj != null) && (bkgObj instanceof Integer)) {
int color = ((Integer)bkgObj).intValue() ;//Integer.decode(((String)bkgObj)).intValue();
int red = (color & 0xFF0000) >> 16;
int green = (color & 0xFF00) >> 8;
int blue = (color & 0xFF);
tool_bar.setBackgroundColor(Color.rgb(red, green, blue));
mCustomBackgroundColor = color | 0xFF000000;
mCustomBackgroundColorEnable = true;
}
//Object buttonsObj = settings.get("buttons");
//if (buttonsObj != null && (buttonsObj instanceof Vector<?>))
// buttons = (Vector<Object>)buttonsObj;
}
}
if (params != null) {
LinearLayout group = null;
// First group should have gravity LEFT
int gravity = Gravity.LEFT;
Object[] buttons_array = buttons.toArray();
for (int i = 0, lim = buttons_array.length; i < lim; ++i) {
Object param = buttons_array[i];
if (!(param instanceof Map<?,?>))
throw new IllegalArgumentException("Hash expected");
Map<Object, Object> hash = (Map<Object, Object>)param;
View button = createButton(hash);
if (button == null) {
group = null;
gravity = Gravity.CENTER;
continue;
}
button.setLayoutParams(new LinearLayout.LayoutParams(WRAP_CONTENT, WRAP_CONTENT));
if (group == null) {
group = new LinearLayout(ctx);
group.setGravity(gravity);
group.setOrientation(LinearLayout.HORIZONTAL);
group.setLayoutParams(new LinearLayout.LayoutParams(WRAP_CONTENT, FILL_PARENT, 1));
tool_bar.addView(group);
}
group.addView(button);
}
// Last group should have gravity RIGHT
if (group != null) {
group.setGravity(Gravity.RIGHT);
tool_bar.requestLayout();
}
}
}
    /**
     * Builds the root layout: a bottom-gravity vertical LinearLayout with a
     * gray toolbar at the bottom, populated from {@code params}. The web view
     * is attached separately via setWebView().
     */
    private void init(Object params) {
        Context activity = ContextFactory.getUiContext();
        view = new MyView(activity);
        view.setOrientation(LinearLayout.VERTICAL);
        view.setGravity(Gravity.BOTTOM);
        view.setLayoutParams(new LinearLayout.LayoutParams(FILL_PARENT, FILL_PARENT));
        LinearLayout bottom = new LinearLayout(activity);
        bottom.setOrientation(LinearLayout.HORIZONTAL);
        bottom.setBackgroundColor(Color.GRAY);
        bottom.setLayoutParams(new LinearLayout.LayoutParams(FILL_PARENT, WRAP_CONTENT, 0));
        view.addView(bottom);
        toolBar = bottom;
        setupToolbar(toolBar, params, null);
    }
    // Empty view: toolbar only, no web view attached yet.
    public SimpleMainView() {
        init(null);
    }
    // View wrapping an existing web view, default toolbar.
    public SimpleMainView(IRhoWebView v) {
        init(null);
        setWebView(v, 0);
    }
    // View wrapping an existing web view with custom toolbar params.
    public SimpleMainView(IRhoWebView v, Object params) {
        init(params);
        setWebView(v, 0);
    }
    // Installs the web view at the top of the layout (weight 1 so it fills the
    // space above the toolbar). tabIndex is ignored — this is a single-tab view.
    @Override
    public void setWebView(IRhoWebView view, int tabIndex) {
        webView = view;
        addWebViewToMainView(webView, 0, new LinearLayout.LayoutParams(FILL_PARENT, 0, 1));
    }
    // Applies a background color to the root layout, the web view's container,
    // and the web view itself.
    public void setWebBackgroundColor(int color) {
        view.setBackgroundColor(color);
        webView.getContainerView().setBackgroundColor(color);
        webView.getView().setBackgroundColor(color);
    }
    /**
     * History-back: if not already on the start page and the web view can go
     * back, goes back; otherwise sends the activity to the background.
     * NOTE(review): assumes RhoConf.getString("start_path") is non-null — confirm.
     */
    public void back(int index) {
        restoreWebView();
        String startUrl = RhoConf.getString("start_path");
        boolean bStartPage = startUrl.equals(webView.getUrl());
        if ( !bStartPage && webView.canGoBack() ) {
            webView.goBack();
        }
        else
        {
            RhodesActivity.safeGetInstance().moveTaskToBack(true);
        }
    }
    // Delegates back navigation to the Rhodes service (application-level back).
    public void goBack()
    {
        RhodesService.navigateBack();
    }
    // Restores the web view (if a custom view is active) and goes forward in history.
    public void forward(int index) {
        restoreWebView();
        webView.goForward();
    }
public void navigate(String url, int index) {
//Utils.platformLog("@&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&@", "navigate("+url+")");
String cleared_url = processForNativeView(url);
Logger.I(TAG, "Cleared URL: " + url);
if (cleared_url.length() > 0) {
// check for handle because if we call loadUrl - WebView do not check this url for handle
if (!RhodesService.getInstance().handleUrlLoading(cleared_url)) {
webView.loadUrl(cleared_url);
}
}
}
    //@Override
    /**
     * Executes JavaScript in the web view.
     * On Android below 4.0 or at/above 4.4 it simply navigates to a
     * "javascript:" URL. On 4.0-4.3 it first tries to post the script directly
     * to the internal WebViewCore via reflection (message what=194 —
     * presumably the EXECUTE_JS message id in those releases; this is an
     * internal, version-specific constant), falling back to the
     * "javascript:" URL if reflection fails at any step.
     */
    public void executeJS(String js, int index) {
        //Utils.platformLog("@$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$@", "ExecuteJS("+js+")");
        //((android.webkit.WebView)webView.getView()).executeJS();
        if ((android.os.Build.VERSION.SDK_INT < 14) || (android.os.Build.VERSION.SDK_INT >= 19)) { // 14 is 4.0, 19 is 4.4
            navigate("javascript:"+js, index);
            return;
        }
        Method mStringByEvaluatingJavaScriptFromString = null;
        Method mSendMessage = null;
        Object mWebViewCore = null;
        Object mBrowserFrame = null;
        boolean mHasPossibleUseOfReflectionExecuteJS = false;
        Object webViewObject = this;
        Class webViewClass = android.webkit.WebView.class;
        try {
            // Unwrap WebView -> mProvider (WebViewClassic) -> mWebViewCore.
            Field mp = webViewClass.getDeclaredField("mProvider");
            mp.setAccessible(true);
            webViewObject = mp.get((android.webkit.WebView)webView.getView());
            webViewClass = webViewObject.getClass();
            Field wc = webViewClass.getDeclaredField("mWebViewCore");
            wc.setAccessible(true);
            mWebViewCore = wc.get(webViewObject);
            if (mWebViewCore != null) {
                mSendMessage = mWebViewCore.getClass().getDeclaredMethod("sendMessage", Message.class);
                mSendMessage.setAccessible(true);
                /*
                Field bf= mWebViewCore.getClass().getDeclaredField("mBrowserFrame");
                bf.setAccessible(true);
                mBrowserFrame = bf.get(mWebViewCore);
                mStringByEvaluatingJavaScriptFromString = mBrowserFrame.getClass().getDeclaredMethod("stringByEvaluatingJavaScriptFromString", String.class);
                mStringByEvaluatingJavaScriptFromString.setAccessible(true);
                */
            }
            mHasPossibleUseOfReflectionExecuteJS = true;
        } catch (Throwable e) {
            // Reflection target not present on this build; fall back below.
            mHasPossibleUseOfReflectionExecuteJS = false;
            //e.printStackTrace();
        }
        boolean mHasReflectionWasExecutedOK = false;
        if (mHasPossibleUseOfReflectionExecuteJS && (mSendMessage != null)) {
            try {
                //mStringByEvaluatingJavaScriptFromString.invoke(mBrowserFrame, js);
                Message execJSCodeMsg = Message.obtain(null, 194, js);
                mSendMessage.invoke(mWebViewCore, execJSCodeMsg);
                mHasReflectionWasExecutedOK = true;
                //Utils.platformLog("@#########################@", "EvaluateJS("+js+")");
            } catch (Throwable e) {
                //e.printStackTrace();
            }
        }
        if (!mHasReflectionWasExecutedOK) {
            //com.rhomobile.rhodes.WebView.executeJs(js, index);
            navigate("javascript:"+js, index);
        }
    }
    // Reloads the current page; when a custom view is active it is only redrawn.
    public void reload(int index) {
        if (mRhoCustomView != null) {
            mRhoCustomView.getView().invalidate();
        }
        else {
            webView.reload();
        }
    }
public void stopNavigate(int index) {
if (mRhoCustomView == null) {
webView.stopLoad();
}
else {
mRhoCustomView.stop();
}
}
    // Current URL of the (single) web view.
    public String currentLocation(int index) {
        return webView.getUrl();
    }
    // Single-tab view: tab switching is a no-op.
    public void switchTab(int index) {
        // Nothing
    }
    // Single-tab view: the active tab is always 0.
    public int activeTab() {
        return 0;
    }
    // Restores the web view and loads raw HTML into it (UTF-8).
    public void loadData(String data, int index) {
        restoreWebView();
        webView.loadData(data, "text/html", "utf-8");
    }
    /**
     * Creates a navigation bar (left button, centered title, optional right
     * button) and inserts it at the top of the layout (index 0), replacing any
     * existing nav bar.
     * NOTE(review): 'left' is passed to createButton() unconditionally, so a
     * null 'left' would throw — confirm callers always supply it.
     */
    public void addNavBar(String title, Map<Object,Object> left, Map<Object,Object> right) {
        removeNavBar();
        Context ctx = RhodesActivity.getContext();
        LinearLayout top = new LinearLayout(ctx);
        top.setOrientation(LinearLayout.HORIZONTAL);
        top.setBackgroundColor(Color.GRAY);
        top.setGravity(Gravity.CENTER);
        top.setLayoutParams(new LinearLayout.LayoutParams(FILL_PARENT, WRAP_CONTENT, 0));
        View leftButton = createButton(left);
        leftButton.setLayoutParams(new LinearLayout.LayoutParams(WRAP_CONTENT, WRAP_CONTENT, 1));
        top.addView(leftButton);
        // Title gets weight 2 so it takes the space between the buttons.
        TextView label = new TextView(ctx);
        label.setText(title);
        label.setGravity(Gravity.CENTER);
        label.setTextSize((float)30.0);
        label.setLayoutParams(new LinearLayout.LayoutParams(WRAP_CONTENT, WRAP_CONTENT, 2));
        top.addView(label);
        if (right != null) {
            View rightButton = createButton(right);
            rightButton.setLayoutParams(new LinearLayout.LayoutParams(WRAP_CONTENT, WRAP_CONTENT, 1));
            top.addView(rightButton);
        }
        navBar = top;
        view.addView(navBar, 0);
    }
public void removeNavBar() {
if (navBar == null)
return;
view.removeViewAt(0);
navBar = null;
}
    // Rebuilds the toolbar from scratch with new button params/options.
    public void setToolbar(Object params, Object options) {
        toolBar.setBackgroundColor(Color.GRAY);
        toolBar.removeAllViews();
        setupToolbar(toolBar, params, options);
        toolBar.requestLayout();
        view.requestLayout();
    }
    // Empties the toolbar; the (now zero-height) container stays in the layout.
    public void removeToolbar() {
        toolBar.removeAllViews();
        toolBar.requestLayout();
        view.requestLayout();
    }
    // Single-tab view.
    @Override
    public int getTabsCount() {
        return 1;
    }
    // Captures the current page to a file. Only JPEG is supported; any other
    // format is logged as an error and ignored.
    @Override
    public void saveCurrentPage(String format, String path, int index) {
        if (format.equalsIgnoreCase(MainView.FORMAT_JPEG)) {
            webView.capture(IRhoWebView.CaptureFormat.CAPTURE_FORMAT_JPEG, path);
        }
        else {
            Logger.E(TAG, "Wrong format to save current page: " + format);
        }
    }
    // Current URL, or "" when the web view has been detached.
    public String get_current_url(int tab) {
        if (webView == null) {
            return "";
        }
        return webView.getUrl();
    }
    // No splash screen is managed by this view.
    @Override
    public void removeSplashScreen() {
    }
    @Override
    public String getTabDefaultUrl() {
        // TODO Auto-generated method stub
        return "";
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.iterative;
import com.facebook.presto.Session;
import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.cost.CachingCostCalculator;
import com.facebook.presto.cost.CachingStatsCalculator;
import com.facebook.presto.cost.CostCalculator;
import com.facebook.presto.cost.StatsCalculator;
import com.facebook.presto.matching.MatchingEngine;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.StatsRecorder;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.optimizations.PlanOptimizer;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.google.common.collect.ImmutableList;
import io.airlift.units.Duration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.facebook.presto.spi.StandardErrorCode.OPTIMIZER_TIMEOUT;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.lang.String.format;
/**
 * Plan optimizer that iteratively applies a set of exploration {@link Rule}s to a
 * {@link Memo}-based representation of the plan until no rule makes further
 * progress, or the session's optimizer timeout expires.
 *
 * <p>When the new optimizer is disabled for the session and legacy rules are
 * available, the legacy {@link PlanOptimizer}s are run instead.
 */
public class IterativeOptimizer
        implements PlanOptimizer
{
    private final List<PlanOptimizer> legacyRules;
    private final MatchingEngine<Rule> ruleStore;
    private final StatsRecorder stats;
    private final StatsCalculator statsCalculator;
    private final CostCalculator costCalculator;

    public IterativeOptimizer(StatsRecorder stats, StatsCalculator statsCalculator, CostCalculator costCalculator, Set<Rule> rules)
    {
        this(stats, statsCalculator, costCalculator, ImmutableList.of(), rules);
    }

    public IterativeOptimizer(StatsRecorder stats, StatsCalculator statsCalculator, CostCalculator costCalculator, List<PlanOptimizer> legacyRules, Set<Rule> newRules)
    {
        this.legacyRules = ImmutableList.copyOf(legacyRules);
        this.ruleStore = MatchingEngine.<Rule>builder()
                .register(newRules)
                .build();
        this.stats = stats;
        this.statsCalculator = statsCalculator;
        this.costCalculator = costCalculator;

        stats.registerAll(newRules);
    }

    @Override
    public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
    {
        // only disable new rules if we have legacy rules to fall back to
        if (!SystemSessionProperties.isNewOptimizerEnabled(session) && !legacyRules.isEmpty()) {
            for (PlanOptimizer optimizer : legacyRules) {
                plan = optimizer.optimize(plan, session, symbolAllocator.getTypes(), symbolAllocator, idAllocator);
            }
            return plan;
        }

        Memo memo = new Memo(idAllocator, plan);
        // Stats and costs are wrapped in caching calculators so repeated lookups
        // during exploration do not recompute them.
        Lookup lookup = Lookup.from(memo::resolve, new CachingStatsCalculator(statsCalculator), new CachingCostCalculator(costCalculator));
        Duration timeout = SystemSessionProperties.getOptimizerTimeout(session);
        exploreGroup(memo.getRootGroup(), new Context(memo, lookup, idAllocator, symbolAllocator, System.nanoTime(), timeout.toMillis(), session));

        return memo.extract();
    }

    /**
     * Explores a memo group and, recursively, its children until neither makes
     * further progress.
     *
     * @return true if this group or any descendant group changed
     */
    private boolean exploreGroup(int group, Context context)
    {
        // tracks whether this group or any children groups change as
        // this method executes
        boolean progress = exploreNode(group, context);

        while (exploreChildren(group, context)) {
            progress = true;

            // if children changed, try current group again
            // in case we can match additional rules
            if (!exploreNode(group, context)) {
                // no additional matches, so bail out
                break;
            }
        }

        return progress;
    }

    /**
     * Repeatedly applies matching rules to the node representing {@code group}
     * until no rule fires.
     *
     * @return true if any rule transformed the node
     * @throws PrestoException with {@code OPTIMIZER_TIMEOUT} when the time budget is exhausted
     */
    private boolean exploreNode(int group, Context context)
    {
        PlanNode node = context.getMemo().getNode(group);

        boolean done = false;
        boolean progress = false;

        while (!done) {
            if (isTimeLimitExhausted(context)) {
                throw new PrestoException(OPTIMIZER_TIMEOUT, format("The optimizer exhausted the time limit of %d ms", context.getTimeoutInMilliseconds()));
            }

            done = true;
            Iterator<Rule> possiblyMatchingRules = ruleStore.getCandidates(node).iterator();
            while (possiblyMatchingRules.hasNext()) {
                Rule rule = possiblyMatchingRules.next();
                Optional<PlanNode> transformed;

                // Candidate set is a superset; verify the pattern actually matches.
                if (!rule.getPattern().matches(node)) {
                    continue;
                }

                long duration;
                try {
                    long start = System.nanoTime();
                    transformed = rule.apply(node, context.getLookup(), context.getIdAllocator(), context.getSymbolAllocator(), context.getSession());
                    duration = System.nanoTime() - start;
                }
                catch (RuntimeException e) {
                    stats.recordFailure(rule);
                    throw e;
                }
                stats.record(rule, duration, transformed.isPresent());

                if (transformed.isPresent()) {
                    node = context.getMemo().replace(group, transformed.get(), rule.getClass().getName());
                    done = false;
                    progress = true;
                }
            }
        }

        return progress;
    }

    private boolean isTimeLimitExhausted(Context context)
    {
        return ((System.nanoTime() - context.getStartTimeInNanos()) / 1_000_000) >= context.getTimeoutInMilliseconds();
    }

    /**
     * Explores every child group of {@code group}.
     *
     * @return true if any child group changed
     */
    private boolean exploreChildren(int group, Context context)
    {
        boolean progress = false;

        PlanNode expression = context.getMemo().getNode(group);
        for (PlanNode child : expression.getSources()) {
            // Use checkState's template form so the message is only formatted on
            // failure (the original concatenated the string on every iteration).
            checkState(child instanceof GroupReference, "Expected child to be a group reference. Found: %s", child.getClass().getName());

            if (exploreGroup(((GroupReference) child).getGroupId(), context)) {
                progress = true;
            }
        }

        return progress;
    }

    /** Immutable per-invocation state threaded through the exploration. */
    private static class Context
    {
        private final Memo memo;
        private final Lookup lookup;
        private final PlanNodeIdAllocator idAllocator;
        private final SymbolAllocator symbolAllocator;
        private final long startTimeInNanos;
        private final long timeoutInMilliseconds;
        private final Session session;

        public Context(
                Memo memo,
                Lookup lookup,
                PlanNodeIdAllocator idAllocator,
                SymbolAllocator symbolAllocator,
                long startTimeInNanos,
                long timeoutInMilliseconds,
                Session session)
        {
            checkArgument(timeoutInMilliseconds >= 0, "Timeout has to be a non-negative number [milliseconds]");
            this.memo = memo;
            this.lookup = lookup;
            this.idAllocator = idAllocator;
            this.symbolAllocator = symbolAllocator;
            this.startTimeInNanos = startTimeInNanos;
            this.timeoutInMilliseconds = timeoutInMilliseconds;
            this.session = session;
        }

        public Memo getMemo()
        {
            return memo;
        }

        public Lookup getLookup()
        {
            return lookup;
        }

        public PlanNodeIdAllocator getIdAllocator()
        {
            return idAllocator;
        }

        public SymbolAllocator getSymbolAllocator()
        {
            return symbolAllocator;
        }

        public long getStartTimeInNanos()
        {
            return startTimeInNanos;
        }

        public long getTimeoutInMilliseconds()
        {
            return timeoutInMilliseconds;
        }

        public Session getSession()
        {
            return session;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.SchedulingRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ContainerUpdates;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Queue;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ContainerExpiredSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeResourceUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ReleaseContainerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.PendingAsk;
import org.apache.hadoop.yarn.server.scheduler.SchedulerRequestKey;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.utils.Lock;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListMap;
@LimitedPrivate("yarn")
@Evolving
@SuppressWarnings("unchecked")
public class FifoScheduler extends
AbstractYarnScheduler<FifoAppAttempt, FiCaSchedulerNode> implements
Configurable {
  private static final Log LOG = LogFactory.getLog(FifoScheduler.class);
  private static final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);
  // NOTE(review): package-private, presumably populated via the Configurable
  // contract (setConf) — confirm.
  Configuration conf;
  // Whether node names include the port (affects node lookup keys).
  private boolean usePortForNodeName;
  private ActiveUsersManager activeUsersManager;
  // FIFO has a single flat queue with this fixed name.
  private static final String DEFAULT_QUEUE_NAME = "default";
  private QueueMetrics metrics;
  private final ResourceCalculator resourceCalculator = new DefaultResourceCalculator();
  // FIFO exposes the whole cluster as a single "default" queue: 100% capacity,
  // wide-open ACLs, no hierarchy. Implemented as an anonymous Queue backed by
  // the scheduler's own state (metrics, activeUsersManager, usedResource).
  private final Queue DEFAULT_QUEUE = new Queue() {
    @Override
    public String getQueueName() {
      return DEFAULT_QUEUE_NAME;
    }
    @Override
    public QueueMetrics getMetrics() {
      return metrics;
    }
    @Override
    public QueueInfo getQueueInfo(
        boolean includeChildQueues, boolean recursive) {
      // Capacity is always 100%; current capacity is the used fraction of
      // cluster memory (0 while the cluster reports no memory).
      QueueInfo queueInfo = recordFactory.newRecordInstance(QueueInfo.class);
      queueInfo.setQueueName(DEFAULT_QUEUE.getQueueName());
      queueInfo.setCapacity(1.0f);
      Resource clusterResource = getClusterResource();
      if (clusterResource.getMemorySize() == 0) {
        queueInfo.setCurrentCapacity(0.0f);
      } else {
        queueInfo.setCurrentCapacity((float) usedResource.getMemorySize()
            / clusterResource.getMemorySize());
      }
      queueInfo.setMaximumCapacity(1.0f);
      queueInfo.setChildQueues(new ArrayList<QueueInfo>());
      queueInfo.setQueueState(QueueState.RUNNING);
      return queueInfo;
    }
    // Everyone ("*") is allowed every QueueACL; a fresh map is built per call.
    public Map<QueueACL, AccessControlList> getQueueAcls() {
      Map<QueueACL, AccessControlList> acls =
          new HashMap<QueueACL, AccessControlList>();
      for (QueueACL acl : QueueACL.values()) {
        acls.put(acl, new AccessControlList("*"));
      }
      return acls;
    }
    @Override
    public List<QueueUserACLInfo> getQueueUserAclInfo(
        UserGroupInformation unused) {
      QueueUserACLInfo queueUserAclInfo =
          recordFactory.newRecordInstance(QueueUserACLInfo.class);
      queueUserAclInfo.setQueueName(DEFAULT_QUEUE_NAME);
      queueUserAclInfo.setUserAcls(Arrays.asList(QueueACL.values()));
      return Collections.singletonList(queueUserAclInfo);
    }
    @Override
    public boolean hasAccess(QueueACL acl, UserGroupInformation user) {
      // Always true in practice, since getQueueAcls() grants "*" for every ACL.
      return getQueueAcls().get(acl).isUserAllowed(user);
    }
    @Override
    public ActiveUsersManager getAbstractUsersManager() {
      return activeUsersManager;
    }
    @Override
    public void recoverContainer(Resource clusterResource,
        SchedulerApplicationAttempt schedulerAttempt, RMContainer rmContainer) {
      // Completed containers carry no resources left to restore.
      if (rmContainer.getState().equals(RMContainerState.COMPLETED)) {
        return;
      }
      increaseUsedResources(rmContainer);
      updateAppHeadRoom(schedulerAttempt);
      updateAvailableResourcesMetrics();
    }
    @Override
    public Set<String> getAccessibleNodeLabels() {
      // TODO add implementation for FIFO scheduler
      return null;
    }
    @Override
    public String getDefaultNodeLabelExpression() {
      // TODO add implementation for FIFO scheduler
      return null;
    }
    @Override
    public void incPendingResource(String nodeLabel, Resource resourceToInc) {
    }
    @Override
    public void decPendingResource(String nodeLabel, Resource resourceToDec) {
    }
    @Override
    public Priority getDefaultApplicationPriority() {
      // TODO add implementation for FIFO scheduler
      return null;
    }
    @Override
    public void incReservedResource(String partition, Resource reservedRes) {
      // TODO add implementation for FIFO scheduler
    }
    @Override
    public void decReservedResource(String partition, Resource reservedRes) {
      // TODO add implementation for FIFO scheduler
    }
  };
// Registers the service under the scheduler's class name; real initialization
// happens later in serviceInit()/initScheduler().
public FifoScheduler() {
  super(FifoScheduler.class.getName());
}
// One-time scheduler setup, invoked from serviceInit() before the service
// starts. Order matters: configuration is validated before any state that
// depends on it is built.
private synchronized void initScheduler(Configuration conf) {
  validateConf(conf);
  //Use ConcurrentSkipListMap because applications need to be ordered
  this.applications =
      new ConcurrentSkipListMap<>();
  this.minimumAllocation = super.getMinimumAllocation();
  initMaximumResourceCapability(super.getMaximumAllocation());
  this.usePortForNodeName = conf.getBoolean(
      YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME,
      YarnConfiguration.DEFAULT_RM_SCHEDULER_USE_PORT_FOR_NODE_NAME);
  // The single FIFO queue's metrics double as the root queue metrics
  // (see getRootQueueMetrics()).
  this.metrics = QueueMetrics.forQueue(DEFAULT_QUEUE_NAME, null, false,
      conf);
  this.activeUsersManager = new ActiveUsersManager(metrics);
}
// Service lifecycle: build scheduler state, then let the parent initialize,
// then wire up the scheduling monitor.
@Override
public void serviceInit(Configuration conf) throws Exception {
  initScheduler(conf);
  super.serviceInit(conf);
  // Initialize SchedulingMonitorManager
  schedulingMonitorManager.initialize(rmContext, conf);
}
// No FIFO-specific start-up work; delegate to the parent service.
@Override
public void serviceStart() throws Exception {
  super.serviceStart();
}
// No FIFO-specific shutdown work; delegate to the parent service.
@Override
public void serviceStop() throws Exception {
  super.serviceStop();
}
// Configurable contract. Synchronized to pair with the synchronized getConf()
// so the reference is safely published across threads.
@Override
public synchronized void setConf(Configuration conf) {
  this.conf = conf;
}
/**
 * Validates the scheduler memory-allocation settings: both bounds must be
 * positive and the configured minimum must not exceed the maximum.
 *
 * @throws YarnRuntimeException when the configured bounds are inconsistent
 */
private void validateConf(Configuration conf) {
  final int minMem = conf.getInt(
      YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
  final int maxMem = conf.getInt(
      YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
  if (minMem > 0 && minMem <= maxMem) {
    return;
  }
  // Build the diagnostic lazily; only reached on misconfiguration.
  final StringBuilder message = new StringBuilder();
  message.append("Invalid resource scheduler memory")
      .append(" allocation configuration")
      .append(", ").append(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB)
      .append("=").append(minMem)
      .append(", ").append(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB)
      .append("=").append(maxMem)
      .append(", min and max should be greater than 0")
      .append(", max should be no smaller than min.");
  throw new YarnRuntimeException(message.toString());
}
// Configurable contract; synchronized to pair with setConf().
@Override
public synchronized Configuration getConf() {
  return conf;
}
// Number of nodes currently tracked by the scheduler.
@Override
public int getNumClusterNodes() {
  return nodeTracker.nodeCount();
}
// Injects the ResourceManager context used for event dispatch and recovery.
@Override
public synchronized void setRMContext(RMContext rmContext) {
  this.rmContext = rmContext;
}
// Re-reads configuration without rebuilding scheduler state; the FIFO
// scheduler has no queue hierarchy to refresh beyond what the parent handles.
@Override
public synchronized void
    reinitialize(Configuration conf, RMContext rmContext) throws IOException
{
  setConf(conf);
  super.reinitialize(conf, rmContext);
}
/**
 * Handles an AM heartbeat: records updated resource requests and blacklist
 * changes, releases the requested containers, and returns whatever was
 * allocated for this attempt since the previous heartbeat.
 * NOTE(review): schedulingRequests and updateRequests are accepted but never
 * read — placement constraints and container updates appear unsupported by
 * this scheduler; confirm before relying on them.
 */
@Override
public Allocation allocate(ApplicationAttemptId applicationAttemptId,
    List<ResourceRequest> ask, List<SchedulingRequest> schedulingRequests,
    List<ContainerId> release, List<String> blacklistAdditions, List<String> blacklistRemovals,
    ContainerUpdates updateRequests) {
  FifoAppAttempt application = getApplicationAttempt(applicationAttemptId);
  if (application == null) {
    LOG.error("Calling allocate on removed or non existent application " +
        applicationAttemptId.getApplicationId());
    return EMPTY_ALLOCATION;
  }
  // The allocate may be the leftover from previous attempt, and it will
  // impact current attempt, such as confuse the request and allocation for
  // current attempt's AM container.
  // Note outside precondition check for the attempt id may be
  // outdated here, so double check it here is necessary.
  if (!application.getApplicationAttemptId().equals(applicationAttemptId)) {
    LOG.error("Calling allocate on previous or removed " +
        "or non existent application attempt " + applicationAttemptId);
    return EMPTY_ALLOCATION;
  }
  // Sanity check: normalize asks to the scheduler's min/max increments.
  normalizeResourceRequests(ask);
  // Release containers the AM no longer wants (done outside the app lock).
  releaseContainers(release, application);
  synchronized (application) {
    // make sure we aren't stopping/removing the application
    // when the allocate comes in
    if (application.isStopped()) {
      LOG.info("Calling allocate on a stopped " +
          "application " + applicationAttemptId);
      return EMPTY_ALLOCATION;
    }
    if (!ask.isEmpty()) {
      LOG.debug("allocate: pre-update" +
          " applicationId=" + applicationAttemptId +
          " application=" + application);
      application.showRequests();
      // Update application requests
      application.updateResourceRequests(ask);
      LOG.debug("allocate: post-update" +
          " applicationId=" + applicationAttemptId +
          " application=" + application);
      application.showRequests();
      LOG.debug("allocate:" +
          " applicationId=" + applicationAttemptId +
          " #ask=" + ask.size());
    }
    application.updateBlacklist(blacklistAdditions, blacklistRemovals);
    // Report headroom (cluster minus used) alongside the new allocations.
    Resource headroom = application.getHeadroom();
    application.setApplicationHeadroomForMetrics(headroom);
    return new Allocation(application.pullNewlyAllocatedContainers(),
        headroom, null, null, null, application.pullUpdatedNMTokens());
  }
}
/**
 * Registers a newly submitted (or recovering) application under the single
 * FIFO queue and, for fresh submissions, notifies the RM application state
 * machine that it was accepted. The queue parameter is ignored — everything
 * runs in the default queue.
 */
@VisibleForTesting
public synchronized void addApplication(ApplicationId applicationId,
    String queue, String user, boolean isAppRecovering) {
  final SchedulerApplication<FifoAppAttempt> schedulerApp =
      new SchedulerApplication<>(DEFAULT_QUEUE, user);
  applications.put(applicationId, schedulerApp);
  metrics.submitApp(user);
  LOG.info("Accepted application " + applicationId + " from user: " + user
      + ", currently num of applications: " + applications.size());
  if (!isAppRecovering) {
    // Fresh submission: drive the app state machine forward.
    rmContext.getDispatcher().getEventHandler()
        .handle(new RMAppEvent(applicationId, RMAppEventType.APP_ACCEPTED));
  } else if (LOG.isDebugEnabled()) {
    // Recovery path: the state machine is rebuilt elsewhere.
    LOG.debug(applicationId + " is recovering. Skip notifying APP_ACCEPTED");
  }
}
/**
 * Creates the scheduler-side attempt for an application, optionally carrying
 * over containers from the previous attempt (work-preserving AM restart),
 * and notifies the attempt state machine unless the attempt is recovering.
 * NOTE(review): assumes the application was already registered via
 * addApplication(); a lookup miss here would NPE — confirm callers guarantee
 * the ordering.
 */
@VisibleForTesting
public synchronized void
    addApplicationAttempt(ApplicationAttemptId appAttemptId,
        boolean transferStateFromPreviousAttempt,
        boolean isAttemptRecovering) {
  SchedulerApplication<FifoAppAttempt> application =
      applications.get(appAttemptId.getApplicationId());
  String user = application.getUser();
  // TODO: Fix store
  FifoAppAttempt schedulerApp =
      new FifoAppAttempt(appAttemptId, user, DEFAULT_QUEUE,
          activeUsersManager, this.rmContext);
  if (transferStateFromPreviousAttempt) {
    // Work-preserving restart: inherit live containers and accounting.
    schedulerApp.transferStateFromPreviousAttempt(application
        .getCurrentAppAttempt());
  }
  application.setCurrentAppAttempt(schedulerApp);
  metrics.submitAppAttempt(user);
  LOG.info("Added Application Attempt " + appAttemptId
      + " to scheduler from user " + application.getUser());
  if (isAttemptRecovering) {
    if (LOG.isDebugEnabled()) {
      LOG.debug(appAttemptId
          + " is recovering. Skipping notifying ATTEMPT_ADDED");
    }
  } else {
    rmContext.getDispatcher().getEventHandler().handle(
        new RMAppAttemptEvent(appAttemptId,
            RMAppAttemptEventType.ATTEMPT_ADDED));
  }
}
// Finalizes an application: deactivates it for the fair-share user tracking,
// stops it with its final state, and drops it from the registry.
private synchronized void doneApplication(ApplicationId applicationId,
    RMAppState finalState) {
  SchedulerApplication<FifoAppAttempt> application =
      applications.get(applicationId);
  if (application == null){
    // Already removed (or never added) — nothing to clean up.
    LOG.warn("Couldn't find application " + applicationId);
    return;
  }
  // Inform the activeUsersManager
  activeUsersManager.deactivateApplication(application.getUser(),
      applicationId);
  application.stop(finalState);
  applications.remove(applicationId);
}
/**
 * Finalizes an application attempt: kills its live containers (unless they
 * are being kept for a work-preserving AM restart) and stops the attempt.
 *
 * @throws IOException when the attempt or its application is unknown
 */
private synchronized void doneApplicationAttempt(
    ApplicationAttemptId applicationAttemptId,
    RMAppAttemptState rmAppAttemptFinalState, boolean keepContainers)
    throws IOException {
  FifoAppAttempt attempt = getApplicationAttempt(applicationAttemptId);
  SchedulerApplication<FifoAppAttempt> application =
      applications.get(applicationAttemptId.getApplicationId());
  if (application == null || attempt == null) {
    throw new IOException("Unknown application " + applicationAttemptId +
        " has completed!");
  }
  // Kill all 'live' containers
  for (RMContainer container : attempt.getLiveContainers()) {
    if (keepContainers
        && container.getState().equals(RMContainerState.RUNNING)) {
      // do not kill the running container in the case of work-preserving AM
      // restart.
      LOG.info("Skip killing " + container.getContainerId());
      continue;
    }
    super.completedContainer(container,
        SchedulerUtils.createAbnormalContainerStatus(
            container.getContainerId(), SchedulerUtils.COMPLETED_APPLICATION),
        RMContainerEventType.KILL);
  }
  // Clean up pending requests, metrics etc.
  attempt.stop(rmAppAttemptFinalState);
}
/**
 * Heart of the scheduler: walks applications in FIFO order (the applications
 * map is a ConcurrentSkipListMap keyed by ApplicationId) and assigns as many
 * containers as the node's free resources allow, strictly in priority order
 * within each application.
 *
 * @param node node on which resources are available to be allocated
 */
private void assignContainers(FiCaSchedulerNode node) {
  LOG.debug("assignContainers:" +
      " node=" + node.getRMNode().getNodeAddress() +
      " #applications=" + applications.size());
  // Try to assign containers to applications in fifo order
  for (Map.Entry<ApplicationId, SchedulerApplication<FifoAppAttempt>> e : applications
      .entrySet()) {
    FifoAppAttempt application = e.getValue().getCurrentAppAttempt();
    if (application == null) {
      // Registered application with no attempt yet — nothing to schedule.
      continue;
    }
    LOG.debug("pre-assignContainers");
    application.showRequests();
    synchronized (application) {
      // Check if this resource is on the blacklist
      if (SchedulerAppUtils.isPlaceBlacklisted(application, node, LOG)) {
        continue;
      }
      for (SchedulerRequestKey schedulerKey :
          application.getSchedulerKeys()) {
        int maxContainers =
            getMaxAllocatableContainers(application, schedulerKey, node,
                NodeType.OFF_SWITCH);
        // Ensure the application needs containers of this priority
        if (maxContainers > 0) {
          int assignedContainers =
              assignContainersOnNode(node, application, schedulerKey);
          // Do not assign out of order w.r.t priorities
          if (assignedContainers == 0) {
            break;
          }
        }
      }
    }
    LOG.debug("post-assignContainers");
    application.showRequests();
    // Done: stop once the node can no longer fit even a minimum allocation.
    if (Resources.lessThan(resourceCalculator, getClusterResource(),
        node.getUnallocatedResource(), minimumAllocation)) {
      break;
    }
  }
  // Update the applications' headroom to correctly take into
  // account the containers assigned in this update.
  for (SchedulerApplication<FifoAppAttempt> application : applications.values()) {
    FifoAppAttempt attempt =
        (FifoAppAttempt) application.getCurrentAppAttempt();
    if (attempt == null) {
      continue;
    }
    updateAppHeadRoom(attempt);
  }
}
/**
 * Upper bound on containers assignable at the given locality level for this
 * scheduler key. The off-switch (ANY) ask always caps the count; rack- and
 * node-local asks can only tighten it.
 */
private int getMaxAllocatableContainers(FifoAppAttempt application,
    SchedulerRequestKey schedulerKey, FiCaSchedulerNode node, NodeType type) {
  final PendingAsk anyAsk =
      application.getPendingAsk(schedulerKey, ResourceRequest.ANY);
  int limit = anyAsk.getCount();
  switch (type) {
    case OFF_SWITCH:
      // No locality constraint: the ANY ask is the whole answer.
      return limit;
    case RACK_LOCAL: {
      final PendingAsk rackAsk =
          application.getPendingAsk(schedulerKey, node.getRackName());
      if (rackAsk.getCount() <= 0) {
        return limit;
      }
      limit = Math.min(limit, rackAsk.getCount());
      break;
    }
    case NODE_LOCAL: {
      final PendingAsk hostAsk = application.getPendingAsk(schedulerKey,
          node.getRMNode().getHostName());
      if (hostAsk.getCount() > 0) {
        limit = Math.min(limit, hostAsk.getCount());
      }
      break;
    }
    default:
      break;
  }
  return limit;
}
/**
 * Assigns containers for one scheduler key on one node, trying each locality
 * level in order of preference (node-local, then rack-local, then
 * off-switch), and returns the total number assigned.
 */
private int assignContainersOnNode(FiCaSchedulerNode node,
    FifoAppAttempt application, SchedulerRequestKey schedulerKey
    ) {
  int assignedTotal = 0;
  // Data-local first — cheapest placement.
  assignedTotal += assignNodeLocalContainers(node, application, schedulerKey);
  // Then rack-local.
  assignedTotal += assignRackLocalContainers(node, application, schedulerKey);
  // Finally off-switch.
  assignedTotal += assignOffSwitchContainers(node, application, schedulerKey);
  LOG.debug("assignContainersOnNode:" +
      " node=" + node.getRMNode().getNodeAddress() +
      " application=" + application.getApplicationId().getId() +
      " priority=" + schedulerKey.getPriority() +
      " #assigned=" + assignedTotal);
  return assignedTotal;
}
// Assigns containers for asks that name this specific node.
// NOTE(review): the ask is looked up by node.getNodeName() while the
// NODE_LOCAL branch of getMaxAllocatableContainers() uses
// getRMNode().getHostName(); these can differ when ports are included in
// node names — confirm this is intended.
private int assignNodeLocalContainers(FiCaSchedulerNode node,
    FifoAppAttempt application, SchedulerRequestKey schedulerKey) {
  int assignedContainers = 0;
  PendingAsk nodeLocalAsk = application.getPendingAsk(schedulerKey,
      node.getNodeName());
  if (nodeLocalAsk.getCount() > 0) {
    // Don't allocate on this node if we don't need containers on this rack
    if (application.getOutstandingAsksCount(schedulerKey,
        node.getRackName()) <= 0) {
      return 0;
    }
    // Take the tighter of the host-level ask and the overall cap.
    int assignableContainers = Math.min(
        getMaxAllocatableContainers(application, schedulerKey, node,
            NodeType.NODE_LOCAL), nodeLocalAsk.getCount());
    assignedContainers =
        assignContainer(node, application, schedulerKey, assignableContainers,
            nodeLocalAsk.getPerAllocationResource(), NodeType.NODE_LOCAL);
  }
  return assignedContainers;
}
// Assigns containers for asks that name this node's rack.
private int assignRackLocalContainers(FiCaSchedulerNode node,
    FifoAppAttempt application, SchedulerRequestKey schedulerKey) {
  int assignedContainers = 0;
  PendingAsk rackAsk = application.getPendingAsk(schedulerKey,
      node.getRMNode().getRackName());
  if (rackAsk.getCount() > 0) {
    // Don't allocate on this rack if the application doesn't need containers
    // anywhere (no outstanding ANY asks).
    if (application.getOutstandingAsksCount(schedulerKey,
        ResourceRequest.ANY) <= 0) {
      return 0;
    }
    // Take the tighter of the rack-level ask and the overall cap.
    int assignableContainers =
        Math.min(getMaxAllocatableContainers(application, schedulerKey, node,
            NodeType.RACK_LOCAL), rackAsk.getCount());
    assignedContainers =
        assignContainer(node, application, schedulerKey, assignableContainers,
            rackAsk.getPerAllocationResource(), NodeType.RACK_LOCAL);
  }
  return assignedContainers;
}
/**
 * Assigns containers for the unconstrained (ANY) ask: no placement
 * restriction, so the whole outstanding count is eligible on this node.
 */
private int assignOffSwitchContainers(FiCaSchedulerNode node,
    FifoAppAttempt application, SchedulerRequestKey schedulerKey) {
  final PendingAsk anyAsk =
      application.getPendingAsk(schedulerKey, ResourceRequest.ANY);
  if (anyAsk.getCount() <= 0) {
    return 0;
  }
  return assignContainer(node, application, schedulerKey,
      anyAsk.getCount(), anyAsk.getPerAllocationResource(),
      NodeType.OFF_SWITCH);
}
/**
 * Creates and books up to {@code assignableContainers} containers of the
 * given capability on the node, bounded by the node's free memory (FIFO uses
 * the memory-only DefaultResourceCalculator).
 *
 * @return the number of containers actually assigned (possibly 0)
 */
private int assignContainer(FiCaSchedulerNode node, FifoAppAttempt application,
    SchedulerRequestKey schedulerKey, int assignableContainers,
    Resource capability, NodeType type) {
  LOG.debug("assignContainers:" +
      " node=" + node.getRMNode().getNodeAddress() +
      " application=" + application.getApplicationId().getId() +
      " priority=" + schedulerKey.getPriority().getPriority() +
      " assignableContainers=" + assignableContainers +
      " capability=" + capability + " type=" + type);
  // A buggy application asking for zero-memory containers would previously
  // crash the scheduler thread with an ArithmeticException in the division
  // below (the long-standing TODO here); treat such asks as unsatisfiable.
  if (capability.getMemorySize() <= 0) {
    return 0;
  }
  // How many containers of this size fit into the node's free memory?
  int availableContainers =
      (int) (node.getUnallocatedResource().getMemorySize() /
          capability.getMemorySize());
  int assignedContainers =
      Math.min(assignableContainers, availableContainers);
  if (assignedContainers > 0) {
    for (int i=0; i < assignedContainers; ++i) {
      NodeId nodeId = node.getRMNode().getNodeID();
      ContainerId containerId = BuilderUtils.newContainerId(application
          .getApplicationAttemptId(), application.getNewContainerId());
      // Create the container
      Container container = BuilderUtils.newContainer(containerId, nodeId,
          node.getRMNode().getHttpAddress(), capability,
          schedulerKey.getPriority(), null,
          schedulerKey.getAllocationRequestId());
      // Allocate!
      // Inform the application...
      RMContainer rmContainer = application.allocate(type, node, schedulerKey,
          container);
      // ...then the node...
      node.allocateContainer(rmContainer);
      // ...and finally the scheduler-wide usage accounting.
      increaseUsedResources(rmContainer);
    }
  }
  return assignedContainers;
}
// Adds a container's allocation to the scheduler-wide usage total.
private void increaseUsedResources(RMContainer rmContainer) {
  Resources.addTo(usedResource, rmContainer.getAllocatedResource());
}
// Headroom for every attempt is simply cluster total minus total usage —
// there is only one queue, so no per-queue capacity math is needed.
private void updateAppHeadRoom(SchedulerApplicationAttempt schedulerAttempt) {
  schedulerAttempt.setHeadroom(Resources.subtract(getClusterResource(),
      usedResource));
}
// Publishes the remaining (cluster minus used) resources to the queue metrics.
private void updateAvailableResourcesMetrics() {
  metrics.setAvailableResourcesToQueue(
      Resources.subtract(getClusterResource(), usedResource));
}
/**
 * Central event dispatcher: translates SchedulerEvents from the RM dispatcher
 * into node/application lifecycle updates and scheduling passes.
 */
@Override
public void handle(SchedulerEvent event) {
  switch(event.getType()) {
  case NODE_ADDED:
  {
    NodeAddedSchedulerEvent nodeAddedEvent = (NodeAddedSchedulerEvent)event;
    addNode(nodeAddedEvent.getAddedRMNode());
    // Re-adopt containers that survived an RM restart on this node.
    recoverContainersOnNode(nodeAddedEvent.getContainerReports(),
        nodeAddedEvent.getAddedRMNode());
  }
  break;
  case NODE_REMOVED:
  {
    NodeRemovedSchedulerEvent nodeRemovedEvent = (NodeRemovedSchedulerEvent)event;
    removeNode(nodeRemovedEvent.getRemovedRMNode());
  }
  break;
  case NODE_RESOURCE_UPDATE:
  {
    NodeResourceUpdateSchedulerEvent nodeResourceUpdatedEvent =
        (NodeResourceUpdateSchedulerEvent)event;
    updateNodeResource(nodeResourceUpdatedEvent.getRMNode(),
        nodeResourceUpdatedEvent.getResourceOption());
  }
  break;
  case NODE_UPDATE:
  {
    // Node heartbeat — this is where new containers actually get assigned.
    NodeUpdateSchedulerEvent nodeUpdatedEvent =
        (NodeUpdateSchedulerEvent)event;
    nodeUpdate(nodeUpdatedEvent.getRMNode());
  }
  break;
  case APP_ADDED:
  {
    AppAddedSchedulerEvent appAddedEvent = (AppAddedSchedulerEvent) event;
    addApplication(appAddedEvent.getApplicationId(),
        appAddedEvent.getQueue(), appAddedEvent.getUser(),
        appAddedEvent.getIsAppRecovering());
  }
  break;
  case APP_REMOVED:
  {
    AppRemovedSchedulerEvent appRemovedEvent = (AppRemovedSchedulerEvent)event;
    doneApplication(appRemovedEvent.getApplicationID(),
        appRemovedEvent.getFinalState());
  }
  break;
  case APP_ATTEMPT_ADDED:
  {
    AppAttemptAddedSchedulerEvent appAttemptAddedEvent =
        (AppAttemptAddedSchedulerEvent) event;
    addApplicationAttempt(appAttemptAddedEvent.getApplicationAttemptId(),
        appAttemptAddedEvent.getTransferStateFromPreviousAttempt(),
        appAttemptAddedEvent.getIsAttemptRecovering());
  }
  break;
  case APP_ATTEMPT_REMOVED:
  {
    AppAttemptRemovedSchedulerEvent appAttemptRemovedEvent =
        (AppAttemptRemovedSchedulerEvent) event;
    try {
      doneApplicationAttempt(
          appAttemptRemovedEvent.getApplicationAttemptID(),
          appAttemptRemovedEvent.getFinalAttemptState(),
          appAttemptRemovedEvent.getKeepContainersAcrossAppAttempts());
    } catch(IOException ie) {
      // Unknown attempt: log and keep the dispatcher alive.
      LOG.error("Unable to remove application "
          + appAttemptRemovedEvent.getApplicationAttemptID(), ie);
    }
  }
  break;
  case CONTAINER_EXPIRED:
  {
    // The AM never launched the allocated container in time; reclaim it.
    ContainerExpiredSchedulerEvent containerExpiredEvent =
        (ContainerExpiredSchedulerEvent) event;
    ContainerId containerid = containerExpiredEvent.getContainerId();
    super.completedContainer(getRMContainer(containerid),
        SchedulerUtils.createAbnormalContainerStatus(
            containerid,
            SchedulerUtils.EXPIRED_CONTAINER),
        RMContainerEventType.EXPIRE);
  }
  break;
  case RELEASE_CONTAINER: {
    if (!(event instanceof ReleaseContainerEvent)) {
      throw new RuntimeException("Unexpected event type: " + event);
    }
    RMContainer container = ((ReleaseContainerEvent) event).getContainer();
    completedContainer(container,
        SchedulerUtils.createAbnormalContainerStatus(
            container.getContainerId(),
            SchedulerUtils.RELEASED_CONTAINER),
        RMContainerEventType.RELEASED);
  }
  break;
  default:
    LOG.error("Invalid eventtype " + event.getType() + ". Ignoring!");
  }
}
/**
 * Books the completion of a container: informs the owning attempt and node,
 * and subtracts the container's resources from the scheduler-wide usage.
 */
@Lock(FifoScheduler.class)
@Override
protected synchronized void completedContainerInternal(
    RMContainer rmContainer, ContainerStatus containerStatus,
    RMContainerEventType event) {
  // Get the application for the finished container
  Container container = rmContainer.getContainer();
  FifoAppAttempt application =
      getCurrentAttemptForContainer(container.getId());
  ApplicationId appId =
      container.getId().getApplicationAttemptId().getApplicationId();
  // Get the node on which the container was allocated
  FiCaSchedulerNode node = (FiCaSchedulerNode) getNode(container.getNodeId());
  if (application == null) {
    // NOTE(review): node may also be null on this path (node already
    // removed); string concatenation tolerates that, so the log is safe.
    LOG.info("Unknown application: " + appId +
        " released container " + container.getId() +
        " on node: " + node +
        " with event: " + event);
    return;
  }
  // Inform the application
  application.containerCompleted(rmContainer, containerStatus, event,
      RMNodeLabelsManager.NO_LABEL);
  // Inform the node
  node.releaseContainer(rmContainer.getContainerId(), false);
  // Update total usage
  Resources.subtractFrom(usedResource, container.getResource());
  LOG.info("Application attempt " + application.getApplicationAttemptId() +
      " released container " + container.getId() +
      " on node: " + node +
      " with event: " + event);
}
// Total resources currently allocated across all nodes; mutated only under
// the scheduler lock. NOTE(review): declared mid-class — consider moving it
// up next to the other fields for readability.
private Resource usedResource = recordFactory.newRecordInstance(Resource.class);
// Drops a node from scheduling: marks all of its running containers as lost
// (killed) before untracking the node.
private synchronized void removeNode(RMNode nodeInfo) {
  FiCaSchedulerNode node = nodeTracker.getNode(nodeInfo.getNodeID());
  if (node == null) {
    // Already removed — nothing to do.
    return;
  }
  // Kill running containers
  for(RMContainer container : node.getCopiedListOfRunningContainers()) {
    super.completedContainer(container,
        SchedulerUtils.createAbnormalContainerStatus(
            container.getContainerId(),
            SchedulerUtils.LOST_CONTAINER),
        RMContainerEventType.KILL);
  }
  nodeTracker.removeNode(nodeInfo.getNodeID());
}
// All queue names resolve to the single default queue; the child-queue flags
// are dropped because the FIFO scheduler has no queue hierarchy.
@Override
public QueueInfo getQueueInfo(String queueName,
    boolean includeChildQueues, boolean recursive) {
  return DEFAULT_QUEUE.getQueueInfo(false, false);
}
// Delegates to the default queue; its implementation ignores the user arg.
@Override
public List<QueueUserACLInfo> getQueueUserAclInfo() {
  return DEFAULT_QUEUE.getQueueUserAclInfo(null);
}
// FIFO always schedules by memory only (DefaultResourceCalculator).
@Override
public ResourceCalculator getResourceCalculator() {
  return resourceCalculator;
}
// Starts tracking a newly registered node.
private synchronized void addNode(RMNode nodeManager) {
  FiCaSchedulerNode schedulerNode = new FiCaSchedulerNode(nodeManager,
      usePortForNodeName);
  nodeTracker.addNode(schedulerNode);
}
// State-store recovery is a no-op for the FIFO scheduler; container recovery
// happens per-node via recoverContainersOnNode() instead.
@Override
public void recover(RMState state) {
  // NOT IMPLEMENTED
}
/**
 * Looks up a live container via its application's current attempt; returns
 * null when no current attempt tracks the container.
 */
@Override
public RMContainer getRMContainer(ContainerId containerId) {
  final FifoAppAttempt attempt = getCurrentAttemptForContainer(containerId);
  if (attempt == null) {
    return null;
  }
  return attempt.getRMContainer(containerId);
}
// With a single queue, the default queue's metrics ARE the root metrics.
@Override
public QueueMetrics getRootQueueMetrics() {
  return DEFAULT_QUEUE.getMetrics();
}
// Queue name is ignored — there is only the default queue, and its ACLs
// grant everything to everyone ("*").
@Override
public synchronized boolean checkAccess(UserGroupInformation callerUGI,
    QueueACL acl, String queueName) {
  return DEFAULT_QUEUE.hasAccess(acl, callerUGI);
}
/**
 * Lists the current attempt of every application when asked about the single
 * default queue; any other queue name yields null ("queue not found").
 */
@Override
public synchronized List<ApplicationAttemptId>
    getAppsInQueue(String queueName) {
  if (queueName.equals(DEFAULT_QUEUE.getQueueName())) {
    List<ApplicationAttemptId> attempts =
        new ArrayList<ApplicationAttemptId>(applications.size());
    for (SchedulerApplication<FifoAppAttempt> app : applications.values()) {
      FifoAppAttempt attempt = app.getCurrentAppAttempt();
      // An application can be registered before its first attempt exists
      // (addApplication runs before addApplicationAttempt); skip it rather
      // than NPE — assignContainers() applies the same null guard.
      if (attempt != null) {
        attempts.add(attempt.getApplicationAttemptId());
      }
    }
    return attempts;
  } else {
    return null;
  }
}
// Exposes the live usage total. NOTE(review): returns the mutable internal
// object, not a copy — callers must not modify it.
public Resource getUsedResource() {
  return usedResource;
}
/**
 * Node heartbeat handler: after the parent processes container statuses,
 * runs a scheduling pass on the node if it can fit at least one minimum
 * allocation, then refreshes the availability metrics.
 */
@Override
protected synchronized void nodeUpdate(RMNode nm) {
  super.nodeUpdate(nm);
  FiCaSchedulerNode node = (FiCaSchedulerNode) getNode(nm.getNodeID());
  // During work-preserving recovery, hold off new allocations until the
  // scheduler has re-learned the pre-restart containers.
  if (rmContext.isWorkPreservingRecoveryEnabled()
      && !rmContext.isSchedulerReadyForAllocatingContainers()) {
    return;
  }
  if (Resources.greaterThanOrEqual(resourceCalculator, getClusterResource(),
      node.getUnallocatedResource(), minimumAllocation)) {
    LOG.debug("Node heartbeat " + nm.getNodeID() +
        " available resource = " + node.getUnallocatedResource());
    assignContainers(node);
    LOG.debug("Node after allocation " + nm.getNodeID() + " resource = "
        + node.getUnallocatedResource());
  }
  updateAvailableResourcesMetrics();
}
// Test hook: force-kills a container as if its AM container had failed.
@VisibleForTesting
@Override
public void killContainer(RMContainer container) {
  ContainerStatus status = SchedulerUtils.createKilledContainerStatus(
      container.getContainerId(),
      "Killed by RM to simulate an AM container failure");
  LOG.info("Killing container " + container);
  completedContainer(container, status, RMContainerEventType.KILL);
}
// Overridden only to add the scheduler-wide lock around the parent's
// container-recovery logic.
@Override
public synchronized void recoverContainersOnNode(
    List<NMContainerStatus> containerReports, RMNode nm) {
  super.recoverContainersOnNode(containerReports, nm);
}
}
| |
package org.broadinstitute.hellbender.cmdline;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.intel.gkl.compression.IntelDeflaterFactory;
import com.intel.gkl.compression.IntelInflaterFactory;
import htsjdk.samtools.Defaults;
import htsjdk.samtools.metrics.Header;
import htsjdk.samtools.metrics.MetricBase;
import htsjdk.samtools.metrics.MetricsFile;
import htsjdk.samtools.metrics.StringHeader;
import htsjdk.samtools.util.BlockCompressedOutputStream;
import htsjdk.samtools.util.BlockGunzipper;
import htsjdk.samtools.util.Log;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.broadinstitute.barclay.argparser.*;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.LoggingUtils;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.config.ConfigFactory;
import org.broadinstitute.hellbender.utils.gcs.BucketUtils;
import org.broadinstitute.hellbender.utils.help.HelpConstants;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.URL;
import java.nio.file.*;
import java.text.DecimalFormat;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.stream.Collectors;
/**
* Abstract class to facilitate writing command-line programs.
*
* To use:
*
* 1. Extend this class with a concrete class that has data members annotated with @Argument, @PositionalArguments
* and/or @Usage annotations.
*
* 2. If there is any custom command-line validation, override customCommandLineValidation(). When this method is
* called, the command line has been parsed and set into the data members of the concrete class.
*
* 3. Implement a method doWork(). This is called after successful command-line processing.
* The doWork() method may return null or a result object (they are not interpreted by the toolkit and passed onto the caller).
* doWork() may throw unchecked exceptions, which are NOT caught and passed onto the VM.
*
*/
public abstract class CommandLineProgram implements CommandLinePluginProvider {
// Logger is a protected instance variable here to output the correct class name
// with concrete sub-classes of CommandLineProgram. Since CommandLineProgram is
// abstract, this is fine (as long as no logging has to happen statically in this class).
protected final Logger logger = LogManager.getLogger(this.getClass());
private static final String DEFAULT_TOOLKIT_SHORT_NAME = "GATK";
@Argument(fullName = StandardArgumentDefinitions.TMP_DIR_NAME, common=true, optional=true, doc = "Temp directory to use.")
public String tmpDir;
@ArgumentCollection(doc="Special Arguments that have meaning to the argument parsing system. " +
"It is unlikely these will ever need to be accessed by the command line program")
public SpecialArgumentsCollection specialArgumentsCollection = new SpecialArgumentsCollection();
@Argument(fullName = StandardArgumentDefinitions.VERBOSITY_NAME, shortName = StandardArgumentDefinitions.VERBOSITY_NAME, doc = "Control verbosity of logging.", common = true, optional = true)
public Log.LogLevel VERBOSITY = Log.LogLevel.INFO;
@Argument(fullName = StandardArgumentDefinitions.QUIET_NAME, doc = "Whether to suppress job-summary info on System.err.", common=true)
public Boolean QUIET = false;
@Argument(fullName = StandardArgumentDefinitions.USE_JDK_DEFLATER_LONG_NAME, shortName = StandardArgumentDefinitions.USE_JDK_DEFLATER_SHORT_NAME, doc = "Whether to use the JdkDeflater (as opposed to IntelDeflater)", common=true)
public boolean useJdkDeflater = false;
@Argument(fullName = StandardArgumentDefinitions.USE_JDK_INFLATER_LONG_NAME, shortName = StandardArgumentDefinitions.USE_JDK_INFLATER_SHORT_NAME, doc = "Whether to use the JdkInflater (as opposed to IntelInflater)", common=true)
public boolean useJdkInflater = false;
@Argument(fullName = StandardArgumentDefinitions.NIO_MAX_REOPENS_LONG_NAME, shortName = StandardArgumentDefinitions.NIO_MAX_REOPENS_SHORT_NAME, doc = "If the GCS bucket channel errors out, how many times it will attempt to re-initiate the connection", optional = true)
public int NIO_MAX_REOPENS = ConfigFactory.getInstance().getGATKConfig().gcsMaxRetries();
@Argument(fullName = StandardArgumentDefinitions.NIO_PROJECT_FOR_REQUESTER_PAYS_LONG_NAME, doc = "Project to bill when accessing \"requester pays\" buckets. If unset, these buckets cannot be accessed.", optional = true)
public String NIO_PROJECT_FOR_REQUESTER_PAYS = ConfigFactory.getInstance().getGATKConfig().gcsProjectForRequesterPays();
// This option is here for documentation completeness.
// This is actually parsed out in Main to initialize configuration files because
// we need to have the configuration completely set up before we create our CommandLinePrograms.
// (Some of the CommandLinePrograms have default values set to config values, and these are loaded
// at class load time as static initializers).
@Argument(fullName = StandardArgumentDefinitions.GATK_CONFIG_FILE_OPTION,
doc = "A configuration file to use with the GATK.",
common = true,
optional = true)
public String GATK_CONFIG_FILE = null;
private CommandLineParser commandLineParser;
private final List<Header> defaultHeaders = new ArrayList<>();
/**
* The reconstructed commandline used to run this program. Used for logging
* and debugging.
*/
private String commandLine;
/**
 * Perform initialization/setup after command-line argument parsing but before doWork() is invoked.
 * Invoked by runTool(); a failure here still triggers onShutdown().
 * Default implementation does nothing.
 * Subclasses can override to perform initialization.
 */
protected void onStartup() {}
/**
 * Do the work after command line has been parsed. RuntimeException may be
 * thrown by this method, and are reported appropriately.
 * @return the return value or null if there is none.
 */
protected abstract Object doWork();
/**
 * Perform cleanup after doWork() is finished. Always executes even if an exception is thrown during the run
 * (runTool() calls this from a finally block).
 * Default implementation does nothing.
 * Subclasses can override to perform cleanup.
 */
protected void onShutdown() {}
/**
 * Template method that runs the startup hook, doWork and then the shutdown hook.
 * onShutdown() is guaranteed to run even when onStartup() or doWork() throws.
 * @return whatever doWork() produced (possibly null)
 */
public final Object runTool(){
  try {
    logger.info("Initializing engine");
    onStartup();
    logger.info("Done initializing engine");
    return doWork();
  } finally {
    // Always tear down, even on exceptions from startup or the work itself.
    logger.info("Shutting down engine");
    onShutdown();
  }
}
/**
 * Runs the tool after argument parsing: resolves and validates the temp
 * directory, configures logging/compression defaults and NIO retry options,
 * prints the startup banner, executes runTool(), and always reports elapsed
 * time (unless QUIET) even when the tool throws.
 */
public Object instanceMainPostParseArgs() {
  // Provide one temp directory if the caller didn't
  // TODO - this should use the HTSJDK IOUtil.getDefaultTmpDirPath, which is somehow broken in the current HTSJDK version
  if (tmpDir == null || tmpDir.isEmpty()) {
    tmpDir = IOUtils.getAbsolutePathWithoutFileProtocol(IOUtils.getPath(System.getProperty("java.io.tmpdir")));
  }
  // Build the default headers
  final ZonedDateTime startDateTime = ZonedDateTime.now();
  this.defaultHeaders.add(new StringHeader(commandLine));
  this.defaultHeaders.add(new StringHeader("Started on: " + Utils.getDateTimeForDisplay(startDateTime)));
  LoggingUtils.setLoggingLevel(VERBOSITY); // propagate the VERBOSITY level to logging frameworks
  // set the temp directory as a java property, checking for existence and read/write access
  final Path p = IOUtils.getPath(tmpDir);
  try {
    p.getFileSystem().provider().checkAccess(p, AccessMode.READ, AccessMode.WRITE);
    System.setProperty("java.io.tmpdir", IOUtils.getAbsolutePathWithoutFileProtocol(p));
  } catch (final AccessDeniedException | NoSuchFileException e) {
    // TODO: it may be that the program does not need a tmp dir
    // TODO: if it fails, the problem can be discovered downstream
    // TODO: should log a warning instead?
    throw new UserException.BadTempDir(p, "should exist and have read/write access", e);
  } catch (final IOException e) {
    // other exceptions with the tmp directory
    throw new UserException.BadTempDir(p, e.getMessage(), e);
  }
  //Set defaults (note: setting them here means they are not controllable by the user)
  if (! useJdkDeflater) {
    BlockCompressedOutputStream.setDefaultDeflaterFactory(new IntelDeflaterFactory());
  }
  if (! useJdkInflater) {
    BlockGunzipper.setDefaultInflaterFactory(new IntelInflaterFactory());
  }
  BucketUtils.setGlobalNIODefaultOptions(NIO_MAX_REOPENS, NIO_PROJECT_FOR_REQUESTER_PAYS);
  if (!QUIET) {
    printStartupMessage(startDateTime);
  }
  warnOnToolStatus();
  try {
    return runTool();
  } finally {
    // Emit the time even if program throws
    if (!QUIET) {
      final ZonedDateTime endDateTime = ZonedDateTime.now();
      final double elapsedMinutes = (Duration.between(startDateTime, endDateTime).toMillis()) / (1000d * 60d);
      final String elapsedString = new DecimalFormat("#,##0.00").format(elapsedMinutes);
      System.err.println("[" + Utils.getDateTimeForDisplay(endDateTime) + "] " +
          getClass().getName() + " done. Elapsed time: " + elapsedString + " minutes.");
      System.err.println("Runtime.totalMemory()=" + Runtime.getRuntime().totalMemory());
    }
  }
}
/**
 * Parses the command line and, when a runnable invocation was given, executes the tool.
 *
 * @param argv raw command-line arguments
 * @return 0 when only an information argument (help/version) was handled; otherwise the tool's result
 */
public Object instanceMain(final String[] argv) {
    final boolean shouldRunTool = parseArgs(argv);
    if (!shouldRunTool) {
        // an information-only argument like help or version was specified; just exit
        return 0;
    }
    return instanceMainPostParseArgs();
}
/**
 * Put any custom command-line validation in an override of this method.
 * clp is initialized at this point and can be used to print usage and access argv.
 * Any arguments set by command-line parser can be validated.
 * @return null if command line is valid. If command line is invalid, returns an array of error messages
 * to be written to the appropriate place.
 * @throws CommandLineException if command line is invalid and handling as exception is preferred.
 */
protected String[] customCommandLineValidation() {
    return null;
}
/**
 * Parse arguments and initialize any values annotated with {@link Argument}
 * @return true if program should be executed, false if an information only argument like {@link SpecialArgumentsCollection#HELP_FULLNAME} was specified
 * @throws CommandLineException if command line validation fails
 */
protected final boolean parseArgs(final String[] argv) {
    final boolean ret = getCommandLineParser().parseArguments(System.err, argv);
    // Record the command line even when parsing short-circuits, so getCommandLine() reflects what was given.
    commandLine = getCommandLineParser().getCommandLine();
    if (!ret) {
        // An information-only argument (help/version) was handled by the parser; nothing to run.
        return false;
    }
    final String[] customErrorMessages = customCommandLineValidation();
    if (customErrorMessages != null) {
        // String.join is the idiomatic (and allocation-lighter) equivalent of
        // Arrays.stream(...).collect(Collectors.joining(", ")).
        throw new CommandLineException("Command Line Validation failed:" + String.join(", ", customErrorMessages));
    }
    return true;
}
/**
 * Return the list of GATKCommandLinePluginDescriptors to be used for this CLP.
 * Default implementation returns an empty (mutable) list — not null. Subclasses can override
 * this to return a custom list.
 */
public List<? extends CommandLinePluginDescriptor<?>> getPluginDescriptors() { return new ArrayList<>(); }
/** Gets a MetricsFile with default headers already written into it. */
protected <A extends MetricBase,B extends Comparable<?>> MetricsFile<A,B> getMetricsFile() {
    final MetricsFile<A,B> metricsFile = new MetricsFile<>();
    // Copy every default header into the new file, preserving order.
    this.defaultHeaders.forEach(metricsFile::addHeader);
    return metricsFile;
}
/**
 * Prints a user-friendly message on startup with some information about who we are and the
 * runtime environment.
 *
 * May be overridden by subclasses to provide a custom implementation if desired.
 *
 * @param startDateTime Startup date/time
 */
protected void printStartupMessage(final ZonedDateTime startDateTime) {
    try {
        logger.info(Utils.dupChar('-', 60));
        logger.info(String.format("%s v%s", getToolkitName(), getVersion()));
        logger.info(getSupportInformation());
        logger.info(String.format("Executing as %s@%s on %s v%s %s",
                System.getProperty("user.name"), InetAddress.getLocalHost().getHostName(),
                System.getProperty("os.name"), System.getProperty("os.version"), System.getProperty("os.arch")));
        logger.info(String.format("Java runtime: %s v%s",
                System.getProperty("java.vm.name"), System.getProperty("java.runtime.version")));
        logger.info("Start Date/Time: " + Utils.getDateTimeForDisplay(startDateTime));
        logger.info(Utils.dupChar('-', 60));
        // NOTE(review): the separator line is emitted twice — presumably intentional (closes the
        // banner and opens the settings section), but worth confirming.
        logger.info(Utils.dupChar('-', 60));
        // Print versions of important dependencies
        printLibraryVersions();
        // Print important settings to the logger:
        printSettings();
    }
    // The startup banner is purely informational; any failure here (e.g. hostname lookup)
    // is deliberately swallowed rather than aborting the tool.
    catch (final Exception e) { /* Unpossible! */ }
}
/**
 * If this tool is either Experimental or Beta, return a warning message advising against use in production
 * environment.
 * @param useTerminalColor true if the message should include highlighting terminal colorization
 * @return a warning message if the tool is Beta or Experimental, otherwise null
 */
protected String getToolStatusWarning(final boolean useTerminalColor) {
    // The two status branches previously duplicated the entire banner-formatting expression;
    // only the status clause differs, so it is now a parameter of a shared helper.
    if (isBetaFeature()) {
        return formatToolStatusWarning(useTerminalColor,
                "is a BETA tool and is not yet ready for use in production");
    }
    if (isExperimentalFeature()) {
        return formatToolStatusWarning(useTerminalColor,
                "is an EXPERIMENTAL tool and should not be used for production");
    }
    return null;
}

/**
 * Builds the banner-style status warning for this tool.
 *
 * @param useTerminalColor true to wrap the message in bold-red ANSI escape codes
 * @param statusClause clause describing the tool status, inserted after the tool's simple class name
 * @return the fully formatted warning message
 */
private String formatToolStatusWarning(final boolean useTerminalColor, final String statusClause) {
    final String KNRM = "\u001B[0m"; // reset
    final String BOLDRED = "\u001B[1m\u001B[31m";
    final int BORDER_LENGTH = 60;
    return String.format(
            "\n\n%s %s\n\n   Warning: %s %s\n\n   %s%s\n\n",
            useTerminalColor ? BOLDRED : "",
            Utils.dupChar('!', BORDER_LENGTH),
            this.getClass().getSimpleName(),
            statusClause,
            Utils.dupChar('!', BORDER_LENGTH),
            useTerminalColor ? KNRM : ""
    );
}
/**
 * If a tool is either Experimental or Beta, log a warning against use in a production environment.
 */
protected void warnOnToolStatus() {
    final String warningMessage = getToolStatusWarning(true); // true: include terminal colorization
    if (warningMessage != null) {
        logger.warn(warningMessage);
    }
}
/**
 * @return true if this tool has {@code BetaFeature} status.
 */
public boolean isBetaFeature() {
    // isAnnotationPresent(A) is specified as getAnnotation(A) != null, so behavior is unchanged.
    return this.getClass().isAnnotationPresent(BetaFeature.class);
}
/**
 * @return true if this tool has {@code ExperimentalFeature} status.
 */
public boolean isExperimentalFeature() {
    // isAnnotationPresent(A) is specified as getAnnotation(A) != null, so behavior is unchanged.
    return this.getClass().isAnnotationPresent(ExperimentalFeature.class);
}
/**
 * @return The name of this toolkit. The default implementation uses "Implementation-Title" from the
 * jar manifest, or (if that's not available) the package name.
 *
 * May be overridden by subclasses to provide a custom implementation if desired.
 */
protected String getToolkitName() {
    final Package toolPackage = getClass().getPackage();
    final String implementationTitle = toolPackage.getImplementationTitle();
    if (implementationTitle != null) {
        return implementationTitle;
    }
    // Not running from a jar with a manifest title — fall back to the package name.
    return toolPackage.getName();
}
/**
 * @return An abbreviated name of the toolkit for this tool. Subclasses may override to provide
 * a custom toolkit name.
 */
protected String getToolkitShortName() {
    // TODO: stored in the jar manifest, like {@link CommandLineProgram#getToolkitName}
    return DEFAULT_TOOLKIT_SHORT_NAME;
}
/**
 * @return the version of this tool. It is the version stored in the manifest of the jarfile
 * by default, or "Unavailable" if that's not available.
 *
 * May be overridden by subclasses to provide a custom implementation if desired.
 */
protected String getVersion() {
    final String versionString = this.getClass().getPackage().getImplementationVersion();
    if (versionString == null) {
        // No manifest version (e.g. running from class files rather than a jar).
        return "Unavailable";
    }
    return versionString;
}
/**
 * @return A String containing information about how to get support for this toolkit.
 *
 * May be overridden by subclasses to provide a custom implementation if desired.
 */
protected String getSupportInformation() {
    return "For support and documentation go to " + HelpConstants.GATK_MAIN_SITE;
}
/**
 * Output versions of important dependencies to the logger.
 *
 * May be overridden by subclasses to provide a custom implementation if desired.
 */
protected void printLibraryVersions() {
    try {
        // Locate the URL of this class file; when running from a jar it starts with "jar:".
        final String classPath = getClass().getResource(getClass().getSimpleName() + ".class").toString();
        if (classPath.startsWith("jar")) {
            // lastIndexOf("!") + 1 keeps the "!" so the appended path yields "jar:...!/META-INF/MANIFEST.MF"
            final String manifestPath = classPath.substring(0, classPath.lastIndexOf("!") + 1) + "/META-INF/MANIFEST.MF";
            try ( final InputStream manifestStream = new URL(manifestPath).openStream() ) {
                final Attributes manifestAttributes = new Manifest(manifestStream).getMainAttributes();
                final String htsjdkVersion = manifestAttributes.getValue("htsjdk-Version");
                final String picardVersion = manifestAttributes.getValue("Picard-Version");
                logger.info("HTSJDK Version: " + (htsjdkVersion != null ? htsjdkVersion : "unknown"));
                logger.info("Picard Version: " + (picardVersion != null ? picardVersion : "unknown"));
            }
        }
    }
    // Version reporting is best-effort; a missing/unreadable manifest should never abort the tool.
    catch (IOException ignored) {
    }
}
/**
 * Output a curated set of important settings to the logger.
 *
 * May be overridden by subclasses to specify a different set of settings to output.
 */
protected void printSettings() {
    if ( VERBOSITY != Log.LogLevel.DEBUG ) {
        // Normal verbosity: print only the most relevant HTSJDK defaults.
        logger.info("HTSJDK Defaults.COMPRESSION_LEVEL : " + Defaults.COMPRESSION_LEVEL);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : " + Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : " + Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : " + Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE);
    }
    else {
        // At DEBUG verbosity, print all the HTSJDK defaults:
        Defaults.allDefaults().entrySet().stream().forEach(e->
                logger.info("HTSJDK " + Defaults.class.getSimpleName() + "." + e.getKey() + " : " + e.getValue())
        );
    }
    // Log the configuration options:
    ConfigFactory.logConfigFields(ConfigFactory.getInstance().getGATKConfig(), Log.LogLevel.DEBUG);
    // Report which deflater/inflater implementations are actually in effect
    // (the Intel factories can silently fall back to the JDK implementations).
    final boolean usingIntelDeflater = (BlockCompressedOutputStream.getDefaultDeflaterFactory() instanceof IntelDeflaterFactory && ((IntelDeflaterFactory)BlockCompressedOutputStream.getDefaultDeflaterFactory()).usingIntelDeflater());
    logger.info("Deflater: " + (usingIntelDeflater ? "IntelDeflater": "JdkDeflater"));
    final boolean usingIntelInflater = (BlockGunzipper.getDefaultInflaterFactory() instanceof IntelInflaterFactory && ((IntelInflaterFactory)BlockGunzipper.getDefaultInflaterFactory()).usingIntelInflater());
    logger.info("Inflater: " + (usingIntelInflater ? "IntelInflater": "JdkInflater"));
    logger.info("GCS max retries/reopens: " + BucketUtils.getCloudStorageConfiguration(NIO_MAX_REOPENS, "").maxChannelReopens());
    if (Strings.isNullOrEmpty(NIO_PROJECT_FOR_REQUESTER_PAYS)) {
        logger.info("Requester pays: disabled");
    } else {
        logger.info("Requester pays: enabled. Billed to: " + NIO_PROJECT_FOR_REQUESTER_PAYS);
    }
}
/**
 * @return the commandline used to run this program; will be null if arguments have not yet been parsed
 * (it is set by {@link #parseArgs(String[])})
 */
public final String getCommandLine() {
    return commandLine;
}
/**
 * @return get usage and help information for this command line program if it is available
 * (hidden arguments are included when SHOW_HIDDEN is set)
 */
public final String getUsage(){
    return getCommandLineParser().usage(true, specialArgumentsCollection.SHOW_HIDDEN);
}
/**
 * Replaces the set of default metrics headers by the given argument.
 * The given list is copied (clear + addAll keeps the identity of the internal list, which
 * {@link #getDefaultHeaders()} hands out live).
 */
public final void setDefaultHeaders(final List<Header> headers) {
    Utils.nonNull(headers);
    this.defaultHeaders.clear();
    this.defaultHeaders.addAll(headers);
}
/**
 * Returns the (live) list of default metrics headers used by this tool.
 * Note: this intentionally exposes the internal mutable list — callers can modify the
 * tool's headers through it.
 */
public final List<Header> getDefaultHeaders() {
    return this.defaultHeaders;
}
/**
 * @return this programs CommandLineParser. If one is not initialized yet this will initialize it.
 * Lazy initialization is not synchronized; callers are expected to be single-threaded here.
 */
@VisibleForTesting
public final CommandLineParser getCommandLineParser() {
    if (commandLineParser != null) {
        return commandLineParser;
    }
    commandLineParser = new CommandLineArgumentParser(this, getPluginDescriptors(), Collections.emptySet());
    return commandLineParser;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gobblin.runtime.app;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.commons.lang3.reflect.ConstructorUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.util.concurrent.Service;
import com.google.common.util.concurrent.ServiceManager;
import gobblin.annotation.Alpha;
import gobblin.admin.AdminWebServer;
import gobblin.configuration.ConfigurationKeys;
import gobblin.configuration.State;
import gobblin.metrics.GobblinMetrics;
import gobblin.rest.JobExecutionInfoServer;
import gobblin.runtime.services.JMXReportingService;
import gobblin.runtime.services.MetricsReportingService;
import gobblin.util.ApplicationLauncherUtils;
/**
* An implementation of {@link ApplicationLauncher} that defines an application as a set of {@link Service}s that should
* be started and stopped. The class will run a set of core services, some of which are optional, some of which are
* mandatory. These {@link Service}s are as follows:
*
* <ul>
* <li>{@link MetricsReportingService} is optional and controlled by {@link ConfigurationKeys#METRICS_ENABLED_KEY}</li>
* <li>{@link JobExecutionInfoServer} is optional and controlled by {@link ConfigurationKeys#JOB_EXECINFO_SERVER_ENABLED_KEY}</li>
* <li>{@link AdminWebServer} is optional and controlled by {@link ConfigurationKeys#ADMIN_SERVER_ENABLED_KEY}</li>
* <li>{@link JMXReportingService} is mandatory</li>
* </ul>
*
* <p>
* Additional {@link Service}s can be added via the {@link #addService(Service)} method. A {@link Service} cannot be
* added after the application has started. Additional {@link Service}s can also be specified via the configuration
* key {@link #APP_ADDITIONAL_SERVICES}.
* </p>
*
* <p>
* An {@link ServiceBasedAppLauncher} cannot be restarted.
* </p>
*/
@Alpha
public class ServiceBasedAppLauncher implements ApplicationLauncher {

  /**
   * The name of the application. Not applicable for YARN jobs, which uses a separate key for the application name.
   */
  public static final String APP_NAME = "app.name";

  /**
   * The number of seconds to wait for the application to stop, the default value is {@link #DEFAULT_APP_STOP_TIME_SECONDS}
   */
  public static final String APP_STOP_TIME_SECONDS = "app.stop.time.seconds";
  private static final String DEFAULT_APP_STOP_TIME_SECONDS = Long.toString(60);

  /**
   * A comma separated list of fully qualified classes that implement the {@link Service} interface. These
   * {@link Service}s will be run in addition to the core services.
   */
  public static final String APP_ADDITIONAL_SERVICES = "app.additional.services";

  private static final Logger LOG = LoggerFactory.getLogger(ServiceBasedAppLauncher.class);

  // Maximum number of seconds to wait for all services to stop before giving up.
  private final int stopTime;
  // Unique application id derived from the application name; used e.g. by the metrics service.
  private final String appId;
  // Services to run; populated before start() and frozen once the application has started.
  private final List<Service> services;

  // volatile: these flags are read/written both from the caller's thread and from JVM shutdown hooks.
  private volatile boolean hasStarted = false;
  private volatile boolean hasStopped = false;

  private ServiceManager serviceManager;

  /**
   * Creates the launcher and registers the core services (job execution info server, admin UI,
   * metrics, JMX) plus any additional services named by {@link #APP_ADDITIONAL_SERVICES}.
   *
   * @param properties application configuration
   * @param appName application name used to generate the application id
   * @throws Exception if an additional service class cannot be loaded or instantiated
   */
  public ServiceBasedAppLauncher(Properties properties, String appName) throws Exception {
    this.stopTime = Integer.parseInt(properties.getProperty(APP_STOP_TIME_SECONDS, DEFAULT_APP_STOP_TIME_SECONDS));
    this.appId = ApplicationLauncherUtils.newAppId(appName);
    this.services = new ArrayList<>();

    // Add core Services needed for any application
    addJobExecutionServerAndAdminUI(properties);
    addMetricsService(properties);
    addJMXReportingService();

    // Add any additional Services specified via configuration keys
    addServicesFromProperties(properties);

    // Add a shutdown hook that interrupts the main thread
    addInterruptedShutdownHook();
  }

  /**
   * Starts the {@link ApplicationLauncher} by starting all associated services. This method also adds a shutdown hook
   * that invokes {@link #stop()} and the {@link #close()} methods. So {@link #stop()} and {@link #close()} need not be
   * called explicitly; they can be triggered during the JVM shutdown.
   */
  @Override
  public synchronized void start() {
    if (this.hasStarted) {
      LOG.warn("ApplicationLauncher has already started");
      return;
    }

    this.hasStarted = true;
    this.serviceManager = new ServiceManager(this.services);

    // A listener that shutdowns the application if any service fails.
    this.serviceManager.addListener(new ServiceManager.Listener() {
      @Override
      public void failure(Service service) {
        super.failure(service);
        LOG.error(String.format("Service %s has failed.", service.getClass().getSimpleName()), service.failureCause());
        try {
          service.stopAsync();
          ServiceBasedAppLauncher.this.stop();
        } catch (ApplicationException ae) {
          LOG.error("Could not shutdown services gracefully. This may cause the application to hang.");
        }
      }
    });

    // Ensure stop()/close() run even when the JVM is terminated externally.
    Runtime.getRuntime().addShutdownHook(new Thread() {
      @Override
      public void run() {
        try {
          ServiceBasedAppLauncher.this.stop();
        } catch (ApplicationException e) {
          LOG.error("Failed to shutdown application", e);
        } finally {
          try {
            ServiceBasedAppLauncher.this.close();
          } catch (IOException e) {
            LOG.error("Failed to close application", e);
          }
        }
      }
    });

    LOG.info("Starting the Gobblin application and all its associated Services");

    // Start the application; blocks until every service is up and healthy.
    this.serviceManager.startAsync().awaitHealthy();
  }

  /**
   * Stops the {@link ApplicationLauncher} by stopping all associated services.
   */
  @Override
  public synchronized void stop() throws ApplicationException {
    if (!this.hasStarted) {
      LOG.warn("ApplicationLauncher was never started");
      return;
    }
    if (this.hasStopped) {
      LOG.warn("ApplicationLauncher has already stopped");
      return;
    }

    this.hasStopped = true;
    LOG.info("Shutting down the application");
    try {
      // Bounded wait: services that do not stop within stopTime seconds are abandoned with an error.
      this.serviceManager.stopAsync().awaitStopped(this.stopTime, TimeUnit.SECONDS);
    } catch (TimeoutException te) {
      LOG.error("Timeout in stopping the service manager", te);
    }
  }

  @Override
  public void close() throws IOException {
    // Do nothing
  }

  /**
   * Add a {@link Service} to be run by this {@link ApplicationLauncher}.
   *
   * <p>
   * This method is public because there are certain classes launchers (such as Azkaban) that require the
   * {@link ApplicationLauncher} to extend a pre-defined class. Since Java classes cannot extend multiple classes,
   * composition needs to be used. In which case this method needs to be public.
   * </p>
   */
  public void addService(Service service) {
    if (this.hasStarted) {
      // NOTE(review): IllegalStateException would describe this condition better, but the
      // exception type is kept for backward compatibility with existing callers.
      throw new IllegalArgumentException("Cannot add a service while the application is running!");
    }
    this.services.add(service);
  }

  /** Registers the job execution info server and (if both enabled) the admin UI server. */
  private void addJobExecutionServerAndAdminUI(Properties properties) {
    // Boolean.parseBoolean avoids the needless boxing of Boolean.valueOf; same result.
    boolean jobExecInfoServerEnabled = Boolean
        .parseBoolean(properties.getProperty(ConfigurationKeys.JOB_EXECINFO_SERVER_ENABLED_KEY, Boolean.FALSE.toString()));
    boolean adminUiServerEnabled =
        Boolean.parseBoolean(properties.getProperty(ConfigurationKeys.ADMIN_SERVER_ENABLED_KEY, Boolean.FALSE.toString()));

    if (jobExecInfoServerEnabled) {
      LOG.info("Will launch the job execution info server");
      JobExecutionInfoServer executionInfoServer = new JobExecutionInfoServer(properties);
      addService(executionInfoServer);
      if (adminUiServerEnabled) {
        LOG.info("Will launch the admin UI server");
        addService(new AdminWebServer(properties, executionInfoServer.getAdvertisedServerUri()));
      }
    } else if (adminUiServerEnabled) {
      // The admin UI depends on the execution info server; it cannot run alone.
      LOG.warn("Not launching the admin UI because the job execution info server is not enabled");
    }
  }

  /** Registers the metrics reporting service when metrics are enabled in the configuration. */
  private void addMetricsService(Properties properties) {
    if (GobblinMetrics.isEnabled(properties)) {
      addService(new MetricsReportingService(properties, this.appId));
    }
  }

  /** Registers the mandatory JMX reporting service. */
  private void addJMXReportingService() {
    addService(new JMXReportingService());
  }

  /**
   * Instantiates and registers the extra services named by {@link #APP_ADDITIONAL_SERVICES}.
   * Each class must implement {@link Service}; a (Properties) constructor is preferred, falling
   * back to the no-arg constructor.
   */
  private void addServicesFromProperties(Properties properties)
      throws IllegalAccessException, InstantiationException, ClassNotFoundException, InvocationTargetException {
    if (properties.containsKey(APP_ADDITIONAL_SERVICES)) {
      for (String serviceClassName : new State(properties).getPropAsSet(APP_ADDITIONAL_SERVICES)) {
        Class<?> serviceClass = Class.forName(serviceClassName);
        if (Service.class.isAssignableFrom(serviceClass)) {
          Service service;
          Constructor<?> constructor =
              ConstructorUtils.getMatchingAccessibleConstructor(serviceClass, Properties.class);
          if (constructor != null) {
            service = (Service) constructor.newInstance(properties);
          } else {
            service = (Service) serviceClass.newInstance();
          }
          addService(service);
        } else {
          throw new IllegalArgumentException(String.format("Class %s specified by %s does not implement %s",
              serviceClassName, APP_ADDITIONAL_SERVICES, Service.class.getSimpleName()));
        }
      }
    }
  }

  /** Adds a JVM shutdown hook that interrupts the thread which constructed this launcher. */
  private void addInterruptedShutdownHook() {
    final Thread mainThread = Thread.currentThread();

    Runtime.getRuntime().addShutdownHook(new Thread() {
      @Override
      public void run() {
        mainThread.interrupt();
      }
    });
  }
}
| |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.segmentrouting;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.DefaultApplicationId;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DeviceId;
import org.onosproject.net.PortNumber;
import org.onosproject.segmentrouting.PortAuthTracker.BlockState;
import org.onosproject.segmentrouting.config.BlockedPortsConfig;
import org.onosproject.segmentrouting.config.BlockedPortsConfigTest;
import java.io.IOException;
import java.io.InputStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.onosproject.net.ConnectPoint.deviceConnectPoint;
import static org.onosproject.net.DeviceId.deviceId;
import static org.onosproject.net.PortNumber.portNumber;
import static org.onosproject.segmentrouting.PortAuthTracker.BlockState.AUTHENTICATED;
import static org.onosproject.segmentrouting.PortAuthTracker.BlockState.BLOCKED;
import static org.onosproject.segmentrouting.PortAuthTracker.BlockState.UNCHECKED;
/**
* Unit Tests for {@link PortAuthTracker}.
*/
public class PortAuthTrackerTest {

    private static final ApplicationId APP_ID = new DefaultApplicationId(1, "foo");
    private static final String KEY = "blocked";
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private static final String PATH_CFG = "/blocked-ports.json";
    private static final String PATH_CFG_ALT = "/blocked-ports-alt.json";

    private static final String DEV1 = "of:0000000000000001";
    private static final String DEV3 = "of:0000000000000003";
    private static final String DEV4 = "of:0000000000000004";

    private BlockedPortsConfig cfg;
    private AugmentedPortAuthTracker tracker;

    // --- simple console output helpers for test diagnostics ---

    private void print(String s) {
        System.out.println(s);
    }

    private void print(Object o) {
        print(o.toString());
    }

    private void print(String fmt, Object... params) {
        print(String.format(fmt, params));
    }

    private void title(String s) {
        print("=== %s ===", s);
    }

    /**
     * Loads a {@link BlockedPortsConfig} from the given classpath JSON resource.
     * The resource stream is closed via try-with-resources (the previous version leaked it),
     * and the local is renamed so it no longer shadows the {@code cfg} field.
     */
    private BlockedPortsConfig makeConfig(String path) throws IOException {
        try (InputStream blockedPortsJson = BlockedPortsConfigTest.class
                .getResourceAsStream(path)) {
            JsonNode node = MAPPER.readTree(blockedPortsJson);
            BlockedPortsConfig config = new BlockedPortsConfig();
            config.init(APP_ID, KEY, node, MAPPER, null);
            return config;
        }
    }

    /** Builds a connect point from a device id string and a port number. */
    ConnectPoint cp(String devId, int port) {
        return ConnectPoint.deviceConnectPoint(devId + "/" + port);
    }

    @Before
    public void setUp() throws IOException {
        cfg = makeConfig(PATH_CFG);
        tracker = new AugmentedPortAuthTracker();
    }

    /**
     * Asserts that consecutive ports on {@code devId}, starting at {@code first},
     * are in the given expected block states.
     */
    private void verifyPortState(String devId, int first, BlockState... states) {
        DeviceId dev = deviceId(devId);
        int last = first + states.length;
        int pn = first;
        int i = 0;
        while (pn < last) {
            PortNumber pnum = portNumber(pn);
            BlockState actual = tracker.currentState(dev, pnum);
            print("%s/%s [%s] --> %s", devId, pn, states[i], actual);
            assertEquals("oops: " + devId + "/" + pn + "~" + actual,
                    states[i], actual);
            pn++;
            i++;
        }
    }

    @Test
    public void basic() {
        title("basic");
        print(tracker);
        print(cfg);
        assertEquals("wrong entry count", 0, tracker.entryCount());

        // let's assume that the net config just got loaded..
        tracker.configurePortBlocking(cfg);
        assertEquals("wrong entry count", 13, tracker.entryCount());

        verifyPortState(DEV1, 1, BLOCKED, BLOCKED, BLOCKED, BLOCKED, UNCHECKED);
        verifyPortState(DEV1, 6, UNCHECKED, BLOCKED, BLOCKED, BLOCKED, UNCHECKED);
        verifyPortState(DEV3, 1, UNCHECKED, UNCHECKED, UNCHECKED);
        verifyPortState(DEV3, 6, UNCHECKED, BLOCKED, BLOCKED, BLOCKED, UNCHECKED);
        verifyPortState(DEV4, 1, BLOCKED, UNCHECKED, UNCHECKED, UNCHECKED, BLOCKED);
    }

    @Test
    public void logonLogoff() {
        title("logonLogoff");
        tracker.configurePortBlocking(cfg);
        assertEquals("wrong entry count", 13, tracker.entryCount());
        verifyPortState(DEV1, 1, BLOCKED, BLOCKED, BLOCKED);

        ConnectPoint cp = deviceConnectPoint(DEV1 + "/2");
        tracker.radiusAuthorize(cp);
        print("");
        verifyPortState(DEV1, 1, BLOCKED, AUTHENTICATED, BLOCKED);

        tracker.radiusLogoff(cp);
        print("");
        verifyPortState(DEV1, 1, BLOCKED, BLOCKED, BLOCKED);
    }

    @Test
    public void installedFlows() {
        title("installed flows");
        assertEquals(0, tracker.installed.size());

        tracker.configurePortBlocking(cfg);
        assertEquals(13, tracker.installed.size());
        assertTrue(tracker.installed.contains(cp(DEV1, 1)));
        assertTrue(tracker.installed.contains(cp(DEV3, 7)));
        assertTrue(tracker.installed.contains(cp(DEV4, 5)));
    }

    @Test
    public void flowsLogonLogoff() {
        title("flows logon logoff");
        tracker.configurePortBlocking(cfg);

        // let's pick a connect point from the configuration
        ConnectPoint cp = cp(DEV4, 5);
        assertTrue(tracker.installed.contains(cp));
        assertEquals(0, tracker.cleared.size());

        tracker.resetMetrics();
        tracker.radiusAuthorize(cp);
        // verify we requested the blocking flow to be cleared
        assertTrue(tracker.cleared.contains(cp));

        tracker.resetMetrics();
        assertEquals(0, tracker.installed.size());
        tracker.radiusLogoff(cp);
        // verify we requested the blocking flow to be reinstated
        assertTrue(tracker.installed.contains(cp));
    }

    @Test
    public void uncheckedPortIgnored() {
        title("unchecked port ignored");
        tracker.configurePortBlocking(cfg);
        tracker.resetMetrics();

        // let's pick a connect point NOT in the configuration
        ConnectPoint cp = cp(DEV4, 2);
        assertEquals(BlockState.UNCHECKED, tracker.currentState(cp));
        assertEquals(0, tracker.installed.size());
        assertEquals(0, tracker.cleared.size());

        tracker.radiusAuthorize(cp);
        assertEquals(0, tracker.installed.size());
        assertEquals(0, tracker.cleared.size());

        tracker.radiusLogoff(cp);
        assertEquals(0, tracker.installed.size());
        assertEquals(0, tracker.cleared.size());
    }

    @Test
    public void reconfiguration() throws IOException {
        title("reconfiguration");
        /* see 'blocked-ports.json' and 'blocked-ports-alt.json'
            cfg:  "1": ["1-4", "7-9"],
                  "3": ["7-9"],
                  "4": ["1", "5", "9"]
            alt:  "1": ["1-9"],
                  "3": ["7"],
                  "4": ["1"]
         */
        tracker.configurePortBlocking(cfg);

        // dev1: ports 5 and 6 are NOT configured in the original CFG
        assertFalse(tracker.installed.contains(cp(DEV1, 5)));
        assertFalse(tracker.installed.contains(cp(DEV1, 6)));

        tracker.resetMetrics();
        assertEquals(0, tracker.installed.size());
        assertEquals(0, tracker.cleared.size());

        BlockedPortsConfig alt = makeConfig(PATH_CFG_ALT);
        tracker.configurePortBlocking(alt);

        // dev1: ports 5 and 6 ARE configured in the alternate CFG
        assertTrue(tracker.installed.contains(cp(DEV1, 5)));
        assertTrue(tracker.installed.contains(cp(DEV1, 6)));

        // also, check for the ports that were decommissioned
        assertTrue(tracker.cleared.contains(cp(DEV3, 8)));
        assertTrue(tracker.cleared.contains(cp(DEV3, 9)));
        assertTrue(tracker.cleared.contains(cp(DEV4, 5)));
        assertTrue(tracker.cleared.contains(cp(DEV4, 9)));
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.client.queries;
import com.eas.client.DatabasesClientWithResource;
import com.eas.client.SqlQuery;
import com.eas.client.cache.ApplicationSourceIndexer;
import com.eas.client.metadata.Field;
import com.eas.client.metadata.Fields;
import com.eas.client.settings.DbConnectionSettings;
import com.eas.script.JsDoc;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author pk, mg
*/
public class ScriptedQueryFactoryTest {
protected static ApplicationSourceIndexer indexer;
protected static DatabasesClientWithResource resource;
public ScriptedQueryFactoryTest() {
    // no per-instance state; shared fixtures are initialized in setUpClass()
}
@BeforeClass
public static void setUpClass() throws Exception {
    // NOTE(review): hard-coded local Windows path — this suite only runs on a machine with
    // this exact project layout; consider making it configurable.
    indexer = new ApplicationSourceIndexer("c:/projects/PlatypusTests");
    // NOTE(review): connection details (including credentials) are hard-coded to a specific
    // Oracle instance; the suite is environment-dependent.
    DbConnectionSettings settings = new DbConnectionSettings();
    settings.setUrl("jdbc:oracle:thin:@asvr/adb");
    settings.setUser("eas");
    settings.setPassword("eas");
    settings.setSchema("eas");
    settings.setMaxConnections(1);
    settings.setMaxStatements(1);
    resource = new DatabasesClientWithResource(settings);
}
@AfterClass
public static void tearDownClass() throws Exception {
    // NOTE(review): 'resource' opened in setUpClass() is never closed here — verify whether
    // DatabasesClientWithResource needs explicit cleanup.
}
@Before
public void setUp() {
    // no per-test setup required
}
@After
public void tearDown() {
    // no per-test cleanup required
}
/** Roles listed with mixed comma/space separators in a leading JsDoc block are all parsed. */
@Test
public void testFirstAnnotationsComma1() throws Exception {
    final String admin = "admin";
    final String mechaniker = "mechaniker";
    final String dispatcher = "dispatcher";
    final String sqlText = String.format(""
            + "/**\n"
            + " * %s %s , %s,%s\n"
            + " * \n"
            + " * \n"
            + " */\n"
            + "select from dual", JsDoc.Tag.ROLES_ALLOWED_TAG, admin, mechaniker, dispatcher);
    final SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(query.getReadRoles(), query.getWriteRoles());
    assertEquals(3, query.getReadRoles().size());
    for (final String role : new String[]{admin, mechaniker, dispatcher}) {
        assertTrue(query.getReadRoles().contains(role));
    }
}
/** Same as testFirstAnnotationsComma1, but with mixed \n\r / \r\n line endings in the JsDoc. */
@Test
public void testFirstAnnotationsComma2() throws Exception {
    final String admin = "admin";
    final String mechaniker = "mechaniker";
    final String dispatcher = "dispatcher";
    final String sqlText = String.format(""
            + "/**\n\r"
            + " * %s %s , %s,%s\r\n"
            + " * \n\r"
            + " * \r\n"
            + " */\n"
            + "select from dual", JsDoc.Tag.ROLES_ALLOWED_TAG, admin, mechaniker, dispatcher);
    final SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(query.getReadRoles(), query.getWriteRoles());
    assertEquals(3, query.getReadRoles().size());
    for (final String role : new String[]{admin, mechaniker, dispatcher}) {
        assertTrue(query.getReadRoles().contains(role));
    }
}
/** Roles separated only by spaces in a leading JsDoc block are all parsed. */
@Test
public void testFirstAnnotationsSpace() throws Exception {
    final String admin = "admin";
    final String mechaniker = "mechaniker";
    final String dispatcher = "dispatcher";
    final String sqlText = String.format(""
            + "/**\n\r"
            + " * %s %s %s %s\r\n"
            + " * \n\r"
            + " * \r\n"
            + " */\n"
            + "select from dual", JsDoc.Tag.ROLES_ALLOWED_TAG, admin, mechaniker, dispatcher);
    final SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(query.getReadRoles(), query.getWriteRoles());
    assertEquals(3, query.getReadRoles().size());
    for (final String role : new String[]{admin, mechaniker, dispatcher}) {
        assertTrue(query.getReadRoles().contains(role));
    }
}
@Test
public void testLastAnnotationsSpace() throws Exception {
    // The tag comment may trail the statement, and tag matching is case-insensitive
    // (the tag is upper-cased here on purpose).
    String[] roles = {"admin", "mechaniker", "dispatcher"};
    String sqlText = String.format("select from dual"
            + "/**\n\r"
            + " * %s %s %s %s\r\n"
            + " * \n\r"
            + " * \r\n"
            + " */\n"
            + "", JsDoc.Tag.ROLES_ALLOWED_TAG.toUpperCase(), roles[0], roles[1], roles[2]);
    SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(query.getReadRoles(), query.getWriteRoles());
    assertEquals(roles.length, query.getReadRoles().size());
    for (String role : roles) {
        assertTrue(query.getReadRoles().contains(role));
    }
}
@Test
public void testMiddleAnnotationsSpace1() throws Exception {
    // The roles tag may appear after other comment lines within the doc block.
    String[] roles = {"admin", "mechaniker", "dispatcher"};
    String sqlText = String.format(""
            + "/**\n\r"
            + " * select from dual\r\n"
            + " * %s %s %s %s\r\n"
            + " * \r\n"
            + " */\n"
            + "", JsDoc.Tag.ROLES_ALLOWED_TAG, roles[0], roles[1], roles[2]);
    SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(query.getReadRoles(), query.getWriteRoles());
    assertEquals(roles.length, query.getReadRoles().size());
    for (String role : roles) {
        assertTrue(query.getReadRoles().contains(role));
    }
}
@Test
public void testMiddleAnnotationsSpace2() throws Exception {
    // The roles tag may also precede other comment lines within the doc block.
    String[] roles = {"admin", "mechaniker", "dispatcher"};
    String sqlText = String.format(""
            + "/**\n\r"
            + " * %s %s %s %s\r\n"
            + " * select from dual\r\n"
            + " * \r\n"
            + " */\n"
            + "", JsDoc.Tag.ROLES_ALLOWED_TAG, roles[0], roles[1], roles[2]);
    SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(query.getReadRoles(), query.getWriteRoles());
    assertEquals(roles.length, query.getReadRoles().size());
    for (String role : roles) {
        assertTrue(query.getReadRoles().contains(role));
    }
}
@Test
public void testMiddleReadAnnotationsSpace2() throws Exception {
    // A read-only tag must populate the read roles and leave write roles empty.
    String[] roles = {"admin", "mechaniker", "dispatcher"};
    String sqlText = String.format(""
            + "/**\n\r"
            + " * %s %s %s %s\r\n"
            + " * select from dual\r\n"
            + " * \r\n"
            + " */\n"
            + "", JsDoc.Tag.ROLES_ALLOWED_READ_TAG, roles[0], roles[1], roles[2]);
    SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(0, query.getWriteRoles().size());
    assertEquals(roles.length, query.getReadRoles().size());
    for (String role : roles) {
        assertTrue(query.getReadRoles().contains(role));
    }
}
@Test
public void testMiddleWriteAnnotationsSpace2() throws Exception {
    // A write-only tag must populate the write roles and leave read roles empty.
    String[] roles = {"admin", "mechaniker", "dispatcher"};
    String sqlText = String.format(""
            + "/**\n\r"
            + " * %s %s %s %s\r\n"
            + " * select from dual\r\n"
            + " * \r\n"
            + " */\n"
            + "", JsDoc.Tag.ROLES_ALLOWED_WRITE_TAG, roles[0], roles[1], roles[2]);
    SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    assertEquals(0, query.getReadRoles().size());
    assertEquals(roles.length, query.getWriteRoles().size());
    for (String role : roles) {
        assertTrue(query.getWriteRoles().contains(role));
    }
}
@Test
public void testMiddleReadWriteAnnotationsSpace2() throws Exception {
    // Separate read and write tags in two comment blocks must be honored independently.
    String[] roles = {"admin", "mechaniker", "dispatcher"};
    String sqlText = String.format(""
            + "/**\n\r"
            + " * %s %s %s %s\r\n"
            + " */\n"
            + "select \r\n"
            + "/**\n"
            + " * %s %s %s \r\n"
            + " * \r\n"
            + " */\n"
            + "from dual",
            JsDoc.Tag.ROLES_ALLOWED_READ_TAG, roles[0], roles[1], roles[2],
            JsDoc.Tag.ROLES_ALLOWED_WRITE_TAG, roles[0], roles[1]);
    SqlQuery query = new SqlQuery(null, sqlText);
    ScriptedQueryFactory.putRolesMutatables(query);
    // The read tag lists all three roles...
    assertEquals(3, query.getReadRoles().size());
    for (String role : roles) {
        assertTrue(query.getReadRoles().contains(role));
    }
    // ...while the write tag lists only the first two.
    assertEquals(2, query.getWriteRoles().size());
    assertTrue(query.getWriteRoles().contains(roles[0]));
    assertTrue(query.getWriteRoles().contains(roles[1]));
    assertFalse(query.getWriteRoles().contains(roles[2]));
}
@Test
public void testCompilingWithSubqueries() throws Exception {
    // Compiling "sub_query_compile" must inline the named subquery "namedQuery4Tests"
    // verbatim and derive fields/parameters across both the outer query and the subquery.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("sub_query_compile", null, null, null);
    // The compiled SQL text must match exactly, including the inlined subquery comment.
    assertEquals("/**\n"
            + " * \n"
            + " * @author mg\n"
            + " * @name sub_query_compile\n"
            + " */\n"
            + "SELECT T0.ORDER_NO, 'Some text' AS VALUE_FIELD_1, TABLE1.ID, TABLE1.F1, TABLE1.F3, T0.AMOUNT FROM TABLE1, TABLE2, (/**\n"
            + " * @name namedQuery4Tests\n"
            + "*/\n"
            + "Select goodOrder.ORDER_ID as ORDER_NO, goodOrder.AMOUNT, customers.CUSTOMER_NAME as CUSTOMER \n"
            + "From GOODORDER goodOrder\n"
            + " Inner Join CUSTOMER customers on (goodOrder.CUSTOMER = customers.CUSTOMER_ID)\n"
            + " and (goodOrder.AMOUNT > customers.CUSTOMER_NAME)\n Where :P4 = goodOrder.GOOD) T0 WHERE ((TABLE2.FIELDA<TABLE1.F1) AND (:P2=TABLE1.F3)) AND (:P3=T0.AMOUNT)\n",
            testQuery.getSqlText());
    assertEquals(6, testQuery.getFields().getFieldsCount());
    // Field indices are 1-based, hence get(i + 1).
    for (int i = 0; i < testQuery.getFields().getFieldsCount(); i++) {
        Field fieldMtd = testQuery.getFields().get(i + 1);
        assertNotNull(fieldMtd);
        /* Jdbc driver of oracle <= ojdbc6 does not support remarks for tables and for columns
        if (i == 0 || i == 5) {
        assertNotNull(fieldMtd.getDescription());
        } else {
        assertNull(fieldMtd.getDescription());
        }
        */
    }
    assertEquals(4, testQuery.getParameters().getParametersCount());
}
@Test
public void testCompilingWithSubqueriesBad() throws Exception {
    // "bad_schema" references a subquery by numeric id; compilation must still inline
    // it and resolve fields/parameters, despite the schema irregularities.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("bad_schema", null, null, null);
    // The compiled SQL text must match exactly, including the numeric subquery name.
    assertEquals("/**\n"
            + " * \n"
            + " * @author mg\n"
            + " * @name bad_schema\n"
            + " */\n"
            + "SELECT T0.ORDER_NO, 'Some text', TABLE1.ID, TABLE1.F1, TABLE1.F3, T0.AMOUNT FROM TABLE1, TABLE2, (/**\n"
            + " * @name 128082898425059\n"
            + "*/\n"
            + "Select goodOrder.ORDER_ID as ORDER_NO, goodOrder.AMOUNT, customers.CUSTOMER_NAME as CUSTOMER \n"
            + "From GOODORDER goodOrder\n"
            + " Inner Join CUSTOMER customers on (goodOrder.CUSTOMER = customers.CUSTOMER_ID)\n"
            + " and (goodOrder.AMOUNT > customers.CUSTOMER_NAME)\n"
            + " Where :P4 = goodOrder.GOOD) T0 WHERE ((TABLE2.FIELDA<TABLE1.F1) AND (:P2=TABLE1.F3)) AND (:P3=T0.AMOUNT)\n",
            testQuery.getSqlText());
    assertEquals(6, testQuery.getFields().getFieldsCount());
    // Field indices are 1-based, hence get(i + 1).
    for (int i = 0; i < testQuery.getFields().getFieldsCount(); i++) {
        Field fieldMtd = testQuery.getFields().get(i + 1);
        assertNotNull(fieldMtd);
        /* Jdbc driver of oracle <= ojdbc6 does not support remarks for tables and for columns
        if (i == 0 || i == 5) {
        assertNotNull(fieldMtd.getDescription());
        } else {
        assertNull(fieldMtd.getDescription());
        }
        */
    }
    assertEquals(4, testQuery.getParameters().getParametersCount());
}
@Test
public void testAsteriskMetadata() throws Exception {
    // A SELECT * over two tables plus an inlined subquery must expand to the union of
    // all columns (11 fields) while keeping the SQL text itself unexpanded.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("asterisk_schema", null, null, null);
    assertEquals(""
            + "/**\n"
            + " * \n"
            + " * @author mg\n"
            + " * @name asterisk_schema\n"
            + " */\n"
            + "SELECT * FROM TABLE1, TABLE2, (/**\n"
            + " * @name 128082898425059\n"
            + "*/\n"
            + "Select goodOrder.ORDER_ID as ORDER_NO, goodOrder.AMOUNT, customers.CUSTOMER_NAME as CUSTOMER \nFrom GOODORDER goodOrder\n Inner Join CUSTOMER customers on (goodOrder.CUSTOMER = customers.CUSTOMER_ID)\n and (goodOrder.AMOUNT > customers.CUSTOMER_NAME)\n Where :P4 = goodOrder.GOOD) T0 WHERE ((TABLE2.FIELDA<TABLE1.F1) AND (:P2=TABLE1.F3)) AND (:P3=T0.AMOUNT)",
            testQuery.getSqlText());
    assertEquals(11, testQuery.getFields().getFieldsCount());
    // Field indices are 1-based, hence get(i + 1).
    for (int i = 0; i < testQuery.getFields().getFieldsCount(); i++) {
        Field fieldMtd = testQuery.getFields().get(i + 1);
        assertNotNull(fieldMtd);
    }
    assertEquals(4, testQuery.getParameters().getParametersCount());
}
@Test
public void testBadSubquery() throws Exception {
    // An unresolvable subquery reference must be left as the raw "#..." placeholder
    // in the compiled SQL text rather than failing the whole compilation.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("bad_subquery", null, null, null);
    assertEquals("/**\n"
            + " * \n"
            + " * @author mg\n"
            + " * @name bad_subquery\n"
            + " */\n"
            + "SELECT * FROM TABLE1, TABLE2, #_1_2_8082898425059 T0 WHERE ((TABLE2.FIELDA<TABLE1.F1) AND (:P2=TABLE1.F3)) AND (:P3=T0.AMOUNT)\n"
            + "", testQuery.getSqlText());
}
@Test
public void testPartialTablesAsteriskMetadata() throws Exception {
    // A per-table asterisk (TABLE1.*) mixed with explicit columns must expand only
    // that table's columns, yielding 5 fields here.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("partial_asterisk_schema", null, null, null);
    assertEquals("/**\n"
            + " * \n"
            + " * @author mg\n"
            + " * @name partial_asterisk_schema\n"
            + " */\n"
            + "SELECT TABLE1.*, TABLE2.FiELdB FROM TABLE1, TABLE2, (/**\n"
            + " * @name namedQuery4Tests\n"
            + "*/\n"
            + "Select goodOrder.ORDER_ID as ORDER_NO, goodOrder.AMOUNT, customers.CUSTOMER_NAME as CUSTOMER \n"
            + "From GOODORDER goodOrder\n"
            + " Inner Join CUSTOMER customers on (goodOrder.CUSTOMER = customers.CUSTOMER_ID)\n"
            + " and (goodOrder.AMOUNT > customers.CUSTOMER_NAME)\n"
            + " Where :P4 = goodOrder.GOOD) T0 WHERE ((TABLE2.FIELDA<TABLE1.F1) AND (:P2=TABLE1.F3)) AND (:P3=T0.AMOUNT)\n",
            testQuery.getSqlText());
    assertEquals(5, testQuery.getFields().getFieldsCount());
    // Field indices are 1-based, hence get(i + 1).
    for (int i = 0; i < testQuery.getFields().getFieldsCount(); i++) {
        Field fieldMtd = testQuery.getFields().get(i + 1);
        assertNotNull(fieldMtd);
    }
    assertEquals(4, testQuery.getParameters().getParametersCount());
}
@Test
public void testPrimaryKey() throws Exception {
    // The first field of the "primary_key" query must be flagged as a primary key.
    LocalQueriesProxy proxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery query = proxy.getQuery("primary_key", null, null, null);
    Fields fields = query.getFields();
    assertNotNull(fields);
    assertTrue(fields.getFieldsCount() > 0);
    // Field indices are 1-based; the leading field carries the PK flag.
    assertTrue(fields.get(1).isPk());
}
@Test
public void testMultiplePrimaryKeys() throws Exception {
    // A query over a table with a composite key must mark every key column as PK.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("multiple_primary_keys", null, null, null);
    Fields fields = testQuery.getFields();
    assertNotNull(fields);
    // assertEquals reports the actual count on failure, unlike assertTrue(x == y).
    assertEquals(2, fields.getFieldsCount());
    assertTrue(fields.get(1).isPk());
    assertTrue(fields.get(2).isPk());
}
@Test
public void testWithoutAliases_Schema_NonSchema_Schema_Columns() throws Exception {
    // Columns referenced with and without a schema prefix (and with mixed case) must
    // all resolve against the same table without any aliases being involved.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("without_aliases_with_schema_without_schema_columns_from_single_table", null, null, null);
    assertEquals("/**\n"
            + " * \n"
            + " * @author mg\n"
            + " * @name without_aliases_with_schema_without_schema_columns_from_single_table\n"
            + " */\n"
            + "SELECT EAS.MTD_EntitiES.MDENt_ID, MTD_EntitiES.MDENT_NAME, EAS.MTD_EntitiES.MDENT_TYPe, MDENT_ORDER FROM EaS.MTD_EntitiES\n",
            testQuery.getSqlText());
    assertEquals(4, testQuery.getFields().getFieldsCount());
    // Field indices are 1-based, hence get(i + 1).
    for (int i = 0; i < testQuery.getFields().getFieldsCount(); i++) {
        Field fieldMtd = testQuery.getFields().get(i + 1);
        assertNotNull(fieldMtd);
    }
    assertEquals(0, testQuery.getParameters().getParametersCount());
}
@Test
public void testMultiplePrimaryKeysWithAsterisk() throws Exception {
    // With an asterisk select over a composite-key table, both key columns must be
    // marked as PKs, and field lookup by name must be case-insensitive.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery testQuery = queriesProxy.getQuery("multiple_primary_keys_asterisk", null, null, null);
    Fields fields = testQuery.getFields();
    assertNotNull(fields);
    // assertEquals reports the actual count on failure, unlike assertTrue(x == y).
    assertEquals(23, fields.getFieldsCount());
    // Deliberately odd casing exercises case-insensitive lookup.
    assertNotNull(fields.get("MdENT_ID"));
    assertTrue(fields.get("MDENT_iD").isPk());
    assertNotNull(fields.get("MDlOG_ID"));
    assertTrue(fields.get("MDLOG_ID").isPk());
    assertFalse(fields.getPrimaryKeys().isEmpty());
    assertEquals(2, fields.getPrimaryKeys().size());
    assertEquals("mdent_id", fields.getPrimaryKeys().get(0).getName());
    assertEquals("mdlog_id", fields.getPrimaryKeys().get(1).getName());
}
@Test
public void testGetQuery() throws Exception {
    // "get_query" is expected to expose exactly three fields in its metadata.
    LocalQueriesProxy proxy = new LocalQueriesProxy(resource.getClient(), indexer);
    SqlQuery query = proxy.getQuery("get_query", null, null, null);
    Fields metadata = query.getFields();
    assertEquals(3, metadata.getFieldsCount());
}
@Test
public void testGetEmptyQuery() throws Exception {
    // Resolving an empty query must fail rather than yield a bogus SqlQuery.
    LocalQueriesProxy queriesProxy = new LocalQueriesProxy(resource.getClient(), indexer);
    try {
        // The return value is intentionally discarded; the unused local variable in the
        // previous version only produced a compiler warning.
        queriesProxy.getQuery("empty_query", null, null, null);
        fail("Empty query must lead to an exception, but it doesn't. Why?");
    } catch (Exception ex) {
        // Expected: an empty query is invalid, so getQuery must throw.
    }
}
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rage;
import static com.facebook.buck.zip.ZipOutputStreams.HandleDuplicates.APPEND_TO_ZIP;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.slb.ClientSideSlb;
import com.facebook.buck.slb.HttpResponse;
import com.facebook.buck.slb.HttpService;
import com.facebook.buck.slb.LoadBalancedService;
import com.facebook.buck.slb.RetryingHttpService;
import com.facebook.buck.slb.SlbBuckConfig;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.zip.CustomZipEntry;
import com.facebook.buck.zip.CustomZipOutputStream;
import com.facebook.buck.zip.ZipOutputStreams;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.ByteStreams;
import com.google.common.io.CharStreams;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okio.BufferedSink;
/**
* Takes care of actually writing out the report.
*/
/**
 * Takes care of actually writing out the report.
 *
 * <p>If a frontend config with a working client-side load balancer is available, the report is
 * uploaded over HTTP; otherwise (or when the upload fails) the report is written to a zip file
 * under buck-out and its path is returned to the caller.</p>
 */
public class DefaultDefectReporter implements DefectReporter {

  // Attribute log messages to this class; previously this used AbstractReport.class,
  // which made upload failures look like they came from report generation.
  private static final Logger LOG = Logger.get(DefaultDefectReporter.class);

  private static final String REPORT_FILE_NAME = "report.json";
  private static final String DIFF_FILE_NAME = "changes.diff";
  private static final int HTTP_SUCCESS_CODE = 200;
  private static final String REQUEST_PROTOCOL_VERSION = "x-buck-protocol-version";

  private final ProjectFilesystem filesystem;
  private final RageConfig rageConfig;
  private final BuckEventBus buckEventBus;
  private final Clock clock;

  public DefaultDefectReporter(
      ProjectFilesystem filesystem,
      RageConfig rageConfig,
      BuckEventBus buckEventBus,
      Clock clock
  ) {
    this.filesystem = filesystem;
    this.rageConfig = rageConfig;
    this.buckEventBus = buckEventBus;
    this.clock = clock;
  }

  /**
   * Streams each project-relative path in {@code paths} into the archive as its own entry.
   *
   * @throws IOException if a file cannot be read or the archive cannot be written
   */
  private void addFilesToArchive(
      CustomZipOutputStream out,
      ImmutableSet<Path> paths) throws IOException {
    for (Path logFile : paths) {
      // Use a %s template so the offending path is formatted into the message;
      // without a placeholder the argument is only lenient-appended by Guava.
      Preconditions.checkArgument(!logFile.isAbsolute(), "Should be a relative Path: %s", logFile);

      // If the file is hidden (UNIX terms) save it as a normal file.
      if (logFile.getFileName().toString().startsWith(".")) {
        out.putNextEntry(new CustomZipEntry(
            Paths.get(logFile.getFileName().toString().substring(1))));
      } else {
        out.putNextEntry(new CustomZipEntry(logFile));
      }

      try (InputStream input = filesystem.newFileInputStream(logFile)) {
        ByteStreams.copy(input, out);
      }
      out.closeEntry();
    }
  }

  /** Writes each map entry as an archive entry named by the key, with the value as contents. */
  private void addStringsAsFilesToArchive(
      CustomZipOutputStream out,
      ImmutableMap<String, String> files) throws IOException {
    for (Map.Entry<String, String> file : files.entrySet()) {
      out.putNextEntry(new CustomZipEntry(file.getKey()));
      out.write(file.getValue().getBytes(Charsets.UTF_8));
      out.closeEntry();
    }
  }

  @Override
  public DefectSubmitResult submitReport(DefectReport defectReport) throws IOException {
    DefectSubmitResult.Builder defectSubmitResult = DefectSubmitResult.builder();
    defectSubmitResult.setRequestProtocol(rageConfig.getProtocolVersion());
    Optional<SlbBuckConfig> frontendConfig = rageConfig.getFrontendConfig();

    if (frontendConfig.isPresent()) {
      Optional<ClientSideSlb> slb =
          frontendConfig.get().tryCreatingClientSideSlb(clock, buckEventBus);
      if (slb.isPresent()) {
        try {
          return uploadReport(defectReport, defectSubmitResult, slb.get());
        } catch (IOException e) {
          // Fall through to the local zip file so the report data is not lost.
          LOG.debug(e, "Failed uploading report to server.");
          defectSubmitResult.setIsRequestSuccessful(false);
          defectSubmitResult.setReportSubmitErrorMessage(e.getMessage());
        }
      }
    }

    // Local fallback: persist the report as a zip under buck-out.
    filesystem.mkdirs(filesystem.getBuckPaths().getBuckOut());
    Path defectReportPath = filesystem.createTempFile(
        filesystem.getBuckPaths().getBuckOut(),
        "defect_report",
        ".zip");
    try (OutputStream outputStream = filesystem.newFileOutputStream(defectReportPath)) {
      writeReport(defectReport, outputStream);
    }
    return defectSubmitResult
        .setIsRequestSuccessful(Optional.empty())
        .setReportSubmitLocation(defectReportPath.toString())
        .build();
  }

  /** Serializes the report (diff, included files, report.json) as a zip onto the stream. */
  private void writeReport(
      DefectReport defectReport,
      OutputStream outputStream) throws IOException {
    try (BufferedOutputStream baseOut = new BufferedOutputStream(outputStream);
         CustomZipOutputStream out =
             ZipOutputStreams.newOutputStream(baseOut, APPEND_TO_ZIP)) {
      if (defectReport.getSourceControlInfo().isPresent() &&
          defectReport.getSourceControlInfo().get().getDiff().isPresent()) {
        addStringsAsFilesToArchive(
            out,
            ImmutableMap.of(
                DIFF_FILE_NAME,
                defectReport.getSourceControlInfo().get().getDiff().get()));
      }
      addFilesToArchive(out, defectReport.getIncludedPaths());

      out.putNextEntry(new CustomZipEntry(REPORT_FILE_NAME));
      ObjectMappers.WRITER.writeValue(out, defectReport);
    }
  }

  /**
   * Uploads the report over HTTP through the given load balancer.
   *
   * @throws IOException if the upload fails or the server responds with a non-200 status
   */
  private DefectSubmitResult uploadReport(
      final DefectReport defectReport,
      DefectSubmitResult.Builder defectSubmitResult,
      ClientSideSlb slb) throws IOException {
    long timeout = rageConfig.getHttpTimeout();
    OkHttpClient httpClient = new OkHttpClient.Builder()
        .connectTimeout(timeout, TimeUnit.MILLISECONDS)
        .readTimeout(timeout, TimeUnit.MILLISECONDS)
        .writeTimeout(timeout, TimeUnit.MILLISECONDS)
        .build();
    HttpService httpService = new RetryingHttpService(buckEventBus,
        new LoadBalancedService(slb, httpClient, buckEventBus),
        rageConfig.getMaxUploadRetries());

    try {
      Request.Builder requestBuilder = new Request.Builder();
      requestBuilder.addHeader(
          REQUEST_PROTOCOL_VERSION,
          rageConfig.getProtocolVersion().name().toLowerCase());
      requestBuilder.post(
          new RequestBody() {
            @Override
            public MediaType contentType() {
              return MediaType.parse("application/x-www-form-urlencoded");
            }

            @Override
            public void writeTo(BufferedSink bufferedSink) throws IOException {
              // Stream the zip straight into the request body instead of buffering it.
              writeReport(defectReport, bufferedSink.outputStream());
            }
          });

      HttpResponse response = httpService.makeRequest(
          rageConfig.getReportUploadPath(),
          requestBuilder);
      String responseBody;
      try (InputStream inputStream = response.getBody()) {
        responseBody = CharStreams.toString(new InputStreamReader(inputStream, Charsets.UTF_8));
      }
      if (response.statusCode() == HTTP_SUCCESS_CODE) {
        defectSubmitResult.setIsRequestSuccessful(true);
        if (rageConfig.getProtocolVersion().equals(AbstractRageConfig.RageProtocolVersion.SIMPLE)) {
          return defectSubmitResult
              .setReportSubmitMessage(responseBody)
              .setReportSubmitLocation(responseBody)
              .build();
        } else {
          // Decode Json response.
          RageJsonResponse json = ObjectMappers.READER.readValue(
              ObjectMappers.createParser(responseBody.getBytes(Charsets.UTF_8)),
              RageJsonResponse.class);
          return defectSubmitResult
              .setIsRequestSuccessful(json.getRequestSuccessful())
              .setReportSubmitErrorMessage(json.getErrorMessage())
              .setReportSubmitMessage(json.getMessage())
              .setReportSubmitLocation(json.getRageUrl())
              .build();
        }
      } else {
        throw new IOException(
            String.format(
                "Connection to %s returned code %d and message: %s",
                response.requestUrl(),
                response.statusCode(),
                responseBody));
      }
    } catch (IOException e) {
      // Preserve the original exception as the cause instead of flattening it to a message.
      throw new IOException(
          String.format("Failed uploading report because [%s].", e.getMessage()), e);
    } finally {
      httpService.close();
    }
  }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.packaging.impl.elements;
import com.intellij.CommonBundle;
import com.intellij.ide.util.ClassFilter;
import com.intellij.ide.util.TreeClassChooser;
import com.intellij.ide.util.TreeClassChooserFactory;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.compiler.make.ManifestBuilder;
import com.intellij.openapi.deployment.DeploymentUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.OrderEnumerator;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.packaging.artifacts.ArtifactType;
import com.intellij.packaging.elements.CompositePackagingElement;
import com.intellij.packaging.elements.PackagingElement;
import com.intellij.packaging.elements.PackagingElementFactory;
import com.intellij.packaging.elements.PackagingElementResolvingContext;
import com.intellij.packaging.impl.artifacts.ArtifactUtil;
import com.intellij.packaging.impl.artifacts.PackagingElementPath;
import com.intellij.packaging.impl.artifacts.PackagingElementProcessor;
import com.intellij.packaging.ui.ArtifactEditorContext;
import com.intellij.packaging.ui.ManifestFileConfiguration;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiClass;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiMethodUtil;
import com.intellij.util.PathUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
/**
* @author nik
*/
public class ManifestFileUtil {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.roots.ui.configuration.artifacts.ArtifactEditorContextImpl");
public static final String MANIFEST_PATH = JarFile.MANIFEST_NAME;
public static final String MANIFEST_FILE_NAME = PathUtil.getFileName(MANIFEST_PATH);
public static final String MANIFEST_DIR_NAME = PathUtil.getParentPath(MANIFEST_PATH);
private ManifestFileUtil() {
}
@Nullable
public static VirtualFile findManifestFile(@NotNull CompositePackagingElement<?> root, PackagingElementResolvingContext context, ArtifactType artifactType) {
return ArtifactUtil.findSourceFileByOutputPath(root, MANIFEST_PATH, context, artifactType);
}
@Nullable
public static VirtualFile suggestManifestFileDirectory(@NotNull CompositePackagingElement<?> root, PackagingElementResolvingContext context, ArtifactType artifactType) {
final VirtualFile metaInfDir = ArtifactUtil.findSourceFileByOutputPath(root, MANIFEST_DIR_NAME, context, artifactType);
if (metaInfDir != null) {
return metaInfDir;
}
final Ref<VirtualFile> sourceDir = Ref.create(null);
final Ref<VirtualFile> sourceFile = Ref.create(null);
ArtifactUtil.processElementsWithSubstitutions(root.getChildren(), context, artifactType, PackagingElementPath.EMPTY, new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> element, @NotNull PackagingElementPath path) {
if (element instanceof FileCopyPackagingElement) {
final VirtualFile file = ((FileCopyPackagingElement)element).findFile();
if (file != null) {
sourceFile.set(file);
}
}
else if (element instanceof DirectoryCopyPackagingElement) {
final VirtualFile file = ((DirectoryCopyPackagingElement)element).findFile();
if (file != null) {
sourceDir.set(file);
return false;
}
}
return true;
}
});
if (!sourceDir.isNull()) {
return sourceDir.get();
}
final Project project = context.getProject();
return suggestBaseDir(project, sourceFile.get());
}
@Nullable
public static VirtualFile suggestManifestFileDirectory(@NotNull Project project, @Nullable Module module) {
OrderEnumerator enumerator = module != null ? OrderEnumerator.orderEntries(module) : OrderEnumerator.orderEntries(project);
final VirtualFile[] files = enumerator.withoutDepModules().withoutLibraries().withoutSdk().productionOnly().sources().getRoots();
if (files.length > 0) {
return files[0];
}
return suggestBaseDir(project, null);
}
@Nullable
private static VirtualFile suggestBaseDir(@NotNull Project project, final @Nullable VirtualFile file) {
final VirtualFile[] contentRoots = ProjectRootManager.getInstance(project).getContentRoots();
if (file == null && contentRoots.length > 0) {
return contentRoots[0];
}
if (file != null) {
for (VirtualFile contentRoot : contentRoots) {
if (VfsUtil.isAncestor(contentRoot, file, false)) {
return contentRoot;
}
}
}
return project.getBaseDir();
}
public static Manifest readManifest(@NotNull VirtualFile manifestFile) {
try {
final InputStream inputStream = manifestFile.getInputStream();
final Manifest manifest;
try {
manifest = new Manifest(inputStream);
}
finally {
inputStream.close();
}
return manifest;
}
catch (IOException ignored) {
return new Manifest();
}
}
public static void updateManifest(@NotNull final VirtualFile file, final @Nullable String mainClass, final @Nullable List<String> classpath, final boolean replaceValues) {
final Manifest manifest = readManifest(file);
final Attributes mainAttributes = manifest.getMainAttributes();
if (mainClass != null) {
mainAttributes.put(Attributes.Name.MAIN_CLASS, mainClass);
}
else if (replaceValues) {
mainAttributes.remove(Attributes.Name.MAIN_CLASS);
}
if (classpath != null && !classpath.isEmpty()) {
List<String> updatedClasspath;
if (replaceValues) {
updatedClasspath = classpath;
}
else {
updatedClasspath = new ArrayList<>();
final String oldClasspath = (String)mainAttributes.get(Attributes.Name.CLASS_PATH);
if (!StringUtil.isEmpty(oldClasspath)) {
updatedClasspath.addAll(StringUtil.split(oldClasspath, " "));
}
for (String path : classpath) {
if (!updatedClasspath.contains(path)) {
updatedClasspath.add(path);
}
}
}
mainAttributes.put(Attributes.Name.CLASS_PATH, StringUtil.join(updatedClasspath, " "));
}
else if (replaceValues) {
mainAttributes.remove(Attributes.Name.CLASS_PATH);
}
ManifestBuilder.setVersionAttribute(mainAttributes);
ApplicationManager.getApplication().runWriteAction(() -> {
try {
final OutputStream outputStream = file.getOutputStream(ManifestFileUtil.class);
try {
manifest.write(outputStream);
}
finally {
outputStream.close();
}
}
catch (IOException e) {
LOG.info(e);
}
});
}
@NotNull
public static ManifestFileConfiguration createManifestFileConfiguration(@NotNull VirtualFile manifestFile) {
final String path = manifestFile.getPath();
Manifest manifest = readManifest(manifestFile);
String mainClass = manifest.getMainAttributes().getValue(Attributes.Name.MAIN_CLASS);
final String classpathText = manifest.getMainAttributes().getValue(Attributes.Name.CLASS_PATH);
final List<String> classpath = new ArrayList<>();
if (classpathText != null) {
classpath.addAll(StringUtil.split(classpathText, " "));
}
return new ManifestFileConfiguration(path, classpath, mainClass, manifestFile.isWritable());
}
public static List<String> getClasspathForElements(List<? extends PackagingElement<?>> elements, PackagingElementResolvingContext context, final ArtifactType artifactType) {
final List<String> classpath = new ArrayList<>();
final PackagingElementProcessor<PackagingElement<?>> processor = new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> element, @NotNull PackagingElementPath path) {
if (element instanceof FileCopyPackagingElement) {
final String fileName = ((FileCopyPackagingElement)element).getOutputFileName();
classpath.add(DeploymentUtil.appendToPath(path.getPathString(), fileName));
}
else if (element instanceof DirectoryCopyPackagingElement) {
classpath.add(path.getPathString());
}
else if (element instanceof ArchivePackagingElement) {
final String archiveName = ((ArchivePackagingElement)element).getName();
classpath.add(DeploymentUtil.appendToPath(path.getPathString(), archiveName));
}
return true;
}
};
for (PackagingElement<?> element : elements) {
ArtifactUtil.processPackagingElements(element, null, processor, context, true, artifactType);
}
return classpath;
}
@Nullable
public static VirtualFile showDialogAndCreateManifest(final ArtifactEditorContext context, final CompositePackagingElement<?> element) {
FileChooserDescriptor descriptor = createDescriptorForManifestDirectory();
final VirtualFile directory = suggestManifestFileDirectory(element, context, context.getArtifactType());
final VirtualFile file = FileChooser.chooseFile(descriptor, context.getProject(), directory);
if (file == null) {
return null;
}
return createManifestFile(file, context.getProject());
}
@Nullable
public static VirtualFile createManifestFile(final @NotNull VirtualFile directory, final @NotNull Project project) {
ApplicationManager.getApplication().assertIsDispatchThread();
final Ref<IOException> exc = Ref.create(null);
final VirtualFile file = new WriteAction<VirtualFile>() {
protected void run(@NotNull final Result<VirtualFile> result) {
VirtualFile dir = directory;
try {
if (!dir.getName().equals(MANIFEST_DIR_NAME)) {
dir = VfsUtil.createDirectoryIfMissing(dir, MANIFEST_DIR_NAME);
}
final VirtualFile file = dir.createChildData(this, MANIFEST_FILE_NAME);
final OutputStream output = file.getOutputStream(this);
try {
final Manifest manifest = new Manifest();
ManifestBuilder.setVersionAttribute(manifest.getMainAttributes());
manifest.write(output);
}
finally {
output.close();
}
result.setResult(file);
}
catch (IOException e) {
exc.set(e);
}
}
}.execute().getResultObject();
final IOException exception = exc.get();
if (exception != null) {
LOG.info(exception);
Messages.showErrorDialog(project, exception.getMessage(), CommonBundle.getErrorTitle());
return null;
}
return file;
}
public static FileChooserDescriptor createDescriptorForManifestDirectory() {
FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createSingleFolderDescriptor();
descriptor.setTitle("Select Directory for META-INF/MANIFEST.MF file");
return descriptor;
}
/**
 * Adds a file-copy of the manifest at {@code path} into the element's META-INF directory,
 * unless the layout already references a manifest with exactly that path.
 */
public static void addManifestFileToLayout(final @NotNull String path, final @NotNull ArtifactEditorContext context,
                                           final @NotNull CompositePackagingElement<?> element) {
  context.editLayout(context.getArtifact(), () -> {
    final VirtualFile existing = findManifestFile(element, context, context.getArtifactType());
    final boolean alreadyPresent = existing != null && FileUtil.pathsEqual(existing.getPath(), path);
    if (!alreadyPresent) {
      PackagingElementFactory.getInstance().addFileCopy(element, MANIFEST_DIR_NAME, path, MANIFEST_FILE_NAME);
    }
  });
}
/**
 * Shows a class chooser restricted to classes with a runnable {@code main} method.
 *
 * @param initialClassName fully-qualified name to pre-select, or {@code null}
 * @return the class the user picked, or {@code null} if the dialog was cancelled
 */
@Nullable
public static PsiClass selectMainClass(Project project, final @Nullable String initialClassName) {
  final GlobalSearchScope scope = GlobalSearchScope.allScope(project);
  // Resolve the caller-provided name so the chooser opens with it selected.
  PsiClass initialSelection = null;
  if (initialClassName != null) {
    initialSelection = JavaPsiFacade.getInstance(project).findClass(initialClassName, scope);
  }
  final TreeClassChooser chooser = TreeClassChooserFactory.getInstance(project)
      .createWithInnerClassesScopeChooser("Select Main Class", scope, new MainClassFilter(), initialSelection);
  chooser.showDialog();
  return chooser.getSelected();
}
/**
 * Wires the browse button of {@code field} to open the main-class chooser,
 * seeding it with the field's current text and writing back the chosen class name.
 */
public static void setupMainClassField(final Project project, final TextFieldWithBrowseButton field) {
  field.addActionListener(e -> {
    final PsiClass chosen = selectMainClass(project, field.getText());
    if (chosen != null) {
      field.setText(chosen.getQualifiedName());
    }
  });
}
/**
 * Accepts only classes that qualify as application entry points
 * (match {@code PsiMethodUtil.MAIN_CLASS} and have a main method).
 * PSI access is wrapped in a read action.
 */
private static class MainClassFilter implements ClassFilter {
  public boolean isAccepted(final PsiClass aClass) {
    return ApplicationManager.getApplication().runReadAction(
        (Computable<Boolean>) () -> PsiMethodUtil.MAIN_CLASS.value(aClass) && PsiMethodUtil.hasMainMethod(aClass));
  }
}
}
| |
package com.mentor.nucleus.bp.io.mdl.test.dangle;
import org.eclipse.core.runtime.NullProgressMonitor;
import com.mentor.nucleus.bp.core.AttributeReferenceInClass_c;
import com.mentor.nucleus.bp.core.Attribute_c;
import com.mentor.nucleus.bp.core.BridgeParameter_c;
import com.mentor.nucleus.bp.core.Bridge_c;
import com.mentor.nucleus.bp.core.ClassIdentifierAttribute_c;
import com.mentor.nucleus.bp.core.ClassIdentifier_c;
import com.mentor.nucleus.bp.core.DataType_c;
import com.mentor.nucleus.bp.core.FunctionParameter_c;
import com.mentor.nucleus.bp.core.Function_c;
import com.mentor.nucleus.bp.core.ImportedClass_c;
import com.mentor.nucleus.bp.core.ModelClass_c;
import com.mentor.nucleus.bp.core.OperationParameter_c;
import com.mentor.nucleus.bp.core.Operation_c;
import com.mentor.nucleus.bp.core.ReferentialAttribute_c;
import com.mentor.nucleus.bp.core.ReferredToClassInAssoc_c;
import com.mentor.nucleus.bp.core.ReferredToIdentifierAttribute_c;
import com.mentor.nucleus.bp.core.StateMachineEventDataItem_c;
import com.mentor.nucleus.bp.core.UserDataType_c;
import com.mentor.nucleus.bp.test.TestUtil;
//=====================================================================
//
//File: $RCSfile: DanglingReferenceTest.java,v $
//Version: $Revision: 1.14 $
//Modified: $Date: 2013/01/10 23:13:31 $
//
//(c) Copyright 2004-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
/**
 * Tests removal of dangling-reference markers via three strategies supplied by
 * {@code DRBaseTest}: replacing the deleted element with history, replacing the
 * dangling element with another element, and deleting the referring (RGO) element.
 * Each test looks up a model element by name, resolves the user data type it
 * references, dismisses the expected dialog, then runs the corresponding check.
 */
public class DanglingReferenceTest extends DRBaseTest {
    public DanglingReferenceTest(String name) {
        super(name);
    }
    // /remove dangling marker by replace with history
    public void testDanglingFunctionParam() throws Exception {
        FunctionParameter_c param = FunctionParameter_c
                .FunctionParameterInstance(modelRoot,
                        new FunctionParam_by_name_c("FParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR26(param));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(param, dt);
    }
    public void testDanglingFunctionReturnType() throws Exception {
        Function_c func = Function_c.FunctionInstance(modelRoot,
                new Function_by_name_c("fFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR25(func));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(func, dt);
    }
    public void testDanglingBridgeParam() throws Exception {
        BridgeParameter_c me = BridgeParameter_c.BridgeParameterInstance(
                modelRoot, new BridgeParam_by_name_c("BParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR22(me));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(me, dt);
    }
    public void testDanglingBridgeReturnType() throws Exception {
        Bridge_c me = Bridge_c.BridgeInstance(modelRoot, new Bridge_by_name_c(
                "bFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR20(me));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(me, dt);
    }
    public void testDanglingOperationParam() throws Exception {
        OperationParameter_c me = OperationParameter_c
                .OperationParameterInstance(modelRoot,
                        new OperationParam_by_name_c("ZParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR118(me));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(me, dt);
    }
    public void testDanglingOperationReturnType() throws Exception {
        Operation_c me = Operation_c.OperationInstance(modelRoot,
                new Operation_by_name_c("zFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR116(me));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(me, dt);
    }
    public void testDanglingEvtDataItem() throws Exception {
        StateMachineEventDataItem_c me = StateMachineEventDataItem_c
                .StateMachineEventDataItemInstance(modelRoot,
                        new StateMachineEventDataItem_by_name_c("State"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR524(me));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(me, dt);
    }
    public void testDanglingAttributeDataType() throws Exception {
        Attribute_c me = Attribute_c.AttributeInstance(modelRoot,
                new Attribute_by_name_c("A_Attr"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR114(me));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithHistory(me, dt);
    }
    public void testDanglingImportedClass() throws Exception {
        ImportedClass_c me = ImportedClass_c.ImportedClassInstance(modelRoot,
                new ImportedClass_by_name_c("Z"));
        ModelClass_c mc = ModelClass_c.getOneO_OBJOnR101(me);
        // TODO - when I run with this, the dialog never shows. Not sure why this
        // doesn't work. But it would be great to have the dialog automatically dismissed.
        TestUtil.selectButtonInDialog(7000, "&Yes");
        performDRCheckByReplaceWithHistory(me, mc);
        checkoutProject = true;
    }
    // /remove dangling marker by replacing dangling element with other element
    public void testDanglingEvtDataItemReplace() throws Exception {
        StateMachineEventDataItem_c me = StateMachineEventDataItem_c
                .StateMachineEventDataItemInstance(modelRoot,
                        new StateMachineEventDataItem_by_name_c("State"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR524(me));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(me, dt, newDT);
    }
    public void testDanglingAttributeReplace() throws Exception {
        ModelClass_c mc = ModelClass_c.ModelClassInstance(modelRoot,
                new ModelClass_by_name_c("A"));
        Attribute_c me = Attribute_c.getOneO_ATTROnR102(mc,
                new Attribute_by_name_c("A_Attr"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR114(me));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(me, dt, newDT);
    }
    public void testDanglingFunctionParamReplace() throws Exception {
        FunctionParameter_c me = FunctionParameter_c.FunctionParameterInstance(
                modelRoot, new FunctionParam_by_name_c("FParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR26(me));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(me, dt, newDT);
    }
    public void testDanglingFunctionReturnTypeReplace() throws Exception {
        Function_c func = Function_c.FunctionInstance(modelRoot,
                new Function_by_name_c("fFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR25(func));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(func, dt, newDT);
    }
    public void testDanglingBridgeParamReplace() throws Exception {
        BridgeParameter_c me = BridgeParameter_c.BridgeParameterInstance(
                modelRoot, new BridgeParam_by_name_c("BParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR22(me));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(me, dt, newDT);
    }
    public void testDanglingBridgeReturnTypeReplace() throws Exception {
        Bridge_c me = Bridge_c.BridgeInstance(modelRoot, new Bridge_by_name_c(
                "bFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR20(me));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(me, dt, newDT);
    }
    public void testDanglingOperationParamReplace() throws Exception {
        OperationParameter_c me = OperationParameter_c
                .OperationParameterInstance(modelRoot,
                        new OperationParam_by_name_c("ZParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR118(me));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(me, dt, newDT);
    }
    public void testDanglingOperationReturnTypeReplace() throws Exception {
        Operation_c me = Operation_c.OperationInstance(modelRoot,
                new Operation_by_name_c("zFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR116(me));
        DataType_c newDT = DataType_c.DataTypeInstance(modelRoot,
                new Datatype_by_name_c("boolean"));
        TestUtil.okToDialog(500);
        performDRCheckByReplaceWithElement(me, dt, newDT);
        checkoutProject = true;
    }
    // /remove dangling marker by removing RGO
    public void testDanglingEvtDataItemRemoveRGO() throws Exception {
        StateMachineEventDataItem_c me = StateMachineEventDataItem_c
                .StateMachineEventDataItemInstance(modelRoot,
                        new StateMachineEventDataItem_by_name_c("State"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR524(me));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(me, dt);
    }
    public void testDanglingAttributeRemoveRGO() throws Exception {
        Attribute_c me = Attribute_c.AttributeInstance(modelRoot,
                new Attribute_by_name_c("A_Attr"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR114(me));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(me, dt);
    }
    public void testDanglingFunctionParamRemoveRGO() throws Exception {
        // FIX: previously this test looked up the function (R25, return type) while
        // the ReturnType test below used the parameter (R26) - a copy-paste swap.
        // It now targets the function parameter, matching its name and the pattern
        // of testDanglingFunctionParam above.
        FunctionParameter_c me = FunctionParameter_c.FunctionParameterInstance(
                modelRoot, new FunctionParam_by_name_c("FParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR26(me));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(me, dt);
    }
    public void testDanglingFunctionReturnTypeRemoveRGO() throws Exception {
        // FIX: now targets the function's return type (R25), matching its name.
        Function_c func = Function_c.FunctionInstance(modelRoot,
                new Function_by_name_c("fFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR25(func));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(func, dt);
    }
    public void testDanglingBridgeParamRemoveRGO() throws Exception {
        BridgeParameter_c me = BridgeParameter_c.BridgeParameterInstance(
                modelRoot, new BridgeParam_by_name_c("BParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR22(me));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(me, dt);
    }
    public void testDanglingBridgeReturnTypeRemoveRGO() throws Exception {
        Bridge_c me = Bridge_c.BridgeInstance(modelRoot, new Bridge_by_name_c(
                "bFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR20(me));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(me, dt);
    }
    public void testDanglingOperationParamRemoveRGO() throws Exception {
        OperationParameter_c me = OperationParameter_c
                .OperationParameterInstance(modelRoot,
                        new OperationParam_by_name_c("ZParam"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR118(me));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(me, dt);
    }
    public void testDanglingOperationReturnTypeRemoveRGO() throws Exception {
        Operation_c me = Operation_c.OperationInstance(modelRoot,
                new Operation_by_name_c("zFunc"));
        UserDataType_c dt = UserDataType_c.getOneS_UDTOnR17(DataType_c
                .getOneS_DTOnR116(me));
        TestUtil.okToDialog(500);
        performDRCheckByDeletingRGO(me, dt);
        // Last test in the suite: clean up the workspace project.
        project.delete(true, true, new NullProgressMonitor());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.nar;
import org.apache.nifi.annotation.behavior.RequiresInstanceClassLoading;
import org.apache.nifi.authentication.LoginIdentityProvider;
import org.apache.nifi.authorization.Authorizer;
import org.apache.nifi.bundle.Bundle;
import org.apache.nifi.bundle.BundleCoordinate;
import org.apache.nifi.bundle.BundleDetails;
import org.apache.nifi.components.ConfigurableComponent;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.state.StateProvider;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.controller.repository.ContentRepository;
import org.apache.nifi.controller.repository.FlowFileRepository;
import org.apache.nifi.controller.repository.FlowFileSwapManager;
import org.apache.nifi.controller.status.history.ComponentStatusRepository;
import org.apache.nifi.flowfile.FlowFilePrioritizer;
import org.apache.nifi.init.ConfigurableComponentInitializer;
import org.apache.nifi.init.ConfigurableComponentInitializerFactory;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.provenance.ProvenanceRepository;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.reporting.ReportingTask;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* Scans through the classpath to load all FlowFileProcessors, FlowFileComparators, and ReportingTasks using the service provider API and running through all classloaders (root, NARs).
*
* @ThreadSafe - is immutable
*/
@SuppressWarnings("rawtypes")
public class ExtensionManager {
private static final Logger logger = LoggerFactory.getLogger(ExtensionManager.class);
public static final BundleCoordinate SYSTEM_BUNDLE_COORDINATE = new BundleCoordinate(
BundleCoordinate.DEFAULT_GROUP, "system", BundleCoordinate.DEFAULT_VERSION);
// Maps a service definition (interface) to those classes that implement the interface
private static final Map<Class, Set<Class>> definitionMap = new HashMap<>();
private static final Map<String, List<Bundle>> classNameBundleLookup = new HashMap<>();
private static final Map<BundleCoordinate, Bundle> bundleCoordinateBundleLookup = new HashMap<>();
private static final Map<ClassLoader, Bundle> classLoaderBundleLookup = new HashMap<>();
private static final Set<String> requiresInstanceClassLoading = new HashSet<>();
private static final Map<String, ClassLoader> instanceClassloaderLookup = new ConcurrentHashMap<>();
static {
definitionMap.put(Processor.class, new HashSet<>());
definitionMap.put(FlowFilePrioritizer.class, new HashSet<>());
definitionMap.put(ReportingTask.class, new HashSet<>());
definitionMap.put(ControllerService.class, new HashSet<>());
definitionMap.put(Authorizer.class, new HashSet<>());
definitionMap.put(LoginIdentityProvider.class, new HashSet<>());
definitionMap.put(ProvenanceRepository.class, new HashSet<>());
definitionMap.put(ComponentStatusRepository.class, new HashSet<>());
definitionMap.put(FlowFileRepository.class, new HashSet<>());
definitionMap.put(FlowFileSwapManager.class, new HashSet<>());
definitionMap.put(ContentRepository.class, new HashSet<>());
definitionMap.put(StateProvider.class, new HashSet<>());
}
/**
* Loads all FlowFileProcessor, FlowFileComparator, ReportingTask class types that can be found on the bootstrap classloader and by creating classloaders for all NARs found within the classpath.
* @param narBundles the bundles to scan through in search of extensions
*/
public static void discoverExtensions(final Bundle systemBundle, final Set<Bundle> narBundles) {
// get the current context class loader
ClassLoader currentContextClassLoader = Thread.currentThread().getContextClassLoader();
// load the system bundle first so that any extensions found in JARs directly in lib will be registered as
// being from the system bundle and not from all the other NARs
loadExtensions(systemBundle);
bundleCoordinateBundleLookup.put(systemBundle.getBundleDetails().getCoordinate(), systemBundle);
// consider each nar class loader
for (final Bundle bundle : narBundles) {
// Must set the context class loader to the nar classloader itself
// so that static initialization techniques that depend on the context class loader will work properly
final ClassLoader ncl = bundle.getClassLoader();
Thread.currentThread().setContextClassLoader(ncl);
loadExtensions(bundle);
// Create a look-up from coordinate to bundle
bundleCoordinateBundleLookup.put(bundle.getBundleDetails().getCoordinate(), bundle);
}
// restore the current context class loader if appropriate
if (currentContextClassLoader != null) {
Thread.currentThread().setContextClassLoader(currentContextClassLoader);
}
}
/**
* Returns a bundle representing the system class loader.
*
* @param niFiProperties a NiFiProperties instance which will be used to obtain the default NAR library path,
* which will become the working directory of the returned bundle
* @return a bundle for the system class loader
*/
public static Bundle createSystemBundle(final NiFiProperties niFiProperties) {
final ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader();
final String narLibraryDirectory = niFiProperties.getProperty(NiFiProperties.NAR_LIBRARY_DIRECTORY);
if (StringUtils.isBlank(narLibraryDirectory)) {
throw new IllegalStateException("Unable to create system bundle because " + NiFiProperties.NAR_LIBRARY_DIRECTORY + " was null or empty");
}
final BundleDetails systemBundleDetails = new BundleDetails.Builder()
.workingDir(new File(narLibraryDirectory))
.coordinate(SYSTEM_BUNDLE_COORDINATE)
.build();
return new Bundle(systemBundleDetails, systemClassLoader);
}
/**
* Loads extensions from the specified bundle.
*
* @param bundle from which to load extensions
*/
@SuppressWarnings("unchecked")
private static void loadExtensions(final Bundle bundle) {
for (final Map.Entry<Class, Set<Class>> entry : definitionMap.entrySet()) {
final boolean isControllerService = ControllerService.class.equals(entry.getKey());
final boolean isProcessor = Processor.class.equals(entry.getKey());
final boolean isReportingTask = ReportingTask.class.equals(entry.getKey());
final ServiceLoader<?> serviceLoader = ServiceLoader.load(entry.getKey(), bundle.getClassLoader());
for (final Object o : serviceLoader) {
// only consider extensions discovered directly in this bundle
boolean registerExtension = bundle.getClassLoader().equals(o.getClass().getClassLoader());
if (registerExtension) {
final Class extensionType = o.getClass();
if (isControllerService && !checkControllerServiceEligibility(extensionType)) {
registerExtension = false;
logger.error(String.format(
"Skipping Controller Service %s because it is bundled with its supporting APIs and requires instance class loading.", extensionType.getName()));
}
final boolean canReferenceControllerService = (isControllerService || isProcessor || isReportingTask) && o instanceof ConfigurableComponent;
if (canReferenceControllerService && !checkControllerServiceReferenceEligibility((ConfigurableComponent) o, bundle.getClassLoader())) {
registerExtension = false;
logger.error(String.format(
"Skipping component %s because it is bundled with its referenced Controller Service APIs and requires instance class loading.", extensionType.getName()));
}
if (registerExtension) {
registerServiceClass(o.getClass(), classNameBundleLookup, bundle, entry.getValue());
}
}
}
classLoaderBundleLookup.put(bundle.getClassLoader(), bundle);
}
}
private static boolean checkControllerServiceReferenceEligibility(final ConfigurableComponent component, final ClassLoader classLoader) {
// if the extension does not require instance classloading, its eligible
final boolean requiresInstanceClassLoading = component.getClass().isAnnotationPresent(RequiresInstanceClassLoading.class);
ConfigurableComponentInitializer initializer = null;
try {
initializer = ConfigurableComponentInitializerFactory.createComponentInitializer(component.getClass());
initializer.initialize(component);
final Set<Class> cobundledApis = new HashSet<>();
try (final NarCloseable closeable = NarCloseable.withComponentNarLoader(component.getClass().getClassLoader())) {
final List<PropertyDescriptor> descriptors = component.getPropertyDescriptors();
if (descriptors != null && !descriptors.isEmpty()) {
for (final PropertyDescriptor descriptor : descriptors) {
final Class<? extends ControllerService> serviceApi = descriptor.getControllerServiceDefinition();
if (serviceApi != null && classLoader.equals(serviceApi.getClassLoader())) {
cobundledApis.add(serviceApi);
}
}
}
}
if (!cobundledApis.isEmpty()) {
logger.warn(String.format(
"Component %s is bundled with its referenced Controller Service APIs %s. The service APIs should not be bundled with component implementations that reference it.",
component.getClass().getName(), StringUtils.join(cobundledApis.stream().map(cls -> cls.getName()).collect(Collectors.toSet()), ", ")));
}
// the component is eligible when it does not require instance classloading or when the supporting APIs are bundled in a parent NAR
return requiresInstanceClassLoading == false || cobundledApis.isEmpty();
} catch (final InitializationException e) {
logger.warn(String.format("Unable to verify if component %s references any bundled Controller Service APIs due to %s", component.getClass().getName(), e.getMessage()));
return true;
} finally {
if (initializer != null) {
initializer.teardown(component);
}
}
}
private static boolean checkControllerServiceEligibility(Class extensionType) {
final Class originalExtensionType = extensionType;
final ClassLoader originalExtensionClassLoader = extensionType.getClassLoader();
// if the extension does not require instance classloading, its eligible
final boolean requiresInstanceClassLoading = extensionType.isAnnotationPresent(RequiresInstanceClassLoading.class);
final Set<Class> cobundledApis = new HashSet<>();
while (extensionType != null) {
for (final Class i : extensionType.getInterfaces()) {
if (originalExtensionClassLoader.equals(i.getClassLoader())) {
cobundledApis.add(i);
}
}
extensionType = extensionType.getSuperclass();
}
if (!cobundledApis.isEmpty()) {
logger.warn(String.format("Controller Service %s is bundled with its supporting APIs %s. The service APIs should not be bundled with the implementations.",
originalExtensionType.getName(), StringUtils.join(cobundledApis.stream().map(cls -> cls.getName()).collect(Collectors.toSet()), ", ")));
}
// the service is eligible when it does not require instance classloading or when the supporting APIs are bundled in a parent NAR
return requiresInstanceClassLoading == false || cobundledApis.isEmpty();
}
/**
* Registers extension for the specified type from the specified Bundle.
*
* @param type the extension type
* @param classNameBundleMap mapping of classname to Bundle
* @param bundle the Bundle being mapped to
* @param classes to map to this classloader but which come from its ancestors
*/
private static void registerServiceClass(final Class<?> type, final Map<String, List<Bundle>> classNameBundleMap, final Bundle bundle, final Set<Class> classes) {
final String className = type.getName();
// get the bundles that have already been registered for the class name
List<Bundle> registeredBundles = classNameBundleMap.get(className);
if (registeredBundles == null) {
registeredBundles = new ArrayList<>();
classNameBundleMap.put(className, registeredBundles);
}
boolean alreadyRegistered = false;
for (final Bundle registeredBundle : registeredBundles) {
final BundleCoordinate registeredCoordinate = registeredBundle.getBundleDetails().getCoordinate();
// if the incoming bundle has the same coordinate as one of the registered bundles then consider it already registered
if (registeredCoordinate.equals(bundle.getBundleDetails().getCoordinate())) {
alreadyRegistered = true;
break;
}
// if the type wasn't loaded from an ancestor, and the type isn't a processor, cs, or reporting task, then
// fail registration because we don't support multiple versions of any other types
if (!multipleVersionsAllowed(type)) {
throw new IllegalStateException("Attempt was made to load " + className + " from "
+ bundle.getBundleDetails().getCoordinate().getCoordinate()
+ " but that class name is already loaded/registered from " + registeredBundle.getBundleDetails().getCoordinate()
+ " and multiple versions are not supported for this type"
);
}
}
// if none of the above was true then register the new bundle
if (!alreadyRegistered) {
registeredBundles.add(bundle);
classes.add(type);
if (type.isAnnotationPresent(RequiresInstanceClassLoading.class)) {
requiresInstanceClassLoading.add(className);
}
}
}
/**
* @param type a Class that we found from a service loader
* @return true if the given class is a processor, controller service, or reporting task
*/
private static boolean multipleVersionsAllowed(Class<?> type) {
return Processor.class.isAssignableFrom(type) || ControllerService.class.isAssignableFrom(type) || ReportingTask.class.isAssignableFrom(type);
}
/**
* Determines the effective ClassLoader for the instance of the given type.
*
* @param classType the type of class to lookup the ClassLoader for
* @param instanceIdentifier the identifier of the specific instance of the classType to look up the ClassLoader for
* @param bundle the bundle where the classType exists
* @return the ClassLoader for the given instance of the given type, or null if the type is not a detected extension type
*/
public static ClassLoader createInstanceClassLoader(final String classType, final String instanceIdentifier, final Bundle bundle) {
if (StringUtils.isEmpty(classType)) {
throw new IllegalArgumentException("Class-Type is required");
}
if (StringUtils.isEmpty(instanceIdentifier)) {
throw new IllegalArgumentException("Instance Identifier is required");
}
if (bundle == null) {
throw new IllegalArgumentException("Bundle is required");
}
final ClassLoader bundleClassLoader = bundle.getClassLoader();
// If the class is annotated with @RequiresInstanceClassLoading and the registered ClassLoader is a URLClassLoader
// then make a new InstanceClassLoader that is a full copy of the NAR Class Loader, otherwise create an empty
// InstanceClassLoader that has the NAR ClassLoader as a parent
ClassLoader instanceClassLoader;
if (requiresInstanceClassLoading.contains(classType) && (bundleClassLoader instanceof URLClassLoader)) {
final URLClassLoader registeredUrlClassLoader = (URLClassLoader) bundleClassLoader;
instanceClassLoader = new InstanceClassLoader(instanceIdentifier, classType, registeredUrlClassLoader.getURLs(), registeredUrlClassLoader.getParent());
} else {
instanceClassLoader = new InstanceClassLoader(instanceIdentifier, classType, new URL[0], bundleClassLoader);
}
instanceClassloaderLookup.put(instanceIdentifier, instanceClassLoader);
return instanceClassLoader;
}
/**
* Retrieves the InstanceClassLoader for the component with the given identifier.
*
* @param instanceIdentifier the identifier of a component
* @return the instance class loader for the component
*/
public static ClassLoader getInstanceClassLoader(final String instanceIdentifier) {
return instanceClassloaderLookup.get(instanceIdentifier);
}
/**
* Removes the ClassLoader for the given instance and closes it if necessary.
*
* @param instanceIdentifier the identifier of a component to remove the ClassLoader for
* @return the removed ClassLoader for the given instance, or null if not found
*/
public static ClassLoader removeInstanceClassLoaderIfExists(final String instanceIdentifier) {
if (instanceIdentifier == null) {
return null;
}
final ClassLoader classLoader = instanceClassloaderLookup.remove(instanceIdentifier);
if (classLoader != null && (classLoader instanceof URLClassLoader)) {
final URLClassLoader urlClassLoader = (URLClassLoader) classLoader;
try {
urlClassLoader.close();
} catch (IOException e) {
logger.warn("Unable to class URLClassLoader for " + instanceIdentifier);
}
}
return classLoader;
}
/**
* Checks if the given class type requires per-instance class loading (i.e. contains the @RequiresInstanceClassLoading annotation)
*
* @param classType the class to check
* @return true if the class is found in the set of classes requiring instance level class loading, false otherwise
*/
public static boolean requiresInstanceClassLoading(final String classType) {
if (classType == null) {
throw new IllegalArgumentException("Class type cannot be null");
}
return requiresInstanceClassLoading.contains(classType);
}
/**
* Retrieves the bundles that have a class with the given name.
*
* @param classType the class name of an extension
* @return the list of bundles that contain an extension with the given class name
*/
public static List<Bundle> getBundles(final String classType) {
if (classType == null) {
throw new IllegalArgumentException("Class type cannot be null");
}
final List<Bundle> bundles = classNameBundleLookup.get(classType);
return bundles == null ? Collections.emptyList() : new ArrayList<>(bundles);
}
/**
 * Retrieves the bundle with the given coordinate.
 *
 * @param bundleCoordinate a coordinate to look up
 * @return the bundle with the given coordinate, or null if none exists
 * @throws IllegalArgumentException if the coordinate is null
 */
public static Bundle getBundle(final BundleCoordinate bundleCoordinate) {
    if (bundleCoordinate != null) {
        return bundleCoordinateBundleLookup.get(bundleCoordinate);
    }
    throw new IllegalArgumentException("BundleCoordinate cannot be null");
}
/**
 * Retrieves the bundle for the given class loader.
 *
 * @param classLoader the class loader to look up the bundle for
 * @return the bundle for the given class loader, or null if the loader is unknown
 * @throws IllegalArgumentException if the class loader is null
 */
public static Bundle getBundle(final ClassLoader classLoader) {
    if (classLoader != null) {
        return classLoaderBundleLookup.get(classLoader);
    }
    throw new IllegalArgumentException("ClassLoader cannot be null");
}
/**
 * Retrieves the set of extension classes registered for the given definition type.
 *
 * @param definition the extension definition (e.g. a service interface) to look up
 * @return the registered extension classes, or an empty set if none are registered
 * @throws IllegalArgumentException if the definition is null
 */
public static Set<Class> getExtensions(final Class<?> definition) {
    if (definition == null) {
        throw new IllegalArgumentException("Class cannot be null");
    }
    final Set<Class> registered = definitionMap.get(definition);
    if (registered == null) {
        return Collections.<Class>emptySet();
    }
    return registered;
}
/**
 * Logs, at INFO level, every registered extension type grouped by its definition
 * class, together with the coordinate and working directory of each bundle that
 * provides it.
 */
public static void logClassLoaderMapping() {
    final StringBuilder mapping = new StringBuilder("Extension Type Mapping to Bundle:");
    for (final Map.Entry<Class, Set<Class>> definitionEntry : definitionMap.entrySet()) {
        final String definitionName = definitionEntry.getKey().getSimpleName();
        mapping.append("\n\t=== ").append(definitionName).append(" Type ===");
        for (final Class extensionType : definitionEntry.getValue()) {
            mapping.append("\n\t").append(extensionType.getName());
            // An extension class may not be present in the lookup; treat that as "no bundles".
            final List<Bundle> owningBundles = classNameBundleLookup.containsKey(extensionType.getName())
                    ? classNameBundleLookup.get(extensionType.getName()) : Collections.emptyList();
            for (final Bundle bundle : owningBundles) {
                mapping.append("\n\t\t")
                        .append(bundle.getBundleDetails().getCoordinate().getCoordinate())
                        .append(" || ")
                        .append(bundle.getBundleDetails().getWorkingDirectory().getPath());
            }
        }
        mapping.append("\n\t=== End ").append(definitionName).append(" types ===");
    }
    logger.info(mapping.toString());
}
}
| |
package com.fsck.k9.mail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import com.fsck.k9.mail.internet.BinaryTempFileBody;
import com.fsck.k9.mail.internet.MimeMessage;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.spongycastle.openpgp.PGPCompressedData;
import org.spongycastle.openpgp.PGPException;
import org.spongycastle.openpgp.PGPObjectFactory;
import org.spongycastle.openpgp.PGPPublicKey;
import org.spongycastle.openpgp.PGPPublicKeyRingCollection;
import org.spongycastle.openpgp.PGPSignature;
import org.spongycastle.openpgp.PGPSignatureList;
import org.spongycastle.openpgp.PGPUtil;
import org.spongycastle.openpgp.bc.BcPGPObjectFactory;
import org.spongycastle.openpgp.bc.BcPGPPublicKeyRingCollection;
import org.spongycastle.openpgp.operator.bc.BcPGPContentVerifierBuilderProvider;
import static junit.framework.Assert.assertTrue;
@RunWith(AndroidJUnit4.class)
public class PgpMimeMessageTest {
    /**
     * ASCII-armored OpenPGP public key of the signer; used by
     * {@link #verifySignature(byte[], byte[])} to check the detached signature.
     */
    private static final String PUBLIC_KEY = "-----BEGIN PGP PUBLIC KEY BLOCK-----\n" +
            "Version: GnuPG v1\n" +
            "\n" +
            "mQINBE49+OsBEADIu2zVIYllkqLYaCZq2d8r80titzegJiXTaW8fRS0FKGE7KmNt\n" +
            "tWvWdiyLqvWlP4Py9OZPmEBdz8AaPxqCFmVZfJimf28CW0wz2sRCYmmbQqaHFfpD\n" +
            "rK+EJofckOu2j81coaFVLbvkvUNhWU7/DKyv4+EBFt9fjxptbfpNKttwI0aeUVCa\n" +
            "+Z/m18+OLpeE33BXd5POrBb4edAlMCwKk8m4nDXJ3B+KmR0qfCLB79gqEjsDLl+y\n" +
            "65NcRk5uxIk53NRXHkmQujX1bsf5VFLha4KbUaB7BCtcSi1rY99WXfO/PWzTelOh\n" +
            "pKDIRq+v3Kl21TipY0t4kco4AUlIx5b1F0EHPpmIDr0gEheZBali5c9wUR8czc/H\n" +
            "aNkRP81hTPeBtUqp1S7GtJfcuWv6dyfBBVlnev98PCKOJo05meVwf3hkOLrciTfo\n" +
            "1yuy/9hF18u3GhL8HLrxMQksLhD6sPzDto4jJQDxKAa7v9aLoR7oIdeWkn1TU61E\n" +
            "ODR/254BRMoq619hqJwSNt6yOjGT2BBvlwbKdS8Xfw7SsBGGW8WnVJrqFCusfjSm\n" +
            "DBdV/KWstRnOMqw4nhAwNFfXmAL2L8a+rLHxalFggfGcvVpzDhJyTg+/R1y3JMCo\n" +
            "FfdFuhOTfkMqjGx8FgTmINOt54Wf9Xg6W0hQh3i98Wza3n8NuSPQJtAdqQARAQAB\n" +
            "tBVja2V0dGkgPGNrQGNrZXR0aS5kZT6JAhwEEAECAAYFAk+6naAACgkQctTBoSHq\n" +
            "3aHS+g/+MNxxfoEK+zopjWgvJmigOvejIpBWsLYJOJrpOgQuA61dQnQg0eLXPMDc\n" +
            "xQTrPtIlkn7idtLbaG2FScheOS0RdApL8UJTiU18dzjHUWsLLhEFhOAgw/kqcdG0\n" +
            "A95apNucybWU9jxynN9arxU6U+HZ67/JKxRjfdPxm+CmjiQwFPU9d6kGU/D08y58\n" +
            "1VIn7IopHlbqOYRuQcX0p6Q642oRBp4b6+ggov21mgqscKe/eBQ8yUxf61eywLbb\n" +
            "On63fkF1vl/RvsVcOnxcPLxUH4vmhuGPJ546RN7CCNjVF0QvuH9R8dnxS7/+rLe7\n" +
            "BVtZ/8sAy9r8LvnehZWVww4Wo9haVQxB69+ns63lEb+dzbBmsKbGvQ98S/Hs62Wj\n" +
            "nkMy7k+xzoRMa7tbKEtwwppxJVVSW//CVvEsS7DyaZna0udLh16MBCbMDzfAa3T4\n" +
            "PmgQPmV1BeysHcFOn3p6p2ZRcQGEdvMBYUjqxxExstwZEY8nGagvG7j5YCJKzBNY\n" +
            "xdBwkHXU3R3iM9o4aCKBsG2DMGHyhkHJXuGv9jFM32tAAf36qUJZ9eTKtoUt4xGt\n" +
            "LuxgnkS830c7nZBfJARro75SDG9eew91u2aIDGO3aNXeOODrYl2KOWbpXg/NJDwS\n" +
            "mlUZdwInb0PL6EDij1NtDiap2sIBKxtDjAeilS6vwS8s2P9HZdqJAkEEEwECACsC\n" +
            "GyMFCRLMAwAGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJOPftbAhkBAAoJEO4v\n" +
            "7zp9qOKJG+oP/RBN5ahJCpwrk7U05J8x7UOPuP4UElMYoPYZCSp55mH6Xmr2C626\n" +
            "DvTxhElz1WY7oIOJ7Mgp1RtGqZYV52d6fER10jowGbSkiFTvKb4PhQl4+AcGODMY\n" +
            "LRVBw90rRhDSXzBQMeiyUf7Wse1jPsBfuOe6V1TsAtqjaAzrUDQOcsjQqW5ezvIj\n" +
            "GNTFunX6wMUHzSBX6Lh0fLAp5ICp+l3agJ8S41Y4tSuFVil2IRX3o4vqxvU4f0C+\n" +
            "KDIeJriLAMHajUp0V6VdisRHujjoTkZAGogJhNmNg0YH191a7AAKvVePgMQ/fsoW\n" +
            "1hm9afwth/HOKvMx8fgKMwkn004V/to7qHByWDND33rgwlv1LYuvumEFd/paIABh\n" +
            "dLhC6o6moVzwlOqhGfoD8DZAIzNCS4q2uCg8ik4temetPbCc5wMFtd+FO+FOb1tO\n" +
            "/RahWeBfULreEijnv/zUZPetkJV9jTZXgXqCI9GCf6MTJrOLZ+G3hVxFyyHTKlWt\n" +
            "iIzJHlX9rd3oQc7YJbdDFMZA+SdlGqiGdsjBmq0kcRqhhEa5QsnoNm9tuPuFnL5o\n" +
            "GG7OFPztj9tr9ViRvsFBlx9jvmjRbRNF3287j1r+4lbGigsA1o8bRkLLXVSK1gCw\n" +
            "bOLAPNJYH5bde6O+Qb8bepg9TByiohsFssxYXHwbgu/pcCMU1hCf15t4iQI+BBMB\n" +
            "AgAoBQJOPfr+AhsjBQkSzAMABgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRDu\n" +
            "L+86fajiic/5EACHIaprlic0VKeV1H564KionZd7y3i3mX+e7Mdkd9QBFkb14UBw\n" +
            "3RFnQhvq1MtaAC1lIYACYdIMF6/8LB1WQjB7kyt0IHbjEyodBVHq3U9n+mt+ZFy3\n" +
            "6loA2r1odFJIaUWA2jBlBhtd3AQriANv0yciv4dPqPQfeAR5GxDiRbzGP1FZ47To\n" +
            "PXZDHY9EKwaXo4q5D7XHzQy2aFe0IVUzXnofSE2KP9bu/wUU2DjZJ4cVXFdGFv5D\n" +
            "xQ48UgXfhmPXSx1eeElDWdZHhH8BI7DOL66+FKm9PLiDYHUuVTvPxFSppu/+Gw5p\n" +
            "gqDBwWEeKtJ1Yf3a5Vvbt+EK8BgC1/KaqY7A++dD2vM7w8PIKcf57WXF4O6KkIiW\n" +
            "0M36eoAqAyuwqeTh3+mCWewegQBS2wORBYipbDf9OPTj/fsyCkaaXM2/wee79m+W\n" +
            "+/67HVYlpIJPIKJIGs1N0PTl8WYZdaMLSL7nU/y3j51ytdidiKvRWl5X3MaCpp07\n" +
            "T8MSogntMxXLU2zEnUqJjykXVpavFfXi1piw98qd+5wKMwiGLRq52z73N+q5nWk+\n" +
            "5B2gqA3soXvloxXmoVuoTZDSnTjuQZk1kVl2XA+enE5rjVzpGte56QRYOGrjI9II\n" +
            "SjH/PYLKSwjw8YzTeYFrv5UHegjU1G7auq5nJLsCupxADoRBw2y99Oiyg7QeY2tl\n" +
            "dHRpIDxja2V0dGlAZ29vZ2xlbWFpbC5jb20+iQIcBBABAgAGBQJPup3IAAoJEHLU\n" +
            "waEh6t2h1EoP/1Uw+cWK2lJU2BTwWuSTgL/SPoFoR+UKWQ7fES4eTZ330hHmWb4V\n" +
            "Xpg+ZR6QYhXnJxMOMZ2tnya95GgdMJ+Hd4vlq6qb8746wmzIOt5XjhdMr3yiUsY9\n" +
            "NC6P6ymuYEwuNMQBU/Z53rpuoFaF4Wc9nycK+3Gj6t3aPU0JX+qiFJl63+8GNw/Q\n" +
            "CL+JQ4URQB3Vw/RADZfTBbT3VmrdSLGX2/I+nm64ysXvn6nt3q1JTHWXapPGrJXi\n" +
            "HTlvjg+Niw38iBeHOkZ9Td5BIPBlj/8SXy9weG55ruTJFw0SXhV3VXIGbN0ZuJ3g\n" +
            "nsusNCo4pJrFvJ0j3hzYrgOf/8jRUeesu7HlUPnYdBiJTNgKdCh5LlrKXlaisobl\n" +
            "H33aufjO6i5HrX+/b1U9wE/G7MIzopcgiaeSYSJpO9huBJ0+Jri/4tdxvgT6aeNz\n" +
            "9uL4rQKH2gUr9E89Np4aZ3zpp1QxfoJTVaR5AyJNaiiDOvZbvELYXK6QjAwgXIVr\n" +
            "ScopPOXL1E+fdV9tsvYJfTbTJLZ9qeMRIOBPyhSbiDrB4r/i5zYyfydeEFVxackY\n" +
            "vgSp++5HZt5lG0LFVjNnaPZETVCgVb5wmCxNsDqYV1fuxlAmPlTuXfMAvr+bxU/z\n" +
            "3dmBDc7X1VfJVLzb0M5Z0KqvQlWTZkAkIPdQarJchvOBnFa7Rb6qFpcAiQI+BBMB\n" +
            "AgAoAhsjBQkSzAMABgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAUCTj37WgAKCRDu\n" +
            "L+86fajiiXgTD/9tA3FTGjiCE4Z0Nzi/Q+jmNJXGr/MQvgSlbKTGKJKkNk64kLTu\n" +
            "HhYdbNhj/8419fINhxOzbetdWi+RUIRqk/FstBNGCbFYwNBbhp7jSToHLw1oESoN\n" +
            "zPhxkuptvjyaEjrn50ydykVdTeMjytmZ3w7iu5eOt+tNS0x0thGfM3a4kdYoKW0v\n" +
            "mp2BmrtUAXXsOJ475EK6IXeoGLMbgA+JtiDnWH12t/Dfl7L/6Nxjk1fGlihcJl6P\n" +
            "Z1ZytDuRjnvlt77nqMaka7N+GadqmPUWonhKg/aGPMEgQUD4IWM/2Y2EpJIqVfB5\n" +
            "Dv7llScCRB8mte/T8/dvpgr5B0KqGJDudb7Cgp+8zDGCU+M3uHU5ZQRlBO3bbCML\n" +
            "nwT6BxmLT/6ufW7nT1eXscDi+DFKsLa6FQmDY38tzB6tyYlHxQU3RTkm4cLfDzI8\n" +
            "/0JPRfx/RlKLW39QEmFJySMB3IVRtp5R0KNoKaAtYb5hRvD2JJJnx5q0u3h+me6j\n" +
            "RzCMPJWxRKQjx0MdKEJedAH02XEqgeTunm7Kitb3aYuSykHUt2D/fgA4/CQoThF5\n" +
            "SYUVbviYToEu/1hQAeHe9S1F92jCrjuTUmqejoVotk5O3uHBr7A3ASOoBrdaXxuS\n" +
            "x9WpcRprfdtoD36TDWsSuarNxFVzcGFDaV2yN6mIf2LXTNgw2UAOHJzUqokCPgQT\n" +
            "AQIAKAUCTj346wIbIwUJEswDAAYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQ\n" +
            "7i/vOn2o4onPNxAAq6jqkpbx0g/UIdNR2Q7mGQ0QJNbkt2P8Vq7jqwUu4td6GJdC\n" +
            "vy4+RUWo5aRNQ3NgzRFkjLxIrTeSfK+yjruk01r3naGh6h0rk/EY1RCw1sA6GHVV\n" +
            "gFcf83JtfgxH4NE8br+eiNnMODhOXG/UJsBMNo8bfyZu3FnJdUebCODMACJimKWb\n" +
            "gBXa5EOnDZzXjYQrNRt95/yHse76V8JLdHqSnYPvVwcIT6MubF2NPspSFjfnFsj9\n" +
            "J1Fb6aiI+3ob6HJNt2kyN0CdnnR/ZEZun8KQ37jJy7f5LXI6FDDT52oPBfddRRwy\n" +
            "qZsmprbQjxUdIPKAYyjIELy+iAoFTrsJYvGNrgGMHI2ecyC2TE3uJ3qFALLhkFAS\n" +
            "xYR+sSjAI3nJHPcfsfg10clrCfhh1KDWJjlVGgFjNd0MKIhLKA4kfwQvU4BSr5Al\n" +
            "3fzflkRQuLDTNEeM9fwVW6ew+7IHpBNmYtnkSbmURcZoA4y8VuHH7qHID756kf4W\n" +
            "u+wfNLf0SUZ1061y+PI77wUPUEVI2uJzo0xuHMG+L0TitRUv0zvaIGFt9ClX03FU\n" +
            "6r1PPLGG1JNWuBORNgTJVIQzhLM3du7OnCdc4NhfOqZUfdWrIbgPEc870DnQSdmn\n" +
            "J9OTF082SXEfEbjYzLuS5/aImXENypp6A7zeHBJ+TBJUNQj0c7S1qBeQGey0IUNo\n" +
            "cmlzdGlhbiBLZXR0ZXJlciA8Y2tAY2tldHRpLmRlPokCPgQTAQIAKAUCU/eh2wIb\n" +
            "IwUJEswDAAYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQ7i/vOn2o4omSGg/9\n" +
            "EIj+Zz5rqC9BOC3sxbvyvZaPz0G6gT36i0ZW9Qe1drqxs7rcUelYPii8TPB/4v+H\n" +
            "cx82qQpSnD6X7e8hNuRgsulkgZIhT/jnBFEJJoyMtt25UZIolj4JFpw1g+PRkufu\n" +
            "KlVZCisJup+fFN2O6IcsfFqXxnaITWalFYMvOXwJ9rbT+2kczsH+MnxqeRvFQw7u\n" +
            "Gy7Q3bu+A9+rntBhz/LPdzBOJBJh672Y9f9UVsmEFB66d84l7yUs1vlzi9DAqJK4\n" +
            "y49hCe9QMv+NwL9rB9QxLdrbX724IRvGVwRzufd5jHOgAsYbizO+QltBJTsmRHvi\n" +
            "yKClxuiUE4ygQyd5TT3ATC8wQKGfAGWRWaZoi3X6wWvKvW8cPg8ilMoTtPTlZuPL\n" +
            "G32n0NaD7dacpmKfaLeopPAJgrnTl9LwPEDg4dwcSK+ETCY1BcoVGtOVxH1ghMd2\n" +
            "IYOX+BSJiG39ApiHHBwPtc/PIqPjtR7MGB6dCldZZ46eHleCB8Re5HPrQAok+ijb\n" +
            "XX0gx7ACYTniH+TsFszZyuLGstR8Cs8s7MwnbAX40506lDrj9c+0FE69/rJIMQsc\n" +
            "wauGk1x1UaK2+gzBw28ymilhBbuOFabuStAHfGx/1niJMgBO4BiOPIBTjMOYtARR\n" +
            "OSZ9dNGXkKYnxtN6T/kTO3F5N/fFJ42WjDWbrvfqDSy5Ag0ETj346wEQALEnw5y/\n" +
            "zL3QAug9xuHktdVKCbxwAy8Q1ei5UA/GTGnTLdsHIN5e1B2bJyZaYcPTIT+xNgzP\n" +
            "hwDQTosFFpg/JLP1xI28mShk8ai3ls73EhJLUGazOZ0ujxyMkWD0rIBMee6YkQMG\n" +
            "zUkJKaEtqeVLci67Q8QLHLfE331JyTtd0gwlps6FAd7PuCl/50cayr0yXMx67iwK\n" +
            "kyvXaLHYUjdK13MC2xoc4VrirzfNtX0JtCmAYoJ2i2Yq7vgLQasUjbzUsLUuwhol\n" +
            "yoxwE6lB6paBdTh1dTa4mCN3Y8gM+CMveqQUcZuOyFZDWNtMPPCNeWWRkKgfc+fw\n" +
            "HSiCHhDWu/7S6/xSqDb3qegXm6cAA2WFxJ+oEwTSRvK/89y6T3oiFbjmZs+sSRjr\n" +
            "ZAsE3rDC2WFRUFBq6/V7+eO2F1fqNLPzXOaVQX9i3BHv4XjxC0PQoVFnvpSJlHSW\n" +
            "Vuw5xA3Qqa8GuB80zWEqVBJ30gfqj1BAErpKwaVKJOuvRuQa2wkq7iXO/Io4S7UQ\n" +
            "HFO+U9W87PaPNdfjxxEsVmexeXhF8l5zwHYyqKK0Pch/YDoUk/+w7Jn3cpmpceim\n" +
            "YVEDr/YqrbvLpakHuEQiDgWZmcHHEVA7DbfsOULqq1vnpVq0TictdZ20Z8MJ2gAM\n" +
            "P9HCZHPxLafI3YqQrXR3UIHb48Zwy9tdMv7NABEBAAGJAiUEGAECAA8FAk49+OsC\n" +
            "GwwFCRLMAwAACgkQ7i/vOn2o4okF+BAAkN0Kd404HPy/35mCCdWm5DHpcxEURoY1\n" +
            "X6mv6D+pvPQHUN9GKeYYT6wjcpsDsCn2UX9mp0e24SXOxZoVlJ7T6L/QN+MUwnt2\n" +
            "LAO9XCZLMijhe7KX51FJjld1W9XfauqhPlR1Lzr9cJI3UdiYcsZH3X6SfW/hLLRE\n" +
            "MWm/3YfACVVWNkG9PanhroNcVr925k/y58WRKdJOOgMGGBYyIAvtWb6m0Qn978AE\n" +
            "53r7msHwZq06sPXIZJpCl6CTeyMrqU90G+JJY3BfP9rFsU9OLkDRrsAELleI9iXP\n" +
            "QGw6Ixezdi93CqY+Y4weCjtYxm/5vKxwssg/ALVkM/VftWgWRSnZmnZwubgBzgwy\n" +
            "wBwGHxPHz7CV3lBKZfw8U3L4Md3u1bMUu6Y+jW+322D+7+ZaLdJejmmJcEvLaItd\n" +
            "c60IHTM/GbtV7TDiqQaRmyLY5KxnwGLthcYUsGI7HYDNqEa1+cRctB8lEWpgTjHK\n" +
            "nwemvB5c1fPxao7w15O0tvSCX2kD5UMoAbvWJJvxcUTPTPBEHTYWrAk+Ny7CbdMA\n" +
            "+71r942RXo9Xdm4hqjfMcDXdQmfjftfFB1rsBd5Qui8ideQP7ypllsWC8fJUkWN6\n" +
            "3leW5gysLx9Mj6bu6XB4rYS1zH2keGtZe4Qqlxss7JPVsJzD9xSotg+G/Wb7F3HL\n" +
            "HzpeeqkwzVU=\n" +
            "=3yEX\n" +
            "-----END PGP PUBLIC KEY BLOCK-----\n";

    /**
     * Parses a multipart/signed OpenPGP (RFC 3156) message and verifies that the
     * detached signature in the second part is valid over the bytes of the first part.
     */
    @Test
    public void testSignedMessage() throws IOException, MessagingException, PGPException {
        String messageSource = "Date: Mon, 08 Dec 2014 17:44:18 +0100\r\n" +
                "From: cketti <cketti@googlemail.com>\r\n" +
                "MIME-Version: 1.0\r\n" +
                "To: test@example.com\r\n" +
                "Subject: OpenPGP signature test\r\n" +
                "Content-Type: multipart/signed; micalg=pgp-sha1;\r\n" +
                " protocol=\"application/pgp-signature\";\r\n" +
                " boundary=\"24Bem7EnUI1Ipn9jNXuLgsetqa6wOkIxM\"\r\n" +
                "\r\n" +
                "This is an OpenPGP/MIME signed message (RFC 4880 and 3156)\r\n" +
                "--24Bem7EnUI1Ipn9jNXuLgsetqa6wOkIxM\r\n" +
                "Content-Type: multipart/mixed;\r\n" +
                " boundary=\"------------030308060900040601010501\"\r\n" +
                "\r\n" +
                "This is a multi-part message in MIME format.\r\n" +
                "--------------030308060900040601010501\r\n" +
                "Content-Type: text/plain; charset=utf-8\r\n" +
                "Content-Transfer-Encoding: quoted-printable\r\n" +
                "\r\n" +
                "Message body\r\n" +
                "goes here\r\n" +
                "\r\n" +
                "\r\n" +
                "--------------030308060900040601010501\r\n" +
                "Content-Type: text/plain; charset=UTF-8;\r\n" +
                " name=\"attachment.txt\"\r\n" +
                "Content-Transfer-Encoding: base64\r\n" +
                "Content-Disposition: attachment;\r\n" +
                " filename=\"attachment.txt\"\r\n" +
                "\r\n" +
                "VGV4dCBhdHRhY2htZW50Cg==\r\n" +
                "--------------030308060900040601010501--\r\n" +
                "\r\n" +
                "--24Bem7EnUI1Ipn9jNXuLgsetqa6wOkIxM\r\n" +
                "Content-Type: application/pgp-signature; name=\"signature.asc\"\r\n" +
                "Content-Description: OpenPGP digital signature\r\n" +
                "Content-Disposition: attachment; filename=\"signature.asc\"\r\n" +
                "\r\n" +
                "-----BEGIN PGP SIGNATURE-----\r\n" +
                "Version: GnuPG v1\r\n" +
                "\r\n" +
                "iQIcBAEBAgAGBQJUhdVqAAoJEO4v7zp9qOKJ8DQP/1+JE8UF7UmirnN1ZO+25hFC\r\n" +
                "jAfFMxRWMWXN0gGB+6ySy6ah0bCwmRwHpRBsW/tNcsmOPKb2XBf9zwF06uk/lLp4\r\n" +
                "ZmGXxSdQ9XJrlaHk8Sitn9Gi/1L+MNWgrsrLROAZv2jfc9wqN3FOrhN9NC1QXQvO\r\n" +
                "+D7sMorSr3l94majoIDrzvxEnfJVfrZWNTUaulJofOJ55GBZ3UJNob1WKjrnculL\r\n" +
                "IwmSERmVUoFBUfe/MBqqZH0WDJq9nt//NZFHLunj6nGsrpush1dQRcbR3zzQfXkk\r\n" +
                "s7zDLDa8VUv6OxcefjsVN/O7EenoWWgNg6GfW6tY2+oUsLSP2OS3JXvYsylQP4hR\r\n" +
                "iU1V9vvsu2Ax6bVb0+uTqw3jNiqVFy3o4mBigVUqp1EFIwBYmyNbe5wj4ACs9Avj\r\n" +
                "9t2reFSfXobWQFUS4s71JeMefNAHHJWZI63wNTxE6LOw01YxdJiDaPWGTOyM75MK\r\n" +
                "yqn7r5uIfeSv8NypGJaUv4firxKbrcZKk7Wpeh/rZuUSgoPcf3I1IzXfGKKIBHjU\r\n" +
                "WUMhTF5SoC5kIZyeXvHrhTM8HszcS8EoG2XcmcYArwgCUlOunFwZNqLPsfdMTRL6\r\n" +
                "9rcioaohEtroqoJiGAToJtIz8kqCaamnP/ASBkp9qqJizRd6fqt+tE8BsmJbuPLS\r\n" +
                "6lBpS8j0TqmaZMYfB9u4\r\n" +
                "=QvET\r\n" +
                "-----END PGP SIGNATURE-----\r\n" +
                "\r\n" +
                "--24Bem7EnUI1Ipn9jNXuLgsetqa6wOkIxM--\r\n";

        // Parsing may buffer body parts to disk; point the temp directory at the test cache dir.
        BinaryTempFileBody.setTempDirectory(InstrumentationRegistry.getTargetContext().getCacheDir());

        InputStream messageInputStream = new ByteArrayInputStream(messageSource.getBytes());
        MimeMessage message;
        try {
            message = MimeMessage.parseMimeMessage(messageInputStream, true);
        } finally {
            messageInputStream.close();
        }

        Multipart multipartSigned = (Multipart) message.getBody();

        // Part 0 is the signed content; its exact serialized bytes are what was signed.
        BodyPart signedPart = multipartSigned.getBodyPart(0);
        ByteArrayOutputStream signedPartOutputStream = new ByteArrayOutputStream();
        signedPart.writeTo(signedPartOutputStream);
        byte[] signedData = signedPartOutputStream.toByteArray();

        // Part 1 is the ASCII-armored detached signature.
        Body signatureBody = multipartSigned.getBodyPart(1).getBody();
        ByteArrayOutputStream signatureBodyOutputStream = new ByteArrayOutputStream();
        signatureBody.writeTo(signatureBodyOutputStream);
        byte[] signatureData = signatureBodyOutputStream.toByteArray();

        assertTrue(verifySignature(signedData, signatureData));
    }

    /**
     * Verifies a detached OpenPGP signature against {@link #PUBLIC_KEY}.
     *
     * @param signedData    the exact bytes the signature was computed over
     * @param signatureData the (possibly armored) detached signature bytes
     * @return true if the signature is valid, false otherwise
     */
    private boolean verifySignature(byte[] signedData, byte[] signatureData) throws IOException, PGPException {
        InputStream signatureInputStream = PGPUtil.getDecoderStream(new ByteArrayInputStream(signatureData));
        InputStream keyInputStream = PGPUtil.getDecoderStream(new ByteArrayInputStream(PUBLIC_KEY.getBytes()));
        try {
            PGPObjectFactory pgpObjectFactory = new BcPGPObjectFactory(signatureInputStream);
            Object pgpObject = pgpObjectFactory.nextObject();

            PGPSignatureList pgpSignatureList;
            if (pgpObject instanceof PGPCompressedData) {
                // The signature may be wrapped in a compressed container; unwrap it first.
                PGPCompressedData compressedData = (PGPCompressedData) pgpObject;
                pgpObjectFactory = new BcPGPObjectFactory(compressedData.getDataStream());
                pgpSignatureList = (PGPSignatureList) pgpObjectFactory.nextObject();
            } else {
                pgpSignatureList = (PGPSignatureList) pgpObject;
            }
            PGPSignature signature = pgpSignatureList.get(0);

            PGPPublicKeyRingCollection pgpPublicKeyRingCollection = new BcPGPPublicKeyRingCollection(keyInputStream);
            PGPPublicKey publicKey = pgpPublicKeyRingCollection.getPublicKey(signature.getKeyID());

            signature.init(new BcPGPContentVerifierBuilderProvider(), publicKey);
            // Feed all signed bytes at once instead of the previous byte-by-byte loop.
            signature.update(signedData);
            return signature.verify();
        } finally {
            // Close in all cases; the original only closed these on the success path.
            signatureInputStream.close();
            keyInputStream.close();
        }
    }
}
| |
package com.eolhing.droidshooter.GameEntities;
import java.util.LinkedList;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.eolhing.droidshooter.GamePhysics.Collidable;
import com.eolhing.droidshooter.GamePhysics.Physics;
import com.eolhing.droidshooter.Tools.Clock;
/**
 * Base entity for ships: holds sprite/position state, health and power pools,
 * movement toward {@link #moveTarget}, and the mounted weapons. Collides as a
 * single point (collisionType is set to Physics.POINT in the constructor).
 */
public class Ship extends Collidable
{
// Lifecycle states stored in 'state'.
public static final int ALIVE = 1;
public static final int DESTROYING = 2;
public static final int DESTROYED = 3;
// Per-ship clock driving the update() time step; its factor can be scaled via setTimeFactor().
protected Clock clock;
// One of ALIVE, DESTROYING or DESTROYED.
public int state;
// When true, the sprite is rotated to face the movement direction during update().
public boolean directionAligned;
protected Sprite sprite;
// Offset from the sprite's lower-left corner to the point treated as the ship's center.
protected Vector2 origin;
// World position the ship moves toward each update().
public Vector2 moveTarget;
// Movement speed (world units per second; update() converts clock time to seconds).
public float speed;
public float maxHealth;
public float health;
public float maxPower;
public float power;
// Shared bullet container supplied by the owner of this ship.
public LinkedList<Bullet> bullets;
public LinkedList<Weapon> weapons;
public EffectContainer effectContainer;
public ParticlesContainer particleEffectContainer;
// Alpha used when drawing the sprite.
float mAlpha;
/**
 * Creates a ship at the given position.
 *
 * @param bullets shared bullet container kept by reference
 * @param position initial center position; also becomes the initial move target
 */
public Ship(LinkedList<Bullet> bullets, Vector2 position)
{
super();
clock = new Clock();
state = ALIVE;
mAlpha = 1.f;
directionAligned = false;
sprite = new Sprite();
origin = new Vector2();
moveTarget = new Vector2();
speed = 0.f;
maxHealth = 500.f;
health = maxHealth;
maxPower = 100.f;
power = 0.f;
this.bullets = bullets;
weapons = new LinkedList<Weapon>();
setPosition(position);
moveTarget.set(position);
collisionType = Physics.POINT;
clock.reset();
}
// X coordinate of the ship's center (the sprite position is its corner).
public float getX()
{
return sprite.getX() + origin.x;
}
// Y coordinate of the ship's center.
public float getY()
{
return sprite.getY() + origin.y;
}
/** Returns a NEW vector holding the ship's center position. */
public Vector2 getPosition()
{
return new Vector2(sprite.getX() + origin.x, sprite.getY() + origin.y);
}
/**
 * Moves the ship's center to the given position. If the ship is not currently
 * moving (the target equals the current center), the move target is re-pointed
 * at the SAME Vector2 instance passed in — note the aliasing: later mutation of
 * the argument by the caller would also move the target.
 */
public void setPosition(Vector2 position)
{
if (moveTarget.x == sprite.getX() + origin.x && moveTarget.y == sprite.getY() + origin.y)
moveTarget = position;
sprite.setPosition(position.x - origin.x, position.y - origin.y);
}
/**
 * Assigns the ship's texture region and recomputes origin/bounds so the current
 * position remains the visual center. Note: integer division truncates the
 * origin for odd-sized regions.
 */
public void setTextureRegion(TextureRegion textureRegion)
{
sprite.setRegion(textureRegion);
origin.set(textureRegion.getRegionWidth() / 2, textureRegion.getRegionHeight() / 2);
sprite.setBounds(sprite.getX() - origin.x, sprite.getY() - origin.y, textureRegion.getRegionWidth(), textureRegion.getRegionHeight());
sprite.setOrigin(sprite.getRegionWidth() / 2, sprite.getRegionHeight() / 2);
}
/** Axis-aligned bounding rectangle of the ship's sprite. */
public Rectangle getRect()
{
return sprite.getBoundingRectangle();
}
public void draw(SpriteBatch batch)
{
sprite.draw(batch, mAlpha);
}
/**
 * Applies a bullet hit: subtracts the bullet's power from health (clamped at 0),
 * notifies the firing ship via onHitShip/onDestroyShip, and consumes the bullet.
 * Ignored while this ship is already DESTROYING or DESTROYED.
 */
public void collide(Bullet bullet)
{
if (state != DESTROYED && state != DESTROYING)
{
health = Math.max(0.f, health - bullet.power.getValue());
if (health <= 0)
{
destroy();
bullet.originShip.onDestroyShip(bullet);
}
else
bullet.originShip.onHitShip(bullet);
bullet.hit();
}
}
/**
 * Per-frame update: advances toward moveTarget at 'speed', optionally rotates
 * the sprite toward the movement direction, updates all weapons and refreshes
 * the collision point. The time step comes from this ship's own clock.
 */
public void update()
{
float time = clock.getTime() / 1000.f;
clock.reset();
if (state == ALIVE)
{
// Move Ship
if (getX() != moveTarget.x || getY() != moveTarget.y)
{
Vector2 moveOffset = new Vector2(moveTarget.x - getX(), moveTarget.y - getY());
// Snap to the target when it is reachable within this frame's travel distance.
if (speed * time >= moveTarget.dst(getX(), getY()))
setPosition(moveTarget);
else
{
float angle = (float) (moveOffset.angle() * Math.PI / 180.f);
setPosition(new Vector2((float) (getX() + Math.cos(angle) * speed * time), (float) (getY() + Math.sin(angle) * speed * time)));
if (directionAligned)
sprite.setRotation(angle * 180.f / (float) Math.PI);
}
}
// Update weapons
for (Weapon weapon : weapons)
weapon.update();
collisionPoint.x = getX();
collisionPoint.y = getY();
}
}
/** Starts or stops firing on every mounted weapon. */
public void setFiring(boolean firing)
{
for (Weapon weapon : weapons)
weapon.setFiring(firing);
}
// Callback invoked on the FIRING ship when one of its bullets hits another ship; no-op by default.
public void onHitShip(Bullet bullet)
{
}
// Callback invoked on the FIRING ship when one of its bullets destroys another ship; no-op by default.
public void onDestroyShip(Bullet bullet)
{
}
public void destroy()
{
state = DESTROYED;
}
// Hook for subclasses; no-op by default.
public void castEffect()
{
}
/** Current sprite rotation converted from degrees to radians. */
float getDirection()
{
return sprite.getRotation() / 180 * (float) Math.PI;
}
/**
 * Adds (or, with a negative offset, removes) health, clamped above by maxHealth.
 * Dropping to zero or below switches the state to DESTROYING — note that
 * collide() instead calls destroy(), which goes straight to DESTROYED.
 */
public void addHealth(float healthOffset)
{
health = Math.min(health + healthOffset, maxHealth);
if (health <= 0)
state = DESTROYING;
}
/** Applies a time-scale factor to this ship's clock and to every weapon. */
public void setTimeFactor(float factor)
{
clock.setFactor(factor);
for (Weapon weapon : weapons)
{
weapon.setTimeFactor(factor);
}
}
}
| |
package connect.ui.activity.chat.model.content;
import android.text.TextUtils;
import com.google.gson.Gson;
import com.google.protobuf.ByteString;
import connect.db.MemoryDataManager;
import connect.db.SharedPreferenceUtil;
import connect.db.green.DaoHelper.ContactHelper;
import connect.db.green.DaoHelper.ParamHelper;
import connect.db.green.bean.ContactEntity;
import connect.db.green.bean.ParamEntity;
import connect.im.bean.MsgType;
import connect.im.bean.Session;
import connect.im.bean.SocketACK;
import connect.im.bean.UserCookie;
import connect.im.model.ChatSendManager;
import connect.ui.activity.chat.bean.ExtBean;
import connect.ui.activity.chat.bean.MsgDefinBean;
import connect.ui.activity.chat.bean.MsgEntity;
import connect.ui.activity.chat.bean.MsgSender;
import connect.ui.activity.chat.bean.RoomSession;
import connect.utils.TimeUtil;
import connect.utils.cryption.EncryptionUtil;
import connect.utils.cryption.SupportKeyUril;
import connect.wallet.jni.AllNativeMethod;
import protos.Connect;
/**
* friend chat
* Created by gtq on 2016/12/19.
*/
public class FriendChat extends NormalChat {
private String Tag = "FriendChat";
// Contact this chat talks to; replaced by the stored DB entity when one exists.
private ContactEntity contactEntity = null;
/** user Cookie: the local user's session key material (private/public key and salt). */
private UserCookie userCookie = null;
/** friend Cookie: the friend's session key material (public key and salt). */
private UserCookie friendCookie = null;
/**
 * Encryption level used by sendPushMsg():
 * NORMAL — identity keys only, no session salts;
 * HALF   — our session key/salt combined with the friend's identity key;
 * BOTH   — both sides' session keys with XOR-combined salts.
 */
public enum EncryType {
NORMAL,
HALF,
BOTH,
}
// Default to the strongest level; degraded to NORMAL when a cookie cannot be loaded.
private EncryType encryType = EncryType.BOTH;
/**
 * Creates a chat for the given contact. If a friend entity is stored locally for
 * the contact's public key it replaces the passed entity; otherwise the contact
 * is flagged as a stranger.
 */
public FriendChat(ContactEntity entity) {
contactEntity = entity;
ContactEntity dbEntity = ContactHelper.getInstance().loadFriendEntity(entity.getPub_key());
isStranger = (dbEntity == null);
if (dbEntity != null) {
contactEntity = dbEntity;
}
}
/**
 * Builds the base message entity for an outgoing message of the given type,
 * filling in sender identity, receiver key/address, timestamps and — when a
 * burn time is active for the room — the burn-after-read extension.
 */
@Override
public MsgEntity createBaseChat(MsgType type) {
MsgDefinBean msgDefinBean = new MsgDefinBean();
msgDefinBean.setType(type.type);
msgDefinBean.setUser_name(contactEntity.getUsername());
msgDefinBean.setSendtime(TimeUtil.getCurrentTimeInLong());
msgDefinBean.setMessage_id(TimeUtil.timestampToMsgid());
msgDefinBean.setPublicKey(contactEntity.getPub_key());
msgDefinBean.setUser_id(address());
msgDefinBean.setSenderInfoExt(new MsgSender(MemoryDataManager.getInstance().getPubKey(),
MemoryDataManager.getInstance().getName(),
MemoryDataManager.getInstance().getAddress(),
MemoryDataManager.getInstance().getAvatar()));
long burntime = RoomSession.getInstance().getBurntime();
if (burntime > 0) {
// Attach the burn-after-read duration as a JSON extension.
ExtBean extBean = new ExtBean();
extBean.setLuck_delete(burntime);
msgDefinBean.setExt(new Gson().toJson(extBean));
}
MsgEntity chatBean = new MsgEntity();
chatBean.setMsgDefinBean(msgDefinBean);
chatBean.setPubkey(contactEntity.getPub_key());
chatBean.setRecAddress(address());
chatBean.setSendstate(0);
return chatBean;
}
/**
 * Serializes the message to JSON, encrypts it with AES-GCM over an ECDH-derived
 * key chosen according to the current encryType, and sends it as a single-chat
 * packet. Cookies are (re)loaded first; a missing cookie downgrades to NORMAL.
 */
@Override
public void sendPushMsg(Object bean) {
MsgDefinBean definBean = ((MsgEntity) bean).getMsgDefinBean();
String msgStr = new Gson().toJson(definBean);
String priKey = null;
byte[] randomSalt = null;
String friendKey = null;
loadUserCookie();
loadFriendCookie(definBean.getPublicKey());
SupportKeyUril.EcdhExts ecdhExts = null;
Connect.MessageData.Builder builder = Connect.MessageData.newBuilder();
switch (encryType) {
case NORMAL:
// Our identity private key against the friend's identity public key; no salts.
priKey = MemoryDataManager.getInstance().getPriKey();
friendKey = definBean.getPublicKey();
ecdhExts = SupportKeyUril.EcdhExts.EMPTY;
break;
case HALF:
// Our session key and salt, friend still on identity key; advertise salt and chat key.
priKey = userCookie.getPriKey();
randomSalt = userCookie.getSalt();
friendKey = definBean.getPublicKey();
ecdhExts = SupportKeyUril.EcdhExts.OTHER;
ecdhExts.setBytes(randomSalt);
builder.setSalt(ByteString.copyFrom(randomSalt)).setChatPubKey(userCookie.getPubKey());
break;
case BOTH:
// Both sides have session cookies: derive from session keys and XOR of both salts.
priKey = userCookie.getPriKey();
randomSalt = userCookie.getSalt();
friendKey = friendCookie.getPubKey();
byte[] friendSalt = friendCookie.getSalt();
ecdhExts = SupportKeyUril.EcdhExts.OTHER;
ecdhExts.setBytes(SupportKeyUril.xor(randomSalt, friendSalt, 64));
builder.setSalt(ByteString.copyFrom(randomSalt)).setChatPubKey(userCookie.getPubKey()).
setVer(ByteString.copyFrom(friendCookie.getSalt()));
break;
}
Connect.GcmData gcmData = EncryptionUtil.encodeAESGCM(ecdhExts, priKey, friendKey, msgStr.getBytes());
builder.setCipherData(gcmData).
setMsgId(definBean.getMessage_id()).
setTyp(definBean.getType()).
setReceiverAddress(((MsgEntity) bean).getRecAddress());
Connect.MessageData messageData = builder.build();
ChatSendManager.getInstance().sendChatAckMsg(SocketACK.SINGLE_CHAT, definBean.getPublicKey(), messageData);
}
@Override
public String headImg() {
return contactEntity.getAvatar();
}
// Prefer the remark (alias) set by the user; fall back to the account username.
@Override
public String nickName() {
return TextUtils.isEmpty(contactEntity.getRemark()) ? contactEntity.getUsername() : contactEntity.getRemark();
}
/**
 * Returns the friend's address, deriving it from the public key when the stored
 * entity has no address (empty string when no key is available either).
 */
@Override
public String address() {
String address = contactEntity.getAddress();
if (TextUtils.isEmpty(address)) {
if (TextUtils.isEmpty(roomKey())) {
address = "";
} else {
address = AllNativeMethod.cdGetBTCAddrFromPubKey(roomKey());
}
}
return address;
}
// A one-to-one room is keyed by the friend's public key.
@Override
public String roomKey() {
return contactEntity.getPub_key();
}
// 0 identifies a one-to-one (friend) room; see NormalChat subclasses for other types.
@Override
public int roomType() {
return 0;
}
public void setFriendEntity(ContactEntity friendEntity) {
this.contactEntity = friendEntity;
}
public void setEncryType(EncryType encryType) {
this.encryType = encryType;
}
public void setFriendCookie(UserCookie friendCookie) {
this.friendCookie = friendCookie;
}
/**
 * Loads the local user's session cookie from the in-memory session, falling back
 * to the persisted copy; downgrades encryption to NORMAL when none is found.
 */
private void loadUserCookie() {
String pubkey = MemoryDataManager.getInstance().getPubKey();
userCookie = Session.getInstance().getUserCookie(pubkey);
if (userCookie == null) {
String cookieKey = "COOKIE:" + pubkey;
ParamEntity paramEntity = ParamHelper.getInstance().likeParamEntityDESC(cookieKey);//local cookie
if (paramEntity != null) {
userCookie = new Gson().fromJson(paramEntity.getValue(), UserCookie.class);
}
}
if (userCookie == null) {
encryType = EncryType.NORMAL;
}
}
/**
 * Loads the friend's session cookie for the given public key, falling back to
 * the persisted copy; downgrades encryption to NORMAL when none is found.
 */
public void loadFriendCookie(String pubkey) {
friendCookie = Session.getInstance().getUserCookie(pubkey);
if (friendCookie == null) {
String cookieFriend = "COOKIE:" + pubkey;
ParamEntity friendEntity = ParamHelper.getInstance().likeParamEntityDESC(cookieFriend);
if (friendEntity != null) {
friendCookie = new Gson().fromJson(friendEntity.getValue(), UserCookie.class);
}
}
if (friendCookie == null) {
encryType = EncryType.NORMAL;
}
}
}
| |
/*
* Copyright 1999-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.sql;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.orient.core.command.OCommandRequestText;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.ODatabaseRecordAbstract;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.iterator.ORecordIteratorClass;
import com.orientechnologies.orient.core.iterator.ORecordIteratorClusters;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.security.ODatabaseSecurityResources;
import com.orientechnologies.orient.core.metadata.security.ORole;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.ORecordSchemaAware;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.sql.filter.OSQLFilter;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemField;
import com.orientechnologies.orient.core.sql.operator.OQueryOperator;
import com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquals;
import com.orientechnologies.orient.core.sql.operator.OQueryOperatorNotEquals;
import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
/**
* Executes a TRAVERSE crossing records. Returns a List<OIdentifiable> containing all the traversed records that match the WHERE
* condition.
* <p>
* SYNTAX: <code>TRAVERSE <field>* FROM <target> WHERE <condition></code>
* </p>
* <p>
* In the command context you've access to the variable $depth containing the depth level from the root node. This is useful to
* limit the traverse up to a level. For example to consider from the first depth level (0 is root node) to the third use:
* <code>TRAVERSE children FROM #5:23 WHERE $depth BETWEEN 1 AND 3</code>. To filter traversed records use it combined with a SELECT
* statement:
* </p>
* <p>
* <code>SELECT FROM (TRAVERSE children FROM #5:23 WHERE $depth BETWEEN 1 AND 3) WHERE city.name = 'Rome'</code>
* </p>
*
* @author Luca Garulli
*/
@SuppressWarnings("unchecked")
public abstract class OCommandExecutorSQLExtractAbstract extends OCommandExecutorSQLAbstract {
// Search pattern used to locate the FROM keyword inside the command text.
protected static final String KEYWORD_FROM_2FIND = " " + KEYWORD_FROM + " ";
// The query being executed; assigned (or wrapped) by parse().
protected OSQLAsynchQuery<ORecordSchemaAware<?>> request;
// Compiled filter/target conditions (compiled once, see parse()).
protected OSQLFilter compiledFilter;
// Records to iterate over; resolved by assignTarget().
protected Iterable<? extends OIdentifiable> target;
// Buffered results, when results are not streamed directly to a listener.
protected List<OIdentifiable> tempResult;
// Number of records added to the result so far (see addResult()).
protected int resultCount;
// Number of leading records to skip, parsed from the SKIP keyword.
protected int skip = 0;
/**
 * Compile the filter conditions only the first time.
 *
 * @param iRequest the incoming command request
 * @return this executor, for call chaining
 */
public OCommandExecutorSQLExtractAbstract parse(final OCommandRequestText iRequest) {
    final ODatabaseRecord database = getDatabase();
    database.checkSecurity(ODatabaseSecurityResources.COMMAND, ORole.PERMISSION_READ);
    init(iRequest.getText());
    if (iRequest instanceof OSQLSynchQuery) {
        request = (OSQLSynchQuery<ORecordSchemaAware<?>>) iRequest;
    } else if (iRequest instanceof OSQLAsynchQuery) {
        request = (OSQLAsynchQuery<ORecordSchemaAware<?>>) iRequest;
    } else {
        // BUILD A QUERY OBJECT FROM THE COMMAND REQUEST
        final OSQLSynchQuery<ORecordSchemaAware<?>> query = new OSQLSynchQuery<ORecordSchemaAware<?>>(iRequest.getText());
        if (iRequest.getResultListener() != null) {
            query.setResultListener(iRequest.getResultListener());
        }
        request = query;
    }
    return this;
}
/**
 * Returns the buffered result when present; otherwise delegates to the
 * synchronous query's result, or returns null for asynchronous requests.
 */
public List<OIdentifiable> getResult() {
    if (tempResult != null) {
        return tempResult;
    }
    if (request instanceof OSQLSynchQuery) {
        return (List<OIdentifiable>) ((OSQLSynchQuery<ORecordSchemaAware<?>>) request).getResult();
    }
    return null;
}
/**
 * Assign the right TARGET if found.
 *
 * @param iArgs
 *          Parameters to bind
 * @return true if the target has been recognized, otherwise false
 */
protected boolean assignTarget(final Map<Object, Object> iArgs) {
    parameters = iArgs;
    // TODO: SUPPORT MULTIPLE CLASSES LIKE A SQL JOIN
    compiledFilter.bindParameters(iArgs);
    if (target != null) {
        // Already resolved on a previous call.
        return true;
    }
    if (compiledFilter.getTargetClasses() != null) {
        searchInClasses();
    } else if (compiledFilter.getTargetClusters() != null) {
        searchInClusters();
    } else if (compiledFilter.getTargetRecords() != null) {
        target = compiledFilter.getTargetRecords();
    } else {
        return false;
    }
    return true;
}
protected Object handleResult() {
if (tempResult != null) {
for (OIdentifiable d : tempResult)
if (d != null)
request.getResultListener().result(d);
}
if (request instanceof OSQLSynchQuery)
return ((OSQLSynchQuery<ORecordSchemaAware<?>>) request).getResult();
return null;
}
protected boolean addResult(final OIdentifiable iRecord) {
resultCount++;
OIdentifiable recordCopy = iRecord instanceof ORecord<?> ? ((ORecord<?>) iRecord).copy() : iRecord.getIdentity().copy();
if (recordCopy != null)
// CALL THE LISTENER NOW
if (request.getResultListener() != null)
request.getResultListener().result(recordCopy);
if (limit > -1 && resultCount >= limit)
// BREAK THE EXECUTION
return false;
return true;
}
/**
* Parses the limit keyword if found.
*
* @param word
* StringBuilder to parse
* @return
* @return the limit found as integer, or -1 if no limit is found. -1 means no limits.
* @throws OCommandSQLParsingException
* if no valid limit has been found
*/
protected int parseLimit(final StringBuilder word) throws OCommandSQLParsingException {
if (!word.toString().equals(KEYWORD_LIMIT))
return -1;
currentPos = OSQLHelper.nextWord(text, textUpperCase, currentPos, word, true);
try {
limit = Integer.parseInt(word.toString());
} catch (Exception e) {
throw new OCommandSQLParsingException("Invalid LIMIT value setted to '" + word
+ "' but it should be a valid integer. Example: LIMIT 10", text, currentPos);
}
if (limit < 0)
throw new OCommandSQLParsingException("Invalid LIMIT value setted to the negative number '" + word
+ "'. Only positive numbers are valid. Example: LIMIT 10", text, currentPos);
return limit;
}
/**
* Parses the skip keyword if found.
*
* @param word
* StringBuilder to parse
* @return
* @return the skip found as integer, or -1 if no skip is found. -1 means no skip.
* @throws OCommandSQLParsingException
* if no valid skip has been found
*/
protected int parseSkip(final StringBuilder word) throws OCommandSQLParsingException {
if (!word.toString().equals(KEYWORD_SKIP))
return -1;
currentPos = OSQLHelper.nextWord(text, textUpperCase, currentPos, word, true);
try {
skip = Integer.parseInt(word.toString());
} catch (Exception e) {
throw new OCommandSQLParsingException("Invalid SKIP value setted to '" + word
+ "' but it should be a valid positive integer. Example: SKIP 10", text, currentPos);
}
if (skip < 0)
throw new OCommandSQLParsingException("Invalid SKIP value setted to the negative number '" + word
+ "'. Only positive numbers are valid. Example: SKIP 10", text, currentPos);
return skip;
}
protected boolean filter(final ORecordInternal<?> iRecord) {
return compiledFilter.evaluate(iRecord, context);
}
protected void searchInClasses() {
final OClass cls = compiledFilter.getTargetClasses().keySet().iterator().next();
final ODatabaseRecord database = getDatabase();
database.checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_READ, cls.getName().toLowerCase());
// NO INDEXES: SCAN THE ENTIRE CLUSTER
final ORID[] range = getRange();
target = new ORecordIteratorClass<ORecordInternal<?>>(database, (ODatabaseRecordAbstract) database, cls.getName(), true)
.setRange(range[0], range[1]);
}
protected void searchInClusters() {
final ODatabaseRecord database = getDatabase();
final Set<Integer> clusterIds = new HashSet<Integer>();
for (String clusterName : compiledFilter.getTargetClusters().keySet()) {
if (clusterName == null || clusterName.length() == 0)
throw new OCommandExecutionException("No cluster or schema class selected in query");
database.checkSecurity(ODatabaseSecurityResources.CLUSTER, ORole.PERMISSION_READ, clusterName.toLowerCase());
if (Character.isDigit(clusterName.charAt(0))) {
// GET THE CLUSTER NUMBER
for (int clusterId : OStringSerializerHelper.splitIntArray(clusterName)) {
if (clusterId == -1)
throw new OCommandExecutionException("Cluster '" + clusterName + "' not found");
clusterIds.add(clusterId);
}
} else {
// GET THE CLUSTER NUMBER BY THE CLASS NAME
final int clusterId = database.getClusterIdByName(clusterName.toLowerCase());
if (clusterId == -1)
throw new OCommandExecutionException("Cluster '" + clusterName + "' not found");
clusterIds.add(clusterId);
}
}
// CREATE CLUSTER AS ARRAY OF INT
final int[] clIds = new int[clusterIds.size()];
int i = 0;
for (int c : clusterIds)
clIds[i++] = c;
final ORID[] range = getRange();
target = new ORecordIteratorClusters<ORecordInternal<?>>(database, (ODatabaseRecordAbstract) database, clIds).setRange(
range[0], range[1]);
}
protected void applyLimit() {
if (tempResult != null && limit > 0) {
final List<OIdentifiable> newList = new ArrayList<OIdentifiable>();
// APPLY LIMIT
final int tot = Math.min(limit, tempResult.size());
for (int i = 0; i < tot; ++i)
newList.add(tempResult.get(i));
tempResult.clear();
tempResult = newList;
}
}
/**
* Optimizes the condition tree.
*/
protected void optimize() {
if (compiledFilter == null)
return;
optimizeBranch(null, compiledFilter.getRootCondition());
}
protected void optimizeBranch(final OSQLFilterCondition iParentCondition, OSQLFilterCondition iCondition) {
if (iCondition == null)
return;
final Object left = iCondition.getLeft();
if (left instanceof OSQLFilterCondition)
// ANALYSE LEFT RECURSIVELY
optimizeBranch(iCondition, (OSQLFilterCondition) left);
final Object right = iCondition.getRight();
if (right instanceof OSQLFilterCondition)
// ANALYSE RIGHT RECURSIVELY
optimizeBranch(iCondition, (OSQLFilterCondition) right);
final OQueryOperator oper = iCondition.getOperator();
Object result = null;
if (left instanceof OSQLFilterItemField && right instanceof OSQLFilterItemField) {
if (((OSQLFilterItemField) left).getRoot().equals(((OSQLFilterItemField) right).getRoot())) {
if (oper instanceof OQueryOperatorEquals)
result = Boolean.TRUE;
else if (oper instanceof OQueryOperatorNotEquals)
result = Boolean.FALSE;
}
}
if (result != null) {
if (iParentCondition != null)
if (iCondition == iParentCondition.getLeft())
// REPLACE LEFT
iCondition.setLeft(result);
else
// REPLACE RIGHT
iCondition.setRight(result);
else {
// REPLACE ROOT CONDITION
if (result instanceof Boolean && ((Boolean) result))
compiledFilter.setRootCondition(null);
}
}
}
protected ORID[] getRange() {
final ORID beginRange;
final ORID endRange;
final OSQLFilterCondition rootCondition = compiledFilter.getRootCondition();
if (rootCondition == null) {
if (request instanceof OSQLSynchQuery)
beginRange = ((OSQLSynchQuery<ORecordSchemaAware<?>>) request).getNextPageRID();
else
beginRange = null;
endRange = null;
} else {
final ORID conditionBeginRange = rootCondition.getBeginRidRange();
final ORID conditionEndRange = rootCondition.getEndRidRange();
final ORID nextPageRid;
if (request instanceof OSQLSynchQuery)
nextPageRid = ((OSQLSynchQuery<ORecordSchemaAware<?>>) request).getNextPageRID();
else
nextPageRid = null;
if (conditionBeginRange != null && nextPageRid != null)
beginRange = conditionBeginRange.compareTo(nextPageRid) > 0 ? conditionBeginRange : nextPageRid;
else if (conditionBeginRange != null)
beginRange = conditionBeginRange;
else
beginRange = nextPageRid;
endRange = conditionEndRange;
}
return new ORID[] { beginRange, endRange };
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: DSCParser.java 1345683 2012-06-03 14:50:33Z gadams $ */
package org.apache.xmlgraphics.ps.dsc;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import lombok.extern.slf4j.Slf4j;
import org.apache.xmlgraphics.ps.DSCConstants;
import org.apache.xmlgraphics.ps.PSGenerator;
import org.apache.xmlgraphics.ps.dsc.events.DSCAtend;
import org.apache.xmlgraphics.ps.dsc.events.DSCComment;
import org.apache.xmlgraphics.ps.dsc.events.DSCEvent;
import org.apache.xmlgraphics.ps.dsc.events.DSCHeaderComment;
import org.apache.xmlgraphics.ps.dsc.events.PostScriptComment;
import org.apache.xmlgraphics.ps.dsc.events.PostScriptLine;
import org.apache.xmlgraphics.ps.dsc.events.UnparsedDSCComment;
import org.apache.xmlgraphics.ps.dsc.tools.DSCTools;
/**
* Parser for DSC-compliant PostScript files (DSC = Document Structuring
* Conventions). The parser is implemented as a pull parser but has the ability
* to act as a push parser through the DSCHandler interface.
*/
@Slf4j
public class DSCParser implements DSCParserConstants {

    /** Stream the PostScript file is read from (buffered when mark is unsupported). */
    private InputStream in;
    /** Line reader layered over {@link #in} using US-ASCII. */
    private BufferedReader reader;
    /** True once %%EOF has been seen (when EOF checking is enabled). */
    private boolean eofFound = false;
    /** Whether content after %%EOF is treated as an error. */
    private boolean checkEOF = true;
    private DSCEvent currentEvent;
    private DSCEvent nextEvent;
    /** Adapter registered by {@link #setNestedDocumentHandler}, kept for later removal. */
    private DSCListener nestedDocumentHandler;
    /** Filter listener; always invoked before the regular listeners. */
    private DSCListener filterListener;
    private List<DSCListener> listeners;
    private boolean listenersDisabled = false;

    /**
     * Creates a new DSC parser.
     *
     * @param in
     *            InputStream to read the PostScript file from (the stream is
     *            not closed by this class, the caller is responsible for that)
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    public DSCParser(final InputStream in) throws IOException, DSCException {
        if (in.markSupported()) {
            this.in = in;
        } else {
            // Decorate for better performance. BUGFIX: wrap the constructor
            // argument, not this.in, which is still null at this point.
            this.in = new BufferedInputStream(in);
        }
        final String encoding = "US-ASCII";
        try {
            this.reader = new BufferedReader(new InputStreamReader(this.in,
                    encoding));
        } catch (final UnsupportedEncodingException e) {
            // US-ASCII is guaranteed by the JLS; reaching here means a broken VM.
            log.error("UnsupportedEncodingException", e);
            throw new RuntimeException("Incompatible VM! " + e.getMessage());
        }
        // Prime the one-event lookahead so hasNext()/peek() work immediately.
        parseNext();
    }

    /**
     * Returns the InputStream the PostScript code is read from.
     *
     * @return the InputStream the PostScript code is read from
     */
    public InputStream getInputStream() {
        return this.in;
    }

    /**
     * This method is used to write out warning messages for the parsing
     * process. Subclass to override this method. The default implementation
     * writes to logger.
     *
     * @param msg
     *            the warning message
     */
    protected void warn(final String msg) {
        log.warn(msg);
    }

    /**
     * Reads one line from the input file.
     *
     * @return the line or null if there are no more lines
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    protected String readLine() throws IOException, DSCException {
        final String line = this.reader.readLine();
        checkLine(line);
        return line;
    }

    /**
     * Validates one raw line: end-of-stream without a prior %%EOF is a DSC
     * violation; over-long lines only produce a warning.
     */
    private void checkLine(final String line) throws DSCException {
        if (line == null) {
            if (!this.eofFound) {
                throw new DSCException(
                        "%%EOF not found. File is not well-formed.");
            }
        } else if (line.length() > 255) {
            // DSC 3.0 limits lines to 255 characters.
            warn("Line longer than 255 characters. This file is not fully PostScript conforming.");
        }
    }

    /** DSC only treats space and tab as whitespace between keyword and value. */
    private boolean isWhitespace(final char c) {
        return c == ' ' || c == '\t';
    }

    /**
     * Parses one "%%..." line into a DSCComment, consuming any "%%+"
     * continuation lines that follow it.
     */
    private DSCComment parseDSCLine(final String line) throws IOException,
            DSCException {
        final int colon = line.indexOf(':');
        String name;
        String value = "";
        if (colon > 0) {
            name = line.substring(2, colon);
            int startOfValue = colon + 1;
            if (startOfValue < line.length()) {
                // Skip at most one whitespace char after the colon.
                if (isWhitespace(line.charAt(startOfValue))) {
                    startOfValue++;
                }
                value = line.substring(startOfValue).trim();
                if (value.equals(DSCConstants.ATEND.toString())) {
                    // Value is deferred to the document trailer ("(atend)").
                    return new DSCAtend(name);
                }
            }
            // Collect "%%+" continuation lines; mark before each read so the
            // first non-continuation line can be pushed back via reset().
            String nextLine;
            final StringBuilder newValue = new StringBuilder();
            while (true) {
                this.reader.mark(512);
                nextLine = readLine();
                if (nextLine == null) {
                    break;
                } else if (!nextLine.startsWith("%%+")) {
                    break;
                }
                newValue.append(nextLine.substring(3));
            }
            value += newValue.toString();
            this.reader.reset();
        } else {
            // No colon: keyword-only comment.
            name = line.substring(2);
            value = null;
        }
        return parseDSCComment(name, value);
    }

    /**
     * Creates a typed DSCComment when a factory exists for the name, falling
     * back to an UnparsedDSCComment when parsing the value fails.
     */
    private DSCComment parseDSCComment(final String name, final String value) {
        final DSCComment parsed = DSCCommentFactory.createDSCCommentFor(name);
        if (parsed != null) {
            try {
                parsed.parseValue(value);
                return parsed;
            } catch (final Exception e) {
                log.error("Exception", e);
                // ignore and fall back to unparsed DSC comment
            }
        }
        final UnparsedDSCComment unparsed = new UnparsedDSCComment(name);
        unparsed.parseValue(value);
        return unparsed;
    }

    /**
     * Starts the parser in push parsing mode sending events to the DSCHandler
     * instance.
     *
     * @param handler
     *            the DSCHandler instance to send the events to
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    public void parse(final DSCHandler handler) throws IOException,
            DSCException {
        final DSCHeaderComment header = DSCTools.checkAndSkipDSC30Header(this);
        handler.startDocument("%!" + header.getComment());
        DSCEvent event;
        while (hasNext()) {
            event = nextEvent();
            switch (event.getEventType()) {
            case HEADER_COMMENT:
                handler.startDocument("%!"
                        + ((DSCHeaderComment) event).getComment());
                break;
            case DSC_COMMENT:
                handler.handleDSCComment(event.asDSCComment());
                break;
            case COMMENT:
                handler.comment(((PostScriptComment) event).getComment());
                break;
            case LINE:
                handler.line(getLine());
                break;
            case EOF:
                if (isCheckEOF()) {
                    this.eofFound = true;
                }
                handler.endDocument();
                break;
            default:
                throw new IllegalStateException("Illegal event type: "
                        + event.getEventType());
            }
        }
    }

    /**
     * Indicates whether there are additional items.
     *
     * @return true if there are additional items, false if the end of the file
     *         has been reached
     */
    public boolean hasNext() {
        return this.nextEvent != null;
    }

    /**
     * Steps to the next item indicating the type of event.
     *
     * @return the type of event (See {@link DSCParserConstants})
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     * @throws NoSuchElementException
     *             If an attempt was made to advance beyond the end of the file
     */
    public int next() throws IOException, DSCException {
        if (hasNext()) {
            this.currentEvent = this.nextEvent;
            parseNext();
            processListeners();
            return this.currentEvent.getEventType();
        } else {
            throw new NoSuchElementException("There are no more events");
        }
    }

    /** Dispatches the current event to the filter listener first, then to all others. */
    private void processListeners() throws IOException, DSCException {
        if (isListenersDisabled()) {
            return;
        }
        if (this.filterListener != null) {
            // Filter always comes first
            this.filterListener.processEvent(this.currentEvent, this);
        }
        if (this.listeners != null) {
            for (final DSCListener listener : this.listeners) {
                listener.processEvent(this.currentEvent, this);
            }
        }
    }

    /**
     * Steps to the next item returning the new event.
     *
     * @return the new event
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    public DSCEvent nextEvent() throws IOException, DSCException {
        next();
        return getCurrentEvent();
    }

    /**
     * Returns the current event.
     *
     * @return the current event
     */
    public DSCEvent getCurrentEvent() {
        return this.currentEvent;
    }

    /**
     * Returns the next event without moving the cursor to the next event.
     *
     * @return the next event
     */
    public DSCEvent peek() {
        return this.nextEvent;
    }

    /**
     * Parses the next event into the one-event lookahead buffer, classifying
     * the line by its prefix: "%%" DSC comment, "%!" header, "%" plain
     * comment, anything else a PostScript line.
     *
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    protected void parseNext() throws IOException, DSCException {
        final String line = readLine();
        if (line != null) {
            if (this.eofFound && line.length() > 0) {
                throw new DSCException("Content found after EOF");
            }
            if (line.startsWith("%%")) {
                final DSCComment comment = parseDSCLine(line);
                if (comment.getEventType() == EOF && isCheckEOF()) {
                    this.eofFound = true;
                }
                this.nextEvent = comment;
            } else if (line.startsWith("%!")) {
                this.nextEvent = new DSCHeaderComment(line.substring(2));
            } else if (line.startsWith("%")) {
                this.nextEvent = new PostScriptComment(line.substring(1));
            } else {
                this.nextEvent = new PostScriptLine(line);
            }
        } else {
            this.nextEvent = null;
        }
    }

    /**
     * Returns the current PostScript line.
     *
     * @return the current PostScript line
     * @throws IllegalStateException
     *             if the current event is not a normal PostScript line
     */
    public String getLine() {
        if (this.currentEvent.getEventType() == LINE) {
            return ((PostScriptLine) this.currentEvent).getLine();
        } else {
            throw new IllegalStateException(
                    "Current event is not a PostScript line");
        }
    }

    /**
     * Advances to the next DSC comment with the given name.
     *
     * @param name
     *            the name of the DSC comment
     * @return the requested DSC comment or null if the end of the file is
     *         reached
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    public DSCComment nextDSCComment(final String name) throws IOException,
            DSCException {
        return nextDSCComment(name, null);
    }

    /**
     * Advances to the next DSC comment with the given name.
     *
     * @param name
     *            the name of the DSC comment
     * @param gen
     *            PSGenerator to pass the skipped events though to
     * @return the requested DSC comment or null if the end of the file is
     *         reached
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    public DSCComment nextDSCComment(final String name, final PSGenerator gen)
            throws IOException, DSCException {
        while (hasNext()) {
            final DSCEvent event = nextEvent();
            if (event.isDSCComment()) {
                final DSCComment comment = event.asDSCComment();
                if (name.equals(comment.getName())) {
                    return comment;
                }
            }
            if (gen != null) {
                event.generate(gen); // Pipe through to PSGenerator
            }
        }
        return null;
    }

    /**
     * Advances to the next PostScript comment with the given prefix. This is
     * used to find comments following the DSC extension mechanism.
     * <p>
     * Example: To find FOP's custom comments, pass in "FOP" as a prefix. This
     * will find comments like "%FOPFontSetup".
     *
     * @param prefix
     *            the prefix of the extension comment
     * @param gen
     *            PSGenerator to pass the skipped events though to
     * @return the requested PostScript comment or null if the end of the file
     *         is reached
     * @throws IOException
     *             In case of an I/O error
     * @throws DSCException
     *             In case of a violation of the DSC spec
     */
    public PostScriptComment nextPSComment(final String prefix,
            final PSGenerator gen) throws IOException, DSCException {
        while (hasNext()) {
            final DSCEvent event = nextEvent();
            if (event.isComment()) {
                final PostScriptComment comment = (PostScriptComment) event;
                if (comment.getComment().startsWith(prefix)) {
                    return comment;
                }
            }
            if (gen != null) {
                event.generate(gen); // Pipe through to PSGenerator
            }
        }
        return null;
    }

    /**
     * Sets a filter for DSC events.
     *
     * @param filter
     *            the filter to use or null to disable filtering
     */
    public void setFilter(final DSCFilter filter) {
        if (filter != null) {
            this.filterListener = new FilteringEventListener(filter);
        } else {
            this.filterListener = null;
        }
    }

    /**
     * Adds a DSC event listener.
     *
     * @param listener
     *            the listener
     */
    public void addListener(final DSCListener listener) {
        if (listener == null) {
            throw new NullPointerException("listener must not be null");
        }
        if (this.listeners == null) {
            this.listeners = new ArrayList<>();
        }
        this.listeners.add(listener);
    }

    /**
     * Removes a DSC event listener.
     *
     * @param listener
     *            the listener to remove
     */
    public void removeListener(final DSCListener listener) {
        if (this.listeners != null) {
            this.listeners.remove(listener);
        }
    }

    /**
     * Allows to disable all listeners. This can be used to disable any
     * filtering, for example in nested documents.
     *
     * @param value
     *            true to disable all listeners, false to re-enable them
     */
    public void setListenersDisabled(final boolean value) {
        this.listenersDisabled = value;
    }

    /**
     * Indicates whether the listeners are currently disabled.
     *
     * @return true if they are disabled
     */
    public boolean isListenersDisabled() {
        return this.listenersDisabled;
    }

    /**
     * Sets a NestedDocumentHandler which is used to skip nested documents like
     * embedded EPS files. You can also process those parts in a special way.
     * <p>
     * It is suggested to use the more generally usable
     * {@link #addListener(DSCListener)} and
     * {@link #removeListener(DSCListener)} instead. NestedDocumentHandler is
     * internally mapped onto a {@link DSCListener}.
     *
     * @param handler
     *            the NestedDocumentHandler instance or null to disable the
     *            feature
     */
    public void setNestedDocumentHandler(final NestedDocumentHandler handler) {
        if (handler == null) {
            // Remove the previously registered adapter, if any.
            removeListener(this.nestedDocumentHandler);
            this.nestedDocumentHandler = null;
        } else {
            // BUGFIX: remember the adapter so a later call with null can
            // actually remove it (it was previously created but never stored).
            this.nestedDocumentHandler = new DSCListener() {
                @Override
                public void processEvent(final DSCEvent event,
                        final DSCParser parser) throws IOException,
                        DSCException {
                    handler.handle(event, parser);
                }
            };
            addListener(this.nestedDocumentHandler);
        }
    }

    /**
     * Tells the parser whether to check for content after the EOF comment. This
     * can be disabled to skip nested documents.
     *
     * @param value
     *            true if the check is enabled
     */
    public void setCheckEOF(final boolean value) {
        this.checkEOF = value;
    }

    /**
     * Indicates whether the parser is configured to check for content after the
     * EOF comment.
     *
     * @return true if the check is enabled.
     */
    public boolean isCheckEOF() {
        return this.checkEOF;
    }
}
| |
/*
* Copyright 2006-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.actions;
import com.consol.citrus.context.TestContext;
import com.consol.citrus.exceptions.CitrusRuntimeException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Action to enable class invocation through java reflection
*
* @author Christoph Deppisch
* @since 2006
*/
public class JavaAction extends AbstractTestAction {
    /** Instance to be invoked, injected through java reflection */
    private Object instance;

    /** Name of class */
    private String className;

    /** Name of method to invoke */
    private String methodName;

    /** Method args */
    private List<Object> methodArgs = new ArrayList<Object>();

    /** Constructor args */
    private List<Object> constructorArgs = new ArrayList<Object>();

    /** Logger */
    private static final Logger log = LoggerFactory.getLogger(JavaAction.class);

    /**
     * Default constructor.
     */
    public JavaAction() {
        setName("java");
    }

    /**
     * Invokes the configured method on the target instance (creating the
     * instance via reflection when none was injected), replacing dynamic
     * content in String, String[] and List arguments beforehand.
     *
     * @param context the current test context used for variable replacement
     * @throws CitrusRuntimeException when instantiation or invocation fails
     */
    @SuppressWarnings("unchecked")
    @Override
    public void doExecute(TestContext context) {
        try {
            if (instance == null) {
                instance = getObjectInstanceFromClass(context);
            }

            Class<?>[] methodTypes = new Class<?>[methodArgs.size()];
            Object[] methodObjects = new Object[methodArgs.size()];
            for (int i = 0; i < methodArgs.size(); i++) {
                final Object arg = methodArgs.get(i);
                // BUGFIX: the original check arg.getClass().equals(List.class) could
                // never be true (no runtime class equals the List interface), so list
                // arguments were passed through unconverted. Lists are converted to
                // String[] with variable substitution; the parameter type must match
                // the converted value for the reflective lookup to succeed.
                if (arg instanceof List) {
                    String[] converted = StringUtils.toStringArray((List<String>) arg);
                    for (int j = 0; j < converted.length; j++) {
                        converted[j] = context.replaceDynamicContentInString(converted[j]);
                    }
                    methodTypes[i] = String[].class;
                    methodObjects[i] = converted;
                } else if (arg.getClass().equals(String[].class)) {
                    // Copy first so the configured argument list is not mutated.
                    String[] params = (String[]) arg;
                    String[] converted = Arrays.copyOf(params, params.length);
                    for (int j = 0; j < converted.length; j++) {
                        converted[j] = context.replaceDynamicContentInString(converted[j]);
                    }
                    methodTypes[i] = arg.getClass();
                    methodObjects[i] = converted;
                } else if (arg.getClass().equals(String.class)) {
                    methodTypes[i] = String.class;
                    methodObjects[i] = context.replaceDynamicContentInString(arg.toString());
                } else {
                    // Any other argument is passed through unchanged.
                    methodTypes[i] = arg.getClass();
                    methodObjects[i] = arg;
                }
            }

            invokeMethod(methodTypes, methodObjects);
        } catch (RuntimeException e) {
            throw new CitrusRuntimeException("Failed to invoke Java method due to runtime error", e);
        } catch (Exception e) {
            throw new CitrusRuntimeException("Failed to invoke Java method", e);
        }
    }

    /**
     * Resolves the method by name and parameter types and invokes it on the
     * target instance.
     *
     * @throws CitrusRuntimeException when no matching method can be found
     */
    private void invokeMethod(Class<?>[] methodTypes, Object[] methodObjects) throws IllegalArgumentException, InvocationTargetException, IllegalAccessException, CitrusRuntimeException {
        Method methodToRun = ReflectionUtils.findMethod(instance.getClass(), methodName, methodTypes);

        if (methodToRun == null) {
            throw new CitrusRuntimeException("Unable to find method '" + methodName + "(" +
                    StringUtils.arrayToCommaDelimitedString(methodTypes) + ")' for class '" + instance.getClass() + "'");
        }

        log.info("Invoking method '" + methodToRun.toString() + "' on instance '" + instance.getClass() + "'");

        methodToRun.invoke(instance, methodObjects);
    }

    /**
     * Instantiate class for name. Constructor arguments are supported if
     * specified; dynamic content in String arguments is replaced first.
     *
     * @param context the current test context.
     * @return the newly created instance
     * @throws ClassNotFoundException
     * @throws NoSuchMethodException
     * @throws SecurityException
     * @throws InvocationTargetException
     * @throws IllegalAccessException
     * @throws InstantiationException
     * @throws IllegalArgumentException
     */
    private Object getObjectInstanceFromClass(TestContext context) throws ClassNotFoundException, SecurityException, NoSuchMethodException,
            IllegalArgumentException, InstantiationException, IllegalAccessException, InvocationTargetException {
        if (!StringUtils.hasText(className)) {
            throw new CitrusRuntimeException("Neither class name nor object instance reference " +
                    "is set for Java reflection call");
        }

        log.info("Instantiating class for name '" + className + "'");

        Class<?> classToRun = Class.forName(className);

        Class<?>[] constructorTypes = new Class<?>[constructorArgs.size()];
        Object[] constructorObjects = new Object[constructorArgs.size()];
        for (int i = 0; i < constructorArgs.size(); i++) {
            constructorTypes[i] = constructorArgs.get(i).getClass();

            if (constructorArgs.get(i).getClass().equals(String.class)) {
                constructorObjects[i] = context.replaceDynamicContentInString(constructorArgs.get(i).toString());
            } else {
                constructorObjects[i] = constructorArgs.get(i);
            }
        }

        Constructor<?> constr = classToRun.getConstructor(constructorTypes);
        return constr.newInstance(constructorObjects);
    }

    /**
     * Setter for class name
     * @param className
     */
    public JavaAction setClassName(String className) {
        this.className = className;
        return this;
    }

    /**
     * Setter for constructor args
     * @param constructorArgs
     */
    public JavaAction setConstructorArgs(List<Object> constructorArgs) {
        this.constructorArgs = constructorArgs;
        return this;
    }

    /**
     * Setter for method args
     * @param methodArgs
     */
    public JavaAction setMethodArgs(List<Object> methodArgs) {
        this.methodArgs = methodArgs;
        return this;
    }

    /**
     * Setter for method name
     * @param methodName
     */
    public JavaAction setMethodName(String methodName) {
        this.methodName = methodName;
        return this;
    }

    /**
     * Setter for object instance
     * @param instance
     */
    public JavaAction setInstance(Object instance) {
        this.instance = instance;
        return this;
    }

    /**
     * Gets the instance.
     * @return the instance
     */
    public Object getInstance() {
        return instance;
    }

    /**
     * Gets the className.
     * @return the className
     */
    public String getClassName() {
        return className;
    }

    /**
     * Gets the methodName.
     * @return the methodName
     */
    public String getMethodName() {
        return methodName;
    }

    /**
     * Gets the methodArgs.
     * @return the methodArgs
     */
    public List<Object> getMethodArgs() {
        return methodArgs;
    }

    /**
     * Gets the constructorArgs.
     * @return the constructorArgs
     */
    public List<Object> getConstructorArgs() {
        return constructorArgs;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.console.jmsmanager.server;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.util.Map;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.PortletConfig;
import javax.portlet.PortletException;
import javax.portlet.PortletRequest;
import javax.portlet.PortletRequestDispatcher;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.WindowState;
import org.apache.activemq.broker.BrokerService;
import org.apache.geronimo.console.util.PortletManager;
import org.apache.geronimo.gbean.AbstractName;
import org.apache.geronimo.gbean.GBeanData;
import org.apache.geronimo.kernel.GBeanNotFoundException;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.config.Configuration;
import org.apache.geronimo.kernel.proxy.GeronimoManagedBean;
import org.apache.geronimo.system.serverinfo.ServerInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Basic list of JMS brokers
*
* @version $Rev$ $Date$
*/
public class JMSBrokerPortlet extends BaseJMSPortlet {
private static final Logger log = LoggerFactory.getLogger(JMSBrokerPortlet.class);
private PortletRequestDispatcher editView;
private PortletRequestDispatcher helpView;
private PortletRequestDispatcher maximizedView;
private PortletRequestDispatcher normalView;
private boolean deleteFolder(File folder) {
for (File file : folder.listFiles()) {
if (file.isFile())
file.delete();
else
deleteFolder(file);
}
return folder.delete();
}
public void destroy() {
helpView = null;
normalView = null;
maximizedView = null;
editView = null;
super.destroy();
}
protected void doCreate(RenderRequest renderRequest, RenderResponse renderResponse) throws IOException,
PortletException {
renderRequest.setAttribute("mode", "create");
String sBrokerTemplateConfigurationXML = loadConfiguratonFileAsString(resolveConfigurationFile(renderRequest,
"var/activemq/template/activemq-template.xml"));
renderRequest.setAttribute("configXML", sBrokerTemplateConfigurationXML);
editView.include(renderRequest, renderResponse);
}
    /**
     * Renders the help view of the portlet.
     *
     * @param renderRequest  the render request
     * @param renderResponse the render response
     * @throws PortletException if the help view cannot be included
     * @throws IOException      in case of an I/O error while rendering
     */
    protected void doHelp(RenderRequest renderRequest, RenderResponse renderResponse) throws PortletException,
            IOException {
        helpView.include(renderRequest, renderResponse);
    }
protected void doList(RenderRequest renderRequest, RenderResponse renderResponse) throws IOException,
PortletException {
Map<String, BrokerServiceWrapper> brokerServices = getBrokerServices();
renderRequest.setAttribute("brokers", brokerServices.values());
if (WindowState.NORMAL.equals(renderRequest.getWindowState())) {
normalView.include(renderRequest, renderResponse);
} else {
maximizedView.include(renderRequest, renderResponse);
}
}
/*
protected void doUpdate(RenderRequest renderRequest, RenderResponse renderResponse) throws IOException,
PortletException {
String sBrokerURI = renderRequest.getParameter("brokerURI");
String sBrokerName = renderRequest.getParameter("brokerName");
String sBrokerConfigurationXML = null;
try {
Kernel kernel = PortletManager.getKernel();
GBeanData brokerGBeanData = kernel.getGBeanData(new AbstractName(URI.create(sBrokerURI)));
String sBrokerConfigurationXMLPath = brokerGBeanData.getAttribute("amqBaseDir").toString()
+ brokerGBeanData.getAttribute("amqConfigFile").toString();
sBrokerConfigurationXML = loadConfiguratonFileAsString(resolveConfigurationFile(renderRequest,
sBrokerConfigurationXMLPath));
} catch (GBeanNotFoundException e) {
e.printStackTrace();
} catch (InternalKernelException e) {
e.printStackTrace();
}
renderRequest.setAttribute("configXML", sBrokerConfigurationXML);
renderRequest.setAttribute("mode", "update");
renderRequest.setAttribute("brokerWrapper", getBrokerWrapper(renderRequest, new AbstractName(URI
.create(sBrokerURI))));
editView.include(renderRequest, renderResponse);
}
*/
protected void doView(RenderRequest renderRequest, RenderResponse renderResponse) throws IOException,
PortletException {
try {
if (WindowState.MINIMIZED.equals(renderRequest.getWindowState())) {
return;
}
String mode = renderRequest.getParameter("mode");
if (mode == null)
mode = "list";
renderRequest.setAttribute("mode", mode);
if (mode.equals("create")) {
//doCreate(renderRequest, renderResponse);
} else if (mode.equals("update")) {
//doUpdate(renderRequest, renderResponse);
} else {
doList(renderRequest, renderResponse);
}
} catch (Throwable e) {
addErrorMessage(renderRequest, e.getMessage());
log.error(e.getMessage(), e);
}
}
public void init(PortletConfig portletConfig) throws PortletException {
super.init(portletConfig);
normalView = portletConfig.getPortletContext().getRequestDispatcher(
"/WEB-INF/view/jmsmanager/server/normal.jsp");
maximizedView = portletConfig.getPortletContext().getRequestDispatcher(
"/WEB-INF/view/jmsmanager/server/maximized.jsp");
helpView = portletConfig.getPortletContext().getRequestDispatcher("/WEB-INF/view/jmsmanager/server/help.jsp");
editView = portletConfig.getPortletContext().getRequestDispatcher("/WEB-INF/view/jmsmanager/server/edit.jsp");
}
protected String loadConfiguratonFileAsString(File brokerConfigFile) throws IOException {
if (!brokerConfigFile.exists())
throw new IOException("Can not load the ActiveMQ broker configuration file ["
+ brokerConfigFile.getAbsolutePath() + "]");
BufferedReader reader = null;
StringBuilder configBuilder = new StringBuilder();
try {
reader = new BufferedReader(new InputStreamReader(new FileInputStream(brokerConfigFile), "iso-8859-1"));
String sCurrentReadLine = null;
while ((sCurrentReadLine = reader.readLine()) != null)
configBuilder.append(sCurrentReadLine).append("\n");
return configBuilder.toString();
} finally {
if (reader != null)
try {
reader.close();
} catch (Exception e) {
}
}
}
public void processAction(ActionRequest actionRequest, ActionResponse actionResponse) throws PortletException,
IOException {
try {
String mode = actionRequest.getParameter("mode");
if (mode == null)
mode = "list";
if (mode.equals("start")) {
processStartAction(actionRequest, actionResponse);
} else if (mode.equals("stop")) {
processStopAction(actionRequest, actionResponse);
} else if (mode.equals("delete")) {
//processDeleteAction(actionRequest, actionResponse);
} else if (mode.equals("create")) {
//processCreateAction(actionRequest, actionResponse);
} else if (mode.equals("update")) {
processUpdateAction(actionRequest, actionResponse);
} else
actionResponse.setRenderParameter("mode", mode);
} catch (Throwable e) {
log.error(e.getMessage(), e);
addErrorMessage(actionRequest, e.getMessage());
}
}
/**
* 1. Save the configuration XML file to /var/activemq/conf with the name ${brokerName}.xml
* 2. Start the broker GBean
* @param actionRequest
* @param actionResponse
* @throws PortletException
* @throws IOException
*/
/*
protected void processCreateAction(ActionRequest actionRequest, ActionResponse actionResponse)
throws PortletException, IOException {
String sConfigurationXML = actionRequest.getParameter("configXML");
validateConfigXML(sConfigurationXML, actionRequest);
String sBrokerName = actionRequest.getParameter("brokerName");
Kernel kernel = PortletManager.getKernel();
AbstractName brokerAbstractName = kernel.getNaming().createSiblingName(
PortletManager.getNameFor(actionRequest, getActiveMQManager(actionRequest)), sBrokerName, "JMSServer");
validateBrokerName(brokerAbstractName, actionRequest);
saveConfigurationFile(resolveConfigurationFile(actionRequest, "var/activemq/conf/" + sBrokerName + ".xml"),
sConfigurationXML);
GBeanData brokerGBeanData = new GBeanData(brokerAbstractName, BrokerServiceGBeanImpl.class);
brokerGBeanData.setAttribute("brokerName", sBrokerName);
brokerGBeanData.setAttribute("amqBaseDir", "var/activemq/");
brokerGBeanData.setAttribute("amqDataDir", "data/" + sBrokerName);
brokerGBeanData.setAttribute("amqConfigFile", "conf/" + sBrokerName + ".xml");
brokerGBeanData.setAttribute("useShutdownHook", false);
brokerGBeanData.setReferencePattern("ServerInfo", new AbstractNameQuery(null, Collections.EMPTY_MAP,
ServerInfo.class.getName()));
brokerGBeanData.setReferencePattern("MBeanServerReference", new AbstractNameQuery(null, Collections.EMPTY_MAP,
MBeanServerReference.class.getName()));
try {
JMSBroker jmsBroker = getActiveMQManager(actionRequest).addBroker(sBrokerName, brokerGBeanData);
((GeronimoManagedBean) jmsBroker).startRecursive();
addInfoMessage(actionRequest, getLocalizedString(actionRequest, "jmsmanager.broker.successAddBroker",
sBrokerName));
} catch (PortletException e) {
throw e;
} catch (Exception e) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.failAddBroker",
sBrokerName, e.getMessage()), e);
}
actionResponse.setRenderParameter("mode", "list");
}
*/
/**
* 1. Remove the configuration XML file
* 2. Remove the broker GBean
* @param actionRequest
* @param actionResponse
* @throws PortletException
* @throws IOException
*/
/*
protected void processDeleteAction(ActionRequest actionRequest, ActionResponse actionResponse)
throws PortletException, IOException {
String sBrokerName = actionRequest.getParameter("brokerName");
String sBrokerURI = actionRequest.getParameter("brokerURI");
try {
Kernel kernel = PortletManager.getKernel();
AbstractName brokerAbstractName = new AbstractName(URI.create(sBrokerURI));
GBeanData brokerGBeanData = kernel.getGBeanData(brokerAbstractName);
String sBrokerConfigurationXMLPath = brokerGBeanData.getAttribute("amqBaseDir").toString()
+ brokerGBeanData.getAttribute("amqConfigFile").toString();
getActiveMQManager(actionRequest).removeBroker(brokerAbstractName);
File brokerConfigFile = resolveConfigurationFile(actionRequest, sBrokerConfigurationXMLPath);
if (!brokerConfigFile.delete()) {
String sWarningMessage = getLocalizedString(actionRequest, "jmsmanager.broker.failDeleteBrokerConfig",
brokerConfigFile.getAbsolutePath());
log.warn(sWarningMessage);
addWarningMessage(actionRequest, sWarningMessage);
} else {
String sInfoMessage = getLocalizedString(actionRequest, "jmsmanager.broker.successDeleteBrokerConfig",
brokerConfigFile.getAbsolutePath());
log.info(sInfoMessage);
addInfoMessage(actionRequest, sInfoMessage);
}
String sBrokerDataDirectoryPath = brokerGBeanData.getAttribute("amqBaseDir").toString()
+ brokerGBeanData.getAttribute("amqDataDir").toString();
File brokerDataDirectory = resolveConfigurationFile(actionRequest, sBrokerDataDirectoryPath);
if (!deleteFolder(brokerDataDirectory)) {
String sWarningMessage = getLocalizedString(actionRequest, "jmsmanager.broker.failDeleteBrokerData",
brokerDataDirectory.getAbsolutePath());
log.warn(sWarningMessage);
addWarningMessage(actionRequest, sWarningMessage);
} else {
String sInfoMessage = getLocalizedString(actionRequest, "jmsmanager.broker.successDeleteBrokerData",
brokerDataDirectory.getAbsolutePath());
log.info(sInfoMessage);
addInfoMessage(actionRequest, sInfoMessage);
}
addInfoMessage(actionRequest, getLocalizedString(actionRequest, "jmsmanager.broker.successDeleteBroker",
sBrokerName));
} catch (GBeanNotFoundException e) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.failFindBroker",
sBrokerName, e.getMessage()), e);
} catch (Exception e) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.failDeleteBroker",
sBrokerName, e.getMessage()), e);
}
actionResponse.setRenderParameter("mode", "list");
}
*/
protected void processStartAction(ActionRequest actionRequest, ActionResponse actionResponse)
throws PortletException, IOException {
//String sBrokerURI = actionRequest.getParameter("brokerURI");
String sBrokerName = actionRequest.getParameter("brokerName");
try {
Map<String, BrokerServiceWrapper> brokerServices = getBrokerServices();
BrokerService brokerService = brokerServices.get(sBrokerName).getBrokerService();
if (brokerService.isStarted()) {
return;
}
brokerService.start(true);
brokerService.waitUntilStarted();
if (!brokerService.isStarted()) {
throw new PortletException(getLocalizedString(actionRequest,
"jmsmanager.broker.failStartBrokerNoReason", sBrokerName));
}
addInfoMessage(actionRequest, getLocalizedString(actionRequest, "jmsmanager.broker.successStartBroker",
sBrokerName));
} catch (PortletException e) {
throw e;
} catch (Exception e) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.failStartBroker",
sBrokerName, e.getMessage()));
}
actionResponse.setRenderParameter("mode", "list");
}
protected void processStopAction(ActionRequest actionRequest, ActionResponse actionResponse)
throws PortletException, IOException {
//String sBrokerURI = actionRequest.getParameter("brokerURI");
String sBrokerName = actionRequest.getParameter("brokerName");
try {
Map<String, BrokerServiceWrapper> brokerServices = getBrokerServices();
BrokerService brokerService = brokerServices.get(sBrokerName).getBrokerService();
if (!brokerService.isStarted()) {
return;
}
brokerService.stop();
brokerService.waitUntilStopped();
if (brokerService.isStarted()) {
throw new PortletException(getLocalizedString(actionRequest,
"jmsmanager.broker.failStopBrokerNoReason", sBrokerName));
}
addInfoMessage(actionRequest, getLocalizedString(actionRequest, "jmsmanager.broker.successStopBroker",
sBrokerName));
} catch (PortletException e) {
throw e;
} catch (Exception e) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.failStopBroker",
sBrokerName, e.getMessage()));
}
actionResponse.setRenderParameter("mode", "list");
}
/**
* 1. Save the configuration XML file to to /var/activemq/conf with the name ${brokerName}.xml
* 2. Restart the broker GBean
* @param actionRequest
* @param actionResponse
* @throws PortletException
* @throws IOException
*/
protected void processUpdateAction(ActionRequest actionRequest, ActionResponse actionResponse)
throws PortletException, IOException {
String sConfigurationXML = actionRequest.getParameter("configXML");
String sBrokerName = actionRequest.getParameter("brokerName");
String sBrokerURI = actionRequest.getParameter("brokerURI");
validateConfigXML(sConfigurationXML, actionRequest);
Kernel kernel = PortletManager.getKernel();
try {
AbstractName brokerAbstractName = new AbstractName(URI.create(sBrokerURI));
GBeanData brokerGBeanData = kernel.getGBeanData(new AbstractName(URI.create(sBrokerURI)));
String sBrokerConfigurationXMLPath = brokerGBeanData.getAttribute("amqBaseDir").toString()
+ brokerGBeanData.getAttribute("amqConfigFile").toString();
saveConfigurationFile(resolveConfigurationFile(actionRequest, sBrokerConfigurationXMLPath),
sConfigurationXML);
GeronimoManagedBean jmsBroker = PortletManager.getManagedBean(actionRequest, brokerAbstractName);
if (kernel.isRunning(brokerAbstractName)) {
jmsBroker.stop();
}
jmsBroker.startRecursive();
if (!kernel.isRunning(brokerAbstractName)) {
throw new PortletException(getLocalizedString(actionRequest,
"jmsmanager.broker.failUpdateBrokerNoReason", sBrokerName));
}
addInfoMessage(actionRequest, getLocalizedString(actionRequest, "jmsmanager.broker.successUpdateBroker",
sBrokerName));
} catch (PortletException e) {
throw e;
} catch (GBeanNotFoundException e) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.failFindBroker",
sBrokerName, e.getMessage()));
} catch (Exception e) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.failUpdateBroker",
sBrokerName, e.getMessage()));
}
actionResponse.setRenderParameter("mode", "list");
}
protected File resolveConfigurationFile(PortletRequest portletRequest, String filePath) {
ServerInfo serverInfo = PortletManager.getCurrentServer(portletRequest).getServerInfo();
return serverInfo.resolve(filePath);
}
protected void saveConfigurationFile(File brokerConfigFile, String configurationXML) throws IOException {
BufferedWriter writer = null;
try {
writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(brokerConfigFile), "iso-8859-1"));
writer.write(configurationXML);
} finally {
if (writer != null)
try {
writer.close();
} catch (Exception e) {
}
}
}
private void validateBrokerName(AbstractName brokerAbName, ActionRequest actionRequest) throws PortletException {
Configuration brokersConfiguration = PortletManager.getConfigurationManager().getConfiguration(
brokerAbName.getArtifact());
Map<AbstractName, GBeanData> abNameGBeanDataMap = (Map<AbstractName, GBeanData>) brokersConfiguration
.getGBeans();
String sNewBrokerName = brokerAbName.getNameProperty("name");
for (AbstractName abName : abNameGBeanDataMap.keySet()) {
String sGBeanName = abName.getNameProperty("name");
if (sNewBrokerName.equals(sGBeanName)) {
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.invalidBrokerName",
sGBeanName));
}
}
}
private void validateConfigXML(String configXML, ActionRequest actionRequest) throws PortletException {
if (configXML == null || configXML.trim().length() == 0)
throw new PortletException(getLocalizedString(actionRequest, "jmsmanager.broker.invalidBrokerConfig"));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.classgen;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.MethodNode;
import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.control.SourceUnit;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
* @author Paul King
*/
public class ClassCompletionVerifierTest extends TestSupport {
    // The source unit whose ErrorCollector accumulates verification errors,
    // and the verifier under test; both are rebuilt for each test in setUp().
    private SourceUnit source;
    private ClassCompletionVerifier verifier;
    private static final String ABSTRACT_FINAL_CLASS = "AbstractFinalClass";
    private static final String FINAL_INTERFACE = "FinalInterface";
    // Expected error-message fixtures; each must match the verifier's output
    // verbatim (checkErrorMessage does a substring check against them).
    private static final String EXPECTED_CLASS_MODIFIER_ERROR_MESSAGE =
            "The class '" + ABSTRACT_FINAL_CLASS + "' must not be both final and abstract.";
    private static final String EXPECTED_INTERFACE_MODIFIER_ERROR_MESSAGE =
            "The interface '" + FINAL_INTERFACE + "' must not be final. It is by definition abstract.";
    private static final String EXPECTED_INTERFACE_FINAL_METHOD_ERROR_MESSAGE =
            "The method 'java.lang.Object xxx()' from interface 'zzz' must not be final. It is by definition abstract.";
    private static final String EXPECTED_INTERFACE_STATIC_METHOD_ERROR_MESSAGE =
            "The method 'java.lang.Object yyy()' from interface 'zzz' must not be static. Only fields may be static in an interface.";
    private static final String EXPECTED_TRANSIENT_CLASS_ERROR_MESSAGE =
            "The class 'DodgyClass' has an incorrect modifier transient.";
    // can't check synchronized here as it doubles up with ACC_SUPER
    //private static final String EXPECTED_SYNCHRONIZED_CLASS_ERROR_MESSAGE =
    //        "The class 'DodgyClass' has an incorrect modifier synchronized.";
    private static final String EXPECTED_NATIVE_CLASS_ERROR_MESSAGE =
            "The class 'DodgyClass' has an incorrect modifier native.";
    private static final String EXPECTED_VOLATILE_CLASS_ERROR_MESSAGE =
            "The class 'DodgyClass' has an incorrect modifier volatile.";
    private static final String EXPECTED_DUPLICATE_METHOD_ERROR_CLASS_MESSAGE =
            "Repetitive method name/signature for method 'java.lang.Object xxx()' in class 'zzz'.";
    private static final String EXPECTED_DUPLICATE_METHOD_ERROR_INTERFACE_MESSAGE =
            "Repetitive method name/signature for method 'java.lang.Object xxx(java.lang.String)' in interface 'zzz'.";
    // can't check volatile here as it doubles up with bridge
    //private static final String EXPECTED_VOLATILE_METHOD_ERROR_MESSAGE =
    //        "The method 'java.lang.Object vo()' has an incorrect modifier volatile.";
    private static final String EXPECTED_STRICT_METHOD_ERROR_MESSAGE =
            "The method 'java.lang.Object st()' has an incorrect modifier strictfp.";
    private static final String EXPECTED_NATIVE_METHOD_ERROR_MESSAGE =
            "The method 'java.lang.Object na()' has an incorrect modifier native.";
    private static final String EXPECTED_SYNCHRONIZED_METHOD_ERROR_MESSAGE =
            "The method 'java.lang.Object sy()' has an incorrect modifier synchronized.";
    private static final String EXPECTED_PROTECTED_FIELD_ERROR_MESSAGE =
            "The field 'prof' is not 'public static final' but is defined in interface 'zzz'.";
    private static final String EXPECTED_PRIVATE_FIELD_ERROR_MESSAGE =
            "The field 'prif' is not 'public static final' but is defined in interface 'zzz'.";
    private static final String EXPECTED_PROTECTED_METHOD_ERROR_MESSAGE =
            "Method 'prom' is protected but should be public in interface 'zzz'.";
    private static final String EXPECTED_PRIVATE_METHOD_ERROR_MESSAGE =
            "Method 'prim' is private but should be public in interface 'zzz'.";

    // Fresh, empty source unit per test so error counts start at zero.
    protected void setUp() throws Exception {
        super.setUp();
        source = SourceUnit.create("dummy.groovy", "");
        verifier = new ClassCompletionVerifier(source);
    }

    // final alone and abstract alone are fine; the combination must error.
    public void testDetectsFinalAbstractClass() throws Exception {
        checkVisitErrors("FinalClass", ACC_FINAL, false);
        checkVisitErrors("AbstractClass", ACC_ABSTRACT, false);
        checkVisitErrors(ABSTRACT_FINAL_CLASS, ACC_ABSTRACT | ACC_FINAL, true);
        checkErrorMessage(EXPECTED_CLASS_MODIFIER_ERROR_MESSAGE);
    }

    public void testDetectsDuplicateMethodsForClassNoParams() throws Exception {
        checkDetectsDuplicateMethods(0, EXPECTED_DUPLICATE_METHOD_ERROR_CLASS_MESSAGE, Parameter.EMPTY_ARRAY);
    }

    public void testDetectsDuplicateMethodsForInterfaceOneParam() throws Exception {
        Parameter[] stringParam = {new Parameter(ClassHelper.STRING_TYPE, "x")};
        checkDetectsDuplicateMethods(ACC_INTERFACE, EXPECTED_DUPLICATE_METHOD_ERROR_INTERFACE_MESSAGE, stringParam);
    }

    // Adds the same method signature twice and expects the verifier to flag
    // the clash (two errors are expected, per the assertion below).
    private void checkDetectsDuplicateMethods(int modifiers, String expectedErrorMessage, Parameter[] params) {
        ClassNode node = new ClassNode("zzz", modifiers, ClassHelper.OBJECT_TYPE);
        node.addMethod(new MethodNode("xxx", ACC_PUBLIC, ClassHelper.OBJECT_TYPE, params, ClassNode.EMPTY_ARRAY, null));
        node.addMethod(new MethodNode("xxx", ACC_PUBLIC, ClassHelper.OBJECT_TYPE, params, ClassNode.EMPTY_ARRAY, null));
        verifier.visitClass(node);
        checkErrorCount(2);
        checkErrorMessage(expectedErrorMessage);
    }

    public void testDetectsIncorrectOtherModifier() throws Exception {
        // can't check synchronized here as it doubles up with ACC_SUPER
        checkVisitErrors("DodgyClass", ACC_TRANSIENT | ACC_VOLATILE | ACC_NATIVE, true);
        checkErrorMessage(EXPECTED_TRANSIENT_CLASS_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_VOLATILE_CLASS_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_NATIVE_CLASS_ERROR_MESSAGE);
    }

    public void testDetectsFinalAbstractInterface() throws Exception {
        checkVisitErrors(FINAL_INTERFACE, ACC_ABSTRACT | ACC_FINAL | ACC_INTERFACE, true);
        checkErrorMessage(EXPECTED_INTERFACE_MODIFIER_ERROR_MESSAGE);
    }

    public void testDetectsFinalAndStaticMethodsInInterface() throws Exception {
        ClassNode node = new ClassNode("zzz", ACC_ABSTRACT | ACC_INTERFACE, ClassHelper.OBJECT_TYPE);
        node.addMethod(new MethodNode("xxx", ACC_PUBLIC | ACC_FINAL, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        node.addMethod(new MethodNode("yyy", ACC_PUBLIC | ACC_STATIC, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        addDummyConstructor(node);
        verifier.visitClass(node);
        checkErrorCount(2);
        checkErrorMessage(EXPECTED_INTERFACE_FINAL_METHOD_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_INTERFACE_STATIC_METHOD_ERROR_MESSAGE);
    }

    public void testDetectsIncorrectMethodModifiersInInterface() throws Exception {
        // can't check volatile here as it doubles up with bridge
        ClassNode node = new ClassNode("zzz", ACC_ABSTRACT | ACC_INTERFACE, ClassHelper.OBJECT_TYPE);
        node.addMethod(new MethodNode("st", ACC_STRICT, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        node.addMethod(new MethodNode("na", ACC_NATIVE, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        node.addMethod(new MethodNode("sy", ACC_SYNCHRONIZED, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        addDummyConstructor(node);
        verifier.visitClass(node);
        checkErrorCount(3);
        checkErrorMessage(EXPECTED_STRICT_METHOD_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_NATIVE_METHOD_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_SYNCHRONIZED_METHOD_ERROR_MESSAGE);
    }

    public void testDetectsIncorrectMemberVisibilityInInterface() throws Exception {
        ClassNode node = new ClassNode("zzz", ACC_ABSTRACT | ACC_INTERFACE, ClassHelper.OBJECT_TYPE);
        node.addMethod(new MethodNode("prim", ACC_PRIVATE, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        node.addMethod(new MethodNode("prom", ACC_PROTECTED, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        node.addField("prif", ACC_PRIVATE, ClassHelper.OBJECT_TYPE, null);
        node.addField("prof", ACC_PROTECTED, ClassHelper.OBJECT_TYPE, null);
        addDummyConstructor(node);
        verifier.visitClass(node);
        checkErrorCount(4);
        checkErrorMessage(EXPECTED_PROTECTED_FIELD_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_PRIVATE_FIELD_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_PROTECTED_METHOD_ERROR_MESSAGE);
        checkErrorMessage(EXPECTED_PRIVATE_METHOD_ERROR_MESSAGE);
    }

    // The same modifiers that are invalid on interface methods are legal on
    // class methods, so no errors are expected here.
    public void testDetectsCorrectMethodModifiersInClass() throws Exception {
        // can't check volatile here as it doubles up with bridge
        ClassNode node = new ClassNode("zzz", ACC_PUBLIC, ClassHelper.OBJECT_TYPE);
        node.addMethod(new MethodNode("st", ACC_STRICT, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        node.addMethod(new MethodNode("na", ACC_NATIVE, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        node.addMethod(new MethodNode("sy", ACC_SYNCHRONIZED, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
        addDummyConstructor(node);
        verifier.visitClass(node);
        checkErrorCount(0);
    }

    private void addDummyConstructor(ClassNode node) {
        // constructors should not be treated as errors (they have no real meaning for interfaces anyway)
        // NOTE(review): the method added here is named '<clinit>' (the JVM's
        // static-initializer name), presumably so the verifier skips it —
        // confirm against ClassCompletionVerifier if this test is changed.
        node.addMethod(new MethodNode("<clinit>", ACC_PUBLIC | ACC_STATIC, ClassHelper.OBJECT_TYPE, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null));
    }

    // Asserts the exact number of collected errors, with a diagnostic message
    // listing what was actually collected.
    private void checkErrorCount(int count) {
        assertEquals(buildErrorMessage(count), count, source.getErrorCollector().getErrorCount());
    }

    private String buildErrorMessage(int count) {
        StringBuilder sb = new StringBuilder();
        sb.append("Expected ").append(count);
        sb.append(" error messages but found ");
        sb.append(source.getErrorCollector().getErrorCount()).append(":\n");
        sb.append(flattenErrorMessage());
        return sb.toString();
    }

    // Visits a class node with the given name/modifiers and asserts whether
    // the verifier collected any errors.
    private void checkVisitErrors(String name, int modifiers, boolean expectedToFail) {
        ClassNode node = new ClassNode(name, modifiers, ClassHelper.OBJECT_TYPE);
        verifier.visitClass(node);
        assertTrue(source.getErrorCollector().hasErrors() == expectedToFail);
    }

    // Substring check: the expected message must appear somewhere in the
    // concatenation of all collected error messages.
    private void checkErrorMessage(String expectedErrorMessage) {
        assertTrue("Expected an error message but none found.", source.getErrorCollector().hasErrors());
        assertTrue("Expected message to contain <" + expectedErrorMessage +
                "> but was <" + flattenErrorMessage() + ">.",
                flattenErrorMessage().contains(expectedErrorMessage));
    }

    // Writes every collected error into one string; note the errors are
    // emitted in reverse collection order (last error first).
    private String flattenErrorMessage() {
        StringWriter stringWriter = new StringWriter();
        PrintWriter writer = new PrintWriter(stringWriter, true);
        for (int i = source.getErrorCollector().getErrorCount() - 1; i >= 0; i--) {
            source.getErrorCollector().getError(i).write(writer);
        }
        writer.close();
        return stringWriter.toString();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.procedure2.store.wal;
import static java.lang.System.currentTimeMillis;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
public class ProcedureWALLoaderPerformanceEvaluation extends AbstractHBaseTool {
  protected static final HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();

  // Command line options and defaults.
  public static int DEFAULT_NUM_PROCS = 1000000; // 1M
  public static Option NUM_PROCS_OPTION = new Option("procs", true,
      "Total number of procedures. Default: " + DEFAULT_NUM_PROCS);
  public static int DEFAULT_NUM_WALS = 0;
  public static Option NUM_WALS_OPTION = new Option("wals", true,
      "Number of WALs to write. If -ve or 0, uses " + WALProcedureStore.ROLL_THRESHOLD_CONF_KEY +
          " conf to roll the logs. Default: " + DEFAULT_NUM_WALS);
  public static int DEFAULT_STATE_SIZE = 1024; // 1KB
  public static Option STATE_SIZE_OPTION = new Option("state_size", true,
      "Size of serialized state in bytes to write on update. Default: " + DEFAULT_STATE_SIZE
          + " bytes");
  public static int DEFAULT_UPDATES_PER_PROC = 5;
  public static Option UPDATES_PER_PROC_OPTION = new Option("updates_per_proc", true,
      "Number of update states to write for each proc. Default: " + DEFAULT_UPDATES_PER_PROC);
  public static double DEFAULT_DELETE_PROCS_FRACTION = 0.50;
  public static Option DELETE_PROCS_FRACTION_OPTION = new Option("delete_procs_fraction", true,
      "Fraction of procs for which to write delete state. Distribution of procs chosen for "
          + "delete is uniform across all procs. Default: " + DEFAULT_DELETE_PROCS_FRACTION);

  // Parsed option values; populated by processOptions() before run.
  public int numProcs;
  public int updatesPerProc;
  public double deleteProcsFraction;
  public int numWals;
  // The store under measurement; created in setUpProcedureStore().
  private WALProcedureStore store;
  // Payload written on every insert/update; sized by the -state_size option.
  static byte[] serializedState;
/**
 * Minimal loader that simply drains every iterator it is handed, discarding
 * the procedures; only the time spent loading matters to this benchmark.
 */
private static class LoadCounter implements ProcedureStore.ProcedureLoader {
  public LoadCounter() {}

  @Override
  public void setMaxProcId(long maxProcId) {
    // Nothing to record.
  }

  @Override
  public void load(ProcedureIterator procIter) throws IOException {
    drain(procIter);
  }

  @Override
  public void handleCorrupted(ProcedureIterator procIter) throws IOException {
    drain(procIter);
  }

  /** Consumes the iterator to the end without retaining anything. */
  private static void drain(ProcedureIterator procIter) throws IOException {
    while (procIter.hasNext()) {
      procIter.next();
    }
  }
}
@Override
protected void addOptions() {
  // Register every tunable this tool understands, in the same order as before.
  Option[] options = {NUM_PROCS_OPTION, UPDATES_PER_PROC_OPTION, DELETE_PROCS_FRACTION_OPTION,
      NUM_WALS_OPTION, STATE_SIZE_OPTION};
  for (Option option : options) {
    addOption(option);
  }
}
@Override
protected void processOptions(CommandLine cmd) {
  // Pull each tunable off the command line, falling back to the defaults,
  // then derive configuration from the chosen values.
  numProcs = getOptionAsInt(cmd, NUM_PROCS_OPTION.getOpt(), DEFAULT_NUM_PROCS);
  numWals = getOptionAsInt(cmd, NUM_WALS_OPTION.getOpt(), DEFAULT_NUM_WALS);
  serializedState =
      new byte[getOptionAsInt(cmd, STATE_SIZE_OPTION.getOpt(), DEFAULT_STATE_SIZE)];
  updatesPerProc =
      getOptionAsInt(cmd, UPDATES_PER_PROC_OPTION.getOpt(), DEFAULT_UPDATES_PER_PROC);
  deleteProcsFraction =
      getOptionAsDouble(cmd, DELETE_PROCS_FRACTION_OPTION.getOpt(), DEFAULT_DELETE_PROCS_FRACTION);
  setupConf();
}
/**
 * If an explicit WAL count was requested, disables size-based rolling so that
 * rolls happen only when writeWals() forces them.
 */
private void setupConf() {
  boolean manualRollsRequested = numWals > 0;
  if (manualRollsRequested) {
    conf.setLong(WALProcedureStore.ROLL_THRESHOLD_CONF_KEY, Long.MAX_VALUE);
  }
}
/**
 * Creates a fresh WAL procedure store under the test data directory, wiping
 * any logs left over from a previous run before starting it.
 */
public void setUpProcedureStore() throws IOException {
  Path baseDir = UTIL.getDataTestDir();
  Path logDir = new Path(baseDir, "proc-logs");
  FileSystem fs = baseDir.getFileSystem(conf);
  System.out.println("\n\nLogs directory : " + logDir.toString() + "\n\n");
  // Start clean so earlier runs cannot skew the measurements.
  fs.delete(logDir, true);
  store = ProcedureTestingUtility.createWalStore(conf, logDir);
  store.start(1);
  store.recoverLease();
  store.load(new LoadCounter());
}
/**
 * Builds a randomized write sequence over proc ids 1..numProcs. The first
 * occurrence of an id represents its insert state, later occurrences are
 * update states, and a negated id marks its delete state.
 *
 * @return shuffled sequence of proc-id state markers
 */
private List<Integer> shuffleProcWriteSequence() {
  Random rand = new Random();
  List<Integer> sequence = new ArrayList<>();
  Set<Integer> doomedProcs = new HashSet<>();
  // Each proc gets 1 insert + updatesPerProc update slots; a proc picked for
  // deletion gets one extra slot that is negated after the shuffle.
  for (int procId = 1; procId <= numProcs; ++procId) {
    sequence.addAll(Collections.nCopies(updatesPerProc + 1, procId));
    if (rand.nextFloat() < deleteProcsFraction) {
      sequence.add(procId);
      doomedProcs.add(procId);
    }
  }
  Collections.shuffle(sequence);
  // Walk backwards and negate the LAST occurrence of each doomed proc, so its
  // delete lands after all of its insert/update states.
  for (int i = sequence.size() - 1; i >= 0 && !doomedProcs.isEmpty(); --i) {
    int procId = sequence.get(i);
    if (doomedProcs.remove(procId)) {
      sequence.set(i, -procId);
    }
  }
  return sequence;
}
/**
 * Replays the shuffled write sequence against the store: an id seen for the
 * first time is inserted, repeats are updated, and a negated id is deleted.
 * When a fixed WAL count was requested, forces a roll roughly every
 * procStates.size()/numWals entries.
 *
 * @throws IOException if the store rejects a write or a roll fails
 */
private void writeWals() throws IOException {
  List<Integer> procStates = shuffleProcWriteSequence();
  TestProcedure[] procs = new TestProcedure[numProcs + 1]; // 0 is not used.
  // Clamp to at least 1: if numWals exceeds procStates.size() the quotient is
  // 0 and the modulo below would throw ArithmeticException (divide by zero).
  int numProcsPerWal = numWals > 0 ? Math.max(1, procStates.size() / numWals) : Integer.MAX_VALUE;
  long startTime = currentTimeMillis();
  long lastTime = startTime;
  for (int i = 0; i < procStates.size(); ++i) {
    int procId = procStates.get(i);
    if (procId < 0) {
      // Delete state: remove the proc and free its slot.
      store.delete(procs[-procId].getProcId());
      procs[-procId] = null;
    } else if (procs[procId] == null) {
      // First occurrence: insert.
      procs[procId] = new TestProcedure(procId, 0);
      procs[procId].setData(serializedState);
      store.insert(procs[procId], null);
    } else {
      // Subsequent occurrence: update.
      store.update(procs[procId]);
    }
    if (i > 0 && i % numProcsPerWal == 0) {
      long currentTime = currentTimeMillis();
      System.out.println("Forcing WAL roll. Time taken on last WAL: " +
          (currentTime - lastTime) / 1000.0f + " sec");
      store.rollWriterForTesting();
      lastTime = currentTime;
    }
  }
  long timeTaken = currentTimeMillis() - startTime;
  System.out.println("\n\nDone writing WALs.\nNum procs : " + numProcs + "\nTotal time taken : "
      + StringUtils.humanTimeDiff(timeTaken) + "\n\n");
}
/**
 * Restarts the procedure store and times how long it takes to load (replay) the
 * WALs written by {@link #writeWals()}, printing both a human-readable summary and
 * a single script-friendly RESULT line.
 *
 * @param loader receives the procedures recovered during the load
 * @throws IOException if the store fails to restart or load
 */
private void storeRestart(ProcedureStore.ProcedureLoader loader) throws IOException {
  System.out.println("Restarting procedure store to read back the WALs");
  store.stop(false);
  store.start(1);
  store.recoverLease();
  long startTime = currentTimeMillis();
  store.load(loader);
  // Use the same clock source for both endpoints of the measurement; the original
  // mixed currentTimeMillis() with System.currentTimeMillis().
  long timeTaken = currentTimeMillis() - startTime;
  System.out.println("******************************************");
  System.out.println("Load time : " + (timeTaken / 1000.0f) + "sec");
  System.out.println("******************************************");
  System.out.println("Raw format for scripts");
  System.out.println(String.format("RESULT [%s=%s, %s=%s, %s=%s, %s=%s, %s=%s, "
      + "total_time_ms=%s]",
      NUM_PROCS_OPTION.getOpt(), numProcs, STATE_SIZE_OPTION.getOpt(), serializedState.length,
      UPDATES_PER_PROC_OPTION.getOpt(), updatesPerProc, DELETE_PROCS_FRACTION_OPTION.getOpt(),
      deleteProcsFraction, NUM_WALS_OPTION.getOpt(), numWals, timeTaken));
}
/**
 * Stops the procedure store and deletes its WAL directory.
 * Deletion is best-effort: on failure we only warn, since leftover logs
 * merely consume disk space and can be removed manually.
 */
public void tearDownProcedureStore() {
  store.stop(false);
  try {
    store.getFileSystem().delete(store.getWALDir(), true);
  } catch (IOException e) {
    // Not fatal for a perf tool; tell the user where to clean up by hand.
    System.err.println("Error: Couldn't delete log dir. You can delete it manually to free up "
        + "disk space. Location: " + store.getWALDir().toString());
    System.err.println(e.toString());
  }
}
/**
 * Runs the full benchmark cycle: set up a clean store, write the WALs, then
 * restart the store and time the load.
 *
 * @return EXIT_SUCCESS on success, EXIT_FAILURE on I/O error
 */
@Override
protected int doWork() {
  try {
    setUpProcedureStore();
    writeWals();
    storeRestart(new LoadCounter());
    return EXIT_SUCCESS;
  } catch (IOException e) {
    e.printStackTrace();
    return EXIT_FAILURE;
  } finally {
    // Always reclaim the WAL directory, even when the run failed.
    tearDownProcedureStore();
  }
}
/**
 * Command-line entry point: wires the shared test configuration into the tool
 * and delegates argument parsing to {@code run}.
 */
public static void main(String[] args) throws IOException {
  final ProcedureWALLoaderPerformanceEvaluation evaluation =
      new ProcedureWALLoaderPerformanceEvaluation();
  evaluation.setConf(UTIL.getConfiguration());
  evaluation.run(args);
}
}
| |
package com.chriszou.algorithms.stdlib;
/*************************************************************************
* Compilation: javac StdRandom.java
* Execution: java StdRandom
*
* A library of static methods to generate pseudo-random numbers from
* different distributions (bernoulli, uniform, gaussian, discrete,
* and exponential). Also includes a method for shuffling an array.
*
*
* % java StdRandom 5
* seed = 1316600602069
* 59 16.81826 true 8.83954 0
* 32 91.32098 true 9.11026 0
* 35 10.11874 true 8.95396 3
* 92 32.88401 true 8.87089 0
* 72 92.55791 true 9.46241 0
*
* % java StdRandom 5
* seed = 1316600616575
* 96 60.17070 true 8.72821 0
* 79 32.01607 true 8.58159 0
* 81 59.49065 true 9.10423 1
* 96 51.65818 true 9.02102 0
* 99 17.55771 true 8.99762 0
*
* % java StdRandom 5 1316600616575
* seed = 1316600616575
* 96 60.17070 true 8.72821 0
* 79 32.01607 true 8.58159 0
* 81 59.49065 true 9.10423 1
* 96 51.65818 true 9.02102 0
* 99 17.55771 true 8.99762 0
*
*
* Remark
* ------
* - Relies on randomness of nextDouble() method in java.util.Random
* to generate pseudorandom numbers in [0, 1).
*
* - This library allows you to set and get the pseudorandom number seed.
*
* - See http://www.honeylocust.com/RngPack/ for an industrial
* strength random number generator in Java.
*
*************************************************************************/
import java.util.Random;
/**
* <i>Standard random</i>. This class provides methods for generating
* random number from various distributions.
* <p>
* For additional documentation, see <a href="http://introcs.cs.princeton.edu/22library">Section 2.2</a> of
* <i>Introduction to Programming in Java: An Interdisciplinary Approach</i> by Robert Sedgewick and Kevin Wayne.
*
* @author Robert Sedgewick
* @author Kevin Wayne
*/
public final class StdRandom {

    private static Random random;    // pseudo-random number generator
    private static long seed;        // pseudo-random number generator seed

    // static initializer
    static {
        // this is how the seed was set in Java 1.4
        seed = System.currentTimeMillis();
        random = new Random(seed);
    }

    // utility class: don't instantiate
    private StdRandom() { }

    /**
     * Sets the seed of the pseudorandom number generator, making all
     * subsequently generated values reproducible.
     */
    public static void setSeed(long s) {
        seed = s;
        random = new Random(seed);
    }

    /**
     * Returns the seed of the pseudorandom number generator.
     */
    public static long getSeed() {
        return seed;
    }

    /**
     * Returns a real number uniformly in [0, 1).
     */
    public static double uniform() {
        return random.nextDouble();
    }

    /**
     * Returns an integer uniformly between 0 (inclusive) and N (exclusive).
     *
     * @throws IllegalArgumentException if {@code N <= 0}
     */
    public static int uniform(int N) {
        // Explicit check gives a clearer message than Random.nextInt's own.
        if (N <= 0) throw new IllegalArgumentException("argument must be positive: " + N);
        return random.nextInt(N);
    }

    ///////////////////////////////////////////////////////////////////////////
    // STATIC METHODS BELOW RELY ON JAVA.UTIL.RANDOM ONLY INDIRECTLY VIA
    // THE STATIC METHODS ABOVE.
    ///////////////////////////////////////////////////////////////////////////

    /**
     * Returns a real number uniformly in [0, 1).
     */
    public static double random() {
        return uniform();
    }

    /**
     * Returns an int uniformly in [a, b).
     *
     * @throws IllegalArgumentException if {@code b <= a} or if {@code b - a}
     *         overflows an {@code int}
     */
    public static int uniform(int a, int b) {
        // Without this guard, b - a can overflow for extreme ranges and silently
        // produce values outside [a, b).
        if (b <= a || (long) b - a >= Integer.MAX_VALUE) {
            throw new IllegalArgumentException("invalid range: [" + a + ", " + b + ")");
        }
        return a + uniform(b - a);
    }

    /**
     * Returns a real number uniformly in [a, b).
     */
    public static double uniform(double a, double b) {
        return a + uniform() * (b-a);
    }

    /**
     * Returns a boolean, which is true with probability p, and false otherwise.
     *
     * @throws IllegalArgumentException unless {@code 0 <= p <= 1}
     */
    public static boolean bernoulli(double p) {
        // Negated form also rejects NaN.
        if (!(p >= 0.0 && p <= 1.0))
            throw new IllegalArgumentException("probability must be between 0.0 and 1.0: " + p);
        return uniform() < p;
    }

    /**
     * Returns a boolean, which is true with probability .5, and false otherwise.
     */
    public static boolean bernoulli() {
        return bernoulli(0.5);
    }

    /**
     * Returns a real number with a standard Gaussian distribution.
     */
    public static double gaussian() {
        // use the polar form of the Box-Muller transform
        double r, x, y;
        do {
            x = uniform(-1.0, 1.0);
            y = uniform(-1.0, 1.0);
            r = x*x + y*y;
        } while (r >= 1 || r == 0);
        return x * Math.sqrt(-2 * Math.log(r) / r);
        // Remark: y * Math.sqrt(-2 * Math.log(r) / r)
        // is an independent random gaussian
    }

    /**
     * Returns a real number from a Gaussian distribution with the given mean
     * and standard deviation.
     */
    public static double gaussian(double mean, double stddev) {
        return mean + stddev * gaussian();
    }

    /**
     * Returns an integer with a geometric distribution with mean 1/p.
     */
    public static int geometric(double p) {
        // using algorithm given by Knuth
        return (int) Math.ceil(Math.log(uniform()) / Math.log(1.0 - p));
    }

    /**
     * Returns an integer with a Poisson distribution with mean lambda.
     */
    public static int poisson(double lambda) {
        // using algorithm given by Knuth
        // see http://en.wikipedia.org/wiki/Poisson_distribution
        int k = 0;
        double p = 1.0;
        double L = Math.exp(-lambda);
        do {
            k++;
            p *= uniform();
        } while (p >= L);
        return k-1;
    }

    /**
     * Returns a real number with a Pareto distribution with parameter alpha.
     */
    public static double pareto(double alpha) {
        return Math.pow(1 - uniform(), -1.0/alpha) - 1.0;
    }

    /**
     * Returns a real number with a Cauchy distribution.
     */
    public static double cauchy() {
        return Math.tan(Math.PI * (uniform() - 0.5));
    }

    /**
     * Returns a number from a discrete distribution: i with probability a[i].
     * Precondition: array entries are nonnegative and their sum (very nearly) equals 1.0.
     *
     * @throws IllegalArgumentException if an entry is negative or the entries
     *         do not (nearly) sum to 1.0
     */
    public static int discrete(double[] a) {
        final double EPSILON = 1E-14;
        double sum = 0.0;
        for (int i = 0; i < a.length; i++) {
            if (a[i] < 0.0) throw new IllegalArgumentException("array entry " + i + " is negative: " + a[i]);
            sum = sum + a[i];
        }
        if (sum > 1.0 + EPSILON || sum < 1.0 - EPSILON)
            throw new IllegalArgumentException("sum of array entries not equal to one: " + sum);
        // the for loop may not return a value when both r is (nearly) 1.0 and when the
        // cumulative sum is less than 1.0 (as a result of floating-point roundoff error),
        // so retry with a fresh r until a bucket is hit
        while (true) {
            double r = uniform();
            sum = 0.0;
            for (int i = 0; i < a.length; i++) {
                sum = sum + a[i];
                if (sum > r) return i;
            }
        }
    }

    /**
     * Returns a real number from an exponential distribution with rate lambda.
     */
    public static double exp(double lambda) {
        return -Math.log(1 - uniform()) / lambda;
    }

    /**
     * Rearranges the elements of an array in random order (Fisher-Yates).
     */
    public static void shuffle(Object[] a) {
        int N = a.length;
        for (int i = 0; i < N; i++) {
            int r = i + uniform(N-i);     // between i and N-1
            Object temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of a double array in random order.
     */
    public static void shuffle(double[] a) {
        int N = a.length;
        for (int i = 0; i < N; i++) {
            int r = i + uniform(N-i);     // between i and N-1
            double temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of an int array in random order.
     */
    public static void shuffle(int[] a) {
        int N = a.length;
        for (int i = 0; i < N; i++) {
            int r = i + uniform(N-i);     // between i and N-1
            int temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of the subarray a[lo..hi] in random order.
     *
     * @throws IndexOutOfBoundsException unless {@code 0 <= lo <= hi < a.length}
     */
    public static void shuffle(Object[] a, int lo, int hi) {
        if (lo < 0 || lo > hi || hi >= a.length) {
            throw new IndexOutOfBoundsException("Illegal subarray range");
        }
        for (int i = lo; i <= hi; i++) {
            int r = i + uniform(hi-i+1);     // between i and hi
            Object temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of the subarray a[lo..hi] in random order.
     *
     * @throws IndexOutOfBoundsException unless {@code 0 <= lo <= hi < a.length}
     */
    public static void shuffle(double[] a, int lo, int hi) {
        if (lo < 0 || lo > hi || hi >= a.length) {
            throw new IndexOutOfBoundsException("Illegal subarray range");
        }
        for (int i = lo; i <= hi; i++) {
            int r = i + uniform(hi-i+1);     // between i and hi
            double temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of the subarray a[lo..hi] in random order.
     *
     * @throws IndexOutOfBoundsException unless {@code 0 <= lo <= hi < a.length}
     */
    public static void shuffle(int[] a, int lo, int hi) {
        if (lo < 0 || lo > hi || hi >= a.length) {
            throw new IndexOutOfBoundsException("Illegal subarray range");
        }
        for (int i = lo; i <= hi; i++) {
            int r = i + uniform(hi-i+1);     // between i and hi
            int temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Unit test.
     */
    public static void main(String[] args) {
        int N = Integer.parseInt(args[0]);
        if (args.length == 2) StdRandom.setSeed(Long.parseLong(args[1]));
        double[] t = { .5, .3, .1, .1 };
        StdOut.println("seed = " + StdRandom.getSeed());
        for (int i = 0; i < N; i++) {
            StdOut.printf("%2d "  , uniform(100));
            StdOut.printf("%8.5f ", uniform(10.0, 99.0));
            StdOut.printf("%5b "  , bernoulli(.5));
            StdOut.printf("%7.5f ", gaussian(9.0, .2));
            StdOut.printf("%2d "  , discrete(t));
            StdOut.println();
        }
        String[] a = "A B C D E F G".split(" ");
        for (String s : a)
            StdOut.print(s + " ");
        StdOut.println();
    }
}
| |
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* First created by JCasGen Fri Nov 13 19:45:22 MSK 2015 */
package com.textocat.textokit.morph.fs;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.JCasRegistry;
import org.apache.uima.jcas.cas.TOP_Type;
import org.apache.uima.jcas.tcas.Annotation;
import com.textocat.textokit.tokenizer.fstype.Token;
import org.apache.uima.jcas.cas.StringArray;
/**
 * Word annotation exposing a POS tag, a grammeme string array, a lemma, a lemma id,
 * and a reference to the underlying {@link Token}.
 *
 * <p>NOTE: this is JCasGen-generated boilerplate. Do not edit by hand; regenerate
 * from the XML type system descriptor referenced below.</p>
 *
 * Updated by JCasGen Fri Nov 13 19:45:22 MSK 2015
 * XML source: src/main/resources/com/textocat/textokit/morph/morphology-ts.xml
 * @generated */
public class SimplyWord extends Annotation {
  /** UIMA type index id for this annotation type.
   * @generated
   * @ordered
   */
  @SuppressWarnings ("hiding")
  public final static int typeIndexID = JCasRegistry.register(SimplyWord.class);
  /** @generated
   * @ordered
   */
  @SuppressWarnings ("hiding")
  public final static int type = typeIndexID;
  /** @generated
   * @return index of the type
   */
  @Override
  public int getTypeIndexID() {return typeIndexID;}

  /** Never called. Disable default constructor
   * @generated */
  protected SimplyWord() {/* intentionally empty block */}

  /** Internal - constructor used by generator
   * @generated
   * @param addr low level Feature Structure reference
   * @param type the type of this Feature Structure
   */
  public SimplyWord(int addr, TOP_Type type) {
    super(addr, type);
    readObject();
  }

  /** @generated
   * @param jcas JCas to which this Feature Structure belongs
   */
  public SimplyWord(JCas jcas) {
    super(jcas);
    readObject();
  }

  /** @generated
   * @param jcas JCas to which this Feature Structure belongs
   * @param begin offset to the begin spot in the SofA
   * @param end offset to the end spot in the SofA
   */
  public SimplyWord(JCas jcas, int begin, int end) {
    super(jcas);
    setBegin(begin);
    setEnd(end);
    readObject();
  }

  /**
   * <!-- begin-user-doc -->
   * Write your own initialization here
   * <!-- end-user-doc -->
   *
   * @generated modifiable
   */
  private void readObject() {/*default - does nothing empty block */}

  // Each accessor below follows the standard JCasGen pattern: verify the feature is
  // defined in the type system (featOkTst guard), then go through the low-level CAS.

  //*--------------*
  //* Feature: posTag
  /** getter for posTag - gets
   * @generated
   * @return value of the feature
   */
  public String getPosTag() {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_posTag == null)
      jcasType.jcas.throwFeatMissing("posTag", "com.textocat.textokit.morph.fs.SimplyWord");
    return jcasType.ll_cas.ll_getStringValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_posTag);}

  /** setter for posTag - sets
   * @generated
   * @param v value to set into the feature
   */
  public void setPosTag(String v) {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_posTag == null)
      jcasType.jcas.throwFeatMissing("posTag", "com.textocat.textokit.morph.fs.SimplyWord");
    jcasType.ll_cas.ll_setStringValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_posTag, v);}

  //*--------------*
  //* Feature: grammems
  /** getter for grammems - gets
   * @generated
   * @return value of the feature
   */
  public StringArray getGrammems() {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_grammems == null)
      jcasType.jcas.throwFeatMissing("grammems", "com.textocat.textokit.morph.fs.SimplyWord");
    return (StringArray)(jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_grammems)));}

  /** setter for grammems - sets
   * @generated
   * @param v value to set into the feature
   */
  public void setGrammems(StringArray v) {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_grammems == null)
      jcasType.jcas.throwFeatMissing("grammems", "com.textocat.textokit.morph.fs.SimplyWord");
    jcasType.ll_cas.ll_setRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_grammems, jcasType.ll_cas.ll_getFSRef(v));}

  /** indexed getter for grammems - gets an indexed value -
   * @generated
   * @param i index in the array to get
   * @return value of the element at index i
   */
  public String getGrammems(int i) {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_grammems == null)
      jcasType.jcas.throwFeatMissing("grammems", "com.textocat.textokit.morph.fs.SimplyWord");
    jcasType.jcas.checkArrayBounds(jcasType.ll_cas.ll_getRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_grammems), i);
    return jcasType.ll_cas.ll_getStringArrayValue(jcasType.ll_cas.ll_getRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_grammems), i);}

  /** indexed setter for grammems - sets an indexed value -
   * @generated
   * @param i index in the array to set
   * @param v value to set into the array
   */
  public void setGrammems(int i, String v) {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_grammems == null)
      jcasType.jcas.throwFeatMissing("grammems", "com.textocat.textokit.morph.fs.SimplyWord");
    jcasType.jcas.checkArrayBounds(jcasType.ll_cas.ll_getRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_grammems), i);
    jcasType.ll_cas.ll_setStringArrayValue(jcasType.ll_cas.ll_getRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_grammems), i, v);}

  //*--------------*
  //* Feature: lemma
  /** getter for lemma - gets
   * @generated
   * @return value of the feature
   */
  public String getLemma() {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_lemma == null)
      jcasType.jcas.throwFeatMissing("lemma", "com.textocat.textokit.morph.fs.SimplyWord");
    return jcasType.ll_cas.ll_getStringValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_lemma);}

  /** setter for lemma - sets
   * @generated
   * @param v value to set into the feature
   */
  public void setLemma(String v) {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_lemma == null)
      jcasType.jcas.throwFeatMissing("lemma", "com.textocat.textokit.morph.fs.SimplyWord");
    jcasType.ll_cas.ll_setStringValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_lemma, v);}

  //*--------------*
  //* Feature: lemmaId
  /** getter for lemmaId - gets
   * @generated
   * @return value of the feature
   */
  public int getLemmaId() {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_lemmaId == null)
      jcasType.jcas.throwFeatMissing("lemmaId", "com.textocat.textokit.morph.fs.SimplyWord");
    return jcasType.ll_cas.ll_getIntValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_lemmaId);}

  /** setter for lemmaId - sets
   * @generated
   * @param v value to set into the feature
   */
  public void setLemmaId(int v) {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_lemmaId == null)
      jcasType.jcas.throwFeatMissing("lemmaId", "com.textocat.textokit.morph.fs.SimplyWord");
    jcasType.ll_cas.ll_setIntValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_lemmaId, v);}

  //*--------------*
  //* Feature: token
  /** getter for token - gets
   * @generated
   * @return value of the feature
   */
  public Token getToken() {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_token == null)
      jcasType.jcas.throwFeatMissing("token", "com.textocat.textokit.morph.fs.SimplyWord");
    return (Token)(jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_token)));}

  /** setter for token - sets
   * @generated
   * @param v value to set into the feature
   */
  public void setToken(Token v) {
    if (SimplyWord_Type.featOkTst && ((SimplyWord_Type)jcasType).casFeat_token == null)
      jcasType.jcas.throwFeatMissing("token", "com.textocat.textokit.morph.fs.SimplyWord");
    jcasType.ll_cas.ll_setRefValue(addr, ((SimplyWord_Type)jcasType).casFeatCode_token, jcasType.ll_cas.ll_getFSRef(v));}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.cpp;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.actions.AbstractAction;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionExecutionException;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Actions;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.actions.Root;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.rules.cpp.LinkerInputs.LibraryToLink;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
/**
 * Creates mangled symlinks in the solib directory for all shared libraries.
 * Libraries that have a potential to contain SONAME field rely on the mangled
 * symlink to the parent directory instead.
 *
 * Such symlinks are used by the linker to ensure that all rpath entries can be
 * specified relative to the $ORIGIN.
 */
public final class SolibSymlinkAction extends AbstractAction {
  // The shared library being symlinked (sole input).
  private final Artifact library;
  // Absolute path the symlink will point at.
  private final Path target;
  // The mangled symlink artifact (sole output).
  private final Artifact symlink;

  private SolibSymlinkAction(ActionOwner owner, Artifact library, Artifact symlink) {
    super(owner, ImmutableList.of(library), ImmutableList.of(symlink));
    Preconditions.checkArgument(Link.SHARED_LIBRARY_FILETYPES.matches(library.getFilename()));
    this.library = Preconditions.checkNotNull(library);
    this.symlink = Preconditions.checkNotNull(symlink);
    // NOTE(review): target is assigned unconditionally here from a non-null library,
    // so the target != null checks below appear to be defensive/legacy — confirm
    // before simplifying.
    this.target = library.getPath();
  }

  @Override
  protected void deleteOutputs(Path execRoot) throws IOException {
    // Do not delete outputs if action does not intend to do anything.
    if (target != null) {
      super.deleteOutputs(execRoot);
    }
  }

  /**
   * Creates the parent directory chain and then the symlink itself; any I/O
   * failure is surfaced as an ActionExecutionException attributed to this action.
   */
  @Override
  public void execute(
      ActionExecutionContext actionExecutionContext) throws ActionExecutionException {
    Path mangledPath = symlink.getPath();
    try {
      FileSystemUtils.createDirectoryAndParents(mangledPath.getParentDirectory());
      mangledPath.createSymbolicLink(target);
    } catch (IOException e) {
      throw new ActionExecutionException("failed to create _solib symbolic link '"
          + symlink.prettyPrint() + "' to target '" + target + "'", e, this, false);
    }
  }

  @Override
  public Artifact getPrimaryInput() {
    return library;
  }

  @Override
  public Artifact getPrimaryOutput() {
    return symlink;
  }

  @Override
  public ResourceSet estimateResourceConsumption(Executor executor) {
    // Creating one symlink is essentially free.
    return ResourceSet.createWithRamCpuIo(/*memoryMb=*/0, /*cpuUsage=*/0, /*ioUsage=*/0.0);
  }

  /**
   * Action cache key: covers the symlink location and its target path, so the
   * action re-executes if either moves.
   */
  @Override
  protected String computeKey() {
    Fingerprint f = new Fingerprint();
    f.addPath(symlink.getPath());
    if (target != null) {
      f.addPath(target);
    }
    return f.hexDigestAndReset();
  }

  @Override
  public String getMnemonic() { return "SolibSymlink"; }

  @Override
  public String describeStrategy(Executor executor) {
    return "local";
  }

  // Returning null here presumably suppresses the progress message for this
  // action — confirm against the AbstractAction contract.
  @Override
  protected String getRawProgressMessage() { return null; }

  /**
   * Replaces shared library artifact with mangled symlink and creates related
   * symlink action. For artifacts that should retain filename (e.g. libraries
   * with SONAME tag), link is created to the parent directory instead.
   *
   * This action is performed to minimize number of -rpath entries used during
   * linking process (by essentially "collecting" as many shared libraries as
   * possible in the single directory), since we will be paying quadratic price
   * for each additional entry on the -rpath.
   *
   * @param ruleContext rule context, that requested symlink.
   * @param library Shared library artifact that needs to be mangled.
   * @param preserveName whether to preserve the name of the library
   * @param prefixConsumer whether to prefix the output artifact name with the label of the
   *     consumer
   * @return mangled symlink artifact.
   */
  public static LibraryToLink getDynamicLibrarySymlink(final RuleContext ruleContext,
      final Artifact library,
      boolean preserveName,
      boolean prefixConsumer,
      BuildConfiguration configuration) {
    PathFragment mangledName = getMangledName(
        ruleContext, library.getRootRelativePath(), preserveName, prefixConsumer,
        configuration.getFragment(CppConfiguration.class));
    return getDynamicLibrarySymlinkInternal(ruleContext, library, mangledName, configuration);
  }

  /**
   * Version of {@link #getDynamicLibrarySymlink} for the special case of C++ runtime libraries.
   * These are handled differently than other libraries: neither their names nor directories are
   * mangled, i.e. libstdc++.so.6 is symlinked from _solib_[arch]/libstdc++.so.6
   */
  public static LibraryToLink getCppRuntimeSymlink(RuleContext ruleContext, Artifact library,
      String solibDirOverride, BuildConfiguration configuration) {
    PathFragment solibDir = new PathFragment(solibDirOverride != null
        ? solibDirOverride
        : configuration.getFragment(CppConfiguration.class).getSolibDirectory());
    PathFragment symlinkName = solibDir.getRelative(library.getRootRelativePath().getBaseName());
    return getDynamicLibrarySymlinkInternal(ruleContext, library, symlinkName, configuration);
  }

  /**
   * Internal implementation that takes a pre-determined symlink name; supports both the
   * generic {@link #getDynamicLibrarySymlink} and the specialized {@link #getCppRuntimeSymlink}.
   */
  private static LibraryToLink getDynamicLibrarySymlinkInternal(RuleContext ruleContext,
      Artifact library, PathFragment symlinkName, BuildConfiguration configuration) {
    Preconditions.checkArgument(Link.SHARED_LIBRARY_FILETYPES.matches(library.getFilename()));
    Preconditions.checkArgument(!library.getRootRelativePath().getSegment(0).startsWith("_solib_"));
    // Ignore libraries that are already represented by the symlinks.
    Root root = configuration.getBinDirectory();
    Artifact symlink = ruleContext.getShareableArtifact(symlinkName, root);
    ruleContext.registerAction(
        new SolibSymlinkAction(ruleContext.getActionOwner(), library, symlink));
    return LinkerInputs.solibLibraryToLink(symlink, library);
  }

  /**
   * Returns the name of the symlink that will be created for a library, given
   * its name.
   *
   * @param ruleContext rule context that requests symlink
   * @param libraryPath the root-relative path of the library
   * @param preserveName true if filename should be preserved
   * @param prefixConsumer true if the result should be prefixed with the label of the consumer
   * @return root relative path name
   */
  public static PathFragment getMangledName(RuleContext ruleContext,
      PathFragment libraryPath,
      boolean preserveName,
      boolean prefixConsumer,
      CppConfiguration cppConfiguration) {
    String escapedRulePath = Actions.escapedPath(
        "_" + ruleContext.getLabel());
    String soname = getDynamicLibrarySoname(libraryPath, preserveName);
    PathFragment solibDir = new PathFragment(cppConfiguration.getSolibDirectory());
    if (preserveName) {
      // Name preserved: mangle the directory instead, so the filename (and thus any
      // embedded SONAME) stays intact.
      String escapedLibraryPath =
          Actions.escapedPath("_" + libraryPath.getParentDirectory().getPathString());
      PathFragment mangledDir = solibDir.getRelative(prefixConsumer
          ? escapedRulePath + "__" + escapedLibraryPath
          : escapedLibraryPath);
      return mangledDir.getRelative(soname);
    } else {
      return solibDir.getRelative(prefixConsumer
          ? escapedRulePath + "__" + soname
          : soname);
    }
  }

  /**
   * Compute the SONAME to use for a dynamic library. This name is basically the
   * name of the shared library in its final symlinked location.
   *
   * @param libraryPath name of the shared library that needs to be mangled
   * @param preserveName true if filename should be preserved, false - mangled
   * @return soname to embed in the dynamic library
   */
  public static String getDynamicLibrarySoname(PathFragment libraryPath,
      boolean preserveName) {
    String mangledName;
    if (preserveName) {
      mangledName = libraryPath.getBaseName();
    } else {
      mangledName = "lib" + Actions.escapedPath(libraryPath.getPathString());
    }
    return mangledName;
  }

  @Override
  public boolean shouldReportPathPrefixConflict(Action action) {
    return false; // Always ignore path prefix conflict for the SolibSymlinkAction.
  }
}
| |
package eu.uqasar.web.dashboard.widget.reportingwidget;
/*
* #%L
* U-QASAR
* %%
* Copyright (C) 2012 - 2015 U-QASAR Consortium
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.ajax.markup.html.form.AjaxSubmitLink;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.markup.html.panel.GenericPanel;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.StringResourceModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import ro.fortsoft.wicket.dashboard.Dashboard;
import ro.fortsoft.wicket.dashboard.Widget;
import ro.fortsoft.wicket.dashboard.web.DashboardContext;
import ro.fortsoft.wicket.dashboard.web.DashboardContextAware;
import ro.fortsoft.wicket.dashboard.web.DashboardPanel;
import ro.fortsoft.wicket.dashboard.web.WidgetPanel;
import eu.uqasar.model.dashboard.DbDashboard;
import eu.uqasar.service.dataadapter.SonarDataService;
import eu.uqasar.web.dashboard.DashboardViewPage;
/**
*
*
*
*/
/**
 * Settings panel for a {@link ReportingWidget}. Lets the user pick a data cube
 * ("jira" or "sonarcube"), a chart type, and a dynamic list of rule /
 * additional-rule pairs, which are combined into an aggregation URL and
 * persisted in the widget settings on submit.
 */
public class ReportingSettingsPanel extends GenericPanel<ReportingWidget> implements DashboardContextAware {

    private static final long serialVersionUID = 1L;

    /** Injected via {@link #setDashboardContext(DashboardContext)}; transient so it is not serialized with the page. */
    private transient DashboardContext dashboardContext;

    /** Selected data cube; defaults to "jira" when no setting is stored. */
    private String cube;
    SonarDataService dataService;
    List<String> projects = new ArrayList<String>();
    /** Maps a rule name to the additional rules that may be combined with it. */
    private Map<String, List<String>> rulesMap = new HashMap<String, List<String>>();
    private String selectedRule;
    private String selectedAdditionalRule;
    private String chartType;
    private ListView<Rule> rulesView;
    private String urlToLoad;
    private WebMarkupContainer ruleWebMrkUpContainer;
    /** Rows currently shown in the dynamic rule list; grows via the "add rule" link. */
    private List<Rule> proposedRules = new ArrayList<Rule>();
    // NOTE(review): these fields always point at the components of the LAST rendered
    // row; they are kept assigned for backward compatibility, but the Ajax behaviors
    // below now capture per-row locals instead.
    DropDownChoice<String> rules;
    DropDownChoice<String> additionalRules;

    public ReportingSettingsPanel(String id, IModel<ReportingWidget> model) {
        super(id, model);
        setOutputMarkupPlaceholderTag(true);

        final ReportingWidget qualityWidget = model.getObject();
        Form<Widget> form = new Form<Widget>("form");

        // Restore previously saved settings, falling back to sensible defaults.
        cube = getModelObject().getSettings().get("cube");
        if (cube == null) {
            cube = "jira";
        }
        selectedAdditionalRule = getModelObject().getSettings().get("selectedAdditionalRule");
        selectedRule = getModelObject().getSettings().get("selectedRule");
        chartType = getModelObject().getSettings().get("chartType");
        if (chartType == null) {
            chartType = ReportingWidget.COLUMN_TYPE;
        }

        try {
            InitialContext ic = new InitialContext();
            dataService = (SonarDataService) ic.lookup("java:module/SonarDataService");
            projects = dataService.getSonarProjects();
        } catch (NamingException e) {
            // TODO(review): replace with proper logging; a failed lookup leaves 'projects' empty.
            e.printStackTrace();
        }

        // rule -> additional rules available for the loaded projects
        rulesMap = qualityWidget.getRulesMap(projects);

        // Cube selection for report generation.
        List<String> cubes = Arrays.asList("jira", "sonarcube");
        final DropDownChoice<String> selectedCubes = new DropDownChoice<String>("cube",
                new PropertyModel<String>(this, "cube"), cubes);
        selectedCubes.setRequired(true);
        form.add(selectedCubes);

        // Chart type selection. BUGFIX: the original re-read the raw "chartType" setting
        // here, which silently undid the COLUMN_TYPE default applied above.
        DropDownChoice<String> choice = new DropDownChoice<String>("chartType",
                new PropertyModel<String>(this, "chartType"), ReportingWidget.TYPES);
        form.add(choice);

        // Separate void form hosting the dynamic rule list and its container.
        Form<Void> formVoid = new Form<>("formVoid");
        ruleWebMrkUpContainer = new WebMarkupContainer("ruleContainer", new Model<Rule>());
        ruleWebMrkUpContainer.setOutputMarkupId(true);
        formVoid.add(ruleWebMrkUpContainer);

        ruleWebMrkUpContainer.add(rulesView = new ListView<Rule>("rulesListView", Model.ofList(proposedRules)) {
            private static final long serialVersionUID = 1L;

            @Override
            protected void onConfigure() {
                super.onConfigure();
                // Re-sync the list model so rows added since the last render are shown.
                rulesView.setModelObject(proposedRules);
            }

            @Override
            protected void populateItem(ListItem<Rule> item) {
                final Rule proposedRule = item.getModelObject();

                // Choices for the rule dropdown: all known rule names.
                IModel<List<? extends String>> ruleChoices = new AbstractReadOnlyModel<List<? extends String>>() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public List<String> getObject() {
                        return new ArrayList<String>(rulesMap.keySet());
                    }
                };
                // Choices for the additional-rule dropdown depend on the rule picked in THIS row.
                IModel<List<? extends String>> additionalRuleChoices = new AbstractReadOnlyModel<List<? extends String>>() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public List<String> getObject() {
                        List<String> models = rulesMap.get(proposedRule.getSelectedRule());
                        if (models == null) {
                            models = Collections.emptyList();
                        }
                        return models;
                    }
                };

                // BUGFIX: use per-row locals so the Ajax behaviors refresh THIS row's
                // components. The original captured the shared 'rules'/'additionalRules'
                // fields, which always referenced the last rendered row.
                final DropDownChoice<String> ruleChoice = new DropDownChoice<String>("rules",
                        new PropertyModel<String>(proposedRule, "selectedRule"), ruleChoices);
                ruleChoice.setOutputMarkupId(true);
                ruleChoice.setNullValid(true);
                ruleChoice.setRequired(true);
                ruleChoice.setMarkupId("rules" + item.getIndex()); // stable id per row
                item.add(ruleChoice);
                rules = ruleChoice; // keep legacy field assignment

                final DropDownChoice<String> additionalChoice = new DropDownChoice<String>("additionalRules",
                        new PropertyModel<String>(proposedRule, "selectedAdditionalRule"), additionalRuleChoices);
                additionalChoice.setOutputMarkupId(true);
                additionalChoice.setMarkupId("additionalRules" + item.getIndex()); // stable id per row
                additionalChoice.setNullValid(true);
                additionalChoice.setRequired(true);
                item.add(additionalChoice);
                additionalRules = additionalChoice; // keep legacy field assignment

                // Changing either dropdown re-renders both, so the dependent
                // additional-rule choices stay in sync with the selected rule.
                ruleChoice.add(new AjaxFormComponentUpdatingBehavior("onchange") {
                    private static final long serialVersionUID = 1L;

                    @Override
                    protected void onUpdate(AjaxRequestTarget target) {
                        target.add(additionalChoice);
                        target.add(ruleChoice);
                    }
                });
                additionalChoice.add(new AjaxFormComponentUpdatingBehavior("onchange") {
                    private static final long serialVersionUID = 1L;

                    @Override
                    protected void onUpdate(AjaxRequestTarget target) {
                        target.add(additionalChoice);
                        target.add(ruleChoice);
                    }
                });
            }
        });

        // Link that appends a fresh empty rule row to the list.
        AjaxSubmitLink addRuleButton = new AjaxSubmitLink("add.rule", formVoid) {
            private static final long serialVersionUID = 1L;

            @Override
            protected void onError(AjaxRequestTarget target, Form<?> formVoid) {
                target.add(formVoid);
            }

            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> formVoid) {
                addNewRuleToList(target, formVoid);
            }
        };
        addRuleButton.add(new Label("button.add.save",
                new StringResourceModel("button.add.save", this, Model.of(proposedRules))));
        formVoid.add(addRuleButton);
        rulesView.setOutputMarkupId(true);
        form.add(formVoid);

        // Persists all settings on the dashboard and navigates back to the dashboard view.
        form.add(new AjaxSubmitLink("submit") {
            private static final long serialVersionUID = 1L;

            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                Dashboard dashboard = findParent(DashboardPanel.class).getDashboard();
                if (dashboard != null && dashboardContext != null) {
                    // Build the aggregation URL from the selected rule pairs.
                    urlToLoad = createRule();
                    getModelObject().getSettings().put("urlToLoad", urlToLoad);
                    getModelObject().getSettings().put("cube", cube);
                    getModelObject().getSettings().put("selectedAdditionalRule", selectedAdditionalRule);
                    getModelObject().getSettings().put("selectedRule", selectedRule);
                    getModelObject().getSettings().put("chartType", chartType);
                    dashboardContext.getDashboardPersiter().save(dashboard);
                    hideSettingPanel(target);
                    WidgetPanel widgetPanel = findParent(WidgetPanel.class);
                    ReportingWidget tasksWidget = (ReportingWidget) widgetPanel.getModelObject();
                    tasksWidget.setTitle("Reporting Widget For " + cube + " cube");
                    ReportingWidgetView widgetView = (ReportingWidgetView) widgetPanel.getWidgetView();
                    target.add(widgetView);
                    PageParameters params = new PageParameters();
                    DbDashboard dbdb = (DbDashboard) dashboard;
                    params.add("id", dbdb.getId());
                    setResponsePage(DashboardViewPage.class, params);
                }
            }

            @Override
            protected void onError(AjaxRequestTarget target, Form<?> form) {
            }
        });

        form.add(new AjaxLink<Void>("cancel") {
            private static final long serialVersionUID = 1L;

            @Override
            public void onClick(AjaxRequestTarget target) {
                hideSettingPanel(target);
            }
        });
        add(form);
    }

    @Override
    public void setDashboardContext(DashboardContext dashboardContext) {
        this.dashboardContext = dashboardContext;
    }

    /** Hides this panel and repaints it via Ajax. */
    private void hideSettingPanel(AjaxRequestTarget target) {
        setVisible(false);
        target.add(this);
    }

    /**
     * Appends an empty rule row and repaints the list container and hosting form.
     *
     * @param target Ajax request target used for the repaint
     * @param form   the void form hosting the rule list
     */
    private void addNewRuleToList(AjaxRequestTarget target, Form<?> form) {
        proposedRules.add(new Rule("", ""));
        target.add(ruleWebMrkUpContainer);
        target.add(form);
    }

    /**
     * Builds the aggregation query URL from the completed rule rows.
     * A "drilldown" rule contributes {@code drilldown=<additional>}; any other
     * rule contributes {@code cut=<rule>:<additional>}. Rows with a missing
     * rule or additional rule are skipped.
     *
     * BUGFIX: the original had a dead condition (a rule equal to "drilldown"
     * can never also be empty), could NPE on null selections (the dropdowns
     * are nullValid), and emitted a stray '&' directly after the '?'.
     *
     * @return URL of the form {@code /aggregate?cut=a:b&drilldown=c}
     */
    private String createRule() {
        StringBuilder url = new StringBuilder("/aggregate?");
        boolean first = true;
        for (Rule rule : proposedRules) {
            String selected = rule.getSelectedRule();
            String additional = rule.getSelectedAdditionalRule();
            // Skip incomplete rows: both values must be present.
            if (selected == null || selected.isEmpty() || additional == null || additional.isEmpty()) {
                continue;
            }
            if (first) {
                first = false;
            } else {
                url.append('&');
            }
            if (selected.equalsIgnoreCase("drilldown")) {
                url.append("drilldown=").append(additional);
            } else {
                url.append("cut=").append(selected).append(':').append(additional);
            }
        }
        return url.toString();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.