gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nutch.tools; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.text.SimpleDateFormat; import java.util.HashMap; import java.util.Map.Entry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.nutch.crawl.CrawlDatum; import org.apache.nutch.crawl.Generator; import org.apache.nutch.crawl.URLPartitioner; import org.apache.nutch.net.URLFilters; import org.apache.nutch.net.URLNormalizers; import org.apache.nutch.scoring.ScoringFilters; 
import org.apache.nutch.util.NutchConfiguration; import org.apache.nutch.util.NutchJob; import org.apache.nutch.util.TimingUtil; /** * This tool generates fetchlists (segments to be fetched) from plain text files * containing one URL per line. It's useful when arbitrary URL-s need to be * fetched without adding them first to the CrawlDb, or during testing. */ public class FreeGenerator extends Configured implements Tool { private static final Logger LOG = LoggerFactory .getLogger(MethodHandles.lookup().lookupClass()); private static final String FILTER_KEY = "free.generator.filter"; private static final String NORMALIZE_KEY = "free.generator.normalize"; public static class FG { public static class FGMapper extends Mapper<WritableComparable<?>, Text, Text, Generator.SelectorEntry> { private URLNormalizers normalizers = null; private URLFilters filters = null; private ScoringFilters scfilters; private CrawlDatum datum = new CrawlDatum(); private Text url = new Text(); private int defaultInterval = 0; Generator.SelectorEntry entry = new Generator.SelectorEntry(); @Override public void setup(Mapper<WritableComparable<?>, Text, Text, Generator.SelectorEntry>.Context context) { Configuration conf = context.getConfiguration(); defaultInterval = conf.getInt("db.fetch.interval.default", 0); scfilters = new ScoringFilters(conf); if (conf.getBoolean(FILTER_KEY, false)) { filters = new URLFilters(conf); } if (conf.getBoolean(NORMALIZE_KEY, false)) { normalizers = new URLNormalizers(conf, URLNormalizers.SCOPE_INJECT); } } @Override public void map(WritableComparable<?> key, Text value, Context context) throws IOException, InterruptedException { // value is a line of text String urlString = value.toString(); try { if (normalizers != null) { urlString = normalizers.normalize(urlString, URLNormalizers.SCOPE_INJECT); } if (urlString != null && filters != null) { urlString = filters.filter(urlString); } if (urlString != null) { url.set(urlString); scfilters.injectedScore(url, 
datum); } } catch (Exception e) { LOG.warn("Error adding url '{}', skipping: {}", value, StringUtils.stringifyException(e)); return; } if (urlString == null) { LOG.debug("- skipping {}", value); return; } entry.datum = datum; entry.url = url; // https://issues.apache.org/jira/browse/NUTCH-1430 entry.datum.setFetchInterval(defaultInterval); context.write(url, entry); } } public static class FGReducer extends Reducer<Text, Generator.SelectorEntry, Text, CrawlDatum> { @Override public void reduce(Text key, Iterable<Generator.SelectorEntry> values, Context context) throws IOException, InterruptedException { // pick unique urls from values - discard the reduce key due to hash // collisions HashMap<Text, CrawlDatum> unique = new HashMap<>(); for (Generator.SelectorEntry entry : values) { unique.put(entry.url, entry.datum); } // output unique urls for (Entry<Text, CrawlDatum> e : unique.entrySet()) { context.write(e.getKey(), e.getValue()); } } } } @Override public int run(String[] args) throws Exception { if (args.length < 2) { System.err.println( "Usage: FreeGenerator <inputDir> <segmentsDir> [-filter] [-normalize] [-numFetchers <n>]"); System.err .println("\tinputDir\tinput directory containing one or more input files."); System.err .println("\t \tEach text file contains a list of URLs, one URL per line"); System.err .println("\tsegmentsDir\toutput directory, where new segment will be created"); System.err.println("\t-filter \trun current URLFilters on input URLs"); System.err .println("\t-normalize\trun current URLNormalizers on input URLs"); System.err.println( "\t-numFetchers <n>\tnumber of generated fetch lists, determines number of fetcher tasks"); return -1; } boolean filter = false; boolean normalize = false; int numFetchers = -1; if (args.length > 2) { for (int i = 2; i < args.length; i++) { if (args[i].equals("-filter")) { filter = true; } else if (args[i].equals("-normalize")) { normalize = true; } else if ("-numFetchers".equals(args[i])) { numFetchers = 
Integer.parseInt(args[i + 1]); i++; } else { LOG.error("Unknown argument: " + args[i] + ", exiting ..."); return -1; } } } SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); long start = System.currentTimeMillis(); LOG.info("FreeGenerator: starting at " + sdf.format(start)); Job job = NutchJob.getInstance(getConf()); Configuration conf = job.getConfiguration(); conf.setBoolean(FILTER_KEY, filter); conf.setBoolean(NORMALIZE_KEY, normalize); FileInputFormat.addInputPath(job, new Path(args[0])); job.setInputFormatClass(TextInputFormat.class); job.setJarByClass(FG.class); job.setMapperClass(FG.FGMapper.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(Generator.SelectorEntry.class); job.setPartitionerClass(URLPartitioner.class); job.setReducerClass(FG.FGReducer.class); String segName = Generator.generateSegmentName(); if (numFetchers == -1) { /* for politeness create exactly one partition per fetch task */ numFetchers = Integer.parseInt(conf.get("mapreduce.job.maps")); } if ("local".equals(conf.get("mapreduce.framework.name")) && numFetchers != 1) { // override LOG.info( "FreeGenerator: running in local mode, generating exactly one partition."); numFetchers = 1; } job.setNumReduceTasks(numFetchers); job.setOutputFormatClass(SequenceFileOutputFormat.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(CrawlDatum.class); job.setSortComparatorClass(Generator.HashComparator.class); FileOutputFormat.setOutputPath(job, new Path(args[1], new Path(segName, CrawlDatum.GENERATE_DIR_NAME))); try { boolean success = job.waitForCompletion(true); if (!success) { String message = "FreeGenerator job did not succeed, job status:" + job.getStatus().getState() + ", reason: " + job.getStatus().getFailureInfo(); LOG.error(message); throw new RuntimeException(message); } } catch (IOException | InterruptedException | ClassNotFoundException e) { LOG.error("FAILED: " + StringUtils.stringifyException(e)); return -1; } long end = 
System.currentTimeMillis(); LOG.info("FreeGenerator: finished at " + sdf.format(end) + ", elapsed: " + TimingUtil.elapsedTime(start, end)); return 0; } public static void main(String[] args) throws Exception { int res = ToolRunner.run(NutchConfiguration.create(), new FreeGenerator(), args); System.exit(res); } }
/* * Copyright 2012-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.logging.logback; import java.net.URL; import java.security.CodeSource; import java.security.ProtectionDomain; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.LoggerContext; import ch.qos.logback.classic.joran.JoranConfigurator; import ch.qos.logback.classic.jul.LevelChangePropagator; import ch.qos.logback.classic.turbo.TurboFilter; import ch.qos.logback.classic.util.ContextInitializer; import ch.qos.logback.core.joran.spi.JoranException; import ch.qos.logback.core.spi.FilterReply; import ch.qos.logback.core.status.Status; import org.slf4j.ILoggerFactory; import org.slf4j.Logger; import org.slf4j.Marker; import org.slf4j.impl.StaticLoggerBinder; import org.springframework.boot.logging.LogFile; import org.springframework.boot.logging.LogLevel; import org.springframework.boot.logging.LoggerConfiguration; import org.springframework.boot.logging.LoggingInitializationContext; import org.springframework.boot.logging.LoggingSystem; import org.springframework.boot.logging.Slf4JLoggingSystem; import org.springframework.util.Assert; import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; /** * {@link LoggingSystem} for <a href="http://logback.qos.ch">logback</a>. 
* * @author Phillip Webb * @author Dave Syer * @author Andy Wilkinson * @author Ben Hale */ public class LogbackLoggingSystem extends Slf4JLoggingSystem { private static final String CONFIGURATION_FILE_PROPERTY = "logback.configurationFile"; private static final LogLevels<Level> LEVELS = new LogLevels<>(); static { LEVELS.map(LogLevel.TRACE, Level.TRACE); LEVELS.map(LogLevel.TRACE, Level.ALL); LEVELS.map(LogLevel.DEBUG, Level.DEBUG); LEVELS.map(LogLevel.INFO, Level.INFO); LEVELS.map(LogLevel.WARN, Level.WARN); LEVELS.map(LogLevel.ERROR, Level.ERROR); LEVELS.map(LogLevel.FATAL, Level.ERROR); LEVELS.map(LogLevel.OFF, Level.OFF); } private static final TurboFilter FILTER = new TurboFilter() { @Override public FilterReply decide(Marker marker, ch.qos.logback.classic.Logger logger, Level level, String format, Object[] params, Throwable t) { return FilterReply.DENY; } }; public LogbackLoggingSystem(ClassLoader classLoader) { super(classLoader); } @Override protected String[] getStandardConfigLocations() { return new String[] { "logback-test.groovy", "logback-test.xml", "logback.groovy", "logback.xml" }; } @Override public void beforeInitialize() { LoggerContext loggerContext = getLoggerContext(); if (isAlreadyInitialized(loggerContext)) { return; } super.beforeInitialize(); loggerContext.getTurboFilterList().add(FILTER); } @Override public void initialize(LoggingInitializationContext initializationContext, String configLocation, LogFile logFile) { LoggerContext loggerContext = getLoggerContext(); if (isAlreadyInitialized(loggerContext)) { return; } loggerContext.getTurboFilterList().remove(FILTER); super.initialize(initializationContext, configLocation, logFile); markAsInitialized(loggerContext); if (StringUtils.hasText(System.getProperty(CONFIGURATION_FILE_PROPERTY))) { getLogger(LogbackLoggingSystem.class.getName()).warn( "Ignoring '" + CONFIGURATION_FILE_PROPERTY + "' system property. 
" + "Please use 'logging.config' instead."); } } @Override protected void loadDefaults(LoggingInitializationContext initializationContext, LogFile logFile) { LoggerContext context = getLoggerContext(); stopAndReset(context); LogbackConfigurator configurator = new LogbackConfigurator(context); context.putProperty("LOG_LEVEL_PATTERN", initializationContext.getEnvironment().resolvePlaceholders( "${logging.pattern.level:${LOG_LEVEL_PATTERN:%5p}}")); new DefaultLogbackConfiguration(initializationContext, logFile) .apply(configurator); context.setPackagingDataEnabled(true); } @Override protected void loadConfiguration(LoggingInitializationContext initializationContext, String location, LogFile logFile) { super.loadConfiguration(initializationContext, location, logFile); LoggerContext loggerContext = getLoggerContext(); stopAndReset(loggerContext); try { configureByResourceUrl(initializationContext, loggerContext, ResourceUtils.getURL(location)); } catch (Exception ex) { throw new IllegalStateException( "Could not initialize Logback logging from " + location, ex); } List<Status> statuses = loggerContext.getStatusManager().getCopyOfStatusList(); StringBuilder errors = new StringBuilder(); for (Status status : statuses) { if (status.getLevel() == Status.ERROR) { errors.append(errors.length() > 0 ? 
String.format("%n") : ""); errors.append(status.toString()); } } if (errors.length() > 0) { throw new IllegalStateException( String.format("Logback configuration error detected: %n%s", errors)); } } private void configureByResourceUrl( LoggingInitializationContext initializationContext, LoggerContext loggerContext, URL url) throws JoranException { if (url.toString().endsWith("xml")) { JoranConfigurator configurator = new SpringBootJoranConfigurator( initializationContext); configurator.setContext(loggerContext); configurator.doConfigure(url); } else { new ContextInitializer(loggerContext).configureByResource(url); } } private void stopAndReset(LoggerContext loggerContext) { loggerContext.stop(); loggerContext.reset(); if (isBridgeHandlerAvailable()) { addLevelChangePropagator(loggerContext); } } private void addLevelChangePropagator(LoggerContext loggerContext) { LevelChangePropagator levelChangePropagator = new LevelChangePropagator(); levelChangePropagator.setResetJUL(true); levelChangePropagator.setContext(loggerContext); loggerContext.addListener(levelChangePropagator); } @Override public void cleanUp() { LoggerContext context = getLoggerContext(); markAsUninitialized(context); super.cleanUp(); context.getStatusManager().clear(); context.getTurboFilterList().remove(FILTER); } @Override protected void reinitialize(LoggingInitializationContext initializationContext) { getLoggerContext().reset(); getLoggerContext().getStatusManager().clear(); loadConfiguration(initializationContext, getSelfInitializationConfig(), null); } @Override public List<LoggerConfiguration> getLoggerConfigurations() { List<LoggerConfiguration> result = new ArrayList<>(); for (ch.qos.logback.classic.Logger logger : getLoggerContext().getLoggerList()) { result.add(getLoggerConfiguration(logger)); } Collections.sort(result, CONFIGURATION_COMPARATOR); return result; } @Override public LoggerConfiguration getLoggerConfiguration(String loggerName) { return 
getLoggerConfiguration(getLogger(loggerName)); } private LoggerConfiguration getLoggerConfiguration( ch.qos.logback.classic.Logger logger) { if (logger == null) { return null; } LogLevel level = LEVELS.convertNativeToSystem(logger.getLevel()); LogLevel effectiveLevel = LEVELS .convertNativeToSystem(logger.getEffectiveLevel()); String name = logger.getName(); if (!StringUtils.hasLength(name) || Logger.ROOT_LOGGER_NAME.equals(name)) { name = ROOT_LOGGER_NAME; } return new LoggerConfiguration(name, level, effectiveLevel); } @Override public Set<LogLevel> getSupportedLogLevels() { return LEVELS.getSupported(); } @Override public void setLogLevel(String loggerName, LogLevel level) { ch.qos.logback.classic.Logger logger = getLogger(loggerName); if (logger != null) { logger.setLevel(LEVELS.convertSystemToNative(level)); } } @Override public Runnable getShutdownHandler() { return new ShutdownHandler(); } private ch.qos.logback.classic.Logger getLogger(String name) { LoggerContext factory = getLoggerContext(); if (StringUtils.isEmpty(name) || ROOT_LOGGER_NAME.equals(name)) { name = Logger.ROOT_LOGGER_NAME; } return factory.getLogger(name); } private LoggerContext getLoggerContext() { ILoggerFactory factory = StaticLoggerBinder.getSingleton().getLoggerFactory(); Assert.isInstanceOf(LoggerContext.class, factory, String.format( "LoggerFactory is not a Logback LoggerContext but Logback is on " + "the classpath. Either remove Logback or the competing " + "implementation (%s loaded from %s). 
If you are using " + "WebLogic you will need to add 'org.slf4j' to " + "prefer-application-packages in WEB-INF/weblogic.xml", factory.getClass(), getLocation(factory))); return (LoggerContext) factory; } private Object getLocation(ILoggerFactory factory) { try { ProtectionDomain protectionDomain = factory.getClass().getProtectionDomain(); CodeSource codeSource = protectionDomain.getCodeSource(); if (codeSource != null) { return codeSource.getLocation(); } } catch (SecurityException ex) { // Unable to determine location } return "unknown location"; } private boolean isAlreadyInitialized(LoggerContext loggerContext) { return loggerContext.getObject(LoggingSystem.class.getName()) != null; } private void markAsInitialized(LoggerContext loggerContext) { loggerContext.putObject(LoggingSystem.class.getName(), new Object()); } private void markAsUninitialized(LoggerContext loggerContext) { loggerContext.removeObject(LoggingSystem.class.getName()); } private final class ShutdownHandler implements Runnable { @Override public void run() { getLoggerContext().stop(); } } }
package com.dianping.cat.report.page.app; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.unidal.tuple.Pair; import org.unidal.web.mvc.view.annotation.EntityMeta; import org.unidal.web.mvc.view.annotation.ModelMeta; import com.dianping.cat.Constants; import com.dianping.cat.configuration.app.entity.Code; import com.dianping.cat.configuration.app.entity.Command; import com.dianping.cat.configuration.app.entity.Item; import com.dianping.cat.configuration.app.speed.entity.Speed; import com.dianping.cat.configuration.server.filter.entity.CrashLogDomain; import com.dianping.cat.helper.JsonBuilder; import com.dianping.cat.home.app.entity.AppReport; import com.dianping.cat.mvc.AbstractReportModel; import com.dianping.cat.problem.model.entity.ProblemReport; import com.dianping.cat.report.ReportPage; import com.dianping.cat.report.graph.LineChart; import com.dianping.cat.report.graph.PieChart; import com.dianping.cat.report.page.app.display.AppDataDetail; import com.dianping.cat.report.page.app.display.AppSpeedDetail; import com.dianping.cat.report.page.app.display.AppSpeedDisplayInfo; import com.dianping.cat.report.page.app.display.DisplayCommands; import com.dianping.cat.report.page.app.display.PieChartDetailInfo; import com.dianping.cat.report.page.app.processor.CrashLogProcessor.FieldsInfo; @ModelMeta(Constants.APP) public class Model extends AbstractReportModel<Action, ReportPage, Context> { @EntityMeta private LineChart m_lineChart; @EntityMeta private PieChart m_pieChart; private List<PieChartDetailInfo> m_pieChartDetailInfos; private Map<Integer, Item> m_cities; private Map<Integer, Item> m_versions; private Map<Integer, Item> m_connectionTypes; private Map<Integer, Item> m_operators; private Map<Integer, Item> m_networks; private Map<Integer, Item> m_platforms; private List<Command> m_commands; private 
List<AppDataDetail> m_appDataDetailInfos; private Map<String, AppDataDetail> m_comparisonAppDetails; private AppSpeedDisplayInfo m_appSpeedDisplayInfo; private String m_content; private String m_fetchData; private int m_commandId; private ProblemStatistics m_problemStatistics; private FieldsInfo m_fieldsInfo; private ProblemReport m_problemReport; private Map<String, List<Speed>> m_speeds; private Map<Integer, Code> m_codes; private List<String> m_codeDistributions; private Map<Integer, List<Code>> m_command2Codes; private Map<String, Pair<String, String>> m_domain2Departments; private Map<String, Command> m_command2Id; private String m_defaultCommand; @EntityMeta private AppReport m_appReport; private DisplayCommands m_displayCommands; private Collection<CrashLogDomain> m_crashLogDomains; public Model(Context ctx) { super(ctx); } public List<AppDataDetail> getAppDataDetailInfos() { return m_appDataDetailInfos; } public AppReport getAppReport() { return m_appReport; } public Map<String, Map<Integer, AppSpeedDetail>> getAppSpeedDetails() { Map<String, Map<Integer, AppSpeedDetail>> map = new LinkedHashMap<String, Map<Integer, AppSpeedDetail>>(); Map<String, List<AppSpeedDetail>> details = m_appSpeedDisplayInfo.getAppSpeedDetails(); if (details != null && !details.isEmpty()) { for (Entry<String, List<AppSpeedDetail>> entry : details.entrySet()) { Map<Integer, AppSpeedDetail> m = new LinkedHashMap<Integer, AppSpeedDetail>(); for (AppSpeedDetail detail : entry.getValue()) { m.put(detail.getMinuteOrder(), detail); } map.put(entry.getKey(), m); } } return map; } public AppSpeedDisplayInfo getAppSpeedDisplayInfo() { return m_appSpeedDisplayInfo; } public Map<String, Map<Integer, AppSpeedDetail>> getAppSpeedSummarys() { Map<String, Map<Integer, AppSpeedDetail>> map = new LinkedHashMap<String, Map<Integer, AppSpeedDetail>>(); Map<String, AppSpeedDetail> details = m_appSpeedDisplayInfo.getAppSpeedSummarys(); if (details != null && !details.isEmpty()) { for (Entry<String, 
AppSpeedDetail> entry : details.entrySet()) { Map<Integer, AppSpeedDetail> m = new LinkedHashMap<Integer, AppSpeedDetail>(); AppSpeedDetail d = entry.getValue(); m.put(d.getMinuteOrder(), d); map.put(entry.getKey(), m); } } return map; } public Map<Integer, Item> getCities() { return m_cities; } public List<String> getCodeDistributions() { return m_codeDistributions; } public Map<Integer, Code> getCodes() { return m_codes; } public Map<Integer, List<Code>> getCommand2Codes() { return m_command2Codes; } public String getCommand2CodesJson() { return new JsonBuilder().toJson(m_command2Codes); } public Map<String, Command> getCommand2Id() { return m_command2Id; } public String getCommand2IdJson() { return new JsonBuilder().toJson(m_command2Id); } public int getCommandId() { return m_commandId; } public List<Command> getCommands() { return m_commands; } public Map<String, AppDataDetail> getComparisonAppDetails() { return m_comparisonAppDetails; } public Map<Integer, Item> getConnectionTypes() { return m_connectionTypes; } public String getContent() { return m_content; } public Collection<CrashLogDomain> getCrashLogDomains() { return m_crashLogDomains; } @Override public Action getDefaultAction() { return Action.LINECHART; } public String getDefaultCommand() { return m_defaultCommand; } public DisplayCommands getDisplayCommands() { return m_displayCommands; } @Override public String getDomain() { return getDisplayDomain(); } public Map<String, Pair<String, String>> getDomain2Departments() { return m_domain2Departments; } @Override public Collection<String> getDomains() { return new ArrayList<String>(); } public String getFetchData() { return m_fetchData; } public FieldsInfo getFieldsInfo() { return m_fieldsInfo; } public LineChart getLineChart() { return m_lineChart; } public Map<Integer, Item> getNetworks() { return m_networks; } public Map<Integer, Item> getOperators() { return m_operators; } public String getPage2StepsJson() { return new 
JsonBuilder().toJson(m_speeds); } public Set<String> getPages() { return m_speeds.keySet(); } public PieChart getPieChart() { return m_pieChart; } public List<PieChartDetailInfo> getPieChartDetailInfos() { return m_pieChartDetailInfos; } public Map<Integer, Item> getPlatforms() { return m_platforms; } public ProblemReport getProblemReport() { return m_problemReport; } public ProblemStatistics getProblemStatistics() { return m_problemStatistics; } public Map<String, List<Speed>> getSpeeds() { return m_speeds; } public Map<Integer, Item> getVersions() { return m_versions; } public void setAppDataDetailInfos(List<AppDataDetail> appDataDetailInfos) { m_appDataDetailInfos = appDataDetailInfos; } public void setAppReport(AppReport appReport) { m_appReport = appReport; } public void setAppSpeedDisplayInfo(AppSpeedDisplayInfo appSpeedDisplayInfo) { m_appSpeedDisplayInfo = appSpeedDisplayInfo; } public void setCities(Map<Integer, Item> cities) { m_cities = cities; } public void setCodeDistributions(List<String> codeDistributions) { m_codeDistributions = codeDistributions; } public void setCodes(Map<Integer, Code> codes) { m_codes = codes; } public void setCommand2Codes(Map<Integer, List<Code>> command2Codes) { m_command2Codes = command2Codes; } public void setCommand2Id(Map<String, Command> rawCommands) { m_command2Id = rawCommands; } public void setCommandId(int commandId) { m_commandId = commandId; } public void setCommands(List<Command> commands) { m_commands = commands; } public void setComparisonAppDetails(Map<String, AppDataDetail> comparisonAppDetail) { m_comparisonAppDetails = comparisonAppDetail; } public void setConnectionTypes(Map<Integer, Item> map) { m_connectionTypes = map; } public void setContent(String content) { m_content = content; } public void setCrashLogDomains(Collection<CrashLogDomain> crashLogDomains) { m_crashLogDomains = crashLogDomains; } public void setDefaultCommand(String defaultCommand) { m_defaultCommand = defaultCommand; } public void 
setDisplayCommands(DisplayCommands displayCommands) { m_displayCommands = displayCommands; } public void setDomain2Departments(Map<String, Pair<String, String>> domain2Departments) { m_domain2Departments = domain2Departments; } public void setFetchData(String fetchData) { m_fetchData = fetchData; } public void setFieldsInfo(FieldsInfo fieldsInfo) { m_fieldsInfo = fieldsInfo; } public void setLineChart(LineChart lineChart) { m_lineChart = lineChart; } public void setNetworks(Map<Integer, Item> networks) { m_networks = networks; } public void setOperators(Map<Integer, Item> operators) { m_operators = operators; } public void setPieChart(PieChart pieChart) { m_pieChart = pieChart; } public void setPieChartDetailInfos(List<PieChartDetailInfo> pieChartDetailInfos) { m_pieChartDetailInfos = pieChartDetailInfos; } public void setPlatforms(Map<Integer, Item> platforms) { m_platforms = platforms; } public void setProblemReport(ProblemReport problemReport) { m_problemReport = problemReport; } public void setProblemStatistics(ProblemStatistics problemStatistics) { m_problemStatistics = problemStatistics; } public void setSpeeds(Map<String, List<Speed>> speeds) { m_speeds = speeds; } public void setVersions(Map<Integer, Item> versions) { m_versions = versions; } }
package com.alibaba.jstorm.daemon.worker.metrics; import java.net.URLEncoder; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.http.HttpEntity; import org.apache.http.NameValuePair; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import org.apache.log4j.Logger; import backtype.storm.utils.Utils; public class AlimonitorClient extends MetricSendClient { public static Logger LOG = Logger.getLogger(AlimonitorClient.class); // Send to localhost:15776 by default public static final String DEFAUT_ADDR = "127.0.0.1"; public static final String DEFAULT_PORT = "15776"; public static final int DEFAUTL_FLAG = 0; public static final String DEFAULT_ERROR_INFO = ""; private final String COLLECTION_FLAG = "collection_flag"; private final String ERROR_INFO = "error_info"; private final String MSG = "MSG"; private String port; private String requestIP; private String monitorName; private int collectionFlag; private String errorInfo; private boolean post; public AlimonitorClient() { } public AlimonitorClient(String requestIP, String port, boolean post) { this.requestIP = requestIP; this.port = port; this.post = post; this.monitorName = null; this.collectionFlag = 0; this.errorInfo = null; } public void setIpAddr(String ipAddr) { this.requestIP = ipAddr; } public void setPort(String port) { this.port = port; } public void setMonitorName(String monitorName) { this.monitorName = monitorName; } public void setCollectionFlag(int flag) { this.collectionFlag = flag; } public void setErrorInfo(String msg) { this.errorInfo = msg; } public void setPostFlag(boolean 
post) { this.post = post; } public String buildURL() { return "http://" + requestIP + ":" + port + "/passive"; } public String buildRqstAddr() { return "http://" + requestIP + ":" + port + "/passive?name=" + monitorName + "&msg="; } @Override public boolean send(Map<String,Object> msg) { try { if(monitorName == null) { LOG.warn("monitor name is null"); return false; } return sendRequest(collectionFlag, errorInfo, msg); } catch (Exception e) { LOG.error("Failed to sendRequest", e); return false; } } @Override public boolean send(List<Map<String,Object>> msg) { try { if(monitorName == null) { LOG.warn("monitor name is null"); return false; } return sendRequest(collectionFlag, errorInfo, msg); } catch (Exception e) { LOG.error("Failed to sendRequest", e); return false; } } public Map buildAliMonitorMsg(int collection_flag, String error_message) { // Json format of the message sent to Alimonitor // { // "collection_flag":int, // "error_info":string, // "MSG": ojbect | array // } Map ret = new HashMap(); ret.put(COLLECTION_FLAG, collection_flag); ret.put(ERROR_INFO, error_message); ret.put(MSG, null); return ret; } private void addMsgData(Map jsonObj, Map<String, Object> map) { jsonObj.put(MSG, map); } private void addMsgData(Map jsonObj, List<Map<String, Object>> mapList) { // JSONArray jsonArray = new JSONArray(); // for(Map<String, Object> map : mapList) { // jsonArray.add(map); // } jsonObj.put(MSG, mapList); } private boolean sendRequest(int collection_flag, String error_message, Map<String,Object> msg) throws Exception { boolean ret = false; if (msg.size() == 0) return ret; Map jsonObj = buildAliMonitorMsg(collection_flag, error_message); addMsgData(jsonObj, msg); String jsonMsg = jsonObj.toString(); LOG.info(jsonMsg); if (post == true) { String url = buildURL(); ret = httpPost(url, jsonMsg); } else { String request = buildRqstAddr(); StringBuilder postAddr= new StringBuilder(); postAddr.append(request); postAddr.append(URLEncoder.encode(jsonMsg)); ret = 
httpGet(postAddr); } return ret; } private boolean sendRequest(int collection_flag, String error_message, List<Map<String,Object>> msgList) throws Exception { boolean ret = false; if (msgList.size() == 0) return ret; Map jsonObj = buildAliMonitorMsg(collection_flag, error_message); addMsgData(jsonObj, msgList); String jsonMsg = Utils.to_json(jsonObj); LOG.info(jsonMsg); if (post == true) { String url = buildURL(); ret = httpPost(url, jsonMsg); } else { String request = buildRqstAddr(); StringBuilder postAddr= new StringBuilder(); postAddr.append(request); postAddr.append(URLEncoder.encode(jsonMsg)); ret = httpGet(postAddr); } return ret; } private boolean httpGet(StringBuilder postAddr) { boolean ret = false; CloseableHttpClient httpClient = HttpClientBuilder.create().build(); CloseableHttpResponse response = null; try { HttpGet request = new HttpGet(postAddr.toString()); response = httpClient.execute(request); HttpEntity entity = response.getEntity(); if (entity != null) { LOG.info(EntityUtils.toString(entity)); } EntityUtils.consume(entity); ret = true; } catch (Exception e) { LOG.error("Exception when sending http request to alimonitor", e); } finally { try { if (response != null) response.close(); httpClient.close(); } catch (Exception e) { LOG.error("Exception when closing httpclient", e); } } return ret; } private boolean httpPost(String url, String msg) { boolean ret = false; CloseableHttpClient httpClient = HttpClientBuilder.create().build(); CloseableHttpResponse response = null; try { HttpPost request = new HttpPost(url); List <NameValuePair> nvps = new ArrayList <NameValuePair>(); nvps.add(new BasicNameValuePair("name", monitorName)); nvps.add(new BasicNameValuePair("msg", msg)); request.setEntity(new UrlEncodedFormEntity(nvps)); response = httpClient.execute(request); HttpEntity entity = response.getEntity(); if (entity != null) { LOG.info(EntityUtils.toString(entity)); } EntityUtils.consume(entity); ret = true; } catch (Exception e) { 
LOG.error("Exception when sending http request to alimonitor", e); } finally { try { if (response != null) response.close(); httpClient.close(); } catch (Exception e) { LOG.error("Exception when closing httpclient", e); } } return ret; } public void close() { } }
package com.insightfullogic.honest_profiler.ports.javafx.controller; import static com.insightfullogic.honest_profiler.core.aggregation.result.ItemType.DIFFENTRY; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_CNT; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_CNT_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_CNT_PCT; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_CNT_PCT_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_TIME; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_TIME_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_TIME_PCT; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_SELF_TIME_PCT_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_CNT; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_CNT_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_CNT_PCT; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_CNT_PCT_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_TIME; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_TIME_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_TIME_PCT; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.COLUMN_TOTAL_TIME_PCT_DIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.INFO_BUTTON_COLLAPSEALLALL; import static 
com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.INFO_BUTTON_EXPANDALL; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.INFO_BUTTON_FILTER; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.INFO_BUTTON_QUICKFILTER; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.INFO_INPUT_QUICKFILTER; import static com.insightfullogic.honest_profiler.ports.javafx.util.ResourceUtil.INFO_TABLE_TREEDIFF; import static com.insightfullogic.honest_profiler.ports.javafx.util.TreeUtil.expandFully; import static com.insightfullogic.honest_profiler.ports.javafx.util.TreeUtil.expandPartial; import com.insightfullogic.honest_profiler.core.aggregation.grouping.FrameGrouping; import com.insightfullogic.honest_profiler.core.aggregation.grouping.ThreadGrouping; import com.insightfullogic.honest_profiler.core.aggregation.result.Aggregation; import com.insightfullogic.honest_profiler.core.aggregation.result.diff.DiffNode; import com.insightfullogic.honest_profiler.core.aggregation.result.diff.TreeDiff; import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Tree; import com.insightfullogic.honest_profiler.ports.javafx.controller.filter.FilterDialogController; import com.insightfullogic.honest_profiler.ports.javafx.util.TreeUtil; import com.insightfullogic.honest_profiler.ports.javafx.view.cell.MethodNameTreeTableCell; import com.insightfullogic.honest_profiler.ports.javafx.view.tree.DiffNodeTreeItem; import javafx.beans.property.ReadOnlyStringWrapper; import javafx.fxml.FXML; import javafx.scene.control.Button; import javafx.scene.control.ChoiceBox; import javafx.scene.control.Label; import javafx.scene.control.TextField; import javafx.scene.control.TreeTableColumn; import javafx.scene.control.TreeTableView; /** * Controller for Views which displays the contents of a {@link TreeDiff} {@link Aggregation}. 
*/
public class TreeDiffViewController extends AbstractProfileDiffViewController<Tree, DiffNode>
{
    // Filter / expand / collapse / quick-filter controls, injected by the
    // FXML loader; field names must match the fx:id values in the FXML file.
    @FXML
    private Button filterButton;
    @FXML
    private Button expandAllButton;
    @FXML
    private Button collapseAllButton;
    @FXML
    private TextField quickFilterText;
    @FXML
    private Button quickFilterButton;

    // Thread- and frame-grouping selectors with their labels.
    @FXML
    private Label threadGroupingLabel;
    @FXML
    private ChoiceBox<ThreadGrouping> threadGrouping;
    @FXML
    private Label frameGroupingLabel;
    @FXML
    private ChoiceBox<FrameGrouping> frameGrouping;

    // The diff table and its columns: for each metric there is a Base value,
    // a New value and a Diff column; metrics are self/total counts, count
    // percentages, times and time percentages.
    @FXML
    private TreeTableView<DiffNode> treeDiffTable;
    @FXML
    private TreeTableColumn<DiffNode, String> methodColumn;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseSelfCntPct;
    @FXML
    private TreeTableColumn<DiffNode, Number> newSelfCntPct;
    @FXML
    private TreeTableColumn<DiffNode, Number> selfCntPctDiff;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseTotalCntPct;
    @FXML
    private TreeTableColumn<DiffNode, Number> newTotalCntPct;
    @FXML
    private TreeTableColumn<DiffNode, Number> totalCntPctDiff;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseSelfCnt;
    @FXML
    private TreeTableColumn<DiffNode, Number> newSelfCnt;
    @FXML
    private TreeTableColumn<DiffNode, Number> selfCntDiff;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseTotalCnt;
    @FXML
    private TreeTableColumn<DiffNode, Number> newTotalCnt;
    @FXML
    private TreeTableColumn<DiffNode, Number> totalCntDiff;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseSelfTimePct;
    @FXML
    private TreeTableColumn<DiffNode, Number> newSelfTimePct;
    @FXML
    private TreeTableColumn<DiffNode, Number> selfTimePctDiff;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseTotalTimePct;
    @FXML
    private TreeTableColumn<DiffNode, Number> newTotalTimePct;
    @FXML
    private TreeTableColumn<DiffNode, Number> totalTimePctDiff;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseSelfTime;
    @FXML
    private TreeTableColumn<DiffNode, Number> newSelfTime;
    @FXML
    private TreeTableColumn<DiffNode, Number> selfTimeDiff;
    @FXML
    private TreeTableColumn<DiffNode, Number> baseTotalTime;
    @FXML
    private TreeTableColumn<DiffNode, Number> newTotalTime;
    @FXML
    private TreeTableColumn<DiffNode, Number> totalTimeDiff;

    // Controller backing the filter specification dialog.
    @FXML
    private FilterDialogController<DiffNode> filterController;

    // The diff currently being displayed; replaced on every refresh.
    private TreeDiff diff;

    // FXML Implementation

    /**
     * Called by the FXML loader after injection; wires the common superclass
     * behavior (item type, filter controls, grouping selectors).
     */
    @Override
    @FXML
    protected void initialize()
    {
        diff = new TreeDiff();

        super.initialize(DIFFENTRY);
        super.initialize(filterController, filterButton, quickFilterButton, quickFilterText);
        super.initialize(threadGroupingLabel, threadGrouping, frameGroupingLabel, frameGrouping);
    }

    // AbstractController Implementation

    /** Installs the tooltip/info text for each interactive control. */
    @Override
    protected void initializeInfoText()
    {
        info(filterButton, INFO_BUTTON_FILTER);
        info(expandAllButton, INFO_BUTTON_EXPANDALL);
        info(collapseAllButton, INFO_BUTTON_COLLAPSEALLALL);
        info(quickFilterText, INFO_INPUT_QUICKFILTER);
        info(quickFilterButton, INFO_BUTTON_QUICKFILTER);
        info(treeDiffTable, INFO_TABLE_TREEDIFF);
    }

    /** Wires the expand-all and collapse-all button actions. */
    @Override
    protected void initializeHandlers()
    {
        expandAllButton.setOnAction(event -> expandFully(treeDiffTable.getRoot()));

        // Collapse each child of the (invisible) root rather than the root
        // itself, so the top-level entries stay visible.
        collapseAllButton.setOnAction(
            event -> treeDiffTable.getRoot().getChildren().stream()
                .forEach(TreeUtil::collapseFully));
    }

    // AbstractViewController Implementation

    /** Recomputes the diff from the current Base and New targets. */
    @Override
    protected void refresh()
    {
        diff = new TreeDiff();
        updateDiff(getBaseTarget(), getNewTarget());
    }

    /**
     * Initializes the {@link TreeTableView} which displays the {@link TreeDiff}
     * {@link Aggregation}: the method-name column plus the Base/New/Diff column
     * triples for every metric.
     */
    @Override
    protected void initializeTable()
    {
        // Guard against null rows while the table root is being rebuilt.
        methodColumn.setCellValueFactory(
            data -> new ReadOnlyStringWrapper(
                data.getValue() == null ? null : data.getValue().getValue().getKey()));
        methodColumn.setCellFactory(col -> new MethodNameTreeTableCell<>(appCtx()));

        cfgPctCol(baseSelfCntPct, "baseSelfCntPct", baseCtx(), getText(COLUMN_SELF_CNT_PCT));
        cfgPctCol(newSelfCntPct, "newSelfCntPct", newCtx(), getText(COLUMN_SELF_CNT_PCT));
        cfgPctDiffCol(selfCntPctDiff, "selfCntPctDiff", getText(COLUMN_SELF_CNT_PCT_DIFF));

        cfgPctCol(baseTotalCntPct, "baseTotalCntPct", baseCtx(), getText(COLUMN_TOTAL_CNT_PCT));
        cfgPctCol(newTotalCntPct, "newTotalCntPct", newCtx(), getText(COLUMN_TOTAL_CNT_PCT));
        cfgPctDiffCol(totalCntPctDiff, "totalCntPctDiff", getText(COLUMN_TOTAL_CNT_PCT_DIFF));

        cfgNrCol(baseSelfCnt, "baseSelfCnt", baseCtx(), getText(COLUMN_SELF_CNT));
        cfgNrCol(newSelfCnt, "newSelfCnt", newCtx(), getText(COLUMN_SELF_CNT));
        cfgNrDiffCol(selfCntDiff, "selfCntDiff", getText(COLUMN_SELF_CNT_DIFF));

        cfgNrCol(baseTotalCnt, "baseTotalCnt", baseCtx(), getText(COLUMN_TOTAL_CNT));
        cfgNrCol(newTotalCnt, "newTotalCnt", newCtx(), getText(COLUMN_TOTAL_CNT));
        cfgNrDiffCol(totalCntDiff, "totalCntDiff", getText(COLUMN_TOTAL_CNT_DIFF));

        cfgPctCol(baseSelfTimePct, "baseSelfTimePct", baseCtx(), getText(COLUMN_SELF_TIME_PCT));
        cfgPctCol(newSelfTimePct, "newSelfTimePct", newCtx(), getText(COLUMN_SELF_TIME_PCT));
        cfgPctDiffCol(selfTimePctDiff, "selfTimePctDiff", getText(COLUMN_SELF_TIME_PCT_DIFF));

        cfgPctCol(baseTotalTimePct, "baseTotalTimePct", baseCtx(), getText(COLUMN_TOTAL_TIME_PCT));
        cfgPctCol(newTotalTimePct, "newTotalTimePct", newCtx(), getText(COLUMN_TOTAL_TIME_PCT));
        cfgPctDiffCol(totalTimePctDiff, "totalTimePctDiff", getText(COLUMN_TOTAL_TIME_PCT_DIFF));

        cfgTimeCol(baseSelfTime, "baseSelfTime", baseCtx(), getText(COLUMN_SELF_TIME));
        cfgTimeCol(newSelfTime, "newSelfTime", newCtx(), getText(COLUMN_SELF_TIME));
        cfgTimeDiffCol(selfTimeDiff, "selfTimeDiff", getText(COLUMN_SELF_TIME_DIFF));

        cfgTimeCol(baseTotalTime, "baseTotalTime", baseCtx(), getText(COLUMN_TOTAL_TIME));
        cfgTimeCol(newTotalTime, "newTotalTime", newCtx(), getText(COLUMN_TOTAL_TIME));
        cfgTimeDiffCol(totalTimeDiff, "totalTimeDiff", getText(COLUMN_TOTAL_TIME_DIFF));
    }

    /**
     * Helper method for {@link #refresh()}: recalculates the diff, filters it
     * with the current filter specification and replaces the table root.
     * Does nothing unless both trees are available.
     *
     * @param baseTree the Base {@link Tree} to be compared
     * @param newTree the New {@link Tree} to be compared
     */
    private void updateDiff(Tree baseTree, Tree newTree)
    {
        if (baseTree != null && newTree != null)
        {
            diff.set(baseTree, newTree);
            treeDiffTable.setRoot(new DiffNodeTreeItem(diff.filter(getFilterSpecification())));
            // Show only the first two levels initially; the user can expand more.
            expandPartial(treeDiffTable.getRoot(), 2);
            treeDiffTable.sort();
        }
    }
}
/* * Copyright 2019 Esri. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.esri.samples.generate_offline_map_with_local_basemap; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Optional; import java.util.concurrent.ExecutionException; import javafx.application.Application; import javafx.geometry.Insets; import javafx.geometry.Point2D; import javafx.geometry.Pos; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.Button; import javafx.scene.control.ProgressBar; import javafx.scene.layout.StackPane; import javafx.stage.DirectoryChooser; import javafx.stage.Stage; import com.esri.arcgisruntime.ArcGISRuntimeEnvironment; import com.esri.arcgisruntime.concurrent.Job; import com.esri.arcgisruntime.concurrent.ListenableFuture; import com.esri.arcgisruntime.geometry.Envelope; import com.esri.arcgisruntime.geometry.Point; import com.esri.arcgisruntime.loadable.LoadStatus; import com.esri.arcgisruntime.mapping.ArcGISMap; import com.esri.arcgisruntime.mapping.view.DrawStatus; import com.esri.arcgisruntime.mapping.view.DrawStatusChangedEvent; import com.esri.arcgisruntime.mapping.view.DrawStatusChangedListener; import com.esri.arcgisruntime.mapping.view.Graphic; import com.esri.arcgisruntime.mapping.view.GraphicsOverlay; import com.esri.arcgisruntime.mapping.view.MapView; import com.esri.arcgisruntime.portal.Portal; import com.esri.arcgisruntime.portal.PortalItem; 
import com.esri.arcgisruntime.symbology.SimpleLineSymbol;
import com.esri.arcgisruntime.tasks.offlinemap.GenerateOfflineMapJob;
import com.esri.arcgisruntime.tasks.offlinemap.GenerateOfflineMapParameters;
import com.esri.arcgisruntime.tasks.offlinemap.GenerateOfflineMapResult;
import com.esri.arcgisruntime.tasks.offlinemap.OfflineMapTask;

/**
 * ArcGIS Runtime sample: takes a web map offline, optionally reusing a local
 * basemap file instead of re-downloading the basemap.
 */
public class GenerateOfflineMapWithLocalBasemapSample extends Application {

  private MapView mapView;
  // Red-outline graphic marking the area that will be taken offline.
  private Graphic downloadArea;
  private ArcGISMap map;

  /**
   * JavaFX entry point: builds the scene, loads the web map, and wires the
   * "Take Map Offline" workflow.
   *
   * @param stage the primary stage supplied by the JavaFX runtime
   */
  @Override
  public void start(Stage stage) {

    try {
      // create stack pane and application scene
      StackPane stackPane = new StackPane();
      Scene scene = new Scene(stackPane);
      scene.getStylesheets().add(getClass().getResource("/generate_offline_map_with_local_basemap/style.css").toExternalForm());

      // set title, size, and add scene to stage
      stage.setTitle("Generate Offline Map With Local Basemap Sample");
      stage.setWidth(800);
      stage.setHeight(700);
      stage.setScene(scene);
      stage.show();

      // authentication with an API key or named user is required to access basemaps and other location services
      String yourAPIKey = System.getProperty("apiKey");
      ArcGISRuntimeEnvironment.setApiKey(yourAPIKey);

      // create a map view
      mapView = new MapView();

      // create a graphics overlay to show the download area extent
      var graphicsOverlay = new GraphicsOverlay();
      mapView.getGraphicsOverlays().add(graphicsOverlay);

      // create a graphic to show a box around the extent we want to download
      downloadArea = new Graphic();
      graphicsOverlay.getGraphics().add(downloadArea);
      var simpleLineSymbol = new SimpleLineSymbol(SimpleLineSymbol.Style.SOLID, 0xFFFF0000, 2);
      downloadArea.setSymbol(simpleLineSymbol);

      // load a web map from a portal item
      var portal = new Portal("https://www.arcgis.com");
      var portalItem = new PortalItem(portal, "acc027394bc84c2fb04d1ed317aac674");
      map = new ArcGISMap(portalItem);
      mapView.setMap(map);

      // draw the download extent area as a red outline when the draw status is completed for the first time;
      // the listener removes itself after the first COMPLETED event
      var drawStatusChangedListener = new DrawStatusChangedListener() {
        @Override
        public void drawStatusChanged(DrawStatusChangedEvent drawStatusChangedEvent) {
          if (drawStatusChangedEvent.getDrawStatus() == DrawStatus.COMPLETED) {
            updateDownloadArea();
            mapView.removeDrawStatusChangedListener(this);
          }
        }
      };
      mapView.addDrawStatusChangedListener(drawStatusChangedListener);

      // update the download area box whenever the viewpoint changes
      mapView.addViewpointChangedListener(viewpointChangedEvent -> updateDownloadArea());

      // create a progress bar to show download progress
      var progressBar = new ProgressBar(0.0);
      progressBar.setVisible(false);
      progressBar.setMaxSize(200, 25);

      // create a button to take the map offline; disabled until the map loads
      Button offlineMapButton = new Button("Take Map Offline");
      offlineMapButton.setDisable(true);

      // when the take map offline button is clicked, start the offline map task job
      offlineMapButton.setOnAction(e -> {

        // specify the extent, min scale, and max scale as parameters
        double minScale = mapView.getMapScale();
        double maxScale = map.getMaxScale();
        // minScale must always be larger than maxScale
        if (minScale <= maxScale) {
          minScale = maxScale + 1;
        }

        // create an offline map task with the map
        OfflineMapTask task = new OfflineMapTask(map);

        // create default generate offline map parameters
        ListenableFuture<GenerateOfflineMapParameters> generateOfflineMapParametersListenableFuture =
            task.createDefaultGenerateOfflineMapParametersAsync(downloadArea.getGeometry(), minScale, maxScale);
        generateOfflineMapParametersListenableFuture.addDoneListener(() -> {
          try {
            GenerateOfflineMapParameters generateOfflineMapParameters =
                generateOfflineMapParametersListenableFuture.get();

            // ask the user whether to reuse the referenced local basemap file
            GenerateOfflineMapDialog dialog = new GenerateOfflineMapDialog();
            dialog.setReferencedBasemapFileName(generateOfflineMapParameters.getReferenceBasemapFilename());
            Optional<Boolean> usingLocalBasemap = dialog.showAndWait();

            // if the user chose to cancel, the optional will be empty
            if (usingLocalBasemap.isPresent()) {

              // if the user chose to use a local basemap, this will be true
              // if the user chose to download the basemap again, this requires authentication, which is handled via the API key set at
              // the start of this sample
              if (usingLocalBasemap.get()) {
                // open a directory chooser to select the directory containing the referenced basemap
                var directoryChooser = new DirectoryChooser();
                // for this sample, the directory chosen should be "naperville"
                directoryChooser.setInitialDirectory(new File(System.getProperty("data.dir"), "./samples-data/"));
                directoryChooser.setTitle("Choose directory containing local basemap");
                File localBasemapDirectory = directoryChooser.showDialog(stage.getOwner());
                if (localBasemapDirectory != null) {
                  // set the path to the references basemap directory
                  generateOfflineMapParameters.setReferenceBasemapFilename("naperville_imagery.tpkx");
                  generateOfflineMapParameters.setReferenceBasemapDirectory(localBasemapDirectory.getAbsolutePath());
                }
              }

              // create an offline map job with the download directory path and parameters and start the job
              Path tempDirectory = Files.createTempDirectory("offline_map");
              GenerateOfflineMapJob generateOfflineMapJob =
                  task.generateOfflineMap(generateOfflineMapParameters, tempDirectory.toAbsolutePath().toString());
              generateOfflineMapJob.start();
              offlineMapButton.setDisable(true);
              progressBar.setVisible(true);

              // show the job's progress with the progress bar
              generateOfflineMapJob.addProgressChangedListener(() ->
                  progressBar.setProgress(generateOfflineMapJob.getProgress() / 100.0));

              // replace the current map with the result offline map when the job finishes
              generateOfflineMapJob.addJobDoneListener(() -> {
                if (generateOfflineMapJob.getStatus() == Job.Status.SUCCEEDED) {
                  // replace the map in the map view with the offline map
                  GenerateOfflineMapResult result = generateOfflineMapJob.getResult();
                  mapView.setMap(result.getOfflineMap());
                  graphicsOverlay.getGraphics().clear();
                  // NOTE(review): the button stays disabled after success —
                  // presumably intentional since the map is already offline; confirm.
                  offlineMapButton.setDisable(true);
                  progressBar.setVisible(false);
                } else {
                  new Alert(Alert.AlertType.ERROR, generateOfflineMapJob.getError().getAdditionalMessage()).show();
                }
              });
            }
          } catch (InterruptedException | ExecutionException ex) {
            new Alert(Alert.AlertType.ERROR, "Failed to get default generate offline map parameters").show();
          } catch (IOException ex) {
            new Alert(Alert.AlertType.ERROR, "Failed to create temporary file for mobile map").show();
          }
        });
      });

      // enable the button when the map is loaded
      map.addDoneLoadingListener(() -> {
        if (map.getLoadStatus() == LoadStatus.LOADED) {
          offlineMapButton.setDisable(false);
        } else {
          new Alert(Alert.AlertType.ERROR, "Map failed to load").show();
        }
      });

      // add the map view, button, and progress bar to stack pane
      stackPane.getChildren().addAll(mapView, offlineMapButton, progressBar);
      StackPane.setAlignment(offlineMapButton, Pos.TOP_LEFT);
      StackPane.setAlignment(progressBar, Pos.TOP_RIGHT);
      StackPane.setMargin(offlineMapButton, new Insets(10));
      StackPane.setMargin(progressBar, new Insets(10));
    } catch (Exception ex) {
      // on any error, display the stack trace.
      ex.printStackTrace();
    }
  }

  /**
   * Updates the download area graphic to show a red border around the current view extent that will be downloaded if
   * taken offline. The area is inset 50px from each edge of the view.
   */
  private void updateDownloadArea() {
    if (map.getLoadStatus() == LoadStatus.LOADED) {
      // upper left corner of the area to take offline
      Point2D minScreenPoint = new Point2D(50, 50);
      // lower right corner of the downloaded area
      Point2D maxScreenPoint = new Point2D(mapView.getWidth() - 50, mapView.getHeight() - 50);
      // convert screen points to map points
      Point minPoint = mapView.screenToLocation(minScreenPoint);
      Point maxPoint = mapView.screenToLocation(maxScreenPoint);
      // use the points to define and return an envelope
      if (minPoint != null && maxPoint != null) {
        Envelope envelope = new Envelope(minPoint, maxPoint);
        downloadArea.setGeometry(envelope);
      }
    }
  }

  /**
   * Stops and releases all resources used in application.
   */
  @Override
  public void stop() {
    if (mapView != null) {
      mapView.dispose();
    }
  }

  /**
   * Opens and runs application.
   *
   * @param args arguments passed to this application
   */
  public static void main(String[] args) {
    Application.launch(args);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.hadoop.hbase.DeserializationException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.RegionException; import org.apache.hadoop.hbase.RegionTransition; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.catalog.CatalogTracker; import org.apache.hadoop.hbase.catalog.MetaMockingUtil; import org.apache.hadoop.hbase.client.ClientProtocol; import 
org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.executor.EventHandler.EventType; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType; import org.apache.hadoop.hbase.master.RegionState.State; import org.apache.hadoop.hbase.master.balancer.DefaultLoadBalancer; import org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory; import org.apache.hadoop.hbase.master.handler.EnableTableHandler; import org.apache.hadoop.hbase.master.handler.ServerShutdownHandler; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table; import org.apache.hadoop.hbase.regionserver.RegionOpeningState; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper; import org.apache.hadoop.hbase.zookeeper.ZKAssign; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NodeExistsException; import org.apache.zookeeper.Watcher; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.mockito.internal.util.reflection.Whitebox; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; /** * Test {@link 
AssignmentManager} */ @Category(MediumTests.class) public class TestAssignmentManager { private static final HBaseTestingUtility HTU = new HBaseTestingUtility(); private static final ServerName SERVERNAME_A = new ServerName("example.org", 1234, 5678); private static final ServerName SERVERNAME_B = new ServerName("example.org", 0, 5678); private static final HRegionInfo REGIONINFO = new HRegionInfo(Bytes.toBytes("t"), HConstants.EMPTY_START_ROW, HConstants.EMPTY_START_ROW); private static int assignmentCount; private static boolean enabling = false; // Mocked objects or; get redone for each test. private Server server; private ServerManager serverManager; private ZooKeeperWatcher watcher; private LoadBalancer balancer; private HMaster master; @BeforeClass public static void beforeClass() throws Exception { HTU.startMiniZKCluster(); } @AfterClass public static void afterClass() throws IOException { HTU.shutdownMiniZKCluster(); } @Before public void before() throws ZooKeeperConnectionException, IOException { // TODO: Make generic versions of what we do below and put up in a mocking // utility class or move up into HBaseTestingUtility. // Mock a Server. Have it return a legit Configuration and ZooKeeperWatcher. // If abort is called, be sure to fail the test (don't just swallow it // silently as is mockito default). this.server = Mockito.mock(Server.class); Mockito.when(server.getServerName()).thenReturn(new ServerName("master,1,1")); Mockito.when(server.getConfiguration()).thenReturn(HTU.getConfiguration()); this.watcher = new ZooKeeperWatcher(HTU.getConfiguration(), "mockedServer", this.server, true); Mockito.when(server.getZooKeeper()).thenReturn(this.watcher); Mockito.doThrow(new RuntimeException("Aborted")). when(server).abort(Mockito.anyString(), (Throwable)Mockito.anyObject()); // Mock a ServerManager. Say server SERVERNAME_{A,B} are online. Also // make it so if close or open, we return 'success'. 
this.serverManager = Mockito.mock(ServerManager.class); Mockito.when(this.serverManager.isServerOnline(SERVERNAME_A)).thenReturn(true); Mockito.when(this.serverManager.isServerOnline(SERVERNAME_B)).thenReturn(true); final Map<ServerName, ServerLoad> onlineServers = new HashMap<ServerName, ServerLoad>(); onlineServers.put(SERVERNAME_B, ServerLoad.EMPTY_SERVERLOAD); onlineServers.put(SERVERNAME_A, ServerLoad.EMPTY_SERVERLOAD); Mockito.when(this.serverManager.getOnlineServersList()).thenReturn( new ArrayList<ServerName>(onlineServers.keySet())); Mockito.when(this.serverManager.getOnlineServers()).thenReturn(onlineServers); List<ServerName> avServers = new ArrayList<ServerName>(); avServers.addAll(onlineServers.keySet()); Mockito.when(this.serverManager.createDestinationServersList()).thenReturn(avServers); Mockito.when(this.serverManager.createDestinationServersList(null)).thenReturn(avServers); Mockito.when(this.serverManager.sendRegionClose(SERVERNAME_A, REGIONINFO, -1)). thenReturn(true); Mockito.when(this.serverManager.sendRegionClose(SERVERNAME_B, REGIONINFO, -1)). thenReturn(true); // Ditto on open. Mockito.when(this.serverManager.sendRegionOpen(SERVERNAME_A, REGIONINFO, -1)). thenReturn(RegionOpeningState.OPENED); Mockito.when(this.serverManager.sendRegionOpen(SERVERNAME_B, REGIONINFO, -1)). 
thenReturn(RegionOpeningState.OPENED); this.master = Mockito.mock(HMaster.class); Mockito.when(this.master.getServerManager()).thenReturn(serverManager); } @After public void after() throws KeeperException { if (this.watcher != null) { // Clean up all znodes ZKAssign.deleteAllNodes(this.watcher); this.watcher.close(); } } /** * Test a balance going on at same time as a master failover * * @throws IOException * @throws KeeperException * @throws InterruptedException * @throws DeserializationException */ @Test(timeout = 5000) public void testBalanceOnMasterFailoverScenarioWithOpenedNode() throws IOException, KeeperException, InterruptedException, ServiceException, DeserializationException { AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(this.server, this.serverManager); try { createRegionPlanAndBalance(am, SERVERNAME_A, SERVERNAME_B, REGIONINFO); startFakeFailedOverMasterAssignmentManager(am, this.watcher); while (!am.processRITInvoked) Thread.sleep(1); // As part of the failover cleanup, the balancing region plan is removed. // So a random server will be used to open the region. For testing purpose, // let's assume it is going to open on server b: am.addPlan(REGIONINFO.getEncodedName(), new RegionPlan(REGIONINFO, null, SERVERNAME_B)); // Now fake the region closing successfully over on the regionserver; the // regionserver will have set the region in CLOSED state. This will // trigger callback into AM. The below zk close call is from the RS close // region handler duplicated here because its down deep in a private // method hard to expose. 
int versionid =
        ZKAssign.transitionNodeClosed(this.watcher, REGIONINFO, SERVERNAME_A, -1);
      assertNotSame(versionid, -1);
      Mocking.waitForRegionPendingOpenInRIT(am, REGIONINFO.getEncodedName());

      // Get current versionid else will fail on transition from OFFLINE to
      // OPENING below
      versionid = ZKAssign.getVersion(this.watcher, REGIONINFO);
      assertNotSame(-1, versionid);
      // This uglyness below is what the openregionhandler on RS side does.
      versionid = ZKAssign.transitionNode(server.getZooKeeper(), REGIONINFO,
        SERVERNAME_B, EventType.M_ZK_REGION_OFFLINE,
        EventType.RS_ZK_REGION_OPENING, versionid);
      assertNotSame(-1, versionid);
      // Move znode from OPENING to OPENED as RS does on successful open.
      versionid = ZKAssign.transitionNodeOpened(this.watcher, REGIONINFO,
        SERVERNAME_B, versionid);
      assertNotSame(-1, versionid);
      // Release the testing AM's gate so it may finish its processing.
      am.gate.set(false);
      // Block here until our znode is cleared or until this test times out.
      ZKAssign.blockUntilNoRIT(watcher);
    } finally {
      am.getExecutorService().shutdown();
      am.shutdown();
    }
  }

  /**
   * Test a balance going on at same time as a master failover.
   * Scenario variant: the gate is released right after the CLOSED transition.
   */
  @Test(timeout = 5000)
  public void testBalanceOnMasterFailoverScenarioWithClosedNode()
      throws IOException, KeeperException, InterruptedException,
      ServiceException, DeserializationException {
    AssignmentManagerWithExtrasForTesting am =
      setUpMockedAssignmentManager(this.server, this.serverManager);
    try {
      createRegionPlanAndBalance(am, SERVERNAME_A, SERVERNAME_B, REGIONINFO);
      startFakeFailedOverMasterAssignmentManager(am, this.watcher);
      // Busy-wait until the fake failed-over AM has started processing RIT.
      while (!am.processRITInvoked) Thread.sleep(1);
      // As part of the failover cleanup, the balancing region plan is removed.
      // So a random server will be used to open the region. For testing purpose,
      // let's assume it is going to open on server b:
      am.addPlan(REGIONINFO.getEncodedName(), new RegionPlan(REGIONINFO, null, SERVERNAME_B));
      // Now fake the region closing successfully over on the regionserver; the
      // regionserver will have set the region in CLOSED state. This will
      // trigger callback into AM. The below zk close call is from the RS close
      // region handler duplicated here because its down deep in a private
      // method hard to expose.
      int versionid =
        ZKAssign.transitionNodeClosed(this.watcher, REGIONINFO, SERVERNAME_A, -1);
      assertNotSame(versionid, -1);
      am.gate.set(false);
      Mocking.waitForRegionPendingOpenInRIT(am, REGIONINFO.getEncodedName());
      // Get current versionid else will fail on transition from OFFLINE to
      // OPENING below
      versionid = ZKAssign.getVersion(this.watcher, REGIONINFO);
      assertNotSame(-1, versionid);
      // This uglyness below is what the openregionhandler on RS side does.
      versionid = ZKAssign.transitionNode(server.getZooKeeper(), REGIONINFO,
        SERVERNAME_B, EventType.M_ZK_REGION_OFFLINE,
        EventType.RS_ZK_REGION_OPENING, versionid);
      assertNotSame(-1, versionid);
      // Move znode from OPENING to OPENED as RS does on successful open.
      versionid = ZKAssign.transitionNodeOpened(this.watcher, REGIONINFO,
        SERVERNAME_B, versionid);
      assertNotSame(-1, versionid);
      // Block here until our znode is cleared or until this test timesout.
      ZKAssign.blockUntilNoRIT(watcher);
    } finally {
      am.getExecutorService().shutdown();
      am.shutdown();
    }
  }

  /**
   * Test a balance going on at same time as a master failover.
   * Scenario variant: the znode is still OFFLINE when failover processing runs.
   */
  @Test(timeout = 5000)
  public void testBalanceOnMasterFailoverScenarioWithOfflineNode()
      throws IOException, KeeperException, InterruptedException,
      ServiceException, DeserializationException {
    AssignmentManagerWithExtrasForTesting am =
      setUpMockedAssignmentManager(this.server, this.serverManager);
    try {
      createRegionPlanAndBalance(am, SERVERNAME_A, SERVERNAME_B, REGIONINFO);
      startFakeFailedOverMasterAssignmentManager(am, this.watcher);
      // Busy-wait until the fake failed-over AM has started processing RIT.
      while (!am.processRITInvoked) Thread.sleep(1);
      // As part of the failover cleanup, the balancing region plan is removed.
      // So a random server will be used to open the region.
// For testing purpose,
      // let's assume it is going to open on server b:
      am.addPlan(REGIONINFO.getEncodedName(), new RegionPlan(REGIONINFO, null, SERVERNAME_B));
      // Now fake the region closing successfully over on the regionserver; the
      // regionserver will have set the region in CLOSED state. This will
      // trigger callback into AM. The below zk close call is from the RS close
      // region handler duplicated here because its down deep in a private
      // method hard to expose.
      int versionid =
        ZKAssign.transitionNodeClosed(this.watcher, REGIONINFO, SERVERNAME_A, -1);
      assertNotSame(versionid, -1);
      Mocking.waitForRegionPendingOpenInRIT(am, REGIONINFO.getEncodedName());
      am.gate.set(false);
      // Get current versionid else will fail on transition from OFFLINE to
      // OPENING below
      versionid = ZKAssign.getVersion(this.watcher, REGIONINFO);
      assertNotSame(-1, versionid);
      // This uglyness below is what the openregionhandler on RS side does.
      versionid = ZKAssign.transitionNode(server.getZooKeeper(), REGIONINFO,
        SERVERNAME_B, EventType.M_ZK_REGION_OFFLINE,
        EventType.RS_ZK_REGION_OPENING, versionid);
      assertNotSame(-1, versionid);
      // Move znode from OPENING to OPENED as RS does on successful open.
      versionid = ZKAssign.transitionNodeOpened(this.watcher, REGIONINFO,
        SERVERNAME_B, versionid);
      assertNotSame(-1, versionid);
      // Block here until our znode is cleared or until this test timesout.
      ZKAssign.blockUntilNoRIT(watcher);
    } finally {
      am.getExecutorService().shutdown();
      am.shutdown();
    }
  }

  /**
   * Fake {@code hri} online on {@code from}, then ask the AM to balance it
   * over to {@code to}.
   */
  private void createRegionPlanAndBalance(
      final AssignmentManager am, final ServerName from,
      final ServerName to, final HRegionInfo hri) throws RegionException {
    // Call the balance function but fake the region being online first at
    // servername from.
    am.regionOnline(hri, from);
    // Balance region from 'from' to 'to'. It calls unassign setting CLOSING state
    // up in zk. Create a plan and balance
    am.balance(new RegionPlan(hri, from, to));
  }

  /**
   * Tests AssignmentManager balance function. Runs a balance moving a region
   * from one server to another mocking regionserver responding over zk.
   * @throws IOException
   * @throws KeeperException
   * @throws DeserializationException
   */
  @Test
  public void testBalance()
      throws IOException, KeeperException, DeserializationException, InterruptedException {
    // Create and startup an executor. This is used by AssignmentManager
    // handling zk callbacks.
    ExecutorService executor = startupMasterExecutor("testBalanceExecutor");

    // We need a mocked catalog tracker.
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(server
        .getConfiguration());
    // Create an AM.
    AssignmentManager am = new AssignmentManager(this.server,
      this.serverManager, ct, balancer, executor, null);
    am.failoverCleanupDone.set(true);
    try {
      // Make sure our new AM gets callbacks; once registered, can't unregister.
      // Thats ok because we make a new zk watcher for each test.
      this.watcher.registerListenerFirst(am);
      // Call the balance function but fake the region being online first at
      // SERVERNAME_A. Create a balance plan.
      am.regionOnline(REGIONINFO, SERVERNAME_A);
      // Balance region from A to B.
      RegionPlan plan = new RegionPlan(REGIONINFO, SERVERNAME_A, SERVERNAME_B);
      am.balance(plan);

      // Now fake the region closing successfully over on the regionserver; the
      // regionserver will have set the region in CLOSED state. This will
      // trigger callback into AM. The below zk close call is from the RS close
      // region handler duplicated here because its down deep in a private
      // method hard to expose.
      int versionid =
        ZKAssign.transitionNodeClosed(this.watcher, REGIONINFO, SERVERNAME_A, -1);
      assertNotSame(versionid, -1);
      // AM is going to notice above CLOSED and queue up a new assign. The
      // assign will go to open the region in the new location set by the
      // balancer. The zk node will be OFFLINE waiting for regionserver to
      // transition it through OPENING, OPENED. Wait till we see the OFFLINE
      // zk node before we proceed.
      Mocking.waitForRegionPendingOpenInRIT(am, REGIONINFO.getEncodedName());

      // Get current versionid else will fail on transition from OFFLINE to OPENING below
      versionid = ZKAssign.getVersion(this.watcher, REGIONINFO);
      assertNotSame(-1, versionid);
      // This uglyness below is what the openregionhandler on RS side does.
      // NOTE(review): OFFLINE->OPENING is done as SERVERNAME_A while the
      // OPENED transition below uses SERVERNAME_B -- looks inconsistent with
      // the plan's destination; confirm against AssignmentManager semantics.
      versionid = ZKAssign.transitionNode(server.getZooKeeper(), REGIONINFO,
        SERVERNAME_A, EventType.M_ZK_REGION_OFFLINE,
        EventType.RS_ZK_REGION_OPENING, versionid);
      assertNotSame(-1, versionid);
      // Move znode from OPENING to OPENED as RS does on successful open.
      versionid = ZKAssign.transitionNodeOpened(this.watcher, REGIONINFO,
        SERVERNAME_B, versionid);
      assertNotSame(-1, versionid);
      // Wait on the handler removing the OPENED znode.
      while(am.getRegionStates().isRegionInTransition(REGIONINFO)) Threads.sleep(1);
    } finally {
      executor.shutdown();
      am.shutdown();
      // Clean up all znodes
      ZKAssign.deleteAllNodes(this.watcher);
    }
  }

  /**
   * Run a simple server shutdown handler.
   * @throws KeeperException
   * @throws IOException
   */
  @Test
  public void testShutdownHandler()
      throws KeeperException, IOException, ServiceException {
    // Create and startup an executor. This is used by AssignmentManager
    // handling zk callbacks.
    ExecutorService executor = startupMasterExecutor("testShutdownHandler");

    // We need a mocked catalog tracker.
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    // Create an AM.
    AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(
      this.server, this.serverManager);
    try {
      processServerShutdownHandler(ct, am, false);
    } finally {
      executor.shutdown();
      am.shutdown();
      // Clean up all znodes
      ZKAssign.deleteAllNodes(this.watcher);
    }
  }

  /**
   * To test closed region handler to remove rit and delete corresponding znode
   * if region in pending close or closing while processing shutdown of a region
   * server.(HBASE-5927).
*
   * @throws KeeperException
   * @throws IOException
   * @throws ServiceException
   */
  @Test
  public void testSSHWhenDisableTableInProgress()
      throws KeeperException, IOException, ServiceException {
    testCaseWithPartiallyDisabledState(Table.State.DISABLING);
    testCaseWithPartiallyDisabledState(Table.State.DISABLED);
  }

  /**
   * To test if the split region is removed from RIT if the region was in SPLITTING state but the RS
   * has actually completed the splitting in META but went down. See HBASE-6070 and also HBASE-5806
   *
   * @throws KeeperException
   * @throws IOException
   */
  @Test
  public void testSSHWhenSplitRegionInProgress()
      throws KeeperException, IOException, Exception {
    // true indicates the region is split but still in RIT
    testCaseWithSplitRegionPartial(true);
    // false indicate the region is not split
    testCaseWithSplitRegionPartial(false);
  }

  /**
   * Shared body for {@link #testSSHWhenSplitRegionInProgress()}: fakes a
   * region in SPLITTING state and runs server-shutdown handling.
   *
   * @param regionSplitDone true if the split completed in META before the RS died
   */
  private void testCaseWithSplitRegionPartial(boolean regionSplitDone)
      throws KeeperException, IOException, NodeExistsException,
      InterruptedException, ServiceException {
    // Create and startup an executor. This is used by AssignmentManager
    // handling zk callbacks.
    ExecutorService executor = startupMasterExecutor("testSSHWhenSplitRegionInProgress");
    // We need a mocked catalog tracker.
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    ZKAssign.deleteAllNodes(this.watcher);

    // Create an AM.
    AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(
      this.server, this.serverManager);
    // adding region to regions and servers maps.
    am.regionOnline(REGIONINFO, SERVERNAME_A);
    // adding region in pending close.
    am.getRegionStates().updateRegionState(
      REGIONINFO, State.SPLITTING, SERVERNAME_A);
    am.getZKTable().setEnabledTable(REGIONINFO.getTableNameAsString());
    RegionTransition data = RegionTransition.createRegionTransition(EventType.RS_ZK_REGION_SPLITTING,
        REGIONINFO.getRegionName(), SERVERNAME_A);
    String node = ZKAssign.getNodeName(this.watcher, REGIONINFO.getEncodedName());
    // create znode in M_ZK_REGION_CLOSING state.
    ZKUtil.createAndWatch(this.watcher, node, data.toByteArray());

    try {
      processServerShutdownHandler(ct, am, regionSplitDone);
      // check znode deleted or not.
      // In both cases the znode should be deleted.
      if (regionSplitDone) {
        assertFalse("Region state of region in SPLITTING should be removed from rit.",
          am.getRegionStates().isRegionsInTransition());
      } else {
        while (!am.assignInvoked) {
          Thread.sleep(1);
        }
        assertTrue("Assign should be invoked.", am.assignInvoked);
      }
    } finally {
      // Reset shared REGIONINFO fixture so other tests see a clean region.
      REGIONINFO.setOffline(false);
      REGIONINFO.setSplit(false);
      executor.shutdown();
      am.shutdown();
      // Clean up all znodes
      ZKAssign.deleteAllNodes(this.watcher);
    }
  }

  /**
   * Shared body for {@link #testSSHWhenDisableTableInProgress()}: stage the
   * table in the given partially-disabled state and run shutdown handling.
   */
  private void testCaseWithPartiallyDisabledState(Table.State state)
      throws KeeperException, IOException, NodeExistsException, ServiceException {
    // Create and startup an executor. This is used by AssignmentManager
    // handling zk callbacks.
    ExecutorService executor = startupMasterExecutor("testSSHWhenDisableTableInProgress");
    // We need a mocked catalog tracker.
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(server.getConfiguration());
    ZKAssign.deleteAllNodes(this.watcher);

    // Create an AM.
    AssignmentManager am = new AssignmentManager(this.server,
      this.serverManager, ct, balancer, executor, null);
    // adding region to regions and servers maps.
    am.regionOnline(REGIONINFO, SERVERNAME_A);
    // adding region in pending close.
    am.getRegionStates().updateRegionState(REGIONINFO, State.PENDING_CLOSE);
    if (state == Table.State.DISABLING) {
      am.getZKTable().setDisablingTable(REGIONINFO.getTableNameAsString());
    } else {
      am.getZKTable().setDisabledTable(REGIONINFO.getTableNameAsString());
    }
    RegionTransition data = RegionTransition.createRegionTransition(EventType.M_ZK_REGION_CLOSING,
        REGIONINFO.getRegionName(), SERVERNAME_A);
    String node = ZKAssign.getNodeName(this.watcher, REGIONINFO.getEncodedName());
    // create znode in M_ZK_REGION_CLOSING state.
    ZKUtil.createAndWatch(this.watcher, node, data.toByteArray());

    try {
      processServerShutdownHandler(ct, am, false);
      // check znode deleted or not.
      // In both cases the znode should be deleted.
      assertTrue("The znode should be deleted.",
        ZKUtil.checkExists(this.watcher, node) == -1);
      // check whether in rit or not. In the DISABLING case also the below
      // assert will be true but the piece of code added for HBASE-5927 will not
      // do that.
      if (state == Table.State.DISABLED) {
        assertFalse("Region state of region in pending close should be removed from rit.",
          am.getRegionStates().isRegionsInTransition());
      }
    } finally {
      am.setEnabledTable(REGIONINFO.getTableNameAsString());
      executor.shutdown();
      am.shutdown();
      // Clean up all znodes
      ZKAssign.deleteAllNodes(this.watcher);
    }
  }

  /**
   * Mock out a scan of META and drive a {@link ServerShutdownHandler} for
   * SERVERNAME_A against the given AM.
   *
   * @param ct mocked catalog tracker wired to the mocked connection
   * @param am assignment manager under test
   * @param splitRegion whether META should report REGIONINFO as a split region
   */
  private void processServerShutdownHandler(CatalogTracker ct, AssignmentManager am,
      boolean splitRegion)
      throws IOException, ServiceException {
    // Make sure our new AM gets callbacks; once registered, can't unregister.
    // Thats ok because we make a new zk watcher for each test.
    this.watcher.registerListenerFirst(am);

    // Need to set up a fake scan of meta for the servershutdown handler
    // Make an RS Interface implementation. Make it so a scanner can go against it.
ClientProtocol implementation = Mockito.mock(ClientProtocol.class);
    // Get a meta row result that has region up on SERVERNAME_A
    Result r = null;
    if (splitRegion) {
      r = MetaMockingUtil.getMetaTableRowResultAsSplitRegion(REGIONINFO, SERVERNAME_A);
    } else {
      r = MetaMockingUtil.getMetaTableRowResult(REGIONINFO, SERVERNAME_A);
    }
    ScanResponse.Builder builder = ScanResponse.newBuilder();
    builder.setMoreResults(true);
    builder.addResult(ProtobufUtil.toResult(r));
    Mockito.when(implementation.scan(
      (RpcController)Mockito.any(), (ScanRequest)Mockito.any())).
        thenReturn(builder.build());

    // Get a connection w/ mocked up common methods.
    HConnection connection =
      HConnectionTestingUtility.getMockedConnectionAndDecorate(HTU.getConfiguration(),
        null, implementation, SERVERNAME_B, REGIONINFO);

    // Make it so we can get a catalogtracker from servermanager.. .needed
    // down in guts of server shutdown handler.
    Mockito.when(ct.getConnection()).thenReturn(connection);
    Mockito.when(this.server.getCatalogTracker()).thenReturn(ct);

    // Now make a server shutdown handler instance and invoke process.
    // Have it that SERVERNAME_A died.
    DeadServer deadServers = new DeadServer();
    deadServers.add(SERVERNAME_A);
    // I need a services instance that will return the AM
    MasterServices services = Mockito.mock(MasterServices.class);
    Mockito.when(services.getAssignmentManager()).thenReturn(am);
    Mockito.when(services.getServerManager()).thenReturn(this.serverManager);
    Mockito.when(services.getZooKeeper()).thenReturn(this.watcher);
    ServerShutdownHandler handler = new ServerShutdownHandler(this.server,
        services, deadServers, SERVERNAME_A, false);
    am.failoverCleanupDone.set(true);
    handler.process();
    // The region in r will have been assigned. It'll be up in zk as unassigned.
  }

  /**
   * Create and startup executor pools. Start same set as master does (just
   * run a few less).
   * @param name Name to give our executor
   * @return Created executor (be sure to call shutdown when done).
   */
  private ExecutorService startupMasterExecutor(final String name) {
    // TODO: Move up into HBaseTestingUtility? Generally useful.
    ExecutorService executor = new ExecutorService(name);
    executor.startExecutorService(ExecutorType.MASTER_OPEN_REGION, 3);
    executor.startExecutorService(ExecutorType.MASTER_CLOSE_REGION, 3);
    executor.startExecutorService(ExecutorType.MASTER_SERVER_OPERATIONS, 3);
    executor.startExecutorService(ExecutorType.MASTER_META_SERVER_OPERATIONS, 3);
    return executor;
  }

  /**
   * Unassigning a region while a SPLITTING znode is in place should complete
   * silently rather than fail.
   */
  @Test
  public void testUnassignWithSplitAtSameTime() throws KeeperException, IOException {
    // Region to use in test.
    final HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO;
    // First amend the servermanager mock so that when we do send close of the
    // first meta region on SERVERNAME_A, it will return true rather than
    // default null.
    Mockito.when(this.serverManager.sendRegionClose(SERVERNAME_A, hri, -1)).thenReturn(true);
    // Need a mocked catalog tracker.
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(server
        .getConfiguration());
    // Create an AM.
    AssignmentManager am = new AssignmentManager(this.server,
      this.serverManager, ct, balancer, null, null);
    try {
      // First make sure my mock up basically works. Unassign a region.
      unassign(am, SERVERNAME_A, hri);
      // This delete will fail if the previous unassign did wrong thing.
      ZKAssign.deleteClosingNode(this.watcher, hri);
      // Now put a SPLITTING region in the way. I don't have to assert it
      // go put in place. This method puts it in place then asserts it still
      // owns it by moving state from SPLITTING to SPLITTING.
      int version = createNodeSplitting(this.watcher, hri, SERVERNAME_A);
      // Now, retry the unassign with the SPLTTING in place. It should just
      // complete without fail; a sort of 'silent' recognition that the
      // region to unassign has been split and no longer exists: TOOD: what if
      // the split fails and the parent region comes back to life?
unassign(am, SERVERNAME_A, hri);
      // This transition should fail if the znode has been messed with.
      ZKAssign.transitionNode(this.watcher, hri, SERVERNAME_A,
        EventType.RS_ZK_REGION_SPLITTING, EventType.RS_ZK_REGION_SPLITTING, version);
      assertFalse(am.getRegionStates().isRegionInTransition(hri));
    } finally {
      am.shutdown();
    }
  }

  /**
   * Tests the processDeadServersAndRegionsInTransition should not fail with NPE
   * when it failed to get the children. Let's abort the system in this
   * situation
   * @throws ServiceException
   */
  @Test(timeout = 5000)
  public void testProcessDeadServersAndRegionsInTransitionShouldNotFailWithNPE()
      throws IOException, KeeperException, InterruptedException, ServiceException {
    final RecoverableZooKeeper recoverableZk = Mockito
        .mock(RecoverableZooKeeper.class);
    AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(
      this.server, this.serverManager);
    // Watcher whose RecoverableZooKeeper is our mock, so getChildren() can
    // be made to fail on demand.
    Watcher zkw = new ZooKeeperWatcher(HBaseConfiguration.create(), "unittest", null) {
      public RecoverableZooKeeper getRecoverableZooKeeper() {
        return recoverableZk;
      }
    };
    ((ZooKeeperWatcher) zkw).registerListener(am);
    Mockito.doThrow(new InterruptedException()).when(recoverableZk)
        .getChildren("/hbase/unassigned", null);
    am.setWatcher((ZooKeeperWatcher) zkw);
    try {
      am.processDeadServersAndRegionsInTransition(null);
      fail("Expected to abort");
    } catch (NullPointerException e) {
      fail("Should not throw NPE");
    } catch (RuntimeException e) {
      assertEquals("Aborted", e.getLocalizedMessage());
    }
  }

  /**
   * TestCase verifies that the regionPlan is updated whenever a region fails to open
   * and the master tries to process RS_ZK_FAILED_OPEN state.(HBASE-5546).
   */
  @Test(timeout = 5000)
  public void testRegionPlanIsUpdatedWhenRegionFailsToOpen()
      throws IOException, KeeperException, ServiceException, InterruptedException {
    // Swap in the gated MockedLoadBalancer for this test.
    this.server.getConfiguration().setClass(
      HConstants.HBASE_MASTER_LOADBALANCER_CLASS, MockedLoadBalancer.class,
      LoadBalancer.class);
    AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(
      this.server, this.serverManager);
    try {
      // Boolean variable used for waiting until randomAssignment is called and
      // new plan is generated.
      AtomicBoolean gate = new AtomicBoolean(false);
      if (balancer instanceof MockedLoadBalancer) {
        ((MockedLoadBalancer) balancer).setGateVariable(gate);
      }
      ZKAssign.createNodeOffline(this.watcher, REGIONINFO, SERVERNAME_A);
      int v = ZKAssign.getVersion(this.watcher, REGIONINFO);
      ZKAssign.transitionNode(this.watcher, REGIONINFO, SERVERNAME_A,
          EventType.M_ZK_REGION_OFFLINE, EventType.RS_ZK_REGION_FAILED_OPEN, v);
      String path = ZKAssign.getNodeName(this.watcher, REGIONINFO
          .getEncodedName());
      am.getRegionStates().updateRegionState(
        REGIONINFO, State.OPENING, SERVERNAME_A);
      // a dummy plan inserted into the regionPlans. This plan is cleared and
      // new one is formed
      am.regionPlans.put(REGIONINFO.getEncodedName(), new RegionPlan(
          REGIONINFO, null, SERVERNAME_A));
      RegionPlan regionPlan = am.regionPlans.get(REGIONINFO.getEncodedName());
      List<ServerName> serverList = new ArrayList<ServerName>(2);
      serverList.add(SERVERNAME_B);
      Mockito.when(
          this.serverManager.createDestinationServersList(SERVERNAME_A))
          .thenReturn(serverList);
      am.nodeDataChanged(path);
      // here we are waiting until the random assignment in the load balancer is
      // called.
      while (!gate.get()) {
        Thread.sleep(10);
      }
      // new region plan may take some time to get updated after random
      // assignment is called and gate is set to true.
      RegionPlan newRegionPlan = am.regionPlans
          .get(REGIONINFO.getEncodedName());
      while (newRegionPlan == null) {
        Thread.sleep(10);
        newRegionPlan = am.regionPlans.get(REGIONINFO.getEncodedName());
      }
      // the new region plan created may contain the same RS as destination but
      // it should be new plan.
      assertNotSame("Same region plan should not come", regionPlan,
        newRegionPlan);
      assertTrue("Destination servers should be different.", !(regionPlan
          .getDestination().equals(newRegionPlan.getDestination())));
      Mocking.waitForRegionPendingOpenInRIT(am, REGIONINFO.getEncodedName());
    } finally {
      // Restore the default balancer for subsequent tests.
      this.server.getConfiguration().setClass(
        HConstants.HBASE_MASTER_LOADBALANCER_CLASS, DefaultLoadBalancer.class,
        LoadBalancer.class);
      am.getExecutorService().shutdown();
      am.shutdown();
    }
  }

  /**
   * Mocked load balancer class used in the testcase to make sure that the testcase waits until
   * random assignment is called and the gate variable is set to true.
   */
  public static class MockedLoadBalancer extends DefaultLoadBalancer {
    // Flipped to true when an assignment entry point is reached; injected by tests.
    private AtomicBoolean gate;

    public void setGateVariable(AtomicBoolean gate) {
      this.gate = gate;
    }

    @Override
    public ServerName randomAssignment(HRegionInfo regionInfo, List<ServerName> servers) {
      ServerName randomServerName = super.randomAssignment(regionInfo, servers);
      this.gate.set(true);
      return randomServerName;
    }

    @Override
    public Map<ServerName, List<HRegionInfo>> retainAssignment(
        Map<HRegionInfo, ServerName> regions, List<ServerName> servers) {
      this.gate.set(true);
      return super.retainAssignment(regions, servers);
    }
  }

  /**
   * Test the scenario when the master is in failover and trying to process a
   * region which is in Opening state on a dead RS. Master should immediately
   * assign the region and not wait for Timeout Monitor.(Hbase-5882).
*/
  @Test(timeout = 5000)
  public void testRegionInOpeningStateOnDeadRSWhileMasterFailover()
      throws IOException, KeeperException, ServiceException, InterruptedException {
    AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(
      this.server, this.serverManager);
    // Stage an OFFLINE->OPENING znode claimed by SERVERNAME_A...
    ZKAssign.createNodeOffline(this.watcher, REGIONINFO, SERVERNAME_A);
    int version = ZKAssign.getVersion(this.watcher, REGIONINFO);
    ZKAssign.transitionNode(this.watcher, REGIONINFO, SERVERNAME_A,
      EventType.M_ZK_REGION_OFFLINE, EventType.RS_ZK_REGION_OPENING, version);
    RegionTransition rt = RegionTransition.createRegionTransition(EventType.RS_ZK_REGION_OPENING,
        REGIONINFO.getRegionName(), SERVERNAME_A, HConstants.EMPTY_BYTE_ARRAY);
    version = ZKAssign.getVersion(this.watcher, REGIONINFO);
    // ...then declare SERVERNAME_A dead before failover processing runs.
    Mockito.when(this.serverManager.isServerOnline(SERVERNAME_A)).thenReturn(false);
    am.getRegionStates().createRegionState(REGIONINFO);
    am.gate.set(false);
    am.processRegionsInTransition(rt, REGIONINFO, version);
    // Waiting for the assignment to get completed.
    while (!am.gate.get()) {
      Thread.sleep(10);
    }
    assertTrue("The region should be assigned immediately.",
      null != am.regionPlans.get(REGIONINFO.getEncodedName()));
  }

  /**
   * Test verifies whether assignment is skipped for regions of tables in DISABLING state during
   * clean cluster startup. See HBASE-6281.
   *
   * @throws KeeperException
   * @throws IOException
   * @throws Exception
   */
  @Test(timeout = 10000)
  public void testDisablingTableRegionsAssignmentDuringCleanClusterStartup()
      throws KeeperException, IOException, Exception {
    this.server.getConfiguration().setClass(HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
      MockedLoadBalancer.class, LoadBalancer.class);
    Mockito.when(this.serverManager.getOnlineServers()).thenReturn(
        new HashMap<ServerName, ServerLoad>(0));
    List<ServerName> destServers = new ArrayList<ServerName>(1);
    destServers.add(SERVERNAME_A);
    Mockito.when(this.serverManager.createDestinationServersList()).thenReturn(destServers);
    // To avoid cast exception in DisableTableHandler process.
    HTU.getConfiguration().setInt(HConstants.MASTER_PORT, 0);
    Server server = new HMaster(HTU.getConfiguration());
    AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(server,
        this.serverManager);
    AtomicBoolean gate = new AtomicBoolean(false);
    if (balancer instanceof MockedLoadBalancer) {
      ((MockedLoadBalancer) balancer).setGateVariable(gate);
    }
    try{
      // set table in disabling state.
      am.getZKTable().setDisablingTable(REGIONINFO.getTableNameAsString());
      am.joinCluster();
      // should not call retainAssignment if we get empty regions in assignAllUserRegions.
      assertFalse(
          "Assign should not be invoked for disabling table regions during clean cluster startup.",
          gate.get());
      // need to change table state from disabling to disabled.
      assertTrue("Table should be disabled.",
          am.getZKTable().isDisabledTable(REGIONINFO.getTableNameAsString()));
    } finally {
      this.server.getConfiguration().setClass(
        HConstants.HBASE_MASTER_LOADBALANCER_CLASS, DefaultLoadBalancer.class,
        LoadBalancer.class);
      am.getZKTable().setEnabledTable(REGIONINFO.getTableNameAsString());
      am.shutdown();
    }
  }

  /**
   * Test verifies whether all the enabling table regions assigned only once during master startup.
   *
   * @throws KeeperException
   * @throws IOException
   * @throws Exception
   */
  @Test
  public void testMasterRestartWhenTableInEnabling()
      throws KeeperException, IOException, Exception {
    enabling = true;
    List<ServerName> destServers = new ArrayList<ServerName>(1);
    destServers.add(SERVERNAME_A);
    Mockito.when(this.serverManager.createDestinationServersList()).thenReturn(destServers);
    Mockito.when(this.serverManager.isServerOnline(SERVERNAME_A)).thenReturn(true);
    HTU.getConfiguration().setInt(HConstants.MASTER_PORT, 0);
    Server server = new HMaster(HTU.getConfiguration());
    // Swap the master's serverManager for our mock via reflection.
    Whitebox.setInternalState(server, "serverManager", this.serverManager);
    AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(server,
        this.serverManager);
    try {
      // set table in enabling state.
      am.getZKTable().setEnablingTable(REGIONINFO.getTableNameAsString());
      new EnableTableHandler(server, REGIONINFO.getTableName(),
          am.getCatalogTracker(), am, true).process();
      assertEquals("Number of assignments should be 1.", 1, assignmentCount);
      assertTrue("Table should be enabled.",
          am.getZKTable().isEnabledTable(REGIONINFO.getTableNameAsString()));
    } finally {
      enabling = false;
      assignmentCount = 0;
      am.getZKTable().setEnabledTable(REGIONINFO.getTableNameAsString());
      am.shutdown();
      ZKAssign.deleteAllNodes(this.watcher);
    }
  }

  /**
   * When a region is in transition, if the region server opening the region goes down,
   * the region assignment takes a long time normally (waiting for timeout monitor to trigger assign).
   * This test is to make sure SSH times out the transition right away.
   */
  @Test
  public void testSSHTimesOutOpeningRegionTransition()
      throws KeeperException, IOException, ServiceException {
    // We need a mocked catalog tracker.
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    // Create an AM.
    AssignmentManagerWithExtrasForTesting am =
      setUpMockedAssignmentManager(this.server, this.serverManager);
    // adding region in pending open.
RegionState state = new RegionState(REGIONINFO,
      State.OPENING, System.currentTimeMillis(), SERVERNAME_A);
    am.getRegionStates().regionsInTransition.put(REGIONINFO.getEncodedName(), state);
    // adding region plan
    am.regionPlans.put(REGIONINFO.getEncodedName(),
      new RegionPlan(REGIONINFO, SERVERNAME_B, SERVERNAME_A));
    am.getZKTable().setEnabledTable(REGIONINFO.getTableNameAsString());

    try {
      processServerShutdownHandler(ct, am, false);
      // SSH resets the transition stamp to 0 to force an immediate timeout.
      assertTrue("Transtion is timed out", state.getStamp() == 0);
    } finally {
      am.getRegionStates().regionsInTransition.remove(REGIONINFO.getEncodedName());
      am.regionPlans.remove(REGIONINFO.getEncodedName());
    }
  }

  /**
   * Creates a new ephemeral node in the SPLITTING state for the specified region.
   * Create it ephemeral in case regionserver dies mid-split.
   *
   * <p>Does not transition nodes from other states. If a node already exists
   * for this region, a {@link NodeExistsException} will be thrown.
   *
   * @param zkw zk reference
   * @param region region to be created as offline
   * @param serverName server event originates from
   * @return Version of znode created.
   * @throws KeeperException
   * @throws IOException
   */
  // Copied from SplitTransaction rather than open the method over there in
  // the regionserver package.
  private static int createNodeSplitting(final ZooKeeperWatcher zkw,
      final HRegionInfo region, final ServerName serverName)
      throws KeeperException, IOException {
    RegionTransition rt =
      RegionTransition.createRegionTransition(EventType.RS_ZK_REGION_SPLITTING,
        region.getRegionName(), serverName);
    String node = ZKAssign.getNodeName(zkw, region.getEncodedName());
    if (!ZKUtil.createEphemeralNodeAndWatch(zkw, node, rt.toByteArray())) {
      throw new IOException("Failed create of ephemeral " + node);
    }
    // Transition node from SPLITTING to SPLITTING and pick up version so we
    // can be sure this znode is ours; version is needed deleting.
    return transitionNodeSplitting(zkw, region, serverName, -1);
  }

  // Copied from SplitTransaction rather than open the method over there in
  // the regionserver package.
  private static int transitionNodeSplitting(final ZooKeeperWatcher zkw,
      final HRegionInfo parent, final ServerName serverName, final int version)
      throws KeeperException, IOException {
    return ZKAssign.transitionNode(zkw, parent, serverName,
      EventType.RS_ZK_REGION_SPLITTING, EventType.RS_ZK_REGION_SPLITTING, version);
  }

  /** Fake {@code hri} online on {@code sn} and then unassign it. */
  private void unassign(final AssignmentManager am, final ServerName sn,
      final HRegionInfo hri) throws RegionException {
    // Before I can unassign a region, I need to set it online.
    am.regionOnline(hri, sn);
    // Unassign region.
    am.unassign(hri);
  }

  /**
   * Create an {@link AssignmentManagerWithExtrasForTesting} that has mocked
   * {@link CatalogTracker} etc.
   * @param server
   * @param manager
   * @return An AssignmentManagerWithExtras with mock connections, etc.
   * @throws IOException
   * @throws KeeperException
   */
  private AssignmentManagerWithExtrasForTesting setUpMockedAssignmentManager(final Server server,
      final ServerManager manager)
      throws IOException, KeeperException, ServiceException {
    // We need a mocked catalog tracker. Its used by our AM instance.
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    // Make an RS Interface implementation. Make it so a scanner can go against
    // it and a get to return the single region, REGIONINFO, this test is
    // messing with. Needed when "new master" joins cluster. AM will try and
    // rebuild its list of user regions and it will also get the HRI that goes
    // with an encoded name by doing a Get on .META.
    ClientProtocol ri = Mockito.mock(ClientProtocol.class);
    // Get a meta row result that has region up on SERVERNAME_A for REGIONINFO
    Result r = MetaMockingUtil.getMetaTableRowResult(REGIONINFO, SERVERNAME_A);
    ScanResponse.Builder builder = ScanResponse.newBuilder();
    builder.setMoreResults(true);
    builder.addResult(ProtobufUtil.toResult(r));
    if (enabling) {
      // Enabling-table tests scan repeatedly; feed several result pages then
      // an empty terminal response.
      Mockito.when(ri.scan((RpcController) Mockito.any(), (ScanRequest) Mockito.any()))
          .thenReturn(builder.build()).thenReturn(builder.build()).thenReturn(builder.build())
          .thenReturn(builder.build()).thenReturn(builder.build())
          .thenReturn(ScanResponse.newBuilder().setMoreResults(false).build());
    } else {
      Mockito.when(ri.scan((RpcController) Mockito.any(), (ScanRequest) Mockito.any())).thenReturn(
          builder.build());
    }
    // If a get, return the above result too for REGIONINFO
    GetResponse.Builder getBuilder = GetResponse.newBuilder();
    getBuilder.setResult(ProtobufUtil.toResult(r));
    Mockito.when(ri.get((RpcController)Mockito.any(), (GetRequest) Mockito.any())).
      thenReturn(getBuilder.build());
    // Get a connection w/ mocked up common methods.
    HConnection connection = HConnectionTestingUtility.
      getMockedConnectionAndDecorate(HTU.getConfiguration(), null,
        ri, SERVERNAME_B, REGIONINFO);
    // Make it so we can get the connection from our mocked catalogtracker
    Mockito.when(ct.getConnection()).thenReturn(connection);
    // Create and startup an executor. Used by AM handling zk callbacks.
    ExecutorService executor = startupMasterExecutor("mockedAMExecutor");
    this.balancer = LoadBalancerFactory.getLoadBalancer(server.getConfiguration());
    AssignmentManagerWithExtrasForTesting am = new AssignmentManagerWithExtrasForTesting(
      server, manager, ct, this.balancer, executor);
    return am;
  }

  /**
   * An {@link AssignmentManager} with some extra facility used testing
   */
  class AssignmentManagerWithExtrasForTesting extends AssignmentManager {
    // Keep a reference so can give it out below in {@link #getExecutorService}
    private final ExecutorService es;
    // Ditto for ct
    private final CatalogTracker ct;
    // Set true once processRegionInTransition has been entered.
    boolean processRITInvoked = false;
    // Set true once the bulk assign(List) override has been entered.
    boolean assignInvoked = false;
    // Tests flip this to coordinate with assignment progress.
    AtomicBoolean gate = new AtomicBoolean(true);

    public AssignmentManagerWithExtrasForTesting(
        final Server master, final ServerManager serverManager,
        final CatalogTracker catalogTracker, final LoadBalancer balancer,
        final ExecutorService service) throws KeeperException, IOException {
      super(master, serverManager, catalogTracker, balancer, service, null);
      this.es = service;
      this.ct = catalogTracker;
    }

    @Override
    boolean processRegionInTransition(String encodedRegionName,
        HRegionInfo regionInfo) throws KeeperException, IOException {
      this.processRITInvoked = true;
      return super.processRegionInTransition(encodedRegionName, regionInfo);
    }

    @Override
    public void assign(HRegionInfo region, boolean setOfflineInZK, boolean forceNewPlan) {
      if (enabling) {
        // In the enabling-table scenario just count assignments; don't
        // actually assign.
        assignmentCount++;
        this.regionOnline(region, SERVERNAME_A);
      } else {
        super.assign(region, setOfflineInZK, forceNewPlan);
        this.gate.set(true);
      }
    }

    @Override
    public void assign(List<HRegionInfo> regions)
        throws IOException, InterruptedException {
      assignInvoked = true;
    }

    /** reset the watcher */
    void setWatcher(ZooKeeperWatcher watcher) {
      this.watcher = watcher;
    }

    /**
     * @return ExecutorService used by this instance.
     */
    ExecutorService getExecutorService() {
      return this.es;
    }

    /**
     * @return CatalogTracker used by this AM (Its a mock.
*/ CatalogTracker getCatalogTracker() { return this.ct; } } /** * Call joinCluster on the passed AssignmentManager. Do it in a thread * so it runs independent of what all else is going on. Try to simulate * an AM running insided a failed over master by clearing all in-memory * AM state first. */ private void startFakeFailedOverMasterAssignmentManager(final AssignmentManager am, final ZooKeeperWatcher watcher) { // Make sure our new AM gets callbacks; once registered, we can't unregister. // Thats ok because we make a new zk watcher for each test. watcher.registerListenerFirst(am); Thread t = new Thread("RunAmJoinCluster") { public void run() { // Call the joinCluster function as though we were doing a master // failover at this point. It will stall just before we go to add // the RIT region to our RIT Map in AM at processRegionsInTransition. // First clear any inmemory state from AM so it acts like a new master // coming on line. am.getRegionStates().regionsInTransition.clear(); am.regionPlans.clear(); try { am.joinCluster(); } catch (IOException e) { throw new RuntimeException(e); } catch (KeeperException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } }; }; t.start(); while (!t.isAlive()) Threads.sleep(1); } }
package com.rackspacecloud.blueflood.io;

import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.rackspacecloud.blueflood.io.serializers.LocatorSerializer;
import com.rackspacecloud.blueflood.io.serializers.SlotStateSerializer;
import com.rackspacecloud.blueflood.rollup.Granularity;
import com.rackspacecloud.blueflood.service.Configuration;
import com.rackspacecloud.blueflood.service.CoreConfig;
import com.rackspacecloud.blueflood.service.SlotState;
import com.rackspacecloud.blueflood.types.*;
import com.rackspacecloud.blueflood.utils.TimeValue;

import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * Central catalog of the Cassandra column families Blueflood uses: one family
 * per (metric kind, rollup granularity) pair plus a handful of system families
 * (metadata, locator, shard state). Also provides the lookup logic that maps a
 * rollup type + granularity to the column family it is stored in.
 */
public class CassandraModel {
    public static final String KEYSPACE =
            Configuration.getInstance().getStringProperty(CoreConfig.ROLLUP_KEYSPACE);
    public static final String CLUSTER =
            Configuration.getInstance().getStringProperty(CoreConfig.CLUSTER_NAME);

    /**
     * It is worth pointing out that the actual TTL value is calculated by taking the TimeValues below
     * and multiplying by 5. Why? Becuase SafetyTtlProvider.java multiplies the TimeValues below by 5.
     *
     * Look for a line like this (currently line 48):
     * TimeValue ttl = new TimeValue(metricCF.getDefaultTTL().getValue() * 5, metricCF.getDefaultTTL().getUnit());
     *
     * For example, TimeValue of 1 will equate to a 5 day TTL.
     */
    public static final MetricColumnFamily CF_METRICS_FULL =
            new MetricColumnFamily("metrics_full", new TimeValue(1, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_5M =
            new MetricColumnFamily("metrics_5m", new TimeValue(2, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_20M =
            new MetricColumnFamily("metrics_20m", new TimeValue(4, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_60M =
            new MetricColumnFamily("metrics_60m", new TimeValue(31, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_240M =
            new MetricColumnFamily("metrics_240m", new TimeValue(60, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_1440M =
            new MetricColumnFamily("metrics_1440m", new TimeValue(365, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_STRING =
            new MetricColumnFamily("metrics_string", new TimeValue(365 * 3, TimeUnit.DAYS));

    public static final MetricColumnFamily CF_METRICS_PREAGGREGATED_FULL =
            new MetricColumnFamily("metrics_preaggregated_full", new TimeValue(1, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_PREAGGREGATED_5M =
            new MetricColumnFamily("metrics_preaggregated_5m", new TimeValue(2, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_PREAGGREGATED_20M =
            new MetricColumnFamily("metrics_preaggregated_20m", new TimeValue(4, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_PREAGGREGATED_60M =
            new MetricColumnFamily("metrics_preaggregated_60m", new TimeValue(31, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_PREAGGREGATED_240M =
            new MetricColumnFamily("metrics_preaggregated_240m", new TimeValue(60, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_PREAGGREGATED_1440M =
            new MetricColumnFamily("metrics_preaggregated_1440m", new TimeValue(365, TimeUnit.DAYS));

    // Histogram families. Full resolution shares the plain metrics family.
    public static final MetricColumnFamily CF_METRICS_HIST_FULL = CF_METRICS_FULL;
    public static final MetricColumnFamily CF_METRICS_HIST_5M =
            new MetricColumnFamily("metrics_histogram_5m", new TimeValue(2, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_HIST_20M =
            new MetricColumnFamily("metrics_histogram_20m", new TimeValue(4, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_HIST_60M =
            new MetricColumnFamily("metrics_histogram_60m", new TimeValue(31, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_HIST_240M =
            new MetricColumnFamily("metrics_histogram_240m", new TimeValue(60, TimeUnit.DAYS));
    public static final MetricColumnFamily CF_METRICS_HIST_1440M =
            new MetricColumnFamily("metrics_histogram_1440m", new TimeValue(365, TimeUnit.DAYS));

    public static final ColumnFamily<Locator, String> CF_METRIC_METADATA =
            new ColumnFamily<Locator, String>("metrics_metadata", LocatorSerializer.get(), StringSerializer.get());
    public static final ColumnFamily<Long, Locator> CF_METRICS_LOCATOR =
            new ColumnFamily<Long, Locator>("metrics_locator", LongSerializer.get(), LocatorSerializer.get());
    public static final ColumnFamily<Long, SlotState> CF_METRICS_STATE =
            new ColumnFamily<Long, SlotState>("metrics_state", LongSerializer.get(), SlotStateSerializer.get());

    // Every family that stores actual metric data (basic, preaggregated,
    // histogram and string). (Name typo "FAMILES" fixed; field is private.)
    private static final MetricColumnFamily[] METRIC_COLUMN_FAMILIES = new MetricColumnFamily[] {
            CF_METRICS_FULL, CF_METRICS_5M, CF_METRICS_20M, CF_METRICS_60M, CF_METRICS_240M, CF_METRICS_1440M,
            CF_METRICS_PREAGGREGATED_FULL, CF_METRICS_PREAGGREGATED_5M, CF_METRICS_PREAGGREGATED_20M,
            CF_METRICS_PREAGGREGATED_60M, CF_METRICS_PREAGGREGATED_240M, CF_METRICS_PREAGGREGATED_1440M,
            CF_METRICS_HIST_FULL, CF_METRICS_HIST_5M, CF_METRICS_HIST_20M, CF_METRICS_HIST_60M,
            CF_METRICS_HIST_240M, CF_METRICS_HIST_1440M,
            CF_METRICS_STRING
    };

    // Bookkeeping families that hold no metric points.
    private static final ColumnFamily[] BF_SYSTEM_COLUMN_FAMILIES = new ColumnFamily[] {
            CF_METRIC_METADATA, CF_METRICS_LOCATOR, CF_METRICS_STATE
    };

    private static final Collection<ColumnFamily> ALL_COLUMN_FAMILIES;

    // Granularity -> column-family mappers for the three metric kinds.
    // (The first was formerly named CF_NAME_TO_CF, which was misleading: the
    // key is a Granularity, not a name.)
    private static final ColumnFamilyMapper BASIC_GRAN_TO_CF;
    private static final ColumnFamilyMapper PREAG_GRAN_TO_CF;
    private static final ColumnFamilyMapper HIST_GRAN_TO_CF;

    // NOTE(review): built but never read inside this class; retained in case
    // external code accesses it reflectively — confirm before removing.
    private static final Map<ColumnFamily<Locator, Long>, Granularity> CF_TO_GRAN;

    static {
        final Map<Granularity, MetricColumnFamily> columnFamilyMap = new HashMap<Granularity, MetricColumnFamily>();
        columnFamilyMap.put(Granularity.FULL, CF_METRICS_FULL);
        columnFamilyMap.put(Granularity.MIN_5, CF_METRICS_5M);
        columnFamilyMap.put(Granularity.MIN_20, CF_METRICS_20M);
        columnFamilyMap.put(Granularity.MIN_60, CF_METRICS_60M);
        columnFamilyMap.put(Granularity.MIN_240, CF_METRICS_240M);
        columnFamilyMap.put(Granularity.MIN_1440, CF_METRICS_1440M);

        final Map<Granularity, MetricColumnFamily> preagCFMap = new HashMap<Granularity, MetricColumnFamily>();
        preagCFMap.put(Granularity.FULL, CF_METRICS_PREAGGREGATED_FULL);
        preagCFMap.put(Granularity.MIN_5, CF_METRICS_PREAGGREGATED_5M);
        preagCFMap.put(Granularity.MIN_20, CF_METRICS_PREAGGREGATED_20M);
        preagCFMap.put(Granularity.MIN_60, CF_METRICS_PREAGGREGATED_60M);
        preagCFMap.put(Granularity.MIN_240, CF_METRICS_PREAGGREGATED_240M);
        preagCFMap.put(Granularity.MIN_1440, CF_METRICS_PREAGGREGATED_1440M);

        final Map<Granularity, MetricColumnFamily> histCFMap = new HashMap<Granularity, MetricColumnFamily>();
        histCFMap.put(Granularity.FULL, CF_METRICS_HIST_FULL);
        histCFMap.put(Granularity.MIN_5, CF_METRICS_HIST_5M);
        histCFMap.put(Granularity.MIN_20, CF_METRICS_HIST_20M);
        histCFMap.put(Granularity.MIN_60, CF_METRICS_HIST_60M);
        histCFMap.put(Granularity.MIN_240, CF_METRICS_HIST_240M);
        histCFMap.put(Granularity.MIN_1440, CF_METRICS_HIST_1440M);

        Map<ColumnFamily<Locator, Long>, Granularity> cfToGranMap = new HashMap<ColumnFamily<Locator, Long>, Granularity>();
        cfToGranMap.put(CF_METRICS_FULL, Granularity.FULL);
        cfToGranMap.put(CF_METRICS_STRING, Granularity.FULL);
        cfToGranMap.put(CF_METRICS_5M, Granularity.MIN_5);
        cfToGranMap.put(CF_METRICS_20M, Granularity.MIN_20);
        cfToGranMap.put(CF_METRICS_60M, Granularity.MIN_60);
        cfToGranMap.put(CF_METRICS_240M, Granularity.MIN_240);
        cfToGranMap.put(CF_METRICS_1440M, Granularity.MIN_1440);

        BASIC_GRAN_TO_CF = new ColumnFamilyMapper() {
            @Override
            public MetricColumnFamily get(Granularity gran) {
                return columnFamilyMap.get(gran);
            }
        };
        PREAG_GRAN_TO_CF = new ColumnFamilyMapper() {
            @Override
            public MetricColumnFamily get(Granularity gran) {
                return preagCFMap.get(gran);
            }
        };
        HIST_GRAN_TO_CF = new ColumnFamilyMapper() {
            @Override
            public MetricColumnFamily get(Granularity gran) {
                return histCFMap.get(gran);
            }
        };
        CF_TO_GRAN = Collections.unmodifiableMap(cfToGranMap);

        List<ColumnFamily> cfs = new ArrayList<ColumnFamily>();
        for (ColumnFamily cf : METRIC_COLUMN_FAMILIES) {
            cfs.add(cf);
        }
        for (ColumnFamily cf : BF_SYSTEM_COLUMN_FAMILIES) {
            cfs.add(cf);
        }
        ALL_COLUMN_FAMILIES = Collections.unmodifiableList(cfs);
    }

    /**
     * Maps a rollup class + granularity to the column family storing it.
     *
     * @param type rollup class (SimpleNumber, BasicRollup, HistogramRollup or
     *             one of the preaggregated rollup classes)
     * @param granularity target granularity (ignored for SimpleNumber, which
     *                    always lives in the full-resolution family)
     * @throws RuntimeException for an unrecognized rollup class
     */
    public static ColumnFamily getColumnFamily(Class<? extends Rollup> type, Granularity granularity) {
        if (type.equals(SimpleNumber.class)) {
            return CF_METRICS_FULL;
        } else if (type.equals(BasicRollup.class)) {
            return BASIC_GRAN_TO_CF.get(granularity);
        } else if (type.equals(HistogramRollup.class)) {
            return HIST_GRAN_TO_CF.get(granularity);
        } else if (type.equals(SetRollup.class) || type.equals(TimerRollup.class)
                || type.equals(GaugeRollup.class) || type.equals(CounterRollup.class)) {
            return PREAG_GRAN_TO_CF.get(granularity);
        } else {
            throw new RuntimeException("Unsupported rollup type.");
        }
    }

    /**
     * Maps a rollup type + data type + granularity to its column family.
     * Null type/dataType default to BF_BASIC/NUMERIC; basic boolean and string
     * data always goes to the string family regardless of granularity.
     */
    public static ColumnFamily getColumnFamily(RollupType type, DataType dataType, Granularity gran) {
        if (dataType == null) {
            dataType = DataType.NUMERIC;
        }
        if (type == null) {
            type = RollupType.BF_BASIC;
        }
        if (type == RollupType.BF_BASIC
                && (dataType.equals(DataType.BOOLEAN) || dataType.equals(DataType.STRING))) {
            return CF_METRICS_STRING;
        }
        return getColumnFamily(RollupType.classOf(type, gran), gran);
    }

    /**
     * Iterates over all column families that store metrics. The returned view
     * is unmodifiable: its iterator's remove() throws
     * UnsupportedOperationException per the Iterator contract. (The previous
     * implementation threw NoSuchMethodError, an Error reserved for JVM
     * linkage failures — never appropriate for application code.)
     */
    public static Iterable<MetricColumnFamily> getMetricColumnFamilies() {
        return Collections.unmodifiableList(Arrays.asList(METRIC_COLUMN_FAMILIES));
    }

    /** @return every column family (metric plus system), unmodifiable. */
    public static Collection<ColumnFamily> getAllColumnFamilies() {
        return ALL_COLUMN_FAMILIES;
    }

    /**
     * A {@code ColumnFamily<Locator, Long>} that also carries the default TTL
     * applied to writes into it (later scaled x5 by SafetyTtlProvider).
     */
    public static class MetricColumnFamily extends ColumnFamily<Locator, Long> {
        private final TimeValue ttl;

        public MetricColumnFamily(String name, TimeValue ttl) {
            super(name, LocatorSerializer.get(), LongSerializer.get());
            this.ttl = ttl;
        }

        public TimeValue getDefaultTTL() {
            return ttl;
        }
    }

    // future versions will have get(Granularity, RollupType).
    public interface ColumnFamilyMapper {
        public MetricColumnFamily get(Granularity gran);
    }
}
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.l3vpn.netl3vpn.impl;

import com.google.common.collect.ImmutableSet;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.util.KryoNamespace;
import org.onosproject.l3vpn.netl3vpn.AccessInfo;
import org.onosproject.l3vpn.netl3vpn.BgpInfo;
import org.onosproject.l3vpn.netl3vpn.DeviceInfo;
import org.onosproject.l3vpn.netl3vpn.FullMeshVpnConfig;
import org.onosproject.l3vpn.netl3vpn.HubSpokeVpnConfig;
import org.onosproject.l3vpn.netl3vpn.InterfaceInfo;
import org.onosproject.l3vpn.netl3vpn.NetL3VpnStore;
import org.onosproject.l3vpn.netl3vpn.ProtocolInfo;
import org.onosproject.l3vpn.netl3vpn.RouteProtocol;
import org.onosproject.l3vpn.netl3vpn.TunnelInfo;
import org.onosproject.l3vpn.netl3vpn.VpnConfig;
import org.onosproject.l3vpn.netl3vpn.VpnInstance;
import org.onosproject.l3vpn.netl3vpn.VpnType;
import org.onosproject.net.DeviceId;
import org.onosproject.store.serializers.KryoNamespaces;
import org.onosproject.store.service.ConsistentMap;
import org.onosproject.store.service.DistributedSet;
import org.onosproject.store.service.Serializer;
import org.onosproject.store.service.StorageService;
import org.onosproject.yang.model.LeafListKey;
import org.onosproject.yang.model.ListKey;
import org.onosproject.yang.model.NodeKey;
import org.onosproject.yang.model.ResourceId;
import org.onosproject.yang.model.SchemaId;
import org.slf4j.Logger;

import java.util.Map;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkNotNull;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Manages the pool of available VPN instances and its associated devices
 * and interface information.
 */
@Component(immediate = true)
@Service
public class DistributedNetL3VpnStore implements NetL3VpnStore {

    /**
     * Kryo serializer shared by all of this store's distributed primitives.
     * Fix: the original builder registered {@code KryoNamespaces.API} twice;
     * the redundant registration has been dropped.
     */
    private static final Serializer L3VPN_SERIALIZER = Serializer
            .using(new KryoNamespace.Builder().register(KryoNamespaces.API)
                           .register(VpnInstance.class)
                           .register(VpnType.class)
                           .register(VpnConfig.class)
                           .register(FullMeshVpnConfig.class)
                           .register(HubSpokeVpnConfig.class)
                           .register(DeviceInfo.class)
                           .register(ResourceId.class)
                           .register(NodeKey.class)
                           .register(SchemaId.class)
                           .register(LeafListKey.class)
                           .register(ListKey.class)
                           .register(AccessInfo.class)
                           .register(InterfaceInfo.class)
                           .register(BgpInfo.class)
                           .register(RouteProtocol.class)
                           .register(ProtocolInfo.class)
                           .register(TunnelInfo.class)
                           .build());

    private static final String FREE_ID_NULL = "Free ID cannot be null";
    private static final String VPN_NAME_NULL = "VPN name cannot be null";
    private static final String VPN_INS_NULL = "VPN instance cannot be null";
    private static final String ACCESS_INFO_NULL = "Access info cannot be null";
    private static final String BGP_INFO_NULL = "BGP info cannot be null";
    private static final String INT_INFO_NULL = "Interface info cannot be null";
    private static final String DEV_ID_NULL = "Device Id cannot be null";
    private static final String TNL_COUNT_NULL = "Tunnel count cannot be null";

    private final Logger log = getLogger(getClass());

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected StorageService storageService;

    /**
     * Freed id list of NET L3VPN.
     */
    private DistributedSet<Long> freedIdList;

    /**
     * Map of interface info with access info as key.
     */
    private ConsistentMap<AccessInfo, InterfaceInfo> intInfoMap;

    /**
     * Map of VPN instance with VPN name as key.
     */
    private ConsistentMap<String, VpnInstance> vpnInsMap;

    /**
     * Map of BGP information and the device id.
     */
    private ConsistentMap<BgpInfo, DeviceId> bgpInfoMap;

    /**
     * Map of device id and tunnel count.
     */
    private ConsistentMap<DeviceId, Integer> tunnelInfoMap;

    @Activate
    protected void activate() {
        // Build all distributed primitives up front; they share one serializer.
        vpnInsMap = storageService.<String, VpnInstance>consistentMapBuilder()
                .withName("onos-l3vpn-instance-map")
                .withSerializer(L3VPN_SERIALIZER)
                .build();

        intInfoMap = storageService
                .<AccessInfo, InterfaceInfo>consistentMapBuilder()
                .withName("onos-l3vpn-int-info-map")
                .withSerializer(L3VPN_SERIALIZER)
                .build();

        bgpInfoMap = storageService.<BgpInfo, DeviceId>consistentMapBuilder()
                .withName("onos-l3vpn-bgp-info-map")
                .withSerializer(L3VPN_SERIALIZER)
                .build();

        tunnelInfoMap = storageService.<DeviceId, Integer>consistentMapBuilder()
                .withName("onos-l3vpn-tnl-info-map")
                .withSerializer(L3VPN_SERIALIZER)
                .build();

        freedIdList = storageService.<Long>setBuilder()
                .withName("onos-l3vpn-id-freed-list")
                .withSerializer(Serializer.using(KryoNamespaces.API))
                .build()
                .asDistributedSet();

        log.info("Started");
    }

    @Deactivate
    protected void deactivate() {
        log.info("Stopped");
    }

    @Override
    public Iterable<Long> getFreedIdList() {
        // Defensive snapshot so callers cannot mutate the distributed set.
        return ImmutableSet.copyOf(freedIdList);
    }

    @Override
    public Map<String, VpnInstance> getVpnInstances() {
        // Unwrap Versioned values into a plain map snapshot.
        return vpnInsMap.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue()
                        .value()));
    }

    @Override
    public Map<BgpInfo, DeviceId> getBgpInfo() {
        return bgpInfoMap.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue()
                        .value()));
    }

    @Override
    public Map<AccessInfo, InterfaceInfo> getInterfaceInfo() {
        return intInfoMap.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue()
                        .value()));
    }

    @Override
    public Map<DeviceId, Integer> getTunnelInfo() {
        return tunnelInfoMap.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue()
                        .value()));
    }

    @Override
    public void addIdToFreeList(Long id) {
        checkNotNull(id, FREE_ID_NULL);
        freedIdList.add(id);
    }

    @Override
    public void addVpnInsIfAbsent(String name, VpnInstance instance) {
        checkNotNull(name, VPN_NAME_NULL);
        checkNotNull(instance, VPN_INS_NULL);
        vpnInsMap.putIfAbsent(name, instance);
    }

    @Override
    public void addVpnIns(String name, VpnInstance instance) {
        checkNotNull(name, VPN_NAME_NULL);
        checkNotNull(instance, VPN_INS_NULL);
        vpnInsMap.put(name, instance);
    }

    @Override
    public void addInterfaceInfo(AccessInfo accessInfo, InterfaceInfo intInfo) {
        checkNotNull(accessInfo, ACCESS_INFO_NULL);
        checkNotNull(intInfo, INT_INFO_NULL);
        intInfoMap.put(accessInfo, intInfo);
    }

    @Override
    public void addBgpInfo(BgpInfo bgpInfo, DeviceId devId) {
        checkNotNull(bgpInfo, BGP_INFO_NULL);
        checkNotNull(devId, DEV_ID_NULL);
        bgpInfoMap.put(bgpInfo, devId);
    }

    @Override
    public void addTunnelInfo(DeviceId devId, Integer count) {
        checkNotNull(devId, DEV_ID_NULL);
        // Fix: validate the count too, consistent with the other add methods;
        // the original silently accepted a null value.
        checkNotNull(count, TNL_COUNT_NULL);
        tunnelInfoMap.put(devId, count);
    }

    @Override
    public boolean removeInterfaceInfo(AccessInfo accessInfo) {
        checkNotNull(accessInfo, ACCESS_INFO_NULL);

        if (intInfoMap.remove(accessInfo) == null) {
            log.error("Interface info deletion for access info {} has failed.",
                      accessInfo.toString());
            return false;
        }
        return true;
    }

    @Override
    public boolean removeVpnInstance(String vpnName) {
        checkNotNull(vpnName, VPN_NAME_NULL);

        if (vpnInsMap.remove(vpnName) == null) {
            log.error("Vpn instance deletion for vpn name {} has failed.",
                      vpnName);
            return false;
        }
        return true;
    }

    @Override
    public boolean removeIdFromFreeList(Long id) {
        checkNotNull(id, FREE_ID_NULL);

        if (!freedIdList.remove(id)) {
            log.error("Id from free id list {} deletion has failed.",
                      id.toString());
            return false;
        }
        return true;
    }

    @Override
    public boolean removeBgpInfo(BgpInfo bgpInfo) {
        checkNotNull(bgpInfo, BGP_INFO_NULL);

        if (bgpInfoMap.remove(bgpInfo) == null) {
            log.error("Device id deletion for BGP info {} has failed.",
                      bgpInfo.toString());
            return false;
        }
        return true;
    }

    @Override
    public boolean removeTunnelInfo(DeviceId id) {
        checkNotNull(id, DEV_ID_NULL);

        if (tunnelInfoMap.remove(id) == null) {
            log.error("Device id deletion in tunnel info has failed.");
            return false;
        }
        return true;
    }
}
package apple.networkextension;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSNumber;
import apple.foundation.NSSet;
import apple.foundation.protocol.NSCopying;
import apple.foundation.protocol.NSSecureCoding;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * [@interface] NEHotspotConfiguration
 * <p>
 * The NEHotspotConfiguration class represents set of properties that are required
 * to configure a Wi-Fi Network.
 */
// NOTE(review): machine-generated MOE/NatJ binding for Apple's
// NetworkExtension framework — every member is a thin bridge to a native
// selector. Do not hand-edit beyond comments; regenerate instead.
@Generated
@Library("NetworkExtension")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class NEHotspotConfiguration extends NSObject implements NSCopying, NSSecureCoding {
    static {
        // Registers this binding class with the NatJ runtime at class load.
        NatJ.register();
    }

    @Generated
    protected NEHotspotConfiguration(Pointer peer) {
        super(peer);
    }

    /**
     * [@property] SSID
     * <p>
     * SSID of the Wi-Fi Network.
     */
    @Generated
    @Selector("SSID")
    public native String SSID();

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native NEHotspotConfiguration alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native NEHotspotConfiguration allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Owned
    @Selector("copyWithZone:")
    @MappedReturn(ObjCObjectMapper.class)
    public native Object copyWithZone(VoidPtr zone);

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("encodeWithCoder:")
    public native void encodeWithCoder(NSCoder coder);

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native NEHotspotConfiguration init();

    @Generated
    @Selector("initWithCoder:")
    public native NEHotspotConfiguration initWithCoder(NSCoder coder);

    /**
     * initWithHS20Settings:eapSettings
     * <p>
     * A designated initializer to instantiate a new NEHotspotConfiguration object.
     * This initializer is used configure HS2.0 Wi-Fi Networks.
     *
     * @param hs20Settings Hotspot 2.0 configuration
     * @param eapSettings  EAP configuration
     */
    @Generated
    @Selector("initWithHS20Settings:eapSettings:")
    public native NEHotspotConfiguration initWithHS20SettingsEapSettings(NEHotspotHS20Settings hs20Settings,
            NEHotspotEAPSettings eapSettings);

    /**
     * initWithSSID:
     * <p>
     * A designated initializer to instantiate a new NEHotspotConfiguration object.
     * This initializer is used to configure open Wi-Fi Networks.
     *
     * @param SSID The SSID of the Open Wi-Fi Network.
     *             Length of SSID must be between 1 and 32 characters.
     */
    @Generated
    @Selector("initWithSSID:")
    public native NEHotspotConfiguration initWithSSID(String SSID);

    /**
     * initWithSSID:eapSettings
     * <p>
     * A designated initializer to instantiate a new NEHotspotConfiguration object.
     * This initializer is used configure WPA/WPA2 Enterprise Wi-Fi Networks.
     *
     * @param SSID        The SSID of WPA/WPA2 Enterprise Wi-Fi Network
     * @param eapSettings EAP configuration
     */
    @Generated
    @Selector("initWithSSID:eapSettings:")
    public native NEHotspotConfiguration initWithSSIDEapSettings(String SSID, NEHotspotEAPSettings eapSettings);

    /**
     * initWithSSID:passphrase:isWEP
     * <p>
     * A designated initializer to instantiate a new NEHotspotConfiguration object.
     * This initializer is used configure either WEP or WPA/WPA2 Personal Wi-Fi Networks.
     *
     * @param SSID       The SSID of the WEP or WPA/WPA2 Personal Wi-Fi Network
     * @param passphrase The passphrase credential.
     *                   For WPA/WPA2 Personal networks: between 8 and 63 characters.
     *                   For Static WEP(64bit) : 10 Hex Digits
     *                   For Static WEP(128bit) : 26 Hex Digits
     * @param isWEP      YES specifies WEP Wi-Fi Network else WPA/WPA2 Personal Wi-Fi Network
     */
    @Generated
    @Selector("initWithSSID:passphrase:isWEP:")
    public native NEHotspotConfiguration initWithSSIDPassphraseIsWEP(String SSID, String passphrase, boolean isWEP);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    /**
     * [@property] joinOnce
     * <p>
     * if set to YES the configuration will not be persisted. Default is NO.
     */
    @Generated
    @Selector("joinOnce")
    public native boolean joinOnce();

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    /**
     * [@property] lifeTimeInDays
     * <p>
     * The lifetime of the configuration in days. The configuration is stored for the
     * number of days specified by this property. The minimum value is 1 day and maximum value is 365 days.
     * A configuration does not get deleted automatically if this property is not set or set to an invalid value.
     * This property does not apply to Enterprise and HS2.0 networks.
     */
    @Generated
    @Selector("lifeTimeInDays")
    public native NSNumber lifeTimeInDays();

    @Generated
    @Owned
    @Selector("new")
    public static native NEHotspotConfiguration new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    /**
     * [@property] joinOnce
     * <p>
     * if set to YES the configuration will not be persisted. Default is NO.
     */
    @Generated
    @Selector("setJoinOnce:")
    public native void setJoinOnce(boolean value);

    /**
     * [@property] lifeTimeInDays
     * <p>
     * The lifetime of the configuration in days. The configuration is stored for the
     * number of days specified by this property. The minimum value is 1 day and maximum value is 365 days.
     * A configuration does not get deleted automatically if this property is not set or set to an invalid value.
     * This property does not apply to Enterprise and HS2.0 networks.
     */
    @Generated
    @Selector("setLifeTimeInDays:")
    public native void setLifeTimeInDays(NSNumber value);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();

    // Instance-side bridge to the static supportsSecureCoding class method,
    // required by the NSSecureCoding protocol mapping.
    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    /**
     * [@property] SSIDPrefix
     * <p>
     * Prefix string of SSID of the Wi-Fi Network.
     */
    @Generated
    @Selector("SSIDPrefix")
    public native String SSIDPrefix();

    /**
     * [@property] hidden
     * <p>
     * if set to YES the system will perform active scan of the SSID. Default is NO.
     */
    @Generated
    @Selector("hidden")
    public native boolean hidden();

    /**
     * initWithSSIDPrefix:
     * <p>
     * A designated initializer to instantiate a new NEHotspotConfiguration object.
     * This initializer is used to configure open Wi-Fi Networks.
     *
     * @param SSIDPrefix The prefix string of SSID of the Open Wi-Fi Network.
     *                   Length of SSIDPrefix must be between 3 and 32 characters.
     */
    @Generated
    @Selector("initWithSSIDPrefix:")
    public native NEHotspotConfiguration initWithSSIDPrefix(String SSIDPrefix);

    /**
     * initWithSSIDPrefix:passphrase:isWEP
     * <p>
     * A designated initializer to instantiate a new NEHotspotConfiguration object.
     * This initializer is used configure either WEP or WPA/WPA2 Personal Wi-Fi Networks.
     *
     * @param SSIDPrefix The prefix string of SSID of the WEP or WPA/WPA2 Personal Wi-Fi Network.
     *                   Length of SSIDPrefix must be between 3 and 32 characters.
     * @param passphrase The passphrase credential.
     *                   For WPA/WPA2 Personal networks: between 8 and 63 characters.
     *                   For Static WEP(64bit) : 10 Hex Digits
     *                   For Static WEP(128bit) : 26 Hex Digits
     * @param isWEP      YES specifies WEP Wi-Fi Network else WPA/WPA2 Personal Wi-Fi Network
     */
    @Generated
    @Selector("initWithSSIDPrefix:passphrase:isWEP:")
    public native NEHotspotConfiguration initWithSSIDPrefixPassphraseIsWEP(String SSIDPrefix, String passphrase,
            boolean isWEP);

    /**
     * [@property] hidden
     * <p>
     * if set to YES the system will perform active scan of the SSID. Default is NO.
     */
    @Generated
    @Selector("setHidden:")
    public native void setHidden(boolean value);
}
package com.github.mike10004.common.dbhelp;

import java.util.function.Function;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.ImmutableList;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Rule;
import org.openide.util.RequestProcessor;

/**
 * Test for MysqlConnectionSource class in core project. This test has
 * to be in this package because it utilizes the MysqlConnectionSourceRule
 * class in the testtools project.
 */
public class MysqlConnectionSourceIT {

    // Rule supplies a fresh connection source per test; the factory wraps it in the
    // counting subclass below so the test can assert on prepare() invocations.
    @Rule
    public MysqlConnectionSourceRule connectionSourceRule = new MysqlConnectionSourceRule(IntegrationTests.getMysqlPort(),
            new Function<ConnectionParams, MysqlConnectionSource>() {
                @Override
                public MysqlConnectionSource apply(ConnectionParams t) {
                    return new CountingConnectionSource(t);
                }
            });

    /**
     * Connection source that counts how many times {@code prepare()} is invoked,
     * so the test can verify one-time initialization under concurrency.
     */
    public static class CountingConnectionSource extends MysqlConnectionSource {

        // NOTE(review): 'transient' on a final field has no effect unless the class is
        // serialized; looks vestigial — confirm before removing.
        private transient final AtomicInteger numPrepareCalls = new AtomicInteger(0);

        public CountingConnectionSource(ConnectionParams connectionParams) {
            super(connectionParams);
        }

        @Override
        protected void prepare() throws SQLException {
            super.prepare();
            numPrepareCalls.incrementAndGet();
        }

        public int getNumPrepareCalls() {
            return numPrepareCalls.get();
        }
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    /**
     * Runs a shuffled mix of reader and writer tasks on a bounded thread pool and then
     * verifies: connection usage never exceeded the pool size, every opened connection
     * was closed, every insert is visible, and prepare() ran exactly once.
     */
    @Test
    public void testReadingAndWritingConcurrently() throws SQLException, InterruptedException, ExecutionException {
        System.out.println("testReadingAndWritingConcurrently");
        int numThreads = 20;
        int numReaders = 500, numWriters = 50;
        AtomicInteger numInsertedCount = new AtomicInteger(), taskCount = new AtomicInteger();
        boolean clean = false;
        List<DbTask> tasks = new ArrayList<>();
        int count;
        CountingConnectionSource cs = (CountingConnectionSource) connectionSourceRule.getConnectionSource();
        DatabaseContext db = new DefaultDatabaseContext(cs);
        // Writers append here concurrently, hence the synchronized wrapper.
        List<Customer> customers = Collections.synchronizedList(new ArrayList<Customer>());
        try {
            db.getTableUtils().createTable(Customer.class);
            // Writers insert 10, 20, ..., numWriters*10 rows respectively.
            for (int i = 0; i < numWriters; i++) {
                int numToInsert = (i + 1) * 10;
                DbWriter writer = new DbWriter(db, numToInsert, customers, taskCount, numInsertedCount);
                tasks.add(writer);
            }
            for (int i = 0; i < numReaders; i++) {
                tasks.add(new DbReader(db, taskCount));
            }
            // Shuffle so reads and writes interleave rather than running in phases.
            Collections.shuffle(tasks);
            RequestProcessor rp = new RequestProcessor(MysqlConnectionSourceIT.class.getName(), numThreads);
            System.out.println("invoking all tasks using " + numThreads + " threads");
            List<Future<Void>> futures = rp.invokeAll(tasks);
            System.out.println("awaiting termination...");
            // get() both waits for completion and rethrows any task failure.
            for (Future future : futures) {
                future.get();
            }
            count = (int) db.getDao(Customer.class).countOf();
            clean = true;
        } finally {
            // On failure (clean == false) the argument is true; semantics of that flag
            // are defined by DatabaseContext.closeConnections — presumably "force close".
            db.closeConnections(!clean);
        }
        System.out.println("all tasks terminated");
        System.out.println("max connections ever used: " + cs.getMaxConnectionsEverUsed());
        System.out.println(cs.getOpenCount() + " opened");
        System.out.println(cs.getCloseCount() + " closed");
        assertTrue("expect max connections ever used to be no more than num threads", cs.getMaxConnectionsEverUsed() <= numThreads);
        assertEquals("expected num opened == num closed", cs.getOpenCount(), cs.getCloseCount());
        System.out.println(numInsertedCount.get() + " inserted; collection.size = " + customers.size());
        assertEquals(numInsertedCount.get(), customers.size());
        assertEquals(numInsertedCount.get(), count);
        System.out.println(cs.getNumPrepareCalls() + " prepare() calls");
        assertEquals(1, cs.getNumPrepareCalls());
    }

    /**
     * Base class for test tasks: runs {@link #doWork} against the shared database
     * context when called by the executor.
     */
    public static abstract class DbTask implements Callable<Void> {

        // NOTE(review): 'transient' on a final field of a non-serializable class is a no-op.
        private transient final DatabaseContext db;

        public DbTask(DatabaseContext db) {
            this.db = db;
        }

        protected abstract void doWork(DatabaseContext db) throws SQLException;

        @Override
        public Void call() throws Exception {
            doWork(db);
            return (Void) null;
        }
    }

    /** Task that reads all rows of each entity table and logs the row count. */
    public static class DbReader extends DbTask {

        private final AtomicInteger taskCount;

        public DbReader(DatabaseContext db, AtomicInteger taskCount) {
            super(db);
            this.taskCount = checkNotNull(taskCount);
        }

        @Override
        protected void doWork(DatabaseContext db) throws SQLException {
            for (Class<?> entity : ImmutableList.<Class<?>>of(Customer.class)) {
                List<?> list = db.getDao(entity).queryForAll();
                System.out.format("%5d %08x: read %d%n", taskCount.incrementAndGet(), System.identityHashCode(this), list.size());
            }
        }
    }

    /**
     * Task that inserts a fixed number of randomly-populated Customer rows inside a
     * single transaction, then records them in the shared collection.
     */
    public static class DbWriter extends DbTask {

        // Fixed seed (class hashCode) — deterministic per JVM run, not across runs.
        protected Random random = new Random(DbWriter.class.hashCode());
        private final int numToInsert;
        private final Collection<Customer> customers;
        private final AtomicInteger numInsertedCount, taskCount;

        public DbWriter(DatabaseContext db, int numToInsert, Collection<Customer> customers, AtomicInteger taskCount, AtomicInteger numInsertedCount) {
            super(db);
            this.numToInsert = numToInsert;
            this.customers = checkNotNull(customers);
            this.taskCount = checkNotNull(taskCount);
            this.numInsertedCount = checkNotNull(numInsertedCount);
        }

        @Override
        protected void doWork(final DatabaseContext db) throws SQLException {
            // All inserts for this writer commit atomically; the shared collection is
            // only updated after the transaction body completes.
            db.getTransactionManager().callInTransaction(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    List<Customer> subset = new ArrayList<>();
                    for (int i = 0; i < numToInsert; i++) {
                        Customer c = construct(Customer.class);
                        db.getDao(Customer.class).create(c);
                        subset.add(c);
                    }
                    customers.addAll(subset);
                    return (Void) null;
                }
            });
            numInsertedCount.addAndGet(numToInsert);
            System.out.format("%5d %08x: inserted %d%n", taskCount.incrementAndGet(), System.identityHashCode(this), numToInsert);
        }

        // Factory for test entities; only Customer is supported.
        protected <T> T construct(Class<T> entity) {
            if (Customer.class.equals(entity)) {
                Customer c = new Customer();
                c.address = newRandomString();
                c.name = newRandomString();
                // Cast is safe here: entity was checked to be Customer.class above.
                return (T) c;
            } else {
                throw new IllegalArgumentException("not supported yet: " + entity);
            }
        }

        // Builds a random hex string from 10 random longs.
        // NOTE(review): Math.abs(Long.MIN_VALUE) is still negative, so a rare negative
        // value can slip through; also "%08x" on a long may emit up to 16 digits, so the
        // StringBuilder's 8*numLongs capacity hint and the implied string length are
        // underestimates. Harmless for this test, but worth confirming intent.
        protected String newRandomString() {
            int numLongs = 10;
            StringBuilder sb = new StringBuilder(8 * numLongs);
            for (int i = 0; i < numLongs; i++) {
                long value = Math.abs(random.nextLong());
                sb.append(String.format("%08x", value));
            }
            String s = sb.toString();
            return s;
        }
    }
}
/**
 * Copyright 2011-2021 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.testdriver;

import java.io.File;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.asakusafw.runtime.core.Report;
import com.asakusafw.testdriver.compiler.CompilerConfiguration.DebugLevel;
import com.asakusafw.testdriver.compiler.CompilerConfiguration.OptimizeLevel;
import com.asakusafw.testdriver.core.TestDataToolProvider;
import com.asakusafw.testdriver.core.TestingEnvironmentConfigurator;
import com.asakusafw.trace.model.TraceSetting;
import com.asakusafw.trace.model.TraceSetting.Mode;
import com.asakusafw.trace.model.TraceSettingList;
import com.asakusafw.trace.model.Tracepoint;
import com.asakusafw.trace.model.Tracepoint.PortKind;
import com.asakusafw.vocabulary.flow.FlowDescription;
import com.asakusafw.vocabulary.flow.FlowPart;

/**
 * An abstract super class of test-driver classes.
 * <p>
 * All configuration methods delegate to the shared {@link TestDriverContext};
 * this class itself holds no other state.
 * @since 0.2.0
 * @version 0.10.0
 */
public abstract class TestDriverBase extends DriverElementBase {

    private static final Logger LOG = LoggerFactory.getLogger(TestDriverBase.class);

    // Name of the factory method generated for flow-part operator factories;
    // used by the deprecated flow-part trace methods below.
    private static final String FLOW_OPERATOR_FACTORY_METHOD_NAME = "create"; //$NON-NLS-1$

    static {
        // One-time environment setup before any driver instance is created.
        TestingEnvironmentConfigurator.initialize();
    }

    /**
     * The internal test driver context object.
     */
    protected final TestDriverContext driverContext;

    /**
     * Creates a new instance.
     * @param callerClass the caller class
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public TestDriverBase(Class<?> callerClass) {
        if (callerClass == null) {
            throw new IllegalArgumentException("callerClass must not be null"); //$NON-NLS-1$
        }
        this.driverContext = new TestDriverContext(callerClass);
    }

    @Override
    protected final Class<?> getCallerClass() {
        return driverContext.getCallerClass();
    }

    @Override
    protected final TestDataToolProvider getTestTools() {
        return driverContext.getRepository();
    }

    /**
     * Adds a runtime configuration item.
     * This may customize behavior of some framework APIs (e.g. {@link Report report API}).
     * @param key the configuration key name
     * @param value the configuration value, or {@code null} to unset the target configuration
     * @throws IllegalArgumentException if {@code key} is {@code null}
     */
    public void configure(String key, String value) {
        if (key == null) {
            throw new IllegalArgumentException("key must not be null"); //$NON-NLS-1$
        }
        // null value means "remove" rather than "store null".
        if (value != null) {
            driverContext.getExtraConfigurations().put(key, value);
        } else {
            driverContext.getExtraConfigurations().remove(key);
        }
    }

    /**
     * Adds a batch argument.
     * @param key the argument name
     * @param value the argument value, or {@code null} to unset the target argument
     * @throws IllegalArgumentException if {@code key} is {@code null}
     */
    public void setBatchArg(String key, String value) {
        if (key == null) {
            throw new IllegalArgumentException("key must not be null"); //$NON-NLS-1$
        }
        // Same unset-on-null convention as configure(String, String).
        if (value != null) {
            driverContext.getBatchArgs().put(key, value);
        } else {
            driverContext.getBatchArgs().remove(key);
        }
    }

    /**
     * Configures the compiler optimization level.
     * <ul>
     * <li> 0: disables all optimizations </li>
     * <li> 1: only enables default optimizations </li>
     * <li> 2~: enables aggressive optimizations </li>
     * </ul>
     * @param level the compiler optimization level
     */
    public void setOptimize(int level) {
        // Map the legacy int level onto the three-valued OptimizeLevel enum;
        // negative values are treated like 0 (disabled).
        if (level <= 0) {
            driverContext.setCompilerOptimizeLevel(OptimizeLevel.DISABLED);
        } else if (level == 1) {
            driverContext.setCompilerOptimizeLevel(OptimizeLevel.NORMAL);
        } else {
            driverContext.setCompilerOptimizeLevel(OptimizeLevel.AGGRESSIVE);
        }
    }

    /**
     * Sets whether compiler should keep debugging information or not.
     * @param enable {@code true} to keep debugging information, otherwise {@code false}
     */
    public void setDebug(boolean enable) {
        if (enable) {
            driverContext.setCompilerDebugLevel(DebugLevel.NORMAL);
        } else {
            driverContext.setCompilerDebugLevel(DebugLevel.DISABLED);
        }
    }

    /**
     * Sets an extra compiler option.
     * @param name the option name
     * @param value the option value, or {@code null} to remove the option
     * @throws NullPointerException if {@code name} is {@code null}
     * @since 0.7.3
     */
    public void setExtraCompilerOption(String name, String value) {
        Objects.requireNonNull(name);
        // null value removes the option, mirroring configure()/setBatchArg().
        if (value == null) {
            driverContext.getCompilerOptions().remove(name);
        } else {
            driverContext.getCompilerOptions().put(name, value);
        }
    }

    /**
     * Sets the Asakusa framework installation path ({@literal a.k.a.} {@code $ASAKUSA_HOME}).
     * If this is not set, the installation path will be computed from the environment variable.
     * @param frameworkHomePath the framework installation path
     */
    public void setFrameworkHomePath(File frameworkHomePath) {
        driverContext.setFrameworkHomePath(frameworkHomePath);
    }

    /**
     * Sets the search path of the external library files.
     * If this is not set, the search path will be {@link TestDriverContext#EXTERNAL_LIBRARIES_PATH}.
     * @param librariesPath the search path of the external library files
     * @since 0.5.1
     */
    public void setLibrariesPath(File librariesPath) {
        driverContext.setLibrariesPath(librariesPath);
    }

    /**
     * Sets the explicit compiler working directory.
     * If this is not set, the compiler will create the working directory into the temporary area,
     * and remove it after test was finished.
     * @param path the explicit compiler working directory
     * @since 0.5.2
     */
    public void setCompilerWorkingDirectory(File path) {
        driverContext.setCompilerWorkingDirectory(path);
    }

    /**
     * Sets whether skips verifying test conditions.
     * @param skip {@code true} if verifying test conditions, otherwise {@code false}
     * @since 0.7.0
     */
    public void skipValidateCondition(boolean skip) {
        driverContext.setSkipValidateCondition(skip);
    }

    /**
     * Sets whether skips truncating test input data.
     * @param skip {@code true} if truncating test input data, otherwise {@code false}
     */
    public void skipCleanInput(boolean skip) {
        driverContext.setSkipCleanInput(skip);
    }

    /**
     * Sets whether skips truncating test output data.
     * @param skip {@code true} if truncating test output data, otherwise {@code false}
     */
    public void skipCleanOutput(boolean skip) {
        driverContext.setSkipCleanOutput(skip);
    }

    /**
     * Sets whether skips preparing test input data.
     * @param skip {@code true} if preparing test input data, otherwise {@code false}
     */
    public void skipPrepareInput(boolean skip) {
        driverContext.setSkipPrepareInput(skip);
    }

    /**
     * Sets whether skips preparing test output data.
     * @param skip {@code true} if preparing test output data, otherwise {@code false}
     */
    public void skipPrepareOutput(boolean skip) {
        driverContext.setSkipPrepareOutput(skip);
    }

    /**
     * Sets whether skips executing jobflows.
     * @param skip {@code true} if executing jobflows, otherwise {@code false}
     */
    public void skipRunJobflow(boolean skip) {
        driverContext.setSkipRunJobflow(skip);
    }

    /**
     * Sets whether skips verifying test results.
     * @param skip {@code true} if verifying test results, otherwise {@code false}
     */
    public void skipVerify(boolean skip) {
        driverContext.setSkipVerify(skip);
    }

    /**
     * Adds a new trace-point to the target operator input.
     * @param operatorClass target operator class
     * @param methodName target operator method name
     * @param portName target operator input port name
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @since 0.5.1
     */
    public void addInputTrace(Class<?> operatorClass, String methodName, String portName) {
        if (operatorClass == null) {
            throw new IllegalArgumentException("operatorClass must not be null"); //$NON-NLS-1$
        }
        if (methodName == null) {
            throw new IllegalArgumentException("methodName must not be null"); //$NON-NLS-1$
        }
        if (portName == null) {
            throw new IllegalArgumentException("portName must not be null"); //$NON-NLS-1$
        }
        TraceSetting setting = createTraceSetting(
                operatorClass, methodName,
                PortKind.INPUT, portName,
                Collections.emptyMap());
        appendTrace(setting);
    }

    /**
     * Adds a new trace-point to the target operator output.
     * @param operatorClass target operator class
     * @param methodName target operator method name
     * @param portName target operator input port name
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @since 0.5.1
     */
    public void addOutputTrace(Class<?> operatorClass, String methodName, String portName) {
        if (operatorClass == null) {
            throw new IllegalArgumentException("operatorClass must not be null"); //$NON-NLS-1$
        }
        if (methodName == null) {
            throw new IllegalArgumentException("methodName must not be null"); //$NON-NLS-1$
        }
        if (portName == null) {
            throw new IllegalArgumentException("portName must not be null"); //$NON-NLS-1$
        }
        TraceSetting setting = createTraceSetting(
                operatorClass, methodName,
                PortKind.OUTPUT, portName,
                Collections.emptyMap());
        appendTrace(setting);
    }

    /**
     * Adds a new trace-point to the target operator input.
     * @param flowpartClass target flow-part class
     * @param portName target operator input port name
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @deprecated some platform does not support tracing flow-part I/O;
     *     please use {@link #addInputTrace(Class, String, String)} instead
     * @since 0.5.1
     */
    @Deprecated
    public void addInputTrace(Class<? extends FlowDescription> flowpartClass, String portName) {
        if (flowpartClass == null) {
            throw new IllegalArgumentException("operatorClass must not be null"); //$NON-NLS-1$
        }
        if (portName == null) {
            throw new IllegalArgumentException("portName must not be null"); //$NON-NLS-1$
        }
        checkFlowpart(flowpartClass);
        // Flow-parts are traced through their generated factory method ("create").
        TraceSetting setting = createTraceSetting(
                flowpartClass, FLOW_OPERATOR_FACTORY_METHOD_NAME,
                PortKind.INPUT, portName,
                Collections.emptyMap());
        appendTrace(setting);
    }

    /**
     * Adds a new trace-point to the target operator output.
     * @param flowpartClass target flow-part class
     * @param portName target operator input port name
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @deprecated some platform does not support tracing flow-part I/O;
     *     please use {@link #addOutputTrace(Class, String, String)} instead
     * @since 0.5.1
     */
    @Deprecated
    public void addOutputTrace(Class<? extends FlowDescription> flowpartClass, String portName) {
        if (flowpartClass == null) {
            throw new IllegalArgumentException("operatorClass must not be null"); //$NON-NLS-1$
        }
        if (portName == null) {
            throw new IllegalArgumentException("portName must not be null"); //$NON-NLS-1$
        }
        checkFlowpart(flowpartClass);
        TraceSetting setting = createTraceSetting(
                flowpartClass, FLOW_OPERATOR_FACTORY_METHOD_NAME,
                PortKind.OUTPUT, portName,
                Collections.emptyMap());
        appendTrace(setting);
    }

    // Rejects classes that are not annotated with @FlowPart.
    private void checkFlowpart(Class<? extends FlowDescription> flowpartClass) {
        if (flowpartClass.isAnnotationPresent(FlowPart.class) == false) {
            throw new IllegalArgumentException(MessageFormat.format(
                    Messages.getString("TestDriverBase.errorInvalidFlowpartClass"), //$NON-NLS-1$
                    flowpartClass.getName()));
        }
    }

    // Appends one trace setting by replacing the stored TraceSettingList with a
    // copy-plus-one (the list extension is treated as immutable).
    private void appendTrace(TraceSetting setting) {
        assert setting != null;
        List<TraceSetting> elements = new ArrayList<>();
        TraceSettingList list = driverContext.getExtension(TraceSettingList.class);
        if (list != null) {
            elements.addAll(list.getElements());
        }
        elements.add(setting);
        driverContext.putExtension(TraceSettingList.class, new TraceSettingList(elements));
    }

    // Builds a strict-mode TraceSetting for the given operator method port.
    static TraceSetting createTraceSetting(
            Class<?> operatorClass,
            String methodName,
            PortKind portKind,
            String portName,
            Map<String, String> attributes) {
        assert operatorClass != null;
        assert methodName != null;
        assert portKind != null;
        assert portName != null;
        assert attributes != null;
        return new TraceSetting(
                new Tracepoint(operatorClass.getName(), methodName, portKind, portName),
                Mode.STRICT, attributes);
    }

    /**
     * Sets the {@link JobExecutorFactory} for executing jobs in this test.
     * @param factory the factory, or {@code null} to use a default implementation
     * @since 0.6.0
     * @deprecated not supported
     */
    @Deprecated
    public void setJobExecutorFactory(JobExecutorFactory factory) {
        // Intentionally a no-op beyond the warning: this hook is no longer supported.
        LOG.warn("{}.setJobExecutorFactory() is not supported", getClass().getSimpleName());
    }

    /**
     * Returns the current test driver context (for internal use only).
     * @return the current test driver context
     * @since 0.6.1
     */
    TestDriverContext getDriverContext() {
        return driverContext;
    }
}
/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package org.apache.http.conn.routing;

import java.net.InetAddress;

import org.apache.http.annotation.NotThreadSafe;
import org.apache.http.util.LangUtils;
import org.apache.http.HttpHost;

/**
 * Helps tracking the steps in establishing a route.
 * <p>
 * Target/local address are fixed at construction; the remaining fields
 * record the route as it is established step by step (connect, proxy,
 * tunnel, layer). Each state-changing method validates that it is called
 * in a legal order and throws {@link IllegalStateException} otherwise.
 *
 * @since 4.0
 */
@NotThreadSafe
public final class RouteTracker implements RouteInfo, Cloneable {

    /** The target host to connect to. */
    private final HttpHost targetHost;

    /**
     * The local address to connect from.
     * <code>null</code> indicates that the default should be used.
     */
    private final InetAddress localAddress;

    // the attributes above are fixed at construction time
    // now follow attributes that indicate the established route

    /** Whether the first hop of the route is established. */
    private boolean connected;

    /** The proxy chain, if any. */
    private HttpHost[] proxyChain;

    /** Whether the the route is tunnelled end-to-end through proxies. */
    private TunnelType tunnelled;

    /** Whether the route is layered over a tunnel. */
    private LayerType layered;

    /** Whether the route is secure. */
    private boolean secure;

    /**
     * Creates a new route tracker.
     * The target and origin need to be specified at creation time.
     *
     * @param target    the host to which to route
     * @param local     the local address to route from, or
     *                  <code>null</code> for the default
     */
    public RouteTracker(HttpHost target, InetAddress local) {
        if (target == null) {
            throw new IllegalArgumentException("Target host may not be null.");
        }
        this.targetHost   = target;
        this.localAddress = local;
        this.tunnelled    = TunnelType.PLAIN;
        this.layered      = LayerType.PLAIN;
    }

    /**
     * Resets all tracked state back to "nothing established".
     * The fixed target host and local address are untouched.
     *
     * @since 4.2
     */
    public void reset() {
        this.connected = false;
        this.proxyChain = null;
        this.tunnelled = TunnelType.PLAIN;
        this.layered = LayerType.PLAIN;
        this.secure = false;
    }

    /**
     * Creates a new tracker for the given route.
     * Only target and origin are taken from the route,
     * everything else remains to be tracked.
     *
     * @param route     the route to track
     */
    public RouteTracker(HttpRoute route) {
        this(route.getTargetHost(), route.getLocalAddress());
    }

    /**
     * Tracks connecting to the target.
     *
     * @param secure    <code>true</code> if the route is secure,
     *                  <code>false</code> otherwise
     */
    public final void connectTarget(boolean secure) {
        if (this.connected) {
            throw new IllegalStateException("Already connected.");
        }
        this.connected = true;
        this.secure = secure;
    }

    /**
     * Tracks connecting to the first proxy.
     *
     * @param proxy     the proxy connected to
     * @param secure    <code>true</code> if the route is secure,
     *                  <code>false</code> otherwise
     */
    public final void connectProxy(HttpHost proxy, boolean secure) {
        if (proxy == null) {
            throw new IllegalArgumentException("Proxy host may not be null.");
        }
        if (this.connected) {
            throw new IllegalStateException("Already connected.");
        }
        this.connected  = true;
        // first proxy starts a single-element chain
        this.proxyChain = new HttpHost[]{ proxy };
        this.secure     = secure;
    }

    /**
     * Tracks tunnelling to the target.
     *
     * @param secure    <code>true</code> if the route is secure,
     *                  <code>false</code> otherwise
     */
    public final void tunnelTarget(boolean secure) {
        if (!this.connected) {
            throw new IllegalStateException("No tunnel unless connected.");
        }
        if (this.proxyChain == null) {
            throw new IllegalStateException("No tunnel without proxy.");
        }
        this.tunnelled = TunnelType.TUNNELLED;
        this.secure    = secure;
    }

    /**
     * Tracks tunnelling to a proxy in a proxy chain.
     * This will extend the tracked proxy chain, but it does not mark
     * the route as tunnelled. Only end-to-end tunnels are considered there.
     *
     * @param proxy     the proxy tunnelled to
     * @param secure    <code>true</code> if the route is secure,
     *                  <code>false</code> otherwise
     */
    public final void tunnelProxy(HttpHost proxy, boolean secure) {
        if (proxy == null) {
            throw new IllegalArgumentException("Proxy host may not be null.");
        }
        if (!this.connected) {
            throw new IllegalStateException("No tunnel unless connected.");
        }
        if (this.proxyChain == null) {
            throw new IllegalStateException("No proxy tunnel without proxy.");
        }

        // prepare an extended proxy chain
        HttpHost[] proxies = new HttpHost[this.proxyChain.length+1];
        System.arraycopy(this.proxyChain, 0,
                         proxies, 0, this.proxyChain.length);
        proxies[proxies.length-1] = proxy;

        this.proxyChain = proxies;
        this.secure     = secure;
    }

    /**
     * Tracks layering a protocol.
     *
     * @param secure    <code>true</code> if the route is secure,
     *                  <code>false</code> otherwise
     */
    public final void layerProtocol(boolean secure) {
        // it is possible to layer a protocol over a direct connection,
        // although this case is probably not considered elsewhere
        if (!this.connected) {
            throw new IllegalStateException
                ("No layered protocol unless connected.");
        }
        this.layered = LayerType.LAYERED;
        this.secure  = secure;
    }

    public final HttpHost getTargetHost() {
        return this.targetHost;
    }

    public final InetAddress getLocalAddress() {
        return this.localAddress;
    }

    // Hop count is 0 before connecting; afterwards it is proxies + 1 (the target hop).
    public final int getHopCount() {
        int hops = 0;
        if (this.connected) {
            if (proxyChain == null)
                hops = 1;
            else
                hops = proxyChain.length + 1;
        }
        return hops;
    }

    // Returns the hop at the given index: proxies first, target last.
    public final HttpHost getHopTarget(int hop) {
        if (hop < 0)
            throw new IllegalArgumentException
                ("Hop index must not be negative: " + hop);
        final int hopcount = getHopCount();
        if (hop >= hopcount) {
            throw new IllegalArgumentException
                ("Hop index " + hop +
                 " exceeds tracked route length " + hopcount +".");
        }

        HttpHost result = null;
        if (hop < hopcount-1)
            result = this.proxyChain[hop];
        else
            result = this.targetHost;

        return result;
    }

    public final HttpHost getProxyHost() {
        return (this.proxyChain == null) ? null : this.proxyChain[0];
    }

    public final boolean isConnected() {
        return this.connected;
    }

    public final TunnelType getTunnelType() {
        return this.tunnelled;
    }

    public final boolean isTunnelled() {
        return (this.tunnelled == TunnelType.TUNNELLED);
    }

    public final LayerType getLayerType() {
        return this.layered;
    }

    public final boolean isLayered() {
        return (this.layered == LayerType.LAYERED);
    }

    public final boolean isSecure() {
        return this.secure;
    }

    /**
     * Obtains the tracked route.
     * If a route has been tracked, it is {@link #isConnected connected}.
     * If not connected, nothing has been tracked so far.
     *
     * @return  the tracked route, or
     *          <code>null</code> if nothing has been tracked so far
     */
    public final HttpRoute toRoute() {
        return !this.connected ?
            null : new HttpRoute(this.targetHost, this.localAddress,
                                 this.proxyChain, this.secure,
                                 this.tunnelled, this.layered);
    }

    /**
     * Compares this tracked route to another.
     *
     * @param o         the object to compare with
     *
     * @return  <code>true</code> if the argument is the same tracked route,
     *          <code>false</code>
     */
    @Override
    public final boolean equals(Object o) {
        if (o == this) return true;
        if (!(o instanceof RouteTracker)) return false;

        RouteTracker that = (RouteTracker) o;
        return
            // Do the cheapest checks first
            (this.connected == that.connected) &&
            (this.secure    == that.secure) &&
            (this.tunnelled == that.tunnelled) &&
            (this.layered   == that.layered) &&
            LangUtils.equals(this.targetHost, that.targetHost) &&
            LangUtils.equals(this.localAddress, that.localAddress) &&
            LangUtils.equals(this.proxyChain, that.proxyChain);
    }

    /**
     * Generates a hash code for this tracked route.
     * Route trackers are modifiable and should therefore not be used
     * as lookup keys. Use {@link #toRoute toRoute} to obtain an
     * unmodifiable representation of the tracked route.
     *
     * @return  the hash code
     */
    @Override
    public final int hashCode() {
        // Folds in the same fields that equals() compares, element-wise for the chain.
        int hash = LangUtils.HASH_SEED;
        hash = LangUtils.hashCode(hash, this.targetHost);
        hash = LangUtils.hashCode(hash, this.localAddress);
        if (this.proxyChain != null) {
            for (int i = 0; i < this.proxyChain.length; i++) {
                hash = LangUtils.hashCode(hash, this.proxyChain[i]);
            }
        }
        hash = LangUtils.hashCode(hash, this.connected);
        hash = LangUtils.hashCode(hash, this.secure);
        hash = LangUtils.hashCode(hash, this.tunnelled);
        hash = LangUtils.hashCode(hash, this.layered);
        return hash;
    }

    /**
     * Obtains a description of the tracked route.
     *
     * @return  a human-readable representation of the tracked route
     */
    @Override
    public final String toString() {
        StringBuilder cab = new StringBuilder(50 + getHopCount()*30);

        cab.append("RouteTracker[");
        if (this.localAddress != null) {
            cab.append(this.localAddress);
            cab.append("->");
        }
        cab.append('{');
        // single-letter state flags: c=connected, t=tunnelled, l=layered, s=secure
        if (this.connected)
            cab.append('c');
        if (this.tunnelled == TunnelType.TUNNELLED)
            cab.append('t');
        if (this.layered == LayerType.LAYERED)
            cab.append('l');
        if (this.secure)
            cab.append('s');
        cab.append("}->");
        if (this.proxyChain != null) {
            for (int i=0; i<this.proxyChain.length; i++) {
                cab.append(this.proxyChain[i]);
                cab.append("->");
            }
        }
        cab.append(this.targetHost);
        cab.append(']');

        return cab.toString();
    }

    // default implementation of clone() is sufficient
    @Override
    public Object clone() throws CloneNotSupportedException {
        return super.clone();
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/gkehub/v1/feature.proto
// NOTE(review): this whole class is protoc output; any hand edit will be lost
// on regeneration. Comments below are orientation only.

package com.google.cloud.gkehub.v1;

/**
 *
 *
 * <pre>
 * CommonFeatureState contains Hub-wide Feature status information.
 * </pre>
 *
 * Protobuf type {@code google.cloud.gkehub.v1.CommonFeatureState}
 */
public final class CommonFeatureState extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1.CommonFeatureState)
    CommonFeatureStateOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CommonFeatureState.newBuilder() to construct.
  private CommonFeatureState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private CommonFeatureState() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CommonFeatureState();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: decodes tag/value pairs from `input`.
  // Field 1 (tag 10) is the `state` sub-message; unrecognized fields are
  // preserved in unknownFields so re-serialization round-trips them.
  private CommonFeatureState(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of stream.
            done = true;
            break;
          case 10:
            {
              com.google.cloud.gkehub.v1.FeatureState.Builder subBuilder = null;
              if (state_ != null) {
                // A repeated occurrence of field 1 merges into the existing value.
                subBuilder = state_.toBuilder();
              }
              state_ =
                  input.readMessage(
                      com.google.cloud.gkehub.v1.FeatureState.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(state_);
                state_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even on a parse failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.gkehub.v1.FeatureProto
        .internal_static_google_cloud_gkehub_v1_CommonFeatureState_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.gkehub.v1.FeatureProto
        .internal_static_google_cloud_gkehub_v1_CommonFeatureState_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.gkehub.v1.CommonFeatureState.class,
            com.google.cloud.gkehub.v1.CommonFeatureState.Builder.class);
  }

  public static final int STATE_FIELD_NUMBER = 1;
  // null means "field not set"; getState() substitutes the default instance.
  private com.google.cloud.gkehub.v1.FeatureState state_;
  /**
   *
   *
   * <pre>
   * Output only. The "running state" of the Feature in this Hub.
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return Whether the state field is set.
   */
  @java.lang.Override
  public boolean hasState() {
    return state_ != null;
  }
  /**
   *
   *
   * <pre>
   * Output only. The "running state" of the Feature in this Hub.
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The state.
   */
  @java.lang.Override
  public com.google.cloud.gkehub.v1.FeatureState getState() {
    return state_ == null ? com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance() : state_;
  }
  /**
   *
   *
   * <pre>
   * Output only. The "running state" of the Feature in this Hub.
   * </pre>
   *
   * <code>
   * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.v1.FeatureStateOrBuilder getStateOrBuilder() {
    return getState();
  }

  // -1 = not computed yet, 0 = not initialized, 1 = initialized (memoized).
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (state_ != null) {
      output.writeMessage(1, getState());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (state_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getState());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gkehub.v1.CommonFeatureState)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.v1.CommonFeatureState other =
        (com.google.cloud.gkehub.v1.CommonFeatureState) obj;

    if (hasState() != other.hasState()) return false;
    if (hasState()) {
      if (!getState().equals(other.getState())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasState()) {
      hash = (37 * hash) + STATE_FIELD_NUMBER;
      hash = (53 * hash) + getState().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Standard protobuf parse entry points (delegate to PARSER). ---

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.gkehub.v1.CommonFeatureState prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * CommonFeatureState contains Hub-wide Feature status information.
   * </pre>
   *
   * Protobuf type {@code google.cloud.gkehub.v1.CommonFeatureState}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1.CommonFeatureState)
      com.google.cloud.gkehub.v1.CommonFeatureStateOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gkehub.v1.FeatureProto
          .internal_static_google_cloud_gkehub_v1_CommonFeatureState_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gkehub.v1.FeatureProto
          .internal_static_google_cloud_gkehub_v1_CommonFeatureState_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gkehub.v1.CommonFeatureState.class,
              com.google.cloud.gkehub.v1.CommonFeatureState.Builder.class);
    }

    // Construct using com.google.cloud.gkehub.v1.CommonFeatureState.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (stateBuilder_ == null) {
        state_ = null;
      } else {
        state_ = null;
        stateBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.gkehub.v1.FeatureProto
          .internal_static_google_cloud_gkehub_v1_CommonFeatureState_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.v1.CommonFeatureState getDefaultInstanceForType() {
      return com.google.cloud.gkehub.v1.CommonFeatureState.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.gkehub.v1.CommonFeatureState build() {
      com.google.cloud.gkehub.v1.CommonFeatureState result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.v1.CommonFeatureState buildPartial() {
      com.google.cloud.gkehub.v1.CommonFeatureState result =
          new com.google.cloud.gkehub.v1.CommonFeatureState(this);
      // Take the sub-message either directly or from its nested builder.
      if (stateBuilder_ == null) {
        result.state_ = state_;
      } else {
        result.state_ = stateBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.gkehub.v1.CommonFeatureState) {
        return mergeFrom((com.google.cloud.gkehub.v1.CommonFeatureState) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.gkehub.v1.CommonFeatureState other) {
      if (other == com.google.cloud.gkehub.v1.CommonFeatureState.getDefaultInstance()) return this;
      if (other.hasState()) {
        mergeState(other.getState());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.gkehub.v1.CommonFeatureState parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage = (com.google.cloud.gkehub.v1.CommonFeatureState) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Exactly one of state_ / stateBuilder_ is active at a time.
    private com.google.cloud.gkehub.v1.FeatureState state_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.v1.FeatureState,
            com.google.cloud.gkehub.v1.FeatureState.Builder,
            com.google.cloud.gkehub.v1.FeatureStateOrBuilder>
        stateBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return Whether the state field is set.
     */
    public boolean hasState() {
      return stateBuilder_ != null || state_ != null;
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The state.
     */
    public com.google.cloud.gkehub.v1.FeatureState getState() {
      if (stateBuilder_ == null) {
        return state_ == null
            ? com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance()
            : state_;
      } else {
        return stateBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setState(com.google.cloud.gkehub.v1.FeatureState value) {
      if (stateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        state_ = value;
        onChanged();
      } else {
        stateBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setState(com.google.cloud.gkehub.v1.FeatureState.Builder builderForValue) {
      if (stateBuilder_ == null) {
        state_ = builderForValue.build();
        onChanged();
      } else {
        stateBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeState(com.google.cloud.gkehub.v1.FeatureState value) {
      if (stateBuilder_ == null) {
        if (state_ != null) {
          // Field-wise merge of the incoming value into the existing one.
          state_ =
              com.google.cloud.gkehub.v1.FeatureState.newBuilder(state_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          state_ = value;
        }
        onChanged();
      } else {
        stateBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder clearState() {
      if (stateBuilder_ == null) {
        state_ = null;
        onChanged();
      } else {
        state_ = null;
        stateBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.gkehub.v1.FeatureState.Builder getStateBuilder() {
      onChanged();
      return getStateFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.gkehub.v1.FeatureStateOrBuilder getStateOrBuilder() {
      if (stateBuilder_ != null) {
        return stateBuilder_.getMessageOrBuilder();
      } else {
        return state_ == null
            ? com.google.cloud.gkehub.v1.FeatureState.getDefaultInstance()
            : state_;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The "running state" of the Feature in this Hub.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.v1.FeatureState state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.v1.FeatureState,
            com.google.cloud.gkehub.v1.FeatureState.Builder,
            com.google.cloud.gkehub.v1.FeatureStateOrBuilder>
        getStateFieldBuilder() {
      if (stateBuilder_ == null) {
        // Lazily switch from the plain field to builder-backed storage.
        stateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.v1.FeatureState,
                com.google.cloud.gkehub.v1.FeatureState.Builder,
                com.google.cloud.gkehub.v1.FeatureStateOrBuilder>(
                getState(), getParentForChildren(), isClean());
        state_ = null;
      }
      return stateBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1.CommonFeatureState)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1.CommonFeatureState)
  private static final com.google.cloud.gkehub.v1.CommonFeatureState DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1.CommonFeatureState();
  }

  public static com.google.cloud.gkehub.v1.CommonFeatureState getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser instance; delegates to the CodedInputStream constructor.
  private static final com.google.protobuf.Parser<CommonFeatureState> PARSER =
      new com.google.protobuf.AbstractParser<CommonFeatureState>() {
        @java.lang.Override
        public CommonFeatureState parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new CommonFeatureState(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<CommonFeatureState> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CommonFeatureState> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.gkehub.v1.CommonFeatureState getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.builtin;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.UDAF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFBridge;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFParameterInfo;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver2;
import org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.pig.Algebraic;
import org.apache.pig.EvalFunc;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.apache.pig.impl.util.hive.HiveUtils;

/**
 * Use Hive UDAF or GenericUDAF.
 * Example:
 *      define avg HiveUDAF('avg');
 *      A = load 'mydata' as (name:chararray, num:double);
 *      B = group A by name;
 *      C = foreach B generate group, avg(A.num);
 *
 * Implements {@link Algebraic} so Pig can run the aggregation in three
 * map/combine/reduce stages (Initial / Intermediate / Final), each mapped
 * onto the corresponding Hive {@link GenericUDAFEvaluator} {@link Mode}.
 */
public class HiveUDAF extends HiveUDFBase implements Algebraic {

    // Lazy-init guard: evaluator/inspectors are built on first exec/outputSchema,
    // once the input schema is known.
    private boolean inited = false;
    private String funcName;
    private String params;
    private GenericUDAFResolver udaf;

    /**
     * Bundles the Hive type info, object inspectors, and evaluator derived
     * from the Pig input schema for one aggregation stage.
     */
    static class SchemaAndEvaluatorInfo {
        private TypeInfo inputTypeInfo;
        private TypeInfo outputTypeInfo;
        private TypeInfo intermediateOutputTypeInfo;
        private ObjectInspector[] inputObjectInspectorAsArray;
        private ObjectInspector[] intermediateInputObjectInspectorAsArray;
        private StructObjectInspector inputObjectInspector;
        private ObjectInspector intermediateInputObjectInspector;
        private ObjectInspector intermediateOutputObjectInspector;
        private ObjectInspector outputObjectInspector;
        private GenericUDAFEvaluator evaluator;

        // Derives the Hive TypeInfo from the tuple schema inside the input bag
        // (field 0 of the UDF input is the bag; field 0 of its schema is the tuple).
        private static TypeInfo getInputTypeInfo(Schema inputSchema) throws IOException {
            FieldSchema innerFieldSchema = inputSchema.getField(0).schema.getField(0);
            ResourceFieldSchema rfs = new ResourceFieldSchema(innerFieldSchema);
            TypeInfo inputTypeInfo = HiveUtils.getTypeInfo(rfs);
            return inputTypeInfo;
        }

        // Builds the per-argument ObjectInspector array expected by Hive,
        // substituting constant inspectors where the funcspec declared constants.
        private static ObjectInspector[] getInputObjectInspectorAsArray(TypeInfo inputTypeInfo,
                ConstantObjectInspectInfo constantsInfo) throws IOException {
            StructObjectInspector inputObjectInspector =
                    (StructObjectInspector)HiveUtils.createObjectInspector(inputTypeInfo);
            ObjectInspector[] arguments =
                    new ObjectInspector[inputObjectInspector.getAllStructFieldRefs().size()];
            for (int i=0;i<inputObjectInspector.getAllStructFieldRefs().size();i++) {
                if (constantsInfo!=null && constantsInfo.get(i)!=null) {
                    arguments[i] = constantsInfo.get(i);
                } else {
                    arguments[i] = inputObjectInspector.getAllStructFieldRefs().get(i).getFieldObjectInspector();
                }
            }
            return arguments;
        }

        // Resolves the evaluator; GenericUDAFResolver2 gets the richer
        // parameter-info API, the legacy resolver gets a bare TypeInfo[].
        private static GenericUDAFEvaluator getEvaluator(TypeInfo inputTypeInfo, GenericUDAFResolver udaf,
                ConstantObjectInspectInfo constantsInfo) throws IOException {
            try {
                GenericUDAFEvaluator evaluator;
                ObjectInspector[] arguments = getInputObjectInspectorAsArray(inputTypeInfo, constantsInfo);
                if (udaf instanceof GenericUDAFResolver2) {
                    GenericUDAFParameterInfo paramInfo = new SimpleGenericUDAFParameterInfo(
                            arguments, false, false);
                    evaluator = ((GenericUDAFResolver2)udaf).getEvaluator(paramInfo);
                } else {
                    TypeInfo[] params = ((StructTypeInfo)inputTypeInfo)
                            .getAllStructFieldTypeInfos().toArray(new TypeInfo[0]);
                    evaluator = udaf.getEvaluator(params);
                }
                return evaluator;
            } catch (Exception e) {
                throw new IOException(e);
            }
        }

        /**
         * Initializes inspectors and the evaluator for the requested mode.
         * COMPLETE only needs input/output inspectors; PARTIAL1 and FINAL
         * additionally derive the intermediate representation, and FINAL is
         * initialized against that intermediate form. The call order here
         * (PARTIAL1 init before FINAL init) mirrors Hive's evaluator contract
         * and must not be reordered.
         */
        private void init(Schema inputSchema, GenericUDAFResolver udaf, Mode m,
                ConstantObjectInspectInfo constantsInfo) throws IOException {
            try {
                inputTypeInfo = getInputTypeInfo(inputSchema);
                inputObjectInspector = (StructObjectInspector)HiveUtils.createObjectInspector(inputTypeInfo);
                if (constantsInfo!=null) {
                    constantsInfo.injectConstantObjectInspector(inputObjectInspector);
                }
                inputObjectInspectorAsArray = getInputObjectInspectorAsArray(inputTypeInfo, constantsInfo);
                evaluator = getEvaluator(inputTypeInfo, udaf, constantsInfo);
                if (m == Mode.COMPLETE) {
                    outputObjectInspector = evaluator.init(Mode.COMPLETE, inputObjectInspectorAsArray);
                    outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
                    return;
                }
                if (m == Mode.PARTIAL1 || m == Mode.FINAL) {
                    intermediateOutputObjectInspector = evaluator.init(Mode.PARTIAL1, inputObjectInspectorAsArray);
                    intermediateOutputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(intermediateOutputObjectInspector);
                    if (m == Mode.FINAL) {
                        intermediateInputObjectInspector = HiveUtils.createObjectInspector(intermediateOutputTypeInfo);
                        intermediateInputObjectInspectorAsArray = new ObjectInspector[] {intermediateInputObjectInspector};
                        outputObjectInspector = evaluator.init(Mode.FINAL, intermediateInputObjectInspectorAsArray);
                        outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
                    }
                }
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    }

    SchemaAndEvaluatorInfo schemaAndEvaluatorInfo = new SchemaAndEvaluatorInfo();
    // Optional constant-argument descriptors parsed from the funcspec params.
    ConstantObjectInspectInfo constantsInfo;

    public HiveUDAF(String funcName) throws IOException, InstantiationException, IllegalAccessException {
        this.funcName = funcName;
        this.udaf = instantiateUDAF(funcName);
    }

    public HiveUDAF(String funcName, String params) throws IOException, InstantiationException, IllegalAccessException {
        this(funcName);
        constantsInfo = ConstantObjectInspectInfo.parse(params);
        this.params = params;
    }

    // Loads the Hive class named by funcName; legacy UDAF implementations are
    // adapted through GenericUDAFBridge, GenericUDAFResolver ones used directly.
    private static GenericUDAFResolver instantiateUDAF(String funcName) throws IOException, InstantiationException, IllegalAccessException {
        GenericUDAFResolver udaf;
        Class hiveUDAFClass = resolveFunc(funcName);
        if (UDAF.class.isAssignableFrom(hiveUDAFClass)) {
            udaf = new GenericUDAFBridge((UDAF)hiveUDAFClass.newInstance());
        } else if (GenericUDAFResolver.class.isAssignableFrom(hiveUDAFClass)){
            udaf = (GenericUDAFResolver)hiveUDAFClass.newInstance();
        } else {
            throw new IOException(getErrorMessage(hiveUDAFClass));
        }
        return udaf;
    }

    // NOTE(review): when params is present the funcspec becomes
    // "Class('funcName,params')" — a single quoted argument containing a
    // comma. Presumably Pig's funcspec parsing re-splits this into the
    // two-arg constructor; confirm against FuncSpec handling.
    @Override
    public String getInitial() {
        if (params == null) {
            return Initial.class.getName() + "('" + funcName + "')";
        } else {
            return Initial.class.getName() + "('" + funcName + "," + params + "')";
        }
    }

    @Override
    public String getIntermed() {
        if (params == null) {
            return Intermediate.class.getName() + "('" + funcName + "')";
        } else {
            return Intermediate.class.getName() + "('" + funcName + "," + params + "')";
        }
    }

    @Override
    public String getFinal() {
        if (params == null) {
            return Final.class.getName() + "('" + funcName + "')";
        } else {
            return Final.class.getName() + "('" + funcName + "," + params + "')";
        }
    }

    /**
     * Map-side stage: simply forwards the first tuple of the bag (bags at
     * this stage hold at most one tuple); no Hive evaluator is involved yet.
     */
    static public class Initial extends EvalFunc<Tuple> {
        public Initial(String funcName) {
        }
        public Initial(String funcName, String params) {
        }
        @Override
        public Tuple exec(Tuple input) throws IOException {
            DataBag bg = (DataBag) input.get(0);
            Tuple tp = null;
            if(bg.iterator().hasNext()) {
                tp = bg.iterator().next();
            }
            return tp;
        }
    }

    /**
     * Combiner stage: iterates raw rows into the evaluator (Mode.PARTIAL1)
     * and emits the partial aggregation wrapped in a tuple.
     */
    static public class Intermediate extends EvalFunc<Tuple> {
        private boolean inited = false;
        private String funcName;
        ConstantObjectInspectInfo constantsInfo;
        private SchemaAndEvaluatorInfo schemaAndEvaluatorInfo = new SchemaAndEvaluatorInfo();
        private static TupleFactory tf = TupleFactory.getInstance();

        public Intermediate(String funcName) {
            this.funcName = funcName;
        }
        public Intermediate(String funcName, String params) throws IOException {
            this.funcName = funcName;
            constantsInfo = ConstantObjectInspectInfo.parse(params);
        }
        @Override
        public Tuple exec(Tuple input) throws IOException {
            try {
                if (!inited) {
                    schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName), Mode.PARTIAL1, constantsInfo);
                    inited = true;
                }
                DataBag b = (DataBag)input.get(0);
                AggregationBuffer agg = schemaAndEvaluatorInfo.evaluator.getNewAggregationBuffer();
                for (Iterator<Tuple> it = b.iterator(); it.hasNext();) {
                    Tuple t = it.next();
                    List inputs = schemaAndEvaluatorInfo.inputObjectInspector.getStructFieldsDataAsList(t);
                    schemaAndEvaluatorInfo.evaluator.iterate(agg, inputs.toArray());
                }
                Object returnValue = schemaAndEvaluatorInfo.evaluator.terminatePartial(agg);
                Tuple result = tf.newTuple();
                result.append(HiveUtils.convertHiveToPig(returnValue, schemaAndEvaluatorInfo.intermediateOutputObjectInspector, null));
                return result;
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    }

    /**
     * Reduce stage: merges the partial results (Mode.FINAL) and converts the
     * terminal Hive value back into a Pig object.
     */
    static public class Final extends EvalFunc<Object> {
        private boolean inited = false;
        private String funcName;
        ConstantObjectInspectInfo constantsInfo;
        private SchemaAndEvaluatorInfo schemaAndEvaluatorInfo = new SchemaAndEvaluatorInfo();

        public Final(String funcName) {
            this.funcName = funcName;
        }
        public Final(String funcName, String params) throws IOException {
            this.funcName = funcName;
            constantsInfo = ConstantObjectInspectInfo.parse(params);
        }
        @Override
        public Object exec(Tuple input) throws IOException {
            try {
                if (!inited) {
                    schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName), Mode.FINAL, constantsInfo);
                    // Final stage also hands the evaluator a MapredContext.
                    schemaAndEvaluatorInfo.evaluator.configure(instantiateMapredContext());
                    inited = true;
                }
                DataBag b = (DataBag)input.get(0);
                AggregationBuffer agg = schemaAndEvaluatorInfo.evaluator.getNewAggregationBuffer();
                for (Iterator<Tuple> it = b.iterator(); it.hasNext();) {
                    Tuple t = it.next();
                    schemaAndEvaluatorInfo.evaluator.merge(agg, t.get(0));
                }
                Object returnValue = schemaAndEvaluatorInfo.evaluator.terminate(agg);
                Object result = HiveUtils.convertHiveToPig(returnValue, schemaAndEvaluatorInfo.outputObjectInspector, null);
                return result;
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    }

    /**
     * Non-algebraic (single-pass) execution path: iterate every tuple of the
     * bag through the evaluator in Mode.COMPLETE and return the final value.
     */
    @Override
    public Object exec(Tuple input) throws IOException {
        try {
            if (!inited) {
                schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName), Mode.COMPLETE, constantsInfo);
                inited = true;
            }
            AggregationBuffer agg = schemaAndEvaluatorInfo.evaluator.getNewAggregationBuffer();
            DataBag bg = (DataBag) input.get(0);
            Tuple tp = null;
            for (Iterator<Tuple> it = bg.iterator(); it.hasNext();) {
                tp = it.next();
                List inputs = schemaAndEvaluatorInfo.inputObjectInspector.getStructFieldsDataAsList(tp);
                schemaAndEvaluatorInfo.evaluator.iterate(agg, inputs.toArray());
            }
            Object returnValue = schemaAndEvaluatorInfo.evaluator.terminate(agg);
            Object result = HiveUtils.convertHiveToPig(returnValue, schemaAndEvaluatorInfo.outputObjectInspector, null);
            return result;
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    /**
     * Reports the Pig schema of the aggregate's result, derived from the
     * Hive output TypeInfo produced by a Mode.COMPLETE initialization.
     */
    @Override
    public Schema outputSchema(Schema input) {
        try {
            if (!inited) {
                schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName), Mode.COMPLETE, constantsInfo);
                inited = true;
            }
            ResourceFieldSchema rfs = HiveUtils.getResourceFieldSchema(schemaAndEvaluatorInfo.outputTypeInfo);
            ResourceSchema outputSchema = new ResourceSchema();
            outputSchema.setFields(new ResourceFieldSchema[] {rfs});
            return Schema.getPigSchema(outputSchema);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.presto.memory.LocalMemoryContext; import com.facebook.presto.metadata.Split; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ConnectorPageSource; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.PageBuilder; import com.facebook.presto.spi.RecordCursor; import com.facebook.presto.spi.RecordPageSource; import com.facebook.presto.spi.UpdatablePageSource; import com.facebook.presto.spi.type.Type; import com.facebook.presto.split.PageSourceProvider; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Optional; import java.util.function.Supplier; import static com.facebook.presto.SystemSessionProperties.getProcessingOptimization; import static com.facebook.presto.sql.analyzer.FeaturesConfig.ProcessingOptimization.COLUMNAR; import static com.facebook.presto.sql.analyzer.FeaturesConfig.ProcessingOptimization.COLUMNAR_DICTIONARY; import static com.facebook.presto.sql.analyzer.FeaturesConfig.ProcessingOptimization.DISABLED; import static com.google.common.base.Preconditions.checkState; import static java.util.Objects.requireNonNull; 
public class ScanFilterAndProjectOperator implements SourceOperator, Closeable { private static final int ROWS_PER_PAGE = 16384; private final OperatorContext operatorContext; private final PlanNodeId planNodeId; private final PageSourceProvider pageSourceProvider; private final List<Type> types; private final List<ColumnHandle> columns; private final PageBuilder pageBuilder; private final CursorProcessor cursorProcessor; private final PageProcessor pageProcessor; private final LocalMemoryContext pageSourceMemoryContext; private final LocalMemoryContext pageBuilderMemoryContext; private final SettableFuture<?> blocked = SettableFuture.create(); private final String processingOptimization; private RecordCursor cursor; private ConnectorPageSource pageSource; private Split split; private Page currentPage; private int currentPosition; private boolean finishing; private long completedBytes; private long readTimeNanos; protected ScanFilterAndProjectOperator( OperatorContext operatorContext, PlanNodeId sourceId, PageSourceProvider pageSourceProvider, CursorProcessor cursorProcessor, PageProcessor pageProcessor, Iterable<ColumnHandle> columns, Iterable<Type> types) { this.cursorProcessor = requireNonNull(cursorProcessor, "cursorProcessor is null"); this.pageProcessor = requireNonNull(pageProcessor, "pageProcessor is null"); this.operatorContext = requireNonNull(operatorContext, "operatorContext is null"); this.planNodeId = requireNonNull(sourceId, "sourceId is null"); this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null"); this.types = ImmutableList.copyOf(requireNonNull(types, "types is null")); this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null")); this.pageSourceMemoryContext = operatorContext.getSystemMemoryContext().newLocalMemoryContext(); this.pageBuilderMemoryContext = operatorContext.getSystemMemoryContext().newLocalMemoryContext(); this.processingOptimization = 
getProcessingOptimization(operatorContext.getSession()); this.pageBuilder = new PageBuilder(getTypes()); } @Override public OperatorContext getOperatorContext() { return operatorContext; } @Override public PlanNodeId getSourceId() { return planNodeId; } @Override public Supplier<Optional<UpdatablePageSource>> addSplit(Split split) { requireNonNull(split, "split is null"); checkState(this.split == null, "Table scan split already set"); if (finishing) { return Optional::empty; } this.split = split; Object splitInfo = split.getInfo(); if (splitInfo != null) { operatorContext.setInfoSupplier(() -> splitInfo); } blocked.set(null); return () -> { if (pageSource instanceof UpdatablePageSource) { return Optional.of((UpdatablePageSource) pageSource); } return Optional.empty(); }; } @Override public void noMoreSplits() { if (split == null) { finishing = true; } blocked.set(null); } @Override public final List<Type> getTypes() { return types; } @Override public void close() { finish(); } @Override public void finish() { blocked.set(null); if (pageSource != null) { try { pageSource.close(); } catch (IOException e) { throw Throwables.propagate(e); } } else if (cursor != null) { cursor.close(); } finishing = true; } @Override public final boolean isFinished() { if (!finishing) { createSourceIfNecessary(); } if (pageSource != null && pageSource.isFinished() && currentPage == null) { finishing = true; } return finishing && pageBuilder.isEmpty(); } @Override public ListenableFuture<?> isBlocked() { return blocked; } @Override public final boolean needsInput() { return false; } @Override public final void addInput(Page page) { throw new UnsupportedOperationException(); } @Override public Page getOutput() { if (!finishing) { createSourceIfNecessary(); if (cursor != null) { int rowsProcessed = cursorProcessor.process(operatorContext.getSession().toConnectorSession(), cursor, ROWS_PER_PAGE, pageBuilder); pageSourceMemoryContext.setBytes(cursor.getSystemMemoryUsage()); long 
bytesProcessed = cursor.getCompletedBytes() - completedBytes; long elapsedNanos = cursor.getReadTimeNanos() - readTimeNanos; operatorContext.recordGeneratedInput(bytesProcessed, rowsProcessed, elapsedNanos); completedBytes = cursor.getCompletedBytes(); readTimeNanos = cursor.getReadTimeNanos(); if (rowsProcessed == 0) { finishing = true; } } else { if (currentPage == null) { currentPage = pageSource.getNextPage(); if (currentPage != null) { // update operator stats long endCompletedBytes = pageSource.getCompletedBytes(); long endReadTimeNanos = pageSource.getReadTimeNanos(); operatorContext.recordGeneratedInput(endCompletedBytes - completedBytes, currentPage.getPositionCount(), endReadTimeNanos - readTimeNanos); completedBytes = endCompletedBytes; readTimeNanos = endReadTimeNanos; } currentPosition = 0; } if (currentPage != null) { switch (processingOptimization) { case COLUMNAR: { Page page = pageProcessor.processColumnar(operatorContext.getSession().toConnectorSession(), currentPage, getTypes()); currentPage = null; currentPosition = 0; return page; } case COLUMNAR_DICTIONARY: { Page page = pageProcessor.processColumnarDictionary(operatorContext.getSession().toConnectorSession(), currentPage, getTypes()); currentPage = null; currentPosition = 0; return page; } case DISABLED: { currentPosition = pageProcessor.process(operatorContext.getSession().toConnectorSession(), currentPage, currentPosition, currentPage.getPositionCount(), pageBuilder); if (currentPosition == currentPage.getPositionCount()) { currentPage = null; currentPosition = 0; } break; } default: throw new IllegalStateException(String.format("Found unexpected value %s for processingOptimization", processingOptimization)); } } pageSourceMemoryContext.setBytes(pageSource.getSystemMemoryUsage()); } } // only return a full page if buffer is full or we are finishing if (pageBuilder.isEmpty() || (!finishing && !pageBuilder.isFull())) { pageBuilderMemoryContext.setBytes(pageBuilder.getRetainedSizeInBytes()); 
return null; } Page page = pageBuilder.build(); pageBuilder.reset(); pageBuilderMemoryContext.setBytes(pageBuilder.getRetainedSizeInBytes()); return page; } private void createSourceIfNecessary() { if ((split != null) && (pageSource == null) && (cursor == null)) { ConnectorPageSource source = pageSourceProvider.createPageSource(operatorContext.getSession(), split, columns); if (source instanceof RecordPageSource) { cursor = ((RecordPageSource) source).getCursor(); } else { pageSource = source; } } } public static class ScanFilterAndProjectOperatorFactory implements SourceOperatorFactory { private final int operatorId; private final PlanNodeId planNodeId; private final Supplier<CursorProcessor> cursorProcessor; private final Supplier<PageProcessor> pageProcessor; private final PlanNodeId sourceId; private final PageSourceProvider pageSourceProvider; private final List<ColumnHandle> columns; private final List<Type> types; private boolean closed; public ScanFilterAndProjectOperatorFactory( int operatorId, PlanNodeId planNodeId, PlanNodeId sourceId, PageSourceProvider pageSourceProvider, Supplier<CursorProcessor> cursorProcessor, Supplier<PageProcessor> pageProcessor, Iterable<ColumnHandle> columns, List<Type> types) { this.operatorId = operatorId; this.planNodeId = requireNonNull(planNodeId, "planNodeId is null"); this.cursorProcessor = requireNonNull(cursorProcessor, "cursorProcessor is null"); this.pageProcessor = requireNonNull(pageProcessor, "pageProcessor is null"); this.sourceId = requireNonNull(sourceId, "sourceId is null"); this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null"); this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null")); this.types = requireNonNull(types, "types is null"); } @Override public PlanNodeId getSourceId() { return sourceId; } @Override public List<Type> getTypes() { return types; } @Override public SourceOperator createOperator(DriverContext driverContext) { 
checkState(!closed, "Factory is already closed"); OperatorContext operatorContext = driverContext.addOperatorContext(operatorId, planNodeId, ScanFilterAndProjectOperator.class.getSimpleName()); return new ScanFilterAndProjectOperator( operatorContext, sourceId, pageSourceProvider, cursorProcessor.get(), pageProcessor.get(), columns, types); } @Override public void close() { closed = true; } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import java.io.Closeable; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; /** * An internal registry for tokenizer, token filter, char filter and analyzer. 
 * This class exists per node and allows to create per-index {@link IndexAnalyzers} via {@link #build(IndexSettings)}.
 *
 * <p>It merges three sources of analysis components: plugin-registered providers
 * (the maps passed to the constructor), pre-configured ("pre-built") components,
 * and per-index settings. Thread-safe for reads; {@link #getAnalyzer(String)}
 * caches instantiated global analyzers in a {@link ConcurrentHashMap}.
 */
public final class AnalysisRegistry implements Closeable {
    public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter";
    public static final String INDEX_ANALYSIS_FILTER = "index.analysis.filter";
    public static final String INDEX_ANALYSIS_TOKENIZER = "index.analysis.tokenizer";
    private final PrebuiltAnalysis prebuiltAnalysis;
    // Cache of lazily-built global analyzers, keyed by analyzer name.
    private final Map<String, Analyzer> cachedAnalyzer = new ConcurrentHashMap<>();

    private final Environment environment;
    private final Map<String, AnalysisProvider<CharFilterFactory>> charFilters;
    private final Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters;
    private final Map<String, AnalysisProvider<TokenizerFactory>> tokenizers;
    private final Map<String, AnalysisProvider<AnalyzerProvider<?>>> analyzers;
    private final Map<String, AnalysisProvider<AnalyzerProvider<?>>> normalizers;

    public AnalysisRegistry(Environment environment,
                            Map<String, AnalysisProvider<CharFilterFactory>> charFilters,
                            Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters,
                            Map<String, AnalysisProvider<TokenizerFactory>> tokenizers,
                            Map<String, AnalysisProvider<AnalyzerProvider<?>>> analyzers,
                            Map<String, AnalysisProvider<AnalyzerProvider<?>>> normalizers,
                            Map<String, PreConfiguredCharFilter> preConfiguredCharFilters,
                            Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters,
                            Map<String, PreConfiguredTokenizer> preConfiguredTokenizers) {
        this.environment = environment;
        this.charFilters = unmodifiableMap(charFilters);
        this.tokenFilters = unmodifiableMap(tokenFilters);
        this.tokenizers = unmodifiableMap(tokenizers);
        this.analyzers = unmodifiableMap(analyzers);
        this.normalizers = unmodifiableMap(normalizers);
        prebuiltAnalysis = new PrebuiltAnalysis(preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers);
    }

    /**
     * Returns a {@link Settings} by groupName from {@link IndexSettings} or a default {@link Settings}
     * @param indexSettings an index settings
     * @param groupName tokenizer/token filter/char filter name
     * @return {@link Settings}
     */
    public static Settings getSettingsFromIndexSettings(IndexSettings indexSettings, String groupName) {
        Settings settings = indexSettings.getSettings().getAsSettings(groupName);
        if (settings.isEmpty()) {
            // Fall back to a minimal settings object carrying only the index's created version.
            settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexSettings.getIndexVersionCreated()).build();
        }
        return settings;
    }

    /**
     * Returns a registered {@link TokenizerFactory} provider by name or <code>null</code> if the tokenizer was not registered
     */
    public AnalysisModule.AnalysisProvider<TokenizerFactory> getTokenizerProvider(String tokenizer) {
        // Plugin-registered providers take precedence over pre-configured ones.
        return tokenizers.getOrDefault(tokenizer, this.prebuiltAnalysis.getTokenizerFactory(tokenizer));
    }

    /**
     * Returns a registered {@link TokenFilterFactory} provider by name or <code>null</code> if the token filter was not registered
     */
    public AnalysisModule.AnalysisProvider<TokenFilterFactory> getTokenFilterProvider(String tokenFilter) {
        return tokenFilters.getOrDefault(tokenFilter, this.prebuiltAnalysis.getTokenFilterFactory(tokenFilter));
    }

    /**
     * Returns a registered {@link CharFilterFactory} provider by name or <code>null</code> if the char filter was not registered
     */
    public AnalysisModule.AnalysisProvider<CharFilterFactory> getCharFilterProvider(String charFilter) {
        return charFilters.getOrDefault(charFilter, this.prebuiltAnalysis.getCharFilterFactory(charFilter));
    }

    /**
     * Returns a registered {@link Analyzer} provider by name or <code>null</code> if the analyzer was not registered.
     * Note: unlike the factory getters above, pre-built analyzers take precedence here;
     * plugin-registered analyzers are only consulted (and then cached) when no
     * pre-built analyzer of that name exists.
     */
    public Analyzer getAnalyzer(String analyzer) throws IOException {
        AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> analyzerProvider = this.prebuiltAnalysis.getAnalyzerProvider(analyzer);
        if (analyzerProvider == null) {
            AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> provider = analyzers.get(analyzer);
            return provider == null ? null : cachedAnalyzer.computeIfAbsent(analyzer, (key) -> {
                try {
                    return provider.get(environment, key).get();
                } catch (IOException ex) {
                    // computeIfAbsent's mapping function cannot throw checked exceptions.
                    throw new ElasticsearchException("failed to load analyzer for name " + key, ex);
                }}
            );
        }
        return analyzerProvider.get(environment, analyzer).get();
    }

    @Override
    public void close() throws IOException {
        try {
            prebuiltAnalysis.close();
        } finally {
            // Close cached global analyzers even if closing the pre-built set failed.
            IOUtils.close(cachedAnalyzer.values());
        }
    }

    /**
     * Creates an index-level {@link IndexAnalyzers} from this registry using the given index settings
     */
    public IndexAnalyzers build(IndexSettings indexSettings) throws IOException {
        final Map<String, CharFilterFactory> charFilterFactories = buildCharFilterFactories(indexSettings);
        final Map<String, TokenizerFactory> tokenizerFactories = buildTokenizerFactories(indexSettings);
        final Map<String, TokenFilterFactory> tokenFilterFactories = buildTokenFilterFactories(indexSettings);
        final Map<String, AnalyzerProvider<?>> analyzierFactories = buildAnalyzerFactories(indexSettings);
        final Map<String, AnalyzerProvider<?>> normalizerFactories = buildNormalizerFactories(indexSettings);
        return build(indexSettings, analyzierFactories, normalizerFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories);
    }

    public Map<String, TokenFilterFactory> buildTokenFilterFactories(IndexSettings indexSettings) throws IOException {
        final Map<String, Settings> tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER);
        Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters = new HashMap<>(this.tokenFilters);
        /*
         * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
         * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
         * hide internal data-structures as much as possible.
         */
        tokenFilters.put("synonym", requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)));
        tokenFilters.put("synonym_graph", requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings)));
        return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters);
    }

    public Map<String, TokenizerFactory> buildTokenizerFactories(IndexSettings indexSettings) throws IOException {
        final Map<String, Settings> tokenizersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_TOKENIZER);
        return buildMapping(Component.TOKENIZER, indexSettings, tokenizersSettings, tokenizers, prebuiltAnalysis.preConfiguredTokenizers);
    }

    public Map<String, CharFilterFactory> buildCharFilterFactories(IndexSettings indexSettings) throws IOException {
        final Map<String, Settings> charFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_CHAR_FILTER);
        return buildMapping(Component.CHAR_FILTER, indexSettings, charFiltersSettings, charFilters, prebuiltAnalysis.preConfiguredCharFilterFactories);
    }

    public Map<String, AnalyzerProvider<?>> buildAnalyzerFactories(IndexSettings indexSettings) throws IOException {
        final Map<String, Settings> analyzersSettings = indexSettings.getSettings().getGroups("index.analysis.analyzer");
        return buildMapping(Component.ANALYZER, indexSettings, analyzersSettings, analyzers, prebuiltAnalysis.analyzerProviderFactories);
    }

    public Map<String, AnalyzerProvider<?>> buildNormalizerFactories(IndexSettings indexSettings) throws IOException {
        final Map<String, Settings> normalizersSettings = indexSettings.getSettings().getGroups("index.analysis.normalizer");
        // TODO: Have pre-built normalizers
        return buildMapping(Component.NORMALIZER, indexSettings, normalizersSettings, normalizers, Collections.emptyMap());
    }

    /**
     * Returns a registered {@link TokenizerFactory} provider by {@link IndexSettings}
     * or a registered {@link TokenizerFactory} provider by predefined name
     * or <code>null</code> if the tokenizer was not registered
     * @param tokenizer global or defined tokenizer name
     * @param indexSettings an index settings
     * @return {@link TokenizerFactory} provider or <code>null</code>
     */
    public AnalysisProvider<TokenizerFactory> getTokenizerProvider(String tokenizer, IndexSettings indexSettings) {
        final Map<String, Settings> tokenizerSettings = indexSettings.getSettings().getGroups("index.analysis.tokenizer");
        if (tokenizerSettings.containsKey(tokenizer)) {
            // Index-level configuration wins: resolve the provider by the configured "type".
            Settings currentSettings = tokenizerSettings.get(tokenizer);
            return getAnalysisProvider(Component.TOKENIZER, tokenizers, tokenizer, currentSettings.get("type"));
        } else {
            return getTokenizerProvider(tokenizer);
        }
    }

    /**
     * Returns a registered {@link TokenFilterFactory} provider by {@link IndexSettings}
     * or a registered {@link TokenFilterFactory} provider by predefined name
     * or <code>null</code> if the tokenFilter was not registered
     * @param tokenFilter global or defined tokenFilter name
     * @param indexSettings an index settings
     * @return {@link TokenFilterFactory} provider or <code>null</code>
     */
    public AnalysisProvider<TokenFilterFactory> getTokenFilterProvider(String tokenFilter, IndexSettings indexSettings) {
        final Map<String, Settings> tokenFilterSettings = indexSettings.getSettings().getGroups("index.analysis.filter");
        if (tokenFilterSettings.containsKey(tokenFilter)) {
            Settings currentSettings = tokenFilterSettings.get(tokenFilter);
            String typeName = currentSettings.get("type");
            /*
             * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
             * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
             * hide internal data-structures as much as possible.
             */
            if ("synonym".equals(typeName)) {
                return requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings));
            } else if ("synonym_graph".equals(typeName)) {
                return requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings));
            } else {
                return getAnalysisProvider(Component.FILTER, tokenFilters, tokenFilter, typeName);
            }
        } else {
            return getTokenFilterProvider(tokenFilter);
        }
    }

    /**
     * Returns a registered {@link CharFilterFactory} provider by {@link IndexSettings}
     * or a registered {@link CharFilterFactory} provider by predefined name
     * or <code>null</code> if the charFilter was not registered
     * @param charFilter global or defined charFilter name
     * @param indexSettings an index settings
     * @return {@link CharFilterFactory} provider or <code>null</code>
     */
    public AnalysisProvider<CharFilterFactory> getCharFilterProvider(String charFilter, IndexSettings indexSettings) {
        final Map<String, Settings> tokenFilterSettings = indexSettings.getSettings().getGroups("index.analysis.char_filter");
        if (tokenFilterSettings.containsKey(charFilter)) {
            Settings currentSettings = tokenFilterSettings.get(charFilter);
            return getAnalysisProvider(Component.CHAR_FILTER, charFilters, charFilter, currentSettings.get("type"));
        } else {
            return getCharFilterProvider(charFilter);
        }
    }

    // Wraps a provider so that requiresAnalysisSettings() reports true, which
    // excludes it from default (settings-less) registration in buildMapping.
    private static <T> AnalysisModule.AnalysisProvider<T> requiresAnalysisSettings(AnalysisModule.AnalysisProvider<T> provider) {
        return new AnalysisModule.AnalysisProvider<T>() {
            @Override
            public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
                return provider.get(indexSettings, environment, name, settings);
            }
            @Override
            public boolean requiresAnalysisSettings() {
                return true;
            }
        };
    }

    // Component kind, used for error messages and type-specific handling in buildMapping.
    enum Component {
        ANALYZER {
            @Override
            public String toString() {
                return "analyzer";
            }
        },
        NORMALIZER {
            @Override
            public String toString() {
                return "normalizer";
            }
        },
        CHAR_FILTER {
            @Override
            public String toString() {
                return "char_filter";
            }
        },
        TOKENIZER {
            @Override
            public String toString() {
                return "tokenizer";
            }
        },
        FILTER {
            @Override
            public String toString() {
                return "filter";
            }
        };
    }

    /**
     * Instantiates all factories of one component kind for an index: first the
     * explicitly configured ones (from {@code settingsMap}), then the registered
     * providers that need no settings, then any remaining pre-built defaults.
     */
    @SuppressWarnings("unchecked")
    private <T> Map<String, T> buildMapping(Component component, IndexSettings settings, Map<String, Settings> settingsMap,
                    Map<String, ? extends AnalysisModule.AnalysisProvider<T>> providerMap,
                    Map<String, ? extends AnalysisModule.AnalysisProvider<T>> defaultInstance) throws IOException {
        Settings defaultSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, settings.getIndexVersionCreated()).build();
        Map<String, T> factories = new HashMap<>();
        for (Map.Entry<String, Settings> entry : settingsMap.entrySet()) {
            String name = entry.getKey();
            Settings currentSettings = entry.getValue();
            String typeName = currentSettings.get("type");
            if (component == Component.ANALYZER) {
                T factory = null;
                if (typeName == null) {
                    // No explicit type: a "tokenizer" key implies a custom analyzer.
                    if (currentSettings.get("tokenizer") != null) {
                        factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings, environment);
                    } else {
                        throw new IllegalArgumentException(component + " [" + name + "] must specify either an analyzer type, or a tokenizer");
                    }
                } else if (typeName.equals("custom")) {
                    factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings, environment);
                }
                if (factory != null) {
                    factories.put(name, factory);
                    continue;
                }
            } else if (component == Component.NORMALIZER) {
                // normalizers are always "custom" (no other types exist yet).
                if (typeName == null || typeName.equals("custom")) {
                    T factory = (T) new CustomNormalizerProvider(settings, name, currentSettings);
                    factories.put(name, factory);
                    continue;
                }
            }
            AnalysisProvider<T> type = getAnalysisProvider(component, providerMap, name, typeName);
            if (type == null) {
                throw new IllegalArgumentException("Unknown " + component + " type [" + typeName + "] for [" + name + "]");
            }
            final T factory = type.get(settings, environment, name, currentSettings);
            factories.put(name, factory);
        }
        // go over the char filters in the bindings and register the ones that are not configured
        for (Map.Entry<String, ? extends AnalysisModule.AnalysisProvider<T>> entry : providerMap.entrySet()) {
            String name = entry.getKey();
            AnalysisModule.AnalysisProvider<T> provider = entry.getValue();
            // we don't want to re-register one that already exists
            if (settingsMap.containsKey(name)) {
                continue;
            }
            // check, if it requires settings, then don't register it, we know default has no settings...
            if (provider.requiresAnalysisSettings()) {
                continue;
            }
            // Prefer the pre-built default instance for this name, if one exists.
            AnalysisModule.AnalysisProvider<T> defaultProvider = defaultInstance.get(name);
            final T instance;
            if (defaultProvider == null) {
                instance = provider.get(settings, environment, name, defaultSettings);
            } else {
                instance = defaultProvider.get(settings, environment, name, defaultSettings);
            }
            factories.put(name, instance);
        }
        // Finally add pre-built defaults that were neither configured nor registered.
        for (Map.Entry<String, ? extends AnalysisModule.AnalysisProvider<T>> entry : defaultInstance.entrySet()) {
            final String name = entry.getKey();
            final AnalysisModule.AnalysisProvider<T> provider = entry.getValue();
            if (factories.containsKey(name) == false) {
                final T instance = provider.get(settings, environment, name, defaultSettings);
                // NOTE(review): this containsKey check is repeated; the second check is
                // redundant on a single thread — presumably defensive. Confirm before removing.
                if (factories.containsKey(name) == false) {
                    factories.put(name, instance);
                }
            }
        }
        return factories;
    }

    // Resolves a provider by its configured type name, failing fast with a clear
    // message when the type is missing or unknown.
    private <T> AnalysisProvider<T> getAnalysisProvider(Component component, Map<String, ? extends AnalysisProvider<T>> providerMap, String name, String typeName) {
        if (typeName == null) {
            throw new IllegalArgumentException(component + " [" + name + "] must specify either an analyzer type, or a tokenizer");
        }
        AnalysisProvider<T> type = providerMap.get(typeName);
        if (type == null) {
            throw new IllegalArgumentException("Unknown " + component + " type [" + typeName + "] for [" + name + "]");
        }
        return type;
    }

    /**
     * Holder for the node's pre-configured analysis components: every
     * {@link PreBuiltAnalyzers} value plus the pre-configured char filters,
     * token filters and tokenizers passed in at construction.
     */
    private static class PrebuiltAnalysis implements Closeable {

        final Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<?>>> analyzerProviderFactories;
        final Map<String, ? extends AnalysisProvider<TokenFilterFactory>> preConfiguredTokenFilters;
        final Map<String, ? extends AnalysisProvider<TokenizerFactory>> preConfiguredTokenizers;
        final Map<String, ? extends AnalysisProvider<CharFilterFactory>> preConfiguredCharFilterFactories;

        private PrebuiltAnalysis(
                Map<String, PreConfiguredCharFilter> preConfiguredCharFilters,
                Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters,
                Map<String, PreConfiguredTokenizer> preConfiguredTokenizers) {
            Map<String, PreBuiltAnalyzerProviderFactory> analyzerProviderFactories = new HashMap<>();

            // Analyzers
            for (PreBuiltAnalyzers preBuiltAnalyzerEnum : PreBuiltAnalyzers.values()) {
                String name = preBuiltAnalyzerEnum.name().toLowerCase(Locale.ROOT);
                analyzerProviderFactories.put(name, new PreBuiltAnalyzerProviderFactory(name, AnalyzerScope.INDICES, preBuiltAnalyzerEnum.getAnalyzer(Version.CURRENT)));
            }

            this.analyzerProviderFactories = Collections.unmodifiableMap(analyzerProviderFactories);
            this.preConfiguredCharFilterFactories = preConfiguredCharFilters;
            this.preConfiguredTokenFilters = preConfiguredTokenFilters;
            this.preConfiguredTokenizers = preConfiguredTokenizers;
        }

        public AnalysisModule.AnalysisProvider<CharFilterFactory> getCharFilterFactory(String name) {
            return preConfiguredCharFilterFactories.get(name);
        }

        public AnalysisModule.AnalysisProvider<TokenFilterFactory> getTokenFilterFactory(String name) {
            return preConfiguredTokenFilters.get(name);
        }

        public AnalysisModule.AnalysisProvider<TokenizerFactory> getTokenizerFactory(String name) {
            return preConfiguredTokenizers.get(name);
        }

        public AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> getAnalyzerProvider(String name) {
            return analyzerProviderFactories.get(name);
        }

        Analyzer analyzer(String name) {
            PreBuiltAnalyzerProviderFactory analyzerProviderFactory = (PreBuiltAnalyzerProviderFactory) analyzerProviderFactories.get(name);
            if (analyzerProviderFactory == null) {
                return null;
            }
            return analyzerProviderFactory.analyzer();
        }

        @Override
        public void close() throws IOException {
            IOUtils.close(analyzerProviderFactories.values().stream().map((a) -> ((PreBuiltAnalyzerProviderFactory)a).analyzer()).collect(Collectors.toList()));
        }
    }

    /**
     * Assembles the per-index {@link IndexAnalyzers}: instantiates every analyzer
     * and normalizer, synthesizes a standard "default" analyzer when none is
     * configured, and wires up the default search / search-quote fallbacks.
     *
     * @throws IllegalArgumentException if no default analyzer can be resolved,
     *         an analyzer name starts with '_', or the legacy
     *         "default_index" analyzer setting is used
     */
    public IndexAnalyzers build(IndexSettings indexSettings,
                                Map<String, AnalyzerProvider<?>> analyzerProviders,
                                Map<String, AnalyzerProvider<?>> normalizerProviders,
                                Map<String, TokenizerFactory> tokenizerFactoryFactories,
                                Map<String, CharFilterFactory> charFilterFactoryFactories,
                                Map<String, TokenFilterFactory> tokenFilterFactoryFactories) {

        Index index = indexSettings.getIndex();
        analyzerProviders = new HashMap<>(analyzerProviders);
        Map<String, NamedAnalyzer> analyzers = new HashMap<>();
        Map<String, NamedAnalyzer> normalizers = new HashMap<>();
        for (Map.Entry<String, AnalyzerProvider<?>> entry : analyzerProviders.entrySet()) {
            processAnalyzerFactory(indexSettings, entry.getKey(), entry.getValue(), analyzers, tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories);
        }
        for (Map.Entry<String, AnalyzerProvider<?>> entry : normalizerProviders.entrySet()) {
            // Normalizers are always built on the keyword tokenizer.
            processNormalizerFactory(entry.getKey(), entry.getValue(), normalizers, tokenizerFactoryFactories.get("keyword"), tokenFilterFactoryFactories, charFilterFactoryFactories);
        }
        if (!analyzers.containsKey("default")) {
            // No configured default: synthesize a standard analyzer.
            processAnalyzerFactory(indexSettings, "default", new StandardAnalyzerProvider(indexSettings, null, "default", Settings.Builder.EMPTY_SETTINGS),
                    analyzers, tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories);
        }
        if (!analyzers.containsKey("default_search")) {
            analyzers.put("default_search", analyzers.get("default"));
        }
        // NOTE(review): this key is "default_search_quoted" but the lookup below uses
        // "default_search_quote" — they never match, so this entry only ever carries
        // the default_search fallback. Mirrors upstream behavior; confirm before changing.
        if (!analyzers.containsKey("default_search_quoted")) {
            analyzers.put("default_search_quoted", analyzers.get("default_search"));
        }

        NamedAnalyzer defaultAnalyzer = analyzers.get("default");
        if (defaultAnalyzer == null) {
            throw new IllegalArgumentException("no default analyzer configured");
        }
        if (analyzers.containsKey("default_index")) {
            throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use [index.analysis.analyzer.default] instead for index [" + index.getName() + "]");
        }
        NamedAnalyzer defaultSearchAnalyzer = analyzers.getOrDefault("default_search", defaultAnalyzer);
        NamedAnalyzer defaultSearchQuoteAnalyzer = analyzers.getOrDefault("default_search_quote", defaultSearchAnalyzer);

        for (Map.Entry<String, NamedAnalyzer> analyzer : analyzers.entrySet()) {
            if (analyzer.getKey().startsWith("_")) {
                throw new IllegalArgumentException("analyzer name must not start with '_'. got \"" + analyzer.getKey() + "\"");
            }
        }
        return new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultSearchAnalyzer, defaultSearchQuoteAnalyzer, unmodifiableMap(analyzers), unmodifiableMap(normalizers));
    }

    // Builds a single NamedAnalyzer from its provider and registers it,
    // rejecting duplicates and the removed per-analyzer alias setting.
    private void processAnalyzerFactory(IndexSettings indexSettings,
                                        String name,
                                        AnalyzerProvider<?> analyzerFactory,
                                        Map<String, NamedAnalyzer> analyzers, Map<String, TokenFilterFactory> tokenFilters,
                                        Map<String, CharFilterFactory> charFilters, Map<String, TokenizerFactory> tokenizers) {
        /*
         * Lucene defaults positionIncrementGap to 0 in all analyzers but
         * Elasticsearch defaults them to 0 only before version 2.0
         * and 100 afterwards so we override the positionIncrementGap if it
         * doesn't match here.
         */
        int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
        if (analyzerFactory instanceof CustomAnalyzerProvider) {
            ((CustomAnalyzerProvider) analyzerFactory).build(tokenizers, charFilters, tokenFilters);
            /*
             * Custom analyzers already default to the correct, version
             * dependent positionIncrementGap and the user is able to
             * configure the positionIncrementGap directly on the analyzer so
             * we disable overriding the positionIncrementGap to preserve the
             * user's setting.
             */
            overridePositionIncrementGap = Integer.MIN_VALUE;
        }
        Analyzer analyzerF = analyzerFactory.get();
        if (analyzerF == null) {
            throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer");
        }
        NamedAnalyzer analyzer;
        if (analyzerF instanceof NamedAnalyzer) {
            // if we got a named analyzer back, use it...
            analyzer = (NamedAnalyzer) analyzerF;
            if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) {
                // unless the positionIncrementGap needs to be overridden
                analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap);
            }
        } else {
            analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap);
        }
        if (analyzers.containsKey(name)) {
            throw new IllegalStateException("already registered analyzer with name: " + name);
        }
        analyzers.put(name, analyzer);
        // TODO: remove alias support completely when we no longer support pre 5.0 indices
        final String analyzerAliasKey = "index.analysis.analyzer." + analyzerFactory.name() + ".alias";
        if (indexSettings.getSettings().get(analyzerAliasKey) != null) {
            throw new IllegalArgumentException("setting [" + analyzerAliasKey + "] is not supported");
        }
    }

    // Builds a single normalizer (keyword tokenizer + configured filters) and
    // registers it, rejecting duplicate names.
    private void processNormalizerFactory(
            String name,
            AnalyzerProvider<?> normalizerFactory,
            Map<String, NamedAnalyzer> normalizers,
            TokenizerFactory keywordTokenizerFactory,
            Map<String, TokenFilterFactory> tokenFilters,
            Map<String, CharFilterFactory> charFilters) {
        if (normalizerFactory instanceof CustomNormalizerProvider) {
            ((CustomNormalizerProvider) normalizerFactory).build(keywordTokenizerFactory, charFilters, tokenFilters);
        }
        Analyzer normalizerF = normalizerFactory.get();
        if (normalizerF == null) {
            throw new IllegalArgumentException("normalizer [" + normalizerFactory.name() + "] created null normalizer");
        }
        NamedAnalyzer normalizer = new NamedAnalyzer(name, normalizerFactory.scope(), normalizerF);
        if (normalizers.containsKey(name)) {
            throw new IllegalStateException("already registered analyzer with name: " + name);
        }
        normalizers.put(name, normalizer);
    }
}
package org.g4studio.core.orm.xibatis.sqlmap.engine.config;

import java.sql.ResultSet;
import java.util.Arrays;
import java.util.List;

import org.g4studio.core.orm.xibatis.common.beans.Probe;
import org.g4studio.core.orm.xibatis.common.beans.ProbeFactory;
import org.g4studio.core.orm.xibatis.common.resources.Resources;
import org.g4studio.core.orm.xibatis.sqlmap.client.SqlMapException;
import org.g4studio.core.orm.xibatis.sqlmap.engine.cache.CacheModel;
import org.g4studio.core.orm.xibatis.sqlmap.engine.impl.SqlMapClientImpl;
import org.g4studio.core.orm.xibatis.sqlmap.engine.impl.SqlMapExecutorDelegate;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.parameter.InlineParameterMapParser;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.parameter.ParameterMap;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.result.AutoResultMap;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.result.ResultMap;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.sql.Sql;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.sql.SqlText;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.sql.dynamic.DynamicSql;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.sql.simple.SimpleDynamicSql;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.sql.stat.StaticSql;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.statement.CachingStatement;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.statement.InsertStatement;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.statement.MappedStatement;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.statement.SelectKeyStatement;
import org.g4studio.core.orm.xibatis.sqlmap.engine.scope.ErrorContext;
import org.g4studio.core.orm.xibatis.sqlmap.engine.type.TypeHandlerFactory;

/**
 * Configures a single mapped statement during SqlMap parsing: resolves its result/parameter maps,
 * processes the SQL (including inline parameter maps), optionally wraps the statement in a caching
 * decorator, and registers it with the executor delegate. Also supports attaching a
 * {@code <selectKey>} statement to an insert.
 */
public class MappedStatementConfig {

    /** Reflection helper used to derive the selectKey result class from the key property's setter. */
    private static final Probe PROBE = ProbeFactory.getProbe();

    /** Parser for {@code #property#}-style inline parameter maps embedded in the SQL text. */
    private static final InlineParameterMapParser PARAM_PARSER = new InlineParameterMapParser();

    private ErrorContext errorContext;
    private SqlMapClientImpl client;
    private TypeHandlerFactory typeHandlerFactory;
    // The statement as registered with the delegate; may be a CachingStatement wrapper.
    private MappedStatement mappedStatement;
    // The unwrapped statement; selectKey configuration always applies to this one.
    private MappedStatement rootStatement;

    /**
     * Builds and registers a mapped statement.
     *
     * @param config                   shared parsing state (error context, client, type handlers)
     * @param id                       statement id
     * @param statement                the statement instance being configured (mutated in place)
     * @param processor                source of the (possibly dynamic) SQL
     * @param parameterMapName         explicit parameter map name, or null
     * @param parameterClass           parameter class used when no parameter map is given
     * @param resultMapName            explicit result map name, or null
     * @param additionalResultMapNames extra result maps for multi-result statements, or null
     * @param resultClass              result class for automatic result mapping, or null
     * @param additionalResultClasses  extra result classes for multi-result statements, or null
     * @param cacheModelName           cache model to wrap the statement with, or null/empty
     * @param resultSetType            "FORWARD_ONLY" | "SCROLL_INSENSITIVE" | "SCROLL_SENSITIVE" | null
     * @param fetchSize                JDBC fetch size hint, or null
     * @param allowRemapping           whether auto result maps may remap per row
     * @param timeout                  per-statement timeout; overrides the default when non-null
     * @param defaultStatementTimeout  global default timeout (may be null)
     * @param xmlResultName            root element name for XML results, or null
     */
    MappedStatementConfig(SqlMapConfiguration config, String id, MappedStatement statement, SqlSource processor,
            String parameterMapName, Class parameterClass, String resultMapName, String[] additionalResultMapNames,
            Class resultClass, Class[] additionalResultClasses, String cacheModelName, String resultSetType,
            Integer fetchSize, boolean allowRemapping, Integer timeout, Integer defaultStatementTimeout,
            String xmlResultName) {
        this.errorContext = config.getErrorContext();
        this.client = config.getClient();
        SqlMapExecutorDelegate delegate = client.getDelegate();
        this.typeHandlerFactory = config.getTypeHandlerFactory();
        errorContext.setActivity("parsing a mapped statement");
        errorContext.setObjectId(id + " statement");
        errorContext.setMoreInfo("Check the result map name.");
        if (resultMapName != null) {
            statement.setResultMap(client.getDelegate().getResultMap(resultMapName));
            if (additionalResultMapNames != null) {
                for (int i = 0; i < additionalResultMapNames.length; i++) {
                    statement.addResultMap(client.getDelegate().getResultMap(additionalResultMapNames[i]));
                }
            }
        }
        errorContext.setMoreInfo("Check the parameter map name.");
        if (parameterMapName != null) {
            statement.setParameterMap(client.getDelegate().getParameterMap(parameterMapName));
        }
        statement.setId(id);
        statement.setResource(errorContext.getResource());
        if (resultSetType != null) {
            // Explicit boxing via new Integer(...) kept: the file uses raw types throughout,
            // suggesting a pre-Java-5 source level where Integer.valueOf(int)/autoboxing is unavailable.
            if ("FORWARD_ONLY".equals(resultSetType)) {
                statement.setResultSetType(new Integer(ResultSet.TYPE_FORWARD_ONLY));
            } else if ("SCROLL_INSENSITIVE".equals(resultSetType)) {
                statement.setResultSetType(new Integer(ResultSet.TYPE_SCROLL_INSENSITIVE));
            } else if ("SCROLL_SENSITIVE".equals(resultSetType)) {
                statement.setResultSetType(new Integer(ResultSet.TYPE_SCROLL_SENSITIVE));
            }
        }
        if (fetchSize != null) {
            statement.setFetchSize(fetchSize);
        }
        // Set parameter class either from the attribute or from the map (the map wins, to keep them consistent).
        ParameterMap parameterMap = statement.getParameterMap();
        if (parameterMap == null) {
            statement.setParameterClass(parameterClass);
        } else {
            statement.setParameterClass(parameterMap.getParameterClass());
        }
        // Process the SQL statement, including inline parameter maps.
        errorContext.setMoreInfo("Check the SQL statement.");
        Sql sql = processor.getSql();
        setSqlForStatement(statement, sql);
        // Set up either a null result map or automatic result mapping.
        ResultMap resultMap = (ResultMap) statement.getResultMap();
        if (resultMap == null && resultClass == null) {
            statement.setResultMap(null);
        } else if (resultMap == null) {
            resultMap = buildAutoResultMap(allowRemapping, statement, resultClass, xmlResultName);
            statement.setResultMap(resultMap);
            if (additionalResultClasses != null) {
                for (int i = 0; i < additionalResultClasses.length; i++) {
                    statement.addResultMap(
                            buildAutoResultMap(allowRemapping, statement, additionalResultClasses[i], xmlResultName));
                }
            }
        }
        // Default first, then override with the per-statement value if one was given.
        statement.setTimeout(defaultStatementTimeout);
        if (timeout != null) {
            try {
                statement.setTimeout(timeout);
            } catch (NumberFormatException e) {
                // NOTE(review): setTimeout(Integer) cannot throw NumberFormatException, so this catch
                // is unreachable — it looks like a leftover from when the timeout was parsed from a
                // String. Kept for byte-compatibility; safe to remove after confirming.
                throw new SqlMapException("Specified timeout value for statement " + statement.getId()
                        + " is not a valid integer");
            }
        }
        errorContext.setMoreInfo(null);
        errorContext.setObjectId(null);
        statement.setSqlMapClient(client);
        // Wrap in a caching decorator only when a cache model is named AND caching is globally enabled.
        if (cacheModelName != null && cacheModelName.length() > 0 && client.getDelegate().isCacheModelsEnabled()) {
            CacheModel cacheModel = client.getDelegate().getCacheModel(cacheModelName);
            mappedStatement = new CachingStatement(statement, cacheModel);
        } else {
            mappedStatement = statement;
        }
        rootStatement = statement;
        delegate.addMappedStatement(mappedStatement);
    }

    /**
     * Attaches a {@code <selectKey>} statement to the configured insert statement.
     *
     * @param processor       source of the selectKey SQL
     * @param resultClassName alias or class name of the generated key's type; when null the type is
     *                        derived from the key property's setter on the insert's parameter class
     * @param keyPropName     property on the parameter object that receives the generated key
     * @param runAfterSQL     default ordering (run after the insert) when {@code type} is null
     * @param type            "pre" or "post"; when non-null it overrides {@code runAfterSQL}
     * @throws SqlMapException if the root statement is not an insert or the result class cannot be loaded
     */
    public void setSelectKeyStatement(SqlSource processor, String resultClassName, String keyPropName,
            boolean runAfterSQL, String type) {
        if (rootStatement instanceof InsertStatement) {
            InsertStatement insertStatement = ((InsertStatement) rootStatement);
            Class parameterClass = insertStatement.getParameterClass();
            errorContext.setActivity("parsing a select key");
            SelectKeyStatement selectKeyStatement = new SelectKeyStatement();
            resultClassName = typeHandlerFactory.resolveAlias(resultClassName);
            Class resultClass = null;

            // Get parameter and result maps.
            selectKeyStatement.setSqlMapClient(client);
            selectKeyStatement.setId(insertStatement.getId() + "-SelectKey");
            selectKeyStatement.setResource(errorContext.getResource());
            selectKeyStatement.setKeyProperty(keyPropName);
            selectKeyStatement.setRunAfterSQL(runAfterSQL);
            // The explicit type ("pre"/"post") attribute overrides the default ordering.
            if (type != null) {
                selectKeyStatement.setRunAfterSQL("post".equals(type));
            }
            try {
                if (resultClassName != null) {
                    errorContext.setMoreInfo("Check the select key result class.");
                    resultClass = Resources.classForName(resultClassName);
                } else {
                    // No explicit class: infer it from the setter of the key property, if possible.
                    if (keyPropName != null && parameterClass != null) {
                        resultClass = PROBE.getPropertyTypeForSetter(parameterClass,
                                selectKeyStatement.getKeyProperty());
                    }
                }
            } catch (ClassNotFoundException e) {
                throw new SqlMapException("Error. Could not set result class. Cause: " + e, e);
            }
            if (resultClass == null) {
                resultClass = Object.class;
            }

            // Process the SQL statement, including inline parameter maps.
            errorContext.setMoreInfo("Check the select key SQL statement.");
            Sql sql = processor.getSql();
            setSqlForStatement(selectKeyStatement, sql);
            ResultMap resultMap;
            resultMap = new AutoResultMap(client.getDelegate(), false);
            resultMap.setId(selectKeyStatement.getId() + "-AutoResultMap");
            resultMap.setResultClass(resultClass);
            resultMap.setResource(selectKeyStatement.getResource());
            selectKeyStatement.setResultMap(resultMap);
            errorContext.setMoreInfo(null);
            insertStatement.setSelectKeyStatement(selectKeyStatement);
        } else {
            // Fixed typo in the error message ("cant" -> "can't").
            throw new SqlMapException("You can't set a select key statement on statement named "
                    + rootStatement.getId() + " because it is not an InsertStatement.");
        }
    }

    /**
     * Installs {@code sql} on the statement. Dynamic SQL is stored as-is (inline parameters are
     * resolved at execution time); static SQL has its inline parameter map parsed now.
     */
    private void setSqlForStatement(MappedStatement statement, Sql sql) {
        if (sql instanceof DynamicSql) {
            statement.setSql(sql);
        } else {
            applyInlineParameterMap(statement, sql.getSql(null, null));
        }
    }

    /**
     * Parses {@code #property#} inline parameters out of static SQL, synthesizing a ParameterMap
     * when the statement has none, and installs the resulting Sql (simple-dynamic when the text
     * still contains $substitutions$, plain static otherwise).
     */
    private void applyInlineParameterMap(MappedStatement statement, String sqlStatement) {
        String newSql = sqlStatement;
        errorContext.setActivity("building an inline parameter map");
        ParameterMap parameterMap = statement.getParameterMap();
        errorContext.setMoreInfo("Check the inline parameters.");
        if (parameterMap == null) {
            ParameterMap map;
            map = new ParameterMap(client.getDelegate());
            map.setId(statement.getId() + "-InlineParameterMap");
            map.setParameterClass(statement.getParameterClass());
            map.setResource(statement.getResource());
            statement.setParameterMap(map);
            // Parsing both strips the inline syntax from the SQL and yields the parameter mappings.
            SqlText sqlText = PARAM_PARSER.parseInlineParameterMap(client.getDelegate().getTypeHandlerFactory(),
                    newSql, statement.getParameterClass());
            newSql = sqlText.getText();
            List mappingList = Arrays.asList(sqlText.getParameterMappings());
            map.setParameterMappingList(mappingList);
        }
        Sql sql;
        if (SimpleDynamicSql.isSimpleDynamicSql(newSql)) {
            sql = new SimpleDynamicSql(client.getDelegate(), newSql);
        } else {
            sql = new StaticSql(newSql);
        }
        statement.setSql(sql);
    }

    /**
     * Builds an automatic (reflection-driven) result map for the given result class.
     */
    private ResultMap buildAutoResultMap(boolean allowRemapping, MappedStatement statement, Class firstResultClass,
            String xmlResultName) {
        ResultMap resultMap;
        resultMap = new AutoResultMap(client.getDelegate(), allowRemapping);
        resultMap.setId(statement.getId() + "-AutoResultMap");
        resultMap.setResultClass(firstResultClass);
        resultMap.setXmlName(xmlResultName);
        resultMap.setResource(statement.getResource());
        return resultMap;
    }

    /**
     * @return the registered statement — a CachingStatement wrapper when a cache model was applied,
     *         otherwise the root statement itself.
     */
    public MappedStatement getMappedStatement() {
        return mappedStatement;
    }
}
/*<license>
Copyright 2004 - $Date$ by PeopleWare n.v..

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/

package org.ppwcode.vernacular.semantics_I.bean;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import org.ppwcode.vernacular.semantics_I.bean.AbstractSemanticBean;
import org.ppwcode.vernacular.semantics_I.bean.SemanticBean;

/**
 * JUnit 4 contract tests for {@link AbstractSemanticBean}: equals/hashCode/toString contracts,
 * clone rejection, and the {@code propertyNamesForToString} introspection, exercised against a
 * stub with four bean properties and a stub with none. Contract assertions are delegated to
 * {@code SemanticBeanContract} (defined elsewhere in this package).
 */
public class AbstractSemanticBeanTest {

    /**
     * Stub with four JavaBean properties (String, Date, Set, int[]) exercising reference,
     * mutable-object, collection and array property kinds. Fields use the project's
     * {@code $name} convention for private state.
     */
    public static class AbstractSemanticBeanSTUB extends AbstractSemanticBean {

        public AbstractSemanticBeanSTUB(String property1, Date property2, Set<String> property3, int[] property4) {
            $property1 = property1;
            $property2 = property2;
            $property3 = property3;
            $property4 = property4;
        }

        public final String getProperty1() {
            return $property1;
        }

        public final void setProperty1(String property1) {
            $property1 = property1;
        }

        private String $property1;

        public final Date getProperty2() {
            return $property2;
        }

        public final void setProperty2(Date property2) {
            $property2 = property2;
        }

        private Date $property2;

        public final Set<String> getProperty3() {
            return $property3;
        }

        public final void setProperty3(Set<String> property3) {
            $property3 = property3;
        }

        private Set<String> $property3;

        public final int[] getProperty4() {
            return $property4;
        }

        public final void setProperty4(int[] property4) {
            $property4 = property4;
        }

        private int[] $property4;
    }

    /** Stub with no bean properties at all, for the empty-introspection case. */
    public static class AbstractSemanticBeanNOPROPERTIES extends AbstractSemanticBean {
        // NOP
    }

    // Fixture: a spread of subjects covering null, partially-set and fully-set property combinations.
    private List<AbstractSemanticBean> subjects;

    @Before
    public void setUp() throws Exception {
        subjects = new ArrayList<AbstractSemanticBean>();
        AbstractSemanticBeanSTUB subject = new AbstractSemanticBeanSTUB(null, null, null, null);
        subjects.add(subject);
        Set<String> stringSet = new HashSet<String>();
        stringSet.add("string 1");
        stringSet.add("string 2");
        // Deliberately includes a null element to stress null-handling in the contract checks.
        stringSet.add(null);
        int[] intArray = {5, 6, 4, 8};
        subject = new AbstractSemanticBeanSTUB("PROPERTY 1", null, null, null);
        subjects.add(subject);
        subject = new AbstractSemanticBeanSTUB(null, new Date(), null, null);
        subjects.add(subject);
        subject = new AbstractSemanticBeanSTUB(null, null, stringSet, null);
        subjects.add(subject);
        subject = new AbstractSemanticBeanSTUB(null, null, null, intArray);
        subjects.add(subject);
        subject = new AbstractSemanticBeanSTUB("PROPERTY 1", new Date(), stringSet, intArray);
        subjects.add(subject);
    }

    @After
    public void tearDown() throws Exception {
        subjects = null;
    }

    /** Checks the type invariants; this class adds none of its own beyond SemanticBean's. */
    public static void assertInvariants(SemanticBean subject) {
        // no own invariants
        SemanticBeanContract.assertInvariants(subject);
    }

    /** Executes equals and validates it against the SemanticBean equals contract. */
    public static void testEquals(AbstractSemanticBean subject, Object other) {
        // execute
        boolean result = subject.equals(other);
        // validate
        SemanticBeanContract.contractEquals(subject, other, result);
        assertInvariants(subject);
    }

    @Test
    public void testEqualsObject() {
        for (AbstractSemanticBean subject : subjects) {
            testEquals(subject, null);
            testEquals(subject, subject);
            testEquals(subject, new Object());
            testEquals(subject, new AbstractSemanticBeanSTUB("hfhfh", null, null, null));
        }
    }

    /** Executes hashCode and validates it against the contract. */
    public static void testHashCode(AbstractSemanticBean subject) {
        // execute
        int result = subject.hashCode();
        // validate
        SemanticBeanContract.contractHashCode(subject, result);
        assertInvariants(subject);
    }

    @Test
    public void testHashCode() {
        for (AbstractSemanticBean subject : subjects) {
            testHashCode(subject);
        }
    }

    /** Executes toString and validates it against the contract. */
    public static void testToString(AbstractSemanticBean subject) {
        // execute
        String result = subject.toString();
        // validate
        SemanticBeanContract.contractToString(subject, result);
        assertInvariants(subject);
    }

    @Test
    public void testToString() {
        for (AbstractSemanticBean subject : subjects) {
            testToString(subject);
        }
    }

    /** Semantic beans must not be cloneable: clone() is expected to throw. */
    @Test
    public void testClone() {
        for (AbstractSemanticBean subject : subjects) {
            try {
                subject.clone();
                fail();
            } catch (CloneNotSupportedException cnsExc) {
                // expected
                assertInvariants(subject);
            }
        }
    }

    /** Asserts propertyNamesForToString returns exactly {@code nrofProperties} names. */
    public static Set<String> testPropertyNamesForToStringA(AbstractSemanticBean subject, int nrofProperties) {
        Set<String> result = subject.propertyNamesForToString();
        assertNotNull(result);
        assertEquals(nrofProperties, result.size());
        assertInvariants(subject);
        return result;
    }

    /** As A, and additionally expects "property1" and "property2" among the names. */
    public static Set<String> testPropertyNamesForToStringB(AbstractSemanticBean subject, int nrOfProperties) {
        Set<String> result = testPropertyNamesForToStringA(subject, nrOfProperties);
        assertTrue(result.contains("property1"));
        assertTrue(result.contains("property2"));
        return result;
    }

    @Test
    public void testPropertyNamesForToString2() {
        AbstractSemanticBean subject = new AbstractSemanticBeanNOPROPERTIES();
        testPropertyNamesForToStringA(subject, 0);
    }

    @Test
    public void testPropertyNamesForToString1() {
        // NOTE(review): expects exactly 2 names although the stub declares 4 properties —
        // presumably propertyNamesForToString excludes collection/array-typed properties;
        // confirm against AbstractSemanticBean's implementation.
        for (AbstractSemanticBean subject : subjects) {
            testPropertyNamesForToStringB(subject, 2);
        }
    }

    /** Only checks invariants hold; no observable result to validate from here. */
    public static void testCollectionString(AbstractSemanticBean subject) {
        assertInvariants(subject);
    }

    @Test
    public void testCollectionString() {
        for (AbstractSemanticBean subject : subjects) {
            testCollectionString(subject);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 */
package org.apache.polygene.library.alarm;

import java.time.Instant;
import java.util.List;
import java.util.Locale;
import org.apache.polygene.api.identity.Identity;
import org.apache.polygene.test.AbstractPolygeneTest;
import org.junit.Test;
import org.apache.polygene.api.entity.EntityBuilder;
import org.apache.polygene.api.mixin.Mixins;
import org.apache.polygene.api.service.ServiceComposite;
import org.apache.polygene.api.unitofwork.UnitOfWork;
import org.apache.polygene.api.value.ValueBuilder;
import org.apache.polygene.bootstrap.AssemblyException;
import org.apache.polygene.bootstrap.ModuleAssembly;
import org.apache.polygene.test.EntityTestAssembler;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests the "simple" alarm model: a two-state machine (normal <-> activated) that supports only
 * the activate and deactivate triggers. Covers trigger evaluation, state transitions, condition
 * updates and condition/trigger computation.
 */
public class SimpleAlarmModelTest
    extends AbstractPolygeneTest
{
    @SuppressWarnings( { "unchecked" } )
    @Override
    public void assemble( ModuleAssembly module )
        throws AssemblyException
    {
        // Wire up the model under test plus the alarm system, an in-memory entity store,
        // and the entity/value types the alarm library needs.
        module.services( TestAlarmModel.class );
        module.services( AlarmSystemService.class );
        new EntityTestAssembler().assemble( module );
        module.entities( AlarmPointEntity.class );
        module.values( AlarmEvent.class );
        module.values( AlarmCategory.class );
        module.values( AlarmStatus.class );
        module.forMixin( AlarmHistory.class ).declareDefaults().maxSize().set( 10 );
    }

    /** Service composite binding the simple model mixin so it can be assembled as a service. */
    @Mixins( SimpleAlarmModelService.SimpleAlarmModelMixin.class )
    public interface TestAlarmModel
        extends AlarmModel, ServiceComposite
    {
    }

    @Override
    public void setUp()
        throws Exception
    {
        super.setUp();
        // Every test runs inside a fresh unit of work opened here and discarded in tearDown.
        unitOfWorkFactory.newUnitOfWork();
    }

    @Override
    public void tearDown()
        throws Exception
    {
        UnitOfWork uow = unitOfWorkFactory.currentUnitOfWork();
        if( uow != null )
        {
            uow.discard();
        }
        super.tearDown();
    }

    @Test
    public void testName()
        throws Exception
    {
        SimpleAlarmModelService.SimpleAlarmModelMixin spi = new SimpleAlarmModelService.SimpleAlarmModelMixin();
        assertEquals( "org.apache.polygene.library.alarm.model.simple", spi.modelName() );
    }

    @Test
    public void testDescription()
        throws Exception
    {
        // The description must mention all four key terms, both in the default and English locales.
        SimpleAlarmModelService.SimpleAlarmModelMixin spi = new SimpleAlarmModelService.SimpleAlarmModelMixin();
        boolean test1 = spi.modelDescription().toLowerCase().contains( "normal" );
        boolean test2 = spi.modelDescription().toLowerCase().contains( "activated" );
        boolean test3 = spi.modelDescription().toLowerCase().contains( "activation" );
        boolean test4 = spi.modelDescription().toLowerCase().contains( "deactivation" );
        assertTrue( test1 && test2 && test3 && test4 );
        Locale english = new Locale( "en" );
        test1 = spi.modelDescription( english ).toLowerCase().contains( "normal" );
        test2 = spi.modelDescription( english ).toLowerCase().contains( "activated" );
        test3 = spi.modelDescription( english ).toLowerCase().contains( "activation" );
        test4 = spi.modelDescription( english ).toLowerCase().contains( "deactivation" );
        assertTrue( test1 && test2 && test3 && test4 );
    }

    @Test
    public void testTriggers()
        throws Exception
    {
        AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get();
        AlarmPoint underTest = createAlarm( "Test AlarmPoint" );
        List<String> triggers = provider.alarmTriggers();
        // The simple model supports exactly two triggers.
        assertEquals( 2, triggers.size() );
        // Accumulate a bitmask per recognized trigger; 3 == activate(1) | deactivate(2) only.
        int result = 0;
        for( String trigger : triggers )
        {
            if( AlarmPoint.TRIGGER_ACTIVATE.equals( trigger ) )
            {
                result |= 1;
            }
            if( AlarmPoint.TRIGGER_DEACTIVATE.equals( trigger ) )
            {
                result |= 2;
            }
            if( AlarmPoint.TRIGGER_ACKNOWLEDGE.equals( trigger ) )
            {
                result |= 4;
            }
            if( AlarmPoint.TRIGGER_BLOCK.equals( trigger ) )
            {
                result |= 8;
            }
            if( AlarmPoint.TRIGGER_UNBLOCK.equals( trigger ) )
            {
                result |= 16;
            }
            if( AlarmPoint.TRIGGER_ENABLE.equals( trigger ) )
            {
                result |= 32;
            }
            if( AlarmPoint.TRIGGER_DISABLE.equals( trigger ) )
            {
                result |= 64;
            }
        }
        assertEquals( 3, result );
        assertEquals( AlarmPoint.STATUS_NORMAL, underTest.currentStatus().name(null) );
    }

    @Test
    public void testStateChangeFromNormal()
        throws Exception
    {
        AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get();
        // From normal: activate produces an activation event...
        AlarmPoint alarm = createAlarm( "Another 1" );
        AlarmEvent event1 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE );
        assertEquals( AlarmPoint.EVENT_ACTIVATION, event1.systemName().get() );
        // ...deactivate is a no-op (no event)...
        alarm = createAlarm( "Another 2" );
        AlarmEvent event2 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE );
        assertNull( event2 );
        // ...and acknowledge is not a valid trigger for this model at all.
        try
        {
            alarm = createAlarm( "Another 3" );
            AlarmEvent event3 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACKNOWLEDGE );
            assertNull( event3 );
            fail( "[Acknowledge] trigger should not be allowed on this model." );
        }
        catch( IllegalArgumentException e )
        {
            // expected
        }
    }

    @Test
    public void testStateChangeFromActivated()
        throws Exception
    {
        AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get();
        // From activated: activate is a no-op, deactivate produces a deactivation event.
        AlarmPoint alarm = createAlarm( "Another 1" );
        alarm.activate();
        AlarmEvent event1 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE );
        assertNull( event1 );
        alarm = createAlarm( "Another 2" );
        alarm.activate();
        AlarmEvent event2 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE );
        assertEquals( AlarmPoint.EVENT_DEACTIVATION, event2.systemName().get() );
    }

    @Test
    public void testIllegalTrigger()
        throws Exception
    {
        try
        {
            AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get();
            AlarmPoint underTest = createAlarm( "Test AlarmPoint" );
            provider.evaluate( underTest, "my-trigger" );
            fail( "IllegalArgumentException not thrown." );
        }
        catch( IllegalArgumentException e )
        {
            // Expected.
        }
    }

    @Test
    public void testNormalToActivated()
        throws Exception
    {
        AlarmPoint underTest = createAlarm( "Test AlarmPoint" );
        underTest.activate();
        // The last history event must record the normal -> activated transition for this alarm.
        AlarmEvent event = underTest.history().lastEvent();
        AlarmStatus oldstate = event.oldStatus().get();
        assertEquals( AlarmPoint.STATUS_NORMAL, oldstate.name(null) );
        AlarmStatus newstate = event.newStatus().get();
        assertEquals( AlarmPoint.STATUS_ACTIVATED, newstate.name(null) );
        AlarmPoint eventalarm = getAlarm( event.identity().get() );
        assertEquals( underTest, eventalarm );
    }

    @Test
    public void testActivatedToNormal()
        throws Exception
    {
        AlarmPoint underTest = createAlarm( "Test AlarmPoint" );
        underTest.activate();
        underTest.deactivate();
        // The last history event must record the activated -> normal transition for this alarm.
        AlarmEvent event = underTest.history().lastEvent();
        AlarmStatus oldstate = event.oldStatus().get();
        assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name(null) );
        AlarmStatus newstate = event.newStatus().get();
        assertEquals( AlarmPoint.STATUS_NORMAL, newstate.name(null) );
        AlarmPoint eventalarm = getAlarm( event.identity().get() );
        assertEquals( underTest, eventalarm );
    }

    @Test
    public void testConditionChanges1()
        throws Exception
    {
        // condition=false on a normal alarm changes nothing, so no event is generated.
        AlarmPoint underTest = createAlarm( "Test AlarmPoint" );
        underTest.updateCondition( false );
        AlarmEvent event = underTest.history().lastEvent();
        assertNull( "Generated an event but should have not.", event );
    }

    @Test
    public void testConditionChanges2()
        throws Exception
    {
        // condition=true on a normal alarm activates it.
        AlarmPoint underTest = createAlarm( "Test AlarmPoint" );
        underTest.updateCondition( true );
        AlarmEvent event = underTest.history().lastEvent();
        AlarmStatus oldstate = event.oldStatus().get();
        assertEquals( AlarmPoint.STATUS_NORMAL, oldstate.name(null) );
        AlarmStatus newstate = event.newStatus().get();
        assertEquals( AlarmPoint.STATUS_ACTIVATED, newstate.name(null) );
        AlarmPoint eventalarm = getAlarm( event.identity().get() );
        assertEquals( underTest, eventalarm );
    }

    @Test
    public void testConditionChanges3()
        throws Exception
    {
        // condition true then false round-trips the alarm back to normal.
        AlarmPoint underTest = createAlarm( "Test AlarmPoint" );
        underTest.updateCondition( true );
        underTest.updateCondition( false );
        AlarmEvent event = underTest.history().lastEvent();
        AlarmStatus oldstate = event.oldStatus().get();
        assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name(null) );
        AlarmStatus newstate = event.newStatus().get();
        assertEquals( AlarmPoint.STATUS_NORMAL, newstate.name(null) );
        AlarmPoint eventalarm = getAlarm( event.identity().get() );
        assertEquals( underTest, eventalarm );
    }

    @Test
    public void testComputeCondition()
        throws Exception
    {
        // Condition maps directly onto status: normal=false, activated=true.
        AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get();
        AlarmStatus s1 = createStatus( AlarmPoint.STATUS_NORMAL );
        assertFalse( provider.computeCondition( s1 ) );
        AlarmStatus s2 = createStatus( AlarmPoint.STATUS_ACTIVATED );
        assertTrue( provider.computeCondition( s2 ) );
    }

    @Test
    public void testComputeTrigger()
        throws Exception
    {
        AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get();
        AlarmStatus s1 = createStatus( AlarmPoint.STATUS_NORMAL );
        AlarmStatus s2 = createStatus( AlarmPoint.STATUS_ACTIVATED );
        String trigger1 = provider.computeTrigger( s1, true );
        String trigger2 = provider.computeTrigger( s2, true );
        String trigger5 = provider.computeTrigger( s1, false );
        String trigger6 = provider.computeTrigger( s2, false );
        // Only genuine transitions yield a trigger; same-state condition changes yield null.
        assertEquals( AlarmPoint.TRIGGER_ACTIVATE, trigger1 );
        assertEquals( null, trigger2 );
        assertEquals( null, trigger5 );
        assertEquals( AlarmPoint.TRIGGER_DEACTIVATE, trigger6 );
    }

    /** Creates an AlarmPoint entity (in the current unit of work) starting in the normal status. */
    private AlarmPoint createAlarm( String name )
    {
        UnitOfWork uow = unitOfWorkFactory.currentUnitOfWork();
        EntityBuilder<AlarmPoint> builder = uow.newEntityBuilder( AlarmPoint.class );
        builder.instance().category().set( createCategory( "SimpleModelTest" ) );
        AlarmPoint.AlarmState state = builder.instanceFor( AlarmPoint.AlarmState.class );
        state.currentStatus().set( createStatus( AlarmPoint.STATUS_NORMAL ) );
        state.description().set( "Test Description" );
        state.systemName().set( name );
        return builder.newInstance();
    }

    /** Builds an AlarmCategory value with the given name. */
    private AlarmCategory createCategory( String name )
    {
        ValueBuilder<AlarmCategory> builder = valueBuilderFactory.newValueBuilder( AlarmCategory.class );
        builder.prototype().name().set( name );
        return builder.newInstance();
    }

    /** Resolves an AlarmPoint entity by identity in the current unit of work. */
    private AlarmPoint getAlarm( Identity identity )
    {
        UnitOfWork uow = unitOfWorkFactory.currentUnitOfWork();
        return uow.get( AlarmPoint.class, identity );
    }

    /** Builds an AlarmStatus value with the given status name, stamped with the current time. */
    private AlarmStatus createStatus( String status )
    {
        ValueBuilder<AlarmStatus> builder = valueBuilderFactory.newValueBuilder( AlarmStatus.class );
        AlarmStatus.State statePrototype = builder.prototypeFor( AlarmStatus.State.class );
        statePrototype.name().set( status );
        statePrototype.creationDate().set( Instant.now() );
        return builder.newInstance();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import java.nio.ByteBuffer;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.Nullable;

/**
 * Per-key result of a cache invoke (entry processor) call, in direct-marshallable message form.
 * Carries either a result object or the exception thrown by the entry processor (never both are
 * meaningful at once). The exception is transported as a marshalled byte array ({@code errBytes});
 * {@code err} itself is direct-transient.
 */
public class CacheInvokeDirectResult implements Message {
    /** */
    private static final long serialVersionUID = 0L;

    /** Cache key this result belongs to. */
    private KeyCacheObject key;

    /** Entry processor result (null when the processor threw). */
    @GridToStringInclude
    private CacheObject res;

    /** Exception thrown by the entry processor; not sent directly — see {@link #errBytes}. */
    @GridToStringInclude(sensitive = true)
    @GridDirectTransient
    private Exception err;

    /** Marshalled form of {@link #err}, the field actually written to the wire. */
    private byte[] errBytes;

    /**
     * Required for {@link Message}.
     */
    public CacheInvokeDirectResult() {
        // No-op.
    }

    /**
     * @param key Key.
     * @param res Result.
     */
    public CacheInvokeDirectResult(KeyCacheObject key, CacheObject res) {
        this.key = key;
        this.res = res;
    }

    /**
     * @param key Key.
     * @param err Exception thrown by {@link EntryProcessor#process(MutableEntry, Object...)}.
     */
    public CacheInvokeDirectResult(KeyCacheObject key, Exception err) {
        this.key = key;
        this.err = err;
    }

    /**
     * @return Key.
     */
    public KeyCacheObject key() {
        return key;
    }

    /**
     * @return Result.
     */
    public CacheObject result() {
        return res;
    }

    /**
     * @return Error.
     */
    @Nullable public Exception error() {
        return err;
    }

    /**
     * Prepares key/result/error for the wire: marshals the error into {@link #errBytes}
     * (only once — skipped when already marshalled) and lets key/result marshal themselves.
     *
     * @param ctx Cache context.
     * @throws IgniteCheckedException If failed.
     */
    public void prepareMarshal(GridCacheContext ctx) throws IgniteCheckedException {
        key.prepareMarshal(ctx.cacheObjectContext());

        if (err != null && errBytes == null)
            errBytes = U.marshal(ctx.marshaller(), err);

        if (res != null)
            res.prepareMarshal(ctx.cacheObjectContext());
    }

    /**
     * Inverse of {@link #prepareMarshal}: restores {@link #err} from {@link #errBytes}
     * (only when not already unmarshalled) and finishes key/result unmarshalling.
     *
     * @param ctx Cache context.
     * @param ldr Class loader.
     * @throws IgniteCheckedException If failed.
     */
    public void finishUnmarshal(GridCacheContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        key.finishUnmarshal(ctx.cacheObjectContext(), ldr);

        if (errBytes != null && err == null)
            err = U.unmarshal(ctx.marshaller(), errBytes, U.resolveClassLoader(ldr, ctx.gridConfig()));

        if (res != null)
            res.finishUnmarshal(ctx.cacheObjectContext(), ldr);
    }

    /** {@inheritDoc} */
    @Override public void onAckReceived() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        return 93;
    }

    /** {@inheritDoc} */
    // Intentional switch fall-through: writer.state() tracks which field to resume at when the
    // buffer fills mid-message; each case writes one field then increments the state.
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        switch (writer.state()) {
            case 0:
                if (!writer.writeByteArray("errBytes", errBytes))
                    return false;

                writer.incrementState();

            case 1:
                if (!writer.writeMessage("key", key))
                    return false;

                writer.incrementState();

            case 2:
                if (!writer.writeMessage("res", res))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    // Mirror of writeTo: same field order, same resumable fall-through state machine.
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        switch (reader.state()) {
            case 0:
                errBytes = reader.readByteArray("errBytes");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 1:
                key = reader.readMessage("key");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 2:
                res = reader.readMessage("res");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(CacheInvokeDirectResult.class);
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 3;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(CacheInvokeDirectResult.class, this);
    }
}
package org.ovirt.engine.ui.common.widget.editor;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.ovirt.engine.ui.common.widget.editor.ListModelTypeAheadListBoxEditor.SuggestBoxRenderer;

import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.FocusEvent;
import com.google.gwt.event.dom.client.FocusHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.MouseDownEvent;
import com.google.gwt.event.dom.client.MouseDownHandler;
import com.google.gwt.event.dom.client.MouseUpEvent;
import com.google.gwt.event.dom.client.MouseUpHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Event.NativePreviewEvent;
import com.google.gwt.user.client.Event.NativePreviewHandler;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.MenuBar;
import com.google.gwt.user.client.ui.MenuItem;
import com.google.gwt.user.client.ui.MultiWordSuggestOracle;
import com.google.gwt.user.client.ui.MultiWordSuggestOracle.MultiWordSuggestion;
import com.google.gwt.user.client.ui.SuggestBox;
import com.google.gwt.user.client.ui.SuggestOracle.Suggestion;

/**
 * SuggestBox widget that adapts to UiCommon list model items and looks like a list box. The suggestion content can be
 * rich (html).
 * <p>
 * Accepts any objects as soon as the provided renderer can render them.
 */
public class ListModelTypeAheadListBox<T> extends BaseListModelSuggestBox<T> {

    @UiField(provided = true)
    SuggestBox suggestBox;

    @UiField
    Image dropDownImage;

    @UiField
    FlowPanel mainPanel;

    @UiField
    Style style;

    // Renders model items both as plain replacement strings (text box content) and rich display strings (suggestions).
    private final SuggestBoxRenderer<T> renderer;

    /**
     * This is used to decide whether, when setting the value of the widget to one that doesn't exist among the list of
     * suggested items {@link #acceptableValues}, the new value should be added to it; see usage in
     * {@link #addToValidValuesIfNeeded(Object)}.
     */
    private final boolean autoAddToValidValues;

    // The set of values currently accepted by this widget; getValue() answers null for anything outside it.
    private Collection<T> acceptableValues = new ArrayList<T>();

    // Registration of the native ENTER-swallowing preview handler installed on focus; removed again on blur.
    private HandlerRegistration eventHandler;

    interface Style extends CssResource {
        String enabledMainPanel();

        String disabledMainPanel();
    }

    interface ViewUiBinder extends UiBinder<FlowPanel, ListModelTypeAheadListBox> {
        ViewUiBinder uiBinder = GWT.create(ViewUiBinder.class);
    }

    /**
     * Creates the list box with {@code autoAddToValidValues} enabled.
     *
     * @param renderer renders model items into replacement/display strings
     */
    public ListModelTypeAheadListBox(SuggestBoxRenderer<T> renderer) {
        this(renderer, true);
    }

    /**
     * @param renderer renders model items into replacement/display strings
     * @param autoAddToValidValues whether values set from outside the suggestion list are added to
     *            {@link #acceptableValues}
     */
    public ListModelTypeAheadListBox(SuggestBoxRenderer<T> renderer, boolean autoAddToValidValues) {
        super(new RenderableSuggestOracle<T>(renderer));

        this.renderer = renderer;
        this.autoAddToValidValues = autoAddToValidValues;
        suggestBox = asSuggestBox();

        // this needs to be handled by focus on text box and clicks on drop down image
        setAutoHideEnabled(false);

        initWidget(ViewUiBinder.uiBinder.createAndBindUi(this));
        mainPanel.getElement().addClassName("lmtalb_listbox_pfly_fix"); //$NON-NLS-1$
        registerListeners();
    }

    // Wires up all mouse/focus/value-change listeners; the SuggestBoxFocusHandler is deliberately shared so the
    // mouse handlers can temporarily disable blur processing while a click on the menu/image is in flight.
    private void registerListeners() {
        SuggestBoxFocusHandler handlers = new SuggestBoxFocusHandler();
        suggestBox.getValueBox().addBlurHandler(handlers);
        suggestBox.getValueBox().addFocusHandler(handlers);

        // not listening to focus because it would show the suggestions also after the whole browser
        // gets the focus back (after loosing it) if this was the last element with focus
        suggestBox.getValueBox().addClickHandler(new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                switchSuggestions();
            }
        });

        dropDownImage.addMouseDownHandler(new FocusHandlerEnablingMouseHandlers(handlers));
        dropDownImage.addMouseUpHandler(new FocusHandlerEnablingMouseHandlers(handlers) {
            @Override
            public void onMouseUp(MouseUpEvent event) {
                super.onMouseUp(event);
                switchSuggestions();
            }
        });

        getSuggestionMenu().getParent().addDomHandler(new FocusHandlerEnablingMouseHandlers(handlers),
                MouseDownEvent.getType());
        // no need to do additional switchSuggestions() - it is processed by MenuBar itself
        getSuggestionMenu().getParent().addDomHandler(new FocusHandlerEnablingMouseHandlers(handlers),
                MouseUpEvent.getType());

        asSuggestBox().addValueChangeHandler(new ValueChangeHandler<String>() {
            @Override
            public void onValueChange(ValueChangeEvent<String> event) {
                // in case of other way changed the value (like clicking somewhere else when there is a correct value)
                // hide the suggest box
                if (isSuggestionListShowing()) {
                    hideSuggestions();
                }
            }
        });

        addValueChangeHandler(new ValueChangeHandler<T>() {
            @Override
            public void onValueChange(ValueChangeEvent<T> event) {
                // if the value has been changed using the mouse
                setValue(event.getValue());
            }
        });
    }

    // Toggles the suggestion popup: hides+validates when showing, otherwise shows the full (unfiltered) list and
    // pre-selects the current value. No-op when the widget is disabled.
    private void switchSuggestions() {
        if (!isEnabled()) {
            return;
        }

        if (isSuggestionListShowing()) {
            hideSuggestions();
            adjustSelectedValue();
        } else {
            // show all the suggestions even if there is already something filled
            // otherwise it is not obvious that there are more options
            suggestBox.setText(null);
            suggestBox.showSuggestionList();
            selectInMenu(getValue());
            Scheduler.get().scheduleDeferred(new Scheduler.ScheduledCommand() {
                @Override
                public void execute() {
                    setFocus(true);
                }
            });
        }
    }

    // Highlights the menu item whose replacement string matches toSelect; relies on acceptableValues and the menu
    // items being in the same order, and scrolls the selection into view.
    private void selectInMenu(T toSelect) {
        if (!(getSuggestionMenu() instanceof MenuBar)) {
            // can not select if the it is not a menu bar
            return;
        }

        MenuBar menuBar = (MenuBar) getSuggestionMenu();
        List<MenuItem> items = getItems(menuBar);
        if (items == null) {
            return;
        }

        String selectedReplacementString = renderer.getReplacementString(toSelect);
        if (selectedReplacementString == null) {
            return;
        }

        int selectedItemIndex = -1;
        for (T acceptableValue : acceptableValues) {
            selectedItemIndex++;
            String acceptableValueReplacement = renderer.getReplacementString(acceptableValue);
            if (acceptableValueReplacement != null && acceptableValueReplacement.equals(selectedReplacementString)) {
                if (items.size() > selectedItemIndex) {
                    menuBar.selectItem(items.get(selectedItemIndex));
                    scrollSelectedItemIntoView();
                }
                break;
            }
        }
    }

    // Scrolls the currently selected menu item's parent element into the visible area of the popup.
    protected void scrollSelectedItemIntoView() {
        if (!(getSuggestionMenu() instanceof MenuBar)) {
            // can not select if it is not a menu bar
            return;
        }

        MenuBar menuBar = (MenuBar) getSuggestionMenu();
        MenuItem item = getSelectedItem(menuBar);
        if (item != null) {
            Element toSelect = item.getElement().getParentElement();
            toSelect.scrollIntoView();
        }
    }

    // extremely ugly - there is just no better way how to find the items as MenuItems
    private native List<MenuItem> getItems(MenuBar menuBar) /*-{
        return menuBar.@com.google.gwt.user.client.ui.MenuBar::getItems()();
    }-*/;

    // JSNI access to MenuBar's package-private getSelectedItem() - no public API exists for this.
    private native MenuItem getSelectedItem(MenuBar menuBar) /*-{
        return menuBar.@com.google.gwt.user.client.ui.MenuBar::getSelectedItem()();
    }-*/;

    // Reconciles the free text typed into the box with the model value: restores the rendered current value when the
    // text is empty or invalid, adopts the typed value when it maps to an acceptable item.
    private void adjustSelectedValue() {
        if (acceptableValues.size() == 0) {
            return;
        }

        // empty suggest box
        String providedText = asSuggestBox().getText();
        if (providedText == null || "".equals(providedText)) {
            if (getValue() != null) {
                // something has been there, deleted on click inside and than hidden the box - restoring
                asSuggestBox().setText(renderer.getReplacementString(getValue()));
            }
        } else {
            // something has been typed inside - validate
            try {
                T newData = asEntity(providedText);
                // correct provided - use it
                setValue(newData);
            } catch (IllegalArgumentException e) {
                // incorrect - return to previous one
                asSuggestBox().setText(renderer.getReplacementString(getValue()));
            }
        }
    }

    /**
     * Maps a replacement string back to the model item it was rendered from.
     *
     * @param provided the replacement string typed or selected by the user
     * @return the matching item from {@link #acceptableValues}
     * @throws IllegalArgumentException when the string does not correspond to any acceptable value
     */
    @Override
    protected T asEntity(String provided) {
        if (provided == null) {
            if (acceptableValues.contains(null)) {
                return null;
            } else {
                throw new IllegalArgumentException("Only non-null arguments are accepted"); //$NON-NLS-1$
            }
        }

        for (T data : acceptableValues) {
            String expected = renderer.getReplacementString(data);
            if (expected == null) {
                continue;
            }

            if (expected.equals(provided)) {
                return data;
            }
        }

        throw new IllegalArgumentException("The provided value is not acceptable: '" + provided + "'"); //$NON-NLS-1$ //$NON-NLS-2$
    }

    @Override
    public void setValue(T value) {
        addToValidValuesIfNeeded(value);
        super.setValue(value);
    }

    @Override
    public void setValue(T value, boolean fireEvents) {
        addToValidValuesIfNeeded(value);
        super.setValue(value, fireEvents);
    }

    // Only values present in acceptableValues are ever reported; anything else reads as null.
    @Override
    public T getValue() {
        if (acceptableValues.contains(super.getValue())) {
            return super.getValue();
        }
        return null;
    }

    // Adds the value to acceptableValues when it is missing, but only if autoAddToValidValues allows it.
    private void addToValidValuesIfNeeded(T value) {
        if (!acceptableValues.contains(value) && autoAddToValidValues) {
            acceptableValues.add(value);
        }
    }

    @Override
    public void setAcceptableValues(Collection<T> acceptableValues) {
        this.acceptableValues = acceptableValues;
        // keep the currently selected value valid even after the list was replaced
        T selected = getValue();
        addToValidValuesIfNeeded(selected);
        RenderableSuggestOracle<T> suggestOracle = (RenderableSuggestOracle<T>) suggestBox.getSuggestOracle();
        suggestOracle.setData(acceptableValues);
    }

    @Override
    protected void render(T value, boolean fireEvents) {
        asSuggestBox().setValue(renderer.getReplacementString(value), fireEvents);
    }

    @Override
    public void setEnabled(boolean enabled) {
        super.setEnabled(enabled);
        if (enabled) {
            mainPanel.getElement().replaceClassName(style.disabledMainPanel(), style.enabledMainPanel());
        } else {
            mainPanel.getElement().replaceClassName(style.enabledMainPanel(), style.disabledMainPanel());
        }
    }

    /**
     * Focus/blur handler for the text box. On blur it hides the suggestions and validates the typed value - unless it
     * has been temporarily disabled by {@link FocusHandlerEnablingMouseHandlers} because a click on the menu or the
     * drop-down image is being processed.
     */
    class SuggestBoxFocusHandler implements FocusHandler, BlurHandler {

        private boolean enabled = true;

        @Override
        public void onBlur(BlurEvent blurEvent) {
            if (eventHandler != null) {
                eventHandler.removeHandler();
            }

            // process only if it will not be processed by other handlers
            if (enabled) {
                // first give the opportunity to the click handler on the menu to process the event, than we can hide it
                hideSuggestions();
                adjustSelectedValue();
            }
        }

        public void setEnabled(boolean enabled) {
            this.enabled = enabled;
        }

        @Override
        public void onFocus(FocusEvent event) {
            // swallow ENTER while focused so the enclosing dialog does not get submitted
            eventHandler =
                    Event.addNativePreviewHandler(new EnterIgnoringNativePreviewHandler<T>(ListModelTypeAheadListBox.this));
        }
    }

    /**
     * Disables the shared focus handler for the duration of a mouse click (mouse-down to mouse-up) so the blur caused
     * by the click does not prematurely hide/validate the suggest box.
     */
    class FocusHandlerEnablingMouseHandlers implements MouseDownHandler, MouseUpHandler {

        private SuggestBoxFocusHandler focusHandler;

        public FocusHandlerEnablingMouseHandlers(SuggestBoxFocusHandler focusHandler) {
            this.focusHandler = focusHandler;
        }

        @Override
        public void onMouseDown(MouseDownEvent event) {
            focusHandler.setEnabled(false);
        }

        @Override
        public void onMouseUp(MouseUpEvent event) {
            focusHandler.setEnabled(true);
        }
    }
}

/**
 * Suggestion backed by a renderer-produced replacement/display string pair; matches queries by case-insensitive
 * prefix of the replacement string.
 */
class RenderableSuggestion<T> extends MultiWordSuggestion {

    public RenderableSuggestion(T row, SuggestBoxRenderer<T> renderer) {
        super(renderer.getReplacementString(row), renderer.getDisplayString(row));
    }

    public boolean matches(String query) {
        String replacementString = getReplacementString();
        if (replacementString == null || query == null) {
            return false;
        }

        return replacementString.toLowerCase().startsWith(query.toLowerCase());
    }
}

/**
 * Suggest oracle that serves {@link RenderableSuggestion}s built from a collection of model items.
 */
class RenderableSuggestOracle<T> extends MultiWordSuggestOracle {

    private SuggestBoxRenderer<T> renderer;

    // inited to avoid null checks
    private Collection<T> data = new ArrayList<T>();

    public RenderableSuggestOracle(SuggestBoxRenderer<T> renderer) {
        this.renderer = renderer;
    }

    // Returns only the suggestions whose replacement string starts with the query (case-insensitive).
    @Override
    public void requestSuggestions(Request request, Callback callback) {
        List<RenderableSuggestion<T>> suggestions = new ArrayList<RenderableSuggestion<T>>();
        String query = request.getQuery();
        for (T row : data) {
            RenderableSuggestion<T> suggestionCandidate = new RenderableSuggestion<T>(row, renderer);
            if (suggestionCandidate.matches(query)) {
                suggestions.add(suggestionCandidate);
            }
        }

        callback.onSuggestionsReady(request, new Response(suggestions));
    }

    // Returns all suggestions regardless of the current text (used to show the complete drop-down list).
    @Override
    public void requestDefaultSuggestions(Request request, Callback callback) {
        List<RenderableSuggestion<T>> suggestions = new ArrayList<RenderableSuggestion<T>>();
        for (T row : data) {
            suggestions.add(new RenderableSuggestion<T>(row, renderer));
        }

        callback.onSuggestionsReady(request, new Response(suggestions));
    }

    public void setData(Collection<T> data) {
        this.data = data;
    }
}

/**
 * Native preview handler that swallows ENTER keypresses while the list box has focus, applies the currently
 * highlighted suggestion (if any) and hides the popup - preventing the surrounding dialog from being submitted.
 */
class EnterIgnoringNativePreviewHandler<T> implements NativePreviewHandler {

    private final ListModelTypeAheadListBox<T> listModelTypeAheadListBox;

    public EnterIgnoringNativePreviewHandler(ListModelTypeAheadListBox<T> listModelTypeAheadListBox) {
        this.listModelTypeAheadListBox = listModelTypeAheadListBox;
    }

    @Override
    public void onPreviewNativeEvent(NativePreviewEvent event) {
        NativeEvent nativeEvent = event.getNativeEvent();
        if (nativeEvent.getKeyCode() == KeyCodes.KEY_ENTER && event.getTypeInt() == Event.ONKEYPRESS
                && !event.isCanceled()) {
            // swallow the enter key otherwise the whole dialog would get submitted
            nativeEvent.preventDefault();
            nativeEvent.stopPropagation();
            event.cancel();

            // process the event here directly
            Suggestion currentSelection = listModelTypeAheadListBox.getCurrentSelection();
            if (currentSelection != null) {
                String replacementString = currentSelection.getReplacementString();
                try {
                    listModelTypeAheadListBox.setValue(listModelTypeAheadListBox.asEntity(replacementString), true);
                } catch (IllegalArgumentException e) {
                    // do not set the value if it is not a correct one
                }
            }
            listModelTypeAheadListBox.hideSuggestions();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.connectors.elasticsearch.table;

import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.sink.SinkFunctionProvider;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.types.RowKind;

import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.junit.ClassRule;
import org.junit.Test;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import org.testcontainers.utility.DockerImageName;

import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

import static org.apache.flink.streaming.connectors.elasticsearch.table.TestContext.context;
import static org.apache.flink.table.api.Expressions.row;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertThat;

/** IT tests for {@link Elasticsearch6DynamicSink}. */
public class Elasticsearch6DynamicSinkITCase {

    // Shared ES 6.3.1 container for all tests in this class; started/stopped once by the @ClassRule.
    @ClassRule
    public static ElasticsearchContainer elasticsearchContainer =
            new ElasticsearchContainer(
                    DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch-oss")
                            .withTag("6.3.1"));

    /**
     * Builds a transport client against the test container so tests can read back the documents the sink wrote.
     * Uses the deprecated TransportClient because ES 6.x still supports it and the HTTP high-level client would add
     * another dependency.
     */
    @SuppressWarnings("deprecation")
    protected final Client getClient() {
        TransportAddress transportAddress =
                new TransportAddress(elasticsearchContainer.getTcpHost());
        String expectedClusterName = "docker-cluster";
        Settings settings = Settings.builder().put("cluster.name", expectedClusterName).build();
        return new PreBuiltTransportClient(settings).addTransportAddress(transportAddress);
    }

    /**
     * Writes one UPDATE_AFTER row through the DataStream API and verifies the document lands under the
     * primary-key-derived id ("a_g" -> "1_2012-12-12T12:12:12") with the expected field serialization.
     */
    @Test
    public void testWritingDocuments() throws Exception {
        ResolvedSchema schema =
                new ResolvedSchema(
                        Arrays.asList(
                                Column.physical("a", DataTypes.BIGINT().notNull()),
                                Column.physical("b", DataTypes.TIME()),
                                Column.physical("c", DataTypes.STRING().notNull()),
                                Column.physical("d", DataTypes.FLOAT()),
                                Column.physical("e", DataTypes.TINYINT().notNull()),
                                Column.physical("f", DataTypes.DATE()),
                                Column.physical("g", DataTypes.TIMESTAMP().notNull())),
                        Collections.emptyList(),
                        UniqueConstraint.primaryKey("name", Arrays.asList("a", "g")));

        // TIME is encoded as millis-of-day (12345 -> 00:00:12), DATE as epoch days (12345 -> 2003-10-20).
        GenericRowData rowData =
                GenericRowData.of(
                        1L,
                        12345,
                        StringData.fromString("ABCDE"),
                        12.12f,
                        (byte) 2,
                        12345,
                        TimestampData.fromLocalDateTime(
                                LocalDateTime.parse("2012-12-12T12:12:12")));

        String index = "writing-documents";
        String myType = "MyType";
        Elasticsearch6DynamicSinkFactory sinkFactory = new Elasticsearch6DynamicSinkFactory();

        // flush-on-checkpoint is disabled so the sink flushes eagerly and the test can read without checkpoints
        SinkFunctionProvider sinkRuntimeProvider =
                (SinkFunctionProvider)
                        sinkFactory
                                .createDynamicTableSink(
                                        context()
                                                .withSchema(schema)
                                                .withOption(
                                                        ElasticsearchOptions.INDEX_OPTION.key(),
                                                        index)
                                                .withOption(
                                                        ElasticsearchOptions.DOCUMENT_TYPE_OPTION
                                                                .key(),
                                                        myType)
                                                .withOption(
                                                        ElasticsearchOptions.HOSTS_OPTION.key(),
                                                        elasticsearchContainer.getHttpHostAddress())
                                                .withOption(
                                                        ElasticsearchOptions
                                                                .FLUSH_ON_CHECKPOINT_OPTION
                                                                .key(),
                                                        "false")
                                                .build())
                                .getSinkRuntimeProvider(new MockContext());

        SinkFunction<RowData> sinkFunction = sinkRuntimeProvider.createSinkFunction();
        StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment();
        rowData.setRowKind(RowKind.UPDATE_AFTER);
        environment.<RowData>fromElements(rowData).addSink(sinkFunction);
        environment.execute();

        Client client = getClient();
        Map<String, Object> response =
                client.get(new GetRequest(index, myType, "1_2012-12-12T12:12:12"))
                        .actionGet()
                        .getSource();
        Map<Object, Object> expectedMap = new HashMap<>();
        expectedMap.put("a", 1);
        expectedMap.put("b", "00:00:12");
        expectedMap.put("c", "ABCDE");
        expectedMap.put("d", 12.12d);
        expectedMap.put("e", 2);
        expectedMap.put("f", "2003-10-20");
        expectedMap.put("g", "2012-12-12 12:12:12");
        assertThat(response, equalTo(expectedMap));
    }

    /**
     * Same round-trip as {@link #testWritingDocuments()} but driven entirely through SQL DDL + the Table API,
     * including a computed column (h) that must not be written to Elasticsearch.
     */
    @Test
    public void testWritingDocumentsFromTableApi() throws Exception {
        TableEnvironment tableEnvironment =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance()
                                .useBlinkPlanner()
                                .inStreamingMode()
                                .build());

        String index = "table-api";
        String myType = "MyType";
        tableEnvironment.executeSql(
                "CREATE TABLE esTable ("
                        + "a BIGINT NOT NULL,\n"
                        + "b TIME,\n"
                        + "c STRING NOT NULL,\n"
                        + "d FLOAT,\n"
                        + "e TINYINT NOT NULL,\n"
                        + "f DATE,\n"
                        + "g TIMESTAMP NOT NULL,\n"
                        + "h as a + 2,\n"
                        + "PRIMARY KEY (a, g) NOT ENFORCED\n"
                        + ")\n"
                        + "WITH (\n"
                        + String.format("'%s'='%s',\n", "connector", "elasticsearch-6")
                        + String.format(
                                "'%s'='%s',\n", ElasticsearchOptions.INDEX_OPTION.key(), index)
                        + String.format(
                                "'%s'='%s',\n",
                                ElasticsearchOptions.DOCUMENT_TYPE_OPTION.key(), myType)
                        + String.format(
                                "'%s'='%s',\n",
                                ElasticsearchOptions.HOSTS_OPTION.key(),
                                elasticsearchContainer.getHttpHostAddress())
                        + String.format(
                                "'%s'='%s'\n",
                                ElasticsearchOptions.FLUSH_ON_CHECKPOINT_OPTION.key(), "false")
                        + ")");

        tableEnvironment
                .fromValues(
                        row(
                                1L,
                                LocalTime.ofNanoOfDay(12345L * 1_000_000L),
                                "ABCDE",
                                12.12f,
                                (byte) 2,
                                LocalDate.ofEpochDay(12345),
                                LocalDateTime.parse("2012-12-12T12:12:12")))
                .executeInsert("esTable")
                .await();

        Client client = getClient();
        Map<String, Object> response =
                client.get(new GetRequest(index, myType, "1_2012-12-12T12:12:12"))
                        .actionGet()
                        .getSource();
        Map<Object, Object> expectedMap = new HashMap<>();
        expectedMap.put("a", 1);
        expectedMap.put("b", "00:00:12");
        expectedMap.put("c", "ABCDE");
        expectedMap.put("d", 12.12d);
        expectedMap.put("e", 2);
        expectedMap.put("f", "2003-10-20");
        expectedMap.put("g", "2012-12-12 12:12:12");
        assertThat(response, equalTo(expectedMap));
    }

    /**
     * Without a primary key the sink must let Elasticsearch auto-generate document ids; verifies both rows arrive
     * by polling the search API (freshly indexed documents are not immediately searchable).
     */
    @Test
    public void testWritingDocumentsNoPrimaryKey() throws Exception {
        TableEnvironment tableEnvironment =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance()
                                .useBlinkPlanner()
                                .inStreamingMode()
                                .build());

        String index = "no-primary-key";
        String myType = "MyType";
        tableEnvironment.executeSql(
                "CREATE TABLE esTable ("
                        + "a BIGINT NOT NULL,\n"
                        + "b TIME,\n"
                        + "c STRING NOT NULL,\n"
                        + "d FLOAT,\n"
                        + "e TINYINT NOT NULL,\n"
                        + "f DATE,\n"
                        + "g TIMESTAMP NOT NULL\n"
                        + ")\n"
                        + "WITH (\n"
                        + String.format("'%s'='%s',\n", "connector", "elasticsearch-6")
                        + String.format(
                                "'%s'='%s',\n", ElasticsearchOptions.INDEX_OPTION.key(), index)
                        + String.format(
                                "'%s'='%s',\n",
                                ElasticsearchOptions.DOCUMENT_TYPE_OPTION.key(), myType)
                        + String.format(
                                "'%s'='%s',\n",
                                ElasticsearchOptions.HOSTS_OPTION.key(),
                                elasticsearchContainer.getHttpHostAddress())
                        + String.format(
                                "'%s'='%s'\n",
                                ElasticsearchOptions.FLUSH_ON_CHECKPOINT_OPTION.key(), "false")
                        + ")");

        tableEnvironment
                .fromValues(
                        row(
                                1L,
                                LocalTime.ofNanoOfDay(12345L * 1_000_000L),
                                "ABCDE",
                                12.12f,
                                (byte) 2,
                                LocalDate.ofEpochDay(12345),
                                LocalDateTime.parse("2012-12-12T12:12:12")),
                        row(
                                2L,
                                LocalTime.ofNanoOfDay(12345L * 1_000_000L),
                                "FGHIJK",
                                13.13f,
                                (byte) 4,
                                LocalDate.ofEpochDay(12345),
                                LocalDateTime.parse("2013-12-12T13:13:13")))
                .executeInsert("esTable")
                .await();

        Client client = getClient();

        // search API does not return documents that were not indexed, we might need to query
        // the index a few times
        Deadline deadline = Deadline.fromNow(Duration.ofSeconds(30));
        SearchHits hits;
        do {
            hits = client.prepareSearch(index).execute().actionGet().getHits();
            if (hits.getTotalHits() < 2) {
                Thread.sleep(200);
            }
        } while (hits.getTotalHits() < 2 && deadline.hasTimeLeft());

        if (hits.getTotalHits() < 2) {
            throw new AssertionError("Could not retrieve results from Elasticsearch.");
        }

        // compare as sets: without a primary key the two documents may come back in any order
        HashSet<Map<String, Object>> resultSet = new HashSet<>();
        resultSet.add(hits.getAt(0).getSourceAsMap());
        resultSet.add(hits.getAt(1).getSourceAsMap());
        Map<Object, Object> expectedMap1 = new HashMap<>();
        expectedMap1.put("a", 1);
        expectedMap1.put("b", "00:00:12");
        expectedMap1.put("c", "ABCDE");
        expectedMap1.put("d", 12.12d);
        expectedMap1.put("e", 2);
        expectedMap1.put("f", "2003-10-20");
        expectedMap1.put("g", "2012-12-12 12:12:12");
        Map<Object, Object> expectedMap2 = new HashMap<>();
        expectedMap2.put("a", 2);
        expectedMap2.put("b", "00:00:12");
        expectedMap2.put("c", "FGHIJK");
        expectedMap2.put("d", 13.13d);
        expectedMap2.put("e", 4);
        expectedMap2.put("f", "2003-10-20");
        expectedMap2.put("g", "2013-12-12 13:13:13");
        HashSet<Map<Object, Object>> expectedSet = new HashSet<>();
        expectedSet.add(expectedMap1);
        expectedSet.add(expectedMap2);
        assertThat(resultSet, equalTo(expectedSet));
    }

    /**
     * Verifies the dynamic-index pattern "{field|format}" is resolved per record: the index option
     * "dynamic-index-{b|yyyy-MM-dd}" must route the row into index "dynamic-index-2012-12-12".
     */
    @Test
    public void testWritingDocumentsWithDynamicIndex() throws Exception {
        TableEnvironment tableEnvironment =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance()
                                .useBlinkPlanner()
                                .inStreamingMode()
                                .build());

        String index = "dynamic-index-{b|yyyy-MM-dd}";
        String myType = "MyType";
        tableEnvironment.executeSql(
                "CREATE TABLE esTable ("
                        + "a BIGINT NOT NULL,\n"
                        + "b TIMESTAMP NOT NULL,\n"
                        + "PRIMARY KEY (a) NOT ENFORCED\n"
                        + ")\n"
                        + "WITH (\n"
                        + String.format("'%s'='%s',\n", "connector", "elasticsearch-6")
                        + String.format(
                                "'%s'='%s',\n", ElasticsearchOptions.INDEX_OPTION.key(), index)
                        + String.format(
                                "'%s'='%s',\n",
                                ElasticsearchOptions.DOCUMENT_TYPE_OPTION.key(), myType)
                        + String.format(
                                "'%s'='%s',\n",
                                ElasticsearchOptions.HOSTS_OPTION.key(),
                                elasticsearchContainer.getHttpHostAddress())
                        + String.format(
                                "'%s'='%s'\n",
                                ElasticsearchOptions.FLUSH_ON_CHECKPOINT_OPTION.key(), "false")
                        + ")");

        tableEnvironment
                .fromValues(row(1L, LocalDateTime.parse("2012-12-12T12:12:12")))
                .executeInsert("esTable")
                .await();

        Client client = getClient();
        Map<String, Object> response =
                client.get(new GetRequest("dynamic-index-2012-12-12", myType, "1"))
                        .actionGet()
                        .getSource();
        Map<Object, Object> expectedMap = new HashMap<>();
        expectedMap.put("a", 1);
        expectedMap.put("b", "2012-12-12 12:12:12");
        assertThat(response, equalTo(expectedMap));
    }

    // Minimal sink context; type information and data-structure conversion are not exercised by these tests.
    private static class MockContext implements DynamicTableSink.Context {
        @Override
        public boolean isBounded() {
            return false;
        }

        @Override
        public TypeInformation<?> createTypeInformation(DataType consumedDataType) {
            return null;
        }

        @Override
        public DynamicTableSink.DataStructureConverter createDataStructureConverter(
                DataType consumedDataType) {
            return null;
        }
    }
}
/* -----------------------------------------------------------------------------
 * Rule_cmdXorInt.java
 * -----------------------------------------------------------------------------
 *
 * Producer : com.parse2.aparse.Parser 2.3
 * Produced : Fri Apr 12 10:40:21 MUT 2013
 *
 * -----------------------------------------------------------------------------
 */

// NOTE: auto-generated by the aparse parser generator - do not edit the logic by hand;
// regenerate from the grammar instead.

package com.litecoding.smali2java.parser.cmd.and7or7xor.xor;

import java.util.ArrayList;

import com.litecoding.smali2java.builder.Visitor;
import com.litecoding.smali2java.parser.ParserContext;
import com.litecoding.smali2java.parser.Rule;
import com.litecoding.smali2java.parser.Terminal_StringValue;
import com.litecoding.smali2java.parser.smali.Rule_codeRegister;
import com.litecoding.smali2java.parser.smali.Rule_codeRegisterVDst;
import com.litecoding.smali2java.parser.smali.Rule_commentSequence;
import com.litecoding.smali2java.parser.smali.Rule_listSeparator;
import com.litecoding.smali2java.parser.smali.Rule_optPadding;
import com.litecoding.smali2java.parser.smali.Rule_padding;
import com.litecoding.smali2java.parser.text.Rule_CRLF;

/**
 * Parses the smali "xor-int" instruction line. Grammar (informally):
 * optPadding "xor-int" padding codeRegisterVDst listSeparator codeRegister listSeparator codeRegister
 * optPadding [padding commentSequence] CRLF
 */
final public class Rule_cmdXorInt extends Rule {
    private Rule_cmdXorInt(String spelling, ArrayList<Rule> rules) {
        super(spelling, rules);
    }

    public Object accept(Visitor visitor) {
        return visitor.visit(this);
    }

    /**
     * Attempts to parse a cmdXorInt rule at the current context position.
     *
     * @param context parser state; the index is restored on failure (backtracking)
     * @return the parsed rule, or null if the input does not match
     */
    public static Rule_cmdXorInt parse(ParserContext context) {
        context.push("cmdXorInt");

        boolean parsed = true;
        int s0 = context.index;
        ArrayList<Rule> e0 = new ArrayList<Rule>();
        Rule rule;

        parsed = false;
        if (!parsed) {
            {
                ArrayList<Rule> e1 = new ArrayList<Rule>();
                int s1 = context.index;
                parsed = true;
                // optPadding (exactly once)
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_optPadding.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // the "xor-int" mnemonic itself
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Terminal_StringValue.parse(context, "xor-int");
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // mandatory padding after the mnemonic
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_padding.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // destination register
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_codeRegisterVDst.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // separator before the first source register
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_listSeparator.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // first source register
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_codeRegister.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // separator before the second source register
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_listSeparator.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // second source register
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_codeRegister.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // trailing optPadding
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_optPadding.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // optional trailing comment: [padding commentSequence]; always "succeeds" (parsed = true at the end)
                if (parsed) {
                    boolean f1 = true;
                    @SuppressWarnings("unused")
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        int g1 = context.index;
                        parsed = false;
                        if (!parsed) {
                            {
                                ArrayList<Rule> e2 = new ArrayList<Rule>();
                                int s2 = context.index;
                                parsed = true;
                                if (parsed) {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++) {
                                        rule = Rule_padding.parse(context);
                                        if ((f2 = rule != null)) {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                if (parsed) {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++) {
                                        rule = Rule_commentSequence.parse(context);
                                        if ((f2 = rule != null)) {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                // commit the optional group or backtrack to before it
                                if (parsed)
                                    e1.addAll(e2);
                                else
                                    context.index = s2;
                            }
                        }
                        f1 = context.index > g1;
                        if (parsed) c1++;
                    }
                    parsed = true;
                }
                // line terminator
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule_CRLF.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // commit the whole sequence or backtrack
                if (parsed)
                    e0.addAll(e1);
                else
                    context.index = s1;
            }
        }

        rule = null;
        if (parsed)
            rule = new Rule_cmdXorInt(context.text.substring(s0, context.index), e0);
        else
            context.index = s0;

        context.pop("cmdXorInt", parsed);

        return (Rule_cmdXorInt) rule;
    }
}

/* -----------------------------------------------------------------------------
 * eof
 * -----------------------------------------------------------------------------
 */
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.frauddetector.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the GetRules operation. Identifies rules by detector ID, rule ID and
 * rule version, with {@code nextToken}/{@code maxResults} pagination controls.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/frauddetector-2019-11-15/GetRules" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetRulesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The rule ID. */
    private String ruleId;

    /** The detector ID. */
    private String detectorId;

    /** The rule version. */
    private String ruleVersion;

    /** The next page token. */
    private String nextToken;

    /** The maximum number of rules to return for the request. */
    private Integer maxResults;

    /**
     * Sets the rule ID.
     *
     * @param ruleId
     *        The rule ID.
     */
    public void setRuleId(String ruleId) {
        this.ruleId = ruleId;
    }

    /**
     * @return The rule ID.
     */
    public String getRuleId() {
        return ruleId;
    }

    /**
     * Fluent variant of {@link #setRuleId(String)}.
     *
     * @param ruleId
     *        The rule ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRulesRequest withRuleId(String ruleId) {
        this.ruleId = ruleId;
        return this;
    }

    /**
     * Sets the detector ID.
     *
     * @param detectorId
     *        The detector ID.
     */
    public void setDetectorId(String detectorId) {
        this.detectorId = detectorId;
    }

    /**
     * @return The detector ID.
     */
    public String getDetectorId() {
        return detectorId;
    }

    /**
     * Fluent variant of {@link #setDetectorId(String)}.
     *
     * @param detectorId
     *        The detector ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRulesRequest withDetectorId(String detectorId) {
        this.detectorId = detectorId;
        return this;
    }

    /**
     * Sets the rule version.
     *
     * @param ruleVersion
     *        The rule version.
     */
    public void setRuleVersion(String ruleVersion) {
        this.ruleVersion = ruleVersion;
    }

    /**
     * @return The rule version.
     */
    public String getRuleVersion() {
        return ruleVersion;
    }

    /**
     * Fluent variant of {@link #setRuleVersion(String)}.
     *
     * @param ruleVersion
     *        The rule version.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRulesRequest withRuleVersion(String ruleVersion) {
        this.ruleVersion = ruleVersion;
        return this;
    }

    /**
     * Sets the next page token.
     *
     * @param nextToken
     *        The next page token.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return The next page token.
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        The next page token.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRulesRequest withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Sets the maximum number of rules to return for the request.
     *
     * @param maxResults
     *        The maximum number of rules to return for the request.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * @return The maximum number of rules to return for the request.
     */
    public Integer getMaxResults() {
        return maxResults;
    }

    /**
     * Fluent variant of {@link #setMaxResults(Integer)}.
     *
     * @param maxResults
     *        The maximum number of rules to return for the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRulesRequest withMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Non-null fields only; each field except the last is followed by a comma
        // (so a trailing comma may precede "}" when MaxResults is null — matches SDK convention).
        StringBuilder sb = new StringBuilder("{");
        if (getRuleId() != null) {
            sb.append("RuleId: ").append(getRuleId()).append(",");
        }
        if (getDetectorId() != null) {
            sb.append("DetectorId: ").append(getDetectorId()).append(",");
        }
        if (getRuleVersion() != null) {
            sb.append("RuleVersion: ").append(getRuleVersion()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getMaxResults() != null) {
            sb.append("MaxResults: ").append(getMaxResults());
        }
        return sb.append("}").toString();
    }

    /**
     * Null-tolerant field comparison: equal when both are {@code null} or
     * {@code a.equals(b)} holds.
     */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Covers the obj == null case as well.
        if (!(obj instanceof GetRulesRequest)) {
            return false;
        }
        GetRulesRequest other = (GetRulesRequest) obj;
        return fieldEquals(getRuleId(), other.getRuleId())
                && fieldEquals(getDetectorId(), other.getDetectorId())
                && fieldEquals(getRuleVersion(), other.getRuleVersion())
                && fieldEquals(getNextToken(), other.getNextToken())
                && fieldEquals(getMaxResults(), other.getMaxResults());
    }

    @Override
    public int hashCode() {
        // Standard 31-prime accumulation over the fields, in declaration order;
        // produces the same value as the hand-unrolled generated version.
        int hashCode = 1;
        for (Object member : new Object[] { getRuleId(), getDetectorId(), getRuleVersion(), getNextToken(), getMaxResults() }) {
            hashCode = 31 * hashCode + (member == null ? 0 : member.hashCode());
        }
        return hashCode;
    }

    @Override
    public GetRulesRequest clone() {
        return (GetRulesRequest) super.clone();
    }

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.query;

import org.apache.lucene.analysis.MockSynonymAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.test.AbstractQueryTestCase;

import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.everyItem;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.equalToIgnoringCase;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.hasSize;

/**
 * Tests for the {@code match_bool_prefix} query builder: random round-trip testing via
 * {@link AbstractQueryTestCase}, JSON parse/serialize checks, and direct analysis tests
 * verifying the shape of the generated Lucene query (all-but-last terms as term/synonym/fuzzy
 * clauses, last term as a prefix clause).
 */
public class MatchBoolPrefixQueryBuilderTests extends AbstractQueryTestCase<MatchBoolPrefixQueryBuilder> {

    /**
     * Builds a randomized query for the framework's round-trip tests: a random field
     * (text field or its alias), a value of 1-4 random whitespace-separated terms, and a
     * random subset of the optional parameters.
     */
    @Override
    protected MatchBoolPrefixQueryBuilder doCreateTestQueryBuilder() {
        final String fieldName = randomFrom(TEXT_FIELD_NAME, TEXT_ALIAS_FIELD_NAME);
        // rangeClosed(0, n) yields n+1 terms; trailing space removed by trim().
        final Object value = IntStream.rangeClosed(0, randomIntBetween(0, 3))
            .mapToObj(i -> randomAlphaOfLengthBetween(1, 10) + " ")
            .collect(Collectors.joining())
            .trim();

        final MatchBoolPrefixQueryBuilder queryBuilder = new MatchBoolPrefixQueryBuilder(fieldName, value);

        // A custom analyzer only makes sense on a real text field (not e.g. the alias target's keyword variant).
        if (randomBoolean() && isTextField(fieldName)) {
            queryBuilder.analyzer(randomFrom("simple", "keyword", "whitespace"));
        }
        if (randomBoolean()) {
            queryBuilder.operator(randomFrom(Operator.values()));
        }
        if (randomBoolean()) {
            queryBuilder.minimumShouldMatch(randomMinimumShouldMatch());
        }
        if (randomBoolean()) {
            queryBuilder.fuzziness(randomFuzziness(fieldName));
        }
        if (randomBoolean()) {
            queryBuilder.prefixLength(randomIntBetween(0, 10));
        }
        if (randomBoolean()) {
            queryBuilder.maxExpansions(randomIntBetween(1, 1000));
        }
        if (randomBoolean()) {
            queryBuilder.fuzzyTranspositions(randomBoolean());
        }
        if (randomBoolean()) {
            queryBuilder.fuzzyRewrite(getRandomRewriteMethod());
        }
        return queryBuilder;
    }

    /**
     * Asserts the Lucene query produced for a random builder: a single-term value rewrites to a
     * bare {@link PrefixQuery}; otherwise a {@link BooleanQuery} whose last clause is a prefix
     * query and whose other clauses are term/synonym/fuzzy queries.
     */
    @Override
    protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder,
                                       Query query,
                                       QueryShardContext context) throws IOException {
        assertThat(query, notNullValue());
        assertThat(query, anyOf(instanceOf(BooleanQuery.class), instanceOf(PrefixQuery.class)));

        if (query instanceof PrefixQuery) {
            // Single-term case: the whole value becomes the prefix.
            // equalToIgnoringCase because some random analyzers lowercase the input.
            final PrefixQuery prefixQuery = (PrefixQuery) query;
            assertThat(prefixQuery.getPrefix().text(), equalToIgnoringCase((String) queryBuilder.value()));
        } else {
            assertThat(query, instanceOf(BooleanQuery.class));
            final BooleanQuery booleanQuery = (BooleanQuery) query;
            // all queries except the last should be TermQuery or SynonymQuery
            final Set<Query> allQueriesExceptLast = IntStream.range(0, booleanQuery.clauses().size() - 1)
                .mapToObj(booleanQuery.clauses()::get)
                .map(BooleanClause::getQuery)
                .collect(Collectors.toSet());
            assertThat(allQueriesExceptLast, anyOf(
                everyItem(instanceOf(TermQuery.class)),
                everyItem(instanceOf(SynonymQuery.class)),
                everyItem(instanceOf(FuzzyQuery.class))
            ));

            // Fuzzy clauses can only appear when fuzziness was configured on the builder.
            if (allQueriesExceptLast.stream().anyMatch(subQuery -> subQuery instanceof FuzzyQuery)) {
                assertThat(queryBuilder.fuzziness(), notNullValue());
            }
            allQueriesExceptLast.stream().filter(subQuery -> subQuery instanceof FuzzyQuery).forEach(subQuery -> {
                final FuzzyQuery fuzzyQuery = (FuzzyQuery) subQuery;
                assertThat(fuzzyQuery.getPrefixLength(), equalTo(queryBuilder.prefixLength()));
                assertThat(fuzzyQuery.getTranspositions(), equalTo(queryBuilder.fuzzyTranspositions()));
            });

            // the last query should be PrefixQuery
            final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).getQuery();
            assertThat(shouldBePrefixQuery, instanceOf(PrefixQuery.class));

            if (queryBuilder.minimumShouldMatch() != null) {
                // minimum_should_match is resolved against the number of optional (SHOULD) clauses.
                final int optionalClauses =
                    (int) booleanQuery.clauses().stream()
                        .filter(clause -> clause.getOccur() == BooleanClause.Occur.SHOULD)
                        .count();
                final int expected =
                    Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch());
                assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(expected));
            }
        }
    }

    /** Constructor/usage validation: null field, null value, and an unknown analyzer all fail. */
    public void testIllegalValues() {
        {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new MatchBoolPrefixQueryBuilder(null, "value"));
            assertEquals("[match_bool_prefix] requires fieldName", e.getMessage());
        }

        {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> new MatchBoolPrefixQueryBuilder("name", null));
            assertEquals("[match_bool_prefix] requires query value", e.getMessage());
        }

        {
            final MatchBoolPrefixQueryBuilder builder = new MatchBoolPrefixQueryBuilder("name", "value");
            builder.analyzer("bogusAnalyzer");
            // Analyzer lookup happens at toQuery time, not construction time.
            QueryShardException e = expectThrows(QueryShardException.class, () -> builder.toQuery(createShardContext()));
            assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
        }
    }

    /** Parsing the short-form JSON expands to the full form with all defaults filled in. */
    public void testFromSimpleJson() throws IOException {
        final String simple =
            "{" +
                "\"match_bool_prefix\": {" +
                    "\"fieldName\": \"fieldValue\"" +
                "}" +
            "}";
        final String expected =
            "{" +
                "\"match_bool_prefix\": {" +
                    "\"fieldName\": {" +
                        "\"query\": \"fieldValue\"," +
                        "\"operator\": \"OR\"," +
                        "\"prefix_length\": 0," +
                        "\"max_expansions\": 50," +
                        "\"fuzzy_transpositions\": true," +
                        "\"boost\": 1.0" +
                    "}" +
                "}" +
            "}";

        final MatchBoolPrefixQueryBuilder builder = (MatchBoolPrefixQueryBuilder) parseQuery(simple);
        checkGeneratedJson(expected, builder);
    }

    /** Full-form JSON with every parameter set round-trips unchanged. */
    public void testFromJson() throws IOException {
        final String expected =
            "{" +
                "\"match_bool_prefix\": {" +
                    "\"fieldName\": {" +
                        "\"query\": \"fieldValue\"," +
                        "\"analyzer\": \"simple\"," +
                        "\"operator\": \"AND\"," +
                        "\"minimum_should_match\": \"2\"," +
                        "\"fuzziness\": \"1\"," +
                        "\"prefix_length\": 1," +
                        "\"max_expansions\": 10," +
                        "\"fuzzy_transpositions\": false," +
                        "\"fuzzy_rewrite\": \"constant_score\"," +
                        "\"boost\": 2.0" +
                    "}" +
                "}" +
            "}";

        final MatchBoolPrefixQueryBuilder builder = (MatchBoolPrefixQueryBuilder) parseQuery(expected);
        checkGeneratedJson(expected, builder);
    }

    /** Like other match queries, match_bool_prefix rejects more than one field, in both JSON forms. */
    public void testParseFailsWithMultipleFields() {
        {
            final String json =
                "{" +
                    "\"match_bool_prefix\" : {" +
                        "\"field_name_1\" : {" +
                            "\"query\" : \"foo\"" +
                        "}," +
                        "\"field_name_2\" : {" +
                            "\"query\" : \"foo\"\n" +
                        "}" +
                    "}" +
                "}";
            final ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
            assertEquals(
                "[match_bool_prefix] query doesn't support multiple fields, found [field_name_1] and [field_name_2]",
                e.getMessage());
        }

        {
            final String simpleJson =
                "{" +
                    "\"match_bool_prefix\" : {" +
                        "\"field_name_1\" : \"foo\"," +
                        "\"field_name_2\" : \"foo\"" +
                    "}" +
                "}";
            final ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(simpleJson));
            assertEquals(
                "[match_bool_prefix] query doesn't support multiple fields, found [field_name_1] and [field_name_2]",
                e.getMessage());
        }
    }

    /** "foo bar baz" analyzes to two term clauses plus a trailing prefix clause on "baz". */
    public void testAnalysis() throws Exception {
        final MatchBoolPrefixQueryBuilder builder = new MatchBoolPrefixQueryBuilder(TEXT_FIELD_NAME, "foo bar baz");
        final Query query = builder.toQuery(createShardContext());

        assertBooleanQuery(query, asList(
            new TermQuery(new Term(TEXT_FIELD_NAME, "foo")),
            new TermQuery(new Term(TEXT_FIELD_NAME, "bar")),
            new PrefixQuery(new Term(TEXT_FIELD_NAME, "baz"))
        ));
    }

    /**
     * With a synonym-producing analyzer, a mid-value term with synonyms ("dogs" -> "dog" in
     * MockSynonymAnalyzer) becomes a SynonymQuery; the last term still becomes a prefix clause.
     */
    public void testAnalysisSynonym() throws Exception {
        final MatchQuery matchQuery = new MatchQuery(createShardContext());
        matchQuery.setAnalyzer(new MockSynonymAnalyzer());
        final Query query = matchQuery.parse(MatchQuery.Type.BOOLEAN_PREFIX, TEXT_FIELD_NAME, "fox dogs red");

        assertBooleanQuery(query, asList(
            new TermQuery(new Term(TEXT_FIELD_NAME, "fox")),
            new SynonymQuery(new Term(TEXT_FIELD_NAME, "dogs"), new Term(TEXT_FIELD_NAME, "dog")),
            new PrefixQuery(new Term(TEXT_FIELD_NAME, "red"))
        ));
    }

    /** A single-term value rewrites to a bare PrefixQuery rather than a one-clause BooleanQuery. */
    public void testAnalysisSingleTerm() throws Exception {
        final MatchBoolPrefixQueryBuilder builder = new MatchBoolPrefixQueryBuilder(TEXT_FIELD_NAME, "foo");
        final Query query = builder.toQuery(createShardContext());
        assertThat(query, equalTo(new PrefixQuery(new Term(TEXT_FIELD_NAME, "foo"))));
    }

    /**
     * Asserts that {@code actual} is a BooleanQuery made up of exactly the given clause queries,
     * in order, with every clause occurring as SHOULD.
     */
    private static void assertBooleanQuery(Query actual, List<Query> expectedClauseQueries) {
        assertThat(actual, instanceOf(BooleanQuery.class));
        final BooleanQuery actualBooleanQuery = (BooleanQuery) actual;
        assertThat(actualBooleanQuery.clauses(), hasSize(expectedClauseQueries.size()));
        assertThat(actualBooleanQuery.clauses(), everyItem(hasProperty("occur", equalTo(BooleanClause.Occur.SHOULD))));

        for (int i = 0; i < actualBooleanQuery.clauses().size(); i++) {
            final Query clauseQuery = actualBooleanQuery.clauses().get(i).getQuery();
            assertThat(clauseQuery, equalTo(expectedClauseQueries.get(i)));
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf

package org.apache.drill.exec.proto.beans;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

import com.dyuproject.protostuff.GraphIOUtil;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Message;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Schema;
import com.dyuproject.protostuff.UninitializedMessageException;

/**
 * Protostuff-generated bean for the GetQueryPlanFragments protobuf message.
 * The class acts as both the message ({@link Message}) and its own serialization
 * schema ({@link Schema}). Do not hand-edit; regenerate from the .proto source.
 */
public final class GetQueryPlanFragments implements Externalizable, Message<GetQueryPlanFragments>, Schema<GetQueryPlanFragments>
{

    public static Schema<GetQueryPlanFragments> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    public static GetQueryPlanFragments getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final GetQueryPlanFragments DEFAULT_INSTANCE = new GetQueryPlanFragments();

    // NOTE(review): `new Boolean(false)` is deliberate, NOT an accidental boxing mistake.
    // It creates a unique sentinel instance distinct from the interned Boolean.FALSE, so
    // writeTo() below can use identity (`!=`) to detect "splitPlan was never explicitly set"
    // and skip writing the field. Replacing it with Boolean.FALSE/autoboxing would break that
    // check, because input.readBool() returns the cached Boolean.FALSE.
    static final Boolean DEFAULT_SPLIT_PLAN = new Boolean(false);

    // Required field (see isInitialized).
    private String query;
    private QueryType type;
    // Starts as the identity sentinel; any merge/setter replaces it with a normal Boolean.
    private Boolean splitPlan = DEFAULT_SPLIT_PLAN;

    public GetQueryPlanFragments()
    {

    }

    public GetQueryPlanFragments(
        String query
    )
    {
        this.query = query;
    }

    // getters and setters

    // query

    public String getQuery()
    {
        return query;
    }

    public GetQueryPlanFragments setQuery(String query)
    {
        this.query = query;
        return this;
    }

    // type

    // Unset type reads as the SQL default rather than null.
    public QueryType getType()
    {
        return type == null ? QueryType.SQL : type;
    }

    public GetQueryPlanFragments setType(QueryType type)
    {
        this.type = type;
        return this;
    }

    // splitPlan

    public Boolean getSplitPlan()
    {
        return splitPlan;
    }

    public GetQueryPlanFragments setSplitPlan(Boolean splitPlan)
    {
        this.splitPlan = splitPlan;
        return this;
    }

    // java serialization

    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<GetQueryPlanFragments> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public GetQueryPlanFragments newMessage()
    {
        return new GetQueryPlanFragments();
    }

    public Class<GetQueryPlanFragments> typeClass()
    {
        return GetQueryPlanFragments.class;
    }

    public String messageName()
    {
        return GetQueryPlanFragments.class.getSimpleName();
    }

    public String messageFullName()
    {
        return GetQueryPlanFragments.class.getName();
    }

    // `query` is the only required field of the message.
    public boolean isInitialized(GetQueryPlanFragments message)
    {
        return
            message.query != null;
    }

    // Reads fields by tag number until field 0 (end of message); unknown tags are skipped.
    public void mergeFrom(Input input, GetQueryPlanFragments message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.query = input.readString();
                    break;
                case 2:
                    message.type = QueryType.valueOf(input.readEnum());
                    break;
                case 3:
                    message.splitPlan = input.readBool();
                    break;
                default:
                    input.handleUnknownField(number, this);
            }
        }
    }

    public void writeTo(Output output, GetQueryPlanFragments message) throws IOException
    {
        if(message.query == null)
            throw new UninitializedMessageException(message);
        output.writeString(1, message.query, false);

        if(message.type != null)
             output.writeEnum(2, message.type.number, false);

        // Identity comparison against the sentinel on purpose: only write splitPlan
        // when it was explicitly assigned (see DEFAULT_SPLIT_PLAN note above).
        if(message.splitPlan != null && message.splitPlan != DEFAULT_SPLIT_PLAN)
            output.writeBool(3, message.splitPlan, false);
    }

    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "query";
            case 2: return "type";
            case 3: return "splitPlan";
            default: return null;
        }
    }

    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Reverse lookup for getFieldNumber; mirrors getFieldName.
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("query", 1);
        __fieldMap.put("type", 2);
        __fieldMap.put("splitPlan", 3);
    }

}
// Copyright (C) 2022 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.acceptance.api.change;

import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allowLabel;
import static com.google.gerrit.server.group.SystemGroupBackend.ANONYMOUS_USERS;
import static com.google.gerrit.server.project.testing.TestLabels.label;
import static com.google.gerrit.server.project.testing.TestLabels.value;

import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.UseTimezone;
import com.google.gerrit.acceptance.VerifyNoPiiInChangeNotes;
import com.google.gerrit.acceptance.testsuite.account.AccountOperations;
import com.google.gerrit.acceptance.testsuite.change.ChangeOperations;
import com.google.gerrit.acceptance.testsuite.project.ProjectOperations;
import com.google.gerrit.acceptance.testsuite.request.RequestScopeOperations;
import com.google.gerrit.entities.Account;
import com.google.gerrit.entities.Change;
import com.google.gerrit.entities.LabelType;
import com.google.gerrit.entities.SubmitRequirementExpression;
import com.google.gerrit.entities.SubmitRequirementExpressionResult;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.server.project.SubmitRequirementsEvaluator;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.inject.Inject;
import org.junit.Before;
import org.junit.Test;

/**
 * Integration tests for the {@code distinctvoters} submit-requirement predicate, which counts
 * how many distinct users voted across a set of labels (optionally restricted to a specific
 * value, MIN or MAX).
 */
@NoHttpd
@UseTimezone(timezone = "US/Eastern")
@VerifyNoPiiInChangeNotes(true)
public class SubmitRequirementPredicateIT extends AbstractDaemonTest {

  @Inject private RequestScopeOperations requestScopeOperations;
  @Inject private SubmitRequirementsEvaluator submitRequirementsEvaluator;
  @Inject private ChangeOperations changeOperations;
  @Inject private ProjectOperations projectOperations;
  @Inject private AccountOperations accountOperations;

  // Two custom labels used alongside the built-in Code-Review label:
  // "Custom-Label" ranges -1..+1, "Custom-Label2" ranges 0..+1.
  private final LabelType label =
      label("Custom-Label", value(1, "Positive"), value(0, "No score"), value(-1, "Negative"));

  private final LabelType pLabel =
      label("Custom-Label2", value(1, "Positive"), value(0, "No score"));

  /** Registers both custom labels on the project and lets anonymous users vote on them. */
  @Before
  public void setUp() throws Exception {
    projectOperations
        .project(project)
        .forUpdate()
        .add(allowLabel(label.getName()).ref("refs/heads/*").group(ANONYMOUS_USERS).range(-1, 1))
        .add(allowLabel(pLabel.getName()).ref("refs/heads/*").group(ANONYMOUS_USERS).range(0, 1))
        .update();
    try (ProjectConfigUpdate u = updateProject(project)) {
      u.getConfig().upsertLabelType(label);
      u.getConfig().upsertLabelType(pLabel);
      u.save();
    }
  }

  /** One user voting on two different labels counts as a single distinct voter. */
  @Test
  public void distinctVoters_sameUserVotesOnDifferentLabels_fails() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(admin.id());
    approve(c1.toString());
    assertNotMatching("distinctvoters:\"[Code-Review,Custom-Label],value=MAX,count>1\"", c1);

    // Same user votes on both labels
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label", 1));

    assertNotMatching("distinctvoters:\"[Code-Review,Custom-Label],value=MAX,count>1\"", c1);
  }

  /** Two different users on two different labels satisfy count>1. */
  @Test
  public void distinctVoters_distinctUsersOnDifferentLabels_passes() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(admin.id());
    approve(c1.toString());
    requestScopeOperations.setApiUser(user.id());
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label", 1));
    assertMatching("distinctvoters:\"[Code-Review,Custom-Label],value=MAX,count>1\"", c1);
  }

  /** With value=MAX, a +1 on Code-Review (max is +2) does not count; a +2 does. */
  @Test
  public void distinctVoters_onlyMaxVotesRespected() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(user.id());
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label", 1));
    requestScopeOperations.setApiUser(admin.id());
    recommend(c1.toString());
    assertNotMatching("distinctvoters:\"[Code-Review,Custom-Label],value=MAX,count>1\"", c1);
    requestScopeOperations.setApiUser(admin.id());
    approve(c1.toString());
    assertMatching("distinctvoters:\"[Code-Review,Custom-Label],value=MAX,count>1\"", c1);
  }

  /** With value=MIN, only the most negative vote on each label counts. */
  @Test
  public void distinctVoters_onlyMinVotesRespected() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(user.id());
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label", -1));
    requestScopeOperations.setApiUser(admin.id());
    recommend(c1.toString());
    assertNotMatching("distinctvoters:\"[Code-Review,Custom-Label],value=MIN,count>1\"", c1);
    requestScopeOperations.setApiUser(admin.id());
    gApi.changes().id(c1.toString()).current().review(ReviewInput.reject());
    assertMatching("distinctvoters:\"[Code-Review,Custom-Label],value=MIN,count>1\"", c1);
  }

  /** With value=1, only votes of exactly +1 count (a +2 approve does not). */
  @Test
  public void distinctVoters_onlyExactValueRespected() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(user.id());
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label", 1));
    requestScopeOperations.setApiUser(admin.id());
    approve(c1.toString());
    assertNotMatching("distinctvoters:\"[Code-Review,Custom-Label],value=1,count>1\"", c1);
    requestScopeOperations.setApiUser(admin.id());
    recommend(c1.toString());
    assertMatching("distinctvoters:\"[Code-Review,Custom-Label],value=1,count>1\"", c1);
  }

  /** Without a value restriction, any non-zero vote counts toward the distinct-voter total. */
  @Test
  public void distinctVoters_valueIsOptional() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(user.id());
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label", -1));
    requestScopeOperations.setApiUser(admin.id());
    assertNotMatching("distinctvoters:\"[Code-Review,Custom-Label],count>1\"", c1);
    recommend(c1.toString());
    assertMatching("distinctvoters:\"[Code-Review,Custom-Label],count>1\"", c1);
  }

  /** The label list may contain more than two labels. */
  @Test
  public void distinctVoters_moreThanTwoLabels() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(user.id());
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label2", 1));
    requestScopeOperations.setApiUser(admin.id());
    recommend(c1.toString());
    assertMatching(
        "distinctvoters:\"[Code-Review,Custom-Label,Custom-Label2],value=1,count>2\"".replace(
            "count>2", "count>1"),
        c1);
  }

  /** Three labels and three distinct voters are needed to satisfy count>2. */
  @Test
  public void distinctVoters_moreThanTwoLabels_moreThanTwoUsers() throws Exception {
    Change.Id c1 = changeOperations.newChange().project(project).create();
    requestScopeOperations.setApiUser(user.id());
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label2", 1));
    requestScopeOperations.setApiUser(admin.id());
    recommend(c1.toString());
    assertNotMatching(
        "distinctvoters:\"[Code-Review,Custom-Label,Custom-Label2],value=1,count>2\"", c1);
    Account.Id tester = accountOperations.newAccount().create();
    requestScopeOperations.setApiUser(tester);
    gApi.changes()
        .id(c1.toString())
        .current()
        .review(ReviewInput.create().label("Custom-Label", 1));
    assertMatching(
        "distinctvoters:\"[Code-Review,Custom-Label,Custom-Label2],value=1,count>2\"", c1);
  }

  /** Asserts the predicate evaluates to PASS for the given change. */
  private void assertMatching(String requirement, Change.Id change) {
    assertThat(evaluate(requirement, change).status())
        .isEqualTo(SubmitRequirementExpressionResult.Status.PASS);
  }

  /** Asserts the predicate evaluates to FAIL for the given change. */
  private void assertNotMatching(String requirement, Change.Id change) {
    assertThat(evaluate(requirement, change).status())
        .isEqualTo(SubmitRequirementExpressionResult.Status.FAIL);
  }

  /** Evaluates the requirement expression against the change's current data. */
  private SubmitRequirementExpressionResult evaluate(String requirement, Change.Id change) {
    ChangeData cd = changeDataFactory.create(project, change);
    return submitRequirementsEvaluator.evaluateExpression(
        SubmitRequirementExpression.create(requirement), cd);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.junit.matchers.JUnitMatchers.containsString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.jar.JarFile; import java.util.jar.Manifest; import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell.ExitCodeException; import 
org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest; import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.Context; import 
org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext; import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerExitEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.ShellScriptBuilder; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager; import org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.util.Apps; import org.apache.hadoop.yarn.util.AuxiliaryServiceHelper; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin; import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin; import org.junit.Assert; import org.junit.Assume; import org.junit.Before; import org.junit.Test; public class TestContainerLaunch extends BaseContainerManagerTest { protected Context distContext = new NMContext(new NMContainerTokenSecretManager( conf), new NMTokenSecretManagerInNM(), null, new ApplicationACLsManager(conf), new NMNullStateStoreService()) { public int getHttpPort() { return HTTP_PORT; }; public NodeId getNodeId() { return NodeId.newInstance("ahost", 1234); }; }; public TestContainerLaunch() throws UnsupportedFileSystemException { super(); } @Before public void setup() throws IOException { conf.setClass( YarnConfiguration.NM_MON_RESOURCE_CALCULATOR, 
LinuxResourceCalculatorPlugin.class, ResourceCalculatorPlugin.class); super.setup(); } @Test public void testSpecialCharSymlinks() throws IOException { File shellFile = null; File tempFile = null; String badSymlink = Shell.WINDOWS ? "foo@zz_#!-+bar.cmd" : "foo@zz%_#*&!-+= bar()"; File symLinkFile = null; try { shellFile = Shell.appendScriptExtension(tmpDir, "hello"); tempFile = Shell.appendScriptExtension(tmpDir, "temp"); String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" : "echo \"hello\""; PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile)); FileUtil.setExecutable(shellFile, true); writer.println(timeoutCommand); writer.close(); Map<Path, List<String>> resources = new HashMap<Path, List<String>>(); Path path = new Path(shellFile.getAbsolutePath()); resources.put(path, Arrays.asList(badSymlink)); FileOutputStream fos = new FileOutputStream(tempFile); Map<String, String> env = new HashMap<String, String>(); List<String> commands = new ArrayList<String>(); if (Shell.WINDOWS) { commands.add("cmd"); commands.add("/c"); commands.add("\"" + badSymlink + "\""); } else { commands.add("/bin/sh ./\\\"" + badSymlink + "\\\""); } new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands); fos.flush(); fos.close(); FileUtil.setExecutable(tempFile, true); Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[]{tempFile.getAbsolutePath()}, tmpDir); shexc.execute(); assertEquals(shexc.getExitCode(), 0); assert(shexc.getOutput().contains("hello")); symLinkFile = new File(tmpDir, badSymlink); } finally { // cleanup if (shellFile != null && shellFile.exists()) { shellFile.delete(); } if (tempFile != null && tempFile.exists()) { tempFile.delete(); } if (symLinkFile != null && symLinkFile.exists()) { symLinkFile.delete(); } } } // test the diagnostics are generated @Test (timeout = 20000) public void testInvalidSymlinkDiagnostics() throws IOException { File shellFile = null; File tempFile = null; String symLink = 
Shell.WINDOWS ? "test.cmd" :
        "test";
    File symLinkFile = null;
    try {
      shellFile = Shell.appendScriptExtension(tmpDir, "hello");
      tempFile = Shell.appendScriptExtension(tmpDir, "temp");
      String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" :
          "echo \"hello\"";
      PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
      FileUtil.setExecutable(shellFile, true);
      writer.println(timeoutCommand);
      writer.close();
      Map<Path, List<String>> resources =
          new HashMap<Path, List<String>>();
      //This is an invalid path and should throw exception because of No such file.
      Path invalidPath = new Path(shellFile.getAbsolutePath()+"randomPath");
      resources.put(invalidPath, Arrays.asList(symLink));
      FileOutputStream fos = new FileOutputStream(tempFile);
      Map<String, String> env = new HashMap<String, String>();
      List<String> commands = new ArrayList<String>();
      if (Shell.WINDOWS) {
        commands.add("cmd");
        commands.add("/c");
        commands.add("\"" + symLink + "\"");
      } else {
        commands.add("/bin/sh ./\\\"" + symLink + "\\\"");
      }
      new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands);
      fos.flush();
      fos.close();
      FileUtil.setExecutable(tempFile, true);
      Shell.ShellCommandExecutor shexc
          = new Shell.ShellCommandExecutor(new String[]{tempFile.getAbsolutePath()}, tmpDir);
      String diagnostics = null;
      try {
        // Launch must fail because the symlink target does not exist; the
        // exception message becomes the container diagnostics under test.
        shexc.execute();
        Assert.fail("Should catch exception");
      } catch(ExitCodeException e){
        diagnostics = e.getMessage();
      }
      Assert.assertNotNull(diagnostics);
      Assert.assertTrue(shexc.getExitCode() != 0);
      symLinkFile = new File(tmpDir, symLink);
    } finally {
      // cleanup
      if (shellFile != null
          && shellFile.exists()) {
        shellFile.delete();
      }
      if (tempFile != null
          && tempFile.exists()) {
        tempFile.delete();
      }
      if (symLinkFile != null
          && symLinkFile.exists()) {
        symLinkFile.delete();
      }
    }
  }

  /** Verifies diagnostics produced when the launch env contains a value with
   *  embedded newlines, which breaks the generated script's syntax. */
  @Test (timeout = 20000)
  public void testInvalidEnvSyntaxDiagnostics() throws IOException  {
    File shellFile = null;
    try {
      shellFile = Shell.appendScriptExtension(tmpDir, "hello");
      Map<Path, List<String>> resources =
new HashMap<Path, List<String>>();
      FileOutputStream fos = new FileOutputStream(shellFile);
      FileUtil.setExecutable(shellFile, true);
      Map<String, String> env = new HashMap<String, String>();
      // invalid env — raw newlines inside the value corrupt the launch script
      env.put(
          "APPLICATION_WORKFLOW_CONTEXT", "{\"workflowId\":\"609f91c5cd83\"," +
          "\"workflowName\":\"\n\ninsert table " +
          "\npartition (cd_education_status)\nselect cd_demo_sk, cd_gender, " );
      List<String> commands = new ArrayList<String>();
      new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands);
      fos.flush();
      fos.close();
      // It is supposed that LANG is set as C.
      // (forces English shell error messages so the contains() check below works)
      Map<String, String> cmdEnv = new HashMap<String, String>();
      cmdEnv.put("LANG", "C");
      Shell.ShellCommandExecutor shexc
          = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
              tmpDir, cmdEnv);
      String diagnostics = null;
      try {
        shexc.execute();
        Assert.fail("Should catch exception");
      } catch(ExitCodeException e){
        diagnostics = e.getMessage();
      }
      // Diagnostics must contain the platform-specific "unknown command" text.
      Assert.assertTrue(diagnostics.contains(Shell.WINDOWS ?
"is not recognized as an internal or external command" : "command not found")); Assert.assertTrue(shexc.getExitCode() != 0); } finally { // cleanup if (shellFile != null && shellFile.exists()) { shellFile.delete(); } } } @Test(timeout = 10000) public void testEnvExpansion() throws IOException { Path logPath = new Path("/nm/container/logs"); String input = Apps.crossPlatformify("HADOOP_HOME") + "/share/hadoop/common/*" + ApplicationConstants.CLASS_PATH_SEPARATOR + Apps.crossPlatformify("HADOOP_HOME") + "/share/hadoop/common/lib/*" + ApplicationConstants.CLASS_PATH_SEPARATOR + Apps.crossPlatformify("HADOOP_LOG_HOME") + ApplicationConstants.LOG_DIR_EXPANSION_VAR; String res = ContainerLaunch.expandEnvironment(input, logPath); if (Shell.WINDOWS) { Assert.assertEquals("%HADOOP_HOME%/share/hadoop/common/*;" + "%HADOOP_HOME%/share/hadoop/common/lib/*;" + "%HADOOP_LOG_HOME%/nm/container/logs", res); } else { Assert.assertEquals("$HADOOP_HOME/share/hadoop/common/*:" + "$HADOOP_HOME/share/hadoop/common/lib/*:" + "$HADOOP_LOG_HOME/nm/container/logs", res); } System.out.println(res); } @Test (timeout = 20000) public void testContainerLaunchStdoutAndStderrDiagnostics() throws IOException { File shellFile = null; try { shellFile = Shell.appendScriptExtension(tmpDir, "hello"); // echo "hello" to stdout and "error" to stderr and exit code with 2; String command = Shell.WINDOWS ? 
"@echo \"hello\" & @echo \"error\" 1>&2 & exit /b 2" : "echo \"hello\"; echo \"error\" 1>&2; exit 2;"; PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile)); FileUtil.setExecutable(shellFile, true); writer.println(command); writer.close(); Map<Path, List<String>> resources = new HashMap<Path, List<String>>(); FileOutputStream fos = new FileOutputStream(shellFile, true); Map<String, String> env = new HashMap<String, String>(); List<String> commands = new ArrayList<String>(); commands.add(command); ContainerExecutor exec = new DefaultContainerExecutor(); exec.writeLaunchEnv(fos, env, resources, commands); fos.flush(); fos.close(); Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()}, tmpDir); String diagnostics = null; try { shexc.execute(); Assert.fail("Should catch exception"); } catch(ExitCodeException e){ diagnostics = e.getMessage(); } // test stderr Assert.assertTrue(diagnostics.contains("error")); // test stdout Assert.assertTrue(shexc.getOutput().contains("hello")); Assert.assertTrue(shexc.getExitCode() == 2); } finally { // cleanup if (shellFile != null && shellFile.exists()) { shellFile.delete(); } } } @Test public void testPrependDistcache() throws Exception { // Test is only relevant on Windows Assume.assumeTrue(Shell.WINDOWS); ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); ApplicationId appId = ApplicationId.newInstance(0, 0); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1); ContainerId cId = ContainerId.newContainerId(appAttemptId, 0); Map<String, String> userSetEnv = new HashMap<String, String>(); userSetEnv.put(Environment.CONTAINER_ID.name(), "user_set_container_id"); userSetEnv.put(Environment.NM_HOST.name(), "user_set_NM_HOST"); userSetEnv.put(Environment.NM_PORT.name(), "user_set_NM_PORT"); userSetEnv.put(Environment.NM_HTTP_PORT.name(), "user_set_NM_HTTP_PORT"); 
userSetEnv.put(Environment.LOCAL_DIRS.name(), "user_set_LOCAL_DIR");
    userSetEnv.put(Environment.USER.key(), "user_set_" +
        Environment.USER.key());
    userSetEnv.put(Environment.LOGNAME.name(), "user_set_LOGNAME");
    userSetEnv.put(Environment.PWD.name(), "user_set_PWD");
    userSetEnv.put(Environment.HOME.name(), "user_set_HOME");
    userSetEnv.put(Environment.CLASSPATH.name(), "APATH");
    containerLaunchContext.setEnvironment(userSetEnv);
    // Mock container/dispatcher; the handler only fires if ContainerLaunch
    // dispatches an exit event (i.e. assertions inside it are conditional).
    Container container = mock(Container.class);
    when(container.getContainerId()).thenReturn(cId);
    when(container.getLaunchContext()).thenReturn(containerLaunchContext);
    when(container.getLocalizedResources()).thenReturn(null);
    Dispatcher dispatcher = mock(Dispatcher.class);
    EventHandler eventHandler = new EventHandler() {
      public void handle(Event event) {
        Assert.assertTrue(event instanceof ContainerExitEvent);
        ContainerExitEvent exitEvent = (ContainerExitEvent) event;
        Assert.assertEquals(ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
            exitEvent.getType());
      }
    };
    when(dispatcher.getEventHandler()).thenReturn(eventHandler);
    Configuration conf = new Configuration();
    ContainerLaunch launch = new ContainerLaunch(distContext, conf,
        dispatcher, exec, null, container, dirsHandler, containerManager);
    String testDir = System.getProperty("test.build.data",
        "target/test-dir");
    Path pwd = new Path(testDir);
    List<Path> appDirs = new ArrayList<Path>();
    List<String> containerLogs = new ArrayList<String>();
    Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
    Path userjar = new Path("user.jar");
    List<String> lpaths = new ArrayList<String>();
    lpaths.add("userjarlink.jar");
    resources.put(userjar, lpaths);
    Path nmp = new Path(testDir);
    // First pass: default behavior — dist-cache jar goes LAST on the classpath.
    launch.sanitizeEnv(
        userSetEnv, pwd, appDirs, containerLogs, resources, nmp);
    List<String> result =
        getJarManifestClasspath(userSetEnv.get(Environment.CLASSPATH.name()));
    Assert.assertTrue(result.size() > 1);
    Assert.assertTrue(
        result.get(result.size() - 1).endsWith("userjarlink.jar"));

    //Then, with user classpath
// first — (continuation of the "Then, with user classpath" comment that the
// extraction split across chunk lines)
    userSetEnv.put(Environment.CLASSPATH_PREPEND_DISTCACHE.name(), "true");
    cId = ContainerId.newContainerId(appAttemptId, 1);
    when(container.getContainerId()).thenReturn(cId);
    launch = new ContainerLaunch(distContext, conf,
        dispatcher, exec, null, container, dirsHandler, containerManager);
    // Second pass: with CLASSPATH_PREPEND_DISTCACHE the jar goes FIRST.
    launch.sanitizeEnv(
        userSetEnv, pwd, appDirs, containerLogs, resources, nmp);
    result =
        getJarManifestClasspath(userSetEnv.get(Environment.CLASSPATH.name()));
    Assert.assertTrue(result.size() > 1);
    Assert.assertTrue(
        result.get(0).endsWith("userjarlink.jar"));
  }

  /**
   * Reads the Class-Path attribute from the manifest of the jar at
   * {@code path} and returns its whitespace-separated entries in order.
   * NOTE(review): the JarFile is never closed — leaks a file handle per call;
   * consider try-with-resources if the file's Java level permits.
   */
  private static List<String> getJarManifestClasspath(String path)
      throws Exception {
    List<String> classpath = new ArrayList<String>();
    JarFile jarFile = new JarFile(path);
    Manifest manifest = jarFile.getManifest();
    String cps = manifest.getMainAttributes().getValue("Class-Path");
    StringTokenizer cptok = new StringTokenizer(cps);
    while (cptok.hasMoreTokens()) {
      String cpentry = cptok.nextToken();
      classpath.add(cpentry);
    }
    return classpath;
  }

  /**
   * See if environment variable is forwarded using sanitizeEnv.
* @throws Exception
   */
  @Test (timeout = 60000)
  public void testContainerEnvVariables() throws Exception {
    containerManager.start();

    ContainerLaunchContext containerLaunchContext =
        recordFactory.newRecordInstance(ContainerLaunchContext.class);

    // ////// Construct the Container-id
    ApplicationId appId = ApplicationId.newInstance(0, 0);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
    // User-set values for NM-managed variables; sanitizeEnv must override them.
    Map<String, String> userSetEnv = new HashMap<String, String>();
    userSetEnv.put(Environment.CONTAINER_ID.name(), "user_set_container_id");
    userSetEnv.put(Environment.NM_HOST.name(), "user_set_NM_HOST");
    userSetEnv.put(Environment.NM_PORT.name(), "user_set_NM_PORT");
    userSetEnv.put(Environment.NM_HTTP_PORT.name(), "user_set_NM_HTTP_PORT");
    userSetEnv.put(Environment.LOCAL_DIRS.name(), "user_set_LOCAL_DIR");
    userSetEnv.put(Environment.USER.key(), "user_set_" +
        Environment.USER.key());
    userSetEnv.put(Environment.LOGNAME.name(), "user_set_LOGNAME");
    userSetEnv.put(Environment.PWD.name(), "user_set_PWD");
    userSetEnv.put(Environment.HOME.name(), "user_set_HOME");
    containerLaunchContext.setEnvironment(userSetEnv);

    // Script that dumps the actual runtime env vars to processStartFile,
    // one per line, then blocks so the container stays alive.
    File scriptFile = Shell.appendScriptExtension(tmpDir, "scriptFile");
    PrintWriter fileWriter = new PrintWriter(scriptFile);
    File processStartFile =
        new File(tmpDir, "env_vars.txt").getAbsoluteFile();
    if (Shell.WINDOWS) {
      fileWriter.println("@echo " + Environment.CONTAINER_ID.$() + "> "
          + processStartFile);
      fileWriter.println("@echo " + Environment.NM_HOST.$() + ">> "
          + processStartFile);
      fileWriter.println("@echo " + Environment.NM_PORT.$() + ">> "
          + processStartFile);
      fileWriter.println("@echo " + Environment.NM_HTTP_PORT.$() + ">> "
          + processStartFile);
      fileWriter.println("@echo " + Environment.LOCAL_DIRS.$() + ">> "
          + processStartFile);
      fileWriter.println("@echo " + Environment.USER.$() + ">> "
          + processStartFile);
      fileWriter.println("@echo " + Environment.LOGNAME.$() + ">> " +
processStartFile);
      fileWriter.println("@echo " + Environment.PWD.$() + ">> "
          + processStartFile);
      fileWriter.println("@echo " + Environment.HOME.$() + ">> "
          + processStartFile);
      for (String serviceName : containerManager.getAuxServiceMetaData()
          .keySet()) {
        fileWriter.println("@echo %" + AuxiliaryServiceHelper.NM_AUX_SERVICE
            + serviceName + "%>> " + processStartFile);
      }
      fileWriter.println("@echo " + cId + ">> " + processStartFile);
      fileWriter.println("@ping -n 100 127.0.0.1 >nul");
    } else {
      fileWriter.write("\numask 0"); // So that start file is readable by the test
      fileWriter.write("\necho $" + Environment.CONTAINER_ID.name() + " > "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.NM_HOST.name() + " >> "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.NM_PORT.name() + " >> "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.NM_HTTP_PORT.name() + " >> "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.LOCAL_DIRS.name() + " >> "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.USER.name() + " >> "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.LOGNAME.name() + " >> "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.PWD.name() + " >> "
          + processStartFile);
      fileWriter.write("\necho $" + Environment.HOME.name() + " >> "
          + processStartFile);
      for (String serviceName : containerManager.getAuxServiceMetaData()
          .keySet()) {
        fileWriter.write("\necho $" + AuxiliaryServiceHelper.NM_AUX_SERVICE
            + serviceName + " >> " + processStartFile);
      }
      // write the container's own pid, then block
      fileWriter.write("\necho $$ >> " + processStartFile);
      fileWriter.write("\nexec sleep 100");
    }
    fileWriter.close();

    // upload the script file so that the container can run it
    URL resource_alpha =
        ConverterUtils.getYarnUrlFromPath(localFS
            .makeQualified(new Path(scriptFile.getAbsolutePath())));
    LocalResource rsrc_alpha =
        recordFactory.newRecordInstance(LocalResource.class);
    rsrc_alpha.setResource(resource_alpha);
    rsrc_alpha.setSize(-1);
rsrc_alpha.setVisibility(LocalResourceVisibility.APPLICATION);
    rsrc_alpha.setType(LocalResourceType.FILE);
    rsrc_alpha.setTimestamp(scriptFile.lastModified());
    String destinationFile = "dest_file";
    Map<String, LocalResource> localResources =
        new HashMap<String, LocalResource>();
    localResources.put(destinationFile, rsrc_alpha);
    containerLaunchContext.setLocalResources(localResources);

    // set up the rest of the container
    List<String> commands = Arrays.asList(Shell.getRunScriptCommand(scriptFile));
    containerLaunchContext.setCommands(commands);
    StartContainerRequest scRequest =
        StartContainerRequest.newInstance(containerLaunchContext,
            createContainerToken(cId, Priority.newInstance(0), 0));
    List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
    list.add(scRequest);
    StartContainersRequest allRequests =
        StartContainersRequest.newInstance(list);
    containerManager.startContainers(allRequests);

    // poll up to ~20s for the script to write its start file
    int timeoutSecs = 0;
    while (!processStartFile.exists() && timeoutSecs++ < 20) {
      Thread.sleep(1000);
      LOG.info("Waiting for process start-file to be created");
    }
    Assert.assertTrue("ProcessStartFile doesn't exist!",
        processStartFile.exists());

    // Now verify the contents of the file
    List<String> localDirs = dirsHandler.getLocalDirs();
    List<String> logDirs = dirsHandler.getLogDirs();

    // expected per-application work dirs: <localDir>/usercache/<user>/appcache/<appId>
    List<Path> appDirs = new ArrayList<Path>(localDirs.size());
    for (String localDir : localDirs) {
      Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE);
      Path userdir = new Path(usersdir, user);
      Path appsdir = new Path(userdir, ContainerLocalizer.APPCACHE);
      appDirs.add(new Path(appsdir, appId.toString()));
    }
    List<String> containerLogDirs = new ArrayList<String>();
    String relativeContainerLogDir = ContainerLaunch
        .getRelativeContainerLogDir(appId.toString(), cId.toString());
    for(String logDir : logDirs){
      containerLogDirs.add(logDir + Path.SEPARATOR + relativeContainerLogDir);
    }
    BufferedReader reader =
        new BufferedReader(new FileReader(processStartFile));
Assert.assertEquals(cId.toString(), reader.readLine());
    Assert.assertEquals(context.getNodeId().getHost(), reader.readLine());
    Assert.assertEquals(String.valueOf(context.getNodeId().getPort()),
        reader.readLine());
    Assert.assertEquals(String.valueOf(HTTP_PORT), reader.readLine());
    Assert.assertEquals(StringUtils.join(",", appDirs), reader.readLine());
    Assert.assertEquals(user, reader.readLine());
    Assert.assertEquals(user, reader.readLine());
    // PWD must be one of the per-app work dirs suffixed with the container id
    String obtainedPWD = reader.readLine();
    boolean found = false;
    for (Path localDir : appDirs) {
      if (new Path(localDir, cId.toString()).toString().equals(obtainedPWD)) {
        found = true;
        break;
      }
    }
    Assert.assertTrue("Wrong local-dir found : " + obtainedPWD, found);
    Assert.assertEquals(
        conf.get(
            YarnConfiguration.NM_USER_HOME_DIR,
            YarnConfiguration.DEFAULT_NM_USER_HOME_DIR),
        reader.readLine());

    for (String serviceName : containerManager.getAuxServiceMetaData().keySet()) {
      // aux-service data round-trips through the env base64-encoded
      // NOTE(review): getBytes() uses the platform default charset — fine for
      // base64 text, but an explicit charset would be safer; confirm intent.
      Assert.assertEquals(
          containerManager.getAuxServiceMetaData().get(serviceName),
          ByteBuffer.wrap(Base64.decodeBase64(reader.readLine().getBytes())));
    }

    // the launch context's env map must carry the same sanitized values
    Assert.assertEquals(cId.toString(), containerLaunchContext
        .getEnvironment().get(Environment.CONTAINER_ID.name()));
    Assert.assertEquals(context.getNodeId().getHost(), containerLaunchContext
        .getEnvironment().get(Environment.NM_HOST.name()));
    Assert.assertEquals(String.valueOf(context.getNodeId().getPort()),
        containerLaunchContext.getEnvironment().get(Environment.NM_PORT.name()));
    Assert.assertEquals(String.valueOf(HTTP_PORT), containerLaunchContext
        .getEnvironment().get(Environment.NM_HTTP_PORT.name()));
    Assert.assertEquals(StringUtils.join(",", appDirs), containerLaunchContext
        .getEnvironment().get(Environment.LOCAL_DIRS.name()));
    Assert.assertEquals(StringUtils.join(",", containerLogDirs),
        containerLaunchContext.getEnvironment().get(Environment.LOG_DIRS.name()));
    Assert.assertEquals(user, containerLaunchContext.getEnvironment()
        .get(Environment.USER.name()));
    Assert.assertEquals(user,
containerLaunchContext.getEnvironment()
        .get(Environment.LOGNAME.name()));
    found = false;
    obtainedPWD =
        containerLaunchContext.getEnvironment().get(Environment.PWD.name());
    for (Path localDir : appDirs) {
      if (new Path(localDir, cId.toString()).toString().equals(obtainedPWD)) {
        found = true;
        break;
      }
    }
    Assert.assertTrue("Wrong local-dir found : " + obtainedPWD, found);
    Assert.assertEquals(
        conf.get(
            YarnConfiguration.NM_USER_HOME_DIR,
            YarnConfiguration.DEFAULT_NM_USER_HOME_DIR),
        containerLaunchContext.getEnvironment()
            .get(Environment.HOME.name()));

    // Get the pid of the process
    String pid = reader.readLine().trim();
    // No more lines
    Assert.assertEquals(null, reader.readLine());
    // NOTE(review): `reader` is never closed in this method — leaked handle.

    // Now test the stop functionality.

    // Assert that the process is alive
    Assert.assertTrue("Process is not alive!",
        DefaultContainerExecutor.containerIsAlive(pid));
    // Once more
    Assert.assertTrue("Process is not alive!",
        DefaultContainerExecutor.containerIsAlive(pid));

    // Now test the stop functionality.
    List<ContainerId> containerIds = new ArrayList<ContainerId>();
    containerIds.add(cId);
    StopContainersRequest stopRequest =
        StopContainersRequest.newInstance(containerIds);
    containerManager.stopContainers(stopRequest);

    BaseContainerManagerTest.waitForContainerState(containerManager, cId,
        ContainerState.COMPLETE);

    GetContainerStatusesRequest gcsRequest =
        GetContainerStatusesRequest.newInstance(containerIds);
    ContainerStatus containerStatus =
        containerManager.getContainerStatuses(gcsRequest).getContainerStatuses().get(0);
    int expectedExitCode = ContainerExitStatus.KILLED_BY_APPMASTER;
    Assert.assertEquals(expectedExitCode, containerStatus.getExitStatus());

    // Assert that the process is not alive anymore
    Assert.assertFalse("Process is still alive!",
        DefaultContainerExecutor.containerIsAlive(pid));
  }

  /** Round-trips service data through the env via AuxiliaryServiceHelper. */
  @Test (timeout = 5000)
  public void testAuxiliaryServiceHelper() throws Exception {
    Map<String, String> env = new HashMap<String, String>();
    String serviceName = "testAuxiliaryService";
    ByteBuffer bb =
ByteBuffer.wrap("testAuxiliaryService".getBytes());
    AuxiliaryServiceHelper.setServiceDataIntoEnv(serviceName, bb, env);
    Assert.assertEquals(bb,
        AuxiliaryServiceHelper.getServiceDataFromEnv(serviceName, env));
  }

  /**
   * Shared driver for the delayed/immediate kill tests: launches a script
   * that traps SIGTERM, stops the container, and (on non-Windows, delayed
   * mode) verifies the trap message was written before SIGKILL arrived.
   *
   * @param delayed true = 1s grace between SIGTERM and SIGKILL; false = none
   */
  private void internalKillTest(boolean delayed) throws Exception {
    conf.setLong(YarnConfiguration.NM_SLEEP_DELAY_BEFORE_SIGKILL_MS,
        delayed ? 1000 : 0);
    containerManager.start();

    // ////// Construct the Container-id
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
    File processStartFile =
        new File(tmpDir, "pid.txt").getAbsoluteFile();

    // setup a script that can handle sigterm gracefully
    File scriptFile = Shell.appendScriptExtension(tmpDir, "testscript");
    PrintWriter writer = new PrintWriter(new FileOutputStream(scriptFile));
    if (Shell.WINDOWS) {
      writer.println("@echo \"Running testscript for delayed kill\"");
      writer.println("@echo \"Writing pid to start file\"");
      writer.println("@echo " + cId + "> " + processStartFile);
      writer.println("@ping -n 100 127.0.0.1 >nul");
    } else {
      writer.println("#!/bin/bash\n\n");
      writer.println("echo \"Running testscript for delayed kill\"");
      writer.println("hello=\"Got SIGTERM\"");
      writer.println("umask 0");
      writer.println("trap \"echo $hello >> " + processStartFile + "\" SIGTERM");
      writer.println("echo \"Writing pid to start file\"");
      writer.println("echo $$ >> " + processStartFile);
      writer.println("while true; do\nsleep 1s;\ndone");
    }
    writer.close();
    FileUtil.setExecutable(scriptFile, true);

    ContainerLaunchContext containerLaunchContext =
        recordFactory.newRecordInstance(ContainerLaunchContext.class);

    // upload the script file so that the container can run it
    URL resource_alpha =
        ConverterUtils.getYarnUrlFromPath(localFS
            .makeQualified(new Path(scriptFile.getAbsolutePath())));
    LocalResource rsrc_alpha =
        recordFactory.newRecordInstance(LocalResource.class);
rsrc_alpha.setResource(resource_alpha);
    rsrc_alpha.setSize(-1);
    rsrc_alpha.setVisibility(LocalResourceVisibility.APPLICATION);
    rsrc_alpha.setType(LocalResourceType.FILE);
    rsrc_alpha.setTimestamp(scriptFile.lastModified());
    String destinationFile = "dest_file.sh";
    Map<String, LocalResource> localResources =
        new HashMap<String, LocalResource>();
    localResources.put(destinationFile, rsrc_alpha);
    containerLaunchContext.setLocalResources(localResources);

    // set up the rest of the container
    List<String> commands = Arrays.asList(Shell.getRunScriptCommand(scriptFile));
    containerLaunchContext.setCommands(commands);
    Priority priority = Priority.newInstance(10);
    long createTime = 1234;
    Token containerToken = createContainerToken(cId, priority, createTime);
    StartContainerRequest scRequest =
        StartContainerRequest.newInstance(containerLaunchContext,
            containerToken);
    List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
    list.add(scRequest);
    StartContainersRequest allRequests =
        StartContainersRequest.newInstance(list);
    containerManager.startContainers(allRequests);

    // poll up to ~20s for the script to write its pid file
    int timeoutSecs = 0;
    while (!processStartFile.exists() && timeoutSecs++ < 20) {
      Thread.sleep(1000);
      LOG.info("Waiting for process start-file to be created");
    }
    Assert.assertTrue("ProcessStartFile doesn't exist!",
        processStartFile.exists());

    // the recovered container status must carry the priority we launched with
    NMContainerStatus nmContainerStatus =
        containerManager.getContext().getContainers().get(cId)
            .getNMContainerStatus();
    Assert.assertEquals(priority, nmContainerStatus.getPriority());

    // Now test the stop functionality.
List<ContainerId> containerIds = new ArrayList<ContainerId>();
    containerIds.add(cId);
    StopContainersRequest stopRequest =
        StopContainersRequest.newInstance(containerIds);
    containerManager.stopContainers(stopRequest);

    BaseContainerManagerTest.waitForContainerState(containerManager, cId,
        ContainerState.COMPLETE);

    // if delayed container stop sends a sigterm followed by a sigkill
    // otherwise sigkill is sent immediately
    GetContainerStatusesRequest gcsRequest =
        GetContainerStatusesRequest.newInstance(containerIds);
    ContainerStatus containerStatus =
        containerManager.getContainerStatuses(gcsRequest)
            .getContainerStatuses().get(0);
    Assert.assertEquals(ContainerExitStatus.KILLED_BY_APPMASTER,
        containerStatus.getExitStatus());

    // Now verify the contents of the file.  Script generates a message when it
    // receives a sigterm so we look for that.  We cannot perform this check on
    // Windows, because the process is not notified when killed by winutils.
    // There is no way for the process to trap and respond.  Instead, we can
    // verify that the job object with ID matching container ID no longer exists.
if (Shell.WINDOWS || !delayed) {
      // In immediate-kill (or Windows) mode we only check the process is gone.
      Assert.assertFalse("Process is still alive!",
          DefaultContainerExecutor.containerIsAlive(cId.toString()));
    } else {
      // Delayed mode: the SIGTERM trap should have appended "Got SIGTERM".
      BufferedReader reader =
          new BufferedReader(new FileReader(processStartFile));
      boolean foundSigTermMessage = false;
      while (true) {
        String line = reader.readLine();
        if (line == null) {
          break;
        }
        if (line.contains("SIGTERM")) {
          foundSigTermMessage = true;
          break;
        }
      }
      Assert.assertTrue("Did not find sigterm message", foundSigTermMessage);
      reader.close();
    }
  }

  /** Kill with a 1s SIGTERM→SIGKILL grace period. */
  @Test (timeout = 30000)
  public void testDelayedKill() throws Exception {
    internalKillTest(true);
  }

  /** Kill with no grace period (immediate SIGKILL). */
  @Test (timeout = 30000)
  public void testImmediateKill() throws Exception {
    internalKillTest(false);
  }

  /**
   * ContainerLaunch.call() must fail cleanly (dispatching an
   * EXITED_WITH_FAILURE event) when localized resources are null.
   */
  @SuppressWarnings("rawtypes")
  @Test (timeout = 10000)
  public void testCallFailureWithNullLocalizedResources() {
    Container container = mock(Container.class);
    when(container.getContainerId()).thenReturn(ContainerId.newContainerId(
        ApplicationAttemptId.newInstance(ApplicationId.newInstance(
            System.currentTimeMillis(), 1), 1), 1));
    ContainerLaunchContext clc = mock(ContainerLaunchContext.class);
    when(clc.getCommands()).thenReturn(Collections.<String>emptyList());
    when(container.getLaunchContext()).thenReturn(clc);
    when(container.getLocalizedResources()).thenReturn(null);
    Dispatcher dispatcher = mock(Dispatcher.class);
    EventHandler eventHandler = new EventHandler() {
      public void handle(Event event) {
        Assert.assertTrue(event instanceof ContainerExitEvent);
        ContainerExitEvent exitEvent = (ContainerExitEvent) event;
        Assert.assertEquals(ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
            exitEvent.getType());
      }
    };
    when(dispatcher.getEventHandler()).thenReturn(eventHandler);
    ContainerLaunch launch = new ContainerLaunch(context, new Configuration(),
        dispatcher, exec, null, container, dirsHandler, containerManager);
    launch.call();
  }

  /**
   * Builds a valid container token for cId signed by the test context's
   * token secret manager. (Resource creation continues on the next chunk line.)
   */
  protected Token createContainerToken(ContainerId cId, Priority priority,
      long createTime) throws InvalidToken {
    Resource r = BuilderUtils.newResource(1024,
1);
    ContainerTokenIdentifier containerTokenIdentifier =
        new ContainerTokenIdentifier(cId, context.getNodeId().toString(), user,
            r, System.currentTimeMillis() + 10000L, 123, DUMMY_RM_IDENTIFIER,
            priority, createTime);
    Token containerToken =
        BuilderUtils.newContainerToken(
            context.getNodeId(),
            context.getContainerTokenSecretManager().retrievePassword(
                containerTokenIdentifier), containerTokenIdentifier);
    return containerToken;
  }

  /**
   * Test that script exists with non-zero exit code when command fails.
   * @throws IOException
   */
  @Test (timeout = 10000)
  public void testShellScriptBuilderNonZeroExitCode() throws IOException {
    ShellScriptBuilder builder = ShellScriptBuilder.create();
    builder.command(Arrays.asList(new String[] {"unknownCommand"}));
    File shellFile = Shell.appendScriptExtension(tmpDir, "testShellScriptBuilderError");
    PrintStream writer = new PrintStream(new FileOutputStream(shellFile));
    builder.write(writer);
    writer.close();
    try {
      FileUtil.setExecutable(shellFile, true);

      Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
          new String[]{shellFile.getAbsolutePath()}, tmpDir);
      try {
        shexc.execute();
        fail("builder shell command was expected to throw");
      } catch(IOException e) {
        // expected
        System.out.println("Received an expected exception: " + e.getMessage());
      }
    } finally {
      FileUtil.fullyDelete(shellFile);
    }
  }

  // Message fragment ShellScriptBuilder emits when a line exceeds the
  // Windows maximum command-line length.
  private static final String expectedMessage =
      "The command line has a length of";

  /** Windows-only: command lines at/over WINDOWS_MAX_SHELL_LENGTH are rejected. */
  @Test (timeout = 10000)
  public void testWindowsShellScriptBuilderCommand() throws IOException {
    String callCmd = "@call ";

    // Test is only relevant on Windows
    Assume.assumeTrue(Shell.WINDOWS);

    // The tests are built on assuming 8191 max command line length
    assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);

    ShellScriptBuilder builder = ShellScriptBuilder.create();

    // Basic tests: less length, exact length, max+1 length
    builder.command(Arrays.asList(
        org.apache.commons.lang.StringUtils.repeat("A", 1024)));
    builder.command(Arrays.asList(
org.apache.commons.lang.StringUtils.repeat(
            "E", Shell.WINDOWS_MAX_SHELL_LENGTH - callCmd.length())));
    try {
      builder.command(Arrays.asList(
          org.apache.commons.lang.StringUtils.repeat(
              "X", Shell.WINDOWS_MAX_SHELL_LENGTH -callCmd.length() + 1)));
      fail("longCommand was expected to throw");
    } catch(IOException e) {
      assertThat(e.getMessage(), containsString(expectedMessage));
    }

    // Composite tests, from parts: less, exact and +
    builder.command(Arrays.asList(
        org.apache.commons.lang.StringUtils.repeat("A", 1024),
        org.apache.commons.lang.StringUtils.repeat("A", 1024),
        org.apache.commons.lang.StringUtils.repeat("A", 1024)));
    // buildr.command joins the command parts with an extra space
    builder.command(Arrays.asList(
        org.apache.commons.lang.StringUtils.repeat("E", 4095),
        org.apache.commons.lang.StringUtils.repeat("E", 2047),
        org.apache.commons.lang.StringUtils.repeat("E", 2047 - callCmd.length())));
    try {
      builder.command(Arrays.asList(
          org.apache.commons.lang.StringUtils.repeat("X", 4095),
          org.apache.commons.lang.StringUtils.repeat("X", 2047),
          org.apache.commons.lang.StringUtils.repeat("X", 2048 - callCmd.length())));
      fail("long commands was expected to throw");
    } catch(IOException e) {
      assertThat(e.getMessage(), containsString(expectedMessage));
    }
  }

  /** Windows-only: env values at/over the max shell line length are rejected. */
  @Test (timeout = 10000)
  public void testWindowsShellScriptBuilderEnv() throws IOException {
    // Test is only relevant on Windows
    Assume.assumeTrue(Shell.WINDOWS);

    // The tests are built on assuming 8191 max command line length
    assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);

    ShellScriptBuilder builder = ShellScriptBuilder.create();

    // test env
    builder.env("somekey", org.apache.commons.lang.StringUtils.repeat("A", 1024));
    builder.env("somekey", org.apache.commons.lang.StringUtils.repeat(
        "A", Shell.WINDOWS_MAX_SHELL_LENGTH - ("@set somekey=").length()));
    try {
      // NOTE(review): `+ 1` here is OUTSIDE repeat(), so it string-appends
      // "1" — which still yields a max+1-length value, so the test works,
      // but presumably the intent was `...length() + 1` inside repeat().
      builder.env("somekey", org.apache.commons.lang.StringUtils.repeat(
          "A", Shell.WINDOWS_MAX_SHELL_LENGTH - ("@set somekey=").length()) + 1);
      fail("long env was expected to throw");
    } catch(IOException e) {
      assertThat(e.getMessage(), containsString(expectedMessage));
    }
  }

  /** Windows-only: mkdir lines at/over the max shell line length are rejected. */
  @Test (timeout = 10000)
  public void testWindowsShellScriptBuilderMkdir() throws IOException {
    String mkDirCmd = "@if not exist \"\" mkdir \"\"";

    // Test is only relevant on Windows
    Assume.assumeTrue(Shell.WINDOWS);

    // The tests are built on assuming 8191 max command line length
    assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);

    ShellScriptBuilder builder = ShellScriptBuilder.create();

    // test mkdir
    builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat("A", 1024)));
    builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat(
        "E", (Shell.WINDOWS_MAX_SHELL_LENGTH - mkDirCmd.length())/2)));
    try {
      builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat(
          "X", (Shell.WINDOWS_MAX_SHELL_LENGTH - mkDirCmd.length())/2 +1)));
      fail("long mkdir was expected to throw");
    } catch(IOException e) {
      assertThat(e.getMessage(), containsString(expectedMessage));
    }
  }

  /** Windows-only: symlink lines at/over the max shell line length are rejected. */
  @Test (timeout = 10000)
  public void testWindowsShellScriptBuilderLink() throws IOException {
    // Test is only relevant on Windows
    Assume.assumeTrue(Shell.WINDOWS);
    String linkCmd = "@" + Shell.getWinUtilsPath() + " symlink \"\" \"\"";

    // The tests are built on assuming 8191 max command line length
    assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);

    ShellScriptBuilder builder = ShellScriptBuilder.create();

    // test link
    builder.link(new Path(org.apache.commons.lang.StringUtils.repeat("A", 1024)),
        new Path(org.apache.commons.lang.StringUtils.repeat("B", 1024)));
    builder.link(
        new Path(org.apache.commons.lang.StringUtils.repeat(
            "E", (Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2)),
        new Path(org.apache.commons.lang.StringUtils.repeat(
            "F", (Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2)));
    try {
      builder.link(
          new Path(org.apache.commons.lang.StringUtils.repeat(
              "X", (Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2 + 1)),
          new Path(org.apache.commons.lang.StringUtils.repeat(
              "Y",
(Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2) + 1)); fail("long link was expected to throw"); } catch(IOException e) { assertThat(e.getMessage(), containsString(expectedMessage)); } } @Test public void testKillProcessGroup() throws Exception { Assume.assumeTrue(Shell.isSetsidAvailable); containerManager.start(); // Construct the Container-id ApplicationId appId = ApplicationId.newInstance(2, 2); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1); ContainerId cId = ContainerId.newContainerId(appAttemptId, 0); File processStartFile = new File(tmpDir, "pid.txt").getAbsoluteFile(); File childProcessStartFile = new File(tmpDir, "child_pid.txt").getAbsoluteFile(); // setup a script that can handle sigterm gracefully File scriptFile = Shell.appendScriptExtension(tmpDir, "testscript"); PrintWriter writer = new PrintWriter(new FileOutputStream(scriptFile)); writer.println("#!/bin/bash\n\n"); writer.println("echo \"Running testscript for forked process\""); writer.println("umask 0"); writer.println("echo $$ >> " + processStartFile); writer.println("while true;\ndo sleep 1s;\ndone > /dev/null 2>&1 &"); writer.println("echo $! 
>> " + childProcessStartFile); writer.println("while true;\ndo sleep 1s;\ndone"); writer.close(); FileUtil.setExecutable(scriptFile, true); ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); // upload the script file so that the container can run it URL resource_alpha = ConverterUtils.getYarnUrlFromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); rsrc_alpha.setResource(resource_alpha); rsrc_alpha.setSize(-1); rsrc_alpha.setVisibility(LocalResourceVisibility.APPLICATION); rsrc_alpha.setType(LocalResourceType.FILE); rsrc_alpha.setTimestamp(scriptFile.lastModified()); String destinationFile = "dest_file.sh"; Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(); localResources.put(destinationFile, rsrc_alpha); containerLaunchContext.setLocalResources(localResources); // set up the rest of the container List<String> commands = Arrays.asList(Shell.getRunScriptCommand(scriptFile)); containerLaunchContext.setCommands(commands); Priority priority = Priority.newInstance(10); long createTime = 1234; Token containerToken = createContainerToken(cId, priority, createTime); StartContainerRequest scRequest = StartContainerRequest.newInstance(containerLaunchContext, containerToken); List<StartContainerRequest> list = new ArrayList<StartContainerRequest>(); list.add(scRequest); StartContainersRequest allRequests = StartContainersRequest.newInstance(list); containerManager.startContainers(allRequests); int timeoutSecs = 0; while (!processStartFile.exists() && timeoutSecs++ < 20) { Thread.sleep(1000); LOG.info("Waiting for process start-file to be created"); } Assert.assertTrue("ProcessStartFile doesn't exist!", processStartFile.exists()); BufferedReader reader = new BufferedReader(new FileReader(processStartFile)); // Get the pid of the process String pid = reader.readLine().trim(); // No more 
lines Assert.assertEquals(null, reader.readLine()); reader.close(); reader = new BufferedReader(new FileReader(childProcessStartFile)); // Get the pid of the child process String child = reader.readLine().trim(); // No more lines Assert.assertEquals(null, reader.readLine()); reader.close(); LOG.info("Manually killing pid " + pid + ", but not child pid " + child); Shell.execCommand(new String[]{"kill", "-9", pid}); BaseContainerManagerTest.waitForContainerState(containerManager, cId, ContainerState.COMPLETE); Assert.assertFalse("Process is still alive!", DefaultContainerExecutor.containerIsAlive(pid)); List<ContainerId> containerIds = new ArrayList<ContainerId>(); containerIds.add(cId); GetContainerStatusesRequest gcsRequest = GetContainerStatusesRequest.newInstance(containerIds); ContainerStatus containerStatus = containerManager.getContainerStatuses(gcsRequest) .getContainerStatuses().get(0); Assert.assertEquals(ExitCode.FORCE_KILLED.getExitCode(), containerStatus.getExitStatus()); } }
/*
 * Copyright 2013 Dittmar Steiner
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dittmarsteiner.util;

import java.util.HashMap;
import java.util.Map;

import android.util.SparseArray;

/**
 * <p>
 * This utility class encodes and decodes HTML and XML entities and is optimized
 * for Android.<br/>
 * {@link android.text.Html Html} does not support all entities, e.g.
 * '&#8222;' (<code>&amp;bdquo;</code> or <code>&amp;#8222;</code>), and is just
 * too complex for this purpose.
 * </p>
 * <p>
 * The goal is fast conversion with a minimum memory footprint. It is best for
 * frequent use on relatively short strings such as XML or HTML text elements
 * or attribute values, so Regular Expressions are not an option.<br/>
 * It does not support streaming, which would require more overhead; wrappers
 * like {@link java.io.StringReader StringReader} for {@link String} and
 * {@link java.io.StringWriter StringWriter} for {@link StringBuilder} are
 * slower than the wrapped classes, just by putting another method call on the
 * stack.
 * </p>
 * <p>
 * The control flow is optimized for the most probable characters in Roman
 * languages, i.e. ASCII characters below 128 are expected most often.<br/>
 * The adaptation for the Android platform uses {@link SparseArray} instead of
 * {@link java.util.Map Map&lt;Integer, String&gt;}.<br/>
 * <i style="color: green;">Feel free to change the code for non-Android
 * environments!</i>
 * </p>
 *
 * <b>License:</b><br/>
 * <span style="padding-left: 3em;">Apache 2.0</span>
 *
 * @version 1.0
 * @author <a href="mailto:dittmar.steiner@gmail.com">Dittmar Steiner</a>
 */
public class Entities {

    /**
     * Initial capacity for both lookup tables. The actual entity count is 252,
     * so size + 1 never reaches the load factor limit of the {@link HashMap}
     * and lookups stay fast. (255 looks nicer and costs only two unused
     * <code>null</code> references.)
     */
    private static final int mapSize = 0xFF;

    /**
     * Code points of the five predefined XML characters plus ';', held as
     * named int constants for direct switch/compare use in the hot loops
     * (optimized for the probability <code>char &lt; 128</code>).
     */
    private static final int lt = '<', gt = '>', amp = '&', quot = '"',
            apos = '\'', semicolon = ';';

    /**
     * The corresponding predefined XML entity strings, kept as constants so
     * the frequent ASCII path never touches {@link #encodeMap}.
     */
    private static final String ltEnt = "&lt;", gtEnt = "&gt;",
            ampEnt = "&amp;", quotEnt = "&quot;", aposEnt = "&apos;";

    /**
     * Maps a code point to its named entity. Contains all codes and entities
     * from <a href="http://www.w3.org/2003/entities/2007xml/unicode.xml"
     * >http://www.w3.org/2003/entities/2007xml/unicode.xml</a>
     */
    private static final SparseArray<String> encodeMap =
            new SparseArray<String>(mapSize);

    // populate the code point -> entity table once at class load
    static {
        // predefined XML entities
        encodeMap.put(lt, ltEnt);
        encodeMap.put(gt, gtEnt);
        encodeMap.put(amp, ampEnt);
        encodeMap.put(quot, quotEnt);
        encodeMap.put(apos, aposEnt);

        // HTML 4.01 entities
        encodeMap.put(193, "&Aacute;"); encodeMap.put(225, "&aacute;");
        encodeMap.put(194, "&Acirc;"); encodeMap.put(226, "&acirc;");
        encodeMap.put(180, "&acute;");
        encodeMap.put(198, "&AElig;"); encodeMap.put(230, "&aelig;");
        encodeMap.put(192, "&Agrave;"); encodeMap.put(224, "&agrave;");
        encodeMap.put(8501, "&alefsym;");
        encodeMap.put(913, "&Alpha;"); encodeMap.put(945, "&alpha;");
        encodeMap.put(8743, "&and;"); encodeMap.put(8736, "&ang;");
        encodeMap.put(197, "&Aring;"); encodeMap.put(229, "&aring;");
        encodeMap.put(8776, "&asymp;");
        encodeMap.put(195, "&Atilde;"); encodeMap.put(227, "&atilde;");
        encodeMap.put(196, "&Auml;"); encodeMap.put(228, "&auml;");
        encodeMap.put(8222, "&bdquo;");
        encodeMap.put(914, "&Beta;"); encodeMap.put(946, "&beta;");
        encodeMap.put(166, "&brvbar;"); encodeMap.put(8226, "&bull;");
        encodeMap.put(8745, "&cap;");
        encodeMap.put(199, "&Ccedil;"); encodeMap.put(231, "&ccedil;");
        encodeMap.put(184, "&cedil;"); encodeMap.put(162, "&cent;");
        encodeMap.put(935, "&Chi;"); encodeMap.put(967, "&chi;");
        encodeMap.put(710, "&circ;"); encodeMap.put(9827, "&clubs;");
        encodeMap.put(8773, "&cong;"); encodeMap.put(169, "&copy;");
        encodeMap.put(8629, "&crarr;"); encodeMap.put(8746, "&cup;");
        encodeMap.put(164, "&curren;");
        encodeMap.put(8224, "&dagger;"); encodeMap.put(8225, "&Dagger;");
        encodeMap.put(8595, "&darr;"); encodeMap.put(8659, "&dArr;");
        encodeMap.put(176, "&deg;");
        encodeMap.put(916, "&Delta;"); encodeMap.put(948, "&delta;");
        encodeMap.put(9830, "&diams;"); encodeMap.put(247, "&divide;");
        encodeMap.put(201, "&Eacute;"); encodeMap.put(233, "&eacute;");
        encodeMap.put(202, "&Ecirc;"); encodeMap.put(234, "&ecirc;");
        encodeMap.put(200, "&Egrave;"); encodeMap.put(232, "&egrave;");
        encodeMap.put(8709, "&empty;");
        encodeMap.put(8195, "&emsp;"); encodeMap.put(8194, "&ensp;");
        encodeMap.put(917, "&Epsilon;"); encodeMap.put(949, "&epsilon;");
        encodeMap.put(8801, "&equiv;");
        encodeMap.put(919, "&Eta;"); encodeMap.put(951, "&eta;");
        encodeMap.put(208, "&ETH;"); encodeMap.put(240, "&eth;");
        encodeMap.put(203, "&Euml;"); encodeMap.put(235, "&euml;");
        encodeMap.put(8364, "&euro;"); encodeMap.put(8707, "&exist;");
        encodeMap.put(402, "&fnof;"); encodeMap.put(8704, "&forall;");
        encodeMap.put(189, "&frac12;"); encodeMap.put(188, "&frac14;");
        encodeMap.put(190, "&frac34;"); encodeMap.put(8260, "&frasl;");
        encodeMap.put(915, "&Gamma;"); encodeMap.put(947, "&gamma;");
        encodeMap.put(8805, "&ge;");
        encodeMap.put(8596, "&harr;"); encodeMap.put(8660, "&hArr;");
        encodeMap.put(9829, "&hearts;"); encodeMap.put(8230, "&hellip;");
        encodeMap.put(205, "&Iacute;"); encodeMap.put(237, "&iacute;");
        encodeMap.put(206, "&Icirc;"); encodeMap.put(238, "&icirc;");
        encodeMap.put(161, "&iexcl;");
        encodeMap.put(204, "&Igrave;"); encodeMap.put(236, "&igrave;");
        encodeMap.put(8465, "&image;"); encodeMap.put(8734, "&infin;");
        encodeMap.put(8747, "&int;");
        encodeMap.put(921, "&Iota;"); encodeMap.put(953, "&iota;");
        encodeMap.put(191, "&iquest;"); encodeMap.put(8712, "&isin;");
        encodeMap.put(207, "&Iuml;"); encodeMap.put(239, "&iuml;");
        encodeMap.put(922, "&Kappa;"); encodeMap.put(954, "&kappa;");
        encodeMap.put(923, "&Lambda;"); encodeMap.put(955, "&lambda;");
        encodeMap.put(9001, "&lang;"); encodeMap.put(171, "&laquo;");
        encodeMap.put(8592, "&larr;"); encodeMap.put(8656, "&lArr;");
        encodeMap.put(8968, "&lceil;"); encodeMap.put(8220, "&ldquo;");
        encodeMap.put(8804, "&le;"); encodeMap.put(8970, "&lfloor;");
        encodeMap.put(8727, "&lowast;"); encodeMap.put(9674, "&loz;");
        encodeMap.put(8206, "&lrm;"); encodeMap.put(8249, "&lsaquo;");
        encodeMap.put(8216, "&lsquo;"); encodeMap.put(175, "&macr;");
        encodeMap.put(8212, "&mdash;"); encodeMap.put(181, "&micro;");
        encodeMap.put(183, "&middot;"); encodeMap.put(8722, "&minus;");
        encodeMap.put(924, "&Mu;"); encodeMap.put(956, "&mu;");
        encodeMap.put(8711, "&nabla;"); encodeMap.put(160, "&nbsp;");
        encodeMap.put(8211, "&ndash;"); encodeMap.put(8800, "&ne;");
        encodeMap.put(8715, "&ni;"); encodeMap.put(172, "&not;");
        encodeMap.put(8713, "&notin;"); encodeMap.put(8836, "&nsub;");
        encodeMap.put(209, "&Ntilde;"); encodeMap.put(241, "&ntilde;");
        encodeMap.put(925, "&Nu;"); encodeMap.put(957, "&nu;");
        encodeMap.put(211, "&Oacute;"); encodeMap.put(243, "&oacute;");
        encodeMap.put(212, "&Ocirc;"); encodeMap.put(244, "&ocirc;");
        encodeMap.put(338, "&OElig;"); encodeMap.put(339, "&oelig;");
        encodeMap.put(210, "&Ograve;"); encodeMap.put(242, "&ograve;");
        encodeMap.put(8254, "&oline;");
        encodeMap.put(937, "&Omega;"); encodeMap.put(969, "&omega;");
        encodeMap.put(927, "&Omicron;"); encodeMap.put(959, "&omicron;");
        encodeMap.put(8853, "&oplus;"); encodeMap.put(8744, "&or;");
        encodeMap.put(170, "&ordf;"); encodeMap.put(186, "&ordm;");
        encodeMap.put(216, "&Oslash;"); encodeMap.put(248, "&oslash;");
        encodeMap.put(213, "&Otilde;"); encodeMap.put(245, "&otilde;");
        encodeMap.put(8855, "&otimes;");
        encodeMap.put(214, "&Ouml;"); encodeMap.put(246, "&ouml;");
        encodeMap.put(182, "&para;"); encodeMap.put(8706, "&part;");
        encodeMap.put(8240, "&permil;"); encodeMap.put(8869, "&perp;");
        encodeMap.put(934, "&Phi;"); encodeMap.put(966, "&phi;");
        encodeMap.put(928, "&Pi;"); encodeMap.put(960, "&pi;");
        encodeMap.put(982, "&piv;");
        encodeMap.put(177, "&plusmn;"); encodeMap.put(163, "&pound;");
        encodeMap.put(8242, "&prime;"); encodeMap.put(8243, "&Prime;");
        encodeMap.put(8719, "&prod;"); encodeMap.put(8733, "&prop;");
        encodeMap.put(936, "&Psi;"); encodeMap.put(968, "&psi;");
        encodeMap.put(8730, "&radic;"); encodeMap.put(9002, "&rang;");
        encodeMap.put(187, "&raquo;");
        encodeMap.put(8594, "&rarr;"); encodeMap.put(8658, "&rArr;");
        encodeMap.put(8969, "&rceil;"); encodeMap.put(8221, "&rdquo;");
        encodeMap.put(8476, "&real;"); encodeMap.put(174, "&reg;");
        encodeMap.put(8971, "&rfloor;");
        encodeMap.put(929, "&Rho;"); encodeMap.put(961, "&rho;");
        encodeMap.put(8207, "&rlm;"); encodeMap.put(8250, "&rsaquo;");
        encodeMap.put(8217, "&rsquo;"); encodeMap.put(8218, "&sbquo;");
        encodeMap.put(352, "&Scaron;"); encodeMap.put(353, "&scaron;");
        encodeMap.put(8901, "&sdot;"); encodeMap.put(167, "&sect;");
        encodeMap.put(173, "&shy;");
        encodeMap.put(931, "&Sigma;"); encodeMap.put(963, "&sigma;");
        encodeMap.put(962, "&sigmaf;"); encodeMap.put(8764, "&sim;");
        encodeMap.put(9824, "&spades;");
        encodeMap.put(8834, "&sub;"); encodeMap.put(8838, "&sube;");
        encodeMap.put(8721, "&sum;");
        encodeMap.put(185, "&sup1;"); encodeMap.put(178, "&sup2;");
        encodeMap.put(179, "&sup3;");
        encodeMap.put(8835, "&sup;"); encodeMap.put(8839, "&supe;");
        encodeMap.put(223, "&szlig;");
        encodeMap.put(932, "&Tau;"); encodeMap.put(964, "&tau;");
        encodeMap.put(8756, "&there4;");
        encodeMap.put(920, "&Theta;"); encodeMap.put(952, "&theta;");
        encodeMap.put(977, "&thetasym;"); encodeMap.put(8201, "&thinsp;");
        encodeMap.put(222, "&THORN;"); encodeMap.put(254, "&thorn;");
        encodeMap.put(732, "&tilde;"); encodeMap.put(215, "&times;");
        encodeMap.put(8482, "&trade;");
        encodeMap.put(218, "&Uacute;"); encodeMap.put(250, "&uacute;");
        encodeMap.put(8593, "&uarr;"); encodeMap.put(8657, "&uArr;");
        encodeMap.put(219, "&Ucirc;"); encodeMap.put(251, "&ucirc;");
        encodeMap.put(217, "&Ugrave;"); encodeMap.put(249, "&ugrave;");
        encodeMap.put(168, "&uml;");
        encodeMap.put(978, "&upsih;");
        encodeMap.put(933, "&Upsilon;"); encodeMap.put(965, "&upsilon;");
        encodeMap.put(220, "&Uuml;"); encodeMap.put(252, "&uuml;");
        encodeMap.put(8472, "&weierp;");
        encodeMap.put(926, "&Xi;"); encodeMap.put(958, "&xi;");
        encodeMap.put(221, "&Yacute;"); encodeMap.put(253, "&yacute;");
        encodeMap.put(165, "&yen;");
        encodeMap.put(255, "&yuml;"); encodeMap.put(376, "&Yuml;");
        encodeMap.put(918, "&Zeta;"); encodeMap.put(950, "&zeta;");
        encodeMap.put(8205, "&zwj;"); encodeMap.put(8204, "&zwnj;");
    }

    /**
     * The reverse view of {@link #encodeMap}: entity string (including '&amp;'
     * and ';') to code point.
     */
    private static final Map<String, Integer> decodeMap =
            new HashMap<String, Integer>(mapSize);

    // transfer key/values from encodeMap, swapping keys and values
    static {
        final int max = encodeMap.size();
        for (int index = 0; index < max; index++) {
            decodeMap.put(encodeMap.valueAt(index), encodeMap.keyAt(index));
        }
    }

    /**
     * Encodes the five predefined XML characters and replaces every character
     * &gt; 127 that has a known HTML 4.01 named entity, e.g. '&#196;' becomes
     * <code>&amp;Auml;</code>.
     * <p>
     * NOTE(review): characters &gt; 127 <i>without</i> a named entity are kept
     * as-is; they are <b>not</b> numerically escaped to
     * <code>&amp;#&lt;integer&gt;;</code> (see the <code>html</code> branch of
     * {@link #encode(String, boolean, boolean)}). Use
     * {@link #encodeAsciiXml(String)} when a pure-ASCII result is required.
     * </p>
     *
     * @param str
     *            the String to encode; <code>null</code> yields ""
     * @return the encoded String
     *
     * @see #encodeXml(String)
     */
    public static String encodeHtml(String str) {
        return encode(str, false, false);
    }

    /**
     * Encodes all basic XML characters to entities, e.g. <code>'&lt;'</code>
     * to <code>&amp;lt;</code>. All characters &gt; 127 are not encoded
     * (Unicode stays as is).<br/>
     * <i>Except</i> the soft hyphen (<i>shy</i>, U+00AD), which is always
     * encoded as <code>&amp;#173;</code> just to keep it visible.<br/>
     * (Hyphenation matters, especially for small screens!)
     *
     * @param str
     *            the String to encode; <code>null</code> yields ""
     * @return the encoded Unicode-String
     */
    public static String encodeXml(String str) {
        return encode(str, true, false);
    }

    /**
     * Just like {@link #encodeXml(String)}, plus all characters &gt; 127 are
     * escaped in the form <code>'&amp;#&lt;integer&gt;;'</code>.
     *
     * @param str
     *            the String to encode; <code>null</code> yields ""
     * @return the encoded ASCII-String
     */
    public static String encodeAsciiXml(String str) {
        return encode(str, true, true);
    }

    /**
     * Does the actual conversion for all three public encode variants. All
     * conditions and conversions are inlined to avoid method calls in the hot
     * loop.
     *
     * @param str
     *            the input; <code>null</code> yields ""
     * @param xml
     *            true for XML output, false for HTML (named-entity) output
     * @param ascii
     *            true to numerically escape every char &gt; 127
     * @return the encoded String
     */
    private static String encode(String str, boolean xml, boolean ascii) {
        if (str == null) {
            return "";
        }

        // size * 1.2 is just a guess, not based on empirical data
        final StringBuilder builder =
                new StringBuilder((int)(str.length() * 1.2));

        for (int i = 0; i < str.length(); ++i) {
            char c = str.charAt(i);

            // the five predefined XML entities apply to every mode
            if (c < 128) {
                switch (c) {
                case lt: builder.append(ltEnt); continue;
                case gt: builder.append(gtEnt); continue;
                case amp: builder.append(ampEnt); continue;
                case quot: builder.append(quotEnt); continue;
                case apos: builder.append(aposEnt); continue;
                }
            }

            // xml mode: pass chars through, except the escaping rules above
            if (xml) {
                if (c < 128) {
                    builder.append(c);
                } else {
                    if (!ascii) {
                        if (c != 0x00AD) {
                            builder.append(c);
                        } else {
                            // we always escape the shy char (0x00AD or #173)
                            // to make it visible
                            escape(c, builder);
                        }
                    } else {
                        escape(c, builder);
                    }
                }
                continue;
            }

            // html mode: look up a named entity (skipped when ascii output
            // was requested)
            String entity = !ascii ? encodeMap.get((int)c) : null;
            if (entity == null) {
                if (c < 128) {
                    builder.append(c);
                } else {
                    if (ascii) {
                        escape(c, builder);
                    } else {
                        // no named entity known: keep the raw Unicode char
                        builder.append(c);
                    }
                }
            } else {
                builder.append(entity);
            }
        }

        return builder.toString();
    }

    /**
     * The only little helper method; numerically escapes a code point, e.g.
     * '&#666;' to '&amp;#666;'.
     *
     * @param c
     *            the code point to escape
     * @param builder
     *            the output the escape sequence is appended to
     */
    private static void escape(int c, StringBuilder builder) {
        builder.append("&#");
        builder.append(Integer.toString(c));
        builder.append(";");
    }

    /**
     * Decodes named and numeric entities, e.g. <code>&amp;Auml;</code>,
     * <code>&amp;#196;</code> and <code>&amp;#xC4;</code> all become &#196;.
     * Malformed or unknown entities are copied through unchanged.
     *
     * @param encoded
     *            XML or HTML String to decode (must not be null)
     * @return the decoded Unicode-String
     */
    public static String decode(final String encoded) {
        if (encoded.indexOf(amp) == -1) {
            // no StringBuilder allocation required
            return encoded;
        }

        final StringBuilder builder = new StringBuilder(encoded.length());
        for (int i = 0; i < encoded.length(); ++i) {
            final char c = encoded.charAt(i);
            if (c != amp) {
                builder.append(c);
            } else {
                final int sOffset = encoded.indexOf(semicolon, i + 1);
                // plausibility window for the candidate entity:
                // min length is 4 like "&lt;" (';' at i + 3)
                // max length: "&thetasym;" and "&#x000e4;" are possible,
                // otherwise this would just be a sentence.
                if (sOffset < i + 3 || sOffset >= i + 10) {
                    builder.append(c);
                    continue;
                }

                String entity = encoded.substring(i, sOffset + 1);
                // indexOf(amp) > 0 detects a second '&' after the leading one;
                // whitespace of any kind also disqualifies the candidate
                if (
                    entity.indexOf(amp) > 0 ||
                    entity.indexOf(' ') > -1 ||
                    entity.indexOf('\t') > -1 ||
                    entity.indexOf('\f') > -1 ||
                    entity.indexOf('\r') > -1 ||
                    entity.indexOf('\n') > -1
                ) {
                    // not a valid entity name
                    builder.append(c);
                    continue;
                }

                Integer code;
                if (entity.charAt(1) == '#') {
                    try {
                        if (entity.charAt(2) == 'x') {
                            // hexadecimal form, example: &#xAD;
                            code = Integer.valueOf(
                                    entity.substring(3, entity.length() - 1)
                                    , 16);
                        } else {
                            // decimal form, example: &#173;
                            code = Integer.valueOf(
                                    entity.substring(2, entity.length() - 1)
                                    );
                        }
                    } catch (NumberFormatException e) {
                        // not a number: fall through and copy verbatim below
                        code = null;
                    } catch (StringIndexOutOfBoundsException e) {
                        // degenerate "&#;" style input
                        code = null;
                    }
                } else {
                    code = (Integer)decodeMap.get(entity);
                }

                if (code == null) {
                    // unknown entity: emit it verbatim
                    builder.append(entity);
                } else {
                    builder.append((char) code.intValue());
                }
                // skip the scan past the consumed entity (loop adds 1)
                i = sOffset;
            }
        }

        return builder.toString();
    }
}
// ========================================================================
// Copyright 2011 NEXCOM Systems
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.cipango.sip.security;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;

import org.cipango.server.SipConnector;
import org.cipango.server.SipRequest;
import org.cipango.servlet.SipServletHolder;
import org.eclipse.jetty.security.UserDataConstraint;
import org.eclipse.jetty.server.UserIdentity;

/**
 * Security handler that enforces declarative security constraints
 * ({@link ConstraintMapping}s) on SIP requests, mirroring Jetty's
 * HTTP ConstraintSecurityHandler. Constraints are pre-processed per
 * servlet name and SIP method into {@link RoleInfo} lookup tables at
 * start time.
 */
public class ConstraintSecurityHandler extends SipSecurityHandler<RoleInfo>
{
    // copy-on-write collections: read-heavy at request time, written only
    // during configuration
    private final List<ConstraintMapping> _constraintMappings= new CopyOnWriteArrayList<ConstraintMapping>();
    private final Set<String> _roles = new CopyOnWriteArraySet<String>();
    private boolean _strict = true;
    // servlet name (or null for "any servlet") -> (SIP method, or null for
    // "any method") -> combined RoleInfo; built in doStart()
    private Map<String, Map<String, RoleInfo>> _servletsMap = new HashMap<String, Map<String,RoleInfo>>();

    /* ------------------------------------------------------------ */
    /** Get the strict mode.
     * @return true if the security handler is running in strict mode.
     */
    public boolean isStrict()
    {
        return _strict;
    }

    /* ------------------------------------------------------------ */
    /** Set the strict mode of the security handler.
     * <p>
     * When in strict mode (the default), the full servlet specification
     * will be implemented.
     * If not in strict mode, some additional flexibility in configuration
     * is allowed:<ul>
     * <li>All users do not need to have a role defined in the deployment descriptor
     * <li>The * role in a constraint applies to ANY role rather than all roles defined in
     * the deployment descriptor.
     * </ul>
     *
     * @param strict the strict to set
     * @see #setRoles(Set)
     * @see #setConstraintMappings(List, Set)
     */
    public void setStrict(boolean strict)
    {
        _strict = strict;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the constraintMappings.
     */
    public List<ConstraintMapping> getConstraintMappings()
    {
        return _constraintMappings;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the set of known role names.
     */
    public Set<String> getRoles()
    {
        return _roles;
    }

    /* ------------------------------------------------------------ */
    /**
     * Process the constraints following the combining rules in Servlet 3.0 EA
     * spec section 13.7.1 Note that much of the logic is in the RoleInfo class.
     *
     * @param constraintMappings
     *            The constraintMappings to set, from which the set of known roles
     *            is determined.
     */
    public void setConstraintMappings(List<ConstraintMapping> constraintMappings)
    {
        setConstraintMappings(constraintMappings,null);
    }

    /* ------------------------------------------------------------ */
    /**
     * Process the constraints following the combining rules in Servlet 3.0 EA
     * spec section 13.7.1 Note that much of the logic is in the RoleInfo class.
     *
     * @param constraintMappings
     *            The constraintMappings to set.
     * @param roles The known roles (or null to determine them from the mappings)
     */
    public void setConstraintMappings(List<ConstraintMapping> constraintMappings, Set<String> roles)
    {
        if (isStarted())
            throw new IllegalStateException("Started");

        _constraintMappings.clear();
        _constraintMappings.addAll(constraintMappings);

        if (roles==null)
        {
            // derive the role set from the mappings, ignoring the "*" wildcard
            roles = new HashSet<String>();
            for (ConstraintMapping cm : constraintMappings)
            {
                String[] cmr = cm.getConstraint().getRoles();
                if (cmr!=null)
                {
                    for (String r : cmr)
                        if (!"*".equals(r))
                            roles.add(r);
                }
            }
        }
        setRoles(roles);
    }

    /* ------------------------------------------------------------ */
    /**
     * Set the known roles.
     * This may be overridden by a subsequent call to {@link #setConstraintMappings(ConstraintMapping[])} or
     * {@link #setConstraintMappings(List, Set)}.
     * @see #setStrict(boolean)
     * @param roles The known roles (or null to determine them from the mappings)
     */
    public void setRoles(Set<String> roles)
    {
        if (isStarted())
            throw new IllegalStateException("Started");

        _roles.clear();
        _roles.addAll(roles);
    }

    /* ------------------------------------------------------------ */
    /**
     * Adds one mapping and registers any roles it declares.
     * @see org.eclipse.jetty.security.ConstraintAware#addConstraintMapping(org.eclipse.jetty.security.ConstraintMapping)
     */
    public void addConstraintMapping(ConstraintMapping mapping)
    {
        _constraintMappings.add(mapping);
        if (mapping.getConstraint()!=null && mapping.getConstraint().getRoles()!=null)
            for (String role : mapping.getConstraint().getRoles())
                addRole(role);
    }

    /* ------------------------------------------------------------ */
    /**
     * @see org.eclipse.jetty.security.ConstraintAware#addRole(java.lang.String)
     */
    public void addRole(String role)
    {
        _roles.add(role);
    }

    /**
     * Builds the per-servlet, per-method {@link RoleInfo} lookup table from
     * the configured mappings before the handler starts serving requests.
     */
    @Override
    protected void doStart() throws Exception
    {
        _servletsMap.clear();
        if (_constraintMappings!=null)
        {
            for (ConstraintMapping mapping : _constraintMappings)
            {
                processConstraintMapping(mapping);
            }
        }
        super.doStart();
    }

    /**
     * Drops all computed and configured security state on stop.
     */
    @Override
    protected void doStop() throws Exception
    {
        _servletsMap.clear();
        _constraintMappings.clear();
        _roles.clear();
        super.doStop();
    }

    /**
     * Looks up the {@link RoleInfo} applying to the target servlet and the
     * request's SIP method. Servlet-specific entries take precedence over the
     * null (any-servlet) entries; within a servlet, the method-specific entry
     * falls back to the null (any-method) entry.
     * Returns null when no constraint applies.
     */
    @Override
    protected RoleInfo prepareConstraintInfo(SipServletHolder holder, SipRequest request)
    {
        Map<String, RoleInfo> mappings = _servletsMap.get(holder.getName());
        // FIXME merge in case method && servlets
        if (mappings != null)
        {
            String method = request.getMethod();
            RoleInfo roleInfo = mappings.get(method);
            if (roleInfo == null)
                roleInfo = mappings.get(null);
            return roleInfo;
        }
        mappings = _servletsMap.get(null);

        if (mappings != null)
            return mappings.get(request.getMethod());
        return null;
    }

    /**
     * Checks the transport-guarantee (user-data) constraint.
     * NOTE(review): Integral/Confidential enforcement is disabled — the
     * method currently always returns true unless the constraint is
     * forbidden (see the commented-out TODO below), and the looked-up
     * connector is unused until that code is restored.
     */
    @Override
    protected boolean checkUserDataPermissions(SipServletHolder holder, SipRequest request, RoleInfo constraintInfo)
        throws IOException
    {
        if (constraintInfo == null)
            return true;

        if (constraintInfo.isForbidden())
            return false;

        UserDataConstraint dataConstraint = constraintInfo.getUserDataConstraint();
        if (dataConstraint == null || dataConstraint == UserDataConstraint.None)
        {
            return true;
        }
        SipConnector connector = null;
        if (request.getConnection() != null)
            connector = request.getConnection().getConnector();

        return true;
        /* TODO
        if (dataConstraint == UserDataConstraint.Integral)
        {
            if (connector.isIntegral(request))
                return true;
            if (connector.getConfidentialPort() > 0)
            {
                String url = connector.getIntegralScheme() + "://" + request.getServerName() + ":" + connector.getIntegralPort() + request.getRequestURI();
                if (request.getQueryString() != null)
                    url += "?" + request.getQueryString();
                response.setContentLength(0);
                response.sendRedirect(url);
            }
            else
                response.sendError(Response.SC_FORBIDDEN,"!Integral");

            request.setHandled(true);
            return false;
        }
        else if (dataConstraint == UserDataConstraint.Confidential)
        {
            if (connector.isConfidential(request))
                return true;

            if (connector.getConfidentialPort() > 0)
            {
                String url = connector.getConfidentialScheme() + "://" + request.getServerName() + ":" + connector.getConfidentialPort() + request.getRequestURI();
                if (request.getQueryString() != null)
                    url += "?" + request.getQueryString();
                response.setContentLength(0);
                response.sendRedirect(url);
            }
            else
                response.sendError(Response.SC_FORBIDDEN,"!Confidential");

            request.setHandled(true);
            return false;
        }
        else
        {
            throw new IllegalArgumentException("Invalid dataConstraint value: " + dataConstraint);
        }
        */
    }

    /**
     * @return true when the matched constraint requires authentication
     *         (no constraint means no mandatory auth).
     */
    @Override
    protected boolean isAuthMandatory(SipRequest baseRequest, RoleInfo constraintInfo)
    {
        if (constraintInfo == null)
            return false;
        return constraintInfo.isChecked();
    }

    /**
     * Checks whether the authenticated user may access the resource: any
     * unchecked constraint passes, the any-role wildcard passes, otherwise
     * the user must hold at least one of the constraint's roles.
     */
    @Override
    protected boolean checkSipResourcePermissions(SipServletHolder holder, SipRequest request, RoleInfo constraintInfo, UserIdentity userIdentity)
        throws IOException
    {
        if (constraintInfo == null)
        {
            return true;
        }

        if (!constraintInfo.isChecked())
        {
            return true;
        }

        if (constraintInfo.isAnyRole() /*&& request.getAuthType()!=null*/)
            return true;

        for (String role : constraintInfo.getRoles())
        {
            if (userIdentity.isUserInRole(role, null))
                return true;
        }
        return false;
    }

    /**
     * @return the constraint's proxy-mode flag; defaults to true when no
     *         constraint matched. (Proxy-mode semantics are defined by
     *         {@link RoleInfo} — presumably whether the request may be
     *         proxied without authentication; verify against RoleInfo.)
     */
    @Override
    protected boolean isProxyMode(SipRequest baseRequest, RoleInfo constraintInfo)
    {
        if (constraintInfo == null)
            return true;
        return constraintInfo.isProxyMode();
    }

    /**
     * Expands one mapping into (servletName, method) cells; a null servlet
     * name or null method list means "applies to all".
     */
    protected void processConstraintMapping(ConstraintMapping mapping)
    {
        if (mapping.getServletNames() == null)
        {
            for (String method : mapping.getMethods())
                processMapping(null, method, mapping.getConstraint());
        }
        else
        {
            for (String servletName : mapping.getServletNames())
            {
                if (mapping.getMethods() == null)
                    processMapping(servletName, null, mapping.getConstraint());
                else
                {
                    for (String method : mapping.getMethods())
                        processMapping(servletName, method, mapping.getConstraint());
                }
            }
        }
    }

    /**
     * Combines one constraint into the RoleInfo for (servletName, method)
     * following the Servlet 3.0 combining rules: a forbidden any-method
     * entry wins over everything; an any-method (null) constraint is also
     * folded into every existing method-specific entry.
     */
    private void processMapping(String servletName, String method, Constraint constraint)
    {
        Map<String, RoleInfo> mappings = (Map<String, RoleInfo>)_servletsMap.get(servletName);
        if (mappings == null)
        {
            mappings = new HashMap<String, RoleInfo>();
            _servletsMap.put(servletName,mappings);
        }
        RoleInfo allMethodsRoleInfo = mappings.get(null);
        // an already-forbidden "all methods" entry cannot be relaxed
        if (allMethodsRoleInfo != null && allMethodsRoleInfo.isForbidden())
            return;
        RoleInfo roleInfo = mappings.get(method);
        if (roleInfo == null)
        {
            roleInfo = new RoleInfo();
            mappings.put(method,roleInfo);
            if (allMethodsRoleInfo != null)
            {
                // start from the combined any-method constraints
                roleInfo.combine(allMethodsRoleInfo);
            }
        }
        if (roleInfo.isForbidden())
            return;
        boolean forbidden = constraint.isForbidden();
        roleInfo.setForbidden(forbidden);
        if (forbidden)
        {
            if (method == null)
            {
                // forbidden for all methods: collapse the table to one entry
                mappings.clear();
                mappings.put(null,roleInfo);
            }
        }
        else
        {
            UserDataConstraint userDataConstraint = UserDataConstraint.get(constraint.getDataConstraint());
            roleInfo.setUserDataConstraint(userDataConstraint);
            roleInfo.setProxyMode(constraint.isProxyMode());

            boolean checked = constraint.getAuthenticate();
            roleInfo.setChecked(checked);
            if (roleInfo.isChecked())
            {
                if (constraint.isAnyRole())
                {
                    if (_strict)
                    {
                        // * means "all defined roles"
                        for (String role : _roles)
                            roleInfo.addRole(role);
                    }
                    else
                        // * means any role
                        roleInfo.setAnyRole(true);
                }
                else
                {
                    String[] newRoles = constraint.getRoles();
                    for (String role : newRoles)
                    {
                        if (_strict && !_roles.contains(role))
                            throw new IllegalArgumentException("Attempt to use undeclared role: " + role + ", known roles: " + _roles);
                        roleInfo.addRole(role);
                    }
                }
            }
            if (method == null)
            {
                // an any-method constraint also constrains every
                // method-specific entry already present
                for (Map.Entry<String, RoleInfo> entry : mappings.entrySet())
                {
                    if (entry.getKey() != null)
                    {
                        RoleInfo specific = entry.getValue();
                        specific.combine(roleInfo);
                    }
                }
            }
        }
    }

    /**
     * Dumps this handler's beans, roles and constraint table for diagnostics.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void dump(Appendable out,String indent) throws IOException
    {
        dumpThis(out);
        dump(out,indent,Arrays.asList(getBeans(),Collections.singleton(_roles),_servletsMap.entrySet()));
    }
}
/*
 * Copyright (C) 2007-2008 JVending Masa
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jvending.masa.plugin.dx;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.codehaus.plexus.util.IOUtil;
import org.jvending.masa.CommandExecutor;
import org.jvending.masa.ExecutionException;
import org.jvending.masa.MasaUtil;

import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

/**
 * Runs the Android {@code dx} tool over the project's classes and dependencies,
 * producing {@code classes.dex} in the build directory. All dependency jars
 * (except the Android platform jar itself) plus the project's own jar are first
 * unpacked into {@code target/android-classes}, which is then fed to {@code dx}.
 *
 * @goal dx
 * @phase process-classes
 * @description
 */
public class DxMojo
    extends AbstractMojo
{
    /**
     * The maven project.
     *
     * @parameter expression="${project}"
     */
    public MavenProject project;

    /**
     * The current Maven session.
     *
     * @parameter expression="${session}"
     */
    public MavenSession session;

    /**
     * Helper used to attach the dexed jar as a project artifact.
     *
     * @component
     */
    private MavenProjectHelper mavenProjectHelper;

    /**
     * Extra JVM Arguments
     *
     * @parameter
     * @optional
     */
    private String[] jvmArguments;

    /**
     * Unpacks compile-scope dependencies and the project jar, then invokes
     * {@code dx --dex} on the unpacked class files.
     *
     * @throws MojoExecutionException if a dependency is a directory, a jar
     *         cannot be unpacked, or the {@code dx} invocation fails
     */
    @SuppressWarnings( "unchecked" )
    public void execute()
        throws MojoExecutionException, MojoFailureException
    {
        CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
        executor.setLogger( this.getLog() );

        File outputFile = new File( project.getBuild().getDirectory() + File.separator + "classes.dex" );
        File inputFile =
            new File( project.getBuild().getDirectory() + File.separator + project.getBuild().getFinalName() + ".jar" );

        // Unpackage all dependent and main classes
        File outputDirectory = new File( project.getBuild().getDirectory(), "android-classes" );
        for ( Artifact artifact : (List<Artifact>) project.getCompileArtifacts() )
        {
            // The Android platform jar must not be dexed into the application.
            if ( artifact.getGroupId().equals( "com.google.android" ) )
            {
                continue;
            }

            if ( artifact.getFile().isDirectory() )
            {
                throw new MojoExecutionException( "Dependent artifact is directory: Directory = "
                    + artifact.getFile().getAbsolutePath() );
            }

            try
            {
                unjar( new JarFile( artifact.getFile() ), outputDirectory );
            }
            catch ( IOException e )
            {
                throw new MojoExecutionException( "Unable to unjar file: File = "
                    + artifact.getFile().getAbsolutePath(), e );
            }
        }

        try
        {
            unjar( new JarFile( inputFile ), outputDirectory );
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "", e );
        }

        List<String> commands = new ArrayList<String>();
        if ( jvmArguments != null )
        {
            for ( String jvmArgument : jvmArguments )
            {
                if ( jvmArgument != null )
                {
                    // dx passes JVM options via -J<arg>; strip a user-supplied leading dash first.
                    if ( jvmArgument.startsWith( "-" ) )
                    {
                        jvmArgument = jvmArgument.substring( 1 );
                    }
                    commands.add( "-J" + jvmArgument );
                }
            }
        }
        commands.add( "--dex" );
        commands.add( "--output=" + outputFile.getAbsolutePath() );
        commands.add( outputDirectory.getAbsolutePath() );
        getLog().info( "dx " + commands.toString() );
        try
        {
            executor.executeCommand( MasaUtil.getToolnameWithPath( session, project, "dx" ), commands,
                                     project.getBasedir(), false );
        }
        catch ( ExecutionException e )
        {
            throw new MojoExecutionException( "", e );
        }
        mavenProjectHelper.attachArtifact( project, "jar", project.getArtifact().getClassifier(), inputFile );
    }

    /**
     * Extracts every {@code .class} entry of the given jar into the output
     * directory, then closes the jar (the original leaked the file handle).
     *
     * @param jarFile jar to extract; always closed on return
     * @param outputDirectory destination root for extracted class files
     * @throws IOException if reading the jar or writing an entry fails
     */
    private void unjar( JarFile jarFile, File outputDirectory )
        throws IOException
    {
        try
        {
            for ( Enumeration<JarEntry> en = jarFile.entries(); en.hasMoreElements(); )
            {
                JarEntry entry = en.nextElement();
                File entryFile = new File( outputDirectory, entry.getName() );
                // NOTE(review): META-INF entries are deliberately not given directories;
                // presumably no .class files live under META-INF for these inputs.
                if ( !entryFile.getParentFile().exists() && !entry.getName().startsWith( "META-INF" ) )
                {
                    entryFile.getParentFile().mkdirs();
                }
                if ( !entry.isDirectory() && entry.getName().endsWith( ".class" ) )
                {
                    final InputStream in = jarFile.getInputStream( entry );
                    try
                    {
                        final OutputStream out = new FileOutputStream( entryFile );
                        try
                        {
                            IOUtil.copy( in, out );
                        }
                        finally
                        {
                            closeQuietly( out );
                        }
                    }
                    finally
                    {
                        closeQuietly( in );
                    }
                }
            }
        }
        finally
        {
            // Release the underlying file handle; previously never closed.
            jarFile.close();
        }
    }

    /**
     * Closes the given resource, logging (but not propagating) any failure.
     *
     * @param c resource to close; must not be null
     */
    private void closeQuietly( final Closeable c )
    {
        try
        {
            c.close();
        }
        catch ( Exception ex )
        {
            getLog().warn( "Failed to close closeable " + c, ex );
        }
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.dataFlow;

import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet;
import com.intellij.codeInspection.dataFlow.value.*;
import com.intellij.openapi.util.Key;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * A type of the fact which restricts some value.
 *
 * <p>Each concrete fact type is a singleton registered in {@link #ourFactTypes}
 * by the private constructor; {@link #getTypes()} exposes the registered set.
 *
 * @author Tagir Valeev
 */
public abstract class DfaFactType<T> extends Key<T> {
  // Registration list; populated only during class initialization of the static fields below.
  private static final List<DfaFactType<?>> ourFactTypes = new ArrayList<>();

  /**
   * This fact specifies whether the value can be null. The absence of the fact means that the nullability is unknown.
   */
  public static final DfaFactType<Boolean> CAN_BE_NULL = new DfaFactType<Boolean>("Can be null") {
    @Override
    String toString(Boolean fact) {
      return fact ? "Nullable" : "NotNull";
    }

    // Constants are nullable exactly when their value is null; boxed/unboxed/range
    // values are never null; for type values nullability is delegated to NullnessUtil.
    @Nullable
    @Override
    Boolean fromDfaValue(DfaValue value) {
      if (value instanceof DfaConstValue) {
        return ((DfaConstValue)value).getValue() == null;
      }
      if (value instanceof DfaBoxedValue || value instanceof DfaUnboxedValue || value instanceof DfaRangeValue) {
        return false;
      }
      if (value instanceof DfaTypeValue) {
        return NullnessUtil.toBoolean(((DfaTypeValue)value).getNullness());
      }
      return null;
    }

    @Nullable
    @Override
    Boolean calcFromVariable(@NotNull DfaVariableValue value) {
      return NullnessUtil.calcCanBeNull(value);
    }
  };

  /**
   * This fact is applied to the Optional values (like {@link java.util.Optional} or Guava Optional).
   * When its value is true, then optional is known to be present.
   * When its value is false, then optional is known to be empty (absent).
   */
  public static final DfaFactType<Boolean> OPTIONAL_PRESENCE = new DfaFactType<Boolean>("Optional presense") {
    @Override
    String toString(Boolean fact) {
      return fact ? "present Optional" : "absent Optional";
    }

    @Nullable
    @Override
    Boolean fromDfaValue(DfaValue value) {
      return value instanceof DfaOptionalValue ? ((DfaOptionalValue)value).isPresent() : null;
    }
  };

  /**
   * This fact is applied to the integral values (of types byte, char, short, int, long).
   * Its value represents a range of possible values.
   */
  public static final DfaFactType<LongRangeSet> RANGE = new DfaFactType<LongRangeSet>("Range") {
    // A null fact means "any value", so it is a super-fact of everything.
    @Override
    boolean isSuper(@Nullable LongRangeSet superFact, @Nullable LongRangeSet subFact) {
      return superFact == null || subFact != null && superFact.contains(subFact);
    }

    @Nullable
    @Override
    LongRangeSet fromDfaValue(DfaValue value) {
      if(value instanceof DfaVariableValue) {
        return calcFromVariable((DfaVariableValue)value);
      }
      return LongRangeSet.fromDfaValue(value);
    }

    // Qualified variables may be special accessors (e.g. with a known result range);
    // otherwise fall back to the range implied by the declared variable type.
    @Nullable
    @Override
    LongRangeSet calcFromVariable(@NotNull DfaVariableValue var) {
      if (var.getQualifier() != null) {
        for (SpecialField sf : SpecialField.values()) {
          if (sf.isMyAccessor(var.getPsiVariable())) {
            return sf.getRange();
          }
        }
      }
      return LongRangeSet.fromType(var.getVariableType());
    }

    @Nullable
    @Override
    LongRangeSet unionFacts(@NotNull LongRangeSet left, @NotNull LongRangeSet right) {
      return left.union(right);
    }

    // An empty intersection is reported as null (no compatible value).
    @Nullable
    @Override
    LongRangeSet intersectFacts(@NotNull LongRangeSet left, @NotNull LongRangeSet right) {
      LongRangeSet intersection = left.intersect(right);
      return intersection.isEmpty() ? null : intersection;
    }

    @Override
    String toString(LongRangeSet fact) {
      return fact.toString();
    }
  };

  /**
   * This fact represents a set of possible types of this value
   * {@link TypeConstraint#EMPTY} value is equivalent to absent fact (not constrained)
   */
  public static final DfaFactType<TypeConstraint> TYPE_CONSTRAINT = new DfaFactType<TypeConstraint>("Type") {
    @Nullable
    @Override
    TypeConstraint fromDfaValue(DfaValue value) {
      if(value instanceof DfaTypeValue) {
        TypeConstraint constraint = TypeConstraint.EMPTY.withInstanceofValue((DfaTypeValue)value);
        // An empty (or contradictory) constraint is treated as "no fact".
        return constraint == null || constraint.isEmpty() ? null : constraint;
      }
      return null;
    }

    @Override
    boolean isSuper(@Nullable TypeConstraint superFact, @Nullable TypeConstraint subFact) {
      return superFact == null || (subFact != null && superFact.isSuperStateOf(subFact));
    }

    // Fold every instanceof / not-instanceof restriction of the right constraint
    // into the left one; null anywhere means the constraints are incompatible.
    @Nullable
    @Override
    TypeConstraint intersectFacts(@NotNull TypeConstraint left, @NotNull TypeConstraint right) {
      for (DfaPsiType type : right.getInstanceofValues()) {
        left = left.withInstanceofValue(type);
        if (left == null) return null;
      }
      for (DfaPsiType type : right.getNotInstanceofValues()) {
        left = left.withNotInstanceofValue(type);
        if (left == null) return null;
      }
      return left;
    }

    // Union only when one side already subsumes the other; otherwise the result
    // is unconstrained (null).
    @Nullable
    @Override
    TypeConstraint unionFacts(@NotNull TypeConstraint left, @NotNull TypeConstraint right) {
      if(left.isSuperStateOf(right)) return left;
      if(right.isSuperStateOf(left)) return right;
      return null;
    }
  };

  private DfaFactType(String name) {
    super("DfaFactType: " + name);
    // Thread-safe as all DfaFactType instances are created only from DfaFactType class static initializer
    ourFactTypes.add(this);
  }

  // Extracts this fact from a DFA value; null when the value carries no such fact.
  @Nullable
  T fromDfaValue(DfaValue value) {
    return null;
  }

  // Could be expensive
  @Nullable
  T calcFromVariable(@NotNull DfaVariableValue value) {
    return null;
  }

  // Default subsumption: facts are comparable only when equal.
  boolean isSuper(@Nullable T superFact, @Nullable T subFact) {
    return Objects.equals(superFact, subFact);
  }

  /**
   * Intersects two facts of this type.
   *
   * @param left left fact
   * @param right right fact
   * @return intersection fact or null if facts are incompatible
   */
  @Nullable
  T intersectFacts(@NotNull T left, @NotNull T right) {
    return left.equals(right) ? left : null;
  }

  /**
   * Unites two facts of this type.
   *
   * @param left left fact
   * @param right right fact
   * @return union fact (null means that the fact can have any value)
   */
  @Nullable
  T unionFacts(@NotNull T left, @NotNull T right) {
    return left.equals(right) ? left : null;
  }

  // Human-readable presentation of the fact (for debug output).
  String toString(T fact) {
    return fact.toString();
  }

  // Read-only view of all registered fact types, in registration order.
  static List<DfaFactType<?>> getTypes() {
    return Collections.unmodifiableList(ourFactTypes);
  }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.identitymanagement.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Contains information about an IAM role. This structure is returned as a response element in several API operations
 * that interact with roles.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iam-2010-05-08/Role" target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Role implements Serializable, Cloneable {

    /** The path to the role (see IAM Identifiers in the Using IAM guide). */
    private String path;
    /** The friendly name that identifies the role. */
    private String roleName;
    /** The stable and unique string identifying the role. */
    private String roleId;
    /** The Amazon Resource Name (ARN) specifying the role. */
    private String arn;
    /** The date and time, in ISO 8601 date-time format, when the role was created. */
    private java.util.Date createDate;
    /** The policy that grants an entity permission to assume the role. */
    private String assumeRolePolicyDocument;
    /** A description of the role that you provide. */
    private String description;
    /** The maximum session duration (in seconds) for the specified role. */
    private Integer maxSessionDuration;
    /** The ARN of the policy used to set the permissions boundary for the role. */
    private AttachedPermissionsBoundary permissionsBoundary;
    /** Tags attached to the role; created lazily by {@link #getTags()}. */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /**
     * Sets the path to the role.
     *
     * @param path
     *        The path to the role.
     */
    public void setPath(String path) {
        this.path = path;
    }

    /**
     * Returns the path to the role.
     *
     * @return The path to the role.
     */
    public String getPath() {
        return this.path;
    }

    /**
     * Fluent variant of {@link #setPath(String)}.
     *
     * @param path
     *        The path to the role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withPath(String path) {
        this.path = path;
        return this;
    }

    /**
     * Sets the friendly name that identifies the role.
     *
     * @param roleName
     *        The friendly name that identifies the role.
     */
    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    /**
     * Returns the friendly name that identifies the role.
     *
     * @return The friendly name that identifies the role.
     */
    public String getRoleName() {
        return this.roleName;
    }

    /**
     * Fluent variant of {@link #setRoleName(String)}.
     *
     * @param roleName
     *        The friendly name that identifies the role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withRoleName(String roleName) {
        this.roleName = roleName;
        return this;
    }

    /**
     * Sets the stable and unique string identifying the role.
     *
     * @param roleId
     *        The stable and unique string identifying the role.
     */
    public void setRoleId(String roleId) {
        this.roleId = roleId;
    }

    /**
     * Returns the stable and unique string identifying the role.
     *
     * @return The stable and unique string identifying the role.
     */
    public String getRoleId() {
        return this.roleId;
    }

    /**
     * Fluent variant of {@link #setRoleId(String)}.
     *
     * @param roleId
     *        The stable and unique string identifying the role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withRoleId(String roleId) {
        this.roleId = roleId;
        return this;
    }

    /**
     * Sets the Amazon Resource Name (ARN) specifying the role.
     *
     * @param arn
     *        The Amazon Resource Name (ARN) specifying the role.
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * Returns the Amazon Resource Name (ARN) specifying the role.
     *
     * @return The Amazon Resource Name (ARN) specifying the role.
     */
    public String getArn() {
        return this.arn;
    }

    /**
     * Fluent variant of {@link #setArn(String)}.
     *
     * @param arn
     *        The Amazon Resource Name (ARN) specifying the role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withArn(String arn) {
        this.arn = arn;
        return this;
    }

    /**
     * Sets the date and time, in ISO 8601 date-time format, when the role was created.
     *
     * @param createDate
     *        The date and time when the role was created.
     */
    public void setCreateDate(java.util.Date createDate) {
        this.createDate = createDate;
    }

    /**
     * Returns the date and time, in ISO 8601 date-time format, when the role was created.
     *
     * @return The date and time when the role was created.
     */
    public java.util.Date getCreateDate() {
        return this.createDate;
    }

    /**
     * Fluent variant of {@link #setCreateDate(java.util.Date)}.
     *
     * @param createDate
     *        The date and time when the role was created.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withCreateDate(java.util.Date createDate) {
        this.createDate = createDate;
        return this;
    }

    /**
     * Sets the policy that grants an entity permission to assume the role.
     *
     * @param assumeRolePolicyDocument
     *        The policy that grants an entity permission to assume the role.
     */
    public void setAssumeRolePolicyDocument(String assumeRolePolicyDocument) {
        this.assumeRolePolicyDocument = assumeRolePolicyDocument;
    }

    /**
     * Returns the policy that grants an entity permission to assume the role.
     *
     * @return The policy that grants an entity permission to assume the role.
     */
    public String getAssumeRolePolicyDocument() {
        return this.assumeRolePolicyDocument;
    }

    /**
     * Fluent variant of {@link #setAssumeRolePolicyDocument(String)}.
     *
     * @param assumeRolePolicyDocument
     *        The policy that grants an entity permission to assume the role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withAssumeRolePolicyDocument(String assumeRolePolicyDocument) {
        this.assumeRolePolicyDocument = assumeRolePolicyDocument;
        return this;
    }

    /**
     * Sets the description of the role that you provide.
     *
     * @param description
     *        A description of the role that you provide.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Returns the description of the role that you provide.
     *
     * @return A description of the role that you provide.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description
     *        A description of the role that you provide.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withDescription(String description) {
        this.description = description;
        return this;
    }

    /**
     * Sets the maximum session duration (in seconds) for the specified role.
     *
     * @param maxSessionDuration
     *        The maximum session duration (in seconds) for the specified role.
     */
    public void setMaxSessionDuration(Integer maxSessionDuration) {
        this.maxSessionDuration = maxSessionDuration;
    }

    /**
     * Returns the maximum session duration (in seconds) for the specified role.
     *
     * @return The maximum session duration (in seconds) for the specified role.
     */
    public Integer getMaxSessionDuration() {
        return this.maxSessionDuration;
    }

    /**
     * Fluent variant of {@link #setMaxSessionDuration(Integer)}.
     *
     * @param maxSessionDuration
     *        The maximum session duration (in seconds) for the specified role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withMaxSessionDuration(Integer maxSessionDuration) {
        this.maxSessionDuration = maxSessionDuration;
        return this;
    }

    /**
     * Sets the ARN of the policy used to set the permissions boundary for the role.
     *
     * @param permissionsBoundary
     *        The ARN of the policy used to set the permissions boundary for the role.
     */
    public void setPermissionsBoundary(AttachedPermissionsBoundary permissionsBoundary) {
        this.permissionsBoundary = permissionsBoundary;
    }

    /**
     * Returns the ARN of the policy used to set the permissions boundary for the role.
     *
     * @return The ARN of the policy used to set the permissions boundary for the role.
     */
    public AttachedPermissionsBoundary getPermissionsBoundary() {
        return this.permissionsBoundary;
    }

    /**
     * Fluent variant of {@link #setPermissionsBoundary(AttachedPermissionsBoundary)}.
     *
     * @param permissionsBoundary
     *        The ARN of the policy used to set the permissions boundary for the role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withPermissionsBoundary(AttachedPermissionsBoundary permissionsBoundary) {
        this.permissionsBoundary = permissionsBoundary;
        return this;
    }

    /**
     * Returns the tags attached to the role, creating the backing list on first access.
     *
     * @return A list of tags that are attached to the specified role.
     */
    public java.util.List<Tag> getTags() {
        if (this.tags == null) {
            this.tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return this.tags;
    }

    /**
     * Replaces the tags attached to the role with a copy of the given collection.
     *
     * @param tags
     *        A list of tags that are attached to the specified role.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * Appends the given tags to the existing list (if any). Use {@link #setTags(java.util.Collection)} or
     * {@link #withTags(java.util.Collection)} if you want to override the existing values.
     *
     * @param tags
     *        A list of tags that are attached to the specified role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (Tag tag : tags) {
            this.tags.add(tag);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setTags(java.util.Collection)}.
     *
     * @param tags
     *        A list of tags that are attached to the specified role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Role withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getPath() != null) {
            sb.append("Path: ").append(getPath()).append(",");
        }
        if (getRoleName() != null) {
            sb.append("RoleName: ").append(getRoleName()).append(",");
        }
        if (getRoleId() != null) {
            sb.append("RoleId: ").append(getRoleId()).append(",");
        }
        if (getArn() != null) {
            sb.append("Arn: ").append(getArn()).append(",");
        }
        if (getCreateDate() != null) {
            sb.append("CreateDate: ").append(getCreateDate()).append(",");
        }
        if (getAssumeRolePolicyDocument() != null) {
            sb.append("AssumeRolePolicyDocument: ").append(getAssumeRolePolicyDocument()).append(",");
        }
        if (getDescription() != null) {
            sb.append("Description: ").append(getDescription()).append(",");
        }
        if (getMaxSessionDuration() != null) {
            sb.append("MaxSessionDuration: ").append(getMaxSessionDuration()).append(",");
        }
        if (getPermissionsBoundary() != null) {
            sb.append("PermissionsBoundary: ").append(getPermissionsBoundary()).append(",");
        }
        if (getTags() != null) {
            sb.append("Tags: ").append(getTags());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Role)) {
            return false;
        }
        Role other = (Role) obj;
        // Objects.equals reproduces the original null-xor + equals checks field by field.
        return java.util.Objects.equals(other.getPath(), this.getPath())
                && java.util.Objects.equals(other.getRoleName(), this.getRoleName())
                && java.util.Objects.equals(other.getRoleId(), this.getRoleId())
                && java.util.Objects.equals(other.getArn(), this.getArn())
                && java.util.Objects.equals(other.getCreateDate(), this.getCreateDate())
                && java.util.Objects.equals(other.getAssumeRolePolicyDocument(), this.getAssumeRolePolicyDocument())
                && java.util.Objects.equals(other.getDescription(), this.getDescription())
                && java.util.Objects.equals(other.getMaxSessionDuration(), this.getMaxSessionDuration())
                && java.util.Objects.equals(other.getPermissionsBoundary(), this.getPermissionsBoundary())
                && java.util.Objects.equals(other.getTags(), this.getTags());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same (31 * h + fieldHash) scheme starting from 1,
        // so the computed value is identical to the generated field-by-field version.
        return java.util.Objects.hash(getPath(), getRoleName(), getRoleId(), getArn(), getCreateDate(),
                getAssumeRolePolicyDocument(), getDescription(), getMaxSessionDuration(), getPermissionsBoundary(),
                getTags());
    }

    @Override
    public Role clone() {
        try {
            return (Role) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.store.parquet.columnreaders; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import com.google.common.collect.ImmutableList; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.Types; import org.apache.drill.exec.exception.OutOfMemoryException; import org.apache.drill.exec.expr.TypeHelper; import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.ops.OperatorContext; import org.apache.drill.exec.physical.impl.OutputMutator; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.store.AbstractRecordReader; import org.apache.drill.exec.store.parquet.ParquetReaderStats; import org.apache.drill.exec.vector.AllocationHelper; import org.apache.drill.exec.vector.NullableIntVector; import 
org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.complex.RepeatedValueVector;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.format.FileMetaData;
import org.apache.parquet.format.SchemaElement;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
import org.apache.parquet.hadoop.CodecFactory;
import org.apache.parquet.hadoop.ParquetFileWriter;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.schema.PrimitiveType;

import com.google.common.collect.Lists;

/**
 * Record reader that materializes value vectors from a single Parquet row group.
 * Fixed-width columns are read directly into vectors; variable-width (BINARY) and
 * repeated columns go through {@link VarLenBinaryReader}. Projected columns that do
 * not exist in the file are returned as null-filled nullable-int vectors.
 */
public class ParquetRecordReader extends AbstractRecordReader {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetRecordReader.class);

  // this value has been inflated to read in multiple value vectors at once, and then break them up into smaller vectors
  private static final int NUMBER_OF_VECTORS = 1;
  private static final long DEFAULT_BATCH_LENGTH = 256 * 1024 * NUMBER_OF_VECTORS; // 256kb
  private static final long DEFAULT_BATCH_LENGTH_IN_BITS = DEFAULT_BATCH_LENGTH * 8; // 256kb
  // NOTE(review): declared as char (an unsigned 16-bit value, 32768 here) — presumably to
  // guarantee non-negativity when used as a record count; confirm before changing the type.
  private static final char DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH = 32*1024;

  // When no column is required by the downstream operator, ask SCAN to return a DEFAULT column. If such column does not exist,
  // it will return as a nullable-int column. If that column happens to exist, return that column.
  protected static final List<SchemaPath> DEFAULT_COLS_TO_READ = ImmutableList.of(SchemaPath.getSimplePath("_DEFAULT_COL_TO_READ_"));

  // TODO - should probably find a smarter way to set this, currently 1 megabyte
  public static final int PARQUET_PAGE_MAX_SIZE = 1024 * 1024 * 1;

  // used for clearing the last n bits of a byte
  public static final byte[] endBitMasks = {-2, -4, -8, -16, -32, -64, -128};
  // used for clearing the first n bits of a byte
  public static final byte[] startBitMasks = {127, 63, 31, 15, 7, 3, 1};

  // Sum of the widths (in bits) of all selected fixed-width columns; drives batch sizing.
  private int bitWidthAllFixedFields;
  private boolean allFieldsFixedLength;
  private int recordsPerBatch;
  private OperatorContext operatorContext;
//  private long totalRecords;
//  private long rowGroupOffset;

  // One reader per selected fixed-width, non-repeated column.
  private List<ColumnReader<?>> columnStatuses;
  private FileSystem fileSystem;
  private long batchSize;
  Path hadoopPath;
  private VarLenBinaryReader varLengthReader;
  private ParquetMetadata footer;
  // This is a parallel list to the columns list above, it is used to determine the subset of the project
  // pushdown columns that do not appear in this file
  private boolean[] columnsFound;
  // For columns not found in the file, we need to return a schema element with the correct number of values
  // at that position in the schema. Currently this requires a vector be present. Here is a list of all of these vectors
  // that need only have their value count set at the end of each call to next(), as the values default to null.
  private List<NullableIntVector> nullFilledVectors;
  // Keeps track of the number of records returned in the case where only columns outside of the file were selected.
  // No actual data needs to be read out of the file, we only need to return batches until we have 'read' the number of
  // records specified in the row group metadata
  long mockRecordsRead;

  private final CodecFactory codecFactory;
  int rowGroupIndex;
  long totalRecordsRead;
  private final FragmentContext fragmentContext;
  public ParquetReaderStats parquetReaderStats = new ParquetReaderStats();

  /**
   * Convenience constructor using the default batch length (in bits).
   */
  public ParquetRecordReader(FragmentContext fragmentContext,
      String path,
      int rowGroupIndex,
      FileSystem fs,
      CodecFactory codecFactory,
      ParquetMetadata footer,
      List<SchemaPath> columns) throws ExecutionSetupException {
    this(fragmentContext, DEFAULT_BATCH_LENGTH_IN_BITS, path, rowGroupIndex, fs, codecFactory, footer, columns);
  }

  /**
   * @param fragmentContext query-fragment context (options, allocator access)
   * @param batchSize target batch size in bits for fixed-width batches
   * @param path file to read
   * @param rowGroupIndex which row group of the file this reader is responsible for
   * @param fs filesystem to read from
   * @param codecFactory decompression codec factory; released in {@link #close()}
   * @param footer parsed Parquet footer for the whole file
   * @param columns projected columns (see {@link AbstractRecordReader#setColumns})
   */
  public ParquetRecordReader(
      FragmentContext fragmentContext,
      long batchSize,
      String path,
      int rowGroupIndex,
      FileSystem fs,
      CodecFactory codecFactory,
      ParquetMetadata footer,
      List<SchemaPath> columns) throws ExecutionSetupException {
    this.hadoopPath = new Path(path);
    this.fileSystem = fs;
    this.codecFactory = codecFactory;
    this.rowGroupIndex = rowGroupIndex;
    this.batchSize = batchSize;
    this.footer = footer;
    this.fragmentContext = fragmentContext;
    setColumns(columns);
  }

  public CodecFactory getCodecFactory() {
    return codecFactory;
  }

  public Path getHadoopPath() {
    return hadoopPath;
  }

  public FileSystem getFileSystem() {
    return fileSystem;
  }

  public int getRowGroupIndex() {
    return rowGroupIndex;
  }

  public int getBitWidthAllFixedFields() {
    return bitWidthAllFixedFields;
  }

  public long getBatchSize() {
    return batchSize;
  }

  /**
   * @param type a fixed length type from the parquet library enum
   * @return the length in pageDataByteArray of the type
   */
  public static int getTypeLengthInBits(PrimitiveType.PrimitiveTypeName type) {
    switch (type) {
      case INT64:   return 64;
      case INT32:   return 32;
      case BOOLEAN: return 1;
      case FLOAT:   return 32;
      case DOUBLE:  return 64;
      case INT96:   return 96;
      // binary and fixed length byte array
      default:
        throw new IllegalStateException("Length cannot be determined for type " + type);
    }
  }

  /**
   * Returns true when {@code field} is part of the projection; as a side effect marks
   * the matching entry in {@link #columnsFound} so missing columns can be null-filled later.
   */
  private boolean fieldSelected(MaterializedField field) {
    // TODO - not sure if this is how we want to represent this
    // for now it makes the existing tests pass, simply selecting
    // all available data if no columns are provided
    if (isStarQuery()) {
      return true;
    }

    int i = 0;
    for (SchemaPath expr : getColumns()) {
      if ( field.getPath().equalsIgnoreCase(expr.getAsUnescapedPath())) {
        columnsFound[i] = true;
        return true;
      }
      i++;
    }
    return false;
  }

  public OperatorContext getOperatorContext() {
    return operatorContext;
  }

  /**
   * Builds the output schema and one column reader per selected column of this row group.
   * Fixed-width batch size is derived from the summed bit widths; variable-width batches
   * fall back to {@link #DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH}.
   */
  @Override
  public void setup(OperatorContext operatorContext, OutputMutator output) throws ExecutionSetupException {
    this.operatorContext = operatorContext;
    if (!isStarQuery()) {
      columnsFound = new boolean[getColumns().size()];
      nullFilledVectors = new ArrayList<>();
    }
    columnStatuses = new ArrayList<>();
//    totalRecords = footer.getBlocks().get(rowGroupIndex).getRowCount();
    List<ColumnDescriptor> columns = footer.getFileMetaData().getSchema().getColumns();
    allFieldsFixedLength = true;
    ColumnDescriptor column;
    ColumnChunkMetaData columnChunkMetaData;
    int columnsToScan = 0;
    mockRecordsRead = 0;

    MaterializedField field;
//    ParquetMetadataConverter metaConverter = new ParquetMetadataConverter();
    FileMetaData fileMetaData;

    logger.debug("Reading row group({}) with {} records in file {}.", rowGroupIndex, footer.getBlocks().get(rowGroupIndex).getRowCount(),
        hadoopPath.toUri().getPath());
    totalRecordsRead = 0;

    // TODO - figure out how to deal with this better once we add nested reading, note also look where this map is used below
    // store a map from column name to converted types if they are non-null
    HashMap<String, SchemaElement> schemaElements = new HashMap<>();
    fileMetaData = new ParquetMetadataConverter().toParquetMetadata(ParquetFileWriter.CURRENT_VERSION, footer);
    for (SchemaElement se : fileMetaData.getSchema()) {
      schemaElements.put(se.getName(), se);
    }

    // loop to add up the length of the fixed width columns and build the schema
    for (int i = 0; i < columns.size(); ++i) {
      column = columns.get(i);
      logger.debug("name: " + fileMetaData.getSchema().get(i).name);
      SchemaElement se = schemaElements.get(column.getPath()[0]);
      MajorType mt = ParquetToDrillTypeConverter.toMajorType(column.getType(), se.getType_length(), getDataMode(column), se, fragmentContext.getOptions());
      field = MaterializedField.create(toFieldName(column.getPath()), mt);
      if ( ! fieldSelected(field)) {
        continue;
      }
      columnsToScan++;
      // sum the lengths of all of the fixed length fields
      if (column.getType() != PrimitiveType.PrimitiveTypeName.BINARY) {
        if (column.getMaxRepetitionLevel() > 0) {
          allFieldsFixedLength = false;
        }
        if (column.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) {
          bitWidthAllFixedFields += se.getType_length() * 8;
        } else {
          bitWidthAllFixedFields += getTypeLengthInBits(column.getType());
        }
      } else {
        allFieldsFixedLength = false;
      }
    }
//    rowGroupOffset = footer.getBlocks().get(rowGroupIndex).getColumns().get(0).getFirstDataPageOffset();

    if (columnsToScan != 0 && allFieldsFixedLength) {
      // NOTE(review): value count comes from getBlocks().get(0), not get(rowGroupIndex) as
      // used everywhere else in this method — verify whether this should be the current row group.
      recordsPerBatch = (int) Math.min(Math.min(batchSize / bitWidthAllFixedFields,
          footer.getBlocks().get(0).getColumns().get(0).getValueCount()), 65535);
    } else {
      recordsPerBatch = DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH;
    }

    try {
      ValueVector vector;
      SchemaElement schemaElement;
      final ArrayList<VarLengthColumn<? extends ValueVector>> varLengthColumns = new ArrayList<>();
      // initialize all of the column read status objects
      boolean fieldFixedLength;
      // the column chunk meta-data is not guaranteed to be in the same order as the columns in the schema
      // a map is constructed for fast access to the correct columnChunkMetadata to correspond
      // to an element in the schema
      Map<String, Integer> columnChunkMetadataPositionsInList = new HashMap<>();
      BlockMetaData rowGroupMetadata = footer.getBlocks().get(rowGroupIndex);

      int colChunkIndex = 0;
      for (ColumnChunkMetaData colChunk : rowGroupMetadata.getColumns()) {
        columnChunkMetadataPositionsInList.put(Arrays.toString(colChunk.getPath().toArray()), colChunkIndex);
        colChunkIndex++;
      }
      for (int i = 0; i < columns.size(); ++i) {
        column = columns.get(i);
        columnChunkMetaData = rowGroupMetadata.getColumns().get(columnChunkMetadataPositionsInList.get(Arrays.toString(column.getPath())));
        schemaElement = schemaElements.get(column.getPath()[0]);
        MajorType type = ParquetToDrillTypeConverter.toMajorType(column.getType(), schemaElement.getType_length(), getDataMode(column), schemaElement, fragmentContext.getOptions());
        field = MaterializedField.create(toFieldName(column.getPath()), type);
        // the field was not requested to be read
        if ( ! fieldSelected(field)) {
          continue;
        }

        fieldFixedLength = column.getType() != PrimitiveType.PrimitiveTypeName.BINARY;
        vector = output.addField(field, (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
        if (column.getType() != PrimitiveType.PrimitiveTypeName.BINARY) {
          if (column.getMaxRepetitionLevel() > 0) {
            // repeated fixed-width column: wrap the element reader in a FixedWidthRepeatedReader
            final RepeatedValueVector repeatedVector = RepeatedValueVector.class.cast(vector);
            ColumnReader<?> dataReader = ColumnReaderFactory.createFixedColumnReader(this, fieldFixedLength,
                column, columnChunkMetaData, recordsPerBatch,
                repeatedVector.getDataVector(), schemaElement);
            varLengthColumns.add(new FixedWidthRepeatedReader(this, dataReader,
                getTypeLengthInBits(column.getType()), -1, column, columnChunkMetaData, false, repeatedVector, schemaElement));
          }
          else {
            columnStatuses.add(ColumnReaderFactory.createFixedColumnReader(this, fieldFixedLength,
                column, columnChunkMetaData, recordsPerBatch, vector,
                schemaElement));
          }
        } else {
          // create a reader and add it to the appropriate list
          varLengthColumns.add(ColumnReaderFactory.getReader(this, -1, column, columnChunkMetaData, false, vector, schemaElement));
        }
      }
      varLengthReader = new VarLenBinaryReader(this, varLengthColumns);

      if (!isStarQuery()) {
        // add null-filled vectors for projected columns that were not found in the file
        List<SchemaPath> projectedColumns = Lists.newArrayList(getColumns());
        SchemaPath col;
        for (int i = 0; i < columnsFound.length; i++) {
          col = projectedColumns.get(i);
          assert col!=null;
          if ( ! columnsFound[i] && !col.equals(STAR_COLUMN)) {
            nullFilledVectors.add((NullableIntVector)output.addField(MaterializedField.create(col.getAsUnescapedPath(),
                    Types.optional(TypeProtos.MinorType.INT)),
                (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(TypeProtos.MinorType.INT, DataMode.OPTIONAL)));
          }
        }
      }
    } catch (Exception e) {
      handleAndRaise("Failure in setting up reader", e);
    }
  }

  /**
   * Wraps {@code e} in a DrillRuntimeException carrying the message and the full footer
   * for diagnostics. Always throws.
   */
  protected void handleAndRaise(String s, Exception e) {
    String message = "Error in parquet record reader.\nMessage: " + s +
      "\nParquet Metadata: " + footer;
    throw new DrillRuntimeException(message, e);
  }

  @Override
  public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
    try {
      for (final ValueVector v : vectorMap.values()) {
        AllocationHelper.allocate(v, recordsPerBatch, 50, 10);
      }
    } catch (NullPointerException e) {
      // AllocationHelper signals allocation failure this way; translate it.
      throw new OutOfMemoryException();
    }
  }

  private String toFieldName(String[] paths) {
    return SchemaPath.getCompoundPath(paths).getAsUnescapedPath();
  }

  /**
   * Maps a Parquet column descriptor to the Drill data mode: repeated, required
   * (definition level 0, i.e. non-nullable), or optional.
   */
  private TypeProtos.DataMode getDataMode(ColumnDescriptor column) {
    if (column.getMaxRepetitionLevel() > 0 ) {
      return DataMode.REPEATED;
    } else if (column.getMaxDefinitionLevel() == 0) {
      return TypeProtos.DataMode.REQUIRED;
    } else {
      return TypeProtos.DataMode.OPTIONAL;
    }
  }

  // Resets per-batch read counters on every column reader before filling the next batch.
  private void resetBatch() {
    for (final ColumnReader<?> column : columnStatuses) {
      column.valuesReadInCurrentPass = 0;
    }
    for (final VarLengthColumn<?> r : varLengthReader.columns) {
      r.valuesReadInCurrentPass = 0;
    }
  }

  public void readAllFixedFields(long recordsToRead) throws IOException {
    for (ColumnReader<?> crs : columnStatuses) {
      crs.processPages(recordsToRead);
    }
  }

  /**
   * Fills the output vectors with the next batch of records.
   *
   * @return number of records in the batch, 0 when the row group is exhausted
   */
  @Override
  public int next() {
    resetBatch();
    long recordsToRead = 0;
    try {
      // pick any reader to track progress: fixed-width readers first, then var-length
      ColumnReader<?> firstColumnStatus;
      if (columnStatuses.size() > 0) {
        firstColumnStatus = columnStatuses.iterator().next();
      }
      else{
        if (varLengthReader.columns.size() > 0) {
          firstColumnStatus = varLengthReader.columns.iterator().next();
        }
        else{
          firstColumnStatus = null;
        }
      }
      // No columns found in the file were selected, simply return a full batch of null records for each column requested
      if (firstColumnStatus == null) {
        if (mockRecordsRead == footer.getBlocks().get(rowGroupIndex).getRowCount()) {
          return 0;
        }
        recordsToRead = Math.min(DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH, footer.getBlocks().get(rowGroupIndex).getRowCount() - mockRecordsRead);

        // setting the valueCount is sufficient: the vectors start out null-filled
        for (final ValueVector vv : nullFilledVectors ) {
          vv.getMutator().setValueCount( (int) recordsToRead);
        }
        mockRecordsRead += recordsToRead;
        totalRecordsRead += recordsToRead;
        return (int) recordsToRead;
      }

      if (allFieldsFixedLength) {
        recordsToRead = Math.min(recordsPerBatch, firstColumnStatus.columnChunkMetaData.getValueCount() - firstColumnStatus.totalValuesRead);
      } else {
        recordsToRead = DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH;
      }

      if (allFieldsFixedLength) {
        readAllFixedFields(recordsToRead);
      } else { // variable length columns
        // var-length readers decide the actual record count; fixed-width readers follow it
        long fixedRecordsToRead = varLengthReader.readFields(recordsToRead, firstColumnStatus);
        readAllFixedFields(fixedRecordsToRead);
      }

      // if we have requested columns that were not found in the file fill their vectors with null
      // (by simply setting the value counts inside of them, as they start null filled)
      if (nullFilledVectors != null) {
        for (final ValueVector vv : nullFilledVectors ) {
          vv.getMutator().setValueCount(firstColumnStatus.getRecordsReadInCurrentPass());
        }
      }

//      logger.debug("So far read {} records out of row group({}) in file '{}'", totalRecordsRead, rowGroupIndex, hadoopPath.toUri().getPath());
      totalRecordsRead += firstColumnStatus.getRecordsReadInCurrentPass();
      return firstColumnStatus.getRecordsReadInCurrentPass();
    } catch (Exception e) {
      handleAndRaise("\nHadoop path: " + hadoopPath.toUri().getPath() +
        "\nTotal records read: " + totalRecordsRead +
        "\nMock records read: " + mockRecordsRead +
        "\nRecords to read: " + recordsToRead +
        "\nRow group index: " + rowGroupIndex +
        "\nRecords in row group: " + footer.getBlocks().get(rowGroupIndex).getRowCount(), e);
    }

    // this is never reached
    return 0;
  }

  /**
   * Releases column readers, the codec factory, and logs accumulated reader statistics.
   */
  @Override
  public void close() {
    logger.debug("Read {} records out of row group({}) in file '{}'", totalRecordsRead, rowGroupIndex,
        hadoopPath.toUri().getPath());
    // enable this for debugging when it is known that a whole file will be read
    // limit kills upstream operators once it has enough records, so this assert will fail
//    assert totalRecordsRead == footer.getBlocks().get(rowGroupIndex).getRowCount();
    if (columnStatuses != null) {
      for (final ColumnReader<?> column : columnStatuses) {
        column.clear();
      }
      columnStatuses.clear();
      columnStatuses = null;
    }

    codecFactory.release();

    if (varLengthReader != null) {
      for (final VarLengthColumn r : varLengthReader.columns) {
        r.clear();
      }
      varLengthReader.columns.clear();
      varLengthReader = null;
    }

    if(parquetReaderStats != null) {
      logger.trace("ParquetTrace,Summary,{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}",
          hadoopPath,
          parquetReaderStats.numDictPageHeaders,
          parquetReaderStats.numPageHeaders,
          parquetReaderStats.numDictPageLoads,
          parquetReaderStats.numPageLoads,
          parquetReaderStats.numDictPagesDecompressed,
          parquetReaderStats.numPagesDecompressed,
          parquetReaderStats.totalDictPageHeaderBytes,
          parquetReaderStats.totalPageHeaderBytes,
          parquetReaderStats.totalDictPageReadBytes,
          parquetReaderStats.totalPageReadBytes,
          parquetReaderStats.totalDictDecompressedBytes,
          parquetReaderStats.totalDecompressedBytes,
          parquetReaderStats.timeDictPageHeaders,
          parquetReaderStats.timePageHeaders,
          parquetReaderStats.timeDictPageLoads,
          parquetReaderStats.timePageLoads,
          parquetReaderStats.timeDictPagesDecompressed,
          parquetReaderStats.timePagesDecompressed);
      parquetReaderStats=null;
    }
  }

  @Override
  protected List<SchemaPath> getDefaultColumnsToRead() {
    return DEFAULT_COLS_TO_READ;
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator.window;

import com.facebook.presto.operator.PagesHashStrategy;
import com.facebook.presto.operator.PagesIndex;
import com.facebook.presto.spi.PageBuilder;
import com.facebook.presto.spi.function.WindowIndex;
import com.facebook.presto.sql.planner.plan.WindowNode.Frame.BoundType;
import com.google.common.collect.ImmutableList;

import java.util.List;

import static com.facebook.presto.spi.StandardErrorCode.INVALID_WINDOW_FRAME;
import static com.facebook.presto.sql.planner.plan.WindowNode.Frame.BoundType.FOLLOWING;
import static com.facebook.presto.sql.planner.plan.WindowNode.Frame.BoundType.PRECEDING;
import static com.facebook.presto.sql.planner.plan.WindowNode.Frame.BoundType.UNBOUNDED_FOLLOWING;
import static com.facebook.presto.sql.planner.plan.WindowNode.Frame.BoundType.UNBOUNDED_PRECEDING;
import static com.facebook.presto.sql.planner.plan.WindowNode.Frame.WindowType.RANGE;
import static com.facebook.presto.util.Failures.checkCondition;
import static com.google.common.base.Preconditions.checkState;
import static java.lang.Math.toIntExact;

/**
 * Drives all window functions over a single partition of a {@link PagesIndex}.
 * Rows are produced one at a time via {@link #processNextRow}; for each row the
 * peer group (rows equal under the ORDER BY) and the window frame range are
 * computed and handed to every {@link FramedWindowFunction}.
 *
 * <p>All positions handed to window functions are relative to {@code partitionStart}.
 * Not thread-safe.
 */
public final class WindowPartition
{
    private final PagesIndex pagesIndex;
    private final int partitionStart;
    private final int partitionEnd;

    private final int[] outputChannels;
    private final List<FramedWindowFunction> windowFunctions;
    private final PagesHashStrategy peerGroupHashStrategy;

    // current peer group as absolute [peerGroupStart, peerGroupEnd) positions in pagesIndex
    private int peerGroupStart;
    private int peerGroupEnd;

    // absolute position of the next row to output; partitionStart <= currentPosition <= partitionEnd
    private int currentPosition;

    public WindowPartition(PagesIndex pagesIndex,
            int partitionStart,
            int partitionEnd,
            int[] outputChannels,
            List<FramedWindowFunction> windowFunctions,
            PagesHashStrategy peerGroupHashStrategy)
    {
        this.pagesIndex = pagesIndex;
        this.partitionStart = partitionStart;
        this.partitionEnd = partitionEnd;
        this.outputChannels = outputChannels;
        this.windowFunctions = ImmutableList.copyOf(windowFunctions);
        this.peerGroupHashStrategy = peerGroupHashStrategy;

        // reset functions for new partition
        WindowIndex windowIndex = new PagesWindowIndex(pagesIndex, partitionStart, partitionEnd);
        for (FramedWindowFunction framedWindowFunction : windowFunctions) {
            framedWindowFunction.getFunction().reset(windowIndex);
        }

        currentPosition = partitionStart;
        updatePeerGroup();
    }

    public int getPartitionStart()
    {
        return partitionStart;
    }

    public int getPartitionEnd()
    {
        return partitionEnd;
    }

    public boolean hasNext()
    {
        return currentPosition < partitionEnd;
    }

    /**
     * Appends one output row: the pass-through output channels followed by one value
     * per window function, evaluated over that function's frame for the current row.
     *
     * @throws IllegalStateException if the partition is exhausted
     */
    public void processNextRow(PageBuilder pageBuilder)
    {
        checkState(hasNext(), "No more rows in partition");

        // copy output channels
        pageBuilder.declarePosition();
        int channel = 0;
        for (; channel < outputChannels.length; channel++) {
            pagesIndex.appendTo(outputChannels[channel], currentPosition, pageBuilder.getBlockBuilder(channel));
        }

        // check for new peer group
        if (currentPosition == peerGroupEnd) {
            updatePeerGroup();
        }

        for (FramedWindowFunction framedFunction : windowFunctions) {
            Range range = getFrameRange(framedFunction.getFrame());
            framedFunction.getFunction().processRow(
                    pageBuilder.getBlockBuilder(channel),
                    peerGroupStart - partitionStart,
                    peerGroupEnd - partitionStart - 1,
                    range.getStart(),
                    range.getEnd());
            channel++;
        }

        currentPosition++;
    }

    /**
     * Inclusive frame range, partition-relative; (-1, -1) denotes an empty frame.
     */
    private static class Range
    {
        private final int start;
        private final int end;

        Range(int start, int end)
        {
            this.start = start;
            this.end = end;
        }

        public int getStart()
        {
            return start;
        }

        public int getEnd()
        {
            return end;
        }
    }

    // Advances peerGroupStart/peerGroupEnd to the peer group beginning at currentPosition.
    private void updatePeerGroup()
    {
        peerGroupStart = currentPosition;
        // find end of peer group
        peerGroupEnd = peerGroupStart + 1;
        while ((peerGroupEnd < partitionEnd) && pagesIndex.positionEqualsPosition(peerGroupHashStrategy, peerGroupStart, peerGroupEnd)) {
            peerGroupEnd++;
        }
    }

    /**
     * Computes the inclusive, partition-relative frame range for the current row.
     * RANGE frames with unspecified bounds snap to the current peer group.
     */
    private Range getFrameRange(FrameInfo frameInfo)
    {
        int rowPosition = currentPosition - partitionStart;
        int endPosition = partitionEnd - partitionStart - 1;

        // handle empty frame
        if (emptyFrame(frameInfo, rowPosition, endPosition)) {
            return new Range(-1, -1);
        }

        int frameStart;
        int frameEnd;

        // frame start
        if (frameInfo.getStartType() == UNBOUNDED_PRECEDING) {
            frameStart = 0;
        }
        else if (frameInfo.getStartType() == PRECEDING) {
            frameStart = preceding(rowPosition, getStartValue(frameInfo));
        }
        else if (frameInfo.getStartType() == FOLLOWING) {
            frameStart = following(rowPosition, endPosition, getStartValue(frameInfo));
        }
        else if (frameInfo.getType() == RANGE) {
            frameStart = peerGroupStart - partitionStart;
        }
        else {
            frameStart = rowPosition;
        }

        // frame end
        if (frameInfo.getEndType() == UNBOUNDED_FOLLOWING) {
            frameEnd = endPosition;
        }
        else if (frameInfo.getEndType() == PRECEDING) {
            frameEnd = preceding(rowPosition, getEndValue(frameInfo));
        }
        else if (frameInfo.getEndType() == FOLLOWING) {
            frameEnd = following(rowPosition, endPosition, getEndValue(frameInfo));
        }
        else if (frameInfo.getType() == RANGE) {
            frameEnd = peerGroupEnd - partitionStart - 1;
        }
        else {
            frameEnd = rowPosition;
        }

        return new Range(frameStart, frameEnd);
    }

    /**
     * Detects frames that cannot contain any row for the current position, e.g.
     * "BETWEEN 5 PRECEDING AND 2 PRECEDING" on row 1, before bounds are clamped.
     */
    private boolean emptyFrame(FrameInfo frameInfo, int rowPosition, int endPosition)
    {
        BoundType startType = frameInfo.getStartType();
        BoundType endType = frameInfo.getEndType();

        int positions = endPosition - rowPosition;

        if ((startType == UNBOUNDED_PRECEDING) && (endType == PRECEDING)) {
            return getEndValue(frameInfo) > rowPosition;
        }

        if ((startType == FOLLOWING) && (endType == UNBOUNDED_FOLLOWING)) {
            return getStartValue(frameInfo) > positions;
        }

        if (startType != endType) {
            return false;
        }

        // both bounds are PRECEDING or both FOLLOWING from here on
        if ((startType != PRECEDING) && (startType != FOLLOWING)) {
            return false;
        }

        long start = getStartValue(frameInfo);
        long end = getEndValue(frameInfo);

        if (startType == PRECEDING) {
            return (start < end) || ((start > rowPosition) && (end > rowPosition));
        }

        return (start > end) || ((start > positions) && (end > positions));
    }

    // Clamps "value PRECEDING" to the partition start (position 0).
    private static int preceding(int rowPosition, long value)
    {
        if (value > rowPosition) {
            return 0;
        }
        return toIntExact(rowPosition - value);
    }

    // Clamps "value FOLLOWING" to the partition end.
    private static int following(int rowPosition, int endPosition, long value)
    {
        if (value > (endPosition - rowPosition)) {
            return endPosition;
        }
        return toIntExact(rowPosition + value);
    }

    private long getStartValue(FrameInfo frameInfo)
    {
        return getFrameValue(frameInfo.getStartChannel(), "starting");
    }

    private long getEndValue(FrameInfo frameInfo)
    {
        return getFrameValue(frameInfo.getEndChannel(), "ending");
    }

    /**
     * Reads the frame offset for the current row from the given channel.
     *
     * @param type "starting" or "ending", used in error messages
     * @throws com.facebook.presto.spi.PrestoException INVALID_WINDOW_FRAME when the offset is null or negative
     */
    private long getFrameValue(int channel, String type)
    {
        checkCondition(!pagesIndex.isNull(channel, currentPosition), INVALID_WINDOW_FRAME, "Window frame %s offset must not be null", type);
        long value = pagesIndex.getLong(channel, currentPosition);
        // BUG FIX: the format argument was 'value'; the "%s" placeholder names which bound
        // ("starting"/"ending") is invalid, mirroring the null check above.
        checkCondition(value >= 0, INVALID_WINDOW_FRAME, "Window frame %s offset must not be negative", type);
        return value;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.test; import java.io.File; import java.util.Collection; import java.util.List; import java.util.Locale; import junit.framework.TestCase; import org.apache.camel.CamelContext; import org.apache.camel.Channel; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.Expression; import org.apache.camel.InvalidPayloadException; import org.apache.camel.Message; import org.apache.camel.Predicate; import org.apache.camel.Processor; import org.apache.camel.Route; import org.apache.camel.builder.Builder; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.builder.ValueBuilder; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.impl.DefaultExchange; import org.apache.camel.processor.DelegateProcessor; import org.apache.camel.util.ExchangeHelper; import org.apache.camel.util.PredicateAssertHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A bunch of useful testing methods * * @version * @deprecated Support for JUnit 3.x is slated for removal in Camel 3.x. You are encouraged to move to * JUnit 4.x based tests. See {@link org.apache.camel.test.junit4.TestSupport}. 
*/
@Deprecated
public abstract class TestSupport extends TestCase {

    // platform line separator, handy when asserting on multi-line text
    protected static final String LS = System.getProperty("line.separator");
    private static final Logger LOG = LoggerFactory.getLogger(TestSupport.class);
    protected transient Logger log = LoggerFactory.getLogger(getClass());

    /**
     * Runs the bare test sequence only if this platform is supported
     *
     * @exception Throwable if any exception is thrown
     */
    @Override
    public void runBare() throws Throwable {
        if (canRunOnThisPlatform()) {
            //start with a clean slate
            DefaultCamelContext.setContextCounter(0);
            TestSupportNodeIdFactory.resetCounters();
            super.runBare();
        }
    }

    /**
     * Override to return false to skip the test on platforms where it cannot run.
     * Default: run everywhere.
     */
    protected boolean canRunOnThisPlatform() {
        return true;
    }

    // Builder methods for expressions used when testing
    // -------------------------------------------------------------------------

    /**
     * Returns a value builder for the given header
     */
    public static ValueBuilder header(String name) {
        return Builder.header(name);
    }

    /**
     * Returns a value builder for the given property
     */
    public static ValueBuilder property(String name) {
        return Builder.property(name);
    }

    /**
     * Returns a predicate and value builder for the inbound body on an exchange
     */
    public static ValueBuilder body() {
        return Builder.body();
    }

    /**
     * Returns a predicate and value builder for the inbound message body as a
     * specific type
     */
    public static <T> ValueBuilder bodyAs(Class<T> type) {
        return Builder.bodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the outbound body on an
     * exchange
     */
    public static ValueBuilder outBody() {
        return Builder.outBody();
    }

    /**
     * Returns a predicate and value builder for the outbound message body as a
     * specific type
     */
    public static <T> ValueBuilder outBodyAs(Class<T> type) {
        return Builder.outBodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the fault body on an
     * exchange
     */
    public static ValueBuilder faultBody() {
        return Builder.faultBody();
    }

    /**
     * Returns a predicate and value builder for the fault message body as a
     * specific type
     */
    public static <T> ValueBuilder faultBodyAs(Class<T> type) {
        return Builder.faultBodyAs(type);
    }

    /**
     * Returns a value builder for the given system property
     */
    public static ValueBuilder systemProperty(String name) {
        return Builder.systemProperty(name);
    }

    /**
     * Returns a value builder for the given system property, using the given
     * default value when the property is not set
     */
    public static ValueBuilder systemProperty(String name, String defaultValue) {
        return Builder.systemProperty(name, defaultValue);
    }

    // Assertions
    // -----------------------------------------------------------------------

    /**
     * Asserts that the value is an instance of the expected type and returns it
     * cast to that type.
     */
    public static <T> T assertIsInstanceOf(Class<T> expectedType, Object value) {
        assertNotNull("Expected an instance of type: " + expectedType.getName() + " but was null", value);
        assertTrue("object should be a " + expectedType.getName() + " but was: " + value + " with type: "
                   + value.getClass().getName(), expectedType.isInstance(value));
        return expectedType.cast(value);
    }

    /**
     * Asserts that the endpoint is non-null and has the expected URI.
     */
    public static void assertEndpointUri(Endpoint endpoint, String uri) {
        assertNotNull("Endpoint is null when expecting endpoint for: " + uri, endpoint);
        assertEquals("Endpoint uri for: " + endpoint, uri, endpoint.getEndpointUri());
    }

    /**
     * Asserts the In message on the exchange contains the expected value
     */
    public static Object assertInMessageHeader(Exchange exchange, String name, Object expected) {
        return assertMessageHeader(exchange.getIn(), name, expected);
    }

    /**
     * Asserts the Out message on the exchange contains the expected value
     */
    public static Object assertOutMessageHeader(Exchange exchange, String name, Object expected) {
        return assertMessageHeader(exchange.getOut(), name, expected);
    }

    /**
     * Asserts that the given exchange has an OUT message of the given body value
     *
     * @param exchange the exchange which should have an OUT message
     * @param expected the expected value of the OUT message
     * @throws InvalidPayloadException is thrown if the payload is not the expected class type
     */
    public static void assertInMessageBodyEquals(Exchange exchange, Object expected)
throws InvalidPayloadException { assertNotNull("Should have a response exchange!", exchange); Object actual; if (expected == null) { actual = ExchangeHelper.getMandatoryInBody(exchange); assertEquals("in body of: " + exchange, expected, actual); } else { actual = ExchangeHelper.getMandatoryInBody(exchange, expected.getClass()); } assertEquals("in body of: " + exchange, expected, actual); LOG.debug("Received response: " + exchange + " with in: " + exchange.getIn()); } /** * Asserts that the given exchange has an OUT message of the given body value * * @param exchange the exchange which should have an OUT message * @param expected the expected value of the OUT message * @throws InvalidPayloadException is thrown if the payload is not the expected class type */ public static void assertOutMessageBodyEquals(Exchange exchange, Object expected) throws InvalidPayloadException { assertNotNull("Should have a response exchange!", exchange); Object actual; if (expected == null) { actual = ExchangeHelper.getMandatoryOutBody(exchange); assertEquals("output body of: " + exchange, expected, actual); } else { actual = ExchangeHelper.getMandatoryOutBody(exchange, expected.getClass()); } assertEquals("output body of: " + exchange, expected, actual); LOG.debug("Received response: " + exchange + " with out: " + exchange.getOut()); } public static Object assertMessageHeader(Message message, String name, Object expected) { Object value = message.getHeader(name); assertEquals("Header: " + name + " on Message: " + message, expected, value); return value; } /** * Asserts that the given expression when evaluated returns the given answer */ public static Object assertExpression(Expression expression, Exchange exchange, Object expected) { Object value; if (expected != null) { value = expression.evaluate(exchange, expected.getClass()); } else { value = expression.evaluate(exchange, Object.class); } LOG.debug("Evaluated expression: " + expression + " on exchange: " + exchange + " result: " + 
value); assertEquals("Expression: " + expression + " on Exchange: " + exchange, expected, value); return value; } /** * Asserts that the predicate returns the expected value on the exchange */ public static void assertPredicateMatches(Predicate predicate, Exchange exchange) { assertPredicate(predicate, exchange, true); } /** * Asserts that the predicate returns the expected value on the exchange */ public static void assertPredicateDoesNotMatch(Predicate predicate, Exchange exchange) { try { PredicateAssertHelper.assertMatches(predicate, "Predicate should match: ", exchange); } catch (AssertionError e) { LOG.debug("Caught expected assertion error: " + e); } assertPredicate(predicate, exchange, false); } /** * Asserts that the predicate returns the expected value on the exchange */ public static boolean assertPredicate(final Predicate predicate, Exchange exchange, boolean expected) { if (expected) { PredicateAssertHelper.assertMatches(predicate, "Predicate failed: ", exchange); } boolean value = predicate.matches(exchange); LOG.debug("Evaluated predicate: " + predicate + " on exchange: " + exchange + " result: " + value); assertEquals("Predicate: " + predicate + " on Exchange: " + exchange, expected, value); return value; } /** * Resolves an endpoint and asserts that it is found */ public static Endpoint resolveMandatoryEndpoint(CamelContext context, String uri) { Endpoint endpoint = context.getEndpoint(uri); assertNotNull("No endpoint found for URI: " + uri, endpoint); return endpoint; } /** * Resolves an endpoint and asserts that it is found */ public static <T extends Endpoint> T resolveMandatoryEndpoint(CamelContext context, String uri, Class<T> endpointType) { T endpoint = context.getEndpoint(uri, endpointType); assertNotNull("No endpoint found for URI: " + uri, endpoint); return endpoint; } /** * Creates an exchange with the given body */ protected Exchange createExchangeWithBody(CamelContext camelContext, Object body) { Exchange exchange = new 
DefaultExchange(camelContext); Message message = exchange.getIn(); message.setHeader("testName", getName()); message.setHeader("testClass", getClass().getName()); message.setBody(body); return exchange; } public static <T> T assertOneElement(List<T> list) { assertEquals("Size of list should be 1: " + list, 1, list.size()); return list.get(0); } /** * Asserts that a list is of the given size */ public static <T> List<T> assertListSize(List<T> list, int size) { return assertListSize("List", list, size); } /** * Asserts that a list is of the given size */ public static <T> List<T> assertListSize(String message, List<T> list, int size) { assertEquals(message + " should be of size: " + size + " but is: " + list, size, list.size()); return list; } /** * Asserts that a list is of the given size */ public static <T> Collection<T> assertCollectionSize(Collection<T> list, int size) { return assertCollectionSize("List", list, size); } /** * Asserts that a list is of the given size */ public static <T> Collection<T> assertCollectionSize(String message, Collection<T> list, int size) { assertEquals(message + " should be of size: " + size + " but is: " + list, size, list.size()); return list; } /** * A helper method to create a list of Route objects for a given route builder */ public static List<Route> getRouteList(RouteBuilder builder) throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(builder); context.start(); List<Route> answer = context.getRoutes(); context.stop(); return answer; } /** * Asserts that the text contains the given string * * @param text the text to compare * @param containedText the text which must be contained inside the other text parameter */ public static void assertStringContains(String text, String containedText) { assertNotNull("Text should not be null!", text); assertTrue("Text: " + text + " does not contain: " + containedText, text.contains(containedText)); } /** * If a processor is wrapped with a bunch of 
DelegateProcessor or DelegateAsyncProcessor objects * this call will drill through them and return the wrapped Processor. */ public static Processor unwrap(Processor processor) { while (true) { if (processor instanceof DelegateProcessor) { processor = ((DelegateProcessor)processor).getProcessor(); } else { return processor; } } } /** * If a processor is wrapped with a bunch of DelegateProcessor or DelegateAsyncProcessor objects * this call will drill through them and return the Channel. * <p/> * Returns null if no channel is found. */ public static Channel unwrapChannel(Processor processor) { while (true) { if (processor instanceof Channel) { return (Channel) processor; } else if (processor instanceof DelegateProcessor) { processor = ((DelegateProcessor)processor).getProcessor(); } else { return null; } } } /** * Recursively delete a directory, useful to zapping test data * * @param file the directory to be deleted * @return <tt>false</tt> if error deleting directory */ public static boolean deleteDirectory(String file) { return deleteDirectory(new File(file)); } /** * Recursively delete a directory, useful to zapping test data * * @param file the directory to be deleted * @return <tt>false</tt> if error deleting directory */ public static boolean deleteDirectory(File file) { int tries = 0; int maxTries = 5; boolean exists = true; while (exists && (tries < maxTries)) { recursivelyDeleteDirectory(file); tries++; exists = file.exists(); if (exists) { try { Thread.sleep(1000); } catch (InterruptedException e) { // Ignore } } } return !exists; } private static void recursivelyDeleteDirectory(File file) { if (!file.exists()) { return; } if (file.isDirectory()) { File[] files = file.listFiles(); for (File child : files) { recursivelyDeleteDirectory(child); } } boolean success = file.delete(); if (!success) { LOG.warn("Deletion of file: " + file.getAbsolutePath() + " failed"); } } /** * create the directory * * @param file the directory to be created */ public static void 
createDirectory(String file) { File dir = new File(file); dir.mkdirs(); } /** * To be used for folder/directory comparison that works across different platforms such * as Window, Mac and Linux. */ public static void assertDirectoryEquals(String expected, String actual) { assertDirectoryEquals(null, expected, actual); } /** * To be used for folder/directory comparison that works across different platforms such * as Window, Mac and Linux. */ public static void assertDirectoryEquals(String message, String expected, String actual) { // must use single / as path separators String expectedPath = expected.replace('\\', '/'); String actualPath = actual.replace('\\', '/'); if (message != null) { assertEquals(message, expectedPath, actualPath); } else { assertEquals(expectedPath, actualPath); } } /** * To be used to check is a file is found in the file system */ public static void assertFileExists(String filename) { File file = new File(filename).getAbsoluteFile(); assertTrue("File " + filename + " should exist", file.exists()); } /** * To be used to check is a file is <b>not</b> found in the file system */ public static void assertFileNotExists(String filename) { File file = new File(filename).getAbsoluteFile(); assertFalse("File " + filename + " should not exist", file.exists()); } /** * Is this OS the given platform. * <p/> * Uses <tt>os.name</tt> from the system properties to determine the OS. * * @param platform such as Windows * @return <tt>true</tt> if its that platform. */ public static boolean isPlatform(String platform) { String osName = System.getProperty("os.name").toLowerCase(Locale.US); return osName.indexOf(platform.toLowerCase(Locale.US)) > -1; } /** * Is this Java by the given vendor. * <p/> * Uses <tt>java.vendor</tt> from the system properties to determine the vendor. * * @param vendor such as IBM * @return <tt>true</tt> if its that vendor. 
*/ public static boolean isJavaVendor(String vendor) { String javaVendor = System.getProperty("java.vendor").toLowerCase(Locale.US); return javaVendor.indexOf(vendor.toLowerCase(Locale.US)) > -1; } /** * Is this Java 1.5 * * @return <tt>true</tt> if its Java 1.5, <tt>false</tt> if its not (for example Java 1.6 or better) */ public static boolean isJava15() { String javaVersion = System.getProperty("java.version").toLowerCase(Locale.US); return javaVersion.startsWith("1.5"); } /** * Gets the current test method name * * @return the method name */ public String getTestMethodName() { return getName(); } }
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.util; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import com.google.common.collect.Sets; import com.google.devtools.build.lib.testutil.MoreAsserts; import com.google.devtools.build.lib.testutil.Scratch; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.io.IOException; import java.nio.charset.Charset; import java.util.Collection; @RunWith(JUnit4.class) public class DependencySetTest { private Scratch scratch = new Scratch(); private DependencySet newDependencySet() { return new DependencySet(scratch.resolve("/")); } @Test public void dotDParser_simple() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); String filename = "hello.o"; Path dotd = scratch.file("/tmp/foo.d", filename + ": \\", " " + file1 + " \\", " " + file2 + " "); DependencySet depset = newDependencySet().read(dotd); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2), depset.getDependencies()); assertEquals(depset.getOutputFileName(), 
filename); } @Test public void dotDParser_simple_crlf() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); String filename = "hello.o"; Path dotd = scratch.file("/tmp/foo.d", filename + ": \\\r", " " + file1 + " \\\r", " " + file2 + " "); DependencySet depset = newDependencySet().read(dotd); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2), depset.getDependencies()); assertEquals(depset.getOutputFileName(), filename); } @Test public void dotDParser_simple_cr() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); String filename = "hello.o"; Path dotd = scratch.file("/tmp/foo.d", filename + ": \\\r" + " " + file1 + " \\\r" + " " + file2 + " "); DependencySet depset = newDependencySet().read(dotd); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2), depset.getDependencies()); assertEquals(depset.getOutputFileName(), filename); } @Test public void dotDParser_leading_crlf() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); String filename = "hello.o"; Path dotd = scratch.file("/tmp/foo.d", "\r\n" + filename + ": \\\r\n" + " " + file1 + " \\\r\n" + " " + file2 + " "); DependencySet depset = newDependencySet().read(dotd); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2), depset.getDependencies()); assertEquals(depset.getOutputFileName(), filename); } @Test public void dotDParser_oddFormatting() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); PathFragment file3 = new PathFragment("/usr/local/blah/blah/genhello/other.h"); 
PathFragment file4 = new PathFragment("/usr/local/blah/blah/genhello/onemore.h"); String filename = "hello.o"; Path dotd = scratch.file("/tmp/foo.d", filename + ": " + file1 + " \\", " " + file2 + "\\", " " + file3 + " " + file4); DependencySet depset = newDependencySet().read(dotd); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2, file3, file4), depset.getDependencies()); assertEquals(depset.getOutputFileName(), filename); } @Test public void dotDParser_relativeFilenames() throws Exception { PathFragment file1 = new PathFragment("hello.cc"); PathFragment file2 = new PathFragment("hello.h"); String filename = "hello.o"; Path dotd = scratch.file("/tmp/foo.d", filename + ": \\", " " + file1 + " \\", " " + file2 + " "); DependencySet depset = newDependencySet().read(dotd); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2), depset.getDependencies()); assertEquals(depset.getOutputFileName(), filename); } @Test public void dotDParser_emptyFile() throws Exception { Path dotd = scratch.file("/tmp/empty.d"); DependencySet depset = newDependencySet().read(dotd); Collection<PathFragment> headers = depset.getDependencies(); if (!headers.isEmpty()) { fail("Not empty: " + headers.size() + " " + headers); } assertEquals(depset.getOutputFileName(), null); } @Test public void dotDParser_multipleTargets() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); Path dotd = scratch.file("/tmp/foo.d", "hello.o: \\", " " + file1, "hello2.o: \\", " " + file2); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2), newDependencySet().read(dotd).getDependencies()); } /* * Regression test: if gcc fails to execute remotely, and we retry locally, then the behavior * of gcc's DEPENDENCIES_OUTPUT option is to append, not overwrite, the .d file. As a result, * during retry, a second stanza is written to the file. 
* * We handle this by merging all of the stanzas. */ @Test public void dotDParser_duplicateStanza() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); PathFragment file3 = new PathFragment("/usr/local/blah/blah/genhello/other.h"); Path dotd = scratch.file("/tmp/foo.d", "hello.o: \\", " " + file1 + " \\", " " + file2 + " ", "hello.o: \\", " " + file1 + " \\", " " + file3 + " "); MoreAsserts.assertSameContents(Sets.newHashSet(file1, file2, file3), newDependencySet().read(dotd).getDependencies()); } @Test public void dotDParser_errorOnNoTrailingNewline() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); Path dotd = scratch.file("/tmp/foo.d"); FileSystemUtils.writeContent( dotd, ("hello.o: \\\n " + file1).getBytes(Charset.forName("UTF-8"))); try { newDependencySet().read(dotd); fail(); } catch (IOException e) { assertThat(e.getMessage()).contains("File does not end in a newline"); } } @Test public void writeSet() throws Exception { PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); PathFragment file3 = new PathFragment("/usr/local/blah/blah/genhello/other.h"); String filename = "/usr/local/blah/blah/genhello/hello.o"; DependencySet depSet1 = newDependencySet(); depSet1.addDependency(file1); depSet1.addDependency(file2); depSet1.addDependency(file3); depSet1.setOutputFileName(filename); Path outfile = scratch.resolve(filename); Path dotd = scratch.resolve("/usr/local/blah/blah/genhello/hello.d"); FileSystemUtils.createDirectoryAndParents(dotd.getParentDirectory()); depSet1.write(outfile, ".d"); String dotdContents = new String(FileSystemUtils.readContentAsLatin1(dotd)); String expected = "usr/local/blah/blah/genhello/hello.o: \\\n" + " /usr/local/blah/blah/genhello/hello.cc \\\n" + " 
/usr/local/blah/blah/genhello/hello.h \\\n" + " /usr/local/blah/blah/genhello/other.h\n"; assertEquals(expected, dotdContents); assertEquals(filename, depSet1.getOutputFileName()); } @Test public void writeReadSet() throws Exception { String filename = "/usr/local/blah/blah/genhello/hello.d"; PathFragment file1 = new PathFragment("/usr/local/blah/blah/genhello/hello.cc"); PathFragment file2 = new PathFragment("/usr/local/blah/blah/genhello/hello.h"); PathFragment file3 = new PathFragment("/usr/local/blah/blah/genhello/other.h"); DependencySet depSet1 = newDependencySet(); depSet1.addDependency(file1); depSet1.addDependency(file2); depSet1.addDependency(file3); depSet1.setOutputFileName(filename); Path dotd = scratch.resolve(filename); FileSystemUtils.createDirectoryAndParents(dotd.getParentDirectory()); depSet1.write(dotd, ".d"); DependencySet depSet2 = newDependencySet().read(dotd); assertEquals(depSet1, depSet2); // due to how pic.d files are written, absolute paths are changed into relatives assertEquals(depSet1.getOutputFileName(), "/" + depSet2.getOutputFileName()); } }
/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package org.apache.flex.compiler.internal.mxml;

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;

import org.apache.flex.compiler.common.XMLName;
import org.apache.flex.compiler.constants.IASLanguageConstants;
import org.apache.flex.compiler.internal.projects.FlexProject;
import org.apache.flex.compiler.mxml.IMXMLLanguageConstants;

/**
 * This singleton class represents the 2006 dialect of MXML,
 * with the language namespace <code>"http://www.adobe.com/2006/mxml"</code>.
 * <p>
 * The special language tags of this dialect are {@code <Binding>},
 * {@code <Component>}, {@code <Metadata>}, {@code <Model>},
 * {@code <Script>}, and {@code <Style>}.
 */
public class MXMLDialect2006 extends MXMLDialect
{
    // The singleton instance of this class.
    private static final MXMLDialect INSTANCE =
        new MXMLDialect2006(IMXMLLanguageConstants.NAMESPACE_MXML_2006, 2006);

    /**
     * Gets the singleton instance of this class.
     */
    public static MXMLDialect getInstance()
    {
        return INSTANCE;
    }

    // Protected constructor.
    // Precomputes the XMLNames for this dialect's special language tags;
    // the *XMLName fields are presumably declared in MXMLDialect — confirm in superclass.
    protected MXMLDialect2006(String languageNamespace, int year)
    {
        super(languageNamespace, year);

        bindingXMLName = new XMLName(languageNamespace, IMXMLLanguageConstants.BINDING);
        componentXMLName = new XMLName(languageNamespace, IMXMLLanguageConstants.COMPONENT);
        metadataXMLName = new XMLName(languageNamespace, IMXMLLanguageConstants.METADATA);
        modelXMLName = new XMLName(languageNamespace, IMXMLLanguageConstants.MODEL);
        scriptXMLName = new XMLName(languageNamespace, IMXMLLanguageConstants.SCRIPT);
        styleXMLName = new XMLName(languageNamespace, IMXMLLanguageConstants.STYLE);
    }

    @Override
    public boolean isWhitespace(char c)
    {
        // This definition corresponds to the characters
        // that Java's trim() method trims.
        return c <= ' ';
    }

    // Returns true only if every character of the string is whitespace
    // (by this dialect's isWhitespace definition); true for the empty string.
    @Override
    public boolean isWhitespace(String s)
    {
        int n = s.length();
        for (int i = 0; i < n; i++)
        {
            char c = s.charAt(i);
            if (!isWhitespace(c))
                return false;
        }
        return true;
    }

    // Collapses each run of whitespace (Character.isWhitespace, which is a wider
    // definition than this class's isWhitespace(char)) into a single
    // replacementChar, then trims the result. Leading whitespace is dropped
    // entirely because lastWasSpace starts out true.
    @Override
    public String collapseWhitespace(String s, char replacementChar)
    {
        StringBuilder sb = new StringBuilder();
        boolean lastWasSpace = true;
        int n = s.length();
        int i = 0;
        while (i < n)
        {
            char c = s.charAt(i++);
            boolean ws = Character.isWhitespace(c);
            if (ws)
            {
                if (lastWasSpace)
                    ; // consume the character
                else
                    sb.append(replacementChar);
                lastWasSpace = true;
            }
            else
            {
                sb.append(c);
                lastWasSpace = false;
            }
        }
        return trim(sb.toString());
    }

    @Override
    public String trim(String s)
    {
        return s.trim();
    }

    // Splits a comma-separated string into trimmed elements, first stripping
    // an optional surrounding [...] array wrapper and all single-quote characters.
    @Override
    public String[] splitAndTrim(String s)
    {
        // first make sure it isn't in array format
        int c = s.indexOf('[');
        if (c != -1)
            s = s.substring(c + 1);
        c = s.indexOf(']');
        if (c != -1)
            s = s.substring(0, c);
        //check for quotes
        s = s.replace("'", "");
        String[] a = s.split(",");
        // NOTE(review): String.split() never returns null, so this guard is
        // effectively dead code; kept as-is for behavior preservation.
        if (a == null)
            return null;
        int n = a.length;
        for (int i = 0; i < n; i++)
        {
            a[i] = trim(a[i]);
        }
        return a;
    }

    // Parses "true"/"false" (case-insensitive after trimming), returning null
    // for anything else. NOTE(review): uses the default-locale toLowerCase().
    @Override
    public Boolean parseBoolean(FlexProject project, String s, EnumSet<TextParsingFlags> flags)
    {
        s = trim(s);
        s = s.toLowerCase();
        if (s.equals(IASLanguageConstants.FALSE))
            return Boolean.FALSE;
        else if (s.equals(IASLanguageConstants.TRUE))
            return Boolean.TRUE;
        return null;
    }

    // Parses an int via Integer.decode() (so 0x... hex and #-prefixed forms
    // follow decode()'s rules); optionally resolves named colors when the
    // ALLOW_COLOR_NAME flag is set. Returns null on failure.
    @Override
    public Integer parseInt(FlexProject project, String s, EnumSet<TextParsingFlags> flags)
    {
        s = trim(s);

        // Don't parse ints with leading zeros, which are not octal.
        // For example, a MA zip code, 02127.
        if (hasLeadingZeros(s))
            return null;

        Integer value = null;
        try
        {
            value = Integer.decode(s);
            if (value != null)
                return value;
        }
        catch (NumberFormatException e)
        {
        }

        if (flags != null && flags.contains(TextParsingFlags.ALLOW_COLOR_NAME))
        {
            value = project.getNamedColor(s);
            if (value != null)
                return value;
        }

        return null;
    }

    // Parses an unsigned-int-style value via Long.decode(); optionally resolves
    // named colors when ALLOW_COLOR_NAME is set. Returns null on failure.
    @Override
    public Long parseUint(FlexProject project, String s, EnumSet<TextParsingFlags> flags)
    {
        s = trim(s);

        // Don't parse uint's with leading zeros, which are not octal.
        // For example, a MA zip code, 02127.
        if (hasLeadingZeros(s))
            return null;

        Long value = null;
        try
        {
            value = Long.decode(s);
            long longValue = value.longValue();
            // TODO I don't understand the purpose of the following logic,
            // which comes from the old compiler. It seems like it should be
            // enforcing the positivity of the uint, but doesn't appear to do that.
            // NOTE(review): both branches of this ternary yield the same numeric
            // value (boxed), so the condition has no observable effect here.
            return (longValue == Math.abs(longValue) && longValue <= 0xFFFFFFFFL) ? value : longValue;
        }
        catch (NumberFormatException e)
        {
        }

        if (flags != null && flags.contains(TextParsingFlags.ALLOW_COLOR_NAME))
        {
            Integer colorValue = project.getNamedColor(s);
            if (colorValue != null)
                return colorValue.longValue();
        }

        return null;
    }

    // Parses a Number: tries parseInt() first, then falls back to Double.
    // Returns null on failure.
    @Override
    public Number parseNumber(FlexProject project, String s, EnumSet<TextParsingFlags> flags)
    {
        // Don't parse Numbers with leading zeros, which are not octal.
        // For example, a MA zip code, 02127.
        if (hasLeadingZeros(s))
            return null;

        Integer value = parseInt(project, s, flags);
        if (value != null)
            return value;

        try
        {
            return Double.valueOf(s);
        }
        catch (NumberFormatException e)
        {
        }

        return null;
    }

    // Returns the string, collapsing whitespace first when the
    // COLLAPSE_WHITE_SPACE flag is set.
    @Override
    public String parseString(FlexProject project, String s, EnumSet<TextParsingFlags> flags)
    {
        if (flags != null && flags.contains(TextParsingFlags.COLLAPSE_WHITE_SPACE))
            s = collapseWhitespace(s, ' ');

        return s;
    }

    // Parses a [a, b, c]-style array literal into a List of parsed elements.
    // Only active when the ALLOW_ARRAY flag is set; returns null otherwise,
    // when the string is not in array form, or when any element fails to parse.
    // Elements may be quoted with ' or "; quotes are stripped.
    @Override
    public List<Object> parseArray(FlexProject project, String s, EnumSet<TextParsingFlags> flags)
    {
        if (flags != null && flags.contains(TextParsingFlags.ALLOW_ARRAY))
        {
            String trimmed = trim(s);

            if (!isArray(trimmed))
                return null;

            List<Object> list = new ArrayList<Object>();

            if (isEmptyArray(trimmed))
                return list;

            // Character-by-character scan starting after the opening '[',
            // accumulating each element's text into 'buffer' until an
            // unquoted ',' or ']' terminates it.
            StringBuilder buffer = new StringBuilder();
            char quoteChar = '\'';
            boolean inQuotes = false;

            int n = trimmed.length();
            for (int i = 1; i < n; i++)
            {
                char c = trimmed.charAt(i);
                switch (c)
                {
                    case '[':
                    {
                        if (inQuotes)
                        {
                            buffer.append(c);
                        }
                        else
                        {
                            // The old compiler did not support nested arrays,
                            // and in fact behaves rather strangely when you
                            // write them.
                        }
                        break;
                    }
                    case '"':
                    case '\'':
                    {
                        if (inQuotes)
                        {
                            // Only the matching quote character closes the
                            // quoted run; the other kind is kept literally.
                            if (quoteChar == c)
                                inQuotes = false;
                            else
                                buffer.append(c);
                        }
                        else
                        {
                            inQuotes = true;
                            quoteChar = c;
                        }
                        break;
                    }
                    case ',':
                    case ']':
                    {
                        if (inQuotes)
                        {
                            buffer.append(c);
                        }
                        else
                        {
                            String elementText = trim(buffer.toString());
                            buffer = new StringBuilder();
                            // NOTE: Clear any special-processing flags, on the interpretation
                            // that they only apply to top-level scalars.
                            // NOTE: The old compiler did not support nested arrays.
                            Object element = parseObject(project, elementText, null);
                            if (element != null)
                                list.add(element);
                            else
                                return null;
                        }
                        break;
                    }
                    default:
                    {
                        buffer.append(c);
                        break;
                    }
                }
            }

            return list;
        }

        return null;
    }

    // Parses a value of unknown type by trying, in order:
    // Boolean, Array, Number — falling back to the original (untrimmed) string.
    @Override
    public Object parseObject(FlexProject project, String s, EnumSet<TextParsingFlags> flags)
    {
        String trimmed = trim(s);

        Object result;

        result = parseBoolean(project, trimmed, flags);
        if (result != null)
            return result;

        result = parseArray(project, trimmed, flags);
        if (result != null)
            return result;

        result = parseNumber(project, trimmed, flags);
        if (result != null)
            return result;

        return s;
    }

    //
    // Other methods
    //

    // True for strings like "02127" that start with '0' but are not hex
    // ("0x"/"0X") or a decimal fraction ("0."); such strings must not be
    // treated as octal numbers.
    private boolean hasLeadingZeros(String s)
    {
        boolean result = false;

        int n = s.length();
        if (n > 1 && s.charAt(0) == '0' &&
            !(s.startsWith("0x") || s.startsWith("0X") || s.startsWith("0.")))
        {
            result = true;
        }

        return result;
    }

    // True if the (already-trimmed) string is bracketed like "[...]".
    protected boolean isArray(String s)
    {
        assert s.equals(trim(s));

        int n = s.length();
        return n >= 2 && s.charAt(0) == '[' && s.charAt(n - 1) == ']';
    }

    // True if the (already-trimmed) string is an array literal whose
    // interior is empty or all whitespace, e.g. "[]" or "[  ]".
    private boolean isEmptyArray(String s)
    {
        assert s.equals(trim(s));

        boolean result = false;

        if (isArray(s) && s.substring(1, s.length() - 1).trim().length() == 0)
            result = true;

        return result;
    }
}
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.hadoop;

import alluxio.AlluxioURI;
import alluxio.Configuration;
import alluxio.PropertyKey;
import alluxio.client.file.FileOutStream;
import alluxio.client.file.FileSystem;
import alluxio.client.file.FileSystemContext;
import alluxio.client.file.FileSystemMasterClient;
import alluxio.client.file.URIStatus;
import alluxio.client.file.options.CreateDirectoryOptions;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.client.file.options.DeleteOptions;
import alluxio.client.file.options.SetAttributeOptions;
import alluxio.client.lineage.LineageContext;
import alluxio.exception.AlluxioException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.FileDoesNotExistException;
import alluxio.exception.InvalidPathException;
import alluxio.exception.PreconditionMessage;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.security.User;
import alluxio.security.authorization.Mode;
import alluxio.util.CommonUtils;
import alluxio.wire.FileBlockInfo;

import com.google.common.base.Preconditions;
import com.google.common.net.HostAndPort;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Progressable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.security.Principal;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.NotThreadSafe;
import javax.security.auth.Subject;

/**
 * Base class for Apache Hadoop based Alluxio {@link org.apache.hadoop.fs.FileSystem}. This class
 * really just delegates to {@link alluxio.client.file.FileSystem} for most operations.
 *
 * All implementing classes must define {@link #isZookeeperMode()} which states if fault tolerant is
 * used and {@link #getScheme()} for Hadoop's {@link java.util.ServiceLoader} support.
 */
@NotThreadSafe
abstract class AbstractFileSystem extends org.apache.hadoop.fs.FileSystem {
  private static final Logger LOG = LoggerFactory.getLogger(AbstractFileSystem.class);

  public static final String FIRST_COM_PATH = "alluxio_dep/";
  // Always tell Hadoop that we have 3x replication.
  private static final int BLOCK_REPLICATION_CONSTANT = 3;

  /** Lock for initializing the contexts, currently only one set of contexts is supported. */
  private static final Object INIT_LOCK = new Object();

  /** Flag for if the contexts have been initialized. */
  @GuardedBy("INIT_LOCK")
  private static volatile boolean sInitialized = false;

  // Per-instance handles; set up during initialize() (or injected by the test constructor).
  private FileSystemContext mContext = null;
  private FileSystem mFileSystem = null;

  private URI mUri = null;
  private Path mWorkingDir = new Path(AlluxioURI.SEPARATOR);
  private Statistics mStatistics = null;
  // Scheme plus authority ("scheme://authority"), prefixed onto Alluxio paths returned to Hadoop.
  private String mAlluxioHeader = null;

  /**
   * Constructs a new {@link AbstractFileSystem} instance with specified a {@link FileSystem}
   * handler for tests.
   *
   * @param fileSystem handler to file system
   */
  @SuppressFBWarnings("ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD")
  AbstractFileSystem(FileSystem fileSystem) {
    mFileSystem = fileSystem;
    // Mark the static contexts initialized so initialize() takes the fast path in tests.
    sInitialized = true;
  }

  /**
   * Constructs a new {@link AbstractFileSystem} instance.
   */
  AbstractFileSystem() {}

  /**
   * Appending is not supported; this only succeeds by creating a brand-new file at the path.
   *
   * @param path the file to append to
   * @param bufferSize unused
   * @param progress unused
   * @return a stream for the newly created file
   * @throws IOException if the path already exists or the create fails
   */
  @Override
  public FSDataOutputStream append(Path path, int bufferSize, Progressable progress)
      throws IOException {
    LOG.debug("append({}, {}, {})", path, bufferSize, progress);
    if (mStatistics != null) {
      mStatistics.incrementWriteOps(1);
    }
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    try {
      if (mFileSystem.exists(uri)) {
        throw new IOException(ExceptionMessage.FILE_ALREADY_EXISTS.getMessage(uri));
      }
      return new FSDataOutputStream(mFileSystem.createFile(uri), mStatistics);
    } catch (AlluxioException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void close() throws IOException {
    // Only close a context we created ourselves; the shared singleton must stay open for
    // other file system instances in this JVM.
    if (mContext != null && mContext != FileSystemContext.INSTANCE) {
      mContext.close();
    }
    super.close();
  }

  /**
   * Attempts to create a file. Overwrite will not succeed if the path exists and is a folder.
   *
   * @param path path to create
   * @param permission permissions of the created file/folder
   * @param overwrite overwrite if file exists
   * @param bufferSize the size in bytes of the buffer to be used
   * @param replication under filesystem replication factor
   * @param blockSize block size in bytes
   * @param progress queryable progress
   * @return an {@link FSDataOutputStream} created at the indicated path of a file
   */
  @Override
  public FSDataOutputStream create(Path path, FsPermission permission, boolean overwrite,
      int bufferSize, short replication, long blockSize, Progressable progress)
      throws IOException {
    LOG.debug("create({}, {}, {}, {}, {}, {}, {})", path, permission, overwrite, bufferSize,
        replication, blockSize, progress);
    if (mStatistics != null) {
      mStatistics.incrementWriteOps(1);
    }
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    CreateFileOptions options = CreateFileOptions.defaults().setBlockSizeBytes(blockSize)
        .setMode(new Mode(permission.toShort()));

    FileOutStream outStream;
    try {
      // Optimistic first attempt: succeeds when the path does not exist yet.
      outStream = mFileSystem.createFile(uri, options);
    } catch (AlluxioException e) {
      //now we should consider the override parameter
      try {
        if (mFileSystem.exists(uri)) {
          if (!overwrite) {
            throw new IOException(ExceptionMessage.FILE_ALREADY_EXISTS.getMessage(uri));
          }
          if (mFileSystem.getStatus(uri).isFolder()) {
            // Overwrite never replaces a directory with a file.
            throw new IOException(ExceptionMessage.FILE_CREATE_IS_DIRECTORY.getMessage(uri));
          }
          mFileSystem.delete(uri);
        }
        outStream = mFileSystem.createFile(uri, options);
      } catch (AlluxioException e2) {
        throw new IOException(e2);
      }
    }
    return new FSDataOutputStream(outStream, mStatistics);
  }

  /**
   * Opens an {@link FSDataOutputStream} at the indicated Path with write-progress reporting.
   * Same as {@link #create(Path, boolean, int, short, long, Progressable)}, except fails if parent
   * directory doesn't already exist.
   *
   * TODO(hy): We need to refactor this method after having a new internal API support (ALLUXIO-46).
   *
   * @param path the file name to open
   * @param overwrite if a file with this name already exists, then if true, the file will be
   *        overwritten, and if false an error will be thrown.
   * @param bufferSize the size of the buffer to be used
   * @param replication required block replication for the file
   * @param blockSize the size in bytes of the buffer to be used
   * @param progress queryable progress
   * @see #setPermission(Path, FsPermission)
   * @deprecated API only for 0.20-append
   */
  @Override
  @Deprecated
  public FSDataOutputStream createNonRecursive(Path path, FsPermission permission,
      boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress)
      throws IOException {
    AlluxioURI parentUri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path.getParent()));
    // "Non-recursive" contract: the parent must already exist.
    ensureExists(parentUri);
    return create(path, permission, overwrite, bufferSize, replication, blockSize, progress);
  }

  /**
   * Attempts to delete the file or directory with the specified path.
   *
   * @param path path to delete
   * @return true if one or more files/directories were deleted; false otherwise
   * @deprecated Use {@link #delete(Path, boolean)} instead.
   */
  @Override
  @Deprecated
  public boolean delete(Path path) throws IOException {
    return delete(path, true);
  }

  /**
   * Attempts to delete the file or directory with the specified path.
   *
   * @param path path to delete
   * @param recursive if true, will attempt to delete all children of the path
   * @return true if one or more files/directories were deleted; false otherwise
   */
  @Override
  public boolean delete(Path path, boolean recursive) throws IOException {
    LOG.debug("delete({}, {})", path, recursive);
    if (mStatistics != null) {
      mStatistics.incrementWriteOps(1);
    }
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    DeleteOptions options = DeleteOptions.defaults().setRecursive(recursive);
    try {
      mFileSystem.delete(uri, options);
      return true;
    } catch (InvalidPathException | FileDoesNotExistException e) {
      // Missing or malformed paths follow Hadoop's convention of returning false, not throwing.
      LOG.warn("delete failed: {}", e.getMessage());
      return false;
    } catch (AlluxioException e) {
      throw new IOException(e);
    }
  }

  @Override
  public long getDefaultBlockSize() {
    return Configuration.getBytes(PropertyKey.USER_BLOCK_SIZE_BYTES_DEFAULT);
  }

  /**
   * Returns the locations of all blocks overlapping the byte range [start, start+len].
   *
   * @param file the file whose blocks are queried; null yields null per Hadoop convention
   * @param start start offset of the range, in bytes
   * @param len length of the range, in bytes
   * @return an array of block locations (worker addresses plus UFS locations)
   */
  @Override
  public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len)
      throws IOException {
    if (file == null) {
      return null;
    }
    if (mStatistics != null) {
      mStatistics.incrementReadOps(1);
    }

    AlluxioURI path = new AlluxioURI(HadoopUtils.getPathWithoutScheme(file.getPath()));
    List<FileBlockInfo> blocks = getFileBlocks(path);
    List<BlockLocation> blockLocations = new ArrayList<>();
    for (FileBlockInfo fileBlockInfo : blocks) {
      long offset = fileBlockInfo.getOffset();
      long end = offset + fileBlockInfo.getBlockInfo().getLength();
      // Check if there is any overlapping between [start, start+len] and [offset, end]
      if (end >= start && offset <= start + len) {
        ArrayList<String> names = new ArrayList<>();
        ArrayList<String> hosts = new ArrayList<>();
        // add the existing in-memory block locations
        for (alluxio.wire.BlockLocation location : fileBlockInfo.getBlockInfo().getLocations()) {
          HostAndPort address = HostAndPort.fromParts(location.getWorkerAddress().getHost(),
              location.getWorkerAddress().getDataPort());
          names.add(address.toString());
          hosts.add(address.getHostText());
        }
        // add under file system locations
        for (String location : fileBlockInfo.getUfsLocations()) {
          names.add(location);
          hosts.add(HostAndPort.fromString(location).getHostText());
        }
        blockLocations.add(new BlockLocation(CommonUtils.toStringArray(names),
            CommonUtils.toStringArray(hosts), offset, fileBlockInfo.getBlockInfo().getLength()));
      }
    }
    BlockLocation[] ret = new BlockLocation[blockLocations.size()];
    blockLocations.toArray(ret);
    return ret;
  }

  /**
   * {@inheritDoc}
   *
   * If the file does not exist in Alluxio, query it from HDFS.
   */
  @Override
  public FileStatus getFileStatus(Path path) throws IOException {
    LOG.debug("getFileStatus({})", path);
    if (mStatistics != null) {
      mStatistics.incrementReadOps(1);
    }
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    URIStatus fileStatus;
    try {
      fileStatus = mFileSystem.getStatus(uri);
    } catch (FileDoesNotExistException e) {
      // Hadoop callers expect FileNotFoundException for a missing path.
      throw new FileNotFoundException(e.getMessage());
    } catch (AlluxioException e) {
      throw new IOException(e);
    }
    // Replication is always reported as BLOCK_REPLICATION_CONSTANT (3), regardless of UFS.
    return new FileStatus(fileStatus.getLength(), fileStatus.isFolder(),
        BLOCK_REPLICATION_CONSTANT, fileStatus.getBlockSizeBytes(),
        fileStatus.getLastModificationTimeMs(), fileStatus.getCreationTimeMs(),
        new FsPermission((short) fileStatus.getMode()), fileStatus.getOwner(),
        fileStatus.getGroup(), new Path(mAlluxioHeader + uri));
  }

  /**
   * Changes owner or group of a path (i.e. a file or a directory). If username is null, the
   * original username remains unchanged. Same as groupname. If username and groupname are
   * non-null, both of them will be changed.
   *
   * @param path path to set owner or group
   * @param username username to be set
   * @param groupname groupname to be set
   */
  @Override
  public void setOwner(Path path, final String username, final String groupname)
      throws IOException {
    LOG.debug("setOwner({},{},{})", path, username, groupname);
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    SetAttributeOptions options = SetAttributeOptions.defaults();
    boolean ownerOrGroupChanged = false;
    if (username != null && !username.isEmpty()) {
      options.setOwner(username).setRecursive(false);
      ownerOrGroupChanged = true;
    }
    if (groupname != null && !groupname.isEmpty()) {
      options.setGroup(groupname).setRecursive(false);
      ownerOrGroupChanged = true;
    }
    // Skip the RPC entirely when neither owner nor group was supplied.
    if (ownerOrGroupChanged) {
      try {
        mFileSystem.setAttribute(uri, options);
      } catch (AlluxioException e) {
        throw new IOException(e);
      }
    }
  }

  /**
   * Changes permission of a path.
   *
   * @param path path to set permission
   * @param permission permission set to path
   */
  @Override
  public void setPermission(Path path, FsPermission permission) throws IOException {
    LOG.debug("setMode({},{})", path, permission.toString());
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    SetAttributeOptions options =
        SetAttributeOptions.defaults().setMode(new Mode(permission.toShort())).setRecursive(false);
    try {
      mFileSystem.setAttribute(uri, options);
    } catch (AlluxioException e) {
      throw new IOException(e);
    }
  }

  /**
   * Gets the URI scheme that maps to the {@link org.apache.hadoop.fs.FileSystem}. This was
   * introduced in Hadoop 2.x as a means to make loading new {@link org.apache.hadoop.fs.FileSystem}
   * s simpler. This doesn't exist in Hadoop 1.x, so cannot put {@literal @Override}.
   *
   * @return scheme hadoop should map to
   *
   * @see org.apache.hadoop.fs.FileSystem#createFileSystem(java.net.URI,
   *      org.apache.hadoop.conf.Configuration)
   */
  public abstract String getScheme();

  @Override
  public URI getUri() {
    return mUri;
  }

  @Override
  public Path getWorkingDirectory() {
    LOG.debug("getWorkingDirectory: {}", mWorkingDir);
    return mWorkingDir;
  }

  /**
   * {@inheritDoc}
   *
   * Sets up a lazy connection to Alluxio through mFileSystem. This method will override and
   * invalidate the current contexts. This must be called before client operations in order to
   * guarantee the integrity of the contexts, meaning users should not alternate between using the
   * Hadoop compatible API and native Alluxio API in the same process.
   *
   * If hadoop file system cache is enabled, this method should only be called when switching user.
   */
  @SuppressFBWarnings("ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD")
  @Override
  public void initialize(URI uri, org.apache.hadoop.conf.Configuration conf) throws IOException {
    // NOTE, we must switch the context classloader to the one provided by Hadoop configuration
    // first before anything else. This ensures all Alluxio classes are loaded by the same
    // classloader, given this class is already loaded by the Hadoop configuration classloader.
    Thread.currentThread().setContextClassLoader(conf.getClassLoader());

    // When using zookeeper we get the leader master address from the alluxio.zookeeper.address
    // configuration property, so the user doesn't need to specify the authority.
    if (!Configuration.getBoolean(PropertyKey.ZOOKEEPER_ENABLED)) {
      Preconditions.checkNotNull(uri.getHost(), PreconditionMessage.URI_HOST_NULL);
      Preconditions.checkNotNull(uri.getPort(), PreconditionMessage.URI_PORT_NULL);
    }

    super.initialize(uri, conf);
    LOG.debug("initialize({}, {}). Connecting to Alluxio", uri, conf);
    HadoopUtils.addS3Credentials(conf);
    HadoopUtils.addSwiftCredentials(conf);
    setConf(conf);

    // HDFS doesn't allow the authority to be empty; it must be "/" instead.
    String authority = uri.getAuthority() == null ? "/" : uri.getAuthority();
    mAlluxioHeader = getScheme() + "://" + authority;
    // Set the statistics member. Use mStatistics instead of the parent class's variable.
    mStatistics = statistics;
    mUri = URI.create(mAlluxioHeader);

    boolean masterAddIsSameAsDefault = checkMasterAddress();

    // Fast path: contexts already set up for the same master address; no locking needed.
    if (sInitialized && masterAddIsSameAsDefault) {
      updateFileSystemAndContext();
      return;
    }
    synchronized (INIT_LOCK) {
      // If someone has initialized the object since the last check, return
      if (sInitialized) {
        if (masterAddIsSameAsDefault) {
          updateFileSystemAndContext();
          return;
        } else {
          // A different master address invalidates the existing global contexts; re-initialize.
          LOG.warn(ExceptionMessage.DIFFERENT_MASTER_ADDRESS
              .getMessage(mUri.getHost() + ":" + mUri.getPort(),
                  FileSystemContext.INSTANCE.getMasterAddress()));
          sInitialized = false;
        }
      }

      initializeInternal(uri, conf);
      sInitialized = true;
    }

    updateFileSystemAndContext();
  }

  /**
   * Initializes the default contexts if the master address specified in the URI is different
   * from the default one.
   *
   * @param uri the uri
   * @param conf the hadoop conf
   */
  void initializeInternal(URI uri, org.apache.hadoop.conf.Configuration conf) throws IOException {
    // Load Alluxio configuration if any and merge to the one in Alluxio file system. These
    // modifications to ClientContext are global, affecting all Alluxio clients in this JVM.
    // We assume here that all clients use the same configuration.
    HadoopConfigurationUtils.mergeHadoopConfiguration(conf);
    Configuration.set(PropertyKey.ZOOKEEPER_ENABLED, isZookeeperMode());
    if (!Configuration.getBoolean(PropertyKey.ZOOKEEPER_ENABLED)) {
      Configuration.set(PropertyKey.MASTER_HOSTNAME, uri.getHost());
      Configuration.set(PropertyKey.MASTER_RPC_PORT, uri.getPort());
    }

    // These must be reset to pick up the change to the master address.
    // TODO(andrew): We should reset key value system in this situation - see ALLUXIO-1706.
    LineageContext.INSTANCE.reset();
    FileSystemContext.INSTANCE.reset();

    // Try to connect to master, if it fails, the provided uri is invalid.
    FileSystemMasterClient client = FileSystemContext.INSTANCE.acquireMasterClient();
    try {
      client.connect();
      // Connected, initialize.
    } catch (AlluxioStatusException e) {
      throw e.toIOException();
    } finally {
      FileSystemContext.INSTANCE.releaseMasterClient(client);
    }
  }

  /**
   * Sets the file system and context.
   */
  private void updateFileSystemAndContext() {
    Subject subject = getHadoopSubject();
    if (subject != null) {
      // A Hadoop user is present: build a per-user context so operations run as that user.
      mContext = FileSystemContext.create(subject);
      mFileSystem = FileSystem.Factory.get(mContext);
    } else {
      // No Hadoop user: fall back to the shared global context.
      mContext = FileSystemContext.INSTANCE;
      mFileSystem = FileSystem.Factory.get();
    }
  }

  /**
   * @return true if the master address in mUri is the same as the one in the default file
   *         system context.
   */
  private boolean checkMasterAddress() {
    InetSocketAddress masterAddress = FileSystemContext.INSTANCE.getMasterAddress();
    boolean sameHost = masterAddress.getHostString().equals(mUri.getHost());
    boolean samePort = masterAddress.getPort() == mUri.getPort();
    if (sameHost && samePort) {
      return true;
    }
    return false;
  }

  /**
   * @return the hadoop subject if exists, null if not exist
   */
  private Subject getHadoopSubject() {
    try {
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      String username = ugi.getShortUserName();
      if (username != null && !username.isEmpty()) {
        User user = new User(ugi.getShortUserName());
        HashSet<Principal> principals = new HashSet<>();
        principals.add(user);
        return new Subject(false, principals, new HashSet<>(), new HashSet<>());
      }
      return null;
    } catch (IOException e) {
      // No current Hadoop user could be determined; treated as "no subject".
      return null;
    }
  }

  /**
   * Determines if zookeeper should be used for the {@link org.apache.hadoop.fs.FileSystem}. This
   * method should only be used for
   * {@link #initialize(java.net.URI, org.apache.hadoop.conf.Configuration)}.
   *
   * @return true if zookeeper should be used
   */
  protected abstract boolean isZookeeperMode();

  @Override
  public FileStatus[] listStatus(Path path) throws IOException {
    LOG.debug("listStatus({})", path);
    if (mStatistics != null) {
      mStatistics.incrementReadOps(1);
    }
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    List<URIStatus> statuses;
    try {
      statuses = mFileSystem.listStatus(uri);
    } catch (FileDoesNotExistException e) {
      throw new FileNotFoundException(HadoopUtils.getPathWithoutScheme(path));
    } catch (AlluxioException e) {
      throw new IOException(e);
    }

    FileStatus[] ret = new FileStatus[statuses.size()];
    for (int k = 0; k < statuses.size(); k++) {
      URIStatus status = statuses.get(k);
      ret[k] = new FileStatus(status.getLength(), status.isFolder(), BLOCK_REPLICATION_CONSTANT,
          status.getBlockSizeBytes(), status.getLastModificationTimeMs(),
          status.getCreationTimeMs(), new FsPermission((short) status.getMode()),
          status.getOwner(), status.getGroup(), new Path(mAlluxioHeader + status.getPath()));
    }
    return ret;
  }

  /**
   * Attempts to create a folder with the specified path. Parent directories will be created.
   *
   * @param path path to create
   * @param permission permissions to grant the created folder
   * @return true if the indicated folder is created successfully or already exists
   */
  @Override
  public boolean mkdirs(Path path, FsPermission permission) throws IOException {
    LOG.debug("mkdirs({}, {})", path, permission);
    if (mStatistics != null) {
      mStatistics.incrementWriteOps(1);
    }
    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    CreateDirectoryOptions options =
        CreateDirectoryOptions.defaults().setRecursive(true).setAllowExists(true)
            .setMode(new Mode(permission.toShort()));
    try {
      mFileSystem.createDirectory(uri, options);
      return true;
    } catch (AlluxioException e) {
      throw new IOException(e);
    }
  }

  /**
   * Attempts to open the specified file for reading.
   *
   * @param path the file name to open
   * @param bufferSize stream buffer size in bytes, currently unused
   * @return an {@link FSDataInputStream} at the indicated path of a file
   */
  // TODO(calvin): Consider respecting the buffer size option
  @Override
  public FSDataInputStream open(Path path, int bufferSize) throws IOException {
    LOG.debug("open({}, {})", path, bufferSize);
    if (mStatistics != null) {
      mStatistics.incrementReadOps(1);
    }

    AlluxioURI uri = new AlluxioURI(HadoopUtils.getPathWithoutScheme(path));
    return new FSDataInputStream(new HdfsFileInputStream(mContext, uri, mStatistics));
  }

  /**
   * Renames src to dst, following Hadoop semantics: if dst is an existing directory, src is
   * moved into it; failures are reported by returning false rather than throwing.
   *
   * @param src source path
   * @param dst destination path
   * @return true on success, false otherwise
   */
  @Override
  public boolean rename(Path src, Path dst) throws IOException {
    LOG.debug("rename({}, {})", src, dst);
    if (mStatistics != null) {
      mStatistics.incrementWriteOps(1);
    }

    AlluxioURI srcPath = new AlluxioURI(HadoopUtils.getPathWithoutScheme(src));
    AlluxioURI dstPath = new AlluxioURI(HadoopUtils.getPathWithoutScheme(dst));
    try {
      mFileSystem.rename(srcPath, dstPath);
    } catch (FileDoesNotExistException e) {
      LOG.warn("rename failed: {}", e.getMessage());
      return false;
    } catch (AlluxioException e) {
      // The direct rename failed; if dst is an existing directory, retry moving src into it.
      ensureExists(srcPath);
      URIStatus dstStatus;
      try {
        dstStatus = mFileSystem.getStatus(dstPath);
      } catch (IOException | AlluxioException e2) {
        // NOTE(review): logs the outer exception e's message, not e2's — verify this is
        // intentional; e2 is otherwise swallowed.
        LOG.warn("rename failed: {}", e.getMessage());
        return false;
      }
      // If the destination is an existing folder, try to move the src into the folder
      if (dstStatus != null && dstStatus.isFolder()) {
        dstPath = dstPath.join(srcPath.getName());
      } else {
        LOG.warn("rename failed: {}", e.getMessage());
        return false;
      }
      try {
        mFileSystem.rename(srcPath, dstPath);
      } catch (IOException | AlluxioException e2) {
        LOG.error("Failed to rename {} to {}", src, dst, e2);
        return false;
      }
    } catch (IOException e) {
      LOG.error("Failed to rename {} to {}", src, dst, e);
      return false;
    }
    return true;
  }

  @Override
  public void setWorkingDirectory(Path path) {
    LOG.debug("setWorkingDirectory({})", path);
    if (path.isAbsolute()) {
      mWorkingDir = path;
    } else {
      // Relative paths are resolved against the current working directory.
      mWorkingDir = new Path(mWorkingDir, path);
    }
  }

  /**
   * Convenience method which ensures the given path exists, wrapping any {@link AlluxioException}
   * in {@link IOException}.
   *
   * @param path the path to look up
   */
  private void ensureExists(AlluxioURI path) throws IOException {
    try {
      mFileSystem.getStatus(path);
    } catch (AlluxioException e) {
      throw new IOException(e);
    }
  }

  /**
   * @param path the file whose block metadata is requested
   * @return the file's block info list, wrapping any {@link AlluxioException} in
   *         {@link IOException}
   */
  private List<FileBlockInfo> getFileBlocks(AlluxioURI path) throws IOException {
    try {
      return mFileSystem.getStatus(path).getFileBlockInfos();
    } catch (AlluxioException e) {
      throw new IOException(e);
    }
  }
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package org.apache.tools.ant.taskdefs.optional.net;

import org.apache.commons.net.telnet.TelnetClient;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Calendar;
import java.util.Enumeration;
import java.util.Vector;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;

/**
 * Automates the telnet protocol.
 *
 */
public class TelnetTask extends Task {
    /** Polling interval (ms) while waiting for server output with a timeout. */
    private static final int WAIT_INTERVAL = 250;
    /** Standard telnet port. */
    private static final int TELNET_PORT = 23;

    /**
     *  The userid to login with, if automated login is used
     */
    private String userid  = null;

    /**
     *  The password to login with, if automated login is used
     */
    private String password = null;

    /**
     *  The server to connect to.
     */
    private String server  = null;

    /**
     *  The tcp port to connect to.
     */
    private int port = TELNET_PORT;

    /**
     *  The list of read/write commands for this session
     */
    private Vector telnetTasks = new Vector();

    /**
     *  If true, adds a CR to beginning of login script
     */
    private boolean addCarriageReturn = false;

    /**
     *  Default time allowed for waiting for a valid response
     *  for all child reads.  A value of 0 means no limit.
     */
    private Integer defaultTimeout = null;

    /**
     *  Verify that all parameters are included.
     *  Connect and possibly login
     *  Iterate through the list of Reads and writes
     * @throws BuildException on error
     */
    public void execute() throws BuildException {
        /**  A server name is required to continue */
        if (server == null) {
            throw new BuildException("No Server Specified");
        }
        /**  A userid and password must appear together
         *   if they appear.  They are not required.
         */
        if (userid == null && password != null) {
            throw new BuildException("No Userid Specified");
        }
        if (password == null && userid != null) {
            throw new BuildException("No Password Specified");
        }

        /**  Create the telnet client object */
        AntTelnetClient telnet = null;
        try {
            telnet = new AntTelnetClient();
            try {
                telnet.connect(server, port);
            } catch (IOException e) {
                throw new BuildException("Can't connect to " + server);
            }
            /**  Login if userid and password were specified */
            if (userid != null && password != null) {
                login(telnet);
            }
            /**  Process each sub command */
            Enumeration tasksToRun = telnetTasks.elements();
            while (tasksToRun != null && tasksToRun.hasMoreElements()) {
                TelnetSubTask task = (TelnetSubTask) tasksToRun.nextElement();
                // Propagate the task-level timeout to reads that don't set their own.
                if (task instanceof TelnetRead && defaultTimeout != null) {
                    ((TelnetRead) task).setDefaultTimeout(defaultTimeout);
                }
                task.execute(telnet);
            }
        } finally {
            // Always try to tear down the connection, even if a sub command failed.
            if (telnet != null && telnet.isConnected()) {
                try {
                    telnet.disconnect();
                } catch (IOException e) {
                    throw new BuildException("Error disconnecting from " + server);
                }
            }
        }
    }

    /**
     *  Process a 'typical' login.  If it differs, use the read
     *  and write tasks explicitely
     */
    private void login(AntTelnetClient telnet) {
        if (addCarriageReturn) {
            telnet.sendString("\n", true);
        }
        // Match the tail of "login:"/"Login:" and "password:"/"Password:"
        // so the prompt's capitalization doesn't matter.
        telnet.waitForString("ogin:");
        telnet.sendString(userid, true);
        telnet.waitForString("assword:");
        telnet.sendString(password, false);
    }

    /**
     * Set the the login id to use on the server;
     * required if <tt>password</tt> is set.
     * @param u a <code>String</code> value
     */
    public void setUserid(String u) {
        this.userid = u;
    }

    /**
     *  Set the the login password to use
     * required if <tt>userid</tt> is set.
     * @param p a <code>String</code> value
     */
    public void setPassword(String p) {
        this.password = p;
    }

    /**
     *  Set the hostname or address of the remote server.
     * @param m a <code>String</code> value
     */
    public void setServer(String m) {
        this.server = m;
    }

    /**
     *  Set the tcp port to connect to; default is 23.
     * @param p an <code>int</code> value
     */
    public void setPort(int p) {
        this.port = p;
    }

    /**
     *  send a carriage return after connecting; optional, defaults to false.
     * @param b a <code>boolean</code> value
     */
    public void setInitialCR(boolean b) {
        this.addCarriageReturn = b;
    }

    /**
     *  set a default timeout in seconds to wait for a response,
     *  zero means forever (the default)
     * @param i an <code>Integer</code> value
     */
    public void setTimeout(Integer i) {
        this.defaultTimeout = i;
    }

    /**
     *  A string to wait for from the server.
     *  A subTask &lt;read&gt; tag was found.  Create the object,
     *  Save it in our list, and return it.
     * @return a read telnet sub task
     */
    public TelnetSubTask createRead() {
        TelnetSubTask task = (TelnetSubTask) new TelnetRead();
        telnetTasks.addElement(task);
        return task;
    }

    /**
     *  Add text to send to the server
     *  A subTask &lt;write&gt; tag was found.  Create the object,
     *  Save it in our list, and return it.
     * @return a write telnet sub task
     */
    public TelnetSubTask createWrite() {
        TelnetSubTask task = (TelnetSubTask) new TelnetWrite();
        telnetTasks.addElement(task);
        return task;
    }

    /**
     *  This class is the parent of the Read and Write tasks.
     *  It handles the common attributes for both.
     */
    public class TelnetSubTask {
        // CheckStyle:VisibilityModifier OFF - bc
        protected String taskString = "";
        // CheckStyle:VisibilityModifier ON

        /**
         * Execute the subtask.
         * @param telnet the client
         * @throws BuildException always as it is not allowed to instantiate this object
         */
        public void execute(AntTelnetClient telnet)
                throws BuildException {
            throw new BuildException("Shouldn't be able instantiate a SubTask directly");
        }

        /**
         *  the message as nested text
         * @param s the nested text
         */
        public void addText(String s) {
            setString(getProject().replaceProperties(s));
        }

        /**
         * the message as an attribute
         * @param s a <code>String</code> value
         */
        public void setString(String s) {
            // Appends rather than replaces, so nested text and the attribute accumulate.
            taskString += s;
        }
    }

    /**
     *  Sends text to the connected server
     */
    public class TelnetWrite extends TelnetSubTask {
        private boolean echoString = true;

        /**
         * Execute the write task.
         * @param telnet the task to use
         * @throws BuildException on error
         */
        public void execute(AntTelnetClient telnet)
                throws BuildException {
            telnet.sendString(taskString, echoString);
        }

        /**
         *  Whether or not the message should be echoed to the log.
         *  Defaults to <code>true</code>.
         * @param b a <code>boolean</code> value
         */
        public void setEcho(boolean b) {
            echoString = b;
        }
    }

    /**
     *  Reads the output from the connected server
     *  until the required string is found or we time out.
     */
    public class TelnetRead extends TelnetSubTask {
        private Integer timeout = null;

        /**
         * Execute the read task.
         * @param telnet the task to use
         * @throws BuildException on error
         */
        public void execute(AntTelnetClient telnet)
                throws BuildException {
            telnet.waitForString(taskString, timeout);
        }

        /**
         *  a timeout value that overrides any task wide timeout.
         * @param i an <code>Integer</code> value
         */
        public void setTimeout(Integer i) {
            this.timeout = i;
        }

        /**
         *  Sets the default timeout if none has been set already
         * @param defaultTimeout an <code>Integer</code> value
         * @ant.attribute ignore="true"
         */
        public void setDefaultTimeout(Integer defaultTimeout) {
            if (timeout == null) {
                timeout = defaultTimeout;
            }
        }
    }

    /**
     *  This class handles the abstraction of the telnet protocol.
     *  Currently it is a wrapper around <a
     *  href="http://jakarta.apache.org/commons/net/index.html">Jakarta
     *  Commons Net</a>.
     */
    public class AntTelnetClient extends TelnetClient {
        /**
         * Read from the telnet session until the string we are
         * waiting for is found
         * @param s The string to wait on
         */
        public void waitForString(String s) {
            waitForString(s, null);
        }

        /**
         * Read from the telnet session until the string we are
         * waiting for is found or the timeout has been reached
         * @param s The string to wait on
         * @param timeout The maximum number of seconds to wait
         */
        public void waitForString(String s, Integer timeout) {
            InputStream is = this.getInputStream();
            try {
                StringBuffer sb = new StringBuffer();
                // windowStart tracks the index where a match of s could begin; negative
                // until at least s.length() characters have been read.
                int windowStart = -s.length();
                if (timeout == null || timeout.intValue() == 0) {
                    // No timeout: block on read() until the tail of sb equals s.
                    while (windowStart < 0
                           || !sb.substring(windowStart).equals(s)) {
                        sb.append((char) is.read());
                        windowStart++;
                    }
                } else {
                    Calendar endTime = Calendar.getInstance();
                    endTime.add(Calendar.SECOND, timeout.intValue());
                    while (windowStart < 0
                           || !sb.substring(windowStart).equals(s)) {
                        // Poll every WAIT_INTERVAL ms until data arrives or the deadline passes.
                        // NOTE(review): an InterruptedException from sleep() is caught by the
                        // generic Exception handler below without re-interrupting the thread.
                        while (Calendar.getInstance().before(endTime)
                               && is.available() == 0) {
                            Thread.sleep(WAIT_INTERVAL);
                        }
                        if (is.available() == 0) {
                            log("Read before running into timeout: "
                                + sb.toString(), Project.MSG_DEBUG);
                            throw new BuildException(
                                "Response timed-out waiting for \"" + s + '\"',
                                getLocation());
                        }
                        sb.append((char) is.read());
                        windowStart++;
                    }
                }
                log(sb.toString(), Project.MSG_INFO);
            } catch (BuildException be) {
                throw be;
            } catch (Exception e) {
                throw new BuildException(e, getLocation());
            }
        }

        /**
         * Write this string to the telnet session.
         * @param s          the string to write
         * @param echoString if true log the string sent
         */
        public void sendString(String s, boolean echoString) {
            OutputStream os = this.getOutputStream();
            try {
                // NOTE(review): uses the platform default charset for the bytes sent — confirm
                // this is the intended encoding for the remote server.
                os.write((s + "\n").getBytes());
                if (echoString) {
                    log(s, Project.MSG_INFO);
                }
                os.flush();
            } catch (Exception e) {
                throw new BuildException(e, getLocation());
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.excalibur.instrument.manager.http;

import com.sun.image.codec.jpeg.JPEGCodec;
import com.sun.image.codec.jpeg.JPEGEncodeParam;
import com.sun.image.codec.jpeg.JPEGImageEncoder;

import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;

import org.apache.excalibur.instrument.manager.http.server.AbstractHTTPURLHandler;
import org.apache.excalibur.instrument.manager.http.server.HTTPRedirect;
import org.apache.excalibur.instrument.manager.http.server.URLCoder;
import org.apache.excalibur.instrument.manager.DefaultInstrumentManager;
import org.apache.excalibur.instrument.manager.InstrumentSampleDescriptor;
import org.apache.excalibur.instrument.manager.InstrumentSampleSnapshot;
import org.apache.excalibur.instrument.manager.NoSuchInstrumentSampleException;

/**
 * HTTP handler that renders an instrument sample as a JPEG line chart at
 * "/sample-chart.jpg".
 *
 * @author <a href="mailto:dev@avalon.apache.org">Avalon Development Team</a>
 * @version CVS $Revision: 1.9 $ $Date: 2004/03/06 14:01:28 $
 * @since 4.1
 */
public class SampleChartHandler
    extends AbstractHTTPURLHandler
{
    /** The instrument manager */
    private DefaultInstrumentManager m_manager;

    /** Default width of the image. */
    private int m_width;

    /** Default height of the image. */
    private int m_height;

    /** Default antialias flag. */
    private boolean m_antialias;

    /*---------------------------------------------------------------
     * Constructors
     *-------------------------------------------------------------*/
    /**
     * Creates a new SampleChartHandler.
     *
     * @param manager Reference to the instrument manager interface.
     * @param width Default image width.
     * @param height Default image height.
     * @param antialias True if the default antialias parameter should be true.
     */
    public SampleChartHandler( DefaultInstrumentManager manager,
                               int width,
                               int height,
                               boolean antialias )
    {
        super( "/sample-chart.jpg", CONTENT_TYPE_IMAGE_JPEG,
            InstrumentManagerHTTPConnector.ENCODING );

        this.m_manager = manager;
        this.m_width = width;
        this.m_height = height;
        this.m_antialias = antialias;
    }

    /*---------------------------------------------------------------
     * AbstractHandler Methods
     *-------------------------------------------------------------*/
    /**
     * Handles the specified request.
     *
     * @param path The full path being handled.
     * @param parameters A Map of the parameters in the request.
     * @param os The OutputStream to write the result to.
     */
    public void doGet( String path, Map parameters, OutputStream os )
        throws IOException
    {
        String name = this.getParameter( parameters, "name" );
        InstrumentSampleDescriptor desc;
        try
        {
            desc = this.m_manager.locateInstrumentSampleDescriptor( name );
        }
        catch ( NoSuchInstrumentSampleException e )
        {
            // Sample no longer exists, go back to the parent instrument.
            int pos = name.lastIndexOf( '.' );
            if ( pos >= 0 )
            {
                // Starting with Java 1.4, encode takes an encoding, but this needs to
                //  work with 1.3.  Use our own version.
                String iName = URLCoder.encode( name.substring( 0, pos ),
                    InstrumentManagerHTTPConnector.ENCODING );

                throw new HTTPRedirect( "instrument.html?name=" + iName );
            }
            else
            {
                throw new HTTPRedirect( "instrumentable.html" );
            }
        }

        // Clamp requested dimensions to sane bounds: width 1..2048, height 1..1024.
        int width = this.getIntegerParameter( parameters, "width", this.m_width );
        width = Math.max( 1, Math.min( 2048, width ) );
        int height = this.getIntegerParameter( parameters, "height", this.m_height );
        height = Math.max( 1, Math.min( 1024, height ) );

        boolean antialias =
            this.getBooleanParameter( parameters, "antialias", this.m_antialias );

        InstrumentSampleSnapshot snapshot = desc.getSnapshot();

        // Decide on a line interval based on the interval of the sample.
        // Each branch picks the number of sample points between vertical grid
        // lines (hInterval) and the MessageFormat-style date patterns used for
        // axis labels (format) and hover detail (detailFormat).
        long interval = snapshot.getInterval();
        int hInterval;
        String format;
        String detailFormat;
        if( interval < 1000 )
        {
            // Once per 10 seconds.
            hInterval = (int)( 10000 / interval );
            format = "{3}:{4}:{5}";
            detailFormat = "{1}/{2} {3}:{4}:{5}.{6}";
        }
        else if( interval < 60000 )
        {
            // Once per minute.
            hInterval = (int)( 60000 / interval );
            format = "{3}:{4}:{5}";
            detailFormat = "{1}/{2} {3}:{4}:{5}";
        }
        else if( interval < 600000 )
        {
            // Once per 10 minutes
            hInterval = (int)( 600000 / interval );
            format = "{1}/{2} {3}:{4}";
            detailFormat = "{1}/{2} {3}:{4}";
        }
        else if( interval < 3600000 )
        {
            // Once per hour.
            hInterval = (int)( 3600000 / interval );
            format = "{1}/{2} {3}:{4}";
            detailFormat = "{1}/{2} {3}:{4}";
        }
        else if( interval < 86400000 )
        {
            // Once per day.
            hInterval = (int)( 86400000 / interval );
            format = "{1}/{2}";
            detailFormat = "{1}/{2} {3}:{4}";
        }
        else if( interval < 604800000 )
        {
            // Once per week.
            hInterval = (int)( 604800000 / interval );
            format = "{0}/{1}/{2}";
            detailFormat = "{0}/{1}/{2}";
        }
        else
        {
            // Default to every 10 points.
            hInterval = 10;
            format = "{0}/{1}/{2}";
            detailFormat = "{0}/{1}/{2}";
        }

        // Actually create the chart and add it to the content pane
        LineChart chart = new LineChart( hInterval, interval, format, detailFormat, 20, antialias );
        chart.setValues( snapshot.getSamples(), snapshot.getTime() );

        byte[] imageData = null;

        // Create a new BufferedImage onto which the plant will be painted.
        BufferedImage bi = new BufferedImage( width, height, BufferedImage.TYPE_INT_RGB );

        // Paint the chart onto the Graphics object of the BufferedImage.
        chart.setSize( bi.getWidth(), bi.getHeight() );
        Graphics g;
        try
        {
            g = bi.createGraphics();
        }
        catch ( Throwable t )
        {
            // Linux throws NoClassDefFoundError.
            // Solaris throws InternalError
            // On Headless UNIX machines this error will be thrown when attempting to
            //  create an graphic.  The AWT libraries require a native library that
            //  only exists on UNIX system which have X-Windows installed.  This is
            //  never a problem on Windows systems.
            // Rather than giving the user nothing, send them a preprepared jpeg file
            //  that notifies them of the problem.
            String imageResource = "noawtlibs.jpg";
            BufferedInputStream is = new BufferedInputStream(
                this.getClass().getResourceAsStream( imageResource ) );
            byte[] noAWTLibs;
            try
            {
                // NOTE(review): available() + a single read() is not guaranteed to
                //  return the whole resource; confirm this always yields the full
                //  image in practice.
                noAWTLibs = new byte[is.available()];
                is.read( noAWTLibs, 0, noAWTLibs.length );
            }
            finally
            {
                is.close();
            }
            // Now write the error image out to the client.
            os.write( noAWTLibs );
            return;
        }
        chart.paintComponent( g );

        // Encode the BufferedImage as a JPEG image and write it to the output stream.
        JPEGImageEncoder encoder = JPEGCodec.createJPEGEncoder( os );
        JPEGEncodeParam param = encoder.getDefaultJPEGEncodeParam( bi );
        param.setQuality( 0.90f, true );
        encoder.encode( bi, param );
    }
}
/*
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.reteoo.builder;

import org.drools.ActivationListenerFactory;
import org.drools.RuleIntegrationException;
import org.drools.base.ClassObjectType;
import org.drools.base.mvel.MVELSalienceExpression;
import org.drools.common.BaseNode;
import org.drools.common.InternalRuleBase;
import org.drools.common.InternalWorkingMemory;
import org.drools.common.UpdateContext;
import org.drools.reteoo.LeftTupleSource;
import org.drools.reteoo.ReteooBuilder;
import org.drools.reteoo.RuleBuilder;
import org.drools.reteoo.TerminalNode;
import org.drools.reteoo.WindowNode;
import org.drools.rule.Accumulate;
import org.drools.rule.Collect;
import org.drools.rule.ConditionalBranch;
import org.drools.rule.EntryPoint;
import org.drools.rule.EvalCondition;
import org.drools.rule.Forall;
import org.drools.rule.From;
import org.drools.rule.GroupElement;
import org.drools.rule.InvalidPatternException;
import org.drools.rule.NamedConsequence;
import org.drools.rule.Pattern;
import org.drools.rule.QueryElement;
import org.drools.rule.Rule;
import org.drools.rule.WindowDeclaration;
import org.drools.rule.WindowReference;
import org.drools.time.TemporalDependencyMatrix;
import org.kie.conf.EventProcessingOption;

import java.util.ArrayList;
import java.util.List;

/**
 * Builds the Rete network for rules, entry points and window declarations by
 * dispatching each conditional-element type to its registered
 * {@link ReteooComponentBuilder}.
 */
public class ReteooRuleBuilder implements RuleBuilder {

    /** Registry mapping conditional-element classes to their component builders. */
    protected BuildUtils utils;

    public ReteooRuleBuilder() {
        this.utils = new BuildUtils();

        // Register one component builder per conditional-element type.
        this.utils.addBuilder( GroupElement.class, new GroupElementBuilder() );
        this.utils.addBuilder( Pattern.class, new PatternBuilder() );
        this.utils.addBuilder( EvalCondition.class, new EvalBuilder() );
        this.utils.addBuilder( QueryElement.class, new QueryElementBuilder() );
        this.utils.addBuilder( From.class, new FromBuilder() );
        this.utils.addBuilder( Collect.class, new CollectBuilder() );
        this.utils.addBuilder( Accumulate.class, new AccumulateBuilder() );
        this.utils.addBuilder( Forall.class, new ForallBuilder() );
        this.utils.addBuilder( EntryPoint.class, new EntryPointBuilder() );
        this.utils.addBuilder( WindowReference.class, new WindowReferenceBuilder() );
        this.utils.addBuilder( NamedConsequence.class, new NamedConsequenceBuilder() );
        this.utils.addBuilder( ConditionalBranch.class, new ConditionalBranchBuilder() );
    }

    /**
     * Creates the corresponding Rete network for the given <code>Rule</code> and adds it to
     * the given rule base.
     *
     * @param rule
     *            The rule to add.
     * @param rulebase
     *            The rulebase to add the rule to.
     * @param idGenerator
     *            Generator for node ids in the new network.
     *
     * @return a List&lt;TerminalNode&gt; of terminal nodes for the rule, one per subrule
     *
     * @throws RuleIntegrationException
     *             if an error prevents complete construction of the network for
     *             the <code>Rule</code>.
     * @throws InvalidPatternException
     */
    public List<TerminalNode> addRule( final Rule rule,
                                       final InternalRuleBase rulebase,
                                       final ReteooBuilder.IdGenerator idGenerator ) throws InvalidPatternException {
        // the list of terminal nodes
        final List<TerminalNode> nodes = new ArrayList<TerminalNode>();

        // transform rule and get the array of subrules
        final GroupElement[] subrules = rule.getTransformedLhs( rulebase.getConfiguration().getComponentFactory().getLogicTransformerFactory().getLogicTransformer() );

        for ( int i = 0; i < subrules.length; i++ ) {
            // creates a clean build context for each subrule
            final BuildContext context = new BuildContext( rulebase, idGenerator );
            context.setRule( rule );

            // if running in STREAM mode, calculate temporal distance for events
            if ( EventProcessingOption.STREAM.equals( rulebase.getConfiguration().getEventProcessingMode() ) ) {
                TemporalDependencyMatrix temporal = this.utils.calculateTemporalDistance( subrules[i] );
                context.setTemporalDistance( temporal );
            }

            // sequential mode disables tuple and object-type-node memories
            final boolean keepMemory = !rulebase.getConfiguration().isSequential();
            context.setTupleMemoryEnabled( keepMemory );
            context.setObjectTypeNodeMemoryEnabled( keepMemory );

            // adds subrule and collects its terminal node
            nodes.add( this.addSubRule( context, subrules[i], i, rule ) );
        }

        return nodes;
    }

    /**
     * Builds the network for a single subrule and attaches its terminal node.
     *
     * @param context build context for this subrule
     * @param subrule the transformed subrule to build
     * @param subruleIndex index of the subrule within the rule
     * @param rule the owning rule
     * @return the attached terminal node
     * @throws InvalidPatternException if the subrule contains an invalid pattern
     */
    private TerminalNode addSubRule( final BuildContext context,
                                     final GroupElement subrule,
                                     final int subruleIndex,
                                     final Rule rule ) throws InvalidPatternException {
        // gets the appropriate builder
        final ReteooComponentBuilder builder = this.utils.getBuilderFor( subrule );

        // checks if an initial-fact is needed
        if ( builder.requiresLeftActivation( this.utils, subrule ) ) {
            this.addInitialFactPattern( subrule );
        }

        // builds and attaches the subrule network
        builder.build( context, this.utils, subrule );

        ActivationListenerFactory factory =
            context.getRuleBase().getConfiguration().getActivationListenerFactory( rule.getActivationListener() );
        TerminalNode terminal = factory.createActivationListener( context.getNextId(),
                                                                  context.getTupleSource(),
                                                                  rule,
                                                                  subrule,
                                                                  subruleIndex,
                                                                  context );

        BaseNode baseTerminalNode = (BaseNode) terminal;
        baseTerminalNode.networkUpdated( new UpdateContext() );
        baseTerminalNode.attach( context );

        if ( context.getRuleBase().getConfiguration().isUnlinkingEnabled()
             && !unlinkingAllowedForRule( context.getRule() ) ) {
            // Hoisted: the original called getWorkingMemories() twice here.
            final InternalWorkingMemory[] wms = context.getWorkingMemories();
            setUnlinkDisabledCount( null,
                                    terminal.getLeftTupleSource(),
                                    ( wms.length == 0 ) ? null : wms );
        }

        // adds the terminal node to the list of nodes created/added by this sub-rule
        context.getNodes().add( baseTerminalNode );

        // assigns partition IDs to the new nodes
        //assignPartitionId(context);

        return terminal;
    }

    /**
     * Unlinking is only allowed for plain rules: queries, timed rules,
     * auto-focus rules and rules with MVEL-expression salience are excluded.
     */
    public static boolean unlinkingAllowedForRule( Rule rule ) {
        return !(rule.isQuery()
                 || rule.getTimer() != null
                 || rule.getAutoFocus()
                 || rule.getSalience() instanceof MVELSalienceExpression);
    }

    /**
     * Intended to walk the left-tuple-source chain disabling unlinking and
     * flushing staged tuples.  The implementation is currently disabled: this
     * method is a deliberate no-op.  The previous ~50-line commented-out
     * implementation has been removed from the body; consult version-control
     * history if it needs to be restored.
     *
     * @param startNode node at which to stop walking (may be null)
     * @param lt first left-tuple source to process
     * @param wms working memories to flush, or null when none exist
     */
    public void setUnlinkDisabledCount( LeftTupleSource startNode,
                                        LeftTupleSource lt,
                                        InternalWorkingMemory[] wms ) {
        // no-op (see javadoc)
    }

    /**
     * Adds an initial-fact pattern to the given subrule so that rules without
     * left activation still fire.
     *
     * @param subrule
     */
    private void addInitialFactPattern( final GroupElement subrule ) {
        // creates a pattern for initial fact
        final Pattern pattern = new Pattern( 0,
                                             ClassObjectType.InitialFact_ObjectType );

        // adds the pattern as the first child of the given AND group element
        subrule.addChild( 0, pattern );
    }

    /**
     * Builds the network fragment for an entry point declaration.
     */
    public void addEntryPoint( final String id,
                               final InternalRuleBase rulebase,
                               final ReteooBuilder.IdGenerator idGenerator ) {
        // creates a clean build context
        final BuildContext context = new BuildContext( rulebase, idGenerator );
        EntryPoint ep = new EntryPoint( id );
        ReteooComponentBuilder builder = utils.getBuilderFor( ep );
        builder.build( context, utils, ep );
    }

    /**
     * Builds the network fragment for a window declaration and returns the
     * resulting {@link WindowNode}.
     */
    public WindowNode addWindowNode( WindowDeclaration window,
                                     InternalRuleBase ruleBase,
                                     ReteooBuilder.IdGenerator idGenerator ) {
        // creates a clean build context
        final BuildContext context = new BuildContext( ruleBase, idGenerator );

        // sequential mode disables tuple and object-type-node memories
        final boolean keepMemory = !ruleBase.getConfiguration().isSequential();
        context.setTupleMemoryEnabled( keepMemory );
        context.setObjectTypeNodeMemoryEnabled( keepMemory );

        // gets the appropriate builder
        final WindowBuilder builder = WindowBuilder.INSTANCE;

        // builds and attaches
        builder.build( context, this.utils, window );

        return (WindowNode) context.getObjectSource();
    }
}
/* $Id$
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.etch.bindings.java.support;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import java.util.Date;

import org.apache.etch.bindings.java.msg.StructValue;
import org.apache.etch.bindings.java.msg.Type;
import org.apache.etch.bindings.java.msg.Validator;
import org.apache.etch.bindings.java.msg.ValueFactory;
import org.junit.Test;

/**
 * Test Validator_StructValue.
 */
public class TestValidator_StructValue
{
	private final ValueFactory vf = new DummyValueFactory();

	private final Type abc = new Type( "abc" );

	/** Common description prefix; 669497117 is the computed id of type "abc". */
	private static final String DESCR_PREFIX = "StructValue[abc(669497117), ";

	/** Expected class per dimension count: entry n is an n-dimensional StructValue array class. */
	private static final Class<?>[] DIM_CLASSES =
	{
		StructValue.class,
		StructValue[].class,
		StructValue[][].class,
		StructValue[][][].class,
		StructValue[][][][].class,
		StructValue[][][][][].class,
		StructValue[][][][][][].class,
		StructValue[][][][][][][].class,
		StructValue[][][][][][][][].class,
		StructValue[][][][][][][][][].class
	};

	/** Empty array fixtures: entry i has i+1 dimensions. */
	private static final Object[] EMPTY_ARRAYS =
	{
		new StructValue[] {},
		new StructValue[][] {},
		new StructValue[][][] {},
		new StructValue[][][][] {},
		new StructValue[][][][][] {},
		new StructValue[][][][][][] {},
		new StructValue[][][][][][][] {},
		new StructValue[][][][][][][][] {},
		new StructValue[][][][][][][][][] {},
		new StructValue[][][][][][][][][][] {}
	};

	/** @throws Exception */
	@Test
	public void constructor1() throws Exception
	{
		// Exercise every legal dimension count.
		for ( int n = 0; n <= 9; n++ )
			testconstructor( n, DESCR_PREFIX + n + "]", DIM_CLASSES[n] );

		assertEquals( 9, Validator.MAX_NDIMS );
	}

	private void testconstructor( int n, String descr, Class<?> expectedClass )
	{
		Validator_StructValue validator = Validator_StructValue.get( abc, n );
		assertEquals( n, validator.getNDims() );
		assertSame( expectedClass, validator.getExpectedClass() );
		assertEquals( descr, validator.toString() );
		assertEquals( abc, validator.getType() );
	}

	/** @throws Exception */
	@Test( expected = IllegalArgumentException.class )
	public void constructor2() throws Exception
	{
		Validator_StructValue.get( abc, -1 );
	}

	/** @throws Exception */
	@Test( expected = IllegalArgumentException.class )
	public void constructor3() throws Exception
	{
		Validator_StructValue.get( abc, Validator.MAX_NDIMS+1 );
	}

	/** @throws Exception */
	@Test
	public void elementvalidator1() throws Exception
	{
		// The element validator of an n-dim validator has n-1 dims.
		for ( int n = 1; n <= 9; n++ )
			testelementvalidator( n, DESCR_PREFIX + ( n - 1 ) + "]", DIM_CLASSES[n - 1] );

		assertEquals( 9, Validator.MAX_NDIMS );
	}

	private void testelementvalidator( int n, String descr, Class<?> expectedClass )
	{
		Validator_StructValue element =
			(Validator_StructValue) Validator_StructValue.get( abc, n ).elementValidator();
		assertEquals( n - 1, element.getNDims() );
		assertSame( expectedClass, element.getExpectedClass() );
		assertEquals( descr, element.toString() );
		assertEquals( abc, element.getType() );
	}

	/** @throws Exception */
	@Test( expected = IllegalArgumentException.class )
	public void elementvalidator2() throws Exception
	{
		Validator_StructValue.get( abc, 0 ).elementValidator();
	}

	/** @throws Exception */
	@Test
	public void good_scalar() throws Exception
	{
		testgoodvalue( 0, new StructValue( abc, vf ) );
	}

	/** @throws Exception */
	@Test
	public void good_array() throws Exception
	{
		// An n-dim validator accepts an n-dim array.
		for ( int n = 1; n <= 9; n++ )
			testgoodvalue( n, EMPTY_ARRAYS[n - 1] );

		assertEquals( 9, Validator.MAX_NDIMS );
	}

	private void testgoodvalue( int n, Object value )
	{
		TypeValidator validator = Validator_StructValue.get( abc, n );
		assertTrue( validator.validate( value ) );
		assertTrue( validateValueOk( validator, value ) );
	}

	/** @throws Exception */
	@Test
	public void bad_scalar() throws Exception
	{
		// Values that are wrong for both a scalar (n=0) and 1-dim validator.
		for ( int n = 0; n <= 1; n++ )
		{
			testbadvalue( n, null );
			testbadvalue( n, false );
			testbadvalue( n, true );
			testbadvalue( n, (byte) 1 );
			testbadvalue( n, (short) 2222 );
			testbadvalue( n, 4444444444444444L );
			testbadvalue( n, 5.5f );
			testbadvalue( n, 6.6 );
			testbadvalue( n, "" );
			testbadvalue( n, "abc" );
			testbadvalue( n, new Object() );
			testbadvalue( n, new Date() );
		}

		// Per-dimension specifics: a struct of the wrong type for n=0, and
		// a scalar struct (even of the right type) for n=1.
		testbadvalue( 0, 33333333 );
		testbadvalue( 0, new StructValue( new Type( "def" ), vf ) );
		testbadvalue( 1, 333333 );
		testbadvalue( 1, new StructValue( abc, vf ) );
	}

	/** @throws Exception */
	@Test
	public void bad_array() throws Exception
	{
		// Arrays with one dimension too many...
		for ( int n = 0; n <= 9; n++ )
			testbadvalue( n, EMPTY_ARRAYS[n] );

		assertEquals( 9, Validator.MAX_NDIMS );

		// ...and arrays with one dimension too few.
		for ( int n = 2; n <= 9; n++ )
			testbadvalue( n, EMPTY_ARRAYS[n - 2] );

		assertEquals( 9, Validator.MAX_NDIMS );
	}

	private void testbadvalue( int n, Object value )
	{
		TypeValidator validator = Validator_StructValue.get( abc, n );
		assertFalse( validator.validate( value ) );
		assertFalse( validateValueOk( validator, value ) );
	}

	private boolean validateValueOk( Validator v, Object value )
	{
		try
		{
			assertEquals( value, v.validateValue( value ) );
			return true;
		}
		catch ( Exception e )
		{
			return false;
		}
	}
}
// Copyright (C) 2011-2012 CRS4. // // This file is part of Hadoop-BAM. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
package org.seqdoop.hadoop_bam;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

import java.io.InputStream;
import java.io.IOException;
import java.io.EOFException;
import java.util.regex.*;

import org.seqdoop.hadoop_bam.FormatConstants.BaseQualityEncoding;
import org.seqdoop.hadoop_bam.util.ConfHelper;
import hbparquet.hadoop.util.ContextUtil;

/**
 * Hadoop InputFormat for FASTQ files.  Produces (read id, SequencedFragment)
 * pairs.  Splits are supported for uncompressed input only; a compressed file
 * must be read as a single split (see the reader constructor).
 */
public class FastqInputFormat extends FileInputFormat<Text,SequencedFragment>
{
	// Configuration key: input base quality encoding ("sanger" or "illumina").
	public static final String CONF_BASE_QUALITY_ENCODING = "hbam.fastq-input.base-quality-encoding";
	// Configuration key: drop reads whose Casava filter flag says they failed QC.
	public static final String CONF_FILTER_FAILED_QC = "hbam.fastq-input.filter-failed-qc";
	public static final String CONF_BASE_QUALITY_ENCODING_DEFAULT = "sanger";

	public static class FastqRecordReader extends RecordReader<Text,SequencedFragment>
	{
		/*
		 * fastq format:
		 * <fastq>  :=  <block>+
		 * <block>  :=  @<seqname>\n<seq>\n+[<seqname>]\n<qual>\n
		 * <seqname>  :=  [A-Za-z0-9_.:-]+
		 * <seq>  :=  [A-Za-z\n\.~]+
		 * <qual>  :=  [!-~\n]+
		 *
		 * LP: this format is broken, no?  You can have multi-line sequence and quality strings,
		 * and the quality encoding includes '@' in its valid character range.  So how should one
		 * distinguish between \n@ as a record delimiter and and \n@ as part of a multi-line
		 * quality string?
		 *
		 * For now I'm going to assume single-line sequences.  This works for our sequencing
		 * application.  We'll see if someone complains in other applications.
		 */

		// start:  first valid data index
		private long start;
		// end:  first index value beyond the slice, i.e. slice is in range [start,end)
		private long end;
		// pos: current position in file
		private long pos;
		// file:  the file being read
		private Path file;

		// Line-oriented reader over inputStream (package-local LineReader).
		private LineReader lineReader;
		private InputStream inputStream;
		private Text currentKey = new Text();
		private SequencedFragment currentValue = new SequencedFragment();

		/* If true, will scan the identifier for read data as specified in the Casava
		 * users' guide v1.8:
		 * @<instrument>:<run number>:<flowcell ID>:<lane>:<tile>:<x-pos>:<y-pos> <read>:<is filtered>:<control number>:<index sequence>
		 * After the first name that doesn't match, lookForIlluminaIdentifier will be
		 * set to false and no further scanning will be done.
		 */
		private boolean lookForIlluminaIdentifier = true;
		// Casava 1.8 read-name layout; groups 1-11 map to the fields listed above.
		private static final Pattern ILLUMINA_PATTERN = Pattern.compile("([^:]+):(\\d+):([^:]*):(\\d+):(\\d+):(-?\\d+):(-?\\d+)\\s+([123]):([YN]):(\\d+):(.*)");

		// Scratch buffer reused across line reads to avoid reallocation.
		private Text buffer = new Text();

		// Encoding of the incoming base qualities (Sanger or Illumina).
		private BaseQualityEncoding qualityEncoding;
		// When true, records whose filter flag says "failed QC" are skipped.
		private boolean filterFailedQC = false;

		// How long can a read get?
		private static final int MAX_LINE_LENGTH = 10000;

		/**
		 * Creates a reader positioned at the first complete record of the split.
		 * For compressed input the split must start at 0 and the whole file is
		 * consumed; splitting compressed FASTQ is not supported.
		 */
		public FastqRecordReader(Configuration conf, FileSplit split) throws IOException
		{
			setConf(conf);
			file = split.getPath();
			start = split.getStart();
			end = start + split.getLength();

			FileSystem fs = file.getFileSystem(conf);
			FSDataInputStream fileIn = fs.open(file);

			CompressionCodecFactory codecFactory = new CompressionCodecFactory(conf);
			CompressionCodec codec = codecFactory.getCodec(file);

			if (codec == null) // no codec.  Uncompressed file.
			{
				positionAtFirstRecord(fileIn);
				inputStream = fileIn;
			}
			else
			{ // compressed file
				if (start != 0)
					throw new RuntimeException("Start position for compressed file is not 0! (found " + start + ")");

				inputStream = codec.createInputStream(fileIn);
				end = Long.MAX_VALUE; // read until the end of the file
			}

			lineReader = new LineReader(inputStream);
		}

		/**
		 * Reads the quality-encoding and QC-filter settings.  The hbam.* keys
		 * take precedence over the FormatConstants fallbacks.
		 */
		protected void setConf(Configuration conf)
		{
			String encoding =
				conf.get(FastqInputFormat.CONF_BASE_QUALITY_ENCODING,
					conf.get(FormatConstants.CONF_INPUT_BASE_QUALITY_ENCODING,
						FastqInputFormat.CONF_BASE_QUALITY_ENCODING_DEFAULT));

			if ("illumina".equals(encoding))
				qualityEncoding = BaseQualityEncoding.Illumina;
			else if ("sanger".equals(encoding))
				qualityEncoding = BaseQualityEncoding.Sanger;
			else
				throw new RuntimeException("Unknown input base quality encoding value " + encoding);

			filterFailedQC = ConfHelper.parseBoolean(
				conf.get(FastqInputFormat.CONF_FILTER_FAILED_QC,
					conf.get(FormatConstants.CONF_INPUT_FILTER_FAILED_QC)),
				false);
		}

		/*
		 * Position the input stream at the start of the first record.
		 *
		 * Heuristic: scan forward for a line starting with '@' and confirm it is
		 * a record header (not a quality line, which may also start with '@') by
		 * checking that the line two below starts with '+'.  On a false match we
		 * backtrack and keep scanning.
		 */
		private void positionAtFirstRecord(FSDataInputStream stream) throws IOException
		{
			if (start > 0)
			{
				// Advance to the start of the first record
				// We use a temporary LineReader to read lines until we find the
				// position of the right one.  We then seek the file to that position.
				stream.seek(start);
				LineReader reader = new LineReader(stream);

				int bytesRead = 0;
				do
				{
					bytesRead = reader.readLine(buffer, (int)Math.min(MAX_LINE_LENGTH, end - start));
					if (bytesRead > 0 && (buffer.getLength() <= 0 || buffer.getBytes()[0] != '@'))
						start += bytesRead;
					else
					{
						// line starts with @.  Read two more and verify that it starts with a +
						//
						// If this isn't the start of a record, we want to backtrack to its end
						long backtrackPosition = start + bytesRead;

						bytesRead = reader.readLine(buffer, (int)Math.min(MAX_LINE_LENGTH, end - start));
						bytesRead = reader.readLine(buffer, (int)Math.min(MAX_LINE_LENGTH, end - start));
						if (bytesRead > 0 && buffer.getLength() > 0 && buffer.getBytes()[0] == '+')
							break; // all good!
						else
						{
							// backtrack to the end of the record we thought was the start.
							start = backtrackPosition;
							stream.seek(start);
							reader = new LineReader(stream);
						}
					}
				} while (bytesRead > 0);

				stream.seek(start);
			}
			// else
			//	if start == 0 we presume it starts with a valid fastq record
			pos = start;
		}

		/**
		 * Added to use mapreduce API.  Setup is done in the constructor instead.
		 */
		public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException
		{
		}

		/**
		 * Added to use mapreduce API.
		 */
		public Text getCurrentKey()
		{
			return currentKey;
		}

		/**
		 * Added to use mapreduce API.
		 */
		public SequencedFragment getCurrentValue()
	 	{
			return currentValue;
		}

		/**
		 * Added to use mapreduce API.
		 */
		public boolean nextKeyValue() throws IOException, InterruptedException
		{
			return next(currentKey, currentValue);
		}

		/**
		 * Close this RecordReader to future operations.
		 */
		public void close() throws IOException
		{
			inputStream.close();
		}

		/**
		 * Create an object of the appropriate type to be used as a key.
		 */
		public Text createKey()
		{
			return new Text();
		}

		/**
		 * Create an object of the appropriate type to be used as a value.
		 */
		public SequencedFragment createValue()
		{
			return new SequencedFragment();
		}

		/**
		 * Returns the current position in the input.
		 */
		public long getPos() { return pos; }

		/**
		 * How much of the input has the RecordReader consumed i.e.
		 */
		public float getProgress()
		{
			if (start == end)
				return 1.0f;
			else
				return Math.min(1.0f, (pos - start) / (float)(end - start));
		}

		// Human-readable "file:offset" used in error messages.
		public String makePositionMessage()
		{
			return file.toString() + ":" + pos;
		}

		/**
		 * Reads one raw 4-line FASTQ record into key (read id) and value.
		 * Assumes the reader is positioned at the '@' of an ID line.
		 * Returns false on EOF.  Does no quality conversion or QC filtering.
		 */
		protected boolean lowLevelFastqRead(Text key, SequencedFragment value) throws IOException
		{
			// ID line
			long skipped = lineReader.skip(1); // skip @
			pos += skipped;
			if (skipped == 0)
				return false; // EOF

			// ID
			readLineInto(key);
			// sequence
			value.clear();
			readLineInto(value.getSequence());
			// separator line: must start with '+'
			readLineInto(buffer);
			if (buffer.getLength() == 0 || buffer.getBytes()[0] != '+')
				throw new RuntimeException("unexpected fastq line separating sequence and quality at " + makePositionMessage() + ". Line: " + buffer + ". \nSequence ID: " + key);
			readLineInto(value.getQuality());

			// look for the Illumina-formatted name.  Once it isn't found lookForIlluminaIdentifier will be set to false
			lookForIlluminaIdentifier = lookForIlluminaIdentifier && scanIlluminaId(key, value);
			if (!lookForIlluminaIdentifier)
				scanNameForReadNumber(key, value);
			return true;
		}

		/**
		 * Reads the next key/value pair from the input for processing.
		 * Skips QC-failed reads when configured, converts Illumina qualities to
		 * Sanger scale, and verifies Sanger qualities are in range.
		 */
		public boolean next(Text key, SequencedFragment value) throws IOException
		{
			if (pos >= end)
				return false; // past end of slice
			try
			{
				boolean gotData;
				boolean goodRecord;
				do
				{
					gotData = lowLevelFastqRead(key, value);
					goodRecord = gotData && (!filterFailedQC || value.getFilterPassed() == null || value.getFilterPassed());
				} while (gotData && !goodRecord);

				if (goodRecord) // goodRecord is false also when we couldn't read any more data
				{
					if (qualityEncoding == BaseQualityEncoding.Illumina)
					{
						try
						{
							// convert illumina to sanger scale
							SequencedFragment.convertQuality(value.getQuality(), BaseQualityEncoding.Illumina, BaseQualityEncoding.Sanger);
						} catch (FormatException e) {
							throw new FormatException(e.getMessage() + " Position: " + makePositionMessage() + "; Sequence ID: " + key);
						}
					}
					else // sanger qualities.
					{
						int outOfRangeElement = SequencedFragment.verifyQuality(value.getQuality(), BaseQualityEncoding.Sanger);
						if (outOfRangeElement >= 0)
						{
							throw new FormatException("fastq base quality score out of range for Sanger Phred+33 format (found " +
									(value.getQuality().getBytes()[outOfRangeElement] - FormatConstants.SANGER_OFFSET) + ").\n" +
									"Although Sanger format has been requested, maybe qualities are in Illumina Phred+64 format?\n" +
									"Position: " + makePositionMessage() + "; Sequence ID: " + key);
						}
					}
				}
				return goodRecord;
			}
			catch (EOFException e) {
				throw new RuntimeException("unexpected end of file in fastq record at " + makePositionMessage() + ". Id: " + key.toString());
			}
		}

		// Fallback read-number detection: a trailing "/<digit>" on the name.
		private void scanNameForReadNumber(Text name, SequencedFragment fragment)
		{
			// look for a /[0-9] at the end of the name
			if (name.getLength() >= 2)
			{
				byte[] bytes = name.getBytes();
				int last = name.getLength() - 1;

				if (bytes[last-1] == '/' && bytes[last] >= '0' && bytes[last] <= '9')
					fragment.setRead(bytes[last] - '0');
			}
		}

		/**
		 * Parses a Casava 1.8-style read name into the fragment's metadata
		 * fields.  Returns false if the name doesn't match the pattern.
		 * Group 9 is the <is filtered> flag: 'Y' means the read was filtered
		 * out, so filter-passed is true only for 'N'.
		 */
		private boolean scanIlluminaId(Text name, SequencedFragment fragment)
		{
			Matcher m = ILLUMINA_PATTERN.matcher(name.toString());
			boolean matches = m.matches();
			if (matches)
			{
				fragment.setInstrument(m.group(1));
				fragment.setRunNumber(Integer.parseInt(m.group(2)));
				fragment.setFlowcellId(m.group(3));
				fragment.setLane(Integer.parseInt(m.group(4)));
				fragment.setTile(Integer.parseInt(m.group(5)));
				fragment.setXpos(Integer.parseInt(m.group(6)));
				fragment.setYpos(Integer.parseInt(m.group(7)));
				fragment.setRead(Integer.parseInt(m.group(8)));
				fragment.setFilterPassed("N".equals(m.group(9)));
				fragment.setControlNumber(Integer.parseInt(m.group(10)));
				fragment.setIndexSequence(m.group(11));
			}
			return matches;
		}

		// Reads one line into dest, advancing pos; throws EOFException at EOF.
		private int readLineInto(Text dest) throws EOFException, IOException
		{
			int bytesRead = lineReader.readLine(dest, MAX_LINE_LENGTH);
			if (bytesRead <= 0)
				throw new EOFException();
			pos += bytesRead;
			return bytesRead;
		}
	}

	// Only uncompressed FASTQ can be split; compressed files are one split.
	@Override
	public boolean isSplitable(JobContext context, Path path)
	{
		CompressionCodec codec = new CompressionCodecFactory(ContextUtil.getConfiguration(context)).getCodec(path);
		return codec == null;
	}

	public RecordReader<Text, SequencedFragment> createRecordReader(
	        InputSplit genericSplit,
	        TaskAttemptContext context) throws IOException, InterruptedException
	{
		context.setStatus(genericSplit.toString());
		return new FastqRecordReader(ContextUtil.getConfiguration(context), (FileSplit)genericSplit); // cast as per example in TextInputFormat
	}
}
/* Copyright (c) 2000-2021, Board of Trustees of Leland Stanford Jr. University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.lockss.filter.pdf; import java.io.IOException; import org.lockss.filter.pdf.DocumentTransformUtil.*; import org.lockss.util.*; import org.lockss.util.PdfUtil.ResultPolicy; /** * <p>A document transform decorator that applies a "then" document * transform only if the PDF document to be transformed is recognized * by an "if" document transform.</p> * @author Thib Guicherd-Callin * @deprecated Moving away from PDFBox 0.7.3 after 1.76. 
*/ @Deprecated public class ConditionalDocumentTransform extends DocumentTransformDecorator { /** * <p>Builds a new conditional document transform using the given * strictness, out of the given "if" document transform and * "then" document transform.</p> * @param ifTransform An "if" document transform. * @param thenStrictness True to wrap the "then" document transform * as a {@link StrictDocumentTransform} if it * is not one already, false otherwise. * @param thenTransform A "then" document transform. * @see DocumentTransformDecorator#DocumentTransformDecorator(DocumentTransform) * @see AggregateDocumentTransform#AggregateDocumentTransform(DocumentTransform, DocumentTransform) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, boolean thenStrictness, DocumentTransform thenTransform) { super(new AggregateDocumentTransform(ifTransform, !thenStrictness || thenTransform instanceof StrictDocumentTransform ? thenTransform : new StrictDocumentTransform(thenTransform))); if (logger.isDebug3()) { StringBuffer buffer = new StringBuffer(); buffer.append("Done setting up conditional document transform "); if (thenTransform instanceof StrictDocumentTransform) { buffer.append("with existing"); } else if (thenStrictness) { buffer.append("with added"); } else { buffer.append("without"); } buffer.append(" \"then\" strictness"); logger.debug3(buffer.toString()); } } /** * <p>Builds a new conditional document transform using the given * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * default aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenStrictness True to wrap the aggregated "then" document * transform as a * {@link StrictDocumentTransform}, false * otherwise. * @param thenTransform1 A "then" document transform. * @param thenTransform2 A "then" document transform. 
* @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform) * @see AggregateDocumentTransform#AggregateDocumentTransform(DocumentTransform, DocumentTransform) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, boolean thenStrictness, DocumentTransform thenTransform1, DocumentTransform thenTransform2) { this(ifTransform, thenStrictness, new AggregateDocumentTransform(thenTransform1, thenTransform2)); logger.debug3("Implicitly aggregated two \"then\" transforms"); } /** * <p>Builds a new conditional document transform using the given * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * default aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenStrictness True to wrap the aggregated "then" document * transform as a * {@link StrictDocumentTransform}, false * otherwise. * @param thenTransform1 A "then" document transform. * @param thenTransform2 A "then" document transform. * @param thenTransform3 A "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform) * @see AggregateDocumentTransform#AggregateDocumentTransform(DocumentTransform, DocumentTransform) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. 
*/ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, boolean thenStrictness, DocumentTransform thenTransform1, DocumentTransform thenTransform2, DocumentTransform thenTransform3) { this(ifTransform, thenStrictness, new AggregateDocumentTransform(thenTransform1, thenTransform2, thenTransform3)); logger.debug3("Implicitly aggregated three \"then\" transforms"); } /** * <p>Builds a new conditional document transform using the given * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * default aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenStrictness True to wrap the aggregated "then" document * transform as a * {@link StrictDocumentTransform}, false * otherwise. * @param thenTransforms An array of "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform) * @see AggregateDocumentTransform#AggregateDocumentTransform(DocumentTransform[]) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, boolean thenStrictness, DocumentTransform[] thenTransforms) { this(ifTransform, thenStrictness, new AggregateDocumentTransform(thenTransforms)); logger.debug3("Implicitly aggregated " + thenTransforms.length + " \"then\" transforms"); } /** * <p>Builds a new conditional document transform using the given * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * given aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenStrictness True to wrap the aggregated "then" document * transform as a * {@link StrictDocumentTransform}, false * otherwise. * @param thenResultPolicy A result policy for the aggregate "then" * document transform. * @param thenTransform1 A "then" document transform. 
* @param thenTransform2 A "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, boolean thenStrictness, ResultPolicy thenResultPolicy, DocumentTransform thenTransform1, DocumentTransform thenTransform2) { this(ifTransform, thenStrictness, new AggregateDocumentTransform(thenResultPolicy, thenTransform1, thenTransform2)); logger.debug3("Implicitly aggregated two \"then\" transforms"); } /** * <p>Builds a new conditional document transform using the given * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * given aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenStrictness True to wrap the aggregated "then" document * transform as a * {@link StrictDocumentTransform}, false * otherwise. * @param thenResultPolicy A result policy for the aggregate "then" * document transform. * @param thenTransform1 A "then" document transform. * @param thenTransform2 A "then" document transform. * @param thenTransform3 A "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. 
*/ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, boolean thenStrictness, ResultPolicy thenResultPolicy, DocumentTransform thenTransform1, DocumentTransform thenTransform2, DocumentTransform thenTransform3) { this(ifTransform, thenStrictness, new AggregateDocumentTransform(thenResultPolicy, thenTransform1, thenTransform2, thenTransform3)); logger.debug3("Implicitly aggregated three \"then\" transforms"); } /** * <p>Builds a new conditional document transform using the given * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * given aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenStrictness True to wrap the aggregated "then" document * transform as a * {@link StrictDocumentTransform}, false * otherwise. * @param thenResultPolicy A result policy for the aggregate "then" * document transform. * @param thenTransforms An array of "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform) * @see AggregateDocumentTransform#AggregateDocumentTransform(PdfUtil.ResultPolicy, DocumentTransform[]) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, boolean thenStrictness, ResultPolicy thenResultPolicy, DocumentTransform[] thenTransforms) { this(ifTransform, thenStrictness, new AggregateDocumentTransform(thenResultPolicy, thenTransforms)); logger.debug3("Implicitly aggregated " + thenTransforms.length + " \"then\" transforms"); } /** * <p>Builds a new conditional document transform using the default * strictness, out of the given "if" document transform and * "then" document transform.</p> * @param ifTransform An "if" document transform. * @param thenTransform A "then" document transform. 
* @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform) * @see #STRICTNESS_DEFAULT * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, DocumentTransform thenTransform) { this(ifTransform, STRICTNESS_DEFAULT, thenTransform); } /** * <p>Builds a new conditional document transform using the default * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * default aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenTransform1 A "then" document transform. * @param thenTransform2 A "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform, DocumentTransform) * @see #STRICTNESS_DEFAULT * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, DocumentTransform thenTransform1, DocumentTransform thenTransform2) { this(ifTransform, STRICTNESS_DEFAULT, thenTransform1, thenTransform2); } /** * <p>Builds a new conditional document transform using the default * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * default aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenTransform1 A "then" document transform. * @param thenTransform2 A "then" document transform. * @param thenTransform3 A "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform, DocumentTransform) * @see #STRICTNESS_DEFAULT * @deprecated Moving away from PDFBox 0.7.3 after 1.76. 
*/ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, DocumentTransform thenTransform1, DocumentTransform thenTransform2, DocumentTransform thenTransform3) { this(ifTransform, STRICTNESS_DEFAULT, thenTransform1, thenTransform2, thenTransform3); } /** * <p>Builds a new conditional document transform using the default * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * default aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenTransforms An array of "then" document transform. * @see ConditionalDocumentTransform#ConditionalDocumentTransform(DocumentTransform, boolean, DocumentTransform[]) * @see #STRICTNESS_DEFAULT * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, DocumentTransform[] thenTransforms) { this(ifTransform, STRICTNESS_DEFAULT, thenTransforms); } /** * <p>Builds a new conditional document transform using the default * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * given aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenResultPolicy A result policy for the aggregate "then" * document transform. * @param thenTransform1 A "then" document transform. * @param thenTransform2 A "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, PdfUtil.ResultPolicy, DocumentTransform, DocumentTransform) * @see #STRICTNESS_DEFAULT * @deprecated Moving away from PDFBox 0.7.3 after 1.76. 
*/ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, ResultPolicy thenResultPolicy, DocumentTransform thenTransform1, DocumentTransform thenTransform2) { this(ifTransform, STRICTNESS_DEFAULT, thenResultPolicy, thenTransform1, thenTransform2); } /** * <p>Builds a new conditional document transform using the default * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * given aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenResultPolicy A result policy for the aggregate "then" * document transform. * @param thenTransform1 A "then" document transform. * @param thenTransform2 A "then" document transform. * @param thenTransform3 A "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, PdfUtil.ResultPolicy, DocumentTransform, DocumentTransform) * @see #STRICTNESS_DEFAULT * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, ResultPolicy thenResultPolicy, DocumentTransform thenTransform1, DocumentTransform thenTransform2, DocumentTransform thenTransform3) { this(ifTransform, STRICTNESS_DEFAULT, thenResultPolicy, thenTransform1, thenTransform2, thenTransform3); } /** * <p>Builds a new conditional document transform using the default * strictness, out of the given "if" document transform and the * aggregation of the given "then" document transforms (using the * given aggregation result policy).</p> * @param ifTransform An "if" document transform. * @param thenResultPolicy A result policy for the aggregate "then" * document transform. * @param thenTransforms An array of "then" document transform. * @see #ConditionalDocumentTransform(DocumentTransform, boolean, PdfUtil.ResultPolicy, DocumentTransform[]) * @see #STRICTNESS_DEFAULT * @deprecated Moving away from PDFBox 0.7.3 after 1.76. 
*/ @Deprecated public ConditionalDocumentTransform(DocumentTransform ifTransform, ResultPolicy thenResultPolicy, DocumentTransform[] thenTransforms) { this(ifTransform, STRICTNESS_DEFAULT, thenResultPolicy, thenTransforms); } /* Inherit documentation */ @Deprecated public boolean transform(PdfDocument pdfDocument) throws IOException { logger.debug3("Begin conditional document transform"); boolean ret = documentTransform.transform(pdfDocument); logger.debug2("Conditional document transform result: " + ret); return ret; } /** * <p>Te default strict policy for "then" transforms used by this * class.</p> * @see #ConditionalDocumentTransform(DocumentTransform, DocumentTransform) * @see #ConditionalDocumentTransform(DocumentTransform, DocumentTransform, DocumentTransform) * @see #ConditionalDocumentTransform(DocumentTransform, DocumentTransform, DocumentTransform, DocumentTransform) * @see #ConditionalDocumentTransform(DocumentTransform, DocumentTransform[]) * @see #ConditionalDocumentTransform(DocumentTransform, PdfUtil.ResultPolicy, DocumentTransform, DocumentTransform) * @see #ConditionalDocumentTransform(DocumentTransform, PdfUtil.ResultPolicy, DocumentTransform, DocumentTransform, DocumentTransform) * @see #ConditionalDocumentTransform(DocumentTransform, PdfUtil.ResultPolicy, DocumentTransform[]) * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated public static final boolean STRICTNESS_DEFAULT = true; /** * <p>A logger for use by this class.</p> * @deprecated Moving away from PDFBox 0.7.3 after 1.76. */ @Deprecated private static Logger logger = Logger.getLogger(ConditionalDocumentTransform.class); }
/* * Copyright (c) 2017, Tyler <https://github.com/tylerthardy> * Copyright (c) 2018, Shaun Dreclin <shaundreclin@gmail.com> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package net.runelite.client.plugins.slayer; import com.google.common.annotations.VisibleForTesting; import com.google.inject.Provides; import java.awt.Color; import java.awt.image.BufferedImage; import java.io.IOException; import static java.lang.Integer.max; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ScheduledExecutorService; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.inject.Inject; import joptsimple.internal.Strings; import lombok.AccessLevel; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; import net.runelite.api.Actor; import net.runelite.api.ChatMessageType; import net.runelite.api.Client; import net.runelite.api.GameState; import net.runelite.api.Hitsplat; import net.runelite.api.ItemID; import net.runelite.api.MessageNode; import net.runelite.api.NPC; import net.runelite.api.NPCComposition; import static net.runelite.api.Skill.SLAYER; import net.runelite.api.coords.WorldPoint; import net.runelite.api.events.ActorDeath; import net.runelite.api.events.ChatMessage; import net.runelite.api.events.GameStateChanged; import net.runelite.api.events.GameTick; import net.runelite.api.events.HitsplatApplied; import net.runelite.api.events.NpcDespawned; import net.runelite.api.events.NpcSpawned; import net.runelite.api.events.StatChanged; import net.runelite.api.vars.SlayerUnlock; import net.runelite.api.widgets.Widget; import net.runelite.api.widgets.WidgetInfo; import net.runelite.client.Notifier; import net.runelite.client.callback.ClientThread; import net.runelite.client.chat.ChatColorType; import net.runelite.client.chat.ChatCommandManager; import net.runelite.client.chat.ChatMessageBuilder; import net.runelite.client.chat.ChatMessageManager; import net.runelite.client.config.ConfigManager; import net.runelite.client.eventbus.Subscribe; 
import net.runelite.client.events.ChatInput; import net.runelite.client.events.ConfigChanged; import net.runelite.client.game.ItemManager; import net.runelite.client.plugins.Plugin; import net.runelite.client.plugins.PluginDescriptor; import net.runelite.client.ui.overlay.OverlayManager; import net.runelite.client.ui.overlay.infobox.InfoBoxManager; import net.runelite.client.util.ColorUtil; import net.runelite.client.util.Text; import net.runelite.http.api.chat.ChatClient; @PluginDescriptor( name = "Slayer", description = "Show additional slayer task related information", tags = {"combat", "notifications", "overlay", "tasks"} ) @Slf4j public class SlayerPlugin extends Plugin { //Chat messages private static final Pattern CHAT_GEM_PROGRESS_MESSAGE = Pattern.compile("^(?:You're assigned to kill|You have received a new Slayer assignment from .*:) (?:[Tt]he )?(?<name>.+?)(?: (?:in|on|south of) (?:the )?(?<location>[^;]+))?(?:; only | \\()(?<amount>\\d+)(?: more to go\\.|\\))$"); private static final String CHAT_GEM_COMPLETE_MESSAGE = "You need something new to hunt."; private static final Pattern CHAT_COMPLETE_MESSAGE = Pattern.compile("(?:\\d+,)*\\d+"); private static final String CHAT_CANCEL_MESSAGE = "Your task has been cancelled."; private static final String CHAT_CANCEL_MESSAGE_JAD = "You no longer have a slayer task as you left the fight cave."; private static final String CHAT_CANCEL_MESSAGE_ZUK = "You no longer have a slayer task as you left the Inferno."; private static final String CHAT_SUPERIOR_MESSAGE = "A superior foe has appeared..."; private static final String CHAT_BRACELET_SLAUGHTER = "Your bracelet of slaughter prevents your slayer"; private static final Pattern CHAT_BRACELET_SLAUGHTER_REGEX = Pattern.compile("Your bracelet of slaughter prevents your slayer count decreasing. It has (\\d{1,2}) charge[s]? 
left."); private static final String CHAT_BRACELET_EXPEDITIOUS = "Your expeditious bracelet helps you progress your"; private static final Pattern CHAT_BRACELET_EXPEDITIOUS_REGEX = Pattern.compile("Your expeditious bracelet helps you progress your slayer (?:task )?faster. It has (\\d{1,2}) charge[s]? left."); private static final String CHAT_BRACELET_SLAUGHTER_CHARGE = "Your bracelet of slaughter has "; private static final Pattern CHAT_BRACELET_SLAUGHTER_CHARGE_REGEX = Pattern.compile("Your bracelet of slaughter has (\\d{1,2}) charge[s]? left."); private static final String CHAT_BRACELET_EXPEDITIOUS_CHARGE = "Your expeditious bracelet has "; private static final Pattern CHAT_BRACELET_EXPEDITIOUS_CHARGE_REGEX = Pattern.compile("Your expeditious bracelet has (\\d{1,2}) charge[s]? left."); private static final Pattern COMBAT_BRACELET_TASK_UPDATE_MESSAGE = Pattern.compile("^You still need to kill (\\d+) monsters to complete your current Slayer assignment"); //NPC messages private static final Pattern NPC_ASSIGN_MESSAGE = Pattern.compile(".*(?:Your new task is to kill|You are to bring balance to)\\s*(?<amount>\\d+) (?<name>.+?)(?: (?:in|on|south of) (?:the )?(?<location>.+))?\\."); private static final Pattern NPC_ASSIGN_BOSS_MESSAGE = Pattern.compile("^(?:Excellent\\. )?You're now assigned to (?:kill|bring balance to) (?:the )?(.*) (\\d+) times.*Your reward point tally is (.*)\\.$"); private static final Pattern NPC_ASSIGN_FIRST_MESSAGE = Pattern.compile("^We'll start you off (?:hunting|bringing balance to) (.*), you'll need to kill (\\d*) of them\\.$"); private static final Pattern NPC_CURRENT_MESSAGE = Pattern.compile("^You're (?:still(?: meant to be)?|currently assigned to) (?:hunting|bringing balance to|kill|bring balance to|slaying) (?<name>.+?)(?: (?:in|on|south of) (?:the )?(?<location>.+))?(?:, with|; (?:you have|only)) (?<amount>\\d+)(?: more)? 
to go\\..*"); //Reward UI private static final Pattern REWARD_POINTS = Pattern.compile("Reward points: ((?:\\d+,)*\\d+)"); private static final int GROTESQUE_GUARDIANS_REGION = 6727; private static final int EXPEDITIOUS_CHARGE = 30; private static final int SLAUGHTER_CHARGE = 30; // Chat Command private static final String TASK_COMMAND_STRING = "!task"; private static final Pattern TASK_STRING_VALIDATION = Pattern.compile("[^a-zA-Z0-9' -]"); private static final int TASK_STRING_MAX_LENGTH = 50; @Inject private Client client; @Inject private SlayerConfig config; @Inject private OverlayManager overlayManager; @Inject private SlayerOverlay overlay; @Inject private InfoBoxManager infoBoxManager; @Inject private ItemManager itemManager; @Inject private Notifier notifier; @Inject private ClientThread clientThread; @Inject private TargetClickboxOverlay targetClickboxOverlay; @Inject private TargetWeaknessOverlay targetWeaknessOverlay; @Inject private TargetMinimapOverlay targetMinimapOverlay; @Inject private ChatMessageManager chatMessageManager; @Inject private ChatCommandManager chatCommandManager; @Inject private ScheduledExecutorService executor; @Inject private ChatClient chatClient; @Getter(AccessLevel.PACKAGE) private List<NPC> highlightedTargets = new ArrayList<>(); private final Set<NPC> taggedNpcs = new HashSet<>(); private int taggedNpcsDiedPrevTick; private int taggedNpcsDiedThisTick; @Getter(AccessLevel.PACKAGE) @Setter(AccessLevel.PACKAGE) private int amount; @Getter(AccessLevel.PACKAGE) @Setter(AccessLevel.PACKAGE) private int initialAmount; @Getter(AccessLevel.PACKAGE) @Setter(AccessLevel.PACKAGE) private String taskLocation; @Getter(AccessLevel.PACKAGE) @Setter(AccessLevel.PACKAGE) private int expeditiousChargeCount; @Getter(AccessLevel.PACKAGE) @Setter(AccessLevel.PACKAGE) private int slaughterChargeCount; @Getter(AccessLevel.PACKAGE) @Setter(AccessLevel.PACKAGE) private String taskName; private TaskCounter counter; private int cachedXp = -1; private 
Instant infoTimer; // when the task infobox/stat display was last refreshed; compared against config.statTimeout()
	private boolean loginFlag; // set on HOPPING/LOGGING_IN; triggers restoring the saved task on the next LOGGED_IN
	private List<String> targetNames = new ArrayList<>(); // names matched against NPCs for the current task — presumably populated by setTask; confirm against the rest of the class

	@Override
	protected void startUp() throws Exception
	{
		overlayManager.add(overlay);
		overlayManager.add(targetClickboxOverlay);
		overlayManager.add(targetWeaknessOverlay);
		overlayManager.add(targetMinimapOverlay);

		if (client.getGameState() == GameState.LOGGED_IN)
		{
			cachedXp = client.getSkillExperience(SLAYER);

			// Restore the persisted task; config.amount() == -1 means no saved task.
			if (config.amount() != -1 && !config.taskName().isEmpty())
			{
				setExpeditiousChargeCount(config.expeditious());
				setSlaughterChargeCount(config.slaughter());
				// setTask touches client state, so run it on the client thread.
				clientThread.invoke(() -> setTask(config.taskName(), config.amount(), config.initialAmount(), config.taskLocation(), false));
			}
		}

		chatCommandManager.registerCommandAsync(TASK_COMMAND_STRING, this::taskLookup, this::taskSubmit);
	}

	@Override
	protected void shutDown() throws Exception
	{
		overlayManager.remove(overlay);
		overlayManager.remove(targetClickboxOverlay);
		overlayManager.remove(targetWeaknessOverlay);
		overlayManager.remove(targetMinimapOverlay);
		removeCounter();
		highlightedTargets.clear();
		taggedNpcs.clear();
		cachedXp = -1;
		chatCommandManager.unregisterCommand(TASK_COMMAND_STRING);
	}

	@Provides
	SlayerConfig provideSlayerConfig(ConfigManager configManager)
	{
		return configManager.getConfig(SlayerConfig.class);
	}

	@Subscribe
	public void onGameStateChanged(GameStateChanged event)
	{
		switch (event.getGameState())
		{
			case HOPPING:
			case LOGGING_IN:
				// Reset per-session state; the saved task is restored on LOGGED_IN below.
				cachedXp = -1;
				taskName = "";
				amount = 0;
				loginFlag = true;
				highlightedTargets.clear();
				taggedNpcs.clear();
				break;
			case LOGGED_IN:
				if (config.amount() != -1 && !config.taskName().isEmpty() && loginFlag)
				{
					setExpeditiousChargeCount(config.expeditious());
					setSlaughterChargeCount(config.slaughter());
					setTask(config.taskName(), config.amount(), config.initialAmount(), config.taskLocation(), false);
					loginFlag = false;
				}
				break;
		}
	}

	// Persist the current task state to the plugin config.
	private void save()
	{
		config.amount(amount);
		config.initialAmount(initialAmount);
		config.taskName(taskName);
		config.taskLocation(taskLocation);
		config.expeditious(expeditiousChargeCount);
		config.slaughter(slaughterChargeCount);
	}

	@Subscribe
	public void onNpcSpawned(NpcSpawned npcSpawned)
	{
		NPC npc = npcSpawned.getNpc();
		if (isTarget(npc))
		{
			highlightedTargets.add(npc);
		}
	}

	@Subscribe
	public void onNpcDespawned(NpcDespawned npcDespawned)
	{
		NPC npc = npcDespawned.getNpc();
		taggedNpcs.remove(npc);
		highlightedTargets.remove(npc);
	}

	@Subscribe
	public void onGameTick(GameTick tick)
	{
		// Scan the NPC dialog (slayer master conversations) for task assignments/reminders.
		Widget npcDialog = client.getWidget(WidgetInfo.DIALOG_NPC_TEXT);
		if (npcDialog != null)
		{
			String npcText = Text.sanitizeMultilineText(npcDialog.getText()); //remove color and linebreaks
			final Matcher mAssign = NPC_ASSIGN_MESSAGE.matcher(npcText); // amount, name, (location)
			final Matcher mAssignFirst = NPC_ASSIGN_FIRST_MESSAGE.matcher(npcText); // name, number
			final Matcher mAssignBoss = NPC_ASSIGN_BOSS_MESSAGE.matcher(npcText); // name, number, points
			final Matcher mCurrent = NPC_CURRENT_MESSAGE.matcher(npcText); // name, (location), amount

			if (mAssign.find())
			{
				String name = mAssign.group("name");
				int amount = Integer.parseInt(mAssign.group("amount"));
				String location = mAssign.group("location");
				setTask(name, amount, amount, location);
			}
			else if (mAssignFirst.find())
			{
				int amount = Integer.parseInt(mAssignFirst.group(2));
				setTask(mAssignFirst.group(1), amount, amount);
			}
			else if (mAssignBoss.find())
			{
				int amount = Integer.parseInt(mAssignBoss.group(2));
				setTask(mAssignBoss.group(1), amount, amount);
				// Boss assignment dialog also reveals the reward point tally.
				int points = Integer.parseInt(mAssignBoss.group(3).replaceAll(",", ""));
				config.points(points);
			}
			else if (mCurrent.find())
			{
				// Reminder of an existing task: keep the original initialAmount.
				String name = mCurrent.group("name");
				int amount = Integer.parseInt(mCurrent.group("amount"));
				String location = mCurrent.group("location");
				setTask(name, amount, initialAmount, location);
			}
		}

		// A bracelet-break dialog means the bracelet's charges were reset to a fresh one.
		Widget braceletBreakWidget = client.getWidget(WidgetInfo.DIALOG_SPRITE_TEXT);
		if (braceletBreakWidget != null)
		{
			String braceletText = Text.removeTags(braceletBreakWidget.getText()); //remove color and linebreaks
			if (braceletText.contains("bracelet of slaughter"))
			{
				slaughterChargeCount = SLAUGHTER_CHARGE;
				config.slaughter(slaughterChargeCount);
			}
			else if (braceletText.contains("expeditious bracelet"))
			{
				expeditiousChargeCount = EXPEDITIOUS_CHARGE;
				config.expeditious(expeditiousChargeCount);
			}
		}

		// Read the reward point total from the slayer rewards interface when open.
		Widget rewardsBarWidget = client.getWidget(WidgetInfo.SLAYER_REWARDS_TOPBAR);
		if (rewardsBarWidget != null)
		{
			for (Widget w : rewardsBarWidget.getDynamicChildren())
			{
				Matcher mPoints = REWARD_POINTS.matcher(w.getText());
				if (mPoints.find())
				{
					final int prevPoints = config.points();
					int points = Integer.parseInt(mPoints.group(1).replaceAll(",", ""));

					if (prevPoints != points)
					{
						config.points(points);
						// Rebuild the counter so the displayed points stay current.
						removeCounter();
						addCounter();
					}
					break;
				}
			}
		}

		// Hide the task infobox after the configured timeout (0 disables the timeout).
		if (infoTimer != null && config.statTimeout() != 0)
		{
			Duration timeSinceInfobox = Duration.between(infoTimer, Instant.now());
			Duration statTimeout = Duration.ofMinutes(config.statTimeout());

			if (timeSinceInfobox.compareTo(statTimeout) >= 0)
			{
				removeCounter();
			}
		}

		// Roll the per-tick kill counters: deaths observed this tick are consumed
		// on the following tick (when the matching XP drop arrives).
		taggedNpcsDiedPrevTick = taggedNpcsDiedThisTick;
		taggedNpcsDiedThisTick = 0;
	}

	@Subscribe
	public void onChatMessage(ChatMessage event)
	{
		if (event.getType() != ChatMessageType.GAMEMESSAGE && event.getType() != ChatMessageType.SPAM)
		{
			return;
		}

		String chatMsg = Text.removeTags(event.getMessage()); //remove color and linebreaks

		if (chatMsg.startsWith(CHAT_BRACELET_SLAUGHTER))
		{
			Matcher mSlaughter = CHAT_BRACELET_SLAUGHTER_REGEX.matcher(chatMsg);
			// Slaughter proc: the kill did not count, so undo the decrement.
			amount++;
			slaughterChargeCount = mSlaughter.find() ? Integer.parseInt(mSlaughter.group(1)) : SLAUGHTER_CHARGE;
			config.slaughter(slaughterChargeCount);
		}

		if (chatMsg.startsWith(CHAT_BRACELET_EXPEDITIOUS))
		{
			Matcher mExpeditious = CHAT_BRACELET_EXPEDITIOUS_REGEX.matcher(chatMsg);
			// Expeditious proc: an extra kill was credited, so decrement once more.
			amount--;
			expeditiousChargeCount = mExpeditious.find() ? Integer.parseInt(mExpeditious.group(1)) : EXPEDITIOUS_CHARGE;
			config.expeditious(expeditiousChargeCount);
		}

		if (chatMsg.startsWith(CHAT_BRACELET_EXPEDITIOUS_CHARGE))
		{
			Matcher mExpeditious = CHAT_BRACELET_EXPEDITIOUS_CHARGE_REGEX.matcher(chatMsg);
			if (!mExpeditious.find())
			{
				return;
			}
			expeditiousChargeCount = Integer.parseInt(mExpeditious.group(1));
			config.expeditious(expeditiousChargeCount);
		}
		if (chatMsg.startsWith(CHAT_BRACELET_SLAUGHTER_CHARGE))
		{
			Matcher mSlaughter = CHAT_BRACELET_SLAUGHTER_CHARGE_REGEX.matcher(chatMsg);
			if (!mSlaughter.find())
			{
				return;
			}
			slaughterChargeCount = Integer.parseInt(mSlaughter.group(1));
			config.slaughter(slaughterChargeCount);
		}

		if (chatMsg.endsWith("; return to a Slayer master."))
		{
			// Task-complete message; extract the streak and (optionally) points
			// from the comma-grouped numbers present in the message.
			Matcher mComplete = CHAT_COMPLETE_MESSAGE.matcher(chatMsg);

			List<String> matches = new ArrayList<>();
			while (mComplete.find())
			{
				matches.add(mComplete.group(0).replaceAll(",", ""));
			}

			int streak = -1, points = -1;
			switch (matches.size())
			{
				case 0:
					// No numbers: first task completed.
					streak = 1;
					break;
				case 1:
					streak = Integer.parseInt(matches.get(0));
					break;
				case 3:
					streak = Integer.parseInt(matches.get(0));
					points = Integer.parseInt(matches.get(2));
					break;
				default:
					log.warn("Unreachable default case for message ending in '; return to Slayer master'");
			}
			if (streak != -1)
			{
				config.streak(streak);
			}
			if (points != -1)
			{
				config.points(points);
			}
			setTask("", 0, 0);
			return;
		}

		if (chatMsg.equals(CHAT_GEM_COMPLETE_MESSAGE) || chatMsg.equals(CHAT_CANCEL_MESSAGE) || chatMsg.equals(CHAT_CANCEL_MESSAGE_JAD) || chatMsg.equals(CHAT_CANCEL_MESSAGE_ZUK))
		{
			// Task finished or cancelled: clear it.
			setTask("", 0, 0);
			return;
		}

		if (config.showSuperiorNotification() && chatMsg.equals(CHAT_SUPERIOR_MESSAGE))
		{
			notifier.notify(CHAT_SUPERIOR_MESSAGE);
			return;
		}

		// Enchanted gem progress check: updates the remaining amount.
		Matcher mProgress = CHAT_GEM_PROGRESS_MESSAGE.matcher(chatMsg);

		if (mProgress.find())
		{
			String name = mProgress.group("name");
			int gemAmount = Integer.parseInt(mProgress.group("amount"));
			String location = mProgress.group("location");
			setTask(name, gemAmount, initialAmount, location);
			return;
		}

		final Matcher bracerProgress = COMBAT_BRACELET_TASK_UPDATE_MESSAGE.matcher(chatMsg);

		if (bracerProgress.find())
		{
			final int taskAmount = Integer.parseInt(bracerProgress.group(1));
			setTask(taskName, taskAmount, initialAmount);

			// Avoid race condition (combat brace message goes through first before XP drop)
			amount++;
		}
	}

	@Subscribe
	public void onStatChanged(StatChanged statChanged)
	{
		if (statChanged.getSkill() != SLAYER)
		{
			return;
		}

		int slayerExp = statChanged.getXp();

		if (slayerExp <= cachedXp)
		{
			return;
		}

		if (cachedXp == -1)
		{
			// this is the initial xp sent on login
			cachedXp = slayerExp;
			return;
		}

		final int delta = slayerExp - cachedXp;
		cachedXp = slayerExp;

		log.debug("Slayer xp change delta: {}, killed npcs: {}", delta, taggedNpcsDiedPrevTick);

		final Task task = Task.getTask(taskName);
		if (task != null && task.getExpectedKillExp() > 0)
		{
			// Only decrement a kill if the xp drop matches the expected drop. This is just for Tzhaar tasks.
			if (task.getExpectedKillExp() == delta)
			{
				killed(1);
			}
		}
		else
		{
			// This is at least one kill, but if we observe multiple tagged NPCs dying on the previous tick, count them
			// instead.
killed(max(taggedNpcsDiedPrevTick, 1)); } } @Subscribe public void onHitsplatApplied(HitsplatApplied hitsplatApplied) { Actor actor = hitsplatApplied.getActor(); Hitsplat hitsplat = hitsplatApplied.getHitsplat(); if (hitsplat.getHitsplatType() == Hitsplat.HitsplatType.DAMAGE_ME && highlightedTargets.contains(actor)) { // If the actor is in highlightedTargets it must be an NPC and also a task assignment taggedNpcs.add((NPC) actor); } } @Subscribe public void onActorDeath(ActorDeath actorDeath) { Actor actor = actorDeath.getActor(); if (taggedNpcs.contains(actor)) { log.debug("Tagged NPC {} has died", actor.getName()); ++taggedNpcsDiedThisTick; } } @Subscribe private void onConfigChanged(ConfigChanged event) { if (!event.getGroup().equals("slayer") || !event.getKey().equals("infobox")) { return; } if (config.showInfobox()) { clientThread.invoke(this::addCounter); } else { removeCounter(); } } @VisibleForTesting void killed(int amt) { if (amount == 0) { return; } amount -= amt; if (doubleTroubleExtraKill()) { assert amt == 1; amount--; } config.amount(amount); // save changed value if (!config.showInfobox()) { return; } // add and update counter, set timer addCounter(); counter.setCount(amount); infoTimer = Instant.now(); } private boolean doubleTroubleExtraKill() { return WorldPoint.fromLocalInstance(client, client.getLocalPlayer().getLocalLocation()).getRegionID() == GROTESQUE_GUARDIANS_REGION && SlayerUnlock.GROTESQUE_GUARDIAN_DOUBLE_COUNT.isEnabled(client); } private boolean isTarget(NPC npc) { if (targetNames.isEmpty()) { return false; } String name = npc.getName(); if (name == null) { return false; } name = name.toLowerCase(); for (String target : targetNames) { if (name.contains(target)) { NPCComposition composition = npc.getTransformedComposition(); if (composition != null) { List<String> actions = Arrays.asList(composition.getActions()); if (actions.contains("Attack") || actions.contains("Pick")) //Pick action is for zygomite-fungi { return true; } } } } 
return false; } private void rebuildTargetNames(Task task) { targetNames.clear(); if (task != null) { Arrays.stream(task.getTargetNames()) .map(String::toLowerCase) .forEach(targetNames::add); targetNames.add(taskName.toLowerCase().replaceAll("s$", "")); } } private void rebuildTargetList() { highlightedTargets.clear(); for (NPC npc : client.getNpcs()) { if (isTarget(npc)) { highlightedTargets.add(npc); } } } private void setTask(String name, int amt, int initAmt) { setTask(name, amt, initAmt, null); } private void setTask(String name, int amt, int initAmt, String location) { setTask(name, amt, initAmt, location, true); } private void setTask(String name, int amt, int initAmt, String location, boolean addCounter) { taskName = name; amount = amt; initialAmount = Math.max(amt, initAmt); taskLocation = location; save(); removeCounter(); if (addCounter) { infoTimer = Instant.now(); addCounter(); } Task task = Task.getTask(name); rebuildTargetNames(task); rebuildTargetList(); } private void addCounter() { if (!config.showInfobox() || counter != null || Strings.isNullOrEmpty(taskName)) { return; } Task task = Task.getTask(taskName); int itemSpriteId = ItemID.ENCHANTED_GEM; if (task != null) { itemSpriteId = task.getItemSpriteId(); } BufferedImage taskImg = itemManager.getImage(itemSpriteId); String taskTooltip = ColorUtil.wrapWithColorTag("%s", new Color(255, 119, 0)) + "</br>"; if (taskLocation != null && !taskLocation.isEmpty()) { taskTooltip += taskLocation + "</br>"; } taskTooltip += ColorUtil.wrapWithColorTag("Pts:", Color.YELLOW) + " %s</br>" + ColorUtil.wrapWithColorTag("Streak:", Color.YELLOW) + " %s"; if (initialAmount > 0) { taskTooltip += "</br>" + ColorUtil.wrapWithColorTag("Start:", Color.YELLOW) + " " + initialAmount; } counter = new TaskCounter(taskImg, this, amount); counter.setTooltip(String.format(taskTooltip, capsString(taskName), config.points(), config.streak())); infoBoxManager.addInfoBox(counter); } private void removeCounter() { if (counter == 
null) { return; } infoBoxManager.removeInfoBox(counter); counter = null; } void taskLookup(ChatMessage chatMessage, String message) { if (!config.taskCommand()) { return; } ChatMessageType type = chatMessage.getType(); final String player; if (type.equals(ChatMessageType.PRIVATECHATOUT)) { player = client.getLocalPlayer().getName(); } else { player = Text.removeTags(chatMessage.getName()) .replace('\u00A0', ' '); } net.runelite.http.api.chat.Task task; try { task = chatClient.getTask(player); } catch (IOException ex) { log.debug("unable to lookup slayer task", ex); return; } if (TASK_STRING_VALIDATION.matcher(task.getTask()).find() || task.getTask().length() > TASK_STRING_MAX_LENGTH || TASK_STRING_VALIDATION.matcher(task.getLocation()).find() || task.getLocation().length() > TASK_STRING_MAX_LENGTH || Task.getTask(task.getTask()) == null || !Task.LOCATIONS.contains(task.getLocation())) { log.debug("Validation failed for task name or location: {}", task); return; } int killed = task.getInitialAmount() - task.getAmount(); StringBuilder sb = new StringBuilder(); sb.append(task.getTask()); if (!Strings.isNullOrEmpty(task.getLocation())) { sb.append(" (").append(task.getLocation()).append(")"); } sb.append(": "); if (killed < 0) { sb.append(task.getAmount()).append(" left"); } else { sb.append(killed).append('/').append(task.getInitialAmount()).append(" killed"); } String response = new ChatMessageBuilder() .append(ChatColorType.NORMAL) .append("Slayer Task: ") .append(ChatColorType.HIGHLIGHT) .append(sb.toString()) .build(); final MessageNode messageNode = chatMessage.getMessageNode(); messageNode.setRuneLiteFormatMessage(response); chatMessageManager.update(messageNode); client.refreshChat(); } private boolean taskSubmit(ChatInput chatInput, String value) { if (Strings.isNullOrEmpty(taskName)) { return false; } final String playerName = client.getLocalPlayer().getName(); executor.execute(() -> { try { chatClient.submitTask(playerName, capsString(taskName), amount, 
initialAmount, taskLocation); } catch (Exception ex) { log.warn("unable to submit slayer task", ex); } finally { chatInput.resume(); } }); return true; } //Utils private String capsString(String str) { return str.substring(0, 1).toUpperCase() + str.substring(1); } }
/*

   Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.savepointJdbc30_JSR169

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derbyTesting.functionTests.tests.jdbcapi;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Savepoint;
import java.sql.Statement;

import com.pivotal.gemfirexd.internal.tools.ij;
import org.apache.derbyTesting.functionTests.util.TestUtil;

/**
 * Test the new class Savepoint in jdbc 30.
 * Also, test some mix and match of defining savepoints through JDBC and sql
 * Testing both callable and prepared statements meta data
 *
 * Do not put tests in this file that are not compatible with JSR169
 */
public class savepointJdbc30_JSR169 {

	// True when running under the network (DRDA) framework; set in runTests.
	static private boolean isDerbyNet = false;
	// Objects dropped by TestUtil.cleanUpTest at the end of the run.
	static private String[] testObjects = { "table t1", "table t2",
	"table savepoint"};

	/**
	 * Entry point: opens two connections via the ij utility, runs the whole
	 * savepoint suite against them, and reports any unexpected failure.
	 * Expected/unexpected outcomes are written to stdout for the golden-file
	 * comparison, so message text must not change.
	 */
	public static void main(String[] args) {
		Connection con = null, con2 = null;
		// NOTE(review): 's' is declared but unused here; each test method
		// creates its own Statement.
		Statement s;
		System.out.println("Test savepointJdbc30 starting");
		try {
			// use the ij utility to read the property file and
			// make the initial connection.
			ij.getPropertyArg(args);
			con = ij.startJBMS();
			con2 = ij.startJBMS();
			runTests("regular connections", con,con2);
			con.close();
			con2.close();
		}
		catch (SQLException e) {
			// dumpSQLExceptions is defined later in this file (outside this view).
			dumpSQLExceptions(e);
		}
		catch (Throwable e) {
			System.out.println("FAIL -- unexpected exception:");
			e.printStackTrace(System.out);
		}
	}

	/**
	 * Runs the generic (DRDA-safe) tests on both connections, then the
	 * nested-savepoint tests on the embedded framework only, and finally
	 * cleans up the test objects.
	 *
	 * @param tag  label echoed in the output
	 * @param con  primary connection (autocommit toggled per test)
	 * @param con2 secondary connection (autocommit off) for cross-connection tests
	 */
	public static void runTests(String tag, Connection con, Connection con2)
		throws SQLException {
		Statement s;

		System.out.println("Test savepointJdbc30 starting for " + tag);
		isDerbyNet = TestUtil.isNetFramework();
		con.setAutoCommit(true); // make sure it is true
		con2.setAutoCommit(false);
		s = con.createStatement();

		/* Create the table and do any other set-up */
		setUpTest(s);

		//JCC translates the JDBC savepoint calls into equivalent SQL statements.
		//In addition, we do not allow nested savepoints when
		//coming through SQL statements. Because of this restriction, we can't run most of the
		//JDBC savepoint tests under DRDA framework. The JDBC tests have nested JDBC savepoint
		//calls and they fail when run under JCC(because they get translated into nested SQL savepoints).
		//Hence, splitting the test cases into non-DRDA and more generic tests.
		System.out.println("Tests common to DRDA and embedded Cloudscape");
		genericTests(con, con2, s);

		System.out.println("Next try non-DRDA tests");
		if (!isDerbyNet)
			nonDRDATests(con, s);

		con.setAutoCommit(true);
		TestUtil.cleanUpTest(s, testObjects);
		s.close();
	}

	//The following tests have nested savepoints through JDBC calls. When coming through JCC,
	//these nested JDBC savepoint calls are translated into equivalent SQL savepoint statements.
	//But we do not allow nested savepoints coming through SQL statments
	//and hence these tests can't be run under DRDA framework.
	/**
	 * Tests that rely on nested JDBC savepoints (40-48); embedded framework
	 * only — under DRDA/JCC these would be translated to nested SQL savepoints,
	 * which are disallowed. Expected failures print the exception message;
	 * unexpected successes print a "FAIL" line for the golden-file diff.
	 */
	static void nonDRDATests(Connection con, Statement s)
		throws SQLException {
		ResultSet rs1, rs2, rs1WithHold, rs2WithHold;
		// savepoint3/savepoint4 are declared for parity with genericTests;
		// only savepoint1-3 are used here.
		Savepoint savepoint1, savepoint2, savepoint3, savepoint4;

		//Setting autocommit to false will allow savepoints
		con.setAutoCommit(false); // make sure it is false

		//Test40 - We internally generate a unique name for unnamed savepoints. If a
		//named savepoint uses the currently used internal savepoint name, we won't
		//get an exception thrown for it because we prepend external saves with "e."
		//to avoid name conflicts.
		System.out.println("Test40 - named savepoint can't conflict with internally generated name for unnamed savepoints");
		savepoint1 = con.setSavepoint();
		savepoint2 = con.setSavepoint("i.SAVEPT0");
		con.rollback();

		//Test41 - Rolling back to a savepoint will release all the savepoints created after that savepoint.
		System.out.println("Test41a - Rollback to a savepoint, then try to release savepoint created after that savepoint");
		savepoint1 = con.setSavepoint();
		s.executeUpdate("INSERT INTO T1 VALUES(1,1)");
		savepoint2 = con.setSavepoint("s1");
		s.executeUpdate("INSERT INTO T1 VALUES(2,1)");
		savepoint3 = con.setSavepoint("s2");
		s.executeUpdate("INSERT INTO T1 VALUES(3,1)");
		//Rollback to first named savepoint s1. This will internally release the second named savepoint s2.
		con.rollback(savepoint2);
		rs1 = s.executeQuery("select count(*) from t1");
		rs1.next();
		if(rs1.getInt(1) != 1) {
			System.out.println("ERROR: There should have been 1 row in the table, but found " + rs1.getInt(1) + " rows");
			return;
		}
		//Trying to release second named savepoint s2 should throw exception.
		try {
			con.releaseSavepoint(savepoint3);
			System.out.println("FAIL 41a release of rolled back savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		//Trying to rollback second named savepoint s2 should throw exception.
		System.out.println("Test41b - Rollback to a savepoint, then try to rollback savepoint created after that savepoint");
		try {
			con.rollback(savepoint3);
			System.out.println("FAIL 41b release of rolled back savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		//Release the unnamed named savepoint.
		con.rollback(savepoint1);
		rs1 = s.executeQuery("select count(*) from t1");
		rs1.next();
		if(rs1.getInt(1) != 0) {
			System.out.println("ERROR: There should have been no rows in the table, but found " + rs1.getInt(1) + " rows");
			return;
		}
		con.rollback();

		//Test42 - Rollback/commit on a connection will release all the savepoints created for that transaction
		System.out.println("Test42 - Rollback/commit the transaction, then try to use savepoint from that transaction");
		savepoint1 = con.setSavepoint();
		savepoint2 = con.setSavepoint("s1");
		con.rollback();
		try {
			con.rollback(savepoint1);
			System.out.println("FAIL 42 release of rolled back savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		//Testing commit next
		savepoint1 = con.setSavepoint();
		savepoint2 = con.setSavepoint("s1");
		con.commit();
		try {
			con.rollback(savepoint1);
			System.out.println("FAIL 42 rollback of rolled back savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}

		//Test43 - After releasing a savepoint, should be able to reuse it.
		System.out.println("Test43 - Release and reuse a savepoint name");
		savepoint1 = con.setSavepoint("s1");
		try {
			// Duplicate active name must fail before the release below.
			savepoint2 = con.setSavepoint("s1");
			System.out.println("FAIL 43");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		con.releaseSavepoint(savepoint1);
		savepoint2 = con.setSavepoint("s1");
		con.rollback();

		// Test 45 reuse savepoint name after rollback - should not work
		System.out.println("Test 45 reuse savepoint name after rollback - should not work");
		savepoint1 = con.setSavepoint("MyName");
		// rollback(savepoint) keeps the savepoint itself active, unlike release,
		// so the name stays taken.
		con.rollback(savepoint1);
		try {
			savepoint2 = con.setSavepoint("MyName");
			System.out.println("FAIL 45 reuse of savepoint name after rollback should fail");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		con.rollback();

		// Test 46 bug 5145 Cursors declared before and within the savepoint unit will be closed when rolling back the savepoint
		System.out.println("Test 46 Cursors declared before and within the savepoint unit will be closed when rolling back the savepoint");
		Statement sWithHold = con.createStatement(ResultSet.TYPE_FORWARD_ONLY,
			ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT );
		con.setAutoCommit(false);
		s.executeUpdate("DELETE FROM T1");
		s.executeUpdate("INSERT INTO T1 VALUES(19,1)");
		s.executeUpdate("INSERT INTO T1 VALUES(19,2)");
		s.executeUpdate("INSERT INTO T1 VALUES(19,3)");
		rs1 = s.executeQuery("select * from t1");
		rs1.next();
		rs1WithHold = sWithHold.executeQuery("select * from t1");
		rs1WithHold.next();
		savepoint1 = con.setSavepoint();
		rs2 = s.executeQuery("select * from t1");
		rs2.next();
		rs2WithHold = sWithHold.executeQuery("select * from t1");
		rs2WithHold.next();
		con.rollback(savepoint1);
		try {//resultset declared outside the savepoint unit should be closed at this point after the rollback to savepoint
			rs1.next();
			System.out.println("FAIL 46 shouldn't be able to use a resultset (declared before the savepoint unit) after the rollback to savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		try {//holdable resultset declared outside the savepoint unit should be closed at this point after the rollback to savepoint
			rs1WithHold.next();
			System.out.println("FAIL 46 shouldn't be able to use a holdable resultset (declared before the savepoint unit) after the rollback to savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		try {//resultset declared within the savepoint unit should be closed at this point after the rollback to savepoint
			rs2.next();
			System.out.println("FAIL 46 shouldn't be able to use a resultset (declared within the savepoint unit) after the rollback to savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		try {//holdable resultset declared within the savepoint unit should be closed at this point after the rollback to savepoint
			rs2WithHold.next();
			System.out.println("FAIL 46 shouldn't be able to use a holdable resultset (declared within the savepoint unit) after the rollback to savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		con.rollback();

		// Test 47 multiple tests for getSavepointId()
		System.out.println("Test 47 multiple tests for getSavepointId()");
		savepoint1 = con.setSavepoint();
		savepoint2 = con.setSavepoint();
		System.out.println(savepoint1.getSavepointId());
		System.out.println(savepoint2.getSavepointId());
		con.releaseSavepoint(savepoint2);
		savepoint2 = con.setSavepoint();
		System.out.println(savepoint2.getSavepointId());
		con.commit();
		savepoint2 = con.setSavepoint();
		System.out.println(savepoint2.getSavepointId());
		con.rollback();
		savepoint2 = con.setSavepoint();
		System.out.println(savepoint2.getSavepointId());
		con.rollback();

		// Test 48
		System.out.println("Test 48 No nested SQL savepoints allowed.");
		savepoint1 = con.setSavepoint();
		savepoint2 = con.setSavepoint();
		System.out.println("Following SQL savepoint will fail because we are trying to nest it inside JDBC savepoint");
		try {
			s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS");
			System.out.println("FAIL 48 shouldn't be able set SQL savepoint nested inside JDBC/SQL savepoints");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		//rollback JDBC savepoint but still can't have SQL savepoint because there is still one JDBC savepoint
		con.releaseSavepoint(savepoint2);
		try {
			s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS");
			System.out.println("FAIL 48 Should have gotten exception for nested SQL savepoint");
		}
		catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
		con.releaseSavepoint(savepoint1);
		//rollback last JDBC savepoint and now try SQL savepoint again
		s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS");
		con.rollback();
	}

	//These tests do not allow savepoint nesting and hence can be run under DRDA too
	// NOTE(review): genericTests continues beyond this chunk; only its opening
	// declaration and locals are visible here.
	static void genericTests(Connection con, Connection con2, Statement s)
		throws SQLException {
		ResultSet rs1, rs2, rs1WithHold, rs2WithHold;
		Savepoint savepoint1, savepoint2, savepoint3, savepoint4;

		//Test1 and Test1a fail under DRDA (bug 5384).
//Test1 - No savepoint allowed when auto commit is true con.setAutoCommit(true); // make sure it is true try { System.out.println("Test1 - no unnamed savepoints allowed if autocommit is true"); con.setSavepoint(); // will throw exception because auto commit is true System.out.println("FAIL 1 - auto commit on"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } //Test1a - No savepoint allowed when auto commit is true try { System.out.println("Test1a - no named savepoints allowed if autocommit is true"); con.setSavepoint("notallowed"); // will throw exception because auto commit is true System.out.println("FAIL 1a - auto commit on"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.setAutoCommit(false); // make sure it is false //Test2 - After releasing a savepoint, should be able to reuse it. System.out.println("Test2 - Release and reuse a savepoint name"); savepoint1 = con.setSavepoint("s1"); con.releaseSavepoint(savepoint1); savepoint2 = con.setSavepoint("s1"); con.rollback(); //Test3 - Named savepoints can't pass null for name try { System.out.println("Test3 - null name not allowed for named savepoints"); con.setSavepoint(null); System.out.println("FAIL 3 null savepoint "); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); //Test4 - Verify names/ids of named/unnamed savepoints //named savepoints don't have an id. 
//unnamed savepoints don't have a name (internally, all our savepoints have names, //but for unnamed savepoint, that is not exposed thro jdbc api) System.out.println("Test4 - Verify names/ids of named/unnamed savepoints"); try { savepoint1 = con.setSavepoint(); savepoint1.getSavepointId(); //following should throw exception for un-named savepoint savepoint1.getSavepointName(); System.out.println("FAIL 4 getSavepointName on id savepoint "); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); try { savepoint1 = con.setSavepoint("s1"); savepoint1.getSavepointName(); //following should throw exception for named savepoint savepoint1.getSavepointId(); System.out.println("FAIL 4 getSavepointId on named savepoint "); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); // TEST 5a and 5b for bug 4465 // test 5a - create two savepoints in two different transactions // and release the first one in the subsequent transaction System.out.println("Test5a - create two savepoints in two different transactions" + " and release the first one in the subsequent transaction"); savepoint1 = con.setSavepoint("s1"); con.commit(); //The following savepoint was earlier named s1. Changed it to s2 while working on DRDA support //for savepoints. The reason for that is as follows //JCC translates all savepoint jdbc calls to equivalent sql and hence if the 2 savepoints in //different connections are named the same, then the release savepoint below will get converted to //RELEASE TO SAVEPOINT s1 and that succeeds because the 2nd connection does have a savepoint named s1. 
//Hence we don't really check what we intended to check which is trying to release a savepoint created //in a different transaction savepoint2 = con.setSavepoint("s2"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); try { con.releaseSavepoint(savepoint1); System.out.println("FAIL 5a - release savepoint from a different transaction did not raise error"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.commit(); // test 5b - create two savepoints in two different transactions // and rollback the first one in the subsequent transaction System.out.println("Test5b - create two savepoints in two different transactions" + " and rollback the first one in the subsequent transaction"); savepoint1 = con.setSavepoint("s1"); con.commit(); //The following savepoint was earlier named s1. Changed it to s2 while working on DRDA support //for savepoints. The reason for that is as follows //JCC translates all savepoint jdbc calls to equivalent sql and hence if the 2 savepoints in //different connections are named the same, then the rollback savepoint below will get converted to //ROLLBACK TO SAVEPOINT s1 and that succeeds because the 2nd connection does have a savepoint named s1. //Hence we don't really check what we intended to check which is trying to rollback a savepoint created //in a different transaction savepoint2 = con.setSavepoint("s2"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); try { con.rollback(savepoint1); System.out.println("FAIL 5b - rollback savepoint from a different transaction did not raise error"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.commit(); // test 6a - create a savepoint release it and then create another with the same name. 
// and release the first one System.out.println("Test6a - create a savepoint, release it, create another with" + " same name and release the first one"); savepoint1 = con.setSavepoint("s1"); con.releaseSavepoint(savepoint1); //The following savepoint was earlier named s1. Changed it to s2 while working on DRDA support //for savepoints. The reason for that is as follows //JCC translates all savepoint jdbc calls to equivalent sql and hence if the 2 savepoints in //a transaction are named the same, then the release savepoint below will get converted to //RELEASE TO SAVEPOINT s1 and that succeeds because there is a valid savepoint named s1. savepoint2 = con.setSavepoint("s2"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); try { con.releaseSavepoint(savepoint1); System.out.println("FAIL 6a - releasing a released savepoint did not raise error"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.commit(); // test 6b - create a savepoints release it and then create another with the same name. // and rollback the first one System.out.println("Test6b - create a savepoint, release it, create another with" + " same name and rollback the first one"); savepoint1 = con.setSavepoint("s1"); con.releaseSavepoint(savepoint1); //The following savepoint was earlier named s1. Changed it to s2 while working on DRDA support //for savepoints. The reason for that is as follows //JCC translates all savepoint jdbc calls to equivalent sql and hence if the 2 savepoints in //a transaction are named the same, then the rollback savepoint below will get converted to //ROLLBACK TO SAVEPOINT s1 and that succeeds because there is a valid savepoint named s1. 
savepoint2 = con.setSavepoint("s2"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); try { con.rollback(savepoint1); System.out.println("FAIL 6b - rollback a released savepoint did not raise error"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.commit(); /* TEST case just for bug 4467 // Test 10 - create a named savepoint with the a generated name savepoint1 = con2.setSavepoint("SAVEPT0"); // what exactly is the correct behaviour here? try { savepoint2 = con2.setSavepoint(); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con2.commit(); */ System.out.println("Test6c - Try to use a savepoint from another connection for release"); savepoint1 = con.setSavepoint("s1"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); try { con2.releaseSavepoint(savepoint1); System.out.println("FAIL 6c - releasing another transaction's savepoint did not raise error"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.commit(); con2.commit(); /* BUG 4468 - should not be able to pass a savepoint from a different transaction for release/rollback */ // Test 7a - swap savepoints across connections System.out.println("Test7a - swap savepoints across connections with release"); savepoint1 = con2.setSavepoint("s1"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); savepoint2 = con.setSavepoint("s1"); try { con.releaseSavepoint(savepoint1); System.out.println("FAIL 7a - releasing a another transaction's savepoint did not raise error"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.commit(); con2.commit(); // Test 7b - swap savepoints across connections System.out.println("Test7b - swap savepoints across connections with rollback"); savepoint1 = con2.setSavepoint("s1"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); savepoint2 = con.setSavepoint("s1"); try { con.rollback(savepoint1); 
System.out.println("FAIL 7b - rolling back a another transaction's savepoint did not raise error"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.commit(); con2.commit(); /* * following section attempts to call statement in a method to do a negative test * because savepoints are not supported in a trigger * however, this cannot be done because a call is not supported in a trigger. * leaving the test here for later reference for when we support the SQL version * // bug 4507 - Test 8 test all 4 savepoint commands inside the trigger code System.out.println("Test 8a set savepoint(unnamed) command inside the trigger code"); s.executeUpdate("create trigger trig1 before insert on t1 for each statement call org.apache.derbyTesting.functionTests.tests.jdbcapi.savepointJdbc30::doConnectionSetSavepointUnnamed()"); try { s.executeUpdate("insert into t1 values(1,1)"); System.out.println("FAIL 8a set savepoint(unnamed) command inside the trigger code"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } s.executeUpdate("drop trigger trig1"); System.out.println("Test 8b set savepoint(named) command inside the trigger code"); s.executeUpdate("create trigger trig2 before insert on t1 for each statement call org.apache.derbyTesting.functionTests.tests.jdbcapi.savepointJdbc30::doConnectionSetSavepointNamed()"); try { s.executeUpdate("insert into t1 values(1,1)"); System.out.println("FAIL 8b set savepoint(named) command inside the trigger code"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } s.executeUpdate("drop trigger trig2"); System.out.println("Test 8c release savepoint command inside the trigger code"); s.executeUpdate("create trigger trig3 before insert on t1 for each statement call org.apache.derbyTesting.functionTests.tests.jdbcapi.savepointJdbc30::doConnectionReleaseSavepoint()"); try { s.executeUpdate("insert into t1 values(1,1)"); 
System.out.println("FAIL 8c release savepoint command inside the trigger code"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } s.executeUpdate("drop trigger trig3"); System.out.println("Test 8d rollback savepoint command inside the trigger code"); s.executeUpdate("create trigger trig4 before insert on t1 for each statement call org.apache.derbyTesting.functionTests.tests.jdbcapi.savepointJdbc30::doConnectionRollbackSavepoint()"); try { s.executeUpdate("insert into t1 values(1,1)"); System.out.println("FAIL 8d rollback savepoint command inside the trigger code"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } s.executeUpdate("drop trigger trig4"); con.rollback(); */ //end commented out test 8 // Test 9 test savepoint name and verify case sensitivity System.out.println("Test 9 test savepoint name"); savepoint1 = con.setSavepoint("myname"); String savepointName = savepoint1.getSavepointName(); if (!savepointName.equals("myname")) System.out.println("fail - savepoint name mismatch"); con.rollback(); // Test 10 test savepoint name case sensitivity System.out.println("Test 10 test savepoint name case sensitivity"); savepoint1 = con.setSavepoint("MyName"); savepointName = savepoint1.getSavepointName(); if (!savepointName.equals("MyName")) System.out.println("fail - savepoint name mismatch"); con.rollback(); // Test 11 rolling back a savepoint multiple times - should work System.out.println("Test 11 rolling back a savepoint multiple times - should work"); savepoint1 = con.setSavepoint("MyName"); con.rollback(savepoint1); try { con.rollback(savepoint1); } catch (SQLException se) { System.out.println("FAIL 11 second rollback failed"); System.out.println("Exception is " + se.getMessage()); } con.rollback(); // Test 12 releasing a savepoint multiple times - should not work System.out.println("Test 12 releasing a savepoint multiple times - should not work"); savepoint1 = 
con.setSavepoint("MyName"); con.releaseSavepoint(savepoint1); try { con.releaseSavepoint(savepoint1); System.out.println("FAIL 12 releasing a savepoint multiple times should fail"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); // Test 13 shouldn't be able to use a savepoint from earlier transaction after setting autocommit on and off System.out.println("Test 13 shouldn't be able to use a savepoint from earlier transaction after setting autocommit on and off"); savepoint1 = con.setSavepoint("MyName"); con.setAutoCommit(true); con.setAutoCommit(false); savepoint2 = con.setSavepoint("MyName1"); try {//shouldn't be able to use savepoint from earlier tranasaction after setting autocommit on and off con.releaseSavepoint(savepoint1); System.out.println("FAIL 13 shouldn't be able to use a savepoint from earlier transaction after setting autocommit on and off"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.releaseSavepoint(savepoint2); con.rollback(); // Test 14 cause a transaction rollback and that should release the internal savepoint array System.out.println("Test 14 A non-user initiated transaction rollback should release the internal savepoint array"); Statement s1, s2; s1 = con.createStatement(); s1.executeUpdate("insert into t1 values(1,1)"); s1.executeUpdate("insert into t1 values(2,0)"); con.commit(); s1.executeUpdate("update t1 set c11=c11+1 where c12 > 0"); s2 = con2.createStatement(); savepoint1 = con2.setSavepoint("MyName"); try {//following will get lock timeout which will rollback transaction on c2 s2.executeUpdate("update t1 set c11=c11+1 where c12 < 1"); System.out.println("FAIL 14 should have gotten lock time out"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } try {//the transaction rollback above should have removed the savepoint MyName con2.releaseSavepoint(savepoint1); 
System.out.println("FAIL 14 A non-user initiated transaction rollback should release the internal savepoint array"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); con2.rollback(); s.execute("delete from t1"); con.commit(); // Test 15 check savepoints in batch System.out.println("Test 15 check savepoints in batch"); s.execute("delete from t1"); s.addBatch("insert into t1 values(1,1)"); s.addBatch("insert into t1 values(1,1)"); savepoint1 = con.setSavepoint(); s.addBatch("insert into t1 values(1,1)"); s.executeBatch(); con.rollback(savepoint1); int val = count(con,s); if (val != 0) System.out.println("FAIL 15 savepoint should have been set before batch"); con.rollback(); // Test 16 grammar check for savepoint sq1 System.out.println("Test 16 grammar check for savepoint sq1"); try { s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS"); System.out.println("FAIL 16 Should have gotten exception for missing ON ROLLBACK RETAIN CURSORS"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } try { s.executeUpdate("SAVEPOINT s1 UNIQUE ON ROLLBACK RETAIN CURSORS ON ROLLBACK RETAIN CURSORS"); System.out.println("FAIL 16 Should have gotten exception for multiple ON ROLLBACK RETAIN CURSORS"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } try { s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN LOCKS"); System.out.println("FAIL 16 Should have gotten exception for multiple ON ROLLBACK RETAIN LOCKS"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } try { s.executeUpdate("SAVEPOINT s1 UNIQUE UNIQUE ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); System.out.println("FAIL 16 Should have gotten exception for multiple UNIQUE keywords"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } s.executeUpdate("SAVEPOINT s1 
ON ROLLBACK RETAIN CURSORS ON ROLLBACK RETAIN LOCKS"); s.executeUpdate("RELEASE TO SAVEPOINT s1"); con.rollback(); // Test 17 System.out.println("Test 17 No nested savepoints allowed when using SQL to set savepoints."); System.out.println("Test 17a Test with UNIQUE clause."); s.executeUpdate("SAVEPOINT s1 UNIQUE ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); try { s.executeUpdate("SAVEPOINT s2 UNIQUE ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); System.out.println("FAIL 17a Should have gotten exception for nested savepoints"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } s.executeUpdate("RELEASE TO SAVEPOINT s1"); s.executeUpdate("SAVEPOINT s2 UNIQUE ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); con.rollback(); System.out.println("Test 17b Test without UNIQUE clause."); System.out.println("Since no nesting is allowed, skipping UNIQUE still gives error for trying to define another savepoint"); s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); try { s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); System.out.println("FAIL 17b Should have gotten exception for nested savepoints"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); // Test 18 System.out.println("Test 18 No nested SQL savepoints allowed inside JDBC savepoint."); savepoint1 = con.setSavepoint(); System.out.println("Following SQL savepoint will fail because we are trying to nest it inside JDBC savepoint"); try { s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); System.out.println("FAIL 18 shouldn't be able set SQL savepoint nested inside JDBC savepoints"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } //rollback the JDBC savepoint. 
Now since there are no user defined savepoints, we can define SQL savepoint con.releaseSavepoint(savepoint1); s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); con.rollback(); // Test 19 System.out.println("Test 19 No nested SQL savepoints allowed inside SQL savepoint."); s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); System.out.println("Following SQL savepoint will fail because we are trying to nest it inside SQL savepoint"); try { s.executeUpdate("SAVEPOINT s2 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); System.out.println("FAIL 19 shouldn't be able set SQL savepoint nested inside SQL savepoint"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } //rollback the SQL savepoint. Now since there are no user defined savepoints, we can define SQL savepoint s.executeUpdate("RELEASE TO SAVEPOINT s1"); s.executeUpdate("SAVEPOINT s2 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); con.rollback(); // Test 20 System.out.println("Test 20 Rollback of SQL savepoint works same as rollback of JDBC savepoint."); s.executeUpdate("DELETE FROM T1"); con.commit(); s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); s.executeUpdate("INSERT INTO T1 VALUES(1,1)"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); s.executeUpdate("INSERT INTO T1 VALUES(3,1)"); //Rollback to SQL savepoint and should see changes rolledback s.executeUpdate("ROLLBACK TO SAVEPOINT s1"); rs1 = s.executeQuery("select count(*) from t1"); rs1.next(); if(rs1.getInt(1) != 0) { System.out.println("ERROR: There should have been 0 rows in the table, but found " + rs1.getInt(1) + " rows"); return; } con.rollback(); // Test 21 System.out.println("Test 21 After releasing the SQL savepoint, rollback the transaction and should see everything undone."); s.executeUpdate("SAVEPOINT s1 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); s.executeUpdate("INSERT INTO 
T1 VALUES(1,1)"); s.executeUpdate("INSERT INTO T1 VALUES(2,1)"); s.executeUpdate("INSERT INTO T1 VALUES(3,1)"); //Release the SQL savepoint and then rollback the transaction and should see changes rolledback s.executeUpdate("RELEASE TO SAVEPOINT s1"); con.rollback(); rs1 = s.executeQuery("select count(*) from t1"); rs1.next(); if(rs1.getInt(1) != 0) { System.out.println("ERROR: There should have been 0 rows in the table, but found " + rs1.getInt(1) + " rows"); return; } con.rollback(); // Test 22 System.out.println("Test 22 Should not be able to create a SQL savepoint starting with name SYS"); try { s.executeUpdate("SAVEPOINT SYSs2 ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); System.out.println("FAIL 22 shouldn't be able to create a SQL savepoint starting with name SYS"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); // Test 23 - bug 5817 - make savepoint and release non-reserved keywords System.out.println("Test 23 Should be able to use non-reserved keywords savepoint and release as identifiers"); System.out.println("Create table with savepoint and release as identifiers"); s.execute("create table savepoint (savepoint int, release int)"); rs1 = s.executeQuery("select count(*) from savepoint"); rs1.next(); if(rs1.getInt(1) != 0) { System.out.println("ERROR: There should have been 0 rows in the table, but found " + rs1.getInt(1) + " rows"); return; } System.out.println("Create a savepoint with name savepoint"); s.execute("SAVEPOINT savepoint ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); s.executeUpdate("INSERT INTO savepoint VALUES(1,1)"); System.out.println("Release the savepoint with name savepoint"); s.execute("RELEASE SAVEPOINT savepoint"); rs1 = s.executeQuery("select count(*) from savepoint"); rs1.next(); if(rs1.getInt(1) != 1) { System.out.println("ERROR: There should have been 1 rows in the table, but found " + rs1.getInt(1) + " rows"); return; } System.out.println("Create a 
savepoint with name release"); s.execute("SAVEPOINT release ON ROLLBACK RETAIN LOCKS ON ROLLBACK RETAIN CURSORS"); s.executeUpdate("INSERT INTO savepoint VALUES(2,1)"); System.out.println("Rollback to the savepoint with name release"); s.execute("ROLLBACK TO SAVEPOINT release"); rs1 = s.executeQuery("select count(*) from savepoint"); rs1.next(); if(rs1.getInt(1) != 1) { System.out.println("ERROR: There should have been 1 rows in the table, but found " + rs1.getInt(1) + " rows"); return; } System.out.println("Release the savepoint with name release"); s.execute("RELEASE SAVEPOINT release"); con.rollback(); // Test 24 System.out.println("Test 24 Savepoint name can't exceed 128 characters"); try { savepoint1 = con.setSavepoint("MyName1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890"); System.out.println("FAIL 24 shouldn't be able to create a SQL savepoint with name exceeding 128 characters"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); // Test 25 System.out.println("Test 25 Should not be able to create a SQL savepoint starting with name SYS through jdbc"); try { savepoint1 = con.setSavepoint("SYSs2"); System.out.println("FAIL 25 shouldn't be able to create a SQL savepoint starting with name SYS through jdbc"); } catch (SQLException se) { System.out.println("Expected Exception is " + se.getMessage()); } con.rollback(); s1.close(); s2.close(); // bug 4451 - Test 26a pass Null value to rollback // bug 5374 - Passing a null savepoint to rollback or release method // used to give a npe in JCC // it should give a SQLException aying "Cannot rollback to a null savepoint" System.out.println("Test 26a rollback of null savepoint"); try { con.rollback((Savepoint) null); System.out.println("FAIL 26a rollback of null savepoint did not raise error "); } catch (SQLException se) { System.out.println("Expected Exception is " + 
se.getMessage()); }

		// Test 26b pass Null value to releaseSavepoint
		// Like 26a above: a null savepoint must raise a SQLException,
		// not a NullPointerException.
		System.out.println("Test 26b release of null savepoint");
		try {
			con.releaseSavepoint((Savepoint) null);
			System.out.println("FAIL 26b release of null savepoint did not raise error ");
		} catch (SQLException se) {
			System.out.println("Expected Exception is " + se.getMessage());
		}
	}

	// Set up the test by creating the table used by the rest of the test.
	// Drops any leftover tables first so reruns start from a clean slate.
	static void setUpTest(Statement s) throws SQLException {
		try {
			/* Drop the tables, just in case they're there from another test */
			s.execute("drop table t1");
			s.execute("drop table t2");
			s.execute("drop table savepoint");
		} catch (SQLException se) {
			// Ignored on purpose: the drops fail harmlessly when the tables
			// do not exist yet.
			//System.out.println("Expected Exception is " + se.getMessage());
		}
		/* Create a table */
		s.execute("create table t1 (c11 int, c12 smallint)");
		s.execute("create table t2 (c11 int)");
	}

	// Returns the current number of rows in T1 (used by the batch test, Test 15).
	static private int count(Connection con, Statement s) throws SQLException {
		int count = 0;
		ResultSet rs = s.executeQuery("select count(*) from t1");
		rs.next();
		count = rs.getInt(1);
		rs.close();
		return count;
	}

	// Helper intended to be CALLed from a trigger (see commented-out Test 8a):
	// opens the "jdbc:default:connection" URL, sets an unnamed savepoint,
	// inserts into T2, then rolls back to the savepoint.
	public static void doConnectionSetSavepointUnnamed() throws Throwable {
		Connection conn = DriverManager.getConnection("jdbc:default:connection");
		Savepoint s1 = conn.setSavepoint();
		Statement s = conn.createStatement();
		s.executeUpdate("insert into t2 values(1)");
		conn.rollback(s1);
	}

	// Same as above but with a named savepoint ("s1"); see commented-out Test 8b.
	public static void doConnectionSetSavepointNamed() throws Throwable {
		Connection conn = DriverManager.getConnection("jdbc:default:connection");
		Savepoint s1 = conn.setSavepoint("s1");
		Statement s = conn.createStatement();
		s.executeUpdate("insert into t2 values(1)");
		conn.rollback(s1);
	}

	// Trigger-callable helper exercising rollback(null savepoint);
	// see commented-out Test 8d.
	public static void doConnectionRollbackSavepoint() throws Throwable {
		Connection conn = DriverManager.getConnection("jdbc:default:connection");
		conn.rollback((Savepoint) null);
		Statement s = conn.createStatement();
		s.executeUpdate("insert into t2 values(1)");
	}

	// Trigger-callable helper exercising releaseSavepoint(null savepoint);
	// see commented-out Test 8c.
	public static void doConnectionReleaseSavepoint() throws Throwable {
		Connection conn = DriverManager.getConnection("jdbc:default:connection");
		conn.releaseSavepoint((Savepoint) null);
		Statement s = conn.createStatement();
		s.executeUpdate("insert into t2 values(1)");
	}

	// Prints the full chain of an unexpected SQLException, one SQLSTATE
	// plus stack trace per linked exception.
	public static void dumpSQLExceptions (SQLException se) {
		System.out.println("FAIL -- unexpected exception");
		while (se != null) {
			System.out.print("SQLSTATE("+se.getSQLState()+"):");
			se.printStackTrace(System.out);
			se = se.getNextException();
		}
	}
}
package uk.ac.abdn.fits.mvc.control.operator;

/**
 * @author Cheng Zeng, University of Aberdeen
 *
 */
import java.text.DecimalFormat;

/**
 * Bean holding an operator's fare structure: up to four distance bands
 * (a mileage range, a charge type and a charge each), a return-fare
 * multiplier, age-based discounts and two boolean charging options.
 * Populated from an {@link OperatorDataInputForm} via {@link #setField}.
 */
public class Fare {

	// 0 when the "yes" radio button ("0") is selected, 1 otherwise; see setField.
	private byte charge_standard_fare;

	// Distance band 1: a single mileage value plus charge type and amount.
	private double fare_distance1_mile;
	private String fare_dist1_type;
	private double fare_dist1_charge;

	// Distance bands 2-4: lower/upper mileage bounds plus charge type and amount.
	private double fare_dist2_mile_1;
	private double fare_dist2_mile_2;
	private String fare_dist2_type;
	private double fare_dist2_charge;

	private double fare_dist3_mile_1;
	private double fare_dist3_mile_2;
	private String fare_dist3_type;
	private double fare_dist3_charge;

	private double fare_dist4_mile_1;
	private double fare_dist4_mile_2;
	private String fare_dist4_type;
	private double fare_dist4_charge;

	private double return_fare_multiplier;
	// Discounts are stored as doubles but exposed as whole-number strings;
	// see getDiscount_for_over60 / getDiscount_for_under16.
	private double discount_for_over60;
	private double discount_for_under16;
	private boolean fare_structure_checkbox_escort;
	private boolean fare_structure_checkbox_charge_for_dead_mileage;

	public byte getCharge_standard_fare() {
		return charge_standard_fare;
	}
	public void setCharge_standard_fare(byte charge_standard_fare) {
		this.charge_standard_fare = charge_standard_fare;
	}
	public double getFare_distance1_mile() {
		return fare_distance1_mile;
	}
	public void setFare_distance1_mile(double fare_distance1_mile) {
		this.fare_distance1_mile = fare_distance1_mile;
	}
	public String getFare_dist1_type() {
		return fare_dist1_type;
	}
	public void setFare_dist1_type(String fare_dist1_type) {
		this.fare_dist1_type = fare_dist1_type;
	}
	public double getFare_dist1_charge() {
		return fare_dist1_charge;
	}
	public void setFare_dist1_charge(double fare_dist1_charge) {
		this.fare_dist1_charge = fare_dist1_charge;
	}
	public double getFare_dist2_mile_1() {
		return fare_dist2_mile_1;
	}
	public void setFare_dist2_mile_1(double fare_dist2_mile_1) {
		this.fare_dist2_mile_1 = fare_dist2_mile_1;
	}
	public double getFare_dist2_mile_2() {
		return fare_dist2_mile_2;
	}
	public void setFare_dist2_mile_2(double fare_dist2_mile_2) {
		this.fare_dist2_mile_2 = fare_dist2_mile_2;
	}
	public String getFare_dist2_type() {
		return fare_dist2_type;
	}
	public void setFare_dist2_type(String fare_dist2_type) {
		this.fare_dist2_type = fare_dist2_type;
	}
	public double getFare_dist2_charge() {
		return fare_dist2_charge;
	}
	public void setFare_dist2_charge(double fare_dist2_charge) {
		this.fare_dist2_charge = fare_dist2_charge;
	}
	public double getFare_dist3_mile_1() {
		return fare_dist3_mile_1;
	}
	public void setFare_dist3_mile_1(double fare_dist3_mile_1) {
		this.fare_dist3_mile_1 = fare_dist3_mile_1;
	}
	public double getFare_dist3_mile_2() {
		return fare_dist3_mile_2;
	}
	public void setFare_dist3_mile_2(double fare_dist3_mile_2) {
		this.fare_dist3_mile_2 = fare_dist3_mile_2;
	}
	public String getFare_dist3_type() {
		return fare_dist3_type;
	}
	public void setFare_dist3_type(String fare_dist3_type) {
		this.fare_dist3_type = fare_dist3_type;
	}
	public double getFare_dist3_charge() {
		return fare_dist3_charge;
	}
	public void setFare_dist3_charge(double fare_dist3_charge) {
		this.fare_dist3_charge = fare_dist3_charge;
	}
	public double getFare_dist4_mile_1() {
		return fare_dist4_mile_1;
	}
	public void setFare_dist4_mile_1(double fare_dist4_mile_1) {
		this.fare_dist4_mile_1 = fare_dist4_mile_1;
	}
	public double getFare_dist4_mile_2() {
		return fare_dist4_mile_2;
	}
	public void setFare_dist4_mile_2(double fare_dist4_mile_2) {
		this.fare_dist4_mile_2 = fare_dist4_mile_2;
	}
	public String getFare_dist4_type() {
		return fare_dist4_type;
	}
	public void setFare_dist4_type(String fare_dist4_type) {
		this.fare_dist4_type = fare_dist4_type;
	}
	public double getFare_dist4_charge() {
		return fare_dist4_charge;
	}
	public void setFare_dist4_charge(double fare_dist4_charge) {
		this.fare_dist4_charge = fare_dist4_charge;
	}
	public double getReturn_fare_multiplier() {
		return return_fare_multiplier;
	}
	public void setReturn_fare_multiplier(double return_fare_multiplier) {
		this.return_fare_multiplier = return_fare_multiplier;
	}

	/**
	 * The over-60 discount formatted with pattern "#" (no decimals),
	 * e.g. 12.0 becomes "12". Note the asymmetry: the getter returns a
	 * String while the setter takes a double.
	 */
	public String getDiscount_for_over60() {
		// DecimalFormat.format already returns a String; the previous
		// trailing toString() call was redundant and has been removed.
		return new DecimalFormat("#").format(discount_for_over60);
	}
	public void setDiscount_for_over60(double discount_for_over60) {
		this.discount_for_over60 = discount_for_over60;
	}

	/**
	 * The under-16 discount formatted with pattern "#" (no decimals);
	 * same String-out/double-in asymmetry as getDiscount_for_over60.
	 */
	public String getDiscount_for_under16() {
		return new DecimalFormat("#").format(discount_for_under16);
	}
	public void setDiscount_for_under16(double discount_for_under16) {
		this.discount_for_under16 = discount_for_under16;
	}
	public boolean isFare_structure_checkbox_escort() {
		return fare_structure_checkbox_escort;
	}
	public void setFare_structure_checkbox_escort(
			boolean fare_structure_checkbox_escort) {
		this.fare_structure_checkbox_escort = fare_structure_checkbox_escort;
	}
	public boolean isFare_structure_checkbox_charge_for_dead_mileage() {
		return fare_structure_checkbox_charge_for_dead_mileage;
	}
	public void setFare_structure_checkbox_charge_for_dead_mileage(
			boolean fare_structure_checkbox_charge_for_dead_mileage) {
		this.fare_structure_checkbox_charge_for_dead_mileage = fare_structure_checkbox_charge_for_dead_mileage;
	}

	/**
	 * Copies every fare-related field from the submitted form into this bean.
	 *
	 * @param form the operator data-input form backing the fare-structure tab
	 */
	public void setField(OperatorDataInputForm form ){
		// if yes, the value is 0. If no, the value is 1. The default is 0.
		charge_standard_fare = form.getTab_fare_structure_radioBtns().equals("0")?(byte)0:1;
		fare_distance1_mile = form.getFare_distance1_mile();
		fare_dist1_type = form.getFare_dist1_type();
		fare_dist1_charge = form.getFare_dist1_charge();
		fare_dist2_mile_1 = form.getFare_dist2_mile_1();
		fare_dist2_mile_2 = form.getFare_dist2_mile_2();
		fare_dist2_type = form.getFare_dist2_type();
		fare_dist2_charge = form.getFare_dist2_charge();
		fare_dist3_mile_1 = form.getFare_dist3_mile_1();
		fare_dist3_mile_2 = form.getFare_dist3_mile_2();
		fare_dist3_type = form.getFare_dist3_type();
		fare_dist3_charge = form.getFare_dist3_charge();
		fare_dist4_mile_1 = form.getFare_dist4_mile_1();
		fare_dist4_mile_2 = form.getFare_dist4_mile_2();
		fare_dist4_type = form.getFare_dist4_type();
		fare_dist4_charge= form.getFare_dist4_charge();
		return_fare_multiplier = form.getReturn_fare_multiplier();
		discount_for_over60= form.getDiscount_for_over60();
		discount_for_under16 = form.getDiscount_for_under16();
		fare_structure_checkbox_escort = form.isFare_structure_checkbox_escort();
		fare_structure_checkbox_charge_for_dead_mileage = form.isFare_structure_checkbox_charge_for_dead_mileage();
	}

	/** Multi-line dump of every field, one "name: value" pair per line. */
	@Override
	public String toString(){
		StringBuilder sb = new StringBuilder();
		sb.append("charge_standard_fare: "+ this.getCharge_standard_fare()+"\n");
		sb.append("fare_distance1_mile: "+ this.getFare_distance1_mile()+"\n");
		sb.append("fare_dist1_type: "+ this.getFare_dist1_type()+"\n");
		sb.append("fare_dist1_charge: "+ this.getFare_dist1_charge()+"\n");
		sb.append("fare_dist2_mile_1: "+ this.getFare_dist2_mile_1()+"\n");
		sb.append("fare_dist2_mile_2: "+ this.getFare_dist2_mile_2()+"\n");
		sb.append("fare_dist2_type: "+ this.getFare_dist2_type()+"\n");
		sb.append("fare_dist2_charge: "+ this.getFare_dist2_charge()+"\n");
		sb.append("fare_dist3_mile_1: "+ this.getFare_dist3_mile_1()+"\n");
		sb.append("fare_dist3_mile_2: "+ this.getFare_dist3_mile_2()+"\n");
		sb.append("fare_dist3_type: "+ this.getFare_dist3_type()+"\n");
		sb.append("fare_dist3_charge: "+ this.getFare_dist3_charge()+"\n");
		sb.append("fare_dist4_mile_1: "+ this.getFare_dist4_mile_1()+"\n");
		sb.append("fare_dist4_mile_2: "+ this.getFare_dist4_mile_2()+"\n");
		sb.append("fare_dist4_type: "+ this.getFare_dist4_type()+"\n");
		sb.append("fare_dist4_charge: "+ this.getFare_dist4_charge()+"\n");
		sb.append("return_fare_multiplier: "+ this.getReturn_fare_multiplier()+"\n");
		sb.append("discount_for_over60: "+ this.getDiscount_for_over60()+"\n");
		sb.append("discount_for_under16: "+ this.getDiscount_for_under16()+"\n");
		sb.append("fare_structure_checkbox_escort: "+ this.isFare_structure_checkbox_escort()+"\n");
		sb.append("fare_structure_checkbox_charge_for_dead_mileage: "+ this.isFare_structure_checkbox_charge_for_dead_mileage()+"\n");
		return sb.toString();
	}
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package durmoth.terrain;

import com.jme3.asset.AssetManager;
import com.jme3.asset.MaterialKey;
import com.jme3.effect.ParticleEmitter;
import com.jme3.effect.ParticleMesh;
import com.jme3.export.binary.BinaryExporter;
import com.jme3.light.PointLight;
import com.jme3.material.Material;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.terrain.geomipmap.TerrainQuad;
import com.jme3.terrain.heightmap.AbstractHeightMap;
import com.jme3.terrain.heightmap.ImageBasedHeightMap;
import com.jme3.texture.plugins.AWTLoader;
import com.jme3.texture.Image;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import com.jme3.renderer.queue.RenderQueue.Bucket;
import com.jme3.scene.Geometry;
import com.jme3.scene.shape.Sphere;
import com.jme3.util.TangentBinormalGenerator;
import java.io.File;
import java.io.IOException;
import java.util.logging.Logger;

/**
 * One terrain tile: owns the tile's height and alpha maps, builds the jME
 * scene-graph node for the tile (terrainNodeSetup) and populates it with
 * vegetation (createVegetationStep).
 *
 * @author User
 */
public class Terrain {

    // Tile edge length; the TerrainQuad is built with size+1 vertices per side.
    private int size;
    // Tile offsets in a world grid; multiplied by size when positioning the quad.
    private int xOff, yOff;
    // Seed handed to the procedural generators via the getters below.
    private int seed;
    private BufferedImage heightMap;
    private BufferedImage alphaMap;
    private TerrainAssets assetManager;
    private TerrainQuad terrainObject;
    // Root node named "Terrain_x_y" with children "terrainP" (the terrain quad)
    // and "terrainG" (vegetation geometry) -- built in terrainNodeSetup().
    private Node terrain;
    // Vegetation actually placed on the tile.
    private List<VegetationObject> vegList = new ArrayList<VegetationObject>();
    // Candidate placements produced by VegetationGenerator, consumed by createVegetationStep.
    private List<RawVegetationObject> rawVegList = new ArrayList<RawVegetationObject>();
    private List<RawVegetationObject> done = new ArrayList<RawVegetationObject>();
    // Horizontal scale factors applied to the quad and to vegetation positions.
    private float xScale = 1.0f;
    private float yScale = 1.0f;

    public Terrain(int tile_size, int xOffset, int yOffset, int s, TerrainAssets assetMan){
        size = tile_size;
        xOff = xOffset;
        yOff = yOffset;
        seed = s;
        assetManager = assetMan;
    }

    public void setXScale(float s){
        xScale = s;
    }

    public void setYScale(float s){
        yScale = s;
    }

    public float getXScale(){
        return xScale;
    }

    public float getYScale(){
        return yScale;
    }

    public void setHeightMap(BufferedImage hMap){
        heightMap = hMap;
    }

    // Regenerates this tile's height map from its own parameters (size/offsets/seed).
    public void generateHeightMap(){
        TerrainHeightMapGenerator thmg = new TerrainHeightMapGenerator(this);
        this.setHeightMap(thmg.generate());
    }

    public BufferedImage getHeightMap(){
        return heightMap;
    }

    public void setAssetManager(TerrainAssets ass){
        assetManager = ass;
    }

    public void setAlphaMap(BufferedImage aMap){
        alphaMap = aMap;
    }

    // Regenerates the texture-splatting alpha map for this tile.
    public void generateAlphaMap(){
        TerrainAlphaMapGenerator tamg = new TerrainAlphaMapGenerator(this);
        this.setAlphaMap(tamg.generate());
    }

    public BufferedImage getAlphaMap(){
        return alphaMap;
    }

    public void setRawVegetationObjectList(List<RawVegetationObject> vegL){
        rawVegList = vegL;
    }

    // Produces the raw (not yet placed) vegetation candidates for this tile.
    public void generateVegetation(){
        VegetationGenerator vegGen = new VegetationGenerator(this);
        setRawVegetationObjectList(vegGen.generate());
    }

    public List<RawVegetationObject> getRawVegetationObjectList(){
        return rawVegList;
    }

    public int getSize(){
        return size;
    }

    public int getxOff(){
        return xOff;
    }

    public int getyOff(){
        return yOff;
    }

    public int getSeed(){
        return seed;
    }

    public List<VegetationObject> getVegetationObjectList(){
        return vegList;
    }

    public void setVegetationObjectList(List<VegetationObject> vegL){
        vegList = vegL;
    }

    // Builds the tile's node hierarchy:
    //   terrain ("Terrain_x_y") -> terrainG (vegetation) + terrainP (terrain quad)
    // The quad is scaled by (xScale, 0.3, yScale) and translated so the tile
    // sits at its grid position, offset by half a tile.
    public void terrainNodeSetup(){
        // NOTE(review): second AWTLoader.load argument presumably flips the
        // image vertically -- confirm against the AWTLoader API.
        Image height = new AWTLoader().load(heightMap, true);
        Image alpha = new AWTLoader().load(alphaMap, true);
        Texture2D alpha_map = new Texture2D(alpha);
        Material terr_mat = assetManager.getMaterial("terrain");
        terr_mat.setTexture("AlphaMap", alpha_map);
        //Material terr_mat = new Material(assetManager, "Common/MatDefs/Misc/ShowNormals.j3md");
        AbstractHeightMap heightmap = null;
        heightmap = new ImageBasedHeightMap(height);
        heightmap.load();
        terrain = new Node();
        Node terrainP = new Node();
        Node terrainG = new Node();
        // Patch size (size/4)+1, total size size+1 -- TerrainQuad requires 2^n+1 values.
        terrainObject = new TerrainQuad("Terrain_"+xOff+"_"+yOff, (size/4)+1 ,size+1, heightmap.getHeightMap());
        // 0.3f vertical scale flattens the raw 0..255 height values.
        terrainObject.scale(xScale, 0.3f, yScale);
        terrainObject.setLocalTranslation(xOff*size*xScale+size*xScale/2, 0f, yOff*size*yScale+size*yScale/2);
        terrainObject.setMaterial(terr_mat);
        //TODO improve (original comment: "Verbessern")
        terrainP.attachChild(terrainObject);
        terrain.setName("Terrain_"+xOff+"_"+yOff);
        terrainP.setName("terrainP");
        terrainG.setName("terrainG");
        terrain.attachChild(terrainG);
        terrain.attachChild(terrainP);
        /*System.out.println(terrain);
        System.out.println(terrain.getChildren());
        for(Spatial s : terrain.getChildren())
            System.out.println(((Node) s).getChildren());*/
    }

    // Instantiates vegetation models from rawVegList and attaches those whose
    // sampled terrain height lies in the allowed band for their type.
    // NOTE(review): maxDelay/startTime suggest a time-budgeted incremental
    // step, but the time check is not visible in this part of the method --
    // confirm in the remainder (continues below with TREE2/LILYPAD/SWAMP_FIRE).
    public boolean createVegetationStep(long maxDelay){
        long startTime = System.nanoTime();
        for(RawVegetationObject o : rawVegList){
            if(o.getType()==VegetationObjectType.GRASS){
                Spatial grass = assetManager.getModel("grass");
                Material mat = assetManager.getMaterial("grass");
                grass.setMaterial(mat);
                grass.setQueueBucket(Bucket.Translucent);
                // World position: raw 2D position scaled, height sampled from the quad.
                float x = o.getPosition().x*xScale;
                float z = o.getPosition().y*yScale;
                float y = terrainObject.getHeight(new Vector2f(x,z));
                Vector3f pos = new Vector3f(x, y, z);
                grass.rotate(0f, o.getRotation(), 0f);
                grass.setLocalTranslation(pos);
                // Grass only in the 33..36 height band -- presumably just above
                // the water line; TODO confirm against the 35.81f level used below.
                if(y>33f&&y<36f){
                    ((Node) terrain.getChild("terrainG") ).attachChild(grass);
                    vegList.add(new VegetationObject(o.getType(),pos, o.getRotation()));
                }
            }
            if(o.getType()==VegetationObjectType.TREE1){
                Spatial grass = assetManager.getModel("tree1");
                Material wood = assetManager.getMaterial("wood");
                // Tangents needed for the normal-mapped "wood" material.
                TangentBinormalGenerator.generate(grass);
                grass.setMaterial(wood);
                grass.scale(2.0f);
                float x = o.getPosition().x*xScale;
                float z = o.getPosition().y*yScale;
                float y = terrainObject.getHeight(new Vector2f(x,z));
                Vector3f pos = new Vector3f(x, y, z);
                grass.rotate(0f, o.getRotation(), 0f);
                grass.setLocalTranslation(pos);
                // Trees anywhere above height 33 (no upper bound, unlike grass).
                if(y>33f){
                    ((Node) terrain.getChild("terrainG") ).attachChild(grass);
                    vegList.add(new VegetationObject(o.getType(),pos, o.getRotation()));
                }
            }
if(o.getType()==VegetationObjectType.TREE2){ Spatial grass = assetManager.getModel("tree3"); Material wood = assetManager.getMaterial("tree3"); grass.setMaterial(wood); //TangentBinormalGenerator.generate(grass); //grass.setMaterial(wood); grass.setQueueBucket(Bucket.Translucent); grass.scale(2.0f); float x = o.getPosition().x*xScale; float z = o.getPosition().y*yScale; float y = terrainObject.getHeight(new Vector2f(x,z)); Vector3f pos = new Vector3f(x, y, z); grass.rotate(0f, o.getRotation(), 0f); grass.setLocalTranslation(pos); if(y>33f){ ((Node) terrain.getChild("terrainG") ).attachChild(grass); vegList.add(new VegetationObject(o.getType(),pos, o.getRotation())); } } if(o.getType()==VegetationObjectType.LILYPAD){ Spatial grass = assetManager.getModel("lilypad"); Material mat = assetManager.getMaterial("lilypad"); grass.setMaterial(mat); grass.setQueueBucket(Bucket.Translucent); float x = o.getPosition().x*xScale; float z = o.getPosition().y*yScale; float y = 35.81f; float sy = terrainObject.getHeight(new Vector2f(x,z)); Vector3f pos = new Vector3f(x, y, z); grass.rotate(0f, o.getRotation(), 0f); grass.setLocalTranslation(pos); if(sy<=34.81f){ ((Node) terrain.getChild("terrainG") ).attachChild(grass); vegList.add(new VegetationObject(o.getType(),pos, o.getRotation())); } } if(o.getType()==VegetationObjectType.SWAMP_FIRE){ ParticleEmitter fireEffect = new ParticleEmitter("Emitter", ParticleMesh.Type.Triangle, 30); Material fireMat = assetManager.getMaterial("fire"); fireEffect.setMaterial(fireMat); fireEffect.setImagesX(2); fireEffect.setImagesY(2); fireEffect.setEndColor( new ColorRGBA(1f, 0f, 0f, 1f) ); fireEffect.setStartColor( new ColorRGBA(1f, 1f, 0f, 0.5f) ); fireEffect.getParticleInfluencer().setInitialVelocity(new Vector3f(0, 4, 0)); fireEffect.setStartSize(3f); fireEffect.setEndSize(1f); fireEffect.setGravity(0f,0f,0f); fireEffect.setLowLife(0.5f); fireEffect.setHighLife(3f); fireEffect.getParticleInfluencer().setVelocityVariation(0.3f); float x = 
o.getPosition().x*xScale; float z = o.getPosition().y*yScale; float y = 34.81f; float sy = terrainObject.getHeight(new Vector2f(x,z)); Vector3f pos = new Vector3f(x, y, z); fireEffect.setLocalTranslation(pos); fireEffect.setQueueBucket(Bucket.Translucent); if(sy<31.81){ ((Node) terrain.getChild("terrainG") ).attachChild(fireEffect); vegList.add(new VegetationObject(o.getType(),pos, o.getRotation())); PointLight lamp = new PointLight(); lamp.setPosition(fireEffect.getLocalTranslation()); lamp.setColor(ColorRGBA.Orange); lamp.setRadius(100); //terrainG.addLight(lamp); } } done.add(o); if(System.nanoTime()>startTime+maxDelay) break; } System.out.println(rawVegList.size()+" "+done.size()+" "+rawVegList.isEmpty()); rawVegList.removeAll(done); done.removeAll(done); return rawVegList.isEmpty(); } public Node getTerrainNode(){ terrain.setLocalTranslation(0, -130, 0); terrain.move(0f, 94.2f, 0f); return terrain; } }
package com.skyrocketgwt.demo.client.widgets;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

import com.google.gwt.cell.client.CheckboxCell;
import com.google.gwt.cell.client.EditTextCell;
import com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.cell.client.SelectionCell;
import com.google.gwt.cell.client.TextCell;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.resources.client.TextResource;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.cellview.client.ColumnSortEvent.ListHandler;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.view.client.DefaultSelectionEventManager;
import com.google.gwt.view.client.MultiSelectionModel;
import com.google.gwt.view.client.SelectionModel;
import com.skyrocketgwt.core.client.cellview.SkyCellTable;
import com.skyrocketgwt.core.client.cellview.SkySimplePager;
import com.skyrocketgwt.core.client.cellview.SkySimplePager.TextLocation;
import com.skyrocketgwt.demo.client.resources.code.CodeResources;
import com.skyrocketgwt.demo.client.widgets.ContactDatabase.Category;
import com.skyrocketgwt.demo.client.widgets.ContactDatabase.ContactInfo;
import com.skyrocketgwt.demo.client.widgets.images.ImageResources;

/**
 * Demo widget showing a {@link SkyCellTable} bound to the shared
 * {@link ContactDatabase}: editable first/last-name columns, a category
 * drop-down, a read-only address column, checkbox-based multi-selection,
 * column sorting, and a pager.
 */
public class SkyCellTableDemo extends AbstractWidget {

    @Override
    public Widget getWidgetDemo() {
        // Create a CellTable with a key provider so rows keep their identity
        // when editable fields (name, address) change.
        skyCellTable = new SkyCellTable<ContactInfo>(
                ContactDatabase.ContactInfo.KEY_PROVIDER);
        skyCellTable.setWidth("100%", true);

        // Do not refresh the headers and footers every time the data is updated.
        skyCellTable.setAutoHeaderRefreshDisabled(true);
        skyCellTable.setAutoFooterRefreshDisabled(true);

        // Attach a column sort handler to the ListDataProvider to sort the list.
        ListHandler<ContactInfo> sortHandler = new ListHandler<ContactInfo>(
                ContactDatabase.get().getDataProvider().getList());
        skyCellTable.addColumnSortHandler(sortHandler);

        // Pager showing 20 rows per page.
        pager = new SkySimplePager(true, 20, true);
        pager.setDisplay(skyCellTable);

        // Add a selection model so we can select cells; selection is driven by
        // the checkbox column only (see createCheckboxManager below).
        final SelectionModel<ContactInfo> selectionModel = new MultiSelectionModel<ContactInfo>(
                ContactDatabase.ContactInfo.KEY_PROVIDER);
        skyCellTable.setSelectionModel(selectionModel,
                DefaultSelectionEventManager.<ContactInfo> createCheckboxManager());

        // Initialize the columns (must happen after the selection model is set).
        initTableColumns(selectionModel, sortHandler);

        // Add the CellList to the adapter in the database so it gets data pushes.
        ContactDatabase.get().addDataDisplay(skyCellTable);

        VerticalPanel panel = new VerticalPanel();
        panel.add(skyCellTable);
        panel.add(pager);
        return panel;
    }

    // The table under demo; created in getWidgetDemo().
    SkyCellTable<ContactInfo> skyCellTable;
    // Pager driving the table's visible range.
    SkySimplePager pager;

    /**
     * Adds all columns to the table: checkbox selection, editable first/last
     * name (sortable), category drop-down, and read-only address (sortable,
     * descending by default).
     *
     * @param selectionModel selection model backing the checkbox column
     * @param sortHandler    handler that the sortable columns register with
     */
    private void initTableColumns(
            final SelectionModel<ContactInfo> selectionModel,
            ListHandler<ContactInfo> sortHandler) {
        CwConstants constants = GWT.create(CwConstants.class);

        // Checkbox column. This table uses a checkbox column for selection.
        // Alternatively, you can call cellTable.setSelectionEnabled(true) to
        // enable mouse selection.
        Column<ContactInfo, Boolean> checkColumn = new Column<ContactInfo, Boolean>(
                new CheckboxCell(true, false)) {
            @Override
            public Boolean getValue(ContactInfo object) {
                // Get the value from the selection model.
                return selectionModel.isSelected(object);
            }
        };
        skyCellTable.addColumn(checkColumn,
                SafeHtmlUtils.fromSafeConstant("<br/>"));
        skyCellTable.setColumnWidth(checkColumn, 40, Unit.PX);

        // First name: editable, sortable.
        Column<ContactInfo, String> firstNameColumn = new Column<ContactInfo, String>(
                new EditTextCell()) {
            @Override
            public String getValue(ContactInfo object) {
                return object.getFirstName();
            }
        };
        firstNameColumn.setSortable(true);
        sortHandler.setComparator(firstNameColumn,
                new Comparator<ContactInfo>() {
                    @Override
                    public int compare(ContactInfo o1, ContactInfo o2) {
                        return o1.getFirstName().compareTo(o2.getFirstName());
                    }
                });
        skyCellTable.addColumn(firstNameColumn,
                constants.cwCellTableColumnFirstName());
        firstNameColumn
                .setFieldUpdater(new FieldUpdater<ContactInfo, String>() {
                    @Override
                    public void update(int index, ContactInfo object,
                            String value) {
                        // Called when the user changes the value.
                        object.setFirstName(value);
                        ContactDatabase.get().refreshDisplays();
                    }
                });
        skyCellTable.setColumnWidth(firstNameColumn, 20, Unit.PCT);

        // Last name: editable, sortable.
        Column<ContactInfo, String> lastNameColumn = new Column<ContactInfo, String>(
                new EditTextCell()) {
            @Override
            public String getValue(ContactInfo object) {
                return object.getLastName();
            }
        };
        lastNameColumn.setSortable(true);
        sortHandler.setComparator(lastNameColumn,
                new Comparator<ContactInfo>() {
                    @Override
                    public int compare(ContactInfo o1, ContactInfo o2) {
                        return o1.getLastName().compareTo(o2.getLastName());
                    }
                });
        skyCellTable.addColumn(lastNameColumn,
                constants.cwCellTableColumnLastName());
        lastNameColumn.setFieldUpdater(new FieldUpdater<ContactInfo, String>() {
            @Override
            public void update(int index, ContactInfo object, String value) {
                // Called when the user changes the value.
                object.setLastName(value);
                ContactDatabase.get().refreshDisplays();
            }
        });
        skyCellTable.setColumnWidth(lastNameColumn, 20, Unit.PCT);

        // Category: drop-down of all known categories; writes the matching
        // Category object back on change.
        final Category[] categories = ContactDatabase.get().queryCategories();
        List<String> categoryNames = new ArrayList<String>();
        for (Category category : categories) {
            categoryNames.add(category.getDisplayName());
        }
        SelectionCell categoryCell = new SelectionCell(categoryNames);
        Column<ContactInfo, String> categoryColumn = new Column<ContactInfo, String>(
                categoryCell) {
            @Override
            public String getValue(ContactInfo object) {
                return object.getCategory().getDisplayName();
            }
        };
        skyCellTable.addColumn(categoryColumn,
                constants.cwCellTableColumnCategory());
        categoryColumn.setFieldUpdater(new FieldUpdater<ContactInfo, String>() {
            @Override
            public void update(int index, ContactInfo object, String value) {
                // Map the selected display name back to its Category.
                for (Category category : categories) {
                    if (category.getDisplayName().equals(value)) {
                        object.setCategory(category);
                    }
                }
                ContactDatabase.get().refreshDisplays();
            }
        });
        skyCellTable.setColumnWidth(categoryColumn, 130, Unit.PX);

        // Address: read-only, sortable, descending by default.
        Column<ContactInfo, String> addressColumn = new Column<ContactInfo, String>(
                new TextCell()) {
            @Override
            public String getValue(ContactInfo object) {
                return object.getAddress();
            }
        };
        addressColumn.setSortable(true);
        addressColumn.setDefaultSortAscending(false);
        sortHandler.setComparator(addressColumn,
                new Comparator<ContactInfo>() {
                    @Override
                    public int compare(ContactInfo o1, ContactInfo o2) {
                        return o1.getAddress().compareTo(o2.getAddress());
                    }
                });
        skyCellTable
                .addColumn(addressColumn, constants.cwCellTableColumnAddress());
        skyCellTable.setColumnWidth(addressColumn, 60, Unit.PCT);
    }

    @Override
    public String getWidgetDescription() {
        return "SkyCellTableDemo";
    }

    @Override
    public String getSourceCode() {
        // Source is provided via getSourceList() instead.
        return null;
    }

    @Override
    public String getCSS() {
        return null;
    }

    @Override
    public List<TextResource> getSourceList(CodeResources resources) {
        List<TextResource> sourceList = new ArrayList<TextResource>();
        sourceList.add(resources.skyCellTableDemo_java());
        return sourceList;
    }

    @Override
    public ImageResource getImageResource(ImageResources resources) {
        return resources.skyCellTable();
    }

    @Override
    public ImageResource getInactiveImageResource(ImageResources resources) {
        return resources.skyCellTableBW();
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.services.managers;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.jboss.logging.Logger;
import org.keycloak.authentication.ClientAuthenticator;
import org.keycloak.authentication.ClientAuthenticatorFactory;
import org.keycloak.common.constants.ServiceAccountConstants;
import org.keycloak.common.util.Time;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserManager;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserSessionProvider;
import org.keycloak.models.session.UserSessionPersisterProvider;
import org.keycloak.models.utils.RepresentationToModel;
import org.keycloak.protocol.LoginProtocol;
import org.keycloak.protocol.LoginProtocolFactory;
import org.keycloak.protocol.oidc.OIDCLoginProtocol;
import org.keycloak.protocol.oidc.mappers.UserSessionNoteMapper;
import org.keycloak.representations.adapters.config.BaseRealmConfig;
import org.keycloak.representations.adapters.config.PolicyEnforcerConfig;
import org.keycloak.representations.idm.ClientRepresentation;

import java.net.URI;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * Lifecycle operations for realm clients: creation, removal, service-account
 * management, cluster-node validation, and generation of adapter installation
 * configuration.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class ClientManager {
    private static final Logger logger = Logger.getLogger(ClientManager.class);

    protected RealmManager realmManager;

    public ClientManager(RealmManager realmManager) {
        this.realmManager = realmManager;
    }

    public ClientManager() {
    }

    /**
     * Should not be called from an import. This really expects that the client is created from the admin console.
     *
     * @param session         active Keycloak session
     * @param realm           realm the client is created in
     * @param rep             client representation from the admin console
     * @param addDefaultRoles whether default roles are attached to the new client
     * @return the newly created client model
     */
    public static ClientModel createClient(KeycloakSession session, RealmModel realm, ClientRepresentation rep, boolean addDefaultRoles) {
        ClientModel client = RepresentationToModel.createClient(session, realm, rep, addDefaultRoles);

        if (rep.getProtocol() != null) {
            // Let the protocol (e.g. openid-connect, saml) apply its defaults.
            LoginProtocolFactory providerFactory = (LoginProtocolFactory) session.getKeycloakSessionFactory().getProviderFactory(LoginProtocol.class, rep.getProtocol());
            providerFactory.setupClientDefaults(rep, client);
        }

        // Remove default mappers if there is a template: the template's mappers win.
        if (rep.getProtocolMappers() == null && rep.getClientTemplate() != null) {
            Set<ProtocolMapperModel> mappers = client.getProtocolMappers();
            for (ProtocolMapperModel mapper : mappers) {
                client.removeProtocolMapper(mapper);
            }
        }

        return client;
    }

    /**
     * Removes a client from the realm together with its user sessions,
     * persisted sessions, and its service-account user (if any).
     *
     * @return {@code true} if the realm actually contained and removed the client
     */
    public boolean removeClient(RealmModel realm, ClientModel client) {
        if (realm.removeClient(client.getId())) {
            UserSessionProvider sessions = realmManager.getSession().sessions();
            if (sessions != null) {
                sessions.onClientRemoved(realm, client);
            }

            UserSessionPersisterProvider sessionsPersister = realmManager.getSession().getProvider(UserSessionPersisterProvider.class);
            if (sessionsPersister != null) {
                sessionsPersister.onClientRemoved(realm, client);
            }

            UserModel serviceAccountUser = realmManager.getSession().users().getServiceAccount(client);
            if (serviceAccountUser != null) {
                new UserManager(realmManager.getSession()).removeUser(realm, serviceAccountUser);
            }

            return true;
        } else {
            return false;
        }
    }

    /**
     * Returns the cluster nodes of the client that re-registered recently
     * enough; nodes whose registration timed out are unregistered as a side
     * effect. When periodic re-registration is disabled on the client, all
     * registered nodes are considered valid.
     *
     * @return validated node host names, sorted; empty set if none registered
     */
    public Set<String> validateRegisteredNodes(ClientModel client) {
        Map<String, Integer> registeredNodes = client.getRegisteredNodes();
        if (registeredNodes == null || registeredNodes.isEmpty()) {
            return Collections.emptySet();
        }

        int currentTime = Time.currentTime();

        Set<String> validatedNodes = new TreeSet<String>();
        if (client.getNodeReRegistrationTimeout() > 0) {
            List<String> toRemove = new LinkedList<String>();
            for (Map.Entry<String, Integer> entry : registeredNodes.entrySet()) {
                Integer lastReRegistration = entry.getValue();
                if (lastReRegistration + client.getNodeReRegistrationTimeout() < currentTime) {
                    toRemove.add(entry.getKey());
                } else {
                    validatedNodes.add(entry.getKey());
                }
            }

            // Remove timed-out nodes (outside the iteration to avoid concurrent modification).
            for (String node : toRemove) {
                client.unregisterNode(node);
            }
        } else {
            // Periodic node reRegistration is disabled, so allow all nodes.
            validatedNodes.addAll(registeredNodes.keySet());
        }

        return validatedNodes;
    }

    /**
     * Enables service accounts for the client: creates the dedicated
     * service-account user (local storage only, no federation) and adds the
     * protocol mappers that expose the client id, host, and address in the
     * access token. Idempotent — existing user/mappers are left untouched.
     */
    public void enableServiceAccount(ClientModel client) {
        client.setServiceAccountsEnabled(true);

        // Add dedicated user for this service account
        if (realmManager.getSession().users().getServiceAccount(client) == null) {
            String username = ServiceAccountConstants.SERVICE_ACCOUNT_USER_PREFIX + client.getClientId();
            logger.debugf("Creating service account user '%s'", username);

            // Don't use federation for service account user
            UserModel user = realmManager.getSession().userLocalStorage().addUser(client.getRealm(), username);
            user.setEnabled(true);
            user.setEmail(username + "@placeholder.org");
            user.setServiceAccountClientLink(client.getId());
        }

        // Add protocol mappers to retrieve clientId in access token
        if (client.getProtocolMapperByName(OIDCLoginProtocol.LOGIN_PROTOCOL, ServiceAccountConstants.CLIENT_ID_PROTOCOL_MAPPER) == null) {
            logger.debugf("Creating service account protocol mapper '%s' for client '%s'", ServiceAccountConstants.CLIENT_ID_PROTOCOL_MAPPER, client.getClientId());
            ProtocolMapperModel protocolMapper = UserSessionNoteMapper.createClaimMapper(ServiceAccountConstants.CLIENT_ID_PROTOCOL_MAPPER,
                    ServiceAccountConstants.CLIENT_ID,
                    ServiceAccountConstants.CLIENT_ID, "String",
                    false, "",
                    true, true);
            client.addProtocolMapper(protocolMapper);
        }

        // Add protocol mappers to retrieve hostname and IP address of client in access token
        if (client.getProtocolMapperByName(OIDCLoginProtocol.LOGIN_PROTOCOL, ServiceAccountConstants.CLIENT_HOST_PROTOCOL_MAPPER) == null) {
            logger.debugf("Creating service account protocol mapper '%s' for client '%s'", ServiceAccountConstants.CLIENT_HOST_PROTOCOL_MAPPER, client.getClientId());
            ProtocolMapperModel protocolMapper = UserSessionNoteMapper.createClaimMapper(ServiceAccountConstants.CLIENT_HOST_PROTOCOL_MAPPER,
                    ServiceAccountConstants.CLIENT_HOST,
                    ServiceAccountConstants.CLIENT_HOST, "String",
                    false, "",
                    true, true);
            client.addProtocolMapper(protocolMapper);
        }

        if (client.getProtocolMapperByName(OIDCLoginProtocol.LOGIN_PROTOCOL, ServiceAccountConstants.CLIENT_ADDRESS_PROTOCOL_MAPPER) == null) {
            logger.debugf("Creating service account protocol mapper '%s' for client '%s'", ServiceAccountConstants.CLIENT_ADDRESS_PROTOCOL_MAPPER, client.getClientId());
            ProtocolMapperModel protocolMapper = UserSessionNoteMapper.createClaimMapper(ServiceAccountConstants.CLIENT_ADDRESS_PROTOCOL_MAPPER,
                    ServiceAccountConstants.CLIENT_ADDRESS,
                    ServiceAccountConstants.CLIENT_ADDRESS, "String",
                    false, "",
                    true, true);
            client.addProtocolMapper(protocolMapper);
        }
    }

    /**
     * Keeps the service-account user's username/email in sync when the
     * client's clientId changes.
     */
    public void clientIdChanged(ClientModel client, String newClientId) {
        logger.debugf("Updating clientId from '%s' to '%s'", client.getClientId(), newClientId);

        UserModel serviceAccountUser = realmManager.getSession().users().getServiceAccount(client);
        if (serviceAccountUser != null) {
            String username = ServiceAccountConstants.SERVICE_ACCOUNT_USER_PREFIX + newClientId;
            serviceAccountUser.setUsername(username);
            serviceAccountUser.setEmail(username + "@placeholder.org");
        }
    }

    /**
     * JSON shape of the keycloak.json adapter installation file.
     */
    @JsonPropertyOrder({"realm", "realm-public-key", "bearer-only", "auth-server-url", "ssl-required",
            "resource", "public-client", "credentials", "use-resource-role-mappings"})
    public static class InstallationAdapterConfig extends BaseRealmConfig {
        @JsonProperty("resource")
        protected String resource;
        @JsonProperty("use-resource-role-mappings")
        protected Boolean useResourceRoleMappings;
        @JsonProperty("bearer-only")
        protected Boolean bearerOnly;
        @JsonProperty("public-client")
        protected Boolean publicClient;
        @JsonProperty("credentials")
        protected Map<String, Object> credentials;
        @JsonProperty("policy-enforcer")
        protected PolicyEnforcerConfig enforcerConfig;

        public Boolean isUseResourceRoleMappings() {
            return useResourceRoleMappings;
        }

        public void setUseResourceRoleMappings(Boolean useResourceRoleMappings) {
            this.useResourceRoleMappings = useResourceRoleMappings;
        }

        public String getResource() {
            return resource;
        }

        public void setResource(String resource) {
            this.resource = resource;
        }

        public Map<String, Object> getCredentials() {
            return credentials;
        }

        public void setCredentials(Map<String, Object> credentials) {
            this.credentials = credentials;
        }

        public Boolean getPublicClient() {
            return publicClient;
        }

        public void setPublicClient(Boolean publicClient) {
            this.publicClient = publicClient;
        }

        public Boolean getBearerOnly() {
            return bearerOnly;
        }

        public void setBearerOnly(Boolean bearerOnly) {
            this.bearerOnly = bearerOnly;
        }

        public PolicyEnforcerConfig getEnforcerConfig() {
            return this.enforcerConfig;
        }

        public void setEnforcerConfig(PolicyEnforcerConfig enforcerConfig) {
            this.enforcerConfig = enforcerConfig;
        }
    }

    /**
     * Builds the keycloak.json adapter configuration for a client.
     *
     * @param baseUri base auth-server URI placed in the config
     */
    public InstallationAdapterConfig toInstallationRepresentation(RealmModel realmModel, ClientModel clientModel, URI baseUri) {
        InstallationAdapterConfig rep = new InstallationAdapterConfig();
        rep.setAuthServerUrl(baseUri.toString());
        rep.setRealm(realmModel.getName());
        rep.setSslRequired(realmModel.getSslRequired().name().toLowerCase());

        if (clientModel.isPublicClient() && !clientModel.isBearerOnly()) rep.setPublicClient(true);
        if (clientModel.isBearerOnly()) rep.setBearerOnly(true);
        if (clientModel.getRoles().size() > 0) rep.setUseResourceRoleMappings(true);

        rep.setResource(clientModel.getClientId());

        if (showClientCredentialsAdapterConfig(clientModel)) {
            Map<String, Object> adapterConfig = getClientCredentialsAdapterConfig(clientModel);
            rep.setCredentials(adapterConfig);
        }

        return rep;
    }

    /**
     * Renders the client as a WildFly/JBoss {@code <secure-deployment>}
     * subsystem XML snippet.
     */
    public String toJBossSubsystemConfig(RealmModel realmModel, ClientModel clientModel, URI baseUri) {
        // StringBuilder: this is single-threaded, no need for StringBuffer's locking.
        StringBuilder buffer = new StringBuilder();
        buffer.append("<secure-deployment name=\"WAR MODULE NAME.war\">\n");
        buffer.append(" <realm>").append(realmModel.getName()).append("</realm>\n");
        buffer.append(" <auth-server-url>").append(baseUri.toString()).append("</auth-server-url>\n");
        if (clientModel.isBearerOnly()){
            buffer.append(" <bearer-only>true</bearer-only>\n");
        } else if (clientModel.isPublicClient()) {
            buffer.append(" <public-client>true</public-client>\n");
        }
        buffer.append(" <ssl-required>").append(realmModel.getSslRequired().name()).append("</ssl-required>\n");
        buffer.append(" <resource>").append(clientModel.getClientId()).append("</resource>\n");
        if (showClientCredentialsAdapterConfig(clientModel)) {
            Map<String, Object> adapterConfig = getClientCredentialsAdapterConfig(clientModel);
            for (Map.Entry<String, Object> entry : adapterConfig.entrySet()) {
                buffer.append(" <credential name=\"" + entry.getKey() + "\">");

                Object value = entry.getValue();
                if (value instanceof Map) {
                    // Nested credential (e.g. JWT config) renders as child elements.
                    buffer.append("\n");
                    @SuppressWarnings("unchecked") // adapter config maps are Map<String, Object> by contract
                    Map<String, Object> asMap = (Map<String, Object>) value;
                    for (Map.Entry<String, Object> credEntry : asMap.entrySet()) {
                        buffer.append(" <" + credEntry.getKey() + ">" + credEntry.getValue().toString() + "</" + credEntry.getKey() + ">\n");
                    }
                    buffer.append(" </credential>\n");
                } else {
                    buffer.append(value.toString()).append("</credential>\n");
                }
            }
        }
        if (clientModel.getRoles().size() > 0) {
            buffer.append(" <use-resource-role-mappings>true</use-resource-role-mappings>\n");
        }
        buffer.append("</secure-deployment>\n");
        return buffer.toString();
    }

    /**
     * Credentials go into the adapter config only for confidential clients
     * (or bearer-only clients that still do node re-registration).
     */
    private boolean showClientCredentialsAdapterConfig(ClientModel client) {
        if (client.isPublicClient()) {
            return false;
        }

        if (client.isBearerOnly() && client.getNodeReRegistrationTimeout() <= 0) {
            return false;
        }

        return true;
    }

    /** Delegates to the client's configured authenticator for its adapter credential block. */
    private Map<String, Object> getClientCredentialsAdapterConfig(ClientModel client) {
        String clientAuthenticator = client.getClientAuthenticatorType();
        ClientAuthenticatorFactory authenticator = (ClientAuthenticatorFactory) realmManager.getSession().getKeycloakSessionFactory().getProviderFactory(ClientAuthenticator.class, clientAuthenticator);
        return authenticator.getAdapterConfiguration(client);
    }
}
/* * Copyright (C) 2014 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.sample.castcompanionlibrary.cast.tracks; import android.annotation.SuppressLint; import android.content.Context; import android.content.SharedPreferences; import android.content.res.Resources; import android.graphics.Color; import android.graphics.Typeface; import android.preference.CheckBoxPreference; import android.preference.ListPreference; import android.preference.PreferenceManager; import android.preference.PreferenceScreen; import android.view.accessibility.CaptioningManager; import com.google.android.gms.cast.TextTrackStyle; import com.google.sample.castcompanionlibrary.R; import com.google.sample.castcompanionlibrary.cast.VideoCastManager; import com.google.sample.castcompanionlibrary.cast.exceptions.CastException; import com.google.sample.castcompanionlibrary.utils.LogUtils; import com.google.sample.castcompanionlibrary.utils.Utils; import java.util.HashMap; import java.util.Map; import static com.google.sample.castcompanionlibrary.utils.LogUtils.LOGD; import static com.google.sample.castcompanionlibrary.utils.LogUtils.LOGE; /** * This class manages preference settings for captions for Android versions prior to KitKat and * provides a number of methods that would work across all supported versions of Android. 
*/ public class TracksPreferenceManager implements SharedPreferences.OnSharedPreferenceChangeListener { private final Context mContext; private final SharedPreferences mSharedPreferences; public TracksPreferenceManager(Context context) { mContext = context; mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(mContext); mSharedPreferences.registerOnSharedPreferenceChangeListener(this); } private static final String TAG = LogUtils.makeLogTag(TracksPreferenceManager.class); public static final String FONT_FAMILY_SANS_SERIF = "FONT_FAMILY_SANS_SERIF"; public static final String EDGE_TYPE_DEFAULT = "EDGE_TYPE_NONE"; private ListPreference mCaptionFontScaleListPreference; private ListPreference mCaptionFontFamilyListPreference; private ListPreference mCaptionTextColorListPreference; private ListPreference mCaptionTextOpacityListPreference; private ListPreference mCaptionEdgeTypeListPreference; private ListPreference mCaptionBackgroundColorListPreference; private ListPreference mCaptionBackgroundOpacityListPreference; private CheckBoxPreference mCaptionAvailability; private static Map<String, String> OPACITY_MAPPING = new HashMap<String, String>(); private static Map<String, Integer> FONT_FAMILY_MAPPING = new HashMap<String, Integer>(); private static Map<String, Integer> EDGE_TYPE_MAPPING = new HashMap<String, Integer>(); private boolean isInitialized = false; static { OPACITY_MAPPING.put("FF", "100"); OPACITY_MAPPING.put("BF", "75"); OPACITY_MAPPING.put("80", "50"); OPACITY_MAPPING.put("3F", "25"); } static { FONT_FAMILY_MAPPING.put("FONT_FAMILY_SANS_SERIF", TextTrackStyle.FONT_FAMILY_SANS_SERIF); FONT_FAMILY_MAPPING.put("FONT_FAMILY_SERIF", TextTrackStyle.FONT_FAMILY_SERIF); FONT_FAMILY_MAPPING.put("FONT_FAMILY_MONOSPACED_SANS_SERIF", TextTrackStyle.FONT_FAMILY_MONOSPACED_SANS_SERIF); } static { EDGE_TYPE_MAPPING.put("EDGE_TYPE_NONE", TextTrackStyle.EDGE_TYPE_NONE); EDGE_TYPE_MAPPING.put("EDGE_TYPE_OUTLINE", TextTrackStyle.EDGE_TYPE_OUTLINE); 
EDGE_TYPE_MAPPING.put("EDGE_TYPE_DROP_SHADOW", TextTrackStyle.EDGE_TYPE_DROP_SHADOW); } @SuppressLint("NewApi") public TextTrackStyle getTextTrackStyle() { final TextTrackStyle textTrackStyle = TextTrackStyle.fromSystemSettings(mContext); if (Utils.IS_KITKAT_OR_ABOVE) { return textTrackStyle; } else { // we need to populate all the fields ourselves textTrackStyle.setFontGenericFamily(FONT_FAMILY_MAPPING.get(getFontFamily())); textTrackStyle.setBackgroundColor(Color.parseColor(getBackgroundColor())); textTrackStyle.setEdgeType(EDGE_TYPE_MAPPING.get(getEdgeType())); textTrackStyle.setFontScale(getFontScale()); boolean isBold = Typeface.DEFAULT.isBold(); boolean isItalic = Typeface.DEFAULT.isItalic(); int fontStyle = TextTrackStyle.FONT_STYLE_NORMAL; if (isBold && isItalic) { fontStyle = TextTrackStyle.FONT_STYLE_BOLD_ITALIC; } else if (!isBold && !isItalic) { fontStyle = TextTrackStyle.FONT_STYLE_NORMAL; } else if (isBold) { fontStyle = TextTrackStyle.FONT_STYLE_BOLD; } textTrackStyle.setFontStyle(fontStyle); textTrackStyle.setForegroundColor( combineColorAndOpacity(getTextColor(), getTextOpacity())); LOGD(TAG, "Edge is: " + getEdgeType()); textTrackStyle.setBackgroundColor(combineColorAndOpacity(getBackgroundColor(), getBackgroundOpacity()) ); } return textTrackStyle; } @SuppressLint("NewApi") public boolean isCaptionEnabled() { if (Utils.IS_KITKAT_OR_ABOVE) { CaptioningManager captioningManager = (CaptioningManager) mContext.getSystemService(Context.CAPTIONING_SERVICE); return captioningManager.isEnabled(); } else { return Utils.getBooleanFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_enabled), false); } } public void setFontFamily(String fontFamily) { Utils.saveStringToPreference(mContext, mContext.getString(R.string.ccl_key_caption_font_family), fontFamily); } public String getFontFamily() { return Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_font_family), FONT_FAMILY_SANS_SERIF); } public void 
setFontScale(String value) { Utils.saveStringToPreference(mContext, mContext.getString(R.string.ccl_key_caption_font_scale), value); } public float getFontScale() { String scaleStr = Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_font_scale), TextTrackStyle.DEFAULT_FONT_SCALE + ""); return Float.parseFloat(scaleStr); } public void setTextColor(String textColor) { Utils.saveStringToPreference(mContext, mContext.getString(R.string.ccl_key_caption_text_color), textColor); } public String getTextColor() { return Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_text_color), mContext.getString(R.string.prefs_caption_text_color_value_default)); } public void setTextOpacity(String textColor) { Utils.saveStringToPreference(mContext, mContext.getString(R.string.ccl_key_caption_text_opacity), textColor); } public String getTextOpacity() { return Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_text_opacity), mContext.getString(R.string.prefs_caption_text_opacity_value_default)); } public void setEdgeType(String textColor) { Utils.saveStringToPreference(mContext, mContext.getString(R.string.ccl_key_caption_edge_type), textColor); } public String getEdgeType() { return Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_edge_type), EDGE_TYPE_DEFAULT); } public void setBackgroundColor(Context mContext, String textColor) { Utils.saveStringToPreference(mContext, mContext.getString(R.string.ccl_key_caption_background_color), textColor); } public String getBackgroundColor() { return Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_background_color), mContext.getString(R.string.prefs_caption_background_color_value_default)); } public void setBackgroundOpacity(String textColor) { Utils.saveStringToPreference(mContext, mContext.getString(R.string.ccl_key_caption_background_opacity), textColor); } public String 
getBackgroundOpacity() { return Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_background_opacity), mContext.getString(R.string.prefs_caption_background_opacity_value_default)); } public void setupPreferences(PreferenceScreen screen) { mCaptionAvailability = (CheckBoxPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_enabled)); mCaptionFontScaleListPreference = (ListPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_font_scale)); mCaptionFontFamilyListPreference = (ListPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_font_family)); mCaptionTextColorListPreference = (ListPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_text_color)); mCaptionTextOpacityListPreference = (ListPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_text_opacity)); mCaptionEdgeTypeListPreference = (ListPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_edge_type)); mCaptionBackgroundColorListPreference = (ListPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_background_color)); mCaptionBackgroundOpacityListPreference = (ListPreference) screen.findPreference( mContext.getString(R.string.ccl_key_caption_background_opacity)); isInitialized = true; onSharedPreferenceChanged(mSharedPreferences, mContext.getString(R.string.ccl_key_caption_enabled), false); onSharedPreferenceChanged(mSharedPreferences, mContext.getString(R.string.ccl_key_caption_font_family), false); onSharedPreferenceChanged(mSharedPreferences, mContext.getString(R.string.ccl_key_caption_font_scale), false); onSharedPreferenceChanged(mSharedPreferences, mContext.getString(R.string.ccl_key_caption_text_color), false); onSharedPreferenceChanged(mSharedPreferences, mContext.getString(R.string.ccl_key_caption_text_opacity), false); onSharedPreferenceChanged(mSharedPreferences, 
mContext.getString(R.string.ccl_key_caption_edge_type), false); onSharedPreferenceChanged(mSharedPreferences, mContext.getString(R.string.ccl_key_caption_background_color), false); onSharedPreferenceChanged(mSharedPreferences, mContext.getString(R.string.ccl_key_caption_background_opacity), false); } private void setCaptionAvailability(boolean status) { mCaptionFontScaleListPreference.setEnabled(status); mCaptionFontFamilyListPreference.setEnabled(status); mCaptionTextColorListPreference.setEnabled(status); mCaptionTextOpacityListPreference.setEnabled(status); mCaptionEdgeTypeListPreference.setEnabled(status); mCaptionBackgroundColorListPreference.setEnabled(status); mCaptionBackgroundOpacityListPreference.setEnabled(status); } private String getCaptionSummaryForList(SharedPreferences sharedPreferences, int keyResourceId, int defaultResourceId, int namesResourceId, int valuesResourceId) { Resources resources = mContext.getResources(); String value = sharedPreferences.getString(resources.getString(keyResourceId), resources.getString(defaultResourceId)); String[] labels = resources.getStringArray(namesResourceId); String[] values = resources.getStringArray(valuesResourceId); for (int i = 0; i < values.length; i++) { if (values[i].equals(value)) { return labels[i]; } } return ""; } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { onSharedPreferenceChanged(sharedPreferences, key, true); } public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key, boolean broadcast) { if (!isInitialized) { return; } if (mContext.getString(R.string.ccl_key_caption_enabled).equals(key)) { mCaptionAvailability.setSummary( mCaptionAvailability.isChecked() ? 
R.string.prefs_caption_enabled : R.string.prefs_caption_disabled ); setCaptionAvailability(mCaptionAvailability.isChecked()); if (broadcast) { try { VideoCastManager.getInstance() .onTextTrackEnabledChanged(mCaptionAvailability.isChecked()); } catch (CastException e) { } } return; } if (mContext.getString(R.string.ccl_key_caption_font_scale).equals(key)) { mCaptionFontScaleListPreference .setSummary( getCaptionSummaryForList(sharedPreferences, R.string.ccl_key_caption_font_scale, R.string.prefs_caption_font_scale_value_default, R.array.prefs_caption_font_scale_names, R.array.prefs_caption_font_scale_values) ); } else if (mContext.getString(R.string.ccl_key_caption_font_family).equals(key)) { mCaptionFontFamilyListPreference .setSummary( getCaptionSummaryForList(sharedPreferences, R.string.ccl_key_caption_font_family, R.string.prefs_caption_font_family_value_default, R.array.prefs_caption_font_family_names, R.array.prefs_caption_font_family_values) ); } else if (mContext.getString(R.string.ccl_key_caption_text_color).equals(key)) { mCaptionTextColorListPreference .setSummary( getCaptionSummaryForList(sharedPreferences, R.string.ccl_key_caption_text_color, R.string.prefs_caption_text_color_value_default, R.array.prefs_caption_color_names, R.array.prefs_caption_color_values) ); } else if (mContext.getString(R.string.ccl_key_caption_text_opacity).equals(key)) { String opacity = Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_text_opacity), mContext.getString(R.string.prefs_caption_text_opacity_value_default)); mCaptionTextOpacityListPreference .setSummary(OPACITY_MAPPING.get(opacity) + "%%"); } else if (mContext.getString(R.string.ccl_key_caption_edge_type).equals(key)) { mCaptionEdgeTypeListPreference .setSummary( getCaptionSummaryForList(sharedPreferences, R.string.ccl_key_caption_edge_type, R.string.prefs_caption_edge_type_value_default, R.array.prefs_caption_edge_type_names, R.array.prefs_caption_edge_type_values) ); } else if 
(mContext.getString(R.string.ccl_key_caption_background_color).equals(key)) { mCaptionBackgroundColorListPreference .setSummary(getCaptionSummaryForList(sharedPreferences, R.string.ccl_key_caption_background_color, R.string.prefs_caption_background_color_value_default, R.array.prefs_caption_color_names, R.array.prefs_caption_color_values)); } else if (mContext.getString(R.string.ccl_key_caption_background_opacity).equals(key)) { String opacity = Utils.getStringFromPreference(mContext, mContext.getString(R.string.ccl_key_caption_background_opacity), mContext.getString(R.string.prefs_caption_background_opacity_value_default)); mCaptionBackgroundOpacityListPreference .setSummary(OPACITY_MAPPING.get(opacity) + "%%"); } if (broadcast) { try { VideoCastManager.getInstance() .onTextTrackStyleChanged(getTextTrackStyle()); } catch (CastException e) { LOGE(TAG, "Failed to report text track style", e); } } } private static int combineColorAndOpacity(String color, String opacity) { color = color.replace("#", ""); return Color.parseColor("#" + opacity + color); } }
/** * Copyright 2016 Ezhome Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.eusecom.attendance.rxfirebase2.database; import android.util.Log; import com.eusecom.attendance.rxfirebase2.FirebaseChildEvent; import com.eusecom.attendance.rxfirebase2.FirebaseChildEvent.EventType; import com.google.firebase.database.ChildEventListener; import com.google.firebase.database.DataSnapshot; import com.google.firebase.database.DatabaseError; import com.google.firebase.database.DatabaseReference; import com.google.firebase.database.Query; import com.google.firebase.database.ValueEventListener; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.xml.datatype.Duration; import rx.Observable; import rx.Scheduler; import rx.Subscriber; import rx.android.schedulers.AndroidSchedulers; import rx.functions.Action0; import rx.functions.Func1; import rx.schedulers.Schedulers; import rx.subscriptions.Subscriptions; import static android.R.attr.delay; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; /** * The class is used as Decorator to * Firebase Database functionality with RxJava */ public class RxFirebaseDatabase { public static volatile RxFirebaseDatabase instance; /** * Observe Scheduler */ private Scheduler observeOnScheduler; /** * Singleton pattern * * @return {@link RxFirebaseDatabase} */ public static RxFirebaseDatabase getInstance() { if (instance == null) { synchronized 
(RxFirebaseDatabase.class) { if (instance == null) { instance = new RxFirebaseDatabase(); } } } return instance; } protected RxFirebaseDatabase() { //empty constructor, prevent initialisation } /** * This method will set specific Scheduler on what values will be Observed on * * @param observeOnScheduler {@link Scheduler} for observed on */ public RxFirebaseDatabase observeOn(Scheduler observeOnScheduler) { this.observeOnScheduler = observeOnScheduler; return this; } /** * This methods observes data saving with push in order to generate the key * automatically according to Firebase hashing key rules. * * @param firebaseRef this is reference of a Firebase * @param object {@link Object} whatever object we want to save * @return an {@link rx.Observable} of the generated key after * the object persistence */ public Observable<String> observeSetValuePush(final DatabaseReference firebaseRef, final Object object, final int del) { return Observable.create(new Observable.OnSubscribe<String>() { @Override public void call(final Subscriber<? super String> subscriber) { final DatabaseReference ref = firebaseRef.push(); ref.addListenerForSingleValueEvent(new ValueEventListener() { @Override public void onDataChange(DataSnapshot dataSnapshot) { Log.d("ListenerRx ", "new Push ForSingleValueEvent "); subscriber.onNext(ref.getKey()); subscriber.onCompleted(); } @Override public void onCancelled(DatabaseError error) { FirebaseDatabaseErrorFactory.buildError(subscriber, error); } }); ref.setValue(object); } }).compose(this.<String>applyScheduler()); } /** * This methods observes key of edited data . 
* * @param firebaseRef this is reference of a Firebase * @param object {@link Object} whatever object we want to save * @return an {@link rx.Observable} of the generated key after * the object persistence */ public Observable<String> observeEditValue(final DatabaseReference firebaseRef, final Object object, final int del) { return Observable.create(new Observable.OnSubscribe<String>() { @Override public void call(final Subscriber<? super String> subscriber) { final DatabaseReference ref = firebaseRef; ref.addListenerForSingleValueEvent(new ValueEventListener() { @Override public void onDataChange(DataSnapshot dataSnapshot) { Log.d("ListenerRx ", "Edit ForSingleValueEvent "); subscriber.onNext(ref.getKey()); subscriber.onCompleted(); } @Override public void onCancelled(DatabaseError error) { FirebaseDatabaseErrorFactory.buildError(subscriber, error); } }); ref.setValue(object); } }).compose(this.<String>applyScheduler()); } /** * This methods observes data deleting by the key in reference. * * @param firebaseRef {@link Query} this is reference of a Firebase Query and key * @param object {@link Object} null object we want to delete * @return an {@link rx.Observable} of the deleted key after * the object persistence */ public Observable<String> observeDelValuePush(final DatabaseReference firebaseRef, final Object object, final int del) { return Observable.create(new Observable.OnSubscribe<String>() { @Override public void call(final Subscriber<? 
super String> subscriber) { final DatabaseReference ref = firebaseRef; ref.addListenerForSingleValueEvent(new ValueEventListener() { @Override public void onDataChange(DataSnapshot dataSnapshot) { subscriber.onNext(ref.getKey()); subscriber.onCompleted(); } @Override public void onCancelled(DatabaseError error) { FirebaseDatabaseErrorFactory.buildError(subscriber, error); } }); ref.setValue(object); } }).compose(this.<String>applyScheduler()); } /** * This methods observes a firebase query and returns back * an Observable of the {@link DataSnapshot} * when the firebase client uses a {@link ValueEventListener} * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of datasnapshot to use */ public Observable<DataSnapshot> observeValueEvent(final Query firebaseRef) { return Observable.create(new Observable.OnSubscribe<DataSnapshot>() { @Override public void call(final Subscriber<? super DataSnapshot> subscriber) { final ValueEventListener listener = firebaseRef.addValueEventListener(new ValueEventListener() { @Override public void onDataChange(DataSnapshot dataSnapshot) { subscriber.onNext(dataSnapshot); } @Override public void onCancelled(DatabaseError error) { FirebaseDatabaseErrorFactory.buildError(subscriber, error); } }); // When the subscription is cancelled, remove the listener subscriber.add(Subscriptions.create(new Action0() { @Override public void call() { firebaseRef.removeEventListener(listener); } })); } }).compose(this.<DataSnapshot>applyScheduler()); } /** * This methods observes a firebase query and returns back delayed * Observable of the {@link DataSnapshot} * when the firebase client uses a {@link ValueEventListener} * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of datasnapshot to use */ public Observable<DataSnapshot> observeValueEventDelayed(final Query firebaseRef) { return Observable.create(new 
Observable.OnSubscribe<DataSnapshot>() { @Override public void call(final Subscriber<? super DataSnapshot> subscriber) { final ValueEventListener listener = firebaseRef.addValueEventListener(new ValueEventListener() { @Override public void onDataChange(DataSnapshot dataSnapshot) { subscriber.onNext(dataSnapshot); } @Override public void onCancelled(DatabaseError error) { FirebaseDatabaseErrorFactory.buildError(subscriber, error); } }); // When the subscription is cancelled, remove the listener subscriber.add(Subscriptions.create(new Action0() { @Override public void call() { firebaseRef.removeEventListener(listener); } })); } }).delay(500, TimeUnit.MILLISECONDS).observeOn(AndroidSchedulers.mainThread()).compose(this.<DataSnapshot>applyScheduler()); } //work subscription is delayed, onNext is not delayed }).delaySubscription(10, TimeUnit.SECONDS).compose(this.<DataSnapshot>applyScheduler()); /** * This methods observes a firebase query and returns back ONCE * an Observable of the {@link DataSnapshot} * when the firebase client uses a {@link ValueEventListener} * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of datasnapshot to use */ public Observable<DataSnapshot> observeSingleValue(final Query firebaseRef) { return Observable.create(new Observable.OnSubscribe<DataSnapshot>() { @Override public void call(final Subscriber<? 
super DataSnapshot> subscriber) { final ValueEventListener listener = new ValueEventListener() { @Override public void onDataChange(DataSnapshot dataSnapshot) { subscriber.onNext(dataSnapshot); subscriber.onCompleted(); } @Override public void onCancelled(DatabaseError error) { FirebaseDatabaseErrorFactory.buildError(subscriber, error); } }; firebaseRef.addListenerForSingleValueEvent(listener); // When the subscription is cancelled, remove the listener subscriber.add(Subscriptions.create(new Action0() { @Override public void call() { firebaseRef.removeEventListener(listener); } })); } }).compose(this.<DataSnapshot>applyScheduler()); } /** * This methods observes a firebase query and returns back * an Observable of the {@link DataSnapshot} * when the firebase client uses a {@link ChildEventListener} * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of {@link //FirebaseChildEvent} * to use */ public Observable<FirebaseChildEvent> observeChildEvent(final Query firebaseRef) { return Observable.create(new Observable.OnSubscribe<FirebaseChildEvent>() { @Override public void call(final Subscriber<? 
super FirebaseChildEvent> subscriber) { final ChildEventListener childEventListener = firebaseRef.addChildEventListener(new ChildEventListener() { @Override public void onChildAdded(DataSnapshot dataSnapshot, String previousChildName) { subscriber.onNext( new FirebaseChildEvent(dataSnapshot, previousChildName, EventType.ADDED)); } @Override public void onChildChanged(DataSnapshot dataSnapshot, String previousChildName) { subscriber.onNext( new FirebaseChildEvent(dataSnapshot, previousChildName, EventType.CHANGED)); } @Override public void onChildRemoved(DataSnapshot dataSnapshot) { subscriber.onNext(new FirebaseChildEvent(dataSnapshot, EventType.REMOVED)); } @Override public void onChildMoved(DataSnapshot dataSnapshot, String previousChildName) { subscriber.onNext( new FirebaseChildEvent(dataSnapshot, previousChildName, EventType.MOVED)); } @Override public void onCancelled(DatabaseError error) { FirebaseDatabaseErrorFactory.buildError(subscriber, error); } }); // this is used to remove the listener when the subscriber is // cancelled (unsubscribe) subscriber.add(Subscriptions.create(new Action0() { @Override public void call() { firebaseRef.removeEventListener(childEventListener); } })); } }).compose(this.<FirebaseChildEvent>applyScheduler()); } /** * Creates an observable only for the child changed method * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of {@link FirebaseChildEvent} * to use */ public Observable<FirebaseChildEvent> observeChildAdded(final Query firebaseRef) { return observeChildEvent(firebaseRef).filter(filterChildEvent(EventType.ADDED)); } /** * Creates an observable only for the child changed method * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of {@link FirebaseChildEvent} * to use */ public Observable<FirebaseChildEvent> observeChildChanged(final Query firebaseRef) { return 
observeChildEvent(firebaseRef).filter(filterChildEvent(EventType.CHANGED)); } /** * Creates an observable only for the child removed method * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of {@link FirebaseChildEvent} * to use */ public Observable<FirebaseChildEvent> observeChildRemoved(final Query firebaseRef) { return observeChildEvent(firebaseRef).filter(filterChildEvent(EventType.REMOVED)); } /** * Creates an observable only for the child removed method * * @param firebaseRef {@link Query} this is reference of a Firebase Query * @return an {@link rx.Observable} of {@link FirebaseChildEvent} * to use */ public Observable<FirebaseChildEvent> observeChildMoved(final Query firebaseRef) { return observeChildEvent(firebaseRef).filter(filterChildEvent(EventType.MOVED)); } /** * Functions which filters a stream of {@link Observable} according to firebase * child event type * * @param type {@link FirebaseChildEvent} * @return {@link rx.functions.Func1} a function which returns a boolean if the type are equals */ private Func1<FirebaseChildEvent, Boolean> filterChildEvent(final EventType type) { return new Func1<FirebaseChildEvent, Boolean>() { @Override public Boolean call(FirebaseChildEvent firebaseChildEvent) { return firebaseChildEvent.getEventType() == type; } }; } /** * Function that receives the current Observable and should apply scheduler * * @param <T> source Observable * @return an {@link rx.Observable} with new or the same observe on scheduler */ @SuppressWarnings("unchecked") private <T> Observable.Transformer<T, T> applyScheduler() { return new Observable.Transformer<T, T>() { @Override public Observable<T> call(Observable<T> observable) { if (observeOnScheduler != null) { return observable.observeOn(observeOnScheduler); } //System.out.println("Observable thread: " + Thread.currentThread().getName()); return observable; } }; } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.pi.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Performance Insights <code>GetResourceMetadata</code> operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pi-2018-02-27/GetResourceMetadata" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetResourceMetadataRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The Amazon Web Services service for which Performance Insights returns metrics.
     * </p>
     */
    private String serviceType;
    /**
     * <p>
     * An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights
     * gathers metrics from this data source. To use a DB instance as a data source, specify its
     * <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>.
     * </p>
     */
    private String identifier;

    /**
     * <p>
     * The Amazon Web Services service for which Performance Insights returns metrics.
     * </p>
     *
     * @param serviceType
     *        The Amazon Web Services service for which Performance Insights returns metrics.
     * @see ServiceType
     */
    public void setServiceType(String serviceType) {
        this.serviceType = serviceType;
    }

    /**
     * <p>
     * The Amazon Web Services service for which Performance Insights returns metrics.
     * </p>
     *
     * @return The Amazon Web Services service for which Performance Insights returns metrics.
     * @see ServiceType
     */
    public String getServiceType() {
        return this.serviceType;
    }

    /**
     * <p>
     * The Amazon Web Services service for which Performance Insights returns metrics.
     * </p>
     *
     * @param serviceType
     *        The Amazon Web Services service for which Performance Insights returns metrics.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ServiceType
     */
    public GetResourceMetadataRequest withServiceType(String serviceType) {
        setServiceType(serviceType);
        return this;
    }

    /**
     * <p>
     * The Amazon Web Services service for which Performance Insights returns metrics.
     * </p>
     *
     * @param serviceType
     *        The Amazon Web Services service for which Performance Insights returns metrics.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ServiceType
     */
    public GetResourceMetadataRequest withServiceType(ServiceType serviceType) {
        this.serviceType = serviceType.toString();
        return this;
    }

    /**
     * <p>
     * An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights
     * gathers metrics from this data source. To use a DB instance as a data source, specify its
     * <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>.
     * </p>
     *
     * @param identifier
     *        An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance
     *        Insights gathers metrics from this data source. To use a DB instance as a data source, specify its
     *        <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>.
     */
    public void setIdentifier(String identifier) {
        this.identifier = identifier;
    }

    /**
     * <p>
     * An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights
     * gathers metrics from this data source. To use a DB instance as a data source, specify its
     * <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>.
     * </p>
     *
     * @return An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance
     *         Insights gathers metrics from this data source. To use a DB instance as a data source, specify its
     *         <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>.
     */
    public String getIdentifier() {
        return this.identifier;
    }

    /**
     * <p>
     * An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights
     * gathers metrics from this data source. To use a DB instance as a data source, specify its
     * <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>.
     * </p>
     *
     * @param identifier
     *        An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance
     *        Insights gathers metrics from this data source. To use a DB instance as a data source, specify its
     *        <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetResourceMetadataRequest withIdentifier(String identifier) {
        setIdentifier(identifier);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getServiceType() != null)
            sb.append("ServiceType: ").append(getServiceType()).append(",");
        if (getIdentifier() != null)
            sb.append("Identifier: ").append(getIdentifier());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // Replaces the generated "instanceof == false" / XOR null-check anti-idioms with
        // Objects.equals; semantics are identical (a null obj fails instanceof).
        if (!(obj instanceof GetResourceMetadataRequest))
            return false;
        GetResourceMetadataRequest other = (GetResourceMetadataRequest) obj;
        return Objects.equals(getServiceType(), other.getServiceType())
                && Objects.equals(getIdentifier(), other.getIdentifier());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Objects.hashCode(x) is x.hashCode() or 0 for null — the same fold as before.
        hashCode = prime * hashCode + Objects.hashCode(getServiceType());
        hashCode = prime * hashCode + Objects.hashCode(getIdentifier());
        return hashCode;
    }

    @Override
    public GetResourceMetadataRequest clone() {
        return (GetResourceMetadataRequest) super.clone();
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.store.easy.json; import java.io.IOException; import java.io.InputStream; import java.util.List; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.memory.OutOfMemoryException; import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.ops.OperatorContext; import org.apache.drill.exec.physical.impl.OutputMutator; import org.apache.drill.exec.store.AbstractRecordReader; import org.apache.drill.exec.store.dfs.DrillFileSystem; import org.apache.drill.exec.store.easy.json.JsonProcessor.ReadState; import org.apache.drill.exec.store.easy.json.reader.CountingJsonReader; import org.apache.drill.exec.vector.BaseValueVector; import org.apache.drill.exec.vector.complex.fn.JsonReader; import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter; import org.apache.hadoop.fs.Path; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Preconditions; import 
com.google.common.collect.ImmutableList; public class JSONRecordReader extends AbstractRecordReader { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JSONRecordReader.class); private VectorContainerWriter writer; // Data we're consuming private Path hadoopPath; private JsonNode embeddedContent; private InputStream stream; private final DrillFileSystem fileSystem; private JsonProcessor jsonReader; private int recordCount; private long runningRecordCount = 0; private final FragmentContext fragmentContext; private OperatorContext operatorContext; private final boolean enableAllTextMode; private final boolean readNumbersAsDouble; /** * Create a JSON Record Reader that uses a file based input stream. * @param fragmentContext * @param inputPath * @param fileSystem * @param columns * @throws OutOfMemoryException */ public JSONRecordReader(final FragmentContext fragmentContext, final String inputPath, final DrillFileSystem fileSystem, final List<SchemaPath> columns) throws OutOfMemoryException { this(fragmentContext, inputPath, null, fileSystem, columns); } /** * Create a new JSON Record Reader that uses a in memory materialized JSON stream. * @param fragmentContext * @param embeddedContent * @param fileSystem * @param columns * @throws OutOfMemoryException */ public JSONRecordReader(final FragmentContext fragmentContext, final JsonNode embeddedContent, final DrillFileSystem fileSystem, final List<SchemaPath> columns) throws OutOfMemoryException { this(fragmentContext, null, embeddedContent, fileSystem, columns); } private JSONRecordReader(final FragmentContext fragmentContext, final String inputPath, final JsonNode embeddedContent, final DrillFileSystem fileSystem, final List<SchemaPath> columns) throws OutOfMemoryException { Preconditions.checkArgument( (inputPath == null && embeddedContent != null) || (inputPath != null && embeddedContent == null), "One of inputPath or embeddedContent must be set but not both." 
); if(inputPath != null){ this.hadoopPath = new Path(inputPath); }else{ this.embeddedContent = embeddedContent; } this.fileSystem = fileSystem; this.fragmentContext = fragmentContext; // only enable all text mode if we aren't using embedded content mode. this.enableAllTextMode = embeddedContent == null && fragmentContext.getOptions().getOption(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR); this.readNumbersAsDouble = fragmentContext.getOptions().getOption(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE).bool_val; setColumns(columns); } @Override public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException { this.operatorContext = context; try{ if (hadoopPath != null) { this.stream = fileSystem.openPossiblyCompressedStream(hadoopPath); } this.writer = new VectorContainerWriter(output); if (isSkipQuery()) { this.jsonReader = new CountingJsonReader(fragmentContext.getManagedBuffer()); } else { this.jsonReader = new JsonReader(fragmentContext.getManagedBuffer(), ImmutableList.copyOf(getColumns()), enableAllTextMode, true, readNumbersAsDouble); } setupParser(); }catch(final Exception e){ handleAndRaise("Failure reading JSON file", e); } } private void setupParser() throws IOException{ if(hadoopPath != null){ jsonReader.setSource(stream); }else{ jsonReader.setSource(embeddedContent); } } protected void handleAndRaise(String suffix, Exception e) throws UserException { String message = e.getMessage(); int columnNr = -1; if (e instanceof JsonParseException) { final JsonParseException ex = (JsonParseException) e; message = ex.getOriginalMessage(); columnNr = ex.getLocation().getColumnNr(); } UserException.Builder exceptionBuilder = UserException.dataReadError(e) .message("%s - %s", suffix, message); if (columnNr > 0) { exceptionBuilder.pushContext("Column ", columnNr); } exceptionBuilder.pushContext("Record ", currentRecordNumberInFile()) .pushContext("File ", hadoopPath.toUri().getPath()); throw exceptionBuilder.build(logger); } 
private long currentRecordNumberInFile() { return runningRecordCount + recordCount + 1; } @Override public int next() { writer.allocate(); writer.reset(); recordCount = 0; ReadState write = null; // Stopwatch p = new Stopwatch().start(); try{ outside: while(recordCount < BaseValueVector.INITIAL_VALUE_ALLOCATION){ writer.setPosition(recordCount); write = jsonReader.write(writer); if(write == ReadState.WRITE_SUCCEED){ // logger.debug("Wrote record."); recordCount++; }else{ // logger.debug("Exiting."); break outside; } } jsonReader.ensureAtLeastOneField(writer); writer.setValueCount(recordCount); // p.stop(); // System.out.println(String.format("Wrote %d records in %dms.", recordCount, p.elapsed(TimeUnit.MILLISECONDS))); updateRunningCount(); return recordCount; } catch (final Exception e) { handleAndRaise("Error parsing JSON", e); } // this is never reached return 0; } private void updateRunningCount() { runningRecordCount += recordCount; } @Override public void cleanup() { try { if(stream != null){ stream.close(); } } catch (final IOException e) { logger.warn("Failure while closing stream.", e); } } }
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.server.http; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import com.metamx.emitter.EmittingLogger; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; import io.druid.server.coordination.BatchDataSegmentAnnouncer; import io.druid.server.coordination.SegmentChangeRequestHistory; import io.druid.server.coordination.SegmentChangeRequestsSnapshot; import io.druid.server.http.security.StateResourceFilter; import io.druid.server.security.AuthConfig; import javax.annotation.Nullable; import javax.servlet.AsyncContext; import javax.servlet.AsyncEvent; import javax.servlet.AsyncListener; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import 
javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.io.IOException;

/**
 * Long-poll endpoint that lets other nodes discover and keep watching the set of
 * segments served by this node. See {@link #getSegments} for the protocol.
 */
@Path("/druid-internal/v1/segments/")
@ResourceFilters(StateResourceFilter.class)
public class SegmentListerResource
{
  protected static final EmittingLogger log = new EmittingLogger(SegmentListerResource.class);

  protected final ObjectMapper jsonMapper;
  protected final ObjectMapper smileMapper;
  protected final AuthConfig authConfig;
  // Null on nodes that do not announce segments; requests are then answered with 404.
  private final BatchDataSegmentAnnouncer announcer;

  @Inject
  public SegmentListerResource(
      @Json ObjectMapper jsonMapper,
      @Smile ObjectMapper smileMapper,
      AuthConfig authConfig,
      @Nullable BatchDataSegmentAnnouncer announcer
  )
  {
    this.jsonMapper = jsonMapper;
    this.smileMapper = smileMapper;
    this.authConfig = authConfig;
    this.announcer = announcer;
  }

  /**
   * This endpoint is used by HttpServerInventoryView to keep an up-to-date list of segments served by
   * historical/realtime nodes.
   *
   * This endpoint lists segments served by this server and can also incrementally provide the segments added/dropped
   * since last response.
   *
   * Here is how, this is used.
   *
   * (1) Client sends first request /druid/internal/v1/segments?counter=-1&timeout=[timeout]
   * Server responds with list of segments currently served and a [counter,hash] pair.
   *
   * (2) Client sends subsequent requests /druid/internal/v1/segments?counter=[counter]&hash=[hash]&timeout=[timeout]
   * Where [counter,hash] values are used from the last response. Server responds with list of segment updates
   * since given counter.
   *
   * This endpoint makes the client wait till either there is some segment update or given timeout elapses.
   *
   * So, clients keep on sending next request immediately after receiving the response in order to keep the list
   * of segments served by this server up-to-date.
   *
   * @param counter counter received in last response.
   * @param hash hash received in last response.
   * @param timeout after which response is sent even if there are no new segment updates.
   * @param req HTTP request; switched to async mode so the server can hold it open until an
   *            update arrives or the timeout elapses.
   * @throws IOException if writing an error response fails.
   */
  @GET
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  public void getSegments(
      @QueryParam("counter") long counter,
      @QueryParam("hash") long hash,
      @QueryParam("timeout") long timeout,
      @Context final HttpServletRequest req
  ) throws IOException
  {
    if (announcer == null) {
      // This node doesn't announce segments (e.g. the feature is not wired up here).
      sendErrorResponse(req, HttpServletResponse.SC_NOT_FOUND, "announcer is not available.");
      return;
    }

    if (timeout <= 0) {
      sendErrorResponse(req, HttpServletResponse.SC_BAD_REQUEST, "timeout must be positive.");
      return;
    }

    // Choose the response ObjectMapper (JSON vs Smile) from the client's Accept header.
    final ResponseContext context = createContext(req.getHeader("Accept"));

    // Future that completes when there are segment changes after the client's (counter, hash) position.
    final ListenableFuture<SegmentChangeRequestsSnapshot> future = announcer.getSegmentChangesSince(
        new SegmentChangeRequestHistory.Counter(
            counter,
            hash
        )
    );

    // Detach from the request thread: the response is produced by the future's callback below.
    final AsyncContext asyncContext = req.startAsync();

    asyncContext.addListener(
        new AsyncListener()
        {
          @Override
          public void onComplete(AsyncEvent event) throws IOException
          {
          }

          @Override
          public void onTimeout(AsyncEvent event) throws IOException
          {
            // HTTP 204 NO_CONTENT is sent to the client.
            // Cancel the pending change-watch so it doesn't fire into a completed context.
            future.cancel(true);
            event.getAsyncContext().complete();
          }

          @Override
          public void onError(AsyncEvent event) throws IOException
          {
          }

          @Override
          public void onStartAsync(AsyncEvent event) throws IOException
          {
          }
        }
    );

    Futures.addCallback(
        future,
        new FutureCallback<SegmentChangeRequestsSnapshot>()
        {
          @Override
          public void onSuccess(SegmentChangeRequestsSnapshot result)
          {
            try {
              HttpServletResponse response = (HttpServletResponse) asyncContext.getResponse();
              response.setStatus(HttpServletResponse.SC_OK);
              context.inputMapper.writeValue(asyncContext.getResponse().getOutputStream(), result);
              asyncContext.complete();
            }
            catch (Exception ex) {
              // Likely raced with the async timeout/completion; nothing more to do.
              log.debug(ex, "Request timed out or closed already.");
            }
          }

          @Override
          public void onFailure(Throwable th)
          {
            try {
              HttpServletResponse response = (HttpServletResponse) asyncContext.getResponse();
              // IllegalArgumentException signals a bad (counter, hash) from the client.
              if (th instanceof IllegalArgumentException) {
                response.sendError(HttpServletResponse.SC_BAD_REQUEST, th.getMessage());
              } else {
                response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, th.getMessage());
              }
              asyncContext.complete();
            }
            catch (Exception ex) {
              log.debug(ex, "Request timed out or closed already.");
            }
          }
        }
    );

    // Arms the long-poll timeout; onTimeout above fires when it elapses.
    asyncContext.setTimeout(timeout);
  }

  // Sends an immediate error without leaving the request in async limbo.
  private void sendErrorResponse(HttpServletRequest req, int code, String error) throws IOException
  {
    AsyncContext asyncContext = req.startAsync();
    HttpServletResponse response = (HttpServletResponse) asyncContext.getResponse();
    response.sendError(code, error);
    asyncContext.complete();
  }

  // Picks the Smile mapper when the client asked for Smile, JSON otherwise.
  private ResponseContext createContext(String requestType)
  {
    boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(requestType);
    return new ResponseContext(isSmile ? smileMapper : jsonMapper);
  }

  // Small holder pairing a request with the ObjectMapper used to serialize its response.
  private static class ResponseContext
  {
    private final ObjectMapper inputMapper;

    ResponseContext(ObjectMapper inputMapper)
    {
      this.inputMapper = inputMapper;
    }
  }
}
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Yahoo!, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package hudson.model;

import com.gargoylesoftware.htmlunit.Page;
import com.gargoylesoftware.htmlunit.WebResponse;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import net.sf.json.JSONObject;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.TestExtension;
import org.kohsuke.stapler.export.ExportedBean;
import org.xml.sax.SAXException;

import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.net.HttpURLConnection;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Functional tests of the remote API (api/xml, api/json, api/python) over a live JenkinsRule instance.
 *
 * @author Kohsuke Kawaguchi
 */
public class ApiTest {
    @Rule
    public JenkinsRule j = new JenkinsRule();

    // The xpath query parameter must be accepted and return XML.
    @Test
    @Issue("JENKINS-2828")
    public void xpath() throws Exception {
        j.createWebClient().goTo("api/xml?xpath=/*[1]", "application/xml");
    }

    // api/json honors tree filtering, and jsonp=wrap wraps the payload in a JS callback.
    @Issue("JENKINS-27607")
    @Test
    public void json() throws Exception {
        FreeStyleProject p = j.createFreeStyleProject("p");
        JenkinsRule.WebClient wc = j.createWebClient();
        WebResponse response = wc.goTo(p.getUrl() + "api/json?tree=name", "application/json").getWebResponse();
        JSONObject json = JSONObject.fromObject(response.getContentAsString());
        assertEquals("p", json.get("name"));
        String s = wc.goTo(p.getUrl() + "api/json?tree=name&jsonp=wrap", "application/javascript").getWebResponse().getContentAsString();
        assertTrue(s.startsWith("wrap("));
        assertEquals(')', s.charAt(s.length()-1));
        // Strip the "wrap(...)" envelope and verify the inner JSON is unchanged.
        json = JSONObject.fromObject(s.substring("wrap(".length(), s.length() - 1));
        assertEquals("p", json.get("name"));
    }

    // With a wrapper element, an xpath matching nothing yields an empty wrapper, not an error.
    @Test
    @Issue("JENKINS-3267")
    public void wrappedZeroItems() throws Exception {
        Page page = j.createWebClient().goTo("api/xml?wrapper=root&xpath=/hudson/nonexistent", "application/xml");
        assertEquals("<root/>", page.getWebResponse().getContentAsString());
    }

    /**
     * Test that calling the XML API with the XPath {@code document} function fails.
     *
     * @throws Exception if so
     */
    @Issue("SECURITY-165")
    @Test
    public void xPathDocumentFunction() throws Exception {
        File f = new File(j.jenkins.getRootDir(), "queue.xml");
        JenkinsRule.WebClient wc = j.createWebClient()
                .withThrowExceptionOnFailingStatusCode(false);
        // could expect application/xml but as an error occurred it's a text/html that is returned
        Page page = wc.goTo("api/xml?xpath=document(\"" + f.getAbsolutePath() + "\")", null);
        assertEquals(HttpURLConnection.HTTP_INTERNAL_ERROR, page.getWebResponse().getStatusCode());
        assertThat(page.getWebResponse().getContentAsString(), containsString("Illegal function: document"));
    }

    // A single xpath match is emitted inside the requested wrapper element.
    @Test
    @Issue("JENKINS-3267")
    public void wrappedOneItem() throws Exception {
        Page page = j.createWebClient().goTo("api/xml?wrapper=root&xpath=/hudson/view/name", "application/xml");
        assertEquals("<root><name>"+ AllView.DEFAULT_VIEW_NAME+"</name></root>", page.getWebResponse().getContentAsString());
    }

    // Multiple xpath matches are all emitted inside the wrapper element.
    @Test
    public void wrappedMultipleItems() throws Exception {
        j.createFreeStyleProject();
        j.createFreeStyleProject();
        Page page = j.createWebClient().goTo("api/xml?wrapper=root&xpath=/hudson/job/name", "application/xml");
        assertEquals("<root><name>test0</name><name>test1</name></root>", page.getWebResponse().getContentAsString());
    }

    // Without a wrapper, zero matches is a 404.
    @Test
    public void unwrappedZeroItems() throws Exception {
        j.createWebClient().assertFails("api/xml?xpath=/hudson/nonexistent", HttpURLConnection.HTTP_NOT_FOUND);
    }

    // Without a wrapper, a single match is returned bare.
    @Test
    public void unwrappedOneItem() throws Exception {
        Page page = j.createWebClient().goTo("api/xml?xpath=/hudson/view/name", "application/xml");
        assertEquals("<name>"+ AllView.DEFAULT_VIEW_NAME+"</name>", page.getWebResponse().getContentAsString());
    }

    // Long text content survives the round trip through api/xml intact.
    @Test
    public void unwrappedLongString() throws Exception {
        j.jenkins.setSystemMessage("Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.");
        Page page = j.createWebClient().goTo("api/xml?xpath=/hudson/description", "application/xml");
        assertEquals(
            "<description>"+j.jenkins.getSystemMessage()+"</description>",
            page.getWebResponse().getContentAsString());
    }

    // Without a wrapper, multiple matches cannot be serialized and is a 500.
    @Test
    public void unwrappedMultipleItems() throws Exception {
        j.createFreeStyleProject();
        j.createFreeStyleProject();
        j.createWebClient().assertFails("api/xml?xpath=/hudson/job/name", HttpURLConnection.HTTP_INTERNAL_ERROR);
    }

    // Build parameters are exported through the tree/xpath query combination.
    @Issue("JENKINS-22566")
    @Test
    public void parameter() throws Exception {
        FreeStyleProject p = j.createFreeStyleProject("p");
        p.addProperty(new ParametersDefinitionProperty(new StringParameterDefinition("foo", "")));
        j.assertBuildStatusSuccess(p.scheduleBuild2(0, new ParametersAction(new StringParameterValue("foo", "bar"))));
        Page page = j.createWebClient().goTo(p.getUrl() + "api/xml?tree=builds[actions[parameters[name,value]]]&xpath=freeStyleProject/build/action/parameter", "application/xml");
        assertEquals("<parameter _class=\"hudson.model.StringParameterValue\"><name>foo</name><value>bar</value></parameter>", page.getWebResponse().getContentAsString());
    }

    // Control characters in parameter values should be escaped in the XML output.
    @Issue("JENKINS-22566")
    @Ignore("TODO currently fails with: org.dom4j.DocumentException: Error on line 1 of document : An invalid XML character (Unicode: 0x1b) was found in the element content of the document")
    @Test
    public void escapedParameter() throws Exception {
        FreeStyleProject p = j.createFreeStyleProject("p");
        p.addProperty(new ParametersDefinitionProperty(new StringParameterDefinition("foo", "")));
        j.assertBuildStatusSuccess(p.scheduleBuild2(0, new ParametersAction(new StringParameterValue("foo", "bar\u001B"))));
        Page page = j.createWebClient().goTo(p.getUrl() + "api/xml?tree=builds[actions[parameters[name,value]]]&xpath=freeStyleProject/build/action/parameter", "application/xml");
        assertEquals("<parameter _class=\"hudson.model.StringParameterValue\"><name>foo</name><value>bar&#x1b;</value></parameter>", page.getWebResponse().getContentAsString());
    }

    // API responses for a project must carry X-Frame-Options: deny (clickjacking hardening).
    @Test
    @Issue("SECURITY-1704")
    public void project_notExposedToIFrame() throws Exception {
        FreeStyleProject p = j.createFreeStyleProject("p");
        ensureXmlIsNotExposedToIFrame(p.getUrl());
        ensureJsonIsNotExposedToIFrame(p.getUrl());
        ensurePythonIsNotExposedToIFrame(p.getUrl());
    }

    // Same framing protection for the custom RootAction registered below.
    @Test
    @Issue("SECURITY-1704")
    public void custom_notExposedToIFrame() throws Exception {
        ensureXmlIsNotExposedToIFrame("custom/");
        ensureJsonIsNotExposedToIFrame("custom/");
        ensurePythonIsNotExposedToIFrame("custom/");
    }

    /**
     * Test the wrapper parameter for the api/xml urls to avoid XSS.
     * @throws Exception See {@link #checkWrapperParam(String, Integer, String)}
     */
    @Issue("SECURITY-1129")
    @Test
    public void wrapperXss() throws Exception {
        String wrapper = "html%20xmlns=\"http://www.w3.org/1999/xhtml\"><script>alert(%27XSS%20Detected%27)</script></html><!--";
        checkWrapperParam(wrapper, HttpServletResponse.SC_BAD_REQUEST, Messages.Api_WrapperParamInvalid());
    }

    /**
     * Test the wrapper parameter for the api/xml urls with a bad name.
     * @throws Exception See {@link #checkWrapperParam(String, Integer, String)}
     */
    @Issue("SECURITY-1129")
    @Test
    public void wrapperBadName() throws Exception {
        String wrapper = "-badname";
        checkWrapperParam(wrapper, HttpServletResponse.SC_BAD_REQUEST, Messages.Api_WrapperParamInvalid());
    }

    /**
     * Test the wrapper parameter with a good name, to ensure the security fix doesn't break anything.
     * @throws Exception See {@link #checkWrapperParam(String, Integer, String)}
     */
    @Issue("SECURITY-1129")
    @Test
    public void wrapperGoodName() throws Exception {
        String wrapper = "__GoodName-..-OK";
        checkWrapperParam(wrapper, HttpServletResponse.SC_OK, null);
    }

    /**
     * Check the response for a XML api with the wrapper param specified. At least the statusCode or the responseMessage
     * should be indicated.
     * @param wrapper the wrapper param passed in the url.
     * @param statusCode the status code expected in the response. If it's null, it's not checked.
     * @param responseMessage the message expected in the response. If it's null, it's not checked.
     * @throws IOException See {@link org.jvnet.hudson.test.JenkinsRule.WebClient#goTo(String, String)}
     * @throws SAXException See {@link org.jvnet.hudson.test.JenkinsRule.WebClient#goTo(String, String)}
     */
    private void checkWrapperParam(String wrapper, Integer statusCode, String responseMessage) throws IOException, SAXException {
        if (statusCode == null && responseMessage == null) {
            fail("You should check at least one, the statusCode or the responseMessage when testing the wrapper param");
        }

        JenkinsRule.WebClient wc = j.createWebClient();
        // Non-2xx responses are part of what we assert on, so don't throw on them.
        wc.getOptions().setThrowExceptionOnFailingStatusCode(false);
        WebResponse response = wc.goTo(String.format("whoAmI/api/xml?xpath=*&wrapper=%s", wrapper), null).getWebResponse();
        if (response != null) {
            if (statusCode != null) {
                assertEquals(statusCode.intValue(), response.getStatusCode());
            }
            if (responseMessage != null) {
                assertEquals(responseMessage, response.getContentAsString());
            }
        } else {
            fail("The response shouldn't be null");
        }
    }

    // Asserts the XML flavor of the API sets X-Frame-Options: deny.
    private void ensureXmlIsNotExposedToIFrame(String itemUrl) throws Exception {
        WebResponse response = j.createWebClient().goTo(itemUrl + "api/xml", "application/xml").getWebResponse();
        assertThat(response.getResponseHeaderValue("X-Frame-Options"), equalTo("deny"));
    }

    // Asserts the JSON flavor of the API sets X-Frame-Options: deny.
    private void ensureJsonIsNotExposedToIFrame(String itemUrl) throws Exception {
        WebResponse response = j.createWebClient().goTo(itemUrl + "api/json", "application/json").getWebResponse();
        assertThat(response.getResponseHeaderValue("X-Frame-Options"), equalTo("deny"));
    }

    // Asserts the Python flavor of the API sets X-Frame-Options: deny.
    private void ensurePythonIsNotExposedToIFrame(String itemUrl) throws Exception {
        WebResponse response = j.createWebClient().goTo(itemUrl + "api/python", "text/x-python").getWebResponse();
        assertThat(response.getResponseHeaderValue("X-Frame-Options"), equalTo("deny"));
    }

    // Root action registered only for custom_notExposedToIFrame; exposes a tiny Api payload at /custom.
    @TestExtension("custom_notExposedToIFrame")
    public static class CustomObject implements RootAction {
        @Override
        public @CheckForNull String getIconFileName() {
            return null;
        }

        @Override
        public @CheckForNull String getDisplayName() {
            return null;
        }

        @Override
        public @CheckForNull String getUrlName() {
            return "custom";
        }

        public Api getApi() {
            return new Api(new CustomData("s3cr3t"));
        }

        @ExportedBean
        static class CustomData {
            // Not @Exported: must never appear in API output.
            private String secret;

            CustomData(String secret){
                this.secret = secret;
            }
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.fetch; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ObjectMapper; import 
org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder; /** * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified * after reducing all of the matches returned by the query phase */ public class FetchPhase implements SearchPhase { private final FetchSubPhase[] fetchSubPhases; public FetchPhase(List<FetchSubPhase> fetchSubPhases) { this.fetchSubPhases = fetchSubPhases.toArray(new FetchSubPhase[fetchSubPhases.size() + 1]); this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsFetchSubPhase(this); } @Override public void preProcess(SearchContext context) { } @Override public void execute(SearchContext context) { final FieldsVisitor fieldsVisitor; Set<String> fieldNames = null; List<String> fieldNamePatterns = null; StoredFieldsContext storedFieldsContext = context.storedFieldsContext(); if (storedFieldsContext == null) { // no fields specified, default to return source if no explicit indication if (!context.hasScriptFields() && !context.hasFetchSourceContext()) { context.fetchSourceContext(new FetchSourceContext(true)); } fieldsVisitor = new FieldsVisitor(context.sourceRequested()); } else 
if (storedFieldsContext.fetchFields() == false) { // disable stored fields entirely fieldsVisitor = null; } else { for (String fieldName : context.storedFieldsContext().fieldNames()) { if (fieldName.equals(SourceFieldMapper.NAME)) { FetchSourceContext fetchSourceContext = context.hasFetchSourceContext() ? context.fetchSourceContext() : FetchSourceContext.FETCH_SOURCE; context.fetchSourceContext(new FetchSourceContext(true, fetchSourceContext.includes(), fetchSourceContext.excludes())); continue; } if (Regex.isSimpleMatchPattern(fieldName)) { if (fieldNamePatterns == null) { fieldNamePatterns = new ArrayList<>(); } fieldNamePatterns.add(fieldName); } else { MappedFieldType fieldType = context.smartNameFieldType(fieldName); if (fieldType == null) { // Only fail if we know it is a object field, missing paths / fields shouldn't fail. if (context.getObjectMapper(fieldName) != null) { throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field"); } } if (fieldNames == null) { fieldNames = new HashSet<>(); } fieldNames.add(fieldName); } } boolean loadSource = context.sourceRequested(); if (fieldNames == null && fieldNamePatterns == null) { // empty list specified, default to disable _source if no explicit indication fieldsVisitor = new FieldsVisitor(loadSource); } else { fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames, fieldNamePatterns == null ? 
Collections.emptyList() : fieldNamePatterns, loadSource); } } try { SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); for (int index = 0; index < context.docIdsToLoadSize(); index++) { if (context.isCancelled()) { throw new TaskCancelledException("cancelled"); } int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index]; int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves()); LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); int subDocId = docId - subReaderContext.docBase; final SearchHit searchHit; int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId); if (rootDocId != -1) { searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, fieldNames, fieldNamePatterns, subReaderContext); } else { searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, subReaderContext); } hits[index] = searchHit; hitContext.reset(searchHit, subReaderContext, subDocId, context.searcher()); for (FetchSubPhase fetchSubPhase : fetchSubPhases) { fetchSubPhase.hitExecute(context, hitContext); } } for (FetchSubPhase fetchSubPhase : fetchSubPhases) { fetchSubPhase.hitsExecute(context, hits); } context.fetchResult().hits(new SearchHits(hits, context.queryResult().getTotalHits(), context.queryResult().getMaxScore())); } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } } private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException { if (context.mapperService().hasNested()) { BitSet bits = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()).getBitSet(subReaderContext); if (!bits.get(subDocId)) { return bits.nextSetBit(subDocId); } } return -1; } private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, 
LeafReaderContext subReaderContext) { if (fieldsVisitor == null) { return new SearchHit(docId); } loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); fieldsVisitor.postProcess(context.mapperService()); Map<String, DocumentField> searchFields = null; if (!fieldsVisitor.fields().isEmpty()) { searchFields = new HashMap<>(fieldsVisitor.fields().size()); for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) { searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue())); } } DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { typeText = new Text(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); } SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields); // Set _source if requested. SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, subDocId); if (fieldsVisitor.source() != null) { sourceLookup.setSource(fieldsVisitor.source()); } return searchHit; } private SearchHit createNestedSearchHit(SearchContext context, int nestedTopDocId, int nestedSubDocId, int rootSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) throws IOException { // Also if highlighting is requested on nested documents we need to fetch the _source from the root document, // otherwise highlighting will attempt to fetch the _source from the nested doc, which will fail, // because the entire _source is only stored with the root document. 
// --- Continuation of a nested-hit construction method whose signature starts before this
// --- chunk (not visible here). It resolves the root document's uid/_source, isolates the
// --- nested object's slice of the _source, and builds the SearchHit for the nested doc.
final Uid uid;
final BytesReference source;
// _source is only needed when the request asked for it or highlighting is active.
final boolean needSource = context.sourceRequested() || context.highlight() != null;
if (needSource || (context instanceof InnerHitsContext.InnerHitSubContext == false)) {
    // Load uid (and optionally _source) from the root document's stored fields.
    FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource);
    loadStoredFields(context, subReaderContext, rootFieldsVisitor, rootSubDocId);
    rootFieldsVisitor.postProcess(context.mapperService());
    uid = rootFieldsVisitor.uid();
    source = rootFieldsVisitor.source();
} else {
    // In case of nested inner hits we already know the uid, so no need to fetch it from stored fields again!
    uid = ((InnerHitsContext.InnerHitSubContext) context).getUid();
    source = null;
}
Map<String, DocumentField> searchFields =
    getSearchFields(context, nestedSubDocId, fieldNames, fieldNamePatterns, subReaderContext);
DocumentMapper documentMapper = context.mapperService().documentMapper(uid.type());
// Point the shared source lookup at the nested sub-document for this segment.
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId);
ObjectMapper nestedObjectMapper =
    documentMapper.findNestedObjectMapper(nestedSubDocId, context, subReaderContext);
assert nestedObjectMapper != null;
SearchHit.NestedIdentity nestedIdentity =
    getInternalNestedIdentity(context, nestedSubDocId, subReaderContext, context.mapperService(), nestedObjectMapper);
if (source != null) {
    Tuple<XContentType, Map<String, Object>> tuple = XContentHelper.convertToMap(source, true);
    Map<String, Object> sourceAsMap = tuple.v2();

    // Isolate the nested json array object that matches with nested hit and wrap it back into the same json
    // structure with the nested json array object being the actual content. The latter is important, so that
    // features like source filtering and highlighting work consistent regardless of whether the field points
    // to a json object array for consistency reasons on how we refer to fields
    Map<String, Object> nestedSourceAsMap = new HashMap<>();
    Map<String, Object> current = nestedSourceAsMap;
    // Walk the nested identity chain from the outermost nested level to the innermost,
    // descending one _source layer per iteration.
    for (SearchHit.NestedIdentity nested = nestedIdentity; nested != null; nested = nested.getChild()) {
        String nestedPath = nested.getField().string();
        current.put(nestedPath, new HashMap<>());
        Object extractedValue = XContentMapValues.extractValue(nestedPath, sourceAsMap);
        List<?> nestedParsedSource;
        if (extractedValue instanceof List) {
            // nested field has an array value in the _source
            nestedParsedSource = (List<?>) extractedValue;
        } else if (extractedValue instanceof Map) {
            // nested field has an object value in the _source. This just means the nested field has just one inner object,
            // which is valid, but uncommon.
            nestedParsedSource = Collections.singletonList(extractedValue);
        } else {
            throw new IllegalStateException("extracted source isn't an object or an array");
        }
        if ((nestedParsedSource.get(0) instanceof Map) == false &&
            nestedObjectMapper.parentObjectMapperAreNested(context.mapperService()) == false) {
            // When one of the parent objects are not nested then XContentMapValues.extractValue(...) extracts the values
            // from two or more layers resulting in a list of list being returned. This is because nestedPath
            // encapsulates two or more object layers in the _source.
            //
            // This is why only the first element of nestedParsedSource needs to be checked.
            throw new IllegalArgumentException("Cannot execute inner hits. One or more parent object fields of nested field [" +
                nestedObjectMapper.name() + "] are not nested. All parent fields need to be nested fields too");
        }
        // Descend into the array element addressed by this nesting level's offset.
        sourceAsMap = (Map<String, Object>) nestedParsedSource.get(nested.getOffset());
        if (nested.getChild() == null) {
            // Innermost level: attach the actual nested object content.
            current.put(nestedPath, sourceAsMap);
        } else {
            // Intermediate level: keep wrapping so the re-built _source mirrors the
            // original json structure down to the nested object.
            Map<String, Object> next = new HashMap<>();
            current.put(nestedPath, next);
            current = next;
        }
    }
    context.lookup().source().setSource(nestedSourceAsMap);
    XContentType contentType = tuple.v1();
    // Re-serialize the isolated nested source in the same content type as the original.
    BytesReference nestedSource = contentBuilder(contentType).map(nestedSourceAsMap).bytes();
    context.lookup().source().setSource(nestedSource);
    context.lookup().source().setSourceContentType(contentType);
}
return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, searchFields);
}

/**
 * Loads the stored fields requested by the search for a single nested sub-document.
 *
 * @return a map of field name to {@link DocumentField}, or {@code null} when no stored
 *     fields were requested or none were found for this document
 */
private Map<String, DocumentField> getSearchFields(SearchContext context, int nestedSubDocId,
    Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) {
    Map<String, DocumentField> searchFields = null;
    if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) {
        FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(
            fieldNames == null ? Collections.emptySet() : fieldNames,
            fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, false);
        // NOTE(review): nestedFieldsVisitor was just assigned via `new`, so this null
        // check is always true and could be removed.
        if (nestedFieldsVisitor != null) {
            loadStoredFields(context, subReaderContext, nestedFieldsVisitor, nestedSubDocId);
            nestedFieldsVisitor.postProcess(context.mapperService());
            if (!nestedFieldsVisitor.fields().isEmpty()) {
                searchFields = new HashMap<>(nestedFieldsVisitor.fields().size());
                for (Map.Entry<String, List<Object>> entry : nestedFieldsVisitor.fields().entrySet()) {
                    searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
                }
            }
        }
    }
    return searchFields;
}

/**
 * Computes the {@link SearchHit.NestedIdentity} (field path plus per-level offsets) for a
 * nested sub-document by walking up the object-mapper hierarchy. At each nested level the
 * offset is the number of sibling nested docs between the enclosing parent doc and this one.
 */
private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId,
    LeafReaderContext subReaderContext, MapperService mapperService, ObjectMapper nestedObjectMapper)
    throws IOException {
    int currentParent = nestedSubDocId;
    ObjectMapper nestedParentObjectMapper;
    ObjectMapper current = nestedObjectMapper;
    String originalName = nestedObjectMapper.name();
    SearchHit.NestedIdentity nestedIdentity = null;
    do {
        Query parentFilter;
        nestedParentObjectMapper = current.getParentObjectMapper(mapperService);
        if (nestedParentObjectMapper != null) {
            if (nestedParentObjectMapper.nested().isNested() == false) {
                // Non-nested object layers contribute no identity level; keep climbing.
                current = nestedParentObjectMapper;
                continue;
            }
            parentFilter = nestedParentObjectMapper.nestedTypeFilter();
        } else {
            // Topmost level: parents are the regular (non-nested) root documents.
            parentFilter = Queries.newNonNestedFilter();
        }
        Query childFilter = nestedObjectMapper.nestedTypeFilter();
        if (childFilter == null) {
            current = nestedParentObjectMapper;
            continue;
        }
        final Weight childWeight = context.searcher().createNormalizedWeight(childFilter, false);
        Scorer childScorer = childWeight.scorer(subReaderContext);
        if (childScorer == null) {
            // No matching child docs in this segment for the filter; skip this level.
            current = nestedParentObjectMapper;
            continue;
        }
        DocIdSetIterator childIter = childScorer.iterator();
        BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter)
            .getBitSet(subReaderContext);
        int offset = 0;
        // Count child docs strictly between the current doc and its next parent bit:
        // that count is the array offset of this nested object within its parent.
        int nextParent = parentBits.nextSetBit(currentParent);
        for (int docId = childIter.advance(currentParent + 1);
             docId < nextParent && docId != DocIdSetIterator.NO_MORE_DOCS;
             docId = childIter.nextDoc()) {
            offset++;
        }
        currentParent = nextParent;
        current = nestedObjectMapper = nestedParentObjectMapper;
        // Strip the parent prefix (plus the '.') to get this level's relative field name.
        int currentPrefix = current == null ? 0 : current.name().length() + 1;
        nestedIdentity = new SearchHit.NestedIdentity(originalName.substring(currentPrefix), offset, nestedIdentity);
        if (current != null) {
            originalName = current.name();
        }
    } while (current != null);
    return nestedIdentity;
}

/**
 * Reads the stored fields of {@code docId} into {@code fieldVisitor}, resetting the visitor
 * first so it can be reused across documents.
 *
 * @throws FetchPhaseExecutionException wrapping any {@link IOException} from the reader
 */
private void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext,
    FieldsVisitor fieldVisitor, int docId) {
    fieldVisitor.reset();
    try {
        readerContext.reader().document(docId, fieldVisitor);
    } catch (IOException e) {
        throw new FetchPhaseExecutionException(searchContext, "Failed to fetch doc id [" + docId + "]", e);
    }
}
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.uicd.backend.recorder.services;

import com.google.uicd.backend.core.config.UicdConfig;
import com.google.uicd.backend.core.constants.ActionType;
import com.google.uicd.backend.core.constants.LocalStorageConstant;
import com.google.uicd.backend.core.db.ActionEntity;
import com.google.uicd.backend.core.exceptions.UicdActionException;
import com.google.uicd.backend.core.exceptions.UicdException;
import com.google.uicd.backend.core.recorder.utils.ActionEntityFileUtil;
import com.google.uicd.backend.core.uicdactions.BaseAction;
import com.google.uicd.backend.core.uicdactions.ClickAction;
import com.google.uicd.backend.core.uicdactions.CompoundAction;
import com.google.uicd.backend.core.uicdactions.DragAction;
import com.google.uicd.backend.core.uicdactions.LongClickAction;
import com.google.uicd.backend.core.uicdactions.PythonScriptAction;
import com.google.uicd.backend.core.uicdactions.ScreenContentValidationAction;
import com.google.uicd.backend.core.uicdactions.SwipeAction;
import com.google.uicd.backend.core.xmlparser.NodeContext;
import com.google.uicd.backend.recorder.db.DbActionStorageManager;
import com.google.uicd.backend.recorder.db.DbTestCaseTreeStorageManager;
import com.google.uicd.backend.recorder.db.TestCaseTreeEntity;
import com.google.uicd.backend.recorder.utils.TestCaseTreeFileUtil;
import com.google.uicd.backend.recorder.workflowmgr.WorkflowManager;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.ZipParameters;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Service;

/**
 * Implements the import/export test case logic.
 *
 * <p>Test cases live in two stores: a JPA-backed database (trees and actions) and a
 * zip/filesystem export format (per-project folders for test cases, the tree, and python
 * scripts). This service converts between the two, regenerating every UUID on import so that
 * round-tripped projects never clobber another user's entities (see {@link #refurbishEntities}).
 */
@Service
public class TestCasesImportExportManager {
  @Autowired private ApplicationContext applicationContext;
  @Autowired private DbTestCaseTreeStorageManager testCaseTreeStorageManager;
  @Autowired private DbActionStorageManager actionStorageManager;
  @Autowired private WorkflowManager workflowManager;

  /**
   * Extracts {@code zipFile} into the project's temp export folder, rewrites all UUIDs, and
   * imports the contained tree and actions into the project identified by {@code projectId}.
   *
   * @throws UicdException if the zip cannot be extracted, the exported tree is missing, or the
   *     target project has no tree
   */
  public void unzipAndImport(File zipFile, String projectId) throws UicdException {
    Path tmpFolderPath = getZipTmpFolderPath(projectId);
    try {
      FileUtils.forceMkdir(tmpFolderPath.toFile());
      FileUtils.cleanDirectory(tmpFolderPath.toFile());
      new ZipFile(zipFile).extractAll(tmpFolderPath.toString());
    } catch (IOException e) {
      // NOTE(review): the IOException cause is dropped here; consider chaining `e` (or at
      // least its message) into the UicdException so extraction failures are debuggable.
      throw new UicdException("Can not unzip file.");
    }
    List<ActionEntity> actionEntities =
        ActionEntityFileUtil.loadTestCases(
            Paths.get(tmpFolderPath.toString(), LocalStorageConstant.TESTCASES_FOLDERNAME));
    Optional<TestCaseTreeEntity> testCaseTreeEntityOptional =
        TestCaseTreeFileUtil.loadTestTreeFromFolder(
            Paths.get(tmpFolderPath.toString(), LocalStorageConstant.TESTCASES_TREE_FOLDERNAME));
    if (!testCaseTreeEntityOptional.isPresent()) {
      // NOTE(review): this is an import failure, but the message says "export failed".
      throw new UicdException("Can not find test case tree, export failed.");
    }
    TestCaseTreeEntity testCaseTreeEntity = testCaseTreeEntityOptional.get();
    // Regenerate every UUID so the imported entities cannot collide with existing ones.
    refurbishEntities(actionEntities, testCaseTreeEntity, /* ignoreTestTreeId */ false);
    Optional<TestCaseTreeEntity> testCaseTreeEntityImported =
        testCaseTreeStorageManager.getFirstTreeByProjectId(projectId);
    if (!testCaseTreeEntityImported.isPresent()) {
      throw new UicdException("Can not find projectId:" + projectId);
    }
    // Graft the imported tree details onto the target project's existing tree entity.
    testCaseTreeEntityImported.get().setTreeDetails(testCaseTreeEntity.getTreeDetails());
    testCaseTreeStorageManager.save(testCaseTreeEntityImported.get());
    // ActionEntities are managed by jpa, need do converting here, so that we don't need detach the
    // Object
    actionStorageManager.saveActions(
        actionEntities.stream()
            .map(item -> BaseAction.fromJson(item.getDetails()))
            .collect(Collectors.toList()));
    // Need invalid cache to make sure
    actionStorageManager.clearCache();
  }

  /**
   * Returns whether the current user may soft-copy {@code baseAction}: true when the user is
   * the action's creator or appears in its comma-separated share list.
   */
  public boolean hasPermissionToSoftCopy(BaseAction baseAction) {
    String currentUser = UicdConfig.getInstance().getCurrentUser();
    // NOTE(review): if getShareWith() can return null this throws NPE on split(); confirm
    // the field is always non-null, or guard before splitting.
    String shareWith = baseAction.getShareWith();
    List<String> shareWithList = Arrays.asList(shareWith.split(","));
    if (shareWithList.contains(currentUser) || baseAction.getCreatedBy().equals(currentUser)) {
      return true;
    }
    return false;
  }

  /**
   * Deep-copies the action identified by {@code actionId} (including its compound-action
   * children) under freshly generated UUIDs and persists the copies.
   *
   * @return the new UUID assigned to the copy of the requested action
   */
  public String deepImportAction(String actionId) throws UicdActionException {
    List<ActionEntity> originalActionEntities = new ArrayList<>();
    List<BaseAction> pythonActionList = new ArrayList<>();
    fetchActionRecursively(actionId, originalActionEntities, pythonActionList);
    HashMap<String, String> actionIdMapping = new HashMap<>();
    refurbishActionEntities(originalActionEntities, actionIdMapping);
    // ActionEntities are managed by jpa, need do converting here, so that we don't need detach the
    // Object
    actionStorageManager.saveActions(
        originalActionEntities.stream()
            .map(item -> BaseAction.fromJson(item.getDetails()))
            .collect(Collectors.toList()));
    actionStorageManager.clearCache();
    return actionIdMapping.get(actionId);
  }

  /**
   * Exports only the project's top-level compound workflows (no children, no tree, no python
   * scripts) into a zip named {@code zipFileName}, defaulting to {@code projectName + ".zip"}.
   */
  public ZipFile zipAndExportTopLevelCaseOnly(
      String projectId, String projectName, String zipFileName) throws UicdException {
    if (zipFileName.isEmpty()) {
      zipFileName = projectName + ".zip";
    }
    Path zipFileFullPath = getZipFullPath(projectId, zipFileName);
    Path tmpFolderPath = getZipTmpFolderPath(projectId);
    // Clean tmp folder first, make sure it is a clean export
    prepareTmpFolderForZip(zipFileFullPath, tmpFolderPath);
    Optional<TestCaseTreeEntity> testCaseTreeEntityOptional =
        testCaseTreeStorageManager.getFirstTreeByProjectId(projectId);
    if (!testCaseTreeEntityOptional.isPresent()) {
      throw new UicdException("Can not get current tree!");
    }
    List<ActionEntity> topLevelTests = new ArrayList<>();
    final List<String> actionIdList =
        getActionListFromTreeDetails(testCaseTreeEntityOptional.get().getTreeDetails());
    for (String actionId : actionIdList) {
      Optional<String> actionStrOptional = workflowManager.getWorkflow(actionId);
      if (actionStrOptional.isPresent()) {
        BaseAction action = BaseAction.fromJson(actionStrOptional.get());
        // Only top-level compound workflows are included in this export variant.
        if (action.getActionType() == ActionType.COMPOUND_ACTION) {
          CompoundAction compoundAction = (CompoundAction) action;
          if (compoundAction.isTopLevelWorkflow()) {
            topLevelTests.add(new ActionEntity(action, true));
          }
        }
      }
    }
    ActionEntityFileUtil.saveTestCases(topLevelTests, tmpFolderPath);
    return genZipFile(zipFileFullPath, tmpFolderPath);
  }

  /**
   * Zips the contents of {@code tmpFolderPath} (folder itself excluded) into
   * {@code zipFileFullPath}, deleting any pre-existing zip at that path first.
   *
   * @throws UicdException on zip failure or failure to delete an old zip
   */
  private ZipFile genZipFile(Path zipFileFullPath, Path tmpFolderPath) throws UicdException {
    try {
      ZipParameters zipParameters = new ZipParameters();
      // Zip the folder's contents directly, not nested under the tmp folder name.
      zipParameters.setIncludeRootFolder(false);
      if (zipFileFullPath.toFile().exists()) {
        FileUtils.forceDelete(zipFileFullPath.toFile());
      }
      ZipFile zipFile = new ZipFile(zipFileFullPath.toString());
      zipFile.addFolder(new File(tmpFolderPath.toString()), zipParameters);
      return zipFile;
    } catch (ZipException e) {
      // NOTE(review): cause is dropped in both catches; consider chaining `e`.
      throw new UicdException("Can not zip files." + e.getMessage());
    } catch (IOException e) {
      throw new UicdException("Can not delete old zip files." + e.getMessage());
    }
  }

  /**
   * Full project export: writes all reachable actions, any python scripts, and the test-case
   * tree into per-category folders under the project's tmp folder, then zips the result.
   */
  public ZipFile zipAndExport(String projectId, String projectName, String zipFileName)
      throws UicdException {
    if (zipFileName.isEmpty()) {
      zipFileName = projectName + ".zip";
    }
    Path zipFileFullPath = getZipFullPath(projectId, zipFileName);
    Path tmpFolderPath = getZipTmpFolderPath(projectId);
    // Clean tmp folder first, make sure it is a clean export
    prepareTmpFolderForZip(zipFileFullPath, tmpFolderPath);
    Optional<TestCaseTreeEntity> testCaseTreeEntityOptional =
        testCaseTreeStorageManager.getFirstTreeByProjectId(projectId);
    if (!testCaseTreeEntityOptional.isPresent()) {
      throw new UicdException("Can not get current tree!");
    }
    List<ActionEntity> actionEntities = new ArrayList<>();
    final List<String> actionIdList =
        getActionListFromTreeDetails(testCaseTreeEntityOptional.get().getTreeDetails());
    List<BaseAction> pythonActionList = new ArrayList<>();
    for (String actionId : actionIdList) {
      fetchActionRecursively(actionId, actionEntities, pythonActionList);
    }
    // Save actions to folder
    ActionEntityFileUtil.saveTestCases(
        actionEntities,
        Paths.get(tmpFolderPath.toString(), LocalStorageConstant.TESTCASES_FOLDERNAME));
    // Python script actions additionally get their script body written out as a .py file,
    // named with the action name plus the first 6 chars of its UUID to avoid collisions.
    for (BaseAction pyAction : pythonActionList) {
      if (pyAction.getActionType() == ActionType.PYTHON_SCRIPT_ACTION) {
        PythonScriptAction pythonScriptAction = (PythonScriptAction) pyAction;
        ActionEntityFileUtil.saveToFile(
            pythonScriptAction.script,
            Paths.get(tmpFolderPath.toString(), LocalStorageConstant.PYTHON_SCRIPTS_FOLDERNAME)
                .toString(),
            String.format(
                "%s_%s.py",
                pythonScriptAction.getName(),
                pythonScriptAction.getActionId().toString().substring(0, 6)));
      }
    }
    // Save tree to folder
    TestCaseTreeFileUtil.saveTestTree(
        testCaseTreeEntityOptional.get(),
        Paths.get(tmpFolderPath.toString(), LocalStorageConstant.TESTCASES_TREE_FOLDERNAME));
    return genZipFile(zipFileFullPath, tmpFolderPath);
  }

  /**
   * Ensures the tmp folder and the zip's parent folder exist, then empties the tmp folder so
   * each export starts clean.
   */
  private void prepareTmpFolderForZip(Path zipFileFullPath, Path tmpFolderPath)
      throws UicdException {
    try {
      FileUtils.forceMkdir(tmpFolderPath.toFile());
      FileUtils.forceMkdir(zipFileFullPath.getParent().toFile());
      FileUtils.cleanDirectory(tmpFolderPath.toFile());
    } catch (IOException e) {
      throw new UicdException("Can not clean tmp folder." + e.getMessage());
    }
  }

  /**
   * Deep copy the tree and related actions when user try to import project from another user. In
   * Uicd actions are linked by its action id, we need find every actionId recursively on both the
   * testcase tree and in the action table/file. Re-mapping to new uuid and save to
   * database/filesystem.
   *
   * @param srcProjectId source project whose tree and actions are copied
   * @param targetProjectId project receiving the copies (must already have a tree)
   * @throws UicdException if either project's tree cannot be found
   */
  public void copyTree(String srcProjectId, String targetProjectId) throws UicdException {
    // NOTE(review): this first lookup calls .get() without an isPresent() check (the guarded
    // check happens only below), and the same query is re-issued as
    // srcTestCaseTreeEntityOptional — likely one of the two lookups is redundant.
    Optional<TestCaseTreeEntity> srcProjectTestCaseTree =
        testCaseTreeStorageManager.getFirstTreeByProjectId(srcProjectId);
    String treeDetails = srcProjectTestCaseTree.get().getTreeDetails();
    final List<String> actionIdList = getActionListFromTreeDetails(treeDetails);
    Optional<TestCaseTreeEntity> srcTestCaseTreeEntityOptional =
        testCaseTreeStorageManager.getFirstTreeByProjectId(srcProjectId);
    Optional<TestCaseTreeEntity> targetTestCaseTreeEntityOptional =
        testCaseTreeStorageManager.getFirstTreeByProjectId(targetProjectId);
    if (!srcTestCaseTreeEntityOptional.isPresent()
        || !targetTestCaseTreeEntityOptional.isPresent()) {
      throw new UicdException("Can not find test tree.");
    }
    targetTestCaseTreeEntityOptional.get().setTreeDetails(treeDetails);
    List<ActionEntity> originalActionEntities = new ArrayList<>();
    List<BaseAction> pythonActionList = new ArrayList<>();
    for (String actionId : actionIdList) {
      fetchActionRecursively(actionId, originalActionEntities, pythonActionList);
    }
    /* clone the entities, otherwise entities are still in the attached state */
    List<ActionEntity> copiedActionEntities = cloneActionEntities(originalActionEntities);
    refurbishEntities(
        copiedActionEntities, targetTestCaseTreeEntityOptional.get(), /* ignoreTestTreeId */ true);
    actionStorageManager.saveActions(convertActionEntityListToActionList(copiedActionEntities));
    testCaseTreeStorageManager.save(targetTestCaseTreeEntityOptional.get());
    // Need clear cache, to make sure it will reload from db, otherwise, the compound action doesn't
    // have the child instance.
    actionStorageManager.clearCache();
  }

  /** Full path of the export zip: {base}/{export}/{projectId}/{zipFileName}. */
  private Path getZipFullPath(String projectId, String zipFileName) {
    return Paths.get(
        UicdConfig.getInstance().getBaseFolder(),
        LocalStorageConstant.EXPORT_FOLDERNAME,
        projectId,
        zipFileName);
  }

  /** Full path of an uploaded import zip under the upload folder. */
  private Path getImportZipFullPath(String zipFileName) {
    return Paths.get(
        UicdConfig.getInstance().getBaseFolder(),
        LocalStorageConstant.ZIP_UPLOAD_FOLDERNAME,
        zipFileName);
  }

  /** Per-project temp folder used as the staging area for both import and export. */
  private Path getZipTmpFolderPath(String projectId) {
    return Paths.get(
        UicdConfig.getInstance().getBaseFolder(),
        LocalStorageConstant.EXPORT_FOLDERNAME,
        projectId,
        LocalStorageConstant.ZIP_OUTPUT_FOLDERNAME);
  }

  /**
   * Extracts every action id referenced by the raw tree-details JSON string.
   *
   * <p>The ids are pulled with a regex rather than a JSON parser; it relies on the tree
   * serialization format keeping the {@code "additionalData":["<uuid>"]} shape.
   */
  private List<String> getActionListFromTreeDetails(String treeDetails) {
    // This regex will extra actionId from something like this:
    // "additionalData":["c29e660d-a490-4640-904a-2b504cc6794f"]
    // Original regex pattern: \"additionalData\"\:\[\"([^\]]+)\"\]
    final String regex = "\\\"additionalData\\\"\\:\\[\\\"([^\\]]+)\\\"\\]";
    final Matcher m = Pattern.compile(regex).matcher(treeDetails);
    final List<String> actionIdList = new ArrayList<>();
    while (m.find()) {
      actionIdList.add(m.group(1));
    }
    return actionIdList;
  }

  /** Wraps each action in a fresh (detached) ActionEntity. */
  private List<ActionEntity> convertActionListToActionEntityList(List<BaseAction> baseActionList) {
    return baseActionList.stream()
        .map(action -> new ActionEntity(action))
        .collect(Collectors.toList());
  }

  /** Deserializes each entity's JSON details back into a BaseAction. */
  private List<BaseAction> convertActionEntityListToActionList(
      List<ActionEntity> actionEntitiesList) {
    return actionEntitiesList.stream()
        .map(actionEntity -> BaseAction.fromJson(actionEntity.getDetails()))
        .collect(Collectors.toList());
  }

  /** Copy-constructs each entity so the copies are detached from the JPA session. */
  private List<ActionEntity> cloneActionEntities(List<ActionEntity> actionEntities) {
    return actionEntities.stream()
        .map(actionEntity -> new ActionEntity(actionEntity))
        .collect(Collectors.toList());
  }

  /**
   * After adding the import/export feature the actionId will be a issue, in the following senario:
   * a) UserA export project.zip from database b) UserB import, modify and export projectB.zip c)
   * UserA import projectB.zip, now the system will be confused about the uuicd, it will override
   * UserA's test cases With the new design of "project" feature by adding the following logic: 1)
   * regenerate all the uuicd, 2) update the createdAt and createdBy field
   *
   * @param actionEntities Original action entity
   * @param testCaseTreeEntity testCaseTreeEntity need update
   * @param ignoreTestTreeId when deep copy the tree, the tree id is also the new id associated with
   *     project, don't need to do the generate the id.
   */
  private void refurbishEntities(
      List<ActionEntity> actionEntities,
      TestCaseTreeEntity testCaseTreeEntity,
      boolean ignoreTestTreeId) {
    HashMap<String, String> uuidMapping = new HashMap<>();
    refurbishActionEntities(actionEntities, uuidMapping);
    if (!ignoreTestTreeId) {
      uuidMapping.putIfAbsent(
          testCaseTreeEntity.getUuid().toString(), UUID.randomUUID().toString());
      testCaseTreeEntity.setUuid(uuidMapping.get(testCaseTreeEntity.getUuid().toString()));
    }
    testCaseTreeEntity.setCreatedBy(UicdConfig.getInstance().getCurrentUser());
    testCaseTreeEntity.setCreatedAt(Instant.now());
    // Rewrite every old UUID occurrence inside the serialized tree details with its new id.
    String treeDetails = testCaseTreeEntity.getTreeDetails();
    for (Map.Entry<String, String> entry : uuidMapping.entrySet()) {
      treeDetails = treeDetails.replace(entry.getKey(), entry.getValue());
    }
    testCaseTreeEntity.setTreeDetails(treeDetails);
  }

  /**
   * Rewrites every action entity in place: new random UUIDs (recorded in {@code uuidMapping}),
   * current user as creator, current time as creation time. Compound actions have their
   * children's ids (and any NodeContext uuids) remapped as well.
   */
  private void refurbishActionEntities(
      List<ActionEntity> actionEntities, HashMap<String, String> uuidMapping) {
    for (int j = 0; j < actionEntities.size(); j++) {
      ActionEntity actionEntity = actionEntities.get(j);
      actionEntity.setCreatedBy(UicdConfig.getInstance().getCurrentUser());
      actionEntity.setCreatedAt(Instant.now());
      BaseAction action = BaseAction.fromJson(actionEntity.getDetails());
      action.setCreatedBy(UicdConfig.getInstance().getCurrentUser());
      // Generate new random uuid and put into the map.
      uuidMapping.putIfAbsent(action.getActionId().toString(), UUID.randomUUID().toString());
      // Update the actionId
      action.setActionId(UUID.fromString(uuidMapping.get(action.getActionId().toString())));
      action.setCreatedBy(UicdConfig.getInstance().getCurrentUser());
      if (action.getActionType() == ActionType.COMPOUND_ACTION) {
        CompoundAction compoundAction = (CompoundAction) action;
        for (int i = 0; i < compoundAction.childrenIdList.size(); i++) {
          // Generate new random uuid and put into the map, to make it simple we are not doing a
          // BFS/DFS for the action tree. Add the old-> new uuid mapping here also since we don't
          // know which comes first.
          String currentChildActionId = compoundAction.childrenIdList.get(i);
          Optional<BaseAction> currentChildAction =
              compoundAction.childrenActions.stream()
                  .filter(o -> o.getActionId().toString().equals(currentChildActionId))
                  .findFirst();
          UUID randomUUID = UUID.randomUUID();
          uuidMapping.putIfAbsent(currentChildActionId, randomUUID.toString());
          compoundAction.childrenIdList.set(i, uuidMapping.get(currentChildActionId));
          if (currentChildAction.isPresent()) {
            currentChildAction.get().setActionId(randomUUID);
            currentChildAction.get().setCreatedBy(UicdConfig.getInstance().getCurrentUser());
            generateRandomNodeContextUuid(currentChildAction.get(), uuidMapping);
          }
        }
      }
      // Replace the entity with one rebuilt from the refurbished action.
      actionEntities.set(j, new ActionEntity(action));
    }
  }

  /**
   * Collects the NodeContexts held by {@code baseAction} (which contexts exist depends on the
   * action type) and remaps their uuids via {@link #updateUuidAndMapping}.
   */
  private void generateRandomNodeContextUuid(
      BaseAction baseAction, HashMap<String, String> uuidMapping) {
    List<NodeContext> nodeContextsToUpdate = new ArrayList<>();
    switch (baseAction.getActionType()) {
      case CLICK_ACTION:
        ClickAction clickAction = (ClickAction) baseAction;
        nodeContextsToUpdate.add(clickAction.getNodeContext());
        break;
      case DRAG_ACTION:
        DragAction dragAction = (DragAction) baseAction;
        nodeContextsToUpdate.add(dragAction.getNodeContext());
        nodeContextsToUpdate.add(dragAction.getEndPointNodeContext());
        break;
      case LONG_CLICK_ACTION:
        LongClickAction longClickAction = (LongClickAction) baseAction;
        nodeContextsToUpdate.add(longClickAction.getNodeContext());
        break;
      // the following actions extend ScreenContentValidationAction
      // that has NodeContext
      case CONDITION_CLICK_ACTION:
      case LOOP_SCREEN_CONTENT_VALIDATION_ACTION:
      case SCROLL_SCREEN_CONTENT_VALIDATION_ACTION:
      case ML_IMAGE_VALIDATION_ACTION:
      case SCREEN_CONTENT_VALIDATION_ACTION:
        ScreenContentValidationAction screenContentValidationAction =
            (ScreenContentValidationAction) baseAction;
        nodeContextsToUpdate.add(screenContentValidationAction.getSavedNodeContext());
        break;
      case SWIPE_ACTION:
        SwipeAction swipeAction = (SwipeAction) baseAction;
        nodeContextsToUpdate.add(swipeAction.getStartPointNodeContext());
        nodeContextsToUpdate.add(swipeAction.getEndPointNodeContext());
        break;
      default:
        // do nothing because remaining Action Type don't have NodeContext
        return;
    }
    updateUuidAndMapping(nodeContextsToUpdate, uuidMapping);
  }

  /**
   * Recursively assigns remapped uuids to each non-null NodeContext and all of its children,
   * reusing an existing mapping entry when the same uuid was already seen.
   */
  private void updateUuidAndMapping(
      List<NodeContext> nodeContextsToUpdate, HashMap<String, String> uuidMapping) {
    for (NodeContext nodeContextToUpdate : nodeContextsToUpdate) {
      if (nodeContextToUpdate != null) {
        uuidMapping.computeIfAbsent(
            nodeContextToUpdate.getUuid(), k -> UUID.randomUUID().toString());
        nodeContextToUpdate.setUuid(uuidMapping.get(nodeContextToUpdate.getUuid()));
        List<NodeContext> childNodeContexts = nodeContextToUpdate.getChildrenNodeContext();
        updateUuidAndMapping(childNodeContexts, uuidMapping);
      }
    }
  }

  /**
   * Depth-first collection of the action with {@code actionId} and everything reachable from
   * it. Python-script children are also appended to {@code pythonActionList} so their scripts
   * can be written out separately on export.
   */
  private void fetchActionRecursively(
      String actionId, List<ActionEntity> actionEntities, List<BaseAction> pythonActionList)
      throws UicdActionException {
    // already contains action, skip
    if (actionEntities.stream().filter(o -> o.getUuid().equals(actionId)).findFirst().isPresent()) {
      return;
    }
    BaseAction action = actionStorageManager.getActionByUUID(actionId);
    if (action != null) {
      actionEntities.add(new ActionEntity(action));
      if (action.getActionType() == ActionType.COMPOUND_ACTION) {
        CompoundAction compoundAction = (CompoundAction) action;
        for (String childActionId : compoundAction.childrenIdList) {
          Optional<BaseAction> childAction =
              compoundAction.childrenActions.stream()
                  .filter(x -> x != null)
                  .filter(o -> o.getActionId().toString().equals(childActionId))
                  .findFirst();
          // In current design we already have the compound action in the childrenAction list,
          // however still need to call the fetchActionRecursively, so that it will be added to the
          // actionEntities and get deep copied later.
          if (childAction.isPresent()
              && childAction.get().getActionType() == ActionType.PYTHON_SCRIPT_ACTION) {
            pythonActionList.add(childAction.get());
          }
          if (!childAction.isPresent()
              || childAction.get().getActionType() == ActionType.COMPOUND_ACTION) {
            fetchActionRecursively(childActionId, actionEntities, pythonActionList);
          }
        }
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.processor.internals;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.streams.errors.DefaultProductionExceptionHandler;
import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
import org.apache.kafka.streams.errors.LogAndFailExceptionHandler;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.FailOnInvalidTimestamp;
import org.apache.kafka.streams.processor.LogAndSkipOnInvalidTimestamp;
import org.apache.kafka.streams.processor.TimestampExtractor;
import org.apache.kafka.streams.state.StateSerdes;
import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.kafka.test.MockSourceNode;
import org.apache.kafka.test.MockTimestampExtractor;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

// Unit tests for RecordQueue: ordering by timestamp, size/emptiness tracking, and
// deserialization-error handling. NOTE: this class continues past the end of the visible
// chunk; only the fixture fields and the start of testTimeTracking() are shown here.
public class RecordQueueTest {
    // Integer (de)serializers used to build the raw key/value byte payloads below.
    private final Serializer<Integer> intSerializer = new IntegerSerializer();
    private final Deserializer<Integer> intDeserializer = new IntegerDeserializer();
    private final TimestampExtractor timestampExtractor = new MockTimestampExtractor();
    private final String[] topics = {"topic"};

    private final Sensor droppedRecordsSensor = new Metrics().sensor("skipped-records");

    final InternalMockProcessorContext context = new InternalMockProcessorContext(
        StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class),
        new RecordCollectorImpl(
            null,
            new LogContext("record-queue-test "),
            new DefaultProductionExceptionHandler(),
            droppedRecordsSensor
        )
    );
    private final MockSourceNode mockSourceNodeWithMetrics =
        new MockSourceNode<>(topics, intDeserializer, intDeserializer);
    // Queue under test: fails hard on deserialization errors.
    private final RecordQueue queue = new RecordQueue(
        new TopicPartition(topics[0], 1),
        mockSourceNodeWithMetrics,
        timestampExtractor,
        new LogAndFailExceptionHandler(),
        context,
        new LogContext());
    // Variant that logs and skips records which fail to deserialize.
    private final RecordQueue queueThatSkipsDeserializeErrors = new RecordQueue(
        new TopicPartition(topics[0], 1),
        mockSourceNodeWithMetrics,
        timestampExtractor,
        new LogAndContinueExceptionHandler(),
        context,
        new LogContext());

    private final byte[] recordValue = intSerializer.serialize(null, 10);
    private final byte[] recordKey = intSerializer.serialize(null, 1);

    @Before
    public void before() {
        mockSourceNodeWithMetrics.init(context);
    }

    @After
    public void after() {
        mockSourceNodeWithMetrics.close();
    }

    // Verifies headRecordTimestamp() and poll() follow timestamp order (via the mock
    // extractor) as out-of-order record batches are added and drained.
    @Test
    public void testTimeTracking() {

        assertTrue(queue.isEmpty());
        assertEquals(0, queue.size());
        assertEquals(RecordQueue.UNKNOWN, queue.headRecordTimestamp());

        // add three 3 out-of-order records with timestamp 2, 1, 3
        final List<ConsumerRecord<byte[], byte[]>> list1 = Arrays.asList(
            new ConsumerRecord<>("topic", 1, 2, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
            new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
            new ConsumerRecord<>("topic", 1, 3, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue));

        queue.addRawRecords(list1);

        assertEquals(3, queue.size());
        assertEquals(2L, queue.headRecordTimestamp());

        // poll the first record, now with 1, 3
        assertEquals(2L, queue.poll().timestamp);
        assertEquals(2, queue.size());
        assertEquals(1L, queue.headRecordTimestamp());

        // poll the second record, now with 3
        assertEquals(1L, queue.poll().timestamp);
        assertEquals(1, queue.size());
        assertEquals(3L, queue.headRecordTimestamp());

        // add three 3 out-of-order records with timestamp 4, 1, 2
        // now with 3, 4, 1, 2
        final List<ConsumerRecord<byte[], byte[]>> list2 = Arrays.asList(
            new ConsumerRecord<>("topic", 1, 4, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
            new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue),
            new ConsumerRecord<>("topic", 1, 2, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue));

        queue.addRawRecords(list2);

        assertEquals(4, queue.size());
        assertEquals(3L, queue.headRecordTimestamp());

        // poll the third record, now with 4, 1, 2
        assertEquals(3L, queue.poll().timestamp);
        assertEquals(3, queue.size());
        assertEquals(4L, queue.headRecordTimestamp());

        // poll the rest records
        assertEquals(4L, queue.poll().timestamp);
        assertEquals(1L, queue.headRecordTimestamp());
        assertEquals(1L, queue.poll().timestamp);
        assertEquals(2L, queue.headRecordTimestamp());
        assertEquals(2L, queue.poll().timestamp);
        assertTrue(queue.isEmpty());
        assertEquals(0, queue.size());
        assertEquals(RecordQueue.UNKNOWN,
queue.headRecordTimestamp()); // add three more records with 4, 5, 6 final List<ConsumerRecord<byte[], byte[]>> list3 = Arrays.asList( new ConsumerRecord<>("topic", 1, 4, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 5, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 6, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)); queue.addRawRecords(list3); assertEquals(3, queue.size()); assertEquals(4L, queue.headRecordTimestamp()); // poll one record again, the timestamp should advance now assertEquals(4L, queue.poll().timestamp); assertEquals(2, queue.size()); assertEquals(5L, queue.headRecordTimestamp()); // clear the queue queue.clear(); assertTrue(queue.isEmpty()); assertEquals(0, queue.size()); assertEquals(RecordQueue.UNKNOWN, queue.headRecordTimestamp()); // re-insert the three records with 4, 5, 6 queue.addRawRecords(list3); assertEquals(3, queue.size()); assertEquals(4L, queue.headRecordTimestamp()); } @Test public void shouldTrackPartitionTimeAsMaxSeenTimestamp() { assertTrue(queue.isEmpty()); assertEquals(0, queue.size()); assertEquals(RecordQueue.UNKNOWN, queue.headRecordTimestamp()); // add three 3 out-of-order records with timestamp 2, 1, 3, 4 final List<ConsumerRecord<byte[], byte[]>> list1 = Arrays.asList( new ConsumerRecord<>("topic", 1, 2, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 3, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 4, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)); assertEquals(queue.partitionTime(), RecordQueue.UNKNOWN); queue.addRawRecords(list1); assertEquals(queue.partitionTime(), 2L); queue.poll(); assertEquals(queue.partitionTime(), 2L); queue.poll(); assertEquals(queue.partitionTime(), 3L); 
} @Test public void shouldSetTimestampAndRespectMaxTimestampPolicy() { assertTrue(queue.isEmpty()); assertEquals(0, queue.size()); assertEquals(RecordQueue.UNKNOWN, queue.headRecordTimestamp()); queue.setPartitionTime(150L); final List<ConsumerRecord<byte[], byte[]>> list1 = Arrays.asList( new ConsumerRecord<>("topic", 1, 200, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 100, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 300, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 400, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)); assertEquals(150L, queue.partitionTime()); queue.addRawRecords(list1); assertEquals(200L, queue.partitionTime()); queue.setPartitionTime(500L); queue.poll(); assertEquals(500L, queue.partitionTime()); queue.poll(); assertEquals(500L, queue.partitionTime()); } @Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionWhenKeyDeserializationFails() { final byte[] key = Serdes.Long().serializer().serialize("foo", 1L); final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, key, recordValue)); queue.addRawRecords(records); } @Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionWhenValueDeserializationFails() { final byte[] value = Serdes.Long().serializer().serialize("foo", 1L); final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, value)); queue.addRawRecords(records); } @Test public void shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler() { final byte[] key = Serdes.Long().serializer().serialize("foo", 1L); final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new 
ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, key, recordValue)); queueThatSkipsDeserializeErrors.addRawRecords(records); assertEquals(0, queueThatSkipsDeserializeErrors.size()); } @Test public void shouldNotThrowStreamsExceptionWhenValueDeserializationFailsWithSkipHandler() { final byte[] value = Serdes.Long().serializer().serialize("foo", 1L); final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, value)); queueThatSkipsDeserializeErrors.addRawRecords(records); assertEquals(0, queueThatSkipsDeserializeErrors.size()); } @Test(expected = StreamsException.class) public void shouldThrowOnNegativeTimestamp() { final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, -1L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)); final RecordQueue queue = new RecordQueue( new TopicPartition(topics[0], 1), new MockSourceNode<>(topics, intDeserializer, intDeserializer), new FailOnInvalidTimestamp(), new LogAndContinueExceptionHandler(), new InternalMockProcessorContext(), new LogContext()); queue.addRawRecords(records); } @Test public void shouldDropOnNegativeTimestamp() { final List<ConsumerRecord<byte[], byte[]>> records = Collections.singletonList( new ConsumerRecord<>("topic", 1, 1, -1L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)); final RecordQueue queue = new RecordQueue( new TopicPartition(topics[0], 1), new MockSourceNode<>(topics, intDeserializer, intDeserializer), new LogAndSkipOnInvalidTimestamp(), new LogAndContinueExceptionHandler(), new InternalMockProcessorContext(), new LogContext()); queue.addRawRecords(records); assertEquals(0, queue.size()); } @Test public void shouldPassPartitionTimeToTimestampExtractor() { final PartitionTimeTrackingTimestampExtractor timestampExtractor = new PartitionTimeTrackingTimestampExtractor(); final 
RecordQueue queue = new RecordQueue( new TopicPartition(topics[0], 1), mockSourceNodeWithMetrics, timestampExtractor, new LogAndFailExceptionHandler(), context, new LogContext()); assertTrue(queue.isEmpty()); assertEquals(0, queue.size()); assertEquals(RecordQueue.UNKNOWN, queue.headRecordTimestamp()); // add three 3 out-of-order records with timestamp 2, 1, 3, 4 final List<ConsumerRecord<byte[], byte[]>> list1 = Arrays.asList( new ConsumerRecord<>("topic", 1, 2, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 3, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue), new ConsumerRecord<>("topic", 1, 4, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)); assertEquals(RecordQueue.UNKNOWN, timestampExtractor.partitionTime); queue.addRawRecords(list1); // no (known) timestamp has yet been passed to the timestamp extractor assertEquals(RecordQueue.UNKNOWN, timestampExtractor.partitionTime); queue.poll(); assertEquals(2L, timestampExtractor.partitionTime); queue.poll(); assertEquals(2L, timestampExtractor.partitionTime); queue.poll(); assertEquals(3L, timestampExtractor.partitionTime); } class PartitionTimeTrackingTimestampExtractor implements TimestampExtractor { private long partitionTime = RecordQueue.UNKNOWN; public long extract(final ConsumerRecord<Object, Object> record, final long partitionTime) { if (partitionTime < this.partitionTime) { throw new IllegalStateException("Partition time should not decrease"); } this.partitionTime = partitionTime; return record.offset(); } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.util; import com.facebook.presto.client.IntervalDayTime; import com.facebook.presto.client.IntervalYearMonth; import com.facebook.presto.common.type.TimeZoneKey; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.sql.tree.IntervalLiteral.IntervalField; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.DurationFieldType; import org.joda.time.LocalDateTime; import org.joda.time.MutablePeriod; import org.joda.time.Period; import org.joda.time.ReadWritablePeriod; import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.DateTimeFormatterBuilder; import org.joda.time.format.DateTimeParser; import org.joda.time.format.DateTimePrinter; import org.joda.time.format.ISODateTimeFormat; import org.joda.time.format.PeriodFormatter; import org.joda.time.format.PeriodFormatterBuilder; import org.joda.time.format.PeriodParser; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import static com.facebook.presto.common.type.DateTimeEncoding.unpackMillisUtc; import static 
com.facebook.presto.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static com.facebook.presto.util.DateTimeZoneIndex.getChronology; import static com.facebook.presto.util.DateTimeZoneIndex.getDateTimeZone; import static com.facebook.presto.util.DateTimeZoneIndex.packDateTimeWithZone; import static com.facebook.presto.util.DateTimeZoneIndex.unpackChronology; import static com.facebook.presto.util.DateTimeZoneIndex.unpackDateTimeZone; import static com.google.common.base.Preconditions.checkArgument; import static java.lang.String.format; public final class DateTimeUtils { private DateTimeUtils() { } private static final DateTimeFormatter DATE_FORMATTER = ISODateTimeFormat.date().withZoneUTC(); public static int parseDate(String value) { return (int) TimeUnit.MILLISECONDS.toDays(DATE_FORMATTER.parseMillis(value)); } public static String printDate(int days) { return DATE_FORMATTER.print(TimeUnit.DAYS.toMillis(days)); } private static final DateTimeFormatter LEGACY_TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER; private static final DateTimeFormatter TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER; private static final DateTimeFormatter TIMESTAMP_WITH_TIME_ZONE_FORMATTER; private static final DateTimeFormatter TIMESTAMP_WITH_OR_WITHOUT_TIME_ZONE_FORMATTER; static { DateTimeParser[] timestampWithoutTimeZoneParser = { DateTimeFormat.forPattern("yyyy-M-d").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS").getParser()}; DateTimePrinter timestampWithoutTimeZonePrinter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS").getPrinter(); LEGACY_TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder() .append(timestampWithoutTimeZonePrinter, timestampWithoutTimeZoneParser) .toFormatter() .withOffsetParsed(); TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder() .append(timestampWithoutTimeZonePrinter, timestampWithoutTimeZoneParser) 
.toFormatter() .withZoneUTC(); DateTimeParser[] timestampWithTimeZoneParser = { DateTimeFormat.forPattern("yyyy-M-dZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d Z").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:mZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m Z").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:sZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s Z").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS Z").getParser(), DateTimeFormat.forPattern("yyyy-M-dZZZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d ZZZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:mZZZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m ZZZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:sZZZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s ZZZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSZZZ").getParser(), DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS ZZZ").getParser()}; DateTimePrinter timestampWithTimeZonePrinter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS ZZZ").getPrinter(); TIMESTAMP_WITH_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder() .append(timestampWithTimeZonePrinter, timestampWithTimeZoneParser) .toFormatter() .withOffsetParsed(); DateTimeParser[] timestampWithOrWithoutTimeZoneParser = Stream.concat(Stream.of(timestampWithoutTimeZoneParser), Stream.of(timestampWithTimeZoneParser)) .toArray(DateTimeParser[]::new); TIMESTAMP_WITH_OR_WITHOUT_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder() .append(timestampWithTimeZonePrinter, timestampWithOrWithoutTimeZoneParser) .toFormatter() .withOffsetParsed(); } /** * {@link LocalDateTime#getLocalMillis()} */ private static final MethodHandle getLocalMillis; static { try { Method getLocalMillisMethod = LocalDateTime.class.getDeclaredMethod("getLocalMillis"); getLocalMillisMethod.setAccessible(true); getLocalMillis = MethodHandles.lookup().unreflect(getLocalMillisMethod); 
} catch (ReflectiveOperationException e) { throw new RuntimeException(e); } } /** * Parse a string (optionally containing a zone) as a value of either TIMESTAMP or TIMESTAMP WITH TIME ZONE type. * <p> * For example: {@code "2000-01-01 01:23:00"} is parsed to TIMESTAMP {@code 2000-01-01T01:23:00} * and {@code "2000-01-01 01:23:00 +01:23"} is parsed to TIMESTAMP WITH TIME ZONE * {@code 2000-01-01T01:23:00.000+01:23}. * * @return stack representation of TIMESTAMP or TIMESTAMP WITH TIME ZONE type, depending on input */ public static long parseTimestampLiteral(String value) { try { DateTime dateTime = TIMESTAMP_WITH_TIME_ZONE_FORMATTER.parseDateTime(value); return packDateTimeWithZone(dateTime); } catch (Exception e) { return TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER.parseMillis(value); } } /** * Parse a string (optionally containing a zone) as a value of either TIMESTAMP or TIMESTAMP WITH TIME ZONE type. * If the string doesn't specify a zone, it is interpreted in {@code timeZoneKey} zone. * * @return stack representation of legacy TIMESTAMP or TIMESTAMP WITH TIME ZONE type, depending on input */ @Deprecated public static long parseTimestampLiteral(TimeZoneKey timeZoneKey, String value) { try { DateTime dateTime = TIMESTAMP_WITH_TIME_ZONE_FORMATTER.parseDateTime(value); return packDateTimeWithZone(dateTime); } catch (RuntimeException e) { return LEGACY_TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER.withChronology(getChronology(timeZoneKey)).parseMillis(value); } } /** * Parse a string (optionally containing a zone) as a value of TIMESTAMP WITH TIME ZONE type. * If the string doesn't specify a zone, it is interpreted in {@code timeZoneKey} zone. * <p> * For example: {@code "2000-01-01 01:23:00"} is parsed to TIMESTAMP WITH TIME ZONE * {@code 2000-01-01T01:23:00 <provided zone>} and {@code "2000-01-01 01:23:00 +01:23"} * is parsed to TIMESTAMP WITH TIME ZONE {@code 2000-01-01T01:23:00.000+01:23}. 
* * @return stack representation of TIMESTAMP WITH TIME ZONE type */ public static long parseTimestampWithTimeZone(TimeZoneKey timeZoneKey, String timestampWithTimeZone) { DateTime dateTime = TIMESTAMP_WITH_OR_WITHOUT_TIME_ZONE_FORMATTER.withChronology(getChronology(timeZoneKey)).withOffsetParsed().parseDateTime(timestampWithTimeZone); return packDateTimeWithZone(dateTime); } /** * Parse a string (optionally containing a zone) as a value of TIMESTAMP type. * If the string specifies a zone, the zone is discarded. * <p> * For example: {@code "2000-01-01 01:23:00"} is parsed to TIMESTAMP {@code 2000-01-01T01:23:00} * and {@code "2000-01-01 01:23:00 +01:23"} is also parsed to TIMESTAMP {@code 2000-01-01T01:23:00.000}. * * @return stack representation of TIMESTAMP type */ public static long parseTimestampWithoutTimeZone(String value) { LocalDateTime localDateTime = TIMESTAMP_WITH_OR_WITHOUT_TIME_ZONE_FORMATTER.parseLocalDateTime(value); try { return (long) getLocalMillis.invokeExact(localDateTime); } catch (Throwable e) { throw new RuntimeException(e); } } /** * Parse a string (optionally containing a zone) as a value of TIMESTAMP type. * If the string doesn't specify a zone, it is interpreted in {@code timeZoneKey} zone. 
* * @return stack representation of legacy TIMESTAMP type */ @Deprecated public static long parseTimestampWithoutTimeZone(TimeZoneKey timeZoneKey, String value) { return TIMESTAMP_WITH_OR_WITHOUT_TIME_ZONE_FORMATTER.withChronology(getChronology(timeZoneKey)).parseMillis(value); } public static String printTimestampWithTimeZone(long timestampWithTimeZone) { ISOChronology chronology = unpackChronology(timestampWithTimeZone); long millis = unpackMillisUtc(timestampWithTimeZone); return TIMESTAMP_WITH_TIME_ZONE_FORMATTER.withChronology(chronology).print(millis); } public static String printTimestampWithoutTimeZone(long timestamp) { return TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER.print(timestamp); } @Deprecated public static String printTimestampWithoutTimeZone(TimeZoneKey timeZoneKey, long timestamp) { return LEGACY_TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER.withChronology(getChronology(timeZoneKey)).print(timestamp); } public static boolean timestampHasTimeZone(String value) { try { try { TIMESTAMP_WITH_TIME_ZONE_FORMATTER.parseMillis(value); return true; } catch (RuntimeException e) { // `.withZoneUTC()` makes `timestampHasTimeZone` return value independent of JVM zone TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER.withZoneUTC().parseMillis(value); return false; } } catch (RuntimeException e) { throw new IllegalArgumentException(format("Invalid timestamp '%s'", value)); } } private static final DateTimeFormatter TIME_FORMATTER; private static final DateTimeFormatter TIME_WITH_TIME_ZONE_FORMATTER; static { DateTimeParser[] timeWithoutTimeZoneParser = { DateTimeFormat.forPattern("H:m").getParser(), DateTimeFormat.forPattern("H:m:s").getParser(), DateTimeFormat.forPattern("H:m:s.SSS").getParser()}; DateTimePrinter timeWithoutTimeZonePrinter = DateTimeFormat.forPattern("HH:mm:ss.SSS").getPrinter(); TIME_FORMATTER = new DateTimeFormatterBuilder().append(timeWithoutTimeZonePrinter, timeWithoutTimeZoneParser).toFormatter().withZoneUTC(); DateTimeParser[] timeWithTimeZoneParser = { 
DateTimeFormat.forPattern("H:mZ").getParser(), DateTimeFormat.forPattern("H:m Z").getParser(), DateTimeFormat.forPattern("H:m:sZ").getParser(), DateTimeFormat.forPattern("H:m:s Z").getParser(), DateTimeFormat.forPattern("H:m:s.SSSZ").getParser(), DateTimeFormat.forPattern("H:m:s.SSS Z").getParser(), DateTimeFormat.forPattern("H:mZZZ").getParser(), DateTimeFormat.forPattern("H:m ZZZ").getParser(), DateTimeFormat.forPattern("H:m:sZZZ").getParser(), DateTimeFormat.forPattern("H:m:s ZZZ").getParser(), DateTimeFormat.forPattern("H:m:s.SSSZZZ").getParser(), DateTimeFormat.forPattern("H:m:s.SSS ZZZ").getParser()}; DateTimePrinter timeWithTimeZonePrinter = DateTimeFormat.forPattern("HH:mm:ss.SSS ZZZ").getPrinter(); TIME_WITH_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder().append(timeWithTimeZonePrinter, timeWithTimeZoneParser).toFormatter().withOffsetParsed(); } /** * Parse a string (optionally containing a zone) as a value of either TIME or TIME WITH TIME ZONE type. * <p> * For example: {@code "01:23:00"} is parsed to TIME {@code 01:23:00} * and {@code "01:23:00 +01:23"} is parsed to TIME WITH TIME ZONE * {@code 01:23:00+01:23}. * * @return stack representation of TIME or TIME WITH TIME ZONE type, depending on input */ public static long parseTimeLiteral(String value) { try { return parseTimeWithTimeZone(value); } catch (Exception e) { return parseTimeWithoutTimeZone(value); } } /** * Parse a string (optionally containing a zone) as a value of either TIME or TIME WITH TIME ZONE type. * If the string doesn't specify a zone, it is interpreted in {@code timeZoneKey} zone. * * @return stack representation of legacy TIME or TIME WITH TIME ZONE type, depending on input */ @Deprecated public static long parseTimeLiteral(TimeZoneKey timeZoneKey, String value) { try { return parseTimeWithTimeZone(value); } catch (Exception e) { return parseTimeWithoutTimeZone(timeZoneKey, value); } } /** * Parse a string containing a zone as a value of TIME WITH TIME ZONE type. 
* <p> * For example: {@code "01:23:00 +01:23"} is parsed to TIME WITH TIME ZONE * {@code 01:23:00+01:23} and {@code "01:23:00"} is rejected. * * @return stack representation of TIME WITH TIME ZONE type */ public static long parseTimeWithTimeZone(String timeWithTimeZone) { DateTime dateTime = TIME_WITH_TIME_ZONE_FORMATTER.parseDateTime(timeWithTimeZone); return packDateTimeWithZone(dateTime); } /** * Parse a string (without a zone) as a value of TIME type. * <p> * For example: {@code "01:23:00"} is parsed to TIME {@code 01:23:00} * and {@code "01:23:00 +01:23"} is rejected. * * @return stack representation of TIME type */ public static long parseTimeWithoutTimeZone(String value) { return TIME_FORMATTER.parseMillis(value); } /** * Parse a string (without a zone) as a value of TIME type, interpreted in {@code timeZoneKey} zone. * * @return stack representation of legacy TIME type */ @Deprecated public static long parseTimeWithoutTimeZone(TimeZoneKey timeZoneKey, String value) { return TIME_FORMATTER.withZone(getDateTimeZone(timeZoneKey)).parseMillis(value); } public static String printTimeWithTimeZone(long timeWithTimeZone) { DateTimeZone timeZone = unpackDateTimeZone(timeWithTimeZone); long millis = unpackMillisUtc(timeWithTimeZone); return TIME_WITH_TIME_ZONE_FORMATTER.withZone(timeZone).print(millis); } public static String printTimeWithoutTimeZone(long value) { return TIME_FORMATTER.print(value); } @Deprecated public static String printTimeWithoutTimeZone(TimeZoneKey timeZoneKey, long value) { return TIME_FORMATTER.withZone(getDateTimeZone(timeZoneKey)).print(value); } public static boolean timeHasTimeZone(String value) { try { try { parseTimeWithTimeZone(value); return true; } catch (RuntimeException e) { parseTimeWithoutTimeZone(value); return false; } } catch (RuntimeException e) { throw new IllegalArgumentException(format("Invalid time '%s'", value)); } } private static final int YEAR_FIELD = 0; private static final int MONTH_FIELD = 1; private static final 
int DAY_FIELD = 3; private static final int HOUR_FIELD = 4; private static final int MINUTE_FIELD = 5; private static final int SECOND_FIELD = 6; private static final int MILLIS_FIELD = 7; private static final PeriodFormatter INTERVAL_DAY_SECOND_FORMATTER = cretePeriodFormatter(IntervalField.DAY, IntervalField.SECOND); private static final PeriodFormatter INTERVAL_DAY_MINUTE_FORMATTER = cretePeriodFormatter(IntervalField.DAY, IntervalField.MINUTE); private static final PeriodFormatter INTERVAL_DAY_HOUR_FORMATTER = cretePeriodFormatter(IntervalField.DAY, IntervalField.HOUR); private static final PeriodFormatter INTERVAL_DAY_FORMATTER = cretePeriodFormatter(IntervalField.DAY, IntervalField.DAY); private static final PeriodFormatter INTERVAL_HOUR_SECOND_FORMATTER = cretePeriodFormatter(IntervalField.HOUR, IntervalField.SECOND); private static final PeriodFormatter INTERVAL_HOUR_MINUTE_FORMATTER = cretePeriodFormatter(IntervalField.HOUR, IntervalField.MINUTE); private static final PeriodFormatter INTERVAL_HOUR_FORMATTER = cretePeriodFormatter(IntervalField.HOUR, IntervalField.HOUR); private static final PeriodFormatter INTERVAL_MINUTE_SECOND_FORMATTER = cretePeriodFormatter(IntervalField.MINUTE, IntervalField.SECOND); private static final PeriodFormatter INTERVAL_MINUTE_FORMATTER = cretePeriodFormatter(IntervalField.MINUTE, IntervalField.MINUTE); private static final PeriodFormatter INTERVAL_SECOND_FORMATTER = cretePeriodFormatter(IntervalField.SECOND, IntervalField.SECOND); private static final PeriodFormatter INTERVAL_YEAR_MONTH_FORMATTER = cretePeriodFormatter(IntervalField.YEAR, IntervalField.MONTH); private static final PeriodFormatter INTERVAL_YEAR_FORMATTER = cretePeriodFormatter(IntervalField.YEAR, IntervalField.YEAR); private static final PeriodFormatter INTERVAL_MONTH_FORMATTER = cretePeriodFormatter(IntervalField.MONTH, IntervalField.MONTH); public static long parseDayTimeInterval(String value, IntervalField startField, Optional<IntervalField> endField) { 
IntervalField end = endField.orElse(startField); if (startField == IntervalField.DAY && end == IntervalField.SECOND) { return parsePeriodMillis(INTERVAL_DAY_SECOND_FORMATTER, value, startField, end); } if (startField == IntervalField.DAY && end == IntervalField.MINUTE) { return parsePeriodMillis(INTERVAL_DAY_MINUTE_FORMATTER, value, startField, end); } if (startField == IntervalField.DAY && end == IntervalField.HOUR) { return parsePeriodMillis(INTERVAL_DAY_HOUR_FORMATTER, value, startField, end); } if (startField == IntervalField.DAY && end == IntervalField.DAY) { return parsePeriodMillis(INTERVAL_DAY_FORMATTER, value, startField, end); } if (startField == IntervalField.HOUR && end == IntervalField.SECOND) { return parsePeriodMillis(INTERVAL_HOUR_SECOND_FORMATTER, value, startField, end); } if (startField == IntervalField.HOUR && end == IntervalField.MINUTE) { return parsePeriodMillis(INTERVAL_HOUR_MINUTE_FORMATTER, value, startField, end); } if (startField == IntervalField.HOUR && end == IntervalField.HOUR) { return parsePeriodMillis(INTERVAL_HOUR_FORMATTER, value, startField, end); } if (startField == IntervalField.MINUTE && end == IntervalField.SECOND) { return parsePeriodMillis(INTERVAL_MINUTE_SECOND_FORMATTER, value, startField, end); } if (startField == IntervalField.MINUTE && end == IntervalField.MINUTE) { return parsePeriodMillis(INTERVAL_MINUTE_FORMATTER, value, startField, end); } if (startField == IntervalField.SECOND && end == IntervalField.SECOND) { return parsePeriodMillis(INTERVAL_SECOND_FORMATTER, value, startField, end); } throw new IllegalArgumentException("Invalid day second interval qualifier: " + startField + " to " + end); } public static long parsePeriodMillis(PeriodFormatter periodFormatter, String value, IntervalField startField, IntervalField endField) { try { Period period = parsePeriod(periodFormatter, value); return IntervalDayTime.toMillis( period.getValue(DAY_FIELD), period.getValue(HOUR_FIELD), period.getValue(MINUTE_FIELD), 
period.getValue(SECOND_FIELD), period.getValue(MILLIS_FIELD)); } catch (IllegalArgumentException e) { throw invalidInterval(e, value, startField, endField); } } public static long parseYearMonthInterval(String value, IntervalField startField, Optional<IntervalField> endField) { IntervalField end = endField.orElse(startField); if (startField == IntervalField.YEAR && end == IntervalField.MONTH) { PeriodFormatter periodFormatter = INTERVAL_YEAR_MONTH_FORMATTER; return parsePeriodMonths(value, periodFormatter, startField, end); } if (startField == IntervalField.YEAR && end == IntervalField.YEAR) { return parsePeriodMonths(value, INTERVAL_YEAR_FORMATTER, startField, end); } if (startField == IntervalField.MONTH && end == IntervalField.MONTH) { return parsePeriodMonths(value, INTERVAL_MONTH_FORMATTER, startField, end); } throw new IllegalArgumentException("Invalid year month interval qualifier: " + startField + " to " + end); } private static long parsePeriodMonths(String value, PeriodFormatter periodFormatter, IntervalField startField, IntervalField endField) { try { Period period = parsePeriod(periodFormatter, value); return IntervalYearMonth.toMonths( period.getValue(YEAR_FIELD), period.getValue(MONTH_FIELD)); } catch (IllegalArgumentException e) { throw invalidInterval(e, value, startField, endField); } } private static Period parsePeriod(PeriodFormatter periodFormatter, String value) { boolean negative = value.startsWith("-"); if (negative) { value = value.substring(1); } Period period = periodFormatter.parsePeriod(value); for (DurationFieldType type : period.getFieldTypes()) { checkArgument(period.get(type) >= 0, "Period field %s is negative", type); } if (negative) { period = period.negated(); } return period; } private static PrestoException invalidInterval(Throwable throwable, String value, IntervalField startField, IntervalField endField) { String message; if (startField == endField) { message = format("Invalid INTERVAL %s value: %s", startField, value); } else 
{ message = format("Invalid INTERVAL %s TO %s value: %s", startField, endField, value); } return new PrestoException(INVALID_FUNCTION_ARGUMENT, message, throwable); } private static PeriodFormatter cretePeriodFormatter(IntervalField startField, IntervalField endField) { if (endField == null) { endField = startField; } List<PeriodParser> parsers = new ArrayList<>(); PeriodFormatterBuilder builder = new PeriodFormatterBuilder(); switch (startField) { case YEAR: builder.appendYears(); parsers.add(builder.toParser()); if (endField == IntervalField.YEAR) { break; } builder.appendLiteral("-"); // fall through case MONTH: builder.appendMonths(); parsers.add(builder.toParser()); if (endField != IntervalField.MONTH) { throw new IllegalArgumentException("Invalid interval qualifier: " + startField + " to " + endField); } break; case DAY: builder.appendDays(); parsers.add(builder.toParser()); if (endField == IntervalField.DAY) { break; } builder.appendLiteral(" "); // fall through case HOUR: builder.appendHours(); parsers.add(builder.toParser()); if (endField == IntervalField.HOUR) { break; } builder.appendLiteral(":"); // fall through case MINUTE: builder.appendMinutes(); parsers.add(builder.toParser()); if (endField == IntervalField.MINUTE) { break; } builder.appendLiteral(":"); // fall through case SECOND: builder.appendSecondsWithOptionalMillis(); parsers.add(builder.toParser()); break; } return new PeriodFormatter(builder.toPrinter(), new OrderedPeriodParser(parsers)); } private static class OrderedPeriodParser implements PeriodParser { private final List<PeriodParser> parsers; private OrderedPeriodParser(List<PeriodParser> parsers) { this.parsers = parsers; } @Override public int parseInto(ReadWritablePeriod period, String text, int position, Locale locale) { int bestValidPos = position; ReadWritablePeriod bestValidPeriod = null; int bestInvalidPos = position; for (PeriodParser parser : parsers) { ReadWritablePeriod parsedPeriod = new MutablePeriod(); int parsePos = 
parser.parseInto(parsedPeriod, text, position, locale); if (parsePos >= position) { if (parsePos > bestValidPos) { bestValidPos = parsePos; bestValidPeriod = parsedPeriod; if (parsePos >= text.length()) { break; } } } else if (parsePos < 0) { parsePos = ~parsePos; if (parsePos > bestInvalidPos) { bestInvalidPos = parsePos; } } } if (bestValidPos > position || (bestValidPos == position)) { // Restore the state to the best valid parse. if (bestValidPeriod != null) { period.setPeriod(bestValidPeriod); } return bestValidPos; } return ~bestInvalidPos; } } }
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.xdebugger.impl.breakpoints.ui; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.ide.util.treeView.TreeState; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.Disposer; import com.intellij.ui.*; import com.intellij.ui.popup.util.DetailController; import com.intellij.ui.popup.util.DetailViewImpl; import com.intellij.ui.popup.util.ItemWrapper; import com.intellij.ui.popup.util.MasterController; import com.intellij.util.SingleAlarm; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashSet; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.xdebugger.XDebuggerBundle; import com.intellij.xdebugger.XDebuggerManager; import com.intellij.xdebugger.breakpoints.XBreakpoint; import com.intellij.xdebugger.breakpoints.XBreakpointType; import com.intellij.xdebugger.breakpoints.ui.XBreakpointGroupingRule; import com.intellij.xdebugger.impl.breakpoints.XBreakpointBase; import com.intellij.xdebugger.impl.breakpoints.XBreakpointManagerImpl; import 
com.intellij.xdebugger.impl.breakpoints.XBreakpointUtil; import com.intellij.xdebugger.impl.breakpoints.XBreakpointsDialogState; import com.intellij.xdebugger.impl.breakpoints.ui.grouping.XBreakpointCustomGroup; import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointItemNode; import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointItemsTreeController; import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointsCheckboxTree; import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointsGroupNode; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreePath; import java.awt.*; import java.util.*; import java.util.List; public class BreakpointsDialog extends DialogWrapper { @NotNull private final Project myProject; private final Object myInitialBreakpoint; private final List<BreakpointPanelProvider> myBreakpointsPanelProviders; private BreakpointItemsTreeController myTreeController; final JLabel temp = new JLabel(); private final MasterController myMasterController = new MasterController() { @Override public ItemWrapper[] getSelectedItems() { final List<BreakpointItem> res = myTreeController.getSelectedBreakpoints(false); return res.toArray(new ItemWrapper[res.size()]); } @Override public JLabel getPathLabel() { return temp; } }; private final DetailController myDetailController = new DetailController(myMasterController); private final Collection<BreakpointItem> myBreakpointItems = new ArrayList<>(); private final SingleAlarm myRebuildAlarm = new SingleAlarm(new Runnable() { @Override public void run() { collectItems(); myTreeController.rebuildTree(myBreakpointItems); myDetailController.doUpdateDetailView(true); } }, 100, myDisposable); private final List<XBreakpointGroupingRule> myRulesAvailable = new ArrayList<>(); private final Set<XBreakpointGroupingRule> myRulesEnabled = new 
TreeSet<>(XBreakpointGroupingRule.PRIORITY_COMPARATOR); private final Disposable myListenerDisposable = Disposer.newDisposable(); private final List<ToggleActionButton> myToggleRuleActions = new ArrayList<>(); private XBreakpointManagerImpl getBreakpointManager() { return (XBreakpointManagerImpl)XDebuggerManager.getInstance(myProject).getBreakpointManager(); } protected BreakpointsDialog(@NotNull Project project, Object breakpoint, @NotNull List<BreakpointPanelProvider> providers) { super(project); myProject = project; myBreakpointsPanelProviders = providers; myInitialBreakpoint = breakpoint; collectGroupingRules(); collectItems(); setTitle("Breakpoints"); setModal(false); init(); setOKButtonText("Done"); } private String getSplitterProportionKey() { return getDimensionServiceKey() + ".splitter"; } @Nullable @Override protected JComponent createCenterPanel() { JPanel mainPanel = new JPanel(new BorderLayout()); JBSplitter splitPane = new JBSplitter(0.3f); splitPane.setSplitterProportionKey(getSplitterProportionKey()); splitPane.setFirstComponent(createMasterView()); splitPane.setSecondComponent(createDetailView()); mainPanel.add(splitPane, BorderLayout.CENTER); return mainPanel; } private JComponent createDetailView() { DetailViewImpl detailView = new DetailViewImpl(myProject); detailView.setEmptyLabel(XDebuggerBundle.message("xbreakpoint.label.empty")); myDetailController.setDetailView(detailView); return detailView; } void collectItems() { if (!myBreakpointsPanelProviders.isEmpty()) { disposeItems(); myBreakpointItems.clear(); for (BreakpointPanelProvider panelProvider : myBreakpointsPanelProviders) { panelProvider.provideBreakpointItems(myProject, myBreakpointItems); } } } void initSelection(Collection<BreakpointItem> breakpoints) { XBreakpointsDialogState settings = (getBreakpointManager()).getBreakpointsDialogSettings(); if (settings != null && settings.getTreeState() != null) { settings.getTreeState().applyTo(myTreeController.getTreeView()); if 
(myTreeController.getTreeView().getSelectionCount() == 0) { myTreeController.selectFirstBreakpointItem(); } } else { TreeUtil.expandAll(myTreeController.getTreeView()); myTreeController.selectFirstBreakpointItem(); } selectBreakpoint(myInitialBreakpoint); } @Nullable @Override protected String getDimensionServiceKey() { return getClass().getName(); } @NotNull @Override protected Action[] createActions() { return new Action[]{getOKAction(), getHelpAction()}; } private class ToggleBreakpointGroupingRuleEnabledAction extends ToggleActionButton { private final XBreakpointGroupingRule myRule; public ToggleBreakpointGroupingRuleEnabledAction(XBreakpointGroupingRule rule) { super(rule.getPresentableName(), rule.getIcon()); myRule = rule; getTemplatePresentation().setText(rule.getPresentableName()); } @Override public boolean isSelected(AnActionEvent e) { return myRulesEnabled.contains(myRule); } @Override public void setSelected(AnActionEvent e, boolean state) { if (state) { myRulesEnabled.add(myRule); } else { myRulesEnabled.remove(myRule); } myTreeController.setGroupingRules(myRulesEnabled); } } private JComponent createMasterView() { myTreeController = new BreakpointItemsTreeController(myRulesEnabled) { @Override public void nodeStateWillChangeImpl(CheckedTreeNode node) { if (node instanceof BreakpointItemNode) { ((BreakpointItemNode)node).getBreakpointItem().saveState(); } super.nodeStateWillChangeImpl(node); } @Override public void nodeStateDidChangeImpl(CheckedTreeNode node) { super.nodeStateDidChangeImpl(node); if (node instanceof BreakpointItemNode) { myDetailController.doUpdateDetailView(true); } } @Override protected void selectionChangedImpl() { super.selectionChangedImpl(); saveCurrentItem(); myDetailController.updateDetailView(); } }; final JTree tree = new BreakpointsCheckboxTree(myProject, myTreeController) { @Override protected void onDoubleClick(CheckedTreeNode node) { if (node instanceof BreakpointsGroupNode) { TreePath path = 
TreeUtil.getPathFromRoot(node); if (isExpanded(path)) { collapsePath(path); } else { expandPath(path); } } else { navigate(false); } } }; PopupHandler.installPopupHandler(tree, new ActionGroup() { @NotNull @Override public AnAction[] getChildren(@Nullable AnActionEvent e) { ActionGroup group = new ActionGroup("Move to group", true) { @NotNull @Override public AnAction[] getChildren(@Nullable AnActionEvent e) { Set<String> groups = getBreakpointManager().getAllGroups(); AnAction[] res = new AnAction[groups.size()+3]; int i = 0; res[i++] = new MoveToGroupAction(null); for (String group : groups) { res[i++] = new MoveToGroupAction(group); } res[i++] = new Separator(); res[i] = new MoveToGroupAction(); return res; } }; List<AnAction> res = new ArrayList<>(); res.add(group); Object component = tree.getLastSelectedPathComponent(); if (tree.getSelectionCount() == 1 && component instanceof BreakpointsGroupNode && ((BreakpointsGroupNode)component).getGroup() instanceof XBreakpointCustomGroup) { res.add(new SetAsDefaultGroupAction((XBreakpointCustomGroup)((BreakpointsGroupNode)component).getGroup())); } if (tree.getSelectionCount() == 1 && component instanceof BreakpointItemNode) { res.add(new EditDescriptionAction((XBreakpointBase)((BreakpointItemNode)component).getBreakpointItem().getBreakpoint())); } return res.toArray(new AnAction[res.size()]); } }, ActionPlaces.UNKNOWN, ActionManager.getInstance()); new AnAction("BreakpointDialog.GoToSource") { @Override public void actionPerformed(AnActionEvent e) { navigate(true); close(OK_EXIT_CODE); } }.registerCustomShortcutSet(CommonShortcuts.ENTER, tree, myDisposable); new AnAction("BreakpointDialog.ShowSource") { @Override public void actionPerformed(AnActionEvent e) { navigate(true); close(OK_EXIT_CODE); } }.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE).getShortcutSet(), tree, myDisposable); DefaultActionGroup breakpointTypes = XBreakpointUtil.breakpointTypes() 
.filter(XBreakpointType::isAddBreakpointButtonVisible) .map(AddXBreakpointAction::new) .toListAndThen(DefaultActionGroup::new); ToolbarDecorator decorator = ToolbarDecorator.createDecorator(tree). setAddAction(new AnActionButtonRunnable() { @Override public void run(AnActionButton button) { JBPopupFactory.getInstance() .createActionGroupPopup(null, breakpointTypes, DataManager.getInstance().getDataContext(button.getContextComponent()), JBPopupFactory.ActionSelectionAid.NUMBERING, false) .show(button.getPreferredPopupPoint()); } }). setRemoveAction(new AnActionButtonRunnable() { @Override public void run(AnActionButton button) { myTreeController.removeSelectedBreakpoints(myProject); } }). setRemoveActionUpdater(new AnActionButtonUpdater() { @Override public boolean isEnabled(AnActionEvent e) { for (BreakpointItem item : myTreeController.getSelectedBreakpoints(true)) { if (item.allowedToRemove()) { return true; } } return false; } }). setToolbarPosition(ActionToolbarPosition.TOP). setToolbarBorder(IdeBorderFactory.createEmptyBorder()); myToggleRuleActions.forEach(decorator::addExtraAction); JPanel decoratedTree = decorator.createPanel(); decoratedTree.setBorder(IdeBorderFactory.createEmptyBorder()); JScrollPane pane = UIUtil.getParentOfType(JScrollPane.class, tree); if (pane != null) pane.setBorder(IdeBorderFactory.createBorder()); myTreeController.setTreeView(tree); myTreeController.buildTree(myBreakpointItems); initSelection(myBreakpointItems); myBreakpointsPanelProviders.forEach(provider -> provider.addListener(myRebuildAlarm::cancelAndRequest, myProject, myListenerDisposable)); return decoratedTree; } private void navigate(final boolean requestFocus) { myTreeController.getSelectedBreakpoints(false).stream().findFirst().ifPresent(b -> b.navigate(requestFocus)); } @Nullable @Override public JComponent getPreferredFocusedComponent() { return myTreeController.getTreeView(); } private void collectGroupingRules() { for (BreakpointPanelProvider provider : 
myBreakpointsPanelProviders) { provider.createBreakpointsGroupingRules(myRulesAvailable); } myRulesAvailable.sort(XBreakpointGroupingRule.PRIORITY_COMPARATOR); myRulesEnabled.clear(); XBreakpointsDialogState settings = (getBreakpointManager()).getBreakpointsDialogSettings(); for (XBreakpointGroupingRule rule : myRulesAvailable) { if (rule.isAlwaysEnabled() || (settings != null && settings.getSelectedGroupingRules().contains(rule.getId()) ) ) { myRulesEnabled.add(rule); } } for (XBreakpointGroupingRule rule : myRulesAvailable) { if (!rule.isAlwaysEnabled()) { myToggleRuleActions.add(new ToggleBreakpointGroupingRuleEnabledAction(rule)); } } } private void saveBreakpointsDialogState() { final XBreakpointsDialogState dialogState = new XBreakpointsDialogState(); saveTreeState(dialogState); final List<XBreakpointGroupingRule> rulesEnabled = ContainerUtil.filter(myRulesEnabled, rule -> !rule.isAlwaysEnabled()); dialogState.setSelectedGroupingRules(new HashSet<>(ContainerUtil.map(rulesEnabled, rule -> rule.getId()))); getBreakpointManager().setBreakpointsDialogSettings(dialogState); } private void saveTreeState(XBreakpointsDialogState state) { JTree tree = myTreeController.getTreeView(); state.setTreeState(TreeState.createOn(tree, (DefaultMutableTreeNode)tree.getModel().getRoot())); } @Override protected void dispose() { saveCurrentItem(); Disposer.dispose(myListenerDisposable); saveBreakpointsDialogState(); disposeItems(); super.dispose(); } private void disposeItems() { myBreakpointItems.forEach(BreakpointItem::dispose); } @Nullable @Override protected String getHelpId() { return "reference.dialogs.breakpoints"; } private void saveCurrentItem() { ItemWrapper item = myDetailController.getSelectedItem(); if (item instanceof BreakpointItem) { ((BreakpointItem)item).saveState(); } } private class AddXBreakpointAction extends AnAction implements DumbAware { private final XBreakpointType<?, ?> myType; public AddXBreakpointAction(XBreakpointType<?, ?> type) { myType = type; 
getTemplatePresentation().setIcon(type.getEnabledIcon()); getTemplatePresentation().setText(type.getTitle()); } @Override public void actionPerformed(AnActionEvent e) { saveCurrentItem(); XBreakpoint<?> breakpoint = myType.addBreakpoint(myProject, null); if (breakpoint != null) { selectBreakpoint(breakpoint); } } } private boolean selectBreakpoint(Object breakpoint) { if (breakpoint != null) { for (BreakpointItem item : myBreakpointItems) { if (item.getBreakpoint() == breakpoint) { myTreeController.selectBreakpointItem(item, null); return true; } } } return false; } private class MoveToGroupAction extends AnAction { private final String myGroup; private final boolean myNewGroup; private MoveToGroupAction(String group) { super(group == null ? "<no group>" : group); myGroup = group; myNewGroup = false; } private MoveToGroupAction() { super("Create new..."); myNewGroup = true; myGroup = null; } @Override public void actionPerformed(AnActionEvent e) { String groupName = myGroup; if (myNewGroup) { groupName = Messages.showInputDialog("New group name", "New Group", AllIcons.Nodes.NewFolder); if (groupName == null) { return; } } for (BreakpointItem item : myTreeController.getSelectedBreakpoints(true)) { Object breakpoint = item.getBreakpoint(); if (breakpoint instanceof XBreakpointBase) { ((XBreakpointBase)breakpoint).setGroup(groupName); } } myTreeController.rebuildTree(myBreakpointItems); } } private class SetAsDefaultGroupAction extends AnAction { private final String myName; private SetAsDefaultGroupAction(XBreakpointCustomGroup group) { super(group.isDefault() ? "Unset as default" : "Set as default"); myName = group.isDefault() ? 
null : group.getName(); } @Override public void actionPerformed(AnActionEvent e) { getBreakpointManager().setDefaultGroup(myName); myTreeController.rebuildTree(myBreakpointItems); } } private class EditDescriptionAction extends AnAction { private final XBreakpointBase myBreakpoint; private EditDescriptionAction(XBreakpointBase breakpoint) { super("Edit description"); myBreakpoint = breakpoint; } @Override public void actionPerformed(AnActionEvent e) { String description = Messages.showInputDialog("", "Edit Description", null, myBreakpoint.getUserDescription(), null); if (description == null) { return; } myBreakpoint.setUserDescription(description); myTreeController.rebuildTree(myBreakpointItems); } } }
/* Android Asynchronous Http Client Copyright (c) 2011 James Smith <james@loopj.com> http://loopj.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.heyzap.http; import java.io.IOException; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; import org.apache.http.client.HttpResponseException; import org.apache.http.entity.BufferedHttpEntity; import org.apache.http.util.EntityUtils; import android.os.Handler; import android.os.Message; import android.os.Looper; /** * Used to intercept and handle the responses from requests made using * {@link AsyncHttpClient}. The {@link #onSuccess(String)} method is * designed to be anonymously overridden with your own response handling code. * <p> * Additionally, you can override the {@link #onFailure(Throwable, String)}, * {@link #onStart()}, and {@link #onFinish()} methods as required. 
* <p> * For example: * <p> * <pre> * AsyncHttpClient client = new AsyncHttpClient(); * client.get("http://www.google.com", new AsyncHttpResponseHandler() { * &#064;Override * public void onStart() { * // Initiated the request * } * * &#064;Override * public void onSuccess(String response) { * // Successfully got a response * } * * &#064;Override * public void onFailure(Throwable e, String response) { * // Response failed :( * } * * &#064;Override * public void onFinish() { * // Completed the request (either success or failure) * } * }); * </pre> */ public class AsyncHttpResponseHandler { private static final int SUCCESS_MESSAGE = 0; private static final int FAILURE_MESSAGE = 1; private static final int START_MESSAGE = 2; private static final int FINISH_MESSAGE = 3; private Handler handler; /** * Creates a new AsyncHttpResponseHandler */ public AsyncHttpResponseHandler() { // Set up a handler to post events back to the correct thread if possible if(Looper.myLooper() != null) { handler = new Handler(){ public void handleMessage(Message msg){ AsyncHttpResponseHandler.this.handleMessage(msg); } }; } } // // Callbacks to be overridden, typically anonymously // /** * Fired when the request is started, override to handle in your own code */ public void onStart() {} /** * Fired in all cases when the request is finished, after both success and failure, override to handle in your own code */ public void onFinish() {} /** * Fired when a request returns successfully, override to handle in your own code * @param content the body of the HTTP response from the server */ public void onSuccess(String content) {} /** * Fired when a request fails to complete, override to handle in your own code * @param error the underlying cause of the failure * @deprecated use {@link #onFailure(Throwable, String)} */ public void onFailure(Throwable error) {} /** * Fired when a request fails to complete, override to handle in your own code * @param error the underlying cause of the failure * @param 
content the response body, if any */ public void onFailure(Throwable error, String content) { // By default, call the deprecated onFailure(Throwable) for compatibility onFailure(error); } // // Pre-processing of messages (executes in background threadpool thread) // protected void sendSuccessMessage(String responseBody) { sendMessage(obtainMessage(SUCCESS_MESSAGE, responseBody)); } protected void sendFailureMessage(Throwable e, String responseBody) { sendMessage(obtainMessage(FAILURE_MESSAGE, new Object[]{e, responseBody})); } protected void sendStartMessage() { sendMessage(obtainMessage(START_MESSAGE, null)); } protected void sendFinishMessage() { sendMessage(obtainMessage(FINISH_MESSAGE, null)); } // // Pre-processing of messages (in original calling thread, typically the UI thread) // protected void handleSuccessMessage(String responseBody) { onSuccess(responseBody); } protected void handleFailureMessage(Throwable e, String responseBody) { onFailure(e, responseBody); } // Methods which emulate android's Handler and Message methods protected void handleMessage(Message msg) { switch(msg.what) { case SUCCESS_MESSAGE: handleSuccessMessage((String)msg.obj); break; case FAILURE_MESSAGE: Object[] repsonse = (Object[])msg.obj; handleFailureMessage((Throwable)repsonse[0], (String)repsonse[1]); break; case START_MESSAGE: onStart(); break; case FINISH_MESSAGE: onFinish(); break; } } protected void sendMessage(Message msg) { if(handler != null){ handler.sendMessage(msg); } else { handleMessage(msg); } } protected Message obtainMessage(int responseMessage, Object response) { Message msg = null; if(handler != null){ msg = this.handler.obtainMessage(responseMessage, response); }else{ msg = new Message(); msg.what = responseMessage; msg.obj = response; } return msg; } // Interface to AsyncHttpRequest void sendResponseMessage(HttpResponse response) { StatusLine status = response.getStatusLine(); String responseBody = null; try { HttpEntity entity = null; HttpEntity temp = 
response.getEntity(); if(temp != null) { entity = new BufferedHttpEntity(temp); responseBody = EntityUtils.toString(entity, "UTF-8"); } } catch(IOException e) { sendFailureMessage(e, null); } if(status.getStatusCode() >= 300) { sendFailureMessage(new HttpResponseException(status.getStatusCode(), status.getReasonPhrase()), responseBody); } else { sendSuccessMessage(responseBody); } } }
/*
 * Copyright 2015 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.physical_web.collection;

import org.apache.commons.codec.binary.Base64;

import org.json.JSONArray;
import org.json.JSONObject;

import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Collection of Physical Web URL devices and related metadata.
 *
 * <p>Holds discovered {@link UrlDevice}s keyed by device ID, resolved
 * {@link PwsResult} metadata keyed by broadcast URL, and downloaded icons
 * keyed by icon URL.  Also tracks in-flight and failed PWS requests so the
 * same URL is not fetched repeatedly.
 */
public class PhysicalWebCollection {
  private static final int SCHEMA_VERSION = 1;
  private static final String SCHEMA_VERSION_KEY = "schema";
  private static final String DEVICES_KEY = "devices";
  private static final String METADATA_KEY = "metadata";
  private static final String ICON_MAP_KEY = "iconmap";
  private PwsClient mPwsClient;
  // Device ID -> device.
  private Map<String, UrlDevice> mDeviceIdToUrlDeviceMap;
  // Broadcast URL -> resolved PWS metadata.
  private Map<String, PwsResult> mBroadcastUrlToPwsResultMap;
  // Icon URL -> raw icon bytes.
  private Map<String, byte[]> mIconUrlToIconMap;
  // URLs with a resolve request currently in flight.
  private Set<String> mPendingBroadcastUrls;
  // Icon URLs with a download currently in flight.
  private Set<String> mPendingIconUrls;
  // URLs the PWS reported as absent; never retried.
  private Set<String> mFailedResolveUrls;

  /**
   * Construct a PhysicalWebCollection.
   */
  public PhysicalWebCollection() {
    mPwsClient = new PwsClient();
    mDeviceIdToUrlDeviceMap = new HashMap<>();
    mBroadcastUrlToPwsResultMap = new HashMap<>();
    mIconUrlToIconMap = new HashMap<>();
    mPendingBroadcastUrls = new HashSet<>();
    mPendingIconUrls = new HashSet<>();
    mFailedResolveUrls = new HashSet<>();
  }

  /**
   * Add a UrlDevice to the collection.
   * @param urlDevice The UrlDevice to add.
   * @return true if the device already existed in the map
   */
  public boolean addUrlDevice(UrlDevice urlDevice) {
    boolean alreadyFound = mDeviceIdToUrlDeviceMap.containsKey(urlDevice.getId());
    mDeviceIdToUrlDeviceMap.put(urlDevice.getId(), urlDevice);
    return alreadyFound;
  }

  /**
   * Remove a UrlDevice from the collection.
   * @param urlDevice The UrlDevice to remove.
   */
  public void removeUrlDevice(UrlDevice urlDevice) {
    mDeviceIdToUrlDeviceMap.remove(urlDevice.getId());
  }

  /**
   * Add URL metadata to the collection.
   * @param pwsResult The PwsResult to add.
   */
  public void addMetadata(PwsResult pwsResult) {
    mBroadcastUrlToPwsResultMap.put(pwsResult.getRequestUrl(), pwsResult);
  }

  /**
   * Add an Icon to the collection.
   * @param url The url of the icon.
   * @param icon The bitmap of the icon.
   */
  public void addIcon(String url, byte[] icon) {
    mIconUrlToIconMap.put(url, icon);
  }

  /**
   * Clear results and devices.
   */
  public void clear() {
    mDeviceIdToUrlDeviceMap.clear();
    mBroadcastUrlToPwsResultMap.clear();
    mIconUrlToIconMap.clear();
    mPendingBroadcastUrls.clear();
    mPendingIconUrls.clear();
    mFailedResolveUrls.clear();
  }

  /**
   * Get an Icon from the collection.
   * @param url The url of the icon.
   * @return The associated icon.  This will be null if there is no icon.
   */
  public byte[] getIcon(String url) {
    return mIconUrlToIconMap.get(url);
  }

  /**
   * Fetches a UrlDevice by its ID.
   * @param id The ID of the UrlDevice.
   * @return the UrlDevice with the given ID.
   */
  public UrlDevice getUrlDeviceById(String id) {
    return mDeviceIdToUrlDeviceMap.get(id);
  }

  /**
   * Fetches cached URL metadata using the URL broadcasted by the Physical Web device.
   * @param broadcastUrl The URL broadcasted by the device.
   * @return Cached metadata relevant to the given URL.
   */
  public PwsResult getMetadataByBroadcastUrl(String broadcastUrl) {
    return mBroadcastUrlToPwsResultMap.get(broadcastUrl);
  }

  /**
   * Gets all UrlDevices stored in the collection.
   * @return List of UrlDevices
   */
  public List<UrlDevice> getUrlDevices() {
    // Fixed: the original used raw types -- `(List) new ArrayList(...)` --
    // which defeats generics and produces unchecked warnings.  A properly
    // parameterized copy is equivalent and type-safe.
    return new ArrayList<>(mDeviceIdToUrlDeviceMap.values());
  }

  /**
   * Create a JSON object that represents this data structure.
   * @return a JSON serialization of this data structure.
   */
  public JSONObject jsonSerialize() {
    JSONObject jsonObject = new JSONObject();

    // Serialize the UrlDevices
    JSONArray urlDevices = new JSONArray();
    for (UrlDevice urlDevice : mDeviceIdToUrlDeviceMap.values()) {
      urlDevices.put(urlDevice.jsonSerialize());
    }
    jsonObject.put(DEVICES_KEY, urlDevices);

    // Serialize the URL metadata
    JSONArray metadata = new JSONArray();
    for (PwsResult pwsResult : mBroadcastUrlToPwsResultMap.values()) {
      metadata.put(pwsResult.jsonSerialize());
    }
    jsonObject.put(METADATA_KEY, metadata);

    // Serialize the icons as base64 strings keyed by icon URL.
    JSONObject iconMap = new JSONObject();
    for (String iconUrl : mIconUrlToIconMap.keySet()) {
      iconMap.put(iconUrl,
                  new String(Base64.encodeBase64(getIcon(iconUrl)), Charset.forName("UTF-8")));
    }
    jsonObject.put(ICON_MAP_KEY, iconMap);

    jsonObject.put(SCHEMA_VERSION_KEY, SCHEMA_VERSION);
    return jsonObject;
  }

  /**
   * Populate this data structure with UrlDevices represented by a given JSON object.
   * @param jsonObject a serialized PhysicalWebCollection.
   * @return The PhysicalWebCollection represented by the serialized object.
   * @throws PhysicalWebCollectionException on invalid or unrecognized input
   */
  public static PhysicalWebCollection jsonDeserialize(JSONObject jsonObject)
      throws PhysicalWebCollectionException {
    // Check the schema version
    int schemaVersion = jsonObject.getInt(SCHEMA_VERSION_KEY);
    if (schemaVersion > SCHEMA_VERSION) {
      throw new PhysicalWebCollectionException(
          "Cannot handle schema version " + schemaVersion + ".  "
          + "This library only knows of schema version " + SCHEMA_VERSION);
    }
    PhysicalWebCollection collection = new PhysicalWebCollection();

    // Deserialize the UrlDevices
    JSONArray urlDevices = jsonObject.getJSONArray(DEVICES_KEY);
    for (int i = 0; i < urlDevices.length(); i++) {
      JSONObject urlDeviceJson = urlDevices.getJSONObject(i);
      UrlDevice urlDevice = UrlDevice.jsonDeserialize(urlDeviceJson);
      collection.addUrlDevice(urlDevice);
    }

    // Deserialize the URL metadata
    JSONArray metadata = jsonObject.getJSONArray(METADATA_KEY);
    for (int i = 0; i < metadata.length(); i++) {
      JSONObject pwsResultJson = metadata.getJSONObject(i);
      PwsResult pwsResult = PwsResult.jsonDeserialize(pwsResultJson);
      collection.addMetadata(pwsResult);
    }

    // Deserialize the icons from their base64 representation.
    JSONObject iconMap = jsonObject.getJSONObject(ICON_MAP_KEY);
    for (Iterator<String> iconUrls = iconMap.keys(); iconUrls.hasNext();) {
      String iconUrl = iconUrls.next();
      collection.addIcon(iconUrl, Base64.decodeBase64(
          iconMap.getString(iconUrl).getBytes(Charset.forName("UTF-8"))));
    }

    return collection;
  }

  /**
   * Return a list of PwPairs sorted by rank in descending order.
   * These PwPairs will be deduplicated by siteUrls (favoring the PwPair with
   * the highest rank).
   * @param comparator to sort pairs by
   * @return a sorted list of PwPairs.
   */
  public List<PwPair> getPwPairsSortedByRank(Comparator<PwPair> comparator) {
    // Get all valid PwPairs.
    List<PwPair> allPwPairs = getPwPairs();

    // Sort the list in descending order.
    Collections.sort(allPwPairs, comparator);

    // Filter the list.
    return removeDuplicateSiteUrls(allPwPairs);
  }

  /**
   * Return a list of PwPairs sorted by rank in descending order, including only the top-ranked
   * pair from each group.
   * @param comparator to sort pairs by
   * @return a sorted list of PwPairs.
   */
  public List<PwPair> getGroupedPwPairsSortedByRank(Comparator<PwPair> comparator) {
    // Get all valid PwPairs.
    List<PwPair> allPwPairs = getPwPairs();

    // Group pairs with the same groupId, keeping only the top-ranked PwPair.
    List<PwPair> groupedPwPairs = removeDuplicateGroupIds(allPwPairs, null, comparator);

    // Sort by descending rank.
    Collections.sort(groupedPwPairs, comparator);

    // Remove duplicate site URLs.
    return removeDuplicateSiteUrls(groupedPwPairs);
  }

  /**
   * Return a list of all pairs of valid URL devices and corresponding URL metadata.
   * @return list of PwPairs.
   */
  public List<PwPair> getPwPairs() {
    List<PwPair> allPwPairs = new ArrayList<>();
    for (UrlDevice urlDevice : mDeviceIdToUrlDeviceMap.values()) {
      PwsResult pwsResult = mBroadcastUrlToPwsResultMap.get(urlDevice.getUrl());
      if (pwsResult != null) {
        allPwPairs.add(new PwPair(urlDevice, pwsResult));
      }
    }
    return allPwPairs;
  }

  /**
   * Return the top-ranked PwPair for a given group ID.
   * @param groupId The group ID to look up.
   * @param comparator to sort pairs by
   * @return a PwPair, or null if no pair belongs to the group.
   */
  public PwPair getTopRankedPwPairByGroupId(String groupId, Comparator<PwPair> comparator) {
    // getGroupedPwPairsSortedByRank keeps only the best pair per group, so
    // the first match is the group's top-ranked pair.
    for (PwPair pwPair : getGroupedPwPairsSortedByRank(comparator)) {
      if (pwPair.getPwsResult().getGroupId().equals(groupId)) {
        return pwPair;
      }
    }
    return null;
  }

  /**
   * If a site URL appears multiple times in the pairs list, keep only the first example.
   * @param allPwPairs input PwPairs list.
   * @return filtered PwPairs list with all duplicated site URLs removed.
   */
  private static List<PwPair> removeDuplicateSiteUrls(List<PwPair> allPwPairs) {
    List<PwPair> filteredPwPairs = new ArrayList<>();
    Set<String> siteUrls = new HashSet<>();
    for (PwPair pwPair : allPwPairs) {
      String siteUrl = pwPair.getPwsResult().getSiteUrl();
      if (!siteUrls.contains(siteUrl)) {
        siteUrls.add(siteUrl);
        filteredPwPairs.add(pwPair);
      }
    }
    return filteredPwPairs;
  }

  /**
   * Given a list of PwPairs, return a filtered list such that only one PwPair from each group
   * is included.
   * @param allPairs Input PwPairs list.
   * @param outGroupMap Optional output map from discovered group IDs to UrlGroups, may be null.
   * @param comparator to rank pairs within a group.
   * @return Filtered PwPairs list.
   */
  private static List<PwPair> removeDuplicateGroupIds(List<PwPair> allPairs,
      Map<String, UrlGroup> outGroupMap, Comparator<PwPair> comparator) {
    List<PwPair> filteredPairs = new ArrayList<>();
    Map<String, UrlGroup> groupMap = outGroupMap;
    if (groupMap == null) {
      groupMap = new HashMap<>();
    } else {
      groupMap.clear();
    }

    for (PwPair pwPair : allPairs) {
      PwsResult pwsResult = pwPair.getPwsResult();
      String groupId = pwsResult.getGroupId();
      if (groupId == null || groupId.equals("")) {
        // Pairs without a group are always included
        filteredPairs.add(pwPair);
      } else {
        // Create the group if it doesn't exist
        UrlGroup urlGroup = groupMap.get(groupId);
        if (urlGroup == null) {
          urlGroup = new UrlGroup(groupId);
          groupMap.put(groupId, urlGroup);
        }
        urlGroup.addPair(pwPair);
      }
    }

    for (UrlGroup urlGroup : groupMap.values()) {
      filteredPairs.add(urlGroup.getTopPair(comparator));
    }

    return filteredPairs;
  }

  /**
   * Set the URL and the API version for making PWS requests.
   * @param pwsEndpoint The new PWS endpoint.
   * @param pwsApiVersion The new PWS API version.
   */
  public void setPwsEndpoint(String pwsEndpoint, int pwsApiVersion) {
    mPwsClient.setEndpoint(pwsEndpoint, pwsApiVersion);
  }

  /**
   * Set the URL, the API version, and the API key for making PWS requests.
   * @param pwsEndpoint The new PWS endpoint.
   * @param pwsApiVersion The new PWS API version.
   * @param pwsApiKey The new PWS API key.
   */
  public void setPwsEndpoint(String pwsEndpoint, int pwsApiVersion, String pwsApiKey) {
    mPwsClient.setEndpoint(pwsEndpoint, pwsApiVersion, pwsApiKey);
  }

  /**
   * Icon callback wrapper that records the download outcome in this
   * collection (pending-set bookkeeping and icon caching) before delegating
   * to the caller-supplied callback.
   */
  private class AugmentedPwsResultIconCallback extends PwsResultIconCallback {
    private String mUrl;
    private PwsResultIconCallback mCallback;

    AugmentedPwsResultIconCallback(String url, PwsResultIconCallback callback) {
      mUrl = url;
      mCallback = callback;
    }

    @Override
    public void onIcon(byte[] icon) {
      mPendingIconUrls.remove(mUrl);
      addIcon(mUrl, icon);
      mCallback.onIcon(icon);
    }

    @Override
    public void onError(int httpResponseCode, Exception e) {
      mPendingIconUrls.remove(mUrl);
      mCallback.onError(httpResponseCode, e);
    }
  }

  /**
   * Triggers an HTTP request to be made to the PWS.
   * This method fetches results from the PWS for all broadcast URLs,
   * depending on the supplied parameters.
   * @param pwsResultCallback The callback to run when we get an HTTPResponse.
   * If this value is null, we will not fetch the PwsResults, only icons.
   * @param pwsResultIconCallback The callback to run when we get a favicon.
   * If this value is null, we will not fetch the icons.
   */
  public void fetchPwsResults(final PwsResultCallback pwsResultCallback,
      final PwsResultIconCallback pwsResultIconCallback) {
    // Get new URLs to fetch: unresolved broadcast URLs that are neither
    // in flight nor known failures, plus icon URLs for already-resolved
    // results whose icons we don't have yet.
    Set<String> newResolveUrls = new HashSet<>();
    Set<String> newIconUrls = new HashSet<>();
    for (UrlDevice urlDevice : mDeviceIdToUrlDeviceMap.values()) {
      String url = urlDevice.getUrl();
      if (!mPendingBroadcastUrls.contains(url) && !mFailedResolveUrls.contains(url)) {
        PwsResult pwsResult = mBroadcastUrlToPwsResultMap.get(url);
        if (pwsResult == null) {
          newResolveUrls.add(url);
          mPendingBroadcastUrls.add(url);
        } else if (pwsResult.hasIconUrl()
            && !mPendingIconUrls.contains(pwsResult.getIconUrl())
            && !mIconUrlToIconMap.containsKey(pwsResult.getIconUrl())) {
          newIconUrls.add(pwsResult.getIconUrl());
          mPendingIconUrls.add(pwsResult.getIconUrl());
        }
      }
    }

    // Make the resolve request.
    final Set<String> finalResolveUrls = newResolveUrls;
    PwsResultCallback augmentedCallback = new PwsResultCallback() {
      @Override
      public void onPwsResult(PwsResult pwsResult) {
        addMetadata(pwsResult);
        // Fixed: only download an icon when the result actually has an icon
        // URL.  The original called downloadIcon unconditionally, attempting
        // a fetch with a missing icon URL for icon-less results; the scan
        // loop above already guards with hasIconUrl(), so this makes the two
        // paths consistent.
        if (pwsResultIconCallback != null && pwsResult.hasIconUrl()) {
          PwsResultIconCallback augmentedIconCallback =
              new AugmentedPwsResultIconCallback(pwsResult.getIconUrl(), pwsResultIconCallback);
          mPwsClient.downloadIcon(pwsResult.getIconUrl(), augmentedIconCallback);
        }
        pwsResultCallback.onPwsResult(pwsResult);
      }

      @Override
      public void onPwsResultAbsent(String url) {
        // Remember the miss so we never re-request this URL.
        mFailedResolveUrls.add(url);
        pwsResultCallback.onPwsResultAbsent(url);
      }

      @Override
      public void onPwsResultError(Collection<String> urls, int httpResponseCode, Exception e) {
        pwsResultCallback.onPwsResultError(urls, httpResponseCode, e);
      }

      @Override
      public void onResponseReceived(long durationMillis) {
        // The whole batch is done (success or error): clear the pending set.
        for (String url : finalResolveUrls) {
          mPendingBroadcastUrls.remove(url);
        }
        pwsResultCallback.onResponseReceived(durationMillis);
      }
    };
    if (pwsResultCallback != null && newResolveUrls.size() > 0) {
      mPwsClient.resolve(newResolveUrls, augmentedCallback);
    }

    // Make the icon requests.
    if (pwsResultIconCallback != null) {
      for (final String iconUrl : newIconUrls) {
        PwsResultIconCallback augmentedIconCallback =
            new AugmentedPwsResultIconCallback(iconUrl, pwsResultIconCallback);
        mPwsClient.downloadIcon(iconUrl, augmentedIconCallback);
      }
    }
  }

  /**
   * Cancel all current HTTP requests.
   */
  public void cancelAllRequests() {
    mPwsClient.cancelAllRequests();
  }
}
/*
Copyright 2015 Tom Roida

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
 */
package de.roida.app.WaterproofQuickcheck;

import android.app.AlertDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.SeekBar;
import android.widget.TextView;
import com.androidplot.util.PlotStatistics;
import com.androidplot.xy.*;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.RatingBar;
import android.widget.Toast;

/**
 * Waterproofing quick-check screen.
 *
 * <p>The user squeezes the device while dragging a SeekBar; the barometric
 * pressure sensor registers a pressure peak inside a sealed device.  After
 * the press is released, the measured pressure-decay curve is compared
 * against an "ideal" decay curve; the accumulated deviation produces a
 * star rating and an OK / not-OK verdict.  A live plot of raw-vs-filtered
 * pressure can be toggled from the menu.
 */
public class DetailsActivity extends AppCompatActivity implements SensorEventListener {

    SharedPreferences SP;                       // Preferences from "Settings" activity
    private double pressureDeviationMax;        // max pressure deviation allowed from ideal curve. Area: time * delta pressure
    private long slideMinTime;                  // how long must the user press the slider at least? (in milliseconds)
    private static final int HISTORY_SIZE = 30; // number of points to plot in history
    private long pressureDropFactor = 0;        // adapt the "ideal" curve - this constant is different per device type, maybe even per individual device
    private long MeasureTime;                   // time of measurement after low peak detection in millisec
    private static final float maxValueThreshold = 0.15f; // when pressing, this value has to be exceeded, otherwise we do not even start to evaluate...
    private SensorManager sensorMgr = null;
    private Sensor orSensor = null;             // the pressure sensor (null if the device has none)
    private TextView TextViewResult, TextViewRating;
    private String ResultText = "";             // running log shown in the details TextView
    private RatingBar ratingBarTom;
    private XYPlot aprHistoryPlot = null;
    private LineAndPointFormatter aprFormatter, idealFormatter;
    private SimpleXYSeries pressureHistorySeries = null; // measured delta-pressure samples
    private SimpleXYSeries pressureIdealSeries = null;   // computed ideal decay curve
    private SeekBar swiperTom;                  // The seekBar to be pressed to increase the pressure inside the device
    private float rawValue = 0;                 // latest raw sensor reading (mbar)
    private float maxValue = 0;                 // while pressing, detect the max pressure value building up
    private float minValue = 0;                 // after pressing, detect the min pressure in the device
    private long minValueTimestamp, progressTimestamp; // ...and the timestamp in nanoseconds. (Divide by 1000 000 000 to get seconds.)
    private boolean minValueDetect = false;     // currently no detection of min peak in progress
    private int risingValuesCounter = 0;        // >0 while the post-peak rising curve is being evaluated
    private double pressureDeviation = 0;       // the area of the plannedPressureCurve - actualPressureCurve
    private double onSensorChangedLastTimestamp = 0; // nanotime of the previous sensor event
    private long SysNanoTimeTarget = 0;         // nanotime at which the evaluation window ends
    private boolean currentlyOnDetails = false; // has the user switched to the "Details" screen?
    private float timeConstant;                 // Low pass filter constant (in seconds)
    private float alpha = 0.0f;                 // current low-pass mixing coefficient
    private long timestamp = System.nanoTime();    // time stamps for low pass filter
    private long timestampOld = System.nanoTime(); // time of measurement after low peak detection in millisec
    private float filteredSensorValue = 1000;   // initial setup of filter output (in mbar)
    private int count = 1;                      // sample counter used in the low-pass dt estimate
    private Handler handler = new Handler();    // handler to update the UI
    TextView TextViewHead;

    /** ----------------------------------------------------------------------------------------- */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Called when the activity is first created
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_details);

        // load preferences
        SP = PreferenceManager.getDefaultSharedPreferences(getBaseContext());
        PreferenceManager.setDefaultValues(this, R.xml.pref_general, false);
        reloadPrefs();

        // setup the plot: one series for measured samples, one for the ideal curve
        aprHistoryPlot = (XYPlot) findViewById(R.id.plot);
        pressureHistorySeries = new SimpleXYSeries("Measured Pressure");
        pressureHistorySeries.useImplicitXVals();
        pressureIdealSeries = new SimpleXYSeries("Ideal pressure");
        pressureIdealSeries.useImplicitXVals();
        aprHistoryPlot.setRangeBoundaries(-1, 1, BoundaryMode.FIXED);
        aprHistoryPlot.setDomainBoundaries(0, 30, BoundaryMode.AUTO);
        aprFormatter = new LineAndPointFormatter(Color.rgb(100, 100, 200), Color.BLACK, null, null); //line, point, fill
        idealFormatter = new LineAndPointFormatter(Color.rgb(200, 80, 80), Color.RED, null, null);
        aprHistoryPlot.addSeries(pressureHistorySeries, aprFormatter);
        aprHistoryPlot.addSeries(pressureIdealSeries, idealFormatter);
        aprHistoryPlot.setDomainStepValue(5);
        aprHistoryPlot.setTicksPerRangeLabel(3);
        aprHistoryPlot.setDomainLabel("Sample Index");
        aprHistoryPlot.getDomainLabelWidget().pack();
        aprHistoryPlot.setRangeLabel("Pressure delta (mbar)");
        aprHistoryPlot.getRangeLabelWidget().pack();

        TextViewResult = (TextView) findViewById(R.id.textViewResult);
        TextViewResult.setMovementMethod(new ScrollingMovementMethod());
        TextViewRating = (TextView) findViewById(R.id.textViewRating);
        swiperTom = (SeekBar) findViewById(R.id.swiper);
        ratingBarTom = (RatingBar) findViewById(R.id.ratingBar);
        final PlotStatistics histStats = new PlotStatistics(1000, false);
        aprHistoryPlot.addListener(histStats);
        swiperTom.setEnabled(true);
        ratingBarTom.setEnabled(false);
        // Plot and detail log stay hidden until the user opens "Details".
        aprHistoryPlot.setVisibility(View.INVISIBLE);
        TextViewResult.setVisibility(View.INVISIBLE);
        TextViewHead = (TextView) findViewById(R.id.textViewHead);

        // register for orientation sensor events:
        sensorMgr = (SensorManager) getApplicationContext().getSystemService(Context.SENSOR_SERVICE);
        for (Sensor sensor : sensorMgr.getSensorList(Sensor.TYPE_PRESSURE)) {
            if (sensor.getType() == Sensor.TYPE_PRESSURE) {
                orSensor = sensor;
            }
        }

        // if we can't access the sensor then exit:
        if (orSensor == null) {
            System.out.println("Failed to attach to Sensor.");
            TextViewResult.setEnabled(true);
            TextViewResult.setVisibility(View.VISIBLE);
            TextViewResult.setText("ERROR: No pressure sensor detected! This app will not work on this device!");
        } else
            sensorMgr.registerListener(this, orSensor, SensorManager.SENSOR_DELAY_UI); //SENSOR_DELAY_NORMAL

        // Now set the SeekBar: the drag speed is throttled so the user must
        // press for at least slideMinTime before reaching 100%.
        swiperTom.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
            int progress = 0;

            @Override
            public void onProgressChanged(SeekBar swiperTom, int progressValue, boolean fromUser) {
                progress = progressValue;
                if (fromUser) {
                    // Track the highest pressure delta seen while pressing.
                    if (maxValue < rawValue - filteredSensorValue) {
                        maxValue = rawValue - filteredSensorValue;
                    }
                }
                double slideTimeElapsed = (System.nanoTime() - progressTimestamp) / 1000000; //in millisec
                double tw1 = slideTimeElapsed / slideMinTime * 100;
                if ((progressValue > (tw1))) { //slow down
                    swiperTom.setProgress((int) (tw1 + 0.5)); //set progress to max allowed value for this moment in time
                }
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
                progressTimestamp = System.nanoTime();
                ratingBarTom.setRating(0);
                ratingBarTom.setEnabled(false);
                // Only start measuring when the current reading is close to the
                // filtered baseline, i.e. no stray pressure event in progress.
                if ((filteredSensorValue - rawValue < 0.5) && (filteredSensorValue - rawValue > -0.5)) {
                    ResultText += "Measuring..." + "\n";
                    TextViewResult.setText(ResultText);
                    TextViewRating.setText("Measuring...");
                    maxValue = 0;
                } else {
                    ResultText += "Error: pressure out of expected range +/-0.5" + "\n";
                    TextViewResult.setText(ResultText);
                }
            }

            @Override
            public void onStopTrackingTouch(SeekBar swiperTom) {
                if (swiperTom.getProgress() > 90) {
                    ratingBarTom.setEnabled(true);
                    if (maxValue > maxValueThreshold) {
                        // A real pressure peak built up: hand over to the
                        // low-peak detection in onSensorChanged.
                        ResultText += "done: Peak was " + Float.toString(maxValue) + " good" + "\n";
                        TextViewResult.setText(ResultText);
                        minValue = maxValue; // now we start the detection of the low peak!
                        minValueDetect = true;
                        swiperTom.setEnabled(false);
                        ratingBarTom.setRating(0.5f);
                    } else {
                        ResultText += "done: Peak was " + Float.toString(maxValue) + " bad" + "\n";
                        TextViewResult.setText(ResultText);
                        TextViewRating.setText("No pressure peak could be detected. Do NOT expose device to water!");
                    }
                } else {
                    ResultText += "Aborted: you did not swipe long enough!" + "\n";
                    TextViewResult.setText(ResultText);
                    TextViewRating.setText("");
                }
                swiperTom.setProgress(0);
            }
        });
    }

    // Called whenever a new orSensor reading is taken.
    @Override
    public synchronized void onSensorChanged(SensorEvent sensorEvent) {
        double ideal = 0;
        long SysNanoTimeNow = System.nanoTime();
        double tneu = (SysNanoTimeNow - minValueTimestamp) / 1000000;                  // in milliseconds since minValue
        double deltaT = (SysNanoTimeNow - onSensorChangedLastTimestamp) / 1000000;     // in milliseconds since last onSensorChanged event;
        onSensorChangedLastTimestamp = SysNanoTimeNow;
        rawValue = sensorEvent.values[0];
        double pressureDeviationMinus = 0; // positive value means: pressure is adapting slower than expected - good!

        // get rid the oldest sample in history:
        if (pressureHistorySeries.size() > HISTORY_SIZE) {
            pressureHistorySeries.removeFirst();
        }
        if (pressureIdealSeries.size() > HISTORY_SIZE) {
            pressureIdealSeries.removeFirst();
        }

        int sw1 = swiperTom.getProgress();
        if ((sw1 == 0) && !minValueDetect && (risingValuesCounter == 0)) //no peak detection and no measuring in progress
            lowPass(rawValue); // writes the filtered low-pass-value into "filteredSensorValue"

        // add the latest history sample:
        pressureHistorySeries.addLast(null, rawValue - filteredSensorValue);
        pressureIdealSeries.addLast(null, 9999); // do not draw because out of range

        // if the low peak detection is in progress, detect it!
        if (risingValuesCounter > 0) {
            risingValuesCounter++;
        } else if (minValueDetect) {
            if ((rawValue - filteredSensorValue) < minValue) {
                minValue = rawValue - filteredSensorValue; // there is a lower value
                minValueTimestamp = SysNanoTimeNow;
                SysNanoTimeTarget = SysNanoTimeNow + MeasureTime * 1000000;
            } else { // take the last value as low peak! Now start the rising pressure curve analysis!
                // look for something like this:
                //                          *  *  *  *
                //                    *
                //                *
                //              *
                //            *
                //           *
                //          *
                ratingBarTom.setRating(1);
                // now collect the values in an array until leveling
                risingValuesCounter = 1;
                pressureIdealSeries.removeLast();
                pressureIdealSeries.removeLast();
                pressureIdealSeries.addLast(null, minValue);
                ideal = -1 / ((tneu * tneu / pressureDropFactor + 1 / (-minValue)));
                pressureIdealSeries.addLast(null, ideal);
                pressureDeviation = 0;
                pressureDeviation = Math.abs((ideal - (rawValue - filteredSensorValue)) * deltaT / 1000); // = ((ideal-actual)* Delta T), absolute
                pressureDeviationMinus = ((ideal - (rawValue - filteredSensorValue)) * deltaT / 1000);    // = ((ideal-actual)* Delta T)
            }
        }

        //evaluate and reset if full
        if (risingValuesCounter > 1) {
            ideal = -1 / ((tneu * tneu / pressureDropFactor + 1 / (-minValue)));
            pressureIdealSeries.removeLast();
            pressureIdealSeries.addLast(null, ideal);
            pressureDeviation = pressureDeviation + Math.abs((ideal - (rawValue - filteredSensorValue)) * deltaT / 1000); // = ((ideal-actual)* Delta T), absolute
            pressureDeviationMinus = pressureDeviationMinus + ((ideal - (rawValue - filteredSensorValue)) * deltaT / 1000); // = ((ideal-actual)* Delta T)
            if (SysNanoTimeNow > SysNanoTimeTarget) { //final evaluation + reset if full
                // final evaluation:
                // star #1: min peak could be detected
                // star #2: no significant deviation from ideal curve
                float rating = 1 + Math.round((float) pressureDeviationMax / (float) pressureDeviation);
                ResultText += "Rating: " + Float.toString(rating) + "\n";
                TextViewResult.setText(ResultText);
                ratingBarTom.setRating(rating);
                ResultText += "Measured deviation of pressure over time vs. expected curve: " + Double.toString(pressureDeviation) + "\n";
                TextViewResult.setText(ResultText);
                if (rating > 1.7) { //OK!
                    ResultText += "OK - Device looks to be sealed" + "\n";
                    TextViewResult.setText(ResultText);
                    TextViewRating.setText("OK - Device looks to be sealed!");
                } else { //not OK!
                    ResultText += "Not OK - Device looks to be NOT sealed. Do not expose to water!" + "\n";
                    TextViewResult.setText(ResultText);
                    TextViewRating.setText("Not OK - Device looks NOT to be sealed. Do not expose to water!");
                }
                if ((rating < 1.5) && (pressureDeviationMinus > 0)) {
                    Toast.makeText(getApplicationContext(), "Try less pressure!", Toast.LENGTH_LONG).show();
                    ResultText += "Exceeding the expected curve. Try less pressure!" + "\n";
                    TextViewResult.setText(ResultText);
                }
                // System.out.println("pressureDeviationMinus: " + Double.toString(pressureDeviationMinus)); //TODO remove
                resetMeasurement();// reset measurement
            }
        }
    }

    /**
     * Resets the measurement state machine so a new swipe can start a fresh
     * measurement.  Also re-enables the SeekBar and re-seeds the low-pass
     * filter timestamps.
     */
    public void resetMeasurement() {
        risingValuesCounter = 0;
        minValue = 0;
        maxValue = 0;
        minValueDetect = false;
        pressureDeviation = 0;
        swiperTom.setEnabled(true);
        timestamp = System.nanoTime();
        timestampOld = System.nanoTime();
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int i) {
        // Not interested in this event
    }

    /**
     * Shows the "About" dialog including the app version name read from the
     * package manager.  Wired to a menu item via android:onClick.
     */
    public boolean openAboutDialog(MenuItem item) {
        Context context = getApplicationContext();
        PackageManager packageManager = context.getPackageManager();
        String packageName = context.getPackageName();
        String versionName = "not available"; // initialize String
        try {
            versionName = packageManager.getPackageInfo(packageName, 0).versionName;
        } catch (PackageManager.NameNotFoundException e) {
            e.printStackTrace();
        }
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        String aboutMessage = getResources().getString(R.string.about_message);
        aboutMessage = "Version: " + versionName + "\n" + aboutMessage;
        builder.setMessage(aboutMessage)
                .setTitle(R.string.about_title);
        AlertDialog dialog = builder.create();
        dialog.show();
        return true;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_details, menu);
        return true;
    }

    // Periodic UI refresher for the "Details" view: redraws the plot and the
    // raw/filtered readout, then reschedules itself while the view is open.
    private Runnable runnable = new Runnable() {
        @Override
        public void run() {
            aprHistoryPlot.redraw();// redraw the Plot
            TextViewHead.setText("Pressure sensor - raw value: " + String.format("%.3f", rawValue) + " filtered: " + String.format("%.3f", filteredSensorValue)); //TODO line wrap z3c?
            if (currentlyOnDetails)
                handler.postDelayed(this, 100);
        }
    };

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_detailedView) {
            if (!currentlyOnDetails) {
                // Toggle on: show plot + log, hide disclaimer, start refresher.
                aprHistoryPlot.setVisibility(View.VISIBLE);
                TextViewResult.setVisibility(View.VISIBLE);
                TextView disclaimer = (TextView) findViewById(R.id.disclaimer);
                disclaimer.setEnabled(false);
                disclaimer.setVisibility(View.INVISIBLE);
                currentlyOnDetails = true;
                handler.postDelayed(runnable, 100); //activte the handler with a freq. of 10hz to update graph.
                return true;
            } else {
                // Toggle off: restore the plain view and stop the refresher.
                currentlyOnDetails = false;
                aprHistoryPlot.setVisibility(View.INVISIBLE);
                TextViewResult.setVisibility(View.INVISIBLE);
                TextView disclaimer = (TextView) findViewById(R.id.disclaimer);
                disclaimer.setEnabled(true);
                disclaimer.setVisibility(View.VISIBLE);
                handler.removeCallbacks(runnable); //Stop handler for UI update
                TextViewHead.setText(R.string.headline);
            }
        }
        if (id == R.id.action_settings) {
            Intent intent = new Intent(DetailsActivity.this, SettingsActivity.class);
            startActivity(intent);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    protected void onPause() {
        super.onPause();
        sensorMgr.unregisterListener(this);
        resetMeasurement();// reset measurement
    }

    protected void onResume() {
        super.onResume();
        sensorMgr.registerListener(this, orSensor, SensorManager.SENSOR_DELAY_UI);
        timestampOld = System.nanoTime();
        reloadPrefs();
    }

    /**
     * Single-pole low pass filter; writes the smoothed pressure baseline
     * into {@code filteredSensorValue}.
     * @param input the raw sensor reading to fold into the filter.
     */
    public void lowPass(float input) {
        timestamp = System.nanoTime();
        // Find the sample period (between updates).
        // Convert from nanoseconds to seconds
        float dt = 1 / (count / ((timestamp - timestampOld) / 1000000000.0f));
        // Calculate alpha
        alpha = timeConstant / (timeConstant + dt);
        filteredSensorValue = alpha * filteredSensorValue + (1 - alpha) * input;
        timestampOld = timestamp;
    }

    @Override
    public void onBackPressed() {
        // Back from the details view returns to the plain view; a second
        // back press finishes the activity.
        if (currentlyOnDetails) {
            currentlyOnDetails = false;
            aprHistoryPlot.setVisibility(View.INVISIBLE);
            TextViewResult.setVisibility(View.INVISIBLE);
            TextView disclaimer = (TextView) findViewById(R.id.disclaimer);
            disclaimer.setEnabled(true);
            disclaimer.setVisibility(View.VISIBLE);
        } else
            finish();
    }

    // Re-reads all tunables from SharedPreferences (called from onCreate and
    // onResume so Settings changes take effect immediately).
    private void reloadPrefs() {
        pressureDeviationMax = Float.parseFloat(SP.getString("pref_max_deviation", "9999"));
        slideMinTime = Integer.parseInt(SP.getString("pref_slide_min_time", "9999"));
        MeasureTime = Long.parseLong(SP.getString("pref_measure_time", "9999"));
        timeConstant = Float.parseFloat(SP.getString("pref_time_constant", "9999"));
        pressureDropFactor = Long.parseLong(SP.getString("pref_ideal_pressure_drop", "0"));
        //System.out.println("PrefsLoaded. " + Long.toString(slideMinTime));
    }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.structuralsearch.impl.matcher.handlers;

import com.intellij.dupLocator.iterators.NodeIterator;
import com.intellij.dupLocator.util.NodeFilter;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiRecursiveElementWalkingVisitor;
import com.intellij.structuralsearch.MatchResult;
import com.intellij.structuralsearch.impl.matcher.CompiledPattern;
import com.intellij.structuralsearch.impl.matcher.MatchContext;
import com.intellij.structuralsearch.impl.matcher.MatchResultImpl;
import com.intellij.structuralsearch.impl.matcher.filters.DefaultFilter;
import com.intellij.structuralsearch.impl.matcher.strategies.MatchingStrategy;
import org.jetbrains.annotations.NotNull;

import java.util.HashSet;
import java.util.Set;

/**
 * Root of handlers for pattern node matching. Handles simplest type of the match.
 */
public abstract class MatchingHandler {
  // Optional pre-filter applied to candidate nodes before delegating to DefaultFilter.
  protected NodeFilter filter;
  // When non-null, this handler is "pinned" to a specific element and will only
  // be matched against it (see matchInAnyOrder).
  private PsiElement pinnedElement;

  public void setFilter(NodeFilter filter) {
    this.filter = filter;
  }

  /**
   * Matches given handler node against given value.
   * @param matchedNode for matching
   * @param context of the matching
   * @return true if matching was successful and false otherwise
   */
  public boolean match(PsiElement patternNode, PsiElement matchedNode, MatchContext context) {
    // A null pattern node only matches a null matched node.
    return (patternNode == null) ? matchedNode == null : canMatch(patternNode, matchedNode, context);
  }

  /**
   * Quick structural pre-check: uses the configured {@link NodeFilter} when one is set,
   * otherwise falls back to {@link DefaultFilter} element comparison.
   */
  public boolean canMatch(final PsiElement patternNode, final PsiElement matchedNode, MatchContext context) {
    return (filter != null) ? filter.accepts(matchedNode) : DefaultFilter.accepts(patternNode, matchedNode);
  }

  /**
   * Matches the remaining pattern nodes against the remaining candidate nodes in order.
   * On a failed tail match, both iterators are rewound to the positions captured on
   * entry so the caller can try other alignments.
   *
   * @return true if the whole remaining pattern sequence was matched
   */
  public boolean matchSequentially(NodeIterator patternNodes, NodeIterator matchNodes, MatchContext context) {
    final MatchingStrategy strategy = context.getPattern().getStrategy();
    // Remember current positions for backtracking on failure.
    final PsiElement currentPatternNode = patternNodes.current();
    final PsiElement currentMatchNode = matchNodes.current();
    // Let the strategy skip ignorable nodes (e.g. whitespace-like elements) on both sides.
    skipIfNecessary(matchNodes, currentPatternNode, strategy);
    skipIfNecessary(patternNodes, matchNodes.current(), strategy);

    if (!patternNodes.hasNext()) {
      // Pattern exhausted: succeed only if the match side is exhausted too.
      return !matchNodes.hasNext();
    }

    final PsiElement patternElement = patternNodes.current();
    final MatchingHandler handler = context.getPattern().getHandler(patternElement);
    // Top-level handlers keep comments in play; everywhere else comments are skipped
    // unless the pattern itself is a comment.
    if (!(handler instanceof TopLevelMatchingHandler)) skipComments(matchNodes, currentPatternNode);

    if (matchNodes.hasNext() && handler.match(patternElement, matchNodes.current(), context)) {
      patternNodes.advance();
      skipIfNecessary(patternNodes, matchNodes.current(), strategy);
      if (shouldAdvanceTheMatchFor(patternElement, matchNodes.current())) {
        matchNodes.advance();
        skipIfNecessary(matchNodes, patternNodes.current(), strategy);
        if (patternNodes.hasNext()) skipComments(matchNodes, patternNodes.current());
      }

      if (patternNodes.hasNext()) {
        // Recurse on the next pattern node's handler for the rest of the sequence.
        final MatchingHandler nextHandler = context.getPattern().getHandler(patternNodes.current());
        if (nextHandler.matchSequentially(patternNodes, matchNodes, context)) {
          return true;
        }
        else {
          // Tail failed: restore both iterators to the entry positions.
          patternNodes.rewindTo(currentPatternNode);
          matchNodes.rewindTo(currentMatchNode);
        }
      }
      else {
        // match was found
        return handler.isMatchSequentiallySucceeded(matchNodes);
      }
    }
    return false;
  }

  // Advances past PsiComment candidates, unless the pattern node itself is a comment
  // (in which case comments must remain matchable).
  private static void skipComments(NodeIterator matchNodes, PsiElement patternNode) {
    if (patternNode instanceof PsiComment) return;
    while (matchNodes.current() instanceof PsiComment) matchNodes.advance();
  }

  // Advances the iterator while the strategy says the current node should be skipped
  // relative to the element it is being matched with.
  private static void skipIfNecessary(NodeIterator nodes, PsiElement elementToMatchWith, MatchingStrategy strategy) {
    while (nodes.hasNext() && strategy.shouldSkip(nodes.current(), elementToMatchWith)) {
      nodes.advance();
    }
  }

  /**
   * Called when the pattern sequence is exhausted: the match succeeded only if no
   * non-comment candidate nodes remain.
   */
  protected boolean isMatchSequentiallySucceeded(final NodeIterator matchNodes) {
    skipComments(matchNodes, null);
    return !matchNodes.hasNext();
  }

  /**
   * Visitor that resets the state of all handlers bound to the subtree of a pattern
   * node, so the node can be re-matched against a different candidate.
   */
  static class ClearStateVisitor extends PsiRecursiveElementWalkingVisitor {
    private CompiledPattern pattern;

    ClearStateVisitor() {
      super(true);
    }

    @Override public void visitElement(@NotNull PsiElement element) {
      // We do not reset certain handlers because they are also bound to higher level nodes
      // e.g. Identifier handler in name is also bound to PsiMethod
      if (pattern.isToResetHandler(element)) {
        final MatchingHandler handler = pattern.getHandlerSimple(element);
        if (handler != null) {
          handler.reset();
        }
      }
      super.visitElement(element);
    }

    // synchronized because the visitor instance is shared (see clearingVisitor below)
    // and keeps the pattern in a field for the duration of the walk.
    synchronized void clearState(CompiledPattern _pattern, PsiElement el) {
      pattern = _pattern;
      el.acceptChildren(this);
      pattern = null;
    }
  }

  // Shared visitor instance used to reset handler state between match attempts.
  protected static ClearStateVisitor clearingVisitor = new ClearStateVisitor();

  /**
   * Matches pattern nodes against candidate nodes without requiring a particular order.
   * Each pattern node scans the candidate list from the start, skipping candidates
   * already consumed by earlier pattern nodes; pinned handlers are matched only
   * against their pinned element. Child results accumulated during the attempt are
   * merged back into the previously saved result in the finally block.
   */
  public boolean matchInAnyOrder(NodeIterator patternNodes, NodeIterator matchedNodes, final MatchContext context) {
    // Save and clear the context result so this attempt starts from a clean slate.
    final MatchResultImpl saveResult = context.hasResult() ? context.getResult() : null;
    context.setResult(null);

    try {
      if (patternNodes.hasNext() && !matchedNodes.hasNext()) {
        // No candidates left: succeed only if every remaining handler accepts zero occurrences.
        return validateSatisfactionOfHandlers(patternNodes, context);
      }

      // Lazily created set of candidates already claimed by some pattern node.
      Set<PsiElement> matchedElements = null;

      while (patternNodes.hasNext()) {
        final PsiElement patternNode = patternNodes.current();
        patternNodes.advance();
        final CompiledPattern pattern = context.getPattern();
        final MatchingHandler handler = pattern.getHandler(patternNode);

        // Each pattern node re-scans the candidate list from the beginning.
        matchedNodes.reset();
        boolean allElementsMatched = true;
        int matchedOccurs = 0;

        do {
          final PsiElement pinnedNode = handler.getPinnedNode();
          final PsiElement matchedNode = (pinnedNode != null) ? pinnedNode : matchedNodes.current();
          if (pinnedNode == null) matchedNodes.advance();

          if (matchedElements == null || !matchedElements.contains(matchedNode)) {
            allElementsMatched = false;
            if (handler.match(patternNode, matchedNode, context)) {
              matchedOccurs++;
              if (matchedElements == null) matchedElements = new HashSet<>();
              matchedElements.add(matchedNode);
              if (handler.shouldAdvanceThePatternFor(patternNode, matchedNode)) {
                break;
              }
            }
            else if (pinnedNode != null) {
              // A pinned handler that fails against its pinned element cannot succeed at all.
              return false;
            }

            // clear state of dependent objects
            clearingVisitor.clearState(pattern, patternNode);
          }

          // passed of elements and there is no pattern node begin matched
          if (!matchedNodes.hasNext() || pinnedNode != null) {
            if (!handler.validate(context, matchedOccurs)) return false;
            if (allElementsMatched || !patternNodes.hasNext()) {
              final boolean result = validateSatisfactionOfHandlers(patternNodes, context);
              if (result && matchedElements != null) {
                context.notifyMatchedElements(matchedElements);
              }
              return result;
            }
            break;
          }
        } while(true);

        if (!handler.validate(context, matchedOccurs)) return false;
      }

      final boolean result = validateSatisfactionOfHandlers(patternNodes, context);
      if (result && matchedElements != null) {
        context.notifyMatchedElements(matchedElements);
      }
      return result;
    }
    finally {
      // Merge any children produced by this attempt into the saved result and restore it.
      if (saveResult != null) {
        if (context.hasResult()) {
          for (MatchResult child : context.getResult().getChildren()) {
            saveResult.addChild(child);
          }
        }
        context.setResult(saveResult);
      }
    }
  }

  /**
   * Checks that every remaining (unmatched) pattern node's handler accepts
   * zero occurrences.
   */
  protected static boolean validateSatisfactionOfHandlers(NodeIterator patternNodes, MatchContext context) {
    for (; patternNodes.hasNext(); patternNodes.advance()) {
      if (!context.getPattern().getHandler(patternNodes.current()).validate(context, 0)) {
        return false;
      }
    }
    return true;
  }

  // Default occurrence constraint: exactly one match. Subclasses may override
  // (package-private in the original).
  boolean validate(MatchContext context, int matchedOccurs) {
    return matchedOccurs == 1;
  }

  public NodeFilter getFilter() {
    return filter;
  }

  // Hook: whether the pattern iterator should advance after this element matched.
  public boolean shouldAdvanceThePatternFor(PsiElement patternElement, PsiElement matchedElement) {
    return true;
  }

  // Hook: whether the candidate iterator should advance after this element matched.
  public boolean shouldAdvanceTheMatchFor(PsiElement patternElement, PsiElement matchedElement) {
    return true;
  }

  public void reset() {
    //pinnedElement = null;
  }

  public PsiElement getPinnedNode() {
    return pinnedElement;
  }

  public void setPinnedElement(final PsiElement pinnedElement) {
    this.pinnedElement = pinnedElement;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.lang3.concurrent;

import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.Validate;

/**
 * <p>
 * A specialized <em>semaphore</em> implementation that provides a number of
 * permits in a given time frame.
 * </p>
 * <p>
 * This class is similar to the {@code java.util.concurrent.Semaphore} class
 * provided by the JDK in that it manages a configurable number of permits.
 * Using the {@link #acquire()} method a permit can be requested by a thread.
 * However, there is an additional timing dimension: there is no {@code
 * release()} method for freeing a permit, but all permits are automatically
 * released at the end of a configurable time frame. If a thread calls
 * {@link #acquire()} and the available permits are already exhausted for this
 * time frame, the thread is blocked. When the time frame ends all permits
 * requested so far are restored, and blocking threads are woken up again, so
 * that they can try to acquire a new permit. This basically means that in the
 * specified time frame only the given number of operations is possible.
 * </p>
 * <p>
 * A use case for this class is to artificially limit the load produced by a
 * process. As an example consider an application that issues database queries
 * on a production system in a background process to gather statistical
 * information. This background processing should not produce so much database
 * load that the functionality and the performance of the production system are
 * impacted. Here a {@code TimedSemaphore} could be installed to guarantee that
 * only a given number of database queries are issued per second.
 * </p>
 * <p>
 * A thread class for performing database queries could look as follows:
 * </p>
 *
 * <pre>
 * public class StatisticsThread extends Thread {
 *     // The semaphore for limiting database load.
 *     private final TimedSemaphore semaphore;
 *     // Create an instance and set the semaphore
 *     public StatisticsThread(TimedSemaphore timedSemaphore) {
 *         semaphore = timedSemaphore;
 *     }
 *     // Gather statistics
 *     public void run() {
 *         try {
 *             while(true) {
 *                 semaphore.acquire();   // limit database load
 *                 performQuery();        // issue a query
 *             }
 *         } catch (InterruptedException iex) {
 *             // fall through
 *         }
 *     }
 *     ...
 * }
 * </pre>
 *
 * <p>
 * The following code fragment shows how a {@code TimedSemaphore} is created
 * that allows only 10 operations per second and passed to the statistics
 * thread:
 * </p>
 *
 * <pre>
 * TimedSemaphore sem = new TimedSemaphore(1, TimeUnit.SECONDS, 10);
 * StatisticsThread thread = new StatisticsThread(sem);
 * thread.start();
 * </pre>
 *
 * <p>
 * When creating an instance the time period for the semaphore must be
 * specified. {@code TimedSemaphore} uses an executor service with a
 * corresponding period to monitor this interval. The {@code
 * ScheduledExecutorService} to be used for this purpose can be provided at
 * construction time. Alternatively the class creates an internal executor
 * service.
 * </p>
 * <p>
 * Client code that uses {@code TimedSemaphore} has to call the
 * {@link #acquire()} method in each processing step. {@code TimedSemaphore}
 * keeps track of the number of invocations of the {@link #acquire()} method and
 * blocks the calling thread if the counter exceeds the limit specified. When
 * the timer signals the end of the time period the counter is reset and all
 * waiting threads are released. Then another cycle can start.
 * </p>
 * <p>
 * It is possible to modify the limit at any time using the
 * {@link #setLimit(int)} method. This is useful if the load produced by an
 * operation has to be adapted dynamically. In the example scenario with the
 * thread collecting statistics it may make sense to specify a low limit during
 * day time while allowing a higher load in the night time. Reducing the limit
 * takes effect immediately by blocking incoming callers. If the limit is
 * increased, waiting threads are not released immediately, but wake up when the
 * timer runs out. Then, in the next period more processing steps can be
 * performed without blocking. By setting the limit to 0 the semaphore can be
 * switched off: in this mode the {@link #acquire()} method never blocks, but
 * lets all callers pass directly.
 * </p>
 * <p>
 * When the {@code TimedSemaphore} is no longer needed its {@link #shutdown()}
 * method should be called. This causes the periodic task that monitors the time
 * interval to be canceled. If the {@code ScheduledExecutorService} has been
 * created by the semaphore at construction time, it is also shut down,
 * releasing all its resources. After that {@link #acquire()} must not be
 * called any more.
 * </p>
 *
 * @since 3.0
 * @version $Id$
 */
public class TimedSemaphore {
    /**
     * Constant for a value representing no limit. If the limit is set to a
     * value less or equal this constant, the {@code TimedSemaphore} will be
     * effectively switched off.
     */
    public static final int NO_LIMIT = 0;

    /** Constant for the thread pool size for the executor. */
    private static final int THREAD_POOL_SIZE = 1;

    /** The executor service for managing the timer thread. */
    private final ScheduledExecutorService executorService;

    /** Stores the period for this timed semaphore. */
    private final long period;

    /** The time unit for the period. */
    private final TimeUnit unit;

    /** A flag whether the executor service was created by this object. */
    private final boolean ownExecutor;

    /** A future object representing the timer task. */
    private ScheduledFuture<?> task; // @GuardedBy("this")

    /** Stores the total number of invocations of the acquire() method. */
    private long totalAcquireCount; // @GuardedBy("this")

    /**
     * The counter for the periods. This counter is increased every time a
     * period ends.
     */
    private long periodCount; // @GuardedBy("this")

    /** The limit. */
    private int limit; // @GuardedBy("this")

    /** The current counter. */
    private int acquireCount;  // @GuardedBy("this")

    /** The number of invocations of acquire() in the last period. */
    private int lastCallsPerPeriod; // @GuardedBy("this")

    /** A flag whether shutdown() was called. */
    private boolean shutdown;  // @GuardedBy("this")

    /**
     * Creates a new instance of {@link TimedSemaphore} and initializes it with
     * the given time period and the limit.
     *
     * @param timePeriod the time period
     * @param timeUnit the unit for the period
     * @param limit the limit for the semaphore
     * @throws IllegalArgumentException if the period is less or equals 0
     */
    public TimedSemaphore(final long timePeriod, final TimeUnit timeUnit, final int limit) {
        this(null, timePeriod, timeUnit, limit);
    }

    /**
     * Creates a new instance of {@link TimedSemaphore} and initializes it with
     * an executor service, the given time period, and the limit. The executor
     * service will be used for creating a periodic task for monitoring the time
     * period. It can be <b>null</b>, then a default service will be created.
     *
     * @param service the executor service
     * @param timePeriod the time period
     * @param timeUnit the unit for the period
     * @param limit the limit for the semaphore
     * @throws IllegalArgumentException if the period is less or equals 0
     */
    public TimedSemaphore(final ScheduledExecutorService service, final long timePeriod,
            final TimeUnit timeUnit, final int limit) {
        Validate.inclusiveBetween(1, Long.MAX_VALUE, timePeriod, "Time period must be greater than 0!");

        period = timePeriod;
        unit = timeUnit;

        if (service != null) {
            // caller-supplied executor: caller keeps lifecycle responsibility
            executorService = service;
            ownExecutor = false;
        } else {
            // internally created executor: configured so pending tasks die with shutdown
            final ScheduledThreadPoolExecutor s = new ScheduledThreadPoolExecutor(
                    THREAD_POOL_SIZE);
            s.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
            s.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
            executorService = s;
            ownExecutor = true;
        }

        setLimit(limit);
    }

    /**
     * Returns the limit enforced by this semaphore. The limit determines how
     * many invocations of {@link #acquire()} are allowed within the monitored
     * period.
     *
     * @return the limit
     */
    public final synchronized int getLimit() {
        return limit;
    }

    /**
     * Sets the limit. This is the number of times the {@link #acquire()} method
     * can be called within the time period specified. If this limit is reached,
     * further invocations of {@link #acquire()} will block. Setting the limit
     * to a value &lt;= {@link #NO_LIMIT} will cause the limit to be disabled,
     * i.e. an arbitrary number of {@link #acquire()} invocations is allowed in
     * the time period.
     *
     * @param limit the limit
     */
    public final synchronized void setLimit(final int limit) {
        this.limit = limit;
    }

    /**
     * Initializes a shutdown. After that the object cannot be used any more.
     * This method can be invoked an arbitrary number of times. All invocations
     * after the first one do not have any effect.
     */
    public synchronized void shutdown() {
        if (!shutdown) {
            if (ownExecutor) {
                // if the executor was created by this instance, it has
                // to be shutdown
                getExecutorService().shutdownNow();
            }
            if (task != null) {
                task.cancel(false);
            }

            shutdown = true;
        }
    }

    /**
     * Tests whether the {@link #shutdown()} method has been called on this
     * object. If this method returns <b>true</b>, this instance cannot be used
     * any longer.
     *
     * @return a flag whether a shutdown has been performed
     */
    public synchronized boolean isShutdown() {
        return shutdown;
    }

    /**
     * Tries to acquire a permit from this semaphore. This method will block if
     * the limit for the current period has already been reached. If
     * {@link #shutdown()} has already been invoked, calling this method will
     * cause an exception. The very first call of this method starts the timer
     * task which monitors the time period set for this {@code TimedSemaphore}.
     * From now on the semaphore is active.
     *
     * @throws InterruptedException if the thread gets interrupted
     * @throws IllegalStateException if this semaphore is already shut down
     */
    public synchronized void acquire() throws InterruptedException {
        if (isShutdown()) {
            throw new IllegalStateException("TimedSemaphore is shut down!");
        }

        // Lazily start the period timer on the first acquire.
        if (task == null) {
            task = startTimer();
        }

        boolean canPass = false;
        do {
            // NO_LIMIT (or lower) disables blocking entirely.
            canPass = getLimit() <= NO_LIMIT || acquireCount < getLimit();
            if (!canPass) {
                // Wait until endOfPeriod() resets the counter and notifies.
                wait();
            } else {
                acquireCount++;
            }
        } while (!canPass);
    }

    /**
     * Returns the number of (successful) acquire invocations during the last
     * period. This is the number of times the {@link #acquire()} method was
     * called without blocking. This can be useful for testing or debugging
     * purposes or to determine a meaningful threshold value. If a limit is set,
     * the value returned by this method won't be greater than this limit.
     *
     * @return the number of non-blocking invocations of the {@link #acquire()}
     * method
     */
    public synchronized int getLastAcquiresPerPeriod() {
        return lastCallsPerPeriod;
    }

    /**
     * Returns the number of invocations of the {@link #acquire()} method for
     * the current period. This may be useful for testing or debugging purposes.
     *
     * @return the current number of {@link #acquire()} invocations
     */
    public synchronized int getAcquireCount() {
        return acquireCount;
    }

    /**
     * Returns the number of calls to the {@link #acquire()} method that can
     * still be performed in the current period without blocking. This method
     * can give an indication whether it is safe to call the {@link #acquire()}
     * method without risking to be suspended. However, there is no guarantee
     * that a subsequent call to {@link #acquire()} actually is not-blocking
     * because in the mean time other threads may have invoked the semaphore.
     *
     * @return the current number of available {@link #acquire()} calls in the
     * current period
     */
    public synchronized int getAvailablePermits() {
        return getLimit() - getAcquireCount();
    }

    /**
     * Returns the average number of successful (i.e. non-blocking)
     * {@link #acquire()} invocations for the entire life-time of this {@code
     * TimedSemaphore}. This method can be used for instance for statistical
     * calculations.
     *
     * @return the average number of {@link #acquire()} invocations per time
     * unit
     */
    public synchronized double getAverageCallsPerPeriod() {
        return periodCount == 0 ? 0 : (double) totalAcquireCount
                / (double) periodCount;
    }

    /**
     * Returns the time period. This is the time monitored by this semaphore.
     * Only a given number of invocations of the {@link #acquire()} method is
     * possible in this period.
     *
     * @return the time period
     */
    public long getPeriod() {
        return period;
    }

    /**
     * Returns the time unit. This is the unit used by {@link #getPeriod()}.
     *
     * @return the time unit
     */
    public TimeUnit getUnit() {
        return unit;
    }

    /**
     * Returns the executor service used by this instance.
     *
     * @return the executor service
     */
    protected ScheduledExecutorService getExecutorService() {
        return executorService;
    }

    /**
     * Starts the timer. This method is called when {@link #acquire()} is called
     * for the first time. It schedules a task to be executed at fixed rate to
     * monitor the time period specified.
     *
     * @return a future object representing the task scheduled
     */
    protected ScheduledFuture<?> startTimer() {
        return getExecutorService().scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                endOfPeriod();
            }
        }, getPeriod(), getPeriod(), getUnit());
    }

    /**
     * The current time period is finished. This method is called by the timer
     * used internally to monitor the time period. It resets the counter and
     * releases the threads waiting for this barrier.
     */
    synchronized void endOfPeriod() {
        lastCallsPerPeriod = acquireCount;
        totalAcquireCount += acquireCount;
        periodCount++;
        acquireCount = 0;
        // Wake all threads blocked in acquire(); they re-check the counter.
        notifyAll();
    }
}
package org.vvv.chatbotdb.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.vvv.chatbotdb.model.Chatbot;
import org.vvv.chatbotdb.model.Topic;

/**
 * DAO for the {@code chatbots} table and the {@code topics_chatbots} link
 * table. All methods obtain a connection via the inherited
 * {@code getDbHelper().getConnection()} and never close it — the connection
 * lifecycle is assumed to be owned by {@code DBObject}/the helper
 * (NOTE(review): confirm this ownership against DBObject).
 * Statements and result sets are closed with try-with-resources.
 */
public class ChatbotDBHelper extends DBObject {

    private static Log log = LogFactory.getLog(ChatbotDBHelper.class);

    /**
     * Inserts a new chatbot row and populates {@code chatbot.id} with the
     * generated key.
     *
     * @param chatbot the chatbot to persist; its name is stored
     * @return the same instance, with the generated id set
     * @throws SQLException on any database error (logged and rethrown)
     */
    public Chatbot save(Chatbot chatbot) throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        String sql = "INSERT INTO chatbots(chatbot_name) VALUES (?)";
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql,
                Statement.RETURN_GENERATED_KEYS)) {
            pstmt.setString(1, chatbot.getName());
            pstmt.executeUpdate();
            try (ResultSet keys = pstmt.getGeneratedKeys()) {
                keys.next();
                chatbot.setId(keys.getLong(1));
            }
        } catch (SQLException e) {
            log.error("Error during save chabot: " + chatbot.getName(), e);
            throw e;
        }
        return chatbot;
    }

    /**
     * Deletes a chatbot row by its id. Does not remove topic links
     * (use {@link #delete(String)} for that).
     *
     * @param chatbot the chatbot whose id identifies the row to delete
     */
    public void delete(Chatbot chatbot) throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        String sql = "DELETE FROM chatbots WHERE id = ?";
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setLong(1, chatbot.getId());
            pstmt.executeUpdate();
        } catch (SQLException e) {
            log.error("Error during deleting the chatbot: " + chatbot.getId(), e);
            throw e;
        }
    }

    /**
     * Deletes a chatbot row by name, first removing its topic links so no
     * orphaned rows remain in {@code topics_chatbots}.
     *
     * @param chatbotName the unique chatbot name
     */
    public void delete(String chatbotName) throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        // Remove link-table rows first to keep referential integrity.
        this.unlinkTopics(chatbotName);
        String sql = "DELETE FROM chatbots WHERE chatbot_name = ?";
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setString(1, chatbotName);
            pstmt.executeUpdate();
        } catch (SQLException e) {
            log.error("Error during deleting the chatbot: " + chatbotName, e);
            throw e;
        }
    }

    /**
     * Removes every topic link of the named chatbot from the
     * {@code topics_chatbots} table.
     *
     * @param chatbotName the unique chatbot name
     */
    public void unlinkTopics(String chatbotName) throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        String sql = "DELETE FROM topics_chatbots WHERE chatbot_id = (SELECT id FROM chatbots "
                + "WHERE chatbot_name = ?)";
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setString(1, chatbotName);
            pstmt.executeUpdate();
        } catch (SQLException e) {
            log.error("Error during deleting the chatbot topics links: " + chatbotName, e);
            throw e;
        }
    }

    /**
     * Looks up a chatbot by primary key.
     *
     * @param id the chatbot id
     * @return the chatbot, or {@code null} when no row matches
     */
    public Chatbot getById(Long id) throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        String sql = "SELECT id, chatbot_name FROM chatbots WHERE id = ?";
        Chatbot chatbot = null;
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setLong(1, id);
            try (ResultSet rs = pstmt.executeQuery()) {
                if (rs.next()) {
                    chatbot = new Chatbot();
                    chatbot.setId(rs.getLong("id"));
                    chatbot.setName(rs.getString("chatbot_name"));
                }
            }
        } catch (SQLException e) {
            log.error("Error during selecting the chatbot: id " + id, e);
            throw e;
        }
        return chatbot;
    }

    /**
     * Creates a link row between a chatbot and a topic.
     *
     * @param chatbot the chatbot side of the link (id must be set)
     * @param topic the topic side of the link (id must be set)
     */
    public void link(Chatbot chatbot, Topic topic) throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        String sql = "INSERT INTO topics_chatbots(chatbot_id, topic_id) VALUES (?, ?)";
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setLong(1, chatbot.getId());
            pstmt.setLong(2, topic.getId());
            pstmt.executeUpdate();
        } catch (SQLException e) {
            log.error("Error during link chatbot and topic: " + chatbot.getId()
                    + " topic:" + topic.getId(), e);
            throw e;
        }
    }

    /**
     * Removes the link row between a chatbot and a topic.
     *
     * @param chatbot the chatbot side of the link (id must be set)
     * @param topic the topic side of the link (id must be set)
     */
    public void unlink(Chatbot chatbot, Topic topic) throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        // BUGFIX: table name was misspelled "topics_chabots"; every other method
        // in this class uses "topics_chatbots", so the DELETE targeted a
        // nonexistent table and unlink never removed anything.
        String sql = "DELETE FROM topics_chatbots WHERE chatbot_id = ? and topic_id = ?";
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setLong(1, chatbot.getId());
            pstmt.setLong(2, topic.getId());
            pstmt.executeUpdate();
        } catch (SQLException e) {
            log.error("Error during unlink the chabot: " + chatbot.getId()
                    + " topic:" + topic.getId(), e);
            throw e;
        }
    }

    /**
     * Loads all chatbot rows.
     *
     * @return a mutable list of all chatbots, empty when the table is empty
     */
    public List<Chatbot> list() throws SQLException, InstantiationException,
            IllegalAccessException, ClassNotFoundException {
        List<Chatbot> chatbots = new ArrayList<Chatbot>();
        String sql = "SELECT id, chatbot_name FROM chatbots";
        Connection conn = super.getDbHelper().getConnection();
        try (PreparedStatement pstmt = conn.prepareStatement(sql);
                ResultSet rs = pstmt.executeQuery()) {
            while (rs.next()) {
                Chatbot chatbot = new Chatbot();
                chatbot.setId(rs.getLong("id"));
                chatbot.setName(rs.getString("chatbot_name"));
                chatbots.add(chatbot);
            }
        } catch (SQLException e) {
            log.error("Error list chatbots", e);
            throw e;
        }
        return chatbots;
    }
}
package com.examples; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.List; import java.util.Locale; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Color; import android.os.Bundle; import android.text.format.DateFormat; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.View.OnClickListener; import android.widget.BaseAdapter; import android.widget.Button; import android.widget.GridView; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; public class SimpleCalendarViewActivity extends Activity implements OnClickListener { private static final String tag = "SimpleCalendarViewActivity"; private ImageView calendarToJournalButton; private Button selectedDayMonthYearButton; private Button currentMonth; private ImageView prevMonth; private ImageView nextMonth; private GridView calendarView; private GridCellAdapter adapter; private Calendar _calendar; private int month, year; private final DateFormat dateFormatter = new DateFormat(); private static final String dateTemplate = "MMMM yyyy"; String date_month_year; /** Called when the activity is first created. 
*/ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); setContentView(R.layout.simple_calendar_view); _calendar = Calendar.getInstance(Locale.getDefault()); month = _calendar.get(Calendar.MONTH) + 1; year = _calendar.get(Calendar.YEAR); Log.d(tag, "Calendar Instance:= " + "Month: " + month + " " + "Year: " + year); selectedDayMonthYearButton = (Button) this.findViewById(R.id.selectedDayMonthYear); selectedDayMonthYearButton.setText("Selected: "); prevMonth = (ImageView) this.findViewById(R.id.prevMonth); prevMonth.setOnClickListener(this); currentMonth = (Button) this.findViewById(R.id.currentMonth); currentMonth.setText(dateFormatter.format(dateTemplate, _calendar.getTime())); nextMonth = (ImageView) this.findViewById(R.id.nextMonth); nextMonth.setOnClickListener(this); calendarView = (GridView) this.findViewById(R.id.calendar); Log.i("Calender" ,"Welcome"); // Initialised adapter = new GridCellAdapter(getApplicationContext(), R.id.calendar_day_gridcell, month, year); adapter.notifyDataSetChanged(); calendarView.setAdapter(adapter); } /** * * @param month * @param year */ private void setGridCellAdapterToDate(int month, int year) { adapter = new GridCellAdapter(getApplicationContext(), R.id.calendar_day_gridcell, month, year); _calendar.set(year, month - 1, _calendar.get(Calendar.DAY_OF_MONTH)); currentMonth.setText(dateFormatter.format(dateTemplate, _calendar.getTime())); adapter.notifyDataSetChanged(); calendarView.setAdapter(adapter); } @Override public void onClick(View v) { if (v == prevMonth) { if (month <= 1) { month = 12; year--; } else { month--; } Log.d(tag, "Setting Prev Month in GridCellAdapter: " + "Month: " + month + " Year: " + year); setGridCellAdapterToDate(month, year); } if (v == nextMonth) { if (month > 11) { month = 1; year++; } else { month++; } Log.d(tag, "Setting Next Month in GridCellAdapter: " + "Month: " + month + " Year: " + year); 
setGridCellAdapterToDate(month, year);
        }
    }
    // NOTE(review): the two braces above close a nested scope and a method whose
    // declaration starts before this chunk — left untouched.

    /** Lifecycle teardown: logs and delegates to the superclass. */
    @Override
    public void onDestroy() {
        Log.d(tag, "Destroying View ...");
        super.onDestroy();
    }

    // ///////////////////////////////////////////////////////////////////////////////////////
    // Inner Class

    /**
     * Adapter backing the month grid. Each cell is modelled as a string of the
     * form "day-COLOR-monthName-year" built in {@link #printMonth(int, int)} and
     * split apart again in {@link #getView(int, View, ViewGroup)}.
     */
    public class GridCellAdapter extends BaseAdapter implements OnClickListener {
        private static final String tag = "GridCellAdapter";
        private final Context _context;

        // Cell model: "day-COLOR-monthName-year" strings, one per grid cell.
        private final List<String> list;
        // Day numbers are 1-based; used when back-filling previous-month cells.
        private static final int DAY_OFFSET = 1;
        private final String[] weekdays = new String[]{"Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"};
        private final String[] months = {"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
        // Base day counts; February is bumped for leap years inside printMonth().
        private final int[] daysOfMonth = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
        private final int month, year;
        private int daysInMonth, prevMonthDays; // NOTE(review): prevMonthDays is never written or read here
        private int currentDayOfMonth;
        private int currentWeekDay;
        private Button gridcell;
        private TextView num_events_per_day;
        // NOTE(review): raw HashMap — presumably HashMap<String, Integer> keyed by
        // day-of-month string (see findNumberOfEventsPerMonth); confirm and parameterize.
        private final HashMap eventsPerMonthMap;
        // NOTE(review): SimpleDateFormat is not thread-safe; acceptable only while
        // this adapter is confined to the UI thread.
        private final SimpleDateFormat dateFormatter = new SimpleDateFormat("dd-MMM-yyyy");

        // Days in Current Month
        /**
         * Builds the cell list for the given month/year and snapshots "today"
         * from the device clock for highlighting.
         *
         * @param textViewResourceId unused by this implementation
         * @param month 1-based month as passed by the caller
         */
        public GridCellAdapter(Context context, int textViewResourceId, int month, int year) {
            super();
            this._context = context;
            this.list = new ArrayList<String>();
            this.month = month;
            this.year = year;
            Log.d(tag, "==> Passed in Date FOR Month: " + month + " " + "Year: " + year);
            Calendar calendar = Calendar.getInstance();
            setCurrentDayOfMonth(calendar.get(Calendar.DAY_OF_MONTH));
            setCurrentWeekDay(calendar.get(Calendar.DAY_OF_WEEK));
            Log.d(tag, "New Calendar:= " + calendar.getTime().toString());
            Log.d(tag, "CurrentDayOfWeek :" + getCurrentWeekDay());
            Log.d(tag, "CurrentDayOfMonth :" + getCurrentDayOfMonth());
            // Print Month
            printMonth(month, year);
            // Find Number of Events
            eventsPerMonthMap = findNumberOfEventsPerMonth(year, month);
        }

        // i is a 0-based month index (January == 0).
        private String getMonthAsString(int i) {
            return months[i];
        }

        // i is a 0-based weekday index (Sunday == 0).
        private String getWeekDayAsString(int i) {
            return weekdays[i];
        }

        // i is a 0-based month index; February returns 28 (leap handled by caller).
        private int getNumberOfDaysOfMonth(int i) {
            return daysOfMonth[i];
        }

        public String getItem(int position) {
            return list.get(position);
        }

        @Override
        public int getCount() {
            return list.size();
        }

        /**
         * Prints Month
         *
         * Fills {@code list} with grey cells for the visible tail of the previous
         * month, white/blue cells for every day of the current month (blue marks
         * today), then grey cells padding the final week with next-month days.
         *
         * @param mm 1-based month
         * @param yy 4-digit year
         */
        private void printMonth(int mm, int yy) {
            Log.d(tag, "==> printMonth: mm: " + mm + " " + "yy: " + yy);
            // The number of days to leave blank at
            // the start of this month.
            int trailingSpaces = 0;
            int leadSpaces = 0; // NOTE(review): never used
            int daysInPrevMonth = 0;
            int prevMonth = 0;
            int prevYear = 0;
            int nextMonth = 0;
            int nextYear = 0;
            int currentMonth = mm - 1; // convert 1-based mm to 0-based array index
            String currentMonthName = getMonthAsString(currentMonth);
            daysInMonth = getNumberOfDaysOfMonth(currentMonth);
            Log.d(tag, "Current Month: " + " " + currentMonthName + " having " + daysInMonth + " days.");
            // Gregorian Calendar : MINUS 1, set to FIRST OF MONTH
            GregorianCalendar cal = new GregorianCalendar(yy, currentMonth, 1);
            Log.d(tag, "Gregorian Calendar:= " + cal.getTime().toString());
            if (currentMonth == 11) {
                // December (0-based): next month wraps to January of next year.
                prevMonth = currentMonth - 1;
                daysInPrevMonth = getNumberOfDaysOfMonth(prevMonth);
                nextMonth = 0;
                prevYear = yy;
                nextYear = yy + 1;
                Log.d(tag, "*->PrevYear: " + prevYear + " PrevMonth:" + prevMonth + " NextMonth: " + nextMonth + " NextYear: " + nextYear);
            } else if (currentMonth == 0) {
                // January (0-based): previous month wraps to December of last year.
                prevMonth = 11;
                prevYear = yy - 1;
                nextYear = yy;
                daysInPrevMonth = getNumberOfDaysOfMonth(prevMonth);
                nextMonth = 1;
                Log.d(tag, "**--> PrevYear: " + prevYear + " PrevMonth:" + prevMonth + " NextMonth: " + nextMonth + " NextYear: " + nextYear);
            } else {
                // Any interior month: neighbours are in the same year.
                prevMonth = currentMonth - 1;
                nextMonth = currentMonth + 1;
                nextYear = yy;
                prevYear = yy;
                daysInPrevMonth = getNumberOfDaysOfMonth(prevMonth);
                Log.d(tag, "***---> PrevYear: " + prevYear + " PrevMonth:" + prevMonth + " NextMonth: " + nextMonth + " NextYear: " + nextYear);
            }
            // Compute how much to leave before before the first day of the
            // month.
            // getDay() returns 0 for Sunday.
            int currentWeekDay = cal.get(Calendar.DAY_OF_WEEK) - 1;
            trailingSpaces = currentWeekDay;
            Log.d(tag, "Week Day:" + currentWeekDay + " is " + getWeekDayAsString(currentWeekDay));
            Log.d(tag, "No. Trailing space to Add: " + trailingSpaces);
            Log.d(tag, "No. of Days in Previous Month: " + daysInPrevMonth);
            // NOTE(review): leap-year bump fires only when mm == 1, but elsewhere mm is
            // treated as 1-based (currentMonth = mm - 1), so February would be mm == 2.
            // Looks like an off-by-one — confirm what the caller passes.
            if (cal.isLeapYear(cal.get(Calendar.YEAR)) && mm == 1) {
                ++daysInMonth;
            }
            // Trailing Month days
            // (grey cells showing the last few days of the previous month)
            for (int i = 0; i < trailingSpaces; i++) {
                Log.d(tag, "PREV MONTH:= " + prevMonth + " => " + getMonthAsString(prevMonth) + " " + String.valueOf((daysInPrevMonth - trailingSpaces + DAY_OFFSET) + i));
                list.add(String.valueOf((daysInPrevMonth - trailingSpaces + DAY_OFFSET) + i) + "-GREY" + "-" + getMonthAsString(prevMonth) + "-" + prevYear);
            }
            // Current Month Days
            for (int i = 1; i <= daysInMonth; i++) {
                Log.d(currentMonthName, String.valueOf(i) + " " + getMonthAsString(currentMonth) + " " + yy);
                if (i == getCurrentDayOfMonth()) {
                    // BLUE marks today.
                    list.add(String.valueOf(i) + "-BLUE" + "-" + getMonthAsString(currentMonth) + "-" + yy);
                } else {
                    list.add(String.valueOf(i) + "-WHITE" + "-" + getMonthAsString(currentMonth) + "-" + yy);
                }
            }
            // Leading Month days
            // NOTE(review): the bound list.size() % 7 is re-evaluated every iteration
            // while the loop itself grows the list; it does terminate once size hits a
            // multiple of 7, but only by accident of that re-evaluation — fragile.
            for (int i = 0; i < list.size() % 7; i++) {
                Log.d(tag, "NEXT MONTH:= " + getMonthAsString(nextMonth));
                list.add(String.valueOf(i + 1) + "-GREY" + "-" + getMonthAsString(nextMonth) + "-" + nextYear);
            }
        }

        /**
         * NOTE: YOU NEED TO IMPLEMENT THIS PART Given the YEAR, MONTH, retrieve
         * ALL entries from a SQLite database for that month. Iterate over the
         * List of All entries, and get the dateCreated, which is converted into
         * day.
         *
         * @param year
         * @param month
         * @return map of day-of-month to event count (currently always empty)
         */
        private HashMap findNumberOfEventsPerMonth(int year, int month) {
            // Stub: returns an empty map until the database query is implemented.
            HashMap map = new HashMap<String, Integer>();
            return map;
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        /**
         * Renders one day cell, recycling {@code convertView} when available.
         * Cell colour is decoded from the "day-COLOR-month-year" model string.
         */
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View row = convertView;
            if (row == null) {
                LayoutInflater inflater = (LayoutInflater) _context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                row = inflater.inflate(R.layout.calendar_day_gridcell, parent, false);
            }
            // Get a reference to the Day gridcell
            gridcell = (Button) row.findViewById(R.id.calendar_day_gridcell);
            gridcell.setOnClickListener(this);
            // ACCOUNT FOR SPACING
            Log.d(tag, "Current Day: " + getCurrentDayOfMonth());
            String[] day_color = list.get(position).split("-");
            String theday = day_color[0];
            String themonth = day_color[2];
            String theyear = day_color[3];
            // NOTE(review): the null check runs AFTER isEmpty() — if the map could ever
            // be null this would already have thrown NPE; the operands should be swapped.
            // (Harmless today because the constructor always assigns a non-null map.)
            if ((!eventsPerMonthMap.isEmpty()) && (eventsPerMonthMap != null)) {
                if (eventsPerMonthMap.containsKey(theday)) {
                    num_events_per_day = (TextView) row.findViewById(R.id.num_events_per_day);
                    Integer numEvents = (Integer) eventsPerMonthMap.get(theday);
                    num_events_per_day.setText(numEvents.toString());
                }
            }
            // Set the Day GridCell
            gridcell.setText(theday);
            gridcell.setTag(theday + "-" + themonth + "-" + theyear);
            Log.d(tag, "Setting GridCell " + theday + "-" + themonth + "-" + theyear);
            if (day_color[1].equals("GREY")) {
                gridcell.setTextColor(Color.LTGRAY);
            }
            if (day_color[1].equals("WHITE")) {
                gridcell.setTextColor(Color.WHITE);
            }
            if (day_color[1].equals("BLUE")) {
                gridcell.setTextColor(getResources().getColor(R.color.static_text_color));
            }
            return row;
        }

        /**
         * Cell tap: records the tapped "day-month-year" tag, echoes it on the
         * selection button, and opens the action dialog (id 0).
         */
        @Override
        public void onClick(View view) {
            date_month_year = (String) view.getTag();
            selectedDayMonthYearButton.setText("Selected: " + date_month_year);
            Log.i("Calender" , "date= "+ date_month_year);
            try {
                // Parse only to validate/log; the parsed Date is not otherwise used.
                Date parsedDate = dateFormatter.parse(date_month_year);
                Log.i(tag, "Parsed Date: " + parsedDate.toString());
            } catch (ParseException e) {
                e.printStackTrace();
            }
            showDialog( 0 );
        }

        public int getCurrentDayOfMonth() {
            return currentDayOfMonth;
        }

        private void setCurrentDayOfMonth(int currentDayOfMonth) {
            this.currentDayOfMonth = currentDayOfMonth;
        }

        public void setCurrentWeekDay(int currentWeekDay) {
            this.currentWeekDay = currentWeekDay;
        }

        public int getCurrentWeekDay() {
            return currentWeekDay;
        }
    }

    /**
     * Builds the single-choice action dialog shown after a day is tapped.
     * Each choice launches an Activity by implicit action string, passing the
     * selected date as the "date" extra.
     */
    protected Dialog onCreateDialog( int id ) {
        final CharSequence[] items = {"Set Alarm", "Memo", "Reminder" , "Auto Message"};
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("..");
        builder.setIcon(R.drawable.cooltext589940243);
        builder.setNegativeButton("Quit", new DialogInterface.OnClickListener(){
            public void onClick(DialogInterface dialog , int arg1) {
                // Intentionally empty: "Quit" just dismisses the dialog.
            }
        });
        builder.setCancelable(true);
        builder.setSingleChoiceItems(items, -1, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                Toast.makeText(getApplicationContext(), items[item], Toast.LENGTH_SHORT).show();
                if(items[item].equals("Set Alarm")) {
                    String s="com.sonendra.mishra";
                    Intent intent = new Intent(s);
                    intent.putExtra("date", date_month_year);
                    startActivity(intent);
                } else if(items[item].equals("Memo")) {
                    String s="com.sonendra.mishra2";
                    Intent intent = new Intent(s);
                    intent.putExtra("date", date_month_year);
                    startActivity(intent);
                } else if(items[item].equals("Reminder")) {
                    String s="com.sonendra.mishra1";
                    Intent intent = new Intent(s);
                    intent.putExtra("date", date_month_year);
                    startActivity(intent);
                } else {
                    // Falls through to "Auto Message".
                    String s="com.sonendra.mishra3";
                    Intent intent = new Intent(s);
                    intent.putExtra("date", date_month_year);
                    startActivity(intent);
                }
            }
        });
        AlertDialog alert = builder.create();
        return alert;
    }
}
/* * Copyright (c) 2018, Tomas Slusny <slusnucky@gmail.com> * Copyright (c) 2018, PandahRS <https://github.com/PandahRS> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package net.runelite.client.plugins.discord;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import lombok.AllArgsConstructor;
import lombok.Getter;
import net.runelite.api.Client;
import net.runelite.api.Skill;
import net.runelite.api.Varbits;

/**
 * Catalogue of everything the Discord rich-presence plugin can report:
 * generic states, per-skill training events, and location events (bosses,
 * cities, dungeons, minigames, raids) keyed either by map region IDs or by a
 * varbit. Region-keyed entries are indexed into {@code FROM_REGION}; varbit
 * entries go into {@code FROM_VARBITS}.
 */
@AllArgsConstructor
@Getter
enum DiscordGameEventType
{
	IN_GAME("In Game", -3),
	IN_MENU("In Menu", -3),
	PLAYING_DEADMAN("Playing Deadman Mode", -3),
	PLAYING_PVP("Playing in a PVP world", -3),
	TRAINING_ATTACK(Skill.ATTACK),
	TRAINING_DEFENCE(Skill.DEFENCE),
	TRAINING_STRENGTH(Skill.STRENGTH),
	TRAINING_HITPOINTS(Skill.HITPOINTS, -1),
	TRAINING_SLAYER(Skill.SLAYER, 1),
	TRAINING_RANGED(Skill.RANGED),
	TRAINING_MAGIC(Skill.MAGIC),
	TRAINING_PRAYER(Skill.PRAYER),
	TRAINING_COOKING(Skill.COOKING),
	TRAINING_WOODCUTTING(Skill.WOODCUTTING),
	TRAINING_FLETCHING(Skill.FLETCHING),
	TRAINING_FISHING(Skill.FISHING, 1),
	TRAINING_FIREMAKING(Skill.FIREMAKING),
	TRAINING_CRAFTING(Skill.CRAFTING),
	TRAINING_SMITHING(Skill.SMITHING),
	TRAINING_MINING(Skill.MINING),
	TRAINING_HERBLORE(Skill.HERBLORE),
	TRAINING_AGILITY(Skill.AGILITY),
	TRAINING_THIEVING(Skill.THIEVING),
	TRAINING_FARMING(Skill.FARMING),
	TRAINING_RUNECRAFT(Skill.RUNECRAFT),
	TRAINING_HUNTER(Skill.HUNTER),
	TRAINING_CONSTRUCTION(Skill.CONSTRUCTION),

	// Bosses
	// (name shown in Discord, area type, then the map region IDs of the area)
	BOSS_ABYSSAL_SIRE("Abyssal Sire", DiscordAreaType.BOSSES, 11851, 11850, 12363, 12362),
	BOSS_CERBERUS("Cerberus", DiscordAreaType.BOSSES, 4883, 5140, 5395),
	BOSS_COMMANDER_ZILYANA("Commander Zilyana", DiscordAreaType.BOSSES, 11602),
	BOSS_DKS("Dagannoth Kings", DiscordAreaType.BOSSES, 11588, 11589),
	BOSS_GENERAL_GRAARDOR("General Graardor", DiscordAreaType.BOSSES, 11347),
	BOSS_GIANT_MOLE("Giant Mole", DiscordAreaType.BOSSES, 6993, 6992),
	BOSS_GROTESQUE_GUARDIANS("Grotesque Guardians", DiscordAreaType.BOSSES, 6727),
	BOSS_HYDRA("Alchemical Hydra", DiscordAreaType.BOSSES, 5536),
	BOSS_KQ("Kalphite Queen", DiscordAreaType.BOSSES, 13972),
	BOSS_KRAKEN("Kraken", DiscordAreaType.BOSSES, 9116),
	BOSS_KREEARRA("Kree'arra", DiscordAreaType.BOSSES, 11346),
	BOSS_KRIL_TSUTSAROTH("K'ril Tsutsaroth", DiscordAreaType.BOSSES, 11603),
	BOSS_SKOTIZO("Skotizo", DiscordAreaType.BOSSES, 6810),
	BOSS_SMOKE_DEVIL("Thermonuclear smoke devil", DiscordAreaType.BOSSES, 9363, 9619),
	BOSS_VORKATH("Vorkath", DiscordAreaType.BOSSES, 9023),
	BOSS_WINTERTODT("Wintertodt", DiscordAreaType.BOSSES, 6462),
	BOSS_ZALCANO("Zalcano", DiscordAreaType.BOSSES, 12126),
	BOSS_ZULRAH("Zulrah", DiscordAreaType.BOSSES, 9007),
	BOSS_NIGHTMARE("Nightmare of Ashihama", DiscordAreaType.BOSSES, 15515),

	// Cities
	CITY_AL_KHARID("Al Kharid" , DiscordAreaType.CITIES, 13105, 13106),
	CITY_APE_ATOLL("Ape Atoll" , DiscordAreaType.CITIES, 10795, 11051, 10974, 11050),
	CITY_ARCEUUS_HOUSE("Arceuus" , DiscordAreaType.CITIES, 6459, 6715, 6458, 6714),
	CITY_ARDOUGNE("Ardougne" , DiscordAreaType.CITIES, 10548, 10547, 10292, 10291, 10036, 10035, 9780, 9779),
	CITY_BARBARIAN_VILLAGE("Barbarian Village" , DiscordAreaType.CITIES, 12341),
	CITY_BANDIT_CAMP("Bandit Camp" , DiscordAreaType.CITIES, 12591),
	CITY_BEDABIN_CAMP("Bedabin Camp" , DiscordAreaType.CITIES, 12590),
	CITY_BRIMHAVEN("Brimhaven" , DiscordAreaType.CITIES, 11057, 11058),
	CITY_BURGH_DE_ROTT("Burgh de Rott" , DiscordAreaType.CITIES, 13874, 13873, 14130, 14129),
	CITY_BURTHORPE("Burthorpe" , DiscordAreaType.CITIES, 11319, 11575),
	CITY_CANIFIS("Canifis" , DiscordAreaType.CITIES, 13878),
	CITY_CATHERBY("Catherby" , DiscordAreaType.CITIES, 11317, 11318, 11061),
	CITY_CORSAIR_CAVE("Corsair Cove" , DiscordAreaType.CITIES, 10028, 10284),
	CITY_DARKMEYER("Darkmeyer", DiscordAreaType.CITIES, 14388),
	CITY_DORGESH_KAAN("Dorgesh-Kaan" , DiscordAreaType.CITIES, 10835, 10834),
	CITY_DRAYNOR("Draynor" , DiscordAreaType.CITIES, 12338),
	CITY_EDGEVILLE("Edgeville" , DiscordAreaType.CITIES, 12342),
	CITY_ENTRANA("Entrana" , DiscordAreaType.CITIES, 11060, 11316),
	CITY_FALADOR("Falador" , DiscordAreaType.CITIES, 11828, 11572, 11571, 11827, 12084),
	CITY_GOBLIN_VILLAGE("Goblin Village" , DiscordAreaType.CITIES, 11830),
	CITY_GUTANOTH("Gu'Tanoth" , DiscordAreaType.CITIES, 10031),
	CITY_GWENITH("Gwenith", DiscordAreaType.CITIES, 8757),
	CITY_HOSIDIUS_HOUSE("Hosidius" , DiscordAreaType.CITIES, 6710, 6711, 6712, 6713, 6455, 6456, 6965, 6966, 6967, 6968, 7221, 7223, 7224, 7478, 7479),
	CITY_JATISZO("Jatizso" , DiscordAreaType.CITIES, 9531),
	CITY_JIGGIG("Jiggig" , DiscordAreaType.CITIES, 9775),
	CITY_KARAMJA("Karamja" , DiscordAreaType.CITIES, 11569, 11568, 11567, 11566, 11313, 11312, 11311),
	CITY_KELDAGRIM("Keldagrim" , DiscordAreaType.CITIES, 11423, 11422, 11679, 11678),
	CITY_LLETYA("Lletya" , DiscordAreaType.CITIES, 9265),
	CITY_LOVAKENGJ_HOUSE("Lovakengj" , DiscordAreaType.CITIES, 5692, 5948, 5691, 5947, 6203, 6202, 5690, 5946),
	CITY_LUMBRIDGE("Lumbridge" , DiscordAreaType.CITIES, 12850),
	CITY_LUNAR_ISLE("Lunar Isle" , DiscordAreaType.CITIES, 8253, 8252, 8509, 8508),
	CITY_MEIYERDITCH("Meiyerditch" , DiscordAreaType.CITIES, 14132, 14387, 14386, 14385),
	CITY_MISCELLANIA("Miscellania" , DiscordAreaType.CITIES, 10044, 10300),
	CITY_MOS_LE_HARMLESS("Mos Le'Harmless" , DiscordAreaType.CITIES, 14638),
	CITY_MORTTON("Mort'ton" , DiscordAreaType.CITIES, 13875),
	CITY_MOR_UI_REK("Mor UI Rek" , DiscordAreaType.CITIES, 9808, 9807, 10064, 10063),
	CITY_MOUNT_KARUULM("Mount Karuulm", DiscordAreaType.CITIES, 5179, 4923, 5180),
	CITY_MYNYDD("Mynydd", DiscordAreaType.CITIES, 8501),
	CITY_NARDAH("Nardah" , DiscordAreaType.CITIES, 13613),
	CITY_NEITIZNOT("Neitiznot" , DiscordAreaType.CITIES, 9275),
	CITY_PISCATORIS("Piscatoris" , DiscordAreaType.CITIES, 9273),
	CITY_POLLNIVNEACH("Pollnivneach" , DiscordAreaType.CITIES, 13358),
	CITY_PORT_KHAZARD("Port Khazard" , DiscordAreaType.CITIES, 10545),
	CITY_PORT_PHASMATYS("Port Phasmatys" , DiscordAreaType.CITIES, 14646),
	CITY_PORT_SARIM("Port Sarim" , DiscordAreaType.CITIES, 12082),
	CITY_PISCARILIUS_HOUSE("Port Piscarilius" , DiscordAreaType.CITIES, 6971, 7227, 6970, 7226),
	CITY_PRIFDDINAS("Prifddinas", DiscordAreaType.CITIES, 8499, 8500, 8755, 8756, 9011, 9012, 9013, 12894, 12895, 13150, 13151),
	CITY_RELLEKKA("Rellekka" , DiscordAreaType.CITIES, 10553),
	CITY_RIMMINGTON("Rimmington" , DiscordAreaType.CITIES, 11826, 11570),
	CITY_SEERS_VILLAGE("Seers' Village" , DiscordAreaType.CITIES, 10806),
	CITY_SHAYZIEN_HOUSE("Shayzien" , DiscordAreaType.CITIES, 5944, 5943, 6200, 6199, 5688),
	CITY_SHILO_VILLAGE("Shilo Village" , DiscordAreaType.CITIES, 11310),
	CITY_SOPHANEM("Sophanem" , DiscordAreaType.CITIES, 13099),
	CITY_TAI_BWO_WANNAI("Tai Bwo Wannai" , DiscordAreaType.CITIES, 11056, 11055),
	CITY_TAVERLEY("Taverley" , DiscordAreaType.CITIES, 11574, 11573),
	CITY_TREE_GNOME_STRONGHOLD("Tree Gnome Stronghold" , DiscordAreaType.CITIES, 9782, 9781),
	CITY_TREE_GNOME_VILLAGE("Tree Gnome Village" , DiscordAreaType.CITIES, 10033),
	CITY_TROLL_STRONGHOLD("Troll Stronghold" , DiscordAreaType.CITIES, 11321),
	CITY_TYRAS_CAMP("Tyras Camp" , DiscordAreaType.CITIES, 8753, 8752),
	CITY_UZER("Uzer" , DiscordAreaType.CITIES, 13872),
	CITY_VARROCK("Varrock" , DiscordAreaType.CITIES, 12596, 12597, 12598, 12852, 12853, 12854, 13108, 13109, 13110),
	CITY_WITCHHAVEN("Witchaven" , DiscordAreaType.CITIES, 10803),
	CITY_WOODCUTTING_GUILD("Woodcutting Guild", DiscordAreaType.CITIES, 6454, 6198, 6298),
	CITY_YANILLE("Yanille" , DiscordAreaType.CITIES, 10288, 10032),
	CITY_ZANARIS("Zanaris" , DiscordAreaType.CITIES, 9285, 9541, 9540, 9797),
	CITY_ZULANDRA("Zul-Andra" , DiscordAreaType.CITIES, 8751),

	// Dungeons
	DUNGEON_ABANDONED_MINE("Abandoned Mine", DiscordAreaType.DUNGEONS, 13718, 11079, 11078, 11077, 10823, 10822, 10821),
	DUNGEON_AH_ZA_RHOON("Ah Za Rhoon", DiscordAreaType.DUNGEONS, 11666),
	DUNGEON_ANCIENT_CAVERN("Ancient Cavern", DiscordAreaType.DUNGEONS, 6483, 6995),
	DUNGEON_APE_ATOLL("Ape Atoll Dungeon", DiscordAreaType.DUNGEONS, 11150, 10894),
	DUNGEON_ARDY_SEWERS("Ardougne Sewers", DiscordAreaType.DUNGEONS, 10136),
	DUNGEON_ASGARNIAN_ICE_CAVES("Asgarnian Ice Caves", DiscordAreaType.DUNGEONS, 12181),
	DUNGEON_BRIMHAVEN("Brimhaven Dungeon", DiscordAreaType.DUNGEONS, 10901, 10900, 10899, 10645, 10644, 10643),
	DUNGEON_BRINE_RAT_CAVERN("Brine Rat Cavern", DiscordAreaType.DUNGEONS, 10910),
	DUNGEON_CATACOMBS_OF_KOUREND("Catacombs of Kourend", DiscordAreaType.DUNGEONS, 6557, 6556, 6813, 6812),
	DUNGEON_CHASM_OF_FIRE("Chasm of Fire", DiscordAreaType.DUNGEONS, 5789),
	DUNGEON_CLOCK_TOWER("Clock Tower Basement", DiscordAreaType.DUNGEONS, 10390),
	DUNGEON_CORSAIR_COVE("Corsair Cove Dungeon", DiscordAreaType.DUNGEONS, 8076, 8332),
	DUNGEON_CRABCLAW_CAVES("Crabclaw Caves", DiscordAreaType.DUNGEONS, 6553, 6809),
	DUNGEON_DIGSITE("Digsite Dungeon", DiscordAreaType.DUNGEONS, 13465),
	DUNGEON_DORGESHKAAN("Dorgesh-Kaan South Dungeon", DiscordAreaType.DUNGEONS, 10833),
	DUNGEON_DORGESHUUN_MINES("Dorgeshuun Mines", DiscordAreaType.DUNGEONS, 12950, 13206),
	DUNGEON_DRAYNOR_SEWERS("Draynor Sewers", DiscordAreaType.DUNGEONS, 12439, 12438),
	DUNGEON_DWARVEN_MINES("Dwarven Mines", DiscordAreaType.DUNGEONS, 12185, 12184, 12183),
	DUNGEON_EAGLES_PEAK("Eagles' Peak Dungeon", DiscordAreaType.DUNGEONS, 8013),
	DUNGEON_EDGEVILLE("Edgeville Dungeon", DiscordAreaType.DUNGEONS, 12441, 12442, 12443, 12698),
	DUNGEON_ELEMENTAL_WORKSHOP("Elemental Workshop", DiscordAreaType.DUNGEONS, 10906, 7760),
	DUNGEON_ENAKHRAS_TEMPLE("Enakhra's Temple", DiscordAreaType.DUNGEONS, 12423),
	DUNGEON_ENTRANA("Entrana Dungeon", DiscordAreaType.DUNGEONS, 11416),
	DUNGEON_EVIL_CHICKENS_LAIR("Evil Chicken's Lair", DiscordAreaType.DUNGEONS, 9796),
	DUNGEON_EXPERIMENT_CAVE("Experiment Cave", DiscordAreaType.DUNGEONS, 14235, 13979),
	DUNGEON_FREMENNIK_SLAYER("Fremennik Slayer Dungeon", DiscordAreaType.DUNGEONS, 10908, 11164),
	DUNGEON_GOBLIN_CAVE("Goblin Cave", DiscordAreaType.DUNGEONS, 10393),
	DUNGEON_GRAND_TREE_TUNNELS("Grand Tree Tunnels", DiscordAreaType.DUNGEONS, 9882),
	DUNGEON_HAM("H.A.M Dungeon", DiscordAreaType.DUNGEONS, 12694, 10321),
	DUNGEON_IORWERTH("Iorwerth Dungeon", DiscordAreaType.DUNGEONS, 12737, 12738, 12993, 12994),
	DUNGEON_JATIZSO_MINES("Jatizso Mines", DiscordAreaType.DUNGEONS, 9631),
	DUNGEON_JIGGIG_BURIAL_TOMB("Jiggig Burial Tomb", DiscordAreaType.DUNGEONS, 9875, 9874),
	DUNGEON_JOGRE("Jogre Dungeon", DiscordAreaType.DUNGEONS, 11412),
	DUNGEON_KARAMJA_VOLCANO("Karamja Volcano", DiscordAreaType.DUNGEONS, 11413, 11414),
	DUNGEON_KARUULM("Karuulm Slayer Dungeon", DiscordAreaType.DUNGEONS, 5280, 5279, 5023, 5535, 5022, 4766, 4510, 4511, 4767, 4768, 4512),
	DUNGEON_KHARAZI("Khazari Dungeon", DiscordAreaType.DUNGEONS, 11153),
	DUNGEON_LIGHTHOUSE("Lighthouse", DiscordAreaType.DUNGEONS, 10140),
	DUNGEON_LIZARDMAN_CAVES("Lizardman Caves", DiscordAreaType.DUNGEONS, 5275),
	DUNGEON_LUMBRIDGE_SWAMP_CAVES("Lumbridge Swamp Caves", DiscordAreaType.DUNGEONS, 12693, 12949),
	DUNGEON_LUNAR_ISLE_MINE("Lunar Isle Mine", DiscordAreaType.DUNGEONS, 9377),
	DUNGEON_MISCELLANIA("Miscellania Dungeon", DiscordAreaType.DUNGEONS, 10144, 10400),
	DUNGEON_MOGRE_CAMP("Mogre Camp", DiscordAreaType.DUNGEONS, 11924),
	DUNGEON_MOS_LE_HARMLESS_CAVES("Mos Le'Harmless Caves", DiscordAreaType.DUNGEONS, 14994, 14995, 15251),
	DUNGEON_MOUSE_HOLE("Mouse Hole", DiscordAreaType.DUNGEONS, 9046),
	DUNGEON_OBSERVATORY("Observatory Dungeon", DiscordAreaType.DUNGEONS, 9362),
	DUNGEON_OGRE_ENCLAVE("Ogre Enclave", DiscordAreaType.DUNGEONS, 10387),
	DUNGEON_QUIDAMORTEM_CAVE("Quidamortem Cave", DiscordAreaType.DUNGEONS, 4763),
	DUNGEON_RASHILIYIAS_TOMB("Rashiliyta's Tomb", DiscordAreaType.DUNGEONS, 11668),
	DUNGEON_SARADOMINSHRINE("Saradomin Shrine (Paterdomus)", DiscordAreaType.DUNGEONS, 13722),
	DUNGEON_SHADE_CATACOMBS("Shade Catacombs", DiscordAreaType.DUNGEONS, 13975),
	DUNGEON_SHAYZIEN_CRYPTS("Shayzien Crypts", DiscordAreaType.DUNGEONS, 6043),
	DUNGEON_SMOKE("Smoke Dungeon", DiscordAreaType.DUNGEONS, 12946, 13202),
	DUNGEON_SOPHANEM("Sophanem Dungeon", DiscordAreaType.DUNGEONS, 13200),
	DUNGEON_STRONGHOLD_SECURITY("Stronghold of Security", DiscordAreaType.DUNGEONS, 7505, 8017, 8530, 9297),
	DUNGEON_TARNS_LAIR("Tarn's Lair", DiscordAreaType.DUNGEONS, 12616, 12615),
	DUNGEON_TAVERLEY("Taverley Dungeon", DiscordAreaType.DUNGEONS, 11673, 11672, 11929, 11928, 11417),
	DUNGEON_TEMPLE_OF_IKOV("Temple of Ikov", DiscordAreaType.DUNGEONS, 10649, 10905, 10650),
	DUNGEON_TEMPLE_OF_MARIMBO("Temple of Marimbo", DiscordAreaType.DUNGEONS, 11151),
	DUNGEON_THE_WARRENS("The Warrens", DiscordAreaType.DUNGEONS, 7070, 7326),
	DUNGEON_TOLNA("Dungeon of Tolna", DiscordAreaType.DUNGEONS, 13209),
	DUNGEON_TOWER_OF_LIFE("Tower of Life Basement", DiscordAreaType.DUNGEONS, 12100),
	DUNGEON_TRAHAEARN_MINE("Trahaearn Mine", DiscordAreaType.DUNGEONS, 13250),
	DUNGEON_TUNNEL_OF_CHAOS("Tunnel of Chaos", DiscordAreaType.DUNGEONS, 12625),
	DUNGEON_UNDERGROUND_PASS("Underground Pass", DiscordAreaType.DUNGEONS, 9369, 9370),
	DUNGEON_VARROCKSEWERS("Varrock Sewers", DiscordAreaType.DUNGEONS, 12954, 13210),
	DUNGEON_WATER_RAVINE("Water Ravine", DiscordAreaType.DUNGEONS, 13461),
	DUNGEON_WATERBIRTH("Waterbirth Dungeon", DiscordAreaType.DUNGEONS, 9886, 10142, 7492, 7748),
	DUNGEON_WATERFALL("Waterfall Dungeon", DiscordAreaType.DUNGEONS, 10394),
	DUNGEON_WHITE_WOLF_MOUNTAIN_CAVES("White Wolf Mountain Caves", DiscordAreaType.DUNGEONS, 11418, 11419, 11675),
	DUNGEON_WITCHAVEN_SHRINE("Witchhaven Shrine Dungeon", DiscordAreaType.DUNGEONS, 10903),
	DUNGEON_YANILLE_AGILITY("Yanille Agility Dungeon", DiscordAreaType.DUNGEONS, 10388),
	DUNGEON_MOTHERLODE_MINE("Motherlode Mine", DiscordAreaType.DUNGEONS, 14679, 14680, 14681, 14935, 14936, 14937, 15191, 15192, 15193),
	DUNGEON_NIGHTMARE("Nightmare Dungeon", DiscordAreaType.DUNGEONS, 14999, 15000, 15001, 15255, 15256, 15257, 15511, 15512, 15513),

	// Minigames
	MG_BARBARIAN_ASSAULT("Barbarian Assault", DiscordAreaType.MINIGAMES, 10332),
	MG_BARROWS("Barrows", DiscordAreaType.MINIGAMES, 14131, 14231),
	MG_BLAST_FURNACE("Blast Furnace", DiscordAreaType.MINIGAMES, 7757),
	MG_BRIMHAVEN_AGILITY_ARENA("Brimhaven Agility Arena", DiscordAreaType.MINIGAMES, 11157),
	MG_BURTHORPE_GAMES_ROOM("Burthorpe Games Room", DiscordAreaType.MINIGAMES, 8781),
	MG_CASTLE_WARS("Castle Wars", DiscordAreaType.MINIGAMES, 9520),
	MG_CLAN_WARS("Clan Wars", DiscordAreaType.MINIGAMES, 13135, 13134, 13133, 13131, 13130, 13387, 13386),
	MG_DUEL_ARENA("Duel Arena", DiscordAreaType.MINIGAMES, 13362),
	MG_FISHING_TRAWLER("Fishing Trawler", DiscordAreaType.MINIGAMES, 7499),
	MG_GAUNTLET("The Gauntlet", DiscordAreaType.MINIGAMES, 12127, 7512, 7768),
	MG_INFERNO("The Inferno", DiscordAreaType.MINIGAMES, 9043),
	MG_LAST_MAN_STANDING("Last Man Standing", DiscordAreaType.MINIGAMES, 13660, 13659, 13658, 13916, 13915, 13914),
	MG_HALLOWED_SEPULCHRE("Hallowed Sepulchre", DiscordAreaType.MINIGAMES, 8797, 9051, 9052, 9053, 9054, 9309, 9563, 9565, 9821, 10074, 10075, 10077),
	MG_MAGE_TRAINING_ARENA("Mage Training Arena", DiscordAreaType.MINIGAMES, 13462, 13463),
	MG_NIGHTMARE_ZONE("Nightmare Zone", DiscordAreaType.MINIGAMES, 9033),
	MG_PEST_CONTROL("Pest Control", DiscordAreaType.MINIGAMES, 10536),
	MG_PYRAMID_PLUNDER("Pyramid Plunder", DiscordAreaType.MINIGAMES, 7749),
	MG_ROGUES_DEN("Rogues' Den", DiscordAreaType.MINIGAMES, 11855, 11854, 12111, 12110),
	MG_SORCERESS_GARDEN("Sorceress's Garden", DiscordAreaType.MINIGAMES, 11605),
	MG_TEMPLE_TREKKING("Temple Trekking", DiscordAreaType.MINIGAMES, 8014, 8270, 8256, 8782, 9038, 9294, 9550, 9806),
	MG_TITHE_FARM("Tithe Farm", DiscordAreaType.MINIGAMES, 7222),
	MG_TROUBLE_BREWING("Trouble Brewing", DiscordAreaType.MINIGAMES, 15150),
	MG_TZHAAR_FIGHT_CAVES("Tzhaar Fight Caves", DiscordAreaType.MINIGAMES, 9551),
	MG_TZHAAR_FIGHT_PITS("Tzhaar Fight Pits", DiscordAreaType.MINIGAMES, 9552),
	MG_VOLCANIC_MINE("Volcanic Mine", DiscordAreaType.MINIGAMES, 15263, 15262),

	// Raids
	// (raid instances are detected via varbits, not map regions)
	RAIDS_CHAMBERS_OF_XERIC("Chambers of Xeric", DiscordAreaType.RAIDS, Varbits.IN_RAID),
	RAIDS_THEATRE_OF_BLOOD("Theatre of Blood", DiscordAreaType.RAIDS, Varbits.THEATRE_OF_BLOOD);

	// Lookup from map region ID to its event; built once from all region-keyed constants.
	private static final Map<Integer, DiscordGameEventType> FROM_REGION;
	// Events detected by a varbit rather than a region; scanned in fromVarbit().
	private static final List<DiscordGameEventType> FROM_VARBITS;

	static
	{
		ImmutableMap.Builder<Integer, DiscordGameEventType> regionMapBuilder = new ImmutableMap.Builder<>();
		ImmutableList.Builder<DiscordGameEventType> fromVarbitsBuilder = ImmutableList.builder();
		for (DiscordGameEventType discordGameEventType : DiscordGameEventType.values())
		{
			// Varbit-keyed events are never region-indexed, even if they had regions.
			if (discordGameEventType.getVarbits() != null)
			{
				fromVarbitsBuilder.add(discordGameEventType);
				continue;
			}
			// Skill/state events carry no location data at all.
			if (discordGameEventType.getRegionIds() == null)
			{
				continue;
			}
			// NOTE: ImmutableMap.Builder.build() throws if two constants claim the
			// same region ID, so duplicate regions fail fast at class load.
			for (int region : discordGameEventType.getRegionIds())
			{
				regionMapBuilder.put(region, discordGameEventType);
			}
		}
		FROM_REGION = regionMapBuilder.build();
		FROM_VARBITS = fromVarbitsBuilder.build();
	}

	// Discord asset key for the small image shown with the presence.
	@Nullable
	private String imageKey;
	// "state" line of the Discord presence (e.g. the area name).
	@Nullable
	private String state;
	// "details" line of the Discord presence (e.g. "Training: Attack").
	@Nullable
	private String details;
	// Relative ordering when several events are active; higher wins.
	private int priority;
	// Whether this event clears lower-priority events when it activates.
	private boolean shouldClear;
	// Whether this event should expire after a period of inactivity.
	private boolean shouldTimeout;
	@Nullable
	private DiscordAreaType discordAreaType;
	// Non-null only for varbit-detected events (raids).
	@Nullable
	private Varbits varbits;
	// Non-null only for region-detected events (bosses/cities/dungeons/minigames).
	@Nullable
	private int[] regionIds;

	/** Skill-training event with default priority 0. */
	DiscordGameEventType(Skill skill)
	{
		this(skill, 0);
	}

	/** Skill-training event with an explicit priority. */
	DiscordGameEventType(Skill skill, int priority)
	{
		this.details = training(skill);
		this.priority = priority;
		this.imageKey = imageKeyOf(skill);
		this.shouldTimeout = true;
	}

	/** Region-detected location event (priority -2, clears on change). */
	DiscordGameEventType(String areaName, DiscordAreaType areaType, int... regionIds)
	{
		this.state = exploring(areaType, areaName);
		this.priority = -2;
		this.discordAreaType = areaType;
		this.regionIds = regionIds;
		this.shouldClear = true;
	}

	/** Plain state event (e.g. "In Menu") with an explicit priority. */
	DiscordGameEventType(String state, int priority)
	{
		this.state = state;
		this.priority = priority;
		this.shouldClear = true;
	}

	/** Varbit-detected location event (raids). */
	DiscordGameEventType(String areaName, DiscordAreaType areaType, Varbits varbits)
	{
		this.state = exploring(areaType, areaName);
		this.priority = -2;
		this.discordAreaType = areaType;
		this.varbits = varbits;
		this.shouldClear = true;
	}

	private static String training(final Skill skill)
	{
		return training(skill.getName());
	}

	private static String training(final String what)
	{
		return "Training: " + what;
	}

	private static String imageKeyOf(final Skill skill)
	{
		return imageKeyOf(skill.getName().toLowerCase());
	}

	private static String imageKeyOf(final String what)
	{
		return "icon_" + what;
	}

	// Currently just the area name; areaType is accepted for future formatting.
	private static String exploring(DiscordAreaType areaType, String areaName)
	{
		return areaName;
	}

	/** Maps a skill to its TRAINING_* constant, or null for unknown skills. */
	public static DiscordGameEventType fromSkill(final Skill skill)
	{
		switch (skill)
		{
			case ATTACK: return TRAINING_ATTACK;
			case DEFENCE: return TRAINING_DEFENCE;
			case STRENGTH: return TRAINING_STRENGTH;
			case RANGED: return TRAINING_RANGED;
			case PRAYER: return TRAINING_PRAYER;
			case MAGIC: return TRAINING_MAGIC;
			case COOKING: return TRAINING_COOKING;
			case WOODCUTTING: return TRAINING_WOODCUTTING;
			case FLETCHING: return TRAINING_FLETCHING;
			case FISHING: return TRAINING_FISHING;
			case FIREMAKING: return TRAINING_FIREMAKING;
			case CRAFTING: return TRAINING_CRAFTING;
			case SMITHING: return TRAINING_SMITHING;
			case MINING: return TRAINING_MINING;
			case HERBLORE: return TRAINING_HERBLORE;
			case AGILITY: return TRAINING_AGILITY;
			case THIEVING: return TRAINING_THIEVING;
			case SLAYER: return TRAINING_SLAYER;
			case FARMING: return TRAINING_FARMING;
			case RUNECRAFT: return TRAINING_RUNECRAFT;
			case HUNTER: return TRAINING_HUNTER;
			case CONSTRUCTION: return TRAINING_CONSTRUCTION;
			default: return null;
		}
	}

	/** Looks up the location event for a map region ID, or null if none. */
	public static DiscordGameEventType fromRegion(final int regionId)
	{
		return FROM_REGION.get(regionId);
	}

	/** Returns the first varbit-keyed event whose varbit is non-zero, or null. */
	public static DiscordGameEventType fromVarbit(final Client client)
	{
		for (DiscordGameEventType fromVarbit : FROM_VARBITS)
		{
			if (client.getVar(fromVarbit.getVarbits()) != 0)
			{
				return fromVarbit;
			}
		}
		return null;
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.arrow.gandiva.expression; import org.apache.arrow.flatbuf.DateUnit; import org.apache.arrow.flatbuf.TimeUnit; import org.apache.arrow.flatbuf.Type; import org.apache.arrow.gandiva.exceptions.GandivaException; import org.apache.arrow.gandiva.exceptions.UnsupportedTypeException; import org.apache.arrow.gandiva.ipc.GandivaTypes; import org.apache.arrow.util.Preconditions; import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.Schema; /** * Utility methods to convert between Arrow and Gandiva types. 
*/ public class ArrowTypeHelper { private ArrowTypeHelper() {} static final int WIDTH_8 = 8; static final int WIDTH_16 = 16; static final int WIDTH_32 = 32; static final int WIDTH_64 = 64; private static void initArrowTypeInt( ArrowType.Int intType, GandivaTypes.ExtGandivaType.Builder builder) throws GandivaException { int width = intType.getBitWidth(); if (intType.getIsSigned()) { switch (width) { case WIDTH_8: { builder.setType(GandivaTypes.GandivaType.INT8); return; } case WIDTH_16: { builder.setType(GandivaTypes.GandivaType.INT16); return; } case WIDTH_32: { builder.setType(GandivaTypes.GandivaType.INT32); return; } case WIDTH_64: { builder.setType(GandivaTypes.GandivaType.INT64); return; } default: { throw new UnsupportedTypeException("Unsupported width for integer type"); } } } // unsigned int switch (width) { case WIDTH_8: { builder.setType(GandivaTypes.GandivaType.UINT8); return; } case WIDTH_16: { builder.setType(GandivaTypes.GandivaType.UINT16); return; } case WIDTH_32: { builder.setType(GandivaTypes.GandivaType.UINT32); return; } case WIDTH_64: { builder.setType(GandivaTypes.GandivaType.UINT64); return; } default: { throw new UnsupportedTypeException("Unsupported width for integer type"); } } } private static void initArrowTypeFloat( ArrowType.FloatingPoint floatType, GandivaTypes.ExtGandivaType.Builder builder) throws GandivaException { switch (floatType.getPrecision()) { case HALF: { builder.setType(GandivaTypes.GandivaType.HALF_FLOAT); break; } case SINGLE: { builder.setType(GandivaTypes.GandivaType.FLOAT); break; } case DOUBLE: { builder.setType(GandivaTypes.GandivaType.DOUBLE); break; } default: { throw new UnsupportedTypeException("Floating point type with unknown precision"); } } } private static void initArrowTypeDecimal(ArrowType.Decimal decimalType, GandivaTypes.ExtGandivaType.Builder builder) { Preconditions.checkArgument(decimalType.getPrecision() > 0 && decimalType.getPrecision() <= 38, "Gandiva only supports decimals of upto 38 " + 
"precision. Input precision : " + decimalType.getPrecision()); builder.setPrecision(decimalType.getPrecision()); builder.setScale(decimalType.getScale()); builder.setType(GandivaTypes.GandivaType.DECIMAL); } private static void initArrowTypeDate(ArrowType.Date dateType, GandivaTypes.ExtGandivaType.Builder builder) { short dateUnit = dateType.getUnit().getFlatbufID(); switch (dateUnit) { case DateUnit.DAY: { builder.setType(GandivaTypes.GandivaType.DATE32); break; } case DateUnit.MILLISECOND: { builder.setType(GandivaTypes.GandivaType.DATE64); break; } default: { // not supported break; } } } private static void initArrowTypeTime(ArrowType.Time timeType, GandivaTypes.ExtGandivaType.Builder builder) { short timeUnit = timeType.getUnit().getFlatbufID(); switch (timeUnit) { case TimeUnit.SECOND: { builder.setType(GandivaTypes.GandivaType.TIME32); builder.setTimeUnit(GandivaTypes.TimeUnit.SEC); break; } case TimeUnit.MILLISECOND: { builder.setType(GandivaTypes.GandivaType.TIME32); builder.setTimeUnit(GandivaTypes.TimeUnit.MILLISEC); break; } case TimeUnit.MICROSECOND: { builder.setType(GandivaTypes.GandivaType.TIME64); builder.setTimeUnit(GandivaTypes.TimeUnit.MICROSEC); break; } case TimeUnit.NANOSECOND: { builder.setType(GandivaTypes.GandivaType.TIME64); builder.setTimeUnit(GandivaTypes.TimeUnit.NANOSEC); break; } default: { // not supported } } } private static void initArrowTypeTimestamp(ArrowType.Timestamp timestampType, GandivaTypes.ExtGandivaType.Builder builder) { short timeUnit = timestampType.getUnit().getFlatbufID(); switch (timeUnit) { case TimeUnit.SECOND: { builder.setType(GandivaTypes.GandivaType.TIMESTAMP); builder.setTimeUnit(GandivaTypes.TimeUnit.SEC); break; } case TimeUnit.MILLISECOND: { builder.setType(GandivaTypes.GandivaType.TIMESTAMP); builder.setTimeUnit(GandivaTypes.TimeUnit.MILLISEC); break; } case TimeUnit.MICROSECOND: { builder.setType(GandivaTypes.GandivaType.TIMESTAMP); builder.setTimeUnit(GandivaTypes.TimeUnit.MICROSEC); break; } case 
TimeUnit.NANOSECOND: { builder.setType(GandivaTypes.GandivaType.TIMESTAMP); builder.setTimeUnit(GandivaTypes.TimeUnit.NANOSEC); break; } default: { // not supported } } } /** * Converts an arrow type into a protobuf. * * @param arrowType Arrow type to be converted * @return Protobuf representing the arrow type */ public static GandivaTypes.ExtGandivaType arrowTypeToProtobuf(ArrowType arrowType) throws GandivaException { GandivaTypes.ExtGandivaType.Builder builder = GandivaTypes.ExtGandivaType.newBuilder(); byte typeId = arrowType.getTypeID().getFlatbufID(); switch (typeId) { case Type.NONE: { // 0 builder.setType(GandivaTypes.GandivaType.NONE); break; } case Type.Null: { // 1 // TODO: Need to handle this later break; } case Type.Int: { // 2 ArrowTypeHelper.initArrowTypeInt((ArrowType.Int) arrowType, builder); break; } case Type.FloatingPoint: { // 3 ArrowTypeHelper.initArrowTypeFloat((ArrowType.FloatingPoint) arrowType, builder); break; } case Type.Binary: { // 4 builder.setType(GandivaTypes.GandivaType.BINARY); break; } case Type.Utf8: { // 5 builder.setType(GandivaTypes.GandivaType.UTF8); break; } case Type.Bool: { // 6 builder.setType(GandivaTypes.GandivaType.BOOL); break; } case Type.Decimal: { // 7 ArrowTypeHelper.initArrowTypeDecimal((ArrowType.Decimal) arrowType, builder); break; } case Type.Date: { // 8 ArrowTypeHelper.initArrowTypeDate((ArrowType.Date) arrowType, builder); break; } case Type.Time: { // 9 ArrowTypeHelper.initArrowTypeTime((ArrowType.Time) arrowType, builder); break; } case Type.Timestamp: { // 10 ArrowTypeHelper.initArrowTypeTimestamp((ArrowType.Timestamp) arrowType, builder); break; } case Type.Interval: { // 11 break; } case Type.List: { // 12 break; } case Type.Struct_: { // 13 break; } case Type.Union: { // 14 break; } case Type.FixedSizeBinary: { // 15 break; } case Type.FixedSizeList: { // 16 break; } case Type.Map: { // 17 break; } default: { break; } } if (!builder.hasType()) { // type has not been set // throw an exception throw 
new UnsupportedTypeException("Unsupported type" + arrowType.toString()); } return builder.build(); } /** * Converts an arrow field object to a protobuf. * @param field Arrow field to be converted * @return Protobuf representing the arrow field */ public static GandivaTypes.Field arrowFieldToProtobuf(Field field) throws GandivaException { GandivaTypes.Field.Builder builder = GandivaTypes.Field.newBuilder(); builder.setName(field.getName()); builder.setType(ArrowTypeHelper.arrowTypeToProtobuf(field.getType())); builder.setNullable(field.isNullable()); for (Field child : field.getChildren()) { builder.addChildren(ArrowTypeHelper.arrowFieldToProtobuf(child)); } return builder.build(); } /** * Converts a schema object to a protobuf. * @param schema Schema object to be converted * @return Protobuf representing a schema object */ public static GandivaTypes.Schema arrowSchemaToProtobuf(Schema schema) throws GandivaException { GandivaTypes.Schema.Builder builder = GandivaTypes.Schema.newBuilder(); for (Field field : schema.getFields()) { builder.addColumns(ArrowTypeHelper.arrowFieldToProtobuf(field)); } return builder.build(); } }
/** * Copyright (c) 2002-2015 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.neo4j.driver.v1.internal.packstream; import java.io.IOException; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.nio.charset.Charset; import java.util.List; import java.util.Map; import static java.lang.Integer.toHexString; import static java.lang.String.format; import static java.util.Arrays.asList; /** * PackStream is a messaging serialisation format heavily inspired by MessagePack. * The key differences are in the type system itself which (among other things) replaces extensions with structures. * The Packer and Unpacker implementations are also faster than their MessagePack counterparts. * * Note that several marker byte values are RESERVED for future use. * Extra markers should <em>not</em> be added casually and such additions must be follow a strict process involving both client and server software. * * The table below shows all allocated marker byte values. 
* * <table> * <tr><th>Marker</th><th>Binary</th><th>Type</th><th>Description</th></tr> * <tr><td><code>00..7F</code></td><td><code>0xxxxxxx</code></td><td>+TINY_INT</td><td>Integer 0 to 127</td></tr> * <tr><td><code>80..8F</code></td><td><code>1000xxxx</code></td><td>TINY_STRING</td><td></td></tr> * <tr><td><code>90..9F</code></td><td><code>1001xxxx</code></td><td>TINY_LIST</td><td></td></tr> * <tr><td><code>A0..AF</code></td><td><code>1010xxxx</code></td><td>TINY_MAP</td><td></td></tr> * <tr><td><code>B0..BF</code></td><td><code>1011xxxx</code></td><td>TINY_STRUCT</td><td></td></tr> * <tr><td><code>C0</code></td><td><code>11000000</code></td><td>NULL</td><td></td></tr> * <tr><td><code>C1</code></td><td><code>11000001</code></td><td>FLOAT_64</td><td>64-bit floating point number (double)</td></tr> * <tr><td><code>C2</code></td><td><code>11000010</code></td><td>FALSE</td><td>Boolean false</td></tr> * <tr><td><code>C3</code></td><td><code>11000011</code></td><td>TRUE</td><td>Boolean true</td></tr> * <tr><td><code>C4..C7</code></td><td><code>110001xx</code></td><td><em>RESERVED</em></td><td></td></tr> * <tr><td><code>C8</code></td><td><code>11001000</code></td><td>INT_8</td><td>8-bit signed integer</td></tr> * <tr><td><code>C9</code></td><td><code>11001001</code></td><td>INT_16</td><td>16-bit signed integer</td></tr> * <tr><td><code>CA</code></td><td><code>11001010</code></td><td>INT_32</td><td>32-bit signed integer</td></tr> * <tr><td><code>CB</code></td><td><code>11001011</code></td><td>INT_64</td><td>64-bit signed integer</td></tr> * <tr><td><code>CC</code></td><td><code>11001100</code></td><td>BYTES_8</td><td>Byte string (fewer than 2<sup>8</sup> bytes)</td></tr> * <tr><td><code>CD</code></td><td><code>11001101</code></td><td>BYTES_16</td><td>Byte string (fewer than 2<sup>16</sup> bytes)</td></tr> * <tr><td><code>CE</code></td><td><code>11001110</code></td><td>BYTES_32</td><td>Byte string (fewer than 2<sup>32</sup> bytes)</td></tr> * 
<tr><td><code>CF</code></td><td><code>11001111</code></td><td><em>RESERVED</em></td><td></td></tr> * <tr><td><code>D0</code></td><td><code>11010000</code></td><td>STRING_8</td><td>UTF-8 encoded string (fewer than 2<sup>8</sup> bytes)</td></tr> * <tr><td><code>D1</code></td><td><code>11010001</code></td><td>STRING_16</td><td>UTF-8 encoded string (fewer than 2<sup>16</sup> bytes)</td></tr> * <tr><td><code>D2</code></td><td><code>11010010</code></td><td>STRING_32</td><td>UTF-8 encoded string (fewer than 2<sup>32</sup> bytes)</td></tr> * <tr><td><code>D3</code></td><td><code>11010011</code></td><td><em>RESERVED</em></td><td></td></tr> * <tr><td><code>D4</code></td><td><code>11010100</code></td><td>LIST_8</td><td>List (fewer than 2<sup>8</sup> items)</td></tr> * <tr><td><code>D5</code></td><td><code>11010101</code></td><td>LIST_16</td><td>List (fewer than 2<sup>16</sup> items)</td></tr> * <tr><td><code>D6</code></td><td><code>11010110</code></td><td>LIST_32</td><td>List (fewer than 2<sup>32</sup> items)</td></tr> * <tr><td><code>D7</code></td><td><code>11010111</code></td><td><em>RESERVED</em></td><td></td></tr> * <tr><td><code>D8</code></td><td><code>11011000</code></td><td>MAP_8</td><td>Map (fewer than 2<sup>8</sup> key:value pairs)</td></tr> * <tr><td><code>D9</code></td><td><code>11011001</code></td><td>MAP_16</td><td>Map (fewer than 2<sup>16</sup> key:value pairs)</td></tr> * <tr><td><code>DA</code></td><td><code>11011010</code></td><td>MAP_32</td><td>Map (fewer than 2<sup>32</sup> key:value pairs)</td></tr> * <tr><td><code>DB</code></td><td><code>11011011</code></td><td><em>RESERVED</em></td><td></td></tr> * <tr><td><code>DC</code></td><td><code>11011100</code></td><td>STRUCT_8</td><td>Structure (fewer than 2<sup>8</sup> fields)</td></tr> * <tr><td><code>DD</code></td><td><code>11011101</code></td><td>STRUCT_16</td><td>Structure (fewer than 2<sup>16</sup> fields)</td></tr> * <tr><td><code>DE</code></td><td><code>11011110</code></td><td>STRUCT_32</td><td>Structure 
(fewer than 2<sup>32</sup> fields)</td></tr>
 * <tr><td><code>DF</code></td><td><code>11011111</code></td><td><em>RESERVED</em></td><td></td></tr>
 * <tr><td><code>E0..EF</code></td><td><code>1110xxxx</code></td><td><em>RESERVED</em></td><td></td></tr>
 * <tr><td><code>F0..FF</code></td><td><code>1111xxxx</code></td><td>-TINY_INT</td><td>Integer -1 to -16</td></tr>
 * </table>
 *
 */
public class PackStream
{
    // Marker bytes; see the table in the class javadoc for the full layout.
    public static final byte TINY_STRING = (byte) 0x80;
    public static final byte TINY_LIST = (byte) 0x90;
    public static final byte TINY_MAP = (byte) 0xA0;
    public static final byte TINY_STRUCT = (byte) 0xB0;
    public static final byte NULL = (byte) 0xC0;
    public static final byte FLOAT_64 = (byte) 0xC1;
    public static final byte FALSE = (byte) 0xC2;
    public static final byte TRUE = (byte) 0xC3;
    public static final byte RESERVED_C4 = (byte) 0xC4;
    public static final byte RESERVED_C5 = (byte) 0xC5;
    public static final byte RESERVED_C6 = (byte) 0xC6;
    public static final byte RESERVED_C7 = (byte) 0xC7;
    public static final byte INT_8 = (byte) 0xC8;
    public static final byte INT_16 = (byte) 0xC9;
    public static final byte INT_32 = (byte) 0xCA;
    public static final byte INT_64 = (byte) 0xCB;
    public static final byte BYTES_8 = (byte) 0xCC;
    public static final byte BYTES_16 = (byte) 0xCD;
    public static final byte BYTES_32 = (byte) 0xCE;
    public static final byte RESERVED_CF = (byte) 0xCF;
    public static final byte STRING_8 = (byte) 0xD0;
    public static final byte STRING_16 = (byte) 0xD1;
    public static final byte STRING_32 = (byte) 0xD2;
    public static final byte RESERVED_D3 = (byte) 0xD3;
    public static final byte LIST_8 = (byte) 0xD4;
    public static final byte LIST_16 = (byte) 0xD5;
    public static final byte LIST_32 = (byte) 0xD6;
    public static final byte RESERVED_D7 = (byte) 0xD7;
    public static final byte MAP_8 = (byte) 0xD8;
    public static final byte MAP_16 = (byte) 0xD9;
    public static final byte MAP_32 = (byte) 0xDA;
    public static final byte RESERVED_DB = (byte) 0xDB;
    public static final byte STRUCT_8 = (byte) 0xDC;
    public static final byte STRUCT_16 = (byte) 0xDD;
    public static final byte RESERVED_DE = (byte) 0xDE; // TODO STRUCT_32? or the class javadoc is wrong?
    public static final byte RESERVED_DF = (byte) 0xDF;
    public static final byte RESERVED_E0 = (byte) 0xE0;
    public static final byte RESERVED_E1 = (byte) 0xE1;
    public static final byte RESERVED_E2 = (byte) 0xE2;
    public static final byte RESERVED_E3 = (byte) 0xE3;
    public static final byte RESERVED_E4 = (byte) 0xE4;
    public static final byte RESERVED_E5 = (byte) 0xE5;
    public static final byte RESERVED_E6 = (byte) 0xE6;
    public static final byte RESERVED_E7 = (byte) 0xE7;
    public static final byte RESERVED_E8 = (byte) 0xE8;
    public static final byte RESERVED_E9 = (byte) 0xE9;
    public static final byte RESERVED_EA = (byte) 0xEA;
    public static final byte RESERVED_EB = (byte) 0xEB;
    public static final byte RESERVED_EC = (byte) 0xEC;
    public static final byte RESERVED_ED = (byte) 0xED;
    public static final byte RESERVED_EE = (byte) 0xEE;
    public static final byte RESERVED_EF = (byte) 0xEF;

    // Signed-integer range boundaries used to pick the most compact encoding.
    private static final long PLUS_2_TO_THE_31 = 2147483648L;
    private static final long PLUS_2_TO_THE_15 = 32768L;
    private static final long PLUS_2_TO_THE_7 = 128L;
    private static final long MINUS_2_TO_THE_4 = -16L;
    private static final long MINUS_2_TO_THE_7 = -128L;
    private static final long MINUS_2_TO_THE_15 = -32768L;
    private static final long MINUS_2_TO_THE_31 = -2147483648L;

    private static final String EMPTY_STRING = "";
    private static final Charset UTF_8 = Charset.forName( "UTF-8" );

    private static final int DEFAULT_BUFFER_CAPACITY = 8192;

    private PackStream() {}

    /** Serialises values into PackStream format through a {@link PackOutput}. */
    public static class Packer
    {
        private PackOutput out;

        public Packer( PackOutput out )
        {
            this.out = out;
        }

        public void reset( PackOutput out )
        {
            this.out = out;
        }

        // NOTE(review): assumes the current out is a BufferedChannelOutput;
        // any other PackOutput implementation will throw ClassCastException.
        public void reset( WritableByteChannel channel )
        {
            ((BufferedChannelOutput) out).reset( channel );
        }

        public void flush() throws IOException
        {
            out.flush();
        }

        /** Writes raw bytes with no marker/header. Callers must emit the header first. */
        public void packRaw( byte[] data ) throws IOException
        {
            out.writeBytes( data, 0, data.length );
        }

        public void packNull() throws IOException
        {
            out.writeByte( NULL );
        }

        public void pack( boolean value ) throws IOException
        {
            out.writeByte( value ? TRUE : FALSE );
        }

        /** Packs an integer using the smallest encoding that can represent it. */
        public void pack( long value ) throws IOException
        {
            if ( value >= MINUS_2_TO_THE_4 && value < PLUS_2_TO_THE_7 )
            {
                // -16..127 fits in the marker byte itself (TINY_INT).
                out.writeByte( (byte) value );
            }
            else if ( value >= MINUS_2_TO_THE_7 && value < MINUS_2_TO_THE_4 )
            {
                out.writeByte( INT_8 )
                   .writeByte( (byte) value );
            }
            else if ( value >= MINUS_2_TO_THE_15 && value < PLUS_2_TO_THE_15 )
            {
                out.writeByte( INT_16 )
                   .writeShort( (short) value );
            }
            else if ( value >= MINUS_2_TO_THE_31 && value < PLUS_2_TO_THE_31 )
            {
                out.writeByte( INT_32 )
                   .writeInt( (int) value );
            }
            else
            {
                out.writeByte( INT_64 )
                   .writeLong( value );
            }
        }

        public void pack( double value ) throws IOException
        {
            out.writeByte( FLOAT_64 )
               .writeDouble( value );
        }

        public void pack( byte[] values ) throws IOException
        {
            if ( values == null ) { packNull(); }
            else
            {
                packBytesHeader( values.length );
                packRaw( values );
            }
        }

        public void pack( String value ) throws IOException
        {
            if ( value == null ) { packNull(); }
            else
            {
                byte[] utf8 = value.getBytes( UTF_8 );
                packStringHeader( utf8.length );
                packRaw( utf8 );
            }
        }

        /** Packs a string whose UTF-8 encoding the caller has already computed. */
        public void packString( byte[] utf8 ) throws IOException
        {
            if ( utf8 == null ) { packNull(); }
            else
            {
                packStringHeader( utf8.length );
                packRaw( utf8 );
            }
        }

        public void pack( List values ) throws IOException
        {
            if ( values == null ) { packNull(); }
            else
            {
                packListHeader( values.size() );
                for ( Object value : values )
                {
                    pack( value );
                }
            }
        }

        public void pack( Map values ) throws IOException
        {
            if ( values == null ) { packNull(); }
            else
            {
                packMapHeader( values.size() );
                for ( Object key : values.keySet() )
                {
                    pack( key );
                    pack( values.get( key ) );
                }
            }
        }

        /**
         * Packs an arbitrary value by runtime type. Arrays are encoded as lists.
         * (Fixed: the previous code called {@code asList( value )} where the
         * argument was statically {@code Object}, so varargs wrapped the array
         * itself into a one-element list; packing that list dispatched straight
         * back to the same array branch, recursing forever. Arrays are now
         * unrolled element by element under a proper list header.)
         *
         * @throws Unpackable if the runtime type has no PackStream encoding
         */
        public void pack( Object value ) throws IOException
        {
            if ( value == null ) { packNull(); }
            else if ( value instanceof Boolean ) { pack( (boolean) value ); }
            else if ( value instanceof boolean[] )
            {
                boolean[] array = (boolean[]) value;
                packListHeader( array.length );
                for ( boolean item : array ) { pack( item ); }
            }
            else if ( value instanceof Byte ) { pack( (byte) value ); }
            else if ( value instanceof byte[] ) { pack( (byte[]) value ); }
            else if ( value instanceof Short ) { pack( (short) value ); }
            else if ( value instanceof short[] )
            {
                short[] array = (short[]) value;
                packListHeader( array.length );
                for ( short item : array ) { pack( item ); }
            }
            else if ( value instanceof Integer ) { pack( (int) value ); }
            else if ( value instanceof int[] )
            {
                int[] array = (int[]) value;
                packListHeader( array.length );
                for ( int item : array ) { pack( item ); }
            }
            else if ( value instanceof Long ) { pack( (long) value ); }
            else if ( value instanceof long[] )
            {
                long[] array = (long[]) value;
                packListHeader( array.length );
                for ( long item : array ) { pack( item ); }
            }
            else if ( value instanceof Float ) { pack( (float) value ); }
            else if ( value instanceof float[] )
            {
                float[] array = (float[]) value;
                packListHeader( array.length );
                for ( float item : array ) { pack( item ); }
            }
            else if ( value instanceof Double ) { pack( (double) value ); }
            else if ( value instanceof double[] )
            {
                double[] array = (double[]) value;
                packListHeader( array.length );
                for ( double item : array ) { pack( item ); }
            }
            else if ( value instanceof Character ) { pack( Character.toString( (char) value ) ); }
            else if ( value instanceof char[] ) { pack( new String( (char[]) value ) ); }
            else if ( value instanceof String ) { pack( (String) value ); }
            else if ( value instanceof String[] )
            {
                // Casting to Object[] lets varargs spread the elements.
                pack( asList( (Object[]) value ) );
            }
            else if ( value instanceof List ) { pack( (List) value ); }
            else if ( value instanceof Map ) { pack( (Map) value ); }
            else { throw new Unpackable( format( "Cannot pack object %s", value ) ); }
        }

        public void packBytesHeader( int size ) throws IOException
        {
            if ( size <= Byte.MAX_VALUE )
            {
                out.writeByte( BYTES_8 )
                   .writeByte( (byte) size );
            }
            else if ( size <= Short.MAX_VALUE )
            {
                out.writeByte( BYTES_16 )
                   .writeShort( (short) size );
            }
            else
            {
                out.writeByte( BYTES_32 )
                   .writeInt( size );
            }
        }

        public void packStringHeader( int size ) throws IOException
        {
            if ( size < 0x10 )
            {
                out.writeByte( (byte) (TINY_STRING | size) );
            }
            else if ( size <= Byte.MAX_VALUE )
            {
                out.writeByte( STRING_8 )
                   .writeByte( (byte) size );
            }
            else if ( size <= Short.MAX_VALUE )
            {
                out.writeByte( STRING_16 )
                   .writeShort( (short) size );
            }
            else
            {
                out.writeByte( STRING_32 )
                   .writeInt( size );
            }
        }

        public void packListHeader( int size ) throws IOException
        {
            if ( size < 0x10 )
            {
                out.writeByte( (byte) (TINY_LIST | size) );
            }
            else if ( size <= Byte.MAX_VALUE )
            {
                out.writeByte( LIST_8 )
                   .writeByte( (byte) size );
            }
            else if ( size <= Short.MAX_VALUE )
            {
                out.writeByte( LIST_16 )
                   .writeShort( (short) size );
            }
            else
            {
                out.writeByte( LIST_32 )
                   .writeInt( size );
            }
        }

        public void packMapHeader( int size ) throws IOException
        {
            if ( size < 0x10 )
            {
                out.writeByte( (byte) (TINY_MAP | size) );
            }
            else if ( size <= Byte.MAX_VALUE )
            {
                out.writeByte( MAP_8 )
                   .writeByte( (byte) size );
            }
            else if ( size <= Short.MAX_VALUE )
            {
                out.writeByte( MAP_16 )
                   .writeShort( (short) size );
            }
            else
            {
                out.writeByte( MAP_32 )
                   .writeInt( size );
            }
        }

        /**
         * Writes a structure header. There is no STRUCT_32 encoding, so more
         * than {@link Short#MAX_VALUE} fields cannot be represented.
         */
        public void packStructHeader( int size, byte signature ) throws IOException
        {
            if ( size < 0x10 )
            {
                out.writeByte( (byte) (TINY_STRUCT | size) )
                   .writeByte( signature );
            }
            else if ( size <= Byte.MAX_VALUE )
            {
                out.writeByte( STRUCT_8 )
                   .writeByte( (byte) size )
                   .writeByte( signature );
            }
            else if ( size <= Short.MAX_VALUE )
            {
                out.writeByte( STRUCT_16 )
                   .writeShort( (short) size )
                   .writeByte( signature );
            }
            else
            {
                throw new Overflow( "Structures cannot have more than " + Short.MAX_VALUE + " fields" );
            }
        }
    }

    /** Deserialises PackStream data read from a {@link PackInput}. */
    public static class Unpacker
    {
        private PackInput in;

        public Unpacker( ReadableByteChannel channel )
        {
            this( DEFAULT_BUFFER_CAPACITY );
            reset( channel );
        }

        public Unpacker( int bufferCapacity )
        {
            assert bufferCapacity >= 8 : "Buffer must be at least 8 bytes.";
            this.in = new BufferedChannelInput( bufferCapacity );
        }

        public Unpacker( PackInput in )
        {
            this.in = in;
        }

        // NOTE(review): assumes in is a BufferedChannelInput; other PackInput
        // implementations will throw ClassCastException here.
        public Unpacker reset( ReadableByteChannel ch )
        {
            ((BufferedChannelInput) in).reset( ch );
            return this;
        }

        public boolean hasNext() throws IOException
        {
            return in.hasMoreData();
        }

        /** Reads a struct header and returns the field count (signature byte follows). */
        public long unpackStructHeader() throws IOException
        {
            final byte markerByte = in.readByte();
            final byte markerHighNibble = (byte) (markerByte & 0xF0);
            final byte markerLowNibble = (byte) (markerByte & 0x0F);

            if ( markerHighNibble == TINY_STRUCT ) { return markerLowNibble; }
            switch ( markerByte )
            {
                case STRUCT_8: return unpackUINT8();
                case STRUCT_16: return unpackUINT16();
                default: throw new Unexpected( "Expected a struct, but got: " + toHexString( markerByte ) );
            }
        }

        public byte unpackStructSignature() throws IOException
        {
            return in.readByte();
        }

        public long unpackListHeader() throws IOException
        {
            final byte markerByte = in.readByte();
            final byte markerHighNibble = (byte) (markerByte & 0xF0);
            final byte markerLowNibble = (byte) (markerByte & 0x0F);

            if ( markerHighNibble == TINY_LIST ) { return markerLowNibble; }
            switch ( markerByte )
            {
                case LIST_8: return unpackUINT8();
                case LIST_16: return unpackUINT16();
                case LIST_32: return unpackUINT32();
                default: throw new Unexpected( "Expected a list, but got: " + toHexString( markerByte & 0xFF ) );
            }
        }

        public long unpackMapHeader() throws IOException
        {
            final byte markerByte = in.readByte();
            final byte markerHighNibble = (byte) (markerByte & 0xF0);
            final byte markerLowNibble = (byte) (markerByte & 0x0F);

            if ( markerHighNibble == TINY_MAP ) { return markerLowNibble; }
            switch ( markerByte )
            {
                case MAP_8: return unpackUINT8();
                case MAP_16: return unpackUINT16();
                case MAP_32: return unpackUINT32();
                default: throw new Unexpected( "Expected a map, but got: " + toHexString( markerByte ) );
            }
        }

        public long unpackLong() throws IOException
        {
            final byte markerByte = in.readByte();
            // Any marker >= -16 is a TINY_INT and is its own value.
            if ( markerByte >= MINUS_2_TO_THE_4 ) { return markerByte; }
            switch ( markerByte )
            {
                case INT_8: return in.readByte();
                case INT_16: return in.readShort();
                case INT_32: return in.readInt();
                case INT_64: return in.readLong();
                default: throw new Unexpected( "Expected an integer, but got: " + toHexString( markerByte ) );
            }
        }

        public double unpackDouble() throws IOException
        {
            final byte markerByte = in.readByte();
            if ( markerByte == FLOAT_64 )
            {
                return in.readDouble();
            }
            throw new Unexpected( "Expected a double, but got: " + toHexString( markerByte ) );
        }

        public String unpackString() throws IOException
        {
            final byte markerByte = in.readByte();
            if ( markerByte == TINY_STRING ) // Note no mask, so we compare to 0x80.
            {
                return EMPTY_STRING;
            }
            return new String( unpackUtf8( markerByte ), UTF_8 );
        }

        public byte[] unpackBytes() throws IOException
        {
            final byte markerByte = in.readByte();
            switch ( markerByte )
            {
                case BYTES_8: return unpackBytes( unpackUINT8() );
                case BYTES_16: return unpackBytes( unpackUINT16() );
                case BYTES_32:
                {
                    long size = unpackUINT32();
                    if ( size <= Integer.MAX_VALUE )
                    {
                        return unpackBytes( (int) size );
                    }
                    else
                    {
                        throw new Overflow( "BYTES_32 too long for Java" );
                    }
                }
                default: throw new Unexpected( "Expected binary data, but got: 0x" + toHexString( markerByte & 0xFF ) );
            }
        }

        /**
         * This may seem confusing. This method exists to move forward the internal pointer when encountering
         * a null value. The idiomatic usage would be someone using {@link #peekNextType()} to detect a null type,
         * and then this method to "skip past it".
         * @return null
         * @throws IOException if the unpacked value was not null
         */
        public Object unpackNull() throws IOException
        {
            final byte markerByte = in.readByte();
            if ( markerByte != NULL )
            {
                throw new Unexpected( "Expected a null, but got: 0x" + toHexString( markerByte & 0xFF ) );
            }
            return null;
        }

        private byte[] unpackUtf8( byte markerByte ) throws IOException
        {
            final byte markerHighNibble = (byte) (markerByte & 0xF0);
            final byte markerLowNibble = (byte) (markerByte & 0x0F);

            if ( markerHighNibble == TINY_STRING ) { return unpackBytes( markerLowNibble ); }
            switch ( markerByte )
            {
                case STRING_8: return unpackBytes( unpackUINT8() );
                case STRING_16: return unpackBytes( unpackUINT16() );
                case STRING_32:
                {
                    long size = unpackUINT32();
                    if ( size <= Integer.MAX_VALUE )
                    {
                        return unpackBytes( (int) size );
                    }
                    else
                    {
                        throw new Overflow( "STRING_32 too long for Java" );
                    }
                }
                default: throw new Unexpected( "Expected a string, but got: 0x" + toHexString( markerByte & 0xFF ) );
            }
        }

        public boolean unpackBoolean() throws IOException
        {
            final byte markerByte = in.readByte();
            switch ( markerByte )
            {
                case TRUE: return true;
                case FALSE: return false;
                default: throw new Unexpected( "Expected a boolean, but got: 0x" + toHexString( markerByte & 0xFF ) );
            }
        }

        // Unsigned reads: mask away sign extension from the narrower types.
        private int unpackUINT8() throws IOException
        {
            return in.readByte() & 0xFF;
        }

        private int unpackUINT16() throws IOException
        {
            return in.readShort() & 0xFFFF;
        }

        private long unpackUINT32() throws IOException
        {
            return in.readInt() & 0xFFFFFFFFL;
        }

        private byte[] unpackBytes( int size ) throws IOException
        {
            byte[] heapBuffer = new byte[size];
            in.readBytes( heapBuffer, 0, heapBuffer.length );
            return heapBuffer;
        }

        /** Peeks the next marker byte (without consuming it) and classifies it. */
        public PackType peekNextType() throws IOException
        {
            final byte markerByte = in.peekByte();
            final byte markerHighNibble = (byte) (markerByte & 0xF0);

            switch ( markerHighNibble )
            {
                case TINY_STRING: return PackType.STRING;
                case TINY_LIST: return PackType.LIST;
                case TINY_MAP: return PackType.MAP;
                case TINY_STRUCT: return PackType.STRUCT;
            }

            switch ( markerByte )
            {
                case NULL: return PackType.NULL;
                case TRUE:
                case FALSE: return PackType.BOOLEAN;
                case FLOAT_64: return PackType.FLOAT;
                case BYTES_8:
                case BYTES_16:
                case BYTES_32: return PackType.BYTES;
                case STRING_8:
                case STRING_16:
                case STRING_32: return PackType.STRING;
                case LIST_8:
                case LIST_16:
                case LIST_32: return PackType.LIST;
                case MAP_8:
                case MAP_16:
                case MAP_32: return PackType.MAP;
                case STRUCT_8:
                case STRUCT_16: return PackType.STRUCT;
                // Everything else (including TINY_INT markers) is an integer.
                default: return PackType.INTEGER;
            }
        }
    }

    public static class PackstreamException extends IOException
    {
        public PackstreamException( String message ) { super( message ); }
    }

    public static class EndOfStream extends PackstreamException
    {
        public EndOfStream( String message ) { super( message ); }
    }

    public static class Overflow extends PackstreamException
    {
        public Overflow( String message ) { super( message ); }
    }

    public static class Unexpected extends PackstreamException
    {
        public Unexpected( String message ) { super( message ); }
    }

    public static class Unpackable extends PackstreamException
    {
        public Unpackable( String message ) { super( message ); }
    }
}
/* * The MIT License * * Copyright 2017 Arnaud Hamon * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package com.github.ptitnoony.components.fxtreemap.fximpl;

import com.github.ptitnoony.components.fxtreemap.MapData;
import com.github.ptitnoony.components.fxtreemap.Rect;
import com.github.ptitnoony.components.fxtreemap.TreeMap;
import com.github.ptitnoony.components.fxtreemap.TreeMapLayout;
import com.github.ptitnoony.components.fxtreemap.TreeMapStyle;
import com.github.ptitnoony.components.fxtreemap.TreeMapUtils;
import java.beans.PropertyChangeEvent;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import static javafx.application.Platform.runLater;
import javafx.geometry.Insets;
import javafx.scene.control.TreeItem;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import org.controlsfx.control.BreadCrumbBar;
import org.controlsfx.control.BreadCrumbBar.BreadCrumbActionEvent;

/**
 * JavaFX {@link TreeMap} implementation that renders one level of the data
 * hierarchy at a time in a {@link Pane}, with a {@link BreadCrumbBar} above it
 * for drilling down into and back out of nested {@link MapData} levels.
 *
 * @author ahamon
 */
public class FxTreeMap extends TreeMap {

    private static final Logger LOG = Logger.getGlobal();

    // Model for the root data level; treeMapLayout computes item rectangles.
    private final FxMapModel model;
    private final TreeMapLayout treeMapLayout;
    // layout stacks the bread crumb bar above the treemap pane.
    private VBox layout;
    private Pane pane;
    private BreadCrumbBar<MapData> breadCrumbBar;
    // currentModel/currentData track the drill-down level being displayed.
    private FxMapModel currentModel;
    private MapData currentData = null;
    // treeItems: bread-crumb tree node per data item (populated by createBar).
    private Map<MapData, TreeItem<MapData>> treeItems;
    // mapLevels: lazily-created model per data item that has been drilled into.
    private Map<MapData, FxMapModel> mapLevels;
    private TreeMapStyle style = new TreeMapStyle();

    /**
     * Creates a treemap for the given data.
     *
     * @param mapData root of the data hierarchy to display
     * @param withLayoutDelay whether layout updates are debounced (see TreeMap)
     */
    public FxTreeMap(MapData mapData, boolean withLayoutDelay) {
        super(withLayoutDelay);
        //
        treeMapLayout = new TreeMapLayout();
        mapLevels = new HashMap<>();
        treeItems = new HashMap<>();
        model = new FxMapModel(FxTreeMap.this, mapData, getWidth(), getHeight());
        mapData.addPropertyChangeListener(this::handleModelChange);
        mapLevels.put(model.getData(), model);
        currentModel = model;
        model.setTreeMapStyle(style);
        style.addPropertyChangeListener(this::handleStyleChanged);
        //
        layout = new VBox(8);
        layout.setPadding(new Insets(8));
        //
        breadCrumbBar = new BreadCrumbBar();
        pane = new Pane();
        pane.getChildren().addAll(model.getFxItems().stream().map(i -> i.getNode()).collect(Collectors.toList()));
        layout.getChildren().add(breadCrumbBar);
        layout.getChildren().add(pane);
        VBox.setVgrow(breadCrumbBar, Priority.NEVER);
        VBox.setVgrow(pane, Priority.ALWAYS);
        getContainer().getChildren().add(layout);
        // Anchor on all four sides so the treemap fills its container.
        AnchorPane.setBottomAnchor(layout, 0.0);
        AnchorPane.setLeftAnchor(layout, 0.0);
        AnchorPane.setRightAnchor(layout, 0.0);
        AnchorPane.setTopAnchor(layout, 0.0);
        //
        createBar();
        //
        breadCrumbBar.setAutoNavigationEnabled(true);
        breadCrumbBar.setOnCrumbAction((BreadCrumbActionEvent<MapData> bae) -> handleBreadCrumbEvent(bae));
        breadCrumbBar.setCrumbFactory((TreeItem<MapData> param) -> {
            String label = param != null && param.getValue() != null ? param.getValue().getName() : "?";
            return new BreadCrumbBar.BreadCrumbButton(label);
        });
        //
        // Defer the first layout pass until the scene graph has been set up.
        runLater(() -> requestLayoutUpdate());
    }

    /**
     * Creates a treemap for the given data without layout delay.
     *
     * @param data root of the data hierarchy to display
     */
    public FxTreeMap(MapData data) {
        this(data, false);
    }

    @Override
    public MapData getData() {
        return model.getData();
    }

    @Override
    public Color getBackgroundColor() {
        return style.getBackgroundColor();
    }

    @Override
    public Color getDataFill() {
        return style.getFillColor();
    }

    @Override
    public Color getDataStroke() {
        return style.getStrokeColor();
    }

    @Override
    public double getDataBorderRadius() {
        return style.getBorderRadius();
    }

    @Override
    public double getDataStrokeWidth() {
        return style.getStrokeWidth();
    }

    @Override
    public double getPadding() {
        return style.getPadding();
    }

    @Override
    public void setBackgroundColor(Color newBackgroundColor) {
        style.setBackgroundColor(newBackgroundColor);
    }

    @Override
    public void setDataFill(Color newBackgroundColor) {
        style.setFillColor(newBackgroundColor);
    }

    @Override
    public void setDataStroke(Color newStrokeColor) {
        style.setStrokeColor(newStrokeColor);
    }

    @Override
    public void setDataStrokeWidth(double newStrokeWidth) {
        style.setStokeWidth(newStrokeWidth);
    }

    @Override
    public void setDataBorderRadius(double newBorderRadius) {
        style.setBorderRadius(newBorderRadius);
    }

    @Override
    public void setPadding(double newPaddingValue) {
        style.setPadding(newPaddingValue);
    }

    /**
     * Handles a click on a treemap item: drills down into the clicked data,
     * creating (and caching) a model for that level on first visit.
     */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if (TreeMapUtils.ITEM_CLICKED.equals(evt.getPropertyName())) {
            MapData data = (MapData) evt.getNewValue();
            if (!mapLevels.containsKey(data)) {
                // Size (0, 0) is a placeholder; applyLayout() sets the real size.
                FxMapModel newDataModel = new FxMapModel(this, data, 0, 0);
                newDataModel.setTreeMapStyle(style);
                mapLevels.put(data, newDataModel);
                currentModel = newDataModel;
            } else {
                currentModel = mapLevels.get(data);
            }
            currentData = data;
            updateBreadCrumbBar();
            pane.getChildren().setAll(currentModel.getFxItems().stream().map(i -> i.getNode()).collect(Collectors.toList()));
            requestLayoutUpdate();
        }
    }

    /**
     * Set the spacing value between the BreadCrumbBar and the treemap items
     *
     * @param spacing new spacing value
     */
    public void setSpacing(double spacing) {
        layout.setSpacing(spacing);
    }

    // Recomputes item rectangles for the current level to fit the pane size.
    @Override
    protected void applyLayout() {
        LOG.log(Level.FINE, "Applying layout update");
        double width = pane != null ? pane.getWidth() : 0;
        double height = pane != null ? pane.getHeight() : 0;
        currentModel.setSize(width, height);
        treeMapLayout.layout(currentModel, new Rect(0, 0, width, height));
        currentModel.getFxItems().forEach(FxMapItem::applyLayout);
    }

    // Navigates back to the level selected in the bread crumb bar.
    // Only levels previously visited (present in mapLevels) can be restored.
    private void handleBreadCrumbEvent(BreadCrumbActionEvent<MapData> bae) {
        MapData clickedData = bae.getSelectedCrumb().getValue();
        if (!clickedData.equals(currentData)) {
            currentData = clickedData;
            if (!mapLevels.containsKey(currentData)) {
                LOG.log(Level.SEVERE, "Could not find map item for data :: {0}", currentData.getName());
            } else {
                currentModel = mapLevels.get(currentData);
            }
            pane.getChildren().setAll(currentModel.getFxItems().stream().map(i -> i.getNode()).collect(Collectors.toList()));
            requestLayoutUpdate();
        }
    }

    // Builds the bread-crumb tree for the whole data hierarchy up front.
    private void createBar() {
        TreeItem<MapData> root = new TreeItem<>(model.getData());
        createDataChildrenItems(model.getData(), root);
        breadCrumbBar.setSelectedCrumb(root);
    }

    // Recursively creates tree items for children that themselves have
    // children (leaf data cannot be drilled into, so it gets no crumb).
    private void createDataChildrenItems(MapData parentData, TreeItem<MapData> parentTreeItem) {
        parentData.getChildrenData().stream()
                .filter(childData -> childData.hasChildrenData())
                .map(childData -> {
                    TreeItem<MapData> item = new TreeItem<>(childData);
                    treeItems.put(childData, item);
                    createDataChildrenItems(childData, item);
                    return item;
                }).forEachOrdered(item ->
                parentTreeItem.getChildren().add(item)
        );
    }

    // Syncs the bread crumb selection with the level currently displayed.
    private void updateBreadCrumbBar() {
        breadCrumbBar.setSelectedCrumb(treeItems.get(currentData));
    }

    // Re-layouts when any data value changes, since item areas depend on values.
    private void handleModelChange(PropertyChangeEvent event) {
        if (TreeMapUtils.MAP_DATA_VALUE_CHANGED.equals(event.getPropertyName())) {
            requestLayoutUpdate();
        }
    }

    // Applies the (possibly updated) background color to the container.
    private void handleStyleChanged(PropertyChangeEvent event) {
        LOG.log(Level.FINE, "Updating after style changed: {0}", event);
        getContainer().setBackground(new Background(new BackgroundFill(style.getBackgroundColor(), CornerRadii.EMPTY, Insets.EMPTY)));
    }
}
/* * Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.lang.invoke; import sun.misc.Unsafe; import java.lang.reflect.Method; import java.util.Arrays; import sun.invoke.util.VerifyAccess; import static java.lang.invoke.MethodHandleNatives.Constants.*; import static java.lang.invoke.LambdaForm.*; import static java.lang.invoke.MethodTypeForm.*; import static java.lang.invoke.MethodHandleStatics.*; import java.lang.ref.WeakReference; import java.lang.reflect.Field; import sun.invoke.util.ValueConversions; import sun.invoke.util.VerifyType; import sun.invoke.util.Wrapper; /** * The flavor of method handle which implements a constant reference * to a class member. * @author jrose */ class DirectMethodHandle extends MethodHandle { final MemberName member; // Constructors and factory methods in this class *must* be package scoped or private. 
/** Sole constructor; used only by the factory methods below.  The member must already be resolved. */
    private DirectMethodHandle(MethodType mtype, LambdaForm form, MemberName member) {
        super(mtype, form);
        if (!member.isResolved())  throw new InternalError();
        this.member = member;
    }

    // Factory methods:

    /**
     * Makes a direct method handle for {@code member} as seen from {@code receiver}.
     * For instance members the receiver class is prepended to the method type.
     * Field members are routed to the Accessor/StaticAccessor subclasses below;
     * invocable members get a shared, cached LambdaForm.
     */
    static DirectMethodHandle make(Class<?> receiver, MemberName member) {
        MethodType mtype = member.getMethodOrFieldType();
        if (!member.isStatic()) {
            // Receiver must be a subtype of the declaring class; constructors go via makeAllocator.
            if (!member.getDeclaringClass().isAssignableFrom(receiver) || member.isConstructor())
                throw new InternalError(member.toString());
            mtype = mtype.insertParameterTypes(0, receiver);
        }
        if (!member.isField()) {
            LambdaForm lform = preparedLambdaForm(member);
            return new DirectMethodHandle(mtype, lform, member);
        } else {
            LambdaForm lform = preparedFieldLambdaForm(member);
            if (member.isStatic()) {
                long offset = MethodHandleNatives.staticFieldOffset(member);
                Object base = MethodHandleNatives.staticFieldBase(member);
                return new StaticAccessor(mtype, lform, member, base, offset);
            } else {
                long offset = MethodHandleNatives.objectFieldOffset(member);
                // Instance field offsets are stored as int in Accessor for compactness.
                assert(offset == (int)offset);
                return new Accessor(mtype, lform, member, (int)offset);
            }
        }
    }
    static DirectMethodHandle make(MemberName member) {
        if (member.isConstructor())
            return makeAllocator(member);
        return make(member.getDeclaringClass(), member);
    }
    static DirectMethodHandle make(Method method) {
        return make(method.getDeclaringClass(), new MemberName(method));
    }
    static DirectMethodHandle make(Field field) {
        return make(field.getDeclaringClass(), new MemberName(field));
    }
    /**
     * Makes a handle that allocates a fresh instance and runs the given
     * {@code <init>} on it, returning the new instance (REF_newInvokeSpecial).
     */
    private static DirectMethodHandle makeAllocator(MemberName ctor) {
        assert(ctor.isConstructor() && ctor.getName().equals("<init>"));
        Class<?> instanceClass = ctor.getDeclaringClass();
        ctor = ctor.asConstructor();
        assert(ctor.isConstructor() && ctor.getReferenceKind() == REF_newInvokeSpecial) : ctor;
        // External type returns the constructed instance rather than void.
        MethodType mtype = ctor.getMethodType().changeReturnType(instanceClass);
        LambdaForm lform = preparedLambdaForm(ctor);
        MemberName init = ctor.asSpecial();
        assert(init.getMethodType().returnType() == void.class);
        return new Constructor(mtype, lform, ctor, init, instanceClass);
    }

    @Override
    MethodHandle copyWith(MethodType mt, LambdaForm lf) {
        return new DirectMethodHandle(mt, lf, member);
    }

    @Override
    String internalProperties() {
        return "/DMH="+member.toString();
    }

    //// Implementation methods.

    @Override
    @ForceInline
    MemberName internalMemberName() {
        return member;
    }

    @Override
    MethodHandle bindArgument(int pos, char basicType, Object value) {
        // If the member needs dispatching, do so.
        if (pos == 0 && basicType == 'L') {
            DirectMethodHandle concrete = maybeRebind(value);
            if (concrete != null)
                return concrete.bindReceiver(value);
        }
        return super.bindArgument(pos, basicType, value);
    }

    @Override
    MethodHandle bindReceiver(Object receiver) {
        // If the member needs dispatching, do so.
        DirectMethodHandle concrete = maybeRebind(receiver);
        if (concrete != null)
            return concrete.bindReceiver(receiver);
        return super.bindReceiver(receiver);
    }

    private static final MemberName.Factory IMPL_NAMES = MemberName.getFactory();

    /**
     * For a virtual/interface member being bound to a known receiver, attempts
     * to pre-dispatch: resolve the method on the receiver's concrete class as
     * an invokespecial.  Returns the rebound handle, or null if no rebinding
     * applies (null receiver, other refKinds, or resolution failure).
     */
    private DirectMethodHandle maybeRebind(Object receiver) {
        if (receiver != null) {
            switch (member.getReferenceKind()) {
            case REF_invokeInterface:
            case REF_invokeVirtual:
                // Pre-dispatch the member.
                Class<?> concreteClass = receiver.getClass();
                MemberName concrete = new MemberName(concreteClass, member.getName(), member.getMethodType(), REF_invokeSpecial);
                concrete = IMPL_NAMES.resolveOrNull(REF_invokeSpecial, concrete, concreteClass);
                if (concrete != null)
                    return new DirectMethodHandle(type(), preparedLambdaForm(concrete), concrete);
                break;
            }
        }
        return null;
    }

    /**
     * Create a LF which can invoke the given method.
     * Cache and share this structure among all methods with
     * the same basicType and refKind.
     */
    private static LambdaForm preparedLambdaForm(MemberName m) {
        assert(m.isInvocable()) : m;  // call preparedFieldLambdaForm instead
        MethodType mtype = m.getInvocationType().basicType();
        assert(!m.isMethodHandleInvoke() || "invokeBasic".equals(m.getName())) : m;
        int which;
        switch (m.getReferenceKind()) {
        case REF_invokeVirtual:    which = LF_INVVIRTUAL;    break;
        case REF_invokeStatic:     which = LF_INVSTATIC;     break;
        case REF_invokeSpecial:    which = LF_INVSPECIAL;    break;
        case REF_invokeInterface:  which = LF_INVINTERFACE;  break;
        case REF_newInvokeSpecial: which = LF_NEWINVSPECIAL; break;
        default:  throw new InternalError(m.toString());
        }
        if (which == LF_INVSTATIC && shouldBeInitialized(m)) {
            // precompute the barrier-free version:
            preparedLambdaForm(mtype, which);
            // then use the variant that carries the <clinit> barrier:
            which = LF_INVSTATIC_INIT;
        }
        LambdaForm lform = preparedLambdaForm(mtype, which);
        maybeCompile(lform, m);
        assert(lform.methodType().dropParameterTypes(0, 1)
                .equals(m.getInvocationType().basicType()))
                : Arrays.asList(m, m.getInvocationType().basicType(), lform, lform.methodType());
        return lform;
    }

    // Cache lookup keyed by (basic type, which); builds and publishes on miss.
    private static LambdaForm preparedLambdaForm(MethodType mtype, int which) {
        LambdaForm lform = mtype.form().cachedLambdaForm(which);
        if (lform != null)  return lform;
        lform = makePreparedLambdaForm(mtype, which);
        return mtype.form().setCachedLambdaForm(which, lform);
    }

    /**
     * Builds the invoker LambdaForm for one (type, which) combination:
     * the argument names, a member fetch from the DMH itself, and a
     * trailing MethodHandle.linkTo* linker call.  For NEWINVSPECIAL a
     * freshly allocated instance is prepended and becomes the result.
     */
    private static LambdaForm makePreparedLambdaForm(MethodType mtype, int which) {
        boolean needsInit = (which == LF_INVSTATIC_INIT);
        boolean doesAlloc = (which == LF_NEWINVSPECIAL);
        String linkerName, lambdaName;
        switch (which) {
        case LF_INVVIRTUAL:    linkerName = "linkToVirtual";   lambdaName = "DMH.invokeVirtual";    break;
        case LF_INVSTATIC:     linkerName = "linkToStatic";    lambdaName = "DMH.invokeStatic";     break;
        case LF_INVSTATIC_INIT:linkerName = "linkToStatic";    lambdaName = "DMH.invokeStaticInit"; break;
        case LF_INVSPECIAL:    linkerName = "linkToSpecial";   lambdaName = "DMH.invokeSpecial";    break;
        case LF_INVINTERFACE:  linkerName = "linkToInterface"; lambdaName = "DMH.invokeInterface";  break;
        case LF_NEWINVSPECIAL: linkerName = "linkToSpecial";   lambdaName = "DMH.newInvokeSpecial"; break;
        default:  throw new InternalError("which="+which);
        }
        // The linker takes the trailing MemberName as an extra argument.
        MethodType mtypeWithArg = mtype.appendParameterTypes(MemberName.class);
        if (doesAlloc)
            mtypeWithArg = mtypeWithArg
                    .insertParameterTypes(0, Object.class)  // insert newly allocated obj
                    .changeReturnType(void.class);          // <init> returns void
        MemberName linker = new MemberName(MethodHandle.class, linkerName, mtypeWithArg, REF_invokeStatic);
        try {
            linker = IMPL_NAMES.resolveOrFail(REF_invokeStatic, linker, null, NoSuchMethodException.class);
        } catch (ReflectiveOperationException ex) {
            throw newInternalError(ex);
        }
        // Name-table layout: [0]=the DMH itself, then the external arguments,
        // then the temporaries computed below.
        final int DMH_THIS    = 0;
        final int ARG_BASE    = 1;
        final int ARG_LIMIT   = ARG_BASE + mtype.parameterCount();
        int nameCursor = ARG_LIMIT;
        final int NEW_OBJ     = (doesAlloc ? nameCursor++ : -1);
        final int GET_MEMBER  = nameCursor++;
        final int LINKER_CALL = nameCursor++;
        Name[] names = arguments(nameCursor - ARG_LIMIT, mtype.invokerType());
        assert(names.length == nameCursor);
        if (doesAlloc) {
            // names = { argx,y,z,... new C, init method }
            names[NEW_OBJ] = new Name(NF_allocateInstance, names[DMH_THIS]);
            names[GET_MEMBER] = new Name(NF_constructorMethod, names[DMH_THIS]);
        } else if (needsInit) {
            names[GET_MEMBER] = new Name(NF_internalMemberNameEnsureInit, names[DMH_THIS]);
        } else {
            names[GET_MEMBER] = new Name(NF_internalMemberName, names[DMH_THIS]);
        }
        Object[] outArgs = Arrays.copyOfRange(names, ARG_BASE, GET_MEMBER+1, Object[].class);
        assert(outArgs[outArgs.length-1] == names[GET_MEMBER]);  // look, shifted args!
        int result = LambdaForm.LAST_RESULT;
        if (doesAlloc) {
            assert(outArgs[outArgs.length-2] == names[NEW_OBJ]);  // got to move this one
            System.arraycopy(outArgs, 0, outArgs, 1, outArgs.length-2);
            outArgs[0] = names[NEW_OBJ];
            result = NEW_OBJ;
        }
        names[LINKER_CALL] = new Name(linker, outArgs);
        lambdaName += "_" + LambdaForm.basicTypeSignature(mtype);
        LambdaForm lform = new LambdaForm(lambdaName, ARG_LIMIT, names, result);
        // This is a tricky bit of code.  Don't send it through the LF interpreter.
        lform.compileToBytecode();
        return lform;
    }

    private static void maybeCompile(LambdaForm lform, MemberName m) {
        if (VerifyAccess.isSamePackage(m.getDeclaringClass(), MethodHandle.class))
            // Help along bootstrapping...
            lform.compileToBytecode();
    }

    /** Static wrapper for DirectMethodHandle.internalMemberName. */
    @ForceInline
    /*non-public*/ static Object internalMemberName(Object mh) {
        return ((DirectMethodHandle)mh).member;
    }

    /** Static wrapper for DirectMethodHandle.internalMemberName.
     * This one also forces initialization. */
    /*non-public*/ static Object internalMemberNameEnsureInit(Object mh) {
        DirectMethodHandle dmh = (DirectMethodHandle)mh;
        dmh.ensureInitialized();
        return dmh.member;
    }

    /**
     * Decides whether a <clinit> barrier is needed for this member, i.e.
     * whether the declaring class may still require initialization when
     * the handle is first invoked.
     */
    /*non-public*/ static boolean shouldBeInitialized(MemberName member) {
        switch (member.getReferenceKind()) {
        case REF_invokeStatic:
        case REF_getStatic:
        case REF_putStatic:
        case REF_newInvokeSpecial:
            break;
        default:
            // No need to initialize the class on this kind of member.
            return false;
        }
        Class<?> cls = member.getDeclaringClass();
        if (cls == ValueConversions.class ||
            cls == MethodHandleImpl.class ||
            cls == Invokers.class) {
            // These guys have lots of <clinit> DMH creation but we know
            // the MHs will not be used until the system is booted.
            return false;
        }
        if (VerifyAccess.isSamePackage(MethodHandle.class, cls) ||
            VerifyAccess.isSamePackage(ValueConversions.class, cls)) {
            // It is a system class.  It is probably in the process of
            // being initialized, but we will help it along just to be safe.
            if (UNSAFE.shouldBeInitialized(cls)) {
                UNSAFE.ensureClassInitialized(cls);
            }
            return false;
        }
        return UNSAFE.shouldBeInitialized(cls);
    }

    /**
     * Per-class record of whether <clinit> is known complete.  The cached
     * value is a weak reference to the thread currently running <clinit>,
     * or null once initialization is known to have finished.
     */
    private static class EnsureInitialized extends ClassValue<WeakReference<Thread>> {
        @Override
        protected WeakReference<Thread> computeValue(Class<?> type) {
            UNSAFE.ensureClassInitialized(type);
            if (UNSAFE.shouldBeInitialized(type))
                // If the previous call didn't block, this can happen.
                // We are executing inside <clinit>.
                return new WeakReference<>(Thread.currentThread());
            return null;
        }
        static final EnsureInitialized INSTANCE = new EnsureInitialized();
    }

    // Once the declaring class is initialized, swap in the barrier-free form.
    private void ensureInitialized() {
        if (checkInitialized(member)) {
            // The coast is clear.  Delete the <clinit> barrier.
            if (member.isField())
                updateForm(preparedFieldLambdaForm(member));
            else
                updateForm(preparedLambdaForm(member));
        }
    }
    private static boolean checkInitialized(MemberName member) {
        Class<?> defc = member.getDeclaringClass();
        WeakReference<Thread> ref = EnsureInitialized.INSTANCE.get(defc);
        if (ref == null) {
            return true;  // the final state
        }
        Thread clinitThread = ref.get();
        // Somebody may still be running defc.<clinit>.
        if (clinitThread == Thread.currentThread()) {
            // If anybody is running defc.<clinit>, it is this thread.
            if (UNSAFE.shouldBeInitialized(defc))
                // Yes, we are running it; keep the barrier for now.
                return false;
        } else {
            // We are in a random thread.  Block.
            UNSAFE.ensureClassInitialized(defc);
        }
        assert(!UNSAFE.shouldBeInitialized(defc));
        // put it into the final state
        EnsureInitialized.INSTANCE.remove(defc);
        return true;
    }

    /*non-public*/ static void ensureInitialized(Object mh) {
        ((DirectMethodHandle)mh).ensureInitialized();
    }

    /** This subclass handles constructor references. */
    static class Constructor extends DirectMethodHandle {
        final MemberName initMethod;   // the <init> to run on the fresh instance
        final Class<?> instanceClass;  // class to allocate
        private Constructor(MethodType mtype, LambdaForm form, MemberName constructor,
                            MemberName initMethod, Class<?> instanceClass) {
            super(mtype, form, constructor);
            this.initMethod = initMethod;
            this.instanceClass = instanceClass;
            assert(initMethod.isResolved());
        }
    }

    /*non-public*/ static Object constructorMethod(Object mh) {
        Constructor dmh = (Constructor)mh;
        return dmh.initMethod;
    }

    /*non-public*/ static Object allocateInstance(Object mh) throws InstantiationException {
        Constructor dmh = (Constructor)mh;
        return UNSAFE.allocateInstance(dmh.instanceClass);
    }

    /** This subclass handles non-static field references. */
    static class Accessor extends DirectMethodHandle {
        final Class<?> fieldType;
        final int fieldOffset;  // stored narrow; widened to long in fieldOffset(Object)
        private Accessor(MethodType mtype, LambdaForm form, MemberName member,
                         int fieldOffset) {
            super(mtype, form, member);
            this.fieldType = member.getFieldType();
            this.fieldOffset = fieldOffset;
        }

        @Override Object checkCast(Object obj) {
            return fieldType.cast(obj);
        }
    }

    @ForceInline
    /*non-public*/ static long fieldOffset(Object accessorObj) {
        // Note: We return a long because that is what Unsafe.getObject likes.
        // We store a plain int because it is more compact.
        return ((Accessor)accessorObj).fieldOffset;
    }

    @ForceInline
    /*non-public*/ static Object checkBase(Object obj) {
        // Note that the object's class has already been verified,
        // since the parameter type of the Accessor method handle
        // is either member.getDeclaringClass or a subclass.
        // This was verified in DirectMethodHandle.make.
        // Therefore, the only remaining check is for null.
        // Since this check is *not* guaranteed by Unsafe.getInt
        // and its siblings, we need to make an explicit one here.
        obj.getClass();  // maybe throw NPE
        return obj;
    }

    /** This subclass handles static field references. */
    static class StaticAccessor extends DirectMethodHandle {
        final private Class<?> fieldType;
        final private Object staticBase;
        final private long staticOffset;

        private StaticAccessor(MethodType mtype, LambdaForm form, MemberName member,
                               Object staticBase, long staticOffset) {
            super(mtype, form, member);
            this.fieldType = member.getFieldType();
            this.staticBase = staticBase;
            this.staticOffset = staticOffset;
        }

        @Override Object checkCast(Object obj) {
            return fieldType.cast(obj);
        }
    }

    @ForceInline
    /*non-public*/ static Object nullCheck(Object obj) {
        obj.getClass();
        return obj;
    }

    @ForceInline
    /*non-public*/ static Object staticBase(Object accessorObj) {
        return ((StaticAccessor)accessorObj).staticBase;
    }

    @ForceInline
    /*non-public*/ static long staticOffset(Object accessorObj) {
        return ((StaticAccessor)accessorObj).staticOffset;
    }

    @ForceInline
    /*non-public*/ static Object checkCast(Object mh, Object obj) {
        return ((DirectMethodHandle) mh).checkCast(obj);
    }

    Object checkCast(Object obj) {
        return member.getReturnType().cast(obj);
    }

    // Caching machinery for field accessors:
    private static byte
            AF_GETFIELD       = 0,
            AF_PUTFIELD       = 1,
            AF_GETSTATIC      = 2,
            AF_PUTSTATIC      = 3,
            AF_GETSTATIC_INIT = 4,
            AF_PUTSTATIC_INIT = 5,
            AF_LIMIT          = 6;
    // Enumerate the different field kinds using Wrapper,
    // with an extra case added for checked references.
    private static int
            FT_LAST_WRAPPER  = Wrapper.values().length-1,
            FT_UNCHECKED_REF = Wrapper.OBJECT.ordinal(),
            FT_CHECKED_REF   = FT_LAST_WRAPPER+1,
            FT_LIMIT         = FT_LAST_WRAPPER+2;
    // Flat index into ACCESSOR_FORMS for one (formOp, volatility, field kind).
    private static int afIndex(byte formOp, boolean isVolatile, int ftypeKind) {
        return ((formOp * FT_LIMIT * 2)
                + (isVolatile ? FT_LIMIT : 0)
                + ftypeKind);
    }
    private static final LambdaForm[] ACCESSOR_FORMS
            = new LambdaForm[afIndex(AF_LIMIT, false, 0)];
    private static int ftypeKind(Class<?> ftype) {
        if (ftype.isPrimitive())
            return Wrapper.forPrimitiveType(ftype).ordinal();
        else if (VerifyType.isNullReferenceConversion(Object.class, ftype))
            return FT_UNCHECKED_REF;
        else
            return FT_CHECKED_REF;
    }

    /**
     * Create a LF which can access the given field.
     * Cache and share this structure among all fields with
     * the same basicType and refKind.
     */
    private static LambdaForm preparedFieldLambdaForm(MemberName m) {
        Class<?> ftype = m.getFieldType();
        boolean isVolatile = m.isVolatile();
        byte formOp;
        switch (m.getReferenceKind()) {
        case REF_getField:   formOp = AF_GETFIELD;  break;
        case REF_putField:   formOp = AF_PUTFIELD;  break;
        case REF_getStatic:  formOp = AF_GETSTATIC; break;
        case REF_putStatic:  formOp = AF_PUTSTATIC; break;
        default:  throw new InternalError(m.toString());
        }
        if (shouldBeInitialized(m)) {
            // precompute the barrier-free version:
            preparedFieldLambdaForm(formOp, isVolatile, ftype);
            assert((AF_GETSTATIC_INIT - AF_GETSTATIC) ==
                   (AF_PUTSTATIC_INIT - AF_PUTSTATIC));
            formOp += (AF_GETSTATIC_INIT - AF_GETSTATIC);
        }
        LambdaForm lform = preparedFieldLambdaForm(formOp, isVolatile, ftype);
        maybeCompile(lform, m);
        assert(lform.methodType().dropParameterTypes(0, 1)
                .equals(m.getInvocationType().basicType()))
                : Arrays.asList(m, m.getInvocationType().basicType(), lform, lform.methodType());
        return lform;
    }
    private static LambdaForm preparedFieldLambdaForm(byte formOp, boolean isVolatile, Class<?> ftype) {
        int afIndex = afIndex(formOp, isVolatile, ftypeKind(ftype));
        LambdaForm lform = ACCESSOR_FORMS[afIndex];
        if (lform != null)  return lform;
        lform = makePreparedFieldLambdaForm(formOp, isVolatile, ftypeKind(ftype));
        ACCESSOR_FORMS[afIndex] = lform;  // don't bother with a CAS
        return lform;
    }

    /**
     * Builds the field-access LambdaForm: optional init barrier, optional
     * reference cast, base/offset fetch from the DMH, then a virtual call
     * on the matching Unsafe get*/put* accessor.
     */
    private static LambdaForm makePreparedFieldLambdaForm(byte formOp, boolean isVolatile, int ftypeKind) {
        boolean isGetter  = (formOp & 1) == (AF_GETFIELD & 1);
        boolean isStatic  = (formOp >= AF_GETSTATIC);
        boolean needsInit = (formOp >= AF_GETSTATIC_INIT);
        boolean needsCast = (ftypeKind == FT_CHECKED_REF);
        Wrapper fw = (needsCast ? Wrapper.OBJECT : Wrapper.values()[ftypeKind]);
        Class<?> ft = fw.primitiveType();
        assert(ftypeKind(needsCast ? String.class : ft) == ftypeKind);
        String tname = fw.primitiveSimpleName();
        String ctname = Character.toUpperCase(tname.charAt(0)) + tname.substring(1);
        if (isVolatile)  ctname += "Volatile";
        String getOrPut = (isGetter ? "get" : "put");
        String linkerName = (getOrPut + ctname);  // getObject, putIntVolatile, etc.
        MethodType linkerType;
        if (isGetter)
            linkerType = MethodType.methodType(ft, Object.class, long.class);
        else
            linkerType = MethodType.methodType(void.class, Object.class, long.class, ft);
        MemberName linker = new MemberName(Unsafe.class, linkerName, linkerType, REF_invokeVirtual);
        try {
            linker = IMPL_NAMES.resolveOrFail(REF_invokeVirtual, linker, null, NoSuchMethodException.class);
        } catch (ReflectiveOperationException ex) {
            throw newInternalError(ex);
        }

        // What is the external type of the lambda form?
        MethodType mtype;
        if (isGetter)
            mtype = MethodType.methodType(ft);
        else
            mtype = MethodType.methodType(void.class, ft);
        mtype = mtype.basicType();  // erase short to int, etc.
        if (!isStatic)
            mtype = mtype.insertParameterTypes(0, Object.class);
        final int DMH_THIS  = 0;
        final int ARG_BASE  = 1;
        final int ARG_LIMIT = ARG_BASE + mtype.parameterCount();
        // if this is for non-static access, the base pointer is stored at this index:
        final int OBJ_BASE  = isStatic ? -1 : ARG_BASE;
        // if this is for write access, the value to be written is stored at this index:
        final int SET_VALUE = isGetter ? -1 : ARG_LIMIT - 1;
        int nameCursor = ARG_LIMIT;
        final int F_HOLDER  = (isStatic ? nameCursor++ : -1);  // static base if any
        final int F_OFFSET  = nameCursor++;  // Either static offset or field offset.
        final int OBJ_CHECK = (OBJ_BASE >= 0 ? nameCursor++ : -1);
        final int INIT_BAR  = (needsInit ? nameCursor++ : -1);
        final int PRE_CAST  = (needsCast && !isGetter ? nameCursor++ : -1);
        final int LINKER_CALL = nameCursor++;
        final int POST_CAST = (needsCast && isGetter ? nameCursor++ : -1);
        final int RESULT    = nameCursor-1;  // either the call or the cast
        Name[] names = arguments(nameCursor - ARG_LIMIT, mtype.invokerType());
        if (needsInit)
            names[INIT_BAR] = new Name(NF_ensureInitialized, names[DMH_THIS]);
        if (needsCast && !isGetter)
            names[PRE_CAST] = new Name(NF_checkCast, names[DMH_THIS], names[SET_VALUE]);
        Object[] outArgs = new Object[1 + linkerType.parameterCount()];
        assert(outArgs.length == (isGetter ? 3 : 4));
        outArgs[0] = UNSAFE;
        if (isStatic) {
            outArgs[1] = names[F_HOLDER]  = new Name(NF_staticBase, names[DMH_THIS]);
            outArgs[2] = names[F_OFFSET]  = new Name(NF_staticOffset, names[DMH_THIS]);
        } else {
            outArgs[1] = names[OBJ_CHECK] = new Name(NF_checkBase, names[OBJ_BASE]);
            outArgs[2] = names[F_OFFSET]  = new Name(NF_fieldOffset, names[DMH_THIS]);
        }
        if (!isGetter) {
            outArgs[3] = (needsCast ? names[PRE_CAST] : names[SET_VALUE]);
        }
        for (Object a : outArgs)  assert(a != null);
        names[LINKER_CALL] = new Name(linker, outArgs);
        if (needsCast && isGetter)
            names[POST_CAST] = new Name(NF_checkCast, names[DMH_THIS], names[LINKER_CALL]);
        for (Name n : names)  assert(n != null);
        String fieldOrStatic = (isStatic ? "Static" : "Field");
        String lambdaName = (linkerName + fieldOrStatic);  // significant only for debugging
        if (needsCast)  lambdaName += "Cast";
        if (needsInit)  lambdaName += "Init";
        return new LambdaForm(lambdaName, ARG_LIMIT, names, RESULT);
    }

    // NamedFunctions for the helper calls used inside the LambdaForms above.
    private static final NamedFunction
            NF_internalMemberName,
            NF_internalMemberNameEnsureInit,
            NF_ensureInitialized,
            NF_fieldOffset,
            NF_checkBase,
            NF_staticBase,
            NF_staticOffset,
            NF_checkCast,
            NF_allocateInstance,
            NF_constructorMethod;
    static {
        try {
            NamedFunction nfs[] = {
                NF_internalMemberName = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("internalMemberName", Object.class)),
                NF_internalMemberNameEnsureInit = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("internalMemberNameEnsureInit", Object.class)),
                NF_ensureInitialized = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("ensureInitialized", Object.class)),
                NF_fieldOffset = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("fieldOffset", Object.class)),
                NF_checkBase = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("checkBase", Object.class)),
                NF_staticBase = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("staticBase", Object.class)),
                NF_staticOffset = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("staticOffset", Object.class)),
                NF_checkCast = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("checkCast", Object.class, Object.class)),
                NF_allocateInstance = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("allocateInstance", Object.class)),
                NF_constructorMethod = new NamedFunction(DirectMethodHandle.class
                        .getDeclaredMethod("constructorMethod", Object.class))
            };
            for (NamedFunction nf : nfs) {
                // Each nf must be statically invocable or we get tied up in our bootstraps.
                assert(InvokerBytecodeGenerator.isStaticallyInvocable(nf.member)) : nf;
                nf.resolve();
            }
        } catch (ReflectiveOperationException ex) {
            throw newInternalError(ex);
        }
    }
}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer.demo.player; import com.google.android.exoplayer.DefaultLoadControl; import com.google.android.exoplayer.LoadControl; import com.google.android.exoplayer.MediaCodecAudioTrackRenderer; import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException; import com.google.android.exoplayer.MediaCodecVideoTrackRenderer; import com.google.android.exoplayer.TrackRenderer; import com.google.android.exoplayer.chunk.ChunkSampleSource; import com.google.android.exoplayer.chunk.ChunkSource; import com.google.android.exoplayer.chunk.FormatEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.MultiTrackChunkSource; import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil; import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder; import com.google.android.exoplayer.drm.DrmSessionManager; import com.google.android.exoplayer.drm.MediaDrmCallback; import com.google.android.exoplayer.drm.StreamingDrmSessionManager; import com.google.android.exoplayer.drm.UnsupportedDrmException; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement; import 
com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
import com.google.android.exoplayer.text.TextTrackRenderer;
import com.google.android.exoplayer.text.ttml.TtmlParser;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultAllocator;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.DefaultHttpDataSource;
import com.google.android.exoplayer.upstream.DefaultUriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.Util;

import android.content.Context;
import android.media.MediaCodec;
import android.os.Handler;

import java.io.IOException;
import java.util.Arrays;

/**
 * A {@link RendererBuilder} for SmoothStreaming.
 */
public class SmoothStreamingRendererBuilder implements RendererBuilder {

  // Allocator segment size, and per-renderer buffer sizes expressed in segments.
  private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;
  private static final int VIDEO_BUFFER_SEGMENTS = 200;
  private static final int AUDIO_BUFFER_SEGMENTS = 60;
  private static final int TEXT_BUFFER_SEGMENTS = 2;
  // Live-edge latency passed to each SmoothStreamingChunkSource, in milliseconds.
  private static final int LIVE_EDGE_LATENCY_MS = 30000;

  private final Context context;
  private final String userAgent;
  // Manifest URL, normalized in the constructor to end with "/Manifest".
  private final String url;
  private final MediaDrmCallback drmCallback;

  // Non-null while an asynchronous build is in flight; cleared by cancel().
  private AsyncRendererBuilder currentAsyncBuilder;

  /**
   * @param context A context.
   * @param userAgent The user agent for HTTP requests.
   * @param url The stream URL; "/Manifest" is appended unless the URL already ends with it
   *     (case-insensitive check).
   * @param drmCallback Callback for DRM key requests; used only when the manifest carries a
   *     protection element.
   */
  public SmoothStreamingRendererBuilder(Context context, String userAgent, String url,
      MediaDrmCallback drmCallback) {
    this.context = context;
    this.userAgent = userAgent;
    this.url = Util.toLowerInvariant(url).endsWith("/manifest") ? url : url + "/Manifest";
    this.drmCallback = drmCallback;
  }

  @Override
  public void buildRenderers(DemoPlayer player) {
    // Kicks off an async manifest fetch; renderers are delivered via onSingleManifest.
    currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, drmCallback, player);
    currentAsyncBuilder.init();
  }

  @Override
  public void cancel() {
    if (currentAsyncBuilder != null) {
      currentAsyncBuilder.cancel();
      currentAsyncBuilder = null;
    }
  }

  /**
   * Fetches the SmoothStreaming manifest and, on success, constructs the video,
   * audio and text renderers and hands them to the player.
   */
  private static final class AsyncRendererBuilder
      implements ManifestFetcher.ManifestCallback<SmoothStreamingManifest> {

    private final Context context;
    private final String userAgent;
    private final MediaDrmCallback drmCallback;
    private final DemoPlayer player;
    private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;

    // Set by cancel(); checked before any callback result is delivered.
    private boolean canceled;

    public AsyncRendererBuilder(Context context, String userAgent, String url,
        MediaDrmCallback drmCallback, DemoPlayer player) {
      this.context = context;
      this.userAgent = userAgent;
      this.drmCallback = drmCallback;
      this.player = player;
      SmoothStreamingManifestParser parser = new SmoothStreamingManifestParser();
      manifestFetcher = new ManifestFetcher<>(url, new DefaultHttpDataSource(userAgent, null),
          parser);
    }

    public void init() {
      manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
    }

    public void cancel() {
      canceled = true;
    }

    @Override
    public void onSingleManifestError(IOException exception) {
      if (canceled) {
        return;
      }
      player.onRenderersError(exception);
    }

    @Override
    public void onSingleManifest(SmoothStreamingManifest manifest) {
      if (canceled) {
        return;
      }

      Handler mainHandler = player.getMainHandler();
      LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
      DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

      // Check drm support if necessary.
      DrmSessionManager drmSessionManager = null;
      if (manifest.protectionElement != null) {
        if (Util.SDK_INT < 18) {
          // Protected content requires MediaDrm, available from API 18.
          player.onRenderersError(
              new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
          return;
        }
        try {
          drmSessionManager = new StreamingDrmSessionManager(manifest.protectionElement.uuid,
              player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
        } catch (UnsupportedDrmException e) {
          player.onRenderersError(e);
          return;
        }
      }

      // Obtain stream elements for playback.
      int audioStreamElementCount = 0;
      int textStreamElementCount = 0;
      int videoStreamElementIndex = -1;  // only the first video element is used
      for (int i = 0; i < manifest.streamElements.length; i++) {
        if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
          audioStreamElementCount++;
        } else if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
          textStreamElementCount++;
        } else if (videoStreamElementIndex == -1
            && manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
          videoStreamElementIndex = i;
        }
      }

      // Determine which video tracks we should use for playback.
      int[] videoTrackIndices = null;
      if (videoStreamElementIndex != -1) {
        try {
          videoTrackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(context,
              Arrays.asList(manifest.streamElements[videoStreamElementIndex].tracks), null, false);
        } catch (DecoderQueryException e) {
          player.onRenderersError(e);
          return;
        }
      }

      // Build the video renderer.
      final MediaCodecVideoTrackRenderer videoRenderer;
      if (videoTrackIndices == null || videoTrackIndices.length == 0) {
        videoRenderer = null;
      } else {
        DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
        ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
            videoStreamElementIndex, videoTrackIndices, videoDataSource,
            new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
        ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
            VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
            DemoPlayer.TYPE_VIDEO);
        videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager,
            true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
      }

      // Build the audio renderer.
      final String[] audioTrackNames;
      final MultiTrackChunkSource audioChunkSource;
      final MediaCodecAudioTrackRenderer audioRenderer;
      if (audioStreamElementCount == 0) {
        audioTrackNames = null;
        audioChunkSource = null;
        audioRenderer = null;
      } else {
        audioTrackNames = new String[audioStreamElementCount];
        ChunkSource[] audioChunkSources = new ChunkSource[audioStreamElementCount];
        DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
        FormatEvaluator audioFormatEvaluator = new FormatEvaluator.FixedEvaluator();
        audioStreamElementCount = 0;  // reused as the write cursor into the arrays above
        for (int i = 0; i < manifest.streamElements.length; i++) {
          if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
            audioTrackNames[audioStreamElementCount] = manifest.streamElements[i].name;
            audioChunkSources[audioStreamElementCount] = new SmoothStreamingChunkSource(
                manifestFetcher, i, new int[] {0}, audioDataSource, audioFormatEvaluator,
                LIVE_EDGE_LATENCY_MS);
            audioStreamElementCount++;
          }
        }
        audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
        ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
            AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
            DemoPlayer.TYPE_AUDIO);
        audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager,
            true, mainHandler, player);
      }

      // Build the text renderer.
      final String[] textTrackNames;
      final MultiTrackChunkSource textChunkSource;
      final TrackRenderer textRenderer;
      if (textStreamElementCount == 0) {
        textTrackNames = null;
        textChunkSource = null;
        textRenderer = null;
      } else {
        textTrackNames = new String[textStreamElementCount];
        ChunkSource[] textChunkSources = new ChunkSource[textStreamElementCount];
        DataSource ttmlDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
        FormatEvaluator ttmlFormatEvaluator = new FormatEvaluator.FixedEvaluator();
        textStreamElementCount = 0;  // reused as the write cursor into the arrays above
        for (int i = 0; i < manifest.streamElements.length; i++) {
          if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
            textTrackNames[textStreamElementCount] = manifest.streamElements[i].language;
            textChunkSources[textStreamElementCount] = new SmoothStreamingChunkSource(
                manifestFetcher, i, new int[] {0}, ttmlDataSource, ttmlFormatEvaluator,
                LIVE_EDGE_LATENCY_MS);
            textStreamElementCount++;
          }
        }
        textChunkSource = new MultiTrackChunkSource(textChunkSources);
        ChunkSampleSource ttmlSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
            TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
            DemoPlayer.TYPE_TEXT);
        textRenderer = new TextTrackRenderer(ttmlSampleSource, player, mainHandler.getLooper(),
            new TtmlParser());
      }

      // Invoke the callback.
      String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
      trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
      trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;

      MultiTrackChunkSource[] multiTrackChunkSources =
          new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
      multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
      multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;

      TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
      renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
      renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
      renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
      player.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
    }

  }

}
/********************************************************************************** * $URL: $ * $Id: $ *********************************************************************************** * * Copyright (c) 2006, 2007, 2008 Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.content.types; import static org.sakaiproject.content.api.ResourceToolAction.*; import java.util.ArrayList; import java.util.EnumMap; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.sakaiproject.component.cover.ComponentManager; import org.sakaiproject.component.cover.ServerConfigurationService; import org.sakaiproject.content.api.ContentEntity; import org.sakaiproject.content.api.ResourceToolAction; import org.sakaiproject.content.api.ResourceType; import org.sakaiproject.content.api.ResourceToolAction.ActionType; import org.sakaiproject.content.api.ContentPrintService; import org.sakaiproject.content.util.BaseInteractionAction; import org.sakaiproject.content.util.BaseResourceType; import org.sakaiproject.content.util.BaseResourceAction.Localizer; import org.sakaiproject.content.util.BaseServiceLevelAction; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.user.api.User; import org.sakaiproject.util.Resource; import 
org.sakaiproject.util.ResourceLoader; public class TextDocumentType extends BaseResourceType { protected ContentPrintService contentPrintService; protected String typeId = ResourceType.TYPE_TEXT; protected String helperId = "sakai.resource.type.helper"; public static final String MY_HELPER_ID = "sakai.resource.type.helper"; /** localized tool properties **/ private static final String DEFAULT_RESOURCECLASS = "org.sakaiproject.localization.util.TypeProperties"; private static final String DEFAULT_RESOURCEBUNDLE = "org.sakaiproject.localization.bundle.type.types"; private static final String RESOURCECLASS = "resource.class.type"; private static final String RESOURCEBUNDLE = "resource.bundle.type"; private String resourceClass = ServerConfigurationService.getString(RESOURCECLASS, DEFAULT_RESOURCECLASS); private String resourceBundle = ServerConfigurationService.getString(RESOURCEBUNDLE, DEFAULT_RESOURCEBUNDLE); private ResourceLoader rb = new Resource().getLoader(resourceClass, resourceBundle); protected EnumMap<ActionType, List<ResourceToolAction>> actionMap = new EnumMap<ActionType, List<ResourceToolAction>>(ActionType.class); protected Map<String, ResourceToolAction> actions = new HashMap<String, ResourceToolAction>(); private Localizer localizer(final String string) { return new Localizer() { public String getLabel() { return rb.getString(string); } }; } public TextDocumentType() { this.contentPrintService = (ContentPrintService) ComponentManager.get("org.sakaiproject.content.api.ContentPrintService"); actions.put(CREATE, new TextDocumentCreateAction(CREATE, ActionType.CREATE, typeId, helperId, localizer("create.text"))); // actions.put(ACCESS_CONTENT, new TextDocumentAccessAction()); actions.put(REVISE_CONTENT, new TextDocumentReviseAction(REVISE_CONTENT, ActionType.REVISE_CONTENT, typeId, helperId, localizer("action.revise"))); actions.put(REPLACE_CONTENT, new TextDocumentReplaceAction(REPLACE_CONTENT, ActionType.REPLACE_CONTENT, typeId, helperId, 
localizer("action.replace"))); actions.put(ACCESS_PROPERTIES, new BaseServiceLevelAction(ACCESS_PROPERTIES, ActionType.VIEW_METADATA, typeId, false, localizer("action.access"))); actions.put(REVISE_METADATA, new BaseServiceLevelAction(REVISE_METADATA, ActionType.REVISE_METADATA, typeId, false, localizer("action.props"))); actions.put(COPY, new BaseServiceLevelAction(COPY, ActionType.COPY, typeId, true, localizer("action.copy"))); actions.put(DUPLICATE, new BaseServiceLevelAction(DUPLICATE, ActionType.DUPLICATE, typeId, false, localizer("action.duplicate"))); actions.put(MOVE, new BaseServiceLevelAction(MOVE, ActionType.MOVE, typeId, true, localizer("action.move"))); actions.put(DELETE, new BaseServiceLevelAction(DELETE, ActionType.DELETE, typeId, true, localizer("action.delete"))); actions.put(MAKE_SITE_PAGE, new MakeSitePageAction(MAKE_SITE_PAGE, ActionType.MAKE_SITE_PAGE, typeId)); if (ServerConfigurationService.getString(contentPrintService.CONTENT_PRINT_SERVICE_URL, null) != null) { // print service url is provided. Add the Print option. 
actions.put(PRINT_FILE, new BaseServiceLevelAction(PRINT_FILE, ActionType.PRINT_FILE, typeId, false, localizer("action.printfile"))); } // initialize actionMap with an empty List for each ActionType for(ActionType type : ActionType.values()) { actionMap.put(type, new ArrayList<ResourceToolAction>()); } // for each action in actions, add a link in actionMap Iterator<String> it = actions.keySet().iterator(); while(it.hasNext()) { String id = it.next(); ResourceToolAction action = actions.get(id); List<ResourceToolAction> list = actionMap.get(action.getActionType()); if(list == null) { list = new ArrayList<ResourceToolAction>(); actionMap.put(action.getActionType(), list); } list.add(action); } } public class TextDocumentReplaceAction extends BaseInteractionAction { public TextDocumentReplaceAction(String id, ActionType actionType, String typeId, String helperId, Localizer localizer) { super(id, actionType, typeId, helperId, localizer); } @Override public List<String> getRequiredPropertyKeys() { List<String> rv = new ArrayList<String>(); rv.add(ResourceProperties.PROP_CONTENT_ENCODING); return rv; } } public class TextDocumentCreateAction extends BaseInteractionAction { public TextDocumentCreateAction(String id, ActionType actionType, String typeId, String helperId, Localizer localizer) { super(id, actionType, typeId, helperId, localizer); } @Override public List<String> getRequiredPropertyKeys() { List<String> rv = new ArrayList<String>(); rv.add(ResourceProperties.PROP_CONTENT_ENCODING); return rv; } } public class TextDocumentReviseAction extends BaseInteractionAction { public TextDocumentReviseAction(String id, ActionType actionType, String typeId, String helperId, Localizer localizer) { super(id, actionType, typeId, helperId, localizer); } @Override public List<String> getRequiredPropertyKeys() { List<String> rv = new ArrayList<String>(); rv.add(ResourceProperties.PROP_CONTENT_ENCODING); return rv; } } public ResourceToolAction getAction(String actionId) { return 
actions.get(actionId); } public List<ResourceToolAction> getActions(Reference entityRef, Set permissions) { // TODO: use entityRef to filter actions List<ResourceToolAction> rv = new ArrayList<ResourceToolAction>(); rv.addAll(actions.values()); return rv; } public List<ResourceToolAction> getActions(Reference entityRef, User user, Set permissions) { // TODO: use entityRef and user to filter actions List<ResourceToolAction> rv = new ArrayList<ResourceToolAction>(); rv.addAll(actions.values()); return rv; } public String getIconLocation(ContentEntity entity) { // TODO Auto-generated method stub return null; } public String getId() { return typeId; } public String getLabel() { return rb.getString("type.text"); } /* (non-Javadoc) * @see org.sakaiproject.content.api.ResourceType#getLocalizedHoverText(org.sakaiproject.entity.api.Reference) */ public String getLocalizedHoverText(ContentEntity member) { return rb.getString("type.text"); } /* (non-Javadoc) * @see org.sakaiproject.content.api.ResourceType#getActions(org.sakaiproject.content.api.ResourceType.ActionType) */ public List<ResourceToolAction> getActions(ActionType type) { List<ResourceToolAction> list = actionMap.get(type); if(list == null) { list = new ArrayList<ResourceToolAction>(); actionMap.put(type, list); } return new ArrayList<ResourceToolAction>(list); } /* (non-Javadoc) * @see org.sakaiproject.content.api.ResourceType#getActions(java.util.List) */ public List<ResourceToolAction> getActions(List<ActionType> types) { List<ResourceToolAction> list = new ArrayList<ResourceToolAction>(); if(types != null) { Iterator<ActionType> it = types.iterator(); while(it.hasNext()) { ActionType type = it.next(); List<ResourceToolAction> sublist = actionMap.get(type); if(sublist == null) { sublist = new ArrayList<ResourceToolAction>(); actionMap.put(type, sublist); } list.addAll(sublist); } } return list; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol; import java.util.Set; import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.ValueFactory; import javax.jcr.ValueFormatException; import javax.jcr.security.AccessControlException; import javax.jcr.security.Privilege; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import org.apache.jackrabbit.api.security.JackrabbitAccessControlEntry; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.plugins.memory.PropertyStates; import org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl; import org.apache.jackrabbit.oak.spi.security.authorization.restriction.Restriction; import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionImpl; import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import static org.junit.Assert.assertArrayEquals; 
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@link ACE}: principal/privilege accessors, single- and
 * multi-valued restriction lookup, and the equals contract.
 */
public class ACETest extends AbstractAccessControlTest {

    // Fixture values built in before(): a glob string and NAME-typed values.
    private Value globValue;
    private Value[] nameValues;
    private Value nameValue;

    @Before
    public void before() throws Exception {
        ValueFactory valueFactory = new ValueFactoryImpl(Mockito.mock(Root.class), getNamePathMapper());
        globValue = valueFactory.createValue("*");
        nameValue = valueFactory.createValue("nt:file", PropertyType.NAME);
        nameValues = new Value[] {
                valueFactory.createValue("nt:folder", PropertyType.NAME),
                valueFactory.createValue("nt:file", PropertyType.NAME)
        };
    }

    // Convenience: allow-entry for testPrincipal with jcr:read plus the given restrictions.
    private ACE createEntry(Restriction... restrictions) throws Exception {
        return createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true, restrictions);
    }

    // Restriction backed by a single STRING property.
    private Restriction createRestriction(String name, String value) {
        return new RestrictionImpl(PropertyStates.createProperty(name, value), false);
    }

    // Restriction backed by a single Value property.
    private Restriction createRestriction(String name, Value value) throws Exception {
        return new RestrictionImpl(PropertyStates.createProperty(name, value), false);
    }

    // Restriction backed by a multi-valued property.
    private Restriction createRestriction(String name, Value[] values) throws Exception {
        return new RestrictionImpl(PropertyStates.createProperty(name, ImmutableList.copyOf(values)), false);
    }

    @Test
    public void testIsAllow() throws RepositoryException {
        ACE ace = createEntry(true, PrivilegeConstants.JCR_READ);
        assertTrue(ace.isAllow());

        ace = createEntry(false, PrivilegeConstants.JCR_READ);
        assertFalse(ace.isAllow());
    }

    @Test
    public void testGetPrincipal() throws RepositoryException {
        ACE tmpl = createEntry(true, PrivilegeConstants.JCR_READ);
        assertNotNull(tmpl.getPrincipal());
        assertEquals(testPrincipal.getName(), tmpl.getPrincipal().getName());
        // the entry must hold the very same principal instance, not a copy
        assertSame(testPrincipal, tmpl.getPrincipal());
    }

    @Test(expected = AccessControlException.class)
    public void testNullPrincipal() throws Exception {
        createEntry(null, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
    }

    @Test(expected = AccessControlException.class)
    public void testNullPrivilegeBits() throws Exception {
        createEntry(testPrincipal, (PrivilegeBits) null, true);
    }

    @Test(expected = AccessControlException.class)
    public void testEmptyPrivilegeBits() throws Exception {
        createEntry(testPrincipal, PrivilegeBits.EMPTY, true);
    }

    @Test
    public void testGetPrivilegeBits() throws RepositoryException {
        ACE entry = createEntry(true, PrivilegeConstants.JCR_READ);
        PrivilegeBits bits = entry.getPrivilegeBits();
        assertNotNull(bits);
        assertEquals(PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), bits);

        entry = createEntry(true, PrivilegeConstants.REP_WRITE);
        bits = entry.getPrivilegeBits();
        assertNotNull(bits);
        assertEquals(PrivilegeBits.BUILT_IN.get(PrivilegeConstants.REP_WRITE), bits);

        entry = createEntry(true, PrivilegeConstants.JCR_ADD_CHILD_NODES, PrivilegeConstants.JCR_REMOVE_CHILD_NODES);
        bits = entry.getPrivilegeBits();
        assertNotNull(bits);
        PrivilegeBits expected = getBitsProvider().getBits(
                PrivilegeConstants.JCR_ADD_CHILD_NODES,
                PrivilegeConstants.JCR_REMOVE_CHILD_NODES);
        assertEquals(expected, bits);
    }

    @Test
    public void testNullPrivileges() {
        try {
            new EmptyACE(null);
            fail("Privileges must not be null");
        } catch (AccessControlException e) {
            // success
        }
    }

    @Test
    public void testEmptyPrivileges() {
        try {
            new EmptyACE(PrivilegeBits.EMPTY);
            fail("Privileges must not be empty.");
        } catch (AccessControlException e) {
            // success
        }
    }

    @Test
    public void testGetRestrictionNames() throws Exception {
        // empty restrictions
        String[] restrictionNames = createEntry().getRestrictionNames();
        assertNotNull(restrictionNames);
        assertEquals(0, restrictionNames.length);

        Restriction globRestr = createRestriction(AccessControlConstants.REP_GLOB, globValue);
        Restriction nameRestr = createRestriction(AccessControlConstants.REP_NT_NAMES, nameValues);

        // single restriction
        restrictionNames = createEntry(globRestr).getRestrictionNames();
        assertEquals(1, restrictionNames.length);

        // 2 restrictions
        restrictionNames = createEntry(globRestr, nameRestr).getRestrictionNames();
        assertEquals(2, restrictionNames.length);
    }

    @Test
    public void testGetRestrictionForEmpty() throws Exception {
        // empty restrictions
        Value val = createEntry().getRestriction(AccessControlConstants.REP_GLOB);
        assertNull(val);
    }

    @Test
    public void testGetNonExistingRestriction() throws Exception {
        // single valued restriction
        Restriction globRestr = createRestriction(AccessControlConstants.REP_GLOB, globValue);
        ACE ace = createEntry(globRestr);
        assertNull(ace.getRestriction(AccessControlConstants.REP_NT_NAMES));
    }

    @Test
    public void testGetRestrictionForSingleValued() throws Exception {
        // single valued restriction
        Restriction globRestr = createRestriction(AccessControlConstants.REP_GLOB, globValue);
        ACE ace = createEntry(globRestr);
        Value val = ace.getRestriction(AccessControlConstants.REP_GLOB);
        assertNotNull(val);
        assertEquals(globValue, val);
    }

    /**
     * @since OAK 1.0: support for multi-value restrictions
     */
    @Test(expected = ValueFormatException.class)
    public void testGetRestrictionForMultiValued() throws Exception {
        // multivalued restriction: single-value accessor must fail
        Restriction nameRestr = createRestriction(AccessControlConstants.REP_NT_NAMES, nameValues);
        ACE ace = createEntry(nameRestr);
        ace.getRestriction(AccessControlConstants.REP_NT_NAMES);
    }

    /**
     * @since OAK 1.0: support for multi-value restrictions
     */
    @Test
    public void testGetRestrictionForMultiValued2() throws Exception {
        // single value restriction stored in multi-value property
        Restriction singleNameRestr = createRestriction(AccessControlConstants.REP_NT_NAMES, new Value[] {nameValue});
        ACE ace = createEntry(singleNameRestr);
        Value val = ace.getRestriction(AccessControlConstants.REP_NT_NAMES);
        assertEquals(nameValue, val);
    }

    /**
     * @since OAK 1.0: support for multi-value restrictions
     */
    @Test
    public void testGetEmptyRestrictions() throws Exception {
        // empty restrictions
        Value[] vs = createEntry().getRestrictions(AccessControlConstants.REP_GLOB);
        assertNull(vs);
    }

    /**
     * @since OAK 1.0: support for multi-value restrictions
     */
    @Test
    public void testGetNonExistingRestrictions() throws Exception {
        Restriction nameRestr = createRestriction(AccessControlConstants.REP_NT_NAMES, nameValues);
        ACE ace = createEntry(nameRestr);
        assertNull(ace.getRestrictions(AccessControlConstants.REP_GLOB));
    }

    /**
     * @since OAK 1.0: support for multi-value restrictions
     */
    @Test
    public void testGetRestrictionsForSingleValue() throws Exception {
        // single valued restriction is exposed as a one-element array
        Restriction globRestr = createRestriction(AccessControlConstants.REP_GLOB, globValue);
        ACE ace = createEntry(globRestr);
        Value[] vs = ace.getRestrictions(AccessControlConstants.REP_GLOB);
        assertNotNull(vs);
        assertArrayEquals(new Value[] {globValue}, vs);
    }

    /**
     * @since OAK 1.0: support for multi-value restrictions
     */
    @Test
    public void testGetRestrictionsForMultiValued() throws Exception {
        // multivalued restriction
        Restriction nameRestr = createRestriction(AccessControlConstants.REP_NT_NAMES, nameValues);
        ACE ace = createEntry(nameRestr);
        Value[] vs = ace.getRestrictions(AccessControlConstants.REP_NT_NAMES);
        assertEquals(2, vs.length);
        assertArrayEquals(nameValues, vs);
    }

    /**
     * @since OAK 1.0: support for multi-value restrictions
     */
    @Test
    public void testGetRestrictionsForMultiValued2() throws Exception {
        // single value restriction stored in multi-value property
        Restriction singleNameRestr = createRestriction(AccessControlConstants.REP_NT_NAMES, new Value[]{nameValue});
        ACE ace = createEntry(singleNameRestr);
        Value[] vs = ace.getRestrictions(AccessControlConstants.REP_NT_NAMES);
        assertEquals(1, vs.length);
        assertEquals(nameValue, vs[0]);
    }

    @Test
    public void testGetRestrictions() throws Exception {
        Restriction nameRestr = createRestriction(AccessControlConstants.REP_NT_NAMES, nameValues);
        Restriction globRestr = createRestriction(AccessControlConstants.REP_GLOB, globValue);
        Set<Restriction> expected = ImmutableSet.of(nameRestr, globRestr);
        ACE ace = createEntry(nameRestr, globRestr);
        assertEquals(expected, ace.getRestrictions());
    }

    @Test
    public void testGetRestrictionsNone() throws Exception {
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        assertTrue(ace.getRestrictions().isEmpty());
    }

    @Test
    public void testEqualsSameACE() throws Exception {
        // reflexivity
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        assertTrue(ace.equals(ace));
    }

    @Test
    public void testEqualsACE() throws Exception {
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        ACE ace2 = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        assertTrue(ace.equals(ace2));
    }

    @Test
    public void testEqualsOtherEntryImpl() throws Exception {
        // a different JackrabbitAccessControlEntry implementation is never equal
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        assertFalse(ace.equals(Mockito.mock(JackrabbitAccessControlEntry.class)));
    }

    @Test
    public void testEqualsDifferentAllow() throws Exception {
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        ACE ace2 = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), false);
        assertFalse(ace.equals(ace2));
    }

    @Test
    public void testEqualsDifferentPrincipal() throws Exception {
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        ACE ace2 = createEntry(EveryonePrincipal.getInstance(), PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        assertFalse(ace.equals(ace2));
    }

    @Test
    public void testEqualsDifferentPrivs() throws Exception {
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true);
        ACE ace2 = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_ADD_CHILD_NODES), true);
        assertFalse(ace.equals(ace2));
    }

    @Test
    public void testEqualsDifferentRestrictions() throws Exception {
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true, createRestriction("name2", "val"));
        ACE ace2 = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true, createRestriction("name", "val"));
        assertFalse(ace.equals(ace2));
    }

    @Test
    public void testEqualsDifferentRestrictionValue() throws Exception {
        ACE ace = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true, createRestriction("name", "val"));
        ACE ace2 = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ), true, createRestriction("name", "val2"));
        assertFalse(ace.equals(ace2));
    }

    // Minimal concrete ACE used to exercise constructor validation of the
    // privilege bits; getPrivileges() is irrelevant for those tests.
    private class EmptyACE extends ACE {

        public EmptyACE(PrivilegeBits privilegeBits) throws AccessControlException {
            super(testPrincipal, privilegeBits, true, null, getNamePathMapper());
        }

        @Override
        public Privilege[] getPrivileges() {
            return new Privilege[0];
        }
    }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.jvm.java.abi;

import static org.junit.Assert.assertEquals;

import com.facebook.buck.jvm.java.testutil.compiler.CompilerTreeApiTest;
import com.facebook.buck.jvm.java.testutil.compiler.CompilerTreeApiTestRunner;
import com.google.common.base.Joiner;
import java.io.IOException;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.objectweb.asm.Opcodes;

/**
 * Tests that {@link AccessFlags} maps source-level modifiers on classes,
 * methods, and fields to the corresponding ASM {@link Opcodes} access-flag
 * bits, each snippet is compiled and the resulting elements are inspected.
 */
@RunWith(CompilerTreeApiTestRunner.class)
public class AccessFlagsTest extends CompilerTreeApiTest {

  // Re-created after every compilation; see initCompiler below.
  private AccessFlags accessFlags;

  @Test
  public void testPublicFlagOnField() throws IOException {
    testFieldFlags("public", Opcodes.ACC_PUBLIC);
  }

  @Test
  public void testPublicFlagOnMethod() throws IOException {
    testMethodFlags("public", Opcodes.ACC_PUBLIC);
  }

  @Test
  public void testPublicFlagOnClass() throws IOException {
    testClassFlags("public", Opcodes.ACC_PUBLIC);
  }

  @Test
  public void testProtectedFlagOnField() throws IOException {
    testFieldFlags("protected", Opcodes.ACC_PROTECTED);
  }

  @Test
  public void testProtectedFlagOnMethod() throws IOException {
    testMethodFlags("protected", Opcodes.ACC_PROTECTED);
  }

  @Test
  public void testPrivateFlagOnField() throws IOException {
    testFieldFlags("private", Opcodes.ACC_PRIVATE);
  }

  @Test
  public void testPrivateFlagOnMethod() throws IOException {
    testMethodFlags("private", Opcodes.ACC_PRIVATE);
  }

  @Test
  public void testNoFlagForDefaultVisibilityOnField() throws IOException {
    // package-private has no dedicated access bit
    testFieldFlags("", 0);
  }

  @Test
  public void testNoFlagForDefaultVisibilityOnMethod() throws IOException {
    testMethodFlags("", 0);
  }

  @Test
  public void testNoFlagForDefaultVisibilityOnClass() throws IOException {
    testClassFlags("", 0);
  }

  @Test
  public void testNoFlagForInterfaceDefaultMethod() throws IOException {
    // "default" is purely syntactic; only ACC_PUBLIC is expected in the flags
    compile(Joiner.on('\n').join("interface Foo {", "  default void foo() { }", "}"));
    assertEquals(
        Opcodes.ACC_PUBLIC,
        accessFlags.getAccessFlags(findMethod("foo", elements.getTypeElement("Foo"))));
  }

  @Test
  public void testStaticFlagOnField() throws IOException {
    testFieldFlags("static", Opcodes.ACC_STATIC);
  }

  @Test
  public void testStaticFlagOnMethod() throws IOException {
    testMethodFlags("static", Opcodes.ACC_STATIC);
  }

  @Test
  public void testStaticFlagOnClass() throws IOException {
    testTypeFlags(
        Joiner.on('\n').join("class Foo {", "  static class Inner { }", "}"),
        "Foo.Inner",
        Opcodes.ACC_STATIC | Opcodes.ACC_SUPER);
  }

  @Test
  public void testFinalFlagOnField() throws IOException {
    testFieldFlags("final", Opcodes.ACC_FINAL);
  }

  @Test
  public void testFinalFlagOnMethod() throws IOException {
    testMethodFlags("final", Opcodes.ACC_FINAL);
  }

  @Test
  public void testFinalFlagOnClass() throws IOException {
    testClassFlags("final", Opcodes.ACC_FINAL);
  }

  @Test
  public void testVolatileFlag() throws IOException {
    testFieldFlags("volatile", Opcodes.ACC_VOLATILE);
  }

  @Test
  public void testTransientFlag() throws IOException {
    testFieldFlags("transient", Opcodes.ACC_TRANSIENT);
  }

  @Test
  public void testAbstractFlagOnClass() throws IOException {
    testClassFlags("abstract", Opcodes.ACC_ABSTRACT);
  }

  @Test
  public void testAbstractFlagOnMethod() throws IOException {
    compile(Joiner.on('\n').join("abstract class Foo {", "  abstract void foo();", "}"));
    assertEquals(
        Opcodes.ACC_ABSTRACT,
        accessFlags.getAccessFlags(findMethod("foo", elements.getTypeElement("Foo"))));
  }

  @Test
  public void testSynchronizedFlag() throws IOException {
    testMethodFlags("synchronized", Opcodes.ACC_SYNCHRONIZED);
  }

  @Test
  public void testFpStrictFlag() throws IOException {
    testMethodFlags("strictfp", Opcodes.ACC_STRICT);
  }

  @Test
  public void testNativeFlag() throws IOException {
    compile(Joiner.on('\n').join("class Foo {", "  native void method();", "}"));
    assertEquals(
        Opcodes.ACC_NATIVE,
        accessFlags.getAccessFlags(findMethod("method", elements.getTypeElement("Foo"))));
  }

  @Test
  public void testMultipleFlagsOnMethod() throws IOException {
    testMethodFlags("public static", Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC);
  }

  @Test
  public void testMultipleFlagsOnField() throws IOException {
    testFieldFlags("public static", Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC);
  }

  @Test
  public void testVarArgsFlag() throws IOException {
    compile(Joiner.on('\n').join("class Foo {", "  void method(String... s) { }", "}"));
    assertEquals(
        Opcodes.ACC_VARARGS,
        accessFlags.getAccessFlags(findMethod("method", elements.getTypeElement("Foo"))));
  }

  @Test
  public void testDeprecatedPseudoFlagOnField() throws IOException {
    // @Deprecated surfaces as the ACC_DEPRECATED pseudo-flag
    testFieldFlags("@Deprecated", Opcodes.ACC_DEPRECATED);
  }

  @Test
  public void testDeprecatedPseudoFlagOnMethod() throws IOException {
    testMethodFlags("@Deprecated", Opcodes.ACC_DEPRECATED);
  }

  @Test
  public void testAnnotationTypeFlags() throws IOException {
    testTypeFlags(
        "@java.lang.annotation.Documented @interface Foo { }",
        "Foo",
        Opcodes.ACC_ANNOTATION | Opcodes.ACC_INTERFACE | Opcodes.ACC_ABSTRACT);
  }

  @Test
  public void testInterfaceTypeFlags() throws IOException {
    testTypeFlags("interface Foo { }", "Foo", Opcodes.ACC_INTERFACE | Opcodes.ACC_ABSTRACT);
  }

  @Test
  public void testEnumTypeFlags() throws IOException {
    testTypeFlags(
        "enum Foo { Item }", "Foo", Opcodes.ACC_ENUM | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL);
  }

  @Test
  public void testExplicitEnumAbstractFlag() throws IOException {
    // an enum with an abstract member is ABSTRACT rather than FINAL
    testTypeFlags(
        Joiner.on('\n')
            .join(
                "enum Foo {",
                "  Value {",
                "    int get() { return 3; }",
                "  };",
                "  abstract int get();",
                "}"),
        "Foo",
        Opcodes.ACC_ENUM | Opcodes.ACC_SUPER | Opcodes.ACC_ABSTRACT);
  }

  @Test
  public void testImplicitEnumAbstractFlag() throws IOException {
    // inherited unimplemented method also makes the enum ABSTRACT
    testTypeFlags(
        Joiner.on('\n').join("enum Foo implements Runnable {", "  Value;", "}"),
        "Foo",
        Opcodes.ACC_ENUM | Opcodes.ACC_SUPER | Opcodes.ACC_ABSTRACT);
  }

  @Test
  public void testNonAbstractGenericEnumAbstractFlag() throws IOException {
    testTypeFlags(
        Joiner.on('\n')
            .join(
                "enum Foo implements java.util.Comparator<Foo>{",
                "  Value {",
                "    int get() { return 3; }",
                "  };",
                "  public int compare(Foo a, Foo b) { return 0; }",
                "}"),
        "Foo",
        Opcodes.ACC_ENUM | Opcodes.ACC_SUPER);
  }

  @Test
  public void testEnumVarFlags() throws IOException {
    compile("enum Foo { Item }");
    assertEquals(
        Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL | Opcodes.ACC_ENUM,
        accessFlags.getAccessFlags(findField("Item", elements.getTypeElement("Foo"))));
  }

  // Compiles "<modifiers> class Foo { }" and checks its flags (ACC_SUPER is
  // always present on ordinary classes).
  private void testClassFlags(String modifiers, int expectedFlags) throws IOException {
    testTypeFlags(
        String.format("%s class Foo { }", modifiers), "Foo", expectedFlags | Opcodes.ACC_SUPER);
  }

  // Compiles the given source and checks the flags of the named type.
  private void testTypeFlags(String content, String typeName, int expectedFlags)
      throws IOException {
    compile(content);
    assertNoErrors();
    assertEquals(expectedFlags, accessFlags.getAccessFlags(elements.getTypeElement(typeName)));
  }

  // Compiles a class with a single method carrying the given modifiers.
  private void testMethodFlags(String modifiers, int expectedFlags) throws IOException {
    compile(
        Joiner.on('\n')
            .join("class Foo {", String.format("  %s void method() { }", modifiers), "}"));
    assertNoErrors();
    assertEquals(
        expectedFlags,
        accessFlags.getAccessFlags(findMethod("method", elements.getTypeElement("Foo"))));
  }

  // Compiles a class with a single field carrying the given modifiers.
  private void testFieldFlags(String modifiers, int expectedFlags) throws IOException {
    compile(
        Joiner.on('\n').join("class Foo {", String.format("  %s int field = 0;", modifiers), "}"));
    assertNoErrors();
    assertEquals(
        expectedFlags,
        accessFlags.getAccessFlags(findField("field", elements.getTypeElement("Foo"))));
  }

  @Override
  protected void initCompiler(Map<String, String> fileNamesToContents) throws IOException {
    super.initCompiler(fileNamesToContents);
    // bind the flag translator to the freshly created Elements instance
    accessFlags = new AccessFlags(elements);
  }
}
package io.apiman.gateway.engine.threescale.beans;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;

/**
 * JSON-mapped bean for a 3scale backend/service configuration record.
 * <p>
 * Mirrors the 3scale admin API's service representation field-for-field; unknown
 * keys are retained in {@link #getAdditionalProperties()} via the
 * {@code @JsonAnyGetter}/{@code @JsonAnySetter} pair so round-tripping is lossless.
 * <p>
 * Review note: several getters previously declared a return type of {@code Object}
 * even though the backing field and setter were {@code String} (a generator
 * artifact); they now return {@code String} — a covariant, source-compatible
 * narrowing. Fields whose payloads are genuinely untyped in the upstream JSON
 * ({@code logo_file_size}, {@code infobar}, {@code terms},
 * {@code notification_settings}) remain {@code Object}.
 * <p>
 * Not thread-safe; intended for single-threaded (de)serialization use.
 *
 * @author Marc Savy {@literal <marc@rhymewithgravy.com>}
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({ "id", "account_id", "name", "oneline_description", "description", "txt_api",
        "txt_support", "txt_features", "created_at", "updated_at", "logo_file_name",
        "logo_content_type", "logo_file_size", "state", "intentions_required", "draft_name",
        "infobar", "terms", "display_provider_keys", "tech_support_email", "admin_support_email",
        "credit_card_support_email", "buyers_manage_apps", "buyers_manage_keys",
        "custom_keys_enabled", "buyer_plan_change_permission", "buyer_can_select_plan",
        "notification_settings", "default_application_plan_id", "default_service_plan_id",
        "buyer_can_see_log_requests", "default_end_user_plan_id", "end_user_registration_required",
        "tenant_id", "system_name", "backend_version", "mandatory_app_key",
        "buyer_key_regenerate_enabled", "support_email", "referrer_filters_required",
        "deployment_option", "proxiable?", "backend_authentication_type",
        "backend_authentication_value", "proxy" })
public class BackendConfiguration implements Serializable {

    private static final long serialVersionUID = -6934510462570339651L;

    @JsonProperty("id")
    private long id;
    @JsonProperty("account_id")
    private long accountId;
    @JsonProperty("name")
    private String name;
    @JsonProperty("oneline_description")
    private String onelineDescription;
    @JsonProperty("description")
    private String description;
    @JsonProperty("txt_api")
    private String txtApi;
    @JsonProperty("txt_support")
    private String txtSupport;
    @JsonProperty("txt_features")
    private String txtFeatures;
    @JsonProperty("created_at")
    private String createdAt;
    @JsonProperty("updated_at")
    private String updatedAt;
    @JsonProperty("logo_file_name")
    private String logoFileName;
    @JsonProperty("logo_content_type")
    private String logoContentType;
    // Upstream sends this as an arbitrary JSON value; kept untyped deliberately.
    @JsonProperty("logo_file_size")
    private Object logoFileSize;
    @JsonProperty("state")
    private String state;
    @JsonProperty("intentions_required")
    private boolean intentionsRequired;
    @JsonProperty("draft_name")
    private String draftName;
    @JsonProperty("infobar")
    private Object infobar;
    @JsonProperty("terms")
    private Object terms;
    @JsonProperty("display_provider_keys")
    private boolean displayProviderKeys;
    @JsonProperty("tech_support_email")
    private String techSupportEmail;
    @JsonProperty("admin_support_email")
    private String adminSupportEmail;
    @JsonProperty("credit_card_support_email")
    private String creditCardSupportEmail;
    @JsonProperty("buyers_manage_apps")
    private boolean buyersManageApps;
    @JsonProperty("buyers_manage_keys")
    private boolean buyersManageKeys;
    @JsonProperty("custom_keys_enabled")
    private boolean customKeysEnabled;
    @JsonProperty("buyer_plan_change_permission")
    private String buyerPlanChangePermission;
    @JsonProperty("buyer_can_select_plan")
    private boolean buyerCanSelectPlan;
    @JsonProperty("notification_settings")
    private Object notificationSettings;
    @JsonProperty("default_application_plan_id")
    private long defaultApplicationPlanId;
    @JsonProperty("default_service_plan_id")
    private long defaultServicePlanId;
    @JsonProperty("buyer_can_see_log_requests")
    private boolean buyerCanSeeLogRequests;
    @JsonProperty("default_end_user_plan_id")
    private String defaultEndUserPlanId;
    @JsonProperty("end_user_registration_required")
    private boolean endUserRegistrationRequired;
    @JsonProperty("tenant_id")
    private long tenantId;
    @JsonProperty("system_name")
    private String systemName;
    // Drives getAuthType(): "1", "2", or "oauth".
    @JsonProperty("backend_version")
    private String backendVersion;
    @JsonProperty("mandatory_app_key")
    private boolean mandatoryAppKey;
    @JsonProperty("buyer_key_regenerate_enabled")
    private boolean buyerKeyRegenerateEnabled;
    @JsonProperty("support_email")
    private String supportEmail;
    @JsonProperty("referrer_filters_required")
    private boolean referrerFiltersRequired;
    @JsonProperty("deployment_option")
    private String deploymentOption;
    // The trailing '?' is part of the upstream JSON key; do not "fix" it.
    @JsonProperty("proxiable?")
    private boolean proxiable;
    @JsonProperty("backend_authentication_type")
    private String backendAuthenticationType;
    @JsonProperty("backend_authentication_value")
    private String backendAuthenticationValue;
    @JsonProperty("proxy")
    private Proxy proxy;
    // Catch-all for JSON keys not declared above (see @JsonAnyGetter/@JsonAnySetter).
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    // --- Accessors. Each property has a getter, a setter, and a fluent with*()
    // --- builder method that returns this for chaining.

    @JsonProperty("id") public long getId() { return id; }
    @JsonProperty("id") public void setId(long id) { this.id = id; }
    public BackendConfiguration withId(long id) { this.id = id; return this; }

    @JsonProperty("account_id") public long getAccountId() { return accountId; }
    @JsonProperty("account_id") public void setAccountId(long accountId) { this.accountId = accountId; }
    public BackendConfiguration withAccountId(long accountId) { this.accountId = accountId; return this; }

    @JsonProperty("name") public String getName() { return name; }
    @JsonProperty("name") public void setName(String name) { this.name = name; }
    public BackendConfiguration withName(String name) { this.name = name; return this; }

    @JsonProperty("oneline_description") public String getOnelineDescription() { return onelineDescription; }
    @JsonProperty("oneline_description") public void setOnelineDescription(String onelineDescription) { this.onelineDescription = onelineDescription; }
    public BackendConfiguration withOnelineDescription(String onelineDescription) { this.onelineDescription = onelineDescription; return this; }

    @JsonProperty("description") public String getDescription() { return description; }
    @JsonProperty("description") public void setDescription(String description) { this.description = description; }
    public BackendConfiguration withDescription(String description) { this.description = description; return this; }

    @JsonProperty("txt_api") public String getTxtApi() { return txtApi; }
    @JsonProperty("txt_api") public void setTxtApi(String txtApi) { this.txtApi = txtApi; }
    public BackendConfiguration withTxtApi(String txtApi) { this.txtApi = txtApi; return this; }

    @JsonProperty("txt_support") public String getTxtSupport() { return txtSupport; }
    @JsonProperty("txt_support") public void setTxtSupport(String txtSupport) { this.txtSupport = txtSupport; }
    public BackendConfiguration withTxtSupport(String txtSupport) { this.txtSupport = txtSupport; return this; }

    @JsonProperty("txt_features") public String getTxtFeatures() { return txtFeatures; }
    @JsonProperty("txt_features") public void setTxtFeatures(String txtFeatures) { this.txtFeatures = txtFeatures; }
    public BackendConfiguration withTxtFeatures(String txtFeatures) { this.txtFeatures = txtFeatures; return this; }

    @JsonProperty("created_at") public String getCreatedAt() { return createdAt; }
    @JsonProperty("created_at") public void setCreatedAt(String createdAt) { this.createdAt = createdAt; }
    public BackendConfiguration withCreatedAt(String createdAt) { this.createdAt = createdAt; return this; }

    @JsonProperty("updated_at") public String getUpdatedAt() { return updatedAt; }
    @JsonProperty("updated_at") public void setUpdatedAt(String updatedAt) { this.updatedAt = updatedAt; }
    public BackendConfiguration withUpdatedAt(String updatedAt) { this.updatedAt = updatedAt; return this; }

    @JsonProperty("logo_file_name") public String getLogoFileName() { return logoFileName; }
    @JsonProperty("logo_file_name") public void setLogoFileName(String logoFileName) { this.logoFileName = logoFileName; }
    public BackendConfiguration withLogoFileName(String logoFileName) { this.logoFileName = logoFileName; return this; }

    @JsonProperty("logo_content_type") public String getLogoContentType() { return logoContentType; }
    @JsonProperty("logo_content_type") public void setLogoContentType(String logoContentType) { this.logoContentType = logoContentType; }
    public BackendConfiguration withLogoContentType(String logoContentType) { this.logoContentType = logoContentType; return this; }

    @JsonProperty("logo_file_size") public Object getLogoFileSize() { return logoFileSize; }
    @JsonProperty("logo_file_size") public void setLogoFileSize(Object logoFileSize) { this.logoFileSize = logoFileSize; }
    public BackendConfiguration withLogoFileSize(Object logoFileSize) { this.logoFileSize = logoFileSize; return this; }

    @JsonProperty("state") public String getState() { return state; }
    @JsonProperty("state") public void setState(String state) { this.state = state; }
    public BackendConfiguration withState(String state) { this.state = state; return this; }

    @JsonProperty("intentions_required") public boolean isIntentionsRequired() { return intentionsRequired; }
    @JsonProperty("intentions_required") public void setIntentionsRequired(boolean intentionsRequired) { this.intentionsRequired = intentionsRequired; }
    public BackendConfiguration withIntentionsRequired(boolean intentionsRequired) { this.intentionsRequired = intentionsRequired; return this; }

    @JsonProperty("draft_name") public String getDraftName() { return draftName; }
    @JsonProperty("draft_name") public void setDraftName(String draftName) { this.draftName = draftName; }
    public BackendConfiguration withDraftName(String draftName) { this.draftName = draftName; return this; }

    @JsonProperty("infobar") public Object getInfobar() { return infobar; }
    @JsonProperty("infobar") public void setInfobar(Object infobar) { this.infobar = infobar; }
    public BackendConfiguration withInfobar(Object infobar) { this.infobar = infobar; return this; }

    @JsonProperty("terms") public Object getTerms() { return terms; }
    @JsonProperty("terms") public void setTerms(Object terms) { this.terms = terms; }
    public BackendConfiguration withTerms(Object terms) { this.terms = terms; return this; }

    @JsonProperty("display_provider_keys") public boolean isDisplayProviderKeys() { return displayProviderKeys; }
    @JsonProperty("display_provider_keys") public void setDisplayProviderKeys(boolean displayProviderKeys) { this.displayProviderKeys = displayProviderKeys; }
    public BackendConfiguration withDisplayProviderKeys(boolean displayProviderKeys) { this.displayProviderKeys = displayProviderKeys; return this; }

    @JsonProperty("tech_support_email") public String getTechSupportEmail() { return techSupportEmail; }
    @JsonProperty("tech_support_email") public void setTechSupportEmail(String techSupportEmail) { this.techSupportEmail = techSupportEmail; }
    public BackendConfiguration withTechSupportEmail(String techSupportEmail) { this.techSupportEmail = techSupportEmail; return this; }

    @JsonProperty("admin_support_email") public String getAdminSupportEmail() { return adminSupportEmail; }
    @JsonProperty("admin_support_email") public void setAdminSupportEmail(String adminSupportEmail) { this.adminSupportEmail = adminSupportEmail; }
    public BackendConfiguration withAdminSupportEmail(String adminSupportEmail) { this.adminSupportEmail = adminSupportEmail; return this; }

    @JsonProperty("credit_card_support_email") public String getCreditCardSupportEmail() { return creditCardSupportEmail; }
    @JsonProperty("credit_card_support_email") public void setCreditCardSupportEmail(String creditCardSupportEmail) { this.creditCardSupportEmail = creditCardSupportEmail; }
    public BackendConfiguration withCreditCardSupportEmail(String creditCardSupportEmail) { this.creditCardSupportEmail = creditCardSupportEmail; return this; }

    @JsonProperty("buyers_manage_apps") public boolean isBuyersManageApps() { return buyersManageApps; }
    @JsonProperty("buyers_manage_apps") public void setBuyersManageApps(boolean buyersManageApps) { this.buyersManageApps = buyersManageApps; }
    public BackendConfiguration withBuyersManageApps(boolean buyersManageApps) { this.buyersManageApps = buyersManageApps; return this; }

    @JsonProperty("buyers_manage_keys") public boolean isBuyersManageKeys() { return buyersManageKeys; }
    @JsonProperty("buyers_manage_keys") public void setBuyersManageKeys(boolean buyersManageKeys) { this.buyersManageKeys = buyersManageKeys; }
    public BackendConfiguration withBuyersManageKeys(boolean buyersManageKeys) { this.buyersManageKeys = buyersManageKeys; return this; }

    @JsonProperty("custom_keys_enabled") public boolean isCustomKeysEnabled() { return customKeysEnabled; }
    @JsonProperty("custom_keys_enabled") public void setCustomKeysEnabled(boolean customKeysEnabled) { this.customKeysEnabled = customKeysEnabled; }
    public BackendConfiguration withCustomKeysEnabled(boolean customKeysEnabled) { this.customKeysEnabled = customKeysEnabled; return this; }

    @JsonProperty("buyer_plan_change_permission") public String getBuyerPlanChangePermission() { return buyerPlanChangePermission; }
    @JsonProperty("buyer_plan_change_permission") public void setBuyerPlanChangePermission(String buyerPlanChangePermission) { this.buyerPlanChangePermission = buyerPlanChangePermission; }
    public BackendConfiguration withBuyerPlanChangePermission(String buyerPlanChangePermission) { this.buyerPlanChangePermission = buyerPlanChangePermission; return this; }

    @JsonProperty("buyer_can_select_plan") public boolean isBuyerCanSelectPlan() { return buyerCanSelectPlan; }
    @JsonProperty("buyer_can_select_plan") public void setBuyerCanSelectPlan(boolean buyerCanSelectPlan) { this.buyerCanSelectPlan = buyerCanSelectPlan; }
    public BackendConfiguration withBuyerCanSelectPlan(boolean buyerCanSelectPlan) { this.buyerCanSelectPlan = buyerCanSelectPlan; return this; }

    @JsonProperty("notification_settings") public Object getNotificationSettings() { return notificationSettings; }
    @JsonProperty("notification_settings") public void setNotificationSettings(Object notificationSettings) { this.notificationSettings = notificationSettings; }
    public BackendConfiguration withNotificationSettings(Object notificationSettings) { this.notificationSettings = notificationSettings; return this; }

    @JsonProperty("default_application_plan_id") public long getDefaultApplicationPlanId() { return defaultApplicationPlanId; }
    @JsonProperty("default_application_plan_id") public void setDefaultApplicationPlanId(long defaultApplicationPlanId) { this.defaultApplicationPlanId = defaultApplicationPlanId; }
    public BackendConfiguration withDefaultApplicationPlanId(long defaultApplicationPlanId) { this.defaultApplicationPlanId = defaultApplicationPlanId; return this; }

    @JsonProperty("default_service_plan_id") public long getDefaultServicePlanId() { return defaultServicePlanId; }
    @JsonProperty("default_service_plan_id") public void setDefaultServicePlanId(long defaultServicePlanId) { this.defaultServicePlanId = defaultServicePlanId; }
    public BackendConfiguration withDefaultServicePlanId(long defaultServicePlanId) { this.defaultServicePlanId = defaultServicePlanId; return this; }

    @JsonProperty("buyer_can_see_log_requests") public boolean isBuyerCanSeeLogRequests() { return buyerCanSeeLogRequests; }
    @JsonProperty("buyer_can_see_log_requests") public void setBuyerCanSeeLogRequests(boolean buyerCanSeeLogRequests) { this.buyerCanSeeLogRequests = buyerCanSeeLogRequests; }
    public BackendConfiguration withBuyerCanSeeLogRequests(boolean buyerCanSeeLogRequests) { this.buyerCanSeeLogRequests = buyerCanSeeLogRequests; return this; }

    @JsonProperty("default_end_user_plan_id") public String getDefaultEndUserPlanId() { return defaultEndUserPlanId; }
    @JsonProperty("default_end_user_plan_id") public void setDefaultEndUserPlanId(String defaultEndUserPlanId) { this.defaultEndUserPlanId = defaultEndUserPlanId; }
    public BackendConfiguration withDefaultEndUserPlanId(String defaultEndUserPlanId) { this.defaultEndUserPlanId = defaultEndUserPlanId; return this; }

    @JsonProperty("end_user_registration_required") public boolean isEndUserRegistrationRequired() { return endUserRegistrationRequired; }
    @JsonProperty("end_user_registration_required") public void setEndUserRegistrationRequired(boolean endUserRegistrationRequired) { this.endUserRegistrationRequired = endUserRegistrationRequired; }
    public BackendConfiguration withEndUserRegistrationRequired(boolean endUserRegistrationRequired) { this.endUserRegistrationRequired = endUserRegistrationRequired; return this; }

    @JsonProperty("tenant_id") public long getTenantId() { return tenantId; }
    @JsonProperty("tenant_id") public void setTenantId(long tenantId) { this.tenantId = tenantId; }
    public BackendConfiguration withTenantId(long tenantId) { this.tenantId = tenantId; return this; }

    @JsonProperty("system_name") public String getSystemName() { return systemName; }
    @JsonProperty("system_name") public void setSystemName(String systemName) { this.systemName = systemName; }
    public BackendConfiguration withSystemName(String systemName) { this.systemName = systemName; return this; }

    @JsonProperty("backend_version") public String getBackendVersion() { return backendVersion; }
    @JsonProperty("backend_version") public void setBackendVersion(String backendVersion) { this.backendVersion = backendVersion; }
    public BackendConfiguration withBackendVersion(String backendVersion) { this.backendVersion = backendVersion; return this; }

    @JsonProperty("mandatory_app_key") public boolean isMandatoryAppKey() { return mandatoryAppKey; }
    @JsonProperty("mandatory_app_key") public void setMandatoryAppKey(boolean mandatoryAppKey) { this.mandatoryAppKey = mandatoryAppKey; }
    public BackendConfiguration withMandatoryAppKey(boolean mandatoryAppKey) { this.mandatoryAppKey = mandatoryAppKey; return this; }

    @JsonProperty("buyer_key_regenerate_enabled") public boolean isBuyerKeyRegenerateEnabled() { return buyerKeyRegenerateEnabled; }
    @JsonProperty("buyer_key_regenerate_enabled") public void setBuyerKeyRegenerateEnabled(boolean buyerKeyRegenerateEnabled) { this.buyerKeyRegenerateEnabled = buyerKeyRegenerateEnabled; }
    public BackendConfiguration withBuyerKeyRegenerateEnabled(boolean buyerKeyRegenerateEnabled) { this.buyerKeyRegenerateEnabled = buyerKeyRegenerateEnabled; return this; }

    @JsonProperty("support_email") public String getSupportEmail() { return supportEmail; }
    @JsonProperty("support_email") public void setSupportEmail(String supportEmail) { this.supportEmail = supportEmail; }
    public BackendConfiguration withSupportEmail(String supportEmail) { this.supportEmail = supportEmail; return this; }

    @JsonProperty("referrer_filters_required") public boolean isReferrerFiltersRequired() { return referrerFiltersRequired; }
    @JsonProperty("referrer_filters_required") public void setReferrerFiltersRequired(boolean referrerFiltersRequired) { this.referrerFiltersRequired = referrerFiltersRequired; }
    public BackendConfiguration withReferrerFiltersRequired(boolean referrerFiltersRequired) { this.referrerFiltersRequired = referrerFiltersRequired; return this; }

    @JsonProperty("deployment_option") public String getDeploymentOption() { return deploymentOption; }
    @JsonProperty("deployment_option") public void setDeploymentOption(String deploymentOption) { this.deploymentOption = deploymentOption; }
    public BackendConfiguration withDeploymentOption(String deploymentOption) { this.deploymentOption = deploymentOption; return this; }

    @JsonProperty("proxiable?") public boolean isProxiable() { return proxiable; }
    @JsonProperty("proxiable?") public void setProxiable(boolean proxiable) { this.proxiable = proxiable; }
    public BackendConfiguration withProxiable(boolean proxiable) { this.proxiable = proxiable; return this; }

    @JsonProperty("backend_authentication_type") public String getBackendAuthenticationType() { return backendAuthenticationType; }
    @JsonProperty("backend_authentication_type") public void setBackendAuthenticationType(String backendAuthenticationType) { this.backendAuthenticationType = backendAuthenticationType; }
    public BackendConfiguration withBackendAuthenticationType(String backendAuthenticationType) { this.backendAuthenticationType = backendAuthenticationType; return this; }

    @JsonProperty("backend_authentication_value") public String getBackendAuthenticationValue() { return backendAuthenticationValue; }
    @JsonProperty("backend_authentication_value") public void setBackendAuthenticationValue(String backendAuthenticationValue) { this.backendAuthenticationValue = backendAuthenticationValue; }
    public BackendConfiguration withBackendAuthenticationValue(String backendAuthenticationValue) { this.backendAuthenticationValue = backendAuthenticationValue; return this; }

    @JsonProperty("proxy") public Proxy getProxy() { return proxy; }
    @JsonProperty("proxy") public void setProxy(Proxy proxy) { this.proxy = proxy; }
    public BackendConfiguration withProxy(Proxy proxy) { this.proxy = proxy; return this; }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    /** @return mutable map of JSON keys not bound to a declared field */
    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    public BackendConfiguration withAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
        return this;
    }

    /**
     * Maps the 3scale {@code backend_version} discriminator onto the gateway's
     * auth-type enum: {@code "1"} = API key, {@code "2"} = app id/key pair,
     * {@code "oauth"} = OAuth.
     *
     * @return the resolved auth type
     * @throws IllegalStateException if {@code backend_version} is an unknown value
     *         (a {@code NullPointerException} results if it was never set — the
     *         switch dereferences it; unchanged from the original behavior)
     */
    @SuppressWarnings("nls")
    public AuthTypeEnum getAuthType() {
        switch (getBackendVersion()) {
        case "1":
            return AuthTypeEnum.API_KEY;
        case "2":
            return AuthTypeEnum.APP_ID;
        case "oauth": // Yes, really.
            return AuthTypeEnum.OAUTH;
        default:
            throw new IllegalStateException("Unrecognised auth type: " + getBackendVersion());
        }
    }

    @Override
    public int hashCode() {
        // Field order must stay in sync with equals() below.
        return new HashCodeBuilder().append(id).append(accountId).append(name).append(onelineDescription).append(description).append(txtApi)
                .append(txtSupport).append(txtFeatures).append(createdAt).append(updatedAt).append(logoFileName).append(logoContentType)
                .append(logoFileSize).append(state).append(intentionsRequired).append(draftName).append(infobar).append(terms)
                .append(displayProviderKeys).append(techSupportEmail).append(adminSupportEmail).append(creditCardSupportEmail)
                .append(buyersManageApps).append(buyersManageKeys).append(customKeysEnabled).append(buyerPlanChangePermission)
                .append(buyerCanSelectPlan).append(notificationSettings).append(defaultApplicationPlanId).append(defaultServicePlanId)
                .append(buyerCanSeeLogRequests).append(defaultEndUserPlanId).append(endUserRegistrationRequired).append(tenantId).append(systemName)
                .append(backendVersion).append(mandatoryAppKey).append(buyerKeyRegenerateEnabled).append(supportEmail).append(referrerFiltersRequired)
                .append(deploymentOption).append(proxiable).append(backendAuthenticationType).append(backendAuthenticationValue).append(proxy)
                .append(additionalProperties).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if ((other instanceof BackendConfiguration) == false) {
            return false;
        }
        BackendConfiguration rhs = ((BackendConfiguration) other);
        return new EqualsBuilder().append(id, rhs.id).append(accountId, rhs.accountId).append(name, rhs.name)
                .append(onelineDescription, rhs.onelineDescription).append(description, rhs.description).append(txtApi, rhs.txtApi)
                .append(txtSupport, rhs.txtSupport).append(txtFeatures, rhs.txtFeatures).append(createdAt, rhs.createdAt)
                .append(updatedAt, rhs.updatedAt).append(logoFileName, rhs.logoFileName).append(logoContentType, rhs.logoContentType)
                .append(logoFileSize, rhs.logoFileSize).append(state, rhs.state).append(intentionsRequired, rhs.intentionsRequired)
                .append(draftName, rhs.draftName).append(infobar, rhs.infobar).append(terms, rhs.terms)
                .append(displayProviderKeys, rhs.displayProviderKeys).append(techSupportEmail, rhs.techSupportEmail)
                .append(adminSupportEmail, rhs.adminSupportEmail).append(creditCardSupportEmail, rhs.creditCardSupportEmail)
                .append(buyersManageApps, rhs.buyersManageApps).append(buyersManageKeys, rhs.buyersManageKeys)
                .append(customKeysEnabled, rhs.customKeysEnabled).append(buyerPlanChangePermission, rhs.buyerPlanChangePermission)
                .append(buyerCanSelectPlan, rhs.buyerCanSelectPlan).append(notificationSettings, rhs.notificationSettings)
                .append(defaultApplicationPlanId, rhs.defaultApplicationPlanId).append(defaultServicePlanId, rhs.defaultServicePlanId)
                .append(buyerCanSeeLogRequests, rhs.buyerCanSeeLogRequests).append(defaultEndUserPlanId, rhs.defaultEndUserPlanId)
                .append(endUserRegistrationRequired, rhs.endUserRegistrationRequired).append(tenantId, rhs.tenantId)
                .append(systemName, rhs.systemName).append(backendVersion, rhs.backendVersion).append(mandatoryAppKey, rhs.mandatoryAppKey)
                .append(buyerKeyRegenerateEnabled, rhs.buyerKeyRegenerateEnabled).append(supportEmail, rhs.supportEmail)
                .append(referrerFiltersRequired, rhs.referrerFiltersRequired).append(deploymentOption, rhs.deploymentOption)
                .append(proxiable, rhs.proxiable).append(backendAuthenticationType, rhs.backendAuthenticationType)
                .append(backendAuthenticationValue, rhs.backendAuthenticationValue).append(proxy, rhs.proxy)
                .append(additionalProperties, rhs.additionalProperties).isEquals();
    }
}
package apple.metrickit;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMeasurement;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.NSUnitDuration;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * MXLocationActivityMetric
 * <p>
 * An MXMetric subclass that encapsulates location metrics
 * <p>
 * The metrics contained in this class describe properties of location activity. See MXAppRunTimeMetric for time spent performing location activities.
 * <p>
 * NOTE(review): this is a Multi-OS Engine (NatJ) generated binding for the
 * MetricKit Objective-C class of the same name; every member is either a native
 * stub bound via {@code @Selector} or standard NSObject machinery. Do not edit
 * by hand beyond documentation — selector strings and annotations are the ABI.
 */
@Generated
@Library("MetricKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class MXLocationActivityMetric extends MXMetric {
    static {
        // Registers this binding class with the NatJ runtime before first use.
        NatJ.register();
    }

    @Generated
    protected MXLocationActivityMetric(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native MXLocationActivityMetric alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native MXLocationActivityMetric allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    /**
     * [@property] cumulativeBestAccuracyForNavigationTime
     * <p>
     * Cumulative time spent acquiring location at kCLLocationAccuracyBestForNavigation.
     * <p>
     * Dimensioned as NSUnitDuration.
     */
    @Generated
    @Selector("cumulativeBestAccuracyForNavigationTime")
    public native NSMeasurement<NSUnitDuration> cumulativeBestAccuracyForNavigationTime();

    /**
     * [@property] cumulativeBestAccuracyTime
     * <p>
     * Cumulative time spent acquiring location at kCLLocationAccuracyBest.
     * <p>
     * Dimensioned as NSUnitDuration.
     */
    @Generated
    @Selector("cumulativeBestAccuracyTime")
    public native NSMeasurement<NSUnitDuration> cumulativeBestAccuracyTime();

    /**
     * [@property] cumulativeHundredMetersAccuracyTime
     * <p>
     * Cumulative time spent acquiring location at kCLLocationAccuracyHundredMeters.
     * <p>
     * Dimensioned as NSUnitDuration.
     */
    @Generated
    @Selector("cumulativeHundredMetersAccuracyTime")
    public native NSMeasurement<NSUnitDuration> cumulativeHundredMetersAccuracyTime();

    /**
     * [@property] cumulativeKilometerAccuracyTime
     * <p>
     * Cumulative time spent acquiring location at kCLLocationAccuracyKilometer.
     * <p>
     * Dimensioned as NSUnitDuration.
     */
    @Generated
    @Selector("cumulativeKilometerAccuracyTime")
    public native NSMeasurement<NSUnitDuration> cumulativeKilometerAccuracyTime();

    /**
     * [@property] cumulativeNearestTenMetersAccuracyTime
     * <p>
     * Cumulative time spent acquiring location at kCLLocationAccuracyNearestTenMeters.
     * <p>
     * Dimensioned as NSUnitDuration.
     */
    @Generated
    @Selector("cumulativeNearestTenMetersAccuracyTime")
    public native NSMeasurement<NSUnitDuration> cumulativeNearestTenMetersAccuracyTime();

    /**
     * [@property] cumulativeThreeKilometersAccuracyTime
     * <p>
     * Cumulative time spent acquiring location at kCLLocationAccuracyThreeKilometers.
     * <p>
     * Dimensioned as NSUnitDuration.
     */
    @Generated
    @Selector("cumulativeThreeKilometersAccuracyTime")
    public native NSMeasurement<NSUnitDuration> cumulativeThreeKilometersAccuracyTime();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native MXLocationActivityMetric init();

    @Generated
    @Selector("initWithCoder:")
    public native MXLocationActivityMetric initWithCoder(NSCoder coder);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    // "new" is a Java keyword, hence the new_objc alias for the +new selector.
    @Generated
    @Owned
    @Selector("new")
    public static native MXLocationActivityMetric new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();

    // Instance-side bridge for the NSSecureCoding protocol class method.
    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
package edu.washington.nsre.util;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;

/**
 * Delimited-file Reader ("DR"): streams tab-separated, escaped text files row
 * by row, with helpers for grouping consecutive rows that agree on one or more
 * key columns into "blocks". Prints a progress line to stdout every 100000
 * rows read.
 */
public class DR {
    // Default read-buffer size (8 MiB) used by the convenience constructors.
    private int BUFFER_SIZE = 8 * 1024 * 1024;
    private BufferedReader br;
    // Flips to true once the underlying stream is exhausted.
    public boolean EOF = false;
    // Number of rows read so far (drives the progress printout).
    int count = 0;
    // Timestamp of the previous progress printout, in epoch millis.
    long sec_last100000 = (new Date()).getTime();

    /**
     * Opens {@code filename} as UTF-8. Any failure is only printed; the
     * reader is then unusable (subsequent reads will NPE) — preserved from the
     * original behavior.
     */
    public DR(String filename) {
        try {
            br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), "utf-8"), BUFFER_SIZE);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Opens {@code filename} as UTF-8 with an explicit buffer size. */
    public DR(String filename, int bufferSize) throws IOException {
        br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), "utf-8"), bufferSize);
    }

    /** Opens {@code filename} with an explicit character set. */
    public DR(String filename, String charset) throws IOException {
        br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), charset), BUFFER_SIZE);
    }

    /** Opens {@code filename}, optionally gzip-compressed, with an explicit character set. */
    public DR(String filename, String charset, boolean gzip) throws IOException {
        java.io.InputStream source = gzip
                ? new java.util.zip.GZIPInputStream(new FileInputStream(filename))
                : new FileInputStream(filename);
        br = new BufferedReader(new InputStreamReader(source, charset), BUFFER_SIZE);
    }

    /**
     * Reads one line and splits it into unescaped tab-separated columns.
     *
     * @return the columns, or {@code null} at end of file (sets {@link #EOF})
     *         or on I/O error (error printed, EOF left unchanged).
     */
    public String[] read() {
        try {
            count++;
            // Progress printout every 100000 rows: count, millis since last, date.
            if (count % 100000 == 0) {
                Date now = new Date();
                long millis = now.getTime();
                System.out.print(count + "\t" + (millis - sec_last100000) + "\t" + now + "\r");
                sec_last100000 = millis;
                System.out.flush();
            }
            String line = br.readLine();
            if (line == null) {
                this.EOF = true;
                return null;
            }
            return toCols(line);
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    /** Splits {@code line} on tab characters and unescapes each field. */
    public static String[] toCols(String line) {
        String[] parts = new String[32];
        int n = 0;
        int from = 0;
        for (int tab; (tab = line.indexOf('\t', from)) != -1; from = tab + 1) {
            if (n >= parts.length) {
                parts = extendBuffer(parts);
            }
            parts[n++] = DelimitedEscape.unescape(line.substring(from, tab));
        }
        if (n >= parts.length) {
            parts = extendBuffer(parts);
        }
        // Trailing field after the last tab (or the whole line if no tabs).
        parts[n++] = DelimitedEscape.unescape(line.substring(from));
        return Arrays.copyOf(parts, n);
    }

    /** Returns a copy of {@code buffer} with double the capacity. */
    private static String[] extendBuffer(String[] buffer) {
        return Arrays.copyOf(buffer, 2 * buffer.length);
    }

    /** Reads every remaining row into a list. */
    public List<String[]> readAll() throws IOException {
        List<String[]> all = new ArrayList<String[]>();
        for (String[] row = read(); row != null; row = read()) {
            all.add(row);
        }
        return all;
    }

    /**
     * Reads every remaining row into a list. {@code MAX} is used only as the
     * initial list capacity — all rows are still read (preserved behavior).
     */
    public List<String[]> readAll(int MAX) throws IOException {
        List<String[]> all = new ArrayList<String[]>(MAX);
        for (String[] row = read(); row != null; row = read()) {
            all.add(row);
        }
        return all;
    }

    /**
     * Reads every remaining row into a map from column {@code keyId} to column
     * {@code valueId}; later rows overwrite earlier ones on key collision.
     */
    public HashMap<String, String> readAll2Hash(int keyId, int valueId) throws IOException {
        HashMap<String, String> map = new HashMap<String, String>();
        for (String[] row = read(); row != null; row = read()) {
            map.put(row[keyId], row[valueId]);
        }
        return map;
    }

    // One-row lookahead shared by readBlock(int), readUntil, readBlock(int[])
    // and readBlocklimited(int[], int): holds the first row of the NEXT block.
    private String[] blockbuffer;

    /**
     * Returns the run of consecutive rows whose column {@code key} equals that
     * of the run's first row, or {@code null} once the input is exhausted.
     */
    public List<String[]> readBlock(int key) {
        if (blockbuffer == null) {
            blockbuffer = this.read();
        }
        if (this.EOF || blockbuffer == null) {
            return null;
        }
        List<String[]> rows = new ArrayList<String[]>();
        rows.add(blockbuffer);
        String firstKey = blockbuffer[key];
        String[] row;
        while ((row = this.read()) != null && row[key].equals(firstKey)) {
            rows.add(row);
        }
        // Stash the first non-matching row (or null at EOF) for the next call.
        blockbuffer = row;
        return rows;
    }

    /**
     * Returns the buffered row plus all following rows whose integer column
     * {@code key} is {@code <= until}. Note: the buffered first row is
     * included without checking its key (preserved behavior).
     */
    public List<String[]> readUntil(int key, int until) {
        if (blockbuffer == null) {
            blockbuffer = this.read();
        }
        if (this.EOF || blockbuffer == null) {
            return null;
        }
        List<String[]> rows = new ArrayList<String[]>();
        rows.add(blockbuffer);
        String[] row;
        while ((row = this.read()) != null && Integer.parseInt(row[key]) <= until) {
            rows.add(row);
        }
        blockbuffer = row;
        return rows;
    }

    // Separate lookahead used only by readBlocklimited(int, int).
    private String[] blockbufferlimited;

    /**
     * Like {@link #readBlock(int)} but keeps at most {@code limitedsize} rows;
     * rows beyond the limit are consumed and discarded so the stream still
     * advances to the next block.
     */
    public List<String[]> readBlocklimited(int key, int limitedsize) throws IOException {
        if (blockbufferlimited == null) {
            blockbufferlimited = this.read();
        }
        if (this.EOF || blockbufferlimited == null) {
            return null;
        }
        List<String[]> rows = new ArrayList<String[]>();
        rows.add(blockbufferlimited);
        String firstKey = blockbufferlimited[key];
        String[] row;
        while ((row = this.read()) != null && row[key].equals(firstKey)) {
            if (rows.size() < limitedsize) {
                rows.add(row);
            }
        }
        blockbufferlimited = row;
        return rows;
    }

    /**
     * Returns the run of consecutive rows that agree with the run's first row
     * on every column in {@code keys}, or {@code null} at end of input.
     */
    public List<String[]> readBlock(int[] keys) throws IOException {
        if (blockbuffer == null) {
            blockbuffer = this.read();
        }
        if (this.EOF || blockbuffer == null) {
            return null;
        }
        List<String[]> rows = new ArrayList<String[]>();
        rows.add(blockbuffer);
        String[] first = blockbuffer;
        String[] row;
        while ((row = this.read()) != null) {
            if (!sameKeys(row, first, keys)) {
                break;
            }
            rows.add(row);
        }
        blockbuffer = row;
        return rows;
    }

    /** True when {@code a} and {@code b} agree on every column in {@code keys}. */
    private static boolean sameKeys(String[] a, String[] b, int[] keys) {
        for (int k : keys) {
            if (!a[k].equals(b[k])) {
                return false;
            }
        }
        return true;
    }

    /**
     * Multi-key variant of {@link #readBlocklimited(int, int)}. NOTE: this
     * shares the {@code blockbuffer} lookahead with {@link #readBlock(int[])}
     * rather than {@code blockbufferlimited} — preserved from the original.
     */
    public List<String[]> readBlocklimited(int[] keys, int limitedsize) throws IOException {
        if (blockbuffer == null) {
            blockbuffer = this.read();
        }
        if (this.EOF || blockbuffer == null) {
            return null;
        }
        List<String[]> rows = new ArrayList<String[]>();
        rows.add(blockbuffer);
        String[] first = blockbuffer;
        String[] row;
        while ((row = this.read()) != null) {
            if (!sameKeys(row, first, keys)) {
                break;
            }
            if (rows.size() < limitedsize) {
                rows.add(row);
            }
        }
        blockbuffer = row;
        return rows;
    }

    /** Closes the underlying reader; I/O errors are only printed. */
    public void close() {
        try {
            br.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Reads column {@code columnId} of every row of {@code file} into a set. */
    public static Set<String> readColumn(String file, int columnId) {
        Set<String> values = new HashSet<String>();
        DR dr = new DR(file);
        for (String[] row = dr.read(); row != null; row = dr.read()) {
            values.add(row[columnId]);
        }
        dr.close();
        return values;
    }

    /** Reads integer column {@code columnId} of every row of {@code file} into a set. */
    public static Set<Integer> readColumnInt(String file, int columnId) {
        Set<Integer> values = new HashSet<Integer>();
        DR dr = new DR(file);
        for (String[] row = dr.read(); row != null; row = dr.read()) {
            values.add(Integer.parseInt(row[columnId]));
        }
        dr.close();
        return values;
    }
}
/** * Copyright 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.waveprotocol.wave.model.document.util; import org.waveprotocol.wave.model.document.AnnotationMutationHandler; import org.waveprotocol.wave.model.document.MutableAnnotationSet; import org.waveprotocol.wave.model.document.MutableDocument; import org.waveprotocol.wave.model.document.MutableDocumentImpl; import org.waveprotocol.wave.model.document.indexed.AnnotationSetListener; import org.waveprotocol.wave.model.document.indexed.AnnotationTree; import org.waveprotocol.wave.model.document.indexed.DocumentHandler; import org.waveprotocol.wave.model.document.indexed.IndexedDocument; import org.waveprotocol.wave.model.document.indexed.LocationMapper; import org.waveprotocol.wave.model.document.indexed.ObservableIndexedDocument; import org.waveprotocol.wave.model.document.operation.Attributes; import org.waveprotocol.wave.model.document.operation.DocInitialization; import org.waveprotocol.wave.model.document.operation.automaton.DocumentSchema; import org.waveprotocol.wave.model.document.raw.RawDocument; import org.waveprotocol.wave.model.document.raw.TextNodeOrganiser; import org.waveprotocol.wave.model.document.raw.impl.Element; import org.waveprotocol.wave.model.document.raw.impl.Node; import org.waveprotocol.wave.model.document.raw.impl.RawDocumentImpl; import org.waveprotocol.wave.model.document.raw.impl.Text; import 
org.waveprotocol.wave.model.document.util.ContextProviders.TestDocumentContext.MiscListener;
import org.waveprotocol.wave.model.operation.OperationException;
import org.waveprotocol.wave.model.operation.OperationRuntimeException;
import org.waveprotocol.wave.model.util.Box;

import java.util.Iterator;
import java.util.Map;

/**
 * Document context providers
 *
 * <p>Static factories that assemble a self-contained {@link DocumentContext}
 * (raw doc + persistent view + annotations + indexed/mutable wrappers) backed
 * by the POJO {@link RawDocumentImpl}, primarily for tests.
 *
 * @author danilatos@google.com (Daniel Danilatos)
 */
public class ContextProviders {

  /**
   * Extension for testing purposes, exposing some more internals
   */
  public interface TestDocumentContext<N, E extends N, T extends N>
      extends DocumentContext<N, E, T> {

    /** Callback hooks for miscellaneous internal events. */
    public interface MiscListener {
      // void onBegin();
      // void onFinish();
      void onSchedulePaint(Node node);
    }

    /** Gets the full raw document, including local (non-persistent) content. */
    RawDocument<N, E, T> getFullRawDoc();

    /** Gets the persistent-only view of the raw document. */
    RawDocument<N, E, T> getPersistentRawDoc();

    /** Gets the indexed doc */
    IndexedDocument<N, E, T> getIndexedDoc();
  }

  /**
   * A {@link LocalDocument} that reads through the full raw document (via
   * {@link IdentityView}) and delegates every local/transparent mutation to a
   * wrapped {@link WritableLocalDocument}.
   */
  public static class LocalDocImpl<N, E extends N, T extends N> extends IdentityView<N, E, T>
      implements LocalDocument<N, E, T> {

    /** Target for all transparent (non-persistent) mutations. */
    private final WritableLocalDocument<N, E, T> writable;

    /**
     * @param fullDoc the full raw document used for read access
     * @param local receives all transparent mutations
     */
    public LocalDocImpl(RawDocument<N, E, T> fullDoc, WritableLocalDocument<N, E, T> local) {
      super(fullDoc);
      this.writable = local;
    }

    // NOTE(review): this method-level <T> shadows the class's T (the text node
    // type) — preserved as-is from the original.
    @Override
    public <T> T getProperty(Property<T> property, E element) {
      return writable.getProperty(property, element);
    }

    @Override
    public boolean isDestroyed(E element) {
      return writable.isDestroyed(element);
    }

    // Same type-parameter shadowing as getProperty above.
    @Override
    public <T> void setProperty(Property<T> property, E element, T value) {
      writable.setProperty(property, element, value);
    }

    @Override
    public T transparentCreate(String text, E parent, N nodeAfter) {
      return writable.transparentCreate(text, parent, nodeAfter);
    }

    @Override
    public E transparentCreate(String tagName, Map<String, String> attributes,
        E parent, N nodeAfter) {
      return writable.transparentCreate(tagName, attributes, parent, nodeAfter);
    }

    @Override
    public void transparentSetAttribute(E element, String name, String value) {
      writable.transparentSetAttribute(element, name, value);
    }

    @Override
    public void transparentDeepRemove(N node) {
      writable.transparentDeepRemove(node);
    }

    @Override
    public void transparentMove(E newParent, N fromIncl, N toExcl, N refChild) {
      writable.transparentMove(newParent, fromIncl, toExcl, refChild);
    }

    @Override
    public N transparentSlice(N splitAt) {
      return writable.transparentSlice(splitAt);
    }

    @Override
    public void transparentUnwrap(E element) {
      writable.transparentUnwrap(element);
    }

    @Override
    public void markNodeForPersistence(N localNode, boolean lazy) {
      writable.markNodeForPersistence(localNode, lazy);
    }

    @Override
    public boolean isTransparent(N node) {
      return writable.isTransparent(node);
    }
  }

  /**
   * Variant that routes annotation changes through an {@link AnnotationRegistry}.
   * The {@link Box} is used so the listener can reference the context being
   * constructed (the listener closes over {@code box.boxed}).
   *
   * @param docHandler
   * @return a self-contained document context suitable for testing
   */
  public static TestDocumentContext<Node, Element, Text> createTestPojoContext2(
      final String initialInnerXml,
      final DocumentHandler<Node, Element, Text> docHandler,
      final AnnotationRegistry annotationRegistry,
      final MiscListener miscListener,
      final DocumentSchema schemaConstraints) {
    final Box<TestDocumentContext<Node, Element, Text>> box = Box.create();
    return box.boxed = createTestPojoContext(initialInnerXml, docHandler,
        new AnnotationSetListener<Object>() {
          @Override
          public void onAnnotationChange(int start, int end, String key, Object newValue) {
            // Fan each annotation change out to every handler registered for this key.
            Iterator<AnnotationMutationHandler> handlers = annotationRegistry.getHandlers(key);
            while (handlers.hasNext()) {
              handlers.next().handleAnnotationChange(
                  box.boxed, start, end, key, newValue);
            }
          }
        }, miscListener, schemaConstraints);
  }

  /**
   * @param docHandler
   * @param annotationSetListener
   * @return a self-contained document context suitable for testing
   */
  public static TestDocumentContext<Node, Element, Text> createTestPojoContext(
      final String initialInnerXml,
      final DocumentHandler<Node, Element, Text> docHandler,
      final AnnotationSetListener<Object> annotationSetListener,
      final MiscListener miscListener,
      final DocumentSchema schemaConstraints) {
    return createTestPojoContext(
        // FIXME(ohler): it's a bit weird that we parse into an IndexedDocument just to
        // get its asOperation().
        DocProviders.POJO.parse(initialInnerXml).asOperation(),
        docHandler, annotationSetListener, miscListener, schemaConstraints);
  }

  /**
   * Builds the context from an initial-content operation. A null
   * {@code annotationSetListener} is replaced with a no-op listener. The
   * initial content is applied via {@code consume} at the end; an
   * {@link OperationException} there is rethrown unchecked.
   *
   * @param docHandler
   * @param annotationSetListener
   * @return a self-contained document context suitable for testing
   */
  public static TestDocumentContext<Node, Element, Text> createTestPojoContext(
      final DocInitialization initialContent,
      final DocumentHandler<Node, Element, Text> docHandler,
      final AnnotationSetListener<Object> annotationSetListener,
      final MiscListener miscListener,
      final DocumentSchema schemaConstraints) {
    final AnnotationSetListener<Object> annotationListener = annotationSetListener != null
        ? annotationSetListener : new AnnotationSetListener<Object>() {
          @Override
          public void onAnnotationChange(int start, int end, String key, Object newValue) {
            // Do nothing
          }
        };
    // NOTE(review): field initialization order below matters — persistentDoc
    // depends on fullDoc, localDoc/indexedDoc on persistentDoc, mutableDoc on
    // indexedDoc, localAnnotations on fullAnnotations.
    TestDocumentContext<Node, Element, Text> documentContext =
        new TestDocumentContext<Node, Element, Text>() {
          private final RawDocument<Node, Element, Text> fullDoc =
              RawDocumentImpl.PROVIDER.create("doc", Attributes.EMPTY_MAP);
          private final PersistentContent<Node, Element, Text> persistentDoc =
              new RepaintingPersistentContent<Node, Element, Text>(
                  fullDoc, Element.ELEMENT_MANAGER) {
                @Override
                protected void schedulePaint(Node node) {
                  // Forward paint scheduling to the test hook, if any.
                  if (miscListener != null) {
                    miscListener.onSchedulePaint(node);
                  }
                }
              };
          // NOTE(review): not private final like its siblings — preserved as-is.
          AnnotationTree<Object> fullAnnotations = new AnnotationTree<Object>(
              "a", "b", annotationListener);
          private final LocalDocImpl<Node, Element, Text> localDoc =
              new LocalDocImpl<Node, Element, Text>(fullDoc, persistentDoc);
          private final IndexedDocument<Node, Element, Text> indexedDoc =
              new ObservableIndexedDocument<Node, Element, Text, Void>(
                  docHandler, persistentDoc, fullAnnotations, schemaConstraints) {
            // @Override
            // public void begin() {
            //   super.begin();
            //   if (miscListener != null) {
            //     miscListener.onBegin();
            //   }
            // }
            //
            // @Override
            // public void finish() {
            //   if (miscListener != null) {
            //     miscListener.onFinish();
            //   }
            //   super.finish();
            // }
          };
          private final MutableDocument<Node, Element, Text> mutableDoc =
              new MutableDocumentImpl<Node, Element, Text>(
                  DocProviders.createTrivialSequencer(indexedDoc, null), indexedDoc);
          private final LocalAnnotationSetImpl localAnnotations =
              new LocalAnnotationSetImpl(fullAnnotations);

          @Override
          public LocalDocument<Node, Element, Text> annotatableContent() {
            return localDoc;
          }

          @Override
          public MutableDocument<Node, Element, Text> document() {
            return mutableDoc;
          }

          @Override
          public ElementManager<Element> elementManager() {
            return Element.ELEMENT_MANAGER;
          }

          @Override
          public MutableAnnotationSet.Local localAnnotations() {
            return localAnnotations;
          }

          @Override
          public LocationMapper<Node> locationMapper() {
            return indexedDoc;
          }

          @Override
          public ReadableDocumentView<Node, Element, Text> persistentView() {
            return persistentDoc;
          }

          @Override
          public ReadableDocumentView<Node, Element, Text> hardView() {
            return persistentDoc.hardView();
          }

          @Override
          public TextNodeOrganiser<Text> textNodeOrganiser() {
            return indexedDoc;
          }

          @Override
          public IndexedDocument<Node, Element, Text> getIndexedDoc() {
            return indexedDoc;
          }

          @Override
          public RawDocument<Node, Element, Text> getFullRawDoc() {
            return fullDoc;
          }

          @Override
          public RawDocument<Node, Element, Text> getPersistentRawDoc() {
            return persistentDoc;
          }
        };
    try {
      // Apply the initial content to the freshly built (empty) document.
      documentContext.getIndexedDoc().consume(initialContent);
    } catch (OperationException e) {
      throw new OperationRuntimeException("Invalid constructing op", e);
    }
    return documentContext;
  }
}
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.Enumeration;
import java.util.TooManyListenersException;
import java.util.Vector;
import javax.comm.CommPortIdentifier;
import javax.comm.NoSuchPortException;
import javax.comm.PortInUseException;
import javax.comm.SerialPort;
import javax.comm.SerialPortEvent;
import javax.comm.SerialPortEventListener;
import javax.comm.UnsupportedCommOperationException;

/**
 * Class that handles a Bluetooth connection.
 *
 * <p>Wraps a javax.comm serial port: opens it (explicitly, via a selection
 * dialog, or by listening on all free serial ports for incoming data) and
 * exposes a {@link Writer} for sending data to the connected client.
 */
public class SerialPortConnection implements SerialPortEventListener {

  static CommPortIdentifier portId;
  static String TimeStamp;
  protected SerialPort port;
  protected OutputStream outputStream;
  // Writer over outputStream; handed to callers via getWriter().
  protected Writer outputWriter;
  protected BufferedReader inputReader;
  // Owning frame, used as parent for the error/cancel dialogs.
  protected CloseableFrame caller;
  protected String defaultPort;
  protected boolean validOldPort;
  private Simulator simulator;
  private boolean portOpenAndReadyForUse = false;
  // Serializes serialEvent() handling across ports.
  private Object lock = new Object();
  // True while listenForPort() is waiting for a DATA_AVAILABLE event.
  private boolean isWaitingOnEvent;
  private CancelDialog cancelDialog;
  // Ports temporarily opened during listenForPort(); closed when done.
  private Vector portsOpenWhileListening;

  /**
   * @param defaultPort port name to open at startup, or the literal string
   *     "null" to skip automatic opening
   * @param caller parent frame for dialogs
   * @param simulator used to persist the chosen port name
   */
  public SerialPortConnection(String defaultPort, CloseableFrame caller,
      Simulator simulator) {
    this.simulator = simulator;
    this.caller = caller;
    this.defaultPort = defaultPort;
    validOldPort = false;
    TimeStamp = new java.util.Date().toString();
    // If automatic port opening at startup is enabled (a non null port is
    // supplied)
    if (!defaultPort.equals("null")) {
      openPort(defaultPort);
    } else {
      System.out
          .println("SerialPortConnection.SerialPortConnection() no default port supplied, will not open port.");
    }
  }

  /**
   * Open a port to make it ready for communication.
   *
   * <p>Closes any previously opened port first; on success sets
   * {@code portOpenAndReadyForUse}, on any failure shows an ErrorDialog and
   * leaves the flag unchanged (false when called from listenForPort/changePort).
   *
   * @param portParam
   *          the port to open
   */
  private void openPort(String portParam) {
    System.out.println("SerialPortConnection.openPort() port: " + portParam);
    // Close a previously opened port
    if (port != null) {
      port.close();
    }
    // Open the new port
    try {
      portId = CommPortIdentifier.getPortIdentifier(portParam);
      port = (SerialPort) portId.open("SerialPortConnection", 2000);
      // 115200 baud, 8 data bits, 1 stop bit, no parity.
      port.setSerialPortParams(115200, SerialPort.DATABITS_8,
          SerialPort.STOPBITS_1, SerialPort.PARITY_NONE);
      outputStream = port.getOutputStream();
      outputWriter = new OutputStreamWriter(outputStream);
      inputReader = new BufferedReader(new InputStreamReader(port
          .getInputStream()));
      // Success
      portOpenAndReadyForUse = true;
      System.out
          .println("SerialPortConnection.openPort() port was opened successfully");
    } catch (NoSuchPortException ne) {
      System.out
          .println("SerialPortConnection.openPort() Error, Port Not Found " + ne);
      ErrorDialog err = new ErrorDialog(caller, "Port Not Found");
      err.addText("The port was not found: " + portParam);
      err.show();
    } catch (IOException ioe) {
      System.out
          .println("SerialPortConnection.openPort() Error, IOException" + ioe);
      ErrorDialog err = new ErrorDialog(caller, "Error, IOException");
      err.addText("There was a problem when trying to open the port: " + portParam);
      err.show();
    } catch (PortInUseException piue) {
      System.out
          .println("SerialPortConnection.openPort() Error, Port in Use " + piue);
      ErrorDialog err = new ErrorDialog(caller, "Error, Port in Use");
      err
          .addText("The port is already in use (by this or another program): "
              + portParam);
      err.show();
    } catch (UnsupportedCommOperationException ucoe) {
      System.out
          .println("SerialPortConnection.openPort() Error, Unsupported Operation " + ucoe);
      ErrorDialog err = new ErrorDialog(caller, "Error, Unsupported Operation");
      err.addText("The port does not support the operaton: " + portParam);
      err.show();
    } catch (Throwable t) {
      // Catch-all so an unexpected driver error still produces a dialog.
      System.out
          .println("SerialPortConnection.openPort() Error, Other Error " + t);
      ErrorDialog err = new ErrorDialog(caller, "Error, Other Error");
      err.addText("There was a problem when trying to open the port: " + portParam);
      err.show();
    }
  }

  /**
   * This is the procedure to listen for port activity and thereby
   * automatically select what port to use.
   *
   * <p>Opens every free serial port, registers this object as listener for
   * DATA_AVAILABLE events, shows a cancel dialog, then closes the temporary
   * ports. serialEvent() is expected to have opened the winning port in the
   * meantime.
   */
  public void listenForPort() {
    isWaitingOnEvent = true;
    portOpenAndReadyForUse = false;
    portsOpenWhileListening = new Vector();
    // Go through all available ports
    for (Enumeration e = CommPortIdentifier.getPortIdentifiers(); e
        .hasMoreElements();) {
      CommPortIdentifier portIdentifier = (CommPortIdentifier) e
          .nextElement();
      String name = portIdentifier.getName();
      System.out.println("SerialPortConnection.listenForPort() port found: " + name);
      // Can only use serial ports
      if (portIdentifier.getPortType() != CommPortIdentifier.PORT_SERIAL) {
        System.out
            .println("SerialPortConnection.listenForPort() non serial com port, excluding it: "
                + name);
        continue;
      }
      if (portIdentifier.isCurrentlyOwned()) {
        System.out.println("SerialPortConnection.listenForPort() port already owned, excluding it, owner: "
            + portIdentifier.getCurrentOwner());
        continue;
      }
      SerialPort serialPort = null;
      // Open the port
      try {
        serialPort = (SerialPort) portIdentifier.open(
            "BTGPSSimulator.listenForPort()", 2000);
      } catch (PortInUseException pe) {
        System.out
            .println("SerialPortConnection.listenForPort() port is already used by another application, excluding it: "
                + name);
        continue;
      } catch (ClassCastException cc) {
        System.out
            .println("SerialPortConnection.listenForPort() port was not a serial port after all, excluding it: "
                + name);
        continue;
      }
      // Start listening for events on the port
      if (serialPort != null) {
        try {
          serialPort.addEventListener(this); // This is first call in this
                                             // section in case of
                                             // exception
          serialPort.notifyOnDataAvailable(true); // This is the event
                                                  // that is specific
                                                  // when receiving a
                                                  // connection.
          portsOpenWhileListening.add(serialPort);
        } catch (TooManyListenersException te) {
          System.out
              .println("SerialPortConnection.listenForPort() there was already another listener for the port, excluding it: "
                  + name);
          serialPort.close();
          continue;
        }
      }
    }
    // If no ports found, then return
    if (portsOpenWhileListening.size() == 0) {
      System.out
          .println("SerialPortConnection.listenForPort() No available ports found");
      ErrorDialog errorDialog = new ErrorDialog(caller, "No Ports Found");
      errorDialog.addText("No available ports found.");
      errorDialog.show();
      return;
    }
    // Show a wait message
    // NOTE(review): presumably cancelDialog.show() blocks until the dialog is
    // dismissed (by the user or by serialEvent()), so the cleanup below runs
    // after the wait — can't confirm from here (CancelDialog is external).
    cancelDialog = new CancelDialog(caller, "Waiting for Connections");
    cancelDialog
        .addText("Please connect a device, e.g. select your computer as the Bluetooth GPS in WF Navigator.");
    cancelDialog.show();
    isWaitingOnEvent = false;
    // Close all temporary ports
    for (int i = 0; i < portsOpenWhileListening.size(); i++) {
      SerialPort serialPort = (SerialPort) portsOpenWhileListening.get(i);
      serialPort.close();
    }
    portsOpenWhileListening.clear();
    // Confirm that a connection has been made
    if (portOpenAndReadyForUse) {
      // Save as default port
      simulator.savePort(port.getName());
      ErrorDialog errorDialog = new ErrorDialog(caller, "Success!");
      errorDialog.addText("Successfully connected to port: " + port);
      errorDialog.show();
    }
  }

  /**
   * Listen for events on all open ports. This is part of the listenForPort()
   * procedure.
   *
   * <p>On the first DATA_AVAILABLE event while waiting: removes and closes
   * the matching temporary port, reopens it via openPort(), and dismisses
   * the wait dialog. All other events are logged and discarded.
   */
  public void serialEvent(SerialPortEvent event) {
    // Handle one event at a time
    synchronized (lock) {
      // Keep listening until the requested event has been received
      if (isWaitingOnEvent) {
        String portName = ((SerialPort) event.getSource()).getName();
        // This is the event we trigger on to detect a connection
        if (event.getEventType() == SerialPortEvent.DATA_AVAILABLE) {
          System.out
              .println("SerialPortConnection.serialEvent() data is available, we connect to the data sender: "
                  + portName);
          // Close all temporary ports
          for (int i = 0; i < portsOpenWhileListening.size(); i++) {
            SerialPort serialPort = (SerialPort) portsOpenWhileListening
                .get(i);
            if (serialPort.getName().equals(portName)) {
              // NOTE(review): removing by index while iterating skips the
              // element that shifts into slot i — harmless here only if port
              // names are unique; preserved as-is.
              portsOpenWhileListening.remove(i);
              serialPort.close();
            }
          }
          // Open the targeted port in the regular way
          openPort(portName);
          // Hide the wait dialog
          cancelDialog.dispose();
        } else {
          System.out
              .println("SerialPortConnection.serialEvent() an event was discarded from: "
                  + portName + ", type: " + event.getEventType());
        }
      }
    }
  }

  /**
   * The dialog to manually select a port from a list.
   */
  public void changePort() {
    TimeStamp = new java.util.Date().toString();
    portOpenAndReadyForUse = false;
    // Show dialog
    PortDialog dialog = new PortDialog(defaultPort, caller);
    dialog.show();
    // Get user input
    String portName = dialog.getPortName();
    // If a port is available
    if (portName != null) {
      // Save port to settings file
      simulator.savePort(portName);
      // Open the port
      openPort(portName);
    }
    return;
  }

  /**
   * @return when a connection has been opened, positions to the client should
   *         be sent here.
   */
  public Writer getWriter() {
    return outputWriter;
  }

  /**
   * Perform termination activities
   */
  public void terminate() {
    /**
     * If a port is already open, then close it to get it included in the next
     * listenForPort() activity.
     */
    if (port != null) {
      port.close();
    }
  }

  /**
   * Check if the port is open and ready for use.
   */
  public boolean isPortOpenAndReadyForUse() {
    return portOpenAndReadyForUse;
  }
}
package nxt.user; import nxt.Account; import nxt.Block; import nxt.BlockchainProcessor; import nxt.Constants; import nxt.Generator; import nxt.Nxt; import nxt.Transaction; import nxt.TransactionProcessor; import nxt.peer.Peer; import nxt.peer.Peers; import nxt.util.Convert; import nxt.util.Listener; import nxt.util.Logger; import nxt.util.ThreadPool; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.SecureRequestCustomizer; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.SslConnectionFactory; import org.eclipse.jetty.server.handler.ContextHandler; import org.eclipse.jetty.server.handler.DefaultHandler; import org.eclipse.jetty.server.handler.HandlerList; import org.eclipse.jetty.server.handler.ResourceHandler; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.FilterMapping; import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.servlets.CrossOriginFilter; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.JSONStreamAware; import java.math.BigInteger; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; public final class Users { private static final int TESTNET_UI_PORT=6875; private static final ConcurrentMap<String, User> users = new ConcurrentHashMap<>(); private static final Collection<User> allUsers = Collections.unmodifiableCollection(users.values()); private static final AtomicInteger peerCounter = new AtomicInteger(); private static final ConcurrentMap<String, Integer> 
peerIndexMap = new ConcurrentHashMap<>(); private static final ConcurrentMap<Integer, String> peerAddressMap = new ConcurrentHashMap<>(); private static final AtomicInteger blockCounter = new AtomicInteger(); private static final ConcurrentMap<Long, Integer> blockIndexMap = new ConcurrentHashMap<>(); private static final AtomicInteger transactionCounter = new AtomicInteger(); private static final ConcurrentMap<Long, Integer> transactionIndexMap = new ConcurrentHashMap<>(); static final Set<String> allowedUserHosts; private static final Server userServer; static { List<String> allowedUserHostsList = Nxt.getStringListProperty("nxt.allowedUserHosts"); if (! allowedUserHostsList.contains("*")) { allowedUserHosts = Collections.unmodifiableSet(new HashSet<>(allowedUserHostsList)); } else { allowedUserHosts = null; } boolean enableUIServer = Nxt.getBooleanProperty("nxt.enableUIServer"); if (enableUIServer) { final int port = Constants.isTestnet ? TESTNET_UI_PORT : Nxt.getIntProperty("nxt.uiServerPort"); final String host = Nxt.getStringProperty("nxt.uiServerHost"); userServer = new Server(); ServerConnector connector; boolean enableSSL = Nxt.getBooleanProperty("nxt.uiSSL"); if (enableSSL) { Logger.logMessage("Using SSL (https) for the user interface server"); HttpConfiguration https_config = new HttpConfiguration(); https_config.setSecureScheme("https"); https_config.setSecurePort(port); https_config.addCustomizer(new SecureRequestCustomizer()); SslContextFactory sslContextFactory = new SslContextFactory(); sslContextFactory.setKeyStorePath(Nxt.getStringProperty("nxt.keyStorePath")); sslContextFactory.setKeyStorePassword(Nxt.getStringProperty("nxt.keyStorePassword")); sslContextFactory.setExcludeCipherSuites("SSL_RSA_WITH_DES_CBC_SHA", "SSL_DHE_RSA_WITH_DES_CBC_SHA", "SSL_DHE_DSS_WITH_DES_CBC_SHA", "SSL_RSA_EXPORT_WITH_RC4_40_MD5", "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA", "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA", "SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA"); connector = new 
ServerConnector(userServer, new SslConnectionFactory(sslContextFactory, "http/1.1"), new HttpConnectionFactory(https_config)); } else { connector = new ServerConnector(userServer); } connector.setPort(port); connector.setHost(host); connector.setIdleTimeout(Nxt.getIntProperty("nxt.uiServerIdleTimeout")); userServer.addConnector(connector); HandlerList userHandlers = new HandlerList(); ResourceHandler userFileHandler = new ResourceHandler(); userFileHandler.setDirectoriesListed(false); userFileHandler.setWelcomeFiles(new String[]{"index.html"}); userFileHandler.setResourceBase(Nxt.getStringProperty("nxt.uiResourceBase")); userHandlers.addHandler(userFileHandler); String javadocResourceBase = Nxt.getStringProperty("nxt.javadocResourceBase"); if (javadocResourceBase != null) { ContextHandler contextHandler = new ContextHandler("/doc"); ResourceHandler docFileHandler = new ResourceHandler(); docFileHandler.setDirectoriesListed(false); docFileHandler.setWelcomeFiles(new String[]{"index.html"}); docFileHandler.setResourceBase(javadocResourceBase); contextHandler.setHandler(docFileHandler); userHandlers.addHandler(contextHandler); } ServletHandler userHandler = new ServletHandler(); ServletHolder userHolder = userHandler.addServletWithMapping(UserServlet.class, "/nxt"); userHolder.setAsyncSupported(true); if (Nxt.getBooleanProperty("nxt.uiServerCORS")) { FilterHolder filterHolder = userHandler.addFilterWithMapping(CrossOriginFilter.class, "/*", FilterMapping.DEFAULT); filterHolder.setInitParameter("allowedHeaders", "*"); filterHolder.setAsyncSupported(true); } userHandlers.addHandler(userHandler); userHandlers.addHandler(new DefaultHandler()); userServer.setHandler(userHandlers); userServer.setStopAtShutdown(true); ThreadPool.runBeforeStart(new Runnable() { @Override public void run() { try { userServer.start(); Logger.logMessage("Started user interface server at " + host + ":" + port); } catch (Exception e) { Logger.logDebugMessage("Failed to start user interface 
server", e); throw new RuntimeException(e.toString(), e); } } }); } else { userServer = null; Logger.logMessage("User interface server not enabled"); } } static { if (userServer != null) { Account.addListener(new Listener<Account>() { @Override public void notify(Account account) { JSONObject response = new JSONObject(); response.put("response", "setBalance"); response.put("balanceNQT", account.getUnconfirmedBalanceNQT()); byte[] accountPublicKey = account.getPublicKey(); for (User user : Users.users.values()) { if (user.getSecretPhrase() != null && Arrays.equals(user.getPublicKey(), accountPublicKey)) { user.send(response); } } } }, Account.Event.UNCONFIRMED_BALANCE); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray removedActivePeers = new JSONArray(); JSONObject removedActivePeer = new JSONObject(); removedActivePeer.put("index", Users.getIndex(peer)); removedActivePeers.add(removedActivePeer); response.put("removedActivePeers", removedActivePeers); JSONArray removedKnownPeers = new JSONArray(); JSONObject removedKnownPeer = new JSONObject(); removedKnownPeer.put("index", Users.getIndex(peer)); removedKnownPeers.add(removedKnownPeer); response.put("removedKnownPeers", removedKnownPeers); JSONArray addedBlacklistedPeers = new JSONArray(); JSONObject addedBlacklistedPeer = new JSONObject(); addedBlacklistedPeer.put("index", Users.getIndex(peer)); addedBlacklistedPeer.put("address", peer.getPeerAddress()); addedBlacklistedPeer.put("announcedAddress", Convert.truncate(peer.getAnnouncedAddress(), "-", 25, true)); if (peer.isWellKnown()) { addedBlacklistedPeer.put("wellKnown", true); } addedBlacklistedPeer.put("software", peer.getSoftware()); addedBlacklistedPeers.add(addedBlacklistedPeer); response.put("addedBlacklistedPeers", addedBlacklistedPeers); Users.sendNewDataToAll(response); } }, Peers.Event.BLACKLIST); Peers.addListener(new Listener<Peer>() { @Override public void
// Peers.Event.DEACTIVATE handler (continued from above): a peer leaving the active list is
// removed from "activePeers" and re-announced under "addedKnownPeers"; the UNBLACKLIST handler
// that follows mirrors the same move out of the blacklist. Both push to every connected user
// via Users.sendNewDataToAll.
notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray removedActivePeers = new JSONArray(); JSONObject removedActivePeer = new JSONObject(); removedActivePeer.put("index", Users.getIndex(peer)); removedActivePeers.add(removedActivePeer); response.put("removedActivePeers", removedActivePeers); JSONArray addedKnownPeers = new JSONArray(); JSONObject addedKnownPeer = new JSONObject(); addedKnownPeer.put("index", Users.getIndex(peer)); addedKnownPeer.put("address", peer.getPeerAddress()); addedKnownPeer.put("announcedAddress", Convert.truncate(peer.getAnnouncedAddress(), "-", 25, true)); if (peer.isWellKnown()) { addedKnownPeer.put("wellKnown", true); } addedKnownPeer.put("software", peer.getSoftware()); addedKnownPeers.add(addedKnownPeer); response.put("addedKnownPeers", addedKnownPeers); Users.sendNewDataToAll(response); } }, Peers.Event.DEACTIVATE); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray removedBlacklistedPeers = new JSONArray(); JSONObject removedBlacklistedPeer = new JSONObject(); removedBlacklistedPeer.put("index", Users.getIndex(peer)); removedBlacklistedPeers.add(removedBlacklistedPeer); response.put("removedBlacklistedPeers", removedBlacklistedPeers); JSONArray addedKnownPeers = new JSONArray(); JSONObject addedKnownPeer = new JSONObject(); addedKnownPeer.put("index", Users.getIndex(peer)); addedKnownPeer.put("address", peer.getPeerAddress()); addedKnownPeer.put("announcedAddress", Convert.truncate(peer.getAnnouncedAddress(), "-", 25, true)); if (peer.isWellKnown()) { addedKnownPeer.put("wellKnown", true); } addedKnownPeer.put("software", peer.getSoftware()); addedKnownPeers.add(addedKnownPeer); response.put("addedKnownPeers", addedKnownPeers); Users.sendNewDataToAll(response); } }, Peers.Event.UNBLACKLIST); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray
// Peers.Event.REMOVE handler (continued): drops the peer from the known-peers list. Then three
// near-identical handlers push single-field "changedActivePeers" updates for DOWNLOADED_VOLUME,
// UPLOADED_VOLUME and WEIGHT changes respectively.
removedKnownPeers = new JSONArray(); JSONObject removedKnownPeer = new JSONObject(); removedKnownPeer.put("index", Users.getIndex(peer)); removedKnownPeers.add(removedKnownPeer); response.put("removedKnownPeers", removedKnownPeers); Users.sendNewDataToAll(response); } }, Peers.Event.REMOVE); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray changedActivePeers = new JSONArray(); JSONObject changedActivePeer = new JSONObject(); changedActivePeer.put("index", Users.getIndex(peer)); changedActivePeer.put("downloaded", peer.getDownloadedVolume()); changedActivePeers.add(changedActivePeer); response.put("changedActivePeers", changedActivePeers); Users.sendNewDataToAll(response); } }, Peers.Event.DOWNLOADED_VOLUME); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray changedActivePeers = new JSONArray(); JSONObject changedActivePeer = new JSONObject(); changedActivePeer.put("index", Users.getIndex(peer)); changedActivePeer.put("uploaded", peer.getUploadedVolume()); changedActivePeers.add(changedActivePeer); response.put("changedActivePeers", changedActivePeers); Users.sendNewDataToAll(response); } }, Peers.Event.UPLOADED_VOLUME); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray changedActivePeers = new JSONArray(); JSONObject changedActivePeer = new JSONObject(); changedActivePeer.put("index", Users.getIndex(peer)); changedActivePeer.put("weight", peer.getWeight()); changedActivePeers.add(changedActivePeer); response.put("changedActivePeers", changedActivePeers); Users.sendNewDataToAll(response); } }, Peers.Event.WEIGHT); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray removedKnownPeers = new JSONArray(); JSONObject removedKnownPeer = new
// Peers.Event.ADDED_ACTIVE_PEER handler (continued): moves the peer out of "knownPeers" and into
// "addedActivePeers" with its full detail set (address, weight, traffic volumes, software); a
// "disconnected" flag is set when the peer is not in CONNECTED state. The CHANGED_ACTIVE_PEER
// handler that starts at the end of this line reports connect/disconnect flips.
JSONObject(); removedKnownPeer.put("index", Users.getIndex(peer)); removedKnownPeers.add(removedKnownPeer); response.put("removedKnownPeers", removedKnownPeers); JSONArray addedActivePeers = new JSONArray(); JSONObject addedActivePeer = new JSONObject(); addedActivePeer.put("index", Users.getIndex(peer)); if (peer.getState() != Peer.State.CONNECTED) { addedActivePeer.put("disconnected", true); } addedActivePeer.put("address", peer.getPeerAddress()); addedActivePeer.put("announcedAddress", Convert.truncate(peer.getAnnouncedAddress(), "-", 25, true)); if (peer.isWellKnown()) { addedActivePeer.put("wellKnown", true); } addedActivePeer.put("weight", peer.getWeight()); addedActivePeer.put("downloaded", peer.getDownloadedVolume()); addedActivePeer.put("uploaded", peer.getUploadedVolume()); addedActivePeer.put("software", peer.getSoftware()); addedActivePeers.add(addedActivePeer); response.put("addedActivePeers", addedActivePeers); Users.sendNewDataToAll(response); } }, Peers.Event.ADDED_ACTIVE_PEER); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray changedActivePeers = new JSONArray(); JSONObject changedActivePeer = new JSONObject(); changedActivePeer.put("index", Users.getIndex(peer)); changedActivePeer.put(peer.getState() == Peer.State.CONNECTED ?
// CHANGED_ACTIVE_PEER payload (continued), then the NEW_PEER handler (announces a freshly seen
// peer under "addedKnownPeers") and the first two transaction-processor listeners, which relay
// removed/added unconfirmed transactions to the UI.
"connected" : "disconnected", true); changedActivePeer.put("announcedAddress", Convert.truncate(peer.getAnnouncedAddress(), "-", 25, true)); if (peer.isWellKnown()) { changedActivePeer.put("wellKnown", true); } changedActivePeers.add(changedActivePeer); response.put("changedActivePeers", changedActivePeers); Users.sendNewDataToAll(response); } }, Peers.Event.CHANGED_ACTIVE_PEER); Peers.addListener(new Listener<Peer>() { @Override public void notify(Peer peer) { JSONObject response = new JSONObject(); JSONArray addedKnownPeers = new JSONArray(); JSONObject addedKnownPeer = new JSONObject(); addedKnownPeer.put("index", Users.getIndex(peer)); addedKnownPeer.put("address", peer.getPeerAddress()); addedKnownPeer.put("announcedAddress", Convert.truncate(peer.getAnnouncedAddress(), "-", 25, true)); if (peer.isWellKnown()) { addedKnownPeer.put("wellKnown", true); } addedKnownPeer.put("software", peer.getSoftware()); addedKnownPeers.add(addedKnownPeer); response.put("addedKnownPeers", addedKnownPeers); Users.sendNewDataToAll(response); } }, Peers.Event.NEW_PEER); Nxt.getTransactionProcessor().addListener(new Listener<List<Transaction>>() { @Override public void notify(List<Transaction> transactions) { JSONObject response = new JSONObject(); JSONArray removedUnconfirmedTransactions = new JSONArray(); for (Transaction transaction : transactions) { JSONObject removedUnconfirmedTransaction = new JSONObject(); removedUnconfirmedTransaction.put("index", Users.getIndex(transaction)); removedUnconfirmedTransactions.add(removedUnconfirmedTransaction); } response.put("removedUnconfirmedTransactions", removedUnconfirmedTransactions); Users.sendNewDataToAll(response); } }, TransactionProcessor.Event.REMOVED_UNCONFIRMED_TRANSACTIONS); Nxt.getTransactionProcessor().addListener(new Listener<List<Transaction>>() { @Override public void notify(List<Transaction> transactions) { JSONObject response = new JSONObject(); JSONArray addedUnconfirmedTransactions = new JSONArray(); for (Transaction
// ADDED_UNCONFIRMED_TRANSACTIONS payload (continued): per-transaction timestamp, deadline,
// recipient/sender (as unsigned-long strings via Convert.toUnsignedLong), amountNQT, feeNQT and
// id. The ADDED_CONFIRMED_TRANSACTIONS listener afterwards additionally carries blockTimestamp.
transaction : transactions) { JSONObject addedUnconfirmedTransaction = new JSONObject(); addedUnconfirmedTransaction.put("index", Users.getIndex(transaction)); addedUnconfirmedTransaction.put("timestamp", transaction.getTimestamp()); addedUnconfirmedTransaction.put("deadline", transaction.getDeadline()); addedUnconfirmedTransaction.put("recipient", Convert.toUnsignedLong(transaction.getRecipientId())); addedUnconfirmedTransaction.put("amountNQT", transaction.getAmountNQT()); addedUnconfirmedTransaction.put("feeNQT", transaction.getFeeNQT()); addedUnconfirmedTransaction.put("sender", Convert.toUnsignedLong(transaction.getSenderId())); addedUnconfirmedTransaction.put("id", transaction.getStringId()); addedUnconfirmedTransactions.add(addedUnconfirmedTransaction); } response.put("addedUnconfirmedTransactions", addedUnconfirmedTransactions); Users.sendNewDataToAll(response); } }, TransactionProcessor.Event.ADDED_UNCONFIRMED_TRANSACTIONS); Nxt.getTransactionProcessor().addListener(new Listener<List<Transaction>>() { @Override public void notify(List<Transaction> transactions) { JSONObject response = new JSONObject(); JSONArray addedConfirmedTransactions = new JSONArray(); for (Transaction transaction : transactions) { JSONObject addedConfirmedTransaction = new JSONObject(); addedConfirmedTransaction.put("index", Users.getIndex(transaction)); addedConfirmedTransaction.put("blockTimestamp", transaction.getBlockTimestamp()); addedConfirmedTransaction.put("transactionTimestamp", transaction.getTimestamp()); addedConfirmedTransaction.put("sender", Convert.toUnsignedLong(transaction.getSenderId())); addedConfirmedTransaction.put("recipient", Convert.toUnsignedLong(transaction.getRecipientId())); addedConfirmedTransaction.put("amountNQT", transaction.getAmountNQT()); addedConfirmedTransaction.put("feeNQT", transaction.getFeeNQT()); addedConfirmedTransaction.put("id", transaction.getStringId()); addedConfirmedTransactions.add(addedConfirmedTransaction); }
// ADDED_DOUBLESPENDING_TRANSACTIONS listener: same per-transaction fields but published under
// "addedDoubleSpendingTransactions". Then the BLOCK_POPPED listener starts, describing the
// popped (orphaned) block: counts, NQT totals, payload length, generator, height.
response.put("addedConfirmedTransactions", addedConfirmedTransactions); Users.sendNewDataToAll(response); } }, TransactionProcessor.Event.ADDED_CONFIRMED_TRANSACTIONS); Nxt.getTransactionProcessor().addListener(new Listener<List<Transaction>>() { @Override public void notify(List<Transaction> transactions) { JSONObject response = new JSONObject(); JSONArray newTransactions = new JSONArray(); for (Transaction transaction : transactions) { JSONObject newTransaction = new JSONObject(); newTransaction.put("index", Users.getIndex(transaction)); newTransaction.put("timestamp", transaction.getTimestamp()); newTransaction.put("deadline", transaction.getDeadline()); newTransaction.put("recipient", Convert.toUnsignedLong(transaction.getRecipientId())); newTransaction.put("amountNQT", transaction.getAmountNQT()); newTransaction.put("feeNQT", transaction.getFeeNQT()); newTransaction.put("sender", Convert.toUnsignedLong(transaction.getSenderId())); newTransaction.put("id", transaction.getStringId()); newTransactions.add(newTransaction); } response.put("addedDoubleSpendingTransactions", newTransactions); Users.sendNewDataToAll(response); } }, TransactionProcessor.Event.ADDED_DOUBLESPENDING_TRANSACTIONS); Nxt.getBlockchainProcessor().addListener(new Listener<Block>() { @Override public void notify(Block block) { JSONObject response = new JSONObject(); JSONArray addedOrphanedBlocks = new JSONArray(); JSONObject addedOrphanedBlock = new JSONObject(); addedOrphanedBlock.put("index", Users.getIndex(block)); addedOrphanedBlock.put("timestamp", block.getTimestamp()); addedOrphanedBlock.put("numberOfTransactions", block.getTransactionIds().size()); addedOrphanedBlock.put("totalAmountNQT", block.getTotalAmountNQT()); addedOrphanedBlock.put("totalFeeNQT", block.getTotalFeeNQT()); addedOrphanedBlock.put("payloadLength", block.getPayloadLength()); addedOrphanedBlock.put("generator", Convert.toUnsignedLong(block.getGeneratorId())); addedOrphanedBlock.put("height", block.getHeight());
// BLOCK_POPPED payload (continued): "baseTarget" is reported as a percentage-style ratio,
// baseTarget * 100000 / INITIAL_BASE_TARGET, computed in BigInteger to avoid long overflow.
// The BLOCK_PUSHED listener that follows builds the identical field set under
// "addedRecentBlocks", and the Generator listener pushes "setBlockGenerationDeadline" only to
// users whose public key matches the forging account.
addedOrphanedBlock.put("version", block.getVersion()); addedOrphanedBlock.put("block", block.getStringId()); addedOrphanedBlock.put("baseTarget", BigInteger.valueOf(block.getBaseTarget()).multiply(BigInteger.valueOf(100000)).divide(BigInteger.valueOf(Constants.INITIAL_BASE_TARGET))); addedOrphanedBlocks.add(addedOrphanedBlock); response.put("addedOrphanedBlocks", addedOrphanedBlocks); Users.sendNewDataToAll(response); } }, BlockchainProcessor.Event.BLOCK_POPPED); Nxt.getBlockchainProcessor().addListener(new Listener<Block>() { @Override public void notify(Block block) { JSONObject response = new JSONObject(); JSONArray addedRecentBlocks = new JSONArray(); JSONObject addedRecentBlock = new JSONObject(); addedRecentBlock.put("index", Users.getIndex(block)); addedRecentBlock.put("timestamp", block.getTimestamp()); addedRecentBlock.put("numberOfTransactions", block.getTransactionIds().size()); addedRecentBlock.put("totalAmountNQT", block.getTotalAmountNQT()); addedRecentBlock.put("totalFeeNQT", block.getTotalFeeNQT()); addedRecentBlock.put("payloadLength", block.getPayloadLength()); addedRecentBlock.put("generator", Convert.toUnsignedLong(block.getGeneratorId())); addedRecentBlock.put("height", block.getHeight()); addedRecentBlock.put("version", block.getVersion()); addedRecentBlock.put("block", block.getStringId()); addedRecentBlock.put("baseTarget", BigInteger.valueOf(block.getBaseTarget()).multiply(BigInteger.valueOf(100000)).divide(BigInteger.valueOf(Constants.INITIAL_BASE_TARGET))); addedRecentBlocks.add(addedRecentBlock); response.put("addedRecentBlocks", addedRecentBlocks); Users.sendNewDataToAll(response); } }, BlockchainProcessor.Event.BLOCK_PUSHED); Generator.addListener(new Listener<Generator>() { @Override public void notify(Generator generator) { JSONObject response = new JSONObject(); response.put("response", "setBlockGenerationDeadline"); response.put("deadline", generator.getDeadline()); for (User user : users.values()) { if
// End of the listener registrations, followed by the small static helpers of Users:
// getUser lazily creates a User per id via putIfAbsent (race-safe: the loser adopts the winner's
// instance); getIndex(Peer/Block/Transaction) hand out stable 1-based display indexes from
// counter+map pairs (getIndex(Peer) also maintains the reverse index->address map used by
// getPeer); sendNewDataToAll tags the payload "processNewData" and broadcasts; shutdown stops
// the Jetty server best-effort, logging failures at debug level.
// NOTE(review): the trailing "// never" comment on this line swallows the final '}' of the
// class — presumably a line-wrapping/extraction artifact of this copy; verify against the
// original file before compiling.
(Arrays.equals(generator.getPublicKey(), user.getPublicKey())) { user.send(response); } } } }, Generator.Event.GENERATION_DEADLINE); } } static Collection<User> getAllUsers() { return allUsers; } static User getUser(String userId) { User user = users.get(userId); if (user == null) { user = new User(userId); User oldUser = users.putIfAbsent(userId, user); if (oldUser != null) { user = oldUser; user.setInactive(false); } } else { user.setInactive(false); } return user; } static User remove(User user) { return users.remove(user.getUserId()); } private static void sendNewDataToAll(JSONObject response) { response.put("response", "processNewData"); sendToAll(response); } private static void sendToAll(JSONStreamAware response) { for (User user : users.values()) { user.send(response); } } static int getIndex(Peer peer) { Integer index = peerIndexMap.get(peer.getPeerAddress()); if (index == null) { index = peerCounter.incrementAndGet(); peerIndexMap.put(peer.getPeerAddress(), index); peerAddressMap.put(index, peer.getPeerAddress()); } return index; } static Peer getPeer(int index) { String peerAddress = peerAddressMap.get(index); if (peerAddress == null) { return null; } return Peers.getPeer(peerAddress); } static int getIndex(Block block) { Integer index = blockIndexMap.get(block.getId()); if (index == null) { index = blockCounter.incrementAndGet(); blockIndexMap.put(block.getId(), index); } return index; } static int getIndex(Transaction transaction) { Integer index = transactionIndexMap.get(transaction.getId()); if (index == null) { index = transactionCounter.incrementAndGet(); transactionIndexMap.put(transaction.getId(), index); } return index; } public static void init() {} public static void shutdown() { if (userServer != null) { try { userServer.stop(); } catch (Exception e) { Logger.logDebugMessage("Failed to stop user interface server", e); } } } private Users() {} // never }
/* * Copyright 2015-2017 Spotify AB * Copyright 2016-2019 The Last Pickle Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.cassandrareaper.resources; import io.cassandrareaper.AppContext; import io.cassandrareaper.ReaperException; import io.cassandrareaper.core.Cluster; import io.cassandrareaper.core.Table; import io.cassandrareaper.crypto.Cryptograph; import io.cassandrareaper.crypto.NoopCrypotograph; import io.cassandrareaper.jmx.ClusterFacade; import io.cassandrareaper.jmx.JmxConnectionFactory; import io.cassandrareaper.jmx.JmxProxy; import io.cassandrareaper.service.TestRepairConfiguration; import io.cassandrareaper.storage.MemoryStorage; import java.net.URI; import java.time.Duration; import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.Set; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriInfo; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.lang3.RandomStringUtils; import org.assertj.core.api.Assertions; import org.eclipse.jetty.http.HttpStatus; import org.junit.Test; import org.mockito.Mockito; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doReturn; import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public final class ClusterResourceTest { static final String CLUSTER_NAME = "testcluster"; static final String PARTITIONER = "org.apache.cassandra.dht.RandomPartitioner"; static final String SEED_HOST = "TestHost"; static final URI SAMPLE_URI = URI.create("http://reaper_host/cluster/"); static final String I_DO_EXIST = "i_do_exist"; static final String I_DONT_EXIST = "i_dont_exist"; static final String JMX_USERNAME = "foo"; static final String JMX_PASSWORD = "bar"; private static final String STCS = "SizeTieredCompactionStrategy"; @Test public void testAddCluster() throws Exception { final MockObjects mocks = initMocks(); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph); Response response = clusterResource .addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.CREATED_201); assertEquals(1, mocks.context.storage.getClusters().size()); Cluster cluster = mocks.context.storage.getCluster(CLUSTER_NAME); assertNotNull("Did not find expected cluster", cluster); assertEquals(0, mocks.context.storage.getRepairRunsForCluster(cluster.getName(), Optional.of(1)).size()); assertEquals(CLUSTER_NAME, cluster.getName()); assertEquals(1, cluster.getSeedHosts().size()); assertEquals(SEED_HOST, cluster.getSeedHosts().iterator().next()); assertTrue(cluster.getJmxCredentials().isPresent()); assertEquals(JMX_USERNAME, cluster.getJmxCredentials().get().getUsername()); assertNotEquals(JMX_PASSWORD, cluster.getJmxCredentials().get().getPassword()); } @Test public void testAddExistingCluster() throws Exception { final MockObjects mocks = initMocks(); when(mocks.jmxProxy.getLiveNodes()).thenReturn(Arrays.asList(SEED_HOST)); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withPartitioner(PARTITIONER)
// testAddExistingCluster (continued): re-adding an already stored cluster must answer
// 204 No Content with a Location ending in /cluster/<name>, and must not create a second
// cluster entry. testAddExistingClusterWithClusterName repeats this via the overload that
// takes the cluster name explicitly.
.withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph); Response response = clusterResource .addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.NO_CONTENT_204); assertTrue(response.getLocation().toString().endsWith("/cluster/" + cluster.getName())); assertEquals(1, mocks.context.storage.getClusters().size()); cluster = mocks.context.storage.getCluster(CLUSTER_NAME); assertNotNull("Did not find expected cluster", cluster); assertEquals(0, mocks.context.storage.getRepairRunsForCluster(cluster.getName(), Optional.of(1)).size()); assertEquals(CLUSTER_NAME, cluster.getName()); assertEquals(1, cluster.getSeedHosts().size()); assertEquals(SEED_HOST, cluster.getSeedHosts().iterator().next()); } @Test public void testAddExistingClusterWithClusterName() throws Exception { final MockObjects mocks = initMocks(); when(mocks.jmxProxy.getLiveNodes()).thenReturn(Arrays.asList(SEED_HOST)); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withPartitioner(PARTITIONER) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph); Response response = clusterResource.addOrUpdateCluster( mocks.uriInfo, CLUSTER_NAME, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.NO_CONTENT_204); assertTrue(response.getLocation().toString().endsWith("/cluster/" + cluster.getName())); assertEquals(1, mocks.context.storage.getClusters().size()); cluster =
// testAddExistingClusterWithClusterName assertions (continued). Then:
// - testFailAddingJmxCredentialsWithoutEncryptionConfigured: passing JMX credentials while the
//   resource is built with NoopCrypotograph must yield 400 Bad Request.
// - testGetNonExistingCluster: GET for an unknown name yields 404.
// - testGetExistingCluster begins: a stored ACTIVE cluster is retrievable.
mocks.context.storage.getCluster(CLUSTER_NAME); assertNotNull("Did not find expected cluster", cluster); assertEquals(0, mocks.context.storage.getRepairRunsForCluster(cluster.getName(), Optional.of(1)).size()); assertEquals(CLUSTER_NAME, cluster.getName()); assertEquals(1, cluster.getSeedHosts().size()); assertEquals(SEED_HOST, cluster.getSeedHosts().iterator().next()); } @Test public void testFailAddingJmxCredentialsWithoutEncryptionConfigured() throws Exception { final MockObjects mocks = initMocks(); ClusterFacade clusterFacade = mock(ClusterFacade.class); ClusterResource clusterResource = ClusterResource.create(mocks.context, new NoopCrypotograph(), () -> clusterFacade); Response response = clusterResource.addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), response.getStatus()); } @Test public void testGetNonExistingCluster() throws ReaperException { final MockObjects mocks = initMocks(); when(mocks.jmxProxy.getLiveNodes()).thenReturn(Arrays.asList(SEED_HOST)); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response = clusterResource.getCluster(I_DONT_EXIST, Optional.<Integer>empty()); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.NOT_FOUND_404); } @Test public void testGetExistingCluster() throws ReaperException { final MockObjects mocks = initMocks(); when(mocks.jmxProxy.getLiveNodes()).thenReturn(Arrays.asList(SEED_HOST)); Cluster cluster = Cluster.builder() .withName(I_DO_EXIST) .withPartitioner(PARTITIONER) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response =
// testGetExistingCluster expects 200 (continued). testGetClusters_all lists every cluster when
// no seed-host filter is given; testGetClusters_specified filters the listing by seed host;
// testGetClusters_only_specified_first begins, storing two clusters to check the filter picks
// only the matching one.
clusterResource.getCluster(I_DO_EXIST, Optional.<Integer>empty()); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); } @Test public void testGetClusters_all() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response = clusterResource.getClusterList(Optional.empty()); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); List<String> clusterNames = (List<String>) response.getEntity(); Assertions.assertThat(clusterNames).hasSize(1); Assertions.assertThat(clusterNames).contains(CLUSTER_NAME); } @Test public void testGetClusters_specified() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response = clusterResource.getClusterList(Optional.of(SEED_HOST)); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); List<String> clusterNames = (List<String>) response.getEntity(); Assertions.assertThat(clusterNames).hasSize(1); Assertions.assertThat(clusterNames).contains(CLUSTER_NAME); } @Test public void testGetClusters_only_specified_first() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); cluster = Cluster.builder() .withName("cluster2")
// testGetClusters_only_specified_first (continued) and _second: with two clusters stored, the
// seed-host filter must return exactly the matching cluster in each direction.
// testGetClusters_multiple begins: the unfiltered listing contains both.
.withSeedHosts(ImmutableSet.of("host2")) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response = clusterResource.getClusterList(Optional.of(SEED_HOST)); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); List<String> clusterNames = (List<String>) response.getEntity(); Assertions.assertThat(clusterNames).hasSize(1); Assertions.assertThat(clusterNames).contains(CLUSTER_NAME); } @Test public void testGetClusters_only_specified_second() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); cluster = Cluster.builder() .withName("cluster2") .withSeedHosts(ImmutableSet.of("host2")) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response = clusterResource.getClusterList(Optional.of("host2")); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); List<String> clusterNames = (List<String>) response.getEntity(); Assertions.assertThat(clusterNames).hasSize(1); Assertions.assertThat(clusterNames).contains("cluster2"); } @Test public void testGetClusters_multiple() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); cluster = Cluster.builder() .withName("cluster2") .withSeedHosts(ImmutableSet.of("host2")) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource
// testGetClusters_multiple assertions (continued). testGetClusters_multiple_ordered_by_name:
// with names "abc" and "testcluster" the listing is alphabetical ("abc" first).
// testGetClusters_multiple_ordered_by_state begins: an UNREACHABLE cluster is listed after an
// ACTIVE one regardless of name order.
clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response = clusterResource.getClusterList(Optional.empty()); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); List<String> clusterNames = (List<String>) response.getEntity(); Assertions.assertThat(clusterNames).hasSize(2); Assertions.assertThat(clusterNames).contains(CLUSTER_NAME, "cluster2"); } @Test public void testGetClusters_multiple_ordered_by_name() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); cluster = Cluster.builder() .withName("abc") .withSeedHosts(ImmutableSet.of("host2")) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response response = clusterResource.getClusterList(Optional.empty()); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); List<String> clusterNames = (List<String>) response.getEntity(); Assertions.assertThat(clusterNames).hasSize(2); Assertions.assertThat(clusterNames).containsExactly("abc", CLUSTER_NAME); } @Test public void testGetClusters_multiple_ordered_by_state() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.UNREACHABLE) .build(); mocks.context.storage.addCluster(cluster); cluster = Cluster.builder() .withName("cluster2") .withSeedHosts(ImmutableSet.of("host2")) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph, () -> mocks.clusterFacade); Response
// State-ordering assertions (continued). Storage contract tests: adding a cluster without a
// state throws IllegalStateException; adding two clusters with the same name throws
// IllegalArgumentException (both declared via @Test(expected=...)). testModifyClusterSeeds
// begins: after registering SEED_HOST, the JMX mock reports an extra live node and the cluster
// is re-submitted with the new seed.
response = clusterResource.getClusterList(Optional.empty()); Assertions.assertThat(response.getStatus()).isEqualTo(HttpStatus.OK_200); List<String> clusterNames = (List<String>) response.getEntity(); Assertions.assertThat(clusterNames).hasSize(2); Assertions.assertThat(clusterNames).containsExactly("cluster2", CLUSTER_NAME); } @Test(expected = IllegalStateException.class) public void testGetClusters_fail_persisting_unknown() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .build(); mocks.context.storage.addCluster(cluster); } @Test(expected = IllegalArgumentException.class) public void testGetClusters_fail_persisting_two_clusters_same_name() throws ReaperException { final MockObjects mocks = initMocks(); Cluster cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of(SEED_HOST)) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); cluster = Cluster.builder() .withName(CLUSTER_NAME) .withSeedHosts(ImmutableSet.of("test_host_2")) .withState(Cluster.State.ACTIVE) .build(); mocks.context.storage.addCluster(cluster); } @Test public void testModifyClusterSeeds() throws ReaperException { final MockObjects mocks = initMocks(); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph); clusterResource.addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); doReturn(Arrays.asList(SEED_HOST + 1, SEED_HOST)).when(mocks.jmxProxy).getLiveNodes(); Response response = clusterResource .addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST + 1), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); assertEquals(HttpStatus.OK_200, response.getStatus()); assertTrue(response.getLocation().toString().endsWith("/cluster/" + CLUSTER_NAME));
// testModifyClusterSeeds (continued): the single stored cluster now carries both seed hosts;
// re-submitting identical seeds is a 204 no-op. testModifyClusterSeedsWithClusterName repeats
// the same flow through the overload that names the cluster explicitly.
assertEquals(1, mocks.context.storage.getClusters().size()); Cluster cluster = mocks.context.storage.getCluster(CLUSTER_NAME); Assertions.assertThat(cluster.getSeedHosts()).hasSize(2); Assertions.assertThat(cluster.getSeedHosts()).contains(SEED_HOST + 1); response = clusterResource.addOrUpdateCluster( mocks.uriInfo, CLUSTER_NAME, Optional.of(SEED_HOST + 1), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); assertEquals(HttpStatus.NO_CONTENT_204, response.getStatus()); assertTrue(response.getLocation().toString().endsWith("/cluster/" + cluster.getName())); } @Test public void testModifyClusterSeedsWithClusterName() throws ReaperException { final MockObjects mocks = initMocks(); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph); clusterResource.addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); doReturn(Arrays.asList(SEED_HOST + 1, SEED_HOST)).when(mocks.jmxProxy).getLiveNodes(); Response response = clusterResource.addOrUpdateCluster( mocks.uriInfo, CLUSTER_NAME, Optional.of(SEED_HOST + 1), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); assertEquals(HttpStatus.OK_200, response.getStatus()); assertTrue(response.getLocation().toString().endsWith("/cluster/" + CLUSTER_NAME)); assertEquals(1, mocks.context.storage.getClusters().size()); Cluster cluster = mocks.context.storage.getCluster(CLUSTER_NAME); Assertions.assertThat(cluster.getSeedHosts()).hasSize(2); Assertions.assertThat(cluster.getSeedHosts()).contains(SEED_HOST + 1); response = clusterResource.addOrUpdateCluster( mocks.uriInfo, CLUSTER_NAME, Optional.of(SEED_HOST + 1), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); assertEquals(HttpStatus.NO_CONTENT_204, response.getStatus());
// addingAClusterAutomaticallySetupSchedulingRepairsWhenEnabled: with auto-scheduling enabled in
// config and the JMX mock reporting keyspace1/table1, adding a cluster (201) must create exactly
// one repair schedule for that keyspace. testClusterDeleting begins with the same setup.
assertTrue(response.getLocation().toString().endsWith("/cluster/" + cluster.getName())); } @Test public void addingAClusterAutomaticallySetupSchedulingRepairsWhenEnabled() throws Exception { final MockObjects mocks = initMocks(); when(mocks.jmxProxy.getLiveNodes()).thenReturn(Arrays.asList(SEED_HOST)); when(mocks.jmxProxy.getKeyspaces()).thenReturn(Lists.newArrayList("keyspace1")); when(mocks.jmxProxy.getTablesForKeyspace("keyspace1")) .thenReturn(Sets.newHashSet(Table.builder().withName("table1").withCompactionStrategy(STCS).build())); mocks.context.config = TestRepairConfiguration.defaultConfigBuilder() .withAutoScheduling( TestRepairConfiguration.defaultAutoSchedulingConfigBuilder() .thatIsEnabled() .withTimeBeforeFirstSchedule(Duration.ofMinutes(1)) .build()) .build(); ClusterResource clusterResource = ClusterResource.create(mocks.context, mocks.cryptograph); Response response = clusterResource .addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); assertEquals(HttpStatus.CREATED_201, response.getStatus()); assertEquals(1, mocks.context.storage.getAllRepairSchedules().size()); assertEquals(1, mocks.context.storage.getRepairSchedulesForClusterAndKeyspace(CLUSTER_NAME, "keyspace1").size()); } @Test public void testClusterDeleting() throws Exception { final MockObjects mocks = initMocks(); when(mocks.jmxProxy.getLiveNodes()).thenReturn(Arrays.asList(SEED_HOST)); when(mocks.jmxProxy.getKeyspaces()).thenReturn(Lists.newArrayList("keyspace1")); when(mocks.jmxProxy.getTablesForKeyspace("keyspace1")) .thenReturn(Sets.newHashSet(Table.builder().withName("table1").withCompactionStrategy(STCS).build())); mocks.context.config = TestRepairConfiguration.defaultConfigBuilder() .withAutoScheduling( TestRepairConfiguration.defaultAutoSchedulingConfigBuilder() .thatIsEnabled() .withTimeBeforeFirstSchedule(Duration.ofMinutes(1)) .build()) .build(); ClusterResource clusterResource =
// testClusterDeleting (continued): while repair schedules exist, delete without force (or with
// force=false) answers 409 Conflict; force=true answers 202 Accepted. The parse tests cover
// ClusterResource.parseSeedHosts (whitespace-tolerant comma split, "@clusterName" suffix
// stripped) and parseClusterNameFromSeedHost (extracts the name after '@').
// NOTE(review): initMocks() is truncated at the end of this excerpt (cut mid-statement at
// "context.config ="); the remainder lies outside the visible window.
ClusterResource.create(mocks.context, mocks.cryptograph); Response response = clusterResource .addOrUpdateCluster(mocks.uriInfo, Optional.of(SEED_HOST), Optional.of(Cluster.DEFAULT_JMX_PORT), Optional.of(JMX_USERNAME), Optional.of(JMX_PASSWORD)); assertEquals(HttpStatus.CREATED_201, response.getStatus()); assertEquals(1, mocks.context.storage.getAllRepairSchedules().size()); assertEquals(1, mocks.context.storage.getRepairSchedulesForClusterAndKeyspace(CLUSTER_NAME, "keyspace1").size()); assertEquals(HttpStatus.CONFLICT_409, clusterResource.deleteCluster(CLUSTER_NAME, Optional.empty()).getStatus()); assertEquals( HttpStatus.CONFLICT_409, clusterResource.deleteCluster(CLUSTER_NAME, Optional.of(Boolean.FALSE)).getStatus()); assertEquals( HttpStatus.ACCEPTED_202, clusterResource.deleteCluster(CLUSTER_NAME, Optional.of(Boolean.TRUE)).getStatus()); } @Test public void testParseSeedHost() { String seedHostStringList = "127.0.0.1 , 127.0.0.2, 127.0.0.3"; Set<String> seedHostSet = ClusterResource.parseSeedHosts(seedHostStringList); Set<String> seedHostExpectedSet = Sets.newHashSet("127.0.0.2", "127.0.0.1", "127.0.0.3"); assertEquals(seedHostSet, seedHostExpectedSet); } @Test public void testParseSeedHostWithClusterName() { String seedHostStringList = "127.0.0.1@cluster1 , 127.0.0.2@cluster1, 127.0.0.3@cluster1"; Set<String> seedHostSet = ClusterResource.parseSeedHosts(seedHostStringList); Set<String> seedHostExpectedSet = Sets.newHashSet("127.0.0.2", "127.0.0.1", "127.0.0.3"); assertEquals(seedHostSet, seedHostExpectedSet); } @Test public void testParseClusterNameInSeedHost() { String seedHostStringList = "127.0.0.1@cluster one , 127.0.0.2@cluster one, 127.0.0.3@cluster one"; Optional<String> clusterName = ClusterResource.parseClusterNameFromSeedHost(seedHostStringList); assertEquals("cluster one", clusterName.get()); } private MockObjects initMocks() throws ReaperException { AppContext context = new AppContext(); context.storage = new MemoryStorage(); context.config =
TestRepairConfiguration.defaultConfig(); UriInfo uriInfo = mock(UriInfo.class); when(uriInfo.getBaseUriBuilder()).thenReturn(UriBuilder.fromUri(SAMPLE_URI)); JmxProxy jmxProxy = mock(JmxProxy.class); when(jmxProxy.getClusterName()).thenReturn(CLUSTER_NAME); when(jmxProxy.getPartitioner()).thenReturn(PARTITIONER); context.jmxConnectionFactory = mock(JmxConnectionFactory.class); when(context.jmxConnectionFactory.connectAny(Mockito.anyCollection())).thenReturn(jmxProxy); Cryptograph cryptograph = mock(Cryptograph.class); when(cryptograph.encrypt(any(String.class))).thenReturn(RandomStringUtils.randomNumeric(10)); ClusterFacade clusterFacade = mock(ClusterFacade.class); return new MockObjects(context, cryptograph, uriInfo, jmxProxy, clusterFacade); } private static final class MockObjects { final AppContext context; final Cryptograph cryptograph; final UriInfo uriInfo; final JmxProxy jmxProxy; final ClusterFacade clusterFacade; MockObjects( AppContext context, Cryptograph cryptograph, UriInfo uriInfo, JmxProxy jmxProxy, ClusterFacade clusterFacade) { super(); this.context = context; this.cryptograph = cryptograph; this.uriInfo = uriInfo; this.jmxProxy = jmxProxy; this.clusterFacade = clusterFacade; } } }
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.testutil;

import android.os.Looper;
import android.support.annotation.Nullable;
import android.view.Surface;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.PlayerMessage;
import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.testutil.Action.ClearVideoSurface;
import com.google.android.exoplayer2.testutil.Action.ExecuteRunnable;
import com.google.android.exoplayer2.testutil.Action.PlayUntilPosition;
import com.google.android.exoplayer2.testutil.Action.PrepareSource;
import com.google.android.exoplayer2.testutil.Action.Seek;
import com.google.android.exoplayer2.testutil.Action.SendMessages;
import com.google.android.exoplayer2.testutil.Action.SetPlayWhenReady;
import com.google.android.exoplayer2.testutil.Action.SetPlaybackParameters;
import com.google.android.exoplayer2.testutil.Action.SetRendererDisabled;
import com.google.android.exoplayer2.testutil.Action.SetRepeatMode;
import com.google.android.exoplayer2.testutil.Action.SetShuffleModeEnabled;
import com.google.android.exoplayer2.testutil.Action.SetVideoSurface;
import com.google.android.exoplayer2.testutil.Action.Stop;
import com.google.android.exoplayer2.testutil.Action.ThrowPlaybackException;
import com.google.android.exoplayer2.testutil.Action.WaitForPlaybackState;
import com.google.android.exoplayer2.testutil.Action.WaitForPositionDiscontinuity;
import com.google.android.exoplayer2.testutil.Action.WaitForSeekProcessed;
import com.google.android.exoplayer2.testutil.Action.WaitForTimelineChanged;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.HandlerWrapper;

/**
 * Schedules a sequence of {@link Action}s for execution during a test.
 *
 * <p>Build a schedule with {@link Builder}; the test harness then calls {@link #start} to run the
 * actions (each {@link ActionNode} executes its action and schedules its successor).
 */
public final class ActionSchedule {

  /**
   * Callback to notify listener that the action schedule has finished.
   */
  public interface Callback {

    /**
     * Called when action schedule finished executing all its actions.
     */
    void onActionScheduleFinished();
  }

  // First node of the linked action list (a no-op RootAction appended by Builder).
  private final ActionNode rootNode;
  // Final action in the list; fires the optional finished-Callback when reached.
  private final CallbackAction callbackAction;

  /**
   * @param rootNode The first node in the sequence.
   * @param callbackAction The final action which can be used to trigger a callback.
   */
  private ActionSchedule(ActionNode rootNode, CallbackAction callbackAction) {
    this.rootNode = rootNode;
    this.callbackAction = callbackAction;
  }

  /**
   * Starts execution of the schedule.
   *
   * @param player The player to which actions should be applied.
   * @param trackSelector The track selector to which actions should be applied.
   * @param surface The surface to use when applying actions.
   * @param mainHandler A handler associated with the main thread of the host activity.
   * @param callback A {@link Callback} to notify when the action schedule finishes, or null if no
   *     notification is needed.
   */
  /* package */ void start(
      SimpleExoPlayer player,
      MappingTrackSelector trackSelector,
      Surface surface,
      HandlerWrapper mainHandler,
      @Nullable Callback callback) {
    callbackAction.setCallback(callback);
    rootNode.schedule(player, trackSelector, surface, mainHandler);
  }

  /**
   * A builder for {@link ActionSchedule} instances.
   */
  public static final class Builder {

    // Tag used for logging by every action created through this builder.
    private final String tag;
    // Head of the action list under construction (no-op root, zero delay).
    private final ActionNode rootNode;
    // Delay accumulated via delay() and applied to the NEXT appended node, then reset to 0.
    private long currentDelayMs;
    // Tail of the list; new nodes are linked after this one.
    private ActionNode previousNode;

    /**
     * @param tag A tag to use for logging.
     */
    public Builder(String tag) {
      this.tag = tag;
      rootNode = new ActionNode(new RootAction(tag), 0);
      previousNode = rootNode;
    }

    /**
     * Schedules a delay between executing any previous actions and any subsequent ones.
     *
     * @param delayMs The delay in milliseconds.
     * @return The builder, for convenience.
     */
    public Builder delay(long delayMs) {
      currentDelayMs += delayMs;
      return this;
    }

    /**
     * Schedules an action to be executed.
     *
     * @param action The action to schedule.
     * @return The builder, for convenience.
     */
    public Builder apply(Action action) {
      return appendActionNode(new ActionNode(action, currentDelayMs));
    }

    /**
     * Schedules an action to be executed repeatedly.
     *
     * @param action The action to schedule.
     * @param intervalMs The interval between each repetition in milliseconds.
     * @return The builder, for convenience.
     */
    public Builder repeat(Action action, long intervalMs) {
      return appendActionNode(new ActionNode(action, currentDelayMs, intervalMs));
    }

    /**
     * Schedules a seek action to be executed.
     *
     * @param positionMs The seek position.
     * @return The builder, for convenience.
     */
    public Builder seek(long positionMs) {
      return apply(new Seek(tag, positionMs));
    }

    /**
     * Schedules a seek action to be executed.
     *
     * @param windowIndex The window to seek to.
     * @param positionMs The seek position.
     * @return The builder, for convenience.
     */
    public Builder seek(int windowIndex, long positionMs) {
      return apply(new Seek(tag, windowIndex, positionMs));
    }

    /**
     * Schedules a seek action to be executed and waits until playback resumes after the seek.
     *
     * @param positionMs The seek position.
     * @return The builder, for convenience.
     */
    public Builder seekAndWait(long positionMs) {
      return apply(new Seek(tag, positionMs))
          .apply(new WaitForSeekProcessed(tag))
          .apply(new WaitForPlaybackState(tag, Player.STATE_READY));
    }

    /**
     * Schedules a delay until the player indicates that a seek has been processed.
     *
     * @return The builder, for convenience.
     */
    public Builder waitForSeekProcessed() {
      return apply(new WaitForSeekProcessed(tag));
    }

    /**
     * Schedules a playback parameters setting action to be executed.
     *
     * @param playbackParameters The playback parameters to set.
     * @return The builder, for convenience.
     * @see Player#setPlaybackParameters(PlaybackParameters)
     */
    public Builder setPlaybackParameters(PlaybackParameters playbackParameters) {
      return apply(new SetPlaybackParameters(tag, playbackParameters));
    }

    /**
     * Schedules a stop action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder stop() {
      return apply(new Stop(tag));
    }

    /**
     * Schedules a stop action to be executed.
     *
     * @param reset Whether the player should be reset.
     * @return The builder, for convenience.
     */
    public Builder stop(boolean reset) {
      return apply(new Stop(tag, reset));
    }

    /**
     * Schedules a play action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder play() {
      return apply(new SetPlayWhenReady(tag, true));
    }

    /**
     * Schedules a play action to be executed, waits until the player reaches the specified
     * position, and pauses the player again.
     *
     * @param windowIndex The window index at which the player should be paused again.
     * @param positionMs The position in that window at which the player should be paused again.
     * @return The builder, for convenience.
     */
    public Builder playUntilPosition(int windowIndex, long positionMs) {
      return apply(new PlayUntilPosition(tag, windowIndex, positionMs));
    }

    /**
     * Schedules a play action to be executed, waits until the player reaches the start of the
     * specified window, and pauses the player again.
     *
     * @param windowIndex The window index at which the player should be paused again.
     * @return The builder, for convenience.
     */
    public Builder playUntilStartOfWindow(int windowIndex) {
      return apply(new PlayUntilPosition(tag, windowIndex, /* positionMs= */ 0));
    }

    /**
     * Schedules a pause action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder pause() {
      return apply(new SetPlayWhenReady(tag, false));
    }

    /**
     * Schedules a renderer enable action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder enableRenderer(int index) {
      return apply(new SetRendererDisabled(tag, index, false));
    }

    /**
     * Schedules a renderer disable action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder disableRenderer(int index) {
      return apply(new SetRendererDisabled(tag, index, true));
    }

    /**
     * Schedules a clear video surface action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder clearVideoSurface() {
      return apply(new ClearVideoSurface(tag));
    }

    /**
     * Schedules a set video surface action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder setVideoSurface() {
      return apply(new SetVideoSurface(tag));
    }

    /**
     * Schedules a new source preparation action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder prepareSource(MediaSource mediaSource) {
      return apply(new PrepareSource(tag, mediaSource));
    }

    /**
     * Schedules a new source preparation action to be executed.
     * @see com.google.android.exoplayer2.ExoPlayer#prepare(MediaSource, boolean, boolean).
     *
     * @return The builder, for convenience.
     */
    public Builder prepareSource(MediaSource mediaSource, boolean resetPosition,
        boolean resetState) {
      return apply(new PrepareSource(tag, mediaSource, resetPosition, resetState));
    }

    /**
     * Schedules a repeat mode setting action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder setRepeatMode(@Player.RepeatMode int repeatMode) {
      return apply(new SetRepeatMode(tag, repeatMode));
    }

    /**
     * Schedules a shuffle setting action to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder setShuffleModeEnabled(boolean shuffleModeEnabled) {
      return apply(new SetShuffleModeEnabled(tag, shuffleModeEnabled));
    }

    /**
     * Schedules sending a {@link PlayerMessage}.
     *
     * @param positionMs The position in the current window at which the message should be sent, in
     *     milliseconds.
     * @return The builder, for convenience.
     */
    public Builder sendMessage(Target target, long positionMs) {
      return apply(new SendMessages(tag, target, positionMs));
    }

    /**
     * Schedules sending a {@link PlayerMessage}.
     *
     * @param target A message target.
     * @param windowIndex The window index at which the message should be sent.
     * @param positionMs The position at which the message should be sent, in milliseconds.
     * @return The builder, for convenience.
     */
    public Builder sendMessage(Target target, int windowIndex, long positionMs) {
      return apply(
          new SendMessages(tag, target, windowIndex, positionMs, /* deleteAfterDelivery= */ true));
    }

    /**
     * Schedules to send a {@link PlayerMessage}.
     *
     * @param target A message target.
     * @param windowIndex The window index at which the message should be sent.
     * @param positionMs The position at which the message should be sent, in milliseconds.
     * @param deleteAfterDelivery Whether the message will be deleted after delivery.
     * @return The builder, for convenience.
     */
    public Builder sendMessage(
        Target target, int windowIndex, long positionMs, boolean deleteAfterDelivery) {
      return apply(new SendMessages(tag, target, windowIndex, positionMs, deleteAfterDelivery));
    }

    /**
     * Schedules a delay until the timeline changed to a specified expected timeline.
     *
     * @param expectedTimeline The expected timeline to wait for.
     * @return The builder, for convenience.
     */
    public Builder waitForTimelineChanged(Timeline expectedTimeline) {
      return apply(new WaitForTimelineChanged(tag, expectedTimeline));
    }

    /**
     * Schedules a delay until the next position discontinuity.
     *
     * @return The builder, for convenience.
     */
    public Builder waitForPositionDiscontinuity() {
      return apply(new WaitForPositionDiscontinuity(tag));
    }

    /**
     * Schedules a delay until the playback state changed to the specified state.
     *
     * @param targetPlaybackState The target playback state.
     * @return The builder, for convenience.
     */
    public Builder waitForPlaybackState(int targetPlaybackState) {
      return apply(new WaitForPlaybackState(tag, targetPlaybackState));
    }

    /**
     * Schedules a {@link Runnable} to be executed.
     *
     * @return The builder, for convenience.
     */
    public Builder executeRunnable(Runnable runnable) {
      return apply(new ExecuteRunnable(tag, runnable));
    }

    /**
     * Schedules to throw a playback exception on the playback thread.
     *
     * @param exception The exception to throw.
     * @return The builder, for convenience.
     */
    public Builder throwPlaybackException(ExoPlaybackException exception) {
      return apply(new ThrowPlaybackException(tag, exception));
    }

    /**
     * Builds the schedule, appending the terminal {@link CallbackAction} that signals completion.
     *
     * @return The built {@link ActionSchedule}.
     */
    public ActionSchedule build() {
      CallbackAction callbackAction = new CallbackAction(tag);
      apply(callbackAction);
      return new ActionSchedule(rootNode, callbackAction);
    }

    // Links the node at the tail of the list, consuming any delay accumulated via delay().
    private Builder appendActionNode(ActionNode actionNode) {
      previousNode.setNext(actionNode);
      previousNode = actionNode;
      currentDelayMs = 0;
      return this;
    }
  }

  /**
   * Provides a wrapper for a {@link Target} which has access to the player when handling messages.
   * Can be used with {@link Builder#sendMessage(Target, long)}.
   */
  public abstract static class PlayerTarget implements Target {

    // Player injected via setPlayer before messages are delivered.
    private SimpleExoPlayer player;

    /** Handles the message send to the component and additionally provides access to the player. */
    public abstract void handleMessage(SimpleExoPlayer player, int messageType, Object message);

    /** Sets the player to be passed to {@link #handleMessage(SimpleExoPlayer, int, Object)}. */
    /* package */ void setPlayer(SimpleExoPlayer player) {
      this.player = player;
    }

    @Override
    public final void handleMessage(int messageType, Object message) throws ExoPlaybackException {
      handleMessage(player, messageType, message);
    }
  }

  /**
   * Provides a wrapper for a {@link Runnable} which has access to the player. Can be used with
   * {@link Builder#executeRunnable(Runnable)}.
   */
  public abstract static class PlayerRunnable implements Runnable {

    // Player injected via setPlayer before run() is invoked.
    private SimpleExoPlayer player;

    /** Executes Runnable with reference to player. */
    public abstract void run(SimpleExoPlayer player);

    /** Sets the player to be passed to {@link #run(SimpleExoPlayer)} . */
    /* package */ void setPlayer(SimpleExoPlayer player) {
      this.player = player;
    }

    @Override
    public final void run() {
      run(player);
    }
  }

  /**
   * Wraps an {@link Action}, allowing a delay and a next {@link Action} to be specified.
   */
  /* package */ static final class ActionNode implements Runnable {

    // The wrapped action and its scheduling parameters (immutable after construction).
    private final Action action;
    private final long delayMs;
    private final long repeatIntervalMs;

    // Next node in the schedule, or null for the final node.
    private ActionNode next;

    // Targets captured in schedule() and used when the node later runs.
    private SimpleExoPlayer player;
    private MappingTrackSelector trackSelector;
    private Surface surface;
    private HandlerWrapper mainHandler;

    /**
     * @param action The wrapped action.
     * @param delayMs The delay between the node being scheduled and the action being executed.
     */
    public ActionNode(Action action, long delayMs) {
      this(action, delayMs, C.TIME_UNSET);
    }

    /**
     * @param action The wrapped action.
     * @param delayMs The delay between the node being scheduled and the action being executed.
     * @param repeatIntervalMs The interval between one execution and the next repetition. If set to
     *     {@link C#TIME_UNSET}, the action is executed once only.
     */
    public ActionNode(Action action, long delayMs, long repeatIntervalMs) {
      this.action = action;
      this.delayMs = delayMs;
      this.repeatIntervalMs = repeatIntervalMs;
    }

    /**
     * Sets the next action.
     *
     * @param next The next {@link Action}.
     */
    public void setNext(ActionNode next) {
      this.next = next;
    }

    /**
     * Schedules {@link #action} to be executed after {@link #delayMs}. The {@link #next} node will
     * be scheduled immediately after {@link #action} is executed.
     *
     * @param player The player to which actions should be applied.
     * @param trackSelector The track selector to which actions should be applied.
     * @param surface The surface to use when applying actions.
     * @param mainHandler A handler associated with the main thread of the host activity.
     */
    public void schedule(
        SimpleExoPlayer player,
        MappingTrackSelector trackSelector,
        Surface surface,
        HandlerWrapper mainHandler) {
      this.player = player;
      this.trackSelector = trackSelector;
      this.surface = surface;
      this.mainHandler = mainHandler;
      // Fast path: when there is no delay and we are already on the handler's looper thread,
      // execute synchronously instead of posting.
      if (delayMs == 0 && Looper.myLooper() == mainHandler.getLooper()) {
        run();
      } else {
        mainHandler.postDelayed(this, delayMs);
      }
    }

    @Override
    public void run() {
      action.doActionAndScheduleNext(player, trackSelector, surface, mainHandler, next);
      if (repeatIntervalMs != C.TIME_UNSET) {
        // Self-reposting runnable: repeats the action (without rescheduling next) every interval.
        mainHandler.postDelayed(
            new Runnable() {
              @Override
              public void run() {
                action.doActionAndScheduleNext(player, trackSelector, surface, mainHandler, null);
                mainHandler.postDelayed(this, repeatIntervalMs);
              }
            },
            repeatIntervalMs);
      }
    }
  }

  /**
   * A no-op root action.
   */
  private static final class RootAction extends Action {

    public RootAction(String tag) {
      super(tag, "Root");
    }

    @Override
    protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector,
        Surface surface) {
      // Do nothing.
    }
  }

  /**
   * An action calling a specified {@link ActionSchedule.Callback}.
   */
  private static final class CallbackAction extends Action {

    // Callback to invoke when the schedule finishes, or null if none was requested.
    private @Nullable Callback callback;

    public CallbackAction(String tag) {
      super(tag, "FinishedCallback");
    }

    public void setCallback(@Nullable Callback callback) {
      this.callback = callback;
    }

    @Override
    protected void doActionAndScheduleNextImpl(
        SimpleExoPlayer player,
        MappingTrackSelector trackSelector,
        Surface surface,
        HandlerWrapper handler,
        ActionNode nextAction) {
      // This is always the final action, so nothing may follow it.
      Assertions.checkArgument(nextAction == null);
      if (callback != null) {
        handler.post(
            new Runnable() {
              @Override
              public void run() {
                callback.onActionScheduleFinished();
              }
            });
      }
    }

    @Override
    protected void doActionImpl(
        SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) {
      // Not triggered.
    }
  }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *      http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.decompiler;

import java.awt.Color;
import java.util.List;

import ghidra.program.model.address.Address;
import ghidra.program.model.pcode.*;
import ghidra.xml.XmlElement;
import ghidra.xml.XmlPullParser;

/**
 * Class representing a C code language token.
 * May contain links back to pcode object.
 */
public class ClangToken implements ClangNode {

	// Syntax highlighting categories; mapped from XML color attributes by getColor(String).
	public final static int KEYWORD_COLOR = 0;
	public final static int TYPE_COLOR = 1;
	public final static int FUNCTION_COLOR = 2;
	public final static int COMMENT_COLOR = 3;
	public final static int VARIABLE_COLOR = 4;
	public final static int CONST_COLOR = 5;
	public final static int PARAMETER_COLOR = 6;
	public final static int GLOBAL_COLOR = 7;
	public final static int DEFAULT_COLOR = 8;

	private ClangNode parent; // Owning node in the token tree
	private ClangLine lineparent; // Line of tokens this token has been assigned to, or null
	private String text; // Display text of the token
	private int syntax_type; // One of the *_COLOR categories above
	private Color highlight; // Color to highlight with or null if no highlight
	private boolean matchingToken; // True if this token is part of the current "matching" set

	/**
	 * Constructs a token with no text and the default syntax category.
	 * @param par the owning node
	 */
	public ClangToken(ClangNode par) {
		this(par, null);
	}

	/**
	 * Constructs a token with the given text and the default syntax category.
	 * @param par the owning node
	 * @param txt the token text
	 */
	public ClangToken(ClangNode par, String txt) {
		this(par, txt, null);
	}

	/**
	 * Constructs a token with the given text and syntax category.
	 * @param par the owning node
	 * @param txt the token text
	 * @param col the XML color attribute value, or null for the default category
	 */
	public ClangToken(ClangNode par, String txt, String col) {
		parent = par;
		text = txt;
		highlight = null;
		syntax_type = getColor(col);
	}

	@Override
	public ClangNode Parent() {
		return parent;
	}

	/**
	 * @return the line of tokens this token has been assigned to, or null if not yet assigned
	 */
	public ClangLine getLineParent() {
		return lineparent;
	}

	/**
	 * Assigns the line of tokens this token belongs to.
	 * @param line the owning line
	 */
	public void setLineParent(ClangLine line) {
		lineparent = line;
	}

	@Override
	public Address getMinAddress() {
		return null; // A plain token carries no address; subclasses may override
	}

	@Override
	public Address getMaxAddress() {
		return null; // A plain token carries no address; subclasses may override
	}

	@Override
	public int numChildren() {
		return 0; // Tokens are leaves of the tree
	}

	@Override
	public ClangNode Child(int i) {
		return null; // Tokens are leaves of the tree
	}

	@Override
	public ClangFunction getClangFunction() {
		// Delegate up the tree; the enclosing function node (if any) answers.
		if (parent != null) {
			return parent.getClangFunction();
		}
		return null;
	}

	@Override
	public void setHighlight(Color val) {
		highlight = val;
	}

	/**
	 * @return the current highlight color, or null if the token is not highlighted
	 */
	public Color getHighlight() {
		return highlight;
	}

	/**
	 * Marks/unmarks this token as belonging to the current matching-token set.
	 * @param matchingToken true to mark as matching
	 */
	public void setMatchingToken(boolean matchingToken) {
		this.matchingToken = matchingToken;
	}

	/**
	 * @return true if this token is marked as a matching token
	 */
	public boolean isMatchingToken() {
		return matchingToken;
	}

	/**
	 * @return true if this token represents a variable reference; subclasses may override
	 */
	public boolean isVariableRef() {
		return false;
	}

	/**
	 * @return the syntax highlighting category (one of the *_COLOR constants)
	 */
	public int getSyntaxType() {
		return syntax_type;
	}

	void setSyntaxType(int syntax_type) {
		this.syntax_type = syntax_type;
	}

	/**
	 * @return the display text of this token
	 */
	public String getText() {
		return text;
	}

	void setText(String text) {
		this.text = text;
	}

	/**
	 * Restores this token's text and syntax category from its XML encoding.
	 * @param el the opening XML element (carries the color attribute)
	 * @param end the closing XML element (carries the text content)
	 * @param pfactory factory for resolving pcode references; unused by the base class
	 */
	public void restoreFromXML(XmlElement el, XmlElement end, PcodeFactory pfactory) {
		text = end.getText();
		String col = el.getAttribute(ClangXML.COLOR);
		syntax_type = getColor(col);
	}

	@Override
	public void flatten(List<ClangNode> list) {
		list.add(this);
	}

	/**
	 * Parses the next XML element into the appropriate ClangToken subclass.
	 * @param par the node that will own the new token
	 * @param parser the XML pull parser positioned at the token element
	 * @param pfactory factory for resolving pcode references
	 * @return the new token, or null if the element name is unrecognized
	 */
	static public ClangToken buildToken(ClangNode par, XmlPullParser parser,
			PcodeFactory pfactory) {
		XmlElement node = parser.start(ClangXML.VARIABLE, ClangXML.OP, ClangXML.SYNTAX,
			ClangXML.BREAK, ClangXML.FUNCNAME, ClangXML.TYPE, ClangXML.COMMENT, ClangXML.LABEL,
			ClangXML.FIELD);
		ClangToken token = null;
		if (node.getName().equals(ClangXML.VARIABLE)) {
			token = new ClangVariableToken(par);
		}
		else if (node.getName().equals(ClangXML.OP)) {
			token = new ClangOpToken(par);
		}
		else if (node.getName().equals(ClangXML.SYNTAX)) {
			token = new ClangSyntaxToken(par);
		}
		else if (node.getName().equals(ClangXML.BREAK)) {
			token = new ClangBreak(par);
		}
		else if (node.getName().equals(ClangXML.FUNCNAME)) {
			token = new ClangFuncNameToken(par, null);
		}
		else if (node.getName().equals(ClangXML.TYPE)) {
			token = new ClangTypeToken(par);
		}
		else if (node.getName().equals(ClangXML.COMMENT)) {
			token = new ClangCommentToken(par);
		}
		else if (node.getName().equals(ClangXML.FIELD)) {
			token = new ClangFieldToken(par);
		}
		XmlElement end = parser.end(node);
		if (token != null) {
			token.restoreFromXML(node, end, pfactory);
		}
		return token;
	}

	/**
	 * Maps an XML color attribute value to one of the *_COLOR syntax categories.
	 * @param col the attribute value, or null
	 * @return the matching category, or DEFAULT_COLOR if col is null or unrecognized
	 */
	public static int getColor(String col) {
		if (col != null) {
			if (col.equals(ClangXML.KEYWORD_COLOR)) {
				return KEYWORD_COLOR;
			}
			else if (col.equals(ClangXML.VARIABLE_COLOR)) {
				return VARIABLE_COLOR;
			}
			else if (col.equals(ClangXML.CONST_COLOR)) {
				return CONST_COLOR;
			}
			else if (col.equals(ClangXML.PARAMETER_COLOR)) {
				return PARAMETER_COLOR;
			}
			else if (col.equals(ClangXML.GLOBAL_COLOR)) {
				return GLOBAL_COLOR;
			}
			else if (col.equals(ClangXML.TYPE_COLOR)) {
				return TYPE_COLOR;
			}
			else if (col.equals(ClangXML.COMMENT_COLOR)) {
				return COMMENT_COLOR;
			}
			else if (col.equals(ClangXML.FUNCNAME_COLOR)) {
				return FUNCTION_COLOR;
			}
		}
		return DEFAULT_COLOR; // The default color
	}

	/**
	 * Builds a whitespace token consisting of the indent string repeated {@code indent} times.
	 * @param par the node that will own the spacer
	 * @param indent the number of repetitions
	 * @param indentStr the string for one level of indentation
	 * @return a syntax token holding the accumulated spacing
	 */
	static public ClangToken buildSpacer(ClangNode par, int indent, String indentStr) {
		// Use StringBuilder rather than String += in a loop to avoid O(n^2) concatenation.
		StringBuilder spacing = new StringBuilder();
		for (int i = 0; i < indent; ++i) {
			spacing.append(indentStr);
		}
		return new ClangSyntaxToken(par, spacing.toString());
	}

	@Override
	public String toString() {
		return text;
	}

	/**
	 * Get the high-level variable associate with this
	 * token or null otherwise
	 * @return HighVariable
	 */
	public HighVariable getHighVariable() {
		if (Parent() instanceof ClangVariableDecl) {
			return ((ClangVariableDecl) Parent()).getHighVariable();
		}
		return null;
	}

	/**
	 * Many tokens directly represent a variable in the data-flow
	 * @return the variable (Varnode) associated with this token or null
	 */
	public Varnode getVarnode() {
		return null;
	}

	/**
	 * Many tokens directly represent a pcode operator in the data-flow
	 * @return the operation (PcodeOp) associated with this token or null
	 */
	public PcodeOp getPcodeOp() {
		return null;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.messaginghub.pooled.jms;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import javax.jms.Connection;
import javax.jms.IllegalStateException;
import javax.jms.IllegalStateRuntimeException;
import javax.jms.JMSException;
import javax.jms.JMSRuntimeException;
import javax.jms.QueueConnection;
import javax.jms.QueueConnectionFactory;
import javax.jms.TopicConnection;
import javax.jms.TopicConnectionFactory;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.messaginghub.pooled.jms.mock.MockJMSConnection;
import org.messaginghub.pooled.jms.mock.MockJMSConnectionFactory;
import org.messaginghub.pooled.jms.util.Wait;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Performs basic tests on the JmsPoolConnectionFactory implementation.
 *
 * <p>The {@code cf} and {@code factory} fields are inherited from
 * {@link JmsPoolTestSupport}; several tests replace {@code cf} with a fresh
 * factory wired to a {@link MockJMSConnectionFactory} so pooling behavior can
 * be observed without a real broker.
 */
@Timeout(60)
public class JmsPoolConnectionFactoryTest extends JmsPoolTestSupport {

    public static final Logger LOG = LoggerFactory.getLogger(JmsPoolConnectionFactoryTest.class);

    /** The pool factory must expose both the queue and topic factory contracts. */
    @Test
    public void testInstanceOf() throws Exception {
        cf = new JmsPoolConnectionFactory();
        assertTrue(cf instanceof QueueConnectionFactory);
        assertTrue(cf instanceof TopicConnectionFactory);
        cf.stop();
    }

    // NOTE(review): test name talks about "TimeBetweenExpirationCheckMillis" but the
    // property exercised is connectionCheckInterval — presumably a historical rename.
    @Test
    public void testSetTimeBetweenExpirationCheckMillis() throws Exception {
        cf = new JmsPoolConnectionFactory();
        assertEquals(-1, cf.getConnectionCheckInterval());
        cf.setConnectionCheckInterval(5000);
        assertEquals(5000, cf.getConnectionCheckInterval());
    }

    /** The delegate factory is unset by default and returned as-is once set. */
    @Test
    public void testGetConnectionFactory() throws Exception {
        cf = new JmsPoolConnectionFactory();
        assertNull(cf.getConnectionFactory(), "Should not have any factory set yet");
        cf.setConnectionFactory(factory);
        assertNotNull(cf.getConnectionFactory(), "Should have a factory set yet");
        assertSame(factory, cf.getConnectionFactory());
    }

    /** A non-ConnectionFactory delegate must be rejected immediately. */
    @Test
    public void testFactoryRejectsNonConnectionFactorySet() throws Exception {
        assertThrows(IllegalArgumentException.class, () -> cf.setConnectionFactory(""));
    }

    /** Creating a connection without a delegate factory is an illegal state. */
    @Test
    public void testCreateConnectionWithNoFactorySet() throws Exception {
        cf = new JmsPoolConnectionFactory();
        assertThrows(IllegalStateException.class, () -> cf.createConnection());
    }

    /** Closing a pooled connection returns it to the pool rather than destroying it. */
    @Test
    public void testCreateConnection() throws Exception {
        Connection connection = cf.createConnection();

        assertNotNull(connection);
        assertEquals(1, cf.getNumConnections());

        connection.close();

        // Still pooled after close.
        assertEquals(1, cf.getNumConnections());
    }

    @Test
    public void testCreateConnectionWithCredentials() throws Exception {
        Connection connection = cf.createConnection("user", "pass");

        assertNotNull(connection);
        assertEquals(1, cf.getNumConnections());

        connection.close();

        assertEquals(1, cf.getNumConnections());
    }

    @Test
    public void testQueueCreateConnection() throws Exception {
        QueueConnection connection = cf.createQueueConnection();

        assertNotNull(connection);
        assertEquals(1, cf.getNumConnections());

        connection.close();

        assertEquals(1, cf.getNumConnections());
    }

    @Test
    public void testQueueCreateConnectionWithCredentials() throws Exception {
        QueueConnection connection = cf.createQueueConnection("user", "pass");

        assertNotNull(connection);
        assertEquals(1, cf.getNumConnections());

        connection.close();

        assertEquals(1, cf.getNumConnections());
    }

    @Test
    public void testTopicCreateConnection() throws Exception {
        TopicConnection connection = cf.createTopicConnection();

        assertNotNull(connection);
        assertEquals(1, cf.getNumConnections());

        connection.close();

        assertEquals(1, cf.getNumConnections());
    }

    @Test
    public void testTopicCreateConnectionWithCredentials() throws Exception {
        TopicConnection connection = cf.createTopicConnection("user", "pass");

        assertNotNull(connection);
        assertEquals(1, cf.getNumConnections());

        connection.close();

        assertEquals(1, cf.getNumConnections());
    }

    /** clear() empties the pool; subsequent creates build fresh underlying connections. */
    @Test
    public void testClearAllConnections() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(3);

        JmsPoolConnection conn1 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn2 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn3 = (JmsPoolConnection) cf.createConnection();

        // With maxConnections = 3 each create lands on a distinct underlying connection.
        assertNotSame(conn1.getConnection(), conn2.getConnection());
        assertNotSame(conn1.getConnection(), conn3.getConnection());
        assertNotSame(conn2.getConnection(), conn3.getConnection());

        assertEquals(3, cf.getNumConnections());

        cf.clear();

        assertEquals(0, cf.getNumConnections());

        conn1 = (JmsPoolConnection) cf.createConnection();
        conn2 = (JmsPoolConnection) cf.createConnection();
        conn3 = (JmsPoolConnection) cf.createConnection();

        assertNotSame(conn1.getConnection(), conn2.getConnection());
        assertNotSame(conn1.getConnection(), conn3.getConnection());
        assertNotSame(conn2.getConnection(), conn3.getConnection());

        cf.stop();
    }

    /** clear() on an already stopped factory must be a safe no-op. */
    @Test
    public void testClearDoesNotFailOnStoppedConnectionFactory() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(3);

        JmsPoolConnection conn1 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn2 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn3 = (JmsPoolConnection) cf.createConnection();

        assertNotSame(conn1.getConnection(), conn2.getConnection());
        assertNotSame(conn1.getConnection(), conn3.getConnection());
        assertNotSame(conn2.getConnection(), conn3.getConnection());

        assertEquals(3, cf.getNumConnections());

        cf.stop();
        assertEquals(0, cf.getNumConnections());

        try {
            cf.clear();
        } catch (Throwable error) {
            fail("Should not throw on clear of stopped factory.");
        }
    }

    /** The pool grows one underlying connection per create up to maxConnections. */
    @Test
    public void testMaxConnectionsAreCreated() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(3);

        JmsPoolConnection conn1 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn2 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn3 = (JmsPoolConnection) cf.createConnection();

        assertNotSame(conn1.getConnection(), conn2.getConnection());
        assertNotSame(conn1.getConnection(), conn3.getConnection());
        assertNotSame(conn2.getConnection(), conn3.getConnection());

        assertEquals(3, cf.getNumConnections());

        cf.stop();
    }

    /** A stopped factory hands out null connections; start() re-enables creation. */
    @Test
    public void testCannotCreateConnectionOnStoppedFactory() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(100);
        cf.stop();

        assertEquals(0, cf.getNumConnections());
        assertNull(cf.createConnection());
        assertEquals(0, cf.getNumConnections());

        cf.start();

        assertNotNull(cf.createConnection());
        assertEquals(1, cf.getNumConnections());

        cf.stop();
    }

    /** Same stopped/started behavior for the JMS 2.0 createContext path. */
    @Test
    public void testCannotCreateContextOnStoppedFactory() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(100);
        cf.stop();

        assertEquals(0, cf.getNumConnections());
        assertNull(cf.createContext());
        assertEquals(0, cf.getNumConnections());

        cf.start();

        assertNotNull(cf.createContext());
        assertEquals(1, cf.getNumConnections());

        cf.stop();
    }

    /** stop() discards pooled state so a restart yields a new underlying connection. */
    @Test
    public void testFactoryStopStart() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(1);

        JmsPoolConnection conn1 = (JmsPoolConnection) cf.createConnection();

        cf.stop();

        assertNull(cf.createConnection());

        cf.start();

        JmsPoolConnection conn2 = (JmsPoolConnection) cf.createConnection();

        assertNotSame(conn1.getConnection(), conn2.getConnection());
        assertEquals(1, cf.getNumConnections());

        cf.stop();
    }

    /** With a full pool, successive creates rotate over distinct underlying connections. */
    @Test
    public void testConnectionsAreRotated() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(10);

        Connection previous = null;

        // Front load the pool.
        for (int i = 0; i < 10; ++i) {
            cf.createConnection();
        }

        for (int i = 0; i < 100; ++i) {
            Connection current = ((JmsPoolConnection) cf.createConnection()).getConnection();
            assertNotSame(previous, current);
            previous = current;
        }

        cf.stop();
    }

    /** With maxConnections = 1 all pooled connections share one underlying connection. */
    @Test
    public void testConnectionsArePooled() throws Exception {
        MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(1);

        JmsPoolConnection conn1 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn2 = (JmsPoolConnection) cf.createConnection();
        JmsPoolConnection conn3 = (JmsPoolConnection) cf.createConnection();

        assertSame(conn1.getConnection(), conn2.getConnection());
        assertSame(conn1.getConnection(), conn3.getConnection());
        assertSame(conn2.getConnection(), conn3.getConnection());

        assertEquals(1, cf.getNumConnections());

        cf.stop();
    }

    /** Concurrent creates against a single-connection pool all share the primary connection. */
    @Test
    public void testConnectionsArePooledAsyncCreate() throws Exception {
        final MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(1);

        final ConcurrentLinkedQueue<JmsPoolConnection> connections = new ConcurrentLinkedQueue<JmsPoolConnection>();

        final JmsPoolConnection primary = (JmsPoolConnection) cf.createConnection();
        final ExecutorService executor = Executors.newFixedThreadPool(10);
        final int numConnections = 100;

        for (int i = 0; i < numConnections; ++i) {
            executor.execute(() -> {
                try {
                    connections.add((JmsPoolConnection) cf.createConnection());
                } catch (JMSException e) {
                    // Surface unexpected create failures instead of swallowing them;
                    // otherwise the Wait below times out with no diagnostics.
                    LOG.error("Failed to create pooled connection", e);
                }
            });
        }

        assertTrue(Wait.waitFor(new Wait.Condition() {

            @Override
            public boolean isSatisfied() throws Exception {
                return connections.size() == numConnections;
            }
        }, TimeUnit.SECONDS.toMillis(10), TimeUnit.MILLISECONDS.toMillis(50)), "All connections should have been created.");

        executor.shutdown();
        assertTrue(executor.awaitTermination(5, TimeUnit.SECONDS));

        for (JmsPoolConnection connection : connections) {
            assertSame(primary.getConnection(), connection.getConnection());
        }

        connections.clear();
        cf.stop();
    }

    @Test
    public void testConcurrentCreateGetsUniqueConnectionCreateOnDemand() throws Exception {
        doTestConcurrentCreateGetsUniqueConnection(false);
    }

    @Test
    public void testConcurrentCreateGetsUniqueConnectionCreateOnStart() throws Exception {
        doTestConcurrentCreateGetsUniqueConnection(true);
    }

    /**
     * Creates connections from multiple threads and verifies each lands on a
     * unique underlying connection (keyed by the mock connection id).
     *
     * <p>NOTE(review): {@code createOnStart} is never used, so both callers run
     * the identical scenario — presumably a connection-on-startup option was
     * meant to be toggled here; TODO confirm the intended setter.
     */
    private void doTestConcurrentCreateGetsUniqueConnection(boolean createOnStart) throws Exception {
        final int numConnections = 2;

        final MockJMSConnectionFactory mock = new MockJMSConnectionFactory();
        cf = new JmsPoolConnectionFactory();
        cf.setConnectionFactory(mock);
        cf.setMaxConnections(numConnections);
        cf.start();

        final ConcurrentMap<UUID, Connection> connections = new ConcurrentHashMap<>();
        final ExecutorService executor = Executors.newFixedThreadPool(numConnections);

        for (int i = 0; i < numConnections; ++i) {
            executor.execute(() -> {
                try {
                    JmsPoolConnection pooled = (JmsPoolConnection) cf.createConnection();
                    MockJMSConnection wrapped = (MockJMSConnection) pooled.getConnection();
                    connections.put(wrapped.getConnectionId(), pooled);
                } catch (JMSException e) {
                    // Log instead of swallowing so a failed create explains the
                    // size mismatch asserted below.
                    LOG.error("Failed to create pooled connection", e);
                }
            });
        }

        executor.shutdown();
        assertTrue(executor.awaitTermination(30, TimeUnit.SECONDS));

        assertEquals(numConnections, connections.size(), "Should have all unique connections");

        connections.clear();
        cf.stop();
    }

    /** The base class must reject an invalid delegate injected by a subclass. */
    @Test
    public void testPooledBaseHandlesSubclassesInjectingInvalidFactoriesForConnection() throws Exception {
        cf = new BadFactoryJmsPoolConnectionFactory();
        cf.setConnectionFactory(UUID.randomUUID());

        try {
            cf.createConnection();
            fail("Should throw IllegalStateException when factory is an invalid type");
        } catch (IllegalStateException ise) {}
    }

    @Test
    public void testPooledBaseHandlesSubclassesInjectingInvalidFactoriesForContext() throws Exception {
        cf = new BadFactoryJmsPoolConnectionFactory();
        cf.setConnectionFactory(UUID.randomUUID());

        try {
            cf.createContext();
            fail("Should throw IllegalStateRuntimeException when factory is an invalid type");
        } catch (IllegalStateRuntimeException isre) {}
    }

    /** Bypasses validation to plant an arbitrary object as the delegate factory. */
    private static class BadFactoryJmsPoolConnectionFactory extends JmsPoolConnectionFactory {

        @Override
        public void setConnectionFactory(Object factory) {
            // Simulate bad Pooled factory subclass to ensure we validate what it gave us.
            this.connectionFactory = factory;
            this.jmsContextSupported = true;
        }
    }

    /** createContext must fail fast when JMS 2.0 support is flagged off. */
    @Test
    public void testPooledCreateContextFailsWhenJMS20NotSupported() throws Exception {
        cf = new JMS20NotAllowedJmsPoolConnectionFactory();
        cf.setConnectionFactory(new MockJMSConnectionFactory());

        try {
            cf.createContext();
            fail("Should throw JMSRuntimeException when told JMS 2.0 isn't available");
        } catch (JMSRuntimeException jmsre) {}
    }

    /** Marks the delegate as lacking JMS 2.0 context support after normal setup. */
    private static class JMS20NotAllowedJmsPoolConnectionFactory extends JmsPoolConnectionFactory {

        @Override
        public void setConnectionFactory(Object factory) {
            super.setConnectionFactory(factory);
            this.jmsContextSupported = false;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.ml.math.primitives.matrix;

import org.apache.ignite.ml.math.exceptions.CardinalityException;
import org.apache.ignite.ml.math.exceptions.SingularMatrixException;
import org.apache.ignite.ml.math.primitives.vector.Vector;

import static org.apache.ignite.ml.math.util.MatrixUtil.copy;
import static org.apache.ignite.ml.math.util.MatrixUtil.like;
import static org.apache.ignite.ml.math.util.MatrixUtil.likeVector;

/**
 * Calculates the LU-decomposition of a square matrix.
 * <p>
 * This class is inspired by class from Apache Common Math with similar name.</p>
 * <p>
 * The decomposition is computed eagerly in the constructor using Doolittle's
 * method with partial (row) pivoting; {@code L}, {@code U} and {@code P} are
 * materialized lazily on first request and cached.</p>
 *
 * @see <a href="http://mathworld.wolfram.com/LUDecomposition.html">MathWorld</a>
 * @see <a href="http://en.wikipedia.org/wiki/LU_decomposition">Wikipedia</a>
 *
 * <p>TODO: IGNITE-11192, remove after resolve this issue.</p>
 */
public class LUDecomposition implements AutoCloseable {
    /** Default bound to determine effective singularity in LU decomposition. */
    private static final double DEFAULT_TOO_SMALL = 1e-11;

    /** Pivot permutation associated with LU decomposition. */
    // Stored as a Vector of doubles; entries are row indices, read back via (int) casts.
    private final Vector pivot;

    /** Parity of the permutation associated with the LU decomposition. */
    // true for an even number of row swaps; determines the determinant's sign.
    private boolean even;

    /** Singularity indicator. */
    // Set when a pivot falls below the singularity threshold; getters then return null
    // and solve() throws SingularMatrixException.
    private boolean singular;

    /** Cached value of L. */
    private Matrix cachedL;

    /** Cached value of U. */
    private Matrix cachedU;

    /** Cached value of P. */
    private Matrix cachedP;

    /** Original matrix. */
    private Matrix matrix;

    /** Entries of LU decomposition. */
    // Single packed matrix: strict lower triangle holds L (unit diagonal implied),
    // upper triangle including the diagonal holds U.
    private Matrix lu;

    /**
     * Calculates the LU-decomposition of the given matrix. This constructor uses 1e-11 as default value for the
     * singularity threshold.
     *
     * @param matrix Matrix to decompose.
     * @throws CardinalityException if matrix is not square.
     */
    public LUDecomposition(Matrix matrix) {
        this(matrix, DEFAULT_TOO_SMALL);
    }

    /**
     * Calculates the LUP-decomposition of the given matrix.
     *
     * @param matrix Matrix to decompose.
     * @param singularityThreshold threshold (based on partial row norm).
     * @throws CardinalityException if matrix is not square.
     */
    public LUDecomposition(Matrix matrix, double singularityThreshold) {
        assert matrix != null;

        int rows = matrix.rowSize();
        int cols = matrix.columnSize();

        if (rows != cols)
            throw new CardinalityException(rows, cols);

        this.matrix = matrix;

        // Work on a copy so the caller's matrix is left untouched.
        lu = copy(matrix);

        pivot = likeVector(matrix);

        // Start from the identity permutation.
        for (int i = 0; i < pivot.size(); i++)
            pivot.setX(i, i);

        even = true;
        singular = false;

        cachedL = null;
        cachedU = null;
        cachedP = null;

        // Column-by-column Doolittle elimination (Crout-style loop order).
        for (int col = 0; col < cols; col++) {

            //upper
            // Finish the U entries above the diagonal for this column.
            for (int row = 0; row < col; row++) {
                Vector luRow = lu.viewRow(row);

                double sum = luRow.get(col);

                for (int i = 0; i < row; i++)
                    sum -= luRow.getX(i) * lu.getX(i, col);

                luRow.setX(col, sum);
            }

            // permutation row
            int max = col;

            double largest = Double.NEGATIVE_INFINITY;

            // lower
            // Compute candidate pivots on and below the diagonal, tracking the largest.
            for (int row = col; row < rows; row++) {
                Vector luRow = lu.viewRow(row);

                double sum = luRow.getX(col);

                for (int i = 0; i < col; i++)
                    sum -= luRow.getX(i) * lu.getX(i, col);

                luRow.setX(col, sum);

                if (Math.abs(sum) > largest) {
                    largest = Math.abs(sum);
                    max = row;
                }
            }

            // Singularity check
            if (Math.abs(lu.getX(max, col)) < singularityThreshold) {
                singular = true;
                return;
            }

            // Pivot if necessary
            if (max != col) {
                double tmp;

                // Swap the entire rows of the packed LU matrix.
                Vector luMax = lu.viewRow(max);
                Vector luCol = lu.viewRow(col);

                for (int i = 0; i < cols; i++) {
                    tmp = luMax.getX(i);
                    luMax.setX(i, luCol.getX(i));
                    luCol.setX(i, tmp);
                }

                // Record the swap in the permutation vector and flip parity.
                int temp = (int)pivot.getX(max);
                pivot.setX(max, pivot.getX(col));
                pivot.setX(col, temp);

                even = !even;
            }

            // Divide the lower elements by the "winning" diagonal elt.
            final double luDiag = lu.getX(col, col);

            for (int row = col + 1; row < cols; row++) {
                double val = lu.getX(row, col) / luDiag;

                lu.setX(row, col, val);
            }
        }
    }

    /**
     * Destroys decomposition components and other internal components of decomposition.
     */
    @Override public void close() {
        if (cachedL != null)
            cachedL.destroy();

        if (cachedU != null)
            cachedU.destroy();

        if (cachedP != null)
            cachedP.destroy();

        lu.destroy();
    }

    /**
     * Returns the matrix L of the decomposition.
     * <p>L is a lower-triangular matrix</p>
     *
     * @return the L matrix (or null if decomposed matrix is singular).
     */
    public Matrix getL() {
        if ((cachedL == null) && !singular) {
            final int m = pivot.size();

            cachedL = like(matrix);
            cachedL.assign(0.0);

            // Strict lower triangle comes from the packed LU; diagonal is implicitly 1.
            for (int i = 0; i < m; ++i) {
                for (int j = 0; j < i; ++j)
                    cachedL.setX(i, j, lu.getX(i, j));

                cachedL.setX(i, i, 1.0);
            }
        }

        return cachedL;
    }

    /**
     * Returns the matrix U of the decomposition.
     * <p>U is an upper-triangular matrix</p>
     *
     * @return the U matrix (or null if decomposed matrix is singular).
     */
    public Matrix getU() {
        if ((cachedU == null) && !singular) {
            final int m = pivot.size();

            cachedU = like(matrix);
            cachedU.assign(0.0);

            // Upper triangle including the diagonal.
            for (int i = 0; i < m; ++i)
                for (int j = i; j < m; ++j)
                    cachedU.setX(i, j, lu.getX(i, j));
        }

        return cachedU;
    }

    /**
     * Returns the P rows permutation matrix.
     * <p>P is a sparse matrix with exactly one element set to 1.0 in
     * each row and each column, all other elements being set to 0.0.</p>
     * <p>The positions of the 1 elements are given by the {@link #getPivot()
     * pivot permutation vector}.</p>
     *
     * @return the P rows permutation matrix (or null if decomposed matrix is singular).
     * @see #getPivot()
     */
    public Matrix getP() {
        if ((cachedP == null) && !singular) {
            final int m = pivot.size();

            cachedP = like(matrix);
            cachedP.assign(0.0);

            for (int i = 0; i < m; ++i)
                cachedP.setX(i, (int)pivot.get(i), 1.0);
        }

        return cachedP;
    }

    /**
     * Returns the pivot permutation vector.
     *
     * @return the pivot permutation vector (a defensive copy).
     * @see #getP()
     */
    public Vector getPivot() {
        return pivot.copy();
    }

    /**
     * Return the determinant of the matrix.
     *
     * @return determinant of the matrix (0 when the matrix is singular).
     */
    public double determinant() {
        if (singular)
            return 0;

        final int m = pivot.size();

        // det(A) = parity * product of U's diagonal.
        double determinant = even ? 1 : -1;

        for (int i = 0; i < m; i++)
            determinant *= lu.getX(i, i);

        return determinant;
    }

    /**
     * Solves {@code A x = b} using this decomposition.
     *
     * @param b Vector to solve using this decomposition.
     * @return Solution vector.
     * @throws CardinalityException if {@code b} does not match the matrix size.
     * @throws SingularMatrixException if the decomposed matrix is singular.
     */
    public Vector solve(Vector b) {
        final int m = pivot.size();

        if (b.size() != m)
            throw new CardinalityException(b.size(), m);

        if (singular)
            throw new SingularMatrixException();

        final double[] bp = new double[m];

        // Apply permutations to b
        for (int row = 0; row < m; row++)
            bp[row] = b.get((int)pivot.get(row));

        // Solve LY = b
        for (int col = 0; col < m; col++) {
            final double bpCol = bp[col];

            for (int i = col + 1; i < m; i++)
                bp[i] -= bpCol * lu.get(i, col);
        }

        // Solve UX = Y
        for (int col = m - 1; col >= 0; col--) {
            bp[col] /= lu.get(col, col);

            final double bpCol = bp[col];

            for (int i = 0; i < col; i++)
                bp[i] -= bpCol * lu.get(i, col);
        }

        return b.like(m).assign(bp);
    }

    /**
     * Solves {@code A X = B} column-wise using this decomposition.
     *
     * @param b Matrix to solve using this decomposition.
     * @return Solution matrix.
     * @throws CardinalityException if {@code b}'s row count does not match the matrix size.
     * @throws SingularMatrixException if the decomposed matrix is singular.
     */
    public Matrix solve(Matrix b) {
        final int m = pivot.size();

        if (b.rowSize() != m)
            throw new CardinalityException(b.rowSize(), m);

        if (singular)
            throw new SingularMatrixException();

        final int nColB = b.columnSize();

        // Apply permutations to b
        final double[][] bp = new double[m][nColB];

        for (int row = 0; row < m; row++) {
            final double[] bpRow = bp[row];

            final int pRow = (int)pivot.get(row);

            for (int col = 0; col < nColB; col++)
                bpRow[col] = b.get(pRow, col);
        }

        // Solve LY = b
        for (int col = 0; col < m; col++) {
            final double[] bpCol = bp[col];

            for (int i = col + 1; i < m; i++) {
                final double[] bpI = bp[i];

                final double luICol = lu.get(i, col);

                for (int j = 0; j < nColB; j++)
                    bpI[j] -= bpCol[j] * luICol;
            }
        }

        // Solve UX = Y
        for (int col = m - 1; col >= 0; col--) {
            final double[] bpCol = bp[col];

            final double luDiag = lu.getX(col, col);

            for (int j = 0; j < nColB; j++)
                bpCol[j] /= luDiag;

            for (int i = 0; i < col; i++) {
                final double[] bpI = bp[i];

                final double luICol = lu.get(i, col);

                for (int j = 0; j < nColB; j++)
                    bpI[j] -= bpCol[j] * luICol;
            }
        }

        return b.like(b.rowSize(), b.columnSize()).assign(bp);
    }
}
/* * $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//httpclient/src/test/org/apache/commons/httpclient/cookie/TestCookieNetscapeDraft.java,v 1.2 2004/04/24 23:28:04 olegk Exp $ * $Revision: 480424 $ * $Date: 2006-11-29 06:56:49 +0100 (Wed, 29 Nov 2006) $ * ==================================================================== * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.commons.httpclient.cookie; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.commons.httpclient.Cookie; import org.apache.commons.httpclient.Header; import org.apache.commons.httpclient.HttpException; import org.apache.commons.httpclient.NameValuePair; /** * Test cases for Netscape cookie draft * * @author <a href="mailto:oleg@ural.ru">Oleg Kalnichevski</a> * * @version $Revision: 480424 $ */ public class TestCookieNetscapeDraft extends TestCookieBase { // ------------------------------------------------------------ Constructor public TestCookieNetscapeDraft(String name) { super(name); } // ------------------------------------------------------- TestCase Methods public static Test suite() { return new TestSuite(TestCookieNetscapeDraft.class); } public void testParseAttributeInvalidAttrib() throws Exception { CookieSpec cookiespec = new NetscapeDraftSpec(); try { cookiespec.parseAttribute(null, null); fail("IllegalArgumentException must have been thrown"); } catch (IllegalArgumentException expected) { } } public void testParseAttributeInvalidCookie() throws Exception { CookieSpec cookiespec = new NetscapeDraftSpec(); try { cookiespec.parseAttribute(new NameValuePair("name", "value"), null); fail("IllegalArgumentException must have been thrown"); } catch (IllegalArgumentException expected) { } } public void testParseAttributeInvalidCookieExpires() throws Exception { CookieSpec cookiespec = new NetscapeDraftSpec(); Cookie cookie = new Cookie(); try { cookiespec.parseAttribute(new NameValuePair("expires", null), cookie); fail("MalformedCookieException must have been thrown"); } catch (MalformedCookieException expected) { } } public void testParseWithNullHost() throws Exception { Header header = new Header("Set-Cookie", "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure"); CookieSpec cookiespec = new NetscapeDraftSpec(); try { Cookie[] parsed = cookieParse(cookiespec, null, 80, "/", false, header); 
fail("IllegalArgumentException should have been thrown"); } catch (IllegalArgumentException e) { // expected } } public void testParseWithBlankHost() throws Exception { Header header = new Header("Set-Cookie", "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure"); CookieSpec cookiespec = new NetscapeDraftSpec(); try { Cookie[] parsed = cookieParse(cookiespec, " ", 80, "/", false, header); fail("IllegalArgumentException should have been thrown"); } catch (IllegalArgumentException e) { // expected } } public void testParseWithNullPath() throws Exception { Header header = new Header("Set-Cookie", "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure"); CookieSpec cookiespec = new NetscapeDraftSpec(); try { Cookie[] parsed = cookieParse(cookiespec, "127.0.0.1", 80, null, false, header); fail("IllegalArgumentException should have been thrown"); } catch (IllegalArgumentException e) { // expected } } public void testParseWithBlankPath() throws Exception { Header header = new Header("Set-Cookie", "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure"); CookieSpec cookiespec = new NetscapeDraftSpec(); Cookie[] parsed = cookieParse(cookiespec, "127.0.0.1", 80, " ", false, header); assertNotNull(parsed); assertEquals(1, parsed.length); assertEquals("/", parsed[0].getPath()); } public void testParseWithNegativePort() throws Exception { Header header = new Header("Set-Cookie", "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure"); CookieSpec cookiespec = new NetscapeDraftSpec(); try { Cookie[] parsed = cookieParse(cookiespec, "127.0.0.1", -80, null, false, header); fail("IllegalArgumentException should have been thrown"); } catch (IllegalArgumentException e) { // expected } } public void testParseWithInvalidHeader1() throws Exception { CookieSpec cookiespec = new NetscapeDraftSpec(); try { Cookie[] parsed = cookiespec.parse("127.0.0.1", 80, "/foo", false, (String)null); fail("IllegalArgumentException should have been thrown."); } catch 
(IllegalArgumentException e) { // expected } } public void testParseAbsPath() throws Exception { Header header = new Header("Set-Cookie", "name1=value1;Path=/path/"); CookieSpec cookiespec = new NetscapeDraftSpec(); Cookie[] parsed = cookieParse(cookiespec, "host", 80, "/path/", true, header); assertEquals("Found 1 cookies.",1,parsed.length); assertEquals("Name","name1",parsed[0].getName()); assertEquals("Value","value1",parsed[0].getValue()); assertEquals("Domain","host",parsed[0].getDomain()); assertEquals("Path","/path/",parsed[0].getPath()); } public void testParseAbsPath2() throws Exception { Header header = new Header("Set-Cookie", "name1=value1;Path=/"); CookieSpec cookiespec = new NetscapeDraftSpec(); Cookie[] parsed = cookieParse(cookiespec, "host", 80, "/", true, header); assertEquals("Found 1 cookies.",1,parsed.length); assertEquals("Name","name1",parsed[0].getName()); assertEquals("Value","value1",parsed[0].getValue()); assertEquals("Domain","host",parsed[0].getDomain()); assertEquals("Path","/",parsed[0].getPath()); } public void testParseRelativePath() throws Exception { Header header = new Header("Set-Cookie", "name1=value1;Path=whatever"); CookieSpec cookiespec = new NetscapeDraftSpec(); Cookie[] parsed = cookieParse(cookiespec, "host", 80, "whatever", true, header); assertEquals("Found 1 cookies.",1,parsed.length); assertEquals("Name","name1",parsed[0].getName()); assertEquals("Value","value1",parsed[0].getValue()); assertEquals("Domain","host",parsed[0].getDomain()); assertEquals("Path","whatever",parsed[0].getPath()); } public void testParseWithIllegalNetscapeDomain1() throws Exception { Header header = new Header("Set-Cookie","cookie-name=cookie-value; domain=.com"); CookieSpec cookiespec = new NetscapeDraftSpec(); try { Cookie[] parsed = cookieParse(cookiespec, "a.com", 80, "/", false, header); fail("HttpException exception should have been thrown"); } catch (HttpException e) { // expected } } public void testParseWithWrongNetscapeDomain2() 
throws Exception { Header header = new Header("Set-Cookie","cookie-name=cookie-value; domain=.y.z"); CookieSpec cookiespec = new NetscapeDraftSpec(); try { Cookie[] parsed = cookieParse(cookiespec, "x.y.z", 80, "/", false, header); fail("HttpException exception should have been thrown"); } catch (HttpException e) { // expected } } /** * Tests Netscape specific cookie formatting. */ public void testNetscapeCookieFormatting() throws Exception { Header header = new Header( "Set-Cookie", "name=value; path=/; domain=.mydomain.com"); CookieSpec cookiespec = new NetscapeDraftSpec(); Cookie[] cookies = cookiespec.parse("myhost.mydomain.com", 80, "/", false, header ); cookiespec.validate("myhost.mydomain.com", 80, "/", false, cookies[0]); String s = cookiespec.formatCookie(cookies[0]); assertEquals("name=value", s); } /** * Tests Netscape specific expire attribute parsing. */ public void testNetscapeCookieExpireAttribute() throws Exception { CookieSpec cookiespec = new NetscapeDraftSpec(); Header header = new Header("Set-Cookie", "name=value; path=/; domain=.mydomain.com; expires=Thu, 01-Jan-2070 00:00:10 GMT; comment=no_comment"); Cookie[] cookies = cookiespec.parse("myhost.mydomain.com", 80, "/", false, header ); cookiespec.validate("myhost.mydomain.com", 80, "/", false, cookies[0]); header = new Header("Set-Cookie", "name=value; path=/; domain=.mydomain.com; expires=Thu 01-Jan-2070 00:00:10 GMT; comment=no_comment"); try { cookies = cookiespec.parse("myhost.mydomain.com", 80, "/", false, header ); cookiespec.validate("myhost.mydomain.com", 80, "/", false, cookies[0]); fail("MalformedCookieException must have been thrown"); } catch (MalformedCookieException expected) { } } /** * Tests Netscape specific expire attribute without a time zone. 
*/ public void testNetscapeCookieExpireAttributeNoTimeZone() throws Exception { CookieSpec cookiespec = new NetscapeDraftSpec(); Header header = new Header("Set-Cookie", "name=value; expires=Thu, 01-Jan-2006 00:00:00 "); try { cookiespec.parse("myhost.mydomain.com", 80, "/", false, header ); fail("MalformedCookieException should have been thrown"); } catch (MalformedCookieException ex) { // expected } } /** * Tests if cookie values with embedded comma are handled correctly. */ public void testCookieWithComma() throws Exception { Header header = new Header("Set-Cookie", "a=b,c"); CookieSpec cookiespec = new NetscapeDraftSpec(); Cookie[] cookies = cookiespec.parse("localhost", 80, "/", false, header); assertEquals("number of cookies", 1, cookies.length); assertEquals("a", cookies[0].getName()); assertEquals("b,c", cookies[0].getValue()); } }
/*
 * Copyright 2018-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.intellij.ideabuck.autodeps;

import com.facebook.buck.intellij.ideabuck.api.BuckTarget;
import com.facebook.buck.intellij.ideabuck.api.BuckTargetLocator;
import com.facebook.buck.intellij.ideabuck.api.BuckTargetPattern;
import com.facebook.buck.intellij.ideabuck.build.BuckCommand;
import com.facebook.buck.intellij.ideabuck.build.BuckJsonCommandHandler;
import com.facebook.buck.intellij.ideabuck.build.BuckJsonCommandHandler.Callback;
import com.facebook.buck.intellij.ideabuck.notification.BuckNotification;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;
import com.intellij.codeInsight.daemon.impl.actions.AddImportAction;
import com.intellij.codeInsight.intention.impl.BaseIntentionAction;
import com.intellij.notification.Notification;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootModificationUtil;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.psi.NavigatablePsiElement;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiJavaCodeReferenceElement;
import com.intellij.util.IncorrectOperationException;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.swing.event.HyperlinkEvent;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.Nls.Capitalization;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * An intention that will attempt to add a dependency edge to both the Buck graph and the IntelliJ
 * module graph.
 */
public class BuckAddDependencyIntention extends BaseIntentionAction {
  private static final Logger LOGGER = Logger.getInstance(BuckAddDependencyIntention.class);

  // Gson is thread-safe and moderately expensive to construct; share one instance
  // instead of creating a new one per deserialization call.
  private static final Gson GSON = new Gson();

  /**
   * Creates an {@link com.intellij.codeInsight.intention.IntentionAction} that will create an
   * dependency edge in both the Buck target graph and IntelliJ module graph from the nodes for the
   * given reference element to those of the given psiClass.
   *
   * <p>Note that this intention can fail to be created if either side of the edge cannot be
   * resolved to a buck file in a buck cell, in which case this method returns null. Also, invoking
   * this intention may fail to create edges in either the Buck target graph or the IntelliJ module
   * graph (or both).
   */
  @Nullable
  public static BuckAddDependencyIntention create(
      PsiJavaCodeReferenceElement referenceElement, PsiClass psiClass) {
    VirtualFile editSourceFile = referenceElement.getContainingFile().getVirtualFile();
    if (editSourceFile == null) {
      return null;
    }
    Project project = referenceElement.getProject();
    BuckTargetLocator buckTargetLocator = BuckTargetLocator.getInstance(project);
    VirtualFile editBuildFile =
        buckTargetLocator.findBuckFileForVirtualFile(editSourceFile).orElse(null);
    if (editBuildFile == null) {
      return null;
    }
    VirtualFile importSourceFile = psiClass.getContainingFile().getVirtualFile();
    if (importSourceFile == null) {
      return null;
    }
    VirtualFile importBuildFile =
        buckTargetLocator.findBuckFileForVirtualFile(importSourceFile).orElse(null);
    if (importBuildFile == null) {
      return null;
    }
    // Same build file on both sides => the dependency already exists implicitly
    // (both classes live in the same package); nothing for this intention to do.
    if (importBuildFile.equals(editBuildFile)) {
      return null;
    }
    ProjectFileIndex projectFileIndex = ProjectFileIndex.getInstance(project);
    Module editModule = projectFileIndex.getModuleForFile(editSourceFile);
    if (editModule == null) {
      return null;
    }
    Module importModule = projectFileIndex.getModuleForFile(importSourceFile);
    if (importModule == null) {
      return null;
    }
    BuckTarget editSourceTarget =
        buckTargetLocator
            .findTargetPatternForVirtualFile(editSourceFile)
            .flatMap(BuckTargetPattern::asBuckTarget)
            .orElse(null);
    if (editSourceTarget == null) {
      return null;
    }
    BuckTarget importSourceTarget =
        buckTargetLocator
            .findTargetPatternForVirtualFile(importSourceFile)
            .flatMap(BuckTargetPattern::asBuckTarget)
            .orElse(null);
    if (importSourceTarget == null) {
      return null;
    }
    return new BuckAddDependencyIntention(
        project,
        referenceElement,
        editBuildFile,
        editSourceFile,
        editSourceTarget,
        editModule,
        psiClass,
        importBuildFile,
        importSourceFile,
        importSourceTarget,
        importModule);
  }

  private final Project project;

  // Fields pertaining to the element in the file being edited
  private final PsiJavaCodeReferenceElement referenceElement;
  private final VirtualFile editBuildFile;
  private final VirtualFile editSourceFile;
  private final BuckTarget editSourceTarget;
  private BuckTarget editTarget; // resolved lazily by updateDependencies()
  private final Module editModule;

  // Fields pertaining to the dependency that should be resolved/imported
  private final PsiClass psiClass;
  private final VirtualFile importBuildFile;
  private final VirtualFile importSourceFile;
  private final BuckTarget importSourceTarget;
  private BuckTarget importTarget; // resolved lazily by updateDependencies()
  private final Module importModule;

  BuckAddDependencyIntention(
      Project project,
      PsiJavaCodeReferenceElement referenceElement,
      VirtualFile editBuildFile,
      VirtualFile editSourceFile,
      BuckTarget editSourceTarget,
      Module editModule,
      PsiClass psiClass,
      VirtualFile importBuildFile,
      VirtualFile importSourceFile,
      BuckTarget importSourceTarget,
      Module importModule) {
    this.project = project;
    this.referenceElement = referenceElement;
    this.editBuildFile = editBuildFile;
    this.editSourceFile = editSourceFile;
    this.editSourceTarget = editSourceTarget;
    this.editModule = editModule;
    this.psiClass = psiClass;
    this.importBuildFile = importBuildFile;
    this.importSourceFile = importSourceFile;
    this.importSourceTarget = importSourceTarget;
    this.importModule = importModule;
    String message = "Add BUCK dependency on owner(" + importSourceTarget + ")";
    setText(message);
  }

  @Nls(capitalization = Capitalization.Sentence)
  @NotNull
  @Override
  public String getFamilyName() {
    return this.getClass().getSimpleName();
  }

  @Override
  public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile psiFile) {
    return true;
  }

  @Override
  public void invoke(@NotNull Project project, Editor editor, PsiFile psiFile)
      throws IncorrectOperationException {
    String msg = "Invoked for project " + project.getName() + " and file " + psiFile.getName();
    LOGGER.info(msg);
    queryBuckForTargets(editor);
  }

  /** Helper class to handle deserialization of buck query. */
  static class TargetMetadata {
    public BuckTarget target;
    public @Nullable List<BuckTarget> deps;
    public @Nullable List<BuckTargetPattern> visibility; // null means PUBLIC
    public @Nullable List<String> srcs;
    public @Nullable List<String> resources;

    /** Builds a {@link TargetMetadata} from one entry of buck query's JSON output. */
    static TargetMetadata from(
        BuckTargetLocator buckTargetLocator, BuckTarget target, JsonObject payload) {
      TargetMetadata targetMetadata = new TargetMetadata();
      targetMetadata.target = target;
      targetMetadata.srcs = stringListOrNull(payload.get("srcs"));
      targetMetadata.resources = stringListOrNull(payload.get("resources"));
      // Deps are a list of BuckTargets
      targetMetadata.deps =
          Optional.ofNullable(stringListOrNull(payload.get("deps")))
              .map(
                  deps ->
                      deps.stream()
                          .map(
                              s -> BuckTarget.parse(s).map(buckTargetLocator::resolve).orElse(null))
                          .collect(Collectors.toList()))
              .orElse(null);
      // Visibility falls in one of three cases:
      // (1) if unspecified => means visibility is limited to the current package
      // (2) contains "PUBLIC" => available everywhere
      // (3) otherwise is a list of buck target patterns where it is visible
      List<String> optionalVisibility = stringListOrNull(payload.get("visibility"));
      if (optionalVisibility == null) {
        targetMetadata.visibility =
            Collections.singletonList(target.asPattern().asPackageMatchingPattern());
      } else if (optionalVisibility.contains("PUBLIC")) {
        targetMetadata.visibility = null;
      } else {
        // NOTE(review): unparseable patterns become null elements here, which would NPE
        // in isVisibleTo(); preserved as-is — confirm whether buck can emit such patterns.
        targetMetadata.visibility =
            optionalVisibility
                .stream()
                .map(p -> BuckTargetPattern.parse(p).map(buckTargetLocator::resolve).orElse(null))
                .collect(Collectors.toList());
      }
      return targetMetadata;
    }

    /** Deserializes a JSON array of strings, or returns null when the attribute is absent. */
    static @Nullable List<String> stringListOrNull(@Nullable JsonElement jsonElement) {
      if (jsonElement == null) {
        return null;
      }
      return GSON.fromJson(jsonElement, new TypeToken<List<String>>() {}.getType());
    }

    /** Returns true if the given target is allowed to depend on this one. */
    boolean isVisibleTo(BuckTarget target) {
      if (visibility == null) {
        return true; // PUBLIC
      }
      return visibility.stream().anyMatch(pattern -> pattern.matches(target));
    }

    /** Returns true if this target already declares a dependency on the given target. */
    boolean hasDependencyOn(BuckTarget target) {
      // deps is @Nullable (attribute may be absent from buck's output); treat as "no deps"
      // instead of throwing NPE.
      return deps != null && deps.stream().anyMatch(dep -> dep.equals(target));
    }

    /** Returns true if the given source target is listed in this target's srcs or resources. */
    boolean contains(BuckTarget targetFile) {
      if (!target.asPattern().asPackageMatchingPattern().matches(targetFile)) {
        return false;
      }
      String relativeToBuildFile = targetFile.getRuleName();
      // srcs/resources are @Nullable (attributes may be absent); guard before dereferencing.
      return (srcs != null && srcs.contains(relativeToBuildFile))
          || (resources != null && resources.contains(relativeToBuildFile));
    }
  }

  /** Queries buck for targets that own the editSourceFile and the importSourceFile. */
  private void queryBuckForTargets(Editor editor) {
    BuckTargetLocator buckTargetLocator = BuckTargetLocator.getInstance(project);
    String editPath = editSourceFile.getPath();
    String importPath = importSourceFile.getPath();
    BuckJsonCommandHandler<List<TargetMetadata>> handler =
        new BuckJsonCommandHandler<>(
            project,
            BuckCommand.QUERY,
            new Callback<List<TargetMetadata>>() {
              @Override
              public List<TargetMetadata> deserialize(JsonElement jsonElement) throws IOException {
                // buck query output is a map of target name -> attribute object
                Type type = new TypeToken<Map<String, JsonObject>>() {}.getType();
                Map<String, JsonObject> raw = GSON.fromJson(jsonElement, type);
                List<TargetMetadata> results = new ArrayList<>();
                for (Entry<String, JsonObject> entry : raw.entrySet()) {
                  BuckTarget.parse(entry.getKey())
                      .map(buckTargetLocator::resolve)
                      .map(
                          target ->
                              TargetMetadata.from(buckTargetLocator, target, entry.getValue()))
                      .ifPresent(results::add);
                }
                return results;
              }

              @Override
              public void onSuccess(List<TargetMetadata> results, String stderr) {
                // Partition the owners by which side of the new edge they own.
                List<TargetMetadata> editTargets = new ArrayList<>();
                List<TargetMetadata> importTargets = new ArrayList<>();
                for (TargetMetadata targetMetadata : results) {
                  if (targetMetadata.contains(editSourceTarget)) {
                    editTargets.add(targetMetadata);
                  }
                  if (targetMetadata.contains(importSourceTarget)) {
                    importTargets.add(targetMetadata);
                  }
                }
                updateDependencies(editor, editTargets, importTargets);
              }

              @Override
              public void onFailure(
                  String stdout,
                  String stderr,
                  @Nullable Integer exitCode,
                  @Nullable Throwable throwable) {
                BuckNotification.getInstance(project)
                    .showWarningBalloon(
                        "Could not determine owners for "
                            + editSourceFile
                            + " and/or "
                            + importSourceFile);
              }
            });
    handler
        .command()
        .addParameters(
            "owner(%s)",
            editPath,
            importPath,
            "--output-attributes=deps|srcs|visibility|resources");
    handler.runInCurrentThreadPostEnd(() -> {});
  }

  /**
   * Implementation of {@link
   * com.intellij.notification.NotificationListener#hyperlinkUpdate(Notification, HyperlinkEvent)}.
   */
  private void hyperlinkActivated(
      @NotNull Notification notification, @NotNull HyperlinkEvent event) {
    String href = event.getDescription();
    switch (href) {
      case "editTarget":
        // Navigate to the target element if possible; otherwise fall through and
        // open the build file instead.
        if (BuckTargetLocator.getInstance(project)
            .findElementForTarget(editTarget)
            .filter(target -> target instanceof NavigatablePsiElement)
            .map(target -> (NavigatablePsiElement) target)
            .filter(Navigatable::canNavigate)
            .map(
                e -> {
                  e.navigate(true);
                  return true;
                })
            .orElse(false)) {
          break;
        }
        // fallthrough
      case "editBuildFile":
        FileEditorManager.getInstance(project).openFile(editBuildFile, true);
        break;
      case "editSourceFile":
        FileEditorManager.getInstance(project).openFile(editSourceFile, true);
        break;
      case "importTarget":
        if (BuckTargetLocator.getInstance(project)
            .findElementForTarget(importTarget)
            .filter(target -> target instanceof NavigatablePsiElement)
            .map(target -> (NavigatablePsiElement) target)
            .filter(Navigatable::canNavigate)
            .map(
                e -> {
                  e.navigate(true);
                  return true;
                })
            .orElse(false)) {
          break;
        }
        // fallthrough
      case "importBuildFile":
        FileEditorManager.getInstance(project).openFile(importBuildFile, true);
        break;
      case "importSourceFile":
        FileEditorManager.getInstance(project).openFile(importSourceFile, true);
        break;
    }
  }

  /**
   * Given the owners of each side of the edge, verifies visibility, modifies the Buck build file
   * (if needed), then adds the IntelliJ module dependency and the import statement.
   */
  private void updateDependencies(
      Editor editor, List<TargetMetadata> editTargets, List<TargetMetadata> importTargets) {
    if (editTargets.isEmpty()) {
      String message =
          "<html><b>Add dependency failed</b>: Couldn't determine a Buck owner for <a href='editSourceFile'>"
              + editSourceTarget
              + "</a> in <a href='editBuildFile'>"
              + editBuildFile.getPath()
              + "</a></html>";
      BuckNotification.getInstance(project).showErrorBalloon(message, this::hyperlinkActivated);
      return;
    }
    if (importTargets.isEmpty()) {
      String message =
          "<html><b>Add dependency failed</b>: Couldn't determine a Buck owner for <a href='importSourceFile'>"
              + importSourceTarget
              + "</a> in <a href='importBuildFile'>"
              + importBuildFile.getPath()
              + "</a></html>";
      BuckNotification.getInstance(project).showErrorBalloon(message, this::hyperlinkActivated);
      return;
    }
    // If a file has multiple owners, arbitrarily use the first for each side.
    TargetMetadata editTargetMetadata = editTargets.get(0);
    TargetMetadata importTargetMetadata = importTargets.get(0);
    editTarget = editTargetMetadata.target;
    importTarget = importTargetMetadata.target;
    if (!importTargetMetadata.isVisibleTo(editTarget)) {
      String message =
          "<html><b>Add dependency failed</b>: The target <a href='importTarget'>"
              + importTarget
              + "</a> is not visible to <a href='editTarget'>"
              + editTarget
              + "</a></html>";
      BuckNotification.getInstance(project).showErrorBalloon(message, this::hyperlinkActivated);
      return;
    }
    if (!editTargetMetadata.hasDependencyOn(importTarget)) {
      if (!BuckDeps.modifyTargetToAddDependency(
          editBuildFile, editTarget.toString(), importTarget.toString())) {
        String message =
            "<html><b>Add dependency failed</b>: Could not modify build file for <a href='editTarget'>"
                + editTarget
                + "</a> to add dependency on <a href='importTarget'>"
                + importTarget
                + "</a></html>";
        BuckNotification.getInstance(project).showErrorBalloon(message, this::hyperlinkActivated);
        return;
      }
    } else {
      String message =
          "<html>No need to modify build file <a href='editBuildFile'>"
              + editBuildFile
              + "</a>, already has dependency from <a href='editTarget'>"
              + editTarget
              + "</a> to <a href='importTarget'>"
              + importTarget
              + "</a></html>";
      BuckNotification.getInstance(project).showInfoBalloon(message, this::hyperlinkActivated);
    }
    ModuleRootModificationUtil.updateModel(
        editModule,
        (modifiableRootModel -> {
          if (modifiableRootModel.findModuleOrderEntry(importModule) != null) {
            LOGGER.info(
                "No need to modify module "
                    + editModule.getName()
                    + ", already has dependency on "
                    + importModule.getName());
          } else {
            modifiableRootModel.addModuleOrderEntry(importModule);
            LOGGER.info(
                "Successfully added module dependency from "
                    + editModule.getName()
                    + " on "
                    + importModule.getName());
          }
          // Finally, add the actual import statement at the reference site.
          new AddImportAction(project, referenceElement, editor, psiClass).execute();
        }));
  }
}
/* Copyright (c) 2001-2014, The HSQL Development Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the HSQL Development Group nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG, * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hsqldb; import org.hsqldb.HsqlNameManager.HsqlName; import org.hsqldb.error.Error; import org.hsqldb.error.ErrorCode; import org.hsqldb.index.Index; import org.hsqldb.lib.HashMappedList; import org.hsqldb.lib.Iterator; import org.hsqldb.lib.OrderedHashSet; import org.hsqldb.lib.java.JavaSystem; import org.hsqldb.persist.DataFileCache; import org.hsqldb.persist.DataSpaceManager; import org.hsqldb.persist.HsqlDatabaseProperties; import org.hsqldb.persist.HsqlProperties; import org.hsqldb.persist.PersistentStore; import org.hsqldb.persist.TableSpaceManager; import org.hsqldb.result.Result; import org.hsqldb.result.ResultMetaData; import org.hsqldb.rights.User; import org.hsqldb.scriptio.ScriptWriterText; /** * Implementation of Statement for SQL commands.<p> * * @author Fred Toussi (fredt@users dot sourceforge.net) * @version 2.3.2 * @since 1.9.0 */ public class StatementCommand extends Statement { Object[] parameters; StatementCommand(int type, Object[] args) { this(type, args, null, null); } StatementCommand(int type, Object[] args, HsqlName[] readNames, HsqlName[] writeNames) { super(type); this.isTransactionStatement = true; this.parameters = args; if (readNames != null) { this.readTableNames = readNames; } if (writeNames != null) { this.writeTableNames = writeNames; } switch (type) { case StatementTypes.TRUNCATE : group = StatementTypes.X_HSQLDB_DATABASE_OPERATION; break; case StatementTypes.EXPLAIN_PLAN : group = StatementTypes.X_SQL_DIAGNOSTICS; statementReturnType = StatementTypes.RETURN_RESULT; isTransactionStatement = false; isLogged = false; break; case StatementTypes.DATABASE_CHECKPOINT : group = StatementTypes.X_HSQLDB_DATABASE_OPERATION; isLogged = false; break; case StatementTypes.DATABASE_SCRIPT : { String name = (String) parameters[0]; if (name == null) { statementReturnType = StatementTypes.RETURN_RESULT; } group = StatementTypes.X_HSQLDB_DATABASE_OPERATION; isLogged = false; break; } case StatementTypes.DATABASE_BACKUP : group = 
StatementTypes.X_HSQLDB_DATABASE_OPERATION; isLogged = false; break; case StatementTypes.SET_DATABASE_UNIQUE_NAME : case StatementTypes.SET_DATABASE_FILES_WRITE_DELAY : case StatementTypes.SET_DATABASE_FILES_TEMP_PATH : case StatementTypes.SET_DATABASE_FILES_EVENT_LOG : isTransactionStatement = false; group = StatementTypes.X_HSQLDB_SETTING; break; // case StatementTypes.SET_DATABASE_DEFAULT_INITIAL_SCHEMA : case StatementTypes.SET_DATABASE_DEFAULT_TABLE_TYPE : case StatementTypes.SET_DATABASE_FILES_CACHE_ROWS : case StatementTypes.SET_DATABASE_FILES_CACHE_SIZE : case StatementTypes.SET_DATABASE_FILES_CHECK : case StatementTypes.SET_DATABASE_FILES_SCALE : case StatementTypes.SET_DATABASE_FILES_SPACE : case StatementTypes.SET_DATABASE_FILES_DEFRAG : case StatementTypes.SET_DATABASE_FILES_LOBS_SCALE : case StatementTypes.SET_DATABASE_FILES_LOBS_COMPRESSED : case StatementTypes.SET_DATABASE_FILES_LOG : case StatementTypes.SET_DATABASE_FILES_LOG_SIZE : case StatementTypes.SET_DATABASE_FILES_NIO : case StatementTypes.SET_DATABASE_FILES_SCRIPT_FORMAT : case StatementTypes.SET_DATABASE_AUTHENTICATION : case StatementTypes.SET_DATABASE_PASSWORD_CHECK : case StatementTypes.SET_DATABASE_PASSWORD_DIGEST : case StatementTypes.SET_DATABASE_PROPERTY : case StatementTypes.SET_DATABASE_RESULT_MEMORY_ROWS : case StatementTypes.SET_DATABASE_SQL_REFERENTIAL_INTEGRITY : case StatementTypes.SET_DATABASE_SQL : case StatementTypes.SET_DATABASE_TRANSACTION_CONTROL : case StatementTypes.SET_DATABASE_DEFAULT_ISOLATION_LEVEL : case StatementTypes.SET_DATABASE_TRANSACTION_CONFLICT : case StatementTypes.SET_DATABASE_GC : // case StatementTypes.SET_DATABASE_SQL_COLLATION : case StatementTypes.SET_DATABASE_FILES_BACKUP_INCREMENT : case StatementTypes.SET_DATABASE_TEXT_SOURCE : group = StatementTypes.X_HSQLDB_SETTING; break; case StatementTypes.SET_TABLE_CLUSTERED : case StatementTypes.SET_TABLE_NEW_TABLESPACE : case StatementTypes.SET_TABLE_SET_TABLESPACE : group = 
StatementTypes.X_HSQLDB_SCHEMA_MANIPULATION; break; case StatementTypes.SET_TABLE_SOURCE_HEADER : isLogged = false; // fall through case StatementTypes.SET_TABLE_SOURCE : group = StatementTypes.X_HSQLDB_SCHEMA_MANIPULATION; break; case StatementTypes.SET_TABLE_READONLY : group = StatementTypes.X_HSQLDB_SCHEMA_MANIPULATION; break; case StatementTypes.DATABASE_SHUTDOWN : group = StatementTypes.X_HSQLDB_DATABASE_OPERATION; isTransactionStatement = false; isLogged = false; break; case StatementTypes.SET_TABLE_TYPE : group = StatementTypes.X_HSQLDB_SCHEMA_MANIPULATION; break; case StatementTypes.SET_TABLE_INDEX : group = StatementTypes.X_HSQLDB_SETTING; isTransactionStatement = false; isLogged = false; break; case StatementTypes.SET_USER_LOCAL : case StatementTypes.SET_USER_INITIAL_SCHEMA : case StatementTypes.SET_USER_PASSWORD : group = StatementTypes.X_HSQLDB_SETTING; isTransactionStatement = false; break; case StatementTypes.ALTER_SESSION : group = StatementTypes.X_HSQLDB_SESSION; isTransactionStatement = false; isLogged = false; break; default : throw Error.runtimeError(ErrorCode.U_S0500, "StatementCommand"); } } public Result execute(Session session) { Result result; try { result = getResult(session); } catch (Throwable t) { result = Result.newErrorResult(t, null); } if (result.isError()) { result.getException().setStatementType(group, type); return result; } try { if (isLogged) { session.database.logger.writeOtherStatement(session, sql); } } catch (Throwable e) { return Result.newErrorResult(e, sql); } return result; } Result getResult(Session session) { if (this.isExplain) { return Result.newSingleColumnStringResult("OPERATION", describe(session)); } switch (type) { case StatementTypes.TRUNCATE : { return getTruncateResult(session); } case StatementTypes.EXPLAIN_PLAN : { Statement statement = (Statement) parameters[0]; return Result.newSingleColumnStringResult("OPERATION", statement.describe(session)); } case StatementTypes.DATABASE_BACKUP : { String path = 
(String) parameters[0]; boolean blocking = ((Boolean) parameters[1]).booleanValue(); boolean script = ((Boolean) parameters[2]).booleanValue(); boolean compressed = ((Boolean) parameters[3]).booleanValue(); boolean files = ((Boolean) parameters[4]).booleanValue(); try { session.checkAdmin(); if (!session.database.getType().equals( DatabaseURL.S_FILE)) { throw Error.error(ErrorCode.DATABASE_IS_MEMORY_ONLY); } if (session.database.isFilesReadOnly()) { throw Error.error(ErrorCode.DATABASE_IS_READONLY); } if (session.database.logger.isStoredFileAccess()) { throw Error.error(ErrorCode.ACCESS_IS_DENIED); } session.database.logger.backup(path, script, blocking, compressed, files); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.DATABASE_CHECKPOINT : { boolean defrag = ((Boolean) parameters[0]).booleanValue(); session.database.lobManager.lock(); try { session.checkAdmin(); session.checkDDLWrite(); session.database.logger.checkpoint(defrag); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } finally { session.database.lobManager.unlock(); } } case StatementTypes.SET_DATABASE_FILES_BACKUP_INCREMENT : { try { boolean mode = ((Boolean) parameters[0]).booleanValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.logger.setIncrementBackup(mode); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_CACHE_ROWS : { try { int value = ((Integer) parameters[0]).intValue(); boolean check = parameters[1] == null; session.checkAdmin(); session.checkDDLWrite(); if (check && !session.database.getProperties() .validateProperty(HsqlDatabaseProperties .hsqldb_cache_rows, value)) { throw Error.error(ErrorCode.X_42556); } session.database.logger.setCacheMaxRows(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } 
case StatementTypes.SET_DATABASE_FILES_CACHE_SIZE : { try { int value = ((Integer) parameters[0]).intValue(); boolean check = parameters[1] == null; session.checkAdmin(); session.checkDDLWrite(); if (check && !session.database.getProperties() .validateProperty(HsqlDatabaseProperties .hsqldb_cache_size, value)) { throw Error.error(ErrorCode.X_42556); } session.database.logger.setCacheSize(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_CHECK : { try { int value = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.logger.setFilesCheck(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_LOBS_SCALE : { try { int value = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.checkDDLWrite(); if (session.isProcessingScript()) { session.database.logger.setLobFileScaleNoCheck(value); } else { session.database.logger.setLobFileScale(value); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_LOBS_COMPRESSED : { try { boolean mode = ((Boolean) parameters[0]).booleanValue(); session.checkAdmin(); session.checkDDLWrite(); if (session.isProcessingScript()) { session.database.logger.setLobFileCompressedNoCheck( mode); } else { session.database.logger.setLobFileCompressed(mode); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_SCALE : { try { int value = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.checkDDLWrite(); if (session.isProcessingScript()) { session.database.logger.setDataFileScaleNoCheck(value); } else { session.database.logger.setDataFileScale(value); } return Result.updateZeroResult; } catch (HsqlException 
e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_SPACE : { try { session.checkAdmin(); session.checkDDLWrite(); if (session.database.getType().equals(DatabaseURL.S_RES)) { return Result.updateZeroResult; } if (session.database.isFilesReadOnly()) { return Result.updateZeroResult; } if (parameters[0] instanceof Boolean) { boolean value = ((Boolean) parameters[0]).booleanValue(); session.database.logger.setDataFileSpaces(value); } else { int value = ((Integer) parameters[0]).intValue(); session.database.logger.setDataFileSpaces(value); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_DEFRAG : { try { int value = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.checkDDLWrite(); if (!session.database.getProperties().validateProperty( HsqlDatabaseProperties.hsqldb_defrag_limit, value)) { throw Error.error(ErrorCode.X_42556); } session.database.logger.setDefagLimit(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_EVENT_LOG : { try { int value = ((Integer) parameters[0]).intValue(); boolean isSql = ((Boolean) parameters[1]).booleanValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.logger.setEventLogLevel(value, isSql); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_NIO : { try { session.checkAdmin(); session.checkDDLWrite(); Object v = parameters[0]; if (v instanceof Boolean) { boolean value = ((Boolean) parameters[0]).booleanValue(); session.database.logger.setNioDataFile(value); } else { int value = ((Integer) parameters[0]).intValue(); session.database.logger.setNioMaxSize(value); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case 
StatementTypes.SET_DATABASE_FILES_LOG : { try { boolean value = ((Boolean) parameters[0]).booleanValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.logger.setLogData(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_LOG_SIZE : { try { int value = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.logger.setLogSize(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_TEMP_PATH : { try { String value = (String) parameters[0]; session.checkAdmin(); session.checkDDLWrite(); // no action return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_SCRIPT_FORMAT : { try { int value = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.logger.setScriptType(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_FILES_WRITE_DELAY : { try { int value = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.logger.setWriteDelay(value); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_AUTHENTICATION : { try { Routine routine = (Routine) parameters[0]; session.checkAdmin(); session.checkDDLWrite(); session.database.userManager.setExtAuthenticationFunction( routine); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_PASSWORD_CHECK : { try { Routine routine = (Routine) parameters[0]; session.checkAdmin(); session.checkDDLWrite(); session.database.userManager.setPasswordCheckFunction( 
routine); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_PASSWORD_DIGEST : { try { String algo = (String) parameters[0]; session.checkAdmin(); session.checkDDLWrite(); if (!session.isProcessingScript()) { return Result.updateZeroResult; } session.database.granteeManager.setDigestAlgo(algo); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_SQL_COLLATION : { try { String name = (String) parameters[0]; boolean padSpaces = ((Boolean) parameters[1]).booleanValue(); /** @todo 1.9.0 - ensure no data in character columns */ session.checkAdmin(); session.checkDDLWrite(); session.database.collation.setCollation(name, padSpaces); session.database.schemaManager.setSchemaChangeTimestamp(); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_SQL_REFERENTIAL_INTEGRITY : { boolean mode = ((Boolean) parameters[0]).booleanValue(); session.checkAdmin(); session.checkDDLWrite(); session.database.setReferentialIntegrity(mode); return Result.updateZeroResult; } case StatementTypes.SET_DATABASE_SQL : { String property = (String) parameters[0]; boolean mode = ((Boolean) parameters[1]).booleanValue(); int value = ((Number) parameters[2]).intValue(); session.checkAdmin(); session.checkDDLWrite(); if (property == HsqlDatabaseProperties.sql_enforce_names) { session.database.setStrictNames(mode); } else if (property == HsqlDatabaseProperties.sql_regular_names) { session.database.setRegularNames(mode); } else if (property == HsqlDatabaseProperties.sql_enforce_size) { session.database.setStrictColumnSize(mode); } else if (property == HsqlDatabaseProperties.sql_enforce_types) { session.database.setStrictTypes(mode); } else if (property == HsqlDatabaseProperties.sql_enforce_refs) { session.database.setStrictReferences(mode); } else if (property 
== HsqlDatabaseProperties.sql_enforce_tdcd) { session.database.setStrictTDCD(mode); } else if (property == HsqlDatabaseProperties.sql_enforce_tdcu) { session.database.setStrictTDCU(mode); } else if (property == HsqlDatabaseProperties .jdbc_translate_tti_types) { session.database.setTranslateTTI(mode); } else if (property == HsqlDatabaseProperties.sql_concat_nulls) { session.database.setConcatNulls(mode); } else if (property == HsqlDatabaseProperties.sql_nulls_first) { session.database.setNullsFirst(mode); } else if (property == HsqlDatabaseProperties.sql_nulls_order) { session.database.setNullsOrder(mode); } else if (property == HsqlDatabaseProperties.sql_unique_nulls) { session.database.setUniqueNulls(mode); } else if (property == HsqlDatabaseProperties.sql_convert_trunc) { session.database.setConvertTrunc(mode); } else if (property == HsqlDatabaseProperties.sql_avg_scale) { session.database.setAvgScale(value); } else if (property == HsqlDatabaseProperties.sql_double_nan) { session.database.setDoubleNaN(mode); } else if (property == HsqlDatabaseProperties.sql_longvar_is_lob) { session.database.setLongVarIsLob(mode); } else if (property == HsqlDatabaseProperties.sql_ignore_case) { session.database.setIgnoreCase(mode); session.setIgnoreCase(mode); } else if (property == HsqlDatabaseProperties.sql_syntax_db2) { session.database.setSyntaxDb2(mode); } else if (property == HsqlDatabaseProperties.sql_syntax_mss) { session.database.setSyntaxMss(mode); } else if (property == HsqlDatabaseProperties.sql_syntax_mys) { session.database.setSyntaxMys(mode); } else if (property == HsqlDatabaseProperties.sql_syntax_ora) { session.database.setSyntaxOra(mode); } else if (property == HsqlDatabaseProperties.sql_syntax_pgs) { session.database.setSyntaxPgs(mode); } return Result.updateZeroResult; } case StatementTypes.SET_DATABASE_DEFAULT_INITIAL_SCHEMA : { HsqlName schema = (HsqlName) parameters[0]; session.checkAdmin(); session.checkDDLWrite(); // 
session.database.schemaManager.setDefaultSchemaHsqlName( schema); session.database.schemaManager.setSchemaChangeTimestamp(); // return Result.updateZeroResult; } case StatementTypes.SET_DATABASE_DEFAULT_TABLE_TYPE : { Integer type = (Integer) parameters[0]; session.checkAdmin(); session.checkDDLWrite(); // session.database.schemaManager.setDefaultTableType( type.intValue()); // return Result.updateZeroResult; } case StatementTypes.SET_DATABASE_TRANSACTION_CONTROL : { try { int mode = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.database.txManager.setTransactionControl(session, mode); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_DEFAULT_ISOLATION_LEVEL : { try { int mode = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.database.defaultIsolationLevel = mode; return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_TRANSACTION_CONFLICT : { try { boolean mode = ((Boolean) parameters[0]).booleanValue(); session.checkAdmin(); session.database.txConflictRollback = mode; return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_GC : { try { int count = ((Integer) parameters[0]).intValue(); session.checkAdmin(); JavaSystem.gcFrequency = count; return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_PROPERTY : { try { String property = (String) parameters[0]; Object value = parameters[1]; session.checkAdmin(); session.checkDDLWrite(); // command is a no-op from 1.9 return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_RESULT_MEMORY_ROWS : { int size = ((Integer) parameters[0]).intValue(); session.checkAdmin(); 
session.database.setResultMaxMemoryRows(size); return Result.updateZeroResult; } case StatementTypes.SET_DATABASE_TEXT_SOURCE : { try { String source = (String) parameters[0]; HsqlProperties props = null; session.checkAdmin(); if (source.length() > 0) { props = HsqlProperties.delimitedArgPairsToProps(source, "=", ";", null); if (props.getErrorKeys().length > 0) { throw Error.error(ErrorCode.TEXT_TABLE_SOURCE, props.getErrorKeys()[0]); } session.database.logger.setDefaultTextTableProperties( source, props); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_DATABASE_UNIQUE_NAME : { try { String name = (String) parameters[0]; session.checkAdmin(); session.database.setUniqueName(name); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.DATABASE_SCRIPT : { ScriptWriterText dsw = null; String name = (String) parameters[0]; try { session.checkAdmin(); if (name == null) { return session.database.getScript(false); } else { dsw = new ScriptWriterText(session.database, name, true, true, true); dsw.writeAll(); dsw.close(); return Result.updateZeroResult; } } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.DATABASE_SHUTDOWN : { try { int mode = ((Integer) parameters[0]).intValue(); session.checkAdmin(); session.database.close(mode); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_TABLE_NEW_TABLESPACE : { try { HsqlName name = (HsqlName) parameters[0]; Table table = session.database.schemaManager.getTable(session, name.name, name.schema.name); DataFileCache cache = session.database.logger.getCache(); session.checkAdmin(); session.checkDDLWrite(); if (!session.database.logger.isFileDatabase()) { return Result.updateZeroResult; } if (session.database.logger.getDataFileSpaces() == 0) { throw 
Error.error(ErrorCode.ACCESS_IS_DENIED); } if (table.getSpaceID() != DataSpaceManager.tableIdDefault) { return Result.updateZeroResult; } // memory database if (cache == null) { return Result.updateZeroResult; } DataSpaceManager dataSpace = cache.spaceManager; int tableSpaceID = dataSpace.getNewTableSpaceID(); table.setSpaceID(tableSpaceID); // if cache exists, a memory table can get a space id // it can then be converted to cached if (!table.isCached()) { return Result.updateZeroResult; } TableSpaceManager tableSpace = dataSpace.getTableSpace(tableSpaceID); PersistentStore store = table.getRowStore(session); store.setSpaceManager(tableSpace); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_TABLE_SET_TABLESPACE : { try { HsqlName name = (HsqlName) parameters[0]; int spaceid = ((Integer) parameters[1]).intValue(); Table table = session.database.schemaManager.getTable(session, name.name, name.schema.name); DataFileCache cache = session.database.logger.getCache(); if (!session.isProcessingScript()) { return Result.updateZeroResult; } if (table.getTableType() != TableBase.CACHED_TABLE) { return Result.updateZeroResult; } if (cache == null) { return Result.updateZeroResult; } if (table.getSpaceID() != DataSpaceManager.tableIdDefault) { return Result.updateZeroResult; } table.setSpaceID(spaceid); DataSpaceManager dataSpace = cache.spaceManager; TableSpaceManager tableSpace = dataSpace.getTableSpace(spaceid); PersistentStore store = table.getRowStore(session); store.setSpaceManager(tableSpace); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_TABLE_CLUSTERED : { try { HsqlName name = (HsqlName) parameters[0]; int[] colIndex = (int[]) parameters[1]; Table table = session.database.schemaManager.getTable(session, name.name, name.schema.name); StatementSchema.checkSchemaUpdateAuthorisation(session, table.getSchemaName()); 
if (!table.isCached() && !table.isText()) { throw Error.error(ErrorCode.ACCESS_IS_DENIED); } Index index = table.getIndexForColumns(session, colIndex); if (index != null) { Index[] indexes = table.getIndexList(); for (int i = 0; i < indexes.length; i++) { indexes[i].setClustered(false); } index.setClustered(true); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_TABLE_INDEX : { try { HsqlName name = (HsqlName) parameters[0]; String value = (String) parameters[1]; Table table = session.database.schemaManager.getTable(session, name.name, name.schema.name); if (session.isProcessingScript()) { table.setIndexRoots(session, value); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_TABLE_READONLY : { try { HsqlName name = (HsqlName) parameters[0]; Table table = session.database.schemaManager.getTable(session, name.name, name.schema.name); boolean mode = ((Boolean) parameters[1]).booleanValue(); StatementSchema.checkSchemaUpdateAuthorisation(session, table.getSchemaName()); table.setDataReadOnly(mode); session.database.schemaManager.setSchemaChangeTimestamp(); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_TABLE_SOURCE : case StatementTypes.SET_TABLE_SOURCE_HEADER : { try { HsqlName name = (HsqlName) parameters[0]; Table table = session.database.schemaManager.getTable(session, name.name, name.schema.name); StatementSchema.checkSchemaUpdateAuthorisation(session, table.getSchemaName()); if (!table.isText()) { Exception e = Error.error(ErrorCode.X_S0522); return Result.newErrorResult(e, sql); } if (parameters[1] != null) { boolean mode = ((Boolean) parameters[1]).booleanValue(); if (mode) { ((TextTable) table).connect(session); } else { ((TextTable) table).disconnect(); } session.database.schemaManager .setSchemaChangeTimestamp(); return 
Result.updateZeroResult; } String source = (String) parameters[2]; boolean isDesc = ((Boolean) parameters[3]).booleanValue(); boolean isHeader = ((Boolean) parameters[4]).booleanValue(); if (isHeader) { ((TextTable) table).setHeader(source); } else { ((TextTable) table).setDataSource(session, source, isDesc, false); } return Result.updateZeroResult; } catch (Throwable e) { if (!(e instanceof HsqlException)) { e = Error.error(ErrorCode.GENERAL_IO_ERROR, e.toString()); } if (session.isProcessingLog() || session.isProcessingScript()) { session.addWarning((HsqlException) e); session.database.logger.logWarningEvent( "Problem processing SET TABLE SOURCE", e); return Result.updateZeroResult; } else { return Result.newErrorResult(e, sql); } } } case StatementTypes.SET_TABLE_TYPE : { try { HsqlName name = (HsqlName) parameters[0]; int type = ((Integer) parameters[1]).intValue(); // Table table = session.database.schemaManager.getUserTable(session, name.name, name.schema.name); if (name.schema != SqlInvariants.LOBS_SCHEMA_HSQLNAME) { StatementSchema.checkSchemaUpdateAuthorisation(session, table.getSchemaName()); } TableWorks tw = new TableWorks(session, table); tw.setTableType(session, type); session.database.schemaManager.setSchemaChangeTimestamp(); if (name.schema == SqlInvariants.LOBS_SCHEMA_HSQLNAME) { session.database.lobManager.compileStatements(); } return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_USER_LOCAL : { User user = (User) parameters[0]; boolean mode = ((Boolean) parameters[1]).booleanValue(); session.checkAdmin(); session.checkDDLWrite(); user.isLocalOnly = mode; session.database.schemaManager.setSchemaChangeTimestamp(); return Result.updateZeroResult; } case StatementTypes.SET_USER_INITIAL_SCHEMA : { try { User user = (User) parameters[0]; HsqlName schema = (HsqlName) parameters[1]; session.checkDDLWrite(); if (user == null) { user = session.getUser(); } else { session.checkAdmin(); 
session.checkDDLWrite(); user = session.database.userManager.get( user.getName().getNameString()); } if (schema != null) { schema = session.database.schemaManager.getSchemaHsqlName( schema.name); } // user.setInitialSchema(schema); session.database.schemaManager.setSchemaChangeTimestamp(); // return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.SET_USER_PASSWORD : { try { User user = parameters[0] == null ? session.getUser() : (User) parameters[0]; String password = (String) parameters[1]; boolean isDigest = (Boolean) parameters[2]; session.checkDDLWrite(); session.database.userManager.setPassword(session, user, password, isDigest); return Result.updateZeroResult; } catch (HsqlException e) { return Result.newErrorResult(e, sql); } } case StatementTypes.ALTER_SESSION : { try { long sessionID = ((Number) parameters[0]).longValue(); int action = ((Number) parameters[1]).intValue(); Session targetSession = session.database.sessionManager.getSession(sessionID); if (targetSession == null) { throw Error.error(ErrorCode.X_2E000); } switch (action) { case Tokens.ALL : session.database.txManager.resetSession(session, targetSession, TransactionManager.resetSessionResetAll); break; case Tokens.TABLE : session.database.txManager.resetSession(session, targetSession, TransactionManager.resetSessionTables); break; case Tokens.RESULT : session.database.txManager.resetSession(session, targetSession, TransactionManager.resetSessionResults); break; case Tokens.CLOSE : session.database.txManager.resetSession(session, targetSession, TransactionManager.resetSessionClose); break; case Tokens.RELEASE : session.database.txManager.resetSession(session, targetSession, TransactionManager.resetSessionRollback); break; } } catch (HsqlException e) { return Result.newErrorResult(e, sql); } return Result.updateZeroResult; } default : throw Error.runtimeError(ErrorCode.U_S0500, "StatemntCommand"); } } Result 
getTruncateResult(Session session) {

    // Implements TRUNCATE TABLE <name> and TRUNCATE SCHEMA <name>.
    //   parameters[0] - target table or schema name
    //   parameters[1] - RESTART IDENTITY option
    //   parameters[2] - when true, skip the non-empty referencing-table checks
    try {
        HsqlName name            = (HsqlName) parameters[0];
        boolean  restartIdentity = (Boolean) parameters[1];
        boolean  noCheck         = (Boolean) parameters[2];
        Table[]  tables;

        if (name.type == SchemaObject.TABLE) {

            // single-table form: truncate exactly this table
            Table table =
                session.database.schemaManager.getUserTable(session, name);

            tables = new Table[]{ table };

            session.getGrantee().checkDelete(table);

            if (!noCheck) {

                // refuse to truncate while some other, non-empty table
                // still holds a foreign key referencing this one
                for (int i = 0; i < table.fkMainConstraints.length; i++) {
                    if (table.fkMainConstraints[i].getRef() != table) {
                        HsqlName tableName =
                            table.fkMainConstraints[i].getRef().getName();
                        Table refTable =
                            session.database.schemaManager.getUserTable(
                                session, tableName);

                        if (!refTable.isEmpty(session)) {
                            throw Error.error(ErrorCode.X_23504,
                                              refTable.getName().name);
                        }
                    }
                }
            }
        } else {

            // schema form: truncate every table in the schema

            // ensure schema existence
            session.database.schemaManager.getSchemaHsqlName(name.name);

            HashMappedList list =
                session.database.schemaManager.getTables(name.name);

            tables = new Table[list.size()];

            list.toValuesArray(tables);
            StatementSchema.checkSchemaUpdateAuthorisation(session, name);

            if (!noCheck) {

                // refuse if a non-empty table elsewhere references
                // (via a constraint) any table inside this schema
                OrderedHashSet set = new OrderedHashSet();

                session.database.schemaManager
                    .getCascadingReferencesToSchema(name, set);

                for (int i = 0; i < set.size(); i++) {
                    HsqlName objectName = (HsqlName) set.get(i);

                    if (objectName.type == SchemaObject.CONSTRAINT) {
                        if (objectName.parent.type == SchemaObject.TABLE) {
                            Table refTable =
                                (Table) session.database.schemaManager
                                    .getUserTable(session, objectName.parent);

                            if (!refTable.isEmpty(session)) {
                                throw Error.error(ErrorCode.X_23504,
                                                  refTable.getName().name);
                            }
                        }
                    }
                }
            }

            if (restartIdentity) {

                // the schema form also resets every sequence in the schema
                Iterator it =
                    session.database.schemaManager.databaseObjectIterator(
                        name.name, SchemaObject.SEQUENCE);

                while (it.hasNext()) {
                    NumberSequence sequence = (NumberSequence) it.next();

                    sequence.reset();
                }
            }
        }

        // remove all rows; optionally reset each table's identity sequence
        for (int i = 0; i < tables.length; i++) {
            Table           table = tables[i];
            PersistentStore store = table.getRowStore(session);

            store.removeAll();

            if (restartIdentity && table.identitySequence != null) {
                table.identitySequence.reset();
            }
        }

        return Result.updateZeroResult;
    } catch (HsqlException e) {
        return Result.newErrorResult(e, sql);
    }
}

/**
 * Metadata of the result this statement produces: a single named column
 * for EXPLAIN PLAN, and for SCRIPT when it returns a result set;
 * otherwise whatever the superclass reports.
 */
public ResultMetaData getResultMetaData() {

    switch (type) {

        case StatementTypes.EXPLAIN_PLAN :
            return ResultMetaData.newSingleColumnMetaData("OPERATION");

        case StatementTypes.DATABASE_SCRIPT :
            if (statementReturnType == StatementTypes.RETURN_RESULT) {
                return ResultMetaData.newSingleColumnMetaData("COMMANDS");
            }

        // fall through
        default :
            return super.getResultMetaData();
    }
}

// true when executing this statement implies an automatic commit
public boolean isAutoCommitStatement() {
    return isTransactionStatement;
}

// the original SQL text serves as the statement's description
public String describe(Session session) {
    return sql;
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.processors.utils; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.planner.plan.nodes.exec.AbstractExecNodeExactlyOnceVisitor; import org.apache.flink.table.planner.plan.nodes.exec.BatchExecNode; import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge; import org.apache.flink.table.planner.plan.nodes.exec.ExecNode; import org.apache.flink.util.Preconditions; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.TreeMap; /** * This class contains algorithm to detect and resolve input priority conflict in an {@link ExecNode} graph. * * <p>Some batch operators (for example, hash join and nested loop join) have different priorities for their inputs. * When some operators are reused, a deadlock may occur due to the conflict in these priorities. 
* * <p>For example, consider the SQL query: * <pre> * WITH * T1 AS (SELECT a, COUNT(*) AS cnt1 FROM x GROUP BY a), * T2 AS (SELECT d, COUNT(*) AS cnt2 FROM y GROUP BY d) * SELECT * FROM * (SELECT cnt1, cnt2 FROM T1 LEFT JOIN T2 ON a = d) * UNION ALL * (SELECT cnt1, cnt2 FROM T2 LEFT JOIN T1 ON d = a) * </pre> * * <p>When sub-plan reuse are enabled, we'll get the following physical plan: * <pre> * Union(all=[true], union=[cnt1, cnt2]) * :- Calc(select=[CAST(cnt1) AS cnt1, cnt2]) * : +- HashJoin(joinType=[LeftOuterJoin], where=[=(a, d)], select=[a, cnt1, d, cnt2], build=[right]) * : :- HashAggregate(isMerge=[true], groupBy=[a], select=[a, Final_COUNT(count1$0) AS cnt1], reuse_id=[2]) * : : +- Exchange(distribution=[hash[a]]) * : : +- LocalHashAggregate(groupBy=[a], select=[a, Partial_COUNT(*) AS count1$0]) * : : +- Calc(select=[a]) * : : +- LegacyTableSourceScan(table=[[default_catalog, default_database, x, source: [TestTableSource(a, b, c)]]], fields=[a, b, c]) * : +- HashAggregate(isMerge=[true], groupBy=[d], select=[d, Final_COUNT(count1$0) AS cnt2], reuse_id=[1]) * : +- Exchange(distribution=[hash[d]]) * : +- LocalHashAggregate(groupBy=[d], select=[d, Partial_COUNT(*) AS count1$0]) * : +- Calc(select=[d]) * : +- LegacyTableSourceScan(table=[[default_catalog, default_database, y, source: [TestTableSource(d, e, f)]]], fields=[d, e, f]) * +- Calc(select=[cnt1, CAST(cnt2) AS cnt2]) * +- HashJoin(joinType=[LeftOuterJoin], where=[=(d, a)], select=[d, cnt2, a, cnt1], build=[right]) * :- Reused(reference_id=[1]) * +- Reused(reference_id=[2]) * </pre> * * <p>Note that the first hash join needs to read all results from the hash aggregate whose reuse id is 1 * before reading the results from the hash aggregate whose reuse id is 2, while the second hash join requires * the opposite. This physical plan will thus cause a deadlock. 
* * <p>This class maintains a topological graph in which an edge pointing from vertex A to vertex B indicates * that the results from vertex A need to be read before those from vertex B. A loop in the graph indicates * a deadlock, and different subclasses of this class resolve the conflict in different ways. * * <p>For a detailed explanation of the algorithm, see appendix of the * <a href="https://docs.google.com/document/d/1qKVohV12qn-bM51cBZ8Hcgp31ntwClxjoiNBUOqVHsI">design doc</a>. */ @Internal public abstract class InputPriorityGraphGenerator { private final List<ExecNode<?, ?>> roots; private final Set<ExecNode<?, ?>> boundaries; private final ExecEdge.DamBehavior safeDamBehavior; protected TopologyGraph graph; /** * Create an {@link InputPriorityGraphGenerator} for the given {@link ExecNode} sub-graph. * * @param roots the first layer of nodes on the output side of the sub-graph * @param boundaries the first layer of nodes on the input side of the sub-graph * @param safeDamBehavior when checking for conflicts we'll ignore the edges with * {@link ExecEdge.DamBehavior} stricter or equal than this */ public InputPriorityGraphGenerator( List<ExecNode<?, ?>> roots, Set<ExecNode<?, ?>> boundaries, ExecEdge.DamBehavior safeDamBehavior) { Preconditions.checkArgument( roots.stream().allMatch(root -> root instanceof BatchExecNode), "InputPriorityConflictResolver can only be used for batch jobs."); this.roots = roots; this.boundaries = boundaries; this.safeDamBehavior = safeDamBehavior; } protected void createTopologyGraph() { // build an initial topology graph graph = new TopologyGraph(roots, boundaries); // check and resolve conflicts about input priorities AbstractExecNodeExactlyOnceVisitor inputPriorityVisitor = new AbstractExecNodeExactlyOnceVisitor() { @Override protected void visitNode(ExecNode<?, ?> node) { if (!boundaries.contains(node)) { visitInputs(node); } updateTopologyGraph(node); } }; roots.forEach(n -> n.accept(inputPriorityVisitor)); } private void 
updateTopologyGraph(ExecNode<?, ?> node) { // group inputs by input priorities TreeMap<Integer, List<Integer>> inputPriorityGroupMap = new TreeMap<>(); Preconditions.checkState( node.getInputNodes().size() == node.getInputEdges().size(), "Number of inputs nodes does not equal to number of input edges for node " + node.getClass().getName() + ". This is a bug."); for (int i = 0; i < node.getInputEdges().size(); i++) { int priority = node.getInputEdges().get(i).getPriority(); inputPriorityGroupMap.computeIfAbsent(priority, k -> new ArrayList<>()).add(i); } // add edges between neighboring priority groups List<List<Integer>> inputPriorityGroups = new ArrayList<>(inputPriorityGroupMap.values()); for (int i = 0; i + 1 < inputPriorityGroups.size(); i++) { List<Integer> higherGroup = inputPriorityGroups.get(i); List<Integer> lowerGroup = inputPriorityGroups.get(i + 1); for (int higher : higherGroup) { for (int lower : lowerGroup) { addTopologyEdges(node, higher, lower); } } } } private void addTopologyEdges(ExecNode<?, ?> node, int higherInput, int lowerInput) { ExecNode<?, ?> higherNode = node.getInputNodes().get(higherInput); ExecNode<?, ?> lowerNode = node.getInputNodes().get(lowerInput); List<ExecNode<?, ?>> lowerAncestors = calculatePipelinedAncestors(lowerNode); List<Tuple2<ExecNode<?, ?>, ExecNode<?, ?>>> linkedEdges = new ArrayList<>(); for (ExecNode<?, ?> ancestor : lowerAncestors) { if (graph.link(higherNode, ancestor)) { linkedEdges.add(Tuple2.of(higherNode, ancestor)); } else { // a conflict occurs, resolve it and revert all linked edges resolveInputPriorityConflict(node, higherInput, lowerInput); for (Tuple2<ExecNode<?, ?>, ExecNode<?, ?>> linkedEdge : linkedEdges) { graph.unlink(linkedEdge.f0, linkedEdge.f1); } return; } } } /** * Find the ancestors by going through PIPELINED edges. 
*/ @VisibleForTesting List<ExecNode<?, ?>> calculatePipelinedAncestors(ExecNode<?, ?> node) { List<ExecNode<?, ?>> ret = new ArrayList<>(); AbstractExecNodeExactlyOnceVisitor ancestorVisitor = new AbstractExecNodeExactlyOnceVisitor() { @Override protected void visitNode(ExecNode<?, ?> node) { boolean hasAncestor = false; if (!boundaries.contains(node)) { List<ExecEdge> inputEdges = node.getInputEdges(); for (int i = 0; i < inputEdges.size(); i++) { // we only go through PIPELINED edges if (inputEdges.get(i).getDamBehavior().stricterOrEqual(safeDamBehavior)) { continue; } hasAncestor = true; node.getInputNodes().get(i).accept(this); } } if (!hasAncestor) { ret.add(node); } } }; node.accept(ancestorVisitor); return ret; } protected abstract void resolveInputPriorityConflict(ExecNode<?, ?> node, int higherInput, int lowerInput); }
package org.jbehave.core.configuration.guice;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.jbehave.core.reporters.Format.CONSOLE;
import static org.jbehave.core.reporters.Format.HTML;
import static org.jbehave.core.reporters.Format.STATS;
import static org.jbehave.core.reporters.Format.TXT;
import static org.jbehave.core.reporters.Format.XML;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Properties;

import org.jbehave.core.annotations.Configure;
import org.jbehave.core.annotations.UsingSteps;
import org.jbehave.core.annotations.guice.UsingGuice;
import org.jbehave.core.configuration.AnnotationBuilder;
import org.jbehave.core.configuration.AnnotationMonitor;
import org.jbehave.core.configuration.Configuration;
import org.jbehave.core.configuration.Keywords;
import org.jbehave.core.configuration.MostUsefulConfiguration;
import org.jbehave.core.embedder.StoryControls;
import org.jbehave.core.failures.FailureStrategy;
import org.jbehave.core.failures.SilentlyAbsorbingFailure;
import org.jbehave.core.i18n.LocalizedKeywords;
import org.jbehave.core.io.LoadFromClasspath;
import org.jbehave.core.io.LoadFromURL;
import org.jbehave.core.io.StoryLoader;
import org.jbehave.core.model.ExamplesTable;
import org.jbehave.core.model.ExamplesTableFactory;
import org.jbehave.core.model.TableParsers;
import org.jbehave.core.model.TableTransformers;
import org.jbehave.core.parsers.RegexPrefixCapturingPatternParser;
import org.jbehave.core.parsers.StepPatternParser;
import org.jbehave.core.reporters.StoryReporterBuilder;
import org.jbehave.core.steps.CandidateSteps;
import org.jbehave.core.steps.ParameterControls;
import org.jbehave.core.steps.ParameterConverters;
import org.jbehave.core.steps.ParameterConverters.DateConverter;
import org.jbehave.core.steps.ParameterConverters.FunctionalParameterConverter;
import org.jbehave.core.steps.Steps;
import org.jbehave.core.steps.ParameterConverters.ParameterConverter;
import org.jbehave.core.steps.guice.GuiceStepsFactoryBehaviour.FooSteps;
import org.jbehave.core.steps.guice.GuiceStepsFactoryBehaviour.FooStepsWithDependency;
import org.junit.Test;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.Scopes;
import com.google.inject.multibindings.Multibinder;

/**
 * Behaviour tests for {@code GuiceAnnotationBuilder}: building a
 * {@link Configuration} and {@link CandidateSteps} from classes annotated
 * with {@link Configure}, {@link UsingSteps} and {@link UsingGuice},
 * using the annotated Guice modules declared at the bottom of this class.
 */
public class GuiceAnnotationBuilderBehaviour {

    // verifies that every value bound in ConfigurationModule (declared below)
    // ends up in the built Configuration
    @Test
    public void shouldBuildConfigurationFromAnnotations() {
        AnnotationBuilder builder = new GuiceAnnotationBuilder(AnnotatedUsingGuice.class);
        Configuration configuration = builder.buildConfiguration();
        assertThat(configuration.storyControls().dryRun(), is(true));
        assertThat(configuration.storyControls().skipScenariosAfterFailure(), is(true));
        assertThat(configuration.failureStrategy(), instanceOf(SilentlyAbsorbingFailure.class));
        assertThat(configuration.storyLoader(), instanceOf(LoadFromURL.class));
        assertThat(configuration.stepPatternParser(), instanceOf(RegexPrefixCapturingPatternParser.class));
        assertThat(((RegexPrefixCapturingPatternParser) configuration.stepPatternParser()).getPrefix(),
                equalTo("MyPrefix"));
        assertThatCustomObjectIsConverted(configuration.parameterConverters());
        assertThatDateIsConvertedWithFormat(configuration.parameterConverters(), new SimpleDateFormat("yyyy-MM-dd"));
        assertThat(configuration.storyReporterBuilder().formats(), hasItems(CONSOLE, HTML, TXT, XML, STATS));
        Keywords keywords = configuration.storyReporterBuilder().keywords();
        assertThat(keywords, instanceOf(LocalizedKeywords.class));
        assertThat(((LocalizedKeywords) keywords).getLocale(), equalTo(Locale.ITALIAN));
        assertThat(configuration.storyReporterBuilder().outputDirectory().getName(),
                equalTo("my-output-directory"));
        assertThat(configuration.storyReporterBuilder().viewResources().getProperty("index"),
                equalTo("my-reports-index.ftl"));
        assertThat(configuration.storyReporterBuilder().viewResources().getProperty("decorateNonHtml"),
                equalTo("true"));
        assertThat(configuration.storyReporterBuilder().reportFailureTrace(), is(true));
    }

    // converters declared via @Configure(parameterConverters=...) and those
    // bound through Guice multibindings must both be picked up
    @Test
    public void shouldBuildConfigurationFromAnnotationsUsingConfigureAndGuiceConverters() {
        AnnotationBuilder builderAnnotated =
                new GuiceAnnotationBuilder(AnnotatedUsingConfigureAndGuiceConverters.class);
        Configuration configuration = builderAnnotated.buildConfiguration();
        assertThatCustomObjectIsConverted(configuration.parameterConverters());
        assertThatDateIsConvertedWithFormat(configuration.parameterConverters(), new SimpleDateFormat("yyyy-MM-dd"));
        assertThatExamplesTableIsConverted(configuration.parameterConverters());
    }

    // same as above, but with createInjector overridden to build an injector
    // without a parent
    @Test
    public void shouldBuildConfigurationFromAnnotationsUsingInjectorWithoutParent() {
        AnnotationBuilder builderAnnotated =
                new GuiceAnnotationBuilder(AnnotatedUsingConfigureAndGuiceConverters.class) {
            @Override
            protected Injector createInjector(List<Module> modules) {
                return Guice.createInjector(modules);
            }
        };
        Configuration configuration = builderAnnotated.buildConfiguration();
        assertThatCustomObjectIsConverted(configuration.parameterConverters());
        assertThatDateIsConvertedWithFormat(configuration.parameterConverters(), new SimpleDateFormat("yyyy-MM-dd"));
        assertThatExamplesTableIsConverted(configuration.parameterConverters());
    }

    // asserts that the converters can round-trip a CustomObject (defined below)
    private void assertThatCustomObjectIsConverted(ParameterConverters parameterConverters) {
        assertThat(parameterConverters.convert("value", CustomObject.class).toString(),
                equalTo(new CustomObject("value").toString()));
    }

    private void
assertThatDateIsConvertedWithFormat(ParameterConverters parameterConverters, DateFormat dateFormat) {
        String date = "2010-10-10";
        try {
            assertThat((Date) parameterConverters.convert(date, Date.class), equalTo(dateFormat.parse(date)));
        } catch (ParseException e) {
            // NOTE(review): the ParseException cause is dropped here — consider
            // throwing new AssertionError(e) so the failure reason is visible
            throw new AssertionError();
        }
    }

    // asserts that an ExamplesTable converter is available and parses headers
    private void assertThatExamplesTableIsConverted(ParameterConverters parameterConverters) {
        String tableAsString = "||one||two||\n" + "|1|2|";
        ExamplesTable table = new ExamplesTable(tableAsString);
        assertThat(table.getHeaders(), hasItems("one", "two"));
    }

    // missing annotations (or an empty @UsingGuice) must fall back to the
    // MostUsefulConfiguration defaults
    @Test
    public void shouldBuildDefaultConfigurationIfAnnotationOrAnnotatedValuesNotPresent() {
        AnnotationBuilder builderNotAnnotated = new GuiceAnnotationBuilder(NotAnnotated.class);
        assertThatConfigurationIs(builderNotAnnotated.buildConfiguration(), new MostUsefulConfiguration());
        AnnotationBuilder builderAnnotatedWithoutModules =
                new GuiceAnnotationBuilder(AnnotatedWithoutModules.class);
        assertThatConfigurationIs(builderAnnotatedWithoutModules.buildConfiguration(), new MostUsefulConfiguration());
    }

    // compares the built configuration against the default, element by element
    private void assertThatConfigurationIs(Configuration builtConfiguration, Configuration defaultConfiguration) {
        assertThat(builtConfiguration.failureStrategy(),
                instanceOf(defaultConfiguration.failureStrategy().getClass()));
        assertThat(builtConfiguration.storyLoader(),
                instanceOf(defaultConfiguration.storyLoader().getClass()));
        assertThat(builtConfiguration.stepPatternParser(),
                instanceOf(defaultConfiguration.stepPatternParser().getClass()));
        assertThat(builtConfiguration.storyReporterBuilder().formats(),
                equalTo(defaultConfiguration.storyReporterBuilder().formats()));
        assertThat(builtConfiguration.storyReporterBuilder().outputDirectory(),
                equalTo(defaultConfiguration.storyReporterBuilder().outputDirectory()));
        assertThat(builtConfiguration.storyReporterBuilder().viewResources(),
                equalTo(defaultConfiguration.storyReporterBuilder().viewResources()));
        assertThat(builtConfiguration.storyReporterBuilder().reportFailureTrace(),
                equalTo(defaultConfiguration.storyReporterBuilder().reportFailureTrace()));
    }

    @Test
    public void shouldBuildCandidateStepsFromAnnotationsUsingGuice() {
        AnnotationBuilder builderAnnotated = new GuiceAnnotationBuilder(AnnotatedUsingGuice.class);
        Configuration configuration = builderAnnotated.buildConfiguration();
        assertThatStepsInstancesAre(builderAnnotated.buildCandidateSteps(configuration),
                FooSteps.class, FooStepsWithDependency.class);
    }

    @Test
    public void shouldBuildCandidateStepsFromAnnotationsUsingStepsAndGuice() {
        AnnotationBuilder builderAnnotated = new GuiceAnnotationBuilder(AnnotatedUsingStepsAndGuice.class);
        Configuration configuration = builderAnnotated.buildConfiguration();
        assertThatStepsInstancesAre(builderAnnotated.buildCandidateSteps(configuration), FooSteps.class);
    }

    // @UsingGuice inherited from the parent class, @UsingSteps on the child
    @Test
    public void shouldBuildCandidateStepsFromAnnotationsUsingStepsAndInheritingGuiceFromParent() {
        AnnotationBuilder builderAnnotated = new GuiceAnnotationBuilder(InheritingAnnotatedUsingSteps.class);
        Configuration configuration = builderAnnotated.buildConfiguration();
        assertThatStepsInstancesAre(builderAnnotated.buildCandidateSteps(configuration), FooSteps.class);
    }

    @Test
    public void shouldBuildCandidateStepsFromAnnotationsUsingStepsAndGuiceAndConverters() {
        AnnotationBuilder builderAnnotated =
                new GuiceAnnotationBuilder(AnnotatedUsingConfigureAndGuiceConverters.class);
        Configuration configuration = builderAnnotated.buildConfiguration();
        assertThatStepsInstancesAre(builderAnnotated.buildCandidateSteps(configuration), FooSteps.class);
    }

    @Test
    public void shouldBuildEmptyStepsListIfAnnotationOrAnnotatedValuesNotPresent() {
        AnnotationBuilder builderNotAnnotated = new GuiceAnnotationBuilder(NotAnnotated.class);
        assertThatStepsInstancesAre(builderNotAnnotated.buildCandidateSteps());
        AnnotationBuilder builderAnnotatedWithoutLocations =
                new GuiceAnnotationBuilder(AnnotatedWithoutModules.class);
        assertThatStepsInstancesAre(builderAnnotatedWithoutLocations.buildCandidateSteps());
    }

    // asserts both the number and the concrete classes of the built steps
    private void assertThatStepsInstancesAre(List<CandidateSteps> candidateSteps, Class<?>... stepsClasses) {
        assertThat(candidateSteps.size(), equalTo(stepsClasses.length));
        for (int i = 0; i < stepsClasses.length; i++) {
            assertThat(((Steps) candidateSteps.get(i)).instance(), instanceOf(stepsClasses[i]));
        }
    }

    // a module that cannot be instantiated must be reported to the monitor,
    // not thrown out of the builder
    @Test
    public void shouldNotBuildContainerIfModuleNotInstantiable() {
        AnnotationMonitor annotationMonitor = mock(AnnotationMonitor.class);
        AnnotationBuilder builderPrivateModule =
                new GuiceAnnotationBuilder(AnnotatedWithPrivateModule.class, annotationMonitor);
        assertThatStepsInstancesAre(builderPrivateModule.buildCandidateSteps());
        verify(annotationMonitor).elementCreationFailed(isA(Class.class), isA(Exception.class));
    }

    // the injector must be created once and reused across build invocations
    @Test
    public void shouldCreateOnlyOneContainerForMultipleBuildInvocations() {
        GuiceAnnotationBuilder builderAnnotated = new GuiceAnnotationBuilder(AnnotatedUsingStepsAndGuice.class);
        builderAnnotated.buildConfiguration();
        Injector injector = builderAnnotated.injector();
        builderAnnotated.buildConfiguration();
        assertThat(builderAnnotated.injector(), sameInstance(injector));
    }

    // ---- annotated fixture classes used by the tests above ----

    @Configure()
    @UsingGuice(modules = { ConfigurationModule.class, StepsModule.class })
    private static class AnnotatedUsingGuice {
    }

    @Configure()
    @UsingGuice(modules = { ConfigurationModule.class })
    private static class ParentAnnotatedUsingGuice {
    }

    @UsingSteps(instances = { FooSteps.class })
    private static class InheritingAnnotatedUsingSteps extends ParentAnnotatedUsingGuice {
    }

    @Configure()
    @UsingSteps(instances = { FooSteps.class })
    @UsingGuice(modules = { ConfigurationModule.class })
    private static class AnnotatedUsingStepsAndGuice {
    }

    @Configure(parameterConverters = { MyExampleTableConverter.class, MyDateConverter.class })
    @UsingSteps(instances = { FooSteps.class })
    @UsingGuice(modules = { ConfigurationModule.class })
    private static class AnnotatedUsingConfigureAndGuiceConverters {
    }

    @Configure()
    @UsingGuice()
    private static class AnnotatedWithoutModules {
    }

    @Configure()
    @UsingGuice(modules = {PrivateModule.class} )
    private static class AnnotatedWithPrivateModule {
    }

    private static class NotAnnotated {
    }

    /** Guice module providing the configuration values the first test asserts on. */
    public static class ConfigurationModule extends AbstractModule {

        @Override
        protected void configure() {
            bind(StoryControls.class).toInstance(new StoryControls().doDryRun(true).doSkipScenariosAfterFailure(true));
            bind(FailureStrategy.class).to(SilentlyAbsorbingFailure.class);
            bind(StepPatternParser.class).toInstance(new RegexPrefixCapturingPatternParser("MyPrefix"));
            bind(StoryLoader.class).toInstance(new LoadFromURL());
            Properties viewResources = new Properties();
            viewResources.setProperty("index", "my-reports-index.ftl");
            viewResources.setProperty("decorateNonHtml", "true");
            bind(StoryReporterBuilder.class).toInstance(
                new StoryReporterBuilder().withDefaultFormats().withFormats(CONSOLE, HTML, TXT, XML).withKeywords(
                    new LocalizedKeywords(Locale.ITALIAN)).withRelativeDirectory("my-output-directory")
                    .withViewResources(viewResources).withFailureTrace(true));
            // parameter converters contributed via a Guice multibinding
            Multibinder<ParameterConverter> multiBinder =
                Multibinder.newSetBinder(binder(), ParameterConverter.class);
            multiBinder.addBinding().toInstance(
                new FunctionalParameterConverter<>(CustomObject.class, CustomObject::new));
            multiBinder.addBinding().toInstance(new DateConverter(new SimpleDateFormat("yyyy-MM-dd")));
        }
    }

    public static class MyExampleTableConverter extends ParameterConverters.ExamplesTableConverter {

        public MyExampleTableConverter() {
            super(new ExamplesTableFactory(new LoadFromClasspath(), new ParameterConverters(),
                    new ParameterControls(), new TableParsers(), new TableTransformers()));
        }
    }

    // note: uses dd-MM-yyyy, a different format than the Guice-bound yyyy-MM-dd converter
    public static class MyDateConverter extends ParameterConverters.DateConverter {

        public MyDateConverter() {
            super(new SimpleDateFormat("dd-MM-yyyy"));
        }
    }

    /** Minimal value type used to exercise custom parameter conversion. */
    public static class CustomObject {

        private final String value;

        public CustomObject(String value) {
            this.value = value;
        }

        @Override
        public String toString() {
            return value;
        }
    }

    public static class StepsModule extends AbstractModule
{ @Override protected void configure() { bind(FooSteps.class).in(Scopes.SINGLETON); bind(Integer.class).toInstance(42); bind(FooStepsWithDependency.class).in(Scopes.SINGLETON); } } private static class PrivateModule extends AbstractModule { @Override protected void configure() { } } }
package com.tomtom.cookieclock;

import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.SystemClock;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.os.Handler;
import android.view.MotionEvent;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.tomtom.cookieclock.repository.GammerRepoHelper;
import com.tomtom.cookieclock.repository.GammerResultDAO;

import java.util.concurrent.TimeUnit;

import rx.Observable;
import rx.Subscription;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;

/**
 * Full-screen stopwatch screen. The user starts and stops the clock with a
 * single touch button; once stopped, a name/email form animates in and the
 * result is persisted via {@link GammerRepoHelper} before navigating to
 * {@code ResultsActivity}.
 */
public class MainActivity extends AppCompatActivity implements View.OnTouchListener {

    /** Delay (ms) between hiding the action bar and hiding the system bars. */
    private static final int UI_ANIMATION_DELAY = 300;

    private final Handler mHideHandler = new Handler();

    /** Ticker driving the clock display; non-null only while the timer runs. */
    private Subscription timeSubscription;

    private TextView tvClock;
    private Button startButton;
    private Button nextButton;
    private EditText name;
    private EditText email;
    private LinearLayout playerData;

    /** true while the stopwatch is running. */
    private boolean start = false;

    private GammerRepoHelper data;

    /** Uptime snapshot taken when the timer was started. */
    private long startTime = 0L;
    long timeInMilliseconds = 0L;
    // NOTE(review): never updated when the timer stops, so it stays 0 for the
    // whole activity lifecycle — confirm whether pause/resume of the clock was
    // ever intended.
    long timeSwapBuff = 0L;
    /** Last elapsed time shown on the clock; persisted with the result. */
    long updatedTime = 0L;

    // Second stage of the UI hide: removes status/navigation bars after the
    // action bar has been hidden (see hide()).
    private final Runnable mHidePart2Runnable = new Runnable() {
        @SuppressLint("InlinedApi")
        @Override
        public void run() {
            tvClock.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LOW_PROFILE
                    | View.SYSTEM_UI_FLAG_FULLSCREEN
                    | View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                    | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
                    | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                    | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
        }
    };

    private final Runnable mHideRunnable = new Runnable() {
        @Override
        public void run() {
            hide();
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        tvClock = (TextView) findViewById(R.id.clock);
        startButton = (Button) findViewById(R.id.button);
        playerData = (LinearLayout) findViewById(R.id.player_data_layout);
        nextButton = (Button) findViewById(R.id.buttonNext);
        name = (EditText) findViewById(R.id.name);
        email = (EditText) findViewById(R.id.email);
        nextButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                saveAndGoToResultsActivity();
            }
        });
        data = new GammerRepoHelper(getApplicationContext());
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        // Hide the system UI shortly after creation, so the initial layout
        // settles first.
        delayedHide(100);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Reset the whole screen to its initial state: clock at zero, form
        // hidden, start button re-armed, inputs cleared.
        tvClock.clearAnimation();
        tvClock.setText(R.string.zero_time);
        playerData.clearAnimation();
        playerData.setVisibility(View.INVISIBLE);
        startButton.clearAnimation();
        startButton.setOnTouchListener(this);
        name.setText("");
        email.setText("");
    }

    @Override
    protected void onDestroy() {
        // Fix: without this, the interval subscription started in startTimer()
        // keeps ticking (and referencing this activity) if the activity is
        // destroyed while the clock is running.
        if (timeSubscription != null && !timeSubscription.isUnsubscribed()) {
            timeSubscription.unsubscribe();
        }
        super.onDestroy();
    }

    /** Hides the action bar immediately and the system bars after a delay. */
    private void hide() {
        // Hide UI first
        ActionBar actionBar = getSupportActionBar();
        if (actionBar != null) {
            actionBar.hide();
        }
        // Schedule a runnable to remove the status and navigation bar after a delay
        mHideHandler.postDelayed(mHidePart2Runnable, UI_ANIMATION_DELAY);
    }

    /** Schedules {@link #hide()} after {@code delayMillis}, cancelling any pending call. */
    private void delayedHide(int delayMillis) {
        mHideHandler.removeCallbacks(mHideRunnable);
        mHideHandler.postDelayed(mHideRunnable, delayMillis);
    }

    /**
     * Toggles the stopwatch on touch release: first tap starts the timer,
     * second tap stops it and animates the result form in.
     */
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_UP:
                if (!start) {
                    startTimer();
                } else {
                    stopTimer();
                    startClockAnimation();
                }
                return false;
        }
        return false;
    }

    /** Starts the clock: flips the button label and subscribes a ~103 ms ticker. */
    private void startTimer() {
        start = true;
        startButton.setText(R.string.stop);
        startTime = SystemClock.uptimeMillis();
        timeSubscription = Observable.interval(103, TimeUnit.MILLISECONDS,
                AndroidSchedulers.mainThread()).subscribe(new Action1<Long>() {
            @Override
            public void call(Long aLong) {
                updateTime();
            }
        });
    }

    /** Stops the clock and cancels the ticker subscription. */
    private void stopTimer() {
        start = false;
        startButton.setText(R.string.start);
        if (timeSubscription != null) {
            timeSubscription.unsubscribe();
        }
    }

    /** Recomputes the elapsed time and renders it on the clock view. */
    private void updateTime() {
        timeInMilliseconds = SystemClock.uptimeMillis() - startTime;
        updatedTime = timeSwapBuff + timeInMilliseconds;
        tvClock.setText(DataHelper.timeToString(updatedTime));
    }

    /**
     * Animates the clock, slides the player form in and the start button out,
     * and disarms the start button so the timer cannot be restarted.
     */
    private void startClockAnimation() {
        Animation animClock = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.clock_anim);
        tvClock.startAnimation(animClock);
        playerData.setVisibility(View.VISIBLE);
        Animation animButton = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.go_in);
        playerData.startAnimation(animButton);
        Animation animPlayerData = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.go_out);
        startButton.startAnimation(animPlayerData);
        startButton.setOnTouchListener(null);
    }

    /**
     * Persists the gamer's name/email/time and opens the results screen;
     * shows a warning toast if either input is empty.
     */
    private void saveAndGoToResultsActivity() {
        if (isEditTextNotEmpty()) {
            GammerResultDAO gamer = new GammerResultDAO();
            gamer.setName(name.getText().toString());
            gamer.setEmail(email.getText().toString());
            gamer.setTimeinMs(updatedTime);
            data.saveGammerResult(gamer);
            Intent intent = new Intent(this, ResultsActivity.class);
            startActivity(intent);
        } else {
            Toast.makeText(getApplicationContext(), R.string.no_data_warning, Toast.LENGTH_SHORT).show();
        }
    }

    /** @return true when both the name and email fields contain text. */
    private boolean isEditTextNotEmpty() {
        // Fix: getText() returns an Editable, and Editable.equals("") is never
        // true, so the original check on 'name' always passed even when the
        // field was empty. Compare the String representations instead.
        return !(name.getText().toString().equals("") || email.getText().toString().equals(""));
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.packaging;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.RunCanceledByUserException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.process.CapturingProcessHandler;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.jetbrains.python.sdk.PythonSdkType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

/**
 * Package manager for conda-based Python SDKs. While {@link #useConda} is
 * {@code true}, install/uninstall/list operations are delegated to the
 * {@code conda} executable; otherwise they fall back to the pip-based
 * implementation in {@link PyPackageManagerImpl}.
 */
public class PyCondaPackageManagerImpl extends PyPackageManagerImpl {
  public static final String PYTHON = "python";

  /** Chooses between conda (true) and the superclass' pip path (false). */
  public boolean useConda = true;

  public boolean useConda() {
    return useConda;
  }

  public void useConda(boolean conda) {
    useConda = conda;
  }

  PyCondaPackageManagerImpl(@NotNull final Sdk sdk) {
    super(sdk);
  }

  @Override
  public void install(@NotNull List<PyRequirement> requirements, @NotNull List<String> extraArgs) throws ExecutionException {
    if (useConda) {
      final ArrayList<String> arguments = new ArrayList<>();
      for (PyRequirement requirement : requirements) {
        arguments.add(requirement.toString());
      }
      arguments.add("-y");
      if (extraArgs.contains("-U")) {
        // pip's '-U' (upgrade) maps to conda's dedicated 'update' command
        getCondaOutput("update", arguments);
      }
      else {
        arguments.addAll(extraArgs);
        getCondaOutput("install", arguments);
      }
    }
    else {
      super.install(requirements, extraArgs);
    }
  }

  /**
   * Runs {@code conda <command> -p <env path> <arguments>} and returns the
   * captured output.
   *
   * @throws ExecutionException if the conda executable or the environment
   *                            directory cannot be determined, or the process
   *                            exits with a non-zero code
   */
  private ProcessOutput getCondaOutput(@NotNull final String command, List<String> arguments) throws ExecutionException {
    final Sdk sdk = getSdk();
    final String condaExecutable = PyCondaPackageService.getCondaExecutable(sdk.getHomeDirectory());
    if (condaExecutable == null) throw new PyExecutionException("Cannot find conda", "Conda", Collections.emptyList(), new ProcessOutput());

    final String path = getCondaDirectory();
    if (path == null) throw new PyExecutionException("Empty conda name for " + sdk.getHomePath(), command, arguments);

    final ArrayList<String> parameters = Lists.newArrayList(condaExecutable, command, "-p", path);
    parameters.addAll(arguments);

    final GeneralCommandLine commandLine = new GeneralCommandLine(parameters);
    final CapturingProcessHandler handler = new CapturingProcessHandler(commandLine);
    final ProcessOutput result = handler.runProcess();
    final int exitCode = result.getExitCode();
    if (exitCode != 0) {
      // Empty stdout AND stderr with a failing exit code is treated as a
      // permission problem; anything else is reported as a generic failure.
      final String message = StringUtil.isEmptyOrSpaces(result.getStdout()) && StringUtil.isEmptyOrSpaces(result.getStderr()) ?
                             "Permission denied" : "Non-zero exit code";
      throw new PyExecutionException(message, "Conda", parameters, result);
    }
    return result;
  }

  /**
   * @return the conda environment directory for this SDK, derived from the
   * interpreter location (on Windows the interpreter sits directly in the env
   * root; on other systems it is in {@code <env>/bin}), or null if the SDK has
   * no home directory.
   */
  @Nullable
  private String getCondaDirectory() {
    final VirtualFile homeDirectory = getSdk().getHomeDirectory();
    if (homeDirectory == null) return null;
    if (SystemInfo.isWindows) return homeDirectory.getParent().getPath();
    return homeDirectory.getParent().getParent().getPath();
  }

  @Override
  public void install(@NotNull String requirementString) throws ExecutionException {
    // Fix: the branches were inverted relative to every other method in this
    // class — with useConda enabled this used to run pip (super.install) and
    // with it disabled it used to run conda.
    if (useConda) {
      getCondaOutput("install", Lists.newArrayList(requirementString, "-y"));
    }
    else {
      super.install(requirementString);
    }
  }

  @Override
  public void uninstall(@NotNull List<PyPackage> packages) throws ExecutionException {
    if (useConda) {
      final ArrayList<String> arguments = new ArrayList<>();
      for (PyPackage aPackage : packages) {
        arguments.add(aPackage.getName());
      }
      arguments.add("-y");
      getCondaOutput("remove", arguments);
    }
    else {
      super.uninstall(packages);
    }
  }

  /**
   * @return packages installed using 'conda' manager only.
   * Use 'useConda' flag to retrieve 'pip' packages
   */
  @NotNull
  @Override
  protected List<PyPackage> collectPackages() throws ExecutionException {
    if (useConda) {
      final ProcessOutput output = getCondaOutput("list", Lists.newArrayList("-e"));
      final Set<PyPackage> packages = Sets.newConcurrentHashSet(parseCondaToolOutput(output.getStdout()));
      return Lists.newArrayList(packages);
    }
    else {
      return super.collectPackages();
    }
  }

  /**
   * Parses the output of {@code conda list -e} ("name=version=build" lines,
   * '#' comments skipped) into {@link PyPackage} instances.
   *
   * @throws ExecutionException on lines with fewer than three '='-separated fields
   */
  @NotNull
  protected static List<PyPackage> parseCondaToolOutput(@NotNull String s) throws ExecutionException {
    final String[] lines = StringUtil.splitByLines(s);
    final List<PyPackage> packages = new ArrayList<>();
    for (String line : lines) {
      if (line.startsWith("#")) continue;
      final List<String> fields = StringUtil.split(line, "=");
      if (fields.size() < 3) {
        throw new PyExecutionException("Invalid conda output format", "conda", Collections.emptyList());
      }
      final String name = fields.get(0);
      final String version = fields.get(1);
      final List<PyRequirement> requirements = new ArrayList<>();
      if (fields.size() >= 4) {
        final String requiresLine = fields.get(3);
        final String requiresSpec = StringUtil.join(StringUtil.split(requiresLine, ":"), "\n");
        requirements.addAll(PyRequirement.fromText(requiresSpec));
      }
      // NOTE(review): conda normally emits lowercase "python"; confirm this
      // capitalized filter ever matches.
      if (!"Python".equals(name)) {
        packages.add(new PyPackage(name, version, "", requirements));
      }
    }
    return packages;
  }

  /**
   * @return true if the SDK's interpreter lives inside a conda environment,
   * detected by the presence of a sibling {@code conda-meta} directory.
   */
  public static boolean isCondaVEnv(@NotNull final Sdk sdk) {
    final String condaName = "conda-meta";
    final VirtualFile homeDirectory = sdk.getHomeDirectory();
    if (homeDirectory == null) return false;
    final VirtualFile condaMeta = SystemInfo.isWindows ?
                                  homeDirectory.getParent().findChild(condaName) :
                                  homeDirectory.getParent().getParent().findChild(condaName);
    return condaMeta != null;
  }

  /**
   * Creates a new conda environment at {@code destinationDir} with the given
   * Python version and returns the path of its interpreter.
   *
   * @throws ExecutionException           if conda is missing or exits abnormally
   * @throws RunCanceledByUserException   if the user cancels the progress indicator
   */
  @NotNull
  public static String createVirtualEnv(@NotNull String destinationDir, String version) throws ExecutionException {
    final String condaExecutable = PyCondaPackageService.getSystemCondaExecutable();
    if (condaExecutable == null) throw new PyExecutionException("Cannot find conda", "Conda", Collections.emptyList(), new ProcessOutput());

    final ArrayList<String> parameters = Lists.newArrayList(condaExecutable, "create", "-p", destinationDir, "-y",
                                                            "python=" + version);

    final GeneralCommandLine commandLine = new GeneralCommandLine(parameters);
    final CapturingProcessHandler handler = new CapturingProcessHandler(commandLine);
    final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    final ProcessOutput result = handler.runProcessWithProgressIndicator(indicator);
    if (result.isCancelled()) {
      throw new RunCanceledByUserException();
    }
    final int exitCode = result.getExitCode();
    if (exitCode != 0) {
      final String message = StringUtil.isEmptyOrSpaces(result.getStdout()) && StringUtil.isEmptyOrSpaces(result.getStderr()) ?
                             "Permission denied" : "Non-zero exit code";
      throw new PyExecutionException(message, "Conda", parameters, result);
    }
    final String binary = PythonSdkType.getPythonExecutable(destinationDir);
    final String binaryFallback = destinationDir + File.separator + "bin" + File.separator + "python";
    return (binary != null) ? binary : binaryFallback;
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.internal;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;

/**
 * Mapper for the {@code _routing} metadata field: a stored, untokenized
 * keyword field that records the routing value a document was indexed with.
 */
public class RoutingFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_routing";
    public static final String CONTENT_TYPE = "_routing";

    /** Default field type and settings for {@code _routing}. */
    public static class Defaults {
        public static final String NAME = "_routing";

        public static final MappedFieldType FIELD_TYPE = new RoutingFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setStored(true);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
            FIELD_TYPE.freeze();
        }

        public static final boolean REQUIRED = false;
        public static final String PATH = null;
    }

    public static class Builder extends MetadataFieldMapper.Builder<Builder, RoutingFieldMapper> {

        private boolean required = Defaults.REQUIRED;
        // 'path' is only honored for pre-2.0 indices (see TypeParser below)
        private String path = Defaults.PATH;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
        }

        public Builder required(boolean required) {
            this.required = required;
            return builder;
        }

        public Builder path(String path) {
            this.path = path;
            return builder;
        }

        @Override
        public RoutingFieldMapper build(BuilderContext context) {
            return new RoutingFieldMapper(fieldType, required, path, context.indexSettings());
        }
    }

    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
            if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
                // legacy indices accepted generic field settings on _routing
                parseField(builder, builder.name, node, parserContext);
            }
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("required")) {
                    builder.required(nodeBooleanValue(fieldNode));
                    iterator.remove();
                } else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
                    builder.path(fieldNode.toString());
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    static final class RoutingFieldType extends MappedFieldType {

        public RoutingFieldType() {
            setFieldDataType(new FieldDataType("string"));
        }

        protected RoutingFieldType(RoutingFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new RoutingFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public String value(Object value) {
            if (value == null) {
                return null;
            }
            return value.toString();
        }
    }

    // mutable: can be flipped to true after construction via markAsRequired()
    private boolean required;
    private final String path;

    public RoutingFieldMapper(Settings indexSettings, MappedFieldType existing) {
        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, Defaults.PATH, indexSettings);
    }

    protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, Settings indexSettings) {
        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
        this.required = required;
        this.path = path;
    }

    public void markAsRequired() {
        this.required = true;
    }

    public boolean required() {
        return this.required;
    }

    public String path() {
        return this.path;
    }

    /** @return the stored routing value from {@code document}, or null if absent. */
    public String value(Document document) {
        Field field = (Field) document.getField(fieldType().names().indexName());
        return field == null ? null : (String)fieldType().value(field);
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
        // routing comes from the source-to-parse metadata, so the actual
        // parsing happens up front here rather than in parse()
        super.parse(context);
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
    }

    @Override
    public Mapper parse(ParseContext context) throws IOException {
        // no need to parse here, we either get the routing in the sourceToParse
        // or we don't have routing, if we get it in sourceToParse, we process it in preParse
        // which will always be called
        return null;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        // Simplified: the original null-checked routing() twice in a row
        // (outer and inner if on the same value); one check is sufficient.
        String routing = context.sourceToParse().routing();
        if (routing != null) {
            if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
                // neither indexed nor stored: record it as ignored instead
                context.ignoredValue(fieldType().names().indexName(), routing);
                return;
            }
            fields.add(new Field(fieldType().names().indexName(), routing, fieldType()));
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

        // if all are defaults, no sense to write it at all
        boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
        boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
        if (!includeDefaults && indexed == indexedDefault &&
                fieldType().stored() == Defaults.FIELD_TYPE.stored() && required == Defaults.REQUIRED && path == Defaults.PATH) {
            return builder;
        }
        builder.startObject(CONTENT_TYPE);
        // index/store/path are only emitted for pre-2.x created indices
        if (indexCreatedBefore2x && (includeDefaults || indexed != indexedDefault)) {
            builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
        }
        if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) {
            builder.field("store", fieldType().stored());
        }
        if (includeDefaults || required != Defaults.REQUIRED) {
            builder.field("required", required);
        }
        if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) {
            builder.field("path", path);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        // do nothing here, no merging, but also no exception
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.util; import com.intellij.openapi.components.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.openapi.wm.IdeFrame; import com.intellij.openapi.wm.WindowManager; import com.intellij.ui.ScreenUtil; import com.intellij.util.containers.hash.LinkedHashMap; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import gnu.trove.TObjectIntHashMap; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.util.Map; /** * This class represents map between strings and rectangles. It's intended to store * sizes of window, dialogs, etc. 
*/ @State( name = "DimensionService", storages = { @Storage(value = "dimensions.xml", roamingType = RoamingType.DISABLED), @Storage(value = "options.xml", deprecated = true) } ) public class DimensionService implements PersistentStateComponent<Element> { private static final Logger LOG = Logger.getInstance(DimensionService.class); private final Map<String, Point> myKey2Location; private final Map<String, Dimension> myKey2Size; private final TObjectIntHashMap<String> myKey2ExtendedState; @NonNls private static final String EXTENDED_STATE = "extendedState"; @NonNls private static final String KEY = "key"; @NonNls private static final String STATE = "state"; @NonNls private static final String ELEMENT_LOCATION = "location"; @NonNls private static final String ELEMENT_SIZE = "size"; @NonNls private static final String ATTRIBUTE_X = "x"; @NonNls private static final String ATTRIBUTE_Y = "y"; @NonNls private static final String ATTRIBUTE_WIDTH = "width"; @NonNls private static final String ATTRIBUTE_HEIGHT = "height"; public static DimensionService getInstance() { return ServiceManager.getService(DimensionService.class); } /** * Invoked by reflection */ private DimensionService() { myKey2Location = new LinkedHashMap<String, Point>(); myKey2Size = new LinkedHashMap<String, Dimension>(); myKey2ExtendedState = new TObjectIntHashMap<String>(); } /** * @param key a String key to perform a query for. * @return point stored under the specified <code>key</code>. The method returns * <code>null</code> if there is no stored value under the <code>key</code>. If point * is outside of current screen bounds then the method returns <code>null</code>. It * properly works in multi-monitor configuration. * @throws java.lang.IllegalArgumentException if <code>key</code> is <code>null</code>. 
*/ @Nullable public synchronized Point getLocation(String key) { return getLocation(key, guessProject()); } @Nullable public synchronized Point getLocation(@NotNull String key, Project project) { Point point = myKey2Location.get(realKey(key, project)); if (point != null && !ScreenUtil.getScreenRectangle(point).contains(point)) { point = null; } return point != null ? (Point)point.clone() : null; } /** * Store specified <code>point</code> under the <code>key</code>. If <code>point</code> is * <code>null</code> then the value stored under <code>key</code> will be removed. * * @param key a String key to store location for. * @param point location to save. * @throws java.lang.IllegalArgumentException if <code>key</code> is <code>null</code>. */ public synchronized void setLocation(String key, Point point) { setLocation(key, point, guessProject()); } public synchronized void setLocation(@NotNull String key, Point point, Project project) { key = realKey(key, project); if (point != null) { myKey2Location.put(key, (Point)point.clone()); } else { myKey2Location.remove(key); } } /** * @param key a String key to perform a query for. * @return point stored under the specified <code>key</code>. The method returns * <code>null</code> if there is no stored value under the <code>key</code>. * @throws java.lang.IllegalArgumentException if <code>key</code> is <code>null</code>. */ @Nullable public synchronized Dimension getSize(@NotNull @NonNls String key) { return getSize(key, guessProject()); } @Nullable public synchronized Dimension getSize(@NotNull @NonNls String key, Project project) { Dimension size = myKey2Size.get(realKey(key, project)); return size != null ? (Dimension)size.clone() : null; } /** * Store specified <code>size</code> under the <code>key</code>. If <code>size</code> is * <code>null</code> then the value stored under <code>key</code> will be removed. * * @param key a String key to to save size for. * @param size a Size to save. 
 * @throws java.lang.IllegalArgumentException if <code>key</code> is <code>null</code>.
 */
public synchronized void setSize(@NotNull @NonNls String key, Dimension size) {
  // Delegate with a best-guess project: non-null only when exactly one project is open.
  setSize(key, size, guessProject());
}

/**
 * Stores the specified size under {@code key} for the given project, or removes the
 * previously stored size when {@code size} is {@code null}.
 *
 * @param key     storage key; further qualified by screen configuration via {@link #realKey}
 * @param size    size to remember, or {@code null} to forget any stored value
 * @param project project used to qualify the key; may be {@code null}
 */
public synchronized void setSize(@NotNull @NonNls String key, Dimension size, Project project) {
  key = realKey(key, project);
  if (size != null) {
    // Store a defensive copy — the caller may keep mutating its Dimension instance.
    myKey2Size.put(key, (Dimension)size.clone());
  }
  else {
    myKey2Size.remove(key);
  }
}

/**
 * Serializes all remembered locations, sizes and extended states into a single
 * {@code state} element for persistence.
 */
@Override
public Element getState() {
  Element element = new Element("state");

  // Save locations
  for (String key : myKey2Location.keySet()) {
    Point point = myKey2Location.get(key);
    LOG.assertTrue(point != null);
    Element e = new Element(ELEMENT_LOCATION);
    e.setAttribute(KEY, key);
    e.setAttribute(ATTRIBUTE_X, String.valueOf(point.x));
    e.setAttribute(ATTRIBUTE_Y, String.valueOf(point.y));
    element.addContent(e);
  }

  // Save sizes
  for (String key : myKey2Size.keySet()) {
    Dimension size = myKey2Size.get(key);
    LOG.assertTrue(size != null);
    Element e = new Element(ELEMENT_SIZE);
    e.setAttribute(KEY, key);
    e.setAttribute(ATTRIBUTE_WIDTH, String.valueOf(size.width));
    e.setAttribute(ATTRIBUTE_HEIGHT, String.valueOf(size.height));
    element.addContent(e);
  }

  // Save extended states (deprecated storage, kept for backward compatibility).
  // NOTE(review): keys() presumably returns an array of Object keys — the map appears to
  // be a primitive-int-valued collection rather than a java.util.Map; confirm its type.
  for (Object stateKey : myKey2ExtendedState.keys()) {
    String key = (String)stateKey;
    Element e = new Element(EXTENDED_STATE);
    e.setAttribute(KEY, key);
    e.setAttribute(STATE, String.valueOf(myKey2ExtendedState.get(key)));
    element.addContent(e);
  }
  return element;
}

/**
 * Restores locations, sizes and extended states from a previously saved {@code state}
 * element. All existing in-memory state is discarded first; entries whose numeric
 * attributes fail to parse are silently skipped.
 */
@Override
public void loadState(final Element element) {
  myKey2Location.clear();
  myKey2Size.clear();
  myKey2ExtendedState.clear();
  for (Element e : element.getChildren()) {
    if (ELEMENT_LOCATION.equals(e.getName())) {
      try {
        myKey2Location.put(e.getAttributeValue(KEY),
                           new Point(Integer.parseInt(e.getAttributeValue(ATTRIBUTE_X)),
                                     Integer.parseInt(e.getAttributeValue(ATTRIBUTE_Y))));
      }
      catch (NumberFormatException ignored) {
        // Corrupted persisted value — drop this entry rather than fail the whole load.
      }
    }
    else if (ELEMENT_SIZE.equals(e.getName())) {
      try {
        myKey2Size.put(e.getAttributeValue(KEY),
                       new Dimension(Integer.parseInt(e.getAttributeValue(ATTRIBUTE_WIDTH)),
                                     Integer.parseInt(e.getAttributeValue(ATTRIBUTE_HEIGHT))));
      }
      catch (NumberFormatException ignored) {
        // Corrupted persisted value — skip.
      }
    }
    else if (EXTENDED_STATE.equals(e.getName())) {
      try {
        myKey2ExtendedState.put(e.getAttributeValue(KEY), Integer.parseInt(e.getAttributeValue(STATE)));
      }
      catch (NumberFormatException ignored) {
        // Corrupted persisted value — skip.
      }
    }
  }
}

/**
 * @deprecated Use {@link com.intellij.ide.util.PropertiesComponent}
 */
@Deprecated
public void setExtendedState(String key, int extendedState) {
  myKey2ExtendedState.put(key, extendedState);
}

/**
 * @deprecated Use {@link com.intellij.ide.util.PropertiesComponent}
 */
@Deprecated
public int getExtendedState(String key) {
  // -1 is the "no value stored" sentinel.
  if (!myKey2ExtendedState.containsKey(key)) return -1;
  return myKey2ExtendedState.get(key);
}

/**
 * Returns the single open project, or {@code null} when zero or several projects are
 * open (in which case no unambiguous project qualification is possible).
 */
@Nullable
private static Project guessProject() {
  final Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
  return openProjects.length == 1 ? openProjects[0] : null;
}

/**
 * Qualifies {@code key} with the bounds of the screen showing the relevant frame
 * (focused frame, otherwise any visible frame, otherwise the project's frame), so that
 * stored locations/sizes are kept per monitor configuration. In headless mode a fixed
 * {@code .headless} suffix is used instead; under HiDPI the effective DPI is appended.
 */
@NotNull
private static String realKey(String key, @Nullable Project project) {
  GraphicsEnvironment env = GraphicsEnvironment.getLocalGraphicsEnvironment();
  if (env.isHeadlessInstance()) {
    return key + ".headless";
  }

  // Prefer the frame that owns the focus; fall back to any visible frame.
  JFrame frame = null;
  final Component owner = IdeFocusManager.findInstance().getFocusOwner();
  if (owner != null) {
    frame = UIUtil.getParentOfType(JFrame.class, owner);
  }
  if (frame == null) {
    frame = WindowManager.getInstance().findVisibleFrame();
  }
  // If the frame found belongs to a different project (or none was found), use the
  // frame of the requested project instead.
  if (project != null && (frame == null || (frame instanceof IdeFrame && project != ((IdeFrame)frame).getProject()))) {
    frame = WindowManager.getInstance().getFrame(project);
  }

  // Determine the bounds of the screen device that contains the frame's center point.
  Rectangle screen = new Rectangle(0, 0, 0, 0);
  if (frame != null) {
    final Point topLeft = frame.getLocation();
    Point center = new Point(topLeft.x + frame.getWidth() / 2, topLeft.y + frame.getHeight() / 2);
    for (GraphicsDevice device : env.getScreenDevices()) {
      Rectangle bounds = device.getDefaultConfiguration().getBounds();
      if (bounds.contains(center)) {
        screen = bounds;
        break;
      }
    }
  }
  else {
    // No frame available at all — fall back to the first (primary) screen device.
    GraphicsConfiguration gc = env.getScreenDevices()[0].getDefaultConfiguration();
    screen = gc.getBounds();
  }

  String realKey = key + '.' + screen.x + '.' + screen.y + '.' + screen.width + '.' + screen.height;
  if (JBUI.isHiDPI()) {
    // Different scale factors get distinct keys so sizes don't bleed across DPI changes.
    realKey += "@" + (((int)(96 * JBUI.scale(1f)))) + "dpi";
  }
  return realKey;
}
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.kafka.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Contains source Apache Kafka versions and compatible target Apache Kafka versions. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kafka-2018-11-14/CompatibleKafkaVersion" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CompatibleKafkaVersion implements Serializable, Cloneable, StructuredPojo { /** * <p> * An Apache Kafka version. * </p> */ private String sourceVersion; /** * <p> * A list of Apache Kafka versions. * </p> */ private java.util.List<String> targetVersions; /** * <p> * An Apache Kafka version. * </p> * * @param sourceVersion * <p> * An Apache Kafka version. * </p> */ public void setSourceVersion(String sourceVersion) { this.sourceVersion = sourceVersion; } /** * <p> * An Apache Kafka version. * </p> * * @return <p> * An Apache Kafka version. * </p> */ public String getSourceVersion() { return this.sourceVersion; } /** * <p> * An Apache Kafka version. * </p> * * @param sourceVersion * <p> * An Apache Kafka version. * </p> * @return Returns a reference to this object so that method calls can be chained together. 
*/ public CompatibleKafkaVersion withSourceVersion(String sourceVersion) { setSourceVersion(sourceVersion); return this; } /** * <p> * A list of Apache Kafka versions. * </p> * * @return <p> * A list of Apache Kafka versions. * </p> */ public java.util.List<String> getTargetVersions() { return targetVersions; } /** * <p> * A list of Apache Kafka versions. * </p> * * @param targetVersions * <p> * A list of Apache Kafka versions. * </p> */ public void setTargetVersions(java.util.Collection<String> targetVersions) { if (targetVersions == null) { this.targetVersions = null; return; } this.targetVersions = new java.util.ArrayList<String>(targetVersions); } /** * <p> * A list of Apache Kafka versions. * </p> * * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setTargetVersions(java.util.Collection)} or {@link #withTargetVersions(java.util.Collection)} if you want * to override the existing values. * </p> * * @param targetVersions * <p> * A list of Apache Kafka versions. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public CompatibleKafkaVersion withTargetVersions(String... targetVersions) { if (this.targetVersions == null) { setTargetVersions(new java.util.ArrayList<String>(targetVersions.length)); } for (String ele : targetVersions) { this.targetVersions.add(ele); } return this; } /** * <p> * A list of Apache Kafka versions. * </p> * * @param targetVersions * <p> * A list of Apache Kafka versions. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public CompatibleKafkaVersion withTargetVersions(java.util.Collection<String> targetVersions) { setTargetVersions(targetVersions); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getSourceVersion() != null) sb.append("SourceVersion: ").append(getSourceVersion()).append(","); if (getTargetVersions() != null) sb.append("TargetVersions: ").append(getTargetVersions()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CompatibleKafkaVersion == false) return false; CompatibleKafkaVersion other = (CompatibleKafkaVersion) obj; if (other.getSourceVersion() == null ^ this.getSourceVersion() == null) return false; if (other.getSourceVersion() != null && other.getSourceVersion().equals(this.getSourceVersion()) == false) return false; if (other.getTargetVersions() == null ^ this.getTargetVersions() == null) return false; if (other.getTargetVersions() != null && other.getTargetVersions().equals(this.getTargetVersions()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getSourceVersion() == null) ? 0 : getSourceVersion().hashCode()); hashCode = prime * hashCode + ((getTargetVersions() == null) ? 0 : getTargetVersions().hashCode()); return hashCode; } @Override public CompatibleKafkaVersion clone() { try { return (CompatibleKafkaVersion) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.kafka.model.transform.CompatibleKafkaVersionMarshaller.getInstance().marshall(this, protocolMarshaller); } }