gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright (C) 2012 by * * SMU Text Mining Group * Singapore Management University * * TwitterLDA is distributed for research purpose, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * The original paper is as follows: * Wayne Xin Zhao, Jing Jiang et al., Comparing Twitter and traditional media using topic models. * ECIR'11. * * Note that the package here is not developed by the authors * in the paper, nor used in the original papers. It's an implementation * based on the paper, where most of the work is done by qiming.diao.2010@smu.sg. * * Feel free to contact the following people if you find any * problems in the package. * * minghui.qiu.2010@smu.edu.sg * */ package nlp.topicmodel.tlda; import java.io.File; import java.util.ArrayList; import java.util.HashMap; public class TwitterLDAmain { static ArrayList<String> stopWords; public static void main(String args[]) throws Exception { String base = System.getProperty("user.dir") + "/data/"; String filelist = base + "tlda_parameter/filelist.txt"; String dataDir = base + "originaldocs/"; String outputDir = base + "output/tlda/"; String modelParas = base + "tlda_parameter/parameters.txt"; // create output folder //FileUtil.mkdir(new File(base + "/ModelRes/")); //FileUtil.mkdir(new File(outputDir)); ArrayList<String> files = new ArrayList<String>(); File fileDir = new File(dataDir); File [] fileArrary = fileDir.listFiles(); //FileUtil.readLines(filelist, files); for (File file : fileArrary) { files.add(file.getName()); //System.out.println(file.getName()); } // 1. 
get model parameters ArrayList<String> modelSettings = new ArrayList<String>(); getModelPara(modelParas, modelSettings); int A_all = Integer.parseInt(modelSettings.get(0)); float alpha_g = Float.parseFloat(modelSettings.get(1)); float beta_word = Float.parseFloat(modelSettings.get(2)); float beta_b = Float.parseFloat(modelSettings.get(3)); float gamma = Float.parseFloat(modelSettings.get(4)); int nIter = Integer.parseInt(modelSettings.get(5)); System.err.println("Topics:" + A_all + ", alpha_g:" + alpha_g + ", beta_word:" + beta_word + ", beta_b:" + beta_b + ", gamma:" + gamma + ", iteration:" + nIter); modelSettings.clear(); int outputTopicwordCnt = 20; int outputBackgroundwordCnt = 100; String outputWordsInTopics = outputDir + "WordsInTopics.txt"; String outputBackgroundWordsDistribution = outputDir + "BackgroundWordsDistribution.txt"; String outputTextWithLabel = outputDir + "/TextWithLabel/"; if (!new File(outputTextWithLabel).exists()) FileUtil.mkdir(new File(outputTextWithLabel)); // 2. get documents (users) HashMap<String, Integer> wordMap = new HashMap<String, Integer>(); ArrayList<User> users = new ArrayList<User>(); ArrayList<String> uniWordMap = new ArrayList<String>(); for (int i = 0; i < files.size(); i++) { User tweetuser = new User(dataDir + files.get(i), files.get(i), wordMap, uniWordMap); users.add(tweetuser); } // ComUtil.printHash(wordMap); if (uniWordMap.size() != wordMap.size()) { System.out.println(wordMap.size()); System.out.println(uniWordMap.size()); System.err .println("uniqword size is not the same as the hashmap size!"); System.exit(0); } // output wordMap and itemMap FileUtil.writeLines(outputDir + "wordMap.txt", wordMap); FileUtil.writeLines(outputDir + "uniWordMap.txt", uniWordMap); int uniWordMapSize = uniWordMap.size(); wordMap.clear(); uniWordMap.clear(); // uniItemMap.clear(); // 3. 
run the model Model model = new Model(A_all, users.size(), uniWordMapSize, nIter, alpha_g, beta_word, beta_b, gamma); model.intialize(users); // model.fake_intialize(users); model.estimate(users, nIter); // 4. output model results System.out.println("Record Topic Distributions/Counts"); model.outputTopicDistributionOnUsers(outputDir, users); System.out.println("read uniwordmap"); FileUtil.readLines(outputDir + "uniWordMap.txt", uniWordMap); try { model.outputTextWithLabel(outputTextWithLabel, users, uniWordMap); } catch (Exception e) { e.printStackTrace(); } System.out.println("write text with labels done"); // model.outputTopicCountOnTime(outputTopicsCountOnTime); users.clear(); try { model.outputWordsInTopics(outputWordsInTopics, uniWordMap, outputTopicwordCnt); } catch (Exception e1) { e1.printStackTrace(); } try { model.outputBackgroundWordsDistribution( outputBackgroundWordsDistribution, uniWordMap, outputBackgroundwordCnt); } catch (Exception e1) { e1.printStackTrace(); } System.out.println("Record Background done"); System.out.println("Final Done"); } private static void getModelPara(String modelParas, ArrayList<String> modelSettings) { modelSettings.clear(); // T , alpha , beta , gamma , iteration , saveStep, saveTimes modelSettings.clear(); // add default parameter settings modelSettings.add("40"); modelSettings.add("1.25"); modelSettings.add("0.01"); modelSettings.add("0.01"); modelSettings.add("20"); modelSettings.add("20"); ArrayList<String> inputlines = new ArrayList<String>(); FileUtil.readLines(modelParas, inputlines); for (int i = 0; i < inputlines.size(); i++) { int index = inputlines.get(i).indexOf(":"); String para = inputlines.get(i).substring(0, index).trim() .toLowerCase(); String value = inputlines.get(i) .substring(index + 1, inputlines.get(i).length()).trim() .toLowerCase(); switch (ModelParas.valueOf(para)) { case topics: modelSettings.set(0, value); break; case alpha_g: modelSettings.set(1, value); break; case beta_word: 
modelSettings.set(2, value); break; case beta_b: modelSettings.set(3, value); break; case gamma: modelSettings.set(4, value); break; case iteration: modelSettings.set(5, value); break; default: break; } } } public enum ModelParas { topics, alpha_g, beta_word, beta_b, gamma, iteration; } }
/* * Copyright 2006-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.consol.citrus.dsl.builder; import com.consol.citrus.TestAction; import com.consol.citrus.actions.ReceiveMessageAction; import com.consol.citrus.dsl.actions.DelegatingTestAction; import com.consol.citrus.endpoint.Endpoint; import com.consol.citrus.exceptions.CitrusRuntimeException; import com.consol.citrus.message.Message; import com.consol.citrus.message.MessageType; import com.consol.citrus.util.FileUtils; import com.consol.citrus.validation.MessageValidator; import com.consol.citrus.validation.builder.*; import com.consol.citrus.validation.callback.ValidationCallback; import com.consol.citrus.validation.context.DefaultValidationContext; import com.consol.citrus.validation.context.ValidationContext; import com.consol.citrus.validation.json.*; import com.consol.citrus.validation.script.ScriptValidationContext; import com.consol.citrus.validation.xml.*; import com.consol.citrus.variable.MessageHeaderVariableExtractor; import com.consol.citrus.variable.dictionary.DataDictionary; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.core.io.Resource; import org.springframework.oxm.Marshaller; import org.springframework.oxm.XmlMappingException; import 
org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.xml.transform.StringResult; import java.io.IOException; import java.nio.charset.Charset; import java.util.Map; /** * Receive message action builder offers configuration methods for a receive test action. Build options * include construction of control message payload and headers as well as value extraction. * * @author Christoph Deppisch * @since 2.3 */ public class ReceiveMessageBuilder<A extends ReceiveMessageAction, T extends ReceiveMessageBuilder> extends AbstractTestActionBuilder<DelegatingTestAction<TestAction>> { /** Self reference for generics support */ private final T self; /** Message type for this action builder */ private String messageType; /** Validation context used in this action builder */ private DefaultValidationContext defaultValidationContext = new DefaultValidationContext(); private XmlMessageValidationContext xmlMessageValidationContext = new XmlMessageValidationContext(); private JsonMessageValidationContext jsonMessageValidationContext = new JsonMessageValidationContext(); /** JSON validation context used in this action builder */ private JsonPathMessageValidationContext jsonPathValidationContext; /** Script validation context used in this action builder */ private ScriptValidationContext scriptValidationContext; /** Variable extractors filled within this action builder */ private MessageHeaderVariableExtractor headerExtractor; private XpathPayloadVariableExtractor xpathExtractor; private JsonPathVariableExtractor jsonPathExtractor; /** Basic application context */ private ApplicationContext applicationContext; /** * Default constructor using test action, basic application context and position handle. * @param action */ public ReceiveMessageBuilder(A action) { this(new DelegatingTestAction(action)); } /** * Default constructor. 
*/ public ReceiveMessageBuilder() { this((A) new ReceiveMessageAction()); } /** * Constructor using delegate test action. * @param action */ public ReceiveMessageBuilder(DelegatingTestAction<TestAction> action) { super(action); this.self = (T) this; } /** * Sets the message endpoint to receive messages from. * @param messageEndpoint * @return */ public ReceiveMessageBuilder endpoint(Endpoint messageEndpoint) { getAction().setEndpoint(messageEndpoint); return this; } /** * Sets the message endpoint uri to receive messages from. * @param messageEndpointUri * @return */ public ReceiveMessageBuilder endpoint(String messageEndpointUri) { getAction().setEndpointUri(messageEndpointUri); return this; } /** * Adds a custom timeout to this message receiving action. * @param receiveTimeout * @return */ public T timeout(long receiveTimeout) { getAction().setReceiveTimeout(receiveTimeout); return self; } /** * Expect a control message in this receive action. * @param controlMessage * @return */ public T message(Message controlMessage) { StaticMessageContentBuilder staticMessageContentBuilder = StaticMessageContentBuilder.withMessage(controlMessage); staticMessageContentBuilder.setMessageHeaders(getMessageContentBuilder().getMessageHeaders()); getAction().setMessageBuilder(staticMessageContentBuilder); return self; } /** * Sets the payload data on the message builder implementation. 
* @param payload * @return */ protected void setPayload(String payload) { MessageContentBuilder messageContentBuilder = getMessageContentBuilder(); if (messageContentBuilder instanceof PayloadTemplateMessageBuilder) { ((PayloadTemplateMessageBuilder) messageContentBuilder).setPayloadData(payload); } else if (messageContentBuilder instanceof StaticMessageContentBuilder) { ((StaticMessageContentBuilder) messageContentBuilder).getMessage().setPayload(payload); } else { throw new CitrusRuntimeException("Unable to set payload on message builder type: " + messageContentBuilder.getClass()); } } /** * Sets the message name. * @param name * @return */ public T name(String name) { getMessageContentBuilder().setMessageName(name); return self; } /** * Expect this message payload data in received message. * @param payload * @return */ public T payload(String payload) { setPayload(payload); return self; } /** * Expect this message payload data in received message. * @param payloadResource * @return */ public T payload(Resource payloadResource) { return payload(payloadResource, FileUtils.getDefaultCharset()); } /** * Expect this message payload data in received message. * @param payloadResource * @param charset * @return */ public T payload(Resource payloadResource, Charset charset) { try { setPayload(FileUtils.readToString(payloadResource, charset)); } catch (IOException e) { throw new CitrusRuntimeException("Failed to read payload resource", e); } return self; } /** * Expect this message payload as model object which is marshalled to a character sequence * using the default object to xml mapper before validation is performed. 
* @param payload * @param marshaller * @return */ public T payload(Object payload, Marshaller marshaller) { StringResult result = new StringResult(); try { marshaller.marshal(payload, result); } catch (XmlMappingException e) { throw new CitrusRuntimeException("Failed to marshal object graph for message payload", e); } catch (IOException e) { throw new CitrusRuntimeException("Failed to marshal object graph for message payload", e); } setPayload(result.toString()); return self; } /** * Expect this message payload as model object which is mapped to a character sequence * using the default object to json mapper before validation is performed. * @param payload * @param objectMapper * @return */ public T payload(Object payload, ObjectMapper objectMapper) { try { setPayload(objectMapper.writer().writeValueAsString(payload)); } catch (JsonProcessingException e) { throw new CitrusRuntimeException("Failed to map object graph for message payload", e); } return self; } /** * Expect this message payload as model object which is marshalled to a character sequence using the default object to xml mapper that * is available in Spring bean application context. * * @param payload * @return */ public T payloadModel(Object payload) { Assert.notNull(applicationContext, "Citrus application context is not initialized!"); if (!CollectionUtils.isEmpty(applicationContext.getBeansOfType(Marshaller.class))) { return payload(payload, applicationContext.getBean(Marshaller.class)); } else if (!CollectionUtils.isEmpty(applicationContext.getBeansOfType(ObjectMapper.class))) { return payload(payload, applicationContext.getBean(ObjectMapper.class)); } throw new CitrusRuntimeException("Unable to find default object mapper or marshaller in application context"); } /** * Expect this message payload as model object which is marshalled to a character sequence using the given object to xml mapper that * is accessed by its bean name in Spring bean application context. 
* * @param payload * @param mapperName * @return */ public T payload(Object payload, String mapperName) { Assert.notNull(applicationContext, "Citrus application context is not initialized!"); if (applicationContext.containsBean(mapperName)) { Object mapper = applicationContext.getBean(mapperName); if (Marshaller.class.isAssignableFrom(mapper.getClass())) { return payload(payload, (Marshaller) mapper); } else if (ObjectMapper.class.isAssignableFrom(mapper.getClass())) { return payload(payload, (ObjectMapper) mapper); } else { throw new CitrusRuntimeException(String.format("Invalid bean type for mapper '%s' expected ObjectMapper or Marshaller but was '%s'", mapperName, mapper.getClass())); } } throw new CitrusRuntimeException("Unable to find default object mapper or marshaller in application context"); } /** * Expect this message header entry in received message. * @param name * @param value * @return */ public T header(String name, Object value) { getMessageContentBuilder().getMessageHeaders().put(name, value); return self; } /** * Expect this message header data in received message. Message header data is used in * SOAP messages as XML fragment for instance. * @param data * @return */ public T header(String data) { getMessageContentBuilder().getHeaderData().add(data); return self; } /** * Expect this message header data as model object which is marshalled to a character sequence using the default object to xml mapper that * is available in Spring bean application context. 
* * @param model * @return */ public T headerFragment(Object model) { Assert.notNull(applicationContext, "Citrus application context is not initialized!"); if (!CollectionUtils.isEmpty(applicationContext.getBeansOfType(Marshaller.class))) { return headerFragment(model, applicationContext.getBean(Marshaller.class)); } else if (!CollectionUtils.isEmpty(applicationContext.getBeansOfType(ObjectMapper.class))) { return headerFragment(model, applicationContext.getBean(ObjectMapper.class)); } throw new CitrusRuntimeException("Unable to find default object mapper or marshaller in application context"); } /** * Expect this message header data as model object which is marshalled to a character sequence using the given object to xml mapper that * is accessed by its bean name in Spring bean application context. * * @param model * @param mapperName * @return */ public T headerFragment(Object model, String mapperName) { Assert.notNull(applicationContext, "Citrus application context is not initialized!"); if (applicationContext.containsBean(mapperName)) { Object mapper = applicationContext.getBean(mapperName); if (Marshaller.class.isAssignableFrom(mapper.getClass())) { return headerFragment(model, (Marshaller) mapper); } else if (ObjectMapper.class.isAssignableFrom(mapper.getClass())) { return headerFragment(model, (ObjectMapper) mapper); } else { throw new CitrusRuntimeException(String.format("Invalid bean type for mapper '%s' expected ObjectMapper or Marshaller but was '%s'", mapperName, mapper.getClass())); } } throw new CitrusRuntimeException("Unable to find default object mapper or marshaller in application context"); } /** * Expect this message header data as model object which is marshalled to a character sequence * using the default object to xml mapper before validation is performed. 
* @param model * @param marshaller * @return */ public T headerFragment(Object model, Marshaller marshaller) { StringResult result = new StringResult(); try { marshaller.marshal(model, result); } catch (XmlMappingException e) { throw new CitrusRuntimeException("Failed to marshal object graph for message header data", e); } catch (IOException e) { throw new CitrusRuntimeException("Failed to marshal object graph for message header data", e); } return header(result.toString()); } /** * Expect this message header data as model object which is mapped to a character sequence * using the default object to json mapper before validation is performed. * @param model * @param objectMapper * @return */ public T headerFragment(Object model, ObjectMapper objectMapper) { try { return header(objectMapper.writer().writeValueAsString(model)); } catch (JsonProcessingException e) { throw new CitrusRuntimeException("Failed to map object graph for message header data", e); } } /** * Expect this message header data in received message from file resource. Message header data is used in * SOAP messages as XML fragment for instance. * @param resource * @return */ public T header(Resource resource) { return header(resource, FileUtils.getDefaultCharset()); } /** * Expect this message header data in received message from file resource. Message header data is used in * SOAP messages as XML fragment for instance. * @param resource * @param charset * @return */ public T header(Resource resource, Charset charset) { try { getMessageContentBuilder().getHeaderData().add(FileUtils.readToString(resource, charset)); } catch (IOException e) { throw new CitrusRuntimeException("Failed to read header resource", e); } return self; } /** * Adds script validation. 
* @param validationScript * @return */ public T validateScript(String validationScript) { getScriptValidationContext().setValidationScript(validationScript); return self; } /** * Reads validation script file resource and sets content as validation script. * @param scriptResource * @return */ public T validateScript(Resource scriptResource) { return validateScript(scriptResource, FileUtils.getDefaultCharset()); } /** * Reads validation script file resource and sets content as validation script. * @param scriptResource * @param charset * @return */ public T validateScript(Resource scriptResource, Charset charset) { try { validateScript(FileUtils.readToString(scriptResource, charset)); } catch (IOException e) { throw new CitrusRuntimeException("Failed to read script resource file", e); } return self; } /** * Adds script validation file resource. * @param fileResourcePath * @return */ public T validateScriptResource(String fileResourcePath) { getScriptValidationContext().setValidationScriptResourcePath(fileResourcePath); return self; } /** * Adds custom validation script type. * @param type * @return */ public T validateScriptType(String type) { getScriptValidationContext().setScriptType(type); return self; } /** * Sets a explicit message type for this receive action. * @param messageType * @return */ public T messageType(MessageType messageType) { messageType(messageType.name()); return self; } /** * Sets a explicit message type for this receive action. * @param messageType * @return */ public T messageType(String messageType) { this.messageType = messageType; getAction().setMessageType(messageType); if (getAction().getValidationContexts().isEmpty()) { getAction().getValidationContexts().add(defaultValidationContext); getAction().getValidationContexts().add(xmlMessageValidationContext); getAction().getValidationContexts().add(jsonMessageValidationContext); } return self; } /** * Sets schema validation enabled/disabled for this message. 
* @param enabled * @return */ public T schemaValidation(boolean enabled) { xmlMessageValidationContext.setSchemaValidation(enabled); return self; } /** * Validates XML namespace with prefix and uri. * @param prefix * @param namespaceUri * @return */ public T validateNamespace(String prefix, String namespaceUri) { xmlMessageValidationContext.getControlNamespaces().put(prefix, namespaceUri); return self; } /** * Adds message element validation. * @param path * @param controlValue * @return */ public T validate(String path, Object controlValue) { if (JsonPathMessageValidationContext.isJsonPathExpression(path)) { getJsonPathValidationContext().getJsonPathExpressions().put(path, controlValue); } else { getXPathValidationContext().getXpathExpressions().put(path, controlValue); } return self; } /** * Adds ignore path expression for message element. * @param path * @return */ public T ignore(String path) { if (messageType.equalsIgnoreCase(MessageType.XML.name()) || messageType.equalsIgnoreCase(MessageType.XHTML.name())) { xmlMessageValidationContext.getIgnoreExpressions().add(path); } else if (messageType.equalsIgnoreCase(MessageType.JSON.name())) { jsonMessageValidationContext.getIgnoreExpressions().add(path); } return self; } /** * Adds XPath message element validation. * @param xPathExpression * @param controlValue * @return */ public T xpath(String xPathExpression, Object controlValue) { validate(xPathExpression, controlValue); return self; } /** * Adds JsonPath message element validation. * @param jsonPathExpression * @param controlValue * @return */ public T jsonPath(String jsonPathExpression, Object controlValue) { validate(jsonPathExpression, controlValue); return self; } /** * Sets explicit schema instance name to use for schema validation. * @param schemaName * @return */ public T xsd(String schemaName) { xmlMessageValidationContext.setSchema(schemaName); return self; } /** * Sets explicit xsd schema repository instance to use for validation. 
* @param schemaRepository * @return */ public T xsdSchemaRepository(String schemaRepository) { xmlMessageValidationContext.setSchemaRepository(schemaRepository); return self; } /** * Adds explicit namespace declaration for later path validation expressions. * @param prefix * @param namespaceUri * @return */ public T namespace(String prefix, String namespaceUri) { getXpathVariableExtractor().getNamespaces().put(prefix, namespaceUri); xmlMessageValidationContext.getNamespaces().put(prefix, namespaceUri); return self; } /** * Sets default namespace declarations on this action builder. * @param namespaceMappings * @return */ public T namespaces(Map<String, String> namespaceMappings) { getXpathVariableExtractor().getNamespaces().putAll(namespaceMappings); xmlMessageValidationContext.getNamespaces().putAll(namespaceMappings); return self; } /** * Sets message selector string. * @param messageSelector * @return */ public T selector(String messageSelector) { getAction().setMessageSelectorString(messageSelector); return self; } /** * Sets message selector elements. * @param messageSelector * @return */ public T selector(Map<String, Object> messageSelector) { getAction().setMessageSelector(messageSelector); return self; } /** * Sets explicit message validator for this receive action. * @param validator * @return */ public T validator(MessageValidator<? extends ValidationContext> validator) { getAction().setValidator(validator); return self; } /** * Sets explicit message validator by name. * @param validatorName * @return */ @SuppressWarnings("unchecked") public T validator(String validatorName) { Assert.notNull(applicationContext, "Citrus application context is not initialized!"); MessageValidator<? extends ValidationContext> validator = applicationContext.getBean(validatorName, MessageValidator.class); getAction().setValidator(validator); return self; } /** * Sets explicit data dictionary for this receive action. 
* @param dictionary * @return */ public T dictionary(DataDictionary dictionary) { getAction().setDataDictionary(dictionary); return self; } /** * Sets explicit data dictionary by name. * @param dictionaryName * @return */ @SuppressWarnings("unchecked") public T dictionary(String dictionaryName) { Assert.notNull(applicationContext, "Citrus application context is not initialized!"); DataDictionary dictionary = applicationContext.getBean(dictionaryName, DataDictionary.class); getAction().setDataDictionary(dictionary); return self; } /** * Extract message header entry as variable. * @param headerName * @param variable * @return */ public T extractFromHeader(String headerName, String variable) { if (headerExtractor == null) { headerExtractor = new MessageHeaderVariableExtractor(); getAction().getVariableExtractors().add(headerExtractor); } headerExtractor.getHeaderMappings().put(headerName, variable); return self; } /** * Extract message element via XPath or JSONPath from message payload as new test variable. * @param path * @param variable * @return */ public T extractFromPayload(String path, String variable) { if (JsonPathMessageValidationContext.isJsonPathExpression(path)) { getJsonPathVariableExtractor().getJsonPathExpressions().put(path, variable); } else { getXpathVariableExtractor().getXpathExpressions().put(path, variable); } return self; } /** * Adds validation callback to the receive action for validating * the received message with Java code. * @param callback * @return */ public T validationCallback(ValidationCallback callback) { if (callback instanceof ApplicationContextAware) { ((ApplicationContextAware) callback).setApplicationContext(applicationContext); } getAction().setValidationCallback(callback); return self; } /** * Sets the Spring bean application context. 
* @param applicationContext */ public T withApplicationContext(ApplicationContext applicationContext) { this.applicationContext = applicationContext; return self; } /** * Get message builder, if already registered or create a new message builder and register it * * @return the message builder in use */ protected AbstractMessageContentBuilder getMessageContentBuilder() { if (getAction().getMessageBuilder() != null && getAction().getMessageBuilder() instanceof AbstractMessageContentBuilder) { return (AbstractMessageContentBuilder) getAction().getMessageBuilder(); } else { PayloadTemplateMessageBuilder messageBuilder = new PayloadTemplateMessageBuilder(); getAction().setMessageBuilder(messageBuilder); return messageBuilder; } } /** * Creates new variable extractor and adds it to test action. */ private XpathPayloadVariableExtractor getXpathVariableExtractor() { if (xpathExtractor == null) { xpathExtractor = new XpathPayloadVariableExtractor(); getAction().getVariableExtractors().add(xpathExtractor); } return xpathExtractor; } /** * Creates new variable extractor and adds it to test action. */ private JsonPathVariableExtractor getJsonPathVariableExtractor() { if (jsonPathExtractor == null) { jsonPathExtractor = new JsonPathVariableExtractor(); getAction().getVariableExtractors().add(jsonPathExtractor); } return jsonPathExtractor; } /** * Gets the validation context as XML validation context an raises exception if existing validation context is * not a XML validation context. 
 * @return the active XPath-capable validation context (existing one if already
 *         XPath-capable, otherwise a freshly created replacement)
 */
private XpathMessageValidationContext getXPathValidationContext() {
    if (xmlMessageValidationContext instanceof XpathMessageValidationContext) {
        // Already XPath-capable — reuse it directly.
        return ((XpathMessageValidationContext)xmlMessageValidationContext);
    } else {
        // Upgrade the plain XML context: copy every setting over to a new
        // XPath-capable context, then swap it in on the underlying action so
        // validation runs against the upgraded context only.
        XpathMessageValidationContext xPathContext = new XpathMessageValidationContext();
        xPathContext.setNamespaces(xmlMessageValidationContext.getNamespaces());
        xPathContext.setControlNamespaces(xmlMessageValidationContext.getControlNamespaces());
        xPathContext.setIgnoreExpressions(xmlMessageValidationContext.getIgnoreExpressions());
        xPathContext.setSchema(xmlMessageValidationContext.getSchema());
        xPathContext.setSchemaRepository(xmlMessageValidationContext.getSchemaRepository());
        xPathContext.setSchemaValidation(xmlMessageValidationContext.isSchemaValidationEnabled());
        xPathContext.setDTDResource(xmlMessageValidationContext.getDTDResource());

        // Remove the old context BEFORE adding the replacement — the action must
        // never hold both at once.
        getAction().getValidationContexts().remove(xmlMessageValidationContext);
        getAction().getValidationContexts().add(xPathContext);

        // Remember the upgraded context so subsequent calls take the fast path.
        xmlMessageValidationContext = xPathContext;
        return xPathContext;
    }
}

/**
 * Creates new script validation context if not done before and gets the script validation context.
 * The context is registered on the action the first time it is created.
 *
 * @return the lazily created script validation context
 */
private ScriptValidationContext getScriptValidationContext() {
    if (scriptValidationContext == null) {
        // Lazily create using the currently configured message type and register on the action.
        scriptValidationContext = new ScriptValidationContext(messageType.toString());
        getAction().getValidationContexts().add(scriptValidationContext);
    }
    return scriptValidationContext;
}

/**
 * Creates new JSONPath validation context if not done before and gets the validation context.
 * The context is registered on the action the first time it is created.
 *
 * @return the lazily created JSONPath validation context
 */
private JsonPathMessageValidationContext getJsonPathValidationContext() {
    if (jsonPathValidationContext == null) {
        jsonPathValidationContext = new JsonPathMessageValidationContext();
        getAction().getValidationContexts().add(jsonPathValidationContext);
    }
    return jsonPathValidationContext;
}

/**
 * Provides access to receive message action delegate.
 *
 * @return the delegate cast to {@link ReceiveMessageAction}
 */
protected ReceiveMessageAction getAction() {
    return (ReceiveMessageAction) action.getDelegate();
}

/**
 * Sets the message type.
 * @param messageType the enum constant; stored by its {@code name()}
 */
protected void setMessageType(MessageType messageType) {
    this.messageType = messageType.name();
}

/**
 * Sets the message type.
 * @param messageType the raw message type name
 */
protected void setMessageType(String messageType) {
    this.messageType = messageType;
}

/**
 * Sets the xpath extractor.
 * @param xpathExtractor
 */
protected void setXpathExtractor(XpathPayloadVariableExtractor xpathExtractor) {
    this.xpathExtractor = xpathExtractor;
}

/**
 * Sets the jsonPath extractor.
 * @param jsonPathExtractor
 */
protected void setJsonPathExtractor(JsonPathVariableExtractor jsonPathExtractor) {
    this.jsonPathExtractor = jsonPathExtractor;
}

/**
 * Sets the header extractor.
 * @param headerExtractor
 */
protected void setHeaderExtractor(MessageHeaderVariableExtractor headerExtractor) {
    this.headerExtractor = headerExtractor;
}

/**
 * Sets the script message validator.
 * @param scriptValidationContext
 */
protected void setScriptValidationContext(ScriptValidationContext scriptValidationContext) {
    this.scriptValidationContext = scriptValidationContext;
}

/**
 * Sets the script message validator.
 * @param jsonPathValidationContext
 */
protected void setJsonPathValidationContext(JsonPathMessageValidationContext jsonPathValidationContext) {
    this.jsonPathValidationContext = jsonPathValidationContext;
}

/**
 * Sets the XML validation context.
 * @param validationContext
 */
protected void setXmlMessageValidationContext(XmlMessageValidationContext validationContext) {
    this.xmlMessageValidationContext = validationContext;
}

/**
 * Sets the JSON validation context.
 * @param validationContext
 */
protected void setJsonMessageValidationContext(JsonMessageValidationContext validationContext) {
    this.jsonMessageValidationContext = validationContext;
}

/**
 * Sets the default validation context.
 * @param validationContext
 */
protected void setDefaultValidationContext(DefaultValidationContext validationContext) {
    this.defaultValidationContext = validationContext;
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.cassandra; import com.datastax.driver.core.BoundStatement; import com.datastax.driver.core.Cluster; import com.datastax.driver.core.Configuration; import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.Metadata; import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSetFuture; import com.datastax.driver.core.Session; import com.datastax.driver.core.Statement; import com.datastax.driver.core.exceptions.InvalidQueryException; import com.datastax.driver.core.exceptions.NoHostAvailableException; import com.datastax.driver.core.exceptions.UnavailableException; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; import org.junit.Before; import org.junit.Test; import javax.net.ssl.SSLContext; import java.net.InetSocketAddress; import java.util.HashMap; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static 
org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for the PutCassandraQL processor.
 */
public class PutCassandraQLTest {

    private TestRunner testRunner;
    private MockPutCassandraQL processor;

    @Before
    public void setUp() {
        processor = new MockPutCassandraQL();
        testRunner = TestRunners.newTestRunner(processor);
    }

    /**
     * Builds the flow file attribute map describing the ten bound CQL arguments
     * (type/value pairs) shared by several tests. Argument 3 deliberately carries
     * a type but no value in order to exercise setNull handling.
     *
     * @return a fresh, mutable map of CQL argument attributes
     */
    private static HashMap<String, String> createStandardCqlArgs() {
        HashMap<String, String> args = new HashMap<>();
        args.put("cql.args.1.type", "int");
        args.put("cql.args.1.value", "1");
        args.put("cql.args.2.type", "text");
        args.put("cql.args.2.value", "Joe");
        args.put("cql.args.3.type", "text");
        // No value for arg 3 to test setNull
        args.put("cql.args.4.type", "map<text,text>");
        args.put("cql.args.4.value", "{'a':'Hello', 'b':'World'}");
        args.put("cql.args.5.type", "list<boolean>");
        args.put("cql.args.5.value", "[true,false,true]");
        args.put("cql.args.6.type", "set<double>");
        args.put("cql.args.6.value", "{1.0, 2.0}");
        args.put("cql.args.7.type", "bigint");
        args.put("cql.args.7.value", "20000000");
        args.put("cql.args.8.type", "float");
        args.put("cql.args.8.value", "1.0");
        args.put("cql.args.9.type", "blob");
        args.put("cql.args.9.value", "0xDEADBEEF");
        args.put("cql.args.10.type", "timestamp");
        args.put("cql.args.10.value", "2016-07-01T15:21:05Z");
        return args;
    }

    /**
     * A password without a username must make the configuration invalid;
     * supplying both (plus a consistency level) makes it valid again.
     */
    @Test
    public void testProcessorConfigValidity() {
        testRunner.setProperty(AbstractCassandraProcessor.CONTACT_POINTS, "localhost:9042");
        testRunner.assertValid();
        testRunner.setProperty(AbstractCassandraProcessor.PASSWORD, "password");
        testRunner.assertNotValid();
        testRunner.setProperty(AbstractCassandraProcessor.USERNAME, "username");
        testRunner.setProperty(AbstractCassandraProcessor.CONSISTENCY_LEVEL, "ONE");
        testRunner.assertValid();
    }

    /** Expression-language references must be accepted by property validation. */
    @Test
    public void testProcessorELConfigValidity() {
        testRunner.setProperty(AbstractCassandraProcessor.CONTACT_POINTS, "${hosts}");
        testRunner.setProperty(AbstractCassandraProcessor.PASSWORD, "${pass}");
        testRunner.setProperty(AbstractCassandraProcessor.USERNAME, "${user}");
        testRunner.setProperty(AbstractCassandraProcessor.CHARSET, "${charset}");
        testRunner.setProperty(PutCassandraQL.STATEMENT_TIMEOUT, "${timeout}");
        testRunner.assertValid();
    }

    /** A well-formed parameterized INSERT routes to success. */
    @Test
    public void testProcessorHappyPath() {
        setUpStandardTestConfig();

        testRunner.enqueue("INSERT INTO users (user_id, first_name, last_name, properties, bits, scaleset, "
                + "largenum, scale, byteobject, ts) VALUES ?, ?, ?, ?, ?, ?, ?, ?, ?, ?",
                createStandardCqlArgs());

        testRunner.run(1, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_SUCCESS, 1);
        testRunner.clearTransferState();
    }

    /** Same as the happy path, but with every property supplied via expression language. */
    @Test
    public void testProcessorHappyPathELConfig() {
        testRunner.setProperty(AbstractCassandraProcessor.CONTACT_POINTS, "${hosts}");
        testRunner.setProperty(AbstractCassandraProcessor.PASSWORD, "${pass}");
        testRunner.setProperty(AbstractCassandraProcessor.USERNAME, "${user}");
        testRunner.setProperty(AbstractCassandraProcessor.CONSISTENCY_LEVEL, "ONE");
        testRunner.setProperty(AbstractCassandraProcessor.CHARSET, "${charset}");
        testRunner.setProperty(PutCassandraQL.STATEMENT_TIMEOUT, "${timeout}");
        testRunner.assertValid();

        testRunner.setVariable("hosts", "localhost:9042");
        testRunner.setVariable("user", "username");
        testRunner.setVariable("pass", "password");
        testRunner.setVariable("charset", "UTF-8");
        testRunner.setVariable("timeout", "30 sec");

        testRunner.enqueue("INSERT INTO users (user_id, first_name, last_name, properties, bits, scaleset, "
                + "largenum, scale, byteobject, ts) VALUES ?, ?, ?, ?, ?, ?, ?, ?, ?, ?",
                createStandardCqlArgs());

        testRunner.run(1, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_SUCCESS, 1);
        testRunner.clearTransferState();
    }

    /**
     * Exercises the prepared-statement cache (sized to 1) with two distinct
     * statements, a reused statement with different data, and a literal statement
     * without bound arguments.
     */
    @Test
    public void testMultipleQuery() {
        setUpStandardTestConfig();
        testRunner.setProperty(PutCassandraQL.STATEMENT_CACHE_SIZE, "1");

        HashMap<String, String> testData = createStandardCqlArgs();

        testRunner.enqueue("INSERT INTO users (user_id, first_name, last_name, properties, bits, scaleset, "
                + "largenum, scale, byteobject, ts) VALUES ?, ?, ?, ?, ?, ?, ?, ?, ?, ?", testData);

        testRunner.enqueue("INSERT INTO newusers (user_id, first_name, last_name, properties, bits, scaleset, "
                + "largenum, scale, byteobject, ts) VALUES ?, ?, ?, ?, ?, ?, ?, ?, ?, ?", testData);

        // Change it up a bit, the same statement is executed with different data
        testData.put("cql.args.1.value", "2");
        testRunner.enqueue("INSERT INTO users (user_id, first_name, last_name, properties, bits, scaleset, "
                + "largenum, scale, byteobject, ts) VALUES ?, ?, ?, ?, ?, ?, ?, ?, ?, ?", testData);

        testRunner.enqueue("INSERT INTO users (user_id) VALUES ('user_id data');");

        testRunner.run(4, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_SUCCESS, 4);
    }

    /** An unparseable timestamp value must route the flow file to failure. */
    @Test
    public void testProcessorBadTimestamp() {
        setUpStandardTestConfig();
        processor.setExceptionToThrow(
                new InvalidQueryException(new InetSocketAddress("localhost", 9042), "invalid timestamp"));

        HashMap<String, String> testData = createStandardCqlArgs();
        testData.put("cql.args.10.value", "not a timestamp");
        testRunner.enqueue("INSERT INTO users (user_id, first_name, last_name, properties, bits, scaleset, "
                + "largenum, scale, byteobject, ts) VALUES ?, ?, ?, ?, ?, ?, ?, ?, ?, ?", testData);

        testRunner.run(1, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_FAILURE, 1);
        testRunner.clearTransferState();
    }

    /** InvalidQueryException is terminal: route to failure. */
    @Test
    public void testProcessorInvalidQueryException() {
        setUpStandardTestConfig();

        // Test exceptions
        processor.setExceptionToThrow(
                new InvalidQueryException(new InetSocketAddress("localhost", 9042), "invalid query"));
        testRunner.enqueue("UPDATE users SET cities = [ 'New York', 'Los Angeles' ] WHERE user_id = 'coast2coast';");
        testRunner.run(1, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_FAILURE, 1);
        testRunner.clearTransferState();
    }

    /** UnavailableException is transient: route to retry. */
    @Test
    public void testProcessorUnavailableException() {
        setUpStandardTestConfig();

        processor.setExceptionToThrow(
                new UnavailableException(new InetSocketAddress("localhost", 9042), ConsistencyLevel.ALL, 5, 2));
        testRunner.enqueue("UPDATE users SET cities = [ 'New York', 'Los Angeles' ] WHERE user_id = 'coast2coast';");
        testRunner.run(1, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_RETRY, 1);
    }

    /** NoHostAvailableException is transient: route to retry. */
    @Test
    public void testProcessorNoHostAvailableException() {
        setUpStandardTestConfig();

        processor.setExceptionToThrow(new NoHostAvailableException(new HashMap<>()));
        testRunner.enqueue("UPDATE users SET cities = [ 'New York', 'Los Angeles' ] WHERE user_id = 'coast2coast';");
        testRunner.run(1, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_RETRY, 1);
    }

    /** A generic ProcessException routes to failure. */
    @Test
    public void testProcessorProcessException() {
        setUpStandardTestConfig();

        processor.setExceptionToThrow(new ProcessException());
        testRunner.enqueue("UPDATE users SET cities = [ 'New York', 'Los Angeles' ] WHERE user_id = 'coast2coast';");
        testRunner.run(1, true, true);
        testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_FAILURE, 1);
    }

    /** Applies the minimal valid configuration (host + credentials + consistency). */
    private void setUpStandardTestConfig() {
        testRunner.setProperty(AbstractCassandraProcessor.CONTACT_POINTS, "localhost:9042");
        testRunner.setProperty(AbstractCassandraProcessor.PASSWORD, "password");
        testRunner.setProperty(AbstractCassandraProcessor.USERNAME, "username");
        testRunner.setProperty(AbstractCassandraProcessor.CONSISTENCY_LEVEL, "ONE");
        testRunner.assertValid();
    }

    /**
     * Provides a stubbed processor instance for testing
     */
    private static class MockPutCassandraQL extends PutCassandraQL {

        private Exception exceptionToThrow = null;
        private Session mockSession = mock(Session.class);

        @Override
        protected Cluster createCluster(List<InetSocketAddress> contactPoints, SSLContext sslContext,
                                        String username, String password, String compressionType) {
            // Return a fully mocked cluster whose session answers executeAsync()
            // with a canned result set (or throws the configured exception).
            Cluster mockCluster = mock(Cluster.class);
            try {
                Metadata mockMetadata = mock(Metadata.class);
                when(mockMetadata.getClusterName()).thenReturn("cluster1");
                when(mockCluster.getMetadata()).thenReturn(mockMetadata);
                when(mockCluster.connect()).thenReturn(mockSession);
                when(mockCluster.connect(anyString())).thenReturn(mockSession);
                Configuration config = Configuration.builder().build();
                when(mockCluster.getConfiguration()).thenReturn(config);
                ResultSetFuture future = mock(ResultSetFuture.class);
                ResultSet rs = CassandraQueryTestUtil.createMockResultSet();
                PreparedStatement ps = mock(PreparedStatement.class);
                when(mockSession.prepare(anyString())).thenReturn(ps);
                BoundStatement bs = mock(BoundStatement.class);
                when(ps.bind()).thenReturn(bs);
                when(future.getUninterruptibly()).thenReturn(rs);
                try {
                    doReturn(rs).when(future).getUninterruptibly(anyLong(), any(TimeUnit.class));
                } catch (TimeoutException te) {
                    throw new IllegalArgumentException("Mocked cluster doesn't time out");
                }
                if (exceptionToThrow != null) {
                    doThrow(exceptionToThrow).when(mockSession).executeAsync(anyString());
                    doThrow(exceptionToThrow).when(mockSession).executeAsync(any(Statement.class));
                } else {
                    when(mockSession.executeAsync(anyString())).thenReturn(future);
                    when(mockSession.executeAsync(any(Statement.class))).thenReturn(future);
                }
                when(mockSession.getCluster()).thenReturn(mockCluster);
            } catch (Exception e) {
                fail(e.getMessage());
            }
            return mockCluster;
        }

        /** Configures the mock session to throw {@code e} from executeAsync(). */
        void setExceptionToThrow(Exception e) {
            exceptionToThrow = e;
            doThrow(exceptionToThrow).when(mockSession).executeAsync(anyString());
            doThrow(exceptionToThrow).when(mockSession).executeAsync(any(Statement.class));
        }
    }
}
/* * Copyright (C) 2009 JavaRosa * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.javarosa.xpath.expr; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.Vector; import org.javarosa.core.model.condition.EvaluationContext; import org.javarosa.core.model.condition.pivot.UnpivotableExpressionException; import org.javarosa.core.model.data.BooleanData; import org.javarosa.core.model.data.DateData; import org.javarosa.core.model.data.DecimalData; import org.javarosa.core.model.data.GeoPointData; import org.javarosa.core.model.data.IAnswerData; import org.javarosa.core.model.data.IntegerData; import org.javarosa.core.model.data.LongData; import org.javarosa.core.model.data.SelectMultiData; import org.javarosa.core.model.data.SelectOneData; import org.javarosa.core.model.data.StringData; import org.javarosa.core.model.data.UncastData; import org.javarosa.core.model.data.helper.Selection; import org.javarosa.core.model.instance.AbstractTreeElement; import org.javarosa.core.model.instance.DataInstance; import org.javarosa.core.model.instance.TreeReference; import org.javarosa.core.util.externalizable.DeserializationException; import org.javarosa.core.util.externalizable.ExtUtil; import org.javarosa.core.util.externalizable.ExtWrapList; import org.javarosa.core.util.externalizable.PrototypeFactory; import org.javarosa.xform.util.XFormAnswerDataSerializer; import org.javarosa.xpath.XPathException; import 
org.javarosa.xpath.XPathLazyNodeset;
import org.javarosa.xpath.XPathMissingInstanceException;
import org.javarosa.xpath.XPathNodeset;
import org.javarosa.xpath.XPathTypeMismatchException;
import org.javarosa.xpath.XPathUnsupportedException;

/**
 * An XPath path expression (a sequence of steps with an initial context of
 * root, relative, or a filter expression such as instance(...)/current()).
 * Supports translation to a {@link TreeReference}, evaluation against a
 * {@link DataInstance}, and externalization.
 */
public class XPathPathExpr extends XPathExpression {
    public static final int INIT_CONTEXT_ROOT = 0;
    public static final int INIT_CONTEXT_RELATIVE = 1;
    public static final int INIT_CONTEXT_EXPR = 2;

    public int init_context;
    public XPathStep[] steps;

    //for INIT_CONTEXT_EXPR only
    public XPathFilterExpr filtExpr;

    public XPathPathExpr () { } //for deserialization

    public XPathPathExpr (int init_context, XPathStep[] steps) {
        this.init_context = init_context;
        this.steps = steps;
    }

    public XPathPathExpr (XPathFilterExpr filtExpr, XPathStep[] steps) {
        this(INIT_CONTEXT_EXPR, steps);
        this.filtExpr = filtExpr;
    }

    public TreeReference getReference () throws XPathUnsupportedException {
        return getReference(false);
    }

    /**
     * translate an xpath path reference into a TreeReference
     * TreeReferences only support a subset of true xpath paths; restrictions are:
     *   simple child name tests 'child::name', '.', and '..' allowed only
     *   no predicates
     *   all '..' steps must come before anything else
     *
     * @param allowPredicates whether predicates should be tolerated
     * @return the equivalent TreeReference
     * @throws XPathUnsupportedException if the path uses an unsupported construct
     */
    public TreeReference getReference (boolean allowPredicates) throws XPathUnsupportedException {
        TreeReference ref = new TreeReference();
        boolean parentsAllowed;

        // Establish the starting context of the reference.
        switch (init_context) {
        case XPathPathExpr.INIT_CONTEXT_ROOT:
            ref.setRefLevel(TreeReference.REF_ABSOLUTE);
            parentsAllowed = false;
            break;
        case XPathPathExpr.INIT_CONTEXT_RELATIVE:
            ref.setRefLevel(0);
            parentsAllowed = true;
            break;
        case XPathPathExpr.INIT_CONTEXT_EXPR:
            // Only instance('name')/... and current()/... filter roots are supported.
            if (this.filtExpr.x != null && this.filtExpr.x instanceof XPathFuncExpr) {
                XPathFuncExpr func = (XPathFuncExpr)(this.filtExpr.x);
                if (func.id.toString().equals("instance")) {
                    ref.setRefLevel(TreeReference.REF_ABSOLUTE); //i assume when refering the non main instance you have to be absolute
                    parentsAllowed = false;
                    if (func.args.length != 1) {
                        throw new XPathUnsupportedException("instance() function used with "+func.args.length+ " arguements. Expecting 1 arguement");
                    }
                    if (!(func.args[0] instanceof XPathStringLiteral)) {
                        throw new XPathUnsupportedException("instance() function expecting 1 string literal arguement arguement");
                    }
                    XPathStringLiteral strLit = (XPathStringLiteral)(func.args[0]);
                    //we've got a non-standard instance in play, watch out
                    ref.setInstanceName(strLit.s);
                } else if (func.id.toString().equals("current")) {
                    parentsAllowed = true;
                    ref.setContext(TreeReference.CONTEXT_ORIGINAL);
                } else {
                    //We only support expression root contexts for instance refs, everything else is an illegal filter
                    throw new XPathUnsupportedException("filter expression");
                }
            } else {
                //We only support expression root contexts for instance refs, everything else is an illegal filter
                throw new XPathUnsupportedException("filter expression");
            }
            break;
        default:
            throw new XPathUnsupportedException("filter expression");
        }

        // Translate each step; only self ('.'), parent ('..'), attribute, and
        // named/wildcard child steps are representable.
        for (int i = 0; i < steps.length; i++) {
            XPathStep step = steps[i];

            if (step.axis == XPathStep.AXIS_SELF) {
                if (step.test != XPathStep.TEST_TYPE_NODE) {
                    throw new XPathUnsupportedException("step other than 'child::name', '.', '..'");
                }
            } else if (step.axis == XPathStep.AXIS_PARENT) {
                if (!parentsAllowed || step.test != XPathStep.TEST_TYPE_NODE) {
                    throw new XPathUnsupportedException("step other than 'child::name', '.', '..'");
                } else {
                    ref.incrementRefLevel();
                }
            } else if (step.axis == XPathStep.AXIS_ATTRIBUTE) {
                if (step.test == XPathStep.TEST_NAME) {
                    ref.add(step.name.toString(), TreeReference.INDEX_ATTRIBUTE);
                    parentsAllowed = false;
                    //TODO: Can you step back from an attribute, or should this always be
                    //the last step?
                } else {
                    throw new XPathUnsupportedException("attribute step other than 'attribute::name");
                }
            } else if (step.axis == XPathStep.AXIS_CHILD) {
                if (step.test == XPathStep.TEST_NAME) {
                    ref.add(step.name.toString(), TreeReference.INDEX_UNBOUND);
                    parentsAllowed = false;
                } else if (step.test == XPathStep.TEST_NAME_WILDCARD) {
                    ref.add(TreeReference.NAME_WILDCARD, TreeReference.INDEX_UNBOUND);
                    parentsAllowed = false;
                } else {
                    throw new XPathUnsupportedException("step other than 'child::name', '.', '..'");
                }
            } else {
                throw new XPathUnsupportedException("step other than 'child::name', '.', '..'");
            }

            if (step.predicates.length > 0) {
                // NOTE(review): predicates are attached regardless of the
                // 'allowPredicates' flag — confirm whether that is intended.
                Vector<XPathExpression> v = new Vector<XPathExpression>();
                for (int j = 0; j < step.predicates.length; j++) {
                    v.addElement(step.predicates[j]);
                }
                ref.addPredicate(i, v);
            }
        }
        return ref;
    }

    /**
     * Evaluates this path against the given instance in the given context,
     * resolving non-main-instance references and returning a lazy nodeset.
     */
    public XPathNodeset eval (DataInstance m, EvaluationContext ec) {
        TreeReference genericRef = getReference();
        TreeReference ref;
        if (genericRef.getContext() == TreeReference.CONTEXT_ORIGINAL) {
            ref = genericRef.contextualize(ec.getOriginalContext());
        } else {
            ref = genericRef.contextualize(ec.getContextRef());
        }

        //We don't necessarily know the model we want to be working with until we've contextualized the
        //node
        //check if this nodeset refers to a non-main instance
        if (ref.getInstanceName() != null && ref.isAbsolute()) {
            DataInstance nonMain = ec.getInstance(ref.getInstanceName());
            if (nonMain != null) {
                m = nonMain;
                if (m.getRoot() == null) {
                    //This instance is _declared_, but doesn't actually have any data in it.
                    throw new XPathMissingInstanceException(ref.getInstanceName(),
                            "Instance referenced by " + ref.toString(true) + " has not been loaded");
                }
            } else {
                throw new XPathMissingInstanceException(ref.getInstanceName(),
                        "Instance referenced by " + ref.toString(true) + " does not exist");
            }
        } else {
            //TODO: We should really stop passing 'm' around and start just getting the right instance from ec
            //at a more central level
            m = ec.getMainInstance();
            if (m == null) {
                String refStr = ref == null ? "" : ref.toString(true);
                throw new XPathException("Cannot evaluate the reference [" + refStr + "] in the current evaluation context. No default instance has been declared!");
            }
        }

        //TODO: This causes problems when the paths are heterogeneous. IE: If the path is looking for an attribute that
        //doesn't exist on the first node, there is no template path
        if (ref.isAbsolute() && m.getTemplatePath(ref) == null) {
            return XPathNodeset.ConstructInvalidPathNodeset(ref.toString(), genericRef.toString());
        }

        return new XPathLazyNodeset(ref, m, ec);
    }

    /**
     * Returns the value at {@code ref}, honoring constraint evaluation (where the
     * candidate value stands in for the node being checked) and relevancy.
     */
    public static Object getRefValue (DataInstance model, EvaluationContext ec, TreeReference ref) {
        if (ec.isConstraint && ref.equals(ec.getContextRef())) {
            //ITEMSET TODO: need to update this; for itemset/copy constraints, need to simulate a whole xml sub-tree here
            return unpackValue(ec.candidateValue);
        } else {
            AbstractTreeElement node = model.resolveReference(ref);
            if (node == null) {
                //shouldn't happen -- only existent nodes should be in nodeset
                throw new XPathTypeMismatchException("Node " + ref.toString() + " does not exist!");
            }
            // Non-relevant nodes evaluate as empty (null is unpacked to "").
            return unpackValue(node.isRelevant() ? node.getValue() : null);
        }
    }

    /**
     * Converts an {@link IAnswerData} into the raw value used by XPath
     * arithmetic/string handling; {@code null} becomes the empty string.
     */
    public static Object unpackValue (IAnswerData val) {
        if (val == null) {
            return "";
        } else if (val instanceof UncastData) {
            return val.getValue();
        } else if (val instanceof IntegerData) {
            // Numeric answers are normalized to Double for XPath math.
            return new Double(((Integer)val.getValue()).doubleValue());
        } else if (val instanceof LongData) {
            return new Double(((Long)val.getValue()).doubleValue());
        } else if (val instanceof DecimalData) {
            return val.getValue();
        } else if (val instanceof StringData) {
            return val.getValue();
        } else if (val instanceof SelectOneData) {
            return ((Selection)val.getValue()).getValue();
        } else if (val instanceof SelectMultiData) {
            return (new XFormAnswerDataSerializer()).serializeAnswerData(val);
        } else if (val instanceof DateData) {
            return val.getValue();
        } else if (val instanceof BooleanData) {
            return val.getValue();
        } else if (val instanceof GeoPointData) {
            return val.uncast().getString();
        } else {
            System.out.println("warning: unrecognized data type in xpath expr: " + val.getClass().getName());
            return val.uncast().getString(); //TODO: Does this mess up any of our other plans?
        }
    }

    public String toString () {
        StringBuffer sb = new StringBuffer();
        sb.append("{path-expr:");
        switch (init_context) {
        case INIT_CONTEXT_ROOT: sb.append("abs"); break;
        case INIT_CONTEXT_RELATIVE: sb.append("rel"); break;
        case INIT_CONTEXT_EXPR: sb.append(filtExpr.toString()); break;
        }
        sb.append(",{");
        for (int i = 0; i < steps.length; i++) {
            sb.append(steps[i].toString());
            if (i < steps.length - 1)
                sb.append(",");
        }
        sb.append("}}");
        return sb.toString();
    }

    public boolean equals (Object o) {
        if (o instanceof XPathPathExpr) {
            XPathPathExpr x = (XPathPathExpr)o;

            //Shortcuts for easily comparable values
            if (init_context != x.init_context || steps.length != x.steps.length) {
                return false;
            }

            return ExtUtil.arrayEquals(steps, x.steps) && (init_context == INIT_CONTEXT_EXPR ? filtExpr.equals(x.filtExpr) : true);
        } else {
            return false;
        }
    }

    /**
     * Warning: this method has somewhat unclear semantics.
     *
     * "matches" follows roughly the same process as equals(), in that it goes
     * through the path step by step and compares whether each step can refer to the same node.
     * The only difference is that match() will allow for a named step to match a step whose name
     * is a wildcard.
     *
     * So
     * /data/path/to
     * will "match"
     * /data/*&#47;to
     *
     * even though they are not equal.
     *
     * Matching is reflexive, consistent, and symmetric, but _not_ transitive.
     *
     * @param o
     * @return true if the expression is a path that matches this one
     */
    public boolean matches(XPathExpression o) {
        if (o instanceof XPathPathExpr) {
            XPathPathExpr x = (XPathPathExpr)o;

            //Shortcuts for easily comparable values
            //(the step-length comparison here also removes a formerly duplicated, dead re-check below)
            if (init_context != x.init_context || steps.length != x.steps.length) {
                return false;
            }

            for (int i = 0; i < steps.length; i++) {
                if (!steps[i].matches(x.steps[i])) {
                    return false;
                }
            }

            // If all steps match, we still need to make sure we're in the same "context" if this
            // is a normal expression.
            return (init_context == INIT_CONTEXT_EXPR ? filtExpr.equals(x.filtExpr) : true);
        } else {
            return false;
        }
    }

    public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException {
        init_context = ExtUtil.readInt(in);
        if (init_context == INIT_CONTEXT_EXPR) {
            filtExpr = (XPathFilterExpr)ExtUtil.read(in, XPathFilterExpr.class, pf);
        }

        Vector v = (Vector)ExtUtil.read(in, new ExtWrapList(XPathStep.class), pf);
        steps = new XPathStep[v.size()];
        for (int i = 0; i < steps.length; i++)
            steps[i] = ((XPathStep)v.elementAt(i)).intern();
    }

    public void writeExternal(DataOutputStream out) throws IOException {
        ExtUtil.writeNumeric(out, init_context);
        if (init_context == INIT_CONTEXT_EXPR) {
            ExtUtil.write(out, filtExpr);
        }

        Vector v = new Vector();
        for (int i = 0; i < steps.length; i++)
            v.addElement(steps[i]);
        ExtUtil.write(out, new ExtWrapList(v));
    }

    /** Builds a path expression of named child steps equivalent to {@code ref}. */
    public static XPathPathExpr fromRef (TreeReference ref) {
        XPathPathExpr path = new XPathPathExpr();
        path.init_context = (ref.isAbsolute() ? INIT_CONTEXT_ROOT : INIT_CONTEXT_RELATIVE);
        path.steps = new XPathStep[ref.size()];
        for (int i = 0; i < path.steps.length; i++) {
            if (ref.getName(i).equals(TreeReference.NAME_WILDCARD)) {
                path.steps[i] = new XPathStep(XPathStep.AXIS_CHILD, XPathStep.TEST_NAME_WILDCARD).intern();
            } else {
                path.steps[i] = new XPathStep(XPathStep.AXIS_CHILD, new XPathQName(ref.getName(i))).intern();
            }
        }
        return path;
    }

    public Object pivot (DataInstance model, EvaluationContext evalContext, Vector<Object> pivots, Object sentinal) throws UnpivotableExpressionException {
        TreeReference ref = this.getReference();
        //Either concretely the sentinal, or "."
        if (ref.equals(sentinal) || (ref.getRefLevel() == 0)) {
            return sentinal;
        } else {
            //It's very, very hard to figure out how to pivot predicates. For now, just skip it
            for (int i = 0 ; i < ref.size(); ++i) {
                if (ref.getPredicate(i) != null && ref.getPredicate(i).size() > 0) {
                    throw new UnpivotableExpressionException("Can't pivot filtered treereferences. Ref: " + ref.toString(true) + " has predicates.");
                }
            }
            return this.eval(model, evalContext);
        }
    }
}
/************************************************************************* * * * Open Dynamics Engine 4J, Copyright (C) 2009-2014 Tilmann Zaeschke * * All rights reserved. Email: ode4j@gmx.de Web: www.ode4j.org * * * * This library is free software; you can redistribute it and/or * * modify it under the terms of EITHER: * * (1) The GNU Lesser General Public License as published by the Free * * Software Foundation; either version 2.1 of the License, or (at * * your option) any later version. The text of the GNU Lesser * * General Public License is included with this library in the * * file LICENSE.TXT. * * (2) The BSD-style license that is included with this library in * * the file ODE4J-LICENSE-BSD.TXT. * * * * This library is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the files * * LICENSE.TXT and ODE4J-LICENSE-BSD.TXT for more details. * * * *************************************************************************/ package org.ode4j.tests.math; import org.junit.Test; import org.ode4j.math.DQuaternion; public class TestDQuaternion extends OdeTestCase { @Test public void main() { } @Test public void testGet(){ DQuaternion x = new DQuaternion(1, 2, 3, 4); assertEquals(x.get0(), 1.); assertEquals(x.get1(), 2.); assertEquals(x.get2(), 3.); assertEquals(x.get3(), 4.); assertEquals(x.get(0), 1.); assertEquals(x.get(1), 2.); assertEquals(x.get(2), 3.); assertEquals(x.get(3), 4.); } @Test public void testEqual(){ DQuaternion x = new DQuaternion(1, 2, 3, 4); DQuaternion xx = new DQuaternion(1, 2, 3, 4); DQuaternion x1 = new DQuaternion(0, 2, 3, 4); DQuaternion x2 = new DQuaternion(1, 0, 3, 4); DQuaternion x3 = new DQuaternion(1, 2, 0, 4); DQuaternion x4 = new DQuaternion(1, 2, 3, 0); assertTrue(x.isEq(xx)); assertFalse(x.isEq(x1)); assertFalse(x.isEq(x2)); assertFalse(x.isEq(x3)); assertFalse(x.isEq(x4)); } @Test public void testSet(){ 
DQuaternion x = new DQuaternion(1, 2, 3, 4); DQuaternion x2 = new DQuaternion(1, 2, 3, 4); DQuaternion y = new DQuaternion(5, 6, 7, 8); DQuaternion z = new DQuaternion(9, 10, 11, 12); x.set0(9); assertEquals(x.get0(), 9.); x.set1(10); assertEquals(x.get1(), 10.); x.set2(11); assertEquals(x.get2(), 11.); x.set3(12); assertEquals(x.get3(), 12.); assertEquals(x, z); x.set(0, 5); assertEquals(x.get0(), 5.); x.set(1, 6); assertEquals(x.get1(), 6.); x.set(2, 7); assertEquals(x.get2(), 7.); x.set(3, 8); assertEquals(x.get3(), 8.); assertEquals(x, y); x.set(1, 2, 3, 4); assertEquals(x, x2); x.set(y); assertEquals(x, y); // x.set( new double[]{ 8, 9, 11, -12} ); // assertTrue(x.get0()==8 && x.get1()==9 && x.get2()==11 && x.get3()==-12); // // x.setValues(2.5); // assertTrue(x.get0()==2.5 && x.get1()==2.5 && x.get2()==2.5 && x.get3()==2.5); // // assertFalse(x.equals(x2)); // assertFalse(x.equals(y)); // assertFalse(x.equals(z)); } @Test public void testInit(){ DQuaternion x = new DQuaternion(1, 2, 3, 4); DQuaternion y = new DQuaternion(); DQuaternion z = new DQuaternion(x); assertTrue(x.isEq(z)); assertFalse(x.isEq(y)); assertEquals(y.get0(), 0.); assertEquals(y.get1(), 0.); assertEquals(y.get2(), 0.); assertEquals(y.get3(), 0.); assertEquals(z.get0(), 1.); assertEquals(z.get1(), 2.); assertEquals(z.get2(), 3.); assertEquals(z.get3(), 4.); } @Test public void testAdd(){ DQuaternion x = new DQuaternion(1, 2, 3, 4); DQuaternion y = new DQuaternion(4, 8, -1, -7); DQuaternion t = new DQuaternion(); assertFalse(x.isEq(y)); t.add(x); assertTrue(t.isEq(x)); t.add(3, 6, -4, -11); assertTrue(t.isEq(y)); // t.add(0, -3); // t.add(1, -6); // t.add(2, 4); // t.add(3, 11); // assertTrue(t.equals(x)); // t.add0(3); // t.add1(6); // t.add2(-4); // assertTrue(t.equals(y)); } @Test public void testSum(){ //TODO // dQuaternion x = new dQuaternion(1, 2, 3, 4); // dQuaternion y = new dQuaternion(4, 8, -1, -7); // dQuaternion t = new dQuaternion(); // assertFalse(x.equals(y)); // // t.add(x); 
// assertTrue(t.equals(x)); // t.add(3, 6, -4, -11); // assertTrue(t.equals(y)); // // t.add(0, -3); // t.add(1, -6); // t.add(2, 4); // t.add(3, 11); // assertTrue(t.equals(x)); // //// t.add0(3); //// t.add1(6); //// t.add2(-4); //// assertTrue(t.equals(y)); } @Test public void testSub(){ DQuaternion x = new DQuaternion(1, 2, 3, 4); DQuaternion y = new DQuaternion(4, 8, -1, -7); DQuaternion t = new DQuaternion(); assertFalse(x.isEq(y)); t.add(x); t.add(x); // t.sub(x); // assertTrue(t.equals(x)); // t.sub(-3, -6, 4); // assertTrue(t.equals(y)); // t.sub(0, 3); // t.sub(1, 6); // t.sub(2, -4); // assertTrue(t.isEq(x)); } @Test public void testScale(){ DQuaternion y = new DQuaternion(4, 10, -6, -13); DQuaternion t = new DQuaternion(); t.set(y); t.scale(0.5); assertTrue(t.isEq( new DQuaternion(2, 5, -3, -6.5) )); } // @Test // public void testClone() { // dQuaternion y = new dQuaternion(4, 8, -1, -7); // dQuaternion t = y.clone(); // assertTrue( y.equals(t) ); // t.set0(1); // assertFalse( y.equals(t) ); // } @Test public void testOther(){ // DQuaternion x = new DQuaternion(1, 2, 3, 4); // DQuaternion y = new DQuaternion(4, 8, -1, -7); DQuaternion t = new DQuaternion(); //TODO remove dSafeNormalize3()? try { t.set(0, 0, 0, 0).normalize(); fail(); } catch (IllegalStateException e) { // Good! 
} assertEquals(new DQuaternion(1, 0, 0, 0), t); t.set(3, 4, -18, -6.5); t.normalize(); assertEquals(new DQuaternion(0.15166804174966758, 0.20222405566622345, -0.9100082504980056, -0.32861409045761314), t); // try { // t.set(0, 0, 0, 0).normalize(); // //assertEquals(new dQuaternion(1, 0, 0), t); // fail(t.toString()); // } catch (IllegalStateException e) { // //Ignore // } // // t.set(3, 4, -18, -6.5); // t.normalize(); // assertEquals(new dQuaternion(0.16058631827165676, 0.21411509102887566, -0.9635179096299405, 0.1), t); t.set(3, 4, -5, -2); assertEquals(Math.sqrt(54), t.length()); assertEquals(54.0, t.lengthSquared()); // t.set(-3, -4, -5); // t.eqAbs(); // assertEquals(new dQuaternion(3, 4, 5), t); // // t.eqDiff(x, y); // assertEquals(new dQuaternion(-3, -6, 4), t); } // @Test // public void testDot(){ // dQuaternion x = new dQuaternion(1, 2, 3, 4); // dQuaternion y = new dQuaternion(4, 8, -1, -7); // dQuaternion t = new dQuaternion(); // // assertEquals( 4+16-3 , t.eqDot(x, y)); // } // @Test // public void testMul(){ //TODO ?!?!?!? // dQuaternion x = new dQuaternion(1, 2, 3); // dQuaternion y = new dQuaternion(4, 8, -1); // dQuaternion t = new dQuaternion(); // // dMatrix3 B = new dMatrix3(0.10, 0.11, 0.12, 1.10, 1.11, 1.12, 2.10, 2.11, 2.22); // dQuaternion c = new dQuaternion(-1, 2.5, -11.7); // // t.eqMul(B, c); // double x1 = ; // double x2 = ; // double x3 = ; // } }
package org.sagebionetworks.repo.manager.dataaccess;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.UUID;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessApproval;
import org.sagebionetworks.repo.model.AccessApprovalDAO;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.AccessRequirementDAO;
import org.sagebionetworks.repo.model.ApprovalState;
import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL;
import org.sagebionetworks.repo.model.ManagedACTAccessRequirement;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.auth.NewUser;
import org.sagebionetworks.repo.model.dataaccess.AccessApprovalNotification;
import org.sagebionetworks.repo.model.dataaccess.AccessApprovalNotificationRequest;
import org.sagebionetworks.repo.model.dataaccess.AccessApprovalNotificationResponse;
import org.sagebionetworks.repo.model.dataaccess.NotificationType;
import org.sagebionetworks.repo.model.dbo.dao.dataaccess.DBODataAccessNotification;
import org.sagebionetworks.repo.model.dbo.dao.dataaccess.DataAccessNotificationDao;
import org.sagebionetworks.repo.model.dbo.dao.dataaccess.DataAccessNotificationType;
import org.sagebionetworks.repo.model.dbo.feature.FeatureStatusDao;
import org.sagebionetworks.repo.model.feature.Feature;
import org.sagebionetworks.repo.model.message.ChangeMessage;
import org.sagebionetworks.repo.model.message.ChangeType;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.workers.util.aws.message.RecoverableMessageException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

/**
 * Integration tests for {@code AccessApprovalNotificationManager}: verifies
 * that processing access-approval change messages (revocations) and
 * renewal-reminder requests creates at most one notification row per
 * (notification type, requirement, recipient), and that re-processing the
 * same input does not produce duplicates.
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public class AccessApprovalNotificationManagerIntegrationTest {

	@Autowired
	private UserManager userManager;

	@Autowired
	private FeatureStatusDao featureStatusDao;

	@Autowired
	private AccessApprovalDAO accessApprovalDao;

	@Autowired
	private AccessRequirementDAO accessRequirementDao;

	@Autowired
	private DataAccessNotificationDao notificationDao;

	@Autowired
	private AccessApprovalNotificationManager manager;

	// Bootstrap admin used to create/delete test fixtures.
	private UserInfo adminUser;
	// Two distinct principals so accessor/submitter roles can differ per test.
	private UserInfo submitter;
	private UserInfo accessor;
	// Principal ids created by this test, deleted again in after().
	private List<Long> users;

	@BeforeEach
	public void before() {
		users = new ArrayList<>();
		// Start from clean tables; these DAOs are shared with other tests.
		notificationDao.truncateAll();
		accessApprovalDao.clear();
		accessRequirementDao.clear();
		featureStatusDao.clear();
		// The manager is gated behind this feature flag; enable it explicitly.
		featureStatusDao.setFeatureEnabled(Feature.DATA_ACCESS_NOTIFICATIONS, true);
		adminUser = userManager.getUserInfo(BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId());
		submitter = createUser("submitter");
		accessor = createUser("accessor");
		users.addAll(Arrays.asList(submitter.getId(), accessor.getId()));
	}

	@AfterEach
	public void after() {
		notificationDao.truncateAll();
		accessApprovalDao.clear();
		accessRequirementDao.clear();
		featureStatusDao.clear();
		users.forEach( id -> {
			userManager.deletePrincipal(adminUser, id);
		});
	}

	/** A change message for a non-existing approval id is rejected. */
	@Test
	public void processAccessApprovalChangeWithNonExistingAccessApproval() throws RecoverableMessageException {
		ChangeMessage message = changeMessage(-1L);

		assertThrows(NotFoundException.class, () -> {
			manager.processAccessApprovalChange(message);
		});
	}

	/** A REVOKED approval change creates a REVOCATION notification. */
	@Test
	public void processAccessApprovalChange() throws RecoverableMessageException {
		AccessRequirement requirement = createManagedAR();
		AccessApproval approval = createApproval(requirement, submitter, submitter, ApprovalState.REVOKED, null);

		ChangeMessage message = changeMessage(approval.getId());

		// Call under test
		manager.processAccessApprovalChange(message);

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.REVOCATION,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());
	}

	/** Re-processing the same change message must not alter the stored notification. */
	@Test
	public void processAccessApprovalChangeReprocess() throws RecoverableMessageException {
		AccessRequirement requirement = createManagedAR();
		AccessApproval approval = createApproval(requirement, submitter, submitter, ApprovalState.REVOKED, null);

		ChangeMessage message = changeMessage(approval.getId());

		// Call under test
		manager.processAccessApprovalChange(message);

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.REVOCATION,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());

		DBODataAccessNotification notification = result.get();

		// Duplicate message
		manager.processAccessApprovalChange(message);

		result = notificationDao.findForUpdate(DataAccessNotificationType.REVOCATION,
				requirement.getId(), submitter.getId());

		// Make sure the notification didn't change
		assertEquals(notification, result.get());
	}

	/** Revocations for different accessors yield one notification per accessor. */
	@Test
	public void processAccessApprovalChangeWithDifferentAccessors() throws RecoverableMessageException {
		AccessRequirement requirement = createManagedAR();

		AccessApproval ap1 = createApproval(requirement, submitter, submitter, ApprovalState.REVOKED, null);
		AccessApproval ap2 = createApproval(requirement, submitter, accessor, ApprovalState.REVOKED, null);

		ChangeMessage message1 = changeMessage(ap1.getId());
		ChangeMessage message2 = changeMessage(ap2.getId());

		// Call under test
		manager.processAccessApprovalChange(message1);
		manager.processAccessApprovalChange(message2);

		Optional<DBODataAccessNotification> result1 = notificationDao.findForUpdate(DataAccessNotificationType.REVOCATION,
				requirement.getId(), submitter.getId());

		assertTrue(result1.isPresent());

		Optional<DBODataAccessNotification> result2 = notificationDao.findForUpdate(DataAccessNotificationType.REVOCATION,
				requirement.getId(), accessor.getId());

		assertTrue(result2.isPresent());
	}

	/** Two revoked approvals for the same accessor result in a single notification. */
	@Test
	public void processAccessApprovalChangeWithMultipleRevocations() throws RecoverableMessageException {
		AccessRequirement requirement = createManagedAR();

		// Simulates two revoked approvals for the same accessor but different submitters
		AccessApproval ap1 = createApproval(requirement, submitter, accessor, ApprovalState.REVOKED, null);
		AccessApproval ap2 = createApproval(requirement, accessor, accessor, ApprovalState.REVOKED, null);

		ChangeMessage message1 = changeMessage(ap1.getId());
		ChangeMessage message2 = changeMessage(ap2.getId());

		// Call under test
		manager.processAccessApprovalChange(message1);
		manager.processAccessApprovalChange(message2);

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.REVOCATION,
				requirement.getId(), accessor.getId());

		assertTrue(result.isPresent());

		// The notification is sent for the first processed approval only
		assertEquals(ap1.getId(), result.get().getAccessApprovalId());
	}

	/** An approval expiring exactly one reminder period from now triggers a reminder. */
	@Test
	public void testProcessAccessApprovalReminder() throws RecoverableMessageException {
		DataAccessNotificationType notificationType = DataAccessNotificationType.FIRST_RENEWAL_REMINDER;

		// Expiration aligned to the start of the day the reminder targets.
		Instant expireOn = LocalDate.now(ZoneOffset.UTC)
				.plus(notificationType.getReminderPeriod())
				.atStartOfDay()
				.toInstant(ZoneOffset.UTC);

		AccessRequirement requirement = createManagedAR();
		AccessApproval approval = createApproval(requirement, submitter, submitter, ApprovalState.APPROVED,
				expireOn);

		// Call under test
		manager.processAccessApproval(notificationType, approval.getId());

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());
	}

	/** Re-processing the same reminder must not modify the stored notification. */
	@Test
	public void testProcessAccessApprovalReminderReprocess() throws RecoverableMessageException {
		DataAccessNotificationType notificationType = DataAccessNotificationType.FIRST_RENEWAL_REMINDER;

		Instant expireOn = LocalDate.now(ZoneOffset.UTC)
				.plus(notificationType.getReminderPeriod())
				.atStartOfDay()
				.toInstant(ZoneOffset.UTC);

		AccessRequirement requirement = createManagedAR();
		AccessApproval approval = createApproval(requirement, submitter, submitter, ApprovalState.APPROVED,
				expireOn);

		manager.processAccessApproval(notificationType, approval.getId());

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());

		DBODataAccessNotification expectedNotification = result.get();

		// Call under test
		manager.processAccessApproval(notificationType, approval.getId());

		// Check that the notification wasn't updated
		result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		assertEquals(expectedNotification, result.get());
	}

	/** The same recipient gets one reminder per distinct requirement. */
	@Test
	public void testProcessAccessApprovalReminderDifferentRequirements() throws RecoverableMessageException {
		DataAccessNotificationType notificationType = DataAccessNotificationType.FIRST_RENEWAL_REMINDER;

		Instant expireOn = LocalDate.now(ZoneOffset.UTC)
				.plus(notificationType.getReminderPeriod())
				.atStartOfDay()
				.toInstant(ZoneOffset.UTC);

		AccessRequirement requirement = createManagedAR();
		AccessRequirement requirement2 = createManagedAR();

		AccessApproval ap1 = createApproval(requirement, submitter, submitter, ApprovalState.APPROVED, expireOn);
		AccessApproval ap2 = createApproval(requirement2, submitter, submitter, ApprovalState.APPROVED, expireOn);

		// Call under test
		manager.processAccessApproval(notificationType, ap1.getId());
		manager.processAccessApproval(notificationType, ap2.getId());

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());

		result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement2.getId(), submitter.getId());

		assertTrue(result.isPresent());
	}

	/**
	 * When a later approval (newer requirement version, later expiration the
	 * same day) exists, the reminder for the superseded approval is skipped
	 * and only the most recent approval produces a notification.
	 */
	@Test
	public void testProcessAccessApprovalReminderWithFutureApproval() throws RecoverableMessageException {
		DataAccessNotificationType notificationType = DataAccessNotificationType.FIRST_RENEWAL_REMINDER;

		Instant expireOn = LocalDate.now(ZoneOffset.UTC)
				.plus(notificationType.getReminderPeriod())
				.atStartOfDay()
				.toInstant(ZoneOffset.UTC);

		AccessRequirement requirement = createManagedAR();

		AccessApproval ap1 = createApproval(requirement, submitter, submitter, ApprovalState.APPROVED, expireOn);

		// Simulates a new requirement version
		requirement.setVersionNumber(requirement.getVersionNumber() + 1);

		// Expires in the future (but the same day)
		Instant futureExpireOn = expireOn.plus(12, ChronoUnit.HOURS);

		AccessApproval ap2 = createApproval(requirement, submitter, submitter, ApprovalState.APPROVED, futureExpireOn);

		// Call under test
		manager.processAccessApproval(notificationType, ap1.getId());

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		// The superseded approval must not generate a notification.
		assertFalse(result.isPresent());

		// We now process also the second one
		manager.processAccessApproval(notificationType, ap2.getId());

		result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());

		DBODataAccessNotification expectedNotification = result.get();

		// If we re-process out of order the first one a new notification should not be sent
		// NOTE(review): the comment above mentions the first approval but the
		// code re-processes ap2 — confirm the intended approval id here.
		manager.processAccessApproval(notificationType, ap2.getId());

		result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());
		assertEquals(expectedNotification, result.get());
	}

	/**
	 * First and second renewal reminders are tracked independently; each fires
	 * only when the approval's expiration matches its own reminder period.
	 */
	@Test
	public void testProcessAccessApprovalReminderWithDifferentReminders() throws RecoverableMessageException {
		Instant expireOn = LocalDate.now(ZoneOffset.UTC)
				.plus(DataAccessNotificationType.FIRST_RENEWAL_REMINDER.getReminderPeriod())
				.atStartOfDay()
				.toInstant(ZoneOffset.UTC);

		AccessRequirement requirement = createManagedAR();

		AccessApproval ap1 = createApproval(requirement, submitter, submitter, ApprovalState.APPROVED, expireOn);

		// Call under test
		manager.processAccessApproval(DataAccessNotificationType.FIRST_RENEWAL_REMINDER, ap1.getId());
		manager.processAccessApproval(DataAccessNotificationType.SECOND_RENEWAL_REMINDER, ap1.getId());

		Optional<DBODataAccessNotification> result = notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId());

		assertTrue(result.isPresent());
		// The second reminder is not due yet.
		assertFalse(notificationDao.findForUpdate(DataAccessNotificationType.SECOND_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId()).isPresent());

		DBODataAccessNotification expected = result.get();

		// Emulates passing of time
		expireOn = LocalDate.now(ZoneOffset.UTC)
				.plus(DataAccessNotificationType.SECOND_RENEWAL_REMINDER.getReminderPeriod())
				.atStartOfDay()
				.toInstant(ZoneOffset.UTC);

		ap1.setExpiredOn(Date.from(expireOn));

		accessApprovalDao.createOrUpdateBatch(Arrays.asList(ap1));

		// Re-process both
		manager.processAccessApproval(DataAccessNotificationType.FIRST_RENEWAL_REMINDER, ap1.getId());
		manager.processAccessApproval(DataAccessNotificationType.SECOND_RENEWAL_REMINDER, ap1.getId());

		// The first should not be re-processed at this time
		assertEquals(expected, notificationDao.findForUpdate(DataAccessNotificationType.FIRST_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId()).get());
		assertTrue(notificationDao.findForUpdate(DataAccessNotificationType.SECOND_RENEWAL_REMINDER,
				requirement.getId(), submitter.getId()).isPresent());
	}

	/** Listing notifications filters by requirement and recipient ids. */
	@Test
	public void testListNotificationRequest() throws RecoverableMessageException {
		DataAccessNotificationType notificationType = DataAccessNotificationType.FIRST_RENEWAL_REMINDER;

		Instant expireOn = LocalDate.now(ZoneOffset.UTC)
				.plus(notificationType.getReminderPeriod())
				.atStartOfDay()
				.toInstant(ZoneOffset.UTC);

		AccessRequirement requirement = createManagedAR();

		AccessApproval ap1 = createApproval(requirement, submitter, submitter, ApprovalState.APPROVED, expireOn);

		// Process the approval so that the notification is created
		manager.processAccessApproval(notificationType, ap1.getId());

		AccessApprovalNotificationRequest request = new AccessApprovalNotificationRequest();
		request.setRequirementId(requirement.getId());
		request.setRecipientIds(Arrays.asList(submitter.getId()));

		AccessApprovalNotification expected = new AccessApprovalNotification();
		expected.setNotificationType(NotificationType.valueOf(notificationType.name()));
		expected.setRequirementId(requirement.getId());
		expected.setRecipientId(submitter.getId());

		// Call under test
		AccessApprovalNotificationResponse response = manager.listNotificationsRequest(adminUser, request);

		assertEquals(1, response.getResults().size());

		AccessApprovalNotification result = response.getResults().iterator().next();

		// sentOn is assigned by the manager; copy it before comparing.
		expected.setSentOn(result.getSentOn());

		assertEquals(expected, result);
	}

	/** Builds an UPDATE change message for the given access-approval id. */
	private ChangeMessage changeMessage(Long id) {
		ChangeMessage message = new ChangeMessage();
		message.setChangeNumber(12345L);
		message.setChangeType(ChangeType.UPDATE);
		message.setObjectType(ObjectType.ACCESS_APPROVAL);
		message.setTimestamp(new Date());
		message.setObjectId(id.toString());
		return message;
	}

	/** Creates a throw-away user with a unique email/username. */
	private UserInfo createUser(String prefix) {
		NewUser newUser = new NewUser();
		newUser.setEmail(UUID.randomUUID().toString() + "@test.com");
		newUser.setUserName(prefix + "_" + UUID.randomUUID().toString());
		return userManager.getUserInfo(userManager.createUser(newUser));
	}

	/** Persists a new managed ACT access requirement owned by the admin. */
	private AccessRequirement createManagedAR() {
		AccessRequirement accessRequirement = new ManagedACTAccessRequirement();
		accessRequirement.setAccessType(ACCESS_TYPE.DOWNLOAD);
		accessRequirement.setCreatedBy(adminUser.getId().toString());
		accessRequirement.setCreatedOn(new Date());
		accessRequirement.setModifiedBy(adminUser.getId().toString());
		accessRequirement.setModifiedOn(new Date());
		accessRequirement.setConcreteType(ManagedACTAccessRequirement.class.getName());
		return accessRequirementDao.create(accessRequirement);
	}

	/**
	 * Persists an approval for the given requirement/submitter/accessor.
	 * A null expiresOn stores a never-expiring approval.
	 */
	private AccessApproval createApproval(AccessRequirement accessRequirement, UserInfo submitter, UserInfo accessor,
			ApprovalState state, Instant expiresOn) {
		AccessApproval accessApproval = new AccessApproval();
		accessApproval.setCreatedBy(adminUser.getId().toString());
		accessApproval.setCreatedOn(new Date());
		accessApproval.setModifiedBy(adminUser.getId().toString());
		accessApproval.setModifiedOn(new Date());
		accessApproval.setAccessorId(accessor.getId().toString());
		accessApproval.setRequirementId(accessRequirement.getId());
		accessApproval.setRequirementVersion(accessRequirement.getVersionNumber());
		accessApproval.setSubmitterId(submitter.getId().toString());
		accessApproval.setExpiredOn(expiresOn == null ? null : Date.from(expiresOn));
		accessApproval.setState(state);
		return accessApprovalDao.create(accessApproval);
	}
}
/*
 * Copyright 2021 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.service;

import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.exceptions.GoConfigInvalidException;
import com.thoughtworks.go.config.update.*;
import com.thoughtworks.go.helper.GoConfigMother;
import com.thoughtworks.go.plugin.access.authorization.AuthorizationExtension;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.plugins.validators.authorization.RoleConfigurationValidator;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;

import static org.hamcrest.Matchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code RoleConfigService}: create/update/delete/bulk-update
 * of roles, plugin-role metadata validation, and per-user role lookup.
 * All collaborators are Mockito mocks; no real config is persisted.
 */
public class RoleConfigServiceTest {
	private GoConfigService configService;
	private AuthorizationExtension extension;
	private RoleConfigurationValidator configurationValidator;
	private EntityHashingService entityHashingService;
	private RoleConfigService roleConfigService;
	// Real (non-mock) config object backing the mocked configService.
	private BasicCruiseConfig cruiseConfig;

	@BeforeEach
	public void setUp() throws Exception {
		configService = mock(GoConfigService.class);
		extension = mock(AuthorizationExtension.class);
		configurationValidator = mock(RoleConfigurationValidator.class);
		entityHashingService = mock(EntityHashingService.class);
		cruiseConfig = GoConfigMother.defaultCruiseConfig();

		when(configService.cruiseConfig()).thenReturn(cruiseConfig);
		when(configService.getConfigForEditing()).thenReturn(cruiseConfig);
		roleConfigService = new RoleConfigService(configService, entityHashingService, extension, configurationValidator);
	}

	/** create() delegates to updateConfig with a RoleConfigCreateCommand. */
	@Test
	public void create_shouldAddARoleToConfig() throws Exception {
		PluginRoleConfig role = new PluginRoleConfig();
		Username admin = new Username("admin");
		HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();

		roleConfigService.create(admin, role, result);

		verify(configService).updateConfig(any(RoleConfigCreateCommand.class), eq(admin));
	}

	/** When an auth config for the role's plugin exists, its metadata is validated. */
	@Test
	public void create_shouldValidatePluginRoleMetadata() throws Exception {
		PluginRoleConfig role = new PluginRoleConfig("operate", "ldap");
		cruiseConfig.server().security().securityAuthConfigs().add(new SecurityAuthConfig("ldap", "plugin_id"));

		roleConfigService.create(null, role, null);

		verify(configurationValidator).validate(role, "plugin_id");
	}

	/** No matching auth config (plugin absent) means no metadata validation. */
	@Test
	public void create_shouldIgnorePluginRoleMetadataValidationInAbsenceOfPlugin() throws Exception {
		PluginRoleConfig role = new PluginRoleConfig("operate", "ldap");

		roleConfigService.create(null, role, null);

		verifyNoInteractions(configurationValidator);
	}

	/** Core (non-plugin) GoCD roles are never plugin-validated. */
	@Test
	public void create_shouldIgnoreValidationForGoCDRole() throws Exception {
		RoleConfig role = new RoleConfig(new CaseInsensitiveString("operate"));

		roleConfigService.create(null, role, null);

		verifyNoInteractions(configurationValidator);
	}

	/** update() delegates to updateConfig with a RoleConfigUpdateCommand. */
	@Test
	public void update_shouldUpdateAnExistingPluginRole() throws Exception {
		PluginRoleConfig role = new PluginRoleConfig();
		Username admin = new Username("admin");
		HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();

		roleConfigService.update(admin, "md5", role, result);

		verify(configService).updateConfig(any(RoleConfigUpdateCommand.class), eq(admin));
	}

	/** Same plugin-metadata validation applies on update. */
	@Test
	public void update_shouldValidatePluginRoleMetadata() throws Exception {
		PluginRoleConfig role = new PluginRoleConfig("operate", "ldap");
		cruiseConfig.server().security().securityAuthConfigs().add(new SecurityAuthConfig("ldap", "plugin_id"));

		roleConfigService.update(null, "md5", role, null);

		verify(configurationValidator).validate(role, "plugin_id");
	}

	/** No matching auth config on update means no metadata validation. */
	@Test
	public void update_shouldIgnorePluginRoleMetadataValidationInAbsenceOfPlugin() throws Exception {
		PluginRoleConfig role = new PluginRoleConfig("operate", "ldap");

		roleConfigService.update(null, "md5", role, null);

		verifyNoInteractions(configurationValidator);
	}

	/** Core GoCD roles skip plugin validation on update too. */
	@Test
	public void update_shouldIgnoreValidationForGoCDRole() throws Exception {
		RoleConfig role = new RoleConfig(new CaseInsensitiveString("operate"));

		roleConfigService.update(null, "md5", role, null);

		verifyNoInteractions(configurationValidator);
	}

	/** bulkUpdate() builds a RolesConfigBulkUpdateCommand from the request and submits it. */
	@Test
	public void bulkUpdate_shouldUpdateGoCDRoles() {
		Username currentUser = new Username("current_user");
		HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
		List<String> usersToAdd = Arrays.asList("user1", "user2");
		List<String> usersToRemove = Arrays.asList("user3", "user4");
		GoCDRolesBulkUpdateRequest request = new GoCDRolesBulkUpdateRequest(Collections.singletonList(
				new GoCDRolesBulkUpdateRequest.Operation("role1", usersToAdd, usersToRemove)));
		RolesConfigBulkUpdateCommand command = new RolesConfigBulkUpdateCommand(request, currentUser,
				configService, result);

		roleConfigService.bulkUpdate(request, currentUser, result);

		verify(configService).updateConfig(command, currentUser);
	}

	/** An invalid-config failure surfaces as HTTP 422 with the role's own errors. */
	@Test
	public void bulkUpdate_shouldHandleInvalidConfigException() {
		HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
		List<String> usersToAdd = Arrays.asList("user1", "user2");
		List<String> usersToRemove = Arrays.asList("user3", "user4");
		GoCDRolesBulkUpdateRequest request = new GoCDRolesBulkUpdateRequest(Collections.singletonList(
				new GoCDRolesBulkUpdateRequest.Operation("role1", usersToAdd, usersToRemove)));
		cruiseConfig.server().security().getRoles().addError("role1", "some error");
		doThrow(new GoConfigInvalidException(cruiseConfig, ""))
				.when(configService).updateConfig(any(), any());

		roleConfigService.bulkUpdate(request, null, result);

		assertThat(result.httpCode(), is(422));
		assertThat(result.message(), containsString("some error"));
	}

	/** Any unexpected exception surfaces as a generic HTTP 500. */
	@Test
	public void bulkUpdate_shouldHandleOtherExceptions() {
		HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
		GoCDRolesBulkUpdateRequest request = new GoCDRolesBulkUpdateRequest(Collections.singletonList(
				new GoCDRolesBulkUpdateRequest.Operation("role1",
						Collections.emptyList(), Collections.emptyList())));
		doThrow(new RuntimeException()).when(configService).updateConfig(any(), any());

		roleConfigService.bulkUpdate(request, null, result);

		assertThat(result.httpCode(), is(500));
		assertThat(result.message(), containsString("An error occurred while saving the role config. Please check the logs for more information."));
	}

	/** delete() delegates to updateConfig with a RoleConfigDeleteCommand. */
	@Test
	public void delete_shouldDeleteARole() throws Exception {
		PluginRoleConfig role = new PluginRoleConfig("operate", "ldap");
		Username admin = new Username("admin");

		roleConfigService.delete(admin, role, new HttpLocalizedOperationResult());

		verify(configService).updateConfig(any(RoleConfigDeleteCommand.class), eq(admin));
	}

	/** getRolesForUser() maps each queried user to exactly the roles naming them. */
	@Test
	public void getRolesForUser_shouldReturnAllTheRolesForTheGivenUser() {
		Username bob = new Username("Bob");
		Username john = new Username("John");
		RoleConfig role1 = new RoleConfig(new CaseInsensitiveString("role1"));
		role1.addUser(new RoleUser(bob.getUsername()));
		role1.addUser(new RoleUser(john.getUsername()));
		RoleConfig role2 = new RoleConfig(new CaseInsensitiveString("role2"));
		role2.addUser(new RoleUser(bob.getUsername()));
		RoleConfig role3 = new RoleConfig(new CaseInsensitiveString("role3"));
		role3.addUser(new RoleUser(john.getUsername()));
		cruiseConfig.server().security().addRole(role1);
		cruiseConfig.server().security().addRole(role2);
		cruiseConfig.server().security().addRole(role3);
		ServerConfig serverConfig = new ServerConfig();
		serverConfig.security().addRole(role1);
		serverConfig.security().addRole(role2);
		serverConfig.security().addRole(role3);
		// The service reads roles via serverConfig(), hence the separate stub.
		when(configService.serverConfig()).thenReturn(serverConfig);

		HashMap<Username, RolesConfig> userToRolesMap = roleConfigService.getRolesForUser(Arrays.asList(bob, john));

		assertThat(userToRolesMap.size(), is(2));
		assertThat(userToRolesMap.get(bob), hasItem(role1));
		assertThat(userToRolesMap.get(bob), hasItem(role2));
		assertThat(userToRolesMap.get(bob), not(hasItem(role3)));
		assertThat(userToRolesMap.get(john), hasItem(role1));
		assertThat(userToRolesMap.get(john), hasItem(role3));
		assertThat(userToRolesMap.get(john), not(hasItem(role2)));
	}
}
// ---------------------------------------------------------------------------- // Copyright 2006-2010, GeoTelematic Solutions, Inc. // All rights reserved // ---------------------------------------------------------------------------- // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // ---------------------------------------------------------------------------- // Change History: // 2007/03/25 Martin D. Flynn // -Initial release // 2007/06/13 Martin D. Flynn // -Moved to package "org.opengts.war.report" // -Renamed 'DeviceList' to 'ReportDeviceList' // 2009/09/23 Martin D. 
// Flynn
// -Fixed bug that could cause a "ConcurrentModificationException"
// ----------------------------------------------------------------------------
package org.opengts.war.report;

import java.lang.*;
import java.util.*;

import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.tables.*;

/**
*** Holds the set of Devices that a report should cover.  May be populated from
*** a single Device, a list of device-IDs, or a DeviceGroup, and filters every
*** addition through the optional User's device authorization.
**/
public class ReportDeviceList
    extends DBRecord<ReportDeviceList> // not really a database table
{

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Lazily-loading wrapper around a Device record: the Device DB record is
    *** only read on first access, and a failed lookup is cached (by clearing
    *** the account/deviceID keys) so it is not retried.
    **/
    public static class DeviceHolder
    {
        private Account account    = null;
        private String  deviceID   = null;
        private String  deviceDesc = null;
        private Device  device     = null;

        public DeviceHolder(Account acct, String devID) {
            super();
            this.account  = acct;
            this.deviceID = devID;
            this.device   = null;
        }

        public DeviceHolder(Device dev) {
            this(dev.getAccount(), dev.getDeviceID());
            this.device = dev;
        }

        /** Installs 'dev' if it matches this holder's account/device IDs. */
        public void setDevice(Device dev) {
            // FIX: guard against null 'account'/'deviceID' (cleared by a prior
            // failed 'getDevice()' lookup), which previously threw a
            // NullPointerException here.
            if ((dev != null) &&
                (this.account  != null) && this.account.getAccountID().equals(dev.getAccountID()) &&
                (this.deviceID != null) && this.deviceID.equals(dev.getDeviceID())) {
                this.device = dev;
            }
        }

        public String getDeviceID() {
            return this.deviceID;
        }

        /** Returns true if the Device record has already been loaded/assigned. */
        public boolean hasDevice() {
            return (this.device != null);
        }

        /** Returns the Device description, caching "" on any lookup failure. */
        public String getDeviceDescription() {
            if (this.deviceDesc == null) {
                try {
                    Device device = this.getDevice();
                    if (device != null) {
                        this.deviceDesc = device.getDescription();
                    } else {
                        this.deviceDesc = "";
                    }
                } catch (DBException dbe) {
                    this.deviceDesc = "";
                }
            }
            return this.deviceDesc;
        }

        /** Lazily loads and returns the Device record (may return null). */
        public Device getDevice()
            throws DBException
        {
            if ((this.device == null) && (this.account != null) && (this.deviceID != null)) {
                this.device = Device.getDevice(this.account, this.deviceID); // may still be null
                if (this.device == null) {
                    // so we don't try again
                    this.account  = null;
                    this.deviceID = null;
                }
            }
            return this.device;
        }

    }

    /**
    *** Orders DeviceHolder instances by case-insensitive device description,
    *** ascending or descending; nulls sort first when ascending.
    **/
    private static class DeviceHolderComparator
        implements Comparator<DeviceHolder>
    {
        private boolean ascending = true;
        public DeviceHolderComparator() {
            this(true);
        }
        public DeviceHolderComparator(boolean ascending) {
            this.ascending = ascending;
        }
        public int compare(DeviceHolder dh1, DeviceHolder dh2) {
            // assume we are comparing DeviceHolder records
            if (dh1 == dh2) {
                return 0; // exact same object (or both null)
            } else if (dh1 == null) {
                return this.ascending? -1 : 1; // null < non-null
            } else if (dh2 == null) {
                return this.ascending? 1 : -1; // non-null > null
            } else {
                String D1 = dh1.getDeviceDescription().toLowerCase(); // dh1.getDeviceID();
                String D2 = dh2.getDeviceDescription().toLowerCase(); // dh2.getDeviceID();
                return this.ascending? D1.compareTo(D2) : D2.compareTo(D1);
            }
        }
        public boolean equals(Object other) {
            if (other instanceof DeviceHolderComparator) {
                DeviceHolderComparator dhc = (DeviceHolderComparator)other;
                return (this.ascending == dhc.ascending);
            }
            return false;
        }
        // FIX: 'equals' was overridden without 'hashCode', violating the
        // Object.equals/hashCode contract.
        public int hashCode() {
            return this.ascending? 1 : 0;
        }
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    private Account                  account  = null;
    private User                     user     = null;
    private DeviceGroup              devGroup = null;
    private boolean                  isGroup  = false;
    private Map<String,DeviceHolder> devMap   = null;

    // ------------------------------------------------------------------------

    /* generic instance (devices will be added later) */
    public ReportDeviceList(Account acct, User user)
    {
        super();
        this.account = acct;
        this.user    = user;
        this.devMap  = null;
    }

    /* constructor with specific device */
    public ReportDeviceList(Account acct, User user, Device device)
    {
        this(acct, user);
        this.add(device);
    }

    /* constructor with a list of devices */
    public ReportDeviceList(Account acct, User user, String devID[])
    {
        this(acct, user);
        this.add(devID);
    }

    /* constructor with a specific group */
    public ReportDeviceList(Account acct, User user, DeviceGroup group)
    {
        this(acct, user);
        this.devGroup = group;
        this.isGroup  = true;
        this.add(group);
    }

    // ------------------------------------------------------------------------

    /* return account id ("" if no account) */
    public String getAccountID()
    {
        return (this.account != null)? this.account.getAccountID() : "";
    }

    /* return account db instance */
    public Account getAccount()
    {
        return this.account;
    }

    // ------------------------------------------------------------------------

    /* return user id ("" if no user) */
    public String getUserID()
    {
        return (this.user != null)? this.user.getUserID() : "";
    }

    /* return user db instance */
    public User getUser()
    {
        return this.user;
    }

    // ------------------------------------------------------------------------

    /* return device group id ("" if no group) */
    public String getDeviceGroupID()
    {
        return (this.devGroup != null)? this.devGroup.getGroupID() : "";
    }

    /* return device group db instance */
    public DeviceGroup getDeviceGroup()
    {
        return this.devGroup;
    }

    /* return true if loaded from a group, or if more than one device present */
    public boolean isDeviceGroup()
    {
        return this.isGroup || (this.size() > 1);
    }

    // ------------------------------------------------------------------------

    /* return device for named id (must already exist in the device map) */
    public Device getDevice(String devID)
        throws DBException
    {

        /* invalid device-id specified? */
        if (StringTools.isBlank(devID)) {
            return null;
        }

        /* return device */
        DeviceHolder dh = this.getDeviceMap().get(devID);
        return (dh != null)? dh.getDevice() : null;

    }

    // ------------------------------------------------------------------------

    /* clear the device map */
    public void clear()
    {
        if (this.devMap != null) {
            this.devMap.clear(); // set Map to empty
        }
    }

    /* return the internal device map (lazily created) */
    protected Map<String,DeviceHolder> getDeviceMap()
    {
        if (this.devMap == null) {
            this.devMap = new HashMap<String,DeviceHolder>(10);
        }
        return this.devMap;
    }

    // ------------------------------------------------------------------------

    /* set the single DeviceHolder object (clears any existing entries) */
    public boolean setDevice(User user, DeviceHolder dh)
    {
        this.clear();
        try {
            this._addDevice(user, dh);
            return true;
        } catch (DBException dbe) {
            return false;
        }
    }

    /* add DeviceHolder if absent from list (user authorization permitting) */
    protected void _addDevice(User user, DeviceHolder dh)
        throws DBException
    {
        String devID = dh.getDeviceID();
        if ((user == null) || user.isAuthorizedDevice(devID)) {
            Map<String,DeviceHolder> dm = this.getDeviceMap();
            if (dm.containsKey(devID)) {
                // already present, try updating device
                if (dh.hasDevice()) { // probably will be false
                    DeviceHolder dmdh = dm.get(devID);
                    dmdh.setDevice(dh.getDevice());
                }
            } else {
                // new entry, add DeviceHolder
                dm.put(devID, dh);
            }
        }
    }

    /* add Device if absent from list (user authorization permitting) */
    protected void _addDevice(User user, Device device)
        throws DBException
    {
        String devID = device.getDeviceID();
        if ((user == null) || user.isAuthorizedDevice(devID)) {
            Map<String,DeviceHolder> dm = this.getDeviceMap();
            if (dm.containsKey(devID)) {
                // already present, update device
                DeviceHolder dmdh = dm.get(devID);
                dmdh.setDevice(device);
            } else {
                // new entry, add device
                DeviceHolder dh = new DeviceHolder(device);
                dm.put(devID, dh);
            }
        }
    }

    // ------------------------------------------------------------------------

    /* add device to map */
    public boolean add(Device device)
    {

        /* invalid device */
        if (device == null) {
            return false;
        }

        /* add device */
        //Print.logStackTrace("Adding device: " + device.getDeviceID());
        User user = this.getUser();
        try {
            this._addDevice(user, device);
            return true;
        } catch (DBException dbe) {
            return false;
        }

    }

    /* add list of devices to map */
    public boolean add(String devID[])
    {

        /* empty list */
        if (ListTools.isEmpty(devID)) {
            return false;
        }

        /* add devices from list */
        //Print.logStackTrace("Adding devices ...");
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            for (int i = 0; i < devID.length; i++) {
                this._addDevice(user, new DeviceHolder(acct, devID[i]));
            }
            return true;
        } catch (DBException dbe) {
            return false;
        }

    }

    /* add list of devices to map */
    public boolean add(java.util.List<String> devIDList)
    {

        /* empty list */
        if (ListTools.isEmpty(devIDList)) {
            return false;
        }

        /* add devices from list */
        //Print.logStackTrace("Adding devices ...");
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            for (String devID : devIDList) {
                this._addDevice(user, new DeviceHolder(acct, devID));
            }
            return true;
        } catch (DBException dbe) {
            return false;
        }

    }

    /* add device to map */
    public boolean add(String devID)
    {

        /* invalid Device id? */
        if (StringTools.isBlank(devID)) {
            return false;
        }

        /* add device id */
        //Print.logStackTrace("Adding device: " + devID);
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            this._addDevice(user, new DeviceHolder(acct, devID));
            return true;
        } catch (DBException dbe) {
            return false;
        }

    }

    /* add device-group to map */
    public boolean add(DeviceGroup group)
    {

        /* invalid group */
        if (group == null) {
            return false;
        }

        /* AccountID mismatch? */
        String acctID = this.getAccountID();
        if (!acctID.equals(group.getAccountID())) {
            return false;
        }

        /* add devices from group */
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            OrderedSet<String> devIDSet = DeviceGroup.getDeviceIDsForGroup(acctID, group.getGroupID(), null/*User*/, false);
            for (int i = 0; i < devIDSet.size(); i++) {
                this._addDevice(user, new DeviceHolder(acct, devIDSet.get(i)));
            }
            this.isGroup = true;
            return true;
        } catch (DBException dbe) {
            Print.logException("Unable to add DeviceGroup", dbe);
            return false;
        }

    }

    // ------------------------------------------------------------------------

    /* add all user authorized devices to the internal device map */
    public void addAllAuthorizedDevices()
    {
        try {
            User    usr  = this.getUser();
            Account acct = this.getAccount();
            OrderedSet<String> list = User.getAuthorizedDeviceIDs(usr, acct, false);
            //Print.logInfo("Authorized devices: " + list.size());
            this.add(list);
        } catch (DBException dbe) {
            Print.logException("Unable to add all User devices", dbe);
        }
    }

    // ------------------------------------------------------------------------

    /* return number of devices currently in the map */
    public int size()
    {
        return this.getDeviceMap().size();
    }

    // ------------------------------------------------------------------------

    /* return a device map iterator (over device-IDs) */
    public Iterator<String> iterator()
    {
        return this.getDeviceMap().keySet().iterator();
    }

    // ------------------------------------------------------------------------

    /* return the device map values, optionally sorted by device description */
    public java.util.List<DeviceHolder> getDeviceHolderList(boolean sort)
    {
        java.util.List<DeviceHolder> dhList = new Vector<DeviceHolder>(this.getDeviceMap().values());
        if (sort) {
            Collections.sort(dhList, new DeviceHolderComparator());
        }
        return dhList;
    }

    // ------------------------------------------------------------------------

    /* return the first deviceID in the map ("" if empty) */
    public String getFirstDeviceID()
    {
        // FIX: use the typed iterator (was a raw 'Iterator' with an explicit cast)
        Iterator<String> i = this.iterator();
        if (i.hasNext()) {
            return i.next();
        } else {
            return "";
        }
    }

    /* return the first device in the map (null if empty or lookup fails) */
    public Device getFirstDevice()
    {
        String devID = this.getFirstDeviceID();
        if (!devID.isEmpty()) {
            try {
                return this.getDevice(devID);
            } catch (DBException dbe) {
                return null;
            }
        }
        return null;
    }

    // ------------------------------------------------------------------------

    /* return a string representation of this ReportDeviceList */
    public String toString()
    {
        StringBuffer sb = new StringBuffer();
        sb.append("ReportDeviceList:");
        sb.append(" Account=").append(this.getAccountID());
        sb.append(" User=").append(this.getUserID());
        sb.append(" Group=").append(this.getDeviceGroupID());
        sb.append(" Size=").append(this.size());
        return sb.toString();
    }

    // ------------------------------------------------------------------------

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.query.continuous;

import java.util.concurrent.CountDownLatch;
import javax.cache.Cache;
import javax.cache.configuration.Factory;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryListenerException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteClientDisconnectedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheEntryEventSerializableFilter;
import org.apache.ignite.cache.query.ContinuousQueryWithTransformer;
import org.apache.ignite.cache.query.ContinuousQueryWithTransformer.EventListener;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgniteOutClosure;
import org.apache.ignite.resources.LoggerResource;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Failover tests for continuous queries with a remote transformer: verifies
 * that the query keeps delivering transformed events across server node
 * restarts, and that transformer/filter exceptions are handled.
 */
public class CacheContinuousWithTransformerFailoverTest extends GridCommonAbstractTest {
    /** Shared in-memory IP finder for all grids started by this test. */
    private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** When {@code true}, the next grid started via getConfiguration is a client node. */
    private boolean client;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);

        // Single PARTITIONED/ATOMIC cache with FULL_SYNC writes so puts are
        // visible cluster-wide before the put() call returns.
        CacheConfiguration ccfg = new CacheConfiguration(DEFAULT_CACHE_NAME);

        ccfg.setCacheMode(PARTITIONED);
        ccfg.setAtomicityMode(ATOMIC);
        ccfg.setWriteSynchronizationMode(FULL_SYNC);

        cfg.setCacheConfiguration(ccfg);

        cfg.setClientMode(client);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        super.afterTest();

        stopAllGrids();
    }

    /**
     * Repeatedly stops and restarts all server nodes while a client-side
     * continuous query (with transformer) is registered, checking that the
     * local listener still receives events after each restart cycle.
     *
     * @throws Exception If failed.
     */
    public void testServerNodeLeft() throws Exception {
        startGrids(3);

        client = true;

        final int CLIENT_ID = 3;

        Ignite clnNode = startGrid(CLIENT_ID);

        client = false;

        // Indirection returning the client's cache view on each call.
        IgniteOutClosure<IgniteCache<Integer, Integer>> cache =
            new IgniteOutClosure<IgniteCache<Integer, Integer>>() {
                int cnt = 0;

                @Override public IgniteCache<Integer, Integer> apply() {
                    ++cnt;

                    return grid(CLIENT_ID).cache(DEFAULT_CACHE_NAME);
                }
            };

        final CacheEventListener lsnr = new CacheEventListener();

        ContinuousQueryWithTransformer<Object, Object, String> qry = new ContinuousQueryWithTransformer<>();

        qry.setLocalListener(lsnr);

        // Remote transformer: concatenates key and value into a String.
        qry.setRemoteTransformerFactory(FactoryBuilder.factoryOf(new IgniteClosure<CacheEntryEvent<?, ?>, String>() {
            @Override public String apply(CacheEntryEvent<?, ?> evt) {
                return "" + evt.getKey() + evt.getValue();
            }
        }));

        QueryCursor<?> cur = clnNode.cache(DEFAULT_CACHE_NAME).query(qry);

        boolean first = true;

        int keyCnt = 1;

        for (int i = 0; i < 10; i++) {
            log.info("Start iteration: " + i);

            // Servers are already up on the first pass; restart them afterwards.
            if (first)
                first = false;
            else {
                for (int srv = 0; srv < CLIENT_ID - 1; srv++)
                    startGrid(srv);
            }

            lsnr.latch = new CountDownLatch(keyCnt);

            for (int key = 0; key < keyCnt; key++)
                cache.apply().put(key, key);

            assertTrue("Failed to wait for event. Left events: " + lsnr.latch.getCount(),
                lsnr.latch.await(10, SECONDS));

            // Stop all server nodes before the next iteration.
            for (int srv = 0; srv < CLIENT_ID - 1; srv++)
                stopGrid(srv);
        }

        tryClose(cur);
    }

    /**
     * Verifies that an exception thrown from the remote transformer does not
     * break event delivery: the transformer is still invoked for every update
     * (the latch counts down before the throw).
     *
     * @throws Exception If failed.
     */
    public void testTransformerException() throws Exception {
        try {
            startGrids(1);

            Ignite ignite = ignite(0);

            IgniteCache<Integer, Integer> cache = ignite.cache(DEFAULT_CACHE_NAME);

            final CountDownLatch latch = new CountDownLatch(10);

            ContinuousQueryWithTransformer<Integer, Integer, Integer> qry = new ContinuousQueryWithTransformer<>();

            qry.setLocalListener(new EventListener<Integer>() {
                /** */
                @LoggerResource
                private IgniteLogger log;

                @Override public void onUpdated(Iterable<? extends Integer> evts) throws CacheEntryListenerException {
                    for (Integer evt : evts) {
                        log.debug("" + evt);
                    }
                }
            });

            // Transformer always fails AFTER counting down, proving it runs per event.
            qry.setRemoteTransformerFactory(FactoryBuilder.factoryOf(
                new IgniteClosure<CacheEntryEvent<? extends Integer, ? extends Integer>, Integer>() {
                    @Override public Integer apply(CacheEntryEvent<? extends Integer, ? extends Integer> evt) {
                        latch.countDown();

                        throw new RuntimeException("Test error.");
                    }
                }));

            qry.setRemoteFilterFactory(FactoryBuilder.factoryOf(
                new CacheEntryEventSerializableFilter<Integer, Integer>() {
                    @Override public boolean evaluate(CacheEntryEvent<? extends Integer, ? extends Integer> evt) {
                        return true;
                    }
                }));

            try (QueryCursor<Cache.Entry<Integer, Integer>> ignored = cache.query(qry)) {
                for (int i = 0; i < 10; i++)
                    cache.put(i, i);

                assertTrue(latch.await(10, SECONDS));
            }
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Ensure that every node sees every update.
     *
     * @throws Exception If failed.
     */
    public void testCrossCallback() throws Exception {
        startGrids(2);

        try {
            IgniteCache<Integer, Integer> cache1 = grid(0).cache(DEFAULT_CACHE_NAME);
            IgniteCache<Integer, Integer> cache2 = grid(1).cache(DEFAULT_CACHE_NAME);

            // One key primary on each node, so updates originate on both sides.
            final int key1 = primaryKey(cache1);
            final int key2 = primaryKey(cache2);

            final CountDownLatch latch1 = new CountDownLatch(2);
            final CountDownLatch latch2 = new CountDownLatch(2);

            // Shared transformer: passes through the event key.
            Factory<? extends IgniteClosure<CacheEntryEvent<? extends Integer, ? extends Integer>, Integer>> factory =
                FactoryBuilder.factoryOf(
                    new IgniteClosure<CacheEntryEvent<? extends Integer, ? extends Integer>, Integer>() {
                        @Override public Integer apply(CacheEntryEvent<? extends Integer, ? extends Integer> evt) {
                            return evt.getKey();
                        }
                    });

            ContinuousQueryWithTransformer<Integer, Integer, Integer> qry1 = new ContinuousQueryWithTransformer<>();

            qry1.setRemoteTransformerFactory(factory);

            qry1.setLocalListener(new EventListener<Integer>() {
                @Override public void onUpdated(Iterable<? extends Integer> evts) {
                    for (int evt : evts) {
                        log.info("Update in cache 1: " + evt);

                        if (evt == key1 || evt == key2)
                            latch1.countDown();
                    }
                }
            });

            ContinuousQueryWithTransformer<Integer, Integer, Integer> qry2 = new ContinuousQueryWithTransformer<>();

            qry2.setRemoteTransformerFactory(factory);

            qry2.setLocalListener(new EventListener<Integer>() {
                @Override public void onUpdated(Iterable<? extends Integer> evts) {
                    for (int evt : evts) {
                        log.info("Update in cache 2: " + evt);

                        if (evt == key1 || evt == key2)
                            latch2.countDown();
                    }
                }
            });

            // NOTE(review): both queries are registered against 'cache2'; this
            // appears intentional (continuous queries observe cluster-wide
            // updates regardless of the registering cache proxy), but confirm
            // 'qry1' was not meant to be registered via 'cache1'.
            try (QueryCursor<Cache.Entry<Integer, Integer>> ignored1 = cache2.query(qry1);
                 QueryCursor<Cache.Entry<Integer, Integer>> ignored2 = cache2.query(qry2)) {
                cache1.put(key1, key1);
                cache1.put(key2, key2);

                assertTrue(latch1.await(10, SECONDS));
                assertTrue(latch2.await(10, SECONDS));
            }
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Closes the cursor, retrying once after reconnect if the client was
     * disconnected at the time of the first close attempt.
     *
     * @param cur Cur.
     */
    private void tryClose(QueryCursor<?> cur) {
        try {
            cur.close();
        }
        catch (Throwable e) {
            if (e instanceof IgniteClientDisconnectedException) {
                IgniteClientDisconnectedException ex = (IgniteClientDisconnectedException)e;

                ex.reconnectFuture().get();

                cur.close();
            }
            else
                throw e;
        }
    }

    /** Local listener counting down the current latch for each received event. */
    private static class CacheEventListener implements EventListener<String> {
        /** Reset by the test before each batch of puts. */
        public volatile CountDownLatch latch = new CountDownLatch(1);

        /** */
        @LoggerResource
        private IgniteLogger log;

        /** {@inheritDoc} */
        @Override public void onUpdated(Iterable<? extends String> evts) {
            for (Object evt : evts) {
                log.info("Received cache event: " + evt);

                latch.countDown();
            }
        }
    }
}
/* * This file is part of choco-solver, http://choco-solver.org/ * * Copyright (c) 2019, IMT Atlantique. All rights reserved. * * Licensed under the BSD 4-clause license. * * See LICENSE file in the project root for full license information. */ package org.chocosolver.solver; import org.chocosolver.solver.constraints.Constraint; import org.chocosolver.solver.exception.ContradictionException; import org.chocosolver.solver.exception.SolverException; import org.chocosolver.solver.search.loop.monitors.IMonitorSolution; import org.chocosolver.solver.variables.BoolVar; import org.chocosolver.solver.variables.IntVar; import org.chocosolver.solver.variables.RealVar; import org.chocosolver.solver.variables.SetVar; import org.chocosolver.util.ProblemMaker; import org.chocosolver.util.criteria.Criterion; import org.chocosolver.util.tools.ArrayUtils; import org.testng.Assert; import org.testng.annotations.Test; import java.io.IOException; import java.io.InputStream; import java.text.MessageFormat; import java.util.List; import static org.chocosolver.solver.search.strategy.Search.inputOrderLBSearch; import static org.chocosolver.solver.search.strategy.Search.randomSearch; import static org.chocosolver.solver.variables.IntVar.MAX_INT_BOUND; import static org.chocosolver.solver.variables.IntVar.MIN_INT_BOUND; import static org.chocosolver.util.ESat.FALSE; import static org.chocosolver.util.ESat.TRUE; import static org.testng.Assert.*; /** * <br/> * * @author Charles Prud'homme * @since 23 juil. 2010 */ public class ModelTest { final static int[] capacites = {0, 34}; final static int[] energies = {6, 4, 3}; final static int[] volumes = {7, 5, 2}; final static int[] nbOmax = {4, 6, 17}; final static int n = 3; /** For autonumbering anonymous models. */ private static int modelInitNumber; /** @return next model's number, for anonymous models. 
*/ private static synchronized int nextModelNum() { return modelInitNumber++; } public static Model knapsack() { Model model = new Model("ModelT-" + nextModelNum()); IntVar power = model.intVar("v_" + n, 0, 9999, true); IntVar[] objects = new IntVar[n]; for (int i = 0; i < n; i++) { objects[i] = model.intVar("v_" + i, 0, nbOmax[i], false); } model.scalar(objects, volumes, "=", model.intVar("capa", capacites[0], capacites[1], true)).post(); model.scalar(objects, energies, "=", power).post(); model.setObjective(Model.MAXIMIZE, power); model.addHook("obj", power); model.getSolver().setSearch(inputOrderLBSearch(objects)); return model; } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// private static final int ONE = 0, NEXT = 1, ALL = 2, OPT = 3; public static void conf(Model s, int... is) throws SolverException { for (int i : is) { switch (i) { case ONE: s.getSolver().solve(); break; case NEXT: s.getSolver().solve(); break; case ALL: while (s.getSolver().solve()) ; break; case OPT: s.setObjective(Model.MAXIMIZE, (IntVar) s.getVar(0)); s.getSolver().solve(); break; default: fail("unknonw case"); break; } } } @Test(groups = "1s", timeOut = 60000) public void testRight() { boolean alive = true; int cas = 0; while (alive) { cas++; Model s = knapsack(); try { switch (cas) { case 1: conf(s, ONE); break; case 2: conf(s, ONE, NEXT); break; case 3: conf(s, ONE, NEXT, NEXT); break; case 4: conf(s, ONE, ONE); break; case 5: conf(s, ONE, ALL); break; case 6: conf(s, ONE, OPT); break; case 7: conf(s, ALL); break; case 8: conf(s, OPT); break; case 9: conf(s, ALL, ONE); break; case 10: conf(s, ALL, ALL); break; case 11: conf(s, ALL, OPT); break; case 12: conf(s, ALL, NEXT); break; case 13: conf(s, OPT, ONE); break; case 14: conf(s, OPT, ALL); break; case 15: conf(s, OPT, OPT); break; case 16: conf(s, 
OPT, NEXT); break; case 17: conf(s, NEXT); break; default: alive = false; } } catch (SolverException ingored) { Assert.fail(MessageFormat.format("Fail on {0}", cas)); } } } @Test(groups = "1s", timeOut = 60000) public void testFH1() { Model model = new Model(); BoolVar b = model.boolVar("b"); IntVar i = model.intVar("i", MIN_INT_BOUND, MAX_INT_BOUND, true); SetVar s = model.setVar("s", new int[]{}, new int[]{2, 3}); RealVar r = model.realVar("r", 1.0, 2.2, 0.01); BoolVar[] bvars = model.retrieveBoolVars(); Assert.assertEquals(bvars, new BoolVar[]{b}); IntVar[] ivars = model.retrieveIntVars(false); Assert.assertEquals(ivars, new IntVar[]{i}); SetVar[] svars = model.retrieveSetVars(); Assert.assertEquals(svars, new SetVar[]{s}); RealVar[] rvars = model.retrieveRealVars(); Assert.assertEquals(rvars, new RealVar[]{r}); } @Test(groups = "1s", timeOut = 60000) public void testRetrieveInt() { Model model = new Model(); BoolVar b = model.boolVar("b"); IntVar i = model.intVar("i", 1, 3, false); IntVar[] is = model.retrieveIntVars(false); Assert.assertEquals(1, is.length); IntVar[] is2 = model.retrieveIntVars(true); Assert.assertEquals(2, is2.length); } @Test(groups = "1s", timeOut = 60000) public void testRetrieveBool() { Model model = new Model(); BoolVar b = model.boolVar("b"); IntVar i = model.intVar("i", 1, 3, false); IntVar[] bs = model.retrieveBoolVars(); Assert.assertEquals(1, bs.length); } @Test(groups = "1s", timeOut = 60000) public void testFH2() { Model model = new Model(); BoolVar b = model.boolVar("b"); model.arithm(b, "=", 2).post(); while (model.getSolver().solve()) ; assertEquals(model.getSolver().isFeasible(), FALSE); } @Test(groups = "1s", timeOut = 60000) public void testJL1() { Model s = new Model(); s.arithm(s.boolVar(true), "!=", s.boolVar(false)).post(); while (s.getSolver().solve()) ; } @Test(groups = "1s", timeOut = 60000) public void testParBug2() { for (int iter = 0; iter < 500; iter++) { Model model = knapsack(); while (model.getSolver().solve()) 
; Assert.assertEquals(model.getSolver().getObjectiveManager().getBestSolutionValue(), 51); } } @Test(groups = "1s", timeOut = 60000) public void testParBug3() { for (int iter = 0; iter < 500; iter++) { Model model = knapsack(); model.getSolver().setSearch(randomSearch(model.retrieveIntVars(true), iter)); while (model.getSolver().solve()) ; Assert.assertEquals(model.getSolver().getObjectiveManager().getBestSolutionValue(), 51); } } @Test(groups = "1s", timeOut = 60000) public void testJL300() { Model s = new Model(); IntVar i = s.intVar("i", -5, 5, false); s.setObjective(Model.MAXIMIZE, i); s.getSolver().solve(); assertEquals(s.getSolver().getSolutionCount(), 1); assertEquals(i.getValue(), 5); s.getSolver().getEngine().flush(); s.getSolver().reset(); s.clearObjective(); while (s.getSolver().solve()) ; assertEquals(s.getSolver().getSolutionCount(), 11); } @Test(groups = "1s", timeOut = 60000) public void testMonitors() { Model model = new Model(); IntVar v = model.boolVar("b"); final int[] c = {0}; final int[] d = {0}; IMonitorSolution sm1 = () -> c[0]++; IMonitorSolution sm2 = () -> d[0]++; model.getSolver().plugMonitor(sm1); model.getSolver().plugMonitor(sm2); while (model.getSolver().solve()) ; assertEquals(2, c[0]); assertEquals(2, d[0]); // unplug model.getSolver().unplugMonitor(sm1); model.getSolver().reset(); while (model.getSolver().solve()) ; assertEquals(2, c[0]); assertEquals(4, d[0]); // plug model.getSolver().unplugAllSearchMonitors(); model.getSolver().reset(); while (model.getSolver().solve()) ; assertEquals(2, c[0]); assertEquals(4, d[0]); } @Test(groups = "1s", timeOut = 60000) public void testCriteria() { Model model = new Model(); IntVar v = model.boolVar("b"); Criterion c1 = () -> model.getSolver().getNodeCount() == 1; Criterion c2 = () -> model.getSolver().getSolutionCount() == 1; model.getSolver().addStopCriterion(c1); model.getSolver().addStopCriterion(c2); while (model.getSolver().solve()) ; assertEquals(0, 
model.getSolver().getSolutionCount()); // unplug model.getSolver().removeStopCriterion(c1); model.getSolver().reset(); model.getSolver().addStopCriterion(c2); while (model.getSolver().solve()) ; assertEquals(1, model.getSolver().getSolutionCount()); // plug model.getSolver().removeAllStopCriteria(); model.getSolver().reset(); while (model.getSolver().solve()) ; assertEquals(2, model.getSolver().getSolutionCount()); } @Test(groups = "1s", timeOut = 60000) public void testCompSearch() { Model model = new Model(); IntVar[] v = model.boolVarArray("v", 2); IntVar[] w = model.boolVarArray("w", 2); model.arithm(v[0], "!=", v[1]).post(); model.arithm(w[0], "!=", w[1]).post(); model.getSolver().setSearch(inputOrderLBSearch(v)); model.getSolver().makeCompleteStrategy(true); model.getSolver().solve(); assertEquals(model.getSolver().isSatisfied(), TRUE); } @Test(groups = "1s", timeOut = 60000) public void testAssociates() { Model s = new Model(); BoolVar v = s.boolVar("V"); Assert.assertEquals(s.getNbVars(), 1); s.associates(v); Assert.assertEquals(s.getNbVars(), 2); s.unassociates(v); Assert.assertEquals(s.getNbVars(), 1); s.unassociates(v); Assert.assertEquals(s.getNbVars(), 0); } @Test(groups = "1s", timeOut = 60000) public void testRestore() throws ContradictionException { Model model = new Model(); IntVar[] v = model.boolVarArray("v", 2); model.arithm(v[0], "!=", v[1]).post(); model.setObjective(Model.MAXIMIZE, v[0]); model.getSolver().solve(); assertTrue(v[0].isInstantiated()); if (model.getSolver().solve()) assertTrue(v[0].isInstantiatedTo(1)); else assertFalse(v[0].isInstantiated()); } @Test(groups = "1s", timeOut = 60000) public void testHook() { Model model = new Model(); String toto = "TOTO"; String titi = "TITI"; model.addHook("toto", toto); model.addHook("titi", titi); Assert.assertEquals(model.getHooks().size(), 2); Assert.assertEquals(model.getHook("toto"), toto); model.removeHook("toto"); Assert.assertEquals(model.getHook("toto"), null); 
Assert.assertEquals(model.getHooks().size(), 1); model.removeAllHooks(); Assert.assertEquals(model.getHooks().size(), 0); } @Test(groups = "1s", timeOut = 60000) public void testName() { Model model = new Model(); Assert.assertTrue(model.getName().startsWith("Model-")); model.setName("Revlos"); Assert.assertEquals(model.getName(), "Revlos"); } @Test(groups = "1s", timeOut = 60000) public void testNextSolution() { Model s = ProblemMaker.makeNQueenWithBinaryConstraints(8); s.getSolver().solve(); // should not throw exception } @Test(groups = "1s", timeOut = 60000) public void testFindSolution() { Model m = ProblemMaker.makeNQueenWithOneAlldifferent(4); for (int i = 0; i < 2; i++) { Assert.assertNotNull(m.getSolver().findSolution()); } Assert.assertNull(m.getSolver().findSolution()); } @Test(groups = "1s", timeOut = 60000) public void testFindAfterReset() { Model m = new Model(); IntVar i = m.intVar("i", 0, 5); Solver s = m.getSolver(); s.findOptimalSolution(i, false); s.reset(); Assert.assertNotNull(s.findSolution()); } @Test(groups = "1s", timeOut = 60000) public void testFindAllSolutions() { Model m = ProblemMaker.makeNQueenWithOneAlldifferent(4); m.getSolver().findAllSolutions(); Assert.assertEquals(m.getSolver().getSolutionCount(), 2); } @Test(groups = "1s", timeOut = 60000) public void testFindAllSolutionsAfterReset() { Model m = new Model(); IntVar i = m.intVar("i", 0, 5); Solver s = m.getSolver(); s.findOptimalSolution(i, false); s.reset(); Assert.assertEquals(s.findAllSolutions().size(), 6); } @Test(groups = "1s", timeOut = 60000) public void testFindAllSolutions2() { Model m = ProblemMaker.makeNQueenWithOneAlldifferent(4); Assert.assertEquals(m.getSolver().streamSolutions().count(), 2); } @Test(groups = "1s", timeOut = 60000) public void testFindOptimalSolution() { Model m = ProblemMaker.makeGolombRuler(8); Assert.assertNotNull(m.getSolver().findOptimalSolution((IntVar) m.getHook("objective"), false)); } @Test(groups = "1s", timeOut = 60000) public void 
testFindOptimalSolutionWithSearch() { Model m = ProblemMaker.makeGolombRuler(7); m.getSolver().setSearch(inputOrderLBSearch((IntVar[]) m.getHook("ticks"))); Solution s = m.getSolver().findOptimalSolution((IntVar) m.getHook("objective"), false); Assert.assertNotNull(s); Assert.assertTrue(s.getIntVal((IntVar) m.getHook("objective")) == 25); m.getEnvironment().worldPush(); try { s.restore(); } catch (ContradictionException c) { Assert.fail(); } Assert.assertTrue(((IntVar) m.getHook("objective")).isInstantiatedTo(25)); Assert.assertTrue(m.getSettings().checkModel(m.getSolver())); m.getEnvironment().worldPop(); Assert.assertFalse(((IntVar) m.getHook("objective")).isInstantiated()); } @Test(groups = "1s", timeOut = 60000) public void testFindAllOptimalSolutions() { Model m = ProblemMaker.makeGolombRuler(6); m.getSolver().findAllOptimalSolutions((IntVar) m.getHook("objective"), false); Assert.assertEquals(m.getSolver().getSolutionCount(), 4); } @Test(groups = "1s", timeOut = 60000) public void testFindAllOptimalSolutions2() { Model m = ProblemMaker.makeGolombRuler(6); Assert.assertEquals(m.getSolver().streamOptimalSolutions((IntVar) m.getHook("objective"), false).count(), 4); } @Test(groups = "1s", timeOut = 60000) public void testFind() { Model m = ProblemMaker.makeGolombRuler(6); IntVar[] ticks = (IntVar[]) m.getHook("ticks"); m.clearObjective(); List<Solution> front = m.getSolver().findParetoFront(ticks, false); for (Solution s : front) { System.out.println(s.getIntVal(ticks[0])); } Assert.assertEquals(front.size(), 8); } @Test(groups = "1s", timeOut = 60000) public void testFindAllSolutions3() { Model m = ProblemMaker.makeNQueenWithOneAlldifferent(4); m.getSolver().eachSolutionWithMeasure((solution, measures) -> { System.out.printf("Found solution: %s%n with the following measures:%n%s%n", solution.toString(), measures.toOneLineString()); }); Assert.assertEquals(m.getSolver().getSolutionCount(), 2); } @Test(groups = "1s", timeOut = 60000, expectedExceptions = 
SolverException.class) public void testRecord() { Model m = ProblemMaker.makeGolombRuler(6); IntVar[] ticks = (IntVar[]) m.getHook("ticks"); Solution s = new Solution(m); m.getSolver().solve(); // solution not recorded System.out.println(s.getIntVal(ticks[0])); } @Test(groups = "1s", timeOut = 60000, expectedExceptions = SolverException.class) public void testRecord2() { Model m = ProblemMaker.makeGolombRuler(6); IntVar[] ticks = (IntVar[]) m.getHook("ticks"); IntVar p = m.boolVar(); Solution s = new Solution(m, ticks); m.getSolver().solve(); s.record(); System.out.println(s.getIntVal(ticks[0])); // not recorded variable System.out.println(s.getIntVal(p)); } @Test(groups = "1s", timeOut = 60000) public void testRecord3() { Model m = ProblemMaker.makeGolombRuler(6); IntVar[] ticks = (IntVar[]) m.getHook("ticks"); IntVar p = m.boolVar(); Solution s = new Solution(m, ArrayUtils.append(ticks, new IntVar[]{p})); m.getSolver().solve(); s.record(); System.out.println(s.getIntVal(ticks[0])); System.out.println(s.getIntVal(p)); } @Test(groups = "1s", timeOut = 60000) public void testSwapOnPassivate() { Model model = new Model(new DefaultSettings().setSwapOnPassivate(true)); int n = 11; IntVar[] vars = new IntVar[n]; for (int i = 0; i < vars.length; i++) { vars[i] = model.intVar("Q_" + i, 1, n, false); } model.addHook("vars", vars); for (int i = 0; i < n - 1; i++) { for (int j = i + 1; j < n; j++) { int k = j - i; model.arithm(vars[i], "!=", vars[j]).post(); model.arithm(vars[i], "!=", vars[j], "+", -k).post(); model.arithm(vars[i], "!=", vars[j], "+", k).post(); } } model.getSolver().findAllSolutions(); Assert.assertEquals(model.getSolver().getSolutionCount(), 2680); } @Test(groups = "1s", timeOut = 60000) public void testJuTii1() { Model model = new Model("model"); IntVar b = model.intVar("b", 2, 6); model.arithm(b, ">", 3).post(); Assert.assertEquals(b.getLB(), 2); Assert.assertTrue(model.getSolver().solve()); model.getSolver().reset(); 
model.getSolver().getEnvironment().worldPush(); try { Assert.assertEquals(b.getLB(), 2); model.getSolver().propagate(); Assert.assertEquals(b.getLB(), 4); } catch (ContradictionException e) { e.printStackTrace(); } model.getSolver().getEnvironment().worldPop(); Assert.assertEquals(b.getLB(), 2); } @Test(groups = "1s", timeOut = 60000) public void testJuTii2() { Model mode = new Model(); IntVar r = mode.intVar("r", 1, 3); org.chocosolver.solver.constraints.Constraint c = mode.member(r, new int[]{7, 6, 5, 2}); c.post(); System.out.println(mode); for (int i = 1; i <= 3; i++) { Constraint c2 = mode.arithm(r, "=", i); mode.post(c2); boolean propagateOK = false; mode.getSolver().getEnvironment().worldPush(); try { mode.getSolver().propagate(); propagateOK = true; } catch (ContradictionException ex) { //System.err.println(ex); mode.getSolver().getEngine().flush(); } mode.getSolver().getEnvironment().worldPop(); mode.getSolver().getEngine().clear(); System.out.print("i:" + i + ", " + r + ", propagateOK = " + propagateOK + " "); System.out.println(mode.getSolver().solve()); mode.unpost(c2); mode.getSolver().hardReset(); } } @Test(groups = "1s", timeOut = 60000) public void testSettings2() throws IOException { InputStream inStream = this.getClass().getClassLoader().getResourceAsStream("Assert.properties"); Settings settings = new DefaultSettings().load(inStream); System.out.printf("%s\n",settings.getWelcomeMessage()); settings.store(System.out, "Test"); } @Test(groups="1s", timeOut=60000) public void testHR(){ Model m = new Model(); IntVar i = m.intVar("i", 1, 2); m.getSolver().hardReset(); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.

/*
 * Class BinaryExpressionEvaluator
 * @author Jeka
 */
package com.intellij.debugger.engine.evaluation.expression;

import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.engine.DebuggerUtils;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.jdi.VirtualMachineProxyImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.JavaTokenType;
import com.intellij.psi.tree.IElementType;
import com.sun.jdi.*;
import org.jetbrains.annotations.NotNull;

/**
 * Evaluates a Java binary expression (arithmetic, shift, bitwise, relational and
 * logical operators) against live JDI {@link Value}s produced by the operand
 * evaluators. Results are materialized back into debuggee values via
 * {@code DebuggerUtilsEx.createValue} using the statically computed expected type.
 */
class BinaryExpressionEvaluator implements Evaluator {
  private static final Logger LOG = Logger.getInstance(BinaryExpressionEvaluator.class);

  // Operand evaluators are wrapped in DisableGC so the intermediate debuggee
  // objects they produce are not garbage-collected between the two evaluations.
  private final Evaluator myLeftOperand;
  private final Evaluator myRightOperand;
  // The PSI token of the binary operator, e.g. JavaTokenType.PLUS.
  private final IElementType myOpType;
  private final String myExpectedType; // a result of PsiType.getCanonicalText()

  /**
   * @param leftOperand  evaluator for the left-hand operand
   * @param rightOperand evaluator for the right-hand operand (may be skipped at
   *                     runtime for short-circuiting {@code &&} / {@code ||})
   * @param opType       the operator token
   * @param expectedType canonical text of the expression's static type; used to
   *                     coerce the computed result back into a debuggee value
   */
  BinaryExpressionEvaluator(@NotNull Evaluator leftOperand,
                            @NotNull Evaluator rightOperand,
                            @NotNull IElementType opType,
                            String expectedType) {
    myLeftOperand = DisableGC.create(leftOperand);
    myRightOperand = DisableGC.create(rightOperand);
    myOpType = opType;
    myExpectedType = expectedType;
  }

  @Override
  public Object evaluate(EvaluationContextImpl context) throws EvaluateException {
    // The left operand is always evaluated; the right one is evaluated lazily
    // inside evaluateOperation to preserve Java's short-circuit semantics.
    Value leftResult = (Value)myLeftOperand.evaluate(context);
    return evaluateOperation(leftResult, myOpType, myRightOperand, myExpectedType, context);
  }

  /**
   * Applies {@code opType} to {@code leftResult} and the (lazily evaluated)
   * right operand, mirroring Java's binary numeric promotion: both integral
   * operands are widened to {@code long}, mixed floating-point operands to
   * {@code double}, before the operation. The raw primitive result is then
   * converted by {@code DebuggerUtilsEx.createValue} — presumably coerced to
   * {@code expectedType} there; that conversion is owned by DebuggerUtilsEx.
   *
   * @throws EvaluateException if the operand types are incompatible with the operator
   */
  @SuppressWarnings("IntegerMultiplicationImplicitCastToLong")
  static Object evaluateOperation(final Value leftResult,
                                  final IElementType opType,
                                  final Evaluator rightOperand,
                                  final String expectedType,
                                  final EvaluationContextImpl context) throws EvaluateException {
    VirtualMachineProxyImpl vm = context.getDebugProcess().getVirtualMachineProxy();
    // Short-circuit: for `true || x` and `false && x` the answer is known from
    // the left operand alone, and the right operand must NOT be evaluated
    // (it may have side effects in the debuggee).
    if (leftResult instanceof BooleanValue) {
      boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
      if (opType == JavaTokenType.OROR && v1) {
        return DebuggerUtilsEx.createValue(vm, expectedType, true);
      }
      if (opType == JavaTokenType.ANDAND && !v1) {
        return DebuggerUtilsEx.createValue(vm, expectedType, false);
      }
    }
    Value rightResult = (Value)rightOperand.evaluate(context);
    if (opType == JavaTokenType.PLUS) {
      // Integral + integral: compute in long; chars, numerics and String
      // concatenation each get their own path below.
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 + v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        final double v1 = ((PrimitiveValue)leftResult).doubleValue();
        final double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 + v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 + v2);
      }
      // String concatenation: either side being a String triggers toString
      // conversion of the other, as in the Java language.
      if (leftResult instanceof StringReference || rightResult instanceof StringReference) {
        String v1 = DebuggerUtils.getValueAsString(context, leftResult);
        String v2 = DebuggerUtils.getValueAsString(context, rightResult);
        return DebuggerUtilsEx.mirrorOfString(v1 + v2, vm, context);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "+"));
    }
    else if (opType == JavaTokenType.MINUS) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 - v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 - v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 - v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "-"));
    }
    else if (opType == JavaTokenType.ASTERISK) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 * v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 * v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 * v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "*"));
    }
    else if (opType == JavaTokenType.DIV) {
      // NOTE(review): integral division by zero here raises ArithmeticException
      // in the debugger process, not in the debuggee — apparently intentional.
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        long v1 = ((PrimitiveValue)leftResult).longValue();
        long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 / v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 / v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 / v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "/"));
    }
    else if (opType == JavaTokenType.PERC) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        long v1 = ((PrimitiveValue)leftResult).longValue();
        long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 % v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 % v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 % v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "%"));
    }
    else if (opType == JavaTokenType.LTLT) {
      // Shifts do NOT promote both operands: the result type follows the left
      // operand (byte/short promote to int, long stays long), so each left-hand
      // width is shifted in its own Java type to reproduce Java semantics.
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        if (leftResult instanceof ByteValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((ByteValue)leftResult).byteValue() << v2);
        }
        else if (leftResult instanceof ShortValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((ShortValue)leftResult).shortValue() << v2);
        }
        else if (leftResult instanceof IntegerValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((IntegerValue)leftResult).intValue() << v2);
        }
        return DebuggerUtilsEx.createValue(vm, expectedType, ((PrimitiveValue)leftResult).longValue() << v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        return DebuggerUtilsEx.createValue(vm, expectedType, ((CharValue)leftResult).charValue() << ((CharValue)rightResult).charValue());
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "<<"));
    }
    else if (opType == JavaTokenType.GTGT) {
      // Signed right shift; same per-width handling as <<.
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        if (leftResult instanceof ByteValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((ByteValue)leftResult).byteValue() >> v2);
        }
        else if (leftResult instanceof ShortValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((ShortValue)leftResult).shortValue() >> v2);
        }
        else if (leftResult instanceof IntegerValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((IntegerValue)leftResult).intValue() >> v2);
        }
        return DebuggerUtilsEx.createValue(vm, expectedType, ((PrimitiveValue)leftResult).longValue() >> v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        return DebuggerUtilsEx.createValue(vm, expectedType, ((CharValue)leftResult).charValue() >> ((CharValue)rightResult).charValue());
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", ">>"));
    }
    else if (opType == JavaTokenType.GTGTGT) {
      // Unsigned right shift; same per-width handling as <<.
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        if (leftResult instanceof ByteValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((ByteValue)leftResult).byteValue() >>> v2);
        }
        else if (leftResult instanceof ShortValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((ShortValue)leftResult).shortValue() >>> v2);
        }
        else if (leftResult instanceof IntegerValue) {
          return DebuggerUtilsEx.createValue(vm, expectedType, ((IntegerValue)leftResult).intValue() >>> v2);
        }
        return DebuggerUtilsEx.createValue(vm, expectedType, ((PrimitiveValue)leftResult).longValue() >>> v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        return DebuggerUtilsEx.createValue(vm, expectedType, ((CharValue)leftResult).charValue() >>> ((CharValue)rightResult).charValue());
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", ">>>"));
    }
    else if (opType == JavaTokenType.AND) {
      // & is both bitwise (integral/char) and non-short-circuit logical (boolean).
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        long v1 = ((PrimitiveValue)leftResult).longValue();
        long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 & v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 & v2);
      }
      if (leftResult instanceof BooleanValue && rightResult instanceof BooleanValue) {
        boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
        boolean v2 = ((PrimitiveValue)rightResult).booleanValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 & v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "&"));
    }
    else if (opType == JavaTokenType.OR) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        long v1 = ((PrimitiveValue)leftResult).longValue();
        long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 | v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 | v2);
      }
      if (leftResult instanceof BooleanValue && rightResult instanceof BooleanValue) {
        boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
        boolean v2 = ((PrimitiveValue)rightResult).booleanValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 | v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "|"));
    }
    else if (opType == JavaTokenType.XOR) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        long v1 = ((PrimitiveValue)leftResult).longValue();
        long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 ^ v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 ^ v2);
      }
      if (leftResult instanceof BooleanValue && rightResult instanceof BooleanValue) {
        boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
        boolean v2 = ((PrimitiveValue)rightResult).booleanValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 ^ v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "^"));
    }
    else if (opType == JavaTokenType.EQEQ) {
      // null operands are compared via Value.equals(null); a JDI Value can
      // represent the debuggee's null, so this is not always trivially false.
      if (leftResult == null && rightResult == null) {
        return DebuggerUtilsEx.createValue(vm, expectedType, true);
      }
      if (leftResult == null) {
        return DebuggerUtilsEx.createValue(vm, expectedType, rightResult.equals(null));
      }
      if (rightResult == null) {
        return DebuggerUtilsEx.createValue(vm, expectedType, leftResult.equals(null));
      }
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 == v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 == v2);
      }
      if (leftResult instanceof BooleanValue && rightResult instanceof BooleanValue) {
        boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
        boolean v2 = ((PrimitiveValue)rightResult).booleanValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 == v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 == v2);
      }
      // Reference identity is compared by JDI unique IDs, because two mirrors
      // of the same debuggee object need not be the same proxy instance.
      if (leftResult instanceof ObjectReference && rightResult instanceof ObjectReference) {
        ObjectReference v1 = (ObjectReference)leftResult;
        ObjectReference v2 = (ObjectReference)rightResult;
        return DebuggerUtilsEx.createValue(vm, expectedType, v1.uniqueID() == v2.uniqueID());
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "=="));
    }
    else if (opType == JavaTokenType.OROR) {
      // The short-circuit `true || x` case was handled before the right operand
      // was evaluated; here both operands are known booleans.
      if (leftResult instanceof BooleanValue && rightResult instanceof BooleanValue) {
        boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
        boolean v2 = ((PrimitiveValue)rightResult).booleanValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 || v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "||"));
    }
    else if (opType == JavaTokenType.ANDAND) {
      if (leftResult instanceof BooleanValue && rightResult instanceof BooleanValue) {
        boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
        boolean v2 = ((PrimitiveValue)rightResult).booleanValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 && v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "&&"));
    }
    else if (opType == JavaTokenType.NE) {
      // Mirror of EQEQ with negated results.
      if (leftResult == null && rightResult == null) return DebuggerUtilsEx.createValue(vm, expectedType, false);
      if (leftResult == null) return DebuggerUtilsEx.createValue(vm, expectedType, !rightResult.equals(null));
      if (rightResult == null) return DebuggerUtilsEx.createValue(vm, expectedType, !leftResult.equals(null));
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 != v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 != v2);
      }
      if (leftResult instanceof BooleanValue && rightResult instanceof BooleanValue) {
        boolean v1 = ((PrimitiveValue)leftResult).booleanValue();
        boolean v2 = ((PrimitiveValue)rightResult).booleanValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 != v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 != v2);
      }
      if (leftResult instanceof ObjectReference && rightResult instanceof ObjectReference) {
        ObjectReference v1 = (ObjectReference)leftResult;
        ObjectReference v2 = (ObjectReference)rightResult;
        return DebuggerUtilsEx.createValue(vm, expectedType, v1.uniqueID() != v2.uniqueID());
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "!="));
    }
    else if (opType == JavaTokenType.LT) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 < v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 < v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 < v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "<"));
    }
    else if (opType == JavaTokenType.GT) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 > v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 > v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 > v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", ">"));
    }
    else if (opType == JavaTokenType.LE) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 <= v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 <= v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 <= v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", "<="));
    }
    else if (opType == JavaTokenType.GE) {
      if (DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult)) {
        final long v1 = ((PrimitiveValue)leftResult).longValue();
        final long v2 = ((PrimitiveValue)rightResult).longValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 >= v2);
      }
      if (DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult)) {
        double v1 = ((PrimitiveValue)leftResult).doubleValue();
        double v2 = ((PrimitiveValue)rightResult).doubleValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 >= v2);
      }
      if (leftResult instanceof CharValue && rightResult instanceof CharValue) {
        char v1 = ((CharValue)leftResult).charValue();
        char v2 = ((CharValue)rightResult).charValue();
        return DebuggerUtilsEx.createValue(vm, expectedType, v1 >= v2);
      }
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.incompatible.types", ">="));
    }
    // Unsupported operator token: this indicates a bug in the caller, not a
    // user error — hence an assertion rather than an EvaluateException.
    LOG.assertTrue(false);
    return null;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.io.retry; import static org.junit.Assert.*; import java.io.IOException; import java.util.concurrent.CountDownLatch; import org.apache.hadoop.io.retry.UnreliableImplementation.TypeOfExceptionToFailWith; import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException; import org.apache.hadoop.ipc.StandbyException; import org.apache.hadoop.util.ThreadUtil; import org.junit.Test; public class TestFailoverProxy { public static class FlipFlopProxyProvider<T> implements FailoverProxyProvider<T> { private Class<T> iface; private T currentlyActive; private T impl1; private T impl2; private int failoversOccurred = 0; public FlipFlopProxyProvider(Class<T> iface, T activeImpl, T standbyImpl) { this.iface = iface; this.impl1 = activeImpl; this.impl2 = standbyImpl; currentlyActive = impl1; } @Override public T getProxy() { return currentlyActive; } @Override public synchronized void performFailover(Object currentProxy) { currentlyActive = impl1 == currentProxy ? impl2 : impl1; failoversOccurred++; } @Override public Class<T> getInterface() { return iface; } @Override public void close() throws IOException { // Nothing to do. 
} public int getFailoversOccurred() { return failoversOccurred; } } public static class FailOverOnceOnAnyExceptionPolicy implements RetryPolicy { @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isIdempotentOrAtMostOnce) { return failovers < 1 ? RetryAction.FAILOVER_AND_RETRY : RetryAction.FAIL; } } private static FlipFlopProxyProvider<UnreliableInterface> newFlipFlopProxyProvider() { return new FlipFlopProxyProvider<UnreliableInterface>( UnreliableInterface.class, new UnreliableImplementation("impl1"), new UnreliableImplementation("impl2")); } private static FlipFlopProxyProvider<UnreliableInterface> newFlipFlopProxyProvider(TypeOfExceptionToFailWith t1, TypeOfExceptionToFailWith t2) { return new FlipFlopProxyProvider<UnreliableInterface>( UnreliableInterface.class, new UnreliableImplementation("impl1", t1), new UnreliableImplementation("impl2", t2)); } @Test public void testSuccedsOnceThenFailOver() throws UnreliableException, IOException, StandbyException { UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create( UnreliableInterface.class, newFlipFlopProxyProvider(), new FailOverOnceOnAnyExceptionPolicy()); assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString()); assertEquals("impl2", unreliable.succeedsOnceThenFailsReturningString()); try { unreliable.succeedsOnceThenFailsReturningString(); fail("should not have succeeded more than twice"); } catch (UnreliableException e) { // expected } } @Test public void testSucceedsTenTimesThenFailOver() throws UnreliableException, IOException, StandbyException { UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create( UnreliableInterface.class, newFlipFlopProxyProvider(), new FailOverOnceOnAnyExceptionPolicy()); for (int i = 0; i < 10; i++) { assertEquals("impl1", unreliable.succeedsTenTimesThenFailsReturningString()); } assertEquals("impl2", unreliable.succeedsTenTimesThenFailsReturningString()); } @Test public void 
testNeverFailOver() throws UnreliableException, IOException, StandbyException { UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create( UnreliableInterface.class, newFlipFlopProxyProvider(), RetryPolicies.TRY_ONCE_THEN_FAIL); unreliable.succeedsOnceThenFailsReturningString(); try { unreliable.succeedsOnceThenFailsReturningString(); fail("should not have succeeded twice"); } catch (UnreliableException e) { assertEquals("impl1", e.getMessage()); } } @Test public void testFailoverOnStandbyException() throws UnreliableException, IOException, StandbyException { UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create( UnreliableInterface.class, newFlipFlopProxyProvider(), RetryPolicies.failoverOnNetworkException(1)); assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString()); try { unreliable.succeedsOnceThenFailsReturningString(); fail("should not have succeeded twice"); } catch (UnreliableException e) { // Make sure there was no failover on normal exception. 
assertEquals("impl1", e.getMessage()); } unreliable = (UnreliableInterface)RetryProxy .create(UnreliableInterface.class, newFlipFlopProxyProvider( TypeOfExceptionToFailWith.STANDBY_EXCEPTION, TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION), RetryPolicies.failoverOnNetworkException(1)); assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString()); // Make sure we fail over since the first implementation threw a StandbyException assertEquals("impl2", unreliable.succeedsOnceThenFailsReturningString()); } @Test public void testFailoverOnNetworkExceptionIdempotentOperation() throws UnreliableException, IOException, StandbyException { UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create( UnreliableInterface.class, newFlipFlopProxyProvider( TypeOfExceptionToFailWith.IO_EXCEPTION, TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION), RetryPolicies.failoverOnNetworkException(1)); assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningString()); try { unreliable.succeedsOnceThenFailsReturningString(); fail("should not have succeeded twice"); } catch (IOException e) { // Make sure we *don't* fail over since the first implementation threw an // IOException and this method is not idempotent assertEquals("impl1", e.getMessage()); } assertEquals("impl1", unreliable.succeedsOnceThenFailsReturningStringIdempotent()); // Make sure we fail over since the first implementation threw an // IOException and this method is idempotent. 
assertEquals("impl2", unreliable.succeedsOnceThenFailsReturningStringIdempotent()); } /** * Test that if a non-idempotent void function is called, and there is an exception, * the exception is properly propagated */ @Test public void testExceptionPropagatedForNonIdempotentVoid() throws Exception { UnreliableInterface unreliable = (UnreliableInterface)RetryProxy .create(UnreliableInterface.class, newFlipFlopProxyProvider( TypeOfExceptionToFailWith.IO_EXCEPTION, TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION), RetryPolicies.failoverOnNetworkException(1)); try { unreliable.nonIdempotentVoidFailsIfIdentifierDoesntMatch("impl2"); fail("did not throw an exception"); } catch (Exception e) { } } private static class SynchronizedUnreliableImplementation extends UnreliableImplementation { private CountDownLatch methodLatch; public SynchronizedUnreliableImplementation(String identifier, TypeOfExceptionToFailWith exceptionToFailWith, int threadCount) { super(identifier, exceptionToFailWith); methodLatch = new CountDownLatch(threadCount); } @Override public String failsIfIdentifierDoesntMatch(String identifier) throws UnreliableException, StandbyException, IOException { // Wait until all threads are trying to invoke this method methodLatch.countDown(); try { methodLatch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } return super.failsIfIdentifierDoesntMatch(identifier); } } private static class ConcurrentMethodThread extends Thread { private UnreliableInterface unreliable; public String result; public ConcurrentMethodThread(UnreliableInterface unreliable) { this.unreliable = unreliable; } @Override public void run() { try { result = unreliable.failsIfIdentifierDoesntMatch("impl2"); } catch (Exception e) { throw new RuntimeException(e); } } } /** * Test that concurrent failed method invocations only result in a single * failover. 
*/ @Test public void testConcurrentMethodFailures() throws InterruptedException { FlipFlopProxyProvider<UnreliableInterface> proxyProvider = new FlipFlopProxyProvider<UnreliableInterface>( UnreliableInterface.class, new SynchronizedUnreliableImplementation("impl1", TypeOfExceptionToFailWith.STANDBY_EXCEPTION, 2), new UnreliableImplementation("impl2", TypeOfExceptionToFailWith.STANDBY_EXCEPTION)); final UnreliableInterface unreliable = (UnreliableInterface)RetryProxy .create(UnreliableInterface.class, proxyProvider, RetryPolicies.failoverOnNetworkException(10)); ConcurrentMethodThread t1 = new ConcurrentMethodThread(unreliable); ConcurrentMethodThread t2 = new ConcurrentMethodThread(unreliable); t1.start(); t2.start(); t1.join(); t2.join(); assertEquals("impl2", t1.result); assertEquals("impl2", t2.result); assertEquals(1, proxyProvider.getFailoversOccurred()); } /** * Ensure that when all configured services are throwing StandbyException * that we fail over back and forth between them until one is no longer * throwing StandbyException. 
*/ @Test public void testFailoverBetweenMultipleStandbys() throws UnreliableException, StandbyException, IOException { final long millisToSleep = 10000; final UnreliableImplementation impl1 = new UnreliableImplementation("impl1", TypeOfExceptionToFailWith.STANDBY_EXCEPTION); FlipFlopProxyProvider<UnreliableInterface> proxyProvider = new FlipFlopProxyProvider<UnreliableInterface>( UnreliableInterface.class, impl1, new UnreliableImplementation("impl2", TypeOfExceptionToFailWith.STANDBY_EXCEPTION)); final UnreliableInterface unreliable = (UnreliableInterface)RetryProxy .create(UnreliableInterface.class, proxyProvider, RetryPolicies.failoverOnNetworkException( RetryPolicies.TRY_ONCE_THEN_FAIL, 10, 1000, 10000)); new Thread() { @Override public void run() { ThreadUtil.sleepAtLeastIgnoreInterrupts(millisToSleep); impl1.setIdentifier("renamed-impl1"); } }.start(); String result = unreliable.failsIfIdentifierDoesntMatch("renamed-impl1"); assertEquals("renamed-impl1", result); } /** * Ensure that normal IO exceptions don't result in a failover. */ @Test public void testExpectedIOException() { UnreliableInterface unreliable = (UnreliableInterface)RetryProxy.create( UnreliableInterface.class, newFlipFlopProxyProvider( TypeOfExceptionToFailWith.REMOTE_EXCEPTION, TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION), RetryPolicies.failoverOnNetworkException( RetryPolicies.TRY_ONCE_THEN_FAIL, 10, 1000, 10000)); try { unreliable.failsIfIdentifierDoesntMatch("no-such-identifier"); fail("Should have thrown *some* exception"); } catch (Exception e) { assertTrue("Expected IOE but got " + e.getClass(), e instanceof IOException); } } }
/*
 * Copyright 2014-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazon.janusgraph.diskstorage.dynamodb;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
import com.amazonaws.services.dynamodbv2.model.DeleteItemRequest;
import com.amazonaws.services.dynamodbv2.model.GetItemRequest;
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
import com.amazonaws.services.dynamodbv2.model.QueryRequest;
import com.amazonaws.services.dynamodbv2.model.ReturnConsumedCapacity;
import com.amazonaws.services.dynamodbv2.model.ScanRequest;
import com.amazonaws.services.dynamodbv2.model.UpdateItemRequest;
import lombok.Getter;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.janusgraph.diskstorage.BackendException;
import org.janusgraph.diskstorage.Entry;
import org.janusgraph.diskstorage.StaticBuffer;
import org.janusgraph.diskstorage.keycolumnvalue.KCVMutation;
import org.janusgraph.diskstorage.keycolumnvalue.KeySliceQuery;
import org.janusgraph.diskstorage.keycolumnvalue.SliceQuery;
import org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction;
import org.janusgraph.diskstorage.locking.TemporaryLockingException;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.common.util.concurrent.ExecutionError;
import com.google.common.util.concurrent.UncheckedExecutionException;
import lombok.extern.slf4j.Slf4j;

/**
 * The base class for the SINGLE and MULTI implementations of the Amazon DynamoDB Storage Backend
 * for JanusGraph distributed store type.
 *
 * <p>Holds the per-store table name, consistency setting, request-builder helpers, and a local
 * (in-process) key-column lock cache used by {@link #acquireLock}.
 *
 * @author Matthew Sowders
 * @author Alexander Patrikalakis
 *
 */
@Slf4j
public abstract class AbstractDynamoDbStore implements AwsStore {
    // Shared DynamoDB client obtained from the manager.
    protected final Client client;
    // Fully-qualified DynamoDB table name: "<prefix>_<storeName>".
    @Getter
    private final String tableName;
    // Owning store manager; also the target for delegated mutations (see mutateOneKey).
    private final DynamoDBStoreManager manager;
    // Logical JanusGraph store name (without the table prefix).
    @Getter
    private final String name;
    // Whether reads should be strongly consistent; copied from the client configuration.
    private final boolean forceConsistentRead;

    /**
     * The key column local lock cache maps key-column pairs to the DynamoDbStoreTransaction that first
     * acquired a lock on those key-column pairs.
     */
    private final Cache<Pair<StaticBuffer, StaticBuffer>, DynamoDbStoreTransaction> keyColumnLocalLocks;

    /**
     * Removal listener for the local lock cache; only logs (at trace) which key-column
     * pair expired / was invalidated and for which transaction. Enum singleton so a
     * single stateless instance is shared by all stores.
     */
    private enum ReportingRemovalListener implements RemovalListener<Pair<StaticBuffer, StaticBuffer>, DynamoDbStoreTransaction> {
        INSTANCE;
        @Override
        public void onRemoval(final RemovalNotification<Pair<StaticBuffer, StaticBuffer>, DynamoDbStoreTransaction> notice) {
            log.trace("Expiring {} in tx {} because of {}", notice.getKey().toString(), notice.getValue().toString(), notice.getCause());
        }
    }

    /**
     * Applies a single key's mutation by delegating to the manager's batch
     * {@code mutateMany} with a singleton map keyed by this store's name.
     */
    protected void mutateOneKey(final StaticBuffer key, final KCVMutation mutation, final StoreTransaction txh) throws BackendException {
        manager.mutateMany(Collections.singletonMap(name, Collections.singletonMap(key, mutation)), txh);
    }

    /** Builds an UpdateItemRequest pre-populated with the table name and TOTAL consumed-capacity reporting. */
    protected UpdateItemRequest createUpdateItemRequest() {
        return new UpdateItemRequest()
            .withTableName(tableName)
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);
    }

    /** Builds a GetItemRequest honoring this store's consistent-read setting. */
    protected GetItemRequest createGetItemRequest() {
        return new GetItemRequest()
            .withTableName(tableName)
            .withConsistentRead(forceConsistentRead)
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);
    }

    /** Builds a DeleteItemRequest pre-populated with the table name and consumed-capacity reporting. */
    protected DeleteItemRequest createDeleteItemRequest() {
        return new DeleteItemRequest()
            .withTableName(tableName)
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);
    }

    /** Builds a QueryRequest honoring this store's consistent-read setting. */
    protected QueryRequest createQueryRequest() {
        return new QueryRequest()
            .withTableName(tableName)
            .withConsistentRead(forceConsistentRead)
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);
    }

    /** Builds a ScanRequest with the per-table scan limit taken from the client configuration. */
    protected ScanRequest createScanRequest() {
        return new ScanRequest().withTableName(tableName)
            .withConsistentRead(forceConsistentRead)
            .withLimit(client.scanLimit(tableName))
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);
    }

    /**
     * Creates a store bound to {@code prefix + "_" + storeName} and initializes the local
     * key-column lock cache. Lock entries expire after the manager's configured lock
     * duration (expire-after-write), which is what releases a lock that was never
     * explicitly invalidated via {@link #releaseLock}.
     */
    AbstractDynamoDbStore(final DynamoDBStoreManager manager, final String prefix, final String storeName) {
        this.manager = manager;
        this.client = this.manager.getClient();
        this.name = storeName;
        this.tableName = prefix + "_" + storeName;
        this.forceConsistentRead = client.isForceConsistentRead();
        final CacheBuilder<Pair<StaticBuffer, StaticBuffer>, DynamoDbStoreTransaction> builder = CacheBuilder.newBuilder().concurrencyLevel(client.getDelegate().getMaxConcurrentUsers())
            .expireAfterWrite(manager.getLockExpiresDuration().toMillis(), TimeUnit.MILLISECONDS)
            .removalListener(ReportingRemovalListener.INSTANCE);
        this.keyColumnLocalLocks = builder.build();
    }

    /**
     * Creates the schemata for the DynamoDB table or tables each store requires.
     * Implementations should override and reuse this logic
     * @return a create table request appropriate for the schema of the selected implementation.
     */
    public CreateTableRequest getTableSchema() {
        return new CreateTableRequest()
            .withTableName(tableName)
            .withProvisionedThroughput(new ProvisionedThroughput(client.readCapacity(tableName),
                client.writeCapacity(tableName)));
    }

    /** Creates the backing table (if needed) and blocks until it is ACTIVE. */
    @Override
    public final void ensureStore() throws BackendException {
        log.debug("Entering ensureStore table:{}", tableName);
        client.getDelegate().createTableAndWaitForActive(getTableSchema());
    }

    /** Deletes the backing table and blocks until the deletion has completed. */
    @Override
    public final void deleteStore() throws BackendException {
        log.debug("Entering deleteStore name:{}", name);
        client.getDelegate().deleteTable(getTableSchema().getTableName());
        //block until the tables are actually deleted
        client.getDelegate().ensureTableDeleted(getTableSchema().getTableName());
    }

    /**
     * Acquires an in-process lock on the given key-column pair for the calling transaction.
     *
     * <p>{@code Cache.get(keyColumn, () -> tx)} atomically installs {@code tx} as the holder
     * if no live entry exists, otherwise returns the current holder; if the holder is a
     * different transaction, a {@link TemporaryLockingException} is thrown. Note this lock
     * is local to this JVM only; expiry of the cache entry is what bounds the hold time.
     */
    @Override
    public void acquireLock(final StaticBuffer key, final StaticBuffer column, final StaticBuffer expectedValue, final StoreTransaction txh) throws BackendException {
        final DynamoDbStoreTransaction tx = DynamoDbStoreTransaction.getTx(txh);
        final Pair<StaticBuffer, StaticBuffer> keyColumn = Pair.of(key, column);
        final DynamoDbStoreTransaction existing;
        try {
            existing = keyColumnLocalLocks.get(keyColumn, () -> tx);
        } catch (ExecutionException | UncheckedExecutionException | ExecutionError e) {
            throw new TemporaryLockingException("Unable to acquire lock", e);
        }
        if (null != existing && tx != existing) {
            throw new TemporaryLockingException(String.format("tx %s already locked key-column %s when tx %s tried to lock", existing.toString(), keyColumn.toString(), tx.toString()));
        }

        // Titan's locking expects that only the first expectedValue for a given key/column should be used
        tx.putKeyColumnOnlyIfItIsNotYetChangedInTx(this, key, column, expectedValue);
    }

    /** No-op close (only logs); the underlying client is owned by the manager. */
    @Override
    public void close() throws BackendException {
        log.debug("Closing table:{}", tableName);
    }

    /**
     * Hex-encodes a key for log messages; empty string for a null key.
     * NOTE(review): this assumes the ByteBuffer's backing array exactly spans the key
     * bytes (arrayOffset 0, limit == capacity) — verify StaticBuffer.asByteBuffer's
     * contract, otherwise extra bytes could leak into the log output.
     */
    String encodeKeyForLog(final StaticBuffer key) {
        if (null == key) {
            return "";
        }
        return Constants.HEX_PREFIX + Hex.encodeHexString(key.asByteBuffer().array());
    }

    /**
     * Renders a list of columns (StaticBuffer or Entry elements) as a bracketed,
     * comma-separated hex list for logging; unrecognized element types render as "".
     */
    String encodeForLog(final List<?> columns) {
        return columns.stream()
            .map(obj -> {
                if (obj instanceof StaticBuffer) {
                    return (StaticBuffer) obj;
                } else if (obj instanceof Entry) {
                    return ((Entry) obj).getColumn();
                } else {
                    return null;
                }
            })
            .map(this::encodeKeyForLog)
            .collect(Collectors.joining(",", "[", "]"));
    }

    // Identity of a store is its table name; hashCode/equals agree on that single field.
    @Override
    public int hashCode() {
        return tableName.hashCode();
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj == null) {
            return false;
        }
        if (obj == this) {
            return true;
        }
        if (obj.getClass() != getClass()) {
            return false;
        }
        final AbstractDynamoDbStore rhs = (AbstractDynamoDbStore) obj;
        return new EqualsBuilder().append(tableName, rhs.tableName).isEquals();
    }

    @Override
    public String toString() {
        return this.getClass().getName() + ":" + getTableName();
    }

    /** Formats a slice query's start/end columns and limit for logging. */
    protected String encodeForLog(final SliceQuery query) {
        return "slice[rk:" + encodeKeyForLog(query.getSliceStart()) + " -> " + encodeKeyForLog(query.getSliceEnd()) + " limit:" + query.getLimit() + "]";
    }

    /** Formats a key-slice query's hash key, column range, and limit for logging. */
    protected String encodeForLog(final KeySliceQuery query) {
        return "keyslice[hk:" + encodeKeyForLog(query.getKey()) + " " + "rk:" + encodeKeyForLog(query.getSliceStart()) + " -> " + encodeKeyForLog(query.getSliceEnd()) + " limit:" + query.getLimit() + "]";
    }

    /** Drops the local lock entry for the key-column pair (removal listener logs it). */
    void releaseLock(final StaticBuffer key, final StaticBuffer column) {
        keyColumnLocalLocks.invalidate(Pair.of(key, column));
    }
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Note that $opt$ is a marker for the optimizing compiler to ensure
// it does compile the method.
//
// Exercises int/long AND, OR, XOR. The *Lit8 (mask 0xF) and *Lit16 (mask
// 0xFF00) variants use constant operands of different magnitudes — presumably
// to exercise the narrow/wide literal instruction encodings; the two-operand
// variants use non-constant operands. Keep these method bodies as single
// return statements so the generated code stays trivially inspectable.
public class Main {

  // Throws an Error naming both values when the check fails (int flavor).
  public static void expectEquals(int expected, int result) {
    if (expected != result) {
      throw new Error("Expected: " + expected + ", found: " + result);
    }
  }

  // Throws an Error naming both values when the check fails (long flavor).
  public static void expectEquals(long expected, long result) {
    if (expected != result) {
      throw new Error("Expected: " + expected + ", found: " + result);
    }
  }

  public static void main(String[] args) {
    andInt();
    andLong();

    orInt();
    orLong();

    xorInt();
    xorLong();
  }

  // int AND: mixed signs and the 0xF / 0xFF00 literal masks.
  // E.g. -12 & 0xF == 4 because -12 is ...11110100 in two's complement.
  private static void andInt() {
    expectEquals(1, $opt$And(5, 3));
    expectEquals(0, $opt$And(0, 0));
    expectEquals(0, $opt$And(0, 3));
    expectEquals(0, $opt$And(3, 0));
    expectEquals(1, $opt$And(1, -3));
    expectEquals(-12, $opt$And(-12, -3));

    expectEquals(1, $opt$AndLit8(1));
    expectEquals(0, $opt$AndLit8(0));
    expectEquals(0, $opt$AndLit8(0));
    expectEquals(3, $opt$AndLit8(3));
    expectEquals(4, $opt$AndLit8(-12));

    expectEquals(0, $opt$AndLit16(1));
    expectEquals(0, $opt$AndLit16(0));
    expectEquals(0, $opt$AndLit16(0));
    expectEquals(0, $opt$AndLit16(3));
    expectEquals(65280, $opt$AndLit16(-12));
  }

  // long AND: same cases as andInt, on 64-bit operands.
  private static void andLong() {
    expectEquals(1L, $opt$And(5L, 3L));
    expectEquals(0L, $opt$And(0L, 0L));
    expectEquals(0L, $opt$And(0L, 3L));
    expectEquals(0L, $opt$And(3L, 0L));
    expectEquals(1L, $opt$And(1L, -3L));
    expectEquals(-12L, $opt$And(-12L, -3L));

    expectEquals(1L, $opt$AndLit8(1L));
    expectEquals(0L, $opt$AndLit8(0L));
    expectEquals(0L, $opt$AndLit8(0L));
    expectEquals(3L, $opt$AndLit8(3L));
    expectEquals(4L, $opt$AndLit8(-12L));

    expectEquals(0L, $opt$AndLit16(1L));
    expectEquals(0L, $opt$AndLit16(0L));
    expectEquals(0L, $opt$AndLit16(0L));
    expectEquals(0L, $opt$AndLit16(3L));
    expectEquals(65280L, $opt$AndLit16(-12L));
  }

  static int $opt$And(int a, int b) {
    return a & b;
  }

  static int $opt$AndLit8(int a) {
    return a & 0xF;
  }

  static int $opt$AndLit16(int a) {
    return a & 0xFF00;
  }

  static long $opt$And(long a, long b) {
    return a & b;
  }

  static long $opt$AndLit8(long a) {
    return a & 0xF;
  }

  static long $opt$AndLit16(long a) {
    return a & 0xFF00;
  }

  // int OR: note -12 | 0xFF00 == -12 since the mask bits are already set in -12.
  private static void orInt() {
    expectEquals(7, $opt$Or(5, 3));
    expectEquals(0, $opt$Or(0, 0));
    expectEquals(3, $opt$Or(0, 3));
    expectEquals(3, $opt$Or(3, 0));
    expectEquals(-3, $opt$Or(1, -3));
    expectEquals(-3, $opt$Or(-12, -3));

    expectEquals(15, $opt$OrLit8(1));
    expectEquals(15, $opt$OrLit8(0));
    expectEquals(15, $opt$OrLit8(3));
    expectEquals(-1, $opt$OrLit8(-12));

    expectEquals(0xFF01, $opt$OrLit16(1));
    expectEquals(0xFF00, $opt$OrLit16(0));
    expectEquals(0xFF03, $opt$OrLit16(3));
    expectEquals(-12, $opt$OrLit16(-12));
  }

  // long OR: same cases as orInt, on 64-bit operands.
  private static void orLong() {
    expectEquals(7L, $opt$Or(5L, 3L));
    expectEquals(0L, $opt$Or(0L, 0L));
    expectEquals(3L, $opt$Or(0L, 3L));
    expectEquals(3L, $opt$Or(3L, 0L));
    expectEquals(-3L, $opt$Or(1L, -3L));
    expectEquals(-3L, $opt$Or(-12L, -3L));

    expectEquals(15L, $opt$OrLit8(1L));
    expectEquals(15L, $opt$OrLit8(0L));
    expectEquals(15L, $opt$OrLit8(3L));
    expectEquals(-1L, $opt$OrLit8(-12L));

    expectEquals(0xFF01L, $opt$OrLit16(1L));
    expectEquals(0xFF00L, $opt$OrLit16(0L));
    expectEquals(0xFF03L, $opt$OrLit16(3L));
    expectEquals(-12L, $opt$OrLit16(-12L));
  }

  static int $opt$Or(int a, int b) {
    return a | b;
  }

  static int $opt$OrLit8(int a) {
    return a | 0xF;
  }

  static int $opt$OrLit16(int a) {
    return a | 0xFF00;
  }

  static long $opt$Or(long a, long b) {
    return a | b;
  }

  static long $opt$OrLit8(long a) {
    return a | 0xF;
  }

  static long $opt$OrLit16(long a) {
    return a | 0xFF00;
  }

  // int XOR: e.g. -12 ^ 0xFF00 == -0xFF0C (sign bits untouched by the mask flip).
  private static void xorInt() {
    expectEquals(6, $opt$Xor(5, 3));
    expectEquals(0, $opt$Xor(0, 0));
    expectEquals(3, $opt$Xor(0, 3));
    expectEquals(3, $opt$Xor(3, 0));
    expectEquals(-4, $opt$Xor(1, -3));
    expectEquals(9, $opt$Xor(-12, -3));

    expectEquals(14, $opt$XorLit8(1));
    expectEquals(15, $opt$XorLit8(0));
    expectEquals(12, $opt$XorLit8(3));
    expectEquals(-5, $opt$XorLit8(-12));

    expectEquals(0xFF01, $opt$XorLit16(1));
    expectEquals(0xFF00, $opt$XorLit16(0));
    expectEquals(0xFF03, $opt$XorLit16(3));
    expectEquals(-0xFF0c, $opt$XorLit16(-12));
  }

  // long XOR: same cases as xorInt, on 64-bit operands.
  private static void xorLong() {
    expectEquals(6L, $opt$Xor(5L, 3L));
    expectEquals(0L, $opt$Xor(0L, 0L));
    expectEquals(3L, $opt$Xor(0L, 3L));
    expectEquals(3L, $opt$Xor(3L, 0L));
    expectEquals(-4L, $opt$Xor(1L, -3L));
    expectEquals(9L, $opt$Xor(-12L, -3L));

    expectEquals(14L, $opt$XorLit8(1L));
    expectEquals(15L, $opt$XorLit8(0L));
    expectEquals(12L, $opt$XorLit8(3L));
    expectEquals(-5L, $opt$XorLit8(-12L));

    expectEquals(0xFF01L, $opt$XorLit16(1L));
    expectEquals(0xFF00L, $opt$XorLit16(0L));
    expectEquals(0xFF03L, $opt$XorLit16(3L));
    expectEquals(-0xFF0cL, $opt$XorLit16(-12L));
  }

  static int $opt$Xor(int a, int b) {
    return a ^ b;
  }

  static int $opt$XorLit8(int a) {
    return a ^ 0xF;
  }

  static int $opt$XorLit16(int a) {
    return a ^ 0xFF00;
  }

  static long $opt$Xor(long a, long b) {
    return a ^ b;
  }

  static long $opt$XorLit8(long a) {
    return a ^ 0xF;
  }

  static long $opt$XorLit16(long a) {
    return a ^ 0xFF00;
  }
}
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android.desugar;

import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.android.desugar.runtime.ThrowableExtensionTestUtility.getStrategyClassName;
import static com.google.devtools.build.android.desugar.runtime.ThrowableExtensionTestUtility.getTwrStrategyClassNameSpecifiedInSystemProperty;
import static com.google.devtools.build.android.desugar.runtime.ThrowableExtensionTestUtility.isMimicStrategy;
import static com.google.devtools.build.android.desugar.runtime.ThrowableExtensionTestUtility.isNullStrategy;
import static com.google.devtools.build.android.desugar.runtime.ThrowableExtensionTestUtility.isReuseStrategy;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.fail;
import static org.objectweb.asm.ClassWriter.COMPUTE_MAXS;
import static org.objectweb.asm.Opcodes.ASM5;
import static org.objectweb.asm.Opcodes.INVOKESTATIC;
import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;

import com.google.devtools.build.android.desugar.io.BitFlags;
import com.google.devtools.build.android.desugar.runtime.ThrowableExtension;
import com.google.devtools.build.android.desugar.testdata.ClassUsingTryWithResources;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;

/** This is the unit test for {@link TryWithResourcesRewriter} */
@RunWith(JUnit4.class)
public class TryWithResourcesRewriterTest {

  // Loads ClassUsingTryWithResources through the rewriter so the desugared
  // bytecode (and rewrite statistics) are captured for every test.
  private final DesugaringClassLoader classLoader =
      new DesugaringClassLoader(ClassUsingTryWithResources.class.getName());
  private Class<?> desugaredClass;

  @Before
  public void setup() {
    try {
      desugaredClass = classLoader.findClass(ClassUsingTryWithResources.class.getName());
    } catch (ClassNotFoundException e) {
      // ClassNotFoundException here means the desugaring pipeline itself is broken.
      throw new AssertionError(e);
    }
  }

  @Test
  public void testMethodsAreDesugared() {
    // verify whether the desugared class is indeed desugared.
    DesugaredThrowableMethodCallCounter origCounter =
        countDesugaredThrowableMethodCalls(ClassUsingTryWithResources.class);
    DesugaredThrowableMethodCallCounter desugaredCounter =
        countDesugaredThrowableMethodCalls(classLoader.classContent, classLoader);
    /**
     * In java9, javac creates a helper method {@code $closeResource(Throwable, AutoCloseable)}
     * to close resources. So, the following number 3 is highly dependant on the version of javac.
     */
    assertThat(hasAutoCloseable(classLoader.classContent)).isFalse();
    assertThat(classLoader.numOfTryWithResourcesInvoked.intValue()).isAtLeast(2);
    assertThat(classLoader.visitedExceptionTypes)
        .containsExactly(
            "java/lang/Exception", "java/lang/Throwable", "java/io/UnsupportedEncodingException");
    assertDesugaringBehavior(origCounter, desugaredCounter);
  }

  @Test
  public void testCheckSuppressedExceptionsReturningEmptySuppressedExceptions() {
    // Baseline: the original (non-desugared) class reports no suppressed exceptions.
    {
      Throwable[] suppressed = ClassUsingTryWithResources.checkSuppressedExceptions(false);
      assertThat(suppressed).isEmpty();
    }
    // Same call on the desugared class, via reflection.
    try {
      Throwable[] suppressed =
          (Throwable[])
              desugaredClass
                  .getMethod("checkSuppressedExceptions", boolean.class)
                  .invoke(null, Boolean.FALSE);
      assertThat(suppressed).isEmpty();
    } catch (Exception e) {
      e.printStackTrace();
      throw new AssertionError(e);
    }
  }

  @Test
  public void testPrintStackTraceOfCaughtException() {
    // Baseline: real suppressed exceptions appear in the printed trace.
    {
      String trace = ClassUsingTryWithResources.printStackTraceOfCaughtException();
      assertThat(trace.toLowerCase()).contains("suppressed");
    }
    // Desugared behavior depends on the runtime ThrowableExtension strategy:
    // mimic/reuse preserve suppressed info, the null strategy drops it.
    try {
      String trace =
          (String) desugaredClass.getMethod("printStackTraceOfCaughtException").invoke(null);
      if (isMimicStrategy()) {
        assertThat(trace.toLowerCase()).contains("suppressed");
      } else if (isReuseStrategy()) {
        assertThat(trace.toLowerCase()).contains("suppressed");
      } else if (isNullStrategy()) {
        assertThat(trace.toLowerCase()).doesNotContain("suppressed");
      } else {
        fail("unexpected desugaring strategy " + ThrowableExtension.getStrategy());
      }
    } catch (Exception e) {
      e.printStackTrace();
      throw new AssertionError(e);
    }
  }

  @Test
  public void testCheckSuppressedExceptionReturningOneSuppressedException() {
    // Baseline: one suppressed exception is recorded by the original class.
    {
      Throwable[] suppressed = ClassUsingTryWithResources.checkSuppressedExceptions(true);
      assertThat(suppressed).hasLength(1);
    }
    // Desugared: mimic/reuse keep the suppressed exception; null strategy loses it.
    try {
      Throwable[] suppressed =
          (Throwable[])
              desugaredClass
                  .getMethod("checkSuppressedExceptions", boolean.class)
                  .invoke(null, Boolean.TRUE);
      if (isMimicStrategy()) {
        assertThat(suppressed).hasLength(1);
      } else if (isReuseStrategy()) {
        assertThat(suppressed).hasLength(1);
      } else if (isNullStrategy()) {
        assertThat(suppressed).isEmpty();
      } else {
        fail("unexpected desugaring strategy " + ThrowableExtension.getStrategy());
      }
    } catch (Exception e) {
      e.printStackTrace();
      throw new AssertionError(e);
    }
  }

  @Test
  public void testSimpleTryWithResources() throws Throwable {
    // Baseline: the RuntimeException carries the IOException thrown by close().
    {
      RuntimeException expected =
          assertThrows(
              RuntimeException.class, () -> ClassUsingTryWithResources.simpleTryWithResources());
      assertThat(expected.getClass()).isEqualTo(RuntimeException.class);
      assertThat(expected.getSuppressed()).hasLength(1);
      assertThat(expected.getSuppressed()[0].getClass()).isEqualTo(IOException.class);
    }

    try {
      // Unwrap the reflective InvocationTargetException to get the real exception.
      InvocationTargetException e =
          assertThrows(
              InvocationTargetException.class,
              () -> desugaredClass.getMethod("simpleTryWithResources").invoke(null));
      throw e.getCause();
    } catch (RuntimeException expected) {
      String expectedStrategyName = getTwrStrategyClassNameSpecifiedInSystemProperty();
      assertThat(getStrategyClassName()).isEqualTo(expectedStrategyName);
      if (isMimicStrategy()) {
        // Mimic: suppressed exceptions live in ThrowableExtension's side table,
        // not on the Throwable itself.
        assertThat(expected.getSuppressed()).isEmpty();
        assertThat(ThrowableExtension.getSuppressed(expected)).hasLength(1);
        assertThat(ThrowableExtension.getSuppressed(expected)[0].getClass())
            .isEqualTo(IOException.class);
      } else if (isReuseStrategy()) {
        // Reuse: the platform's native suppressed-exception mechanism is used.
        assertThat(expected.getSuppressed()).hasLength(1);
        assertThat(expected.getSuppressed()[0].getClass()).isEqualTo(IOException.class);
        assertThat(ThrowableExtension.getSuppressed(expected)[0].getClass())
            .isEqualTo(IOException.class);
      } else if (isNullStrategy()) {
        // Null: suppressed exceptions are discarded entirely.
        assertThat(expected.getSuppressed()).isEmpty();
        assertThat(ThrowableExtension.getSuppressed(expected)).isEmpty();
      } else {
        fail("unexpected desugaring strategy " + ThrowableExtension.getStrategy());
      }
    }
  }

  /**
   * Asserts the rewrite is a faithful translation: every Throwable API call in the
   * original corresponds to a ThrowableExtension call in the desugared class (and
   * vice versa), and the desugared class retains no direct Throwable suppression /
   * print calls at all.
   */
  private static void assertDesugaringBehavior(
      DesugaredThrowableMethodCallCounter orig, DesugaredThrowableMethodCallCounter desugared) {
    assertThat(desugared.countThrowableGetSuppressed()).isEqualTo(orig.countExtGetSuppressed());
    assertThat(desugared.countThrowableAddSuppressed()).isEqualTo(orig.countExtAddSuppressed());
    assertThat(desugared.countThrowablePrintStackTrace()).isEqualTo(orig.countExtPrintStackTrace());
    assertThat(desugared.countThrowablePrintStackTracePrintStream())
        .isEqualTo(orig.countExtPrintStackTracePrintStream());
    assertThat(desugared.countThrowablePrintStackTracePrintWriter())
        .isEqualTo(orig.countExtPrintStackTracePrintWriter());

    assertThat(orig.countThrowableGetSuppressed()).isEqualTo(desugared.countExtGetSuppressed());
    // $closeResource may be specialized into multiple versions.
    assertThat(orig.countThrowableAddSuppressed()).isAtMost(desugared.countExtAddSuppressed());
    assertThat(orig.countThrowablePrintStackTrace()).isEqualTo(desugared.countExtPrintStackTrace());
    assertThat(orig.countThrowablePrintStackTracePrintStream())
        .isEqualTo(desugared.countExtPrintStackTracePrintStream());
    assertThat(orig.countThrowablePrintStackTracePrintWriter())
        .isEqualTo(desugared.countExtPrintStackTracePrintWriter());

    if (orig.getSyntheticCloseResourceCount() > 0) {
      // Depending on the specific javac version, $closeResource(Throwable, AutoCloseable) may not
      // be there.
      assertThat(orig.getSyntheticCloseResourceCount()).isEqualTo(1);
      assertThat(desugared.getSyntheticCloseResourceCount()).isAtLeast(1);
    }
    assertThat(desugared.countThrowablePrintStackTracePrintStream()).isEqualTo(0);
    assertThat(desugared.countThrowablePrintStackTracePrintStream()).isEqualTo(0);
    assertThat(desugared.countThrowablePrintStackTracePrintWriter()).isEqualTo(0);
    assertThat(desugared.countThrowableAddSuppressed()).isEqualTo(0);
    assertThat(desugared.countThrowableGetSuppressed()).isEqualTo(0);
  }

  /** Counts Throwable/ThrowableExtension calls in a class loaded from the classpath by name. */
  private static DesugaredThrowableMethodCallCounter countDesugaredThrowableMethodCalls(
      Class<?> klass) {
    try {
      ClassReader reader = new ClassReader(klass.getName());
      DesugaredThrowableMethodCallCounter counter =
          new DesugaredThrowableMethodCallCounter(klass.getClassLoader());
      reader.accept(counter, 0);
      return counter;
    } catch (IOException e) {
      e.printStackTrace();
      fail(e.toString());
      return null;
    }
  }

  /** Counts Throwable/ThrowableExtension calls in raw class bytes (the desugared output). */
  private static DesugaredThrowableMethodCallCounter countDesugaredThrowableMethodCalls(
      byte[] content, ClassLoader loader) {
    ClassReader reader = new ClassReader(content);
    DesugaredThrowableMethodCallCounter counter = new DesugaredThrowableMethodCallCounter(loader);
    reader.accept(counter, 0);
    return counter;
  }

  /**
   * Check whether java.lang.AutoCloseable is used as arguments of any method.
   * After desugaring, no method parameter should remain typed AutoCloseable.
   */
  private static boolean hasAutoCloseable(byte[] classContent) {
    ClassReader reader = new ClassReader(classContent);
    final AtomicInteger counter = new AtomicInteger();
    ClassVisitor visitor =
        new ClassVisitor(Opcodes.ASM5) {
          @Override
          public MethodVisitor visitMethod(
              int access, String name, String desc, String signature, String[] exceptions) {
            for (Type argumentType : Type.getArgumentTypes(desc)) {
              if ("Ljava/lang/AutoCloseable;".equals(argumentType.getDescriptor())) {
                counter.incrementAndGet();
              }
            }
            return null;
          }
        };
    reader.accept(visitor, 0);
    return counter.get() > 0;
  }

  /**
   * ASM visitor that tallies, per method signature, invocations of the Throwable
   * suppression/print APIs and of their ThrowableExtension replacements, plus the
   * number of synthetic $closeResource(Throwable, AutoCloseable) helpers seen.
   */
  private static class DesugaredThrowableMethodCallCounter extends ClassVisitor {
    // Used to resolve owner class names when deciding Throwable-assignability.
    private final ClassLoader classLoader;
    // Key: method name + descriptor; value: invocation count. Pre-seeded with both
    // the original Throwable signatures and their ThrowableExtension counterparts.
    private final Map<String, AtomicInteger> counterMap;
    private int syntheticCloseResourceCount;

    public DesugaredThrowableMethodCallCounter(ClassLoader loader) {
      super(ASM5);
      classLoader = loader;
      counterMap = new HashMap<>();
      TryWithResourcesRewriter.TARGET_METHODS
          .entries()
          .forEach(entry -> counterMap.put(entry.getKey() + entry.getValue(), new AtomicInteger()));
      TryWithResourcesRewriter.TARGET_METHODS
          .entries()
          .forEach(
              entry ->
                  counterMap.put(
                      entry.getKey()
                          + TryWithResourcesRewriter.METHOD_DESC_MAP.get(entry.getValue()),
                      new AtomicInteger()));
    }

    @Override
    public MethodVisitor visitMethod(
        int access, String name, String desc, String signature, String[] exceptions) {
      // Recognize javac's synthetic static $closeResource(Throwable, <resource>) helper.
      if (BitFlags.isSet(access, Opcodes.ACC_SYNTHETIC | Opcodes.ACC_STATIC)
          && name.equals("$closeResource")
          && Type.getArgumentTypes(desc).length == 2
          && Type.getArgumentTypes(desc)[0].getDescriptor().equals("Ljava/lang/Throwable;")) {
        ++syntheticCloseResourceCount;
      }
      return new InvokeCounter();
    }

    /** Per-method visitor that increments counterMap for each tracked invocation. */
    private class InvokeCounter extends MethodVisitor {

      public InvokeCounter() {
        super(ASM5);
      }

      private boolean isAssignableToThrowable(String owner) {
        try {
          Class<?> ownerClass = classLoader.loadClass(owner.replace('/', '.'));
          return Throwable.class.isAssignableFrom(ownerClass);
        } catch (ClassNotFoundException e) {
          throw new AssertionError(e);
        }
      }

      @Override
      public void visitMethodInsn(
          int opcode, String owner, String name, String desc, boolean itf) {
        String signature = name + desc;
        // Count virtual calls on Throwable subclasses and static calls on ThrowableExtension.
        if ((opcode == INVOKEVIRTUAL && isAssignableToThrowable(owner))
            || (opcode == INVOKESTATIC
                && Type.getInternalName(ThrowableExtension.class).equals(owner))) {
          AtomicInteger counter = counterMap.get(signature);
          if (counter == null) {
            // Not one of the tracked signatures.
            return;
          }
          counter.incrementAndGet();
        }
      }
    }

    public int getSyntheticCloseResourceCount() {
      return syntheticCloseResourceCount;
    }

    public int countThrowableAddSuppressed() {
      return counterMap.get("addSuppressed(Ljava/lang/Throwable;)V").get();
    }

    public int countThrowableGetSuppressed() {
      return counterMap.get("getSuppressed()[Ljava/lang/Throwable;").get();
    }

    public int countThrowablePrintStackTrace() {
      return counterMap.get("printStackTrace()V").get();
    }

    public int countThrowablePrintStackTracePrintStream() {
      return counterMap.get("printStackTrace(Ljava/io/PrintStream;)V").get();
    }

    public int countThrowablePrintStackTracePrintWriter() {
      return counterMap.get("printStackTrace(Ljava/io/PrintWriter;)V").get();
    }

    public int countExtAddSuppressed() {
      return counterMap.get("addSuppressed(Ljava/lang/Throwable;Ljava/lang/Throwable;)V").get();
    }

    public int countExtGetSuppressed() {
      return counterMap.get("getSuppressed(Ljava/lang/Throwable;)[Ljava/lang/Throwable;").get();
    }

    public int countExtPrintStackTrace() {
      return counterMap.get("printStackTrace(Ljava/lang/Throwable;)V").get();
    }

    public int countExtPrintStackTracePrintStream() {
      return counterMap.get("printStackTrace(Ljava/lang/Throwable;Ljava/io/PrintStream;)V").get();
    }

    public int countExtPrintStackTracePrintWriter() {
      return counterMap.get("printStackTrace(Ljava/lang/Throwable;Ljava/io/PrintWriter;)V").get();
    }
  }

  /**
   * ClassLoader that intercepts loading of one target class, runs it through
   * TryWithResourcesRewriter, and records the rewriter's statistics (visited
   * exception types, invocation count) plus the desugared bytes.
   */
  private static class DesugaringClassLoader extends ClassLoader {

    private final String targetedClassName;
    private Class<?> klass;
    // Desugared bytecode of the targeted class (filled on first findClass).
    private byte[] classContent;
    // Exception internal names the rewriter visited during rewriting.
    private final Set<String> visitedExceptionTypes = new HashSet<>();
    // Number of try-with-resources constructs the rewriter processed.
    private final AtomicInteger numOfTryWithResourcesInvoked = new AtomicInteger();

    public DesugaringClassLoader(String targetedClassName) {
      super(DesugaringClassLoader.class.getClassLoader());
      this.targetedClassName = targetedClassName;
    }

    @Override
    protected Class<?> findClass(String name) throws ClassNotFoundException {
      if (name.equals(targetedClassName)) {
        if (klass != null) {
          return klass;
        }
        // desugar the class, and return the desugared one.
        classContent = desugarTryWithResources(name);
        klass = defineClass(name, classContent, 0, classContent.length);
        return klass;
      } else {
        return super.findClass(name);
      }
    }

    private byte[] desugarTryWithResources(String className) {
      try {
        ClassReader reader = new ClassReader(className);
        CloseResourceMethodScanner scanner = new CloseResourceMethodScanner();
        // First pass: detect whether javac emitted a $closeResource helper.
        reader.accept(scanner, ClassReader.SKIP_DEBUG);
        ClassWriter writer = new ClassWriter(reader, COMPUTE_MAXS);
        // Second pass: rewrite try-with-resources constructs into ThrowableExtension calls.
        TryWithResourcesRewriter rewriter =
            new TryWithResourcesRewriter(
                writer,
                TryWithResourcesRewriterTest.class.getClassLoader(),
                visitedExceptionTypes,
                numOfTryWithResourcesInvoked,
                scanner.hasCloseResourceMethod());
        reader.accept(rewriter, 0);
        return writer.toByteArray();
      } catch (IOException e) {
        fail(e.toString());
        return null; // suppress compiler error.
      }
    }
  }
}
///////////////////////////////////////////////////////////////////////////////
//
//   AutobahnJava - http://crossbar.io/autobahn
//
//   Copyright (c) Crossbar.io Technologies GmbH and contributors
//
//   Licensed under the MIT License.
//   http://www.opensource.org/licenses/mit-license.php
//
///////////////////////////////////////////////////////////////////////////////

package io.crossbar.autobahn.wamp.transports;

import java.net.URI;
import java.util.List;
import java.util.concurrent.TimeUnit;

import javax.net.ssl.SSLException;

import io.crossbar.autobahn.utils.ABLogger;
import io.crossbar.autobahn.utils.IABLogger;
import io.crossbar.autobahn.wamp.interfaces.ITransport;
import io.crossbar.autobahn.wamp.interfaces.ITransportHandler;
import io.crossbar.autobahn.wamp.serializers.CBORSerializer;
import io.crossbar.autobahn.wamp.serializers.JSONSerializer;
import io.crossbar.autobahn.wamp.serializers.MessagePackSerializer;
import io.crossbar.autobahn.wamp.types.CloseDetails;
import io.crossbar.autobahn.wamp.types.TransportOptions;
import io.crossbar.autobahn.wamp.types.WebSocketOptions;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketClientHandshaker;
import io.netty.handler.codec.http.websocketx.WebSocketClientHandshakerFactory;
import io.netty.handler.codec.http.websocketx.WebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketVersion;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import io.netty.handler.timeout.IdleStateHandler;

/**
 * Netty-based WebSocket transport for WAMP sessions.
 *
 * <p>The negotiated WAMP serializers are advertised as WebSocket subprotocols
 * during the opening handshake (comma-separated list).
 */
public class NettyWebSocket implements ITransport {

    private static final IABLogger LOGGER = ABLogger.getLogger(NettyWebSocket.class.getName());

    // Default subprotocol preference order: CBOR, MessagePack, JSON.
    private static final String SERIALIZERS_DEFAULT = String.format(
            "%s,%s,%s", CBORSerializer.NAME, MessagePackSerializer.NAME, JSONSerializer.NAME);

    private Channel mChannel;
    private NettyWebSocketClientHandler mHandler;
    private NioEventLoopGroup mGroup;

    private final String mUri;
    private WebSocketOptions mOptions;
    private String mSerializers;

    public NettyWebSocket(String uri) {
        this(uri, (WebSocketOptions) null);
    }

    public NettyWebSocket(String uri, List<String> serializers) {
        this(uri, serializers, null);
    }

    @Deprecated
    public NettyWebSocket(String uri, WebSocketOptions options) {
        this(uri, null, options);
    }

    @Deprecated
    public NettyWebSocket(String uri, List<String> serializers, WebSocketOptions options) {
        mUri = uri;
        if (serializers == null) {
            mSerializers = SERIALIZERS_DEFAULT;
        } else {
            // FIX: the previous manual StringBuilder loop produced a trailing comma
            // ("a,b,c,"), which advertises an empty subprotocol entry in the handshake
            // and is inconsistent with SERIALIZERS_DEFAULT. String.join emits no
            // trailing separator.
            mSerializers = String.join(",", serializers);
        }
        if (options == null) {
            mOptions = new WebSocketOptions();
        } else {
            mOptions = options;
        }
    }

    /**
     * Rejects non-WS(S) URIs and returns the effective port (default 80 for ws,
     * 443 for wss when none is given).
     *
     * @throws IllegalArgumentException if the scheme is not ws or wss
     */
    private int validateURIAndGetPort(URI uri) {
        String scheme = uri.getScheme();
        if (!"ws".equalsIgnoreCase(scheme) && !"wss".equalsIgnoreCase(scheme)) {
            throw new IllegalArgumentException("Only WS(S) is supported.");
        }
        int port = uri.getPort();
        if (port == -1) {
            if ("ws".equalsIgnoreCase(scheme)) {
                port = 80;
            } else if ("wss".equalsIgnoreCase(scheme)) {
                port = 443;
            }
        }
        return port;
    }

    // NOTE(review): uses InsecureTrustManagerFactory, i.e. server certificates are NOT
    // verified for wss — acceptable for testing only; confirm intent for production use.
    private SslContext getSSLContext(String scheme) throws SSLException {
        return "wss".equalsIgnoreCase(scheme)
                ? SslContextBuilder.forClient().trustManager(
                        InsecureTrustManagerFactory.INSTANCE).build()
                : null;
    }

    @Override
    public void connect(ITransportHandler transportHandler) throws Exception {
        connect(transportHandler, new TransportOptions());
    }

    @Override
    public void connect(ITransportHandler transportHandler, TransportOptions options)
            throws Exception {
        if (options == null) {
            if (mOptions == null) {
                options = new TransportOptions();
            } else {
                // Carry over options given via the deprecated constructor.
                options = new TransportOptions();
                options.setAutoPingInterval(mOptions.getAutoPingInterval());
                options.setAutoPingTimeout(mOptions.getAutoPingTimeout());
                options.setMaxFramePayloadSize(mOptions.getMaxFramePayloadSize());
            }
        }

        URI uri;
        uri = new URI(mUri);
        int port = validateURIAndGetPort(uri);
        String scheme = uri.getScheme();
        String host = uri.getHost();

        final SslContext sslContext = getSSLContext(scheme);

        WebSocketClientHandshaker handshaker = WebSocketClientHandshakerFactory.newHandshaker(
                uri, WebSocketVersion.V13, mSerializers, true, new DefaultHttpHeaders(),
                options.getMaxFramePayloadSize());
        mHandler = new NettyWebSocketClientHandler(handshaker, this, transportHandler);

        mGroup = new NioEventLoopGroup();
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.group(mGroup);
        bootstrap.channel(NioSocketChannel.class);
        TransportOptions opt = options;
        bootstrap.handler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) throws Exception {
                ChannelPipeline channelPipeline = ch.pipeline();
                if (sslContext != null) {
                    channelPipeline.addLast(sslContext.newHandler(ch.alloc(), host, port));
                }
                // IdleStateHandler drives the auto-ping/keepalive timeouts.
                channelPipeline.addLast(
                        new HttpClientCodec(),
                        new HttpObjectAggregator(8192),
                        new IdleStateHandler(
                                opt.getAutoPingInterval() + opt.getAutoPingTimeout(),
                                opt.getAutoPingInterval(), 0, TimeUnit.SECONDS),
                        mHandler);
            }
        });

        ChannelFuture f = bootstrap.connect(uri.getHost(), port);
        f.addListener((ChannelFutureListener) connectFuture -> {
            Throwable connectCause = connectFuture.cause();
            if (connectCause != null) {
                transportHandler.onDisconnect(false);
            } else {
                // mChannel is assigned asynchronously; send()/isOpen() before the
                // connect completes will see null.
                mChannel = f.channel();
            }
        });
    }

    @Override
    public void send(byte[] payload, boolean isBinary) {
        WebSocketFrame frame;
        if (isBinary) {
            frame = new BinaryWebSocketFrame(toByteBuf(payload));
        } else {
            frame = new TextWebSocketFrame(toByteBuf(payload));
        }
        mChannel.writeAndFlush(frame);
    }

    @Override
    public boolean isOpen() {
        return mChannel != null && mChannel.isOpen();
    }

    @Override
    public void close() throws Exception {
        LOGGER.v("close()");
        if (mHandler != null && mChannel != null) {
            mHandler.close(mChannel, true, new CloseDetails(CloseDetails.REASON_DEFAULT, null));
        }
        if (mGroup != null) {
            mGroup.shutdownGracefully().sync();
            mGroup = null;
        }
    }

    @Override
    public void abort() throws Exception {
        LOGGER.v("abort()");
        close();
    }

    @Override
    public void setOptions(TransportOptions options) {
        throw new UnsupportedOperationException(
                "Not implemented yet, provide options using connect() instead");
    }

    private ByteBuf toByteBuf(byte[] bytes) {
        return Unpooled.copiedBuffer(bytes);
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.engine.test.bpmn.usertask;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.flowable.common.engine.api.scope.ScopeTypes;
import org.flowable.common.engine.impl.history.HistoryLevel;
import org.flowable.common.engine.impl.interceptor.CommandExecutor;
import org.flowable.engine.impl.test.HistoryTestHelper;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.interceptor.CreateUserTaskAfterContext;
import org.flowable.engine.interceptor.CreateUserTaskBeforeContext;
import org.flowable.engine.interceptor.CreateUserTaskInterceptor;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.entitylink.api.EntityLink;
import org.flowable.entitylink.api.EntityLinkService;
import org.flowable.entitylink.api.EntityLinkType;
import org.flowable.entitylink.api.HierarchyType;
import org.flowable.identitylink.api.IdentityLink;
import org.flowable.identitylink.api.IdentityLinkType;
import org.flowable.task.api.Task;
import org.flowable.task.api.history.HistoricTaskInstance;
import org.junit.jupiter.api.Test;

/**
 * Tests for BPMN user task behavior: task properties, entity links, categories,
 * assignment expressions, non-string task properties, and the create-user-task
 * interceptor hooks.
 *
 * @author Joram Barrez
 */
public class UserTaskTest extends PluggableFlowableTestCase {

    @Test
    @Deployment
    public void testTaskPropertiesNotNull() {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");

        org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
        assertThat(task.getId()).isNotNull();
        assertThat(task.getName()).isEqualTo("my task");
        assertThat(task.getDescription()).isEqualTo("Very important");
        assertThat(task.getPriority()).isGreaterThan(0);
        assertThat(task.getAssignee()).isEqualTo("kermit");
        assertThat(task.getProcessInstanceId()).isEqualTo(processInstance.getId());
        assertThat(task.getProcessDefinitionId()).isNotNull();
        assertThat(task.getTaskDefinitionKey()).isNotNull();
        assertThat(task.getCreateTime()).isNotNull();

        // the next test verifies that if an execution creates a task, that no events are created during creation of the task.
        if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
            assertThat(taskService.getTaskEvents(task.getId())).isEmpty();
        }
    }

    @Test
    @Deployment
    public void testEntityLinkCreated() {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");

        org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
        assertThat(task.getId()).isNotNull();
        assertThat(task.getName()).isEqualTo("my task");
        assertThat(task.getDescription()).isEqualTo("Very important");
        assertThat(task.getPriority()).isGreaterThan(0);
        assertThat(task.getAssignee()).isEqualTo("kermit");
        assertThat(task.getProcessInstanceId()).isEqualTo(processInstance.getId());
        assertThat(task.getProcessDefinitionId()).isNotNull();
        assertThat(task.getTaskDefinitionKey()).isNotNull();
        assertThat(task.getCreateTime()).isNotNull();

        // Creating a user task must register exactly one CHILD entity link on the
        // process instance, rooted at the hierarchy ROOT.
        CommandExecutor commandExecutor = processEngine.getProcessEngineConfiguration().getCommandExecutor();
        List<EntityLink> entityLinksByScopeIdAndType = commandExecutor.execute(commandContext -> {
            EntityLinkService entityLinkService = processEngineConfiguration.getEntityLinkServiceConfiguration().getEntityLinkService();
            return entityLinkService.findEntityLinksByScopeIdAndType(processInstance.getId(), ScopeTypes.BPMN, EntityLinkType.CHILD);
        });

        assertThat(entityLinksByScopeIdAndType).hasSize(1);
        assertThat(entityLinksByScopeIdAndType.get(0).getHierarchyType()).isEqualTo(HierarchyType.ROOT);
    }

    @Test
    @Deployment
    public void testQuerySortingWithParameter() {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
        assertThat(taskService.createTaskQuery().processInstanceId(processInstance.getId()).list()).hasSize(1);
    }

    @Test
    @Deployment
    public void testCompleteAfterParallelGateway() throws InterruptedException {
        // related to https://activiti.atlassian.net/browse/ACT-1054

        // start the process
        runtimeService.startProcessInstanceByKey("ForkProcess");
        List<org.flowable.task.api.Task> taskList = taskService.createTaskQuery().list();
        assertThat(taskList).isNotNull();
        assertThat(taskList).hasSize(2);

        // make sure user task exists
        org.flowable.task.api.Task task = taskService.createTaskQuery().taskDefinitionKey("SimpleUser").singleResult();
        assertThat(task).isNotNull();

        // attempt to complete the task and get PersistenceException pointing to
        // "referential integrity constraint violation"
        taskService.complete(task.getId());
    }

    @Test
    @Deployment
    public void testTaskCategory() {
        runtimeService.startProcessInstanceByKey("testTaskCategory");
        org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();

        // Test if the property set in the model is shown in the task
        String testCategory = "My Category";
        assertThat(task.getCategory()).isEqualTo(testCategory);

        // Test if can be queried by query API
        assertThat(taskService.createTaskQuery().taskCategory(testCategory).singleResult().getName()).isEqualTo("Task with category");
        assertThat(taskService.createTaskQuery().taskCategory("Does not exist").count()).isZero();

        if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.AUDIT, processEngineConfiguration)) {
            // Check historic task
            HistoricTaskInstance historicTaskInstance = historyService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult();
            assertThat(historicTaskInstance.getCategory()).isEqualTo(testCategory);
            assertThat(historyService.createHistoricTaskInstanceQuery().taskCategory(testCategory).singleResult().getName()).isEqualTo("Task with category");
            assertThat(historyService.createHistoricTaskInstanceQuery().taskCategory("Does not exist").count()).isZero();
        }

        // Update category
        String newCategory = "New Test Category";
        task.setCategory(newCategory);
        taskService.saveTask(task);

        task = taskService.createTaskQuery().singleResult();
        assertThat(task.getCategory()).isEqualTo(newCategory);
        assertThat(taskService.createTaskQuery().taskCategory(newCategory).singleResult().getName()).isEqualTo("Task with category");
        assertThat(taskService.createTaskQuery().taskCategory(testCategory).count()).isZero();

        // Complete task and verify history
        taskService.complete(task.getId());
        if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.AUDIT, processEngineConfiguration)) {
            HistoricTaskInstance historicTaskInstance = historyService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult();
            assertThat(historicTaskInstance.getCategory()).isEqualTo(newCategory);
            assertThat(historyService.createHistoricTaskInstanceQuery().taskCategory(newCategory).singleResult().getName()).isEqualTo("Task with category");
            assertThat(historyService.createHistoricTaskInstanceQuery().taskCategory(testCategory).count()).isZero();
        }
    }

    // See https://activiti.atlassian.net/browse/ACT-4041
    @Test
    public void testTaskFormKeyWhenUsingIncludeVariables() {
        deployOneTaskTestProcess();
        runtimeService.startProcessInstanceByKey("oneTaskProcess");

        // Set variables
        org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
        assertThat(task).isNotNull();
        Map<String, Object> vars = new HashMap<>();
        for (int i = 0; i < 20; i++) {
            vars.put("var" + i, i * 2);
        }
        taskService.setVariables(task.getId(), vars);

        // Set form key
        task = taskService.createTaskQuery().singleResult();
        task.setFormKey("test123");
        taskService.saveTask(task);

        // Verify query and check form key
        task = taskService.createTaskQuery().includeProcessVariables().singleResult();
        assertThat(task.getProcessVariables().size()).isEqualTo(vars.size());
        assertThat(task.getFormKey()).isEqualTo("test123");
    }

    @Test
    @Deployment
    public void testEmptyAssignmentExpression() {
        // Null-valued assignment expressions must resolve to an unassigned task
        // with no identity links.
        Map<String, Object> variableMap = new HashMap<>();
        variableMap.put("assignee", null);
        variableMap.put("candidateUsers", null);
        variableMap.put("candidateGroups", null);
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", variableMap);
        assertThat(processInstance).isNotNull();
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task).isNotNull();
        assertThat(task.getAssignee()).isNull();
        List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(task.getId());
        assertThat(identityLinks).isEmpty();

        // Empty-string expressions behave the same as null.
        variableMap = new HashMap<>();
        variableMap.put("assignee", "");
        variableMap.put("candidateUsers", "");
        variableMap.put("candidateGroups", "");
        processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", variableMap);
        assertThat(processInstance).isNotNull();
        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task).isNotNull();
        assertThat(task.getAssignee()).isNull();
        identityLinks = taskService.getIdentityLinksForTask(task.getId());
        assertThat(identityLinks).isEmpty();
    }

    @Test
    @Deployment
    public void testNonStringProperties() {
        // Integer-valued expressions for task properties must be converted to strings.
        Map<String, Object> vars = new HashMap<>();
        vars.put("taskName", 1);
        vars.put("taskDescription", 2);
        vars.put("taskCategory", 3);
        vars.put("taskFormKey", 4);
        vars.put("taskAssignee", 5);
        vars.put("taskOwner", 6);
        vars.put("taskCandidateGroups", 7);
        vars.put("taskCandidateUsers", 8);
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("nonStringProperties", vars);

        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertThat(task.getName()).isEqualTo("1");
        assertThat(task.getDescription()).isEqualTo("2");
        assertThat(task.getCategory()).isEqualTo("3");
        assertThat(task.getFormKey()).isEqualTo("4");
        assertThat(task.getAssignee()).isEqualTo("5");
        assertThat(task.getOwner()).isEqualTo("6");

        // 4 links expected: assignee + owner + 2 candidates (group "7" and user "8").
        List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(task.getId());
        assertThat(identityLinks).hasSize(4);
        int candidateIdentityLinkCount = 0;
        for (IdentityLink identityLink : identityLinks) {
            if (identityLink.getType().equals(IdentityLinkType.CANDIDATE)) {
                candidateIdentityLinkCount++;
                if (identityLink.getGroupId() != null) {
                    assertThat(identityLink.getGroupId()).isEqualTo("7");
                } else {
                    assertThat(identityLink.getUserId()).isEqualTo("8");
                }
            }
        }
        assertThat(candidateIdentityLinkCount).isEqualTo(2);
    }

    @Test
    @Deployment(resources="org/flowable/engine/test/bpmn/usertask/UserTaskTest.testTaskPropertiesNotNull.bpmn20.xml")
    public void testCreateUserTaskInterceptor() throws Exception {
        TestCreateUserTaskInterceptor testCreateUserTaskInterceptor = new TestCreateUserTaskInterceptor();
        processEngineConfiguration.setCreateUserTaskInterceptor(testCreateUserTaskInterceptor);

        // The interceptor is engine-global state; reset it in finally so other
        // tests are not affected.
        try {
            ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");

            org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
            assertThat(task.getId()).isNotNull();
            assertThat(task.getName()).isEqualTo("my task");
            assertThat(task.getDescription()).isEqualTo("Very important");
            assertThat(task.getCategory()).isEqualTo("testCategory");

            assertThat(testCreateUserTaskInterceptor.getBeforeCreateUserTaskCounter()).isEqualTo(1);
            assertThat(testCreateUserTaskInterceptor.getAfterCreateUserTaskCounter()).isEqualTo(1);

        } finally {
            processEngineConfiguration.setCreateUserTaskInterceptor(null);
        }
    }

    @Test
    @Deployment(resources="org/flowable/engine/test/bpmn/usertask/UserTaskTest.userTaskIdVariableName.bpmn20.xml")
    public void testUserTaskIdVariableName() throws Exception {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("userTaskIdVariableName");

        // Normal string
        Task firstTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).taskDefinitionKey("task1").singleResult();
        assertThat(firstTask).isNotNull();

        String actualTaskId = firstTask.getId();
        String myTaskId = (String) runtimeService.getVariable(processInstance.getId(), "myTaskId");
        assertThat(myTaskId).isEqualTo(actualTaskId);

        // Expression
        Task secondTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).taskDefinitionKey("task2").singleResult();
        assertThat(secondTask).isNotNull();

        actualTaskId = secondTask.getId();
        String myExpressionTaskId = (String) runtimeService.getVariable(processInstance.getId(), "myExpressionTaskId");
        assertThat(myExpressionTaskId).isEqualTo(actualTaskId);
    }

    /**
     * Counting interceptor used by {@link #testCreateUserTaskInterceptor()}; sets the
     * task category in the before-hook so the test can observe the interceptor ran.
     */
    protected class TestCreateUserTaskInterceptor implements CreateUserTaskInterceptor {

        protected int beforeCreateUserTaskCounter = 0;
        protected int afterCreateUserTaskCounter = 0;

        @Override
        public void beforeCreateUserTask(CreateUserTaskBeforeContext context) {
            beforeCreateUserTaskCounter++;
            context.setCategory("testCategory");
        }

        @Override
        public void afterCreateUserTask(CreateUserTaskAfterContext context) {
            afterCreateUserTaskCounter++;
        }

        public int getBeforeCreateUserTaskCounter() {
            return beforeCreateUserTaskCounter;
        }

        public int getAfterCreateUserTaskCounter() {
            return afterCreateUserTaskCounter;
        }
    }
}
package org.vaadin.patrik.client; import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; import com.google.gwt.user.client.ui.Widget; import com.vaadin.client.ui.VCheckBox; import com.vaadin.client.ui.VColorPicker; import com.vaadin.client.ui.VComboBox; import com.vaadin.client.ui.VNativeSelect; import com.vaadin.client.ui.VPopupCalendar; import com.vaadin.client.ui.VPopupTimeCalendar; import com.vaadin.client.ui.VTextArea; import com.vaadin.client.ui.VTextField; public class EditorWidgets { private static final Logger logger = Logger.getLogger("EditorWidgets"); /** * This is interface for WidgetHandler. Purpose of the WidgetHandlers is to give * uniform API of Editor widgets for the internal logic of the GridFastNavigation. * This is necessary since unfortunately there are API differences between * different editor widgets. * * Use registerHandler(..) method to register new handlers * * @author Tatu Lund * * @param <T> The type parameter, i.e. editor widget class to be wrapped */ public interface WidgetHandler<T extends Widget> { /** * Select the content of the field. If it is not possible implement as NOP. 
* * @param widget The editor widget */ void selectAll(T widget); /** * Get value of the editor widget * * @param widget The editor widget * @return Current value of the editor widget */ String getValue(T widget); /** * Set the value of the Editor widget * * @param widget The editor widget * @param value Value as string */ void setValue(T widget, String value); /** * Force focus to widget * * @param widget The editor widget */ void focus(T widget); /** * Make the widget editable * * @param widget The editor widget */ void enable(T widget); /** * Make the widget uneditable * * @param widget The editor widget */ void disable(T widget); /** * Return true, if it is more natural that with this * widget cursor up/down should not change Grid row * * @param widget The editor widget * @return return true/false */ boolean isUpDownNavAllowed(T widget); /** * Return true if widget is currently editable * * @param widget The editor widget * @return true/false */ boolean isReadOnly(T widget); } private static final Map<Class<?>, WidgetHandler<?>> widgetHandlers; // // Magic happens here: statically assign handlers for supported widget types // This enables support for value revert, append and selectAll // /** * Register a new widget handler. 
Purpose * * @param <T> Type of the widget * @param clazz Class name of the editor widget to be registered * @param handler The handler, implements WidgetHandler interface */ public static <T extends Widget> void registerHandler(Class<T> clazz, WidgetHandler<T> handler) { widgetHandlers.put(clazz, handler); } static { widgetHandlers = new HashMap<Class<?>, WidgetHandler<?>>(); registerHandler(VTextField.class, new WidgetHandler<VTextField>() { @Override public void selectAll(VTextField widget) { if (widget.isEnabled()) { widget.selectAll(); } } @Override public String getValue(VTextField widget) { return widget.getValue(); } @Override public void setValue(VTextField widget, String value) { widget.setValue(value); widget.getElement().blur(); widget.getElement().focus(); } public void focus(VTextField widget) { if (widget.isEnabled()) { widget.getElement().blur(); widget.getElement().focus(); } } @Override public void enable(VTextField widget) { widget.setEnabled(true); widget.setReadOnly(false); } @Override public void disable(VTextField widget) { widget.setEnabled(false); widget.setReadOnly(true); } @Override public boolean isUpDownNavAllowed(VTextField widget) { return true; } @Override public boolean isReadOnly(VTextField widget) { return widget.isReadOnly(); } }); registerHandler(VTextArea.class, new WidgetHandler<VTextArea>() { @Override public void selectAll(VTextArea widget) { if (widget.isEnabled()) { widget.selectAll(); } } @Override public String getValue(VTextArea widget) { return widget.getValue(); } @Override public void setValue(VTextArea widget, String value) { widget.setValue(value); widget.getElement().blur(); widget.getElement().focus(); } public void focus(VTextArea widget) { if (widget.isEnabled()) { widget.getElement().blur(); widget.getElement().focus(); } } @Override public void enable(VTextArea widget) { widget.setEnabled(true); widget.setReadOnly(false); } @Override public void disable(VTextArea widget) { widget.setEnabled(false); 
widget.setReadOnly(true);
            }

            @Override
            public boolean isUpDownNavAllowed(VTextArea widget) {
                return false;
            }

            @Override
            public boolean isReadOnly(VTextArea widget) {
                return widget.isReadOnly();
            }
        });

        // Native <select> widget: value/enabled state lives on the inner ListBox.
        registerHandler(VNativeSelect.class, new WidgetHandler<VNativeSelect>() {

            @Override
            public void selectAll(VNativeSelect widget) {
            }

            @Override
            public String getValue(VNativeSelect widget) {
                return widget.getListBox().getSelectedValue();
            }

            @Override
            public void setValue(VNativeSelect widget, String value) {
                widget.setSelectedItem(value);
            }

            @Override
            public void focus(VNativeSelect widget) {
                if (widget.getListBox().isEnabled()) {
                    widget.getElement().blur();
                    widget.focus();
                }
            }

            @Override
            public void enable(VNativeSelect widget) {
                widget.getListBox().setEnabled(true);
            }

            @Override
            public void disable(VNativeSelect widget) {
                widget.getListBox().setEnabled(false);
            }

            @Override
            public boolean isUpDownNavAllowed(VNativeSelect widget) {
                return false;
            }

            @Override
            public boolean isReadOnly(VNativeSelect widget) {
                // "read-only" is modelled as the inner ListBox being disabled
                return !widget.getListBox().isEnabled();
            }
        });

        // Combo box: delegates to the embedded text box (widget.tb).
        // NOTE(review): this handler writes widget.enabled directly — presumably a
        // public field on VComboBox; confirm against the widget implementation.
        registerHandler(VComboBox.class, new WidgetHandler<VComboBox>() {

            @Override
            public void selectAll(VComboBox widget) {
                if (!widget.tb.isReadOnly()) {
                    widget.tb.selectAll();
                }
            }

            @Override
            public String getValue(VComboBox widget) {
                return widget.tb.getValue();
            }

            @Override
            public void setValue(VComboBox widget, String value) {
                widget.tb.setValue(value);
            }

            @Override
            public void focus(VComboBox widget) {
                if (widget.enabled) {
                    widget.getElement().blur();
                    widget.focus();
                }
            }

            @Override
            public void enable(VComboBox widget) {
                widget.enabled = true;
                widget.tb.setEnabled(true);
            }

            @Override
            public void disable(VComboBox widget) {
                widget.enabled = false;
                widget.tb.setEnabled(false);
            }

            @Override
            public boolean isUpDownNavAllowed(VComboBox widget) {
                return false;
            }

            @Override
            public boolean isReadOnly(VComboBox widget) {
                return widget.tb.isReadOnly();
            }
        });

        // Color picker: value is the color string; read-only mirrors !enabled.
        registerHandler(VColorPicker.class, new WidgetHandler<VColorPicker>() {

            @Override
            public void selectAll(VColorPicker widget) {
            }

            @Override
            public String getValue(VColorPicker widget) {
                return widget.getColor();
            }

            @Override
            public void setValue(VColorPicker widget, String value) {
                widget.setColor(value);
            }

            @Override
            public void focus(VColorPicker widget) {
                if (widget.isEnabled()) {
                    widget.getElement().blur();
                    widget.getElement().focus();
                }
            }

            @Override
            public void enable(VColorPicker widget) {
                widget.setEnabled(true);
            }

            @Override
            public void disable(VColorPicker widget) {
                widget.setEnabled(false);
            }

            @Override
            public boolean isUpDownNavAllowed(VColorPicker widget) {
                return false;
            }

            @Override
            public boolean isReadOnly(VColorPicker widget) {
                return !widget.isEnabled();
            }
        });

        // Check box: boolean state serialized as the strings "true"/"false".
        registerHandler(VCheckBox.class, new WidgetHandler<VCheckBox>() {

            @Override
            public String getValue(VCheckBox widget) {
                String value = "";
                if (widget.getValue())
                    value = "true";
                else
                    value = "false";
                return value;
            }

            @Override
            public void setValue(VCheckBox widget, String valueText) {
                // anything other than exactly "true" maps to false
                Boolean value = false;
                if ("true".equals(valueText))
                    value = true;
                widget.setValue(value);
            }

            @Override
            public void focus(VCheckBox widget) {
                if (widget.isEnabled()) {
                    widget.getElement().blur();
                    widget.getElement().focus();
                }
            }

            @Override
            public void enable(VCheckBox widget) {
                widget.setEnabled(true);
            }

            @Override
            public void disable(VCheckBox widget) {
                widget.setEnabled(false);
            }

            @Override
            public boolean isUpDownNavAllowed(VCheckBox widget) {
                // up/down keys may move editor focus; the checkbox doesn't use them
                return true;
            }

            @Override
            public void selectAll(VCheckBox widget) {
                // TODO Auto-generated method stub
            }

            @Override
            public boolean isReadOnly(VCheckBox widget) {
                return !widget.isEnabled();
            }
        });

        // Date picker: text entry box plus a popup calendar overlay.
        registerHandler(VPopupCalendar.class,
                new WidgetHandler<VPopupCalendar>() {

            @Override
            public void selectAll(VPopupCalendar widget) {
                if (widget.isEnabled()) {
                    widget.text.selectAll();
                }
            }

            @Override
            public String getValue(VPopupCalendar widget) {
                return widget.text.getValue();
            }

            @Override
            public void setValue(VPopupCalendar widget, String value) {
                widget.text.setValue(value);
            }

            @Override
            public void focus(VPopupCalendar widget) {
                // Only perform blur/focus refocusing if calendar popup
                // is not visible
                if (widget.isEnabled() && !widget.calendar.isAttached()) {
                    widget.getElement().blur();
                    widget.getElement().focus();
                }
            }

            @Override
            public void enable(VPopupCalendar widget) {
                widget.setEnabled(true);
                widget.setReadonly(false);
            }

            @Override
            public void disable(VPopupCalendar widget) {
                widget.setEnabled(false);
                widget.setReadonly(true);
            }

            @Override
            public boolean isUpDownNavAllowed(VPopupCalendar widget) {
                return false;
            }

            @Override
            public boolean isReadOnly(VPopupCalendar widget) {
                return widget.isReadonly();
            }
        });

        // Date+time picker: identical contract to VPopupCalendar above.
        registerHandler(VPopupTimeCalendar.class,
                new WidgetHandler<VPopupTimeCalendar>() {

            @Override
            public void selectAll(VPopupTimeCalendar widget) {
                if (widget.isEnabled()) {
                    widget.text.selectAll();
                }
            }

            @Override
            public String getValue(VPopupTimeCalendar widget) {
                return widget.text.getValue();
            }

            @Override
            public void setValue(VPopupTimeCalendar widget, String value) {
                widget.text.setValue(value);
            }

            @Override
            public void focus(VPopupTimeCalendar widget) {
                // Only perform blur/focus refocusing if calendar popup
                // is not visible
                if (widget.isEnabled() && !widget.calendar.isAttached()) {
                    widget.getElement().blur();
                    widget.getElement().focus();
                }
            }

            @Override
            public void enable(VPopupTimeCalendar widget) {
                widget.setEnabled(true);
                widget.setReadonly(false);
            }

            @Override
            public void disable(VPopupTimeCalendar widget) {
                widget.setEnabled(false);
                widget.setReadonly(true);
            }

            @Override
            public boolean isUpDownNavAllowed(VPopupTimeCalendar widget) {
                return false;
            }

            @Override
            public boolean isReadOnly(VPopupTimeCalendar widget) {
                return widget.isReadonly();
            }
        });

        // TODO: support more widget types!
    }

    /**
     * Resolves the {@link WidgetHandler} registered for the given widget
     * class. If no handler is registered for the exact class, walks the
     * superclass chain and returns the first handler registered for an
     * ancestor. Logs a warning and returns {@code null} when nothing matches.
     * (The {@code <T>} type parameter is unused; kept for signature
     * compatibility.)
     */
    private static <T> WidgetHandler<?> getHandler(Class<?> widgetClass) {
        if (widgetHandlers.containsKey(widgetClass)) {
            return widgetHandlers.get(widgetClass);
        } else {
            for (Class<?> key : widgetHandlers.keySet()) {
                // Note: GWT does not support isAssignableFrom
                Class<?> cls = widgetClass;
                while (cls.getSuperclass() != null) {
                    if (cls.getSuperclass() == key) {
                        return widgetHandlers.get(key);
                    }
                    cls = cls.getSuperclass();
                }
            }
            logger.warning("Unhandled widget type "
                    + widgetClass.getSimpleName());
        }
        return null;
    }

    //
    // Public API
    //

    /** Selects all text in the widget, if its handler supports selection. */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> void selectAll(T widget) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                ((WidgetHandler<T>) handler).selectAll(widget);
            }
        } else {
            logger.warning("EditorWidgets.selectAll: Widget is null");
        }
    }

    /**
     * Returns the widget's current value as a string, or "" when the widget
     * is null or has no registered handler.
     */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> String getValue(T widget) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                return ((WidgetHandler<T>) handler).getValue(widget);
            }
        } else {
            logger.warning("EditorWidgets.getValue: Widget is null");
        }
        return "";
    }

    /** Sets the widget's value from its string representation. */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> void setValue(T widget, String value) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                ((WidgetHandler<T>) handler).setValue(widget, value);
            }
        } else {
            logger.warning("EditorWidgets.setValue: Widget is null");
        }
    }

    /** Gives keyboard focus to the widget via its handler. */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> void focus(T widget) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                ((WidgetHandler<T>) handler).focus(widget);
            }
        } else {
            logger.warning("EditorWidgets.focus: Widget is null");
        }
    }

    /** Enables the widget (and clears read-only state where applicable). */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> void enable(T widget) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                ((WidgetHandler<T>) handler).enable(widget);
            }
        } else {
            logger.warning("EditorWidgets.enable: Widget is null");
        }
    }

    /** Disables the widget (and sets read-only state where applicable). */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> void disable(T widget) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                ((WidgetHandler<T>) handler).disable(widget);
            }
        } else {
            logger.warning("EditorWidgets.disable: Widget is null");
        }
    }

    /**
     * Whether up/down key navigation may move focus away from this widget.
     * Defaults to true when the widget is null or unhandled.
     */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> boolean isUpDownNavAllowed(T widget) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                return ((WidgetHandler<T>) handler).isUpDownNavAllowed(widget);
            }
        } else {
            logger.warning("EditorWidgets.isUpDownNavAllowed: Widget is null");
        }
        return true;
    }

    /**
     * Whether the widget is currently read-only. Defaults to true (safe:
     * treat unknown widgets as non-editable) when null or unhandled.
     */
    @SuppressWarnings("unchecked")
    public static <T extends Widget> boolean isReadOnly(T widget) {
        if (widget != null) {
            WidgetHandler<?> handler = getHandler(widget.getClass());
            if (handler != null) {
                return ((WidgetHandler<T>) handler).isReadOnly(widget);
            }
        } else {
            logger.warning("EditorWidgets.isReadOnly: Widget is null");
        }
        return true;
    }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.lang.properties.xml;

import com.intellij.lang.properties.IProperty;
import com.intellij.lang.properties.PropertiesImplUtil;
import com.intellij.lang.properties.PropertiesUtil;
import com.intellij.lang.properties.ResourceBundle;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MostlySingularMultiMap;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * {@link PropertiesFile} view over an XML properties file in the
 * {@code java.util.Properties} XML format, i.e. a document whose root
 * contains {@code <entry key="...">value</entry>} tags.
 *
 * Properties are parsed lazily from the backing {@link XmlFile} and re-read
 * whenever the file's modification stamp changes. All access to the cached
 * state goes through {@code myLock}.
 *
 * @author Dmitry Avdeev
 */
public class XmlPropertiesFileImpl extends XmlPropertiesFile {

  public static final String ENTRY_TAG_NAME = "entry";
  private static final Key<CachedValue<PropertiesFile>> KEY = Key.create("xml properties file");
  private final XmlFile myFile;

  // Cached parse results; valid only while myFileModificationStamp matches
  // the backing file. Guarded by myLock.
  private List<IProperty> myProperties;
  private MostlySingularMultiMap<String, IProperty> myPropertiesMap;
  private boolean myAlphaSorted;
  private long myFileModificationStamp = -1L;
  private final Object myLock = new Object();

  /**
   * (Re)parses the entry tags when the cache is missing or stale.
   * Must be called under myLock. The `while` (rather than `if`) re-checks the
   * stamp after parsing in case the PSI reports a newer modification —
   * presumably to guard against concurrent document changes; confirm intent.
   */
  private void ensurePropertiesLoaded() {
    while (myFileModificationStamp != myFile.getModificationStamp() || myPropertiesMap == null) {
      myFileModificationStamp = myFile.getModificationStamp();
      MostlySingularMultiMap<String, IProperty> propertiesMap = new MostlySingularMultiMap<>();
      XmlTag rootTag = myFile.getRootTag();
      final List<IProperty> propertiesOrder = new ArrayList<>();
      if (rootTag != null) {
        XmlTag[] entries = rootTag.findSubTags(ENTRY_TAG_NAME);
        for (XmlTag entry : entries) {
          XmlProperty property = new XmlProperty(entry, this);
          propertiesOrder.add(property);
          final String key = property.getKey();
          if (key != null) {
            // duplicate keys allowed: the multimap keeps every occurrence
            propertiesMap.add(key, property);
          }
        }
      }
      myAlphaSorted = PropertiesImplUtil.isAlphaSorted(propertiesOrder);
      myProperties = propertiesOrder;
      myPropertiesMap = propertiesMap;
    }
  }

  private XmlPropertiesFileImpl(XmlFile file) {
    myFile = file;
  }

  @NotNull
  @Override
  public PsiFile getContainingFile() {
    return myFile;
  }

  /** All properties in document order (lazily loaded). */
  @NotNull
  @Override
  public List<IProperty> getProperties() {
    synchronized (myLock) {
      ensurePropertiesLoaded();
      return myProperties;
    }
  }

  /** First property with the given key, or null if absent. */
  @Override
  public IProperty findPropertyByKey(@NotNull @NonNls String key) {
    synchronized (myLock) {
      ensurePropertiesLoaded();
      Iterator<IProperty> properties = myPropertiesMap.get(key).iterator();
      return properties.hasNext() ? properties.next() : null;
    }
  }

  /** Every property with the given key (duplicates possible). */
  @NotNull
  @Override
  public List<IProperty> findPropertiesByKey(@NotNull @NonNls String key) {
    synchronized (myLock) {
      ensurePropertiesLoaded();
      return ContainerUtil.collect(myPropertiesMap.get(key).iterator());
    }
  }

  @NotNull
  @Override
  public ResourceBundle getResourceBundle() {
    return PropertiesImplUtil.getResourceBundle(this);
  }

  @NotNull
  @Override
  public Locale getLocale() {
    return PropertiesUtil.getLocale(this);
  }

  /** Adds a copy of the given property; returns the created PSI element. */
  @NotNull
  @Override
  public PsiElement addProperty(@NotNull IProperty property) throws IncorrectOperationException {
    return addProperty(property.getKey(), property.getValue()).getPsiElement().getNavigationElement();
  }

  @NotNull
  @Override
  public PsiElement addPropertyAfter(@NotNull IProperty property, @Nullable IProperty anchor) throws IncorrectOperationException {
    return addPropertyAfter(property.getKey(), property.getValue(), anchor).getPsiElement().getNavigationElement();
  }

  @Override
  public IProperty addPropertyAfter(String key, String value, IProperty anchor) {
    return addPropertyAfter(key, value, anchor, true);
  }

  /**
   * Inserts a new entry tag after {@code anchor}; with a null anchor the tag
   * goes to the end of the root tag when {@code addToEnd}, otherwise to the
   * front.
   */
  @NotNull
  public IProperty addPropertyAfter(String key, String value, @Nullable IProperty anchor, boolean addToEnd) {
    final XmlTag anchorTag = anchor == null ? null : (XmlTag)anchor.getPsiElement().getNavigationElement();
    final XmlTag rootTag = myFile.getRootTag();
    final XmlTag entry = createPropertyTag(key, value);
    final XmlTag addedEntry = (XmlTag) (anchorTag == null
                                        ? myFile.getRootTag().addSubTag(entry, !addToEnd)
                                        : rootTag.addAfter(entry, anchorTag));
    return new XmlProperty(addedEntry, this);
  }

  /**
   * Adds a property, preserving alphabetical ordering when the file is
   * currently alpha-sorted (binary-search insert); otherwise appends.
   */
  @NotNull
  @Override
  public IProperty addProperty(String key, String value) {
    final XmlTag entry = createPropertyTag(key, value);
    synchronized (myLock) {
      ensurePropertiesLoaded();
      if (myAlphaSorted) {
        final XmlProperty dummyProperty = new XmlProperty(entry, this);
        final int insertIndex = Collections.binarySearch(myProperties, dummyProperty, (p1, p2) -> {
          final String k1 = p1.getKey();
          final String k2 = p2.getKey();
          return k1.compareTo(k2);
        });
        final IProperty insertPosition;
        final IProperty inserted;
        if (insertIndex == -1) {
          // -1 means insertion point 0: new key sorts before everything
          inserted = addPropertyAfter(key, value, null, false);
          myProperties.add(0, inserted);
        } else {
          // negative result encodes -(insertionPoint) - 1; anchor on the
          // element just before the insertion point
          final int position = insertIndex < 0 ? -insertIndex - 2 : insertIndex;
          insertPosition = myProperties.get(position);
          inserted = addPropertyAfter(key, value, insertPosition, false);
          myProperties.add(position + 1, inserted);
        }
        return inserted;
      }
      else {
        return addPropertyAfter(key, value, null, true);
      }
    }
  }

  // Builds a detached <entry key="...">value</entry> tag for insertion.
  private XmlTag createPropertyTag(final String key, final String value) {
    XmlTag rootTag = myFile.getRootTag();
    XmlTag entry = rootTag.createChildTag("entry", "", value, false);
    entry.setAttribute("key", key);
    return entry;
  }

  /**
   * Cached factory: returns the PropertiesFile wrapper for an XML file, or
   * null when the file is not an XML properties file (or not XML at all).
   */
  public static PropertiesFile getPropertiesFile(@NotNull PsiFile file) {
    CachedValuesManager manager = CachedValuesManager.getManager(file.getProject());
    if (file instanceof XmlFile) {
      return manager.getCachedValue(file, KEY, () -> {
        PropertiesFile value = XmlPropertiesIndex.isPropertiesFile((XmlFile)file)
                               ? new XmlPropertiesFileImpl((XmlFile)file)
                               : null;
        return CachedValueProvider.Result.create(value, file);
      }, false );
    }
    return null;
  }

  /** Snapshot map of unescaped key -> value for all properties. */
  @NotNull
  @Override
  public Map<String, String> getNamesMap() {
    Map<String, String> result = new THashMap<>();
    for (IProperty property : getProperties()) {
      result.put(property.getUnescapedKey(), property.getValue());
    }
    return result;
  }

  @Override
  public String getName() {
    return getContainingFile().getName();
  }

  @Override
  public VirtualFile getVirtualFile() {
    return getContainingFile().getVirtualFile();
  }

  @Override
  public PsiDirectory getParent() {
    return getContainingFile().getParent();
  }

  @Override
  public Project getProject() {
    return getContainingFile().getProject();
  }

  @Override
  public String getText() {
    return getContainingFile().getText();
  }

  /** Whether the entries were alphabetically sorted at last load. */
  @Override
  public boolean isAlphaSorted() {
    synchronized (myLock) {
      ensurePropertiesLoaded();
      return myAlphaSorted;
    }
  }

  // Identity is the backing XmlFile.
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    XmlPropertiesFileImpl that = (XmlPropertiesFileImpl)o;

    if (!myFile.equals(that.myFile)) return false;

    return true;
  }

  @Override
  public int hashCode() {
    return myFile.hashCode();
  }

  @Override
  public String toString() {
    return "XmlPropertiesFileImpl:" + getName();
  }
}
/* Copyright 1995-2015 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For additional information, contact: Environmental Systems Research Institute, Inc. Attn: Contracts Dept 380 New York Street Redlands, California, USA 92373 email: contracts@esri.com */ package com.esri.geoevent.processor.geonames; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Observable; import java.util.Properties; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import com.esri.core.geometry.MapGeometry; import com.esri.core.geometry.Point; import com.esri.core.geometry.SpatialReference; import com.esri.ges.core.component.ComponentException; import com.esri.ges.core.geoevent.FieldException; import com.esri.ges.core.geoevent.GeoEvent; import com.esri.ges.core.geoevent.GeoEventPropertyName; import com.esri.ges.core.property.Property; import com.esri.ges.framework.i18n.BundleLogger; import com.esri.ges.framework.i18n.BundleLoggerFactory; import com.esri.ges.messaging.EventDestination; import com.esri.ges.messaging.EventUpdatable; import com.esri.ges.messaging.GeoEventCreator; import com.esri.ges.messaging.GeoEventListener; import com.esri.ges.messaging.GeoEventProducer; import com.esri.ges.messaging.Messaging; import com.esri.ges.messaging.MessagingException; 
import com.esri.ges.processor.GeoEventProcessorBase; import com.esri.ges.processor.GeoEventProcessorDefinition; public class GeoNamesWikipediaProcessor extends GeoEventProcessorBase implements GeoEventProducer, EventUpdatable { /** * Initialize the i18n Bundle Logger * * See {@link BundleLogger} for more info. */ GeoEventListener listener; private static final BundleLogger LOGGER = BundleLoggerFactory.getLogger(GeoNamesWikipediaProcessor.class); private long lastReport = 0; private int maxMessageRate = 500; private boolean printedWarning; Properties prop = new Properties(); private Object propertyLock = new Object(); private String geoNamesUsername; private double geoNamesWikipediaRadius; private int geoNamesWikipediaMaxRows; private double geoNamesNearbyOSMPOIsRadius; private int geoNamesNearbyOSMPOIsMaxRows; private GeoEventCreator geoEventCreator; private GeoEventProducer geoEventProducer; private Messaging messaging; protected GeoNamesWikipediaProcessor(GeoEventProcessorDefinition definition) throws ComponentException { super(definition); } @Override public void setId(String id) { super.setId(id); EventDestination evtDest = new EventDestination(id + ":event"); geoEventProducer = messaging.createGeoEventProducer(evtDest); } @Override public void afterPropertiesSet() { synchronized (propertyLock) { geoNamesUsername = "krgorton"; if (hasProperty(GeoNamesWikipediaProcessorDefinition.GEONAMES_USERNAME_PROPERTY)) { geoNamesUsername = (String) getProperty(GeoNamesWikipediaProcessorDefinition.GEONAMES_USERNAME_PROPERTY).getValue(); if (geoNamesUsername == "") geoNamesUsername = "krgorton"; } geoNamesWikipediaRadius = 10.0; if (hasProperty(GeoNamesWikipediaProcessorDefinition.GEONAMES_WIKIPEDIARADIUS_PROPERTY)) { geoNamesWikipediaRadius = (double) getProperty(GeoNamesWikipediaProcessorDefinition.GEONAMES_WIKIPEDIARADIUS_PROPERTY).getValue(); } geoNamesWikipediaMaxRows = 10; if (hasProperty(GeoNamesWikipediaProcessorDefinition.GEONAMES_WIKIPEDIAMAXROWS_PROPERTY)) { 
geoNamesWikipediaMaxRows = (int) getProperty(GeoNamesWikipediaProcessorDefinition.GEONAMES_WIKIPEDIAMAXROWS_PROPERTY).getValue(); } geoNamesNearbyOSMPOIsRadius = 1.0; } } @Override public GeoEvent process(GeoEvent geoEvent) throws Exception, MalformedURLException, JSONException { try { Point point = (Point) geoEvent.getGeometry().getGeometry(); double lon = point.getX(); double lat = point.getY(); int wkid = geoEvent.getGeometry().getSpatialReference().getID(); // fetch nearby Wikipedia articles via geoNames web service String geoNamesLang = "en"; // lang : language code (around 240 languages) (default = en) URL geoNamesWikipediaURL = new URL("http://api.geonames.org/findNearbyWikipediaJSON?lat=" + Double.toString(lat) + "&lng=" + Double.toString(lon) + "&username=" + geoNamesUsername + "&lang=" + geoNamesLang + "&radius=" + Double.toString(geoNamesWikipediaRadius) + "&maxRows=" + Integer.toString(geoNamesWikipediaMaxRows)); String responseWikipedia = getReverseGeocode(geoNamesWikipediaURL); createWikipediaGeoEvent(geoEvent, responseWikipedia); } catch (MessagingException e) { LOGGER.error("EVENT_SEND_FAILURE", e); } return null; } private String getReverseGeocode(URL url) { String output = ""; try { HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); conn.setRequestProperty("Accept", "application/json"); if (conn.getResponseCode() != 200) { String errorString = "Failed : HTTP error code : " + conn.getResponseCode(); throw new RuntimeException(errorString); } BufferedReader br = new BufferedReader(new InputStreamReader((conn.getInputStream()))); String line; while ((line = br.readLine()) != null) { output += line; } conn.disconnect(); } catch (MalformedURLException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return output; } private void createWikipediaGeoEvent(GeoEvent geoEvent, String jsonResponse) throws JSONException, MessagingException { JSONObject jsonObj = new 
JSONObject(jsonResponse); JSONArray geonamesSONArray = jsonObj.getJSONArray("geonames"); for (int i = 0; i < geonamesSONArray.length(); i++) { JSONObject geoname = geonamesSONArray.getJSONObject(i); GeoEvent geonameGeoEvent = null; String incidentId = "none"; String inGeoEventName = geoEvent.getGeoEventDefinition().getName(); if (inGeoEventName.equals("incident")) incidentId = (String) geoEvent.getField("id"); if (geoEventCreator != null) { try { geonameGeoEvent = geoEventCreator.create("GeoNamesWikipedia", definition.getUri().toString()); geonameGeoEvent.setField(0, geoEvent.getTrackId()); geonameGeoEvent.setField(1, geoEvent.getStartTime()); geonameGeoEvent.setField(2, incidentId); try { geonameGeoEvent.setField(3, geoname.getString("summary")); } catch (Exception ex) { geonameGeoEvent.setField(3, "unavailable"); } try { geonameGeoEvent.setField(4, Double.parseDouble(geoname.getString("distance"))); } catch (Exception ex) { geonameGeoEvent.setField(4, Double.NaN); } try { geonameGeoEvent.setField(5, geoname.getDouble("rank")); } catch (Exception ex) { geonameGeoEvent.setField(5, Double.NaN); } try { geonameGeoEvent.setField(6, geoname.getString("title")); } catch (Exception ex) { geonameGeoEvent.setField(6, "unavailable"); } try { geonameGeoEvent.setField(7, geoname.getString("wikipediaUrl")); } catch (Exception ex) { geonameGeoEvent.setField(7, "unavailable"); } try { geonameGeoEvent.setField(8, geoname.getDouble("elevation")); } catch (Exception ex) { geonameGeoEvent.setField(8, Double.NaN); } try { geonameGeoEvent.setField(9, geoname.getString("countryCode")); } catch (Exception ex) { geonameGeoEvent.setField(9, "unavailable"); } try { geonameGeoEvent.setField(10, geoname.getDouble("lng")); } catch (Exception ex) { geonameGeoEvent.setField(10, "unavailable"); } try { geonameGeoEvent.setField(11, geoname.getString("feature")); } catch (Exception ex) { geonameGeoEvent.setField(11, "unavailable"); } try { geonameGeoEvent.setField(12, 
geoname.getDouble("geoNameId")); } catch (Exception ex) { geonameGeoEvent.setField(12, Double.NaN); } try { geonameGeoEvent.setField(13, geoname.getString("lang")); } catch (Exception ex) { geonameGeoEvent.setField(13, "unavailable"); } try { geonameGeoEvent.setField(14, geoname.getDouble("lat")); } catch (Exception ex) { geonameGeoEvent.setField(14, "unavailable"); } MapGeometry pt = null; try { pt = new MapGeometry(new Point(geoname.getDouble("lng"), geoname.getDouble("lat"), geoname.getDouble("elevation")), SpatialReference.create(4326)); } catch (Exception ex) { pt = new MapGeometry(new Point(geoname.getDouble("lng"), geoname.getDouble("lat")), SpatialReference.create(4326)); } try { geonameGeoEvent.setGeometry(pt); } catch (Exception ex) { LOGGER.debug("Failed to set geometry"); } geonameGeoEvent.setProperty(GeoEventPropertyName.TYPE, "event"); geonameGeoEvent.setProperty(GeoEventPropertyName.OWNER_ID, getId()); geonameGeoEvent.setProperty(GeoEventPropertyName.OWNER_URI, definition.getUri()); } catch (FieldException error) { geonameGeoEvent = null; LOGGER.error("GEOEVENT_CREATION_ERROR", error.getMessage()); LOGGER.info(error.getMessage(), error); } catch (MessagingException e) { // TODO Auto-generated catch block e.printStackTrace(); } } try { send(geonameGeoEvent); } catch (Exception ex) { LOGGER.error("GEOEVENTPRODUCER_SEND_ERROR (Wikipedia)", ex.getMessage()); } } } @Override public void send(GeoEvent geoEvent) throws MessagingException { if (geoEventProducer != null && geoEvent != null) { geoEventProducer.send(geoEvent); // LOGGER.info("Sent: " + geoEvent.toString()); } } @Override public EventDestination getEventDestination() { return (geoEventProducer != null) ? geoEventProducer.getEventDestination() : null; } @Override public void disconnect() { if (geoEventProducer != null) geoEventProducer.disconnect(); } @Override public boolean isConnected() { return (geoEventProducer != null) ? 
geoEventProducer.isConnected() : false; } @Override public String getStatusDetails() { return (geoEventProducer != null) ? geoEventProducer.getStatusDetails() : ""; } @Override public void setup() throws MessagingException { ; } @Override public void init() throws MessagingException { ; } @Override public void update(Observable o, Object arg) { ; } public void setMessaging(Messaging messaging) { this.messaging = messaging; geoEventCreator = messaging.createGeoEventCreator(); } @Override public String toString() { StringBuffer sb = new StringBuffer(); sb.append(definition.getName()); sb.append("/"); sb.append(definition.getVersion()); sb.append("["); for (Property p : getProperties()) { sb.append(p.getDefinition().getPropertyName()); sb.append(":"); sb.append(p.getValue()); sb.append(" "); } sb.append("]"); return sb.toString(); } @Override public List<EventDestination> getEventDestinations() { return (geoEventProducer != null) ? Arrays.asList(geoEventProducer.getEventDestination()) : new ArrayList<EventDestination>(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.unit.core.postoffice.impl; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.filter.Filter; import org.apache.activemq.artemis.core.paging.cursor.PageSubscription; import org.apache.activemq.artemis.core.persistence.OperationContext; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.server.Consumer; import org.apache.activemq.artemis.core.server.MessageReference; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.RoutingContext; import org.apache.activemq.artemis.core.server.ServerConsumer; import org.apache.activemq.artemis.core.server.impl.AckReason; import org.apache.activemq.artemis.core.transaction.Transaction; import org.apache.activemq.artemis.utils.ReferenceCounter; import 
org.apache.activemq.artemis.utils.collections.LinkedListIterator;
import org.apache.activemq.artemis.utils.critical.CriticalComponentImpl;
import org.apache.activemq.artemis.utils.critical.EmptyCriticalAnalyzer;

/**
 * No-op {@link Queue} stub for post-office unit tests. Nearly every method
 * does nothing and returns a neutral value (0, false, null). The only real
 * state is the queue name and id (constructor), an externally settable
 * message count, and an optional {@link PageSubscription}.
 */
public class FakeQueue extends CriticalComponentImpl implements Queue {

   @Override
   public void setPurgeOnNoConsumers(boolean value) {
   }

   @Override
   public int getConsumersBeforeDispatch() {
      return 0;
   }

   @Override
   public void setConsumersBeforeDispatch(int consumersBeforeDispatch) {
   }

   @Override
   public long getDelayBeforeDispatch() {
      return 0;
   }

   @Override
   public void setDelayBeforeDispatch(long delayBeforeDispatch) {
   }

   @Override
   public long getDispatchStartTime() {
      return 0;
   }

   @Override
   public boolean isDispatching() {
      return false;
   }

   @Override
   public void setDispatching(boolean dispatching) {
   }

   @Override
   public boolean allowsReferenceCallback() {
      return false;
   }

   @Override
   public boolean isExclusive() {
      // no-op
      return false;
   }

   @Override
   public void setExclusive(boolean value) {
      // no-op
   }

   @Override
   public boolean isLastValue() {
      // no-op
      return false;
   }

   @Override
   public SimpleString getLastValueKey() {
      return null;
   }

   @Override
   public boolean isNonDestructive() {
      return false;
   }

   @Override
   public void setNonDestructive(boolean nonDestructive) {
   }

   @Override
   public void setMaxConsumer(int maxConsumers) {
   }

   @Override
   public boolean isConfigurationManaged() {
      return false;
   }

   @Override
   public void setConfigurationManaged(boolean configurationManaged) {
   }

   @Override
   public boolean isInternalQueue() {
      // no-op
      return false;
   }

   @Override
   public void sendToDeadLetterAddress(Transaction tx, MessageReference ref) throws Exception {
   }

   @Override
   public void deleteQueue(boolean removeConsumers) throws Exception {
   }

   @Override
   public void unproposed(SimpleString groupID) {
   }

   @Override
   public void reloadPause(long recordID) {
   }

   @Override
   public void recheckRefCount(OperationContext context) {
   }

   @Override
   public boolean isPersistedPause() {
      return false;
   }

   @Override
   public int retryMessages(Filter filter) throws Exception {
      return 0;
   }

   @Override
   public void setConsumersRefCount(ReferenceCounter referenceCounter) {
   }

   @Override
   public void setInternalQueue(boolean internalQueue) {
      // no-op
   }

   @Override
   public void cancel(Transaction tx, MessageReference ref, boolean ignoreRedeliveryCheck) {
      // no-op
   }

   // Stored page subscription, exposed via get/setPageSubscription below.
   PageSubscription subs;

   @Override
   public boolean isDirectDeliver() {
      // no-op
      return false;
   }

   @Override
   public void close() {
      // no-op
   }

   public void forceCheckQueueSize() {
      // no-op
   }

   @Override
   public void reload(MessageReference ref) {
      // no-op
   }

   @Override
   public void pause(boolean persist) {
   }

   @Override
   public boolean flushExecutor() {
      return true;
   }

   @Override
   public void addHead(MessageReference ref, boolean scheduling) {
      // no-op
   }

   @Override
   public void addHead(List<MessageReference> ref, boolean scheduling) {
      // no-op
   }

   @Override
   public void addTail(MessageReference ref, boolean direct) {
      // no-op
   }

   @Override
   public void addTail(MessageReference ref) {
      // no-op
   }

   @Override
   public void resetAllIterators() {
      // no-op
   }

   // The only genuine state held by this fake.
   private final SimpleString name;

   private final long id;

   private long messageCount;

   public FakeQueue(final SimpleString name) {
      this(name, 0);
   }

   public FakeQueue(final SimpleString name, final long id) {
      super(EmptyCriticalAnalyzer.getInstance(), 1);
      this.name = name;
      this.id = id;
   }

   @Override
   public void acknowledge(final MessageReference ref) throws Exception {
      // no-op
   }

   @Override
   public void acknowledge(final MessageReference ref, ServerConsumer consumer) throws Exception {
      // no-op
   }

   @Override
   public void acknowledge(MessageReference ref, AckReason reason, ServerConsumer consumer) throws Exception {
      // no-op
   }

   @Override
   public void acknowledge(final Transaction tx, final MessageReference ref) throws Exception {
      // no-op
   }

   @Override
   public void acknowledge(Transaction tx, MessageReference ref, AckReason reason, ServerConsumer consumer) throws Exception {
      // no-op
   }

   @Override
   public void addConsumer(final Consumer consumer) throws Exception {
      // no-op
   }

   @Override
   public void addRedistributor(final long delay) {
      // no-op
   }

   @Override
   public void cancel(final MessageReference reference, final long timeBase) throws Exception {
      // no-op
   }

   @Override
   public void cancel(final Transaction tx, final MessageReference ref) {
      // no-op
   }

   @Override
   public void cancelRedistributor() throws Exception {
      // no-op
   }

   @Override
   public boolean changeReferencePriority(final long messageID, final byte newPriority) throws Exception {
      // no-op
      return false;
   }

   @Override
   public int changeReferencesPriority(Filter filter, byte newPriority) throws Exception {
      // no-op
      return 0;
   }

   @Override
   public boolean checkRedelivery(final MessageReference ref, final long timeBase, final boolean check) throws Exception {
      // no-op
      return false;
   }

   @Override
   public int deleteAllReferences() throws Exception {
      // no-op
      return 0;
   }

   @Override
   public int deleteMatchingReferences(final Filter filter) throws Exception {
      // no-op
      return 0;
   }

   @Override
   public boolean deleteReference(final long messageID) throws Exception {
      // no-op
      return false;
   }

   @Override
   public void deliverAsync() {
      // no-op
   }

   @Override
   public void expire(final MessageReference ref) throws Exception {
      // no-op
   }

   @Override
   public void expire(final MessageReference ref, final ServerConsumer consumer) throws Exception {
      // no-op
   }

   @Override
   public boolean expireReference(final long messageID) throws Exception {
      // no-op
      return false;
   }

   @Override
   public void expireReferences() throws Exception {
      // no-op
   }

   @Override
   public int expireReferences(final Filter filter) throws Exception {
      // no-op
      return 0;
   }

   @Override
   public int getConsumerCount() {
      // no-op
      return 0;
   }

   @Override
   public long getConsumerRemovedTimestamp() {
      return 0;
   }

   @Override
   public ReferenceCounter getConsumersRefCount() {
      return null;
      //To change body of implemented methods use File | Settings | File Templates.
   }

   @Override
   public Set<Consumer> getConsumers() {
      // no-op
      return null;
   }

   @Override
   public Map<SimpleString, Consumer> getGroups() {
      return null;
   }

   @Override
   public void resetGroup(SimpleString groupID) {
   }

   @Override
   public void resetAllGroups() {
   }

   @Override
   public int getGroupCount() {
      return 0;
   }

   @Override
   public int getDeliveringCount() {
      // no-op
      return 0;
   }

   @Override
   public Filter getFilter() {
      // no-op
      return null;
   }

   @Override
   public void setFilter(Filter filter) {
   }

   @Override
   public long getMessageCount() {
      return messageCount;
   }

   @Override
   public long getPersistentSize() {
      return 0;
   }

   @Override
   public long getDurableMessageCount() {
      return 0;
   }

   @Override
   public long getDurablePersistentSize() {
      return 0;
   }

   // Test hook: lets a test dictate what getMessageCount() reports.
   public void setMessageCount(long messageCount) {
      this.messageCount = messageCount;
   }

   @Override
   public long getMessagesAdded() {
      // no-op
      return 0;
   }

   @Override
   public long getMessagesAcknowledged() {
      // no-op
      return 0;
   }

   @Override
   public long getMessagesExpired() {
      // no-op
      return 0;
   }

   @Override
   public long getMessagesKilled() {
      // no-op
      return 0;
   }

   @Override
   public void resetMessagesAdded() {
      // no-op
   }

   @Override
   public void resetMessagesAcknowledged() {
      // no-op
   }

   @Override
   public void resetMessagesExpired() {
      // no-op
   }

   @Override
   public void resetMessagesKilled() {
      // no-op
   }

   @Override
   public void incrementMesssagesAdded() {
   }

   @Override
   public void deliverScheduledMessages() {
   }

   @Override
   public SimpleString getName() {
      return name;
   }

   @Override
   public SimpleString getAddress() {
      // no-op
      return null;
   }

   @Override
   public long getID() {
      return id;
   }

   @Override
   public MessageReference getReference(final long id1) {
      // no-op
      return null;
   }

   @Override
   public int getScheduledCount() {
      // no-op
      return 0;
   }

   @Override
   public long getScheduledSize() {
      // no-op
      return 0;
   }

   @Override
   public List<MessageReference> getScheduledMessages() {
      // no-op
      return null;
   }

   @Override
   public boolean isDurableMessage() {
      // no-op
      return false;
   }

   @Override
   public boolean isDurable() {
      // no-op
      return false;
   }

   @Override
   public boolean isPaused() {
      // no-op
      return false;
   }

   @Override
   public boolean isTemporary() {
      // no-op
      return false;
   }

   @Override
   public boolean isAutoCreated() {
      return false;
   }

   @Override
   public boolean isPurgeOnNoConsumers() {
      return false;
   }

   @Override
   public int getMaxConsumers() {
      return -1;
   }

   @Override
   public LinkedListIterator<MessageReference> iterator() {
      // no-op
      return null;
   }

   @Override
   public int moveReferences(final Filter filter, final SimpleString toAddress, Binding binding) throws Exception {
      // no-op
      return 0;
   }

   @Override
   public void pause() {
      // no-op
   }

   @Override
   public void reacknowledge(final Transaction tx, final MessageReference ref) throws Exception {
      // no-op
   }

   @Override
   public void referenceHandled(MessageReference ref) {
      // no-op
   }

   @Override
   public void removeConsumer(final Consumer consumer) {
   }

   public MessageReference removeFirstReference(final long id1) throws Exception {
      // no-op
      return null;
   }

   @Override
   public MessageReference removeReferenceWithID(final long id1) throws Exception {
      // no-op
      return null;
   }

   @Override
   public void resume() {
      // no-op
   }

   @Override
   public boolean sendMessageToDeadLetterAddress(final long messageID) throws Exception {
      // no-op
      return false;
   }

   @Override
   public int sendMessagesToDeadLetterAddress(Filter filter) throws Exception {
      // no-op
      return 0;
   }

   @Override
   public SimpleString getExpiryAddress() {
      return null;
   }

   @Override
   public void route(final Message message, final RoutingContext context) throws Exception {
      // no-op
   }

   @Override
   public void routeWithAck(Message message, RoutingContext context) {
   }

   @Override
   public boolean hasMatchingConsumer(final Message message) {
      // no-op
      return false;
   }

   @Override
   public Executor getExecutor() {
      // no-op
      return null;
   }

   public void addLast(MessageReference ref, boolean direct) {
      // no-op
   }

   @Override
   public PageSubscription getPageSubscription() {
      return subs;
   }

   @Override
   public RoutingType getRoutingType() {
      return ActiveMQDefaultConfiguration.getDefaultRoutingType();
   }

   @Override
   public void setRoutingType(RoutingType routingType) {
   }

   // Test hook: stores the subscription and links it back to this queue.
   public void setPageSubscription(PageSubscription sub) {
      this.subs = sub;
      if (subs != null) {
         sub.setQueue(this);
      }
   }

   @Override
   public boolean moveReference(long messageID, SimpleString toAddress, Binding binding, boolean rejectDuplicates) throws Exception {
      // no-op
      return false;
   }

   @Override
   public int deleteAllReferences(int flushLimit) throws Exception {
      return 0;
   }

   @Override
   public int deleteMatchingReferences(int flushLImit, Filter filter, AckReason reason) throws Exception {
      return 0;
   }

   @Override
   public int moveReferences(int flushLimit, Filter filter, SimpleString toAddress, boolean rejectDuplicates, Binding binding) throws Exception {
      return 0;
   }

   @Override
   public void forceDelivery() {
      // no-op
   }

   @Override
   public void deleteQueue() throws Exception {
      // no-op
   }

   /* (non-Javadoc)
    * @see org.apache.activemq.artemis.core.server.Queue#destroyPaging()
    */
   @Override
   public void destroyPaging() {
   }

   /* (non-Javadoc)
    * @see org.apache.activemq.artemis.core.server.Queue#getDeliveringMessages()
    */
   @Override
   public Map<String, List<MessageReference>> getDeliveringMessages() {
      return null;
   }

   @Override
   public LinkedListIterator<MessageReference> browserIterator() {
      // TODO Auto-generated method stub
      return null;
   }

   @Override
   public void postAcknowledge(MessageReference ref) {
   }

   @Override
   public float getRate() {
      return 0.0f;
   }

   @Override
   public SimpleString getUser() {
      return null;
   }

   @Override
   public void setUser(SimpleString user) {
      // no-op
   }

   @Override
   public long getDeliveringSize() {
      return 0;
   }

   @Override
   public int getDurableDeliveringCount() {
      return 0;
   }

   @Override
   public long getDurableDeliveringSize() {
      return 0;
   }

   @Override
   public int getDurableScheduledCount() {
      return 0;
   }

   @Override
   public long getDurableScheduledSize() {
      return 0;
   }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.inspections; import com.intellij.codeInsight.daemon.impl.HighlightInfo; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.codeInspection.actions.CleanupInspectionIntention; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.util.Pair; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiMethodCallExpression; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.refactoring.typeMigration.inspections.GuavaInspection; import com.intellij.testFramework.IdeaTestUtil; import com.intellij.testFramework.PlatformTestUtil; import com.intellij.testFramework.builders.JavaModuleFixtureBuilder; import com.intellij.testFramework.fixtures.JavaCodeInsightFixtureTestCase; import org.junit.Assert; import java.util.Arrays; import java.util.List; /** * @author Dmitry Batkovich */ public class GuavaInspectionTest extends JavaCodeInsightFixtureTestCase { private GuavaInspection myInspection; @Override public void setUp() throws Exception { super.setUp(); myInspection = new GuavaInspection(); myFixture.enableInspections(myInspection); } @Override protected String getTestDataPath() { return PlatformTestUtil.getCommunityPath() + "/java/typeMigration/testData/inspections/guava"; } @Override protected void 
tuneFixture(JavaModuleFixtureBuilder moduleBuilder) throws Exception { moduleBuilder.setLanguageLevel(LanguageLevel.JDK_1_8); moduleBuilder.addLibraryJars("guava", PathManager.getHomePathFor(Assert.class) + "/lib/", "guava-19.0.jar"); moduleBuilder.addLibraryJars("jsr305", PathManager.getHomePathFor(Assert.class) + "/lib/", "jsr305.jar"); moduleBuilder.addJdk(IdeaTestUtil.getMockJdk18Path().getPath()); } public void testOptional() { doTest(); } public void testOptional2() { doTest(); } public void testOptional3() { doTest(); } public void testSimpleFluentIterable() { doTest(); } public void testChainedFluentIterable() { doTest(); } public void testFluentIterableChainWithoutVariable() { doTestAllFile(); } public void testChainedFluentIterableWithChainedInitializer() { doTest(); } public void testFluentIterableChainWithOptional() { doTest(); } public void testTransformAndConcat1() { doTest(); } public void testTransformAndConcat2() { doTest(); } public void testTransformAndConcat3() { doTest(); } public void testTransformAndConcat4() { doTest(); } public void testFilterIsInstance() { doTest(); } public void testInsertTypeParameter() { doTest(); } public void testRemoveMethodReferenceForFunctionalInterfaces() { doTest(); } public void _testChainedFluentIterableWithOf() { doTest(); } //needs Guava 18.0 as dependency public void _testAppend() { doTest(); } public void testChainContainsStopMethods() { doTestNoQuickFixes(PsiMethodCallExpression.class); } public void testFluentIterableAndOptionalChain() { doTest(); } public void testCopyInto() { doTestAllFile(); } public void testToArray() { doTest(); } public void testToArray2() { doTest(); } public void testToArray3() { doTest(); } public void testReturnType() { doTest(); } public void testFluentIterableGet() { doTest(); } public void testFluentIterableGet2() { doTest(); } public void testIterableAssignment() { doTest(); } public void testReturnIterable() { doTest(); } public void 
testConvertFluentIterableAsIterableParameter() { doTest(); } public void testConvertFunctionAsParameter() { doTest(); } public void testFluentIterableMigrationInInheritance() { doTest(); } public void testFluentIterableAndOptional() { doTest(); } public void testFluentIterableContains() { doTest(); } public void testFluentIterableChainSeparatedByMethods() { doTest(); } public void testFluentIterableWithStaticallyImportedFrom() { doTest(); } public void testTypeMigrationRootBackTraverse() { doTest(); } public void testOptionalTransform() { doTest(); } public void testOptionalTransform2() { doTest(); } public void testRemoveMethodReference() { doTest(); } public void testSimplifyOptionalComposition() { doTest(); } public void testMigrateArrays() { doTest(); } public void testConvertImmutableCollections() { doTestAllFile(); } public void testUniqueIndex() { doTestAllFile(); } public void testMigrateMethodAsChainQualifier() { doTest(); } public void testFixAllProblems() { doTestAllFile(); } public void testFixAllProblems2() { doTestAllFile(); } public void testPredicates() { doTestAllFile(); } public void testPredicates2() { doTestAllFile(); } public void testPredicates3() { doTestAllFile(); } public void testPredicates4() { doTestAllFile(); } public void testFluentIterableElementTypeChanged() { doTest(); } // for ex: javax.annotations.Nullable is runtime annotation public void testFunctionAnnotatedWithRuntimeAnnotation() { doTestAllFile(); } public void testFunctionAnnotatedWithRuntimeAnnotation2() { try { myInspection.ignoreJavaxNullable = false; doTestAllFile(); } finally { myInspection.ignoreJavaxNullable = true; } } public void testFluentIterableFromAndParenthesises() { doTestAllFile(); } public void testFunctionIsMethodReference() { doTest(); } private void doTestNoQuickFixes(Class<? extends PsiElement>... 
highlightedElements) { myFixture.configureByFile(getTestName(true) + ".java"); myFixture.doHighlighting(); for (IntentionAction action : myFixture.getAvailableIntentions()) { if (action instanceof GuavaInspection.MigrateGuavaTypeFix) { final PsiElement element = ((GuavaInspection.MigrateGuavaTypeFix)action).getStartElement(); if (PsiTreeUtil.instanceOf(element, highlightedElements)) { fail("Quick fix is found but not expected for types " + Arrays.toString(highlightedElements)); } } } } private void doTest() { myFixture.configureByFile(getTestName(true) + ".java"); myFixture.enableInspections(new GuavaInspection()); boolean actionFound = false; myFixture.doHighlighting(); for (IntentionAction action : myFixture.getAvailableIntentions()) { if (action instanceof GuavaInspection.MigrateGuavaTypeFix) { myFixture.launchAction(action); actionFound = true; break; } } assertTrue("Quick fix isn't found", actionFound); myFixture.checkResultByFile(getTestName(true) + "_after.java"); } private void doTestAllFile() { myFixture.configureByFile(getTestName(true) + ".java"); myFixture.enableInspections(new GuavaInspection()); for (HighlightInfo info : myFixture.doHighlighting()) if (GuavaInspection.PROBLEM_DESCRIPTION.equals(info.getDescription())) { final Pair<HighlightInfo.IntentionActionDescriptor, RangeMarker> marker = info.quickFixActionMarkers.get(0); final PsiElement someElement = myFixture.getFile().findElementAt(0); assertNotNull(someElement); final List<IntentionAction> options = marker.getFirst().getOptions(someElement, myFixture.getEditor()); assertNotNull(options); boolean doBreak = false; for (IntentionAction option : options) { if (option instanceof CleanupInspectionIntention) { myFixture.launchAction(option); doBreak = true; break; } } if (doBreak) { break; } } myFixture.checkResultByFile(getTestName(true) + "_after.java"); } }
package org.apache.cassandra.hadoop.cql3; /* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ import java.io.FileInputStream; import java.io.IOException; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; import java.util.Arrays; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManagerFactory; import org.apache.cassandra.hadoop.ConfigHelper; import org.apache.cassandra.io.util.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import com.datastax.driver.core.AuthProvider; import com.datastax.driver.core.PlainTextAuthProvider; import com.datastax.driver.core.Cluster; import com.datastax.driver.core.HostDistance; import com.datastax.driver.core.PoolingOptions; import com.datastax.driver.core.ProtocolOptions; import com.datastax.driver.core.QueryOptions; import com.datastax.driver.core.SSLOptions; import com.datastax.driver.core.SocketOptions; import com.datastax.driver.core.policies.LoadBalancingPolicy; 
import com.google.common.base.Optional; public class CqlConfigHelper { private static final String INPUT_CQL_COLUMNS_CONFIG = "cassandra.input.columnfamily.columns"; private static final String INPUT_CQL_PAGE_ROW_SIZE_CONFIG = "cassandra.input.page.row.size"; private static final String INPUT_CQL_WHERE_CLAUSE_CONFIG = "cassandra.input.where.clause"; private static final String INPUT_CQL = "cassandra.input.cql"; private static final String USERNAME = "cassandra.username"; private static final String PASSWORD = "cassandra.password"; private static final String INPUT_NATIVE_PORT = "cassandra.input.native.port"; private static final String INPUT_NATIVE_CORE_CONNECTIONS_PER_HOST = "cassandra.input.native.core.connections.per.host"; private static final String INPUT_NATIVE_MAX_CONNECTIONS_PER_HOST = "cassandra.input.native.max.connections.per.host"; private static final String INPUT_NATIVE_MIN_SIMULT_REQ_PER_CONNECTION = "cassandra.input.native.min.simult.reqs.per.connection"; private static final String INPUT_NATIVE_MAX_SIMULT_REQ_PER_CONNECTION = "cassandra.input.native.max.simult.reqs.per.connection"; private static final String INPUT_NATIVE_CONNECTION_TIMEOUT = "cassandra.input.native.connection.timeout"; private static final String INPUT_NATIVE_READ_CONNECTION_TIMEOUT = "cassandra.input.native.read.connection.timeout"; private static final String INPUT_NATIVE_RECEIVE_BUFFER_SIZE = "cassandra.input.native.receive.buffer.size"; private static final String INPUT_NATIVE_SEND_BUFFER_SIZE = "cassandra.input.native.send.buffer.size"; private static final String INPUT_NATIVE_SOLINGER = "cassandra.input.native.solinger"; private static final String INPUT_NATIVE_TCP_NODELAY = "cassandra.input.native.tcp.nodelay"; private static final String INPUT_NATIVE_REUSE_ADDRESS = "cassandra.input.native.reuse.address"; private static final String INPUT_NATIVE_KEEP_ALIVE = "cassandra.input.native.keep.alive"; private static final String INPUT_NATIVE_AUTH_PROVIDER = 
"cassandra.input.native.auth.provider"; private static final String INPUT_NATIVE_SSL_TRUST_STORE_PATH = "cassandra.input.native.ssl.trust.store.path"; private static final String INPUT_NATIVE_SSL_KEY_STORE_PATH = "cassandra.input.native.ssl.key.store.path"; private static final String INPUT_NATIVE_SSL_TRUST_STORE_PASSWARD = "cassandra.input.native.ssl.trust.store.password"; private static final String INPUT_NATIVE_SSL_KEY_STORE_PASSWARD = "cassandra.input.native.ssl.key.store.password"; private static final String INPUT_NATIVE_SSL_CIPHER_SUITES = "cassandra.input.native.ssl.cipher.suites"; private static final String INPUT_NATIVE_PROTOCOL_VERSION = "cassandra.input.native.protocol.version"; private static final String OUTPUT_CQL = "cassandra.output.cql"; /** * Set the CQL columns for the input of this job. * * @param conf Job configuration you are about to run * @param columns */ public static void setInputColumns(Configuration conf, String columns) { if (columns == null || columns.isEmpty()) return; conf.set(INPUT_CQL_COLUMNS_CONFIG, columns); } /** * Set the CQL query Limit for the input of this job. * * @param conf Job configuration you are about to run * @param cqlPageRowSize */ public static void setInputCQLPageRowSize(Configuration conf, String cqlPageRowSize) { if (cqlPageRowSize == null) { throw new UnsupportedOperationException("cql page row size may not be null"); } conf.set(INPUT_CQL_PAGE_ROW_SIZE_CONFIG, cqlPageRowSize); } /** * Set the CQL user defined where clauses for the input of this job. * * @param conf Job configuration you are about to run * @param clauses */ public static void setInputWhereClauses(Configuration conf, String clauses) { if (clauses == null || clauses.isEmpty()) return; conf.set(INPUT_CQL_WHERE_CLAUSE_CONFIG, clauses); } /** * Set the CQL prepared statement for the output of this job. 
* * @param conf Job configuration you are about to run * @param cql */ public static void setOutputCql(Configuration conf, String cql) { if (cql == null || cql.isEmpty()) return; conf.set(OUTPUT_CQL, cql); } public static void setInputCql(Configuration conf, String cql) { if (cql == null || cql.isEmpty()) return; conf.set(INPUT_CQL, cql); } public static void setUserNameAndPassword(Configuration conf, String username, String password) { if (StringUtils.isNotBlank(username)) { conf.set(INPUT_NATIVE_AUTH_PROVIDER, PlainTextAuthProvider.class.getName()); conf.set(USERNAME, username); conf.set(PASSWORD, password); } } public static Optional<Integer> getInputCoreConnections(Configuration conf) { return getIntSetting(INPUT_NATIVE_CORE_CONNECTIONS_PER_HOST, conf); } public static Optional<Integer> getInputMaxConnections(Configuration conf) { return getIntSetting(INPUT_NATIVE_MAX_CONNECTIONS_PER_HOST, conf); } public static int getInputNativePort(Configuration conf) { return Integer.parseInt(conf.get(INPUT_NATIVE_PORT, "9042")); } public static Optional<Integer> getInputMinSimultReqPerConnections(Configuration conf) { return getIntSetting(INPUT_NATIVE_MIN_SIMULT_REQ_PER_CONNECTION, conf); } public static Optional<Integer> getInputMaxSimultReqPerConnections(Configuration conf) { return getIntSetting(INPUT_NATIVE_MAX_SIMULT_REQ_PER_CONNECTION, conf); } public static Optional<Integer> getInputNativeConnectionTimeout(Configuration conf) { return getIntSetting(INPUT_NATIVE_CONNECTION_TIMEOUT, conf); } public static Optional<Integer> getInputNativeReadConnectionTimeout(Configuration conf) { return getIntSetting(INPUT_NATIVE_READ_CONNECTION_TIMEOUT, conf); } public static Optional<Integer> getInputNativeReceiveBufferSize(Configuration conf) { return getIntSetting(INPUT_NATIVE_RECEIVE_BUFFER_SIZE, conf); } public static Optional<Integer> getInputNativeSendBufferSize(Configuration conf) { return getIntSetting(INPUT_NATIVE_SEND_BUFFER_SIZE, conf); } public static Optional<Integer> 
getInputNativeSolinger(Configuration conf) { return getIntSetting(INPUT_NATIVE_SOLINGER, conf); } public static Optional<Boolean> getInputNativeTcpNodelay(Configuration conf) { return getBooleanSetting(INPUT_NATIVE_TCP_NODELAY, conf); } public static Optional<Boolean> getInputNativeReuseAddress(Configuration conf) { return getBooleanSetting(INPUT_NATIVE_REUSE_ADDRESS, conf); } public static Optional<String> getInputNativeAuthProvider(Configuration conf) { return getStringSetting(INPUT_NATIVE_AUTH_PROVIDER, conf); } public static Optional<String> getInputNativeSSLTruststorePath(Configuration conf) { return getStringSetting(INPUT_NATIVE_SSL_TRUST_STORE_PATH, conf); } public static Optional<String> getInputNativeSSLKeystorePath(Configuration conf) { return getStringSetting(INPUT_NATIVE_SSL_KEY_STORE_PATH, conf); } public static Optional<String> getInputNativeSSLKeystorePassword(Configuration conf) { return getStringSetting(INPUT_NATIVE_SSL_KEY_STORE_PASSWARD, conf); } public static Optional<String> getInputNativeSSLTruststorePassword(Configuration conf) { return getStringSetting(INPUT_NATIVE_SSL_TRUST_STORE_PASSWARD, conf); } public static Optional<String> getInputNativeSSLCipherSuites(Configuration conf) { return getStringSetting(INPUT_NATIVE_SSL_CIPHER_SUITES, conf); } public static Optional<Boolean> getInputNativeKeepAlive(Configuration conf) { return getBooleanSetting(INPUT_NATIVE_KEEP_ALIVE, conf); } public static String getInputcolumns(Configuration conf) { return conf.get(INPUT_CQL_COLUMNS_CONFIG); } public static Optional<Integer> getInputPageRowSize(Configuration conf) { return getIntSetting(INPUT_CQL_PAGE_ROW_SIZE_CONFIG, conf); } public static String getInputWhereClauses(Configuration conf) { return conf.get(INPUT_CQL_WHERE_CLAUSE_CONFIG); } public static String getInputCql(Configuration conf) { return conf.get(INPUT_CQL); } public static String getOutputCql(Configuration conf) { return conf.get(OUTPUT_CQL); } private static Optional<Integer> 
getProtocolVersion(Configuration conf) { return getIntSetting(INPUT_NATIVE_PROTOCOL_VERSION, conf); } public static Cluster getInputCluster(String host, Configuration conf) { // this method has been left for backward compatibility return getInputCluster(new String[] {host}, conf); } public static Cluster getInputCluster(String[] hosts, Configuration conf) { int port = getInputNativePort(conf); Optional<AuthProvider> authProvider = getAuthProvider(conf); Optional<SSLOptions> sslOptions = getSSLOptions(conf); Optional<Integer> protocolVersion = getProtocolVersion(conf); LoadBalancingPolicy loadBalancingPolicy = getReadLoadBalancingPolicy(conf, hosts); SocketOptions socketOptions = getReadSocketOptions(conf); QueryOptions queryOptions = getReadQueryOptions(conf); PoolingOptions poolingOptions = getReadPoolingOptions(conf); Cluster.Builder builder = Cluster.builder() .addContactPoints(hosts) .withPort(port) .withCompression(ProtocolOptions.Compression.NONE); if (authProvider.isPresent()) builder.withAuthProvider(authProvider.get()); if (sslOptions.isPresent()) builder.withSSL(sslOptions.get()); if (protocolVersion.isPresent()) { builder.withProtocolVersion(protocolVersion.get()); } builder.withLoadBalancingPolicy(loadBalancingPolicy) .withSocketOptions(socketOptions) .withQueryOptions(queryOptions) .withPoolingOptions(poolingOptions); return builder.build(); } public static void setInputCoreConnections(Configuration conf, String connections) { conf.set(INPUT_NATIVE_CORE_CONNECTIONS_PER_HOST, connections); } public static void setInputMaxConnections(Configuration conf, String connections) { conf.set(INPUT_NATIVE_MAX_CONNECTIONS_PER_HOST, connections); } public static void setInputMinSimultReqPerConnections(Configuration conf, String reqs) { conf.set(INPUT_NATIVE_MIN_SIMULT_REQ_PER_CONNECTION, reqs); } public static void setInputMaxSimultReqPerConnections(Configuration conf, String reqs) { conf.set(INPUT_NATIVE_MAX_SIMULT_REQ_PER_CONNECTION, reqs); } public static void 
setInputNativeConnectionTimeout(Configuration conf, String timeout) { conf.set(INPUT_NATIVE_CONNECTION_TIMEOUT, timeout); } public static void setInputNativeReadConnectionTimeout(Configuration conf, String timeout) { conf.set(INPUT_NATIVE_READ_CONNECTION_TIMEOUT, timeout); } public static void setInputNativeReceiveBufferSize(Configuration conf, String size) { conf.set(INPUT_NATIVE_RECEIVE_BUFFER_SIZE, size); } public static void setInputNativeSendBufferSize(Configuration conf, String size) { conf.set(INPUT_NATIVE_SEND_BUFFER_SIZE, size); } public static void setInputNativeSolinger(Configuration conf, String solinger) { conf.set(INPUT_NATIVE_SOLINGER, solinger); } public static void setInputNativeTcpNodelay(Configuration conf, String tcpNodelay) { conf.set(INPUT_NATIVE_TCP_NODELAY, tcpNodelay); } public static void setInputNativeAuthProvider(Configuration conf, String authProvider) { conf.set(INPUT_NATIVE_AUTH_PROVIDER, authProvider); } public static void setInputNativeSSLTruststorePath(Configuration conf, String path) { conf.set(INPUT_NATIVE_SSL_TRUST_STORE_PATH, path); } public static void setInputNativeSSLKeystorePath(Configuration conf, String path) { conf.set(INPUT_NATIVE_SSL_KEY_STORE_PATH, path); } public static void setInputNativeSSLKeystorePassword(Configuration conf, String pass) { conf.set(INPUT_NATIVE_SSL_KEY_STORE_PASSWARD, pass); } public static void setInputNativeSSLTruststorePassword(Configuration conf, String pass) { conf.set(INPUT_NATIVE_SSL_TRUST_STORE_PASSWARD, pass); } public static void setInputNativeSSLCipherSuites(Configuration conf, String suites) { conf.set(INPUT_NATIVE_SSL_CIPHER_SUITES, suites); } public static void setInputNativeReuseAddress(Configuration conf, String reuseAddress) { conf.set(INPUT_NATIVE_REUSE_ADDRESS, reuseAddress); } public static void setInputNativeKeepAlive(Configuration conf, String keepAlive) { conf.set(INPUT_NATIVE_KEEP_ALIVE, keepAlive); } public static void setInputNativePort(Configuration conf, String port) { 
conf.set(INPUT_NATIVE_PORT, port); } private static PoolingOptions getReadPoolingOptions(Configuration conf) { Optional<Integer> coreConnections = getInputCoreConnections(conf); Optional<Integer> maxConnections = getInputMaxConnections(conf); Optional<Integer> maxSimultaneousRequests = getInputMaxSimultReqPerConnections(conf); Optional<Integer> minSimultaneousRequests = getInputMinSimultReqPerConnections(conf); PoolingOptions poolingOptions = new PoolingOptions(); for (HostDistance hostDistance : Arrays.asList(HostDistance.LOCAL, HostDistance.REMOTE)) { if (coreConnections.isPresent()) poolingOptions.setCoreConnectionsPerHost(hostDistance, coreConnections.get()); if (maxConnections.isPresent()) poolingOptions.setMaxConnectionsPerHost(hostDistance, maxConnections.get()); if (minSimultaneousRequests.isPresent()) poolingOptions.setMinSimultaneousRequestsPerConnectionThreshold(hostDistance, minSimultaneousRequests.get()); if (maxSimultaneousRequests.isPresent()) poolingOptions.setMaxSimultaneousRequestsPerConnectionThreshold(hostDistance, maxSimultaneousRequests.get()); } return poolingOptions; } private static QueryOptions getReadQueryOptions(Configuration conf) { String CL = ConfigHelper.getReadConsistencyLevel(conf); Optional<Integer> fetchSize = getInputPageRowSize(conf); QueryOptions queryOptions = new QueryOptions(); if (CL != null && !CL.isEmpty()) queryOptions.setConsistencyLevel(com.datastax.driver.core.ConsistencyLevel.valueOf(CL)); if (fetchSize.isPresent()) queryOptions.setFetchSize(fetchSize.get()); return queryOptions; } private static SocketOptions getReadSocketOptions(Configuration conf) { SocketOptions socketOptions = new SocketOptions(); Optional<Integer> connectTimeoutMillis = getInputNativeConnectionTimeout(conf); Optional<Integer> readTimeoutMillis = getInputNativeReadConnectionTimeout(conf); Optional<Integer> receiveBufferSize = getInputNativeReceiveBufferSize(conf); Optional<Integer> sendBufferSize = getInputNativeSendBufferSize(conf); 
Optional<Integer> soLinger = getInputNativeSolinger(conf); Optional<Boolean> tcpNoDelay = getInputNativeTcpNodelay(conf); Optional<Boolean> reuseAddress = getInputNativeReuseAddress(conf); Optional<Boolean> keepAlive = getInputNativeKeepAlive(conf); if (connectTimeoutMillis.isPresent()) socketOptions.setConnectTimeoutMillis(connectTimeoutMillis.get()); if (readTimeoutMillis.isPresent()) socketOptions.setReadTimeoutMillis(readTimeoutMillis.get()); if (receiveBufferSize.isPresent()) socketOptions.setReceiveBufferSize(receiveBufferSize.get()); if (sendBufferSize.isPresent()) socketOptions.setSendBufferSize(sendBufferSize.get()); if (soLinger.isPresent()) socketOptions.setSoLinger(soLinger.get()); if (tcpNoDelay.isPresent()) socketOptions.setTcpNoDelay(tcpNoDelay.get()); if (reuseAddress.isPresent()) socketOptions.setReuseAddress(reuseAddress.get()); if (keepAlive.isPresent()) socketOptions.setKeepAlive(keepAlive.get()); return socketOptions; } private static LoadBalancingPolicy getReadLoadBalancingPolicy(Configuration conf, final String[] stickHosts) { return new LimitedLocalNodeFirstLocalBalancingPolicy(stickHosts); } private static Optional<AuthProvider> getAuthProvider(Configuration conf) { Optional<String> authProvider = getInputNativeAuthProvider(conf); if (!authProvider.isPresent()) return Optional.absent(); return Optional.of(getClientAuthProvider(authProvider.get(), conf)); } private static Optional<SSLOptions> getSSLOptions(Configuration conf) { Optional<String> truststorePath = getInputNativeSSLTruststorePath(conf); Optional<String> keystorePath = getInputNativeSSLKeystorePath(conf); Optional<String> truststorePassword = getInputNativeSSLTruststorePassword(conf); Optional<String> keystorePassword = getInputNativeSSLKeystorePassword(conf); Optional<String> cipherSuites = getInputNativeSSLCipherSuites(conf); if (truststorePath.isPresent() && keystorePath.isPresent() && truststorePassword.isPresent() && keystorePassword.isPresent()) { SSLContext context; try { 
context = getSSLContext(truststorePath.get(), truststorePassword.get(), keystorePath.get(), keystorePassword.get()); } catch (UnrecoverableKeyException | KeyManagementException | NoSuchAlgorithmException | KeyStoreException | CertificateException | IOException e) { throw new RuntimeException(e); } String[] css = SSLOptions.DEFAULT_SSL_CIPHER_SUITES; if (cipherSuites.isPresent()) css = cipherSuites.get().split(","); return Optional.of(new SSLOptions(context,css)); } return Optional.absent(); } private static Optional<Integer> getIntSetting(String parameter, Configuration conf) { String setting = conf.get(parameter); if (setting == null) return Optional.absent(); return Optional.of(Integer.valueOf(setting)); } private static Optional<Boolean> getBooleanSetting(String parameter, Configuration conf) { String setting = conf.get(parameter); if (setting == null) return Optional.absent(); return Optional.of(Boolean.valueOf(setting)); } private static Optional<String> getStringSetting(String parameter, Configuration conf) { String setting = conf.get(parameter); if (setting == null) return Optional.absent(); return Optional.of(setting); } private static AuthProvider getClientAuthProvider(String factoryClassName, Configuration conf) { try { Class<?> c = Class.forName(factoryClassName); if (PlainTextAuthProvider.class.equals(c)) { String username = getStringSetting(USERNAME, conf).or(""); String password = getStringSetting(PASSWORD, conf).or(""); return (AuthProvider) c.getConstructor(String.class, String.class) .newInstance(username, password); } else { return (AuthProvider) c.newInstance(); } } catch (Exception e) { throw new RuntimeException("Failed to instantiate auth provider:" + factoryClassName, e); } } private static SSLContext getSSLContext(String truststorePath, String truststorePassword, String keystorePath, String keystorePassword) throws NoSuchAlgorithmException, KeyStoreException, CertificateException, IOException, UnrecoverableKeyException, 
KeyManagementException { FileInputStream tsf = null; FileInputStream ksf = null; SSLContext ctx = null; try { tsf = new FileInputStream(truststorePath); ksf = new FileInputStream(keystorePath); ctx = SSLContext.getInstance("SSL"); KeyStore ts = KeyStore.getInstance("JKS"); ts.load(tsf, truststorePassword.toCharArray()); TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init(ts); KeyStore ks = KeyStore.getInstance("JKS"); ks.load(ksf, keystorePassword.toCharArray()); KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); kmf.init(ks, keystorePassword.toCharArray()); ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); } finally { FileUtils.closeQuietly(tsf); FileUtils.closeQuietly(ksf); } return ctx; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.tserver.compaction;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.DataInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.LongSummaryStatistics;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.ConfigurationCopy;
import org.apache.accumulo.core.conf.DefaultConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.crypto.CryptoServiceFactory;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.file.FileSKVIterator;
import org.apache.accumulo.core.file.blockfile.impl.CacheProvider;
import org.apache.accumulo.core.iterators.IteratorEnvironment;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import org.apache.accumulo.core.metadata.TabletFile;
import org.apache.accumulo.core.metadata.schema.DataFileValue;
import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.server.ServerContext;
import org.apache.hadoop.io.Text;
import org.easymock.EasyMock;
import org.junit.Test;

/**
 * Unit tests for {@code DefaultCompactionStrategy}: verifies which subsets of tablet
 * files get selected for major compaction under various reasons (IDLE/USER/NORMAL),
 * plus a small simulation that bounds total read amplification.
 */
public class DefaultCompactionStrategyTest {

  /** Builds a (first, last) key pair from two row strings; null means "no key". */
  private static Pair<Key,Key> keys(String firstString, String secondString) {
    Key first = null;
    if (firstString != null)
      first = new Key(new Text(firstString));
    Key second = null;
    if (secondString != null)
      second = new Key(new Text(secondString));
    return new Pair<>(first, second);
  }

  /** Mocked ServerContext that only supplies a default crypto service. */
  public static ServerContext getServerContext() {
    ServerContext context = EasyMock.createMock(ServerContext.class);
    EasyMock.expect(context.getCryptoService()).andReturn(CryptoServiceFactory.newDefaultInstance())
        .anyTimes();
    EasyMock.replay(context);
    return context;
  }

  // Canned first/last keys per fake file name, served by TestFileSKVIterator below.
  static final Map<String,Pair<Key,Key>> fakeFiles = new HashMap<>();
  static {
    fakeFiles.put("file1", keys("b", "m"));
    fakeFiles.put("file2", keys("n", "z"));
    fakeFiles.put("file3", keys("a", "y"));
    fakeFiles.put("file4", keys(null, null));
  }

  // Mock FileSKVIterator, which will provide first/last keys above
  private static class TestFileSKVIterator implements FileSKVIterator {
    private String filename;

    TestFileSKVIterator(String filename) {
      this.filename = filename;
    }

    @Override
    public void setInterruptFlag(AtomicBoolean flag) {}

    @Override
    public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options,
        IteratorEnvironment env) {}

    @Override
    public boolean hasTop() {
      return false;
    }

    @Override
    public void next() {}

    @Override
    public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) {}

    @Override
    public Key getTopKey() {
      return null;
    }

    @Override
    public Value getTopValue() {
      return null;
    }

    @Override
    public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) {
      return null;
    }

    @Override
    public Key getFirstKey() {
      // Look up the canned first key for this filename; unknown files yield null.
      Pair<Key,Key> pair = fakeFiles.get(filename);
      if (pair == null)
        return null;
      return pair.getFirst();
    }

    @Override
    public Key getLastKey() {
      // Look up the canned last key for this filename; unknown files yield null.
      Pair<Key,Key> pair = fakeFiles.get(filename);
      if (pair == null)
        return null;
      return pair.getSecond();
    }

    @Override
    public DataInputStream getMetaStore(String name) {
      return null;
    }

    @Override
    public void closeDeepCopies() {}

    @Override
    public void close() {}

    @Override
    public FileSKVIterator getSample(SamplerConfigurationImpl sampleConfig) {
      return null;
    }

    @Override
    public void setCacheProvider(CacheProvider cacheProvider) {}
  }

  static final DefaultConfiguration dfault = DefaultConfiguration.getInstance();

  /**
   * MajorCompactionRequest whose file readers are the mock iterator above, with an
   * overridable max-files-per-tablet so tests can force the "too many files" path.
   */
  private static class TestCompactionRequest extends MajorCompactionRequest {

    Integer mfpt = null;

    @Override
    public FileSKVIterator openReader(TabletFile ref) {
      return new TestFileSKVIterator(ref.toString());
    }

    TestCompactionRequest(KeyExtent extent, MajorCompactionReason reason,
        Map<TabletFile,DataFileValue> files) {
      super(extent, reason, dfault, getServerContext());
      setFiles(files);
    }

    TestCompactionRequest(KeyExtent extent, MajorCompactionReason reason,
        Map<TabletFile,DataFileValue> files, AccumuloConfiguration config) {
      super(extent, reason, config, getServerContext());
      setFiles(files);
    }

    public void setMaxFilesPerTablet(int mfpt) {
      this.mfpt = mfpt;
    }

    @Override
    public int getMaxFilesPerTablet() {
      if (mfpt != null)
        return mfpt;
      return super.getMaxFilesPerTablet();
    }

  }

  /** Builds a file map from alternating (name, size) pairs under a fixed tablet dir. */
  static Map<TabletFile,DataFileValue> createFileMap(Object... objs) {
    Map<TabletFile,DataFileValue> files = new HashMap<>();
    for (int i = 0; i < objs.length; i += 2) {
      files.put(new TabletFile("hdfs://nn1/accumulo/tables/5/t-0001/" + objs[i]),
          new DataFileValue(((Number) objs[i + 1]).longValue(), 0));
    }
    return files;
  }

  private TestCompactionRequest createRequest(MajorCompactionReason reason, Object... objs) {
    return createRequest(new KeyExtent(TableId.of("0"), null, null), reason, objs);
  }

  private TestCompactionRequest createRequest(KeyExtent extent, MajorCompactionReason reason,
      Object... objs) {
    return new TestCompactionRequest(extent, reason, createFileMap(objs));
  }

  private static Set<String> asSet(String... strings) {
    return asSet(Arrays.asList(strings));
  }

  /** Normalizes selected TabletFiles to path strings for set comparison. */
  private static Set<String> asStringSet(Collection<TabletFile> refs) {
    HashSet<String> result = new HashSet<>();
    for (TabletFile ref : refs) {
      result.add(ref.getNormalizedPath());
    }
    return result;
  }

  /** Prefixes bare file names with the fixed tablet dir used by createFileMap. */
  private static Set<String> asSet(Collection<String> strings) {
    HashSet<String> result = new HashSet<>();
    for (String string : strings)
      result.add("hdfs://nn1/accumulo/tables/5/t-0001/" + string);
    return result;
  }

  /**
   * Exercises getCompactionPlan across reasons and file-size mixes; expected selections
   * depend on the default compaction ratio and TSERV_MAJC_THREAD_MAXOPEN == 10.
   */
  @Test
  public void testGetCompactionPlan() throws Exception {

    // test are expecting this default
    assertEquals(10,
        DefaultConfiguration.getInstance().getCount(Property.TSERV_MAJC_THREAD_MAXOPEN));

    DefaultCompactionStrategy s = new DefaultCompactionStrategy();

    // do nothing
    TestCompactionRequest request =
        createRequest(MajorCompactionReason.IDLE, "file1", 10, "file2", 10);
    s.gatherInformation(request);
    CompactionPlan plan = s.getCompactionPlan(request);
    assertTrue(plan.inputFiles.isEmpty());

    // do everything
    request = createRequest(MajorCompactionReason.IDLE, "file1", 10, "file2", 10, "file3", 10);
    s.gatherInformation(request);
    plan = s.getCompactionPlan(request);
    assertEquals(3, plan.inputFiles.size());

    // do everything
    request = createRequest(MajorCompactionReason.USER, "file1", 10, "file2", 10);
    s.gatherInformation(request);
    plan = s.getCompactionPlan(request);
    assertEquals(2, plan.inputFiles.size());

    // partial
    request =
        createRequest(MajorCompactionReason.NORMAL, "file0", 100, "file1", 10, "file2", 10,
            "file3", 10);
    s.gatherInformation(request);
    plan = s.getCompactionPlan(request);
    assertEquals(3, plan.inputFiles.size());
    assertEquals(asStringSet(plan.inputFiles), asSet("file1,file2,file3".split(",")));

    // Two windows (of size 10 or less) meet the compaction criteria. Should select the smallest set
    // of files that meet the criteria.
    request = createRequest(MajorCompactionReason.NORMAL, "file0", 100, "file1", 100, "file2", 100,
        "file3", 10, "file4", 10, "file5", 10, "file6", 10, "file7", 10, "file8", 10, "file9", 10,
        "fileA", 10);
    s.gatherInformation(request);
    plan = s.getCompactionPlan(request);
    assertEquals(8, plan.inputFiles.size());
    assertEquals(asStringSet(plan.inputFiles),
        asSet("file3,file4,file5,file6,file7,file8,file9,fileA".split(",")));

    // The last 10 files do not meet compaction ratio criteria. Should move window of 10 files up
    // looking for files that meet criteria.
    request = createRequest(MajorCompactionReason.NORMAL, "file0", 19683, "file1", 19683, "file2",
        19683, "file3", 6561, "file4", 2187, "file5", 729, "file6", 243, "file7", 81, "file8", 27,
        "file9", 9, "fileA", 3, "fileB", 1);
    s.gatherInformation(request);
    plan = s.getCompactionPlan(request);
    assertEquals(10, plan.inputFiles.size());
    assertEquals(asStringSet(plan.inputFiles),
        asSet("file0,file1,file2,file3,file4,file5,file6,file7,file8,file9".split(",")));

    // No window of files meets the compaction criteria, but there are more files than the max
    // allowed. Should compact the smallest 2.
    request = createRequest(MajorCompactionReason.NORMAL, "file1", 19683, "file2", 19683, "file3",
        6561, "file4", 2187, "file5", 729, "file6", 243, "file7", 81, "file8", 27, "file9", 9,
        "fileA", 3, "fileB", 1);
    request.setMaxFilesPerTablet(10);
    s.gatherInformation(request);
    plan = s.getCompactionPlan(request);
    assertEquals(2, plan.inputFiles.size());
    assertEquals(asStringSet(plan.inputFiles), asSet("fileA,fileB".split(",")));

    // The last 9 files meet the compaction criteria, but 10 files need to be compacted. Should move
    // window of 10 files up looking for files that meet criteria.
    request = createRequest(MajorCompactionReason.NORMAL, "file01", 1500, "file02", 1400, "file03",
        1300, "file04", 1200, "file05", 1100, "file06", 1000, "file07", 900, "file08", 800,
        "file09", 700, "file10", 600, "file11", 500, "file12", 400, "file13", 400, "file14", 300,
        "file15", 200, "file16", 100, "file17", 9, "file18", 8, "file19", 7, "file20", 6, "file21",
        5, "file22", 4, "file23", 3, "file24", 2, "file25", 1);
    request.setMaxFilesPerTablet(15);
    s.gatherInformation(request);
    plan = s.getCompactionPlan(request);
    assertEquals(10, plan.inputFiles.size());
    assertEquals(asStringSet(plan.inputFiles),
        asSet("file12,file13,file14,file15,file16,file17,file18,file19,file20,file21".split(",")));
  }

  /**
   * Minimal in-memory tablet model: tracks a file map, lets the strategy pick inputs,
   * and replaces them with one merged file — used to measure read amplification.
   */
  class SimulatedTablet {

    private int maxFilesPerTablet;
    private ConfigurationCopy config;

    int nextFile = 0;

    Map<TabletFile,DataFileValue> files = new HashMap<>();
    long totalRead = 0;   // bytes consumed by compactions so far
    long added = 0;       // bytes ingested via addFiles

    SimulatedTablet(int maxFilesToCompact, int maxFilesPertablet) {
      this.maxFilesPerTablet = maxFilesPertablet;

      config = new ConfigurationCopy(DefaultConfiguration.getInstance());
      config.set(Property.TSERV_MAJC_THREAD_MAXOPEN, maxFilesToCompact + "");
    }

    /** Adds {@code num} new incoming ("I") files of the given size/entry count. */
    void addFiles(int num, int size, int entries) {
      for (int i = 0; i < num; i++) {
        String name =
            "hdfs://nn1/accumulo/tables/5/t-0001/I" + String.format("%06d", nextFile) + ".rf";
        nextFile++;

        files.put(new TabletFile(name), new DataFileValue(size, entries));

        added += size;
      }
    }

    /**
     * Runs one strategy-driven compaction; returns bytes compacted (0 when the
     * strategy declines). Selected files are merged into a single "C" file.
     */
    long compact(MajorCompactionReason reason) {
      TestCompactionRequest request = new TestCompactionRequest(
          new KeyExtent(TableId.of("0"), (Text) null, null), reason, files, config);

      request.setMaxFilesPerTablet(maxFilesPerTablet);

      DefaultCompactionStrategy s = new DefaultCompactionStrategy();

      if (s.shouldCompact(request)) {
        CompactionPlan plan = s.getCompactionPlan(request);

        long totalSize = 0;
        long totalEntries = 0;

        for (TabletFile fr : plan.inputFiles) {
          DataFileValue dfv = files.remove(fr);
          totalSize += dfv.getSize();
          totalEntries += dfv.getNumEntries();
          totalRead += dfv.getSize();
        }

        String name =
            "hdfs://nn1/accumulo/tables/5/t-0001/C" + String.format("%06d", nextFile) + ".rf";
        nextFile++;

        files.put(new TabletFile(name), new DataFileValue(totalSize, totalEntries));

        return totalSize;
      } else {
        return 0;
      }
    }

    long getTotalRead() {
      return totalRead;
    }

    public long getTotalAdded() {
      return added;
    }

    /** Debug helper: dumps current files sorted by size descending. */
    void print() {
      List<Entry<TabletFile,DataFileValue>> entries = new ArrayList<>(files.entrySet());

      Collections.sort(entries,
          (e1, e2) -> Long.compare(e2.getValue().getSize(), e1.getValue().getSize()));

      for (Entry<TabletFile,DataFileValue> entry : entries) {
        System.out.printf("%s %,d %,d\n", entry.getKey().getFileName(),
            entry.getValue().getSize(), entry.getValue().getNumEntries());
      }
    }

    public int getNumFiles() {
      return files.size();
    }
  }

  /**
   * Ingests batches of n files 1000 times, compacting after each; asserts total read
   * amplification stays below 6x and the average open-file count stays bounded.
   */
  @Test
  public void simulationTest() throws Exception {
    for (int n = 1; n < 10; n++) {
      LongSummaryStatistics lss = new LongSummaryStatistics();
      SimulatedTablet simuTablet = new SimulatedTablet(10, 15);

      for (int i = 0; i < 1000; i++) {
        simuTablet.addFiles(n, 1000, 10);

        simuTablet.compact(MajorCompactionReason.NORMAL);

        lss.accept(simuTablet.getNumFiles());
      }

      // Drain: keep compacting until the strategy declines.
      while (simuTablet.compact(MajorCompactionReason.NORMAL) > 0) {
        lss.accept(simuTablet.getNumFiles());
      }

      assertTrue(simuTablet.getTotalRead() < 6 * simuTablet.getTotalAdded());
      assertTrue(lss.getAverage() < (n >= 8 ? 15 : 7));
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.metadata.formatting; import java.io.Closeable; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan; import org.apache.hadoop.hive.metastore.api.WMResourcePlan; import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; 
import org.apache.hadoop.hive.ql.metadata.CheckConstraint;
import org.apache.hadoop.hive.ql.metadata.DefaultConstraint;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.NotNullConstraint;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.UniqueConstraint;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;

import static org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW;

/**
 * Format table and index information for machine readability using
 * json.
 */
public class JsonMetaDataFormatter implements MetaDataFormatter {
  private static final Logger LOG = LoggerFactory.getLogger(JsonMetaDataFormatter.class);

  /**
   * Convert the map to a JSON string.
   */
  private void asJson(OutputStream out, Map<String, Object> data) throws HiveException {
    try {
      new ObjectMapper().writeValue(out, data);
    } catch (IOException e) {
      throw new HiveException("Unable to convert to json", e);
    }
  }

  /**
   * Write an error message.
   */
  @Override
  public void error(OutputStream out, String msg, int errorCode, String sqlState)
      throws HiveException {
    // Delegate to the full form with no error detail.
    error(out, msg, errorCode, sqlState, null);
  }

  /**
   * Writes an error object with optional detail and SQL state fields.
   */
  @Override
  public void error(OutputStream out, String errorMessage, int errorCode, String sqlState,
      String errorDetail) throws HiveException {
    MapBuilder mb = MapBuilder.create().put("error", errorMessage);
    if(errorDetail != null) {
      mb.put("errorDetail", errorDetail);
    }
    mb.put("errorCode", errorCode);
    if(sqlState != null) {
      mb.put("sqlState", sqlState);
    }
    asJson(out,mb.build());
  }

  /**
   * Show a list of tables.
   */
  @Override
  public void showTables(DataOutputStream out, Set<String> tables) throws HiveException {
    asJson(out, MapBuilder.create().put("tables", tables).build());
  }

  /**
   * Show a list of tables including table types.
   */
  @Override
  public void showTablesExtended(DataOutputStream out, List<Table> tables) throws HiveException {
    if (tables.isEmpty()) {
      // Nothing to do
      return;
    }

    MapBuilder builder = MapBuilder.create();
    ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
    for (Table table : tables) {
      final String tableName = table.getTableName();
      final String tableType = table.getTableType().toString();
      res.add(builder
          .put("Table Name", tableName)
          .put("Table Type", tableType)
          .build());
    }
    asJson(out, builder.put("tables", res).build());
  }

  /**
   * Show a list of materialized views.
   */
  @Override
  public void showMaterializedViews(DataOutputStream out, List<Table> materializedViews)
      throws HiveException {
    if (materializedViews.isEmpty()) {
      // Nothing to do
      return;
    }

    MapBuilder builder = MapBuilder.create();
    ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
    for (Table mv : materializedViews) {
      final String mvName = mv.getTableName();
      final String rewriteEnabled = mv.isRewriteEnabled() ? "Yes" : "No";
      // Currently, we only support manual refresh
      // TODO: Update whenever we have other modes
      final String refreshMode = "Manual refresh";
      final String timeWindowString = mv.getProperty(MATERIALIZED_VIEW_REWRITING_TIME_WINDOW);
      final String mode;
      if (!org.apache.commons.lang.StringUtils.isEmpty(timeWindowString)) {
        long time = HiveConf.toTime(timeWindowString,
            HiveConf.getDefaultTimeUnit(HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW),
            TimeUnit.MINUTES);
        if (time > 0L) {
          mode = refreshMode + " (Valid for " + time + "min)";
        } else if (time == 0L) {
          mode = refreshMode + " (Valid until source tables modified)";
        } else {
          mode = refreshMode + " (Valid always)";
        }
      } else {
        mode = refreshMode;
      }
      res.add(builder
          .put("MV Name", mvName)
          .put("Rewriting Enabled", rewriteEnabled)
          .put("Mode", mode)
          .build());
    }
    asJson(out, builder.put("materialized views", res).build());
  }

  /**
   * Describe table.
   */
  @Override
  public void describeTable(DataOutputStream out, String colPath, String tableName, Table tbl,
      Partition part, List<FieldSchema> cols, boolean isFormatted, boolean isExt,
      boolean isOutputPadded, List<ColumnStatisticsObj> colStats) throws HiveException {
    MapBuilder builder = MapBuilder.create();
    builder.put("columns", createColumnsInfo(cols, colStats));

    if (isExt) {
      if (part != null) {
        builder.put("partitionInfo", part.getTPartition());
      } else {
        builder.put("tableInfo", tbl.getTTable());
      }
      if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo())) {
        builder.put("primaryKeyInfo", tbl.getPrimaryKeyInfo());
      }
      if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo())) {
        builder.put("foreignKeyInfo", tbl.getForeignKeyInfo());
      }
      if (UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo())) {
        builder.put("uniqueConstraintInfo", tbl.getUniqueKeyInfo());
      }
      if (NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint())) {
        builder.put("notNullConstraintInfo", tbl.getNotNullConstraint());
      }
      // NOTE(review): calls isCheckConstraintNotEmpty on DefaultConstraint — looks like a
      // copy-paste from the CheckConstraint branch below; confirm the intended method name.
      if (DefaultConstraint.isCheckConstraintNotEmpty(tbl.getDefaultConstraint())) {
        builder.put("defaultConstraintInfo", tbl.getDefaultConstraint());
      }
      if (CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint())) {
        builder.put("checkConstraintInfo", tbl.getCheckConstraint());
      }
      if (tbl.getStorageHandlerInfo() != null) {
        builder.put("storageHandlerInfo", tbl.getStorageHandlerInfo().toString());
      }
    }

    asJson(out, builder.build());
  }

  /** Builds one JSON column entry per field, attaching matching statistics when present. */
  private List<Map<String, Object>> createColumnsInfo(List<FieldSchema> columns,
      List<ColumnStatisticsObj> columnStatisticsList) {
    ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
    for (FieldSchema column : columns) {
      ColumnStatisticsData statistics = getStatistics(column, columnStatisticsList);
      res.add(createColumnInfo(column, statistics));
    }
    return res;
  }

  /** Finds the statistics object whose column name matches, or null if absent. */
  private ColumnStatisticsData getStatistics(FieldSchema column,
      List<ColumnStatisticsObj> columnStatisticsList) {
    for (ColumnStatisticsObj columnStatistics : columnStatisticsList) {
      if (column.getName().equals(columnStatistics.getColName())) {
        return columnStatistics.getStatsData();
      }
    }
    return null;
  }

  /**
   * Flattens a column plus its type-specific statistics (binary/string/boolean/
   * decimal/double/long/date) into a single map for JSON output.
   */
  private Map<String, Object> createColumnInfo(FieldSchema column,
      ColumnStatisticsData statistics) {
    Map<String, Object> result = MapBuilder.create()
        .put("name", column.getName())
        .put("type", column.getType())
        .put("comment", column.getComment())
        .build();

    if (statistics != null) {
      if (statistics.isSetBinaryStats()) {
        if (statistics.getBinaryStats().isSetNumNulls()) {
          result.put("numNulls", statistics.getBinaryStats().getNumNulls());
        }
        if (statistics.getBinaryStats().isSetAvgColLen()) {
          result.put("avgColLen", statistics.getBinaryStats().getAvgColLen());
        }
        if (statistics.getBinaryStats().isSetMaxColLen()) {
          result.put("maxColLen", statistics.getBinaryStats().getMaxColLen());
        }
      } else if (statistics.isSetStringStats()) {
        if (statistics.getStringStats().isSetNumNulls()) {
          result.put("numNulls", statistics.getStringStats().getNumNulls());
        }
        if (statistics.getStringStats().isSetNumDVs()) {
          result.put("distinctCount", statistics.getStringStats().getNumDVs());
        }
        if (statistics.getStringStats().isSetAvgColLen()) {
          result.put("avgColLen", statistics.getStringStats().getAvgColLen());
        }
        if (statistics.getStringStats().isSetMaxColLen()) {
          result.put("maxColLen", statistics.getStringStats().getMaxColLen());
        }
      } else if (statistics.isSetBooleanStats()) {
        if (statistics.getBooleanStats().isSetNumNulls()) {
          result.put("numNulls", statistics.getBooleanStats().getNumNulls());
        }
        if (statistics.getBooleanStats().isSetNumTrues()) {
          result.put("numTrues", statistics.getBooleanStats().getNumTrues());
        }
        if (statistics.getBooleanStats().isSetNumFalses()) {
          result.put("numFalses", statistics.getBooleanStats().getNumFalses());
        }
      } else if (statistics.isSetDecimalStats()) {
        if (statistics.getDecimalStats().isSetLowValue()) {
          result.put("min",
              MetaDataFormatUtils.convertToString(statistics.getDecimalStats().getLowValue()));
        }
        if (statistics.getDecimalStats().isSetHighValue()) {
          result.put("max",
              MetaDataFormatUtils.convertToString(statistics.getDecimalStats().getHighValue()));
        }
        if (statistics.getDecimalStats().isSetNumNulls()) {
          result.put("numNulls", statistics.getDecimalStats().getNumNulls());
        }
        if (statistics.getDecimalStats().isSetNumDVs()) {
          result.put("distinctCount", statistics.getDecimalStats().getNumDVs());
        }
      } else if (statistics.isSetDoubleStats()) {
        if (statistics.getDoubleStats().isSetLowValue()) {
          result.put("min", statistics.getDoubleStats().getLowValue());
        }
        if (statistics.getDoubleStats().isSetHighValue()) {
          result.put("max", statistics.getDoubleStats().getHighValue());
        }
        if (statistics.getDoubleStats().isSetNumNulls()) {
          result.put("numNulls", statistics.getDoubleStats().getNumNulls());
        }
        if (statistics.getDoubleStats().isSetNumDVs()) {
          result.put("distinctCount", statistics.getDoubleStats().getNumDVs());
        }
      } else if (statistics.isSetLongStats()) {
        if (statistics.getLongStats().isSetLowValue()) {
          result.put("min", statistics.getLongStats().getLowValue());
        }
        if (statistics.getLongStats().isSetHighValue()) {
          result.put("max", statistics.getLongStats().getHighValue());
        }
        if (statistics.getLongStats().isSetNumNulls()) {
          result.put("numNulls", statistics.getLongStats().getNumNulls());
        }
        if (statistics.getLongStats().isSetNumDVs()) {
          result.put("distinctCount", statistics.getLongStats().getNumDVs());
        }
      } else if (statistics.isSetDateStats()) {
        if (statistics.getDateStats().isSetLowValue()) {
          result.put("min",
              MetaDataFormatUtils.convertToString(statistics.getDateStats().getLowValue()));
        }
        if (statistics.getDateStats().isSetHighValue()) {
          result.put("max",
              MetaDataFormatUtils.convertToString(statistics.getDateStats().getHighValue()));
        }
        if (statistics.getDateStats().isSetNumNulls()) {
          result.put("numNulls", statistics.getDateStats().getNumNulls());
        }
        if (statistics.getDateStats().isSetNumDVs()) {
          result.put("distinctCount", statistics.getDateStats().getNumDVs());
        }
      }
    }
    return result;
  }

  /** Emits a "tables" array of per-table status objects. */
  @Override
  public void showTableStatus(DataOutputStream out, Hive db, HiveConf conf, List<Table> tbls,
      Map<String, String> part, Partition par) throws HiveException {
    asJson(out, MapBuilder.create().put(
        "tables", makeAllTableStatus(db, conf, tbls, part, par)).build());
  }

  private List<Map<String, Object>> makeAllTableStatus(Hive db, HiveConf conf, List<Table> tbls,
      Map<String, String> part, Partition par) throws HiveException {
    try {
      ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
      for (Table tbl : tbls) {
        res.add(makeOneTableStatus(tbl, db, conf, part, par));
      }
      return res;
    } catch(IOException e) {
      throw new HiveException(e);
    }
  }

  /**
   * Builds the status map for one table: location, formats, columns, partition info,
   * and (for non-view tables) filesystem statistics.
   */
  private Map<String, Object> makeOneTableStatus(Table tbl, Hive db, HiveConf conf,
      Map<String, String> part, Partition par) throws HiveException, IOException {
    String tblLoc = null;
    String inputFormattCls = null;
    String outputFormattCls = null;
    if (part != null) {
      // Partition-level request: take location/formats from the partition, if any.
      if (par != null) {
        if (par.getLocation() != null) {
          tblLoc = par.getDataLocation().toString();
        }
        inputFormattCls = par.getInputFormatClass() == null ? null
            : par.getInputFormatClass().getName();
        outputFormattCls = par.getOutputFormatClass() == null ? null
            : par.getOutputFormatClass().getName();
      }
    } else {
      // Table-level request.
      if (tbl.getPath() != null) {
        tblLoc = tbl.getDataLocation().toString();
      }
      inputFormattCls = tbl.getInputFormatClass() == null ? null
          : tbl.getInputFormatClass().getName();
      outputFormattCls = tbl.getOutputFormatClass() == null ? null
          : tbl.getOutputFormatClass().getName();
    }

    MapBuilder builder = MapBuilder.create();

    builder.put("tableName", tbl.getTableName());
    builder.put("ownerType", (tbl.getOwnerType() != null) ? tbl.getOwnerType().name() : "null");
    builder.put("owner", tbl.getOwner());
    builder.put("location", tblLoc);
    builder.put("inputFormat", inputFormattCls);
    builder.put("outputFormat", outputFormattCls);
    builder.put("columns", createColumnsInfo(tbl.getCols(), new ArrayList<ColumnStatisticsObj>()));

    builder.put("partitioned", tbl.isPartitioned());
    if (tbl.isPartitioned()) {
      builder.put("partitionColumns",
          createColumnsInfo(tbl.getPartCols(), new ArrayList<ColumnStatisticsObj>()));
    }
    if(tbl.getTableType() != TableType.VIRTUAL_VIEW) {
      //tbl.getPath() is null for views
      putFileSystemsStats(builder, makeTableStatusLocations(tbl, db, par), conf, tbl.getPath());
    }

    return builder.build();
  }

  /** Collects the data locations to stat: every partition, one partition, or the table dir. */
  private List<Path> makeTableStatusLocations(Table tbl, Hive db, Partition par)
      throws HiveException {
    // output file system information
    Path tblPath = tbl.getPath();
    List<Path> locations = new ArrayList<Path>();
    if (tbl.isPartitioned()) {
      if (par == null) {
        for (Partition curPart : db.getPartitions(tbl)) {
          if (curPart.getLocation() != null) {
            locations.add(new Path(curPart.getLocation()));
          }
        }
      } else {
        if (par.getLocation() != null) {
          locations.add(new Path(par.getLocation()));
        }
      }
    } else {
      if (tblPath != null) {
        locations.add(tblPath);
      }
    }

    return locations;
  }

  /**
   * @param tblPath not NULL
   * @throws IOException
   */
  // Duplicates logic in TextMetaDataFormatter
  private void putFileSystemsStats(MapBuilder builder, List<Path> locations,
      HiveConf conf, Path tblPath)
          throws IOException {
    long totalFileSize = 0;
    long maxFileSize = 0;
    long minFileSize = Long.MAX_VALUE;
    long lastAccessTime = 0;
    long lastUpdateTime = 0;
    int numOfFiles = 0;

    boolean unknown = false;
    FileSystem fs = tblPath.getFileSystem(conf);
    // in case all files in locations do not exist
    try {
      FileStatus tmpStatus = fs.getFileStatus(tblPath);
      lastAccessTime = tmpStatus.getAccessTime();
      lastUpdateTime = tmpStatus.getModificationTime();
    } catch (IOException e) {
      LOG.warn(
          "Cannot access File System. File System status will be unknown: ", e);
      unknown = true;
    }

    if (!unknown) {
      for (Path loc : locations) {
        try {
          // NOTE(review): stats tblPath, not loc — the isDir/access-time checks below
          // therefore look at the table dir for every location; confirm this is intended.
          FileStatus status = fs.getFileStatus(tblPath);
          FileStatus[] files = fs.listStatus(loc);
          long accessTime = status.getAccessTime();
          long updateTime = status.getModificationTime();
          // no matter loc is the table location or part location, it must be a
          // directory.
          if (!status.isDir()) {
            continue;
          }
          if (accessTime > lastAccessTime) {
            lastAccessTime = accessTime;
          }
          if (updateTime > lastUpdateTime) {
            lastUpdateTime = updateTime;
          }
          for (FileStatus currentStatus : files) {
            if (currentStatus.isDir()) {
              continue;
            }
            numOfFiles++;
            long fileLen = currentStatus.getLen();
            totalFileSize += fileLen;
            if (fileLen > maxFileSize) {
              maxFileSize = fileLen;
            }
            if (fileLen < minFileSize) {
              minFileSize = fileLen;
            }
            accessTime = currentStatus.getAccessTime();
            updateTime = currentStatus.getModificationTime();
            if (accessTime > lastAccessTime) {
              lastAccessTime = accessTime;
            }
            if (updateTime > lastUpdateTime) {
              lastUpdateTime = updateTime;
            }
          }
        } catch (IOException e) {
          // ignore
        }
      }
    }
    builder
        .put("totalNumberFiles", numOfFiles, ! unknown)
        .put("totalFileSize",    totalFileSize, ! unknown)
        .put("maxFileSize",      maxFileSize, ! unknown)
        .put("minFileSize",      numOfFiles > 0 ? minFileSize : 0, ! unknown)
        .put("lastAccessTime",   lastAccessTime, ! (unknown  || lastAccessTime < 0))
        .put("lastUpdateTime",   lastUpdateTime, ! unknown);
  }

  /**
   * Show the table partitions.
   */
  @Override
  public void showTablePartitions(DataOutputStream out, List<String> parts)
      throws HiveException {
    asJson(out, MapBuilder.create().put("partitions", makeTablePartions(parts)).build());
  }

  private List<Map<String, Object>> makeTablePartions(List<String> parts) {
    ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>(parts.size());
    for (String part : parts) {
      res.add(makeOneTablePartition(part));
    }
    return res;
  }

  // This seems like a very wrong implementation.
  private Map<String, Object> makeOneTablePartition(String partIdent) {
    ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();

    ArrayList<String> names = new ArrayList<String>();
    // partIdent is "k1=v1/k2=v2/..."; split into per-column name/value entries,
    // URL-decoding each value.
    for (String part : StringUtils.split(partIdent, "/")) {
      String name = part;
      String val = null;
      String[] kv = StringUtils.split(part, "=", 2);
      if (kv != null) {
        name = kv[0];
        if (kv.length > 1) {
          try {
            val = URLDecoder.decode(kv[1], StandardCharsets.UTF_8.name());
          } catch (UnsupportedEncodingException e) {
          }
        }
      }
      if (val != null) {
        names.add(name + "='" + val + "'");
      } else {
        names.add(name);
      }

      res.add(MapBuilder.create()
          .put("columnName", name)
          .put("columnValue", val)
          .build());
    }

    return MapBuilder.create()
        .put("name", StringUtils.join(names, ","))
        .put("values", res)
        .build();
  }

  /**
   * Show a list of databases
   */
  @Override
  public void showDatabases(DataOutputStream out, List<String> databases) throws HiveException {
    asJson(out, MapBuilder.create().put("databases", databases).build());
  }

  /**
   * Show the description of a database
   */
  @Override
  public void showDatabaseDescription(DataOutputStream out, String database, String comment,
      String location, String ownerName, PrincipalType ownerType, Map<String, String> params)
          throws HiveException {
    MapBuilder builder = MapBuilder.create().put("database", database).put("comment", comment)
        .put("location", location);
    if (null != ownerName) {
      builder.put("owner", ownerName);
    }
    if (null != ownerType) {
      builder.put("ownerType", ownerType.name());
    }
    if (null != params && !params.isEmpty()) {
      builder.put("params", params);
    }
    asJson(out, builder.build());
  }

  /** Streams the resource plans as a JSON array of {name, status, ...} objects. */
  @Override
  public void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
      throws HiveException {
    JsonGenerator generator = null;
    try {
      generator = new ObjectMapper().getJsonFactory().createJsonGenerator(out);
      generator.writeStartArray();
      for (WMResourcePlan plan : resourcePlans) {
        generator.writeStartObject();
        generator.writeStringField("name", plan.getName());
        generator.writeStringField("status", plan.getStatus().name());
        if (plan.isSetQueryParallelism()) {
          generator.writeNumberField("queryParallelism", plan.getQueryParallelism());
        }
        if (plan.isSetDefaultPoolPath()) {
          generator.writeStringField("defaultPoolPath", plan.getDefaultPoolPath());
        }
        generator.writeEndObject();
      }
      generator.writeEndArray();
      generator.close();
    } catch (IOException e) {
      throw new HiveException(e);
    } finally {
      if (generator != null) {
        IOUtils.closeQuietly(generator);
      }
    }
  }

  /**
   * Formats a resource plan into a json object, the structure is as follows:
   * {
   *    name: "<rp_name>",
   *    parallelism: "<parallelism>",
   *    defaultQueue: "<defaultQueue>",
   *    pools : [
   *      {
   *        name: "<pool_name>",
   *        parallelism: "<parallelism>",
   *        schedulingPolicy: "<policy>",
   *        triggers: [
   *          { name: "<triggerName>", trigger: "<trigExpression>", action: "<actionExpr">}
   *          ...
   *        ]
   *      }
   *      ...
   *    ]
   * }
   */
  private static class JsonRPFormatter implements MetaDataFormatUtils.RPFormatter, Closeable {
    private final JsonGenerator generator;

    JsonRPFormatter(DataOutputStream out) throws IOException {
      generator = new ObjectMapper().getJsonFactory().createJsonGenerator(out);
    }

    /** Writes a "name" field followed by alternating key/value object fields. */
    private void writeNameAndFields(String name, Object ... kvPairs) throws IOException {
      if (kvPairs.length % 2 != 0) {
        throw new IllegalArgumentException("Expected pairs");
      }
      generator.writeStringField("name", name);
      for (int i = 0; i < kvPairs.length; i += 2) {
        generator.writeObjectField(kvPairs[i].toString(), kvPairs[i + 1]);
      }
    }

    @Override
    public void startRP(String rpName, Object ... kvPairs) throws IOException {
      generator.writeStartObject();
      writeNameAndFields(rpName, kvPairs);
    }

    @Override
    public void endRP() throws IOException {
      // End the root rp object.
      generator.writeEndObject();
    }

    @Override
    public void startPools() throws IOException {
      generator.writeArrayFieldStart("pools");
    }

    @Override
    public void endPools() throws IOException {
      // End the pools array.
      generator.writeEndArray();
    }

    @Override
    public void startPool(String poolName, Object ... kvPairs) throws IOException {
      generator.writeStartObject();
      writeNameAndFields(poolName, kvPairs);
    }

    @Override
    public void startTriggers() throws IOException {
      generator.writeArrayFieldStart("triggers");
    }

    @Override
    public void endTriggers() throws IOException {
      generator.writeEndArray();
    }

    @Override
    public void startMappings() throws IOException {
      generator.writeArrayFieldStart("mappings");
    }

    @Override
    public void endMappings() throws IOException {
      generator.writeEndArray();
    }

    @Override
    public void endPool() throws IOException {
      generator.writeEndObject();
    }

    @Override
    public void formatTrigger(String triggerName, String actionExpression,
        String triggerExpression) throws IOException {
      generator.writeStartObject();
      writeNameAndFields(triggerName, "action", actionExpression, "trigger", triggerExpression);
      generator.writeEndObject();
    }

    @Override
    public void formatMappingType(String type, List<String> names) throws IOException {
      generator.writeStartObject();
      generator.writeStringField("type", type);
      generator.writeArrayFieldStart("values");
      for (String name : names) {
        generator.writeString(name);
      }
      generator.writeEndArray();
      generator.writeEndObject();
    }

    @Override
    public void
close() throws IOException { generator.close(); } } public void showFullResourcePlan(DataOutputStream out, WMFullResourcePlan resourcePlan) throws HiveException { try (JsonRPFormatter formatter = new JsonRPFormatter(out)) { MetaDataFormatUtils.formatFullRP(formatter, resourcePlan); } catch (IOException e) { throw new HiveException(e); } } @Override public void showErrors(DataOutputStream out, WMValidateResourcePlanResponse response) throws HiveException { JsonGenerator generator = null; try { generator = new ObjectMapper().getJsonFactory().createJsonGenerator(out); generator.writeStartObject(); generator.writeArrayFieldStart("errors"); for (String error : response.getErrors()) { generator.writeString(error); } generator.writeEndArray(); generator.writeArrayFieldStart("warnings"); for (String error : response.getWarnings()) { generator.writeString(error); } generator.writeEndArray(); generator.writeEndObject(); } catch (IOException e) { throw new HiveException(e); } finally { if (generator != null) { IOUtils.closeQuietly(generator); } } } }
package io.vertx.pgclient.data;

import io.vertx.pgclient.PgConnection;
import io.vertx.sqlclient.ColumnChecker;
import io.vertx.sqlclient.Row;
import io.vertx.sqlclient.Tuple;
import io.vertx.ext.unit.Async;
import io.vertx.ext.unit.TestContext;
import io.vertx.sqlclient.data.Numeric;
import org.junit.Test;

import java.math.BigDecimal;

// Exercises encoding/decoding of PostgreSQL numeric types (INT2/INT4/INT8, FLOAT4/FLOAT8,
// SERIAL variants, and array forms) through the extended (prepared-statement) protocol.
// Each test prepares a statement, runs it, and uses ColumnChecker to verify that the
// column value is readable through every compatible Java accessor.
public class NumericTypesExtendedCodecTest extends ExtendedQueryDataTypeCodecTestBase {
  @Test
  public void testDecodeInt2(TestContext ctx) {
    // Round-trips Short.MAX_VALUE through an INT2 parameter and checks all numeric views.
    Async async = ctx.async();
    PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> {
      conn.prepare("SELECT $1 :: INT2 \"Short\"", ctx.asyncAssertSuccess(p -> {
        p.query().execute(Tuple.tuple().addShort((short) 32767), ctx.asyncAssertSuccess(result -> {
          ctx.assertEquals(1, result.size());
          ctx.assertEquals(1, result.rowCount());
          Row row = result.iterator().next();
          ColumnChecker.checkColumn(0, "Short")
            .returns(Tuple::getValue, Row::getValue, (short) 32767)
            .returns(Tuple::getShort, Row::getShort, Short.MAX_VALUE)
            .returns(Tuple::getInteger, Row::getInteger, 32767)
            .returns(Tuple::getLong, Row::getLong, 32767L)
            .returns(Tuple::getFloat, Row::getFloat, 32767f)
            .returns(Tuple::getDouble, Row::getDouble, 32767d)
            .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(32767))
            .returns(Numeric.class, Numeric.create(32767))
            .forRow(row);
          async.complete();
        }));
      }));
    }));
  }

  @Test
  public void testEncodeInt2(TestContext ctx) {
    // Writes Short.MIN_VALUE into an INT2 column via UPDATE ... RETURNING and
    // verifies the returned value. (Continues on the next chunk line.)
    Async async = ctx.async();
    PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> {
      conn.prepare("UPDATE \"NumericDataType\" SET \"Short\" = $1 WHERE \"id\" = $2 RETURNING \"Short\"", ctx.asyncAssertSuccess(p -> {
        p.query().execute(Tuple.of(Short.MIN_VALUE, 2), ctx.asyncAssertSuccess(result -> {
          ctx.assertEquals(1, result.size());
          ctx.assertEquals(1, result.rowCount());
          Row row = result.iterator().next();
          ColumnChecker.checkColumn(0, "Short")
            .returns(Tuple::getValue, Row::getValue, (short) -32768)
            .returns(Tuple::getShort, Row::getShort,
Short.MIN_VALUE) .returns(Tuple::getInteger, Row::getInteger, -32768) .returns(Tuple::getLong, Row::getLong, -32768L) .returns(Tuple::getFloat, Row::getFloat, -32768f) .returns(Tuple::getDouble, Row::getDouble, -32768d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(-32768)) .returns(Numeric.class, Numeric.create(-32768)) .forRow(row); async.complete(); })); })); })); } @Test public void testDecodeInt4(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT $1 :: INT4 \"Integer\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple().addInteger(Integer.MAX_VALUE), ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Integer") .returns(Tuple::getValue, Row::getValue, Integer.MAX_VALUE) .returns(Tuple::getShort, Row::getShort, (short) -1) .returns(Tuple::getInteger, Row::getInteger, Integer.MAX_VALUE) .returns(Tuple::getLong, Row::getLong, 2147483647L) .returns(Tuple::getFloat, Row::getFloat, 2147483647f) .returns(Tuple::getDouble, Row::getDouble, 2147483647d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(2147483647)) .returns(Numeric.class, Numeric.create(2147483647)) .forRow(row); async.complete(); })); })); })); } @Test public void testEncodeInt4(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"NumericDataType\" SET \"Integer\" = $1 WHERE \"id\" = $2 RETURNING \"Integer\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(Integer.MIN_VALUE) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Integer") .returns(Tuple::getValue, Row::getValue, Integer.MIN_VALUE) 
.returns(Tuple::getShort, Row::getShort, (short) 0) .returns(Tuple::getInteger, Row::getInteger, Integer.MIN_VALUE) .returns(Tuple::getLong, Row::getLong, -2147483648L) .returns(Tuple::getFloat, Row::getFloat, -2147483648f) .returns(Tuple::getDouble, Row::getDouble, -2147483648d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(-2147483648)) .returns(Numeric.class, Numeric.create(-2147483648)) .forRow(row); async.complete(); })); })); })); } @Test public void testDecodeInt8(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT $1 :: INT8 \"Long\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple().addLong(Long.MAX_VALUE), ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Long") .returns(Tuple::getValue, Row::getValue, Long.MAX_VALUE) .returns(Tuple::getShort, Row::getShort, (short) -1) .returns(Tuple::getInteger, Row::getInteger, -1) .returns(Tuple::getLong, Row::getLong, Long.MAX_VALUE) .returns(Tuple::getFloat, Row::getFloat, 9.223372E18f) .returns(Tuple::getDouble, Row::getDouble, 9.223372036854776E18d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(Long.MAX_VALUE)) .returns(Numeric.class, Numeric.create(Long.MAX_VALUE)) .forRow(row); async.complete(); })); })); })); } @Test public void testEncodeInt8(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"NumericDataType\" SET \"Long\" = $1 WHERE \"id\" = $2 RETURNING \"Long\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addLong(Long.MIN_VALUE) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Long") 
.returns(Tuple::getValue, Row::getValue, Long.MIN_VALUE) .returns(Tuple::getShort, Row::getShort, (short) 0) .returns(Tuple::getInteger, Row::getInteger, 0) .returns(Tuple::getLong, Row::getLong, Long.MIN_VALUE) .returns(Tuple::getFloat, Row::getFloat, -9.223372E18f) .returns(Tuple::getDouble, Row::getDouble, -9.223372036854776E18d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(Long.MIN_VALUE)) .returns(Numeric.class, Numeric.create(Long.MIN_VALUE)) .forRow(row); async.complete(); })); })); })); } @Test public void testDecodeFloat4(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT $1 :: FLOAT4\"Float\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple().addFloat(Float.MAX_VALUE), ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Float") .returns(Tuple::getValue, Row::getValue, Float.MAX_VALUE) .returns(Tuple::getShort, Row::getShort, (short) -1) .returns(Tuple::getInteger, Row::getInteger, 2147483647) .returns(Tuple::getLong, Row::getLong, 9223372036854775807L) .returns(Tuple::getFloat, Row::getFloat, Float.MAX_VALUE) .returns(Tuple::getDouble, Row::getDouble, 3.4028234663852886E38d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal("" + Float.MAX_VALUE)) .returns(Numeric.class, Numeric.parse("" + Float.MAX_VALUE)) .forRow(row); async.complete(); })); })); })); } @Test public void testEncodeFloat4(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"NumericDataType\" SET \"Float\" = $1 WHERE \"id\" = $2 RETURNING \"Float\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addFloat(Float.MIN_VALUE) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, 
result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Float") .returns(Tuple::getValue, Row::getValue, Float.MIN_VALUE) .returns(Tuple::getShort, Row::getShort, (short) 0) .returns(Tuple::getInteger, Row::getInteger, 0) .returns(Tuple::getLong, Row::getLong, 0L) .returns(Tuple::getFloat, Row::getFloat, Float.MIN_VALUE) .returns(Tuple::getDouble, Row::getDouble, 1.401298464324817E-45d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal("" + Float.MIN_VALUE)) .returns(Numeric.class, Numeric.parse("" + Float.MIN_VALUE)) .forRow(row); async.complete(); })); })); })); } @Test public void testDecodeFloat8(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT $1 :: FLOAT8\"Double\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple().addDouble(Double.MAX_VALUE), ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Double") .returns(Tuple::getValue, Row::getValue, Double.MAX_VALUE) .returns(Tuple::getShort, Row::getShort, (short) -1) .returns(Tuple::getInteger, Row::getInteger, 2147483647) .returns(Tuple::getLong, Row::getLong, 9223372036854775807L) .returns(Tuple::getFloat, Row::getFloat, Float.POSITIVE_INFINITY) .returns(Tuple::getDouble, Row::getDouble, Double.MAX_VALUE) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal("" + Double.MAX_VALUE)) .returns(Numeric.class, Numeric.parse("" + Double.MAX_VALUE)) .forRow(row); async.complete(); })); })); })); } @Test public void testEncodeFloat8(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"NumericDataType\" SET \"Double\" = $1 WHERE \"id\" = $2 RETURNING \"Double\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addDouble(Double.MIN_VALUE) 
.addInteger(2) , ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Double") .returns(Tuple::getValue, Row::getValue, Double.MIN_VALUE) .returns(Tuple::getShort, Row::getShort, (short) 0) .returns(Tuple::getInteger, Row::getInteger, 0) .returns(Tuple::getLong, Row::getLong, 0L) .returns(Tuple::getFloat, Row::getFloat, 0f) .returns(Tuple::getDouble, Row::getDouble, Double.MIN_VALUE) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal("" + Double.MIN_VALUE)) .returns(Numeric.class, Numeric.parse("" + Double.MIN_VALUE)) .forRow(row); async.complete(); })); })); })); } @Test public void testDecodeSerial2(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"SmallSerial\" FROM \"NumericDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.of(1), ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "SmallSerial") .returns(Tuple::getValue, Row::getValue, (short) 1) .returns(Tuple::getShort, Row::getShort, (short) 1) .returns(Tuple::getInteger, Row::getInteger, 1) .returns(Tuple::getLong, Row::getLong, 1L) .returns(Tuple::getFloat, Row::getFloat, 1f) .returns(Tuple::getDouble, Row::getDouble, 1d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(1)) .returns(Numeric.class, Numeric.create(1)) .forRow(row); async.complete(); })); })); })); } @Test public void testEncodeSerial2(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"NumericDataType\" SET \"SmallSerial\" = $1 WHERE \"id\" = $2 RETURNING \"SmallSerial\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.of(Short.MIN_VALUE, 2), 
ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "SmallSerial") .returns(Tuple::getValue, Row::getValue, (short) -32768) .returns(Tuple::getShort, Row::getShort, Short.MIN_VALUE) .returns(Tuple::getInteger, Row::getInteger, -32768) .returns(Tuple::getLong, Row::getLong, -32768L) .returns(Tuple::getFloat, Row::getFloat, -32768f) .returns(Tuple::getDouble, Row::getDouble, -32768d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(-32768)) .returns(Numeric.class, Numeric.create(-32768)) .forRow(row); async.complete(); })); })); })); } @Test public void testDecodeSerial4(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"Serial\" FROM \"NumericDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple().addInteger(1), ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Serial") .returns(Tuple::getValue, Row::getValue, 1) .returns(Tuple::getShort, Row::getShort, (short) 1) .returns(Tuple::getInteger, Row::getInteger, 1) .returns(Tuple::getLong, Row::getLong, 1L) .returns(Tuple::getFloat, Row::getFloat, 1f) .returns(Tuple::getDouble, Row::getDouble, 1d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(1)) .returns(Numeric.class, Numeric.create(1)) .forRow(row); async.complete(); })); })); })); } @Test public void testEncodeSerial4(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"NumericDataType\" SET \"Serial\" = $1 WHERE \"id\" = $2 RETURNING \"Serial\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(Integer.MIN_VALUE) .addInteger(2) , 
ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "Serial") .returns(Tuple::getValue, Row::getValue, Integer.MIN_VALUE) .returns(Tuple::getShort, Row::getShort, (short) 0) .returns(Tuple::getInteger, Row::getInteger, Integer.MIN_VALUE) .returns(Tuple::getLong, Row::getLong, -2147483648L) .returns(Tuple::getFloat, Row::getFloat, -2147483648f) .returns(Tuple::getDouble, Row::getDouble, -2147483648d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(-2147483648)) .returns(Numeric.class, Numeric.create(-2147483648)) .forRow(row); async.complete(); })); })); })); } @Test public void testDecodeSerial8(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"BigSerial\" FROM \"NumericDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple().addInteger(1), ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "BigSerial") .returns(Tuple::getValue, Row::getValue, 1L) .returns(Tuple::getShort, Row::getShort, (short) 1) .returns(Tuple::getInteger, Row::getInteger, 1) .returns(Tuple::getLong, Row::getLong, 1L) .returns(Tuple::getFloat, Row::getFloat, 1f) .returns(Tuple::getDouble, Row::getDouble, 1d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(1)) .returns(Numeric.class, Numeric.create(1)) .forRow(row); async.complete(); })); })); })); } @Test public void testEncodeSerial8(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"NumericDataType\" SET \"BigSerial\" = $1 WHERE \"id\" = $2 RETURNING \"BigSerial\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addLong(Long.MIN_VALUE) 
.addInteger(2) , ctx.asyncAssertSuccess(result -> { ctx.assertEquals(1, result.size()); ctx.assertEquals(1, result.rowCount()); Row row = result.iterator().next(); ColumnChecker.checkColumn(0, "BigSerial") .returns(Tuple::getValue, Row::getValue, Long.MIN_VALUE) .returns(Tuple::getShort, Row::getShort, (short) 0) .returns(Tuple::getInteger, Row::getInteger, 0) .returns(Tuple::getLong, Row::getLong, Long.MIN_VALUE) .returns(Tuple::getFloat, Row::getFloat, -9.223372E18f) .returns(Tuple::getDouble, Row::getDouble, -9.223372036854776E18d) .returns(Tuple::getBigDecimal, Row::getBigDecimal, new BigDecimal(Long.MIN_VALUE)) .returns(Numeric.class, Numeric.create(Long.MIN_VALUE)) .forRow(row); async.complete(); })); })); })); } /* @Test public void testNumeric(TestContext ctx) { testGeneric(ctx, "SELECT c FROM (VALUES ($1 :: NUMERIC)) AS t (c)", new Numeric[]{ Numeric.create(10), Numeric.create(200030004), Numeric.create(-500), Numeric.NaN }, Tuple::getNumeric); } */ /* @Test public void testNumericArray(TestContext ctx) { testGeneric(ctx, "SELECT c FROM (VALUES ($1 :: NUMERIC[])) AS t (c)", new Numeric[][]{new Numeric[]{Numeric.create(10), Numeric.create(200030004), null, Numeric.create(-500), Numeric.NaN, null}}, Tuple::getNumericArray); } */ @Test public void testShortArray(TestContext ctx) { testGeneric(ctx, "SELECT c FROM (VALUES ($1 :: INT2[])) AS t (c)", new Short[][]{new Short[]{0, -10, null, Short.MAX_VALUE}}, Tuple::getShortArray); } @Test public void testIntegerArray(TestContext ctx) { testGeneric(ctx, "SELECT c FROM (VALUES ($1 :: INT4[])) AS t (c)", new Integer[][]{new Integer[]{0, -10, null, Integer.MAX_VALUE}}, Tuple::getIntegerArray); } @Test public void testLongArray(TestContext ctx) { testGeneric(ctx, "SELECT c FROM (VALUES ($1 :: INT8[])) AS t (c)", new Long[][]{new Long[]{0L, -10L, null, Long.MAX_VALUE}}, Tuple::getLongArray); } @Test public void testFloatArray(TestContext ctx) { testGeneric(ctx, "SELECT c FROM (VALUES ($1 :: FLOAT4[])) AS t (c)", new 
Float[][]{new Float[]{0f, -10f, Float.MAX_VALUE}}, Tuple::getFloatArray); } @Test public void testDecodeShortArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"Short\" FROM \"ArrayDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(1), ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Short") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new short[]{1})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{1})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{1})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{1})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{1})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{1})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(1)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testEncodeShortArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"ArrayDataType\" SET \"Short\" = $1 WHERE \"id\" = $2 RETURNING \"Short\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addShortArray(new Short[]{2, 3, 4}) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Short") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new short[]{2, 3, 4})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{2, 3, 4})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{2, 3, 4})) .returns(Tuple::getLongArray, 
Row::getLongArray, ColumnChecker.toObjectArray(new long[]{2, 3, 4})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{2, 3, 4})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{2, 3, 4})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(2), Numeric.create(3), Numeric.create(4)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testDecodeIntArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"Integer\" FROM \"ArrayDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(1), ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Integer") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new int[]{2})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{2})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{2})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{2})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{2})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{2})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(2)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testEncodeIntArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"ArrayDataType\" SET \"Integer\" = $1 WHERE \"id\" = $2 RETURNING \"Integer\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addIntegerArray(new Integer[]{3, 4, 5, 6}) 
.addInteger(2) , ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Integer") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new int[]{3, 4, 5, 6})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{3, 4, 5, 6})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{3, 4, 5, 6})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{3, 4, 5, 6})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{3, 4, 5, 6})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{3, 4, 5, 6})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(3), Numeric.create(4), Numeric.create(5), Numeric.create(6)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testDecodeLongArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"Long\" FROM \"ArrayDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(1), ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Long") .returns(Tuple::getValue, Row::getValue, new Long[]{3L}) .returns(Tuple::getShortArray, Row::getShortArray, new Short[]{(short)3}) .returns(Tuple::getIntegerArray, Row::getIntegerArray, new Integer[]{3}) .returns(Tuple::getLongArray, Row::getLongArray, new Long[]{3L}) .returns(Tuple::getFloatArray, Row::getFloatArray, new Float[]{3F}) .returns(Tuple::getDoubleArray, Row::getDoubleArray, new Double[]{3D}) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(3)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testEncodeLongArray(TestContext ctx) { Async async 
= ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"ArrayDataType\" SET \"Long\" = $1 WHERE \"id\" = $2 RETURNING \"Long\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addLongArray(new Long[]{4L, 5L, 6L, 7L, 8L}) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Long") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new long[]{4, 5, 6, 7, 8})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{4, 5, 6, 7, 8})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{4, 5, 6, 7, 8})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{4, 5, 6, 7, 8})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{4, 5, 6, 7, 8})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{4, 5, 6, 7, 8})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(4), Numeric.create(5), Numeric.create(6), Numeric.create(7), Numeric.create(8)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testDecodeFloatArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"Float\" FROM \"ArrayDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(1), ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Float") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new float[]{4.1f})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{(short)4.1f})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{(int)4.1f})) .returns(Tuple::getLongArray, 
Row::getLongArray, ColumnChecker.toObjectArray(new long[]{(long)4.1f})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{4.1f})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{(double)4.1f})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(4.1f)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testEncodeFloatArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"ArrayDataType\" SET \"Float\" = $1 WHERE \"id\" = $2 RETURNING \"Float\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addFloatArray(new Float[]{5.2f, 5.3f, 5.4f}) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Float") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new float[]{5.2f, 5.3f, 5.4f})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{(short)5.2f, (short)5.3f, (short)5.4f})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{(int)5.2f, (int)5.3f, (int)5.4f})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{(long)5.2f, (long)5.3f, (long)5.4f})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{5.2f, 5.3f, 5.4f})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{(double)5.2f, (double)5.3f, (double)5.4f})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(5.2f), Numeric.create(5.3f), Numeric.create(5.4f)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testDecodeDoubleArray(TestContext ctx) { Async async = ctx.async(); 
PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"Double\" FROM \"ArrayDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(1), ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Double") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new double[]{5.2})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{(short)5})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{5})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{5L})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{5.2F})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{5.2D})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(5.2D)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testEncodeDoubleArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"ArrayDataType\" SET \"Double\" = $1 WHERE \"id\" = $2 RETURNING \"Double\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addDoubleArray(new Double[]{6.3}) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Double") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new double[]{6.3})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{(short)6.3})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{(int)6.3})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{(long)6.3})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new 
float[]{(float)6.3})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{6.3})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{Numeric.create(6.3)})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testEncodeEmptyArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"ArrayDataType\" SET \"Double\" = $1 WHERE \"id\" = $2 RETURNING \"Double\"", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addDoubleArray(new Double[]{}) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Double") .returns(Tuple::getValue, Row::getValue, ColumnChecker.toObjectArray(new double[]{})) .returns(Tuple::getShortArray, Row::getShortArray, ColumnChecker.toObjectArray(new short[]{})) .returns(Tuple::getIntegerArray, Row::getIntegerArray, ColumnChecker.toObjectArray(new int[]{})) .returns(Tuple::getLongArray, Row::getLongArray, ColumnChecker.toObjectArray(new long[]{})) .returns(Tuple::getFloatArray, Row::getFloatArray, ColumnChecker.toObjectArray(new float[]{})) .returns(Tuple::getDoubleArray, Row::getDoubleArray, ColumnChecker.toObjectArray(new double[]{})) .returns(Tuple::getNumericArray, Row::getNumericArray, ColumnChecker.toObjectArray(new Numeric[]{})) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testDecodeNumericArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("SELECT \"Numeric\" FROM \"ArrayDataType\" WHERE \"id\" = $1", ctx.asyncAssertSuccess(p -> { p.query().execute(Tuple.tuple() .addInteger(1), ctx.asyncAssertSuccess(result -> { Numeric[] expected = { Numeric.create(0), Numeric.create(1), Numeric.create(2), Numeric.create(3) }; ColumnChecker.checkColumn(0, "Numeric") .returns(Tuple::getValue, 
Row::getValue, expected) .returns(Tuple::getShortArray, Row::getShortArray, new Short[]{0,1,2,3}) .returns(Tuple::getIntegerArray, Row::getIntegerArray, new Integer[]{0,1,2,3}) .returns(Tuple::getLongArray, Row::getLongArray, new Long[]{0L,1L,2L,3L}) .returns(Tuple::getFloatArray, Row::getFloatArray, new Float[]{0f,1f,2f,3f}) .returns(Tuple::getDoubleArray, Row::getDoubleArray, new Double[]{0D,1D,2D,3D}) .returns(Tuple::getNumericArray, Row::getNumericArray, new Numeric[]{Numeric.create(0),Numeric.create(1),Numeric.create(2),Numeric.create(3)}) .forRow(result.iterator().next()); async.complete(); })); })); })); } @Test public void testEncodeNumericArray(TestContext ctx) { Async async = ctx.async(); PgConnection.connect(vertx, options, ctx.asyncAssertSuccess(conn -> { conn.prepare("UPDATE \"ArrayDataType\" SET \"Numeric\" = $1 WHERE \"id\" = $2 RETURNING \"Numeric\"", ctx.asyncAssertSuccess(p -> { Numeric[] expected = { Numeric.create(0), Numeric.create(10000), }; p.query().execute(Tuple.tuple() .addValue(expected) .addInteger(2) , ctx.asyncAssertSuccess(result -> { ColumnChecker.checkColumn(0, "Numeric") .returns(Tuple::getValue, Row::getValue, expected) .returns(Tuple::getShortArray, Row::getShortArray, new Short[]{expected[0].shortValue(), expected[1].shortValue()}) .returns(Tuple::getIntegerArray, Row::getIntegerArray, new Integer[]{expected[0].intValue(), expected[1].intValue()}) .returns(Tuple::getLongArray, Row::getLongArray, new Long[]{expected[0].longValue(), expected[1].longValue()}) .returns(Tuple::getFloatArray, Row::getFloatArray, new Float[]{expected[0].floatValue(), expected[1].floatValue()}) .returns(Tuple::getDoubleArray, Row::getDoubleArray, new Double[]{expected[0].doubleValue(), expected[1].doubleValue()}) .returns(Tuple::getNumericArray, Row::getNumericArray, new Numeric[]{Numeric.create(expected[0]), Numeric.create(expected[1])}) .returns(Numeric.class, expected) .forRow(result.iterator().next()); async.complete(); })); })); })); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.builder.endpoint.dsl;

import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.spi.PollingConsumerPollStrategy;

/**
 * Store and retrieve Java objects from an SQL database using JOOQ.
 *
 * Generated by camel build tools - do NOT edit this file!
 */
// NOTE(review): this whole type is tool-generated by EndpointDslMojo; any change
// should be made in the generator/component metadata, not here.
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface JooqEndpointBuilderFactory {

    /**
     * Builder for endpoint consumers for the JOOQ component.
     */
    public interface JooqEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        default AdvancedJooqEndpointConsumerBuilder advanced() {
            return (AdvancedJooqEndpointConsumerBuilder) this;
        }
        /**
         * To use a specific database configuration.
         *
         * The option is a: &lt;code&gt;org.jooq.Configuration&lt;/code&gt;
         * type.
         *
         * Group: common
         *
         * @param databaseConfiguration the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder databaseConfiguration(
                Object databaseConfiguration) {
            doSetProperty("databaseConfiguration", databaseConfiguration);
            return this;
        }
        /**
         * To use a specific database configuration.
         *
         * The option will be converted to a
         * &lt;code&gt;org.jooq.Configuration&lt;/code&gt; type.
         *
         * Group: common
         *
         * @param databaseConfiguration the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder databaseConfiguration(
                String databaseConfiguration) {
            doSetProperty("databaseConfiguration", databaseConfiguration);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param bridgeErrorHandler the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: consumer
         *
         * @param bridgeErrorHandler the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder bridgeErrorHandler(
                String bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Delete entity after it is consumed.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: consumer
         *
         * @param consumeDelete the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder consumeDelete(boolean consumeDelete) {
            doSetProperty("consumeDelete", consumeDelete);
            return this;
        }
        /**
         * Delete entity after it is consumed.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: true
         * Group: consumer
         *
         * @param consumeDelete the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder consumeDelete(String consumeDelete) {
            doSetProperty("consumeDelete", consumeDelete);
            return this;
        }
        /**
         * If the polling consumer did not poll any files, you can enable this
         * option to send an empty message (no body) instead.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param sendEmptyMessageWhenIdle the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder sendEmptyMessageWhenIdle(
                boolean sendEmptyMessageWhenIdle) {
            doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
            return this;
        }
        /**
         * If the polling consumer did not poll any files, you can enable this
         * option to send an empty message (no body) instead.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: consumer
         *
         * @param sendEmptyMessageWhenIdle the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder sendEmptyMessageWhenIdle(
                String sendEmptyMessageWhenIdle) {
            doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
            return this;
        }
        /**
         * The number of subsequent error polls (failed due some error) that
         * should happen before the backoffMultipler should kick-in.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffErrorThreshold the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder backoffErrorThreshold(
                int backoffErrorThreshold) {
            doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
            return this;
        }
        /**
         * The number of subsequent error polls (failed due some error) that
         * should happen before the backoffMultipler should kick-in.
         *
         * The option will be converted to a &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffErrorThreshold the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder backoffErrorThreshold(
                String backoffErrorThreshold) {
            doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
            return this;
        }
        /**
         * The number of subsequent idle polls that should happen before the
         * backoffMultipler should kick-in.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffIdleThreshold the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder backoffIdleThreshold(
                int backoffIdleThreshold) {
            doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
            return this;
        }
        /**
         * The number of subsequent idle polls that should happen before the
         * backoffMultipler should kick-in.
         *
         * The option will be converted to a &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffIdleThreshold the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder backoffIdleThreshold(
                String backoffIdleThreshold) {
            doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
            return this;
        }
        /**
         * To let the scheduled polling consumer backoff if there has been a
         * number of subsequent idles/errors in a row. The multiplier is then
         * the number of polls that will be skipped before the next actual
         * attempt is happening again. When this option is in use then
         * backoffIdleThreshold and/or backoffErrorThreshold must also be
         * configured.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffMultiplier the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder backoffMultiplier(
                int backoffMultiplier) {
            doSetProperty("backoffMultiplier", backoffMultiplier);
            return this;
        }
        /**
         * To let the scheduled polling consumer backoff if there has been a
         * number of subsequent idles/errors in a row. The multiplier is then
         * the number of polls that will be skipped before the next actual
         * attempt is happening again. When this option is in use then
         * backoffIdleThreshold and/or backoffErrorThreshold must also be
         * configured.
         *
         * The option will be converted to a &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffMultiplier the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder backoffMultiplier(
                String backoffMultiplier) {
            doSetProperty("backoffMultiplier", backoffMultiplier);
            return this;
        }
        /**
         * Milliseconds before the next poll.
         *
         * The option is a: &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 500
         * Group: scheduler
         *
         * @param delay the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder delay(long delay) {
            doSetProperty("delay", delay);
            return this;
        }
        /**
         * Milliseconds before the next poll.
         *
         * The option will be converted to a &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 500
         * Group: scheduler
         *
         * @param delay the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder delay(String delay) {
            doSetProperty("delay", delay);
            return this;
        }
        /**
         * If greedy is enabled, then the ScheduledPollConsumer will run
         * immediately again, if the previous run polled 1 or more messages.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: scheduler
         *
         * @param greedy the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder greedy(boolean greedy) {
            doSetProperty("greedy", greedy);
            return this;
        }
        /**
         * If greedy is enabled, then the ScheduledPollConsumer will run
         * immediately again, if the previous run polled 1 or more messages.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: scheduler
         *
         * @param greedy the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder greedy(String greedy) {
            doSetProperty("greedy", greedy);
            return this;
        }
        /**
         * Milliseconds before the first poll starts.
         *
         * The option is a: &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 1000
         * Group: scheduler
         *
         * @param initialDelay the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder initialDelay(long initialDelay) {
            doSetProperty("initialDelay", initialDelay);
            return this;
        }
        /**
         * Milliseconds before the first poll starts.
         *
         * The option will be converted to a &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 1000
         * Group: scheduler
         *
         * @param initialDelay the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder initialDelay(String initialDelay) {
            doSetProperty("initialDelay", initialDelay);
            return this;
        }
        /**
         * Specifies a maximum limit of number of fires. So if you set it to 1,
         * the scheduler will only fire once. If you set it to 5, it will only
         * fire five times. A value of zero or negative means fire forever.
         *
         * The option is a: &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 0
         * Group: scheduler
         *
         * @param repeatCount the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder repeatCount(long repeatCount) {
            doSetProperty("repeatCount", repeatCount);
            return this;
        }
        /**
         * Specifies a maximum limit of number of fires. So if you set it to 1,
         * the scheduler will only fire once. If you set it to 5, it will only
         * fire five times. A value of zero or negative means fire forever.
         *
         * The option will be converted to a &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 0
         * Group: scheduler
         *
         * @param repeatCount the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder repeatCount(String repeatCount) {
            doSetProperty("repeatCount", repeatCount);
            return this;
        }
        /**
         * The consumer logs a start/complete log line when it polls. This
         * option allows you to configure the logging level for that.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.LoggingLevel&lt;/code&gt; type.
         *
         * Default: TRACE
         * Group: scheduler
         *
         * @param runLoggingLevel the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder runLoggingLevel(
                LoggingLevel runLoggingLevel) {
            doSetProperty("runLoggingLevel", runLoggingLevel);
            return this;
        }
        /**
         * The consumer logs a start/complete log line when it polls. This
         * option allows you to configure the logging level for that.
         *
         * The option will be converted to a
         * &lt;code&gt;org.apache.camel.LoggingLevel&lt;/code&gt; type.
         *
         * Default: TRACE
         * Group: scheduler
         *
         * @param runLoggingLevel the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder runLoggingLevel(
                String runLoggingLevel) {
            doSetProperty("runLoggingLevel", runLoggingLevel);
            return this;
        }
        /**
         * Allows for configuring a custom/shared thread pool to use for the
         * consumer. By default each consumer has its own single threaded thread
         * pool.
         *
         * The option is a:
         * &lt;code&gt;java.util.concurrent.ScheduledExecutorService&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param scheduledExecutorService the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder scheduledExecutorService(
                ScheduledExecutorService scheduledExecutorService) {
            doSetProperty("scheduledExecutorService", scheduledExecutorService);
            return this;
        }
        /**
         * Allows for configuring a custom/shared thread pool to use for the
         * consumer. By default each consumer has its own single threaded thread
         * pool.
         *
         * The option will be converted to a
         * &lt;code&gt;java.util.concurrent.ScheduledExecutorService&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param scheduledExecutorService the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder scheduledExecutorService(
                String scheduledExecutorService) {
            doSetProperty("scheduledExecutorService", scheduledExecutorService);
            return this;
        }
        /**
         * To use a cron scheduler from either camel-spring or camel-quartz
         * component. Use value spring or quartz for built in scheduler.
         *
         * The option is a: &lt;code&gt;java.lang.Object&lt;/code&gt; type.
         *
         * Default: none
         * Group: scheduler
         *
         * @param scheduler the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder scheduler(Object scheduler) {
            doSetProperty("scheduler", scheduler);
            return this;
        }
        /**
         * To use a cron scheduler from either camel-spring or camel-quartz
         * component. Use value spring or quartz for built in scheduler.
         *
         * The option will be converted to a
         * &lt;code&gt;java.lang.Object&lt;/code&gt; type.
         *
         * Default: none
         * Group: scheduler
         *
         * @param scheduler the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder scheduler(String scheduler) {
            doSetProperty("scheduler", scheduler);
            return this;
        }
        /**
         * To configure additional properties when using a custom scheduler or
         * any of the Quartz, Spring based scheduler.
         *
         * The option is a: &lt;code&gt;java.util.Map&amp;lt;java.lang.String,
         * java.lang.Object&amp;gt;&lt;/code&gt; type.
         * The option is multivalued, and you can use the
         * schedulerProperties(String, Object) method to add a value (call the
         * method multiple times to set more values).
         *
         * Group: scheduler
         *
         * @param key the option key
         * @param value the option value
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder schedulerProperties(
                String key,
                Object value) {
            doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
            return this;
        }
        /**
         * To configure additional properties when using a custom scheduler or
         * any of the Quartz, Spring based scheduler.
         *
         * The option is a: &lt;code&gt;java.util.Map&amp;lt;java.lang.String,
         * java.lang.Object&amp;gt;&lt;/code&gt; type.
         * The option is multivalued, and you can use the
         * schedulerProperties(String, Object) method to add a value (call the
         * method multiple times to set more values).
         *
         * Group: scheduler
         *
         * @param values the values
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder schedulerProperties(Map values) {
            doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
            return this;
        }
        /**
         * Whether the scheduler should be auto started.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param startScheduler the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder startScheduler(
                boolean startScheduler) {
            doSetProperty("startScheduler", startScheduler);
            return this;
        }
        /**
         * Whether the scheduler should be auto started.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param startScheduler the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder startScheduler(String startScheduler) {
            doSetProperty("startScheduler", startScheduler);
            return this;
        }
        /**
         * Time unit for initialDelay and delay options.
         *
         * The option is a:
         * &lt;code&gt;java.util.concurrent.TimeUnit&lt;/code&gt; type.
         *
         * Default: MILLISECONDS
         * Group: scheduler
         *
         * @param timeUnit the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
            doSetProperty("timeUnit", timeUnit);
            return this;
        }
        /**
         * Time unit for initialDelay and delay options.
         *
         * The option will be converted to a
         * &lt;code&gt;java.util.concurrent.TimeUnit&lt;/code&gt; type.
         *
         * Default: MILLISECONDS
         * Group: scheduler
         *
         * @param timeUnit the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder timeUnit(String timeUnit) {
            doSetProperty("timeUnit", timeUnit);
            return this;
        }
        /**
         * Controls if fixed delay or fixed rate is used. See
         * ScheduledExecutorService in JDK for details.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param useFixedDelay the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder useFixedDelay(boolean useFixedDelay) {
            doSetProperty("useFixedDelay", useFixedDelay);
            return this;
        }
        /**
         * Controls if fixed delay or fixed rate is used. See
         * ScheduledExecutorService in JDK for details.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param useFixedDelay the value to set
         * @return the dsl builder
         */
        default JooqEndpointConsumerBuilder useFixedDelay(String useFixedDelay) {
            doSetProperty("useFixedDelay", useFixedDelay);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint consumers for the JOOQ component.
     */
    public interface AdvancedJooqEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        default JooqEndpointConsumerBuilder basic() {
            return (JooqEndpointConsumerBuilder) this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.spi.ExceptionHandler&lt;/code&gt; type.
         *
         * Group: consumer (advanced)
         *
         * @param exceptionHandler the value to set
         * @return the dsl builder
         */
        default AdvancedJooqEndpointConsumerBuilder exceptionHandler(
                ExceptionHandler exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option will be converted to a
         * &lt;code&gt;org.apache.camel.spi.ExceptionHandler&lt;/code&gt; type.
         *
         * Group: consumer (advanced)
         *
         * @param exceptionHandler the value to set
         * @return the dsl builder
         */
        default AdvancedJooqEndpointConsumerBuilder exceptionHandler(
                String exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.ExchangePattern&lt;/code&gt; type.
         *
         * Group: consumer (advanced)
         *
         * @param exchangePattern the value to set
         * @return the dsl builder
         */
        default AdvancedJooqEndpointConsumerBuilder exchangePattern(
                ExchangePattern exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option will be converted to a
         * &lt;code&gt;org.apache.camel.ExchangePattern&lt;/code&gt; type.
         *
         * Group: consumer (advanced)
         *
         * @param exchangePattern the value to set
         * @return the dsl builder
         */
        default AdvancedJooqEndpointConsumerBuilder exchangePattern(
                String exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
         * you to provide your custom implementation to control error handling
         * usually occurred during the poll operation before an Exchange have
         * been created and being routed in Camel.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.spi.PollingConsumerPollStrategy&lt;/code&gt; type.
         *
         * Group: consumer (advanced)
         *
         * @param pollStrategy the value to set
         * @return the dsl builder
         */
        default AdvancedJooqEndpointConsumerBuilder pollStrategy(
                PollingConsumerPollStrategy pollStrategy) {
            doSetProperty("pollStrategy", pollStrategy);
            return this;
        }
        /**
         * A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
         * you to provide your custom implementation to control error handling
         * usually occurred during the poll operation before an Exchange have
         * been created and being routed in Camel.
         *
         * The option will be converted to a
         * &lt;code&gt;org.apache.camel.spi.PollingConsumerPollStrategy&lt;/code&gt; type.
         *
         * Group: consumer (advanced)
         *
         * @param pollStrategy the value to set
         * @return the dsl builder
         */
        default AdvancedJooqEndpointConsumerBuilder pollStrategy(
                String pollStrategy) {
            doSetProperty("pollStrategy", pollStrategy);
            return this;
        }
    }

    /**
     * Builder for endpoint producers for the JOOQ component.
     */
    public interface JooqEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        default AdvancedJooqEndpointProducerBuilder advanced() {
            return (AdvancedJooqEndpointProducerBuilder) this;
        }
        /**
         * To use a specific database configuration.
         *
         * The option is a: &lt;code&gt;org.jooq.Configuration&lt;/code&gt;
         * type.
         *
         * Group: common
         *
         * @param databaseConfiguration the value to set
         * @return the dsl builder
         */
        default JooqEndpointProducerBuilder databaseConfiguration(
                Object databaseConfiguration) {
            doSetProperty("databaseConfiguration", databaseConfiguration);
            return this;
        }
        /**
         * To use a specific database configuration.
         *
         * The option will be converted to a
         * &lt;code&gt;org.jooq.Configuration&lt;/code&gt; type.
         *
         * Group: common
         *
         * @param databaseConfiguration the value to set
         * @return the dsl builder
         */
        default JooqEndpointProducerBuilder databaseConfiguration(
                String databaseConfiguration) {
            doSetProperty("databaseConfiguration", databaseConfiguration);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default JooqEndpointProducerBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default JooqEndpointProducerBuilder lazyStartProducer(
                String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Type of operation to execute on query.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.component.jooq.JooqOperation&lt;/code&gt; type.
         *
         * Default: NONE
         * Group: producer
         *
         * @param operation the value to set
         * @return the dsl builder
         */
        default JooqEndpointProducerBuilder operation(JooqOperation operation) {
            doSetProperty("operation", operation);
            return this;
        }
        /**
         * Type of operation to execute on query.
         *
         * The option will be converted to a
         * &lt;code&gt;org.apache.camel.component.jooq.JooqOperation&lt;/code&gt; type.
         *
         * Default: NONE
         * Group: producer
         *
         * @param operation the value to set
         * @return the dsl builder
         */
        default JooqEndpointProducerBuilder operation(String operation) {
            doSetProperty("operation", operation);
            return this;
        }
        /**
         * To execute plain SQL query.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param query the value to set
         * @return the dsl builder
         */
        default JooqEndpointProducerBuilder query(String query) {
            doSetProperty("query", query);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint producers for the JOOQ component.
     */
    public interface AdvancedJooqEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        default JooqEndpointProducerBuilder basic() {
            return (JooqEndpointProducerBuilder) this;
        }
    }

    /**
     * Builder for endpoint for the JOOQ component.
     */
    public interface JooqEndpointBuilder
            extends
                JooqEndpointConsumerBuilder,
                JooqEndpointProducerBuilder {
        default AdvancedJooqEndpointBuilder advanced() {
            return (AdvancedJooqEndpointBuilder) this;
        }
        /**
         * To use a specific database configuration.
         *
         * The option is a: &lt;code&gt;org.jooq.Configuration&lt;/code&gt;
         * type.
         *
         * Group: common
         *
         * @param databaseConfiguration the value to set
         * @return the dsl builder
         */
        default JooqEndpointBuilder databaseConfiguration(
                Object databaseConfiguration) {
            doSetProperty("databaseConfiguration", databaseConfiguration);
            return this;
        }
        /**
         * To use a specific database configuration.
         *
         * The option will be converted to a
         * &lt;code&gt;org.jooq.Configuration&lt;/code&gt; type.
         *
         * Group: common
         *
         * @param databaseConfiguration the value to set
         * @return the dsl builder
         */
        default JooqEndpointBuilder databaseConfiguration(
                String databaseConfiguration) {
            doSetProperty("databaseConfiguration", databaseConfiguration);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint for the JOOQ component.
     */
    public interface AdvancedJooqEndpointBuilder
            extends
                AdvancedJooqEndpointConsumerBuilder,
                AdvancedJooqEndpointProducerBuilder {
        default JooqEndpointBuilder basic() {
            return (JooqEndpointBuilder) this;
        }
    }

    /**
     * Proxy enum for <code>org.apache.camel.component.jooq.JooqOperation</code>
     * enum.
     */
    // Mirrors the component enum so the DSL does not need a compile-time
    // dependency on camel-jooq itself.
    enum JooqOperation {
        EXECUTE,
        FETCH,
        NONE;
    }

    public interface JooqBuilders {
        /**
         * JOOQ (camel-jooq)
         * Store and retrieve Java objects from an SQL database using JOOQ.
         *
         * Category: database,sql
         * Since: 3.0
         * Maven coordinates: org.apache.camel:camel-jooq
         *
         * Syntax: <code>jooq:entityType</code>
         *
         * Path parameter: entityType
         * JOOQ entity class
         *
         * @param path entityType
         * @return the dsl builder
         */
        default JooqEndpointBuilder jooq(String path) {
            return JooqEndpointBuilderFactory.endpointBuilder("jooq", path);
        }
        /**
         * JOOQ (camel-jooq)
         * Store and retrieve Java objects from an SQL database using JOOQ.
         *
         * Category: database,sql
         * Since: 3.0
         * Maven coordinates: org.apache.camel:camel-jooq
         *
         * Syntax: <code>jooq:entityType</code>
         *
         * Path parameter: entityType
         * JOOQ entity class
         *
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path entityType
         * @return the dsl builder
         */
        default JooqEndpointBuilder jooq(String componentName, String path) {
            return JooqEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }
    // Creates the concrete builder; the method-local class captures
    // componentName from the enclosing method scope.
    static JooqEndpointBuilder endpointBuilder(String componentName, String path) {
        class JooqEndpointBuilderImpl extends AbstractEndpointBuilder implements JooqEndpointBuilder, AdvancedJooqEndpointBuilder {
            public JooqEndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new JooqEndpointBuilderImpl(path);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.gui.action; import java.awt.BorderLayout; import java.awt.FlowLayout; import java.awt.Font; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.ActionMap; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.InputMap; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.JRootPane; import javax.swing.JTree; import javax.swing.tree.TreePath; import org.apache.commons.lang3.StringUtils; import org.apache.jmeter.gui.GuiPackage; import org.apache.jmeter.gui.Searchable; import org.apache.jmeter.gui.tree.JMeterTreeModel; import org.apache.jmeter.gui.tree.JMeterTreeNode; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.gui.ComponentUtil; import org.apache.jorphan.gui.JLabeledTextField; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; /** * FIXME Why is searchTF not 
getting focus correctly after having been setVisible(false) once */ public class SearchTreeDialog extends JDialog implements ActionListener { private static final long serialVersionUID = -4436834972710248247L; private static final Logger logger = LoggingManager.getLoggerForClass(); private JButton searchButton; private JLabeledTextField searchTF; private JCheckBox isRegexpCB; private JCheckBox isCaseSensitiveCB; private JButton cancelButton; /** * Store last search */ private transient String lastSearch = null; private JButton searchAndExpandButton; public SearchTreeDialog() { super((JFrame) null, JMeterUtils.getResString("search_tree_title"), true); //$NON-NLS-1$ init(); } @Override protected JRootPane createRootPane() { JRootPane rootPane = new JRootPane(); // Hide Window on ESC Action escapeAction = new AbstractAction("ESCAPE") { private static final long serialVersionUID = -6543764044868772971L; @Override public void actionPerformed(ActionEvent actionEvent) { setVisible(false); } }; // Do search on Enter Action enterAction = new AbstractAction("ENTER") { private static final long serialVersionUID = -3661361497864527363L; @Override public void actionPerformed(ActionEvent actionEvent) { doSearch(actionEvent); } }; ActionMap actionMap = rootPane.getActionMap(); actionMap.put(escapeAction.getValue(Action.NAME), escapeAction); actionMap.put(enterAction.getValue(Action.NAME), enterAction); InputMap inputMap = rootPane.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW); inputMap.put(KeyStrokes.ESC, escapeAction.getValue(Action.NAME)); inputMap.put(KeyStrokes.ENTER, enterAction.getValue(Action.NAME)); return rootPane; } private void init() { this.getContentPane().setLayout(new BorderLayout(10,10)); searchTF = new JLabeledTextField(JMeterUtils.getResString("search_text_field"), 20); //$NON-NLS-1$ if(!StringUtils.isEmpty(lastSearch)) { searchTF.setText(lastSearch); } isRegexpCB = new JCheckBox(JMeterUtils.getResString("search_text_chkbox_regexp"), false); //$NON-NLS-1$ 
isCaseSensitiveCB = new JCheckBox(JMeterUtils.getResString("search_text_chkbox_case"), false); //$NON-NLS-1$ Font font = new Font("SansSerif", Font.PLAIN, 10); // reduce font isRegexpCB.setFont(font); isCaseSensitiveCB.setFont(font); JPanel searchCriterionPanel = new JPanel(new FlowLayout(FlowLayout.CENTER)); searchCriterionPanel.add(isCaseSensitiveCB); searchCriterionPanel.add(isRegexpCB); JPanel searchPanel = new JPanel(); searchPanel.setLayout(new BoxLayout(searchPanel, BoxLayout.Y_AXIS)); searchPanel.setBorder(BorderFactory.createEmptyBorder(7, 3, 3, 3)); searchPanel.add(searchTF, BorderLayout.NORTH); searchPanel.add(searchCriterionPanel, BorderLayout.CENTER); JPanel buttonsPanel = new JPanel(new FlowLayout(FlowLayout.CENTER)); searchButton = new JButton(JMeterUtils.getResString("search")); //$NON-NLS-1$ searchButton.addActionListener(this); searchAndExpandButton = new JButton(JMeterUtils.getResString("search_expand")); //$NON-NLS-1$ searchAndExpandButton.addActionListener(this); cancelButton = new JButton(JMeterUtils.getResString("cancel")); //$NON-NLS-1$ cancelButton.addActionListener(this); buttonsPanel.add(searchButton); buttonsPanel.add(searchAndExpandButton); buttonsPanel.add(cancelButton); searchPanel.add(buttonsPanel, BorderLayout.SOUTH); this.getContentPane().add(searchPanel); searchTF.requestFocusInWindow(); this.pack(); ComponentUtil.centerComponentInWindow(this); } /** * Do search * @param e {@link ActionEvent} */ @Override public void actionPerformed(ActionEvent e) { if (e.getSource()==cancelButton) { searchTF.requestFocusInWindow(); this.setVisible(false); return; } doSearch(e); } /** * @param e {@link ActionEvent} */ private void doSearch(ActionEvent e) { boolean expand = e.getSource()==searchAndExpandButton; String wordToSearch = searchTF.getText(); if (StringUtils.isEmpty(wordToSearch)) { return; } else { this.lastSearch = wordToSearch; } // reset previous result ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), 
ActionNames.SEARCH_RESET)); // do search Searcher searcher = null; if (isRegexpCB.isSelected()) { searcher = new RegexpSearcher(isCaseSensitiveCB.isSelected(), searchTF.getText()); } else { searcher = new RawTextSearcher(isCaseSensitiveCB.isSelected(), searchTF.getText()); } GuiPackage guiPackage = GuiPackage.getInstance(); JMeterTreeModel jMeterTreeModel = guiPackage.getTreeModel(); Set<JMeterTreeNode> nodes = new HashSet<>(); for (JMeterTreeNode jMeterTreeNode : jMeterTreeModel.getNodesOfType(Searchable.class)) { try { if (jMeterTreeNode.getUserObject() instanceof Searchable){ Searchable searchable = (Searchable) jMeterTreeNode.getUserObject(); List<JMeterTreeNode> matchingNodes = jMeterTreeNode.getPathToThreadGroup(); List<String> searchableTokens = searchable.getSearchableTokens(); boolean result = searcher.search(searchableTokens); if (result) { nodes.addAll(matchingNodes); } } } catch (Exception ex) { logger.error("Error occured searching for word:"+ wordToSearch, ex); } } GuiPackage guiInstance = GuiPackage.getInstance(); JTree jTree = guiInstance.getMainFrame().getTree(); for (Iterator<JMeterTreeNode> iterator = nodes.iterator(); iterator.hasNext();) { JMeterTreeNode jMeterTreeNode = iterator.next(); jMeterTreeNode.setMarkedBySearch(true); if (expand) { jTree.expandPath(new TreePath(jMeterTreeNode.getPath())); } } GuiPackage.getInstance().getMainFrame().repaint(); searchTF.requestFocusInWindow(); this.setVisible(false); } }
/*
 * Copyright (c) 2012, 2021, Oracle and/or its affiliates. All rights reserved.
 */
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.sun.org.apache.xalan.internal.xsltc.compiler;

import com.sun.org.apache.bcel.internal.classfile.JavaClass;
import com.sun.org.apache.xalan.internal.utils.XMLSecurityManager;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ErrorMsg;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Util;
import com.sun.org.apache.xml.internal.dtm.DTM;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import javax.xml.XMLConstants;
import javax.xml.catalog.CatalogFeatures;
import jdk.xml.internal.JdkConstants;
import jdk.xml.internal.JdkXmlFeatures;
import jdk.xml.internal.JdkXmlUtils;
import jdk.xml.internal.SecuritySupport;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;

/**
 * Main entry point of the XSLTC stylesheet compiler: drives stylesheet
 * parsing, AST construction and translet bytecode generation.
 *
 * @author Jacek Ambroziak
 * @author Santiago Pericas-Geertsen
 * @author G. Todd Miller
 * @author Morten Jorgensen
 * @author John Howard (johnh@schemasoft.com)
 * @LastModified: May 2021
 */
public final class XSLTC {

    // A reference to the main stylesheet parser object.
    private Parser _parser;

    // A reference to an external XMLReader (SAX parser) passed to us
    private XMLReader _reader = null;

    // A reference to an external SourceLoader (for use with include/import)
    private SourceLoader _loader = null;

    // A reference to the stylesheet being compiled.
    private Stylesheet _stylesheet;

    // Counters used by various classes to generate unique names.
    // private int _variableSerial     = 1;
    private int _modeSerial = 1;
    private int _stylesheetSerial = 1;
    private int _stepPatternSerial = 1;
    private int _helperClassSerial = 0;
    private int _attributeSetSerial = 0;

    // Indexes for the three xsl:number count levels; filled in reset().
    private int[] _numberFieldIndexes;

    // Name index tables
    private int _nextGType;                   // Next available element type
    private List<String> _namesIndex;         // Index of all registered QNames
    private Map<String, Integer> _elements;   // Map of all registered elements
    private Map<String, Integer> _attributes; // Map of all registered attributes

    // Namespace index tables
    private int _nextNSType;                  // Next available namespace type
    private List<String> _namespaceIndex;     // Index of all registered namespaces
    private Map<String, Integer> _namespaces; // Map of all registered namespaces
    private Map<String, Integer> _namespacePrefixes; // Map of all registered namespace prefixes

    // All literal text in the stylesheet
    private List<StringBuilder> m_characterData;

    // These define the various methods for outputting the translet
    public static final int JAR_OUTPUT = 1;
    public static final int BYTEARRAY_OUTPUT = 2;
    public static final int CLASSLOADER_OUTPUT = 3;
    public static final int BYTEARRAY_AND_FILE_OUTPUT = 4;
    public static final int BYTEARRAY_AND_JAR_OUTPUT = 5;

    // Compiler options (passed from command line or XSLTC client)
    private boolean _debug = false;                   // -x
    private String _jarFileName = null;               // -j <jar-file-name>
    private String _className = null;                 // -o <class-name>
    private String _packageName = "die.verwandlung";  // override with -p <package-name>
    private File _destDir = null;                     // -d <directory-name>
    private int _outputType = BYTEARRAY_OUTPUT;       // by default

    // One ByteArrayOutputStream per generated class file
    private List<ByteArrayOutputStream> _classes;
    // BCEL class objects collected for JAR output
    private List<JavaClass> _bcelClasses;
    private boolean _callsNodeset = false;
    private boolean _multiDocument = false;
    private boolean _hasIdCall = false;

    /**
     * Set to true if template inlining is requested. Template
     * inlining used to be the default, but we have found that
     * Hotspots does a better job with shorter methods, so the
     * default is *not* to inline now.
     */
    private boolean _templateInlining = false;

    /**
     * State of the secure processing feature.
     */
    private boolean _isSecureProcessing = false;

    // Whether the JDK default parser overrides a 3rd-party parser on the
    // classpath; derived from JdkXmlFeatures in the constructor.
    private boolean _overrideDefaultParser;

    /**
     * Protocols allowed for external references set by the stylesheet
     * processing instruction, Import and Include element.
     */
    private String _accessExternalStylesheet = JdkConstants.EXTERNAL_ACCESS_DEFAULT;

    /**
     * Protocols allowed for external DTD references in source file and/or stylesheet.
*/
private String _accessExternalDTD = JdkConstants.EXTERNAL_ACCESS_DEFAULT;

// Security manager consulted when resolving external resources.
private XMLSecurityManager _xmlSecurityManager;

// Source of JDK XML feature flags, fixed at construction time.
private final JdkXmlFeatures _xmlFeatures;

/**
 * Extension function class loader variables
 */

/* Class loader reference that will be used for external extension functions loading */
private ClassLoader _extensionClassLoader;

/**
 * HashMap with the loaded classes
 */
private final Map<String, Class<?>> _externalExtensionFunctions;

/**
 * Catalog features
 */
CatalogFeatures _catalogFeatures;

/**
 * CDATA chunk size
 */
int _cdataChunkSize;

/**
 * XSLTC compiler constructor.
 *
 * @param featureManager source of JDK XML feature flags; also decides
 *        whether the JDK default parser overrides a 3rd-party one
 * @param hasListener whether an error listener is registered (forwarded
 *        to the Parser)
 */
public XSLTC(JdkXmlFeatures featureManager, boolean hasListener) {
    _overrideDefaultParser = featureManager.getFeature(
            JdkXmlFeatures.XmlFeature.JDK_OVERRIDE_PARSER);
    _parser = new Parser(this, _overrideDefaultParser, hasListener);
    _xmlFeatures = featureManager;
    _extensionClassLoader = null;
    _externalExtensionFunctions = new HashMap<>();
}

/**
 * Set the state of the secure processing feature.
 */
public void setSecureProcessing(boolean flag) {
    _isSecureProcessing = flag;
}

/**
 * Return the state of the secure processing feature.
 */
public boolean isSecureProcessing() {
    return _isSecureProcessing;
}

/**
 * Return the value of the specified feature.
 * @param name name of the feature
 * @return true if the feature is enabled, false otherwise
 */
public boolean getFeature(JdkXmlFeatures.XmlFeature name) {
    return _xmlFeatures.getFeature(name);
}

/**
 * Return the value of a named property (access-external-* protocol
 * lists, security manager, extension class loader, catalog features or
 * CDATA chunk size).
 *
 * @param name the name of the property
 * @return the value of the property, or null for an unknown name
 */
public Object getProperty(String name) {
    if (name.equals(XMLConstants.ACCESS_EXTERNAL_STYLESHEET)) {
        return _accessExternalStylesheet;
    }
    else if (name.equals(XMLConstants.ACCESS_EXTERNAL_DTD)) {
        return _accessExternalDTD;
    }
    else if (name.equals(JdkConstants.SECURITY_MANAGER)) {
        return _xmlSecurityManager;
    }
    else if (name.equals(JdkConstants.JDK_EXT_CLASSLOADER)) {
        return _extensionClassLoader;
    }
    else if (JdkXmlFeatures.CATALOG_FEATURES.equals(name)) {
        return _catalogFeatures;
    }
    else if (JdkConstants.CDATA_CHUNK_SIZE.equals(name)) {
        return _cdataChunkSize;
    }
    return null;
}

/**
 * Set the value of a named property (see {@link #getProperty(String)}
 * for the recognized names). Unknown names are silently ignored.
 *
 * @param name the name of the property
 * @param value the value of the property
 */
public void setProperty(String name, Object value) {
    if (name.equals(XMLConstants.ACCESS_EXTERNAL_STYLESHEET)) {
        _accessExternalStylesheet = (String)value;
    }
    else if (name.equals(XMLConstants.ACCESS_EXTERNAL_DTD)) {
        _accessExternalDTD = (String)value;
    }
    else if (name.equals(JdkConstants.SECURITY_MANAGER)) {
        _xmlSecurityManager = (XMLSecurityManager)value;
    }
    else if (name.equals(JdkConstants.JDK_EXT_CLASSLOADER)) {
        _extensionClassLoader = (ClassLoader) value;
        /* Clear the external extension functions HashMap if extension class
           loader was changed */
        _externalExtensionFunctions.clear();
    }
    else if (JdkXmlFeatures.CATALOG_FEATURES.equals(name)) {
        _catalogFeatures = (CatalogFeatures)value;
    }
    else if (JdkConstants.CDATA_CHUNK_SIZE.equals(name)) {
        _cdataChunkSize = Integer.parseInt((String)value);
    }
}

/**
 * Only for use by the internal TrAX implementation.
 */
public Parser getParser() {
    return _parser;
}

/**
 * Only for use by the internal TrAX implementation.
 */
public void setOutputType(int type) {
    _outputType = type;
}

/**
 * Only for use by the internal TrAX implementation.
 */
public Properties getOutputProperties() {
    return _parser.getOutputProperties();
}

/**
 * Initializes the compiler to compile a new stylesheet.
 */
public void init() {
    reset();
    _reader = null;
    _classes = new ArrayList<>();
    _bcelClasses = new ArrayList<>();
}

// Cache a loaded extension-function class, but only under secure
// processing (the cache doubles as the secure-mode allow list).
private void setExternalExtensionFunctions(String name, Class<?> clazz) {
    if (_isSecureProcessing && clazz != null
            && !_externalExtensionFunctions.containsKey(name)) {
        _externalExtensionFunctions.put(name, clazz);
    }
}

/*
 * Function loads an external extension function.
 * The filtering of function types (external, internal) takes place in the
 * FunctionCall class.
 */
Class<?> loadExternalFunction(String name) throws ClassNotFoundException {
    Class<?> loaded = null;
    // Check if the function is not loaded already
    if (_externalExtensionFunctions.containsKey(name)) {
        loaded = _externalExtensionFunctions.get(name);
    }
    else if (_extensionClassLoader != null) {
        loaded = Class.forName(name, true, _extensionClassLoader);
        setExternalExtensionFunctions(name, loaded);
    }
    if (loaded == null) {
        throw new ClassNotFoundException(name);
    }
    // Return loaded class
    return loaded;
}

/*
 * Returns unmodifiable view of HashMap with loaded external extension
 * functions - will be needed for the TransformerImpl
 */
public Map<String, Class<?>> getExternalExtensionFunctions() {
    return Collections.unmodifiableMap(_externalExtensionFunctions);
}

/**
 * Initializes the compiler to produce a new translet: resets all name /
 * namespace tables, serial counters and per-compilation state.
 */
private void reset() {
    _nextGType = DTM.NTYPES;
    _elements = new HashMap<>();
    _attributes = new HashMap<>();
    _namespaces = new HashMap<>();
    // NOTE(review): _nextNSType is not visibly re-zeroed here before being
    // used as the type of the default (empty) namespace — verify against
    // the full source that this is intended across successive resets.
    _namespaces.put("", _nextNSType);
    _namesIndex = new ArrayList<>(128);
    _namespaceIndex = new ArrayList<>(32);
    _namespacePrefixes = new HashMap<>();
    _stylesheet = null;
    _parser.init();
    //_variableSerial     = 1;
    _modeSerial = 1;
    _stylesheetSerial = 1;
    _stepPatternSerial = 1;
    _helperClassSerial = 0;
    _attributeSetSerial = 0;
    _multiDocument = false;
    _hasIdCall = false;
    _numberFieldIndexes = new int[] {
        -1,     // LEVEL_SINGLE
        -1,     // LEVEL_MULTIPLE
        -1      // LEVEL_ANY
    };
    _externalExtensionFunctions.clear();
}

/**
 * Defines an external SourceLoader to provide the compiler with documents
 * referenced in xsl:include/import
 * @param loader The SourceLoader to use for include/import
 */
public void setSourceLoader(SourceLoader loader) {
    _loader = loader;
}

/**
 * Set a flag indicating if templates are to be inlined or not. The
 * default is to do inlining, but this causes problems when the
 * stylesheets have a large number of templates (e.g. branch targets
 * exceeding 64K or a length of a method exceeding 64K).
 */
public void setTemplateInlining(boolean templateInlining) {
    _templateInlining = templateInlining;
}

/**
 * Return the state of the template inlining feature.
 */
public boolean getTemplateInlining() {
    return _templateInlining;
}

/**
 * Set the parameters to use to locate the correct <?xml-stylesheet ...?>
 * processing instruction in the case where the input document to the
 * compiler (and parser) is an XML document.
 * @param media The media attribute to be matched. May be null, in which
 * case the preferred templates will be used (i.e. alternate = no).
 * @param title The value of the title attribute to match. May be null.
 * @param charset The value of the charset attribute to match. May be null.
*/ public void setPIParameters(String media, String title, String charset) { _parser.setPIParameters(media, title, charset); } /** * Compiles an XSL stylesheet pointed to by a URL * @param url An URL containing the input XSL stylesheet */ public boolean compile(URL url) { try { // Open input stream from URL and wrap inside InputSource final InputStream stream = url.openStream(); final InputSource input = new InputSource(stream); input.setSystemId(url.toString()); return compile(input, _className); } catch (IOException e) { _parser.reportError(Constants.FATAL, new ErrorMsg(ErrorMsg.JAXP_COMPILE_ERR, e)); return false; } } /** * Compiles an XSL stylesheet pointed to by a URL * @param url An URL containing the input XSL stylesheet * @param name The name to assign to the translet class */ public boolean compile(URL url, String name) { try { // Open input stream from URL and wrap inside InputSource final InputStream stream = url.openStream(); final InputSource input = new InputSource(stream); input.setSystemId(url.toString()); return compile(input, name); } catch (IOException e) { _parser.reportError(Constants.FATAL, new ErrorMsg(ErrorMsg.JAXP_COMPILE_ERR, e)); return false; } } /** * Compiles an XSL stylesheet passed in through an InputStream * @param stream An InputStream that will pass in the stylesheet contents * @param name The name of the translet class to generate * @return 'true' if the compilation was successful */ public boolean compile(InputStream stream, String name) { final InputSource input = new InputSource(stream); input.setSystemId(name); // We have nothing else!!! 
return compile(input, name); } /** * Compiles an XSL stylesheet passed in through an InputStream * @param input An InputSource that will pass in the stylesheet contents * @param name The name of the translet class to generate - can be null * @return 'true' if the compilation was successful */ public boolean compile(InputSource input, String name) { try { // Reset globals in case we're called by compile(ArrayList v); reset(); // The systemId may not be set, so we'll have to check the URL String systemId = null; if (input != null) { systemId = input.getSystemId(); } // Set the translet class name if not already set if (_className == null) { if (name != null) { setClassName(name); } else if (systemId != null && !systemId.equals("")) { setClassName(Util.baseName(systemId)); } // Ensure we have a non-empty class name at this point if (_className == null || _className.length() == 0) { setClassName("GregorSamsa"); // default translet name } } // Get the root node of the abstract syntax tree SyntaxTreeNode element = null; if (_reader == null) { element = _parser.parse(input); } else { element = _parser.parse(_reader, input); } // Compile the translet - this is where the work is done! 
if ((!_parser.errorsFound()) && (element != null)) { // Create a Stylesheet element from the root node _stylesheet = _parser.makeStylesheet(element); _stylesheet.setSourceLoader(_loader); _stylesheet.setSystemId(systemId); _stylesheet.setParentStylesheet(null); _stylesheet.setTemplateInlining(_templateInlining); _parser.setCurrentStylesheet(_stylesheet); // Create AST under the Stylesheet element (parse & type-check) _parser.createAST(_stylesheet); } // Generate the bytecodes and output the translet class(es) if ((!_parser.errorsFound()) && (_stylesheet != null)) { _stylesheet.setCallsNodeset(_callsNodeset); _stylesheet.setMultiDocument(_multiDocument); _stylesheet.setHasIdCall(_hasIdCall); // Class synchronization is needed for BCEL synchronized (getClass()) { _stylesheet.translate(); } } } catch (Exception e) { /*if (_debug)*/ e.printStackTrace(); _parser.reportError(Constants.FATAL, new ErrorMsg(ErrorMsg.JAXP_COMPILE_ERR, e)); } catch (Error e) { if (_debug) e.printStackTrace(); _parser.reportError(Constants.FATAL, new ErrorMsg(ErrorMsg.JAXP_COMPILE_ERR, e)); } finally { _reader = null; // reset this here to be sure it is not re-used } return !_parser.errorsFound(); } /** * Compiles a set of stylesheets pointed to by a List of URLs * @param stylesheets A List containing URLs pointing to the stylesheets * @return 'true' if the compilation was successful */ public boolean compile(List<URL> stylesheets) { // Get the number of stylesheets (ie. 
URLs) in the vector final int count = stylesheets.size(); // Return straight away if the vector is empty if (count == 0) return true; // Special handling needed if the URL count is one, becuase the // _className global must not be reset if it was set explicitly if (count == 1) { return compile(stylesheets.get(0)); } else { // Traverse all elements in the vector and compile for (URL url : stylesheets) { _className = null; // reset, so that new name will be computed if (!compile(url)) return false; } } return true; } /** * Returns an array of bytecode arrays generated by a compilation. * @return JVM bytecodes that represent translet class definition */ public byte[][] getBytecodes() { final int count = _classes.size(); final byte[][] result = new byte[count][1]; for (int i = 0; i < count; i++) result[i] = _classes.get(i).toByteArray(); return result; } /** * Compiles a stylesheet pointed to by a URL. The result is put in a * set of byte arrays. One byte array for each generated class. * @param name The name of the translet class to generate * @param input An InputSource that will pass in the stylesheet contents * @param outputType The output type * @return JVM bytecodes that represent translet class definition */ public byte[][] compile(String name, InputSource input, int outputType) { _outputType = outputType; if (compile(input, name)) return getBytecodes(); else return null; } /** * Compiles a stylesheet pointed to by a URL. The result is put in a * set of byte arrays. One byte array for each generated class. 
* @param name The name of the translet class to generate
* @param input An InputSource that will pass in the stylesheet contents
* @return JVM bytecodes that represent translet class definition
*/
public byte[][] compile(String name, InputSource input) {
    return compile(name, input, BYTEARRAY_OUTPUT);
}

/**
 * Set the XMLReader to use for parsing the next input stylesheet
 * @param reader XMLReader (SAX2 parser) to use
 */
public void setXMLReader(XMLReader reader) {
    _reader = reader;
}

/**
 * Get the XMLReader to use for parsing the next input stylesheet
 */
public XMLReader getXMLReader() {
    return _reader;
}

/**
 * Get a list of all compile error messages
 * @return A List containing all compile error messages
 */
public List<ErrorMsg> getErrors() {
    return _parser.getErrors();
}

/**
 * Get a list of all compile warning messages
 * @return A List containing all compile warning messages
 */
public List<ErrorMsg> getWarnings() {
    return _parser.getWarnings();
}

/**
 * Print all compile error messages to standard output
 */
public void printErrors() {
    _parser.printErrors();
}

/**
 * Print all compile warning messages to standard output
 */
public void printWarnings() {
    _parser.printWarnings();
}

/**
 * This method is called by the XPathParser when it encounters a call
 * to the document() function. Affects the DOM used by the translet.
 */
protected void setMultiDocument(boolean flag) {
    _multiDocument = flag;
}

public boolean isMultiDocument() {
    return _multiDocument;
}

/**
 * This method is called by the XPathParser when it encounters a call
 * to the nodeset() extension function. Implies multi document.
 */
protected void setCallsNodeset(boolean flag) {
    if (flag) setMultiDocument(flag);
    _callsNodeset = flag;
}

public boolean callsNodeset() {
    return _callsNodeset;
}

protected void setHasIdCall(boolean flag) {
    _hasIdCall = flag;
}

public boolean hasIdCall() {
    return _hasIdCall;
}

/**
 * Set the class name for the generated translet. This class name is
 * overridden if multiple stylesheets are compiled in one go using the
 * compile(List urls) method.
 * @param className The name to assign to the translet class
 */
public void setClassName(String className) {
    final String base = Util.baseName(className);
    final String noext = Util.noExtName(base);
    String name = Util.toJavaName(noext);

    // NOTE(review): _packageName is initialized non-null and
    // setPackageName rejects null, so the null branch looks defensive
    // only — verify against the full source before simplifying.
    if (_packageName == null)
        _className = name;
    else
        _className = _packageName + '.' + name;
}

/**
 * Get the class name for the generated translet.
 */
public String getClassName() {
    return _className;
}

/**
 * Convert for Java class name of local system file name.
 * (Replace '.' with '/' on UNIX and replace '.' by '\' on Windows/DOS.)
 */
private String classFileName(final String className) {
    return className.replace('.', File.separatorChar) + ".class";
}

/**
 * Generate an output File object to send the translet to
 */
private File getOutputFile(String className) {
    if (_destDir != null)
        return new File(_destDir, classFileName(className));
    else
        return new File(classFileName(className));
}

/**
 * Set the destination directory for the translet.
 * The current working directory will be used by default.
 * @return true if the directory exists or could be created
 */
public boolean setDestDirectory(String dstDirName) {
    final File dir = new File(dstDirName);
    if (SecuritySupport.doesFileExist(dir) || dir.mkdirs()) {
        _destDir = dir;
        return true;
    }
    else {
        _destDir = null;
        return false;
    }
}

/**
 * Set an optional package name for the translet and auxiliary classes
 */
public void setPackageName(String packageName) {
    _packageName = Objects.requireNonNull(packageName);
    // Re-derive the qualified class name if one was already chosen
    if (_className != null) setClassName(_className);
}

/**
 * Set the name of an optional JAR-file to dump the translet and
 * auxiliary classes to
 */
public void setJarFileName(String jarFileName) {
    final String JAR_EXT = ".jar";
    if (jarFileName.endsWith(JAR_EXT))
        _jarFileName = jarFileName;
    else
        _jarFileName = jarFileName + JAR_EXT;
    _outputType = JAR_OUTPUT;
}

public String getJarFileName() {
    return _jarFileName;
}

/**
 * Set the top-level stylesheet
 */
public void setStylesheet(Stylesheet stylesheet) {
    // Only the first stylesheet set wins; later calls are ignored
    if (_stylesheet == null) _stylesheet = stylesheet;
}

/**
 * Returns the top-level stylesheet
 */
public Stylesheet getStylesheet() {
    return _stylesheet;
}

/**
 * Registers an attribute and gives it a type so that it can be mapped to
 * DOM attribute types at run-time.
 */
public int registerAttribute(QName name) {
    Integer code = _attributes.get(name.toString());
    if (code == null) {
        code = _nextGType++;
        _attributes.put(name.toString(), code);
        final String uri = name.getNamespace();
        final String local = "@"+name.getLocalPart();
        if ((uri != null) && (!uri.equals("")))
            _namesIndex.add(uri+":"+local);
        else
            _namesIndex.add(local);
        // A wildcard local part also registers the namespace itself
        if (name.getLocalPart().equals("*")) {
            registerNamespace(name.getNamespace());
        }
    }
    return code.intValue();
}

/**
 * Registers an element and gives it a type so that it can be mapped to
 * DOM element types at run-time.
 */
public int registerElement(QName name) {
    // Register element (full QName)
    Integer code = _elements.get(name.toString());
    if (code == null) {
        _elements.put(name.toString(), code = _nextGType++);
        _namesIndex.add(name.toString());
    }
    if (name.getLocalPart().equals("*")) {
        registerNamespace(name.getNamespace());
    }
    return code.intValue();
}

/**
 * Registers a namespace prefix and gives it a type so that it can be
 * mapped to DOM namespace types at run-time.
 */
public int registerNamespacePrefix(QName name) {
    Integer code = _namespacePrefixes.get(name.toString());
    if (code == null) {
        code = _nextGType++;
        _namespacePrefixes.put(name.toString(), code);
        final String uri = name.getNamespace();
        if ((uri != null) && (!uri.equals(""))) {
            // namespace::ext2:ped2 will be made empty in TypedNamespaceIterator
            _namesIndex.add("?");
        } else {
            _namesIndex.add("?"+name.getLocalPart());
        }
    }
    return code.intValue();
}

/**
 * Registers a namespace and gives it a type so that it can be mapped to
 * DOM namespace types at run-time.
 */
public int registerNamespace(String namespaceURI) {
    Integer code = _namespaces.get(namespaceURI);
    if (code == null) {
        code = _nextNSType++;
        _namespaces.put(namespaceURI,code);
        _namespaceIndex.add(namespaceURI);
    }
    return code;
}

public int nextModeSerial() {
    return _modeSerial++;
}

public int nextStylesheetSerial() {
    return _stylesheetSerial++;
}

public int nextStepPatternSerial() {
    return _stepPatternSerial++;
}

public int[] getNumberFieldIndexes() {
    return _numberFieldIndexes;
}

public int nextHelperClassSerial() {
    return _helperClassSerial++;
}

public int nextAttributeSetSerial() {
    return _attributeSetSerial++;
}

public List<String> getNamesIndex() {
    return _namesIndex;
}

public List<String> getNamespaceIndex() {
    return _namespaceIndex;
}

/**
 * Returns a unique name for every helper class needed to
 * execute a translet.
*/ public String getHelperClassName() { return getClassName() + '$' + _helperClassSerial++; } public void dumpClass(JavaClass clazz) { if (_outputType == BYTEARRAY_AND_FILE_OUTPUT) { File outFile = getOutputFile(clazz.getClassName()); String parentDir = outFile.getParent(); if (parentDir != null) { File parentFile = new File(parentDir); if (!SecuritySupport.doesFileExist(parentFile)) parentFile.mkdirs(); } } try { switch (_outputType) { case JAR_OUTPUT: _bcelClasses.add(clazz); break; case BYTEARRAY_OUTPUT: case BYTEARRAY_AND_FILE_OUTPUT: case BYTEARRAY_AND_JAR_OUTPUT: case CLASSLOADER_OUTPUT: ByteArrayOutputStream out = new ByteArrayOutputStream(2048); clazz.dump(out); _classes.add(out); if (_outputType == BYTEARRAY_AND_FILE_OUTPUT) clazz.dump(getOutputFile(clazz.getClassName())); else if (_outputType == BYTEARRAY_AND_JAR_OUTPUT) _bcelClasses.add(clazz); break; } } catch (Exception e) { e.printStackTrace(); } } /** * File separators are converted to forward slashes for ZIP files. */ private String entryName(File f) throws IOException { return f.getName().replace(File.separatorChar, '/'); } /** * Generate output JAR-file and packages */ public void outputToJar() throws IOException { // create the manifest final Manifest manifest = new Manifest(); final java.util.jar.Attributes atrs = manifest.getMainAttributes(); atrs.put(java.util.jar.Attributes.Name.MANIFEST_VERSION, "1.2"); final Map<String, Attributes> map = manifest.getEntries(); // create manifest final String now = (new Date()).toString(); final java.util.jar.Attributes.Name dateAttr = new java.util.jar.Attributes.Name("Date"); final File jarFile = new File(_destDir, _jarFileName); final JarOutputStream jos = new JarOutputStream(new FileOutputStream(jarFile), manifest); for (JavaClass clazz : _bcelClasses) { final String className = clazz.getClassName().replace('.', '/'); final java.util.jar.Attributes attr = new java.util.jar.Attributes(); attr.put(dateAttr, now); map.put(className + ".class", attr); 
jos.putNextEntry(new JarEntry(className + ".class")); final ByteArrayOutputStream out = new ByteArrayOutputStream(2048); clazz.dump(out); // dump() closes it's output stream out.writeTo(jos); } jos.close(); } /** * Turn debugging messages on/off */ public void setDebug(boolean debug) { _debug = debug; } /** * Get current debugging message setting */ public boolean debug() { return _debug; } /** * Retrieve a string representation of the character data to be stored * in the translet as a <code>char[]</code>. There may be more than * one such array required. * @param index The index of the <code>char[]</code>. Zero-based. * @return String The character data to be stored in the corresponding * <code>char[]</code>. */ public String getCharacterData(int index) { return (m_characterData.get(index)).toString(); } /** * Get the number of char[] arrays, thus far, that will be created to * store literal text in the stylesheet. */ public int getCharacterDataCount() { return (m_characterData != null) ? m_characterData.size() : 0; } /** * Add literal text to char arrays that will be used to store character * data in the stylesheet. * @param newData String data to be added to char arrays. * Pre-condition: <code>newData.length() &le; 21845</code> * @return int offset at which character data will be stored */ public int addCharacterData(String newData) { StringBuilder currData; if (m_characterData == null) { m_characterData = new ArrayList<>(); currData = new StringBuilder(); m_characterData.add(currData); } else { currData = m_characterData.get(m_characterData.size()-1); } // Character data could take up to three-times as much space when // written to the class file as UTF-8. The maximum size for a // constant is 65535/3. If we exceed that, // (We really should use some "bin packing".) 
if (newData.length() + currData.length() > 21845) { currData = new StringBuilder(); m_characterData.add(currData); } int newDataOffset = currData.length(); currData.append(newData); return newDataOffset; } }
/*
 * Copyright (c) 2011 GitHub Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
package github.downloads.uploader.ant;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.MessageFormat;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.eclipse.egit.github.core.RepositoryId;
import org.eclipse.egit.github.core.client.GitHubClient;

/**
 * Base GitHub Mojo class to be extended.
 * <p>
 * Provides java.util.logging-backed logging helpers and factory/configuration
 * methods for a {@link GitHubClient}.
 *
 * @author Kevin Sawicki (kevin@github.com)
 */
public class GitHubProjectMojo {

    // Logger named after the concrete subclass, so log output is attributed
    // to the extending mojo rather than this base class.
    private Logger log = Logger.getLogger(getClass().getName());

    /**
     * Get formatted exception message for {@link IOException}
     *
     * @param e exception to describe
     * @return message
     */
    public static String getExceptionMessage(IOException e) {
        return e.getMessage();
    }

    /**
     * Is debug logging enabled?
     *
     * @return true if enabled, false otherwise
     */
    public boolean isDebug() {
        return log.isLoggable(Level.FINE);
    }

    /**
     * Is info logging enabled?
     *
     * @return true if enabled, false otherwise
     */
    public boolean isInfo() {
        return log.isLoggable(Level.INFO);
    }

    /**
     * Log given message at debug level
     *
     * @param message
     */
    public void debug(String message) {
        log.fine(message);
    }

    /**
     * Log given message and throwable at debug level
     *
     * @param message
     * @param throwable
     */
    public void debug(String message, Throwable throwable) {
        log.log(Level.FINE, message, throwable);
    }

    /**
     * Log given message at info level
     *
     * @param message
     */
    public void info(String message) {
        log.info(message);
    }

    /**
     * Log given message and throwable at info level
     *
     * @param message
     * @param throwable
     */
    public void info(String message, Throwable throwable) {
        log.log(Level.INFO, message, throwable);
    }

    /**
     * Create and configure a client. Credential sources are tried in order:
     * username/password, OAuth2 token, then server-id credentials.
     *
     * @param host custom API host; default host is used when empty
     * @param userName
     * @param password
     * @param oauth2Token
     * @param serverId
     * @return configured client
     * @throws IllegalStateException if no credential source is configured
     */
    public GitHubClient createClient(String host, String userName,
            String password, String oauth2Token, String serverId) {
        GitHubClient client;
        if (!StringUtils.isEmpty(host)) {
            if (isDebug())
                debug("Using custom host: " + host);
            client = createClient(host);
        } else
            client = createClient();
        if (configureUsernamePassword(client, userName, password)
                || configureOAuth2Token(client, oauth2Token)
                || configureServerCredentials(client, serverId))
            return client;
        else
            throw new IllegalStateException(
                    "No authentication credentials configured");
    }

    /**
     * Create client
     * <p>
     * Subclasses can override to do any custom client configuration.
     * Accepts either a bare hostname or a full URL (host/port/protocol
     * are extracted from the latter).
     *
     * @param hostname host name or URL
     * @return non-null client
     * @throws IllegalArgumentException if the URL cannot be parsed
     */
    public GitHubClient createClient(String hostname) {
        if (!hostname.contains("://"))
            return new GitHubClient(hostname);
        try {
            URL hostUrl = new URL(hostname);
            return new GitHubClient(hostUrl.getHost(), hostUrl.getPort(),
                    hostUrl.getProtocol());
        } catch (MalformedURLException e) {
            throw new IllegalArgumentException("Could not parse host URL "
                    + hostname, e);
        }
    }

    /**
     * Create client
     * <p>
     * Subclasses can override to do any custom client configuration
     *
     * @return non-null client
     */
    public GitHubClient createClient() {
        return new GitHubClient();
    }

    /**
     * Configure credentials from configured username/password combination
     *
     * @param client
     * @param userName
     * @param password
     * @return true if configured, false otherwise
     */
    public boolean configureUsernamePassword(final GitHubClient client,
            final String userName, final String password) {
        if (StringUtils.isEmpty(userName, password))
            return false;
        if (isDebug())
            debug("Using basic authentication with username: " + userName);
        client.setCredentials(userName, password);
        return true;
    }

    /**
     * Configure credentials from configured OAuth2 token
     *
     * @param client
     * @param oauth2Token
     * @return true if configured, false otherwise
     */
    public boolean configureOAuth2Token(final GitHubClient client,
            final String oauth2Token) {
        if (StringUtils.isEmpty(oauth2Token))
            return false;
        if (isDebug())
            debug("Using OAuth2 access token authentication");
        client.setOAuth2Token(oauth2Token);
        return true;
    }

    /**
     * Configure client with credentials from given server id
     *
     * @param client
     * @param serverId
     * @return true if configured, false otherwise
     */
    public boolean configureServerCredentials(final GitHubClient client,
            final String serverId) {
        if (StringUtils.isEmpty(serverId))
            return false;

        // NOTE(review): serverUsername/serverPassword are never assigned, so
        // both credential branches below are unreachable and this method
        // always falls through to the "missing credentials" path. Presumably
        // these were meant to be looked up from the server settings by
        // serverId — confirm against the original Maven plugin this was
        // ported from.
        String serverUsername = null;
        String serverPassword = null;

        if (!StringUtils.isEmpty(serverUsername, serverPassword)) {
            if (isDebug())
                debug("Using basic authentication with username: "
                        + serverUsername);
            client.setCredentials(serverUsername, serverPassword);
            return true;
        }

        // A server password without a username is assumed to be an OAuth2 token
        if (!StringUtils.isEmpty(serverPassword)) {
            if (isDebug())
                debug("Using OAuth2 access token authentication");
            client.setOAuth2Token(serverPassword);
            return true;
        }

        if (isDebug())
            debug(MessageFormat.format(
                    "Server ''{0}'' is missing username/password credentials",
                    serverId));
        return false;
    }

    /**
     * Get repository, failing fast when owner/name are not configured.
     *
     * @param owner
     * @param name
     * @return non-null repository id
     * @throws IllegalArgumentException if no repository is configured
     */
    public RepositoryId getRepository(final String owner, final String name) {
        RepositoryId repository = RepositoryUtils.getRepository(owner, name);
        if (repository == null)
            throw new IllegalArgumentException(
                    "No GitHub repository (owner and name) configured");
        if (isDebug())
            debug(MessageFormat.format("Using GitHub repository {0}",
                    repository.generateId()));
        return repository;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.polygene.ide.plugin.idea.concerns.common;

import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.search.GlobalSearchScope;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

import static com.intellij.codeInsight.AnnotationUtil.findAnnotation;
import static java.util.Collections.emptyList;
import static org.apache.polygene.ide.plugin.idea.common.psi.PsiAnnotationUtil.getAnnotationDefaultParameterValue;
import static org.apache.polygene.ide.plugin.idea.common.psi.PsiAnnotationUtil.getClassReference;
import static org.apache.polygene.ide.plugin.idea.common.psi.PsiClassUtil.getPSIClass;
import static org.apache.polygene.ide.plugin.idea.common.psi.search.GlobalSearchScopeUtil.determineSearchScope;
import static org.apache.polygene.ide.plugin.idea.concerns.common.PolygeneConcernConstants.*;

/**
 * PSI helpers for working with Polygene {@code @Concerns} annotations:
 * resolving the concern base classes, reading/adding annotation values,
 * and classifying classes as (generic) concerns.
 *
 * @author edward.yakop@gmail.com
 * @since 0.1
 */
public final class PolygeneConcernUtil
{
    /**
     * @param searchContext Search context.
     * @return {@code GenericConcern} psi class if found, {@code null} otherwise.
     * @since 0.1
     */
    @Nullable
    public static PsiClass getGenericConcernClass( @NotNull PsiElement searchContext )
    {
        Project project = searchContext.getProject();
        GlobalSearchScope searchScope = determineSearchScope( searchContext );
        return getGenericConcernClass( project, searchScope );
    }

    /**
     * @param project project.
     * @param scope   search scope.
     * @return {@code GenericConcern} psi class if found, {@code null} otherwise.
     * @since 0.1
     */
    @Nullable
    public static PsiClass getGenericConcernClass( @NotNull Project project,
                                                   @Nullable GlobalSearchScope scope )
    {
        JavaPsiFacade psiFacade = JavaPsiFacade.getInstance( project );
        return scope != null ? psiFacade.findClass( QUALIFIED_NAME_GENERIC_CONCERN, scope ) : null;
    }

    /**
     * Resolve the {@code ConcernOf} base class using the scope derived
     * from the given search context.
     *
     * @param searchContext Search context.
     * @return {@code ConcernOf} psi class if found, {@code null} otherwise.
     */
    @Nullable
    public static PsiClass getConcernOfClass( @NotNull PsiElement searchContext )
    {
        Project project = searchContext.getProject();
        GlobalSearchScope searchScope = determineSearchScope( searchContext );
        return getConcernOfClass( project, searchScope );
    }

    /**
     * Resolve the {@code ConcernOf} base class within the given scope.
     *
     * @param project project.
     * @param scope   search scope; {@code null} yields {@code null}.
     * @return {@code ConcernOf} psi class if found, {@code null} otherwise.
     */
    @Nullable
    public static PsiClass getConcernOfClass( @NotNull Project project,
                                              @Nullable GlobalSearchScope scope )
    {
        JavaPsiFacade psiFacade = JavaPsiFacade.getInstance( project );
        return scope != null ? psiFacade.findClass( QUALIFIED_NAME_CONCERN_OF, scope ) : null;
    }

    /**
     * Find the {@code @Concerns} annotation on the class that owns (or is)
     * the given element.
     *
     * @param element element to start from.
     * @return the annotation, or {@code null} when absent.
     */
    @Nullable
    public static PsiAnnotation getConcernsAnnotation( @NotNull PsiElement element )
    {
        PsiClass psiClass = getPSIClass( element );
        return findAnnotation( psiClass, QUALIFIED_NAME_CONCERNS );
    }

    /**
     * Add {@code concernClassToAdd} to the owner's {@code @Concerns}
     * annotation, creating the annotation when missing and replacing it
     * when present. If the concern class is already listed, the existing
     * annotation is returned unchanged.
     *
     * @param modifierListOwner  annotation owner (typically a class).
     * @param concernClassToAdd  concern class to register.
     * @return the resulting (possibly pre-existing) annotation.
     */
    @NotNull
    public static PsiAnnotation addOrReplaceConcernAnnotation( @NotNull PsiModifierListOwner modifierListOwner,
                                                               @NotNull PsiClass concernClassToAdd )
    {
        Project project = modifierListOwner.getProject();
        JavaPsiFacade psiFacade = JavaPsiFacade.getInstance( project );
        PsiElementFactory factory = psiFacade.getElementFactory();

        PsiAnnotation existingConcernsAnnotation = findAnnotation( modifierListOwner, QUALIFIED_NAME_CONCERNS );
        boolean isReplace = false;
        PsiAnnotation newConcernsAnnotation;
        if( existingConcernsAnnotation != null )
        {
            // Check duplicate: bail out early when the concern is already
            // referenced by the existing annotation.
            List<PsiAnnotationMemberValue> concernsValues = getConcernsAnnotationValue( existingConcernsAnnotation );
            for( PsiAnnotationMemberValue concernValue : concernsValues )
            {
                PsiJavaCodeReferenceElement concernClassReference = getConcernClassReference( concernValue );
                if( concernClassReference == null )
                {
                    continue;
                }

                PsiElement concernClass = concernClassReference.resolve();
                if( concernClassToAdd.equals( concernClass ) )
                {
                    return existingConcernsAnnotation;
                }
            }

            isReplace = true;
        }

        // Build the new annotation text from the old values plus the new class.
        String concernAnnotationText = createConcernAnnotationText( existingConcernsAnnotation, concernClassToAdd );
        newConcernsAnnotation = factory.createAnnotationFromText( concernAnnotationText, modifierListOwner );

        if( isReplace )
        {
            // Replace @Concerns instead
            existingConcernsAnnotation.replace( newConcernsAnnotation );
        }
        else
        {
            // @Concerns doesn't exists, add it as first child
            // NOTE(review): getModifierList() is @Nullable per the PSI API —
            // presumably callers guarantee an owner with a modifier list;
            // confirm, otherwise this can NPE.
            PsiModifierList modifierList = modifierListOwner.getModifierList();
            modifierList.addBefore( newConcernsAnnotation, modifierList.getFirstChild() );
        }

        // Shorten all class references if possible
        JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance( project );
        codeStyleManager.shortenClassReferences( newConcernsAnnotation );

        return newConcernsAnnotation;
    }

    /**
     * Compose the source text of a {@code @Concerns} annotation containing
     * all values of {@code concernAnnotationBase} (if any) followed by
     * {@code concernClassToAdd}.
     */
    @NotNull
    private static String createConcernAnnotationText( @Nullable PsiAnnotation concernAnnotationBase,
                                                       @NotNull PsiClass concernClassToAdd )
    {
        StringBuilder annotationTextBuilder = new StringBuilder();
        annotationTextBuilder.append( "@" ).append( QUALIFIED_NAME_CONCERNS ).append( "( {" );
        List<PsiAnnotationMemberValue> concernsAnnotationValue = getConcernsAnnotationValue( concernAnnotationBase );
        for( PsiAnnotationMemberValue concernValue : concernsAnnotationValue )
        {
            annotationTextBuilder.append( concernValue.getText() ).append( ", " );
        }
        annotationTextBuilder.append( concernClassToAdd.getQualifiedName() ).append( ".class" );
        annotationTextBuilder.append( "} )" );
        return annotationTextBuilder.toString();
    }

    /**
     * Extract the default-parameter values of a {@code @Concerns}
     * annotation; empty when the annotation is {@code null} or is not
     * actually {@code @Concerns}.
     */
    @NotNull
    public static List<PsiAnnotationMemberValue> getConcernsAnnotationValue( @Nullable PsiAnnotation concernsAnnotation )
    {
        if( concernsAnnotation == null )
        {
            return emptyList();
        }

        String concernsQualifiedName = concernsAnnotation.getQualifiedName();
        if( !QUALIFIED_NAME_CONCERNS.equals( concernsQualifiedName ) )
        {
            return emptyList();
        }

        return getAnnotationDefaultParameterValue( concernsAnnotation );
    }

    /**
     * Resolve the class reference inside one annotation member value.
     */
    @Nullable
    public static PsiJavaCodeReferenceElement getConcernClassReference( @NotNull PsiAnnotationMemberValue value )
    {
        return getClassReference( value );
    }

    /**
     * @param psiClass psi class to check.
     * @return {@code true} if {@code psiClass} extends {@code ConcernOf}, {@code false} if {@code psiClass} does
     *         not extends {@code ConcernOf} or {@code ConcernOf} is not found.
     * @since 0.1
     */
    public static boolean isAConcern( @NotNull PsiClass psiClass )
    {
        if( psiClass.isInterface() )
        {
            return false;
        }

        PsiClass concernOfClass = getConcernOfClass( psiClass );
        return concernOfClass != null && psiClass.isInheritor( concernOfClass, true );
    }

    /**
     * @param psiClass psi class to check.
     * @return {@code true} if {@code psiClass} extends {@code GenericConcern}, {@code false} if {@code psiClass} does
     *         not extends {@code GenericConcern} or {@code GenericConcern} is not found.
     * @since 0.1
     */
    public static boolean isAGenericConcern( @NotNull PsiClass psiClass )
    {
        if( psiClass.isInterface() )
        {
            return false;
        }

        PsiClass genericConcern = getGenericConcernClass( psiClass );
        return genericConcern != null && psiClass.isInheritor( genericConcern, true );
    }

    // Utility class: no instances.
    private PolygeneConcernUtil()
    {
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.spi.descriptors;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import javax.jcr.Value;
import javax.jcr.ValueFactory;

import org.apache.jackrabbit.commons.SimpleValueFactory;
import org.apache.jackrabbit.oak.api.Descriptors;
import org.apache.jackrabbit.oak.spi.whiteboard.Tracker;
import org.junit.Test;

/**
 * Tests for {@code AggregatingDescriptors}: aggregation over zero, one and
 * several tracked {@link Descriptors} services, including services added
 * after the aggregator was created.
 */
public class AggregatingDescriptorsTest {

    /**
     * Immutable snapshot of one descriptor key: either a single value or a
     * value array, plus the standard-descriptor flag.
     */
    static class DescriptorEntry {

        private final String key;
        private final Value value;
        private final Value[] values;
        private final boolean singleValued;
        private final boolean standard;

        DescriptorEntry(String key, Value value, Value[] values, boolean singleValued, boolean standard) {
            this.key = key;
            this.value = value;
            this.values = values;
            this.singleValued = singleValued;
            this.standard = standard;
        }

        /** Snapshot the entry for {@code key} out of the given descriptors. */
        static DescriptorEntry fromKey(String key, Descriptors descriptors) {
            if (descriptors.isSingleValueDescriptor(key)) {
                return newSingleValuedEntry(key, descriptors.getValue(key),
                        descriptors.isStandardDescriptor(key));
            } else {
                return newMultiValuedEntry(key, descriptors.getValues(key),
                        descriptors.isStandardDescriptor(key));
            }
        }

        private static DescriptorEntry newMultiValuedEntry(String key,
                Value[] values, boolean standardDescriptor) {
            return new DescriptorEntry(key, null, values, false, standardDescriptor);
        }

        private static DescriptorEntry newSingleValuedEntry(String key,
                Value value, boolean standardDescriptor) {
            return new DescriptorEntry(key, value, null, true, standardDescriptor);
        }
    }

    /**
     * Minimal in-memory {@link Tracker}; services list stays {@code null}
     * until set or first added to, which lets tests exercise the
     * "no services yet" path.
     */
    class MyTracker<T> implements Tracker<T> {

        List<T> services;

        public void setServices(List<T> services) {
            this.services = services;
        }

        public void addService(T service) {
            if (services == null) {
                services = new LinkedList<T>();
            }
            services.add(service);
        }

        @Override
        public List<T> getServices() {
            return services;
        }

        @Override
        public void stop() {
            // no-op
        }
    }

    private MyTracker<Descriptors> createTracker() {
        return new MyTracker<Descriptors>();
    }

    /** Assert the aggregator exposes no descriptors at all. */
    private void assertEmpty(AggregatingDescriptors aggregator) {
        assertFalse(aggregator.isSingleValueDescriptor("foo"));
        assertFalse(aggregator.isStandardDescriptor("foo"));
        assertNull(aggregator.getValue("foo"));
        assertNull(aggregator.getValues("foo"));
        String[] keys = aggregator.getKeys();
        assertNotNull(keys);
        assertEquals(0, keys.length);
    }

    @Test
    public void testNullServices() throws Exception {
        try {
            new AggregatingDescriptors(null);
            fail("should complain");
        } catch (IllegalArgumentException iae) {
            // ok
        }
        final MyTracker<Descriptors> tracker = createTracker();
        AggregatingDescriptors aggregator = new AggregatingDescriptors(tracker);
        assertEmpty(aggregator);
    }

    @Test
    public void testEmptyServices() throws Exception {
        final MyTracker<Descriptors> tracker = createTracker();
        AggregatingDescriptors aggregator = new AggregatingDescriptors(tracker);
        tracker.setServices(new LinkedList<Descriptors>());
        assertEmpty(aggregator);
    }

    /**
     * Check that the aggregator exposes exactly the union of the given
     * descriptors. The {@code descriptors} varargs are merged into an
     * expectation map where later arguments overwrite earlier ones, so
     * callers must pass them in "loser first, winner last" order.
     */
    private void assertMatches(AggregatingDescriptors aggregator,
            int expectedEntryCount, GenericDescriptors... descriptors) {
        // prepare the expectedEntries map
        final Map<String, DescriptorEntry> expectedEntries =
                new HashMap<String, AggregatingDescriptorsTest.DescriptorEntry>();
        for (int i = 0; i < descriptors.length; i++) {
            final String[] keys = descriptors[i].getKeys();
            for (int j = 0; j < keys.length; j++) {
                final DescriptorEntry entry = DescriptorEntry.fromKey(keys[j], descriptors[i]);
                // implements overwriting: eg descriptors[1] values overwrite descriptors[0] values
                // (in terms of the AggregatingDescriptors it is the opposite: the service
                //  that is enlisted first always wins - with the idea that later added
                //  services should not overwrite earlier ones - lowest startorder wins)
                expectedEntries.put(keys[j], entry);
            }
        }
        assertEquals(expectedEntryCount, expectedEntries.size());

        // now go through the resulting expectedEntries and match them
        // with the aggregator one
        final Collection<DescriptorEntry> entries = expectedEntries.values();
        for (Iterator<DescriptorEntry> it = entries.iterator(); it.hasNext();) {
            DescriptorEntry entry = it.next();
            assertEquals(entry.standard, aggregator.isStandardDescriptor(entry.key));
            if (entry.singleValued) {
                assertTrue(aggregator.isSingleValueDescriptor(entry.key));
                Value expectedValue = entry.value;
                Value actualValue = aggregator.getValue(entry.key);
                assertTrue(expectedValue.equals(actualValue));
            } else {
                assertFalse(aggregator.isSingleValueDescriptor(entry.key));
                Value[] expectedValues = entry.values;
                Value[] actualValues = aggregator.getValues(entry.key);
                assertEquals(expectedValues.length, actualValues.length);
                for(int i=0; i<expectedValues.length; i++) {
                    assertEquals(expectedValues[i], actualValues[i]);
                }
            }
        }
        assertEquals(expectedEntryCount, aggregator.getKeys().length);
    }

    @Test
    public void testInitialDescriptors() throws Exception {
        final ValueFactory valueFactory = new SimpleValueFactory();
        final MyTracker<Descriptors> tracker = createTracker();
        final GenericDescriptors input = new GenericDescriptors();
        input.put("a", valueFactory.createValue("b"), true, false);
        input.put("b", valueFactory.createValue("c"), true, true);
        tracker.addService(input);
        AggregatingDescriptors aggregator = new AggregatingDescriptors(tracker);
        assertMatches(aggregator, 2, input);
    }

    @Test
    public void testLaterAddedDescriptors() throws Exception {
        final ValueFactory valueFactory = new SimpleValueFactory();
        final MyTracker<Descriptors> tracker = createTracker();
        AggregatingDescriptors aggregator = new AggregatingDescriptors(tracker);
        assertMatches(aggregator, 0);
        final GenericDescriptors input1 = new GenericDescriptors();
        input1.put("a", valueFactory.createValue("b"), true, false);
        input1.put("b", valueFactory.createValue("c"), true, true);
        tracker.addService(input1);
        assertMatches(aggregator, 2, input1);
        final GenericDescriptors input2 = new GenericDescriptors();
        input2.put("b", valueFactory.createValue("c2"), true, true);
        input2.put("c", valueFactory.createValue("d"), true, true);
        tracker.addService(input2);
        // input1 was registered first so it wins in the aggregator; hence
        // pass input2 first (loser) and input1 last (winner).
        assertMatches(aggregator, 3, input2, input1);
    }
}
package net.javachallenge.players.others;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.javachallenge.api.ComputerPlayer;
import net.javachallenge.api.Game;
import net.javachallenge.api.GameSetting;
import net.javachallenge.api.Material;
import net.javachallenge.api.PlayerTrade;
import net.javachallenge.api.Squad;
import net.javachallenge.api.TradeType;
import net.javachallenge.api.TrianglePoint;
import net.javachallenge.api.Vein;
import net.javachallenge.api.command.Command;
import net.javachallenge.api.command.Commands;

/**
 * A computer player for the Java Challenge game.
 * <p>
 * Per-turn strategy: (1) launch robot squads at the most attractive enemy/neutral
 * veins, (2) upgrade owned veins, (3) trade materials with other players. All
 * resource bookkeeping ({@code sentRobots}, {@code launchedRobots},
 * {@code usedMaterials}, {@code usedMoney}) is rebuilt from scratch each turn by
 * {@link #initialize(Game)} and then mutated as commands are queued, so that later
 * decisions in the same turn see the resources already committed.
 */
public class JoeJack extends ComputerPlayer {
    // Snapshot of all veins on the field for the current turn.
    private Map<TrianglePoint, Vein> allVeins;
    // Net robots already in flight toward each destination (ours positive, enemy negative).
    private Map<TrianglePoint, Integer> sentRobots;
    // Robots launched FROM each of our veins during this turn's planning.
    private Map<TrianglePoint, Integer> launchedRobots;
    // Materials committed to upgrades during this turn's planning.
    private Map<Material, Integer> usedMaterials;
    // Money committed during this turn's planning.
    private int usedMoney;
    private Game game;
    // Highest achievable vein rank; no upgrade is attempted at this rank.
    private final int MaxRank = 3;

    /**
     * Resets all per-turn state and seeds {@code sentRobots} from squads already
     * in flight. Must be called at the start of every callback that plans actions.
     */
    private void initialize(Game game) {
        this.game = game;
        allVeins = game.getField().getVeinMap();
        sentRobots = new HashMap<TrianglePoint, Integer>();
        launchedRobots = new HashMap<TrianglePoint, Integer>();
        usedMaterials = new HashMap<Material, Integer>();
        usedMoney = 0;
        for (Squad squad : game.getField().getSquads()) {
            TrianglePoint destination = squad.getDestinationLocation();
            // Our in-flight squads count positively toward the destination,
            // enemy squads negatively (they will reinforce the defender).
            increment(sentRobots, destination,
                    squad.getRobot() * (squad.getOwnerId() == game.getMyPlayer().getId() ? 1 : -1));
        }
    }

    @Override
    public String getName() {
        return "JoeJack";
    }

    // First vein picked during the selection phase; later picks are kept within
    // map-size distance of it so the starting veins cluster together.
    private Vein firstSelectedVein;

    /**
     * Initial vein-selection phase: pick the most productive neutral vein that is
     * reasonably close to our first pick (or any productive neutral vein for the
     * first pick itself).
     */
    @Override
    public TrianglePoint selectVein(Game game) {
        initialize(game);
        List<TrianglePoint> all = new ArrayList<TrianglePoint>();
        all.addAll(allVeins.keySet());
        // Highest material+robot productivity first.
        Collections.sort(all, veinValueComparator);
        TrianglePoint best = null;
        for (TrianglePoint point : all) {
            if (allVeins.get(point).getOwnerId() == game.getNeutralPlayerId()
                    && (firstSelectedVein == null || allVeins.get(point).getShortestPath(firstSelectedVein)
                            .size() < game.getSetting().getMapSize())) {
                best = point;
                break;
            }
        }
        if (firstSelectedVein == null && best != null) {
            firstSelectedVein = allVeins.get(best);
        }
        return best;
    }

    /**
     * Queues a launch command and records the committed robots on both the
     * destination (sentRobots) and the source (launchedRobots) ledgers.
     */
    private void launch(List<Command> commands, int robotsToLaunch, TrianglePoint from, TrianglePoint to) {
        addCommand(commands, Commands.launch(robotsToLaunch, from, to));
        increment(sentRobots, to, robotsToLaunch);
        increment(launchedRobots, from, robotsToLaunch);
    }

    /**
     * Main per-turn planner: launches attacks, upgrades owned veins, then trades.
     *
     * @return the full list of commands to execute this turn
     */
    @Override
    public List<Command> selectActions(Game game) {
        // initialize
        initialize(game);
        List<Command> commands = new ArrayList<Command>();
        // Partition the field into our veins and everything else (enemy + neutral).
        ArrayList<TrianglePoint> myVeinPoints = new ArrayList<TrianglePoint>();
        ArrayList<TrianglePoint> otherVeinPoints = new ArrayList<TrianglePoint>();
        for (TrianglePoint point : allVeins.keySet()) {
            if (allVeins.get(point).getOwnerId() == game.getMyPlayer().getId()) {
                myVeinPoints.add(point);
            } else {
                otherVeinPoints.add(point);
            }
        }
        // launch
        // Process rear veins first (frontVeinComparator sorts front-first, then reversed).
        Collections.sort(myVeinPoints, frontVeinComparator);
        Collections.reverse(myVeinPoints);
        for (TrianglePoint myPoint : myVeinPoints) {
            TargetVeinComparator targetVeinComparator = new TargetVeinComparator();
            targetVeinComparator.setSource(myPoint);
            Collections.sort(otherVeinPoints, targetVeinComparator);
            // Only consider the top quarter of targets (best evaluation first).
            for (TrianglePoint otherPoint : otherVeinPoints.subList(0, otherVeinPoints.size() / 4)) {
                // +1 so the attack wins rather than ties; subtract robots already en route.
                int robotsToLaunch = getRequiredRobots(myPoint, otherPoint) + 1 - getValue(sentRobots, otherPoint);
                if (robotsToLaunch > 0
                        && robotsToLaunch <= getRemainingRobots(myPoint) - getValue(launchedRobots, myPoint)) {
                    // NOTE(review): getRemainingRobots already subtracts launchedRobots;
                    // subtracting it again here looks doubled — confirm intent.
                    launch(commands, robotsToLaunch, myPoint, otherPoint);
                }
            }
            // Dump any surplus above the garrison threshold at the best-ranked targets.
            final int NumberOfRobotsToHold = 100;
            for (TrianglePoint otherPoint : otherVeinPoints) {
                int robotsToLaunch = getRemainingRobots(myPoint) - NumberOfRobotsToHold;
                if (robotsToLaunch <= 0) {
                    break;
                } else {
                    launch(commands, robotsToLaunch, myPoint, otherPoint);
                }
            }
        }
        // upgrade
        // Most productive veins get first claim on materials.
        Collections.sort(myVeinPoints, veinValueComparator);
        for (TrianglePoint point : myVeinPoints) {
            GameSetting setting = game.getSetting();
            int currentMaterialRank = allVeins.get(point).getMaterialRank();
            // Cost table for the next material-rank upgrade (1->2 or 2->3).
            Map<Material, Integer> requiredMaterialsForMaterial = new HashMap<Material, Integer>();
            for (Material material : Material.values()) {
                int requiredAmount;
                if (currentMaterialRank == 1) {
                    requiredAmount = setting.getMaterialsForUpgradingMaterialRankFrom1To2(material);
                } else {
                    requiredAmount = setting.getMaterialsForUpgradingMaterialRankFrom2To3(material);
                }
                requiredMaterialsForMaterial.put(material, requiredAmount);
            }
            upgrade(requiredMaterialsForMaterial, Commands.upgradeMaterial(point), point, currentMaterialRank, commands);
            int currentRobotRank = allVeins.get(point).getRobotRank();
            // Cost table for the next robot-rank upgrade (1->2 or 2->3).
            Map<Material, Integer> requiredMaterialsForRobot = new HashMap<Material, Integer>();
            for (Material material : Material.values()) {
                int requiredAmount;
                if (currentRobotRank == 1) {
                    requiredAmount = setting.getMaterialsForUpgradingRobotRankFrom1To2(material);
                } else {
                    requiredAmount = setting.getMaterialsForUpgradingRobotRankFrom2To3(material);
                }
                requiredMaterialsForRobot.put(material, requiredAmount);
            }
            upgrade(requiredMaterialsForRobot, Commands.upgradeRobot(point), point, currentRobotRank, commands);
        }
        // trade : prepare
        final int MaxTradeAmount = 100;
        final int MaterialAmountToHold = 300;
        // Stock below the hold threshold -> buy; above it -> sell (exactly equal: neither).
        List<Material> materialsToBuy = new ArrayList<Material>();
        List<Material> materialsToSell = new ArrayList<Material>();
        for (Material material : Material.values()) {
            if (getRemainingMaterials(material) < MaterialAmountToHold) {
                materialsToBuy.add(material);
            } else if (getRemainingMaterials(material) > MaterialAmountToHold) {
                materialsToSell.add(material);
            }
        }
        // trade : accept their trades
        for (PlayerTrade trade : game.getPlayerTrades()) {
            Material material = trade.getMaterial();
            // Buy from offers priced below our reference price; sell into demands above it.
            if (trade.getTradeType() == TradeType.Offer
                    && trade.getPricePerOneMaterial() < getDefaultPrice(material)
                    && materialsToBuy.contains(material)) {
                addCommand(commands, Commands.buyFromPlayerTrade(trade,
                        Math.min(trade.getAmount(),
                                Math.min(getMaximumAmountFromRemainingMoney(trade.getPricePerOneMaterial()),
                                        MaxTradeAmount))));
            } else if (trade.getTradeType() == TradeType.Demand
                    && trade.getPricePerOneMaterial() > getDefaultPrice(material)
                    && materialsToSell.contains(material)) {
                addCommand(
                        commands,
                        Commands.sellToPlayerTrade(
                                trade,
                                Math.min(trade.getAmount(),
                                        Math.min(getRemainingMaterials(material), MaxTradeAmount))));
            }
        }
        // trade : send our trade requests
        for (Material material : materialsToBuy) {
            int amount = Math.min(getRemainingMoney() / getDefaultPrice(material), MaxTradeAmount);
            if (amount > 0) {
                addCommand(commands, Commands.demand(material, amount, getDefaultPrice(material)));
            }
        }
        for (Material material : materialsToSell) {
            int amount = Math.min(getRemainingMaterials(material), MaxTradeAmount);
            if (amount > 0) {
                addCommand(commands, Commands.offer(material, amount, getDefaultPrice(material)));
            }
        }
        return commands;
    }

    /** Reference price: half the alien buy price for the material. */
    private int getDefaultPrice(Material material) {
        return game.getAlienTrade().getBuyPriceOf(material) / 2;
    }

    /** How many units we can afford at the given unit price with uncommitted money. */
    private int getMaximumAmountFromRemainingMoney(int price) {
        return getRemainingMoney() / price;
    }

    /**
     * Queues the given upgrade command if the vein is below max rank and we still
     * hold (uncommitted) all required materials; commits those materials on success.
     */
    private void upgrade(Map<Material, Integer> requiredMaterials, Command upgradeCommand, TrianglePoint point,
            int currentRank, List<Command> commands) {
        if (currentRank < MaxRank) {
            boolean hasEnoughMaterials = true;
            for (Material material : requiredMaterials.keySet()) {
                if (getRemainingMaterials(material) < requiredMaterials.get(material)) {
                    hasEnoughMaterials = false;
                    break;
                }
            }
            if (hasEnoughMaterials) {
                addCommand(commands, upgradeCommand);
                // Commit the materials so later upgrades this turn cannot double-spend.
                for (Material material : requiredMaterials.keySet()) {
                    increment(usedMaterials, material, requiredMaterials.get(material));
                }
            }
        }
    }

    /**
     * Appends a command and checkpoints the in-progress list via the framework,
     * so a timeout mid-turn still submits everything queued so far.
     */
    private void addCommand(List<Command> commands, Command command) {
        commands.add(command);
        this.saveTemporalCommands(commands);
    }

    /** Adds {@code value} to the map entry for {@code key}, treating absent as 0. */
    private <K> void increment(Map<K, Integer> map, K key, Integer value) {
        map.put(key, (map.containsKey(key) ? map.get(key) : 0) + value);
    }

    /** Null-safe map lookup: absent keys read as 0. */
    private <K> Integer getValue(Map<K, Integer> map, K key) {
        if (map.containsKey(key)) {
            return map.get(key);
        } else {
            return 0;
        }
    }

    /**
     * Scores attacking {@code to} from {@code from}; higher is better. Unwinnable
     * or out-of-range targets get large negative sentinels so they sort last.
     */
    private int evaluateVein(TrianglePoint from, TrianglePoint to) {
        if (getRequiredRobots(from, to) <= 0) {
            return Integer.MIN_VALUE / 10;
        }
        int distance = from.getShortestPath(to).size();
        if (distance > game.getSetting().getMapSize()) {
            // NOTE(review): evaluates as (MIN_VALUE/10)*2 by precedence — an even
            // lower sentinel than the one above; confirm that is the intent.
            return Integer.MIN_VALUE / 10*2;
        }
        int evaluation = 0;
        evaluation -= getRequiredRobots(from, to);
        evaluation += allVeins.get(from).getNumberOfRobots();
        // Distance is penalized harder against neutral veins (flat 5) than against
        // enemy veins (their robot productivity).
        evaluation -= distance
                * (allVeins.get(to).getOwnerId() == game.getNeutralPlayerId() ? 5 : allVeins.get(to)
                        .getCurrentRobotProductivity());
        return evaluation;
    }

    /**
     * Robots needed to take {@code to} from {@code from}: its current garrison plus
     * what it will produce while our squad travels (neutral veins produce nothing).
     */
    private int getRequiredRobots(TrianglePoint from, TrianglePoint to) {
        Vein targetVein = allVeins.get(to);
        int productivity = (targetVein.getOwnerId() == game.getNeutralPlayerId() ? 0 : targetVein
                .getCurrentRobotProductivity());
        int robots = targetVein.getNumberOfRobots() + from.getShortestPath(to).size() * productivity;
        return robots;
    }

    /** Material stock minus what this turn's planning already committed. */
    private int getRemainingMaterials(Material material) {
        return game.getMyPlayer().getMaterial(material) - getValue(usedMaterials, material);
    }

    /** Money minus what this turn's planning already committed. */
    private int getRemainingMoney() {
        return game.getMyPlayer().getMoney() - usedMoney;
    }

    /** Robots at the vein minus those already launched from it this turn. */
    private int getRemainingRobots(TrianglePoint vein) {
        return allVeins.get(vein).getNumberOfRobots() - getValue(launchedRobots, vein);
    }

    /**
     * Orders veins by combined material+robot productivity, highest first.
     * NOTE(review): comparators here compare by int subtraction; safe only while
     * scores stay far from Integer.MIN/MAX_VALUE — the sentinel values in
     * evaluateVein/frontVeinComparator keep this within range, but
     * Integer.compare would be safer.
     */
    public Comparator<TrianglePoint> veinValueComparator = new Comparator<TrianglePoint>() {
        private int getValue(Vein vein) {
            return vein.getCurrentMaterialProductivity() + vein.getCurrentRobotProductivity();
        }

        public int compare(TrianglePoint left, TrianglePoint right) {
            return getValue(allVeins.get(right)) - getValue(allVeins.get(left));
        }
    };

    /**
     * Orders candidate targets by attack desirability from a fixed source vein,
     * best first (see evaluateVein). setSource must be called before sorting.
     */
    public class TargetVeinComparator implements Comparator<TrianglePoint> {
        private TrianglePoint source;

        public void setSource(TrianglePoint source) {
            this.source = source;
        }

        public int compare(TrianglePoint left, TrianglePoint right) {
            return evaluateVein(source, right) - evaluateVein(source, left);
        }
    };

    /**
     * Orders our veins "front line" first: closer to non-friendly veins scores
     * higher; veins with fewer than two friendly neighbours are pushed to the
     * back via a large negative sentinel.
     */
    public Comparator<TrianglePoint> frontVeinComparator = new Comparator<TrianglePoint>() {
        private int getValue(TrianglePoint from) {
            int value = 0;
            int nearFriendlies = 0;
            for (TrianglePoint to : allVeins.keySet()) {
                if (allVeins.get(to).getOwnerId() != game.getMyPlayer().getId()) {
                    value -= from.getShortestPath(to).size();
                } else {
                    value -= game.getSetting().getMapSize() * 2;
                    nearFriendlies++;
                }
            }
            if (nearFriendlies < 2) {
                return Integer.MIN_VALUE / 10;
            } else {
                return value;
            }
        }

        public int compare(TrianglePoint left, TrianglePoint right) {
            return getValue(right) - getValue(left);
        }
    };
}
package org.c99.wear_imessage;

import android.content.ComponentName;
import android.content.ContentUris;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.provider.BaseColumns;
import android.provider.ContactsContract;
import android.support.v4.app.NotificationManagerCompat;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.support.v7.app.NotificationCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.FileNotFoundException;
import java.util.ArrayList;

/**
 * Quick-reply screen: shows a conversation transcript for a contact (or a
 * contact picker when sharing an attachment) and sends replies through
 * {@code RemoteInputService}. Conversation history is read from the "data"
 * SharedPreferences file, where it is stored as a JSON object keyed by
 * "service:handle".
 */
public class QuickReplyActivity extends ActionBarActivity {
    // Conversation identity resolved from the launching Intent.
    String handle;
    String service;
    String protocol;
    String name = "";
    // Optional media attachment passed via EXTRA_STREAM (share flow).
    Uri attachment;

    /**
     * List adapter backed by the JSON message array persisted in SharedPreferences.
     * Each entry is either a JSON object with "type"/"msg" fields or (legacy
     * format) a bare string, rendered as a received message.
     */
    private class MessagesAdapter extends BaseAdapter {
        private class ViewHolder {
            TextView sent;
            TextView received;
        }

        private JSONArray msgs = new JSONArray();

        /**
         * Reloads the message array for the given conversation from the "data"
         * prefs file and refreshes the list. Malformed JSON resets to empty.
         */
        public void loadMessages(String service, String handle) {
            JSONObject conversations;
            try {
                conversations = new JSONObject(getSharedPreferences("data", 0).getString("conversations", "{}"));
            } catch (JSONException e) {
                conversations = new JSONObject();
            }

            try {
                JSONObject conversation;
                String key = service + ":" + handle;
                if (conversations.has(key)) {
                    conversation = conversations.getJSONObject(key);
                    msgs = conversation.getJSONArray("msgs");
                }
            } catch (JSONException e) {
                msgs = new JSONArray();
            }
            notifyDataSetChanged();
        }

        @Override
        public int getCount() {
            return msgs.length();
        }

        @Override
        public Object getItem(int i) {
            try {
                return msgs.get(i);
            } catch (JSONException e) {
                e.printStackTrace();
            }
            return null;
        }

        @Override
        public long getItemId(int i) {
            return i;
        }

        /**
         * Standard ViewHolder row recycling; toggles between the "sent" and
         * "received" TextViews depending on the message direction.
         */
        @Override
        public View getView(int i, View view, ViewGroup viewGroup) {
            View row = view;
            ViewHolder holder;

            if (row == null) {
                LayoutInflater inflater = getLayoutInflater();
                row = inflater.inflate(R.layout.row_msg, viewGroup, false);
                holder = new ViewHolder();
                holder.received = (TextView)row.findViewById(R.id.recv);
                holder.sent = (TextView)row.findViewById(R.id.sent);
                row.setTag(holder);
            } else {
                holder = (ViewHolder) row.getTag();
            }

            try {
                JSONObject msg = msgs.getJSONObject(i);
                if(msg.has("type") && msg.getString("type").equals("sent")) {
                    holder.received.setVisibility(View.GONE);
                    holder.sent.setVisibility(View.VISIBLE);
                    holder.sent.setText(msg.getString("msg"));
                } else {
                    holder.received.setVisibility(View.VISIBLE);
                    holder.sent.setVisibility(View.GONE);
                    holder.received.setText(msg.getString("msg"));
                }
            } catch (JSONException e) {
                // Legacy entries are plain strings; render them as received.
                try {
                    String msg = msgs.getString(i);
                    holder.received.setVisibility(View.VISIBLE);
                    holder.sent.setVisibility(View.GONE);
                    holder.received.setText(msg);
                } catch (JSONException e1) {
                    e1.printStackTrace();
                }
            }
            return row;
        }
    }

    /** One selectable entry in the contact spinner (share-attachment flow). */
    private static class SyncEntry {
        public String handle;
        public String service;

        @Override
        public String toString() {
            return handle;
        }
    }

    private MessagesAdapter adapter = new MessagesAdapter();

    // Reloads the transcript whenever the background service updates the
    // persisted conversations while this screen is visible.
    private SharedPreferences.OnSharedPreferenceChangeListener prefslistener = new SharedPreferences.OnSharedPreferenceChangeListener() {
        @Override
        public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String s) {
            adapter.loadMessages(service, handle);
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_quick_reply);
        if(getSupportActionBar() != null)
            getSupportActionBar().hide();

        final EditText message = (EditText) findViewById(R.id.message);
        ImageButton send = (ImageButton) findViewById(R.id.send);
        send.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Hand the reply (and any attachment) off to RemoteInputService.
                Intent i = new Intent(RemoteInputService.ACTION_REPLY);
                i.setComponent(new ComponentName(getPackageName(), RemoteInputService.class.getName()));
                i.putExtra("handle", handle);
                i.putExtra("service", service);
                i.putExtra("name", name);
                i.putExtra("notification_id", getIntent().getIntExtra("notification_id", 0));
                i.putExtra("reply", message.getText().toString());
                if (attachment != null) {
                    i.putExtra(Intent.EXTRA_STREAM, attachment);
                    // NOTE(review): finish() is invoked before startService(); the
                    // service still starts, but clearing the EditText below is then
                    // moot — confirm the attachment flow is intended to close here.
                    finish();
                }
                startService(i);
                message.setText("");
            }
        });

        if(getIntent() != null)
            onNewIntent(getIntent());
    }

    /**
     * Resolves the conversation target from the Intent. Three entry paths:
     * explicit handle/service extras (from a notification), a contacts-provider
     * data Uri (tapped contact), or EXTRA_STREAM (sharing an attachment, which
     * shows a contact spinner instead).
     */
    @Override
    protected void onNewIntent(Intent intent) {
        Uri contact = null;

        if(intent.hasExtra("notification_id"))
            NotificationManagerCompat.from(this).cancel(intent.getIntExtra("notification_id", 0));

        if(intent.hasExtra("handle") && intent.hasExtra("service")) {
            handle = intent.getStringExtra("handle");
            service = intent.getStringExtra("service");
            // Look up the synced raw contact matching this handle + service.
            Cursor c = getContentResolver().query(
                    ContactsContract.RawContacts.CONTENT_URI,
                    new String[] { ContactsContract.RawContacts.CONTACT_ID, ContactsSyncAdapterService.ProtocolColumn },
                    ContactsSyncAdapterService.HandleColumn + " = ? AND " + ContactsSyncAdapterService.ServiceColumn + " = ?",
                    new String[] { intent.getStringExtra("handle"), intent.getStringExtra("service") },
                    null
            );
            // NOTE(review): c.close() is only reached when moveToFirst() succeeds;
            // an empty (non-null) cursor leaks here — consider closing in finally.
            if(c != null && c.moveToFirst()) {
                contact = ContentUris.withAppendedId(ContactsContract.Contacts.CONTENT_URI, c.getLong(0));
                protocol = c.getString(1);
                c.close();
            }
        } else if(intent.getData() != null) {
            // Launched with a contacts data row; pull handle/service/protocol from it.
            Cursor cursor = getContentResolver().query(intent.getData(), null, null, null, null);
            // NOTE(review): same conditional-close leak pattern as above.
            if(cursor != null && cursor.moveToFirst()) {
                handle = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.DATA1));
                service = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.DATA2));
                protocol = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.DATA3));
                contact = ContentUris.withAppendedId(ContactsContract.Contacts.CONTENT_URI, cursor.getLong(cursor.getColumnIndex(ContactsContract.RawContacts.CONTACT_ID)));
                cursor.close();
            }
        }

        if(contact != null) {
            // Known contact: show photo, display name, protocol and the transcript.
            findViewById(R.id.contact).setVisibility(View.VISIBLE);
            findViewById(R.id.spinner).setVisibility(View.GONE);
            ListView listView = (ListView) findViewById(R.id.conversation);
            listView.setVisibility(View.VISIBLE);
            listView.setAdapter(adapter);
            Bitmap b = BitmapFactory.decodeStream(ContactsContract.Contacts.openContactPhotoInputStream(getContentResolver(), contact, true));
            if (b != null) {
                ImageView photo = (ImageView) findViewById(R.id.photo);
                photo.setImageBitmap(b);
            }
            Cursor cursor = getContentResolver().query(contact, new String[]{ContactsContract.Contacts.DISPLAY_NAME}, null, null, null);
            if(cursor != null && cursor.moveToFirst()) {
                name = cursor.getString(0);
                ((TextView)findViewById(R.id.name)).setText(name);
                cursor.close();
            }
            ((TextView)findViewById(R.id.protocol)).setText(protocol);
        } else if(intent.hasExtra(Intent.EXTRA_STREAM)) {
            // Share flow: no contact resolved yet, let the user pick one.
            findViewById(R.id.contact).setVisibility(View.GONE);
            findViewById(R.id.spinner).setVisibility(View.VISIBLE);
            findViewById(R.id.thumbnail).setVisibility(View.VISIBLE);
            Spinner s = (Spinner)findViewById(R.id.spinner);

            ArrayList<SyncEntry> contacts = new ArrayList<>();
            Uri rawContactUri = ContactsContract.RawContacts.CONTENT_URI.buildUpon().appendQueryParameter(ContactsContract.RawContacts.ACCOUNT_NAME, getResources().getString(R.string.app_name)).appendQueryParameter(
                    ContactsContract.RawContacts.ACCOUNT_TYPE, "org.c99.wear_imessage.account").build();
            Cursor c1 = getContentResolver().query(rawContactUri, new String[]{BaseColumns._ID, ContactsSyncAdapterService.HandleColumn, ContactsSyncAdapterService.ServiceColumn}, null, null, null);
            while (c1 != null && c1.moveToNext()) {
                SyncEntry e = new SyncEntry();
                e.handle = c1.getString(1);
                e.service = c1.getString(2);
                contacts.add(e);
            }
            if(c1 != null)
                c1.close();

            s.setAdapter(new ArrayAdapter<>(this, android.R.layout.simple_dropdown_item_1line, contacts));
            s.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
                @Override
                public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
                    SyncEntry e = (SyncEntry)adapterView.getItemAtPosition(i);
                    handle = e.handle;
                    service = e.service;
                }

                @Override
                public void onNothingSelected(AdapterView<?> adapterView) {
                }
            });

            attachment = intent.getParcelableExtra(Intent.EXTRA_STREAM);
            String type = getContentResolver().getType(attachment);
            if(type.startsWith("image/")) {
                // Downsample the shared image to roughly <=640px for the thumbnail.
                try {
                    android.util.Log.e("iMessage", "Image data: " + attachment);
                    BitmapFactory.Options o = new BitmapFactory.Options();
                    o.inJustDecodeBounds = true;
                    BitmapFactory.decodeStream(getContentResolver().openInputStream(attachment), null, o);

                    int scale = 1;
                    if (o.outWidth >= 640 || o.outHeight >= 640) {
                        if (o.outWidth > o.outHeight) {
                            if (o.outWidth > 640)
                                scale = o.outWidth / 640;
                        } else {
                            if (o.outHeight > 640)
                                scale = o.outHeight / 640;
                        }
                    }

                    o = new BitmapFactory.Options();
                    o.inSampleSize = scale;
                    Bitmap bmp = BitmapFactory.decodeStream(getContentResolver().openInputStream(attachment), null, o);
                    ((ImageView) findViewById(R.id.thumbnail)).setImageBitmap(bmp);
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
        }
        setIntent(intent);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Track live updates to the persisted conversation while visible.
        getSharedPreferences("data", 0).registerOnSharedPreferenceChangeListener(prefslistener);
        adapter.loadMessages(service, handle);
    }

    @Override
    protected void onPause() {
        super.onPause();
        getSharedPreferences("data", 0).unregisterOnSharedPreferenceChangeListener(prefslistener);
    }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.server.router.proxy.aggregate; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.util.ArrayList; import java.util.List; import org.jboss.logging.Logger; import org.junit.Test; import org.kie.server.router.proxy.aggragate.XstreamXMLResponseAggregator; import org.w3c.dom.Document; import org.w3c.dom.NodeList; public class XstreamAggregatorTest extends AbstractAggregateTest { private static final Logger logger = Logger.getLogger(XstreamAggregatorTest.class); @Test public void testAggregateProcessDefinitions() throws Exception { String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-1.xml")); String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-2.xml")); XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator(); List<String> data = new ArrayList<>(); data.add(xml1); data.add(xml2); String result = aggregate.aggregate(data); logger.debug(result); Document xml = toXml(result); assertNotNull(xml); NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList"); assertNotNull(processes); assertEquals(1, processes.getLength()); NodeList defs = xml.getElementsByTagName("processes"); assertNotNull(defs); assertEquals(1, defs.getLength()); NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition"); 
assertNotNull(processDefs); assertEquals(5, processDefs.getLength()); } @Test public void testAggregateProcessDefinitionsTargetEmpty() throws Exception { String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-1.xml")); String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-empty.xml")); XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator(); List<String> data = new ArrayList<>(); data.add(xml1); data.add(xml2); String result = aggregate.aggregate(data); logger.debug(result); Document xml = toXml(result); assertNotNull(xml); NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList"); assertNotNull(processes); assertEquals(1, processes.getLength()); NodeList defs = xml.getElementsByTagName("processes"); assertNotNull(defs); assertEquals(1, defs.getLength()); NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition"); assertNotNull(processDefs); assertEquals(2, processDefs.getLength()); } @Test public void testAggregateProcessDefinitionsSourceEmpty() throws Exception { String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-empty.xml")); String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-2.xml")); XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator(); List<String> data = new ArrayList<>(); data.add(xml1); data.add(xml2); String result = aggregate.aggregate(data); logger.debug(result); Document xml = toXml(result); assertNotNull(xml); NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList"); assertNotNull(processes); assertEquals(1, processes.getLength()); NodeList defs = xml.getElementsByTagName("processes"); assertNotNull(defs); assertEquals(1, defs.getLength()); NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition"); assertNotNull(processDefs); 
assertEquals(3, processDefs.getLength()); } @Test public void testAggregateProcessDefinitionsEmpty() throws Exception { String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-empty.xml")); String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-empty.xml")); XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator(); List<String> data = new ArrayList<>(); data.add(xml1); data.add(xml2); String result = aggregate.aggregate(data); logger.debug(result); Document xml = toXml(result); assertNotNull(xml); NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList"); assertNotNull(processes); assertEquals(1, processes.getLength()); NodeList defs = xml.getElementsByTagName("processes"); assertNotNull(defs); assertEquals(1, defs.getLength()); NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition"); assertNotNull(processDefs); assertEquals(0, processDefs.getLength()); } @Test public void testAggregateProcessInstances() throws Exception { String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-instance-1.xml")); String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-instance-2.xml")); XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator(); List<String> data = new ArrayList<>(); data.add(xml1); data.add(xml2); String result = aggregate.aggregate(data); logger.debug(result); Document xml = toXml(result); assertNotNull(xml); NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstanceList"); assertNotNull(processes); assertEquals(1, processes.getLength()); NodeList instances = xml.getElementsByTagName("processInstances"); assertNotNull(instances); assertEquals(1, instances.getLength()); NodeList processInstances = xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstance"); assertNotNull(processInstances); 
assertEquals(3, processInstances.getLength()); } @Test public void testAggregateProcessInstancesTargetEmpty() throws Exception { String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-instance-1.xml")); String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-instance-empty.xml")); XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator(); List<String> data = new ArrayList<>(); data.add(xml1); data.add(xml2); String result = aggregate.aggregate(data); logger.debug(result); Document xml = toXml(result); assertNotNull(xml); NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstanceList"); assertNotNull(processes); assertEquals(1, processes.getLength()); NodeList instances = xml.getElementsByTagName("processInstances"); assertNotNull(instances); assertEquals(1, instances.getLength()); NodeList processInstances = xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstance"); assertNotNull(processInstances); assertEquals(1, processInstances.getLength()); } @Test public void testAggregateProcessInstancesSourceEmpty() throws Exception { String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-instance-empty.xml")); String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-instance-2.xml")); XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator(); List<String> data = new ArrayList<>(); data.add(xml1); data.add(xml2); String result = aggregate.aggregate(data); logger.debug(result); Document xml = toXml(result); assertNotNull(xml); NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstanceList"); assertNotNull(processes); assertEquals(1, processes.getLength()); NodeList instances = xml.getElementsByTagName("processInstances"); assertNotNull(instances); assertEquals(1, instances.getLength()); NodeList processInstances = 
xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstance");
        assertNotNull(processInstances);
        // Two non-empty sources were merged above: 2 ProcessInstance entries expected.
        assertEquals(2, processInstances.getLength());
    }

    /**
     * Aggregating two empty process-instance payloads must still yield exactly one
     * ProcessInstanceList wrapper — but with zero ProcessInstance entries inside it.
     */
    @Test
    public void testAggregateProcessInstancesEmpty() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-instance-empty.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-instance-empty.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        // Exactly one list wrapper element after aggregation...
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstanceList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList instances = xml.getElementsByTagName("processInstances");
        assertNotNull(instances);
        assertEquals(1, instances.getLength());
        // ...and, since both inputs were empty, no instance entries at all.
        NodeList processInstances = xml.getElementsByTagName("org.kie.server.api.model.instance.ProcessInstance");
        assertNotNull(processInstances);
        assertEquals(0, processInstances.getLength());
    }

    /**
     * Merging two non-empty task-summary payloads must produce a single
     * TaskSummaryList whose "tasks" collection contains all entries from both
     * sources (5 in total, per the fixtures — see the *TargetEmpty/*SourceEmpty
     * tests below, which pin the per-fixture counts at 3 and 2).
     */
    @Test
    public void testAggregateTaskSummaries() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/task-summary-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/task-summary-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummaryList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList tasks = xml.getElementsByTagName("tasks");
        assertNotNull(tasks);
        assertEquals(1, tasks.getLength());
        NodeList processInstances =
                xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummary");
        assertNotNull(processInstances);
        // 3 summaries from fixture 1 + 2 from fixture 2.
        assertEquals(5, processInstances.getLength());
    }

    /**
     * When the second (target) payload is empty, the aggregate must contain only
     * the 3 summaries coming from the first fixture.
     */
    @Test
    public void testAggregateTaskSummariesTargetEmpty() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/task-summary-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/task-summary-empty.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummaryList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList tasks = xml.getElementsByTagName("tasks");
        assertNotNull(tasks);
        assertEquals(1, tasks.getLength());
        NodeList processInstances = xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummary");
        assertNotNull(processInstances);
        assertEquals(3, processInstances.getLength());
    }

    /**
     * When the first (source) payload is empty, the aggregate must contain only
     * the 2 summaries coming from the second fixture.
     */
    @Test
    public void testAggregateTaskSummariesSourceEmpty() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/task-summary-empty.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/task-summary-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummaryList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList tasks = xml.getElementsByTagName("tasks");
        assertNotNull(tasks);
        assertEquals(1, tasks.getLength());
        NodeList processInstances =
                xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummary");
        assertNotNull(processInstances);
        assertEquals(2, processInstances.getLength());
    }

    /**
     * Two empty task-summary payloads: one wrapper, one "tasks" collection,
     * zero TaskSummary entries.
     */
    @Test
    public void testAggregateTaskSummariesEmpty() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/task-summary-empty.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/task-summary-empty.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummaryList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList tasks = xml.getElementsByTagName("tasks");
        assertNotNull(tasks);
        assertEquals(1, tasks.getLength());
        NodeList processInstances = xml.getElementsByTagName("org.kie.server.api.model.instance.TaskSummary");
        assertNotNull(processInstances);
        assertEquals(0, processInstances.getLength());
    }

    /**
     * Sorted + paged aggregation: sort by "ProcessId" ascending, page 0 of
     * size 2 — the merged process-definition list must start with ids "1","2".
     */
    @Test
    public void testSortProcessDefinitions() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        // (data, sortBy, ascending, page, pageSize)
        String result = aggregate.aggregate(data, "ProcessId", true, 0, 2);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList defs = xml.getElementsByTagName("processes");
        assertNotNull(defs);
        assertEquals(1, defs.getLength());
        NodeList processDefs =
xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition");
        assertNotNull(processDefs);
        // Page size 2 — only two definitions survive paging.
        assertEquals(2, processDefs.getLength());
        NodeList processDefIds = xml.getElementsByTagName("id");
        assertNotNull(processDefIds);
        assertEquals(2, processDefIds.getLength());
        // make sure it's properly sorted and paged
        String value1 = processDefIds.item(0).getFirstChild().getNodeValue();
        assertEquals("1", value1);
        String value2 = processDefIds.item(1).getFirstChild().getNodeValue();
        assertEquals("2", value2);
    }

    /**
     * Same as above but descending: page 0 of size 2 must hold the highest ids
     * ("5", then "4").
     */
    @Test
    public void testSortProcessDefinitionsDescending() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data, "ProcessId", false, 0, 2);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList defs = xml.getElementsByTagName("processes");
        assertNotNull(defs);
        assertEquals(1, defs.getLength());
        NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition");
        assertNotNull(processDefs);
        assertEquals(2, processDefs.getLength());
        NodeList processDefIds = xml.getElementsByTagName("id");
        assertNotNull(processDefIds);
        assertEquals(2, processDefIds.getLength());
        // make sure it's properly sorted and paged
        String value1 = processDefIds.item(0).getFirstChild().getNodeValue();
        assertEquals("5", value1);
        String value2 = processDefIds.item(1).getFirstChild().getNodeValue();
        assertEquals("4", value2);
    }

    /**
     * Ascending, second page (page 1, size 2): ids "3" and "4".
     */
    @Test
    public void testSortProcessDefinitionsNextPage() throws Exception {
        String xml1 =
                read(this.getClass().getResourceAsStream("/xstream/process-def-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data, "ProcessId", true, 1, 2);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList defs = xml.getElementsByTagName("processes");
        assertNotNull(defs);
        assertEquals(1, defs.getLength());
        NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition");
        assertNotNull(processDefs);
        assertEquals(2, processDefs.getLength());
        NodeList processDefIds = xml.getElementsByTagName("id");
        assertNotNull(processDefIds);
        assertEquals(2, processDefIds.getLength());
        // make sure it's properly sorted and paged
        String value1 = processDefIds.item(0).getFirstChild().getNodeValue();
        assertEquals("3", value1);
        String value2 = processDefIds.item(1).getFirstChild().getNodeValue();
        assertEquals("4", value2);
    }

    /**
     * Descending, second page (page 1, size 2): ids "3" and "2".
     */
    @Test
    public void testSortProcessDefinitionsNextPageDescending() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data, "ProcessId", false, 1, 2);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList");
        assertNotNull(processes);
        assertEquals(1,
                processes.getLength());
        NodeList defs = xml.getElementsByTagName("processes");
        assertNotNull(defs);
        assertEquals(1, defs.getLength());
        NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition");
        assertNotNull(processDefs);
        assertEquals(2, processDefs.getLength());
        NodeList processDefIds = xml.getElementsByTagName("id");
        assertNotNull(processDefIds);
        assertEquals(2, processDefIds.getLength());
        // make sure it's properly sorted and paged
        String value1 = processDefIds.item(0).getFirstChild().getNodeValue();
        assertEquals("3", value1);
        String value2 = processDefIds.item(1).getFirstChild().getNodeValue();
        assertEquals("2", value2);
    }

    /**
     * Requesting a page past the end of the merged data (page 5, size 2) must
     * return a well-formed but empty definition list.
     */
    @Test
    public void testSortProcessDefinitionsOutOfPage() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/process-def-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/process-def-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data, "ProcessId", true, 5, 2);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinitionList");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList defs = xml.getElementsByTagName("processes");
        assertNotNull(defs);
        assertEquals(1, defs.getLength());
        NodeList processDefs = xml.getElementsByTagName("org.kie.server.api.model.definition.ProcessDefinition");
        assertNotNull(processDefs);
        assertEquals(0, processDefs.getLength());
    }

    /**
     * Container responses: two ServiceResponse payloads merge into one response
     * with a single "result" element holding all 6 kie-container entries.
     */
    @Test
    public void testAggregateContainers() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/containers-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/containers-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("org.kie.server.api.model.ServiceResponse");
        assertNotNull(processes);
        assertEquals(1, processes.getLength());
        NodeList defs = xml.getElementsByTagName("result");
        assertNotNull(defs);
        assertEquals(1, defs.getLength());
        NodeList processDefs = xml.getElementsByTagName("kie-container");
        assertNotNull(processDefs);
        assertEquals(6, processDefs.getLength());
    }

    /**
     * Raw (untyped) lists are concatenated: 5 sql-timestamp elements total.
     */
    @Test
    public void testAggregateRawList() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/raw-list-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/raw-list-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("sql-timestamp");
        assertNotNull(processes);
        assertEquals(5, processes.getLength());
    }

    /**
     * Raw lists with paging: no sort column (null), page 1 of size 2 keeps
     * exactly 2 of the 5 sql-timestamp elements.
     */
    @Test
    public void testAggregateRawListWithPaging() throws Exception {
        String xml1 = read(this.getClass().getResourceAsStream("/xstream/raw-list-1.xml"));
        String xml2 = read(this.getClass().getResourceAsStream("/xstream/raw-list-2.xml"));
        XstreamXMLResponseAggregator aggregate = new XstreamXMLResponseAggregator();
        List<String> data = new ArrayList<>();
        data.add(xml1);
        data.add(xml2);
        String result = aggregate.aggregate(data, null, true, 1, 2);
        logger.debug(result);
        Document xml = toXml(result);
        assertNotNull(xml);
        NodeList processes = xml.getElementsByTagName("sql-timestamp");
        assertNotNull(processes);
        assertEquals(2, processes.getLength());
    }
}
/* Generic definitions */ /* Assertions (useful to generate conditional code) */ /* Current type and class (and size, if applicable) */ /* Value methods */ /* Interfaces (keys) */ /* Interfaces (values) */ /* Abstract implementations (keys) */ /* Abstract implementations (values) */ /* Static containers (keys) */ /* Static containers (values) */ /* Implementations */ /* Synchronized wrappers */ /* Unmodifiable wrappers */ /* Other wrappers */ /* Methods (keys) */ /* Methods (values) */ /* Methods (keys/values) */ /* Methods that have special names depending on keys (but the special names depend on values) */ /* Equality */ /* Object/Reference-only definitions (keys) */ /* Primitive-type-only definitions (keys) */ /* Object/Reference-only definitions (values) */ /* Primitive-type-only definitions (values) */ /* * Copyright (C) 2002-2013 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package it.unimi.dsi.fastutil.bytes;
import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.HashCommon;
import it.unimi.dsi.fastutil.booleans.BooleanArrays;
import static it.unimi.dsi.fastutil.HashCommon.arraySize;
import static it.unimi.dsi.fastutil.HashCommon.maxFill;
import java.util.Map;
import java.util.NoSuchElementException;
import it.unimi.dsi.fastutil.ints.IntCollection;
import it.unimi.dsi.fastutil.ints.AbstractIntCollection;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
/** A type-specific hash map with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy}
 * is specified at creation time.
 *
 * <P>Instances of this class use a hash table to represent a map. The table is
 * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
 * smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
 * methods} lets you control the size of the table; this is particularly useful
 * if you reuse instances of this class.
 *
 * <p><strong>Warning:</strong> The implementation of this class has significantly
 * changed in <code>fastutil</code> 6.1.0. Please read the
 * comments about this issue in the section &ldquo;Faster Hash Tables&rdquo; of the <a href="../../../../../overview-summary.html">overview</a>.
 *
 * @see Hash
 * @see HashCommon
 */
// NOTE(review): generated fastutil source — open-addressing (linear-probing) table,
// with the key/value/used triple of parallel arrays below forming the table itself.
public class Byte2IntOpenCustomHashMap extends AbstractByte2IntMap implements java.io.Serializable, Cloneable, Hash {
    private static final long serialVersionUID = 0L;
    // Compile-time switch for the internal consistency checks (checkTable()).
    private static final boolean ASSERTS = false;
    /** The array of keys. */
    protected transient byte key[];
    /** The array of values. */
    protected transient int value[];
    /** The array telling whether a position is used. */
    protected transient boolean used[];
    /** The acceptable load factor. */
    protected final float f;
    /** The current table size.
     */
    protected transient int n;
    /** Threshold after which we rehash. It must be the table size times {@link #f}. */
    protected transient int maxFill;
    /** The mask for wrapping a position counter. */
    protected transient int mask;
    /** Number of entries in the set. */
    protected int size;
    /** Cached set of entries. */
    protected transient volatile FastEntrySet entries;
    /** Cached set of keys. */
    protected transient volatile ByteSet keys;
    /** Cached collection of values. */
    protected transient volatile IntCollection values;
    /** The hash strategy of this custom map. */
    protected it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy;
    /** Creates a new hash map.
     *
     * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
     *
     * @param expected the expected number of elements in the hash set.
     * @param f the load factor.
     * @param strategy the strategy.
     */
    @SuppressWarnings("unchecked")
    public Byte2IntOpenCustomHashMap( final int expected, final float f, final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this.strategy = strategy;
        if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
        if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
        this.f = f;
        // Power-of-two table so that (hash & mask) replaces the modulo.
        n = arraySize( expected, f );
        mask = n - 1;
        maxFill = maxFill( n, f );
        key = new byte[ n ];
        value = new int[ n ];
        used = new boolean[ n ];
    }
    /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
     *
     * @param expected the expected number of elements in the hash map.
     * @param strategy the strategy.
     */
    public Byte2IntOpenCustomHashMap( final int expected, final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( expected, DEFAULT_LOAD_FACTOR, strategy );
    }
    /** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries
     * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
     * @param strategy the strategy.
     */
    public Byte2IntOpenCustomHashMap( final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy );
    }
    /** Creates a new hash map copying a given one.
     *
     * @param m a {@link Map} to be copied into the new hash map.
     * @param f the load factor.
     * @param strategy the strategy.
     */
    public Byte2IntOpenCustomHashMap( final Map<? extends Byte, ? extends Integer> m, final float f, final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( m.size(), f, strategy );
        putAll( m );
    }
    /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
     *
     * @param m a {@link Map} to be copied into the new hash map.
     * @param strategy the strategy.
     */
    public Byte2IntOpenCustomHashMap( final Map<? extends Byte, ? extends Integer> m, final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( m, DEFAULT_LOAD_FACTOR, strategy );
    }
    /** Creates a new hash map copying a given type-specific one.
     *
     * @param m a type-specific map to be copied into the new hash map.
     * @param f the load factor.
     * @param strategy the strategy.
     */
    public Byte2IntOpenCustomHashMap( final Byte2IntMap m, final float f, final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( m.size(), f, strategy );
        putAll( m );
    }
    /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
     *
     * @param m a type-specific map to be copied into the new hash map.
     * @param strategy the strategy.
     */
    public Byte2IntOpenCustomHashMap( final Byte2IntMap m, final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( m, DEFAULT_LOAD_FACTOR, strategy );
    }
    /** Creates a new hash map using the elements of two parallel arrays.
     *
     * @param k the array of keys of the new hash map.
     * @param v the array of corresponding values in the new hash map.
     * @param f the load factor.
     * @param strategy the strategy.
     * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths. */
    public Byte2IntOpenCustomHashMap( final byte[] k, final int v[], final float f, final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( k.length, f, strategy );
        if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
        for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
    }
    /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
     *
     * @param k the array of keys of the new hash map.
     * @param v the array of corresponding values in the new hash map.
     * @param strategy the strategy.
     * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths. */
    public Byte2IntOpenCustomHashMap( final byte[] k, final int v[], final it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy ) {
        this( k, v, DEFAULT_LOAD_FACTOR, strategy );
    }
    /** Returns the hashing strategy.
     *
     * @return the hashing strategy of this custom hash map.
     */
    public it.unimi.dsi.fastutil.bytes.ByteHash.Strategy strategy() {
        return strategy;
    }
    /*
     * The following methods implements some basic building blocks used by
     * all accessors. They are (and should be maintained) identical to those used in OpenHashSet.drv.
     */
    // Inserts/overwrites a mapping; linear probing from the hashed slot.
    public int put(final byte k, final int v) {
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            // Key already present: overwrite the value and return the old one.
            if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
                final int oldValue = value[ pos ];
                value[ pos ] = v;
                return oldValue;
            }
            pos = ( pos + 1 ) & mask;
        }
        // Empty slot found: store the new entry here.
        used[ pos ] = true;
        key[ pos ] = k;
        value[ pos ] = v;
        // Grow (doubling) once the fill threshold is crossed.
        if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
        if ( ASSERTS ) checkTable();
        return defRetValue;
    }
    // Boxed variant of put(); returns null (not defRetValue) when the key was absent.
    public Integer put( final Byte ok, final Integer ov ) {
        final int v = ((ov).intValue());
        final byte k = ((ok).byteValue());
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
                final Integer oldValue = (Integer.valueOf(value[ pos ]));
                value[ pos ] = v;
                return oldValue;
            }
            pos = ( pos + 1 ) & mask;
        }
        used[ pos ] = true;
        key[ pos ] = k;
        value[ pos ] = v;
        if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
        if ( ASSERTS ) checkTable();
        return (null);
    }
    /** Adds an increment to value currently associated with a key.
     *
     * @param k the key.
     * @param incr the increment.
     * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
     * @deprecated use <code>addTo()</code> instead; having the same name of a {@link java.util.Set} method turned out to be a recipe for disaster.
     */
    @Deprecated
    public int add(final byte k, final int incr) {
        return addTo( k, incr );
    }
    /** Adds an increment to value currently associated with a key.
     *
     * <P>Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when
     * called with a key that does not currently appears in the map, the key
     * will be associated with the default return value plus
     * the given increment.
     *
     * @param k the key.
     * @param incr the increment.
     * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
     */
    public int addTo(final byte k, final int incr) {
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
                final int oldValue = value[ pos ];
                value[ pos ] += incr;
                return oldValue;
            }
            pos = ( pos + 1 ) & mask;
        }
        // Absent key: seed with defRetValue + incr, per the javadoc contract above.
        used[ pos ] = true;
        key[ pos ] = k;
        value[ pos ] = defRetValue + incr;
        if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
        if ( ASSERTS ) checkTable();
        return defRetValue;
    }
    /** Shifts left entries with the specified hash code, starting at the specified position,
     * and empties the resulting free entry.
     *
     * @param pos a starting position.
     * @return the position cleared by the shifting process.
     */
    protected final int shiftKeys( int pos ) {
        // Shift entries with the same hash.
        int last, slot;
        for(;;) {
            pos = ( ( last = pos ) + 1 ) & mask;
            while( used[ pos ] ) {
                slot = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(key[ pos ]) ) ) & mask;
                // Stop if the entry at pos may legally stay where it is
                // (its home slot does not fall in the (last, pos] probe gap).
                if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
                pos = ( pos + 1 ) & mask;
            }
            if ( ! used[ pos ] ) break;
            key[ last ] = key[ pos ];
            value[ last ] = value[ pos ];
        }
        used[ last ] = false;
        return last;
    }
    @SuppressWarnings("unchecked")
    public int remove( final byte k ) {
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
                size--;
                final int v = value[ pos ];
                // Close the probe-chain hole left by the removal.
                shiftKeys( pos );
                return v;
            }
            pos = ( pos + 1 ) & mask;
        }
        return defRetValue;
    }
    @SuppressWarnings("unchecked")
    public Integer remove( final Object ok ) {
        final byte k = ((((Byte)(ok)).byteValue()));
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
                size--;
                final int v = value[ pos ];
                shiftKeys( pos );
                return (Integer.valueOf(v));
            }
            pos = ( pos + 1 ) & mask;
        }
        return (null);
    }
    // Boxed lookup; returns null when the key is absent.
    public Integer get( final Byte ok ) {
        final byte k = ((ok).byteValue());
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode( k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( strategy.equals( (key[ pos ]), ( k) ) ) ) return (Integer.valueOf(value[ pos ]));
            pos = ( pos + 1 ) & mask;
        }
        return (null);
    }
    // Primitive lookup; returns defRetValue when the key is absent.
    @SuppressWarnings("unchecked")
    public int get( final byte k ) {
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return value[ pos ];
            pos = ( pos + 1 ) & mask;
        }
        return defRetValue;
    }
    @SuppressWarnings("unchecked")
    public boolean containsKey( final byte k ) {
        // The starting point.
        int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return true;
            pos = ( pos + 1 ) & mask;
        }
        return false;
    }
    // Linear scan of the whole table — O(n), unlike the keyed accessors.
    public boolean containsValue( final int v ) {
        final int value[] = this.value;
        final boolean used[] = this.used;
        for( int i = n; i-- != 0; ) if ( used[ i ] && ( (value[ i ]) == (v) ) ) return true;
        return false;
    }
    /* Removes all elements from this map.
     *
     * <P>To increase object reuse, this method does not change the table size.
     * If you want to reduce the table size, you must use {@link #trim()}.
     *
     */
    public void clear() {
        if ( size == 0 ) return;
        size = 0;
        BooleanArrays.fill( used, false );
        // We null all object entries so that the garbage collector can do its work.
    }
    public int size() {
        return size;
    }
    public boolean isEmpty() {
        return size == 0;
    }
    /** A no-op for backward compatibility.
     *
     * @param growthFactor unused.
     * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
     */
    @Deprecated
    public void growthFactor( int growthFactor ) {}
    /** Gets the growth factor (2).
     *
     * @return the growth factor of this set, which is fixed (2).
     * @see #growthFactor(int)
     * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
     */
    @Deprecated
    public int growthFactor() {
        // NOTE(review): javadoc says the growth factor is fixed at 2, but this legacy
        // stub returns 16 — kept as-is since the method is deprecated; confirm upstream.
        return 16;
    }
    /** The entry class for a hash map does not record key and value, but
     * rather the position in the hash table of the corresponding entry. This
     * is necessary so that calls to {@link java.util.Map.Entry#setValue(Object)} are reflected in
     * the map */
    private final class MapEntry implements Byte2IntMap.Entry , Map.Entry<Byte, Integer> {
        // The table index this entry refers to, or -1 if this entry has been deleted.
        private int index;
        MapEntry( final int index ) {
            this.index = index;
        }
        public Byte getKey() {
            return (Byte.valueOf(key[ index ]));
        }
        public byte getByteKey() {
            return key[ index ];
        }
        public Integer getValue() {
            return (Integer.valueOf(value[ index ]));
        }
        public int getIntValue() {
            return value[ index ];
        }
        // Writes through to the backing table (see the class comment above).
        public int setValue( final int v ) {
            final int oldValue = value[ index ];
            value[ index ] = v;
            return oldValue;
        }
        public Integer setValue( final Integer v ) {
            return (Integer.valueOf(setValue( ((v).intValue()) )));
        }
        @SuppressWarnings("unchecked")
        public boolean equals( final Object o ) {
            if (!(o instanceof Map.Entry)) return false;
            Map.Entry<Byte, Integer> e = (Map.Entry<Byte, Integer>)o;
            return ( strategy.equals( (key[ index ]), (((e.getKey()).byteValue())) ) ) && ( (value[ index ]) == (((e.getValue()).intValue())) );
        }
        public int hashCode() {
            return ( strategy.hashCode(key[ index ]) ) ^ (value[ index ]);
        }
        public String toString() {
            return key[ index ] + "=>" + value[ index ];
        }
    }
    /** An iterator over a hash map.
     */
    private class MapIterator {
        /** The index of the next entry to be returned, if positive or zero. If negative, the next entry to be returned, if any, is that of index -pos -2 from the {@link #wrapped} list. */
        int pos = Byte2IntOpenCustomHashMap.this.n;
        /** The index of the last entry that has been returned. It is -1 if either we did not return an entry yet, or the last returned entry has been removed. */
        int last = -1;
        /** A downward counter measuring how many entries must still be returned. */
        int c = size;
        /** A lazily allocated list containing the keys of elements that have wrapped around the table because of removals; such elements would not be enumerated (other elements would be usually enumerated twice in their place). */
        ByteArrayList wrapped;
        {
            // Position on the first used slot, scanning backwards from the table end.
            final boolean used[] = Byte2IntOpenCustomHashMap.this.used;
            if ( c != 0 ) while( ! used[ --pos ] );
        }
        public boolean hasNext() {
            return c != 0;
        }
        public int nextEntry() {
            if ( ! hasNext() ) throw new NoSuchElementException();
            c--;
            // We are just enumerating elements from the wrapped list.
            if ( pos < 0 ) {
                final byte k = wrapped.getByte( - ( last = --pos ) - 2 );
                // The starting point.
                int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
                // There's always an unused entry.
                while( used[ pos ] ) {
                    if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return pos;
                    pos = ( pos + 1 ) & mask;
                }
            }
            last = pos;
            //System.err.println( "Count: " + c );
            if ( c != 0 ) {
                final boolean used[] = Byte2IntOpenCustomHashMap.this.used;
                while ( pos-- != 0 && !used[ pos ] );
                // When here pos < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
            }
            return last;
        }
        /** Shifts left entries with the specified hash code, starting at the specified position,
         * and empties the resulting free entry. If any entry wraps around the table, instantiates
         * lazily {@link #wrapped} and stores the entry key.
         *
         * @param pos a starting position.
         * @return the position cleared by the shifting process.
         */
        protected final int shiftKeys( int pos ) {
            // Shift entries with the same hash.
            int last, slot;
            for(;;) {
                pos = ( ( last = pos ) + 1 ) & mask;
                while( used[ pos ] ) {
                    slot = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(key[ pos ]) ) ) & mask;
                    if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
                    pos = ( pos + 1 ) & mask;
                }
                if ( ! used[ pos ] ) break;
                if ( pos < last ) {
                    // Wrapped entry.
                    // Remember its key so the iteration still visits it exactly once.
                    if ( wrapped == null ) wrapped = new ByteArrayList ();
                    wrapped.add( key[ pos ] );
                }
                key[ last ] = key[ pos ];
                value[ last ] = value[ pos ];
            }
            used[ last ] = false;
            return last;
        }
        @SuppressWarnings("unchecked")
        public void remove() {
            if ( last == -1 ) throw new IllegalStateException();
            if ( pos < -1 ) {
                // We're removing wrapped entries.
                Byte2IntOpenCustomHashMap.this.remove( wrapped.getByte( - pos - 2 ) );
                last = -1;
                return;
            }
            size--;
            // If shifting moved an entry into the slot we already passed, revisit it.
            if ( shiftKeys( last ) == pos && c > 0 ) {
                c++;
                nextEntry();
            }
            last = -1; // You can no longer remove this entry.
            if ( ASSERTS ) checkTable();
        }
        public int skip( final int n ) {
            int i = n;
            while( i-- != 0 && hasNext() ) nextEntry();
            return n - i - 1;
        }
    }
    private class EntryIterator extends MapIterator implements ObjectIterator<Byte2IntMap.Entry > {
        private MapEntry entry;
        public Byte2IntMap.Entry next() {
            return entry = new MapEntry( nextEntry() );
        }
        @Override
        public void remove() {
            super.remove();
            entry.index = -1; // You cannot use a deleted entry.
        }
    }
    // Reuses one mutable BasicEntry for every call — valid only until the next next().
    private class FastEntryIterator extends MapIterator implements ObjectIterator<Byte2IntMap.Entry > {
        final BasicEntry entry = new BasicEntry ( ((byte)0), (0) );
        public BasicEntry next() {
            final int e = nextEntry();
            entry.key = key[ e ];
            entry.value = value[ e ];
            return entry;
        }
    }
    private final class MapEntrySet extends AbstractObjectSet<Byte2IntMap.Entry > implements FastEntrySet {
        public ObjectIterator<Byte2IntMap.Entry > iterator() {
            return new EntryIterator();
        }
        public ObjectIterator<Byte2IntMap.Entry > fastIterator() {
            return new FastEntryIterator();
        }
        @SuppressWarnings("unchecked")
        public boolean contains( final Object o ) {
            if ( !( o instanceof Map.Entry ) ) return false;
            final Map.Entry<Byte, Integer> e = (Map.Entry<Byte, Integer>)o;
            final byte k = ((e.getKey()).byteValue());
            // The starting point.
            int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
            // There's always an unused entry.
            while( used[ pos ] ) {
                // Both key and value must match for entry-set membership.
                if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return ( (value[ pos ]) == (((e.getValue()).intValue())) );
                pos = ( pos + 1 ) & mask;
            }
            return false;
        }
        @SuppressWarnings("unchecked")
        public boolean remove( final Object o ) {
            if ( !( o instanceof Map.Entry ) ) return false;
            final Map.Entry<Byte, Integer> e = (Map.Entry<Byte, Integer>)o;
            final byte k = ((e.getKey()).byteValue());
            // The starting point.
            int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
            // There's always an unused entry.
            while( used[ pos ] ) {
                if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
                    Byte2IntOpenCustomHashMap.this.remove( e.getKey() );
                    return true;
                }
                pos = ( pos + 1 ) & mask;
            }
            return false;
        }
        public int size() {
            return size;
        }
        public void clear() {
            Byte2IntOpenCustomHashMap.this.clear();
        }
    }
    public FastEntrySet byte2IntEntrySet() {
        // Lazily created and cached (see the `entries` field).
        if ( entries == null ) entries = new MapEntrySet();
        return entries;
    }
    /** An iterator on keys.
     *
     * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
     * (and possibly their type-specific counterparts) so that they return keys
     * instead of entries.
     */
    private final class KeyIterator extends MapIterator implements ByteIterator {
        public KeyIterator() { super(); }
        public byte nextByte() { return key[ nextEntry() ]; }
        public Byte next() { return (Byte.valueOf(key[ nextEntry() ])); }
    }
    // Live key view backed by the map: mutations pass through to the outer instance.
    private final class KeySet extends AbstractByteSet {
        public ByteIterator iterator() { return new KeyIterator(); }
        public int size() { return size; }
        public boolean contains( byte k ) { return containsKey( k ); }
        public boolean remove( byte k ) {
            // Detect success by observing the size change of the backing map.
            final int oldSize = size;
            Byte2IntOpenCustomHashMap.this.remove( k );
            return size != oldSize;
        }
        public void clear() { Byte2IntOpenCustomHashMap.this.clear(); }
    }
    public ByteSet keySet() {
        // Lazily created and cached (see the `keys` field).
        if ( keys == null ) keys = new KeySet();
        return keys;
    }
    /** An iterator on values.
     *
     * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
     * (and possibly their type-specific counterparts) so that they return values
     * instead of entries.
     */
    private final class ValueIterator extends MapIterator implements IntIterator {
        public ValueIterator() { super(); }
        public int nextInt() { return value[ nextEntry() ]; }
        public Integer next() { return (Integer.valueOf(value[ nextEntry() ])); }
    }
    public IntCollection values() {
        // Lazily created and cached anonymous live view (see the `values` field).
        if ( values == null ) values = new AbstractIntCollection () {
                public IntIterator iterator() { return new ValueIterator(); }
                public int size() { return size; }
                public boolean contains( int v ) { return containsValue( v ); }
                public void clear() { Byte2IntOpenCustomHashMap.this.clear(); }
            };
        return values;
    }
    /** A no-op for backward compatibility. The kind of tables implemented by
     * this class never need rehashing.
     *
     * <P>If you need to reduce the table size to fit exactly
     * this set, use {@link #trim()}.
     *
     * @return true.
     * @see #trim()
     * @deprecated A no-op.
     */
    @Deprecated
    public boolean rehash() {
        return true;
    }
    /** Rehashes the map, making the table as small as possible.
     *
     * <P>This method rehashes the table to the smallest size satisfying the
     * load factor. It can be used when the set will not be changed anymore, so
     * to optimize access speed and size.
     *
     * <P>If the table size is already the minimum possible, this method
     * does nothing.
     *
     * @return true if there was enough memory to trim the map.
     * @see #trim(int)
     */
    public boolean trim() {
        final int l = arraySize( size, f );
        if ( l >= n ) return true;
        try {
            rehash( l );
        }
        catch(OutOfMemoryError cantDoIt) {
            // Could not allocate the smaller table; leave the map untouched.
            return false;
        }
        return true;
    }
    /** Rehashes this map if the table is too large.
     *
     * <P>Let <var>N</var> be the smallest table size that can hold
     * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
     * table size is smaller than or equal to <var>N</var>, this method does
     * nothing. Otherwise, it rehashes this map in a table of size
     * <var>N</var>.
     *
     * <P>This method is useful when reusing maps. {@linkplain #clear() Clearing a
     * map} leaves the table size untouched. If you are reusing a map
     * many times, you can call this method with a typical
     * size to avoid keeping around a very large table just
     * because of a few large transient maps.
     *
     * @param n the threshold for the trimming.
     * @return true if there was enough memory to trim the map.
     * @see #trim()
     */
    public boolean trim( final int n ) {
        final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) );
        if ( this.n <= l ) return true;
        try {
            rehash( l );
        }
        catch( OutOfMemoryError cantDoIt ) {
            return false;
        }
        return true;
    }
    /** Resizes the map.
     *
     * <P>This method implements the basic rehashing strategy, and may be
     * overriden by subclasses implementing different rehashing strategies (e.g.,
     * disk-based rehashing). However, you should not override this method
     * unless you understand the internal workings of this class.
     *
     * @param newN the new size
     */
    @SuppressWarnings("unchecked")
    protected void rehash( final int newN ) {
        int i = 0, pos;
        final boolean used[] = this.used;
        byte k;
        final byte key[] = this.key;
        final int value[] = this.value;
        final int newMask = newN - 1;
        final byte newKey[] = new byte[ newN ];
        final int newValue[] = new int[newN];
        final boolean newUsed[] = new boolean[ newN ];
        // Re-insert every live entry into the freshly allocated table.
        for( int j = size; j-- != 0; ) {
            while( ! used[ i ] ) i++;
            k = key[ i ];
            pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & newMask;
            while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
            newUsed[ pos ] = true;
            newKey[ pos ] = k;
            newValue[ pos ] = value[ i ];
            i++;
        }
        n = newN;
        mask = newMask;
        maxFill = maxFill( n, f );
        this.key = newKey;
        this.value = newValue;
        this.used = newUsed;
    }
    /** Returns a deep copy of this map.
     *
     * <P>This method performs a deep copy of this hash map; the data stored in the
     * map, however, is not cloned. Note that this makes a difference only for object keys.
     *
     * @return a deep copy of this map.
     */
    @SuppressWarnings("unchecked")
    public Byte2IntOpenCustomHashMap clone() {
        Byte2IntOpenCustomHashMap c;
        try {
            c = (Byte2IntOpenCustomHashMap )super.clone();
        }
        catch(CloneNotSupportedException cantHappen) {
            // Cannot happen: the class implements Cloneable.
            throw new InternalError();
        }
        // Drop the cached views; the clone will lazily rebuild its own.
        c.keys = null;
        c.values = null;
        c.entries = null;
        c.key = key.clone();
        c.value = value.clone();
        c.used = used.clone();
        c.strategy = strategy;
        return c;
    }
    /** Returns a hash code for this map.
     *
     * This method overrides the generic method provided by the superclass.
     * Since <code>equals()</code> is not overriden, it is important
     * that the value returned by this method is the same value as
     * the one returned by the overriden method.
     *
     * @return a hash code for this map.
     */
    public int hashCode() {
        int h = 0;
        for( int j = size, i = 0, t = 0; j-- != 0; ) {
            while( !
used[ i ] ) i++; t = ( strategy.hashCode(key[ i ]) ); t ^= (value[ i ]); h += t; i++; } return h; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { final byte key[] = this.key; final int value[] = this.value; final MapIterator i = new MapIterator(); s.defaultWriteObject(); for( int j = size, e; j-- != 0; ) { e = i.nextEntry(); s.writeByte( key[ e ] ); s.writeInt( value[ e ] ); } } @SuppressWarnings("unchecked") private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); n = arraySize( size, f ); maxFill = maxFill( n, f ); mask = n - 1; final byte key[] = this.key = new byte[ n ]; final int value[] = this.value = new int[ n ]; final boolean used[] = this.used = new boolean[ n ]; byte k; int v; for( int i = size, pos = 0; i-- != 0; ) { k = s.readByte(); v = s.readInt(); pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask; while ( used[ pos ] ) pos = ( pos + 1 ) & mask; used[ pos ] = true; key[ pos ] = k; value[ pos ] = v; } if ( ASSERTS ) checkTable(); } private void checkTable() {} }
/*
 * Copyright 2015-2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.wildfly.agent.installer;

import java.io.Console;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.IOUtils;
import org.hawkular.wildfly.module.installer.DeploymentConfiguration;
import org.hawkular.wildfly.module.installer.DeploymentConfiguration.Builder;
import org.hawkular.wildfly.module.installer.ExtensionDeployer;
import org.hawkular.wildfly.module.installer.XmlEdit;
import org.jboss.aesh.cl.CommandLine;
import org.jboss.aesh.cl.internal.ProcessedCommand;
import org.jboss.aesh.cl.parser.CommandLineParser;
import org.jboss.aesh.cl.parser.CommandLineParserBuilder;
import org.jboss.aesh.cl.parser.CommandLineParserException;
import org.jboss.logging.Logger;

/**
 * Command-line installer that deploys the Hawkular WildFly Agent module into a
 * WildFly/EAP installation and wires it up (socket bindings, storage adapter,
 * managed servers, and - for https - a security realm) so the agent can talk
 * to a Hawkular Server.
 */
public class AgentInstaller {

    private static final Logger log = Logger.getLogger(AgentInstaller.class);
    // Name of the <security-realm> this installer creates for https connections.
    private static final String SECURITY_REALM_NAME = "HawkularRealm";

    /**
     * Installer entry point. Parses the command-line options, optionally decrypts
     * encrypted passwords, locates the target WildFly home and the agent module
     * distribution (downloaded, classpath-bundled, URL, or local file), then edits
     * the target server configuration and installs the module.
     *
     * <p>Errors are logged and swallowed unless the system property
     * {@code org.hawkular.wildfly.agent.installer.throw-exception-on-error} is true,
     * in which case they are rethrown (used by tests). Temporary files created along
     * the way are always deleted in the finally block.</p>
     *
     * @param args command line arguments; see {@link InstallerConfiguration} for the options
     * @throws Exception on any installation failure when the rethrow property is set
     */
    public static void main(String[] args) throws Exception {
        ProcessedCommand<?> options = null;
        // Temp files (downloaded module zip, snippet files) to clean up at the end.
        ArrayList<File> filesToDelete = new ArrayList<>();
        try {
            options = InstallerConfiguration.buildCommandLineOptions();
            CommandLineParser<?> parser = new CommandLineParserBuilder().processedCommand(options).create();

            // aesh parses a single command line string, so rebuild one from args.
            StringBuilder argLine = new StringBuilder(InstallerConfiguration.COMMAND_NAME);
            for (String str : args) {
                argLine.append(' ').append(str);
            }
            CommandLine<?> commandLine = parser.parse(argLine.toString());
            InstallerConfiguration installerConfig = new InstallerConfiguration(commandLine);

            // IF we were told the passwords were encrypted THEN
            //   IF we were given the key on the command line THEN
            //     Use the key given on the command line for decoding
            //   ELSE
            //     Use the key the user gives us over stdin for decoding
            //
            //   IF we were given the salt on the command line THEN
            //     Use the salt given on the command line for decoding
            //   ELSE
            //     Use the salt the user gives us over stdin for decoding
            //
            //   Decode using the key and salt.
            boolean passwordsEncrypted = commandLine.hasOption(InstallerConfiguration.OPTION_ENCRYPTION_KEY);
            if (passwordsEncrypted) {
                String key = commandLine.getOptionValue(InstallerConfiguration.OPTION_ENCRYPTION_KEY, null);
                String saltAsString = commandLine.getOptionValue(InstallerConfiguration.OPTION_ENCRYPTION_SALT, null);
                if (key == null || key.isEmpty()) {
                    key = readPasswordFromStdin("Encryption key:");
                }
                boolean saltSpecified = commandLine.hasOption(InstallerConfiguration.OPTION_ENCRYPTION_SALT);
                if (!saltSpecified) {
                    // no salt option at all: the key doubles as the salt
                    saltAsString = key;
                }
                if (saltAsString == null || saltAsString.isEmpty()) {
                    saltAsString = readPasswordFromStdin("Salt:");
                }
                assert saltAsString != null;
                assert key != null;
                byte[] salt = saltAsString.getBytes("UTF-8");
                installerConfig.decodeProperties(key, salt);
            }

            String jbossHome = installerConfig.getTargetLocation();
            if (jbossHome == null) {
                // user did not provide us with a wildfly home - let's see if we are sitting in a wildfly home already
                File jbossHomeFile = new File(".").getCanonicalFile();
                if (!(jbossHomeFile.exists() && jbossHomeFile.isDirectory() && jbossHomeFile.canRead()
                        && new File(jbossHomeFile, "modules").isDirectory())) {
                    throw new Exception(InstallerConfiguration.OPTION_TARGET_LOCATION + " must be specified");
                }
                // looks like our current working directory is a WildFly home - use that
                jbossHome = jbossHomeFile.getCanonicalPath();
            }

            if (installerConfig.getUsername() == null || installerConfig.getPassword() == null) {
                throw new Exception(
                        "You must provide credentials (username/password) in installer configuration");
            }

            String hawkularServerProtocol;
            String hawkularServerHost;
            String hawkularServerPort;
            if (installerConfig.getServerUrl() == null) {
                throw new Exception("You must provide the Hawkular Server URL");
            }
            try {
                URL hawkularServerUrl = new URL(installerConfig.getServerUrl());
                hawkularServerProtocol = hawkularServerUrl.getProtocol();
                hawkularServerHost = hawkularServerUrl.getHost();
                // URL.getPort() is -1 when the URL does not carry an explicit port.
                hawkularServerPort = String.valueOf(hawkularServerUrl.getPort());
                if ("-1".equals(hawkularServerPort)) {
                    // NOTE(review): falls back to 80 even when the protocol is https - confirm 443 is not intended
                    hawkularServerPort = "80";
                }
            } catch (MalformedURLException mue) {
                // it's possible the user passed a URL with a WildFly expression like
                // "http://${jboss.bind.address:localhost}:8080". Try to parse something like that.
                Matcher m = Pattern.compile("(https?)://(.*):(\\d+)").matcher(installerConfig.getServerUrl());
                if (!m.matches()) {
                    throw mue;
                }
                try {
                    hawkularServerProtocol = m.group(1);
                    hawkularServerHost = m.group(2);
                    hawkularServerPort = m.group(3);
                } catch (Exception e) {
                    throw mue;
                }
            }

            String moduleZip = installerConfig.getModuleDistribution();
            URL moduleZipUrl;
            if (moduleZip == null) {
                // --module is not supplied so try to download agent module from server
                File moduleTempFile = downloadModuleZip(getHawkularServerAgentDownloadUrl(installerConfig));
                if (moduleTempFile == null) {
                    throw new IOException("Failed to retrieve module dist from server, You can use option ["
                            + InstallerConfiguration.OPTION_MODULE_DISTRIBUTION + "] to supply your own");
                }
                filesToDelete.add(moduleTempFile);
                moduleZipUrl = moduleTempFile.toURI().toURL();
            } else if (moduleZip.startsWith("classpath:")) {
                // This special protocol tells us to read module zip as resource from classpath.
                // This is in case the module zip is bundled directly in the installer.
                String resourceUrl = moduleZip.substring(10);
                if (!resourceUrl.startsWith("/")) {
                    resourceUrl = "/" + resourceUrl;
                }
                moduleZipUrl = AgentInstaller.class.getResource(resourceUrl);
                if (moduleZipUrl == null) {
                    throw new IOException("Unable to load module.zip from classpath [" + resourceUrl + "]");
                }
            } else if (moduleZip.matches("(http|https|file):.*")){
                // the module is specified as a URL - we'll download it
                File moduleTempFile = downloadModuleZip(new URL(moduleZip));
                if (moduleTempFile == null) {
                    throw new IOException("Failed to retrieve agent module from server, option ["
                            + InstallerConfiguration.OPTION_MODULE_DISTRIBUTION
                            + "] is now required but it was not supplied");
                }
                filesToDelete.add(moduleTempFile);
                moduleZipUrl = moduleTempFile.toURI().toURL();
            } else {
                // the module is specified as a file path
                moduleZipUrl = new File(moduleZip).toURI().toURL();
            }

            // deploy given module into given app server home directory and
            // set it up the way it talks to hawkular server on hawkularServerUrl
            File socketBindingSnippetFile = createSocketBindingSnippet(hawkularServerHost, hawkularServerPort);
            filesToDelete.add(socketBindingSnippetFile);
            Builder configurationBldr = DeploymentConfiguration.builder()
                    .jbossHome(new File(jbossHome))
                    .module(moduleZipUrl)
                    .socketBinding(socketBindingSnippetFile.toURI().toURL());

            // let the user override the default subsystem snippet that is found in the module zip
            // can be specified as a URL or a file path
            if (installerConfig.getSubsystemSnippet() != null) {
                try {
                    configurationBldr.subsystem(new URL(installerConfig.getSubsystemSnippet()));
                } catch (MalformedURLException mue) {
                    File file = new File(installerConfig.getSubsystemSnippet());
                    if (file.exists()) {
                        configurationBldr.subsystem(file.getAbsoluteFile().toURI().toURL());
                    } else {
                        throw new FileNotFoundException("Subsystem snippet not found at ["
                                + installerConfig.getSubsystemSnippet() + "]");
                    }
                }
            }

            String targetConfig = installerConfig.getTargetConfig();
            if (targetConfig != null) {
                configurationBldr.serverConfig(targetConfig);
            } else {
                targetConfig = DeploymentConfiguration.DEFAULT_SERVER_CONFIG;
                // we'll use this in case of https to resolve server configuration directory
            }

            // some xpaths are different depending on which config we are installing in (standalone, domain, host)
            TargetConfigInfo targetConfigInfo;
            if (targetConfig.matches(".*standalone[^/]*.xml")) {
                targetConfigInfo = new StandaloneTargetConfigInfo();
            } else if (targetConfig.matches(".*host[^/]*.xml")) {
                targetConfigInfo = new HostTargetConfigInfo();
            } else if (targetConfig.matches(".*domain[^/]*.xml")) {
                targetConfigInfo = new DomainTargetConfigInfo();
            } else {
                log.warnf("Don't know the kind of config this is, will assume standalone: [%s]", targetConfig);
                targetConfigInfo = new StandaloneTargetConfigInfo();
            }

            // If we are to talk to the Hawkular Server over HTTPS, we need to set up some additional things
            if (hawkularServerProtocol.equals("https")) {
                String keystorePath = installerConfig.getKeystorePath();
                String keystorePass = installerConfig.getKeystorePassword();
                String keyPass = installerConfig.getKeyPassword();
                String keyAlias = installerConfig.getKeyAlias();
                if (keystorePath == null || keyAlias == null) {
                    throw new Exception(String.format("When using https protocol, the following keystore "
                            + "command line options are required: %s, %s",
                            InstallerConfiguration.OPTION_KEYSTORE_PATH, InstallerConfiguration.OPTION_KEY_ALIAS));
                }

                // password fields are not required, but if not supplied we'll ask user
                if (keystorePass == null) {
                    keystorePass = readPasswordFromStdin("Keystore password:");
                    if (keystorePass == null || keystorePass.isEmpty()) {
                        keystorePass = "";
                        log.warn(InstallerConfiguration.OPTION_KEYSTORE_PASSWORD
                                + " was not provided; using empty password");
                    }
                }
                if (keyPass == null) {
                    keyPass = readPasswordFromStdin("Key password:");
                    if (keyPass == null || keyPass.isEmpty()) {
                        keyPass = "";
                        log.warn(InstallerConfiguration.OPTION_KEY_PASSWORD
                                + " was not provided; using empty password");
                    }
                }

                // if given keystore path is not already present within server-config directory, copy it
                File keystoreSrcFile = new File(keystorePath);
                if (!(keystoreSrcFile.isFile() && keystoreSrcFile.canRead())) {
                    throw new FileNotFoundException("Cannot read " + keystoreSrcFile.getAbsolutePath());
                }
                File targetConfigDir;
                if (new File(targetConfig).isAbsolute()) {
                    targetConfigDir = new File(targetConfig).getParentFile();
                } else {
                    targetConfigDir = new File(jbossHome, targetConfig).getParentFile();
                }
                Path keystoreDst = Paths.get(targetConfigDir.getAbsolutePath()).resolve(keystoreSrcFile.getName());
                // never overwrite target keystore
                if (!keystoreDst.toFile().exists()) {
                    log.info("Copy [" + keystoreSrcFile.getAbsolutePath() + "] to [" + keystoreDst.toString() + "]");
                    Files.copy(Paths.get(keystoreSrcFile.getAbsolutePath()), keystoreDst);
                }

                // setup security-realm and storage-adapter (within hawkular-wildfly-agent subsystem)
                String securityRealm = createSecurityRealm(keystoreSrcFile.getName(), keystorePass, keyPass,
                        keyAlias);
                configurationBldr.addXmlEdit(new XmlEdit(targetConfigInfo.getSecurityRealmsXPath(), securityRealm));
                configurationBldr.addXmlEdit(createStorageAdapter(targetConfigInfo, true, installerConfig));
            } else {
                // just going over non-secure HTTP
                configurationBldr.addXmlEdit(createStorageAdapter(targetConfigInfo, false, installerConfig));
            }

            configurationBldr.addXmlEdit(createManagedServers(targetConfigInfo, installerConfig));
            configurationBldr.addXmlEdit(setEnableFlag(targetConfigInfo, installerConfig));
            configurationBldr.modulesHome("modules");

            // TODO remove if-stmt when WFCORE-1505 is fixed - domain mode can't use outbound bindings
            if (!(targetConfigInfo instanceof StandaloneTargetConfigInfo)) {
                configurationBldr.socketBinding(null);
            }

            new ExtensionDeployer().install(configurationBldr.build());

        } catch (CommandLineParserException pe) {
            log.error(pe);
            printHelp(options);
            if (Boolean.getBoolean("org.hawkular.wildfly.agent.installer.throw-exception-on-error")) {
                throw pe;
            }
        } catch (Exception ex) {
            log.error(ex);
            if (Boolean.getBoolean("org.hawkular.wildfly.agent.installer.throw-exception-on-error")) {
                throw ex;
            }
        } finally {
            // always clean up the temporary files we created along the way
            for (File fileToDelete : filesToDelete) {
                if (!fileToDelete.delete()) {
                    log.warn("Failed to delete temporary file: " + fileToDelete);
                }
            }
        }
    }

    /**
     * Reads password from the console (stdin).
     *
     * @param message to present before reading
     * @return password or null if console is not available
     */
    private static String readPasswordFromStdin(String message) {
        Console console = System.console();
        if (console == null) {
            // no interactive console (e.g. piped input) - caller must cope with null
            return null;
        }
        console.writer().write(message);
        console.writer().flush();
        return String.valueOf(console.readPassword());
    }

    /**
     * Creates xml snippet which sets up security-realm.
     *
     * @param keystoreFile location of the keystore file
     * @param keystorePass the password to access the keystore file
     * @param keyPass the password to access the data for the given alias
     * @param keyAlias the alias specifying the identifying security information
     * @return XML snippet
     */
    private static String createSecurityRealm(String keystoreFile, String keystorePass, String keyPass,
            String keyAlias) {
        // the keystore was copied next to the server config, hence relative-to jboss.server.config.dir
        return new StringBuilder("<security-realm name=\"" + SECURITY_REALM_NAME + "\">")
                .append("<server-identities><ssl>")
                .append("<keystore path=\"" + keystoreFile + "\"")
                .append(" relative-to=\"jboss.server.config.dir\"")
                .append(" keystore-password=\"" + keystorePass + "\"")
                .append(" key-password=\"" + keyPass + "\"")
                .append(" alias=\"" + keyAlias + "\"")
                .append(" /></ssl></server-identities></security-realm>").toString();
    }

    /**
     * Creates XML edit which sets up storage-adapter configuration, creates a reference
     * to the security-realm and enables SSL, if appropriate.
     *
     * @param targetConfigInfo info on the xml config file being edited
     * @param withHttps if the storage adapter will be accessed via HTTPS
     * @return object that can be used to edit some xml content
     */
    private static XmlEdit createStorageAdapter(TargetConfigInfo targetConfigInfo, boolean withHttps,
            InstallerConfiguration installerConfig) {
        String select = targetConfigInfo.getProfileXPath() + "/"
                + "*[namespace-uri()='urn:org.hawkular.agent:agent:1.0']/";
        StringBuilder xml = new StringBuilder("<storage-adapter");
        String tenantId = installerConfig.getTenantId();

        if (installerConfig.isMetricsOnlyMode()) {
            xml.append(" type=\"METRICS\"");
            // metrics-only mode has no Hawkular inventory, so the tenant must be explicit
            if (tenantId == null || tenantId.isEmpty()) {
                throw new IllegalArgumentException("You must specify tenant-id when in metrics-only mode");
            }
        } else {
            xml.append(" type=\"HAWKULAR\"");
        }

        if (tenantId != null && !tenantId.isEmpty()) {
            xml.append(" tenant-id=\"" + tenantId + "\"");
        }
        if (withHttps) {
            xml.append(" security-realm=\"" + SECURITY_REALM_NAME + "\"")
                    .append(" use-ssl=\"true\"");
        }
        if (installerConfig.getFeedId() != null && !installerConfig.getFeedId().isEmpty()) {
            xml.append(" feed-id=\"" + installerConfig.getFeedId() + "\"");
        }
        if (installerConfig.getUsername() != null && !installerConfig.getUsername().isEmpty()) {
            xml.append(" username=\"" + installerConfig.getUsername() + "\"");
        }
        if (installerConfig.getPassword() != null && !installerConfig.getPassword().isEmpty()) {
            xml.append(" password=\"" + installerConfig.getPassword() + "\"");
        }

        // xml.append(" serverOutboundSocketBindingRef=\"hawkular\"");
        // TODO remove if-stmt and uncomment above when WFCORE-1505 is fixed - domain mode can't use outbound bindings
        if (targetConfigInfo instanceof StandaloneTargetConfigInfo) {
            xml.append(" server-outbound-socket-binding-ref=\"hawkular\"");
        } else {
            xml.append(" url=\"").append(installerConfig.getServerUrl()).append("\"");
        }

        xml.append("/>");

        // replaces <storage-adapter> under urn:org.hawkular.agent:agent:1.0 subsystem with above content
        // we ignore whether the original storage-adapter has type of HAWKULAR or METRICS
        return new XmlEdit(select, xml.toString()).withAttribute("type").withIsIgnoreAttributeValue(true);
    }

    /**
     * Creates a (outbound) socket-binding snippet XML file
     *
     * @param host the host where the hawkular server is running
     * @param port the port where the hawkular server is listening
     * @return file to the temporary socket binding snippet file (this should be cleaned up by the caller)
     * @throws IOException on error
     */
    private static File createSocketBindingSnippet(String host, String port) throws IOException {
        StringBuilder xml = new StringBuilder("<outbound-socket-binding name=\"hawkular\">\n")
                .append(" <remote-destination host=\""+host+"\" port=\""+port+"\" />\n")
                .append("</outbound-socket-binding>");
        Path tempFile = Files.createTempFile("hawkular-wildfly-module-installer-outbound-socket-binding", ".xml");
        Files.write(tempFile, xml.toString().getBytes());
        return tempFile.toFile();
    }

    /**
     * Creates the XML edit that replaces the agent subsystem's {@code <managed-servers>}
     * element with a single local-dmr managed server; falls back to the name "Local" and
     * to the config-type's default resource type sets when not supplied.
     *
     * @param targetConfigInfo info on the xml config file being edited
     * @param config installer configuration providing name and resource type sets
     * @return the XML edit to apply
     */
    private static XmlEdit createManagedServers(TargetConfigInfo targetConfigInfo, InstallerConfiguration config) {
        String select = targetConfigInfo.getProfileXPath() + "/"
                + "*[namespace-uri()='urn:org.hawkular.agent:agent:1.0']/";

        String managedServerName = config.getManagedServerName();
        if (managedServerName == null || managedServerName.trim().isEmpty()) {
            managedServerName = "Local"; // just make sure its something
        }

        String managedServerResourceTypeSets = config.getManagedResourceTypeSets();
        if (managedServerResourceTypeSets == null || managedServerResourceTypeSets.trim().isEmpty()) {
            managedServerResourceTypeSets = targetConfigInfo.getManagedServerResourceTypeSets();
        }

        StringBuilder xml = new StringBuilder("<managed-servers>")
                .append("<local-dmr name=\"" + managedServerName + "\" enabled=\"true\" "
                        + "resource-type-sets=\"" + managedServerResourceTypeSets + "\" />")
                .append("</managed-servers>");

        // replaces <managed-servers> under urn:org.hawkular.agent:agent:1.0 subsystem with above content
        return new XmlEdit(select, xml.toString());
    }

    /**
     * Creates the XML edit that sets the agent subsystem's {@code enabled} attribute
     * from the installer configuration.
     *
     * @param targetConfigInfo info on the xml config file being edited
     * @param config installer configuration providing the enabled flag
     * @return the XML edit to apply
     */
    private static XmlEdit setEnableFlag(TargetConfigInfo targetConfigInfo, InstallerConfiguration config) {
        String select = targetConfigInfo.getProfileXPath() + "/"
                + "*[namespace-uri()='urn:org.hawkular.agent:agent:1.0'][@enabled]";
        String isEnabled = String.valueOf(config.isEnabled());
        return new XmlEdit(select, isEnabled).withIsAttributeContent(true).withAttribute("enabled");
    }

    /**
     * Builds the well-known URL on the Hawkular Server from which the agent module
     * distribution can be downloaded.
     *
     * @param config installer configuration providing the server URL
     * @return the download URL
     * @throws MalformedURLException if the composed URL is invalid
     */
    private static URL getHawkularServerAgentDownloadUrl(InstallerConfiguration config)
            throws MalformedURLException {
        String serverUrl = String.format("%s/hawkular/wildfly-agent/download", config.getServerUrl());
        return new URL(serverUrl);
    }

    /**
     * Downloads the Hawkular WildFly Agent ZIP file from a URL
     *
     * @param url where the agent zip is
     * @return absolute path to module downloaded locally or null if it could not be retrieved;
     *         this is a temporary file that should be cleaned once it is used
     */
    private static File downloadModuleZip(URL url) {
        File tempFile;

        try {
            tempFile = File.createTempFile("hawkular-wildfly-agent", ".zip");
        } catch (Exception e) {
            throw new RuntimeException("Cannot create temp file to hold module zip", e);
        }

        try (FileOutputStream fos = new FileOutputStream(tempFile);
                InputStream ios = url.openStream()) {
            IOUtils.copyLarge(ios, fos);
            return tempFile;
        } catch (Exception e) {
            // best-effort: log, remove the partial file and signal failure via null
            log.warn("Unable to download hawkular wildfly agent ZIP: " + url, e);
            tempFile.delete();
        }
        return null;
    }

    /**
     * Prints the usage/help text derived from the processed command line options.
     *
     * @param options the processed command definition; must not be null
     */
    private static void printHelp(ProcessedCommand<?> options) {
        if (options == null) {
            throw new RuntimeException("Cannot print help - options is null");
        }
        System.out.println(options.printHelp());
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.indices;

import org.elasticsearch.action.admin.indices.rollover.Condition;
import org.elasticsearch.action.admin.indices.rollover.MaxAgeCondition;
import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition;
import org.elasticsearch.action.admin.indices.rollover.MaxPrimaryShardSizeCondition;
import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition;
import org.elasticsearch.action.resync.TransportResyncReplicationAction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.mapper.BinaryFieldMapper;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocCountFieldMapper;
import org.elasticsearch.index.mapper.FieldAliasMapper;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperRegistry;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.NestedPathFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.RuntimeField;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper;
import org.elasticsearch.index.seqno.RetentionLeaseBackgroundSyncAction;
import org.elasticsearch.index.seqno.RetentionLeaseSyncAction;
import org.elasticsearch.index.seqno.RetentionLeaseSyncer;
import org.elasticsearch.index.shard.PrimaryReplicaSyncer;
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.index.mapper.BooleanScriptFieldType;
import org.elasticsearch.index.mapper.DateScriptFieldType;
import org.elasticsearch.index.mapper.DoubleScriptFieldType;
import org.elasticsearch.index.mapper.GeoPointScriptFieldType;
import org.elasticsearch.index.mapper.IpScriptFieldType;
import org.elasticsearch.index.mapper.KeywordScriptFieldType;
import org.elasticsearch.index.mapper.LongScriptFieldType;

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;

/**
 * Configures classes and services that are shared by indices on each node.
 */
public class IndicesModule extends AbstractModule {

    // Aggregates concrete, runtime and metadata mappers plus the field filter,
    // combining the built-in entries with those contributed by plugins.
    private final MapperRegistry mapperRegistry;

    /**
     * Builds the module, merging built-in mappers/runtime fields with the ones
     * contributed by the given mapper plugins.
     *
     * @param mapperPlugins plugins that may contribute mappers, runtime fields,
     *                      metadata mappers and field filters
     */
    public IndicesModule(List<MapperPlugin> mapperPlugins) {
        this.mapperRegistry = new MapperRegistry(getMappers(mapperPlugins), getRuntimeFields(mapperPlugins),
                getMetadataMappers(mapperPlugins), getFieldFilter(mapperPlugins));
    }

    /**
     * Named writeables for the rollover {@link Condition} implementations, so they
     * can be (de)serialized over the transport layer.
     */
    public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
        return Arrays.asList(
                new NamedWriteableRegistry.Entry(Condition.class, MaxAgeCondition.NAME, MaxAgeCondition::new),
                new NamedWriteableRegistry.Entry(Condition.class, MaxDocsCondition.NAME, MaxDocsCondition::new),
                new NamedWriteableRegistry.Entry(Condition.class, MaxSizeCondition.NAME, MaxSizeCondition::new),
                new NamedWriteableRegistry.Entry(Condition.class, MaxPrimaryShardSizeCondition.NAME,
                    MaxPrimaryShardSizeCondition::new)
        );
    }

    /**
     * XContent parsers for the rollover {@link Condition} implementations, so they
     * can be parsed back from their JSON/XContent representation.
     */
    public static List<NamedXContentRegistry.Entry> getNamedXContents() {
        return Arrays.asList(
                new NamedXContentRegistry.Entry(Condition.class, new ParseField(MaxAgeCondition.NAME),
                    (p, c) -> MaxAgeCondition.fromXContent(p)),
                new NamedXContentRegistry.Entry(Condition.class, new ParseField(MaxDocsCondition.NAME),
                    (p, c) -> MaxDocsCondition.fromXContent(p)),
                new NamedXContentRegistry.Entry(Condition.class, new ParseField(MaxSizeCondition.NAME),
                    (p, c) -> MaxSizeCondition.fromXContent(p)),
                new NamedXContentRegistry.Entry(Condition.class, new ParseField(MaxPrimaryShardSizeCondition.NAME),
                    (p, c) -> MaxPrimaryShardSizeCondition.fromXContent(p))
        );
    }

    /**
     * Returns the full map of field type name to parser: all built-in mappers plus
     * those contributed by plugins.
     *
     * @param mapperPlugins plugins that may contribute additional mappers
     * @return unmodifiable map of type name to {@link Mapper.TypeParser}
     * @throws IllegalArgumentException if a plugin registers a type name that already exists
     */
    public static Map<String, Mapper.TypeParser> getMappers(List<MapperPlugin> mapperPlugins) {
        Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();

        // builtin mappers
        for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) {
            mappers.put(type.typeName(), type.parser());
        }
        for (RangeType type : RangeType.values()) {
            mappers.put(type.typeName(), type.parser());
        }
        mappers.put(BooleanFieldMapper.CONTENT_TYPE, BooleanFieldMapper.PARSER);
        mappers.put(BinaryFieldMapper.CONTENT_TYPE, BinaryFieldMapper.PARSER);
        mappers.put(CompletionFieldMapper.CONTENT_TYPE, CompletionFieldMapper.PARSER);
        // dates come in two resolutions, each with its own type name and parser
        DateFieldMapper.Resolution milliseconds = DateFieldMapper.Resolution.MILLISECONDS;
        mappers.put(milliseconds.type(), DateFieldMapper.MILLIS_PARSER);
        DateFieldMapper.Resolution nanoseconds = DateFieldMapper.Resolution.NANOSECONDS;
        mappers.put(nanoseconds.type(), DateFieldMapper.NANOS_PARSER);
        mappers.put(FieldAliasMapper.CONTENT_TYPE, new FieldAliasMapper.TypeParser());
        mappers.put(FlattenedFieldMapper.CONTENT_TYPE, FlattenedFieldMapper.PARSER);
        mappers.put(GeoPointFieldMapper.CONTENT_TYPE, GeoPointFieldMapper.PARSER);
        mappers.put(IpFieldMapper.CONTENT_TYPE, IpFieldMapper.PARSER);
        mappers.put(KeywordFieldMapper.CONTENT_TYPE, KeywordFieldMapper.PARSER);
        // "object" and "nested" share the same parser implementation
        mappers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser());
        mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser());
        mappers.put(TextFieldMapper.CONTENT_TYPE, TextFieldMapper.PARSER);

        // plugin-provided mappers may not shadow an already registered type
        for (MapperPlugin mapperPlugin : mapperPlugins) {
            for (Map.Entry<String, Mapper.TypeParser> entry : mapperPlugin.getMappers().entrySet()) {
                if (mappers.put(entry.getKey(), entry.getValue()) != null) {
                    throw new IllegalArgumentException("Mapper [" + entry.getKey() + "] is already registered");
                }
            }
        }
        return Collections.unmodifiableMap(mappers);
    }

    /**
     * Returns the map of runtime field type name to parser: built-in script field
     * types plus those contributed by plugins.
     *
     * @param mapperPlugins plugins that may contribute additional runtime fields
     * @return unmodifiable map of type name to {@link RuntimeField.Parser}
     * @throws IllegalArgumentException if a plugin registers a type name that already exists
     */
    private static Map<String, RuntimeField.Parser> getRuntimeFields(List<MapperPlugin> mapperPlugins) {
        Map<String, RuntimeField.Parser> runtimeParsers = new LinkedHashMap<>();
        runtimeParsers.put(BooleanFieldMapper.CONTENT_TYPE, BooleanScriptFieldType.PARSER);
        runtimeParsers.put(NumberFieldMapper.NumberType.LONG.typeName(), LongScriptFieldType.PARSER);
        runtimeParsers.put(NumberFieldMapper.NumberType.DOUBLE.typeName(), DoubleScriptFieldType.PARSER);
        runtimeParsers.put(IpFieldMapper.CONTENT_TYPE, IpScriptFieldType.PARSER);
        runtimeParsers.put(DateFieldMapper.CONTENT_TYPE, DateScriptFieldType.PARSER);
        runtimeParsers.put(KeywordFieldMapper.CONTENT_TYPE, KeywordScriptFieldType.PARSER);
        runtimeParsers.put(GeoPointFieldMapper.CONTENT_TYPE, GeoPointScriptFieldType.PARSER);

        // plugin-provided runtime fields may not shadow an already registered type
        for (MapperPlugin mapperPlugin : mapperPlugins) {
            for (Map.Entry<String, RuntimeField.Parser> entry : mapperPlugin.getRuntimeFields().entrySet()) {
                if (runtimeParsers.put(entry.getKey(), entry.getValue()) != null) {
                    throw new IllegalArgumentException("Runtime field type [" + entry.getKey()
                        + "] is already registered");
                }
            }
        }
        return Collections.unmodifiableMap(runtimeParsers);
    }

    // Built-in metadata mappers in their (significant) registration order.
    private static final Map<String, MetadataFieldMapper.TypeParser> builtInMetadataMappers
        = initBuiltInMetadataMappers();

    // Read-only view of the built-in metadata field names.
    private static final Set<String> builtInMetadataFields
        = Collections.unmodifiableSet(builtInMetadataMappers.keySet());

    private static Map<String, MetadataFieldMapper.TypeParser> initBuiltInMetadataMappers() {
        Map<String, MetadataFieldMapper.TypeParser> builtInMetadataMappers;
        // Use a LinkedHashMap for metadataMappers because iteration order matters
        builtInMetadataMappers = new LinkedHashMap<>();
        // _ignored first so that we always load it, even if only _id is requested
        builtInMetadataMappers.put(IgnoredFieldMapper.NAME, IgnoredFieldMapper.PARSER);
        // ID second so it will be the first (if no ignored fields) stored field to load
        // (so will benefit from "fields: []" early termination
        builtInMetadataMappers.put(IdFieldMapper.NAME, IdFieldMapper.PARSER);
        builtInMetadataMappers.put(RoutingFieldMapper.NAME, RoutingFieldMapper.PARSER);
        builtInMetadataMappers.put(IndexFieldMapper.NAME, IndexFieldMapper.PARSER);
        builtInMetadataMappers.put(SourceFieldMapper.NAME, SourceFieldMapper.PARSER);
        builtInMetadataMappers.put(NestedPathFieldMapper.NAME, NestedPathFieldMapper.PARSER);
        builtInMetadataMappers.put(VersionFieldMapper.NAME, VersionFieldMapper.PARSER);
        builtInMetadataMappers.put(SeqNoFieldMapper.NAME, SeqNoFieldMapper.PARSER);
        builtInMetadataMappers.put(DocCountFieldMapper.NAME, DocCountFieldMapper.PARSER);
        //_field_names must be added last so that it has a chance to
see all the other mappers builtInMetadataMappers.put(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.PARSER); return Collections.unmodifiableMap(builtInMetadataMappers); } public static Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers(List<MapperPlugin> mapperPlugins) { Map<String, MetadataFieldMapper.TypeParser> metadataMappers = new LinkedHashMap<>(); int i = 0; Map.Entry<String, MetadataFieldMapper.TypeParser> fieldNamesEntry = null; for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry : builtInMetadataMappers.entrySet()) { if (i < builtInMetadataMappers.size() - 1) { metadataMappers.put(entry.getKey(), entry.getValue()); } else { assert entry.getKey().equals(FieldNamesFieldMapper.NAME) : "_field_names must be the last registered mapper, order counts"; fieldNamesEntry = entry; } i++; } assert fieldNamesEntry != null; for (MapperPlugin mapperPlugin : mapperPlugins) { for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry : mapperPlugin.getMetadataMappers().entrySet()) { if (entry.getKey().equals(FieldNamesFieldMapper.NAME)) { throw new IllegalArgumentException("Plugin cannot contain metadata mapper [" + FieldNamesFieldMapper.NAME + "]"); } if (metadataMappers.put(entry.getKey(), entry.getValue()) != null) { throw new IllegalArgumentException("MetadataFieldMapper [" + entry.getKey() + "] is already registered"); } } } // we register _field_names here so that it has a chance to see all the other mappers, including from plugins metadataMappers.put(fieldNamesEntry.getKey(), fieldNamesEntry.getValue()); return Collections.unmodifiableMap(metadataMappers); } /** * Returns a set containing all of the builtin metadata fields */ public static Set<String> getBuiltInMetadataFields() { return builtInMetadataFields; } private static Function<String, Predicate<String>> getFieldFilter(List<MapperPlugin> mapperPlugins) { Function<String, Predicate<String>> fieldFilter = MapperPlugin.NOOP_FIELD_FILTER; for (MapperPlugin mapperPlugin : mapperPlugins) 
{ fieldFilter = and(fieldFilter, mapperPlugin.getFieldFilter()); } return fieldFilter; } private static Function<String, Predicate<String>> and(Function<String, Predicate<String>> first, Function<String, Predicate<String>> second) { //the purpose of this method is to not chain no-op field predicates, so that we can easily find out when no plugins plug in //a field filter, hence skip the mappings filtering part as a whole, as it requires parsing mappings into a map. if (first == MapperPlugin.NOOP_FIELD_FILTER) { return second; } if (second == MapperPlugin.NOOP_FIELD_FILTER) { return first; } return index -> { Predicate<String> firstPredicate = first.apply(index); Predicate<String> secondPredicate = second.apply(index); if (firstPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { return secondPredicate; } if (secondPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { return firstPredicate; } return firstPredicate.and(secondPredicate); }; } @Override protected void configure() { bind(IndicesStore.class).asEagerSingleton(); bind(IndicesClusterStateService.class).asEagerSingleton(); bind(TransportResyncReplicationAction.class).asEagerSingleton(); bind(PrimaryReplicaSyncer.class).asEagerSingleton(); bind(RetentionLeaseSyncAction.class).asEagerSingleton(); bind(RetentionLeaseBackgroundSyncAction.class).asEagerSingleton(); bind(RetentionLeaseSyncer.class).asEagerSingleton(); } /** * A registry for all field mappers. */ public MapperRegistry getMapperRegistry() { return mapperRegistry; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.mapreduce.index;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.mapreduce.index.IndexVerificationOutputRepository.IndexVerificationErrorType;

import java.util.Arrays;
import java.util.Objects;

/**
 * Value object describing a single row of index-verification output: the data
 * and index table involved, the row keys and timestamps on both sides, and the
 * error detail (message, expected/actual values, verification phase, type).
 * Instances are created via {@link IndexVerificationOutputRowBuilder}.
 */
public class IndexVerificationOutputRow {
    // Label for the scan-max-timestamp field; note it already ends with ": ".
    public static final String SCAN_MAX_TIMESTAMP = "ScanMaxTimestamp: ";

    private String dataTableName;
    private String indexTableName;
    private Long scanMaxTimestamp;
    private byte[] dataTableRowKey;
    private byte[] indexTableRowKey;
    private Long dataTableRowTimestamp;
    private Long indexTableRowTimestamp;
    private String errorMessage;
    private byte[] expectedValue;
    private byte[] actualValue;
    private byte[] phaseValue;
    private IndexVerificationErrorType errorType;

    // Private: instances are only built through the builder below.
    private IndexVerificationOutputRow(String dataTableName, String indexTableName,
                                       byte[] dataTableRowKey, Long scanMaxTimestamp,
                                       byte[] indexTableRowKey,
                                       long dataTableRowTimestamp,
                                       long indexTableRowTimestamp,
                                       String errorMessage, byte[] expectedValue,
                                       byte[] actualValue, byte[] phaseValue,
                                       IndexVerificationErrorType errorType) {
        this.dataTableName = dataTableName;
        this.indexTableName = indexTableName;
        this.scanMaxTimestamp = scanMaxTimestamp;
        this.dataTableRowKey = dataTableRowKey;
        this.indexTableRowKey = indexTableRowKey;
        this.dataTableRowTimestamp = dataTableRowTimestamp;
        this.indexTableRowTimestamp = indexTableRowTimestamp;
        this.errorMessage = errorMessage;
        this.expectedValue = expectedValue;
        this.actualValue = actualValue;
        this.phaseValue = phaseValue;
        this.errorType = errorType;
    }

    public String getDataTableName() {
        return dataTableName;
    }

    public String getIndexTableName() {
        return indexTableName;
    }

    public Long getScanMaxTimestamp() {
        return scanMaxTimestamp;
    }

    public byte[] getIndexTableRowKey() {
        return indexTableRowKey;
    }

    public long getIndexTableRowTimestamp() {
        return indexTableRowTimestamp;
    }

    public String getErrorMessage() {
        return errorMessage;
    }

    public byte[] getExpectedValue() {
        return expectedValue;
    }

    public byte[] getActualValue() {
        return actualValue;
    }

    public byte[] getPhaseValue() {
        return phaseValue;
    }

    public byte[] getDataTableRowKey() {
        return dataTableRowKey;
    }

    public Long getDataTableRowTimestamp() {
        return dataTableRowTimestamp;
    }

    public IndexVerificationErrorType getErrorType() {
        return errorType;
    }

    /**
     * Field-by-field equality; byte[] fields are compared by content via
     * {@link Arrays#equals(byte[], byte[])}, everything else via
     * {@link Objects#equals(Object, Object)} (null-safe).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // instanceof is false for null, so this also rejects null
        if (!(o instanceof IndexVerificationOutputRow)) {
            return false;
        }
        IndexVerificationOutputRow otherRow = (IndexVerificationOutputRow) o;
        return Objects.equals(dataTableName, otherRow.getDataTableName()) &&
            Objects.equals(indexTableName, otherRow.getIndexTableName()) &&
            Objects.equals(scanMaxTimestamp, otherRow.getScanMaxTimestamp()) &&
            Arrays.equals(dataTableRowKey, otherRow.getDataTableRowKey()) &&
            Arrays.equals(indexTableRowKey, otherRow.getIndexTableRowKey()) &&
            Objects.equals(dataTableRowTimestamp, otherRow.getDataTableRowTimestamp()) &&
            Objects.equals(indexTableRowTimestamp, otherRow.getIndexTableRowTimestamp()) &&
            Objects.equals(errorMessage, otherRow.getErrorMessage()) &&
            Arrays.equals(expectedValue, otherRow.getExpectedValue()) &&
            Arrays.equals(actualValue, otherRow.getActualValue()) &&
            Arrays.equals(phaseValue, otherRow.getPhaseValue()) &&
            Objects.equals(errorType, otherRow.getErrorType());
    }

    // Hash only a stable subset of the fields; equal rows hash equally since
    // equals() compares a superset of these.
    @Override
    public int hashCode() {
        return Objects.hashCode(scanMaxTimestamp) ^ Objects.hashCode(indexTableName) ^
            Arrays.hashCode(dataTableRowKey);
    }

    /**
     * Human-readable rendering as "Label: value," pairs.
     * Fixes relative to the previous version: SCAN_MAX_TIMESTAMP already contains
     * ": ", so no second separator is appended (previously rendered as
     * "ScanMaxTimestamp: : <ts>"), and a "," separator was added between the
     * verification-phase and error-type fields for consistency.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(IndexVerificationOutputRepository.DATA_TABLE_NAME + ": ").append(dataTableName).append(",");
        sb.append(IndexVerificationOutputRepository.INDEX_TABLE_NAME + ": ").append(indexTableName).append(",");
        sb.append(SCAN_MAX_TIMESTAMP).append(scanMaxTimestamp).append(",");
        sb.append(IndexVerificationOutputRepository.DATA_TABLE_ROW_KEY + ": ").append(Bytes.toString(dataTableRowKey)).append(",");
        sb.append(IndexVerificationOutputRepository.INDEX_TABLE_ROW_KEY + ": ").append(Bytes.toString(indexTableRowKey)).append(",");
        sb.append(IndexVerificationOutputRepository.DATA_TABLE_TS + ": ").append(dataTableRowTimestamp).append(",");
        sb.append(IndexVerificationOutputRepository.INDEX_TABLE_TS + ": ").append(indexTableRowTimestamp).append(",");
        sb.append(IndexVerificationOutputRepository.ERROR_MESSAGE + ": ").append(errorMessage).append(",");
        sb.append(IndexVerificationOutputRepository.EXPECTED_VALUE + ": ").append(Bytes.toString(expectedValue)).append(",");
        sb.append(IndexVerificationOutputRepository.ACTUAL_VALUE + ": ").append(Bytes.toString(actualValue)).append(",");
        sb.append(IndexVerificationOutputRepository.VERIFICATION_PHASE + ": ").append(Bytes.toString(phaseValue)).append(",");
        sb.append(IndexVerificationOutputRepository.ERROR_TYPE + ": ").append(Objects.toString(errorType));
        return sb.toString();
    }

    /**
     * Fluent builder for {@link IndexVerificationOutputRow}; every setter returns
     * {@code this} and unset fields default to null/0.
     */
    public static class IndexVerificationOutputRowBuilder {
        private String dataTableName;
        private String indexTableName;
        private Long scanMaxTimestamp;
        private byte[] dataTableRowKey;
        private byte[] indexTableRowKey;
        private long dataTableRowTimestamp;
        private long indexTableRowTimestamp;
        private String errorMessage;
        private byte[] expectedValue;
        private byte[] actualValue;
        private byte[] phaseValue;
        private IndexVerificationErrorType errorType;

        public IndexVerificationOutputRowBuilder setDataTableName(String dataTableName) {
            this.dataTableName = dataTableName;
            return this;
        }

        public IndexVerificationOutputRowBuilder setIndexTableName(String indexTableName) {
            this.indexTableName = indexTableName;
            return this;
        }

        public IndexVerificationOutputRowBuilder setScanMaxTimestamp(Long scanMaxTimestamp) {
            this.scanMaxTimestamp = scanMaxTimestamp;
            return this;
        }

        public IndexVerificationOutputRowBuilder setIndexTableRowKey(byte[] indexTableRowKey) {
            this.indexTableRowKey = indexTableRowKey;
            return this;
        }

        public IndexVerificationOutputRowBuilder setDataTableRowKey(byte[] dataTableRowKey) {
            this.dataTableRowKey = dataTableRowKey;
            return this;
        }

        public IndexVerificationOutputRowBuilder setDataTableRowTimestamp(long dataTableRowTimestamp) {
            this.dataTableRowTimestamp = dataTableRowTimestamp;
            return this;
        }

        public IndexVerificationOutputRowBuilder setIndexTableRowTimestamp(long indexTableRowTimestamp) {
            this.indexTableRowTimestamp = indexTableRowTimestamp;
            return this;
        }

        public IndexVerificationOutputRowBuilder setErrorMessage(String errorMessage) {
            this.errorMessage = errorMessage;
            return this;
        }

        public IndexVerificationOutputRowBuilder setExpectedValue(byte[] expectedValue) {
            this.expectedValue = expectedValue;
            return this;
        }

        public IndexVerificationOutputRowBuilder setActualValue(byte[] actualValue) {
            this.actualValue = actualValue;
            return this;
        }

        public IndexVerificationOutputRowBuilder setPhaseValue(byte[] phaseValue) {
            this.phaseValue = phaseValue;
            return this;
        }

        public IndexVerificationOutputRowBuilder setErrorType(IndexVerificationErrorType errorType) {
            this.errorType = errorType;
            return this;
        }

        public IndexVerificationOutputRow build() {
            return new IndexVerificationOutputRow(dataTableName, indexTableName, dataTableRowKey,
                scanMaxTimestamp, indexTableRowKey, dataTableRowTimestamp, indexTableRowTimestamp,
                errorMessage, expectedValue, actualValue, phaseValue, errorType);
        }
    }
}
/* * Copyright 2016, The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package <%= appPackage %>.tasks; import android.content.Intent; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.Snackbar; import android.support.v4.content.ContextCompat; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.widget.PopupMenu; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.CheckBox; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.TextView; import <%= appPackage %>.R; import <%= appPackage %>.addedittask.AddEditTaskActivity; import <%= appPackage %>.data.Task; import <%= appPackage %>.di.ActivityScoped; import <%= appPackage %>.taskdetail.TaskDetailActivity; import java.util.ArrayList; import java.util.List; import javax.inject.Inject; import dagger.android.support.DaggerFragment; import static com.google.common.base.Preconditions.checkNotNull; /** * Display a grid of {@link Task}s. User can choose to view all, active or completed tasks. 
 */
@ActivityScoped
public class TasksFragment extends DaggerFragment implements TasksContract.View {

    // Presenter injected by Dagger; drives all user actions from this view.
    @Inject
    TasksContract.Presenter mPresenter;

    /**
     * Listener for clicks on tasks in the ListView.
     */
    TaskItemListener mItemListener = new TaskItemListener() {
        @Override
        public void onTaskClick(Task clickedTask) {
            mPresenter.openTaskDetails(clickedTask);
        }

        @Override
        public void onCompleteTaskClick(Task completedTask) {
            mPresenter.completeTask(completedTask);
        }

        @Override
        public void onActivateTaskClick(Task activatedTask) {
            mPresenter.activateTask(activatedTask);
        }
    };

    // Adapter backing the task list; data swapped wholesale via replaceData().
    private TasksAdapter mListAdapter;
    // "No tasks" empty-state container and its sub-views.
    private View mNoTasksView;
    private ImageView mNoTaskIcon;
    private TextView mNoTaskMainView;
    private TextView mNoTaskAddView;
    // Container shown when there are tasks to display.
    private LinearLayout mTasksView;
    // Label showing which filter (all/active/completed) is in effect.
    private TextView mFilteringLabelView;

    @Inject
    public TasksFragment() {
        // Requires empty public constructor
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Start with an empty adapter; real data arrives via showTasks().
        mListAdapter = new TasksAdapter(new ArrayList<Task>(0), mItemListener);
    }

    @Override
    public void onResume() {
        super.onResume();
        // Attach this view to the presenter each time the fragment becomes active.
        mPresenter.takeView(this);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mPresenter.dropView();  //prevent leaking activity in
        // case presenter is orchestrating a long running task
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // NOTE(review): super.onActivityResult(...) is not called here — nested
        // fragments would not receive results; confirm this is intentional.
        mPresenter.result(requestCode, resultCode);
    }

    /**
     * Inflates the fragment layout and wires up the list, empty-state views,
     * the add-task FAB and the pull-to-refresh indicator.
     */
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.tasks_frag, container, false);

        // Set up tasks view
        ListView listView = root.findViewById(R.id.tasks_list);
        listView.setAdapter(mListAdapter);
        mFilteringLabelView = root.findViewById(R.id.filteringLabel);
        mTasksView = root.findViewById(R.id.tasksLL);

        // Set up  no tasks view
        mNoTasksView = root.findViewById(R.id.noTasks);
        mNoTaskIcon = root.findViewById(R.id.noTasksIcon);
        mNoTaskMainView = root.findViewById(R.id.noTasksMain);
        mNoTaskAddView = root.findViewById(R.id.noTasksAdd);
        mNoTaskAddView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showAddTask();
            }
        });

        // Set up floating action button (hosted by the activity, not this layout)
        FloatingActionButton fab = getActivity().findViewById(R.id.fab_add_task);
        fab.setImageResource(R.drawable.ic_add);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mPresenter.addNewTask();
            }
        });

        // Set up progress indicator
        final ScrollChildSwipeRefreshLayout swipeRefreshLayout = root.findViewById(R.id.refresh_layout);
        swipeRefreshLayout.setColorSchemeColors(
                ContextCompat.getColor(getActivity(), R.color.colorPrimary),
                ContextCompat.getColor(getActivity(), R.color.colorAccent),
                ContextCompat.getColor(getActivity(), R.color.colorPrimaryDark)
        );
        // Set the scrolling view in the custom SwipeRefreshLayout.
        swipeRefreshLayout.setScrollUpChild(listView);
        swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // false = cached data is acceptable; a pull gesture is not a forced reload here
                mPresenter.loadTasks(false);
            }
        });

        setHasOptionsMenu(true);

        return root;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // NOTE(review): always returns true, even for unhandled items; the usual
        // pattern is to return super.onOptionsItemSelected(item) from a default
        // branch — confirm no other menu items need to propagate.
        switch (item.getItemId()) {
            case R.id.menu_clear:
                mPresenter.clearCompletedTasks();
                break;
            case R.id.menu_filter:
                showFilteringPopUpMenu();
                break;
            case R.id.menu_refresh:
                // true = force a reload from the repository
                mPresenter.loadTasks(true);
                break;
        }
        return true;
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.tasks_fragment_menu, menu);
    }

    /**
     * Shows the filter popup anchored to the filter menu item and forwards the
     * chosen filter to the presenter, then reloads.
     */
    @Override
    public void showFilteringPopUpMenu() {
        PopupMenu popup = new PopupMenu(getContext(), getActivity().findViewById(R.id.menu_filter));
        popup.getMenuInflater().inflate(R.menu.filter_tasks, popup.getMenu());

        popup.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
            public boolean onMenuItemClick(MenuItem item) {
                switch (item.getItemId()) {
                    case R.id.active:
                        mPresenter.setFiltering(TasksFilterType.ACTIVE_TASKS);
                        break;
                    case R.id.completed:
                        mPresenter.setFiltering(TasksFilterType.COMPLETED_TASKS);
                        break;
                    default:
                        mPresenter.setFiltering(TasksFilterType.ALL_TASKS);
                        break;
                }
                mPresenter.loadTasks(false);
                return true;
            }
        });

        popup.show();
    }

    @Override
    public void setLoadingIndicator(final boolean active) {
        // View may already be gone if the presenter calls back after teardown.
        if (getView() == null) {
            return;
        }
        final SwipeRefreshLayout srl = getView().findViewById(R.id.refresh_layout);

        // Make sure setRefreshing() is called after the layout is done with everything else.
        srl.post(new Runnable() {
            @Override
            public void run() {
                srl.setRefreshing(active);
            }
        });
    }

    @Override
    public void showTasks(List<Task> tasks) {
        mListAdapter.replaceData(tasks);

        mTasksView.setVisibility(View.VISIBLE);
        mNoTasksView.setVisibility(View.GONE);
    }

    @Override
    public void showNoActiveTasks() {
        showNoTasksViews(
                getResources().getString(R.string.no_tasks_active),
                R.drawable.ic_check_circle_24dp,
                false
        );
    }

    @Override
    public void showNoTasks() {
        showNoTasksViews(
                getResources().getString(R.string.no_tasks_all),
                R.drawable.ic_assignment_turned_in_24dp,
                false
        );
    }

    @Override
    public void showNoCompletedTasks() {
        showNoTasksViews(
                getResources().getString(R.string.no_tasks_completed),
                R.drawable.ic_verified_user_24dp,
                false
        );
    }

    @Override
    public void showSuccessfullySavedMessage() {
        showMessage(getString(R.string.successfully_saved_task_message));
    }

    // Swaps the list for the empty-state view with the given text and icon;
    // showAddView toggles the "add a task" call-to-action.
    private void showNoTasksViews(String mainText, int iconRes, boolean showAddView) {
        mTasksView.setVisibility(View.GONE);
        mNoTasksView.setVisibility(View.VISIBLE);

        mNoTaskMainView.setText(mainText);
        //noinspection deprecation
        mNoTaskIcon.setImageDrawable(getResources().getDrawable(iconRes));
        mNoTaskAddView.setVisibility(showAddView ? View.VISIBLE : View.GONE);
    }

    @Override
    public void showActiveFilterLabel() {
        mFilteringLabelView.setText(getResources().getString(R.string.label_active));
    }

    @Override
    public void showCompletedFilterLabel() {
        mFilteringLabelView.setText(getResources().getString(R.string.label_completed));
    }

    @Override
    public void showAllFilterLabel() {
        mFilteringLabelView.setText(getResources().getString(R.string.label_all));
    }

    @Override
    public void showAddTask() {
        Intent intent = new Intent(getContext(), AddEditTaskActivity.class);
        startActivityForResult(intent, AddEditTaskActivity.REQUEST_ADD_TASK);
    }

    @Override
    public void showTaskDetailsUi(String taskId) {
        //Shown in it's own Activity, since it makes more sense that way
        // and it gives us the flexibility to show some Intent stubbing.
        Intent intent = new Intent(getContext(), TaskDetailActivity.class);
        intent.putExtra(TaskDetailActivity.EXTRA_TASK_ID, taskId);
        startActivity(intent);
    }

    @Override
    public void showTaskMarkedComplete() {
        showMessage(getString(R.string.task_marked_complete));
    }

    @Override
    public void showTaskMarkedActive() {
        showMessage(getString(R.string.task_marked_active));
    }

    @Override
    public void showCompletedTasksCleared() {
        showMessage(getString(R.string.completed_tasks_cleared));
    }

    @Override
    public void showLoadingTasksError() {
        showMessage(getString(R.string.loading_tasks_error));
    }

    // Transient feedback via Snackbar anchored to the fragment's root view.
    private void showMessage(String message) {
        Snackbar.make(getView(), message, Snackbar.LENGTH_LONG).show();
    }

    @Override
    public boolean isActive() {
        return isAdded();
    }

    /** Callbacks for row-level interactions on a task item. */
    public interface TaskItemListener {

        void onTaskClick(Task clickedTask);

        void onCompleteTaskClick(Task completedTask);

        void onActivateTaskClick(Task activatedTask);
    }

    // Static nested class: does not capture the fragment instance.
    private static class TasksAdapter extends BaseAdapter {

        private List<Task> mTasks;
        private TaskItemListener mItemListener;

        public TasksAdapter(List<Task> tasks, TaskItemListener itemListener) {
            setList(tasks);
            mItemListener = itemListener;
        }

        // Replaces the backing list and refreshes the ListView.
        public void replaceData(List<Task> tasks) {
            setList(tasks);
            notifyDataSetChanged();
        }

        private void setList(List<Task> tasks) {
            mTasks = checkNotNull(tasks);
        }

        @Override
        public int getCount() {
            return mTasks.size();
        }

        @Override
        public Task getItem(int i) {
            return mTasks.get(i);
        }

        @Override
        public long getItemId(int i) {
            return i;
        }

        @Override
        public View getView(int i, View view, ViewGroup viewGroup) {
            // Recycle the convertView when the ListView provides one.
            View rowView = view;
            if (rowView == null) {
                LayoutInflater inflater = LayoutInflater.from(viewGroup.getContext());
                rowView = inflater.inflate(R.layout.task_item, viewGroup, false);
            }

            final Task task = getItem(i);

            TextView titleTV = rowView.findViewById(R.id.title);
            titleTV.setText(task.getTitleForList());

            CheckBox completeCB = rowView.findViewById(R.id.complete);

            // Active/completed task UI
            completeCB.setChecked(task.isCompleted());
            if (task.isCompleted()) {
                //noinspection deprecation (api <16)
                rowView.setBackgroundDrawable(viewGroup.getContext()
                        .getResources().getDrawable(R.drawable.list_completed_touch_feedback));
            } else {
                //noinspection deprecation (api <16)
                rowView.setBackgroundDrawable(viewGroup.getContext()
                        .getResources().getDrawable(R.drawable.touch_feedback));
            }

            completeCB.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // Checkbox toggles completion state through the presenter.
                    if (!task.isCompleted()) {
                        mItemListener.onCompleteTaskClick(task);
                    } else {
                        mItemListener.onActivateTaskClick(task);
                    }
                }
            });

            rowView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    mItemListener.onTaskClick(task);
                }
            });

            return rowView;
        }
    }
}
/* * Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.measure.fxopt; import static com.opengamma.strata.measure.fxopt.FxCalculationUtils.checkBlackVolatilities; import static com.opengamma.strata.measure.fxopt.FxCalculationUtils.checkTrinomialTreeVolatilities; import java.time.LocalDate; import com.opengamma.strata.basics.currency.CurrencyAmount; import com.opengamma.strata.basics.currency.CurrencyPair; import com.opengamma.strata.basics.currency.MultiCurrencyAmount; import com.opengamma.strata.collect.ArgChecker; import com.opengamma.strata.data.scenario.CurrencyScenarioArray; import com.opengamma.strata.data.scenario.MultiCurrencyScenarioArray; import com.opengamma.strata.data.scenario.ScenarioArray; import com.opengamma.strata.market.param.CurrencyParameterSensitivities; import com.opengamma.strata.market.sensitivity.PointSensitivities; import com.opengamma.strata.measure.rate.RatesScenarioMarketData; import com.opengamma.strata.pricer.fxopt.BlackFxOptionVolatilities; import com.opengamma.strata.pricer.fxopt.BlackFxSingleBarrierOptionTradePricer; import com.opengamma.strata.pricer.fxopt.FxOptionVolatilities; import com.opengamma.strata.pricer.fxopt.ImpliedTrinomialTreeFxSingleBarrierOptionTradePricer; import com.opengamma.strata.pricer.rate.RatesProvider; import com.opengamma.strata.pricer.sensitivity.MarketQuoteSensitivityCalculator; import com.opengamma.strata.product.fxopt.ResolvedFxSingleBarrierOptionTrade; /** * Multi-scenario measure calculations for FX single barrier option trades. * <p> * Each method corresponds to a measure, typically calculated by one or more calls to the pricer. */ final class FxSingleBarrierOptionMeasureCalculations { /** * Default implementation. 
*/ public static final FxSingleBarrierOptionMeasureCalculations DEFAULT = new FxSingleBarrierOptionMeasureCalculations( BlackFxSingleBarrierOptionTradePricer.DEFAULT, ImpliedTrinomialTreeFxSingleBarrierOptionTradePricer.DEFAULT); /** * The market quote sensitivity calculator. */ private static final MarketQuoteSensitivityCalculator MARKET_QUOTE_SENS = MarketQuoteSensitivityCalculator.DEFAULT; /** * One basis point, expressed as a {@code double}. */ private static final double ONE_BASIS_POINT = 1e-4; /** * Pricer for {@link ResolvedFxSingleBarrierOptionTrade}. */ private final BlackFxSingleBarrierOptionTradePricer blackPricer; /** * Pricer for {@link ResolvedFxSingleBarrierOptionTrade}. */ private final ImpliedTrinomialTreeFxSingleBarrierOptionTradePricer trinomialTreePricer; /** * Creates an instance. * * @param blackPricer the pricer for {@link ResolvedFxSingleBarrierOptionTrade} * @param trinomialTreePricer the pricer for {@link ResolvedFxSingleBarrierOptionTrade} SABR */ FxSingleBarrierOptionMeasureCalculations( BlackFxSingleBarrierOptionTradePricer blackPricer, ImpliedTrinomialTreeFxSingleBarrierOptionTradePricer trinomialTreePricer) { this.blackPricer = ArgChecker.notNull(blackPricer, "blackPricer"); this.trinomialTreePricer = ArgChecker.notNull(trinomialTreePricer, "trinomialTreePricer"); } //------------------------------------------------------------------------- // calculates present value for all scenarios MultiCurrencyScenarioArray presentValue( ResolvedFxSingleBarrierOptionTrade trade, RatesScenarioMarketData ratesMarketData, FxOptionScenarioMarketData optionMarketData, FxSingleBarrierOptionMethod method) { CurrencyPair currencyPair = trade.getProduct().getCurrencyPair(); return MultiCurrencyScenarioArray.of( ratesMarketData.getScenarioCount(), i -> presentValue( trade, ratesMarketData.scenario(i).ratesProvider(), optionMarketData.scenario(i).volatilities(currencyPair), method)); } // present value for one scenario MultiCurrencyAmount presentValue( 
ResolvedFxSingleBarrierOptionTrade trade, RatesProvider ratesProvider, FxOptionVolatilities volatilities, FxSingleBarrierOptionMethod method) { if (method == FxSingleBarrierOptionMethod.TRINOMIAL_TREE) { return trinomialTreePricer.presentValue(trade, ratesProvider, checkTrinomialTreeVolatilities(volatilities)); } else { return blackPricer.presentValue(trade, ratesProvider, checkBlackVolatilities(volatilities)); } } //------------------------------------------------------------------------- // calculates calibrated sum PV01 for all scenarios MultiCurrencyScenarioArray pv01RatesCalibratedSum( ResolvedFxSingleBarrierOptionTrade trade, RatesScenarioMarketData ratesMarketData, FxOptionScenarioMarketData optionMarketData, FxSingleBarrierOptionMethod method) { CurrencyPair currencyPair = trade.getProduct().getCurrencyPair(); return MultiCurrencyScenarioArray.of( ratesMarketData.getScenarioCount(), i -> pv01RatesCalibratedSum( trade, ratesMarketData.scenario(i).ratesProvider(), optionMarketData.scenario(i).volatilities(currencyPair), method)); } // calibrated sum PV01 for one scenario MultiCurrencyAmount pv01RatesCalibratedSum( ResolvedFxSingleBarrierOptionTrade trade, RatesProvider ratesProvider, FxOptionVolatilities volatilities, FxSingleBarrierOptionMethod method) { CurrencyParameterSensitivities paramSens = parameterSensitivities(trade, ratesProvider, volatilities, method); return paramSens.total().multipliedBy(ONE_BASIS_POINT); } //------------------------------------------------------------------------- // calculates calibrated bucketed PV01 for all scenarios ScenarioArray<CurrencyParameterSensitivities> pv01RatesCalibratedBucketed( ResolvedFxSingleBarrierOptionTrade trade, RatesScenarioMarketData ratesMarketData, FxOptionScenarioMarketData optionMarketData, FxSingleBarrierOptionMethod method) { CurrencyPair currencyPair = trade.getProduct().getCurrencyPair(); return ScenarioArray.of( ratesMarketData.getScenarioCount(), i -> pv01RatesCalibratedBucketed( trade, 
          // remaining per-scenario lambda arguments of the calibrated bucketed
          // PV01 scenario method, whose opening lines precede this chunk
          ratesMarketData.scenario(i).ratesProvider(),
          optionMarketData.scenario(i).volatilities(currencyPair),
          method));
  }

  // calibrated bucketed PV01 for one scenario
  // (full parameter sensitivities scaled to a one basis point shift)
  CurrencyParameterSensitivities pv01RatesCalibratedBucketed(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesProvider ratesProvider,
      FxOptionVolatilities volatilities,
      FxSingleBarrierOptionMethod method) {

    CurrencyParameterSensitivities paramSens =
        parameterSensitivities(trade, ratesProvider, volatilities, method);
    return paramSens.multipliedBy(ONE_BASIS_POINT);
  }

  //-------------------------------------------------------------------------
  // calculates market quote sum PV01 for all scenarios
  MultiCurrencyScenarioArray pv01RatesMarketQuoteSum(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesScenarioMarketData ratesMarketData,
      FxOptionScenarioMarketData optionMarketData,
      FxSingleBarrierOptionMethod method) {

    CurrencyPair currencyPair = trade.getProduct().getCurrencyPair();
    // one entry per scenario, each delegating to the single-scenario overload
    return MultiCurrencyScenarioArray.of(
        ratesMarketData.getScenarioCount(),
        i -> pv01RatesMarketQuoteSum(
            trade,
            ratesMarketData.scenario(i).ratesProvider(),
            optionMarketData.scenario(i).volatilities(currencyPair),
            method));
  }

  // market quote sum PV01 for one scenario
  // (sensitivities converted to market quotes, totalled, scaled by one basis point)
  MultiCurrencyAmount pv01RatesMarketQuoteSum(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesProvider ratesProvider,
      FxOptionVolatilities volatilities,
      FxSingleBarrierOptionMethod method) {

    CurrencyParameterSensitivities paramSens =
        parameterSensitivities(trade, ratesProvider, volatilities, method);
    return MARKET_QUOTE_SENS.sensitivity(paramSens, ratesProvider).total().multipliedBy(ONE_BASIS_POINT);
  }

  //-------------------------------------------------------------------------
  // calculates market quote bucketed PV01 for all scenarios
  ScenarioArray<CurrencyParameterSensitivities> pv01RatesMarketQuoteBucketed(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesScenarioMarketData ratesMarketData,
      FxOptionScenarioMarketData optionMarketData,
      FxSingleBarrierOptionMethod method) {

    CurrencyPair currencyPair = trade.getProduct().getCurrencyPair();
    return ScenarioArray.of(
        ratesMarketData.getScenarioCount(),
        i -> pv01RatesMarketQuoteBucketed(
            trade,
            ratesMarketData.scenario(i).ratesProvider(),
            optionMarketData.scenario(i).volatilities(currencyPair),
            method));
  }

  // market quote bucketed PV01 for one scenario
  CurrencyParameterSensitivities pv01RatesMarketQuoteBucketed(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesProvider ratesProvider,
      FxOptionVolatilities volatilities,
      FxSingleBarrierOptionMethod method) {

    CurrencyParameterSensitivities paramSens =
        parameterSensitivities(trade, ratesProvider, volatilities, method);
    return MARKET_QUOTE_SENS.sensitivity(paramSens, ratesProvider).multipliedBy(ONE_BASIS_POINT);
  }

  // point sensitivity
  // The trinomial tree pricer yields parameter sensitivities directly; the
  // Black pricer yields sticky-strike point sensitivities which are then
  // resolved against the rates provider's curve parameters.
  private CurrencyParameterSensitivities parameterSensitivities(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesProvider ratesProvider,
      FxOptionVolatilities volatilities,
      FxSingleBarrierOptionMethod method) {

    if (method == FxSingleBarrierOptionMethod.TRINOMIAL_TREE) {
      return trinomialTreePricer.presentValueSensitivityRates(
          trade, ratesProvider, checkTrinomialTreeVolatilities(volatilities));
    } else {
      PointSensitivities pointSens = blackPricer.presentValueSensitivityRatesStickyStrike(
          trade, ratesProvider, checkBlackVolatilities(volatilities));
      return ratesProvider.parameterSensitivity(pointSens);
    }
  }

  //-------------------------------------------------------------------------
  // calculates vega (present value volatility sensitivities) for all scenarios
  // (not supported for the trinomial tree method)
  ScenarioArray<CurrencyParameterSensitivities> vegaMarketQuoteBucketed(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesScenarioMarketData ratesMarketData,
      FxOptionScenarioMarketData optionMarketData,
      FxSingleBarrierOptionMethod method) {

    CurrencyPair currencyPair = trade.getProduct().getCurrencyPair();
    if (method == FxSingleBarrierOptionMethod.TRINOMIAL_TREE) {
      throw new IllegalArgumentException(
          "FX single barrier option Trinomial Tree pricer does not currently support vega calculation");
    } else {
      return ScenarioArray.of(
          ratesMarketData.getScenarioCount(),
          i -> vegaMarketQuoteBucketed(
              trade,
              ratesMarketData.scenario(i).ratesProvider(),
              optionMarketData.scenario(i).volatilities(currencyPair),
              method));
    }
  }

  // point sensitivity
  // vega for one scenario: Black model-parameter sensitivity resolved against
  // the volatility parameters
  CurrencyParameterSensitivities vegaMarketQuoteBucketed(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesProvider ratesProvider,
      FxOptionVolatilities volatilities,
      FxSingleBarrierOptionMethod method) {

    if (method == FxSingleBarrierOptionMethod.TRINOMIAL_TREE) {
      throw new IllegalArgumentException(
          "FX single barrier option Trinomial Tree pricer does not currently support vega calculation");
    } else {
      BlackFxOptionVolatilities blackVols = checkBlackVolatilities(volatilities);
      PointSensitivities pointSens =
          blackPricer.presentValueSensitivityModelParamsVolatility(trade, ratesProvider, blackVols);
      return blackVols.parameterSensitivity(pointSens);
    }
  }

  //-------------------------------------------------------------------------
  // calculates currency exposure for all scenarios
  MultiCurrencyScenarioArray currencyExposure(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesScenarioMarketData ratesMarketData,
      FxOptionScenarioMarketData optionMarketData,
      FxSingleBarrierOptionMethod method) {

    CurrencyPair currencyPair = trade.getProduct().getCurrencyPair();
    return MultiCurrencyScenarioArray.of(
        ratesMarketData.getScenarioCount(),
        i -> currencyExposure(
            trade,
            ratesMarketData.scenario(i).ratesProvider(),
            optionMarketData.scenario(i).volatilities(currencyPair),
            method));
  }

  // currency exposure for one scenario
  MultiCurrencyAmount currencyExposure(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesProvider ratesProvider,
      FxOptionVolatilities volatilities,
      FxSingleBarrierOptionMethod method) {

    if (method == FxSingleBarrierOptionMethod.TRINOMIAL_TREE) {
      return trinomialTreePricer.currencyExposure(trade, ratesProvider, checkTrinomialTreeVolatilities(volatilities));
    } else {
      return blackPricer.currencyExposure(trade, ratesProvider,
          checkBlackVolatilities(volatilities));
    }
  }

  //-------------------------------------------------------------------------
  // calculates current cash for all scenarios
  // (depends only on the scenario valuation date, not on curves or volatilities)
  CurrencyScenarioArray currentCash(
      ResolvedFxSingleBarrierOptionTrade trade,
      RatesScenarioMarketData ratesMarketData,
      FxOptionScenarioMarketData optionMarketData,
      FxSingleBarrierOptionMethod method) {

    return CurrencyScenarioArray.of(
        ratesMarketData.getScenarioCount(),
        i -> currentCash(
            trade,
            ratesMarketData.scenario(i).getValuationDate(),
            method));
  }

  // current cash for one scenario
  CurrencyAmount currentCash(
      ResolvedFxSingleBarrierOptionTrade trade,
      LocalDate valuationDate,
      FxSingleBarrierOptionMethod method) {

    if (method == FxSingleBarrierOptionMethod.TRINOMIAL_TREE) {
      return trinomialTreePricer.currentCash(trade, valuationDate);
    } else {
      return blackPricer.currentCash(trade, valuationDate);
    }
  }

}
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.java.sip.communicator.service.protocol; import java.util.*; import net.java.sip.communicator.util.*; import net.java.sip.communicator.service.credentialsstorage.*; import org.jitsi.service.neomedia.*; import org.osgi.framework.*; /** * The AccountID is an account identifier that, uniquely represents a specific * user account over a specific protocol. The class needs to be extended by * every protocol implementation because of its protected * constructor. The reason why this constructor is protected is mostly avoiding * confusion and letting people (using the protocol provider service) believe * that they are the ones who are supposed to instantiate the accountid class. * <p> * Every instance of the <tt>ProtocolProviderService</tt>, created through the * ProtocolProviderFactory is assigned an AccountID instance, that uniquely * represents it and whose string representation (obtained through the * getAccountUID() method) can be used for identification of persistently stored * account details. * <p> * Account id's are guaranteed to be different for different accounts and in the * same time are bound to be equal for multiple installations of the same * account. 
 *
 * @author Emil Ivov
 * @author Lubomir Marinov
 * @author Pawel Domas
 */
public abstract class AccountID
{
    /**
     * The <tt>Logger</tt> used by the <tt>AccountID</tt> class and its
     * instances for logging output.
     */
    private static final Logger logger = Logger.getLogger(AccountID.class);

    /**
     * The default properties key prefix used in lib/jitsi-defaults.properties
     */
    protected static final String DEFAULTS_PREFIX
        = "net.java.sip.communicator.service.protocol.";

    /**
     * The protocol display name. In the case of overridden protocol name this
     * would be the new name.
     */
    private final String protocolDisplayName;

    /**
     * The real protocol name.
     */
    private final String protocolName;

    /**
     * Allows a specific set of account properties to override a given default
     * protocol name (e.g. account registration wizards which want to present a
     * well-known protocol name associated with the account that is different
     * from the name of the effective protocol).
     * <p>
     * Note: The logic of the SIP protocol implementation at the time of this
     * writing modifies <tt>accountProperties</tt> to contain the default
     * protocol name if an override hasn't been defined. Since the desire is to
     * enable all account registration wizards to override the protocol name,
     * the current implementation places the specified
     * <tt>defaultProtocolName</tt> in a similar fashion.
     * </p>
     *
     * @param accountProperties a Map containing any other protocol and
     * implementation specific account initialization properties
     * @param defaultProtocolName the protocol name to be used in case
     * <tt>accountProperties</tt> doesn't provide an overriding value
     * @return the protocol name
     */
    private static final String getOverriddenProtocolName(
            Map<String, String> accountProperties, String defaultProtocolName)
    {
        String key = ProtocolProviderFactory.PROTOCOL;
        String protocolName = accountProperties.get(key);

        // Fall back to the default and, as a deliberate side effect, record it
        // in the account properties so later readers of PROTOCOL see a value.
        if ((protocolName == null) && (defaultProtocolName != null))
        {
            protocolName = defaultProtocolName;
            accountProperties.put(key, protocolName);
        }
        return protocolName;
    }

    /**
     * Contains all implementation specific properties that define the account.
     * The exact names of the keys are protocol (and sometimes implementation)
     * specific.
     * Currently, only String property keys and values will get properly stored.
     * If you need something else, please consider converting it through custom
     * accessors (get/set) in your implementation.
     */
    protected Map<String, String> accountProperties = null;

    /**
     * A String uniquely identifying the user for this particular account.
     */
    private final String userID;

    /**
     * A String uniquely identifying this account, that can also be used for
     * storing and unambiguously retrieving details concerning it.
     */
    private final String accountUID;

    /**
     * The name of the service that defines the context for this account.
     */
    private final String serviceName;

    /**
     * Creates an account id for the specified provider userid and
     * accountProperties.
     * If account uid exists in account properties, we are loading the account
     * and so load its value from there, prevent changing account uid
     * when server changed (serviceName has changed).
     * @param userID a String that uniquely identifies the user.
     * @param accountProperties a Map containing any other protocol and
     * implementation specific account initialization properties
     * @param protocolName the name of the protocol implemented by the provider
     * that this id is meant for.
     * @param serviceName the name of the service (e.g. iptel.org, jabber.org,
     * icq.com) that this account is registered with.
     */
    protected AccountID( String userID,
                         Map<String, String> accountProperties,
                         String protocolName,
                         String serviceName)
    {
        /*
         * Allow account registration wizards to override the default protocol
         * name through accountProperties for the purposes of presenting a
         * well-known protocol name associated with the account that is
         * different from the name of the effective protocol.
         */
        this.protocolDisplayName
            = getOverriddenProtocolName(accountProperties, protocolName);

        this.protocolName = protocolName;
        this.userID = userID;
        // Defensive copy: later mutations of the caller's map must not affect
        // this AccountID.
        this.accountProperties
            = new HashMap<String, String>(accountProperties);
        this.serviceName = serviceName;

        String existingAccountUID
            = accountProperties.get(ProtocolProviderFactory.ACCOUNT_UID);

        if(existingAccountUID == null)
        {
            //create a unique identifier string
            this.accountUID
                = protocolDisplayName + ":" + userID + "@"
                    + ((serviceName == null) ? "" : serviceName);
        }
        else
        {
            // Loading an existing account: keep the stored UID so that a
            // changed server/service name does not change the account's
            // identity.
            this.accountUID = existingAccountUID;
        }
    }

    /**
     * Returns the user id associated with this account.
     *
     * @return A String identifying the user inside this particular service.
     */
    public String getUserID()
    {
        return userID;
    }

    /**
     * Returns a name that can be displayed to the user when referring to this
     * account: the explicitly configured ACCOUNT_DISPLAY_NAME when present,
     * otherwise "userID (protocol display name)".
     *
     * @return a String that can be shown to the user for this account.
     */
    public String getDisplayName()
    {
        // If the ACCOUNT_DISPLAY_NAME property has been set for this account
        // we'll be using it as a display name.
        String key = ProtocolProviderFactory.ACCOUNT_DISPLAY_NAME;
        String accountDisplayName = accountProperties.get(key);
        if (accountDisplayName != null && accountDisplayName.length() > 0)
        {
            return accountDisplayName;
        }

        // Otherwise construct a display name.
        String returnValue = getUserID();
        String protocolName = getProtocolDisplayName();

        if (protocolName != null && protocolName.trim().length() > 0)
            returnValue += " (" + protocolName + ")";

        return returnValue;
    }

    /**
     * Sets {@link ProtocolProviderFactory#DISPLAY_NAME} property value.
     *
     * @param displayName the display name value to set.
     */
    public void setDisplayName(String displayName)
    {
        // NOTE(review): setOrRemoveIfEmpty is defined elsewhere in this class;
        // presumably it removes the property when the value is null/empty.
        setOrRemoveIfEmpty(ProtocolProviderFactory.DISPLAY_NAME, displayName);
    }

    /**
     * Returns the display name of the protocol.
     *
     * @return the display name of the protocol
     */
    public String getProtocolDisplayName()
    {
        return protocolDisplayName;
    }

    /**
     * Returns the name of the protocol.
     *
     * @return the name of the protocol
     */
    public String getProtocolName()
    {
        return protocolName;
    }

    /**
     * Returns a String uniquely identifying this account, guaranteed to remain
     * the same across multiple installations of the same account and to always
     * be unique for differing accounts.
     * @return String
     */
    public String getAccountUniqueID()
    {
        return accountUID;
    }

    /**
     * Returns a Map containing protocol and implementation account
     * initialization properties. The returned map is a copy, so callers may
     * modify it without affecting this AccountID.
     * @return a Map containing protocol and implementation account
     * initialization properties.
     */
    public Map<String, String> getAccountProperties()
    {
        return new HashMap<String, String>(accountProperties);
    }

    /**
     * Returns the specified account property parsed as a boolean, falling back
     * to the library default (getDefaultString, defined elsewhere in this
     * class) and finally to <tt>defaultValue</tt>.
     *
     * @param key property key
     * @param defaultValue default value if the property does not exist
     * @return property value corresponding to property key
     */
    public boolean getAccountPropertyBoolean(Object key, boolean defaultValue)
    {
        String value = getAccountPropertyString(key);
        if(value == null)
            value = getDefaultString(key.toString());

        return (value == null) ?
            defaultValue : Boolean.parseBoolean(value);
    }

    /**
     * Gets the value of a specific property as a signed decimal integer. If the
     * specified property key is associated with a value in this
     * <tt>AccountID</tt>, the string representation of the value is parsed into
     * a signed decimal integer according to the rules of
     * {@link Integer#parseInt(String)} . If parsing the value as a signed
     * decimal integer fails or there is no value associated with the specified
     * property key, <tt>defaultValue</tt> is returned.
     *
     * @param key the key of the property to get the value of as a
     * signed decimal integer
     * @param defaultValue the value to be returned if parsing the value of the
     * specified property key as a signed decimal integer fails or there is no
     * value associated with the specified property key in this
     * <tt>AccountID</tt>
     * @return the value of the property with the specified key in this
     * <tt>AccountID</tt> as a signed decimal integer; <tt>defaultValue</tt> if
     * parsing the value of the specified property key fails or no value is
     * associated in this <tt>AccountID</tt> with the specified property name
     */
    public int getAccountPropertyInt(Object key, int defaultValue)
    {
        String stringValue = getAccountPropertyString(key);
        int intValue = defaultValue;

        if ((stringValue == null) || (stringValue.isEmpty()))
        {
            // Fall back to the library-wide default for this key.
            stringValue = getDefaultString(key.toString());
        }

        if ((stringValue != null) && (stringValue.length() > 0))
        {
            try
            {
                intValue = Integer.parseInt(stringValue);
            }
            catch (NumberFormatException ex)
            {
                // Deliberate best effort: log and keep the default value.
                logger.error("Failed to parse account property " + key
                    + " value " + stringValue + " as an integer", ex);
            }
        }
        return intValue;
    }

    /**
     * Returns the account property string corresponding to the given key.
     *
     * @param key the key, corresponding to the property string we're looking
     * for
     * @return the account property string corresponding to the given key
     */
    public String getAccountPropertyString(Object key)
    {
        return getAccountPropertyString(key, null);
    }

    /**
     * Returns the account property string corresponding to the given key.
     *
     * @param key the key, corresponding to the property string we're looking
     * for
     * @param defValue the default value returned when given <tt>key</tt>
     * is not present
     * @return the account property string corresponding to the given key
     */
    public String getAccountPropertyString(Object key, String defValue)
    {
        String value = accountProperties.get(key);
        if(value == null)
            value = getDefaultString(key.toString());
        return (value == null) ? defValue : value;
    }

    /**
     * Adds a property to the map of properties for this account identifier.
     *
     * @param key the key of the property
     * @param value the property value
     */
    public void putAccountProperty(String key, String value)
    {
        accountProperties.put(key, value);
    }

    /**
     * Adds property to the map of properties for this account
     * identifier, storing the <tt>String.valueOf</tt> form of the value.
     * @param key the key of the property
     * @param value the property value
     */
    public void putAccountProperty(String key, Object value)
    {
        accountProperties.put(key, String.valueOf(value));
    }

    /**
     * Removes specified account property.
     * @param key the key to remove.
     */
    public void removeAccountProperty(String key)
    {
        accountProperties.remove(key);
    }

    /**
     * Returns a hash code value for the object (based on accountUID). This
     * method is supported for the benefit of hashtables such as those provided
     * by <tt>java.util.Hashtable</tt>.
     * <p>
     * @return a hash code value for this object.
     * @see java.lang.Object#equals(java.lang.Object)
     * @see java.util.Hashtable
     */
    @Override
    public int hashCode()
    {
        return (accountUID == null)? 0 : accountUID.hashCode();
    }

    /**
     * Indicates whether some other object is "equal to" this account id.
     * <p>
     * @param obj the reference object with which to compare.
* @return <tt>true</tt> if this object is the same as the obj * argument; <tt>false</tt> otherwise. * @see #hashCode() * @see java.util.Hashtable */ @Override public boolean equals(Object obj) { if (this == obj) return true; return (obj != null) && getClass().isInstance(obj) && userID.equals(((AccountID)obj).userID); } /** * Returns a string representation of this account id (same as calling * getAccountUniqueID()). * * @return a string representation of this account id. */ @Override public String toString() { return getAccountUniqueID(); } /** * Returns the name of the service that defines the context for this * account. Often this name would be an sqdn or even an ipaddress but this * would not always be the case (e.g. p2p providers may return a name that * does not directly correspond to an IP address or host name). * <p> * @return the name of the service that defines the context for this * account. */ public String getService() { return this.serviceName; } /** * Returns a string that could be directly used (or easily converted to) an * address that other users of the protocol can use to communicate with us. * By default this string is set to userid@servicename. Protocol * implementors should override it if they'd need it to respect a different * syntax. * * @return a String in the form of userid@service that other protocol users * should be able to parse into a meaningful address and use it to * communicate with us. */ public String getAccountAddress() { String userID = getUserID(); return (userID.indexOf('@') > 0) ? userID : (userID + "@" + getService()); } /** * Indicates if this account is currently enabled. * @return <tt>true</tt> if this account is enabled, <tt>false</tt> - * otherwise. 
     */
    public boolean isEnabled()
    {
        // Stored inverted: the persisted flag marks the account as DISABLED.
        return !getAccountPropertyBoolean(
            ProtocolProviderFactory.IS_ACCOUNT_DISABLED, false);
    }

    /**
     * The address of the server we will use for this account
     *
     * @return String
     */
    public String getServerAddress()
    {
        return getAccountPropertyString(ProtocolProviderFactory.SERVER_ADDRESS);
    }

    /**
     * Get the {@link ProtocolProviderFactory#ACCOUNT_DISPLAY_NAME} property.
     *
     * @return the {@link ProtocolProviderFactory#ACCOUNT_DISPLAY_NAME}
     * property value.
     */
    public String getAccountDisplayName()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.ACCOUNT_DISPLAY_NAME);
    }

    /**
     * Sets {@link ProtocolProviderFactory#ACCOUNT_DISPLAY_NAME} property value.
     *
     * @param displayName the account display name value to set.
     */
    public void setAccountDisplayName(String displayName)
    {
        setOrRemoveIfEmpty(ProtocolProviderFactory.ACCOUNT_DISPLAY_NAME,
            displayName);
    }

    /**
     * Returns the password of the account.
     *
     * @return the password of the account.
     */
    public String getPassword()
    {
        return getAccountPropertyString(ProtocolProviderFactory.PASSWORD);
    }

    /**
     * Sets the password of the account.
     *
     * @param password the password of the account.
     */
    public void setPassword(String password)
    {
        setOrRemoveIfEmpty(ProtocolProviderFactory.PASSWORD, password);
    }

    /**
     * The authorization name
     *
     * @return String auth name
     */
    public String getAuthorizationName()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.AUTHORIZATION_NAME);
    }

    /**
     * Sets authorization name.
     *
     * @param authName String
     */
    public void setAuthorizationName(String authName)
    {
        setOrRemoveIfEmpty(
            ProtocolProviderFactory.AUTHORIZATION_NAME, authName);
    }

    /**
     * The port on the specified server (kept as a String property).
     *
     * @return String the configured server port
     */
    public String getServerPort()
    {
        return getAccountPropertyString(ProtocolProviderFactory.SERVER_PORT);
    }

    /**
     * Sets the server port.
     *
     * @param port the server port to set
     */
    public void setServerPort(String port)
    {
        setOrRemoveIfEmpty(ProtocolProviderFactory.SERVER_PORT, port);
    }

    /**
     * Sets the server address.
     *
     * @param serverAddress the server address to set
     */
    public void setServerAddress(String serverAddress)
    {
        setOrRemoveIfEmpty(ProtocolProviderFactory.SERVER_ADDRESS,
            serverAddress);
    }

    /**
     * Returns <tt>true</tt> if the server was overridden.
     * @return <tt>true</tt> if the server was overridden.
     */
    public boolean isServerOverridden()
    {
        return getAccountPropertyBoolean(
            ProtocolProviderFactory.IS_SERVER_OVERRIDDEN, false);
    }

    /**
     * Sets <tt>isServerOverridden</tt> property.
     * @param isServerOverridden indicates if the server is overridden
     */
    public void setServerOverridden(boolean isServerOverridden)
    {
        // Stored via the Object overload, i.e. as "true"/"false" strings.
        putAccountProperty(
            ProtocolProviderFactory.IS_SERVER_OVERRIDDEN,
            isServerOverridden);
    }

    /**
     * Returns the protocol icon path stored under
     * {@link ProtocolProviderFactory#PROTOCOL_ICON_PATH} key.
     *
     * @return the protocol icon path.
     */
    public String getProtocolIconPath()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.PROTOCOL_ICON_PATH);
    }

    /**
     * Sets the protocol icon path that will be held under
     * {@link ProtocolProviderFactory#PROTOCOL_ICON_PATH} key.
     *
     * @param iconPath a path to the protocol icon to set.
     */
    public void setProtocolIconPath(String iconPath)
    {
        putAccountProperty(
            ProtocolProviderFactory.PROTOCOL_ICON_PATH, iconPath);
    }

    /**
     * Returns the account icon path stored under
     * {@link ProtocolProviderFactory#ACCOUNT_ICON_PATH} key.
     *
     * @return the account icon path.
     */
    public String getAccountIconPath()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.ACCOUNT_ICON_PATH);
    }

    /**
     * Sets the account icon path that will be held under
     * {@link ProtocolProviderFactory#ACCOUNT_ICON_PATH} key.
     *
     * @param iconPath a path to the account icon to set.
     */
    public void setAccountIconPath(String iconPath)
    {
        putAccountProperty(
            ProtocolProviderFactory.ACCOUNT_ICON_PATH, iconPath);
    }

    /**
     * Returns the DTMF method.
     *
     * @return the DTMF method.
     */
    public String getDTMFMethod()
    {
        return getAccountPropertyString(ProtocolProviderFactory.DTMF_METHOD);
    }

    /**
     * Sets the DTMF method.
     *
     * @param dtmfMethod the DTMF method to set
     */
    public void setDTMFMethod(String dtmfMethod)
    {
        putAccountProperty(ProtocolProviderFactory.DTMF_METHOD, dtmfMethod);
    }

    /**
     * Returns the minimal DTMF tone duration.
     *
     * @return The minimal DTMF tone duration.
     */
    public String getDtmfMinimalToneDuration()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.DTMF_MINIMAL_TONE_DURATION);
    }

    /**
     * Sets the minimal DTMF tone duration.
     *
     * @param dtmfMinimalToneDuration The minimal DTMF tone duration to set.
     */
    public void setDtmfMinimalToneDuration(String dtmfMinimalToneDuration)
    {
        putAccountProperty(
            ProtocolProviderFactory.DTMF_MINIMAL_TONE_DURATION,
            dtmfMinimalToneDuration );
    }

    /**
     * Gets the ID of the client certificate configuration.
     * @return the ID of the client certificate configuration.
     */
    public String getTlsClientCertificate()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.CLIENT_TLS_CERTIFICATE);
    }

    /**
     * Sets the ID of the client certificate configuration.
     * @param id the client certificate configuration template ID.
     */
    public void setTlsClientCertificate(String id)
    {
        setOrRemoveIfEmpty(ProtocolProviderFactory.CLIENT_TLS_CERTIFICATE, id);
    }

    /**
     * Checks if the account is hidden. The mere presence of the property marks
     * the account as hidden, regardless of the property's value.
     * @return <tt>true</tt> if this account is hidden or <tt>false</tt>
     * otherwise.
     */
    public boolean isHidden()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.IS_PROTOCOL_HIDDEN) != null;
    }

    /**
     * Checks if the account config is hidden. The mere presence of the
     * property marks the config as hidden, regardless of its value.
     * @return <tt>true</tt> if the account config is hidden or <tt>false</tt>
     * otherwise.
     */
    public boolean isConfigHidden()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.IS_ACCOUNT_CONFIG_HIDDEN) != null;
    }

    /**
     * Checks if the account status menu is hidden. The mere presence of the
     * property marks the menu as hidden, regardless of its value.
     * @return <tt>true</tt> if the account status menu is hidden or
     * <tt>false</tt> otherwise.
     */
    public boolean isStatusMenuHidden()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.IS_ACCOUNT_STATUS_MENU_HIDDEN) != null;
    }

    /**
     * Checks if the account is marked as readonly. The mere presence of the
     * property marks the account as readonly, regardless of its value.
     * @return <tt>true</tt> if the account is marked as readonly or
     * <tt>false</tt> otherwise.
     */
    public boolean isReadOnly()
    {
        return getAccountPropertyString(
            ProtocolProviderFactory.IS_ACCOUNT_READ_ONLY) != null;
    }

    /**
     * Indicates whether this account's protocol is configured as the preferred
     * protocol. (NOTE(review): the former javadoc described a
     * ProtocolProviderService lookup, which did not match this boolean
     * accessor.)
     *
     * @return <tt>true</tt> if the IS_PREFERRED_PROTOCOL property is set to a
     * true value, <tt>false</tt> otherwise
     */
    public boolean isPreferredProvider()
    {
        String preferredProtocolProp
            = getAccountPropertyString(
                ProtocolProviderFactory.IS_PREFERRED_PROTOCOL);

        if (preferredProtocolProp != null
            && preferredProtocolProp.length() > 0
            && Boolean.parseBoolean(preferredProtocolProp))
        {
            return true;
        }

        return false;
    }

    /**
     * Set the account properties. Note that, unlike the constructor, this
     * replaces the map reference without taking a defensive copy.
     *
     * @param accountProperties the properties of the account
     */
    public void setAccountProperties(Map<String, String> accountProperties)
    {
        this.accountProperties = accountProperties;
    }

    /**
     * Returns if the encryption protocol given in parameter is enabled.
     *
     * @param type the encryption protocol to check
     * ("ZRTP", "SDES" or "MIKEY").
     * @return <tt>true</tt> when the corresponding ENCRYPTION_PROTOCOL_STATUS
     * property is enabled; when unset, defaults to <tt>true</tt> for ZRTP only.
     */
    public boolean isEncryptionProtocolEnabled(SrtpControlType type)
    {
        // The default value is false, except for ZRTP.
        boolean defaultValue = type == SrtpControlType.ZRTP;

        return getAccountPropertyBoolean(
            ProtocolProviderFactory.ENCRYPTION_PROTOCOL_STATUS
                + "."
                + type.toString(),
            defaultValue);
    }

    /**
     * Returns the list of STUN servers that this account is currently
     * configured to use.
     *
     * @return the list of STUN servers that this account is currently
     * configured to use.
     */
    public List<StunServerDescriptor> getStunServers(
        BundleContext bundleContext)
    {
        Map<String, String> accountProperties = getAccountProperties();
        List<StunServerDescriptor> stunServerList
            = new ArrayList<StunServerDescriptor>();

        // Descriptors are stored under numbered keys (STUN_PREFIX + index);
        // the table is dense, so the first missing index terminates the scan.
        for (int i = 0; i < StunServerDescriptor.MAX_STUN_SERVER_COUNT; i ++)
        {
            StunServerDescriptor stunServer
                = StunServerDescriptor.loadDescriptor(
                    accountProperties, ProtocolProviderFactory.STUN_PREFIX + i);

            // If we don't find a stun server with the given index, it means
            // there are no more servers left in the table so we've nothing
            // more to do here.
            if (stunServer == null)
                break;

            // Passwords live in the credentials store, not in the account
            // properties.
            String password
                = loadStunPassword(
                    bundleContext,
                    this,
                    ProtocolProviderFactory.STUN_PREFIX + i);

            if(password != null)
                stunServer.setPassword(password);

            stunServerList.add(stunServer);
        }
        return stunServerList;
    }

    /**
     * Returns the password for the STUN server with the specified prefix.
     *
     * @param bundleContext the OSGi bundle context that we are currently
     * running in.
     * @param accountID account ID
     * @param namePrefix name prefix
     *
     * @return password or null if empty
     */
    protected static String loadStunPassword(BundleContext bundleContext,
                                             AccountID accountID,
                                             String namePrefix)
    {
        ProtocolProviderFactory providerFactory
            = ProtocolProviderFactory.getProtocolProviderFactory(
                bundleContext, accountID.getSystemProtocolName());

        String password = null;
        String className = providerFactory.getClass().getName();
        String packageSourceName
            = className.substring(0, className.lastIndexOf('.'));

        String accountPrefix
            = ProtocolProviderFactory.findAccountPrefix(
                bundleContext, accountID, packageSourceName);

        CredentialsStorageService credentialsService
            = ServiceUtils.getService(
                bundleContext, CredentialsStorageService.class);

        try
        {
            password
                = credentialsService.loadPassword(
                    accountPrefix + "." + namePrefix);
        }
        catch(Exception e)
        {
            // Best effort: a missing or broken credentials store simply yields
            // no password. NOTE(review): the caught exception is dropped
            // entirely — consider at least debug-logging it.
            return null;
        }

        return password;
    }

    /**
     * Determines whether this account's provider is supposed to auto discover
     * STUN and TURN servers.
     *
     * @return <tt>true</tt> if this provider would need to discover STUN/TURN
     * servers and false otherwise.
     */
    public boolean isStunServerDiscoveryEnabled()
    {
        return getAccountPropertyBoolean(
            ProtocolProviderFactory.AUTO_DISCOVER_STUN, true);
    }

    /**
     * Determines whether this account's provider uses UPnP (if available).
     *
     * @return <tt>true</tt> if this provider would use UPnP (if available),
     * <tt>false</tt> otherwise
     */
    public boolean isUPNPEnabled()
    {
        return getAccountPropertyBoolean(
            ProtocolProviderFactory.IS_USE_UPNP, true);
    }

    /**
     * Determines whether this account's provider uses the default STUN server
     * provided by Jitsi (stun.jitsi.net) if there is no other STUN/TURN server
     * discovered/configured.
     *
     * @return <tt>true</tt> if this provider would use the default STUN server,
     * <tt>false</tt> otherwise
     */
    public boolean isUseDefaultStunServer()
    {
        return getAccountPropertyBoolean(
            ProtocolProviderFactory.USE_DEFAULT_STUN_SERVER, true);
    }

    /**
     * Returns the actual name of the protocol used rather than a branded
     * variant. The method is primarily meant for open protocols such as SIP
     * or XMPP so that it would always return SIP or XMPP even in branded
     * protocols who otherwise return things like GTalk and ippi for
     * PROTOCOL_NAME.
     *
     * @return the real non-branded name of the protocol.
     */
    public String getSystemProtocolName()
    {
        return getProtocolName();
    }

    /**
     * Sorts the enabled encryption protocol list given in parameter to match
     * the preferences set for this account.
     *
     * @return Sorts the enabled encryption protocol list given in parameter to
     * match the preferences set for this account.
     */
    public List<SrtpControlType> getSortedEnabledEncryptionProtocolList()
    {
        // Order preferences, keyed ENCRYPTION_PROTOCOL.<NAME> -> position;
        // a value of -1 means "not set".
        Map<String, Integer> encryptionProtocols
            = getIntegerPropertiesByPrefix(
                ProtocolProviderFactory.ENCRYPTION_PROTOCOL, true);
        Map<String, Boolean> encryptionProtocolStatus
            = getBooleanPropertiesByPrefix(
                ProtocolProviderFactory.ENCRYPTION_PROTOCOL_STATUS,
                true,
                false);

        // If the account is not yet configured, then ZRTP is activated by
        // default.
        if(encryptionProtocols.size() == 0)
        {
            encryptionProtocols.put(
                ProtocolProviderFactory.ENCRYPTION_PROTOCOL + ".ZRTP", 0);
            encryptionProtocolStatus.put(
                ProtocolProviderFactory.ENCRYPTION_PROTOCOL_STATUS + ".ZRTP",
                true);
        }

        List<SrtpControlType> sortedEncryptionProtocols
            = new ArrayList<SrtpControlType>(encryptionProtocols.size());

        // First: add all protocol in the right order.
        for (Map.Entry<String, Integer> e : encryptionProtocols.entrySet())
        {
            int index = e.getValue();

            // If the key is set.
            if (index != -1)
            {
                // Clamp the index so the positional insert never throws.
                if (index > sortedEncryptionProtocols.size())
                    index = sortedEncryptionProtocols.size();

                // Strip the "ENCRYPTION_PROTOCOL." prefix to get the protocol
                // name (e.g. "ZRTP").
                String name
                    = e.getKey()
                        .substring(
                            ProtocolProviderFactory.ENCRYPTION_PROTOCOL
                                .length() + 1);

                try
                {
                    sortedEncryptionProtocols.add(index,
                        SrtpControlType.fromString(name));
                }
                catch(IllegalArgumentException exc)
                {
                    // Unknown protocol names are skipped rather than aborting
                    // the whole list.
                    logger.error(
                        "Failed to get SRTP control type for name: '" + name
                            + "', key: '" + e.getKey() + "'",
                        exc);
                }
            }
        }

        // Second: remove all disabled protocols.
        // NOTE(review): encryptionProtocolStatus.get(...) may return null when
        // a protocol has an order entry but no status entry; auto-unboxing
        // would then throw NullPointerException — confirm the two property
        // families are always written together.
        for (Iterator<SrtpControlType> i
                = sortedEncryptionProtocols.iterator();
            i.hasNext();)
        {
            String name = i.next().toString();

            if (!encryptionProtocolStatus.get(
                ProtocolProviderFactory.ENCRYPTION_PROTOCOL_STATUS
                    + "."
                    + name))
            {
                i.remove();
            }
        }

        return sortedEncryptionProtocols;
    }

    /**
     * Returns a <tt>java.util.Map</tt> of <tt>String</tt>s containing the
     * all property names that have the specified prefix and <tt>Boolean</tt>
     * containing the value for each property selected. Depending on the value
     * of the <tt>exactPrefixMatch</tt> parameter the method will (when false)
     * or will not (when exactPrefixMatch is true) include property names that
     * have prefixes longer than the specified <tt>prefix</tt> param.
     * <p>
     * Example:
     * <p>
     * Imagine a configuration service instance containing 2 properties
     * only:<br>
     * <code>
     * net.java.sip.communicator.PROP1=value1<br>
     * net.java.sip.communicator.service.protocol.PROP1=value2
     * </code>
     * <p>
     * A call to this method with a prefix="net.java.sip.communicator" and
     * exactPrefixMatch=true would only return the first property -
     * net.java.sip.communicator.PROP1, whereas the same call with
     * exactPrefixMatch=false would return both properties as the second prefix
     * includes the requested prefix string.
     * <p>
     * @param prefix a String containing the prefix (the non dotted non-caps
     * part of a property name) that we're looking for.
     * @param exactPrefixMatch a boolean indicating whether the returned
     * property names should all have a prefix that is an exact match of the
     * the <tt>prefix</tt> param or whether properties with prefixes that
     * contain it but are longer than it are also accepted.
     * @param defaultValue the default value if the key is not set.
     * @return a <tt>java.util.Map</tt> containing all property name String-s
     * matching the specified conditions and the corresponding values as
     * Boolean.
     */
    public Map<String, Boolean> getBooleanPropertiesByPrefix(
            String prefix,
            boolean exactPrefixMatch,
            boolean defaultValue)
    {
        String propertyName;
        // getPropertyNamesByPrefix is defined elsewhere in this class.
        List<String> propertyNames
            = getPropertyNamesByPrefix(prefix, exactPrefixMatch);
        Map<String, Boolean> properties
            = new HashMap<String, Boolean>(propertyNames.size());

        for(int i = 0; i < propertyNames.size(); ++i)
        {
            propertyName = propertyNames.get(i);
            properties.put(
                propertyName,
                getAccountPropertyBoolean(propertyName, defaultValue));
        }

        return properties;
    }

    /**
     * Returns a <tt>java.util.Map</tt> of <tt>String</tt>s containing the
     * all property names that have the specified prefix and <tt>Integer</tt>
     * containing the value for each property selected. Depending on the value
     * of the <tt>exactPrefixMatch</tt> parameter the method will (when false)
     * or will not (when exactPrefixMatch is true) include property names that
     * have prefixes longer than the specified <tt>prefix</tt> param.
     * <p>
     * Example:
     * <p>
     * Imagine a configuration service instance containing 2 properties
     * only:<br>
     * <code>
     * net.java.sip.communicator.PROP1=value1<br>
     * net.java.sip.communicator.service.protocol.PROP1=value2
     * </code>
     * <p>
     * A call to this method with a prefix="net.java.sip.communicator" and
     * exactPrefixMatch=true would only return the first property -
     * net.java.sip.communicator.PROP1, whereas the same call with
     * exactPrefixMatch=false would return both properties as the second prefix
     * includes the requested prefix string.
     * <p>
     * @param prefix a String containing the prefix (the non dotted non-caps
     * part of a property name) that we're looking for.
     * @param exactPrefixMatch a boolean indicating whether the returned
     * property names should all have a prefix that is an exact match of the
     * the <tt>prefix</tt> param or whether properties with prefixes that
     * contain it but are longer than it are also accepted.
* @return a <tt>java.util.Map</tt> containing all property name String-s * matching the specified conditions and the corresponding values as * Integer. */ public Map<String, Integer> getIntegerPropertiesByPrefix( String prefix, boolean exactPrefixMatch) { String propertyName; List<String> propertyNames = getPropertyNamesByPrefix(prefix, exactPrefixMatch); Map<String, Integer> properties = new HashMap<String, Integer>(propertyNames.size()); for(int i = 0; i < propertyNames.size(); ++i) { propertyName = propertyNames.get(i); properties.put( propertyName, getAccountPropertyInt(propertyName, -1)); } return properties; } /** * Returns a <tt>java.util.List</tt> of <tt>String</tt>s containing the * all property names that have the specified prefix. Depending on the value * of the <tt>exactPrefixMatch</tt> parameter the method will (when false) * or will not (when exactPrefixMatch is true) include property names that * have prefixes longer than the specified <tt>prefix</tt> param. * <p> * Example: * <p> * Imagine a configuration service instance containing 2 properties * only:<br> * <code> * net.java.sip.communicator.PROP1=value1<br> * net.java.sip.communicator.service.protocol.PROP1=value2 * </code> * <p> * A call to this method with a prefix="net.java.sip.communicator" and * exactPrefixMatch=true would only return the first property - * net.java.sip.communicator.PROP1, whereas the same call with * exactPrefixMatch=false would return both properties as the second prefix * includes the requested prefix string. * <p> * @param prefix a String containing the prefix (the non dotted non-caps * part of a property name) that we're looking for. * @param exactPrefixMatch a boolean indicating whether the returned * property names should all have a prefix that is an exact match of the * the <tt>prefix</tt> param or whether properties with prefixes that * contain it but are longer than it are also accepted. 
* @return a <tt>java.util.List</tt>containing all property name String-s * matching the specified conditions. */ public List<String> getPropertyNamesByPrefix( String prefix, boolean exactPrefixMatch) { List<String> resultKeySet = new LinkedList<String>(); for (String key : accountProperties.keySet()) { int ix = key.lastIndexOf('.'); if(ix == -1) continue; String keyPrefix = key.substring(0, ix); if(exactPrefixMatch) { if(prefix.equals(keyPrefix)) resultKeySet.add(key); } else { if(keyPrefix.startsWith(prefix)) resultKeySet.add(key); } } return resultKeySet; } /** * Sets the property a new value, but only if it's not <tt>null</tt> or * the property is removed from the map. * * @param key the property key * @param value the property value */ public void setOrRemoveIfNull(String key, String value) { if(value != null) { putAccountProperty(key, value); } else { removeAccountProperty(key); } } /** * Puts the new property value if it's not <tt>null</tt> nor empty. * @param key the property key * @param value the property value */ public void setOrRemoveIfEmpty(String key, String value) { setOrRemoveIfEmpty(key, value, false); } /** * Puts the new property value if it's not <tt>null</tt> nor empty. If * <tt>trim</tt> parameter is set to <tt>true</tt> the string will be * trimmed, before checked for emptiness. * * @param key the property key * @param value the property value * @param trim <tt>true</tt> if the value will be trimmed, before * <tt>isEmpty()</tt> is called. */ public void setOrRemoveIfEmpty(String key, String value, boolean trim) { if( value != null && (trim ? !value.trim().isEmpty() : !value.isEmpty()) ) { putAccountProperty(key, value); } else { removeAccountProperty(key); } } /** * Stores configuration properties held by this object into given * <tt>accountProperties</tt> map. 
* * @param protocolIconPath the path to the protocol icon is used * @param accountIconPath the path to the account icon if used * @param accountProperties output properties map */ public void storeProperties( String protocolIconPath, String accountIconPath, Map<String, String> accountProperties ) { if(protocolIconPath != null) setProtocolIconPath(protocolIconPath); if(accountIconPath != null) setAccountIconPath(accountIconPath); mergeProperties(this.accountProperties, accountProperties); // Removes encrypted password property, as it will be restored during // account storage, but only if the password property is present. accountProperties.remove("ENCRYPTED_PASSWORD"); } /** * Gets default property value for given <tt>key</tt>. * * @param key the property key * @return default property value for given<tt>key</tt> */ protected String getDefaultString(String key) { return getDefaultStr(key); } /** * Gets default property value for given <tt>key</tt>. * * @param key the property key * @return default property value for given<tt>key</tt> */ public static String getDefaultStr(String key) { return ProtocolProviderActivator .getConfigurationService() .getString(DEFAULTS_PREFIX +key); } /** * Copies all properties from <tt>input</tt> map to <tt>output</tt> map. * @param input source properties map * @param output destination properties map */ public static void mergeProperties( Map<String, String> input, Map<String, String> output ) { for(String key : input.keySet()) { output.put(key, input.get(key)); } } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.assistants;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceFaultInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInSequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeFirstInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeSecondInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MessageInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorOutputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyFaultInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInSequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbModelingAssistantProvider;

/**
 * Modeling assistant for the PayloadFactory mediator output connector.
 * Every supported target connector accepts exactly one relationship type,
 * {@link EsbElementTypes#EsbLink_4001}, so the per-target checks are kept
 * in a single ordered table instead of a hand-expanded instanceof chain.
 *
 * @generated NOT
 */
public class EsbModelingAssistantProviderOfPayloadFactoryMediatorOutputConnectorEditPart
        extends EsbModelingAssistantProvider {

    /**
     * Edit-part types that may serve as the target of an EsbLink drawn from
     * this connector, in the same order as the original generated checks.
     * One EsbLink_4001 entry is contributed per matching type, preserving
     * the original per-branch add semantics.
     */
    private static final Class<?>[] LINKABLE_TARGET_EDIT_PARTS = {
            ProxyInputConnectorEditPart.class,
            ProxyFaultInputConnectorEditPart.class,
            DropMediatorInputConnectorEditPart.class,
            PropertyMediatorInputConnectorEditPart.class,
            ThrottleMediatorInputConnectorEditPart.class,
            FilterMediatorInputConnectorEditPart.class,
            LogMediatorInputConnectorEditPart.class,
            EnrichMediatorInputConnectorEditPart.class,
            XSLTMediatorInputConnectorEditPart.class,
            SwitchMediatorInputConnectorEditPart.class,
            SequenceInputConnectorEditPart.class,
            EventMediatorInputConnectorEditPart.class,
            EntitlementMediatorInputConnectorEditPart.class,
            ClassMediatorInputConnectorEditPart.class,
            SpringMediatorInputConnectorEditPart.class,
            ScriptMediatorInputConnectorEditPart.class,
            FaultMediatorInputConnectorEditPart.class,
            XQueryMediatorInputConnectorEditPart.class,
            CommandMediatorInputConnectorEditPart.class,
            DBLookupMediatorInputConnectorEditPart.class,
            DBReportMediatorInputConnectorEditPart.class,
            SmooksMediatorInputConnectorEditPart.class,
            SendMediatorInputConnectorEditPart.class,
            HeaderMediatorInputConnectorEditPart.class,
            CloneMediatorInputConnectorEditPart.class,
            CacheMediatorInputConnectorEditPart.class,
            IterateMediatorInputConnectorEditPart.class,
            CalloutMediatorInputConnectorEditPart.class,
            TransactionMediatorInputConnectorEditPart.class,
            RMSequenceMediatorInputConnectorEditPart.class,
            RuleMediatorInputConnectorEditPart.class,
            OAuthMediatorInputConnectorEditPart.class,
            AggregateMediatorInputConnectorEditPart.class,
            StoreMediatorInputConnectorEditPart.class,
            BuilderMediatorInputConnectorEditPart.class,
            CallTemplateMediatorInputConnectorEditPart.class,
            PayloadFactoryMediatorInputConnectorEditPart.class,
            EnqueueMediatorInputConnectorEditPart.class,
            URLRewriteMediatorInputConnectorEditPart.class,
            ValidateMediatorInputConnectorEditPart.class,
            RouterMediatorInputConnectorEditPart.class,
            ConditionalRouterMediatorInputConnectorEditPart.class,
            BAMMediatorInputConnectorEditPart.class,
            BeanMediatorInputConnectorEditPart.class,
            EJBMediatorInputConnectorEditPart.class,
            DefaultEndPointInputConnectorEditPart.class,
            AddressEndPointInputConnectorEditPart.class,
            FailoverEndPointInputConnectorEditPart.class,
            RecipientListEndPointInputConnectorEditPart.class,
            WSDLEndPointInputConnectorEditPart.class,
            NamedEndpointInputConnectorEditPart.class,
            LoadBalanceEndPointInputConnectorEditPart.class,
            APIResourceEndpointInputConnectorEditPart.class,
            AddressingEndpointInputConnectorEditPart.class,
            HTTPEndPointInputConnectorEditPart.class,
            TemplateEndpointInputConnectorEditPart.class,
            CloudConnectorInputConnectorEditPart.class,
            CloudConnectorOperationInputConnectorEditPart.class,
            LoopBackMediatorInputConnectorEditPart.class,
            RespondMediatorInputConnectorEditPart.class,
            CallMediatorInputConnectorEditPart.class,
            DataMapperMediatorInputConnectorEditPart.class,
            FastXSLTMediatorInputConnectorEditPart.class,
            ProxyInSequenceInputConnectorEditPart.class,
            MessageInputConnectorEditPart.class,
            MergeNodeFirstInputConnectorEditPart.class,
            MergeNodeSecondInputConnectorEditPart.class,
            SequencesInputConnectorEditPart.class,
            DefaultEndPointInputConnector2EditPart.class,
            AddressEndPointInputConnector2EditPart.class,
            FailoverEndPointInputConnector2EditPart.class,
            RecipientListEndPointInputConnector2EditPart.class,
            WSDLEndPointInputConnector2EditPart.class,
            LoadBalanceEndPointInputConnector2EditPart.class,
            HTTPEndPointInputConnector2EditPart.class,
            TemplateEndpointInputConnector2EditPart.class,
            APIResourceInputConnectorEditPart.class,
            APIResourceFaultInputConnectorEditPart.class,
            APIResourceInSequenceInputConnectorEditPart.class };

    /**
     * Returns the relationship types that may originate from the given
     * source edit part (always the single EsbLink type).
     *
     * @generated NOT
     */
    @Override
    public List<IElementType> getRelTypesOnSource(IAdaptable source) {
        IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source
                .getAdapter(IGraphicalEditPart.class);
        return doGetRelTypesOnSource((PayloadFactoryMediatorOutputConnectorEditPart) sourceEditPart);
    }

    /**
     * An output connector can only ever start an EsbLink.
     *
     * @generated NOT
     */
    public List<IElementType> doGetRelTypesOnSource(
            PayloadFactoryMediatorOutputConnectorEditPart source) {
        List<IElementType> types = new ArrayList<IElementType>(1);
        types.add(EsbElementTypes.EsbLink_4001);
        return types;
    }

    /**
     * Returns the relationship types permitted between the given source and
     * target edit parts.
     *
     * @generated NOT
     */
    @Override
    public List<IElementType> getRelTypesOnSourceAndTarget(IAdaptable source,
            IAdaptable target) {
        IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source
                .getAdapter(IGraphicalEditPart.class);
        IGraphicalEditPart targetEditPart = (IGraphicalEditPart) target
                .getAdapter(IGraphicalEditPart.class);
        return doGetRelTypesOnSourceAndTarget(
                (PayloadFactoryMediatorOutputConnectorEditPart) sourceEditPart,
                targetEditPart);
    }

    /**
     * Adds one EsbLink entry for every entry of
     * {@link #LINKABLE_TARGET_EDIT_PARTS} the target edit part is an
     * instance of, replacing the former ~80-branch instanceof chain.
     *
     * @generated NOT
     */
    public List<IElementType> doGetRelTypesOnSourceAndTarget(
            PayloadFactoryMediatorOutputConnectorEditPart source,
            IGraphicalEditPart targetEditPart) {
        List<IElementType> types = new LinkedList<IElementType>();
        for (Class<?> targetType : LINKABLE_TARGET_EDIT_PARTS) {
            if (targetType.isInstance(targetEditPart)) {
                types.add(EsbElementTypes.EsbLink_4001);
            }
        }
        return types;
    }

    /**
     * Returns the element types that may be created as the target of the
     * given relationship type from the given source.
     *
     * @generated NOT
     */
    @Override
    public List<IElementType> getTypesForTarget(IAdaptable source,
            IElementType relationshipType) {
        IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source
                .getAdapter(IGraphicalEditPart.class);
        return doGetTypesForTarget(
                (PayloadFactoryMediatorOutputConnectorEditPart) sourceEditPart,
                relationshipType);
    }

    /**
     * All connector element types creatable as an EsbLink target; empty for
     * any other relationship type.
     *
     * @generated NOT
     */
    public List<IElementType> doGetTypesForTarget(
            PayloadFactoryMediatorOutputConnectorEditPart source,
            IElementType relationshipType) {
        List<IElementType> types = new ArrayList<IElementType>();
        if (relationshipType == EsbElementTypes.EsbLink_4001) {
            types.add(EsbElementTypes.ProxyInputConnector_3003);
            types.add(EsbElementTypes.ProxyFaultInputConnector_3489);
            types.add(EsbElementTypes.DropMediatorInputConnector_3008);
            types.add(EsbElementTypes.PropertyMediatorInputConnector_3033);
            types.add(EsbElementTypes.ThrottleMediatorInputConnector_3121);
            types.add(EsbElementTypes.FilterMediatorInputConnector_3010);
            types.add(EsbElementTypes.LogMediatorInputConnector_3018);
            types.add(EsbElementTypes.EnrichMediatorInputConnector_3036);
            types.add(EsbElementTypes.XSLTMediatorInputConnector_3039);
            types.add(EsbElementTypes.SwitchMediatorInputConnector_3042);
            types.add(EsbElementTypes.SequenceInputConnector_3049);
            types.add(EsbElementTypes.EventMediatorInputConnector_3052);
            types.add(EsbElementTypes.EntitlementMediatorInputConnector_3055);
            types.add(EsbElementTypes.ClassMediatorInputConnector_3058);
            types.add(EsbElementTypes.SpringMediatorInputConnector_3061);
            types.add(EsbElementTypes.ScriptMediatorInputConnector_3064);
            types.add(EsbElementTypes.FaultMediatorInputConnector_3067);
            types.add(EsbElementTypes.XQueryMediatorInputConnector_3070);
            types.add(EsbElementTypes.CommandMediatorInputConnector_3073);
            types.add(EsbElementTypes.DBLookupMediatorInputConnector_3076);
            types.add(EsbElementTypes.DBReportMediatorInputConnector_3079);
            types.add(EsbElementTypes.SmooksMediatorInputConnector_3082);
            types.add(EsbElementTypes.SendMediatorInputConnector_3085);
            types.add(EsbElementTypes.HeaderMediatorInputConnector_3100);
            types.add(EsbElementTypes.CloneMediatorInputConnector_3103);
            types.add(EsbElementTypes.CacheMediatorInputConnector_3106);
            types.add(EsbElementTypes.IterateMediatorInputConnector_3109);
            types.add(EsbElementTypes.CalloutMediatorInputConnector_3115);
            types.add(EsbElementTypes.TransactionMediatorInputConnector_3118);
            types.add(EsbElementTypes.RMSequenceMediatorInputConnector_3124);
            types.add(EsbElementTypes.RuleMediatorInputConnector_3127);
            types.add(EsbElementTypes.OAuthMediatorInputConnector_3130);
            types.add(EsbElementTypes.AggregateMediatorInputConnector_3112);
            types.add(EsbElementTypes.StoreMediatorInputConnector_3589);
            types.add(EsbElementTypes.BuilderMediatorInputConnector_3592);
            types.add(EsbElementTypes.CallTemplateMediatorInputConnector_3595);
            types.add(EsbElementTypes.PayloadFactoryMediatorInputConnector_3598);
            types.add(EsbElementTypes.EnqueueMediatorInputConnector_3601);
            types.add(EsbElementTypes.URLRewriteMediatorInputConnector_3621);
            types.add(EsbElementTypes.ValidateMediatorInputConnector_3624);
            types.add(EsbElementTypes.RouterMediatorInputConnector_3629);
            types.add(EsbElementTypes.ConditionalRouterMediatorInputConnector_3636);
            types.add(EsbElementTypes.BAMMediatorInputConnector_3681);
            types.add(EsbElementTypes.BeanMediatorInputConnector_3684);
            types.add(EsbElementTypes.EJBMediatorInputConnector_3687);
            types.add(EsbElementTypes.DefaultEndPointInputConnector_3021);
            types.add(EsbElementTypes.AddressEndPointInputConnector_3030);
            types.add(EsbElementTypes.FailoverEndPointInputConnector_3088);
            types.add(EsbElementTypes.RecipientListEndPointInputConnector_3693);
            types.add(EsbElementTypes.WSDLEndPointInputConnector_3092);
            types.add(EsbElementTypes.NamedEndpointInputConnector_3661);
            types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3095);
            types.add(EsbElementTypes.APIResourceEndpointInputConnector_3675);
            types.add(EsbElementTypes.AddressingEndpointInputConnector_3690);
            types.add(EsbElementTypes.HTTPEndPointInputConnector_3710);
            types.add(EsbElementTypes.TemplateEndpointInputConnector_3717);
            types.add(EsbElementTypes.CloudConnectorInputConnector_3720);
            types.add(EsbElementTypes.CloudConnectorOperationInputConnector_3723);
            types.add(EsbElementTypes.LoopBackMediatorInputConnector_3737);
            types.add(EsbElementTypes.RespondMediatorInputConnector_3740);
            types.add(EsbElementTypes.CallMediatorInputConnector_3743);
            types.add(EsbElementTypes.DataMapperMediatorInputConnector_3762);
            types.add(EsbElementTypes.FastXSLTMediatorInputConnector_3765);
            types.add(EsbElementTypes.ProxyInSequenceInputConnector_3731);
            types.add(EsbElementTypes.MessageInputConnector_3046);
            types.add(EsbElementTypes.MergeNodeFirstInputConnector_3014);
            types.add(EsbElementTypes.MergeNodeSecondInputConnector_3015);
            types.add(EsbElementTypes.SequencesInputConnector_3616);
            types.add(EsbElementTypes.DefaultEndPointInputConnector_3644);
            types.add(EsbElementTypes.AddressEndPointInputConnector_3647);
            types.add(EsbElementTypes.FailoverEndPointInputConnector_3650);
            types.add(EsbElementTypes.RecipientListEndPointInputConnector_3697);
            types.add(EsbElementTypes.WSDLEndPointInputConnector_3654);
            types.add(EsbElementTypes.LoadBalanceeEndPointInputConnector_3657);
            types.add(EsbElementTypes.HTTPEndPointInputConnector_3713);
            types.add(EsbElementTypes.TemplateEndpointInputConnector_3726);
            types.add(EsbElementTypes.APIResourceInputConnector_3670);
            types.add(EsbElementTypes.APIResourceFaultInputConnector_3672);
            types.add(EsbElementTypes.APIResourceInSequenceInputConnector_3747);
        }
        return types;
    }
}
package org.robolectric.shadows; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Path; import android.graphics.Rect; import android.graphics.RectF; import org.robolectric.annotation.Implementation; import org.robolectric.annotation.Implements; import org.robolectric.util.ReflectionHelpers; import java.util.ArrayList; import java.util.List; import static org.robolectric.Shadows.shadowOf; /** * Shadows the {@code android.graphics.Canvas} class. * * <p> * Broken. * This implementation is very specific to the application for which it was developed. * Todo: Reimplement. Consider using the same strategy of collecting a history of draw events and providing methods for writing queries based on type, number, and order of events. */ @SuppressWarnings({"UnusedDeclaration"}) @Implements(Canvas.class) public class ShadowCanvas { private List<PathPaintHistoryEvent> pathPaintEvents = new ArrayList<>(); private List<CirclePaintHistoryEvent> circlePaintEvents = new ArrayList<>(); private List<ArcPaintHistoryEvent> arcPaintEvents = new ArrayList<>(); private List<RectPaintHistoryEvent> rectPaintEvents = new ArrayList<>(); private List<LinePaintHistoryEvent> linePaintEvents = new ArrayList<>(); private List<OvalPaintHistoryEvent> ovalPaintEvents = new ArrayList<>(); private List<TextHistoryEvent> drawnTextEventHistory = new ArrayList<>(); private Paint drawnPaint; private Bitmap targetBitmap = ReflectionHelpers.callConstructor(Bitmap.class); private float translateX; private float translateY; private float scaleX = 1; private float scaleY = 1; private int height; private int width; /** * Returns a textual representation of the appearance of the object. * * @param canvas the canvas to visualize * @return The textual representation of the appearance of the object. 
*/ public static String visualize(Canvas canvas) { return shadowOf(canvas).getDescription(); } public void __constructor__(Bitmap bitmap) { this.targetBitmap = bitmap; } public void appendDescription(String s) { shadowOf(targetBitmap).appendDescription(s); } public String getDescription() { return shadowOf(targetBitmap).getDescription(); } @Implementation public void setBitmap(Bitmap bitmap) { targetBitmap = bitmap; } @Implementation public void drawText(String text, float x, float y, Paint paint) { drawnTextEventHistory.add(new TextHistoryEvent(x, y, paint, text)); } @Implementation public void translate(float x, float y) { this.translateX = x; this.translateY = y; } @Implementation public void scale(float sx, float sy) { this.scaleX = sx; this.scaleY = sy; } @Implementation public void scale(float sx, float sy, float px, float py) { this.scaleX = sx; this.scaleY = sy; } @Implementation public void drawPaint(Paint paint) { drawnPaint = paint; } @Implementation public void drawColor(int color) { appendDescription("draw color " + color); } @Implementation public void drawBitmap(Bitmap bitmap, float left, float top, Paint paint) { describeBitmap(bitmap, paint); int x = (int) (left + translateX); int y = (int) (top + translateY); if (x != 0 || y != 0) { appendDescription(" at (" + x + "," + y + ")"); } if (scaleX != 1 && scaleY != 1) { appendDescription(" scaled by (" + scaleX + "," + scaleY + ")"); } } @Implementation public void drawBitmap(Bitmap bitmap, Rect src, Rect dst, Paint paint) { describeBitmap(bitmap, paint); StringBuilder descriptionBuilder = new StringBuilder(); if (dst != null) { descriptionBuilder.append(" at (").append(dst.left).append(",").append(dst.top) .append(") with height=").append(dst.height()).append(" and width=").append(dst.width()); } if (src != null) { descriptionBuilder.append( " taken from ").append(src.toString()); } appendDescription(descriptionBuilder.toString()); } @Implementation public void drawBitmap(Bitmap bitmap, Matrix matrix, 
Paint paint) { describeBitmap(bitmap, paint); appendDescription(" transformed by matrix"); } @Implementation public void drawPath(Path path, Paint paint) { pathPaintEvents.add(new PathPaintHistoryEvent(new Path(path), paint)); separateLines(); appendDescription("Path " + shadowOf(path).getPoints().toString()); } @Implementation public void drawCircle(float cx, float cy, float radius, Paint paint) { circlePaintEvents.add(new CirclePaintHistoryEvent(cx, cy, radius, paint)); } @Implementation public void drawArc(RectF oval, float startAngle, float sweepAngle, boolean useCenter, Paint paint) { arcPaintEvents.add(new ArcPaintHistoryEvent(oval, startAngle, sweepAngle, useCenter, paint)); } @Implementation public void drawRect(float left, float top, float right, float bottom, Paint paint) { rectPaintEvents.add(new RectPaintHistoryEvent(left, top, right, bottom, paint)); } @Implementation public void drawLine(float startX, float startY, float stopX, float stopY, Paint paint) { linePaintEvents.add(new LinePaintHistoryEvent(startX, startY, stopX, stopY, paint)); } @Implementation public void drawOval(RectF oval, Paint paint) { ovalPaintEvents.add(new OvalPaintHistoryEvent(oval, paint)); } private void describeBitmap(Bitmap bitmap, Paint paint) { separateLines(); appendDescription(shadowOf(bitmap).getDescription()); if (paint != null) { ColorFilter colorFilter = paint.getColorFilter(); if (colorFilter != null) { appendDescription(" with " + colorFilter); } } } private void separateLines() { if (getDescription().length() != 0) { appendDescription("\n"); } } public int getPathPaintHistoryCount() { return pathPaintEvents.size(); } public int getCirclePaintHistoryCount() { return circlePaintEvents.size(); } public int getArcPaintHistoryCount() { return arcPaintEvents.size(); } public boolean hasDrawnPath() { return getPathPaintHistoryCount() > 0; } public boolean hasDrawnCircle() { return circlePaintEvents.size() > 0; } public Paint getDrawnPathPaint(int i) { return 
pathPaintEvents.get(i).pathPaint; } public Path getDrawnPath(int i) { return pathPaintEvents.get(i).drawnPath; } public CirclePaintHistoryEvent getDrawnCircle(int i) { return circlePaintEvents.get(i); } public ArcPaintHistoryEvent getDrawnArc(int i) { return arcPaintEvents.get(i); } public void resetCanvasHistory() { drawnTextEventHistory.clear(); pathPaintEvents.clear(); circlePaintEvents.clear(); rectPaintEvents.clear(); linePaintEvents.clear(); ovalPaintEvents.clear(); shadowOf(targetBitmap).setDescription(""); } public Paint getDrawnPaint() { return drawnPaint; } public void setHeight(int height) { this.height = height; } public void setWidth(int width) { this.width = width; } @Implementation public int getWidth() { return width; } @Implementation public int getHeight() { return height; } public TextHistoryEvent getDrawnTextEvent(int i) { return drawnTextEventHistory.get(i); } public int getTextHistoryCount() { return drawnTextEventHistory.size(); } public RectPaintHistoryEvent getDrawnRect(int i) { return rectPaintEvents.get(i); } public RectPaintHistoryEvent getLastDrawnRect() { return rectPaintEvents.get(rectPaintEvents.size() - 1); } public int getRectPaintHistoryCount() { return rectPaintEvents.size(); } public LinePaintHistoryEvent getDrawnLine(int i) { return linePaintEvents.get(i); } public int getLinePaintHistoryCount() { return linePaintEvents.size(); } public int getOvalPaintHistoryCount() { return ovalPaintEvents.size(); } public OvalPaintHistoryEvent getDrawnOval(int i) { return ovalPaintEvents.get(i); } public static class LinePaintHistoryEvent { public Paint paint; public float startX; public float startY; public float stopX; public float stopY; private LinePaintHistoryEvent( float startX, float startY, float stopX, float stopY, Paint paint) { this.paint = new Paint(paint); this.paint.setColor(paint.getColor()); this.paint.setStrokeWidth(paint.getStrokeWidth()); this.startX = startX; this.startY = startY; this.stopX = stopX; this.stopY = stopY; } 
} public static class OvalPaintHistoryEvent { public final RectF oval; public final Paint paint; private OvalPaintHistoryEvent(RectF oval, Paint paint) { this.oval = new RectF(oval); this.paint = new Paint(paint); this.paint.setColor(paint.getColor()); this.paint.setStrokeWidth(paint.getStrokeWidth()); } } public static class RectPaintHistoryEvent { public final Paint paint; public final RectF rect; public final float left; public final float top; public final float right; public final float bottom; private RectPaintHistoryEvent( float left, float top, float right, float bottom, Paint paint){ this.rect = new RectF(left, top, right, bottom); this.paint = new Paint(paint); this.paint.setColor(paint.getColor()); this.paint.setStrokeWidth(paint.getStrokeWidth()); this.paint.setTextSize(paint.getTextSize()); this.paint.setStyle(paint.getStyle()); this.left = left; this.top = top; this.right = right; this.bottom = bottom; } } private static class PathPaintHistoryEvent { private final Path drawnPath; private final Paint pathPaint; PathPaintHistoryEvent(Path drawnPath, Paint pathPaint) { this.drawnPath = drawnPath; this.pathPaint = pathPaint; } } public static class CirclePaintHistoryEvent { public final float centerX; public final float centerY; public final float radius; public final Paint paint; private CirclePaintHistoryEvent(float centerX, float centerY, float radius, Paint paint) { this.centerX = centerX; this.centerY = centerY; this.radius = radius; this.paint = paint; } } public static class ArcPaintHistoryEvent { public final RectF oval; public final float startAngle; public final float sweepAngle; public final boolean useCenter; public final Paint paint; public ArcPaintHistoryEvent(RectF oval, float startAngle, float sweepAngle, boolean useCenter, Paint paint) { this.oval = oval; this.startAngle = startAngle; this.sweepAngle = sweepAngle; this.useCenter = useCenter; this.paint = paint; } } public static class TextHistoryEvent { public final float x; public final 
float y; public final Paint paint; public final String text; private TextHistoryEvent(float x, float y, Paint paint, String text) { this.x = x; this.y = y; this.paint = paint; this.text = text; } } }
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver.wal; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import java.io.FilterInputStream; import java.io.IOException; import java.lang.reflect.Field; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.NavigableMap; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import 
org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine; import org.apache.hadoop.hbase.regionserver.DefaultStoreFlusher; import org.apache.hadoop.hbase.regionserver.FlushRequestListener; import org.apache.hadoop.hbase.regionserver.FlushRequester; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.MemStoreSnapshot; import org.apache.hadoop.hbase.regionserver.MemStoreSize; import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdge; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; import 
org.apache.hadoop.hbase.util.HFileTestUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALFactory; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.wal.WALSplitter; import org.apache.hadoop.hdfs.DFSInputStream; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode; /** * Test replay of edits out of a WAL split. */ public abstract class AbstractTestWALReplay { private static final Log LOG = LogFactory.getLog(AbstractTestWALReplay.class); static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final EnvironmentEdge ee = EnvironmentEdgeManager.getDelegate(); private Path hbaseRootDir = null; private String logName; private Path oldLogDir; private Path logDir; private FileSystem fs; private Configuration conf; private RecoveryMode mode; private WALFactory wals; @Rule public final TestName currentTest = new TestName(); @BeforeClass public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); // The below config supported by 0.20-append and CDH3b2 conf.setInt("dfs.client.block.recovery.retries", 2); TEST_UTIL.startMiniCluster(3); Path hbaseRootDir = TEST_UTIL.getDFSCluster().getFileSystem().makeQualified(new Path("/hbase")); LOG.info("hbase.rootdir=" + hbaseRootDir); FSUtils.setRootDir(conf, hbaseRootDir); } @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Before public void setUp() throws Exception { 
this.conf = HBaseConfiguration.create(TEST_UTIL.getConfiguration()); this.fs = TEST_UTIL.getDFSCluster().getFileSystem(); this.hbaseRootDir = FSUtils.getRootDir(this.conf); this.oldLogDir = new Path(this.hbaseRootDir, HConstants.HREGION_OLDLOGDIR_NAME); String serverName = ServerName.valueOf(currentTest.getMethodName() + "-manual", 16010, System.currentTimeMillis()).toString(); this.logName = AbstractFSWALProvider.getWALDirectoryName(serverName); this.logDir = new Path(this.hbaseRootDir, logName); if (TEST_UTIL.getDFSCluster().getFileSystem().exists(this.hbaseRootDir)) { TEST_UTIL.getDFSCluster().getFileSystem().delete(this.hbaseRootDir, true); } this.mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, HConstants.DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG) ? RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING); this.wals = new WALFactory(conf, null, currentTest.getMethodName()); } @After public void tearDown() throws Exception { this.wals.close(); TEST_UTIL.getDFSCluster().getFileSystem().delete(this.hbaseRootDir, true); } /* * @param p Directory to cleanup */ private void deleteDir(final Path p) throws IOException { if (this.fs.exists(p)) { if (!this.fs.delete(p, true)) { throw new IOException("Failed remove of " + p); } } } /** * * @throws Exception */ @Test public void testReplayEditsAfterRegionMovedWithMultiCF() throws Exception { final TableName tableName = TableName.valueOf("testReplayEditsAfterRegionMovedWithMultiCF"); byte[] family1 = Bytes.toBytes("cf1"); byte[] family2 = Bytes.toBytes("cf2"); byte[] qualifier = Bytes.toBytes("q"); byte[] value = Bytes.toBytes("testV"); byte[][] familys = { family1, family2 }; TEST_UTIL.createTable(tableName, familys); Table htable = TEST_UTIL.getConnection().getTable(tableName); Put put = new Put(Bytes.toBytes("r1")); put.addColumn(family1, qualifier, value); htable.put(put); ResultScanner resultScanner = htable.getScanner(new Scan()); int count = 0; while (resultScanner.next() != null) { count++; } 
resultScanner.close(); assertEquals(1, count); MiniHBaseCluster hbaseCluster = TEST_UTIL.getMiniHBaseCluster(); List<HRegion> regions = hbaseCluster.getRegions(tableName); assertEquals(1, regions.size()); // move region to another regionserver Region destRegion = regions.get(0); int originServerNum = hbaseCluster.getServerWith(destRegion.getRegionInfo().getRegionName()); assertTrue("Please start more than 1 regionserver", hbaseCluster.getRegionServerThreads().size() > 1); int destServerNum = 0; while (destServerNum == originServerNum) { destServerNum++; } HRegionServer originServer = hbaseCluster.getRegionServer(originServerNum); HRegionServer destServer = hbaseCluster.getRegionServer(destServerNum); // move region to destination regionserver TEST_UTIL.moveRegionAndWait(destRegion.getRegionInfo(), destServer.getServerName()); // delete the row Delete del = new Delete(Bytes.toBytes("r1")); htable.delete(del); resultScanner = htable.getScanner(new Scan()); count = 0; while (resultScanner.next() != null) { count++; } resultScanner.close(); assertEquals(0, count); // flush region and make major compaction HRegion region = (HRegion) destServer.getOnlineRegion(destRegion.getRegionInfo().getRegionName()); region.flush(true); // wait to complete major compaction for (HStore store : region.getStores()) { store.triggerMajorCompaction(); } region.compact(true); // move region to origin regionserver TEST_UTIL.moveRegionAndWait(destRegion.getRegionInfo(), originServer.getServerName()); // abort the origin regionserver originServer.abort("testing"); // see what we get Result result = htable.get(new Get(Bytes.toBytes("r1"))); if (result != null) { assertTrue("Row is deleted, but we get" + result.toString(), (result == null) || result.isEmpty()); } resultScanner.close(); } /** * Tests for hbase-2727. 
* @throws Exception * @see <a href="https://issues.apache.org/jira/browse/HBASE-2727">HBASE-2727</a> */ @Test public void test2727() throws Exception { // Test being able to have > 1 set of edits in the recovered.edits directory. // Ensure edits are replayed properly. final TableName tableName = TableName.valueOf("test2727"); MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(); HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName); Path basedir = FSUtils.getTableDir(hbaseRootDir, tableName); deleteDir(basedir); HTableDescriptor htd = createBasic3FamilyHTD(tableName); Region region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd); HBaseTestingUtility.closeRegionAndWAL(region2); final byte [] rowName = tableName.getName(); WAL wal1 = createWAL(this.conf, hbaseRootDir, logName); // Add 1k to each family. final int countPerFamily = 1000; NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR); for(byte[] fam : htd.getFamiliesKeys()) { scopes.put(fam, 0); } for (HColumnDescriptor hcd: htd.getFamilies()) { addWALEdits(tableName, hri, rowName, hcd.getName(), countPerFamily, ee, wal1, htd, mvcc, scopes); } wal1.shutdown(); runWALSplit(this.conf); WAL wal2 = createWAL(this.conf, hbaseRootDir, logName); // Add 1k to each family. for (HColumnDescriptor hcd: htd.getFamilies()) { addWALEdits(tableName, hri, rowName, hcd.getName(), countPerFamily, ee, wal2, htd, mvcc, scopes); } wal2.shutdown(); runWALSplit(this.conf); WAL wal3 = createWAL(this.conf, hbaseRootDir, logName); try { HRegion region = HRegion.openHRegion(this.conf, this.fs, hbaseRootDir, hri, htd, wal3); long seqid = region.getOpenSeqNum(); // The regions opens with sequenceId as 1. With 6k edits, its sequence number reaches 6k + 1. 
// When opened, this region would apply 6k edits, and increment the sequenceId by 1 assertTrue(seqid > mvcc.getWritePoint()); assertEquals(seqid - 1, mvcc.getWritePoint()); LOG.debug("region.getOpenSeqNum(): " + region.getOpenSeqNum() + ", wal3.id: " + mvcc.getReadPoint()); // TODO: Scan all. region.close(); } finally { wal3.close(); } } /** * Test case of HRegion that is only made out of bulk loaded files. Assert * that we don't 'crash'. * @throws IOException * @throws IllegalAccessException * @throws NoSuchFieldException * @throws IllegalArgumentException * @throws SecurityException */ @Test public void testRegionMadeOfBulkLoadedFilesOnly() throws IOException, SecurityException, IllegalArgumentException, NoSuchFieldException, IllegalAccessException, InterruptedException { final TableName tableName = TableName.valueOf("testRegionMadeOfBulkLoadedFilesOnly"); final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName); final Path basedir = new Path(this.hbaseRootDir, tableName.getNameAsString()); deleteDir(basedir); final HTableDescriptor htd = createBasic3FamilyHTD(tableName); Region region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd); HBaseTestingUtility.closeRegionAndWAL(region2); WAL wal = createWAL(this.conf, hbaseRootDir, logName); Region region = HRegion.openHRegion(hri, htd, wal, this.conf); byte [] family = htd.getFamilies().iterator().next().getName(); Path f = new Path(basedir, "hfile"); HFileTestUtil.createHFile(this.conf, fs, f, family, family, Bytes.toBytes(""), Bytes.toBytes("z"), 10); List<Pair<byte[], String>> hfs = new ArrayList<>(1); hfs.add(Pair.newPair(family, f.toString())); region.bulkLoadHFiles(hfs, true, null); // Add an edit so something in the WAL byte[] row = tableName.getName(); region.put((new Put(row)).addColumn(family, family, family)); wal.sync(); final int rowsInsertedCount = 11; assertEquals(rowsInsertedCount, getScannedCount(region.getScanner(new Scan()))); // Now 'crash' the region by stealing 
its wal final Configuration newConf = HBaseConfiguration.create(this.conf); User user = HBaseTestingUtility.getDifferentUser(newConf, tableName.getNameAsString()); user.runAs(new PrivilegedExceptionAction() { @Override public Object run() throws Exception { runWALSplit(newConf); WAL wal2 = createWAL(newConf, hbaseRootDir, logName); HRegion region2 = HRegion.openHRegion(newConf, FileSystem.get(newConf), hbaseRootDir, hri, htd, wal2); long seqid2 = region2.getOpenSeqNum(); assertTrue(seqid2 > -1); assertEquals(rowsInsertedCount, getScannedCount(region2.getScanner(new Scan()))); // I can't close wal1. Its been appropriated when we split. region2.close(); wal2.close(); return null; } }); } /** * HRegion test case that is made of a major compacted HFile (created with three bulk loaded * files) and an edit in the memstore. * This is for HBASE-10958 "[dataloss] Bulk loading with seqids can prevent some log entries * from being replayed" * @throws IOException * @throws IllegalAccessException * @throws NoSuchFieldException * @throws IllegalArgumentException * @throws SecurityException */ @Test public void testCompactedBulkLoadedFiles() throws IOException, SecurityException, IllegalArgumentException, NoSuchFieldException, IllegalAccessException, InterruptedException { final TableName tableName = TableName.valueOf("testCompactedBulkLoadedFiles"); final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName); final Path basedir = new Path(this.hbaseRootDir, tableName.getNameAsString()); deleteDir(basedir); final HTableDescriptor htd = createBasic3FamilyHTD(tableName); HRegion region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd); HBaseTestingUtility.closeRegionAndWAL(region2); WAL wal = createWAL(this.conf, hbaseRootDir, logName); HRegion region = HRegion.openHRegion(hri, htd, wal, this.conf); // Add an edit so something in the WAL byte [] row = tableName.getName(); byte [] family = htd.getFamilies().iterator().next().getName(); region.put((new 
Put(row)).addColumn(family, family, family)); wal.sync(); List <Pair<byte[],String>> hfs= new ArrayList<>(1); for (int i = 0; i < 3; i++) { Path f = new Path(basedir, "hfile"+i); HFileTestUtil.createHFile(this.conf, fs, f, family, family, Bytes.toBytes(i + "00"), Bytes.toBytes(i + "50"), 10); hfs.add(Pair.newPair(family, f.toString())); } region.bulkLoadHFiles(hfs, true, null); final int rowsInsertedCount = 31; assertEquals(rowsInsertedCount, getScannedCount(region.getScanner(new Scan()))); // major compact to turn all the bulk loaded files into one normal file region.compact(true); assertEquals(rowsInsertedCount, getScannedCount(region.getScanner(new Scan()))); // Now 'crash' the region by stealing its wal final Configuration newConf = HBaseConfiguration.create(this.conf); User user = HBaseTestingUtility.getDifferentUser(newConf, tableName.getNameAsString()); user.runAs(new PrivilegedExceptionAction() { @Override public Object run() throws Exception { runWALSplit(newConf); WAL wal2 = createWAL(newConf, hbaseRootDir, logName); HRegion region2 = HRegion.openHRegion(newConf, FileSystem.get(newConf), hbaseRootDir, hri, htd, wal2); long seqid2 = region2.getOpenSeqNum(); assertTrue(seqid2 > -1); assertEquals(rowsInsertedCount, getScannedCount(region2.getScanner(new Scan()))); // I can't close wal1. Its been appropriated when we split. region2.close(); wal2.close(); return null; } }); } /** * Test writing edits into an HRegion, closing it, splitting logs, opening * Region again. Verify seqids. 
* @throws IOException * @throws IllegalAccessException * @throws NoSuchFieldException * @throws IllegalArgumentException * @throws SecurityException */ @Test public void testReplayEditsWrittenViaHRegion() throws IOException, SecurityException, IllegalArgumentException, NoSuchFieldException, IllegalAccessException, InterruptedException { final TableName tableName = TableName.valueOf("testReplayEditsWrittenViaHRegion"); final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName); final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName); deleteDir(basedir); final byte[] rowName = tableName.getName(); final int countPerFamily = 10; final HTableDescriptor htd = createBasic3FamilyHTD(tableName); HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd); HBaseTestingUtility.closeRegionAndWAL(region3); // Write countPerFamily edits into the three families. Do a flush on one // of the families during the load of edits so its seqid is not same as // others to test we do right thing when different seqids. WAL wal = createWAL(this.conf, hbaseRootDir, logName); HRegion region = HRegion.openHRegion(this.conf, this.fs, hbaseRootDir, hri, htd, wal); long seqid = region.getOpenSeqNum(); boolean first = true; for (HColumnDescriptor hcd: htd.getFamilies()) { addRegionEdits(rowName, hcd.getName(), countPerFamily, this.ee, region, "x"); if (first) { // If first, so we have at least one family w/ different seqid to rest. region.flush(true); first = false; } } // Now assert edits made it in. final Get g = new Get(rowName); Result result = region.get(g); assertEquals(countPerFamily * htd.getFamilies().size(), result.size()); // Now close the region (without flush), split the log, reopen the region and assert that // replay of log has the correct effect, that our seqids are calculated correctly so // all edits in logs are seen as 'stale'/old. 
region.close(true); wal.shutdown(); runWALSplit(this.conf); WAL wal2 = createWAL(this.conf, hbaseRootDir, logName); HRegion region2 = HRegion.openHRegion(conf, this.fs, hbaseRootDir, hri, htd, wal2); long seqid2 = region2.getOpenSeqNum(); assertTrue(seqid + result.size() < seqid2); final Result result1b = region2.get(g); assertEquals(result.size(), result1b.size()); // Next test. Add more edits, then 'crash' this region by stealing its wal // out from under it and assert that replay of the log adds the edits back // correctly when region is opened again. for (HColumnDescriptor hcd: htd.getFamilies()) { addRegionEdits(rowName, hcd.getName(), countPerFamily, this.ee, region2, "y"); } // Get count of edits. final Result result2 = region2.get(g); assertEquals(2 * result.size(), result2.size()); wal2.sync(); final Configuration newConf = HBaseConfiguration.create(this.conf); User user = HBaseTestingUtility.getDifferentUser(newConf, tableName.getNameAsString()); user.runAs(new PrivilegedExceptionAction<Object>() { @Override public Object run() throws Exception { runWALSplit(newConf); FileSystem newFS = FileSystem.get(newConf); // Make a new wal for new region open. WAL wal3 = createWAL(newConf, hbaseRootDir, logName); final AtomicInteger countOfRestoredEdits = new AtomicInteger(0); HRegion region3 = new HRegion(basedir, wal3, newFS, newConf, hri, htd, null) { @Override protected void restoreEdit(HStore s, Cell cell, MemStoreSize memstoreSize) { super.restoreEdit(s, cell, memstoreSize); countOfRestoredEdits.incrementAndGet(); } }; long seqid3 = region3.initialize(); Result result3 = region3.get(g); // Assert that count of cells is same as before crash. assertEquals(result2.size(), result3.size()); assertEquals(htd.getFamilies().size() * countPerFamily, countOfRestoredEdits.get()); // I can't close wal1. Its been appropriated when we split. 
// (tail of the previous test, cut at the top of this chunk: close region3/wal3,
// finish the PrivilegedExceptionAction.run(), the runAs() call, and the test method)
      region3.close();
      wal3.close();
      return null;
    }
  });
  }

  /**
   * Test that we recover correctly when there is a failure in between the
   * flushes. i.e. Some stores got flushed but others did not.
   *
   * Unfortunately, there is no easy hook to flush at a store level. The way
   * we get around this is by flushing at the region level, and then deleting
   * the recently flushed store file for one of the Stores. This would put us
   * back in the situation where all but that store got flushed and the region
   * died.
   *
   * We restart Region again, and verify that the edits were replayed.
   *
   * @throws IOException
   * @throws IllegalAccessException
   * @throws NoSuchFieldException
   * @throws IllegalArgumentException
   * @throws SecurityException
   */
  @Test
  public void testReplayEditsAfterPartialFlush() throws IOException, SecurityException,
      IllegalArgumentException, NoSuchFieldException, IllegalAccessException, InterruptedException {
    final TableName tableName = TableName.valueOf("testReplayEditsWrittenViaHRegion");
    final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
    final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName);
    deleteDir(basedir);
    final byte[] rowName = tableName.getName();
    final int countPerFamily = 10;
    final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
    HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
    HBaseTestingUtility.closeRegionAndWAL(region3);
    // Write countPerFamily edits into the three families. Do a flush on one
    // of the families during the load of edits so its seqid is not same as
    // others to test we do right thing when different seqids.
    WAL wal = createWAL(this.conf, hbaseRootDir, logName);
    HRegion region = HRegion.openHRegion(this.conf, this.fs, hbaseRootDir, hri, htd, wal);
    long seqid = region.getOpenSeqNum();
    for (HColumnDescriptor hcd: htd.getFamilies()) {
      addRegionEdits(rowName, hcd.getName(), countPerFamily, this.ee, region, "x");
    }

    // Now assert edits made it in.
    final Get g = new Get(rowName);
    Result result = region.get(g);
    assertEquals(countPerFamily * htd.getFamilies().size(), result.size());

    // Let us flush the region
    region.flush(true);
    region.close(true);
    wal.shutdown();

    // delete the store files in the second column family to simulate a failure
    // in between the flushcache();
    // we have 3 families. killing the middle one ensures that taking the maximum
    // will make us fail.
    int cf_count = 0;
    for (HColumnDescriptor hcd: htd.getFamilies()) {
      cf_count++;
      if (cf_count == 2) {
        region.getRegionFileSystem().deleteFamily(hcd.getNameAsString());
      }
    }

    // Let us try to split and recover: replay must restore the deleted family's edits.
    runWALSplit(this.conf);
    WAL wal2 = createWAL(this.conf, hbaseRootDir, logName);
    HRegion region2 = HRegion.openHRegion(this.conf, this.fs, hbaseRootDir, hri, htd, wal2);
    long seqid2 = region2.getOpenSeqNum();
    // Replay advances the sequence id past every restored edit.
    assertTrue(seqid + result.size() < seqid2);

    final Result result1b = region2.get(g);
    assertEquals(result.size(), result1b.size());
  }

  // StoreFlusher implementation used in testReplayEditsAfterAbortingFlush.
  // Only throws exception if throwExceptionWhenFlushing is set true.
  public static class CustomStoreFlusher extends DefaultStoreFlusher {
    // Switch between throw and not throw exception in flush
    static final AtomicBoolean throwExceptionWhenFlushing = new AtomicBoolean(false);

    public CustomStoreFlusher(Configuration conf, HStore store) {
      super(conf, store);
    }

    @Override
    public List<Path> flushSnapshot(MemStoreSnapshot snapshot, long cacheFlushId,
        MonitoredTask status, ThroughputController throughputController) throws IOException {
      if (throwExceptionWhenFlushing.get()) {
        throw new IOException("Simulated exception by tests");
      }
      return super.flushSnapshot(snapshot, cacheFlushId, status, throughputController);
    }
  };

  /**
   * Test that we could recover the data correctly after aborting flush. In the
   * test, first we abort flush after writing some data, then writing more data
   * and flush again, at last verify the data.
   * @throws IOException
   */
  @Test
  public void testReplayEditsAfterAbortingFlush() throws IOException {
    final TableName tableName = TableName.valueOf("testReplayEditsAfterAbortingFlush");
    final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
    final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName);
    deleteDir(basedir);
    final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
    HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
    HBaseTestingUtility.closeRegionAndWAL(region3);
    // Write countPerFamily edits into the three families. Do a flush on one
    // of the families during the load of edits so its seqid is not same as
    // others to test we do right thing when different seqids.
    WAL wal = createWAL(this.conf, hbaseRootDir, logName);
    RegionServerServices rsServices = Mockito.mock(RegionServerServices.class);
    Mockito.doReturn(false).when(rsServices).isAborted();
    when(rsServices.getServerName()).thenReturn(ServerName.valueOf("foo", 10, 10));
    // Install the flusher above so we can inject a flush failure on demand.
    Configuration customConf = new Configuration(this.conf);
    customConf.set(DefaultStoreEngine.DEFAULT_STORE_FLUSHER_CLASS_KEY,
        CustomStoreFlusher.class.getName());
    HRegion region = HRegion.openHRegion(this.hbaseRootDir, hri, htd, wal, customConf, rsServices,
        null);
    int writtenRowCount = 10;
    List<HColumnDescriptor> families = new ArrayList<>(htd.getFamilies());
    for (int i = 0; i < writtenRowCount; i++) {
      Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i)));
      put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
          Bytes.toBytes("val"));
      region.put(put);
    }

    // Now assert edits made it in.
    RegionScanner scanner = region.getScanner(new Scan());
    assertEquals(writtenRowCount, getScannedCount(scanner));

    // Let us flush the region
    CustomStoreFlusher.throwExceptionWhenFlushing.set(true);
    try {
      region.flush(true);
      fail("Injected exception hasn't been thrown");
    } catch (Throwable t) {
      LOG.info("Expected simulated exception when flushing region," + t.getMessage());
      // simulated to abort server
      Mockito.doReturn(true).when(rsServices).isAborted();
      region.setClosing(false); // region normally does not accept writes after
      // DroppedSnapshotException. We mock around it for this test.
    }
    // writing more data
    int moreRow = 10;
    for (int i = writtenRowCount; i < writtenRowCount + moreRow; i++) {
      Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i)));
      put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
          Bytes.toBytes("val"));
      region.put(put);
    }
    writtenRowCount += moreRow;
    // call flush again
    CustomStoreFlusher.throwExceptionWhenFlushing.set(false);
    try {
      region.flush(true);
    } catch (IOException t) {
      LOG.info("Expected exception when flushing region because server is stopped,"
          + t.getMessage());
    }

    region.close(true);
    wal.shutdown();

    // Let us try to split and recover
    runWALSplit(this.conf);
    WAL wal2 = createWAL(this.conf, hbaseRootDir, logName);
    Mockito.doReturn(false).when(rsServices).isAborted();
    HRegion region2 = HRegion.openHRegion(this.hbaseRootDir, hri, htd, wal2, this.conf, rsServices,
        null);
    scanner = region2.getScanner(new Scan());
    // All rows — both flushed and aborted-flush batches — must be visible after replay.
    assertEquals(writtenRowCount, getScannedCount(scanner));
  }

  /**
   * Counts the rows (not cells) returned by the given scanner, draining it fully.
   */
  private int getScannedCount(RegionScanner scanner) throws IOException {
    int scannedCount = 0;
    List<Cell> results = new ArrayList<>();
    while (true) {
      boolean existMore = scanner.next(results);
      if (!results.isEmpty())
        scannedCount++;
      if (!existMore)
        break;
      results.clear();
    }
    return scannedCount;
  }

  /**
   * Create an HRegion with the result of a WAL split and test we only see the
   * good edits
   * @throws Exception
   */
  @Test
  public void testReplayEditsWrittenIntoWAL() throws Exception {
    final TableName tableName = TableName.valueOf("testReplayEditsWrittenIntoWAL");
    final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
    final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
    final Path basedir = FSUtils.getTableDir(hbaseRootDir, tableName);
    deleteDir(basedir);
    final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
    HRegion region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
    HBaseTestingUtility.closeRegionAndWAL(region2);
    final WAL wal = createWAL(this.conf, hbaseRootDir, logName);
    final byte[] rowName = tableName.getName();
    final byte[] regionName = hri.getEncodedNameAsBytes();

    // Add 1k to each family.
    final int countPerFamily = 1000;
    Set<byte[]> familyNames = new HashSet<>();
    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for(byte[] fam : htd.getFamiliesKeys()) {
      scopes.put(fam, 0);
    }
    for (HColumnDescriptor hcd: htd.getFamilies()) {
      addWALEdits(tableName, hri, rowName, hcd.getName(), countPerFamily, ee, wal, htd, mvcc,
          scopes);
      familyNames.add(hcd.getName());
    }

    // Add a cache flush, shouldn't have any effect
    wal.startCacheFlush(regionName, familyNames);
    wal.completeCacheFlush(regionName);

    // Add an edit to another family, should be skipped.
    WALEdit edit = new WALEdit();
    long now = ee.currentTime();
    edit.add(new KeyValue(rowName, Bytes.toBytes("another family"), rowName, now, rowName));
    wal.append(hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc, scopes), edit,
        true);

    // Delete the c family to verify deletes make it over.
    edit = new WALEdit();
    now = ee.currentTime();
    edit.add(new KeyValue(rowName, Bytes.toBytes("c"), null, now, KeyValue.Type.DeleteFamily));
    wal.append(hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc, scopes), edit,
        true);

    // Sync.
    wal.sync();

    // Make a new conf and a new fs for the splitter to run on so we can take
    // over old wal.
    final Configuration newConf = HBaseConfiguration.create(this.conf);
    User user = HBaseTestingUtility.getDifferentUser(newConf, ".replay.wal.secondtime");
    user.runAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        runWALSplit(newConf);
        FileSystem newFS = FileSystem.get(newConf);
        // 100k seems to make for about 4 flushes during HRegion#initialize.
        newConf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 100);
        // Make a new wal for new region.
        WAL newWal = createWAL(newConf, hbaseRootDir, logName);
        final AtomicInteger flushcount = new AtomicInteger(0);
        try {
          // Anonymous subclass counts internal flushes triggered during replay.
          final HRegion region = new HRegion(basedir, newWal, newFS, newConf, hri, htd, null) {
            @Override
            protected FlushResultImpl internalFlushcache(final WAL wal, final long myseqid,
                final Collection<HStore> storesToFlush, MonitoredTask status,
                boolean writeFlushWalMarker) throws IOException {
              LOG.info("InternalFlushCache Invoked");
              FlushResultImpl fs = super.internalFlushcache(wal, myseqid, storesToFlush,
                  Mockito.mock(MonitoredTask.class), writeFlushWalMarker);
              flushcount.incrementAndGet();
              return fs;
            }
          };
          // The seq id this region has opened up with
          long seqid = region.initialize();
          // The mvcc readpoint of from inserting data.
          long writePoint = mvcc.getWritePoint();

          // We flushed during init.
          assertTrue("Flushcount=" + flushcount.get(), flushcount.get() > 0);
          assertTrue((seqid - 1) == writePoint);

          Get get = new Get(rowName);
          Result result = region.get(get);
          // Make sure we only see the good edits: the 'c' family was deleted above,
          // so one family's worth of edits must be absent.
          assertEquals(countPerFamily * (htd.getFamilies().size() - 1), result.size());
          region.close();
        } finally {
          newWal.close();
        }
        return null;
      }
    });
  }

  @Test
  // the following test is for HBASE-6065
  public void testSequentialEditLogSeqNum() throws IOException {
    final TableName tableName = TableName.valueOf(currentTest.getMethodName());
    final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
    final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName);
    deleteDir(basedir);
    final byte[] rowName = tableName.getName();
    final int countPerFamily = 10;
    final HTableDescriptor htd = createBasic1FamilyHTD(tableName);

    // Mock the WAL
    MockWAL wal = createMockWAL();

    HRegion region = HRegion.openHRegion(this.conf, this.fs, hbaseRootDir, hri, htd, wal);
    for (HColumnDescriptor hcd : htd.getFamilies()) {
      addRegionEdits(rowName, hcd.getName(), countPerFamily, this.ee, region, "x");
    }

    // Let us flush the region
    // But this time completeflushcache is not yet done
    region.flush(true);
    for (HColumnDescriptor hcd : htd.getFamilies()) {
      addRegionEdits(rowName, hcd.getName(), 5, this.ee, region, "x");
    }
    long lastestSeqNumber = region.getReadPoint(null);
    // get the current seq no
    wal.doCompleteCacheFlush = true;
    // allow complete cache flush with the previous seq number got after first
    // set of edits.
    wal.completeCacheFlush(hri.getEncodedNameAsBytes());
    wal.shutdown();
    FileStatus[] listStatus = wal.getFiles();
    assertNotNull(listStatus);
    assertTrue(listStatus.length > 0);
    WALSplitter.splitLogFile(hbaseRootDir, listStatus[0], this.fs, this.conf, null, null, null,
        mode, wals);
    // List recovered.edits files, excluding the sequence-id marker file.
    FileStatus[] listStatus1 = this.fs.listStatus(
        new Path(FSUtils.getTableDir(hbaseRootDir, tableName), new Path(hri.getEncodedName(),
            "recovered.edits")), new PathFilter() {
          @Override
          public boolean accept(Path p) {
            if (WALSplitter.isSequenceIdFile(p)) {
              return false;
            }
            return true;
          }
        });
    int editCount = 0;
    for (FileStatus fileStatus : listStatus1) {
      // recovered.edits file names encode the highest sequence id they contain.
      editCount = Integer.parseInt(fileStatus.getPath().getName());
    }
    // The sequence number should be same
    assertEquals(
        "The sequence number of the recoverd.edits and the current edit seq should be same",
        lastestSeqNumber, editCount);
  }

  /**
   * testcase for https://issues.apache.org/jira/browse/HBASE-15252
   */
  @Test
  public void testDatalossWhenInputError() throws IOException, InstantiationException,
      IllegalAccessException {
    final TableName tableName = TableName.valueOf("testDatalossWhenInputError");
    final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
    final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName);
    deleteDir(basedir);
    final byte[] rowName = tableName.getName();
    final int countPerFamily = 10;
    final HTableDescriptor htd = createBasic1FamilyHTD(tableName);
    HRegion region1 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
    Path regionDir = region1.getRegionFileSystem().getRegionDir();
    HBaseTestingUtility.closeRegionAndWAL(region1);

    WAL wal = createWAL(this.conf, hbaseRootDir, logName);
    HRegion region = HRegion.openHRegion(this.conf, this.fs, hbaseRootDir, hri, htd, wal);
    for (HColumnDescriptor hcd : htd.getFamilies()) {
      addRegionEdits(rowName, hcd.getName(), countPerFamily, this.ee, region, "x");
    }

    // Now assert edits made it in.
    final Get g = new Get(rowName);
    Result result = region.get(g);
    assertEquals(countPerFamily * htd.getFamilies().size(), result.size());

    // Now close the region (without flush), split the log, reopen the region and assert that
    // replay of log has the correct effect.
    region.close(true);
    wal.shutdown();
    runWALSplit(this.conf);

    // here we let the DFSInputStream throw an IOException just after the WALHeader.
    Path editFile = WALSplitter.getSplitEditFilesSorted(this.fs, regionDir).first();
    FSDataInputStream stream = fs.open(editFile);
    stream.seek(ProtobufLogReader.PB_WAL_MAGIC.length);
    Class<? extends AbstractFSWALProvider.Reader> logReaderClass =
        conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,
            AbstractFSWALProvider.Reader.class);
    AbstractFSWALProvider.Reader reader = logReaderClass.newInstance();
    reader.init(this.fs, editFile, conf, stream);
    final long headerLength = stream.getPos();
    reader.close();

    // Spy the filesystem so the edit file's stream fails on any read past the header.
    FileSystem spyFs = spy(this.fs);
    doAnswer(new Answer<FSDataInputStream>() {

      @Override
      public FSDataInputStream answer(InvocationOnMock invocation) throws Throwable {
        FSDataInputStream stream = (FSDataInputStream) invocation.callRealMethod();
        // Replace the wrapped DFSInputStream (reflection on FilterInputStream.in)
        // with a spy that throws once the read position passes headerLength.
        Field field = FilterInputStream.class.getDeclaredField("in");
        field.setAccessible(true);
        final DFSInputStream in = (DFSInputStream) field.get(stream);
        DFSInputStream spyIn = spy(in);
        doAnswer(new Answer<Integer>() {

          private long pos;

          @Override
          public Integer answer(InvocationOnMock invocation) throws Throwable {
            if (pos >= headerLength) {
              throw new IOException("read over limit");
            }
            int b = (Integer) invocation.callRealMethod();
            if (b > 0) {
              pos += b;
            }
            return b;
          }
        }).when(spyIn).read(any(byte[].class), any(int.class), any(int.class));
        doAnswer(new Answer<Void>() {

          @Override
          public Void answer(InvocationOnMock invocation) throws Throwable {
            invocation.callRealMethod();
            in.close();
            return null;
          }
        }).when(spyIn).close();
        field.set(stream, spyIn);
        return stream;
      }
    }).when(spyFs).open(eq(editFile));

    WAL wal2 = createWAL(this.conf, hbaseRootDir, logName);
    HRegion region2;
    try {
      // log replay should fail due to the IOException, otherwise we may lose data.
      region2 = HRegion.openHRegion(conf, spyFs, hbaseRootDir, hri, htd, wal2);
      assertEquals(result.size(), region2.get(g).size());
    } catch (IOException e) {
      assertEquals("read over limit", e.getMessage());
    }
    // With the real (non-failing) filesystem, replay succeeds and no data is lost.
    region2 = HRegion.openHRegion(conf, fs, hbaseRootDir, hri, htd, wal2);
    assertEquals(result.size(), region2.get(g).size());
  }

  /**
   * testcase for https://issues.apache.org/jira/browse/HBASE-14949.
   */
  private void testNameConflictWhenSplit(boolean largeFirst) throws IOException {
    final TableName tableName = TableName.valueOf("testReplayEditsWrittenIntoWAL");
    final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
    final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
    final Path basedir = FSUtils.getTableDir(hbaseRootDir, tableName);
    deleteDir(basedir);

    final HTableDescriptor htd = createBasic1FamilyHTD(tableName);
    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (byte[] fam : htd.getFamiliesKeys()) {
      scopes.put(fam, 0);
    }
    HRegion region = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
    HBaseTestingUtility.closeRegionAndWAL(region);
    final byte[] family = htd.getColumnFamilies()[0].getName();
    final byte[] rowName = tableName.getName();
    // Two WAL files whose recovered.edits outputs collide on name: the small file
    // contains only entry2, the large one entries 1 and 2.
    FSWALEntry entry1 = createFSWALEntry(htd, hri, 1L, rowName, family, ee, mvcc, 1, scopes);
    FSWALEntry entry2 = createFSWALEntry(htd, hri, 2L, rowName, family, ee, mvcc, 2, scopes);
    Path largeFile = new Path(logDir, "wal-1");
    Path smallFile = new Path(logDir, "wal-2");
    writerWALFile(largeFile, Arrays.asList(entry1, entry2));
    writerWALFile(smallFile, Arrays.asList(entry2));
    FileStatus first, second;
    if (largeFirst) {
      first = fs.getFileStatus(largeFile);
      second = fs.getFileStatus(smallFile);
    } else {
      first = fs.getFileStatus(smallFile);
      second = fs.getFileStatus(largeFile);
    }
    WALSplitter.splitLogFile(hbaseRootDir, first, fs, conf, null, null, null,
        RecoveryMode.LOG_SPLITTING, wals);
    WALSplitter.splitLogFile(hbaseRootDir, second, fs, conf, null, null, null,
        RecoveryMode.LOG_SPLITTING, wals);
    WAL wal = createWAL(this.conf, hbaseRootDir, logName);
    region = HRegion.openHRegion(conf, this.fs, hbaseRootDir, hri, htd, wal);
    assertTrue(region.getOpenSeqNum() > mvcc.getWritePoint());
    // Both distinct edits survive regardless of split order.
    assertEquals(2, region.get(new Get(rowName)).size());
  }

  /** Split order: large file first. */
  @Test
  public void testNameConflictWhenSplit0() throws IOException {
    testNameConflictWhenSplit(true);
  }

  /** Split order: small file first. */
  @Test
  public void testNameConflictWhenSplit1() throws IOException {
    testNameConflictWhenSplit(false);
  }

  /**
   * FSHLog whose completeCacheFlush is a no-op until doCompleteCacheFlush is set,
   * letting tests delay the flush-complete marker.
   */
  static class MockWAL extends FSHLog {
    boolean doCompleteCacheFlush = false;

    public MockWAL(FileSystem fs, Path rootDir, String logName, Configuration conf)
        throws IOException {
      super(fs, rootDir, logName, HConstants.HREGION_OLDLOGDIR_NAME, conf, null, true, null, null);
    }

    @Override
    public void completeCacheFlush(byte[] encodedRegionName) {
      if (!doCompleteCacheFlush) {
        return;
      }
      super.completeCacheFlush(encodedRegionName);
    }
  }

  /** Creates an HTD with the single column family 'a'. */
  private HTableDescriptor createBasic1FamilyHTD(final TableName tableName) {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    HColumnDescriptor a = new HColumnDescriptor(Bytes.toBytes("a"));
    htd.addFamily(a);
    return htd;
  }

  private MockWAL createMockWAL() throws IOException {
    MockWAL wal = new MockWAL(fs, hbaseRootDir, logName, conf);
    // Set down maximum recovery so we dfsclient doesn't linger retrying something
    // long gone.
    HBaseTestingUtility.setMaxRecoveryErrorCount(wal.getOutputStream(), 1);
    return wal;
  }

  // Flusher used in this test.  Keep count of how often we are called and
  // actually run the flush inside here.
  class TestFlusher implements FlushRequester {
    // NOTE(review): r is never assigned within this class; presumably set by a
    // test before requestFlush is called — NPE otherwise. TODO confirm.
    private HRegion r;

    @Override
    public void requestFlush(Region region, boolean force) {
      try {
        r.flush(force);
      } catch (IOException e) {
        throw new RuntimeException("Exception flushing", e);
      }
    }

    @Override
    public void requestDelayedFlush(Region region, long when, boolean forceFlushAllStores) {
      // TODO Auto-generated method stub
    }

    @Override
    public void registerFlushRequestListener(FlushRequestListener listener) {

    }

    @Override
    public boolean unregisterFlushRequestListener(FlushRequestListener listener) {
      return false;
    }

    @Override
    public void setGlobalMemStoreLimit(long globalMemStoreSize) {

    }
  }

  /** Builds a WALKey for the given region with a fixed write time of 999. */
  private WALKey createWALKey(final TableName tableName, final HRegionInfo hri,
      final MultiVersionConcurrencyControl mvcc, NavigableMap<byte[], Integer> scopes) {
    return new WALKey(hri.getEncodedNameAsBytes(), tableName, 999, mvcc, scopes);
  }

  /** Builds a one-KeyValue WALEdit; qualifier and value both encode the index. */
  private WALEdit createWALEdit(final byte[] rowName, final byte[] family, EnvironmentEdge ee,
      int index) {
    byte[] qualifierBytes = Bytes.toBytes(Integer.toString(index));
    byte[] columnBytes = Bytes.toBytes(Bytes.toString(family) + ":" + Integer.toString(index));
    WALEdit edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, qualifierBytes, ee.currentTime(), columnBytes));
    return edit;
  }

  /** Builds an FSWALEntry carrying one edit, stamped with a fresh mvcc sequence id. */
  private FSWALEntry createFSWALEntry(HTableDescriptor htd, HRegionInfo hri, long sequence,
      byte[] rowName, byte[] family, EnvironmentEdge ee, MultiVersionConcurrencyControl mvcc,
      int index, NavigableMap<byte[], Integer> scopes) throws IOException {
    FSWALEntry entry = new FSWALEntry(sequence, createWALKey(htd.getTableName(), hri, mvcc,
        scopes), createWALEdit(rowName, family, ee, index), hri, true);
    entry.stampRegionSequenceId(mvcc.begin());
    return entry;
  }

  /** Appends count edits for one family directly to the WAL, then syncs. */
  private void addWALEdits(final TableName tableName, final HRegionInfo hri, final byte[] rowName,
      final byte[] family, final int count, EnvironmentEdge ee, final WAL wal,
      final HTableDescriptor htd, final MultiVersionConcurrencyControl mvcc,
      NavigableMap<byte[], Integer> scopes) throws IOException {
    for (int j = 0; j < count; j++) {
      wal.append(hri, createWALKey(tableName, hri, mvcc, scopes),
          createWALEdit(rowName, family, ee, j), true);
    }
    wal.sync();
  }

  /** Puts count cells (qualifierPrefix0..countPerFamily-1) into the region via the normal write path. */
  static List<Put> addRegionEdits(final byte[] rowName, final byte[] family, final int count,
      EnvironmentEdge ee, final Region r, final String qualifierPrefix) throws IOException {
    List<Put> puts = new ArrayList<>();
    for (int j = 0; j < count; j++) {
      byte[] qualifier = Bytes.toBytes(qualifierPrefix + Integer.toString(j));
      Put p = new Put(rowName);
      p.addColumn(family, qualifier, ee.currentTime(), rowName);
      r.put(p);
      puts.add(p);
    }
    return puts;
  }

  /*
   * Creates an HRI around an HTD that has <code>tableName</code> and three
   * column families named 'a','b', and 'c'.
   * @param tableName Name of table to use when we create HTableDescriptor.
   */
  private HRegionInfo createBasic3FamilyHRegionInfo(final TableName tableName) {
    return new HRegionInfo(tableName, null, null, false);
  }

  /*
   * Run the split. Verify only single split file made.
   * @param c
   * @return The single split file made
   * @throws IOException
   */
  private Path runWALSplit(final Configuration c) throws IOException {
    List<Path> splits = WALSplitter.split(
      hbaseRootDir, logDir, oldLogDir, FileSystem.get(c), c, wals);
    // Split should generate only 1 file since there's only 1 region
    assertEquals("splits=" + splits, 1, splits.size());
    // Make sure the file exists
    assertTrue(fs.exists(splits.get(0)));
    LOG.info("Split file=" + splits.get(0));
    return splits.get(0);
  }

  /** Creates an HTD with three column families 'a', 'b' and 'c'. */
  private HTableDescriptor createBasic3FamilyHTD(final TableName tableName) {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    HColumnDescriptor a = new HColumnDescriptor(Bytes.toBytes("a"));
    htd.addFamily(a);
    HColumnDescriptor b = new HColumnDescriptor(Bytes.toBytes("b"));
    htd.addFamily(b);
    HColumnDescriptor c = new HColumnDescriptor(Bytes.toBytes("c"));
    htd.addFamily(c);
    return htd;
  }

  /** Writes the given entries into a protobuf-format WAL file at the given path. */
  private void writerWALFile(Path file, List<FSWALEntry> entries) throws IOException {
    fs.mkdirs(file.getParent());
    ProtobufLogWriter writer = new ProtobufLogWriter();
    writer.init(fs, file, conf, true);
    for (FSWALEntry entry : entries) {
      writer.append(entry);
    }
    writer.sync();
    writer.close();
  }

  /** Subclasses supply the concrete WAL implementation under test. */
  protected abstract WAL createWAL(Configuration c, Path hbaseRootDir, String logName)
      throws IOException;
}
package vpc.simu;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Stack;

import cck.util.Util;
import vpc.Interpreter;
import vpc.core.Program;
import vpc.core.ProgramDecl;
import vpc.core.Value;
import vpc.core.base.PrimRaw;
import vpc.core.base.PrimConversion.Raw_Int32;
import vpc.dart.ExternalVariable;
import vpc.dart.PathChecker;
import vpc.dart.symc.SCValue;
import vpc.stack.InterruptGraph;
import vpc.stack.StackSizeMonitor;
import vpc.tir.TIRInterpreter;
import vpc.hil.Device;

/**
 * Randomized interrupt scheduler for the simulator: tracks interrupt mask
 * registers, decides (via Math.random) when an enabled interrupt fires, and
 * invokes the corresponding entry point on the TIR interpreter.
 * Singleton via {@link #Instance}; state is mutable and not thread-safe.
 */
public class IntptrScheduler {
    public static final IntptrScheduler Instance = new IntptrScheduler();

    private TIRInterpreter currInterp;      // interpreter used to invoke interrupt handlers
    private Program currProg;               // program whose entry points define the interrupts
    private PathChecker checker;            // DART path checker (set via initDartEnvi)
    private InterruptGraph intrGraph;       // stack-analysis graph (set via initStackChecker)
    private StackSizeMonitor monitor;
    private HashMap<String, Register> intrRegs;   // register name -> simulated register
    private ExternalVariable ioRegs;              // fallback store for registers not tracked here
    private HashMap<String, BitDisp> intrBit;     // interrupt name -> (mask register, bit)
    private LinkedList<String> fireableInts;      // interrupts whose mask bit is currently set
    private Stack<ProgramDecl.EntryPoint> interruptCall;  // nesting stack of active handlers
    //private StringBuilder interruptOutput = new StringBuilder();
    private boolean initialized;            // guards all public entry points until initScheduler ran
    // endTime: wall-clock deadline (0 = no limit); runTime: start timestamp;
    // lastTime: time of last enter/exit log line. NOTE(review): field runTime is
    // shadowed in meaning by the method runTime() below.
    long endTime, runTime, lastTime;

    /** Location of a single interrupt-mask bit: register name plus bit index. */
    class BitDisp{
        public String regName;
        public int regBit;
        public BitDisp(String reg, int bit){
            regName = reg;
            regBit = bit;
        }
        // NOTE(review): overload, not an override of Object.equals, and it compares
        // names with String.matches (regex semantics) rather than String.equals —
        // safe only while register names contain no regex metacharacters. Appears
        // unused in this file; TODO confirm before changing.
        public boolean equals(BitDisp t){
            if(t.regName.matches(regName) && t.regBit == regBit)
                return true;
            else
                return false;
        }
    }

    private Register sysInt;   // the global interrupt-enable register ("SREG", bit 7)

    // public String getInterruptInfoString() {
    //     return interruptOutput.toString();
    // }

    /**
     * Registers SREG plus every non-main entry point's mask register/bit from the
     * target device description, marks each bit fireable, then rebuilds the
     * fireable list.
     */
    private void getSysInterrupts(){
        // manually add the system enable interrupt
        sysInt = new Register("SREG");
        intrRegs.put("SREG", sysInt);
        for(ProgramDecl.EntryPoint e : currProg.programDecl.entryPoints){
            if(e == currProg.programDecl.mainEntry) // skip the main entry
                continue;
            String intName = e.getName();
            System.out.println("get int name == " + intName);
            Device.Interrupt intr = currProg.targetDevice.interrupts.get(intName);
            BitDisp bit = new BitDisp(intr.maskReg.image, Integer.parseInt(intr.maskBit.image));
            intrBit.put(intName, bit);
            Register currReg = intrRegs.get(bit.regName);
            // if the mask register isn't tracked yet, create it
            if(currReg == null){
                currReg = new Register(bit.regName);
                intrRegs.put(currReg.getName(), currReg);
            }
            // initially fireable
            currReg.setBit(bit.regBit, true);
        }
        buildFireableList();
    }

    /** Replaces all tracked registers with those from the package and rebuilds the fireable list. */
    public void restoreAllRegs(RegisterPackage regs) {
        this.intrRegs.clear();
        for(Register cur : regs.getRegisters()) {
            this.intrRegs.put(cur.m_regName, cur);
        }
        buildFireableList();
    }

    /** Snapshots all tracked registers plus the checker's current input (deep-copied by RegisterPackage). */
    public RegisterPackage backupAllRegs(String name) {
        // it will be deep copied
        return new RegisterPackage(name, intrRegs.values().toArray(new Register[0]), checker.getCurrentInput());
    }

    public IntptrScheduler(){
        intrRegs = new HashMap<String,Register>();
        intrBit = new HashMap<String, BitDisp>();
        fireableInts = new LinkedList<String>();
        initialized = false;
        runTime = 0;
    }

    /**
     * Binds the interpreter/program, discovers interrupts, and arms the wall-clock
     * deadline from the "seconds" option (&lt;= 0 means run forever).
     */
    public void initScheduler(TIRInterpreter interp, Program p){
        currInterp = interp;
        currProg = p;
        getSysInterrupts();
        System.out.println("Int Scheduler initialized");
        initialized = true;
        // "seconds" option is stored boxed as Float; cast then truncate to int.
        int time = (int)Float.class.cast(Interpreter.options.getOption("seconds").getValue()).floatValue();
        endTime = time <= 0 ? 0 : System.currentTimeMillis() + time * 1000;
        runTime = System.currentTimeMillis();
    }

    /** Recomputes fireableInts: every interrupt whose mask bit is currently set. */
    private void buildFireableList(){
        fireableInts.clear();
        for(String intr : intrBit.keySet()){
            BitDisp b = intrBit.get(intr);
            if(intrRegs.get(b.regName).getBit(b.regBit)){
                fireableInts.add(intr);
            }
        }
    }

    /** True when a deadline is armed and has passed. */
    public boolean isTimeout(){
        return endTime > 0 && endTime < System.currentTimeMillis();
    }

    /** Milliseconds elapsed since initScheduler (or since construction before that). */
    public long runTime() {
        return System.currentTimeMillis() - runTime;
    }

    /**
     * Picks a random fireable interrupt and resolves its entry point; null when
     * global interrupts are disabled or nothing is fireable.
     */
    private ProgramDecl.EntryPoint getIntFired(){
        if(!isEnable())
            return null;
        String fire = getFirableInt();
        if(fire == null )
            return null;
        ProgramDecl.EntryPoint intEntry = ProgramDecl.lookupEntryPoint(fire, currProg);
        return intEntry;
    }

    /** Global interrupt-enable flag: SREG bit 7. */
    private boolean isEnable(){
        return sysInt.getBit(7);
    }

    /** Clears the global enable bit and this interrupt's mask bit (entering its handler). */
    private void CLI(String intrName) {
        sysInt.setBit(7, false);
        setIntMask(intrName, false);
    }

    /** Restores the global enable bit and this interrupt's mask bit (leaving its handler). */
    private void STI(String intrName){
        sysInt.setBit(7, true);
        setIntMask(intrName, true);
    }

    /** Writes a register: tracked interrupt registers directly, anything else via ioRegs. */
    public void setRegValue(Device.Register r, Value v) {
        if(!initialized)
            return;
        Register currReg = intrRegs.get(r.getName());
        if(currReg != null)
            currReg.setValue(PrimRaw.fromValue(v));
        else
            ioRegs.setValue(r.getName(), v);
    }

    /**
     * Reads a register as a symbolic value; before initialization always returns
     * an 8-bit zero.
     */
    public Value getRegValue(Device.Register r){
        if(!initialized)
            return SCValue.getSCValue(PrimRaw.toValue(8, 0));
        Register currReg = intrRegs.get(r.getName());
        if(currReg != null)
            return SCValue.getSCValue(PrimRaw.toValue(Register.BitSize, currReg.getValue()));
        else
            return ioRegs.getValue(r.getName(), r.getType());
    }

    /** Error raised when the interpreter exceeds its configured run time. */
    public static class TimeupEvent extends Util.Error {
        private static final long serialVersionUID = 1L;
        public TimeupEvent() {
            super("Interpreter running timeup!");
        }
    }

    /** Sets one interrupt's mask bit and rebuilds the fireable list. */
    private void setIntMask(String intName, boolean val){
        BitDisp bit = intrBit.get(intName);
        intrRegs.get(bit.regName).setBit(bit.regBit, val);
        buildFireableList();
    }

    /**
     * Possibly fires one interrupt: picks a candidate, applies the random fire
     * policy, then CLI -> run handler synchronously on the interpreter -> STI,
     * logging enter/exit timings and recording the register snapshot in the
     * interrupt graph when the checker is tracing.
     */
    public void tryInterrupt(){
        if(!initialized)
            return;
        ProgramDecl.EntryPoint e = getIntFired();
        if(e != null){
            if(firePolicy()){
                CLI(e.getName());
                interruptCall.push(e);
                long enterTime = runTime();
                //System.out.println(String.format("Enter Interrupt %s (function name \"%s\"), at time %d ms", e.getName(), e.method.getName(), enterTime));
                System.out.println(String.format(" %d (%s, %d) ", enterTime - lastTime, e.getName(), enterTime));
                lastTime = enterTime;
                RegisterPackage pack = null;
                if(this.checker.isTracing()) {
                    pack = new RegisterPackage(e.getName(), intrRegs.values().toArray(new Register[0]), checker.getCurrentInput());
                    intrGraph.addConnection(pack, monitor.getCurrentMaxStack());
                }
                this.checker.enterInterrupt(pack);
                currInterp.invokeComponentMethod(e.method, null);
                this.checker.exitInterrupt();
                interruptCall.pop();
                // System.out.print(String.format("Exit Interrupt %s (function name \"%s\"), back to %s (function name \"%s\"), ",
                // e.getName(), e.method.getName(), interruptCall.peek().getName(), interruptCall.peek().method.getName()));
                long leavingTime = runTime();
                //System.out.println(String.format("at time %d ms, time consuming %d ms", runTime(), leavingTime - enterTime));
                System.out.println(String.format(" %d (%s, %d) ", leavingTime - lastTime, interruptCall.peek().getName(), leavingTime));
                lastTime = leavingTime;
                STI(e.getName());
            }
        }
    }

    /** Wires in the DART path checker and the external-variable register store. */
    public void initDartEnvi(PathChecker checker, ExternalVariable eVar) {
        this.checker = checker;
        this.ioRegs = eVar;
    }

    /** Wires in stack analysis and seeds the call stack with the given entry point. */
    public void initStackChecker(StackSizeMonitor monitor, InterruptGraph graph, String entryPoint) {
        this.intrGraph = graph;
        this.monitor = monitor;
        this.interruptCall = new Stack<ProgramDecl.EntryPoint>();
        this.interruptCall.add(ProgramDecl.lookupEntryPoint(entryPoint, currProg));
        System.out.println(String.format("(%s, %d) ", entryPoint, lastTime = runTime()));
    }

    // Return null to skip this time of interrupt pick up
    private String getFirableInt(){
        int guard;
        if(fireableInts.size() <= 0)
            return null;
        String fire = null;
        // uniform random pick among currently fireable interrupts
        guard= (int)(Math.random() * (fireableInts.size()));
        fire = fireableInts.get(guard);
        return fire;
    }

    //TODO: determine if the interrupt can fire or not
    // Currently fires with 10% probability per attempt.
    private boolean firePolicy(){
        if(Math.random() > 0.9){
            return true;
        }else{
            return false;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.query.h2.sql; import java.io.Serializable; import java.sql.Connection; import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.concurrent.Callable; import org.apache.ignite.Ignite; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.CacheKeyConfiguration; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.cache.QueryIndex; import org.apache.ignite.cache.QueryIndexType; import org.apache.ignite.cache.affinity.AffinityKeyMapped; import org.apache.ignite.cache.query.annotations.QuerySqlField; import org.apache.ignite.cache.query.annotations.QuerySqlFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.processors.cache.index.AbstractIndexingCommonTest; import org.apache.ignite.internal.processors.query.IgniteSQLException; import 
org.apache.ignite.internal.processors.query.QueryUtils; import org.apache.ignite.internal.processors.query.h2.H2PooledConnection; import org.apache.ignite.internal.processors.query.h2.H2Utils; import org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing; import org.apache.ignite.internal.processors.query.h2.opt.QueryContext; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.testframework.GridTestUtils; import org.h2.command.Prepared; import org.h2.engine.Session; import org.h2.message.DbException; import org.h2.table.Column; import org.h2.value.Value; import org.jetbrains.annotations.NotNull; import org.junit.Test; import static org.apache.ignite.cache.CacheRebalanceMode.SYNC; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; /** * */ public class GridQueryParsingTest extends AbstractIndexingCommonTest { /** */ private static Ignite ignite; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration c = super.getConfiguration(igniteInstanceName); c.setCacheConfiguration( cacheConfiguration(DEFAULT_CACHE_NAME, "SCH1", String.class, Person.class), cacheConfiguration("addr", "SCH2", String.class, Address.class), cacheConfiguration("aff", "SCH3", PersonKey.class, Person.class)); return c; } /** * @param name Cache name. * @param clsK Key class. * @param clsV Value class. * @return Cache configuration. 
*/ @SuppressWarnings("unchecked") private CacheConfiguration cacheConfiguration(@NotNull String name, String sqlSchema, Class<?> clsK, Class<?> clsV) { CacheConfiguration cc = defaultCacheConfiguration(); cc.setName(name); cc.setCacheMode(CacheMode.PARTITIONED); cc.setAtomicityMode(CacheAtomicityMode.ATOMIC); cc.setNearConfiguration(null); cc.setWriteSynchronizationMode(FULL_SYNC); cc.setRebalanceMode(SYNC); cc.setSqlSchema(sqlSchema); cc.setSqlFunctionClasses(GridQueryParsingTest.class); cc.setIndexedTypes(clsK, clsV); if (!QueryUtils.isSqlType(clsK)) cc.setKeyConfiguration(new CacheKeyConfiguration(clsK)); return cc; } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { super.beforeTestsStarted(); ignite = startGrid(); } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { ignite = null; } /** * @throws Exception If failed. */ @Test public void testParseSelectAndUnion() throws Exception { checkQuery("select 1 from Person p where addrIds in ((1,2,3), (3,4,5))"); checkQuery("select 1 from Person p where addrId in ((1,))"); checkQuery("select 1 from Person p " + "where p.addrId in (select a.id from sch2.Address a)"); checkQuery("select 1 from Person p " + "where exists(select 1 from sch2.Address a where p.addrId = a.id)"); checkQuery("select 42"); checkQuery("select ()"); checkQuery("select (1)"); checkQuery("select (1 + 1)"); checkQuery("select (1,)"); checkQuery("select (?)"); checkQuery("select (?,)"); checkQuery("select (1, 2)"); checkQuery("select (?, ? + 1, 2 + 2) as z"); checkQuery("select (1,(1,(1,(1,(1,?)))))"); checkQuery("select (select 1)"); checkQuery("select (select 1, select ?)"); checkQuery("select ((select 1), select ? 
+ ?)"); checkQuery("select CURRENT_DATE"); checkQuery("select CURRENT_DATE()"); checkQuery("select extract(year from ?)"); checkQuery("select convert(?, timestamp)"); checkQuery("select * from table(id bigint = 1)"); checkQuery("select * from table(id bigint = (1))"); checkQuery("select * from table(id bigint = (1,))"); checkQuery("select * from table(id bigint = (1,), name varchar = 'asd')"); checkQuery("select * from table(id bigint = (1,2), name varchar = 'asd')"); checkQuery("select * from table(id bigint = (1,2), name varchar = ('asd',))"); checkQuery("select * from table(id bigint = (1,2), name varchar = ?)"); checkQuery("select * from table(id bigint = (1,2), name varchar = (?,))"); checkQuery("select * from table(id bigint = ?, name varchar = ('abc', 'def', 100, ?)) t"); checkQuery("select ? limit ? offset ?"); checkQuery("select cool1()"); checkQuery("select cool1() z"); checkQuery("select b,a from table0('aaa', 100)"); checkQuery("select * from table0('aaa', 100)"); checkQuery("select * from table0('aaa', 100) t0"); checkQuery("select x.a, y.b from table0('aaa', 100) x natural join table0('bbb', 100) y"); checkQuery("select * from table0('aaa', 100) x join table0('bbb', 100) y on x.a=y.a and x.b = 1"); checkQuery("select * from table0('aaa', 100) x left join table0('bbb', 100) y on x.a=y.a and x.b = 1"); checkQuery("select * from table0('aaa', 100) x left join table0('bbb', 100) y on x.a=y.a where x.b = 1"); checkQuery("select * from table0('aaa', 100) x left join table0('bbb', 100) y where x.b = 1"); checkQuery("select avg(old) from Person left join sch2.Address on Person.addrId = Address.id " + "where lower(Address.street) = lower(?)"); checkQuery("select avg(old) from sch1.Person join sch2.Address on Person.addrId = Address.id " + "where lower(Address.street) = lower(?)"); checkQuery("select avg(old) from Person left join sch2.Address where Person.addrId = Address.id " + "and lower(Address.street) = lower(?)"); checkQuery("select avg(old) from Person 
right join sch2.Address where Person.addrId = Address.id " + "and lower(Address.street) = lower(?)"); checkQuery("select avg(old) from Person, sch2.Address where Person.addrId = Address.id " + "and lower(Address.street) = lower(?)"); checkQuery("select name, name, date, date d from Person"); checkQuery("select distinct name, date from Person"); checkQuery("select * from Person p"); checkQuery("select * from Person"); checkQuery("select distinct * from Person"); checkQuery("select p.name, date from Person p"); checkQuery("select p.name, date from Person p for update"); checkQuery("select * from Person p, sch2.Address a"); checkQuery("select * from Person, sch2.Address"); checkQuery("select p.* from Person p, sch2.Address a"); checkQuery("select person.* from Person, sch2.Address a"); checkQuery("select p.*, street from Person p, sch2.Address a"); checkQuery("select p.name, a.street from Person p, sch2.Address a"); checkQuery("select p.name, a.street from sch2.Address a, Person p"); checkQuery("select distinct p.name, a.street from Person p, sch2.Address a"); checkQuery("select distinct name, street from Person, sch2.Address group by old"); checkQuery("select distinct name, street from Person, sch2.Address"); checkQuery("select p1.name, a2.street from Person p1, sch2.Address a1, Person p2, sch2.Address a2"); checkQuery("select p.name n, a.street s from Person p, sch2.Address a"); checkQuery("select p.name, 1 as i, 'aaa' s from Person p"); checkQuery("select p.name + 'a', 1 * 3 as i, 'aaa' s, -p.old, -p.old as old from Person p"); checkQuery("select p.name || 'a' + p.name, (p.old * 3) % p.old - p.old / p.old, p.name = 'aaa', " + " p.name is p.name, p.old > 0, p.old >= 0, p.old < 0, p.old <= 0, p.old <> 0, p.old is not p.old, " + " p.old is null, p.old is not null " + " from Person p"); checkQuery("select p.name from Person p where name <> 'ivan'"); checkQuery("select p.name from Person p where name like 'i%'"); checkQuery("select p.name from Person p where name regexp 
'i%'"); checkQuery("select p.name from Person p, sch2.Address a " + "where p.name <> 'ivan' and a.id > 10 or not (a.id = 100)"); checkQuery("select case p.name when 'a' then 1 when 'a' then 2 end as a from Person p"); checkQuery("select case p.name when 'a' then 1 when 'a' then 2 else -1 end as a from Person p"); checkQuery("select abs(p.old) from Person p"); checkQuery("select cast(p.old as numeric(10, 2)) from Person p"); checkQuery("select cast(p.old as numeric(10, 2)) z from Person p"); checkQuery("select cast(p.old as numeric(10, 2)) as z from Person p"); checkQuery("select * from Person p where p.name in ('a', 'b', '_' + RAND())"); // test ConditionIn checkQuery("select * from Person p where p.name in ('a', 'b', 'c')"); // test ConditionInConstantSet // test ConditionInConstantSet checkQuery("select * from Person p where p.name in (select a.street from sch2.Address a)"); // test ConditionInConstantSet checkQuery("select (select a.street from sch2.Address a where a.id = p.addrId) from Person p"); checkQuery("select p.name, ? from Person p where name regexp ? 
and p.old < ?"); checkQuery("select count(*) as a from Person having a > 10"); checkQuery("select count(*) as a, count(p.*), count(p.name) from Person p"); checkQuery("select count(distinct p.name) from Person p"); checkQuery("select name, count(*) cnt from Person group by name order by cnt desc limit 10"); checkQuery("select p.name, avg(p.old), max(p.old) from Person p group by p.name"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p group by p.name"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p group by n"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p group by p.addrId, p.name"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p group by p.name, p.addrId"); checkQuery("select p.name n, max(p.old) + min(p.old) / count(distinct p.old) from Person p group by p.name"); checkQuery("select p.name n, max(p.old) maxOld, min(p.old) minOld from Person p " + "group by p.name having maxOld > 10 and min(p.old) < 1"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p group by p.name order by n"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p group by p.name order by p.name"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p group by p.name order by p.name, m"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p " + "group by p.name order by p.name, max(p.old) desc"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p " + "group by p.name order by p.name nulls first"); checkQuery("select p.name n, avg(p.old) a, max(p.old) m from Person p " + "group by p.name order by p.name nulls last"); checkQuery("select p.name n from Person p order by p.old + 10"); checkQuery("select p.name n from Person p order by p.old + 10, p.name"); checkQuery("select p.name n from Person p order by p.old + 10, p.name desc"); checkQuery("select p.name n from Person p, (select a.street from sch2.Address a " + 
"where a.street is not null) "); checkQuery("select street from Person p, (select a.street from sch2.Address a " + "where a.street is not null) "); checkQuery("select addr.street from Person p, (select a.street from sch2.Address a " + "where a.street is not null) addr"); checkQuery("select p.name n from sch1.Person p order by p.old + 10"); checkQuery("select case when p.name is null then 'Vasya' end x from sch1.Person p"); checkQuery("select case when p.name like 'V%' then 'Vasya' else 'Other' end x from sch1.Person p"); checkQuery("select case when upper(p.name) = 'VASYA' then 'Vasya' " + "when p.name is not null then p.name else 'Other' end x from sch1.Person p"); checkQuery("select case p.name when 'Vasya' then 1 end z from sch1.Person p"); checkQuery("select case p.name when 'Vasya' then 1 when 'Petya' then 2 end z from sch1.Person p"); checkQuery("select case p.name when 'Vasya' then 1 when 'Petya' then 2 else 3 end z from sch1.Person p"); checkQuery("select case p.name when 'Vasya' then 1 else 3 end z from sch1.Person p"); checkQuery("select count(*) as a from Person union select count(*) as a from sch2.Address"); checkQuery("select old, count(*) as a from Person group by old union select 1, count(*) as a " + "from sch2.Address"); checkQuery("select name from Person MINUS select street from sch2.Address"); checkQuery("select name from Person EXCEPT select street from sch2.Address"); checkQuery("select name from Person INTERSECT select street from sch2.Address"); checkQuery("select name from Person UNION select street from sch2.Address limit 5"); checkQuery("select name from Person UNION select street from sch2.Address limit ?"); checkQuery("select name from Person UNION select street from sch2.Address limit ? offset ?"); checkQuery("(select name from Person limit 4) " + "UNION (select street from sch2.Address limit 1) limit ? 
offset ?"); checkQuery("(select 2 a) union all (select 1) order by 1"); checkQuery("(select 2 a) union all (select 1) order by a desc nulls first limit ? offset ?"); checkQuery("select public.\"#\".\"@\" from (select 1 as \"@\") \"#\""); // checkQuery("select sch.\"#\".\"@\" from (select 1 as \"@\") \"#\""); // Illegal query. checkQuery("select \"#\".\"@\" from (select 1 as \"@\") \"#\""); checkQuery("select \"@\" from (select 1 as \"@\") \"#\""); checkQuery("select sch1.\"#\".old from sch1.Person \"#\""); checkQuery("select sch1.\"#\".old from Person \"#\""); checkQuery("select \"#\".old from Person \"#\""); checkQuery("select old from Person \"#\""); // checkQuery("select Person.old from Person \"#\""); // Illegal query. checkQuery("select \"#\".* from Person \"#\""); } /** * @throws Exception If failed. */ @Test public void testUseIndexHints() throws Exception { checkQuery("select * from Person use index (\"PERSON_NAME_IDX\")"); checkQuery("select * from Person use index (\"PERSON_PARENTNAME_IDX\")"); checkQuery("select * from Person use index (\"PERSON_NAME_IDX\", \"PERSON_PARENTNAME_IDX\")"); checkQuery("select * from Person use index ()"); checkQuery("select * from Person p use index (\"PERSON_NAME_IDX\")"); checkQuery("select * from Person p use index (\"PERSON_PARENTNAME_IDX\")"); checkQuery("select * from Person p use index (\"PERSON_NAME_IDX\", \"PERSON_PARENTNAME_IDX\")"); checkQuery("select * from Person p use index ()"); } /** * Query AST transformation heavily depends on this behavior. * * @throws Exception If failed. 
*/ @Test public void testParseTableFilter() throws Exception { Prepared prepared = parse("select Person.old, p1.old, p1.addrId from Person, Person p1 " + "where exists(select 1 from sch2.Address a where a.id = p1.addrId)"); GridSqlSelect select = (GridSqlSelect)new GridSqlQueryParser(false, log).parse(prepared); GridSqlJoin join = (GridSqlJoin)select.from(); GridSqlTable tbl1 = (GridSqlTable)join.leftTable(); GridSqlAlias tbl2Alias = (GridSqlAlias)join.rightTable(); GridSqlTable tbl2 = tbl2Alias.child(); // Must be distinct objects, even if it is the same table. assertNotSame(tbl1, tbl2); assertNotNull(tbl1.dataTable()); assertNotNull(tbl2.dataTable()); assertSame(tbl1.dataTable(), tbl2.dataTable()); GridSqlColumn col1 = (GridSqlColumn)select.column(0); GridSqlColumn col2 = (GridSqlColumn)select.column(1); assertSame(tbl1, col1.expressionInFrom()); // Alias in FROM must be included in column. assertSame(tbl2Alias, col2.expressionInFrom()); // In EXISTS we must correctly reference the column from the outer query. GridSqlAst exists = select.where(); GridSqlSubquery subqry = exists.child(); GridSqlSelect subSelect = subqry.child(); GridSqlColumn p1AddrIdCol = (GridSqlColumn)select.column(2); assertEquals("ADDRID", p1AddrIdCol.column().getName()); assertSame(tbl2Alias, p1AddrIdCol.expressionInFrom()); GridSqlColumn p1AddrIdColExists = subSelect.where().child(1); assertEquals("ADDRID", p1AddrIdCol.column().getName()); assertSame(tbl2Alias, p1AddrIdColExists.expressionInFrom()); } /** */ @Test public void testParseInsert() throws Exception { /* Plain rows w/functions, operators, defaults, and placeholders. 
*/ checkQuery("insert into Person(old, name) values(5, 'John')"); checkQuery("insert into Person(name) values(null)"); checkQuery("insert into Person() values()"); checkQuery("insert into Person(name) values(null), (null)"); checkQuery("insert into Person(name) values(null),"); checkQuery("insert into Person(name, parentName) values(null, null), (?, ?)"); checkQuery("insert into Person(old, name) values(5, 'John',), (6, 'Jack')"); checkQuery("insert into Person(old, name) values(5 * 3, null,)"); checkQuery("insert into Person(old, name) values(ABS(-8), 'Max')"); checkQuery("insert into Person(old, name) values(5, 'Jane'), (null, null), (6, 'Jill')"); checkQuery("insert into Person(old, name, parentName) values(8 * 7, null, 'Unknown')"); checkQuery("insert into Person(old, name, parentName) values" + "(2016 - 1828, CONCAT('Leo', 'Tolstoy'), CONCAT(?, 'Tolstoy'))," + "(?, 'AlexanderPushkin', null)," + "(ABS(1821 - 2016), CONCAT('Fyodor', null, UPPER(CONCAT(SQRT(?), 'dostoevsky'))), null),"); checkQuery("insert into Person(date, old, name, parentName, addrId) values " + "('20160112', 1233, 'Ivan Ivanov', 'Peter Ivanov', 123)"); checkQuery("insert into Person(date, old, name, parentName, addrId) values " + "(CURRENT_DATE(), RAND(), ASCII('Hi'), INSERT('Leo Tolstoy', 4, 4, 'Max'), ASCII('HI'))"); checkQuery("insert into Person(date, old, name, parentName, addrId) values " + "(TRUNCATE(TIMESTAMP '2015-12-31 23:59:59'), POWER(3,12), NULL, NULL, NULL)"); checkQuery("insert into Person SET old = 5, name = 'John'"); checkQuery("insert into Person SET name = CONCAT('Fyodor', null, UPPER(CONCAT(SQRT(?), 'dostoevsky'))), " + "old = select (5, 6)"); checkQuery("insert into Person(old, name) select ASCII(parentName), INSERT(parentName, 4, 4, 'Max') from " + "Person where date='2011-03-12'"); /* Subqueries. 
*/ checkQuery("insert into Person(old, name) select old, parentName from Person"); checkQuery("insert into Person(old, name) direct sorted select old, parentName from Person"); checkQuery("insert into Person(old, name) sorted select old, parentName from Person where old > 5"); checkQuery("insert into Person(old, name) select 5, 'John'"); checkQuery("insert into Person(old, name) select p1.old, 'Name' from person p1 join person p2 on " + "p2.name = p1.parentName where p2.old > 30"); checkQuery("insert into Person(old) select 5 from Person UNION select street from sch2.Address limit ? " + "offset ?"); } /** */ @Test public void testParseDelete() throws Exception { checkQuery("delete from Person"); checkQuery("delete from Person p where p.old > ?"); checkQuery("delete from Person where old in (select (40, 41, 42))"); checkQuery("delete top 5 from Person where old in (select (40, 41, 42))"); checkQuery("delete top ? from Person where old > 5 and length(name) < ?"); checkQuery("delete from Person where name in ('Ivan', 'Peter') limit 20"); checkQuery("delete from Person where name in ('Ivan', ?) limit ?"); } /** */ @Test public void testParseUpdate() throws Exception { checkQuery("update Person set name='Peter'"); checkQuery("update Person per set name='Peter', old = 5"); checkQuery("update Person p set name='Peter' limit 20"); checkQuery("update Person p set name='Peter', old = length('zzz') limit 20"); checkQuery("update Person p set name=? where old >= ? and old < ? 
limit ?"); checkQuery("update Person p set name=(select a.Street from sch2.Address a where a.id=p.addrId), old = " + "(select 42) where old = sqrt(?)"); checkQuery("update Person p set (name, old) = (select 'Peter', 42)"); checkQuery("update Person p set (name, old) = (select street, id from sch2.Address where id > 5 and id <= ?)"); } /** * */ @Test public void testParseCreateIndex() throws Exception { assertCreateIndexEquals( buildCreateIndex(null, "Person", "sch1", false, QueryIndexType.SORTED, QueryIndex.DFLT_INLINE_SIZE,"name", true), "create index on Person (name)"); assertCreateIndexEquals( buildCreateIndex("idx", "Person", "sch1", false, QueryIndexType.SORTED, QueryIndex.DFLT_INLINE_SIZE, "name", true), "create index idx on Person (name ASC)"); assertCreateIndexEquals( buildCreateIndex("idx", "Person", "sch1", false, QueryIndexType.GEOSPATIAL, QueryIndex.DFLT_INLINE_SIZE, "name", true), "create spatial index sch1.idx on sch1.Person (name ASC)"); assertCreateIndexEquals( buildCreateIndex("idx", "Person", "sch1", true, QueryIndexType.SORTED, QueryIndex.DFLT_INLINE_SIZE, "name", true), "create index if not exists sch1.idx on sch1.Person (name)"); // When we specify schema for the table and don't specify it for the index, resulting schema is table's assertCreateIndexEquals( buildCreateIndex("idx", "Person", "sch1", true, QueryIndexType.SORTED, QueryIndex.DFLT_INLINE_SIZE,"name", false), "create index if not exists idx on sch1.Person (name dEsC)"); assertCreateIndexEquals( buildCreateIndex("idx", "Person", "sch1", true, QueryIndexType.GEOSPATIAL, QueryIndex.DFLT_INLINE_SIZE, "old", true, "name", false), "create spatial index if not exists idx on Person (old, name desc)"); // Schemas for index and table must match assertParseThrows("create index if not exists sch2.idx on sch1.Person (name)", DbException.class, "Schema name must match"); assertParseThrows("create hash index if not exists idx on Person (name)", IgniteSQLException.class, "Only SPATIAL modifier is 
supported for CREATE INDEX"); assertParseThrows("create unique index if not exists idx on Person (name)", IgniteSQLException.class, "Only SPATIAL modifier is supported for CREATE INDEX"); assertParseThrows("create primary key on Person (name)", IgniteSQLException.class, "Only SPATIAL modifier is supported for CREATE INDEX"); assertParseThrows("create primary key hash on Person (name)", IgniteSQLException.class, "Only SPATIAL modifier is supported for CREATE INDEX"); assertParseThrows("create index on Person (name nulls first)", IgniteSQLException.class, "NULLS FIRST and NULLS LAST modifiers are not supported for index columns"); assertParseThrows("create index on Person (name desc nulls last)", IgniteSQLException.class, "NULLS FIRST and NULLS LAST modifiers are not supported for index columns"); } /** * */ @Test public void testParseDropIndex() throws Exception { // Schema that is not set defaults to default schema of connection which is sch1 assertDropIndexEquals(buildDropIndex("idx", "sch1", false), "drop index idx"); assertDropIndexEquals(buildDropIndex("idx", "sch1", true), "drop index if exists idx"); assertDropIndexEquals(buildDropIndex("idx", "sch1", true), "drop index if exists sch1.idx"); assertDropIndexEquals(buildDropIndex("idx", "sch1", false), "drop index sch1.idx"); // Message is null as long as it may differ from system to system, so we just check for exceptions assertParseThrows("drop index schema2.", DbException.class, null); assertParseThrows("drop index", DbException.class, null); assertParseThrows("drop index if exists", DbException.class, null); assertParseThrows("drop index if exists schema2.", DbException.class, null); } /** * */ @Test public void testParseDropTable() throws Exception { // Schema that is not set defaults to default schema of connection which is sch1 assertDropTableEquals(buildDropTable("sch1", "tbl", false), "drop table tbl"); assertDropTableEquals(buildDropTable("sch1", "tbl", true), "drop table if exists tbl"); 
assertDropTableEquals(buildDropTable("sch1", "tbl", true), "drop table if exists sch1.tbl"); assertDropTableEquals(buildDropTable("sch1", "tbl", false), "drop table sch1.tbl"); // Message is null as long as it may differ from system to system, so we just check for exceptions assertParseThrows("drop table schema2.", DbException.class, null); assertParseThrows("drop table", DbException.class, null); assertParseThrows("drop table if exists", DbException.class, null); assertParseThrows("drop table if exists schema2.", DbException.class, null); } /** */ @Test public void testParseCreateTable() throws Exception { assertCreateTableEquals( buildCreateTable("sch1", "Person", "cache", F.asList("id", "city"), true, c("id", Value.INT), c("city", Value.STRING), c("name", Value.STRING), c("surname", Value.STRING), c("age", Value.INT)), "CREATE TABLE IF NOT EXISTS sch1.\"Person\" (\"id\" integer, \"city\" varchar," + " \"name\" varchar, \"surname\" varchar, \"age\" integer, PRIMARY KEY (\"id\", \"city\")) WITH " + "\"template=cache\""); assertCreateTableEquals( buildCreateTable("sch1", "Person", "cache", F.asList("id"), false, c("id", Value.INT), c("city", Value.STRING), c("name", Value.STRING), c("surname", Value.STRING), cn("age", Value.INT)), "CREATE TABLE sch1.\"Person\" (\"id\" integer PRIMARY KEY, \"city\" varchar," + " \"name\" varchar, \"surname\" varchar, \"age\" integer NOT NULL) WITH " + "\"template=cache\""); assertParseThrows("create table Person (id int)", IgniteSQLException.class, "No PRIMARY KEY defined for CREATE TABLE"); assertParseThrows("create table Person (id int) AS SELECT 2 * 2", IgniteSQLException.class, "CREATE TABLE ... AS ... 
syntax is not supported"); assertParseThrows("create table Person (id int primary key)", IgniteSQLException.class, "Table must have at least one non PRIMARY KEY column."); assertParseThrows("create table Person (id int primary key, age int unique) WITH \"template=cache\"", IgniteSQLException.class, "Too many constraints - only PRIMARY KEY is supported for CREATE TABLE"); assertParseThrows("create table Person (id int auto_increment primary key, age int) WITH \"template=cache\"", IgniteSQLException.class, "AUTO_INCREMENT columns are not supported"); assertParseThrows("create table Person (id int primary key check id > 0, age int) WITH \"template=cache\"", IgniteSQLException.class, "Column CHECK constraints are not supported [colName=ID]"); assertParseThrows("create table Person (id int as age * 2 primary key, age int) WITH \"template=cache\"", IgniteSQLException.class, "Computed columns are not supported [colName=ID]"); assertParseThrows("create table Int (_key int primary key, _val int) WITH \"template=cache\"", IgniteSQLException.class, "Direct specification of _KEY and _VAL columns is forbidden"); assertParseThrows("create table Person (" + "unquoted_id LONG, " + "\"quoted_id\" LONG, " + "PERSON_NAME VARCHAR(255), " + "PRIMARY KEY (UNQUOTED_ID, quoted_id)) " + "WITH \"template=cache\"", IgniteSQLException.class, "PRIMARY KEY column is not defined: QUOTED_ID"); assertParseThrows("create table Person (" + "unquoted_id LONG, " + "\"quoted_id\" LONG, " + "PERSON_NAME VARCHAR(255), " + "PRIMARY KEY (\"unquoted_id\", \"quoted_id\")) " + "WITH \"template=cache\"", IgniteSQLException.class, "PRIMARY KEY column is not defined: unquoted_id"); } /** */ @Test public void testParseCreateTableWithDefaults() { assertParseThrows("create table Person (id int primary key, age int, " + "ts TIMESTAMP default CURRENT_TIMESTAMP()) WITH \"template=cache\"", IgniteSQLException.class, "Non-constant DEFAULT expressions are not supported [colName=TS]"); assertParseThrows("create table 
Person (id int primary key, age int default 'test') " + "WITH \"template=cache\"", IgniteSQLException.class, "Invalid default value for column. " + "[colName=AGE, colType=INTEGER, dfltValueType=VARCHAR]"); assertParseThrows("create table Person (id int primary key, name varchar default 1) " + "WITH \"template=cache\"", IgniteSQLException.class, "Invalid default value for column. " + "[colName=NAME, colType=VARCHAR, dfltValueType=INTEGER]"); } /** */ @Test public void testParseAlterTableAddColumn() throws Exception { assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "Person", false, false, c("COMPANY", Value.STRING)), "ALTER TABLE SCH2.Person ADD company varchar"); assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "Person", true, true, c("COMPANY", Value.STRING)), "ALTER TABLE IF EXISTS SCH2.Person ADD if not exists company varchar"); assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "Person", false, true, c("COMPANY", Value.STRING), c("city", Value.STRING)), "ALTER TABLE IF EXISTS SCH2.Person ADD (company varchar, \"city\" varchar)"); assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "City", false, true, c("POPULATION", Value.INT)), "ALTER TABLE IF EXISTS SCH2.\"City\" ADD (population int)"); assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "City", false, true, cn("POPULATION", Value.INT)), "ALTER TABLE IF EXISTS SCH2.\"City\" ADD (population int NOT NULL)"); // There's no table with such name, but H2 parsing does not fail just yet. assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "City", false, false, c("POPULATION", Value.INT)), "ALTER TABLE SCH2.\"City\" ADD (population int)"); assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "Person", true, false, c("NAME", Value.STRING)), "ALTER TABLE SCH2.Person ADD if not exists name varchar"); // There's a column with such name, but H2 parsing does not fail just yet. 
assertAlterTableAddColumnEquals(buildAlterTableAddColumn("SCH2", "Person", false, false, c("NAME", Value.STRING)), "ALTER TABLE SCH2.Person ADD name varchar"); // IF NOT EXISTS with multiple columns. assertParseThrows("ALTER TABLE IF EXISTS SCH2.Person ADD if not exists (company varchar, city varchar)", DbException.class, null); // Both BEFORE keyword. assertParseThrows("ALTER TABLE IF EXISTS SCH2.Person ADD if not exists company varchar before addrid", IgniteSQLException.class, "BEFORE keyword is not supported"); // Both AFTER keyword. assertParseThrows("ALTER TABLE IF EXISTS SCH2.Person ADD if not exists company varchar after addrid", IgniteSQLException.class, "AFTER keyword is not supported"); assertParseThrows("ALTER TABLE IF EXISTS SCH2.Person ADD if not exists company varchar first", IgniteSQLException.class, "FIRST keyword is not supported"); // No such schema. assertParseThrows("ALTER TABLE SCH5.\"Person\" ADD (city varchar)", DbException.class, null); } /** * @param sql Statement. * @param exCls Exception class. * @param msg Expected message. */ private void assertParseThrows(final String sql, Class<? extends Exception> exCls, String msg) { GridTestUtils.assertThrows(null, new Callable<Object>() { @Override public Object call() throws Exception { Prepared p = parse(sql); return new GridSqlQueryParser(false, log).parse(p); } }, exCls, msg); } /** * Parse SQL and compare it to expected instance. */ private void assertCreateIndexEquals(GridSqlCreateIndex exp, String sql) throws Exception { Prepared prepared = parse(sql); GridSqlStatement stmt = new GridSqlQueryParser(false, log).parse(prepared); assertTrue(stmt instanceof GridSqlCreateIndex); assertCreateIndexEquals(exp, (GridSqlCreateIndex) stmt); } /** * Parse SQL and compare it to expected instance of DROP INDEX. 
*/
    private void assertDropIndexEquals(GridSqlDropIndex exp, String sql) throws Exception {
        Prepared prepared = parse(sql);

        GridSqlStatement stmt = new GridSqlQueryParser(false, log).parse(prepared);

        assertTrue(stmt instanceof GridSqlDropIndex);

        assertDropIndexEquals(exp, (GridSqlDropIndex) stmt);
    }

    /**
     * Test two instances of {@link GridSqlDropIndex} for equality.
     */
    private static void assertDropIndexEquals(GridSqlDropIndex exp, GridSqlDropIndex actual) {
        assertEqualsIgnoreCase(exp.indexName(), actual.indexName());
        assertEqualsIgnoreCase(exp.schemaName(), actual.schemaName());
        assertEquals(exp.ifExists(), actual.ifExists());
    }

    /**
     * Builds the expected {@link GridSqlDropIndex} AST node for comparison against parser output.
     */
    private static GridSqlDropIndex buildDropIndex(String name, String schema, boolean ifExists) {
        GridSqlDropIndex res = new GridSqlDropIndex();

        res.indexName(name);
        res.schemaName(schema);
        res.ifExists(ifExists);

        return res;
    }

    /**
     * Parse SQL and compare it to expected instance of CREATE TABLE.
     */
    private void assertCreateTableEquals(GridSqlCreateTable exp, String sql) throws Exception {
        Prepared prepared = parse(sql);

        GridSqlStatement stmt = new GridSqlQueryParser(false, log).parse(prepared);

        assertTrue(stmt instanceof GridSqlCreateTable);

        assertCreateTableEquals(exp, (GridSqlCreateTable) stmt);
    }

    /**
     * Test two instances of {@link GridSqlCreateTable} for equality.
     */
    private static void assertCreateTableEquals(GridSqlCreateTable exp, GridSqlCreateTable actual) {
        assertEqualsIgnoreCase(exp.schemaName(), actual.schemaName());
        assertEqualsIgnoreCase(exp.tableName(), actual.tableName());

        assertEquals(exp.templateName(), actual.templateName());
        assertEquals(exp.primaryKeyColumns(), actual.primaryKeyColumns());

        // Column declaration order matters for CREATE TABLE, so compare key lists, not key sets.
        assertEquals(new ArrayList<>(exp.columns().keySet()), new ArrayList<>(actual.columns().keySet()));

        for (Map.Entry<String, GridSqlColumn> col : exp.columns().entrySet()) {
            GridSqlColumn val = actual.columns().get(col.getKey());

            assertNotNull(val);

            assertEquals(col.getValue().columnName(), val.columnName());
            assertEquals(col.getValue().column().getType(), val.column().getType());
        }

        assertEquals(exp.ifNotExists(), actual.ifNotExists());
    }

    /**
     * Builds the expected {@link GridSqlCreateTable} AST node for comparison against parser output.
     */
    private static GridSqlCreateTable buildCreateTable(String schema, String tbl, String tplCacheName,
        Collection<String> pkColNames, boolean ifNotExists, GridSqlColumn... cols) {
        GridSqlCreateTable res = new GridSqlCreateTable();

        res.schemaName(schema);
        res.tableName(tbl);
        res.templateName(tplCacheName);
        res.primaryKeyColumns(new LinkedHashSet<>(pkColNames));

        // LinkedHashMap preserves the declared column order checked by the comparer above.
        LinkedHashMap<String, GridSqlColumn> m = new LinkedHashMap<>();

        for (GridSqlColumn col : cols)
            m.put(col.columnName(), col);

        res.columns(m);
        res.ifNotExists(ifNotExists);

        return res;
    }

    /**
     * Parse SQL and compare it to expected instance of ALTER TABLE.
     */
    private void assertAlterTableAddColumnEquals(GridSqlAlterTableAddColumn exp, String sql) throws Exception {
        Prepared prepared = parse(sql);

        GridSqlStatement stmt = new GridSqlQueryParser(false, log).parse(prepared);

        assertTrue(stmt instanceof GridSqlAlterTableAddColumn);

        assertAlterTableAddColumnEquals(exp, (GridSqlAlterTableAddColumn)stmt);
    }

    /**
     * Builds the expected {@link GridSqlAlterTableAddColumn} AST node for comparison against parser output.
     */
    private static GridSqlAlterTableAddColumn buildAlterTableAddColumn(String schema, String tbl,
        boolean ifNotExists, boolean ifTblExists, GridSqlColumn...
cols) {
        GridSqlAlterTableAddColumn res = new GridSqlAlterTableAddColumn();

        res.schemaName(schema);
        res.tableName(tbl);
        res.ifNotExists(ifNotExists);
        res.ifTableExists(ifTblExists);
        res.columns(cols);

        return res;
    }

    /**
     * Test two instances of {@link GridSqlAlterTableAddColumn} for equality.
     */
    private static void assertAlterTableAddColumnEquals(GridSqlAlterTableAddColumn exp,
        GridSqlAlterTableAddColumn actual) {
        assertEqualsIgnoreCase(exp.schemaName(), actual.schemaName());
        assertEqualsIgnoreCase(exp.tableName(), actual.tableName());

        assertEquals(exp.columns().length, actual.columns().length);

        // Added columns are compared pairwise in declaration order.
        for (int i = 0; i < exp.columns().length; i++) {
            GridSqlColumn expCol = exp.columns()[i];
            GridSqlColumn col = actual.columns()[i];

            assertEquals(expCol.columnName(), col.columnName());
            assertEquals(expCol.column().getType(), col.column().getType());
        }

        assertEquals(exp.ifNotExists(), actual.ifNotExists());
        assertEquals(exp.ifTableExists(), actual.ifTableExists());
    }

    /**
     * @param name Column name.
     * @param type Column data type.
     * @return {@link GridSqlColumn} with given name and type.
     */
    private static GridSqlColumn c(String name, int type) {
        return new GridSqlColumn(new Column(name, type), null, name);
    }

    /**
     * Constructs non-nullable column.
     *
     * @param name Column name.
     * @param type Column data type.
     * @return {@link GridSqlColumn} with given name and type.
     */
    private static GridSqlColumn cn(String name, int type) {
        Column col = new Column(name, type);

        col.setNullable(false);

        return new GridSqlColumn(col, null, name);
    }

    /**
     * Parse SQL and compare it to expected instance of DROP TABLE.
     */
    private void assertDropTableEquals(GridSqlDropTable exp, String sql) throws Exception {
        Prepared prepared = parse(sql);

        GridSqlStatement stmt = new GridSqlQueryParser(false, log).parse(prepared);

        assertTrue(stmt instanceof GridSqlDropTable);

        assertDropTableEquals(exp, (GridSqlDropTable) stmt);
    }

    /**
     * Test two instances of {@link GridSqlDropTable} for equality.
     */
    private static void assertDropTableEquals(GridSqlDropTable exp, GridSqlDropTable actual) {
        assertEqualsIgnoreCase(exp.schemaName(), actual.schemaName());
        assertEqualsIgnoreCase(exp.tableName(), actual.tableName());
        assertEquals(exp.ifExists(), actual.ifExists());
    }

    /**
     * Builds the expected {@link GridSqlDropTable} AST node for comparison against parser output.
     */
    private static GridSqlDropTable buildDropTable(String schema, String tbl, boolean ifExists) {
        GridSqlDropTable res = new GridSqlDropTable();

        res.schemaName(schema);
        res.tableName(tbl);
        res.ifExists(ifExists);

        return res;
    }

    /**
     * Test two instances of {@link GridSqlCreateIndex} for equality.
     */
    private static void assertCreateIndexEquals(GridSqlCreateIndex exp, GridSqlCreateIndex actual) {
        assertEquals(exp.ifNotExists(), actual.ifNotExists());
        assertEqualsIgnoreCase(exp.schemaName(), actual.schemaName());
        assertEqualsIgnoreCase(exp.tableName(), actual.tableName());
        assertEqualsIgnoreCase(exp.index().getName(), actual.index().getName());

        // Index fields form an ordered map (field name -> sort-order flag, see buildCreateIndex);
        // walk both iterators in lockstep so order mismatches fail too.
        Iterator<Map.Entry<String, Boolean>> expFldsIt = exp.index().getFields().entrySet().iterator();
        Iterator<Map.Entry<String, Boolean>> actualFldsIt = actual.index().getFields().entrySet().iterator();

        while (expFldsIt.hasNext()) {
            assertTrue(actualFldsIt.hasNext());

            Map.Entry<String, Boolean> expEntry = expFldsIt.next();
            Map.Entry<String, Boolean> actualEntry = actualFldsIt.next();

            assertEqualsIgnoreCase(expEntry.getKey(), actualEntry.getKey());
            assertEquals(expEntry.getValue(), actualEntry.getValue());
        }

        // Actual index must not carry extra fields beyond the expected ones.
        assertFalse(actualFldsIt.hasNext());

        assertEquals(exp.index().getIndexType(), actual.index().getIndexType());
    }

    /**
     * Case-insensitive string equality; two {@code null}s are considered equal.
     */
    private static void assertEqualsIgnoreCase(String exp, String actual) {
        assertEquals((exp == null), (actual == null));

        if (exp != null)
            assertTrue(exp.equalsIgnoreCase(actual));
    }

    /**
     * Builds the expected {@link GridSqlCreateIndex} AST node for comparison against parser output.
     */
    private static GridSqlCreateIndex buildCreateIndex(String name, String tblName, String schemaName,
        boolean ifNotExists, QueryIndexType type, int inlineSize, Object...
flds) { QueryIndex idx = new QueryIndex(); idx.setName(name); assert !F.isEmpty(flds) && flds.length % 2 == 0; LinkedHashMap<String, Boolean> trueFlds = new LinkedHashMap<>(); for (int i = 0; i < flds.length / 2; i++) trueFlds.put((String)flds[i * 2], (Boolean)flds[i * 2 + 1]); idx.setFields(trueFlds); idx.setIndexType(type); idx.setInlineSize(inlineSize); GridSqlCreateIndex res = new GridSqlCreateIndex(); res.schemaName(schemaName); res.tableName(tblName); res.ifNotExists(ifNotExists); res.index(idx); return res; } /** * */ private H2PooledConnection connection() throws Exception { IgniteH2Indexing idx = (IgniteH2Indexing)((IgniteEx)ignite).context().query().getIndexing(); return idx.connections().connection(idx.schema(DEFAULT_CACHE_NAME)); } /** * @param sql Sql. */ @SuppressWarnings("unchecked") private <T extends Prepared> T parse(String sql) throws Exception { try (H2PooledConnection conn = connection()) { Session ses = H2Utils.session(conn); H2Utils.setupConnection(conn, QueryContext.parseContext(null, true), false, false, false); return (T)ses.prepare(sql); } } /** * @param exp Sql 1. * @param actual Sql 2. */ private void assertSqlEquals(String exp, String actual) { String nsql1 = normalizeSql(exp); String nsql2 = normalizeSql(actual); assertEquals(nsql1, nsql2); } /** * @param sql Sql. */ private static String normalizeSql(String sql) { return sql.toLowerCase() .replaceAll("/\\*(?:.|\r|\n)*?\\*/", " ") .replaceAll("\\s*on\\s+1\\s*=\\s*1\\s*", " on true ") .replaceAll("\\s+", " ") .replaceAll("\\( +", "(") .replaceAll(" +\\)", ")") .trim(); } /** * @param qry Query. 
*/ private void checkQuery(String qry) throws Exception { Prepared prepared = parse(qry); GridSqlStatement gQry = new GridSqlQueryParser(false, log).parse(prepared); String res = gQry.getSQL(); System.out.println(normalizeSql(res)); assertSqlEquals(U.firstNotNull(prepared.getPlanSQL(), prepared.getSQL()), res); } @QuerySqlFunction public static int cool1() { return 1; } @QuerySqlFunction public static ResultSet table0(Connection c, String a, int b) throws SQLException { return c.createStatement().executeQuery("select '" + a + "' as a, " + b + " as b"); } /** * */ public static class PersonKey implements Serializable { /** */ @QuerySqlField @AffinityKeyMapped public int id; /** Should not be allowed in KEY clause of MERGE. */ @QuerySqlField public String stuff; } /** * */ public static class Person implements Serializable { @QuerySqlField(index = true) public Date date = new Date(System.currentTimeMillis()); @QuerySqlField(index = true) public String name = "Ivan"; @QuerySqlField(index = true) public String parentName; @QuerySqlField(index = true) public int addrId; @QuerySqlField public Integer[] addrIds; @QuerySqlField(index = true) public int old; } /** * */ public static class Address implements Serializable { @QuerySqlField(index = true) public int id; @QuerySqlField(index = true) public int streetNumber; @QuerySqlField(index = true) public String street = "Nevskiy"; } }
package grundkurs;

import grundkurs.tools.IOTools;

import java.util.Arrays;
import java.util.Scanner;

/**
 * Exercise collection on non-primitive types: arrays (one- and
 * multi-dimensional), loop idioms, and a simple data class.
 * Individual exercises are toggled from {@link #main} via the commented calls.
 *
 * Fixes in this revision: short-circuit {@code ||} instead of bitwise
 * {@code |} in input validation; {@code multiDimensional()} no longer
 * crashes with ArrayIndexOutOfBoundsException for day 31 (the array has
 * valid indices 0..30) and now validates the day before printing as well;
 * {@code multiply()}'s javadoc moved out of the method body.
 */
public class NonPrimitives {

    /** Plain data holder for exercise 5.6; all fields are default-initialized. */
    public static class Address { // 5.6
        public String name;
        public String street;
        public String city;
        public String mail;
        public String comment;
        public int streetNo;
        public int zipcode;
    }

    public static void main(String[] args) {
        // Integer a = 10, b = 20;
        // System.out.println(a * b);
        // createFields();
        // swap();
        // calendar();
        // multiDimensional();
        // fixedLength();
        // loopComparison();
        // derp();
        // alternates();
        // sortNumbers();
        // magicSquare(2);
        Address a = new Address();
        System.out.println(a.name);     // null: reference fields default to null
        System.out.println(a.streetNo); // 0: numeric fields default to 0
    }

    /**
     * Prints an n-by-n square built with a siamese-style construction
     * (exercise 5.5). Input is clamped to the range [3, 10].
     * NOTE(review): the siamese method only yields a magic square for odd n;
     * even n in range produces a non-magic square — confirm intent.
     *
     * @param n desired square size (clamped to 3..10).
     */
    public static void magicSquare(int n) { // 5.5
        if (n < 3) {
            System.out.println("Correcting input to 3.");
            n = 3;
        } else if (n > 10) {
            System.out.println("Correcting input to 10.");
            n = 10;
        }
        int col = n / 2 + 1;
        int row = n / 2;
        int[][] square = new int[n][n];
        for (int i = 1; i <= n * n; i++) {
            square[row][col] = i;
            // Default move: one right, one up, wrapping at the edges.
            col += 1;
            row -= 1;
            if (row < 0)
                row = n - 1;
            if (col == n)
                col = 0;
            if (square[row][col] != 0) {
                // Cell occupied: take the alternate step instead.
                col += 1;
                row += 1;
                if (row == n)
                    row = 0;
                if (col == n)
                    col = 0;
            }
        }
        for (int i = 0; i < n; i++)
            for (int j = 0; j < n; j++) {
                System.out.print(square[i][j] + "\t");
                if (j == n - 1)
                    System.out.println("");
            }
    }

    /** Reads five integers, prints them, sorts ascending, prints again (5.4). */
    public static void sortNumbers() { // 5.4
        Scanner in = new Scanner(System.in);
        int[] list = new int[5];
        for (int i = 0; i < list.length; i++) {
            System.out.println("Enter an integer: ");
            list[i] = in.nextInt();
        }
        for (int i : list)
            System.out.print(i + " ");
        System.out.println("");
        Arrays.sort(list);
        for (int i : list)
            System.out.print(i + " ");
        System.out.println("");
    }

    /**
     * Demonstrates array declaration/initialization variants and the
     * difference between ==, Arrays.equals and Arrays.deepEquals (5.2).
     */
    public static void alternates() { // 5.2
        double[] a, c;
        a = new double[5];
        double[] b = { 1.1, 2.2, 3.3, 4.4, 5.5 };
        c = new double[] { 1.1, 2.2, 3.3, 4.4, 5.5 };
        int[][][][] Feld1 = new int[6][10][8][];
        int[][][][] Feld2 = new int[6][][][];
        for (int d1 = 0; d1 < 6; d1++) {
            Feld2[d1] = new int[10][][];
            for (int d2 = 0; d2 < 10; d2++) {
                Feld2[d1][d2] = new int[8][];
            }
        }
        int[][] g = { { 1, 2, 3 }, { 1, 2, 3 }, { 1, 2, 3 } };
        int[][] h = { { 1, 2, 3 }, { 1, 2, 3 }, { 1, 2, 3 } };
        System.out.println(Feld1.length == Feld2.length);
        System.out.println(g == h); // only compares references.
        System.out.println(Arrays.equals(g, h)); // this works for
                                                 // one-dimensional
                                                 // arrays only.
        System.out.println(Arrays.deepEquals(g, h));
    }

    /**
     * JUnit-tested method. (Fixed: this javadoc was previously a block
     * comment inside the method body.)
     *
     * @param a first factor.
     * @param b second factor.
     * @return product of a and b.
     */
    public static int multiply(int a, int b) {
        return a * b;
    }

    /** Declaration-only exercise (5.1): the locals are intentionally unused. */
    public static void derp() { // 5.1
        byte a, b;
        byte[] aReihe, aZeile, bReihe, bZeile;
        byte[][] aMatrix, bMatrix;
    }

    /** Compares index-based and enhanced for-loops over flat and jagged arrays. */
    public static void loopComparison() {
        int[] werte = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
        int summe = 0;
        for (int i = 0; i < werte.length; i++)
            summe = summe + werte[i];
        System.out.println("Summe: " + summe);
        summe = 0;
        for (int x : werte)
            summe = summe + x;
        System.out.println("Summe: " + summe);
        // Two-dimensional matrix with rows of differing length
        // (here specifically a triangular matrix).
        int[][] matrix = { { 1 }, { 2, 3 }, { 4, 5, 6 }, { 7, 8, 9, 10 } };
        // Sum the elements with traditional loop notation.
        summe = 0;
        for (int i = 0; i < matrix.length; i++)
            for (int j = 0; j < matrix[i].length; j++)
                summe = summe + matrix[i][j];
        System.out.println("Summe: " + summe);
        // Sum the elements with simplified (enhanced) loop notation.
        summe = 0;
        for (int[] zeile : matrix)
            for (int element : zeile)
                summe = summe + element;
        System.out.println("Summe: " + summe);
    }

    /** Builds a month/day/hour calendar with per-month day counts (non-leap year). */
    private static void fixedLength() {
        String[][][] appointments = new String[12][][];
        appointments[0] = new String[31][24]; // Jan
        appointments[1] = new String[28][24]; // Feb
        appointments[2] = new String[31][24]; // March
        appointments[3] = new String[30][24]; // April
        appointments[4] = new String[31][24]; // May
        appointments[5] = new String[30][24]; // June
        appointments[6] = new String[31][24]; // July
        appointments[7] = new String[31][24]; // August
        appointments[8] = new String[30][24]; // September
        appointments[9] = new String[31][24]; // October
        appointments[10] = new String[30][24]; // Nov
        appointments[11] = new String[31][24]; // Dec
        for (int i = 0; i < appointments.length; i++)
            for (int j = 0; j < appointments[i].length; j++)
                for (int k = 0; k < appointments[i][j].length; k++)
                    appointments[i][j][k] = "";
    }

    /**
     * Interactive day/hour appointment calendar over a 31x24 jagged array.
     * Bug fixes: short-circuit ||; days are validated against the actual
     * array bounds (0..30 — previously day 31 crashed with
     * ArrayIndexOutOfBoundsException); option 2 now validates the day too.
     */
    private static void multiDimensional() {
        String[][] appointments;
        appointments = new String[31][];
        for (int i = 0; i < appointments.length; i++) {
            appointments[i] = new String[24];
            for (int j = 0; j < appointments[i].length; j++)
                appointments[i][j] = "";
        }
        boolean done = false;
        Scanner in = new Scanner(System.in);
        while (!done) {
            System.out.println("1 = New Entry");
            System.out.println("2 = Show appointment");
            System.out.println("3 = Quit");
            int choice = in.nextInt();
            switch (choice) {
            case 1:
                System.out.println("What day?");
                int day = in.nextInt();
                // Was: day < 0 | day > 31 — accepted 31 and crashed on indexing.
                if (day < 0 || day >= appointments.length) {
                    System.out.println("Is that day really on your calendar?");
                    break;
                }
                System.out.println("What hour?");
                int hour = in.nextInt();
                if (hour < 0 || hour > 23) {
                    System.out.println("Is that hour really on your clock?");
                    break;
                }
                System.out.println("What is your entry?");
                String entry = in.next();
                appointments[day][hour] = entry;
                break;
            case 2:
                System.out.println("What day?");
                int printDay = in.nextInt();
                // Previously unchecked — out-of-range input crashed the loop below.
                if (printDay < 0 || printDay >= appointments.length) {
                    System.out.println("Is that day really on your calendar?");
                    break;
                }
                for (int i = 0; i < 24; i++)
                    System.out
                        .println(i + " Uhr: " + appointments[printDay][i]);
                break;
            case 3:
                done = true;
                break;
            default:
                System.out.println("Invalid choice.");
            }
        }
    }

    /** Interactive one-day (24-hour) appointment calendar. */
    private static void calendar() {
        String[] appointments = new String[24];
        for (int i = 0; i < appointments.length; i++)
            appointments[i] = "";
        boolean done = false;
        Scanner in = new Scanner(System.in);
        while (!done) {
            System.out.println("1 = New Entry");
            System.out.println("2 = Show appointment");
            System.out.println("3 = Quit");
            int choice = in.nextInt();
            switch (choice) {
            case 1:
                System.out.println("What hour?");
                int hour = in.nextInt();
                // Was: hour < 0 | hour > 23 — same result, but || short-circuits.
                if (hour < 0 || hour > 23) {
                    System.out.println("Is that hour really on your clock?");
                    break;
                }
                System.out.println("What is your entry?");
                String entry = in.next();
                appointments[hour] = entry;
                break;
            case 2:
                for (int i = 0; i < 24; i++)
                    System.out.println(i + " Uhr: " + appointments[i]);
                break;
            case 3:
                done = true;
                break;
            default:
                System.out.println("Invalid choice.");
            }
        }
    }

    /** Demonstrates array creation, element access and System.arraycopy. */
    private static void createFields() {
        int[] field1 = new int[5];
        double[] field2 = new double[2];
        String[] field3 = new String[4];
        int[] field4 = { 0, 1, 2, 3, 4, 5 };
        field1[4] = 1;
        field2[0] = 3.14;
        int[] field5 = new int[6];
        System.arraycopy(field4, 0, field5, 0, field4.length);
        for (int i : field5)
            System.out.println(i);
    }

    /** Reads two integer sequences from the keyboard and prints each reversed. */
    public static void swap() {
        int n = 12;
        int[] werte1 = new int[n];
        // Read the values from the keyboard.
        for (int i = 0; i < werte1.length; i++)
            werte1[i] = IOTools.readInteger("Wert Nr. " + i + ": ");
        // How many values should be read for row 2?
        n = IOTools.readInteger("Wie viele Werte? "); // n is changed here!
        // Allocate the second array.
        int[] werte2 = new int[n];
        // Read the values from the keyboard.
        for (int i = 0; i < werte2.length; i++)
            werte2[i] = IOTools.readInteger("Wert Nr. " + i + ": ");
        // Print both rows in reverse order.
        System.out.println("Reihe 1 verkehrt herum");
        for (int i = 0; i < werte1.length; i++)
            System.out.println("Wert Nr. " + i + ": "
                + werte1[werte1.length - 1 - i]);
        System.out.println("Reihe 2 verkehrt herum");
        for (int i = 0; i < werte2.length; i++)
            System.out.println("Wert Nr. " + i + ": "
                + werte2[werte2.length - 1 - i]);
    }
}
/*
 * Copyright (C) 2013 readyState Software Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.unipad.singlebrain.absPic.bean;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout.LayoutParams;

import java.lang.reflect.Method;

/**
 * Class to manage status and navigation bar tint effects when using KitKat
 * translucent system UI modes.
 *
 * Tints are implemented by inserting plain {@link View}s, sized to the bars,
 * into the activity's decor view and colouring their backgrounds.
 */
public class SystemBarTintManager {

    static {
        // Android allows a system property to override the presence of the navigation bar.
        // Used by the emulator.
        // See https://github.com/android/platform_frameworks_base/blob/master/policy/src/com/android/internal/policy/impl/PhoneWindowManager.java#L1076
        // SystemProperties is @hide, hence the reflection. Any failure simply
        // leaves the override unset. (Assigning sNavBarOverride here before its
        // textual declaration below is legal for static fields.)
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            try {
                Class c = Class.forName("android.os.SystemProperties");
                Method m = c.getDeclaredMethod("get", String.class);
                m.setAccessible(true);
                sNavBarOverride = (String) m.invoke(null, "qemu.hw.mainkeys");
            } catch (Throwable e) {
                sNavBarOverride = null;
            }
        }
    }

    /**
     * The default system bar tint color value.
     */
    public static final int DEFAULT_TINT_COLOR = 0x99000000;

    // Value of "qemu.hw.mainkeys": "1" forces no nav bar, "0" forces one.
    private static String sNavBarOverride;

    private final SystemBarConfig mConfig;
    private boolean mStatusBarAvailable;
    private boolean mNavBarAvailable;
    private boolean mStatusBarTintEnabled;
    private boolean mNavBarTintEnabled;
    private View mStatusBarTintView;
    private View mNavBarTintView;

    /**
     * Constructor. Call this in the host activity onCreate method after its
     * content view has been set. You should always create new instances when
     * the host activity is recreated.
     *
     * @param activity The host activity.
     */
    @TargetApi(19)
    public SystemBarTintManager(Activity activity) {

        Window win = activity.getWindow();
        ViewGroup decorViewGroup = (ViewGroup) win.getDecorView();

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            // check theme attrs
            int[] attrs = {android.R.attr.windowTranslucentStatus,
                    android.R.attr.windowTranslucentNavigation};
            TypedArray a = activity.obtainStyledAttributes(attrs);
            try {
                mStatusBarAvailable = a.getBoolean(0, false);
                mNavBarAvailable = a.getBoolean(1, false);
            } finally {
                a.recycle();
            }

            // check window flags — flags set at runtime win over theme attrs
            WindowManager.LayoutParams winParams = win.getAttributes();
            int bits = WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS;
            if ((winParams.flags & bits) != 0) {
                mStatusBarAvailable = true;
            }
            bits = WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION;
            if ((winParams.flags & bits) != 0) {
                mNavBarAvailable = true;
            }
        }

        mConfig = new SystemBarConfig(activity, mStatusBarAvailable, mNavBarAvailable);
        // device might not have virtual navigation keys
        if (!mConfig.hasNavigtionBar()) {
            mNavBarAvailable = false;
        }

        if (mStatusBarAvailable) {
            setupStatusBarView(activity, decorViewGroup);
        }
        if (mNavBarAvailable) {
            setupNavBarView(activity, decorViewGroup);
        }

    }

    /**
     * Enable tinting of the system status bar.
     *
     * If the platform is running Jelly Bean or earlier, or translucent system
     * UI modes have not been enabled in either the theme or via window flags,
     * then this method does nothing.
     *
     * @param enabled True to enable tinting, false to disable it (default).
     */
    public void setStatusBarTintEnabled(boolean enabled) {
        mStatusBarTintEnabled = enabled;
        if (mStatusBarAvailable) {
            mStatusBarTintView.setVisibility(enabled ? View.VISIBLE : View.GONE);
        }
    }

    /**
     * Enable tinting of the system navigation bar.
     *
     * If the platform does not have soft navigation keys, is running Jelly Bean
     * or earlier, or translucent system UI modes have not been enabled in either
     * the theme or via window flags, then this method does nothing.
     *
     * @param enabled True to enable tinting, false to disable it (default).
     */
    public void setNavigationBarTintEnabled(boolean enabled) {
        mNavBarTintEnabled = enabled;
        if (mNavBarAvailable) {
            mNavBarTintView.setVisibility(enabled ? View.VISIBLE : View.GONE);
        }
    }

    /**
     * Apply the specified color tint to all system UI bars.
     *
     * @param color The color of the background tint.
     */
    public void setTintColor(int color) {
        setStatusBarTintColor(color);
        setNavigationBarTintColor(color);
    }

    /**
     * Apply the specified drawable or color resource to all system UI bars.
     *
     * @param res The identifier of the resource.
     */
    public void setTintResource(int res) {
        setStatusBarTintResource(res);
        setNavigationBarTintResource(res);
    }

    /**
     * Apply the specified drawable to all system UI bars.
     *
     * @param drawable The drawable to use as the background, or null to remove it.
     */
    public void setTintDrawable(Drawable drawable) {
        setStatusBarTintDrawable(drawable);
        setNavigationBarTintDrawable(drawable);
    }

    /**
     * Apply the specified alpha to all system UI bars.
     *
     * @param alpha The alpha to use
     */
    public void setTintAlpha(float alpha) {
        setStatusBarAlpha(alpha);
        setNavigationBarAlpha(alpha);
    }

    /**
     * Apply the specified color tint to the system status bar.
     *
     * @param color The color of the background tint.
     */
    public void setStatusBarTintColor(int color) {
        if (mStatusBarAvailable) {
            mStatusBarTintView.setBackgroundColor(color);
        }
    }

    /**
     * Apply the specified drawable or color resource to the system status bar.
     *
     * @param res The identifier of the resource.
     */
    public void setStatusBarTintResource(int res) {
        if (mStatusBarAvailable) {
            mStatusBarTintView.setBackgroundResource(res);
        }
    }

    /**
     * Apply the specified drawable to the system status bar.
     *
     * @param drawable The drawable to use as the background, or null to remove it.
     */
    @SuppressWarnings("deprecation")
    public void setStatusBarTintDrawable(Drawable drawable) {
        if (mStatusBarAvailable) {
            mStatusBarTintView.setBackgroundDrawable(drawable);
        }
    }

    /**
     * Apply the specified alpha to the system status bar.
     *
     * @param alpha The alpha to use
     */
    @TargetApi(11)
    public void setStatusBarAlpha(float alpha) {
        // View#setAlpha requires API 11 (Honeycomb).
        if (mStatusBarAvailable && Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mStatusBarTintView.setAlpha(alpha);
        }
    }

    /**
     * Apply the specified color tint to the system navigation bar.
     *
     * @param color The color of the background tint.
     */
    public void setNavigationBarTintColor(int color) {
        if (mNavBarAvailable) {
            mNavBarTintView.setBackgroundColor(color);
        }
    }

    /**
     * Apply the specified drawable or color resource to the system navigation bar.
     *
     * @param res The identifier of the resource.
     */
    public void setNavigationBarTintResource(int res) {
        if (mNavBarAvailable) {
            mNavBarTintView.setBackgroundResource(res);
        }
    }

    /**
     * Apply the specified drawable to the system navigation bar.
     *
     * @param drawable The drawable to use as the background, or null to remove it.
     */
    @SuppressWarnings("deprecation")
    public void setNavigationBarTintDrawable(Drawable drawable) {
        if (mNavBarAvailable) {
            mNavBarTintView.setBackgroundDrawable(drawable);
        }
    }

    /**
     * Apply the specified alpha to the system navigation bar.
     *
     * @param alpha The alpha to use
     */
    @TargetApi(11)
    public void setNavigationBarAlpha(float alpha) {
        // View#setAlpha requires API 11 (Honeycomb).
        if (mNavBarAvailable && Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mNavBarTintView.setAlpha(alpha);
        }
    }

    /**
     * Get the system bar configuration.
     *
     * @return The system bar configuration for the current device configuration.
     */
    public SystemBarConfig getConfig() {
        return mConfig;
    }

    /**
     * Is tinting enabled for the system status bar?
     *
     * @return True if enabled, False otherwise.
     */
    public boolean isStatusBarTintEnabled() {
        return mStatusBarTintEnabled;
    }

    /**
     * Is tinting enabled for the system navigation bar?
     *
     * @return True if enabled, False otherwise.
     */
    public boolean isNavBarTintEnabled() {
        return mNavBarTintEnabled;
    }

    // Adds the status-bar tint view (top-aligned, status-bar height, hidden
    // until tinting is enabled) to the decor view.
    private void setupStatusBarView(Context context, ViewGroup decorViewGroup) {
        mStatusBarTintView = new View(context);
        LayoutParams params = new LayoutParams(LayoutParams.MATCH_PARENT, mConfig.getStatusBarHeight());
        params.gravity = Gravity.TOP;
        if (mNavBarAvailable && !mConfig.isNavigationAtBottom()) {
            // Leave room for a right-hand navigation bar in landscape phones.
            params.rightMargin = mConfig.getNavigationBarWidth();
        }
        mStatusBarTintView.setLayoutParams(params);
        mStatusBarTintView.setBackgroundColor(DEFAULT_TINT_COLOR);
        mStatusBarTintView.setVisibility(View.GONE);
        decorViewGroup.addView(mStatusBarTintView);
    }

    // Adds the nav-bar tint view (bottom- or right-aligned depending on the
    // device configuration, hidden until tinting is enabled) to the decor view.
    private void setupNavBarView(Context context, ViewGroup decorViewGroup) {
        mNavBarTintView = new View(context);
        LayoutParams params;
        if (mConfig.isNavigationAtBottom()) {
            params = new LayoutParams(LayoutParams.MATCH_PARENT, mConfig.getNavigationBarHeight());
            params.gravity = Gravity.BOTTOM;
        } else {
            params = new LayoutParams(mConfig.getNavigationBarWidth(), LayoutParams.MATCH_PARENT);
            params.gravity = Gravity.RIGHT;
        }
        mNavBarTintView.setLayoutParams(params);
        mNavBarTintView.setBackgroundColor(DEFAULT_TINT_COLOR);
        mNavBarTintView.setVisibility(View.GONE);
        decorViewGroup.addView(mNavBarTintView);
    }

    /**
     * Class which describes system bar sizing and other characteristics for the current
     * device configuration.
     *
     */
    public static class SystemBarConfig {

        // Internal Android dimension/bool resource names, resolved at runtime.
        private static final String STATUS_BAR_HEIGHT_RES_NAME = "status_bar_height";
        private static final String NAV_BAR_HEIGHT_RES_NAME = "navigation_bar_height";
        private static final String NAV_BAR_HEIGHT_LANDSCAPE_RES_NAME = "navigation_bar_height_landscape";
        private static final String NAV_BAR_WIDTH_RES_NAME = "navigation_bar_width";
        private static final String SHOW_NAV_BAR_RES_NAME = "config_showNavigationBar";

        private final boolean mTranslucentStatusBar;
        private final boolean mTranslucentNavBar;
        private final int mStatusBarHeight;
        private final int mActionBarHeight;
        private final boolean mHasNavigationBar;
        private final int mNavigationBarHeight;
        private final int mNavigationBarWidth;
        private final boolean mInPortrait;
        private final float mSmallestWidthDp;

        private SystemBarConfig(Activity activity, boolean translucentStatusBar, boolean traslucentNavBar) {
            Resources res = activity.getResources();
            mInPortrait = (res.getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT);
            mSmallestWidthDp = getSmallestWidthDp(activity);
            mStatusBarHeight = getInternalDimensionSize(res, STATUS_BAR_HEIGHT_RES_NAME);
            mActionBarHeight = getActionBarHeight(activity);
            mNavigationBarHeight = getNavigationBarHeight(activity);
            mNavigationBarWidth = getNavigationBarWidth(activity);
            // A zero nav-bar height means the device has no soft navigation keys.
            mHasNavigationBar = (mNavigationBarHeight > 0);
            mTranslucentStatusBar = translucentStatusBar;
            mTranslucentNavBar = traslucentNavBar;
        }

        @TargetApi(14)
        private int getActionBarHeight(Context context) {
            int result = 0;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                TypedValue tv = new TypedValue();
                context.getTheme().resolveAttribute(android.R.attr.actionBarSize, tv, true);
                result = TypedValue.complexToDimensionPixelSize(tv.data, context.getResources().getDisplayMetrics());
            }
            return result;
        }

        @TargetApi(14)
        private int getNavigationBarHeight(Context context) {
            Resources res = context.getResources();
            int result = 0;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                if (hasNavBar(context)) {
                    String key;
                    if (mInPortrait) {
                        key = NAV_BAR_HEIGHT_RES_NAME;
                    } else {
                        key = NAV_BAR_HEIGHT_LANDSCAPE_RES_NAME;
                    }
                    return getInternalDimensionSize(res, key);
                }
            }
            return result;
        }

        @TargetApi(14)
        private int getNavigationBarWidth(Context context) {
            Resources res = context.getResources();
            int result = 0;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                if (hasNavBar(context)) {
                    return getInternalDimensionSize(res, NAV_BAR_WIDTH_RES_NAME);
                }
            }
            return result;
        }

        @TargetApi(14)
        private boolean hasNavBar(Context context) {
            Resources res = context.getResources();
            int resourceId = res.getIdentifier(SHOW_NAV_BAR_RES_NAME, "bool", "android");
            if (resourceId != 0) {
                boolean hasNav = res.getBoolean(resourceId);
                // check override flag (see static block)
                if ("1".equals(sNavBarOverride)) {
                    hasNav = false;
                } else if ("0".equals(sNavBarOverride)) {
                    hasNav = true;
                }
                return hasNav;
            } else { // fallback
                return !ViewConfiguration.get(context).hasPermanentMenuKey();
            }
        }

        // Resolves an internal ("android" package) dimension by name; 0 if absent.
        private int getInternalDimensionSize(Resources res, String key) {
            int result = 0;
            int resourceId = res.getIdentifier(key, "dimen", "android");
            if (resourceId > 0) {
                result = res.getDimensionPixelSize(resourceId);
            }
            return result;
        }

        @SuppressLint("NewApi")
        private float getSmallestWidthDp(Activity activity) {
            DisplayMetrics metrics = new DisplayMetrics();
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                activity.getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
            } else {
                // TODO this is not correct, but we don't really care pre-kitkat
                activity.getWindowManager().getDefaultDisplay().getMetrics(metrics);
            }
            float widthDp = metrics.widthPixels / metrics.density;
            float heightDp = metrics.heightPixels / metrics.density;
            return Math.min(widthDp, heightDp);
        }

        /**
         * Should a navigation bar appear at the bottom of the screen in the current
         * device configuration? A navigation bar may appear on the right side of
         * the screen in certain configurations.
         *
         * @return True if navigation should appear at the bottom of the screen, False otherwise.
         */
        public boolean isNavigationAtBottom() {
            // Tablets (sw >= 600dp) and portrait phones put the bar at the bottom.
            return (mSmallestWidthDp >= 600 || mInPortrait);
        }

        /**
         * Get the height of the system status bar.
         *
         * @return The height of the status bar (in pixels).
         */
        public int getStatusBarHeight() {
            return mStatusBarHeight;
        }

        /**
         * Get the height of the action bar.
         *
         * @return The height of the action bar (in pixels).
         */
        public int getActionBarHeight() {
            return mActionBarHeight;
        }

        /**
         * Does this device have a system navigation bar?
         *
         * NOTE(review): method name keeps the original "Navigtion" typo; renaming
         * would break existing callers.
         *
         * @return True if this device uses soft key navigation, False otherwise.
         */
        public boolean hasNavigtionBar() {
            return mHasNavigationBar;
        }

        /**
         * Get the height of the system navigation bar.
         *
         * @return The height of the navigation bar (in pixels). If the device does not have
         * soft navigation keys, this will always return 0.
         */
        public int getNavigationBarHeight() {
            return mNavigationBarHeight;
        }

        /**
         * Get the width of the system navigation bar when it is placed vertically on the screen.
         *
         * @return The width of the navigation bar (in pixels). If the device does not have
         * soft navigation keys, this will always return 0.
         */
        public int getNavigationBarWidth() {
            return mNavigationBarWidth;
        }

        /**
         * Get the layout inset for any system UI that appears at the top of the screen.
         *
         * @param withActionBar True to include the height of the action bar, False otherwise.
         * @return The layout inset (in pixels).
         */
        public int getPixelInsetTop(boolean withActionBar) {
            return (mTranslucentStatusBar ? mStatusBarHeight : 0) + (withActionBar ? mActionBarHeight : 0);
        }

        /**
         * Get the layout inset for any system UI that appears at the bottom of the screen.
         *
         * @return The layout inset (in pixels).
         */
        public int getPixelInsetBottom() {
            if (mTranslucentNavBar && isNavigationAtBottom()) {
                return mNavigationBarHeight;
            } else {
                return 0;
            }
        }

        /**
         * Get the layout inset for any system UI that appears at the right of the screen.
         *
         * @return The layout inset (in pixels).
         */
        public int getPixelInsetRight() {
            if (mTranslucentNavBar && !isNavigationAtBottom()) {
                return mNavigationBarWidth;
            } else {
                return 0;
            }
        }

    }

}
package skylin.services;

import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

/**
 * Minimal hand-rolled JSON value. An instance is either an ARRAY (list of
 * values) or a MAP (string keys to J values); scalars are modeled as
 * single-element arrays. Numbers are stored as {@link BigDecimal}, strings
 * as String, booleans as Boolean, and JSON null as Java null.
 *
 * Fix: numeric literals are now parsed with {@code new BigDecimal(String)}
 * instead of routing through {@code Double.parseDouble}, which injected
 * binary floating-point noise (e.g. "0.1" serialized back as
 * 0.1000000000000000055511151231257827021181583404541015625).
 */
public class J {
    int ARRAY = 0;
    int MAP = 1;
    int type;
    ArrayList<Object> arrayValues;
    Map<String, J> mapValues;
    // When true, toString() emits [] brackets even for 0/1-element arrays.
    private boolean arrayHint;

    private J() {
    }

    /**
     * @param map true to build a MAP from alternating key/value varargs;
     *            false to build an ARRAY (nested arrays and Iterables are
     *            flattened one level).
     */
    private J(boolean map, Object... d) {
        if (map) {
            type = MAP;
            mapValues = new HashMap<String, J>();
            for (int i = 0; i < d.length; i += 2) {
                Object v = d[i + 1];
                if (!(v instanceof J)) {
                    v = new J(false, v);
                } else if (((J) v).isMap()) {
                    // Map values are always wrapped in a single-element array.
                    v = new J(false, v);
                }
                mapValues.put((String) d[i], (J) v);
            }
        } else {
            type = ARRAY;
            arrayValues = new ArrayList<Object>();
            if (d == null) {
                arrayValues.add(null);
            } else {
                for (Object o : d) {
                    if (o != null && o.getClass().isArray()) {
                        int length = Array.getLength(o);
                        for (int i = 0; i < length; i++) {
                            arrayValues.add(Array.get(o, i));
                        }
                    } else if (o instanceof Iterable) { // instanceof is false for null
                        for (Object o2 : (Iterable<?>) o) {
                            arrayValues.add(o2);
                        }
                    } else {
                        arrayValues.add(o);
                    }
                }
            }
        }
    }

    /** Array factory; remembers the explicit-array hint for serialization. */
    public static J A(Object... d) {
        J ret = new J(false, d);
        ret.arrayHint = true;
        return ret;
    }

    /** Like {@link #A} but without the explicit-array serialization hint. */
    private static J A_INTERNAL(Object... d) {
        return new J(false, d);
    }

    /** Map factory from alternating key/value varargs. */
    public static J M(Object... d) {
        return new J(true, d);
    }

    /**
     * Parses a JSON string. Whitespace outside string literals is stripped
     * first. NOTE(review): the quote scan does not honor escaped quotes
     * (\") inside strings — confirm inputs never contain them.
     */
    public static J fromString(String s) {
        boolean inString = false;
        for (int i = 0; i < s.length(); i++) {
            if (s.charAt(i) == '"') {
                inString = !inString;
            }
            if (!inString) {
                char c = s.charAt(i);
                if (c == ' ' || c == '\n' || c == '\r' || c == '\t') {
                    s = s.substring(0, i) + s.substring(i + 1, s.length());
                    i--;
                }
            }
        }
        return new J().fromStringInternal(s);
    }

    // Parses a whitespace-stripped JSON fragment into this instance.
    private J fromStringInternal(String s) {
        if (s.startsWith("{")) {
            type = MAP;
            s = s.substring(1, s.length() - 1);
            mapValues = getMap(s);
        } else {
            type = ARRAY;
            if (s.startsWith("[")) {
                s = s.substring(1, s.length() - 1);
                arrayHint = true;
            }
            arrayValues = getArray(s);
        }
        return this;
    }

    // Parses a comma-separated element list (brackets already stripped).
    private ArrayList<Object> getArray(String s) {
        ArrayList<Object> ret = new ArrayList<Object>();
        ArrayList<String> elements = getElements(s);
        for (String e : elements) {
            if (e.startsWith("{") || e.startsWith("[")) // second part might not be needed. dont think it would be legal json
            {
                ret.add(new J().fromStringInternal(e));
            } else if (e.startsWith("\"")) {
                ret.add(e.substring(1, e.length() - 1));
            } else if (e.toLowerCase().equals("true") || e.toLowerCase().equals("false")) {
                ret.add(Boolean.parseBoolean(e));
            } else if (e.toLowerCase().equals("null")) {
                ret.add(null);
            } else {
                // FIX: parse the literal directly. The previous
                // new BigDecimal(Double.parseDouble(e)) round-tripped through
                // binary floating point and lost decimal precision.
                ret.add(new BigDecimal(e));
            }
        }
        return ret;
    }

    // Parses "key":value pairs (braces already stripped). Keys must be quoted.
    private Map<String, J> getMap(String s) {
        HashMap<String, J> ret = new HashMap<String, J>();
        ArrayList<String> elements = getElements(s);
        for (String e : elements) {
            J newj = new J().fromStringInternal(e.substring(e.indexOf(":") + 1, e.length()));
            if (newj.isMap()) {
                // Maps are always wrapped in a single-element array.
                newj = J.A_INTERNAL(newj);
            }
            // Strip the surrounding quotes from the key.
            ret.put(e.substring(1, e.indexOf(":") - 1), newj);
        }
        return ret;
    }

    /**
     * Splits a JSON fragment on top-level commas, ignoring commas nested in
     * strings, objects, or arrays.
     */
    public static ArrayList<String> getElements(String r) {
        ArrayList<String> ret = new ArrayList<String>();
        int level = 0;          // nesting depth of {} / []
        boolean inString = false;
        int count = 0;          // scan position within the remaining input
        while (r.length() > 0) {
            if (r.charAt(count) == '"') {
                inString = !inString;
            }
            if (!inString && (r.charAt(count) == '{' || r.charAt(count) == '[')) {
                level++;
            }
            if (!inString && (r.charAt(count) == '}' || r.charAt(count) == ']')) {
                level--;
            }
            if (!inString && level == 0 && (r.charAt(count) == ',' || count == r.length() - 1)) {
                int take = count;
                if (count == r.length() - 1) {
                    take++; // last element: include the final character
                }
                String s = r.substring(0, take);
                ret.add(s);
                r = r.substring(count + 1);
                count = 0;
            } else {
                count++;
            }
        }
        return ret;
    }

    /** Serializes back to JSON text (see arrayHint for bracket behavior). */
    public String toString() {
        String ret = "";
        if (type == ARRAY) {
            for (int i = 0; i < arrayValues.size(); i++) {
                Object e = arrayValues.get(i);
                String v = e + "";
                if (e instanceof String) {
                    v = "\"" + v + "\"";
                }
                if (i < arrayValues.size() - 1) {
                    ret += v + ",";
                } else {
                    ret += v;
                }
            }
            if (arrayValues.size() > 1 || arrayHint) {
                ret = "[" + ret + "]";
            }
            if (ret.length() == 0) {
                ret = "[]";
            }
        } else { // type == MAP
            int i = 0;
            for (String k : mapValues.keySet()) {
                Object e = mapValues.get(k);
                String v = e.toString();
                if (i < mapValues.size() - 1) {
                    ret += "\"" + k + "\":" + v + ",";
                } else {
                    ret += "\"" + k + "\":" + v;
                }
                i++;
            }
            ret = "{" + ret + "}";
        }
        return ret;
    }

    /**
     * Returns the array elements. NOTE(review): for MAP instances
     * arrayValues is null, so this throws NPE; an earlier revision returned
     * mapValues.values() for maps (removed commented-out code).
     */
    public Object[] values() {
        return arrayValues.toArray();
    }

    /** Returns the array elements, each cast to J. */
    public J[] valuesj() {
        Object[] v = values();
        J[] ret = new J[v.length];
        for (int i = 0; i < v.length; i++) {
            ret[i] = (J) v[i];
        }
        return ret;
    }

    /** Map lookup; scalars delegate to their single wrapped element. */
    public J get(String name) {
        if (isMap()) {
            return mapValues.get(name);
        }
        return getJ(0).getJ(name);
    }

    /** Key presence check; scalars delegate to their single wrapped element. */
    public boolean contains(String name) {
        if (isMap()) {
            return mapValues.containsKey(name);
        }
        return getJ(0).contains(name);
    }

    /** Key removal; scalars delegate to their single wrapped element. */
    public void remove(String name) {
        if (isMap()) {
            mapValues.remove(name);
            return;
        }
        getJ(0).remove(name);
    }

    public Object get(int index) {
        return arrayValues.get(index);
    }

    /** First (and for scalars, only) array element. */
    public Object get() {
        return arrayValues.get(0);
    }

    public J getJ(int index) {
        return (J) get(index);
    }

    public J getJ(String name) {
        return (J) get(name);
    }

    /** Map keys in iteration order (undefined order — backed by HashMap). */
    public String[] keys() {
        String ret[] = new String[mapValues.size()];
        int i = 0;
        for (String k : mapValues.keySet()) {
            ret[i] = k;
            i++;
        }
        return ret;
    }

    public boolean isMap() {
        return type == MAP;
    }

    // ---- typed scalar accessors (numbers are stored as BigDecimal) ----

    public int getInt() {
        return ((BigDecimal) get(0)).intValue();
    }

    public BigDecimal getBigDecimal() {
        return (BigDecimal) get(0);
    }

    public int getInt(int index) {
        return ((BigDecimal) get(index)).intValue();
    }

    public BigDecimal getBigDecimal(int index) {
        return (BigDecimal) get(index);
    }

    public String getString() {
        return (String) get(0);
    }

    public String getString(int index) {
        return (String) get(index);
    }

    /** Null-safe: returns null when the key is absent. */
    public String getString(String name) {
        J j = get(name);
        if (j == null)
            return null;
        return (String) j.get(0);
    }

    public String getString(String name, int index) {
        return (String) get(name).get(index);
    }

    public int getInt(String name) {
        return ((BigDecimal) get(name).get(0)).intValue();
    }

    public int getInt(String name, int index) {
        return ((BigDecimal) get(name).get(index)).intValue();
    }

    public float getFloat(String name) {
        return ((BigDecimal) get(name).get(0)).floatValue();
    }

    public float getFloat(String name, int index) {
        return ((BigDecimal) get(name).get(index)).floatValue();
    }

    public BigDecimal getBigDecimal(String name) {
        return (BigDecimal) get(name).get(0);
    }

    public BigDecimal getBigDecimal(String name, int index) {
        return (BigDecimal) get(name).get(index);
    }

    public boolean getBoolean(int index) {
        return (boolean) get(index);
    }

    public boolean getBoolean(String name) {
        return (boolean) get(name).get(0);
    }

    public boolean getBoolean(String name, int index) {
        return (boolean) get(name).get(index);
    }

    /** Appends a raw value to an ARRAY instance. */
    public void add(Object v) {
        arrayValues.add(v);
    }

    /** Puts a value into a MAP instance, wrapping it in an array as needed. */
    public void put(String key, Object v) {
        J value = null;
        if (v instanceof J) {
            value = (J) v;
            if (value.isMap()) {
                value = J.A_INTERNAL(value);
            }
        } else {
            value = J.A_INTERNAL(v);
        }
        mapValues.put(key, value);
    }

    /** Entry count; scalars delegate to their single wrapped element. */
    public int getMapSize() {
        if (isMap()) {
            return mapValues.size();
        } else {
            return getJ(0).getMapSize();
        }
    }

    public int getArraySize() {
        return arrayValues.size();
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! package net.unto.twitter; public final class UtilProtos { private UtilProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public static final class Url extends com.google.protobuf.GeneratedMessage { // Use Url.newBuilder() to construct. private Url() {} private static final Url defaultInstance = new Url(); public static Url getDefaultInstance() { return defaultInstance; } public Url getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return net.unto.twitter.UtilProtos.internal_static_twitter_Url_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return net.unto.twitter.UtilProtos.internal_static_twitter_Url_fieldAccessorTable; } public enum Scheme implements com.google.protobuf.ProtocolMessageEnum { HTTP(0, 0), HTTPS(1, 1), ; public final int getNumber() { return value; } public static Scheme valueOf(int value) { switch (value) { case 0: return HTTP; case 1: return HTTPS; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Scheme> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<Scheme> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Scheme>() { public Scheme findValueByNumber(int number) { return Scheme.valueOf(number) ; } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return net.unto.twitter.UtilProtos.Url.getDescriptor().getEnumTypes().get(0); } private static final Scheme[] VALUES = { HTTP, HTTPS, }; public static Scheme 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private Scheme(int index, int value) { this.index = index; this.value = value; } static { net.unto.twitter.UtilProtos.getDescriptor(); } } public static final class Parameter extends com.google.protobuf.GeneratedMessage { // Use Parameter.newBuilder() to construct. private Parameter() {} private static final Parameter defaultInstance = new Parameter(); public static Parameter getDefaultInstance() { return defaultInstance; } public Parameter getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return net.unto.twitter.UtilProtos.internal_static_twitter_Url_Parameter_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return net.unto.twitter.UtilProtos.internal_static_twitter_Url_Parameter_fieldAccessorTable; } // optional string name = 1; public static final int NAME_FIELD_NUMBER = 1; private boolean hasName; private java.lang.String name_ = ""; public boolean hasName() { return hasName; } public java.lang.String getName() { return name_; } // optional string value = 2; public static final int VALUE_FIELD_NUMBER = 2; private boolean hasValue; private java.lang.String value_ = ""; public boolean hasValue() { return hasValue; } public java.lang.String getValue() { return value_; } public final boolean isInitialized() { return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (hasName()) { output.writeString(1, getName()); } if (hasValue()) { output.writeString(2, getValue()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { 
int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (hasName()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(1, getName()); } if (hasValue()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(2, getValue()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input).buildParsed(); } public static 
net.unto.twitter.UtilProtos.Url.Parameter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Parameter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(net.unto.twitter.UtilProtos.Url.Parameter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> { private net.unto.twitter.UtilProtos.Url.Parameter result; // Construct using net.unto.twitter.UtilProtos.Url.Parameter.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); builder.result = new net.unto.twitter.UtilProtos.Url.Parameter(); return builder; } protected net.unto.twitter.UtilProtos.Url.Parameter internalGetResult() { return result; } public Builder clear() { if (result == null) { throw new IllegalStateException( "Cannot call clear() after build()."); } result = new net.unto.twitter.UtilProtos.Url.Parameter(); return this; } public Builder clone() { return create().mergeFrom(result); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return net.unto.twitter.UtilProtos.Url.Parameter.getDescriptor(); } public net.unto.twitter.UtilProtos.Url.Parameter 
getDefaultInstanceForType() { return net.unto.twitter.UtilProtos.Url.Parameter.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } public net.unto.twitter.UtilProtos.Url.Parameter build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } private net.unto.twitter.UtilProtos.Url.Parameter buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return buildPartial(); } public net.unto.twitter.UtilProtos.Url.Parameter buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } net.unto.twitter.UtilProtos.Url.Parameter returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof net.unto.twitter.UtilProtos.Url.Parameter) { return mergeFrom((net.unto.twitter.UtilProtos.Url.Parameter)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(net.unto.twitter.UtilProtos.Url.Parameter other) { if (other == net.unto.twitter.UtilProtos.Url.Parameter.getDefaultInstance()) return this; if (other.hasName()) { setName(other.getName()); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { 
this.setUnknownFields(unknownFields.build()); return this; } break; } case 10: { setName(input.readString()); break; } case 18: { setValue(input.readString()); break; } } } } // optional string name = 1; public boolean hasName() { return result.hasName(); } public java.lang.String getName() { return result.getName(); } public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasName = true; result.name_ = value; return this; } public Builder clearName() { result.hasName = false; result.name_ = getDefaultInstance().getName(); return this; } // optional string value = 2; public boolean hasValue() { return result.hasValue(); } public java.lang.String getValue() { return result.getValue(); } public Builder setValue(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasValue = true; result.value_ = value; return this; } public Builder clearValue() { result.hasValue = false; result.value_ = getDefaultInstance().getValue(); return this; } } static { net.unto.twitter.UtilProtos.getDescriptor(); } static { net.unto.twitter.UtilProtos.internalForceInit(); } } public static final class Part extends com.google.protobuf.GeneratedMessage { // Use Part.newBuilder() to construct. 
private Part() {} private static final Part defaultInstance = new Part(); public static Part getDefaultInstance() { return defaultInstance; } public Part getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return net.unto.twitter.UtilProtos.internal_static_twitter_Url_Part_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return net.unto.twitter.UtilProtos.internal_static_twitter_Url_Part_fieldAccessorTable; } // optional string name = 1; public static final int NAME_FIELD_NUMBER = 1; private boolean hasName; private java.lang.String name_ = ""; public boolean hasName() { return hasName; } public java.lang.String getName() { return name_; } // optional string filename = 2; public static final int FILENAME_FIELD_NUMBER = 2; private boolean hasFilename; private java.lang.String filename_ = ""; public boolean hasFilename() { return hasFilename; } public java.lang.String getFilename() { return filename_; } // optional string content_type = 3; public static final int CONTENT_TYPE_FIELD_NUMBER = 3; private boolean hasContentType; private java.lang.String contentType_ = ""; public boolean hasContentType() { return hasContentType; } public java.lang.String getContentType() { return contentType_; } // optional string charset = 4; public static final int CHARSET_FIELD_NUMBER = 4; private boolean hasCharset; private java.lang.String charset_ = ""; public boolean hasCharset() { return hasCharset; } public java.lang.String getCharset() { return charset_; } // optional bytes value = 5; public static final int VALUE_FIELD_NUMBER = 5; private boolean hasValue; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; public boolean hasValue() { return hasValue; } public com.google.protobuf.ByteString getValue() { return value_; } public final boolean isInitialized() { return true; } public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (hasName()) { output.writeString(1, getName()); } if (hasFilename()) { output.writeString(2, getFilename()); } if (hasContentType()) { output.writeString(3, getContentType()); } if (hasCharset()) { output.writeString(4, getCharset()); } if (hasValue()) { output.writeBytes(5, getValue()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (hasName()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(1, getName()); } if (hasFilename()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(2, getFilename()); } if (hasContentType()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(3, getContentType()); } if (hasCharset()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(4, getCharset()); } if (hasValue()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(5, getValue()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } public static net.unto.twitter.UtilProtos.Url.Part parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url.Part parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(net.unto.twitter.UtilProtos.Url.Part prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> { private net.unto.twitter.UtilProtos.Url.Part result; // Construct using 
net.unto.twitter.UtilProtos.Url.Part.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); builder.result = new net.unto.twitter.UtilProtos.Url.Part(); return builder; } protected net.unto.twitter.UtilProtos.Url.Part internalGetResult() { return result; } public Builder clear() { if (result == null) { throw new IllegalStateException( "Cannot call clear() after build()."); } result = new net.unto.twitter.UtilProtos.Url.Part(); return this; } public Builder clone() { return create().mergeFrom(result); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return net.unto.twitter.UtilProtos.Url.Part.getDescriptor(); } public net.unto.twitter.UtilProtos.Url.Part getDefaultInstanceForType() { return net.unto.twitter.UtilProtos.Url.Part.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } public net.unto.twitter.UtilProtos.Url.Part build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } private net.unto.twitter.UtilProtos.Url.Part buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return buildPartial(); } public net.unto.twitter.UtilProtos.Url.Part buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } net.unto.twitter.UtilProtos.Url.Part returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof net.unto.twitter.UtilProtos.Url.Part) { return mergeFrom((net.unto.twitter.UtilProtos.Url.Part)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(net.unto.twitter.UtilProtos.Url.Part other) { if (other == net.unto.twitter.UtilProtos.Url.Part.getDefaultInstance()) return this; if (other.hasName()) 
{ setName(other.getName()); } if (other.hasFilename()) { setFilename(other.getFilename()); } if (other.hasContentType()) { setContentType(other.getContentType()); } if (other.hasCharset()) { setCharset(other.getCharset()); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); return this; } break; } case 10: { setName(input.readString()); break; } case 18: { setFilename(input.readString()); break; } case 26: { setContentType(input.readString()); break; } case 34: { setCharset(input.readString()); break; } case 42: { setValue(input.readBytes()); break; } } } } // optional string name = 1; public boolean hasName() { return result.hasName(); } public java.lang.String getName() { return result.getName(); } public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasName = true; result.name_ = value; return this; } public Builder clearName() { result.hasName = false; result.name_ = getDefaultInstance().getName(); return this; } // optional string filename = 2; public boolean hasFilename() { return result.hasFilename(); } public java.lang.String getFilename() { return result.getFilename(); } public Builder setFilename(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasFilename = true; result.filename_ = value; return this; } public Builder clearFilename() { 
result.hasFilename = false; result.filename_ = getDefaultInstance().getFilename(); return this; } // optional string content_type = 3; public boolean hasContentType() { return result.hasContentType(); } public java.lang.String getContentType() { return result.getContentType(); } public Builder setContentType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasContentType = true; result.contentType_ = value; return this; } public Builder clearContentType() { result.hasContentType = false; result.contentType_ = getDefaultInstance().getContentType(); return this; } // optional string charset = 4; public boolean hasCharset() { return result.hasCharset(); } public java.lang.String getCharset() { return result.getCharset(); } public Builder setCharset(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasCharset = true; result.charset_ = value; return this; } public Builder clearCharset() { result.hasCharset = false; result.charset_ = getDefaultInstance().getCharset(); return this; } // optional bytes value = 5; public boolean hasValue() { return result.hasValue(); } public com.google.protobuf.ByteString getValue() { return result.getValue(); } public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } result.hasValue = true; result.value_ = value; return this; } public Builder clearValue() { result.hasValue = false; result.value_ = getDefaultInstance().getValue(); return this; } } static { net.unto.twitter.UtilProtos.getDescriptor(); } static { net.unto.twitter.UtilProtos.internalForceInit(); } } // required .twitter.Url.Scheme scheme = 1; public static final int SCHEME_FIELD_NUMBER = 1; private boolean hasScheme; private net.unto.twitter.UtilProtos.Url.Scheme scheme_ = net.unto.twitter.UtilProtos.Url.Scheme.HTTP; public boolean hasScheme() { return hasScheme; } public net.unto.twitter.UtilProtos.Url.Scheme getScheme() { return 
scheme_; } // required string host = 2; public static final int HOST_FIELD_NUMBER = 2; private boolean hasHost; private java.lang.String host_ = ""; public boolean hasHost() { return hasHost; } public java.lang.String getHost() { return host_; } // required int32 port = 3; public static final int PORT_FIELD_NUMBER = 3; private boolean hasPort; private int port_ = 0; public boolean hasPort() { return hasPort; } public int getPort() { return port_; } // required string path = 4; public static final int PATH_FIELD_NUMBER = 4; private boolean hasPath; private java.lang.String path_ = ""; public boolean hasPath() { return hasPath; } public java.lang.String getPath() { return path_; } // repeated .twitter.Url.Parameter parameters = 5; public static final int PARAMETERS_FIELD_NUMBER = 5; private java.util.List<net.unto.twitter.UtilProtos.Url.Parameter> parameters_ = java.util.Collections.emptyList(); public java.util.List<net.unto.twitter.UtilProtos.Url.Parameter> getParametersList() { return parameters_; } public int getParametersCount() { return parameters_.size(); } public net.unto.twitter.UtilProtos.Url.Parameter getParameters(int index) { return parameters_.get(index); } // repeated .twitter.Url.Part parts = 6; public static final int PARTS_FIELD_NUMBER = 6; private java.util.List<net.unto.twitter.UtilProtos.Url.Part> parts_ = java.util.Collections.emptyList(); public java.util.List<net.unto.twitter.UtilProtos.Url.Part> getPartsList() { return parts_; } public int getPartsCount() { return parts_.size(); } public net.unto.twitter.UtilProtos.Url.Part getParts(int index) { return parts_.get(index); } public final boolean isInitialized() { if (!hasScheme) return false; if (!hasHost) return false; if (!hasPort) return false; if (!hasPath) return false; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (hasScheme()) { output.writeEnum(1, getScheme().getNumber()); } if (hasHost()) { output.writeString(2, 
getHost()); } if (hasPort()) { output.writeInt32(3, getPort()); } if (hasPath()) { output.writeString(4, getPath()); } for (net.unto.twitter.UtilProtos.Url.Parameter element : getParametersList()) { output.writeMessage(5, element); } for (net.unto.twitter.UtilProtos.Url.Part element : getPartsList()) { output.writeMessage(6, element); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (hasScheme()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, getScheme().getNumber()); } if (hasHost()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(2, getHost()); } if (hasPort()) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(3, getPort()); } if (hasPath()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(4, getPath()); } for (net.unto.twitter.UtilProtos.Url.Parameter element : getParametersList()) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, element); } for (net.unto.twitter.UtilProtos.Url.Part element : getPartsList()) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(6, element); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } public static net.unto.twitter.UtilProtos.Url parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
newBuilder().mergeFrom(data).buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input, extensionRegistry) .buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static net.unto.twitter.UtilProtos.Url parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(net.unto.twitter.UtilProtos.Url prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends 
com.google.protobuf.GeneratedMessage.Builder<Builder> { private net.unto.twitter.UtilProtos.Url result; // Construct using net.unto.twitter.UtilProtos.Url.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); builder.result = new net.unto.twitter.UtilProtos.Url(); return builder; } protected net.unto.twitter.UtilProtos.Url internalGetResult() { return result; } public Builder clear() { if (result == null) { throw new IllegalStateException( "Cannot call clear() after build()."); } result = new net.unto.twitter.UtilProtos.Url(); return this; } public Builder clone() { return create().mergeFrom(result); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return net.unto.twitter.UtilProtos.Url.getDescriptor(); } public net.unto.twitter.UtilProtos.Url getDefaultInstanceForType() { return net.unto.twitter.UtilProtos.Url.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } public net.unto.twitter.UtilProtos.Url build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } private net.unto.twitter.UtilProtos.Url buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return buildPartial(); } public net.unto.twitter.UtilProtos.Url buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } if (result.parameters_ != java.util.Collections.EMPTY_LIST) { result.parameters_ = java.util.Collections.unmodifiableList(result.parameters_); } if (result.parts_ != java.util.Collections.EMPTY_LIST) { result.parts_ = java.util.Collections.unmodifiableList(result.parts_); } net.unto.twitter.UtilProtos.Url returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { if 
(other instanceof net.unto.twitter.UtilProtos.Url) { return mergeFrom((net.unto.twitter.UtilProtos.Url)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(net.unto.twitter.UtilProtos.Url other) { if (other == net.unto.twitter.UtilProtos.Url.getDefaultInstance()) return this; if (other.hasScheme()) { setScheme(other.getScheme()); } if (other.hasHost()) { setHost(other.getHost()); } if (other.hasPort()) { setPort(other.getPort()); } if (other.hasPath()) { setPath(other.getPath()); } if (!other.parameters_.isEmpty()) { if (result.parameters_.isEmpty()) { result.parameters_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Parameter>(); } result.parameters_.addAll(other.parameters_); } if (!other.parts_.isEmpty()) { if (result.parts_.isEmpty()) { result.parts_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Part>(); } result.parts_.addAll(other.parts_); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); return this; } break; } case 8: { int rawValue = input.readEnum(); net.unto.twitter.UtilProtos.Url.Scheme value = net.unto.twitter.UtilProtos.Url.Scheme.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { setScheme(value); } break; } case 18: { setHost(input.readString()); break; } case 24: { setPort(input.readInt32()); break; } case 34: { setPath(input.readString()); break; } case 42: { 
net.unto.twitter.UtilProtos.Url.Parameter.Builder subBuilder = net.unto.twitter.UtilProtos.Url.Parameter.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addParameters(subBuilder.buildPartial()); break; } case 50: { net.unto.twitter.UtilProtos.Url.Part.Builder subBuilder = net.unto.twitter.UtilProtos.Url.Part.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addParts(subBuilder.buildPartial()); break; } } } } // required .twitter.Url.Scheme scheme = 1; public boolean hasScheme() { return result.hasScheme(); } public net.unto.twitter.UtilProtos.Url.Scheme getScheme() { return result.getScheme(); } public Builder setScheme(net.unto.twitter.UtilProtos.Url.Scheme value) { if (value == null) { throw new NullPointerException(); } result.hasScheme = true; result.scheme_ = value; return this; } public Builder clearScheme() { result.hasScheme = false; result.scheme_ = net.unto.twitter.UtilProtos.Url.Scheme.HTTP; return this; } // required string host = 2; public boolean hasHost() { return result.hasHost(); } public java.lang.String getHost() { return result.getHost(); } public Builder setHost(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasHost = true; result.host_ = value; return this; } public Builder clearHost() { result.hasHost = false; result.host_ = getDefaultInstance().getHost(); return this; } // required int32 port = 3; public boolean hasPort() { return result.hasPort(); } public int getPort() { return result.getPort(); } public Builder setPort(int value) { result.hasPort = true; result.port_ = value; return this; } public Builder clearPort() { result.hasPort = false; result.port_ = 0; return this; } // required string path = 4; public boolean hasPath() { return result.hasPath(); } public java.lang.String getPath() { return result.getPath(); } public Builder setPath(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasPath = true; result.path_ = value; 
return this; } public Builder clearPath() { result.hasPath = false; result.path_ = getDefaultInstance().getPath(); return this; } // repeated .twitter.Url.Parameter parameters = 5; public java.util.List<net.unto.twitter.UtilProtos.Url.Parameter> getParametersList() { return java.util.Collections.unmodifiableList(result.parameters_); } public int getParametersCount() { return result.getParametersCount(); } public net.unto.twitter.UtilProtos.Url.Parameter getParameters(int index) { return result.getParameters(index); } public Builder setParameters(int index, net.unto.twitter.UtilProtos.Url.Parameter value) { if (value == null) { throw new NullPointerException(); } result.parameters_.set(index, value); return this; } public Builder setParameters(int index, net.unto.twitter.UtilProtos.Url.Parameter.Builder builderForValue) { result.parameters_.set(index, builderForValue.build()); return this; } public Builder addParameters(net.unto.twitter.UtilProtos.Url.Parameter value) { if (value == null) { throw new NullPointerException(); } if (result.parameters_.isEmpty()) { result.parameters_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Parameter>(); } result.parameters_.add(value); return this; } public Builder addParameters(net.unto.twitter.UtilProtos.Url.Parameter.Builder builderForValue) { if (result.parameters_.isEmpty()) { result.parameters_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Parameter>(); } result.parameters_.add(builderForValue.build()); return this; } public Builder addAllParameters( java.lang.Iterable<? 
extends net.unto.twitter.UtilProtos.Url.Parameter> values) { if (result.parameters_.isEmpty()) { result.parameters_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Parameter>(); } super.addAll(values, result.parameters_); return this; } public Builder clearParameters() { result.parameters_ = java.util.Collections.emptyList(); return this; } // repeated .twitter.Url.Part parts = 6; public java.util.List<net.unto.twitter.UtilProtos.Url.Part> getPartsList() { return java.util.Collections.unmodifiableList(result.parts_); } public int getPartsCount() { return result.getPartsCount(); } public net.unto.twitter.UtilProtos.Url.Part getParts(int index) { return result.getParts(index); } public Builder setParts(int index, net.unto.twitter.UtilProtos.Url.Part value) { if (value == null) { throw new NullPointerException(); } result.parts_.set(index, value); return this; } public Builder setParts(int index, net.unto.twitter.UtilProtos.Url.Part.Builder builderForValue) { result.parts_.set(index, builderForValue.build()); return this; } public Builder addParts(net.unto.twitter.UtilProtos.Url.Part value) { if (value == null) { throw new NullPointerException(); } if (result.parts_.isEmpty()) { result.parts_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Part>(); } result.parts_.add(value); return this; } public Builder addParts(net.unto.twitter.UtilProtos.Url.Part.Builder builderForValue) { if (result.parts_.isEmpty()) { result.parts_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Part>(); } result.parts_.add(builderForValue.build()); return this; } public Builder addAllParts( java.lang.Iterable<? 
extends net.unto.twitter.UtilProtos.Url.Part> values) { if (result.parts_.isEmpty()) { result.parts_ = new java.util.ArrayList<net.unto.twitter.UtilProtos.Url.Part>(); } super.addAll(values, result.parts_); return this; } public Builder clearParts() { result.parts_ = java.util.Collections.emptyList(); return this; } } static { net.unto.twitter.UtilProtos.getDescriptor(); } static { net.unto.twitter.UtilProtos.internalForceInit(); } } private static com.google.protobuf.Descriptors.Descriptor internal_static_twitter_Url_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_twitter_Url_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_twitter_Url_Parameter_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_twitter_Url_Parameter_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_twitter_Url_Part_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_twitter_Url_Part_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\nutil.proto\022\007twitter\"\311\002\n\003Url\022#\n\006scheme\030" + "\001 \002(\0162\023.twitter.Url.Scheme\022\014\n\004host\030\002 \002(\t" + "\022\014\n\004port\030\003 \002(\005\022\014\n\004path\030\004 \002(\t\022*\n\nparamete" + "rs\030\005 \003(\0132\026.twitter.Url.Parameter\022 \n\005part" + "s\030\006 \003(\0132\021.twitter.Url.Part\032(\n\tParameter\022" + "\014\n\004name\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\032\\\n\004Part\022\014\n\004" + "name\030\001 \001(\t\022\020\n\010filename\030\002 \001(\t\022\024\n\014content_" + "type\030\003 \001(\t\022\017\n\007charset\030\004 \001(\t\022\r\n\005value\030\005 \001" + 
"(\014\"\035\n\006Scheme\022\010\n\004HTTP\020\000\022\t\n\005HTTPS\020\001B\036\n\020net" + ".unto.twitterB\nUtilProtos" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_twitter_Url_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_twitter_Url_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_twitter_Url_descriptor, new java.lang.String[] { "Scheme", "Host", "Port", "Path", "Parameters", "Parts", }, net.unto.twitter.UtilProtos.Url.class, net.unto.twitter.UtilProtos.Url.Builder.class); internal_static_twitter_Url_Parameter_descriptor = internal_static_twitter_Url_descriptor.getNestedTypes().get(0); internal_static_twitter_Url_Parameter_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_twitter_Url_Parameter_descriptor, new java.lang.String[] { "Name", "Value", }, net.unto.twitter.UtilProtos.Url.Parameter.class, net.unto.twitter.UtilProtos.Url.Parameter.Builder.class); internal_static_twitter_Url_Part_descriptor = internal_static_twitter_Url_descriptor.getNestedTypes().get(1); internal_static_twitter_Url_Part_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_twitter_Url_Part_descriptor, new java.lang.String[] { "Name", "Filename", "ContentType", "Charset", "Value", }, net.unto.twitter.UtilProtos.Url.Part.class, net.unto.twitter.UtilProtos.Url.Part.Builder.class); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } public static void internalForceInit() {} }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.json;

import static org.junit.Assert.assertEquals;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;

import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.record.NullSuppression;
import org.apache.nifi.schema.access.SchemaNameAsAttribute;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.RecordSet;
import org.apache.nifi.serialization.record.SerializedForm;
import org.junit.Test;
import org.mockito.Mockito;

/**
 * Tests for {@code WriteJsonResult}: data-type rendering, serialized-form
 * passthrough, null/missing-field suppression, and output grouping.
 */
public class TestWriteJsonResult {

    /**
     * Writes one record containing a value for every {@link RecordFieldType}
     * and compares the JSON against a golden file on disk.
     */
    @Test
    public void testDataTypes() throws IOException, ParseException {
        final List<RecordField> fields = new ArrayList<>();
        for (final RecordFieldType fieldType : RecordFieldType.values()) {
            if (fieldType == RecordFieldType.CHOICE) {
                // CHOICE needs an explicit list of candidate types.
                final List<DataType> possibleTypes = new ArrayList<>();
                possibleTypes.add(RecordFieldType.INT.getDataType());
                possibleTypes.add(RecordFieldType.LONG.getDataType());
                fields.add(new RecordField(fieldType.name().toLowerCase(), fieldType.getChoiceDataType(possibleTypes)));
            } else if (fieldType == RecordFieldType.MAP) {
                // MAP needs a value type; keys are always Strings.
                fields.add(new RecordField(fieldType.name().toLowerCase(), fieldType.getMapDataType(RecordFieldType.INT.getDataType())));
            } else {
                fields.add(new RecordField(fieldType.name().toLowerCase(), fieldType.getDataType()));
            }
        }
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();

        // Fixed instant so date/time/timestamp render deterministically (GMT).
        final DateFormat df = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss.SSS");
        df.setTimeZone(TimeZone.getTimeZone("gmt"));
        final long time = df.parse("2017/01/01 17:00:00.000").getTime();

        final Map<String, Object> map = new LinkedHashMap<>();
        map.put("height", 48);
        map.put("width", 96);

        final Map<String, Object> valueMap = new LinkedHashMap<>();
        valueMap.put("string", "string");
        valueMap.put("boolean", true);
        valueMap.put("byte", (byte) 1);
        valueMap.put("char", 'c');
        valueMap.put("short", (short) 8);
        valueMap.put("int", 9);
        valueMap.put("bigint", BigInteger.valueOf(8L));
        valueMap.put("long", 8L);
        valueMap.put("float", 8.0F);
        valueMap.put("double", 8.0D);
        valueMap.put("date", new Date(time));
        valueMap.put("time", new Time(time));
        valueMap.put("timestamp", new Timestamp(time));
        valueMap.put("record", null);
        valueMap.put("array", null);
        valueMap.put("choice", 48L);
        valueMap.put("map", map);

        final Record record = new MapRecord(schema, valueMap);
        final RecordSet rs = RecordSet.of(schema, record);

        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, true,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY,
            RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat())) {
            writer.write(rs);
        }

        final String output = baos.toString();
        final String expected = new String(Files.readAllBytes(Paths.get("src/test/resources/json/output/dataTypes.json")));
        assertEquals(expected, output);
    }

    /**
     * When a record carries a pre-serialized JSON form, the writer should emit
     * that form verbatim instead of re-serializing the record's values.
     */
    @Test
    public void testWriteSerializedForm() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("age", RecordFieldType.INT.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final Map<String, Object> values1 = new HashMap<>();
        values1.put("name", "John Doe");
        values1.put("age", 42);
        final String serialized1 = "{ \"name\": \"John Doe\", \"age\": 42 }";
        final SerializedForm serializedForm1 = SerializedForm.of(serialized1, "application/json");
        final Record record1 = new MapRecord(schema, values1, serializedForm1);

        final Map<String, Object> values2 = new HashMap<>();
        values2.put("name", "Jane Doe");
        values2.put("age", 43);
        final String serialized2 = "{ \"name\": \"Jane Doe\", \"age\": 43 }";
        final SerializedForm serializedForm2 = SerializedForm.of(serialized2, "application/json");
        // BUG FIX: record2 was previously built from values1 (copy-paste error),
        // pairing Jane Doe's serialized form with John Doe's values. The test
        // passed only because the writer ignores the values when a serialized
        // form is present; the record is now internally consistent.
        final Record record2 = new MapRecord(schema, values2, serializedForm2);

        final RecordSet rs = RecordSet.of(schema, record1, record2);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, true,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY,
            RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat())) {
            writer.write(rs);
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[ " + serialized1 + ", " + serialized2 + " ]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /**
     * With null date/time/timestamp formats, temporal values are written as
     * raw epoch-millisecond numbers.
     */
    @Test
    public void testTimestampWithNullFormat() throws IOException {
        final Map<String, Object> values = new HashMap<>();
        values.put("timestamp", new java.sql.Timestamp(37293723L));
        values.put("time", new java.sql.Time(37293723L));
        values.put("date", new java.sql.Date(37293723L));

        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("timestamp", RecordFieldType.TIMESTAMP.getDataType()));
        fields.add(new RecordField("time", RecordFieldType.TIME.getDataType()));
        fields.add(new RecordField("date", RecordFieldType.DATE.getDataType()));

        final RecordSchema schema = new SimpleRecordSchema(fields);
        final Record record = new MapRecord(schema, values);
        final RecordSet rs = RecordSet.of(schema, record);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.write(rs);
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[{\"timestamp\":37293723,\"time\":37293723,\"date\":37293723}]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /** writeRecord: values not present in the schema are dropped. */
    @Test
    public void testExtraFieldInWriteRecord() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final Map<String, Object> values = new HashMap<>();
        values.put("id", "1");
        values.put("name", "John");
        final Record record = new MapRecord(schema, values);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.writeRecord(record);
            writer.finishRecordSet();
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[{\"id\":\"1\"}]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /** writeRawRecord: values outside the schema are still emitted. */
    @Test
    public void testExtraFieldInWriteRawRecord() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        // LinkedHashMap keeps insertion order so the expected JSON is stable.
        final Map<String, Object> values = new LinkedHashMap<>();
        values.put("id", "1");
        values.put("name", "John");
        final Record record = new MapRecord(schema, values);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.writeRawRecord(record);
            writer.finishRecordSet();
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[{\"id\":\"1\",\"name\":\"John\"}]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /** writeRecord: schema fields absent from the record are written as null. */
    @Test
    public void testMissingFieldInWriteRecord() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final Map<String, Object> values = new LinkedHashMap<>();
        values.put("id", "1");
        final Record record = new MapRecord(schema, values);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.writeRecord(record);
            writer.finishRecordSet();
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[{\"id\":\"1\",\"name\":null}]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /** writeRawRecord: schema fields absent from the record are simply omitted. */
    @Test
    public void testMissingFieldInWriteRawRecord() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final Map<String, Object> values = new LinkedHashMap<>();
        values.put("id", "1");
        final Record record = new MapRecord(schema, values);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.writeRawRecord(record);
            writer.finishRecordSet();
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[{\"id\":\"1\"}]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /** writeRecord with both a missing schema field and an extra record value. */
    @Test
    public void testMissingAndExtraFieldInWriteRecord() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final Map<String, Object> values = new LinkedHashMap<>();
        values.put("id", "1");
        values.put("dob", "1/1/1970");
        final Record record = new MapRecord(schema, values);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.writeRecord(record);
            writer.finishRecordSet();
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[{\"id\":\"1\",\"name\":null}]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /** writeRawRecord with both a missing schema field and an extra record value. */
    @Test
    public void testMissingAndExtraFieldInWriteRawRecord() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final Map<String, Object> values = new LinkedHashMap<>();
        values.put("id", "1");
        values.put("dob", "1/1/1970");
        final Record record = new MapRecord(schema, values);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.writeRawRecord(record);
            writer.finishRecordSet();
        }

        final byte[] data = baos.toByteArray();

        final String expected = "[{\"id\":\"1\",\"dob\":\"1/1/1970\"}]";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }

    /**
     * Exercises every {@link NullSuppression} mode against both a missing
     * field and an explicitly-null field.
     */
    @Test
    public void testNullSuppression() throws IOException {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        final Map<String, Object> values = new LinkedHashMap<>();
        values.put("id", "1");
        final Record recordWithMissingName = new MapRecord(schema, values);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();

        // Missing field, NEVER_SUPPRESS: emitted as an explicit null.
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.write(recordWithMissingName);
            writer.finishRecordSet();
        }
        assertEquals("[{\"id\":\"1\",\"name\":null}]", new String(baos.toByteArray(), StandardCharsets.UTF_8));

        // Missing field, ALWAYS_SUPPRESS: omitted.
        baos.reset();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.ALWAYS_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.write(recordWithMissingName);
            writer.finishRecordSet();
        }
        assertEquals("[{\"id\":\"1\"}]", new String(baos.toByteArray(), StandardCharsets.UTF_8));

        // Missing field, SUPPRESS_MISSING: omitted.
        baos.reset();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.SUPPRESS_MISSING, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.write(recordWithMissingName);
            writer.finishRecordSet();
        }
        assertEquals("[{\"id\":\"1\"}]", new String(baos.toByteArray(), StandardCharsets.UTF_8));

        // set an explicit null value
        values.put("name", null);
        final Record recordWithNullValue = new MapRecord(schema, values);

        // Explicit null, NEVER_SUPPRESS: emitted as null.
        baos.reset();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.write(recordWithNullValue);
            writer.finishRecordSet();
        }
        assertEquals("[{\"id\":\"1\",\"name\":null}]", new String(baos.toByteArray(), StandardCharsets.UTF_8));

        // Explicit null, ALWAYS_SUPPRESS: omitted.
        baos.reset();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.ALWAYS_SUPPRESS, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.write(recordWithNullValue);
            writer.finishRecordSet();
        }
        assertEquals("[{\"id\":\"1\"}]", new String(baos.toByteArray(), StandardCharsets.UTF_8));

        // Explicit null, SUPPRESS_MISSING: present (it is null, not missing).
        baos.reset();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.SUPPRESS_MISSING, OutputGrouping.OUTPUT_ARRAY, null, null, null)) {
            writer.beginRecordSet();
            writer.write(recordWithNullValue);
            writer.finishRecordSet();
        }
        assertEquals("[{\"id\":\"1\",\"name\":null}]", new String(baos.toByteArray(), StandardCharsets.UTF_8));
    }

    /**
     * OUTPUT_ONELINE grouping: one JSON object per line, newline-separated,
     * with no enclosing array.
     */
    @Test
    public void testOnelineOutput() throws IOException {
        final Map<String, Object> values1 = new HashMap<>();
        values1.put("timestamp", new java.sql.Timestamp(37293723L));
        values1.put("time", new java.sql.Time(37293723L));
        values1.put("date", new java.sql.Date(37293723L));

        final List<RecordField> fields1 = new ArrayList<>();
        fields1.add(new RecordField("timestamp", RecordFieldType.TIMESTAMP.getDataType()));
        fields1.add(new RecordField("time", RecordFieldType.TIME.getDataType()));
        fields1.add(new RecordField("date", RecordFieldType.DATE.getDataType()));

        final RecordSchema schema = new SimpleRecordSchema(fields1);
        final Record record1 = new MapRecord(schema, values1);

        final Map<String, Object> values2 = new HashMap<>();
        values2.put("timestamp", new java.sql.Timestamp(37293999L));
        values2.put("time", new java.sql.Time(37293999L));
        values2.put("date", new java.sql.Date(37293999L));
        final Record record2 = new MapRecord(schema, values2);

        final RecordSet rs = RecordSet.of(schema, record1, record2);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (final WriteJsonResult writer = new WriteJsonResult(Mockito.mock(ComponentLog.class), schema, new SchemaNameAsAttribute(), baos, false,
            NullSuppression.NEVER_SUPPRESS, OutputGrouping.OUTPUT_ONELINE, null, null, null)) {
            writer.write(rs);
        }

        final byte[] data = baos.toByteArray();

        final String expected = "{\"timestamp\":37293723,\"time\":37293723,\"date\":37293723}\n{\"timestamp\":37293999,\"time\":37293999,\"date\":37293999}";
        final String output = new String(data, StandardCharsets.UTF_8);
        assertEquals(expected, output);
    }
}
// SPDX-License-Identifier: BSD-3-Clause
// Copyright (c) 2004 Brian Wellington (bwelling@xbill.org)
package org.xbill.DNS;

import java.io.IOException;
import java.net.Inet6Address;
import java.net.InetAddress;
import org.xbill.DNS.utils.base64;

/**
 * IPsec Keying Material (RFC 4025)
 *
 * <p>Stores an IPsec public key (and optionally a gateway) in DNS. The
 * rdata layout is: precedence (1 byte), gateway type (1 byte), algorithm
 * (1 byte), a gateway whose wire form depends on the gateway type, and an
 * optional base64-encoded key.
 *
 * @author Brian Wellington
 * @see <a href="https://tools.ietf.org/html/rfc4025">RFC 4025: A Method for Storing IPsec Keying
 *     Material in DNS</a>
 */
public class IPSECKEYRecord extends Record {

  /**
   * Algorithm types for IPSECKEY RRs as defined in <a
   * href="https://www.iana.org/assignments/ipseckey-rr-parameters/ipseckey-rr-parameters.xhtml#ipseckey-rr-parameters-1">IPSECKEY
   * Resource Record Parameters</a>.
   */
  public static class Algorithm {
    // Constant holder only; never instantiated.
    private Algorithm() {}

    /** A DSA key is present, in the format defined in [RFC2536] */
    public static final int DSA = 1;

    /** A RSA key is present, in the format defined in [RFC3110] */
    public static final int RSA = 2;

    /** An ECDSA key is present, in the format defined in [RFC6605] */
    public static final int ECDSA = 3;
  }

  /**
   * Gateway types for IPSECKEY RRs as defined in <a
   * href="https://www.iana.org/assignments/ipseckey-rr-parameters/ipseckey-rr-parameters.xhtml#ipseckey-rr-parameters-2">IPSECKEY
   * Resource Record Parameters</a>.
   */
  public static class Gateway {
    // Constant holder only; never instantiated.
    private Gateway() {}

    /** No gateway is present */
    public static final int None = 0;

    /** A 4-byte IPv4 address is present */
    public static final int IPv4 = 1;

    /** A 16-byte IPv6 address is present */
    public static final int IPv6 = 2;

    /** A wire-encoded domain name is present */
    public static final int Name = 3;
  }

  private int precedence;
  private int gatewayType;
  private int algorithmType;
  // Runtime type depends on gatewayType: null, InetAddress, or Name.
  private Object gateway;
  private byte[] key;

  // No-arg constructor used by the Record factory when parsing.
  IPSECKEYRecord() {}

  /**
   * Creates an IPSECKEY Record from the given data.
   *
   * @param precedence The record's precedence.
   * @param gatewayType The record's gateway type.
   * @param algorithmType The record's algorithm type.
   * @param gateway The record's gateway; must be an {@code InetAddress} for the
   *     IPv4/IPv6 gateway types, a {@code Name} for the Name type, and is ignored
   *     (forced to null) for type None.
   * @param key The record's public key.
   * @throws IllegalArgumentException if the gateway's runtime type does not match
   *     {@code gatewayType}, or if {@code gatewayType} is out of range.
   */
  public IPSECKEYRecord(
      Name name,
      int dclass,
      long ttl,
      int precedence,
      int gatewayType,
      int algorithmType,
      Object gateway,
      byte[] key) {
    super(name, Type.IPSECKEY, dclass, ttl);
    this.precedence = checkU8("precedence", precedence);
    this.gatewayType = checkU8("gatewayType", gatewayType);
    this.algorithmType = checkU8("algorithmType", algorithmType);
    // Validate that the gateway object's type matches the declared gateway type.
    switch (gatewayType) {
      case Gateway.None:
        this.gateway = null;
        break;
      case Gateway.IPv4:
        if (!(gateway instanceof InetAddress)) {
          throw new IllegalArgumentException("\"gateway\" must be an IPv4 address");
        }
        this.gateway = gateway;
        break;
      case Gateway.IPv6:
        if (!(gateway instanceof Inet6Address)) {
          throw new IllegalArgumentException("\"gateway\" must be an IPv6 address");
        }
        this.gateway = gateway;
        break;
      case Gateway.Name:
        if (!(gateway instanceof Name)) {
          throw new IllegalArgumentException("\"gateway\" must be a DNS name");
        }
        this.gateway = checkName("gateway", (Name) gateway);
        break;
      default:
        throw new IllegalArgumentException("\"gatewayType\" must be between 0 and 3");
    }
    this.key = key;
  }

  // Parses the rdata from its binary wire form.
  @Override
  protected void rrFromWire(DNSInput in) throws IOException {
    precedence = in.readU8();
    gatewayType = in.readU8();
    algorithmType = in.readU8();
    switch (gatewayType) {
      case Gateway.None:
        gateway = null;
        break;
      case Gateway.IPv4:
        gateway = InetAddress.getByAddress(in.readByteArray(4));
        break;
      case Gateway.IPv6:
        gateway = InetAddress.getByAddress(in.readByteArray(16));
        break;
      case Gateway.Name:
        gateway = new Name(in);
        break;
      default:
        throw new WireParseException("invalid gateway type");
    }
    // The key is optional; only read it if bytes remain in the rdata.
    if (in.remaining() > 0) {
      key = in.readByteArray();
    }
  }

  // Parses the rdata from its zone-file (presentation) form.
  @Override
  protected void rdataFromString(Tokenizer st, Name origin) throws IOException {
    precedence = st.getUInt8();
    gatewayType = st.getUInt8();
    algorithmType = st.getUInt8();
    switch (gatewayType) {
      case Gateway.None:
        // A lone "." is the textual placeholder for "no gateway".
        String s = st.getString();
        if (!s.equals(".")) {
          throw new TextParseException("invalid gateway format");
        }
        gateway = null;
        break;
      case Gateway.IPv4:
        gateway = st.getAddress(Address.IPv4);
        break;
      case Gateway.IPv6:
        gateway = st.getAddress(Address.IPv6);
        break;
      case Gateway.Name:
        gateway = st.getName(origin);
        break;
      default:
        throw new WireParseException("invalid gateway type");
    }
    // false: the base64 key may be absent.
    key = st.getBase64(false);
  }

  // Renders the rdata in presentation format (the inverse of rdataFromString).
  @Override
  protected String rrToString() {
    StringBuilder sb = new StringBuilder();
    sb.append(precedence);
    sb.append(" ");
    sb.append(gatewayType);
    sb.append(" ");
    sb.append(algorithmType);
    sb.append(" ");
    switch (gatewayType) {
      case Gateway.None:
        sb.append(".");
        break;
      case Gateway.IPv4:
      case Gateway.IPv6:
        InetAddress gatewayAddr = (InetAddress) gateway;
        sb.append(gatewayAddr.getHostAddress());
        break;
      case Gateway.Name:
        sb.append(gateway);
        break;
    }
    if (key != null) {
      sb.append(" ");
      sb.append(base64.toString(key));
    }
    return sb.toString();
  }

  /** Returns the record's precedence. */
  public int getPrecedence() {
    return precedence;
  }

  /** Returns the record's gateway type. */
  public int getGatewayType() {
    return gatewayType;
  }

  /** Returns the record's algorithm type. */
  public int getAlgorithmType() {
    return algorithmType;
  }

  /** Returns the record's gateway. */
  public Object getGateway() {
    return gateway;
  }

  /** Returns the record's public key */
  public byte[] getKey() {
    return key;
  }

  // Serializes the rdata to binary wire form (the inverse of rrFromWire).
  @Override
  protected void rrToWire(DNSOutput out, Compression c, boolean canonical) {
    out.writeU8(precedence);
    out.writeU8(gatewayType);
    out.writeU8(algorithmType);
    switch (gatewayType) {
      case Gateway.None:
        break;
      case Gateway.IPv4:
      case Gateway.IPv6:
        InetAddress gatewayAddr = (InetAddress) gateway;
        out.writeByteArray(gatewayAddr.getAddress());
        break;
      case Gateway.Name:
        Name gatewayName = (Name) gateway;
        // null compression: gateway names in rdata are never compressed (RFC 4025).
        gatewayName.toWire(out, null, canonical);
        break;
    }
    if (key != null) {
      out.writeByteArray(key);
    }
  }
}
// Copyright (c) 2002, 2006, 2010 Per M.A. Bothner and Brainfood Inc.
// This is free software; for terms and warranty disclaimer see ./COPYING.

package gnu.kawa.xml;
import gnu.lists.*;
import gnu.xml.*;
import java.io.*;
import gnu.mapping.*;
import java.util.Vector;

/** Output as an Http response.
 * Used for both CGI scripts (default) and HttpServletResponse (future).
 */

public class HttpPrinter extends FilterConsumer
{
  /** Collected header name/value pairs, stored flat: name at even index,
   * value at the following odd index. */
  Vector headers = new Vector();

  /** Used as output buffer if base is null. */
  StringBuilder sbuf = new StringBuilder(100);

  /** Header name of the attribute currently being collected, if any. */
  Object currentHeader;

  /** 1 - implicit; 2: explicit. */
  private int seenStartDocument;

  /** Value of the Content-type header, once one has been added. */
  protected String sawContentType;

  /** Difference between number of startElement and endElement calls so far. */
  private int elementNesting;

  protected OutputStream ostream;
  OutPort writer;

  public HttpPrinter (OutputStream out)
  {
    super(null);
    ostream = out;
  }

  public HttpPrinter (OutPort out)
  {
    super(null);
    writer = out;
  }

  public static HttpPrinter make (OutPort out)
  {
    return new HttpPrinter(out);
  }

  /** Write a string directly to the underlying port or stream, unescaped. */
  private void writeRaw (String str) throws java.io.IOException
  {
    if (writer != null)
      writer.write(str);
    else
      {
        // Raw byte stream: each char is truncated to 8 bits.
        int len = str.length();
        for (int i = 0; i < len; i++)
          ostream.write((byte) str.charAt(i));
      }
  }

  /** Called before the first content node: force a Content-type header and
   * flush the header section. */
  protected void beforeNode ()
  {
    if (sawContentType == null)
      addHeader("Content-type", "text/xml");
    beginData();
  }

  public void printHeader (String label, String value) throws java.io.IOException
  {
    writeRaw(label);
    writeRaw(": ");
    writeRaw(value); // FIXME - need to quote?
    writeRaw("\n");
  }

  /** Emit all collected headers followed by the blank separator line. */
  public void printHeaders () throws java.io.IOException
  {
    int num = headers.size();
    for (int i = 0; i < num; i += 2)
      printHeader(headers.elementAt(i).toString(),
                  headers.elementAt(i + 1).toString());
    writeRaw("\n");
  }

  public void addHeader (String label, String value)
  {
    if (label.equalsIgnoreCase("Content-type"))
      sawContentType = value;
    headers.addElement(label);
    headers.addElement(value);
  }

  public void startAttribute (Object attrType)
  {
    // An attribute seen before any content is treated as a response header.
    if (base == null)
      currentHeader = attrType;
    else
      base.startAttribute(attrType);
  }

  public void endAttribute ()
  {
    if (currentHeader != null)
      {
        addHeader(currentHeader.toString(), sbuf.toString());
        sbuf.setLength(0);
        currentHeader = null;
      }
    else
      base.endAttribute();
  }

  boolean seenXmlHeader;

  /** Flush headers and lazily create the XML output consumer, then forward
   * any buffered text to it. */
  public void beginData ()
  {
    if (base == null)
      {
        if (sawContentType == null)
          addHeader("Content-type", "text/plain");
        if (writer == null)
          writer = new OutPort(ostream); // FIXME use encoding.
        // Pick the XMLPrinter output style matching the content type.
        String style = null;
        if ("text/html".equalsIgnoreCase(sawContentType))
          style = "html";
        else if ("application/xhtml+xml".equalsIgnoreCase(sawContentType))
          style = "xhtml";
        else if ("text/plain".equalsIgnoreCase(sawContentType))
          style = "plain";
        base = XMLPrinter.make(writer, style);
        if (seenStartDocument == 0)
          {
            base.startDocument();
            seenStartDocument = 1;
          }
        try
          {
            printHeaders();
          }
        catch (Throwable ex)
          {
            // Preserve the original cause and stack trace instead of
            // flattening the failure to a message string.
            throw new RuntimeException(ex);
          }
      }
    /* #ifdef use:java.lang.CharSequence */
    append(sbuf);
    /* #else */
    // write(sbuf.toString());
    /* #endif */
    sbuf.setLength(0);
  }

  public void startElement (Object type)
  {
    if (sawContentType == null)
      {
        // Infer a content type from the first element.
        // NOTE(review): "text/xhtml" is not a registered MIME type
        // (application/xhtml+xml is); confirm before changing, since existing
        // clients may depend on the current value.
        String mimeType;
        if (! seenXmlHeader)
          mimeType = "text/html";
        else if (type instanceof Symbol
                 && "html".equals(((Symbol) type).getLocalPart()))
          mimeType = "text/xhtml";
        else
          mimeType = "text/xml";
        addHeader("Content-type", mimeType);
      }
    beginData();
    base.startElement(type);
    elementNesting++;
  }

  public void endElement ()
  {
    super.endElement();
    elementNesting--;
    // For an implicit document, close it once the root element is complete.
    if (elementNesting == 0 && seenStartDocument == 1)
      endDocument();
  }

  public void writeObject (Object v)
  {
    if (v instanceof Consumable && ! (v instanceof UnescapedData))
      ((Consumable) v).consume(this);
    else
      {
        beginData();
        super.writeObject(v);
      }
  }

  /* #ifdef use:java.lang.CharSequence */
  public void write (CharSequence str, int start, int length)
  /* #else */
  // public void write (String str, int start, int length)
  /* #endif */
  {
    if (base == null)
      sbuf.append(str, start, start+length);
    else
      base.write(str, start, length);
  }

  public void write (char[] buf, int off, int len)
  {
    if (base == null)
      sbuf.append(buf, off, len);
    else
      base.write(buf, off, len);
  }

  public void startDocument ()
  {
    if (base != null)
      base.startDocument();
    seenStartDocument = 2;
  }

  public void endDocument ()
  {
    if (base != null)
      base.endDocument();
    try
      {
        if (sawContentType == null)
          addHeader("Content-type", "text/plain");
        if (sbuf.length() > 0)
          {
            String str = sbuf.toString();
            sbuf.setLength(0);
            if (writer != null)
              writer.write(str);
            else
              ostream.write(str.getBytes());
          }
        // else ???;
        if (writer != null)
          writer.close();
        if (ostream != null)
          ostream.flush();
      }
    catch (Throwable ex)
      {
        // Deliberately best-effort: the response is over, so flush/close
        // failures are ignored here.
      }
  }

  /** Try to reset (delete) any response generated so far.
   * @param headersAlso if response headers should also be reset.
   * @return true on success, false if it's too late.
   */
  public boolean reset (boolean headersAlso)
  {
    if (headersAlso)
      {
        headers.clear();
        sawContentType = null;
        currentHeader = null;
        elementNesting = 0;
      }
    sbuf.setLength(0);
    base = null;
    boolean ok = true;
    if (ostream != null)
      {
        // Bytes already written to the raw stream cannot be taken back.
        ok = writer == null;
        writer = null;
      }
    return ok;
  }
}
/* * Copyright 2014-15 Dilip Kumar * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dilipkumarg.qb; import org.junit.Before; import org.junit.Test; import com.dilipkumarg.qb.core.JoinType; import com.dilipkumarg.qb.models.SqlQuery; import static org.junit.Assert.assertEquals; /** * @author Dilip Kumar. * @since 2/7/14 */ public class SelectQueryBuilderTest { private QPerson person; private SelectQueryBuilder builder; public SelectQueryBuilderTest() { person = new QPerson(); } @Before public void setUp() { builder = new SelectQueryBuilder(person); } @Test public void testBasicSelect() { SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testSelectWithWhere() { builder.where(person.name.eq("TEST")); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person WHERE person.NAME = ?", sqlQuery.getQuery()); assertEquals(1, sqlQuery.getArgs().length); assertEquals("TEST", sqlQuery.getArgs()[0]); } @Test public void testSelectMultipleWhere() { builder.where(person.name.eq("TEST1")); builder.where(person.lastName.like("TEST2")); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person WHERE person.NAME = ? 
AND person.LAST_NAME LIKE ?", sqlQuery.getQuery()); assertEquals(2, sqlQuery.getArgs().length); assertEquals("TEST1", sqlQuery.getArgs()[0]); assertEquals("TEST2", sqlQuery.getArgs()[1]); } @Test public void testSelectMultipleWheres() { //builder.where(person.name.eq("TEST").and(person.lastName.eq("TEST2"))) builder.where(person.name.eq("TEST1"), person.lastName.like("TEST2")); builder.where(person.age.lt(20)); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person WHERE person.NAME = ? AND person.LAST_NAME LIKE ? AND person.AGE" + " < ?", sqlQuery.getQuery()); assertEquals(3, sqlQuery.getArgs().length); assertEquals("TEST1", sqlQuery.getArgs()[0]); assertEquals("TEST2", sqlQuery.getArgs()[1]); assertEquals(20, sqlQuery.getArgs()[2]); } @Test public void testList() { builder.list(person.name); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT person.NAME FROM PERSON person", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testEmptyList() { builder.list(); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testMultipleList() { builder.list(person.name); builder.list(person.age); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT person.NAME,person.AGE FROM PERSON person", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testListWithWhere() { builder.list(person.name); builder.list(person.age); builder.where(person.name.eq("TEST")); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT person.NAME,person.AGE FROM PERSON person WHERE person.NAME = ?", sqlQuery.getQuery()); assertEquals(1, sqlQuery.getArgs().length); assertEquals("TEST", sqlQuery.getArgs()[0]); } @Test public void testOrderBy() { builder.orderBy(person.age.asc()); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person ORDER BY person.AGE ASC", 
sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testMultipleOrderBy() { builder.orderBy(person.age.asc()); builder.orderBy(person.name.desc()); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person ORDER BY person.AGE ASC,person.NAME DESC", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testSimpleOrderBy() { builder.orderBy(); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testMultipleOrderByInOneShot() { builder.orderBy(person.age.asc(), person.name.desc()); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person ORDER BY person.AGE ASC,person.NAME DESC", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testInvalidOrderBy() { builder.orderBy(person.age.asc(), person.age.desc()); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person ORDER BY person.AGE ASC", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testInvalidOrderByWithMoreArgs() { builder.orderBy(person.age.asc(), person.name.desc(), person.age.desc()); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person ORDER BY person.AGE ASC,person.NAME DESC", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testOrderByWitList() { builder.list(person.name); builder.orderBy(person.age.asc()); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT person.NAME FROM PERSON person ORDER BY person.AGE ASC", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testDistinct() { builder.distinct(); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT DISTINCT * FROM PERSON person", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testJoin() { QPerson 
person1 = new QPerson("p"); builder.join(person1, JoinType.INNER_JOIN, person.name.eq(person1.name)); SqlQuery sqlQuery = builder.build(); assertEquals("SELECT * FROM PERSON person INNER JOIN PERSON p ON (person.NAME = p.NAME)", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testMultipleJoins() { QPerson person1 = new QPerson("p"); QPerson person2 = new QPerson("q"); builder.leftJoin(person1, person.name.eq(person1.name)); builder.rightJoin(person2, person1.lastName.like(person2.lastName)); SqlQuery sqlQuery = builder.build(); assertEquals( "SELECT * FROM PERSON person LEFT JOIN PERSON p ON (person.NAME = p.NAME) RIGHT JOIN PERSON q " + "ON (p.LAST_NAME LIKE q.LAST_NAME)", sqlQuery.getQuery()); assertEquals(0, sqlQuery.getArgs().length); } @Test public void testAll() { SqlQuery sqlQuery = builder.list(person.name) .list(person.age, person.lastName) .where(person.name.eq("TEST")) .orderBy(person.name.desc()) .distinct() .build(); assertEquals("SELECT DISTINCT person.NAME,person.AGE,person.LAST_NAME FROM PERSON person WHERE person.NAME" + " = ? " + "ORDER BY person.NAME DESC", sqlQuery.getQuery()); assertEquals(1, sqlQuery.getArgs().length); assertEquals("TEST", sqlQuery.getArgs()[0]); } }
/** * Copyright (C) 2015-2019 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.as4.attachment; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Enumeration; import javax.activation.DataHandler; import javax.activation.DataSource; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.WillNotClose; import javax.mail.Header; import javax.mail.MessagingException; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMultipart; import org.apache.wss4j.common.ext.Attachment; import org.apache.wss4j.common.util.AttachmentUtils; import com.helger.as4.messaging.domain.MessageHelperMethods; import com.helger.as4.util.AS4ResourceHelper; import com.helger.commons.CGlobal; import com.helger.commons.ValueEnforcer; import com.helger.commons.annotation.Nonempty; import com.helger.commons.http.CHttpHeader; import com.helger.commons.io.IHasInputStream; import com.helger.commons.io.file.FileHelper; import com.helger.commons.io.file.FilenameHelper; import com.helger.commons.io.stream.HasInputStream; import com.helger.commons.io.stream.NonBlockingByteArrayInputStream; import com.helger.commons.io.stream.StreamHelper; import com.helger.commons.mime.IMimeType; import 
com.helger.commons.string.StringHelper;
import com.helger.commons.string.ToStringGenerator;
import com.helger.mail.cte.EContentTransferEncoding;
import com.helger.mail.datasource.InputStreamProviderDataSource;

/**
 * Special WSS4J attachment with an InputStream provider instead of a fixed
 * InputStream<br>
 * Note: cannot be serializable because base class is not serializable and
 * because we're dealing with {@link InputStream}s.
 *
 * @author bayerlma
 * @author Philip Helger
 */
public class WSS4JAttachment extends Attachment
{
  // Resource helper that tracks closeables/temp files created for this attachment
  private final AS4ResourceHelper m_aResHelper;
  // Provider for the (possibly repeatable) payload stream
  private IHasInputStream m_aISP;
  // Content-Transfer-Encoding; defaults to BINARY
  private EContentTransferEncoding m_eCTE = EContentTransferEncoding.BINARY;
  // Optional compression mode; null means uncompressed
  private EAS4CompressionMode m_eCM;
  // Optional explicit charset; null means "use default" (see getCharset)
  private Charset m_aCharset;
  // MIME type of the payload before any compression was applied
  private String m_sUncompressedMimeType;

  /**
   * Constructor.
   *
   * @param aResHelper
   *        Resource helper used to track closeables. May not be
   *        <code>null</code>.
   * @param sMimeType
   *        MIME type of the (uncompressed) attachment. May be
   *        <code>null</code>.
   */
  public WSS4JAttachment (@Nonnull @WillNotClose final AS4ResourceHelper aResHelper, @Nullable final String sMimeType)
  {
    m_aResHelper = ValueEnforcer.notNull (aResHelper, "ResHelper");
    overwriteMimeType (sMimeType);
  }

  /**
   * @return The resource helper provided in the constructor. Never
   *         <code>null</code>.
   */
  @Nonnull
  public final AS4ResourceHelper getResHelper ()
  {
    return m_aResHelper;
  }

  /**
   * Create a random UUID based ID and call {@link #setId(String)}
   */
  public void setUniqueID ()
  {
    setId (MessageHelperMethods.createRandomAttachmentID ());
  }

  /**
   * @deprecated Disabled on purpose - use {@link #overwriteMimeType(String)}
   *             instead, which also updates the uncompressed MIME type and the
   *             Content-Type header.
   */
  @Override
  @Deprecated
  public final void setMimeType (@Nullable final String sMimeType)
  {
    throw new UnsupportedOperationException ();
  }

  /**
   * Set the MIME type, remember it as the uncompressed MIME type and mirror it
   * into the Content-Type MIME header.
   *
   * @param sMimeType
   *        The MIME type to set. May be <code>null</code>.
   */
  public final void overwriteMimeType (@Nullable final String sMimeType)
  {
    super.setMimeType (sMimeType);
    m_sUncompressedMimeType = sMimeType;
    addHeader (AttachmentUtils.MIME_HEADER_CONTENT_TYPE, sMimeType);
  }

  @Override
  public final void addHeader (final String sName, final String sValue)
  {
    super.addHeader (sName, sValue);
  }

  /**
   * @return The MIME type of the uncompressed attachment.
   */
  @Nullable
  public String getUncompressedMimeType ()
  {
    return m_sUncompressedMimeType;
  }

  @Override
  @Nonnull
  public InputStream getSourceStream ()
  {
    return getSourceStream (m_aResHelper);
  }

  /**
   * Get the source stream of the attachment using the provided resource helper.
   * This can be helpful, if the source helper is already out of scope.
   *
   * @param aResourceHelper
   *        The resource helper to use. May not be <code>null</code>.
   * @return A non-<code>null</code> InputStream on the source.
   */
  @Nonnull
  public InputStream getSourceStream (@Nonnull final AS4ResourceHelper aResourceHelper)
  {
    ValueEnforcer.notNull (aResourceHelper, "ResourceHelper");
    // This will e.g. throw an UncheckedIOException if compression is enabled,
    // but the transmitted document is not compressed
    final InputStream ret = m_aISP.getInputStream ();
    if (ret == null)
      throw new IllegalStateException ("Got no InputStream from " + m_aISP);
    // Register for closing with the helper, not with this object
    aResourceHelper.addCloseable (ret);
    return ret;
  }

  /**
   * @deprecated Do not use this, because it can be opened only once. Use
   *             {@link #setSourceStreamProvider(IHasInputStream)} instead.
   */
  @Override
  @Deprecated
  public void setSourceStream (final InputStream sourceStream)
  {
    throw new UnsupportedOperationException ("Use setSourceStreamProvider instead");
  }

  /**
   * @return The input stream provider. May be <code>null</code> if not yet set.
   */
  @Nullable
  public IHasInputStream getInputStreamProvider ()
  {
    return m_aISP;
  }

  /**
   * @return <code>true</code> if a stream provider is set and it can be read
   *         multiple times.
   */
  public boolean isRepeatable ()
  {
    return m_aISP != null && m_aISP.isReadMultiple ();
  }

  public void setSourceStreamProvider (@Nonnull final IHasInputStream aISP)
  {
    ValueEnforcer.notNull (aISP, "InputStreamProvider");
    m_aISP = aISP;
  }

  /**
   * @return The content transfer encoding to be used. Required for MIME
   *         multipart handling only.
   */
  @Nonnull
  public final EContentTransferEncoding getContentTransferEncoding ()
  {
    return m_eCTE;
  }

  @Nonnull
  public final WSS4JAttachment setContentTransferEncoding (@Nonnull final EContentTransferEncoding eCTE)
  {
    m_eCTE = ValueEnforcer.notNull (eCTE, "CTE");
    return this;
  }

  /**
   * @return The compression mode or <code>null</code> if the attachment is not
   *         compressed.
   */
  @Nullable
  public final EAS4CompressionMode getCompressionMode ()
  {
    return m_eCM;
  }

  public final boolean hasCompressionMode ()
  {
    return m_eCM != null;
  }

  /**
   * Set the compression mode and switch the effective MIME type to the
   * compression MIME type.
   *
   * @param eCM
   *        Compression mode. May not be <code>null</code>.
   * @return this for chaining
   */
  @Nonnull
  public final WSS4JAttachment setCompressionMode (@Nonnull final EAS4CompressionMode eCM)
  {
    ValueEnforcer.notNull (eCM, "CompressionMode");
    m_eCM = eCM;
    // NOTE(review): ValueEnforcer.notNull above already rejects null, so the
    // else branch below is unreachable -- confirm whether a nullable parameter
    // (to clear compression again) was originally intended.
    if (eCM != null)
    {
      // Main MIME type is now the compression type MIME type
      super.setMimeType (eCM.getMimeType ().getAsString ());
    }
    else
    {
      // Main MIME type is the uncompressed one (which may be null)
      super.setMimeType (m_sUncompressedMimeType);
    }
    return this;
  }

  /**
   * @return The charset of the attachment, falling back to ISO-8859-1 if none
   *         was set explicitly. Never <code>null</code>.
   */
  @Nonnull
  public final Charset getCharset ()
  {
    return getCharsetOrDefault (StandardCharsets.ISO_8859_1);
  }

  @Nullable
  public final Charset getCharsetOrDefault (@Nullable final Charset aDefault)
  {
    return m_aCharset != null ? m_aCharset : aDefault;
  }

  public final boolean hasCharset ()
  {
    return m_aCharset != null;
  }

  @Nonnull
  public final WSS4JAttachment setCharset (@Nullable final Charset aCharset)
  {
    m_aCharset = aCharset;
    return this;
  }

  // Wrap the stream provider as a DataSource honoring the configured
  // Content-Transfer-Encoding
  @Nonnull
  private DataSource _getAsDataSource ()
  {
    final InputStreamProviderDataSource aDS = new InputStreamProviderDataSource (m_aISP, getId (), getMimeType ());
    return aDS.getEncodingAware (getContentTransferEncoding ());
  }

  /**
   * Add this attachment as a body part to the provided MIME multipart.
   *
   * @param aMimeMultipart
   *        The multipart to add to. May not be <code>null</code>.
   * @throws MessagingException
   *         In case setting headers or adding the body part fails
   */
  public void addToMimeMultipart (@Nonnull final MimeMultipart aMimeMultipart) throws MessagingException
  {
    ValueEnforcer.notNull (aMimeMultipart, "MimeMultipart");

    final MimeBodyPart aMimeBodyPart = new MimeBodyPart ();

    {
      // According to
      // http://docs.oasis-open.org/wss-m/wss/v1.1.1/os/wss-SwAProfile-v1.1.1-os.html
      // chapter 5.2 the CID must be enclosed in angle brackets
      String sContentID = getId ();
      if (StringHelper.hasText (sContentID))
      {
        if (sContentID.charAt (0) != '<')
          sContentID = '<' + sContentID + '>';
        aMimeBodyPart.setHeader (CHttpHeader.CONTENT_ID, sContentID);
      }
    }

    // !IMPORTANT! DO NOT CHANGE the order of the adding a DH and then the last
    // headers
    // On some tests the datahandler did reset content-type and transfer
    // encoding, so this is now the correct order
    aMimeBodyPart.setDataHandler (new DataHandler (_getAsDataSource ()));

    // After DataHandler!!
    aMimeBodyPart.setHeader (CHttpHeader.CONTENT_TYPE, getMimeType ());
    aMimeBodyPart.setHeader (CHttpHeader.CONTENT_TRANSFER_ENCODING, getContentTransferEncoding ().getID ());

    aMimeMultipart.addBodyPart (aMimeBodyPart);
  }

  @Override
  public String toString ()
  {
    return new ToStringGenerator (this).append ("ID", getId ())
                                       .append ("MimeType", getMimeType ())
                                       .append ("Headers", getHeaders ())
                                       .append ("ResourceManager", m_aResHelper)
                                       .append ("ISP", m_aISP)
                                       .append ("CTE", m_eCTE)
                                       .append ("CM", m_eCM)
                                       .append ("Charset", m_aCharset)
                                       .getToString ();
  }

  // Add the MIME headers that every outgoing attachment must carry
  private static void _addOutgoingHeaders (@Nonnull final WSS4JAttachment aAttachment, @Nonnull final String sFilename)
  {
    // Ensure an ID is present
    if (StringHelper.hasNoText (aAttachment.getId ()))
      aAttachment.setUniqueID ();

    // Set after ID and MimeType!
    aAttachment.addHeader (AttachmentUtils.MIME_HEADER_CONTENT_DESCRIPTION, "Attachment");
    aAttachment.addHeader (AttachmentUtils.MIME_HEADER_CONTENT_DISPOSITION, "attachment; filename=\"" + sFilename + "\"");
    aAttachment.addHeader (AttachmentUtils.MIME_HEADER_CONTENT_ID, "<attachment=" + aAttachment.getId () + '>');
    aAttachment.addHeader (AttachmentUtils.MIME_HEADER_CONTENT_TYPE, aAttachment.getMimeType ());
  }

  /**
   * Shortcut for
   * {@link #createOutgoingFileAttachment(File, String, IMimeType, EAS4CompressionMode, AS4ResourceHelper)}
   * with a <code>null</code> content ID (a random one is created).
   *
   * @param aSrcFile
   *        Source, uncompressed, unencrypted file.
   * @param aMimeType
   *        Original mime type of the file.
   * @param eCompressionMode
   *        Optional compression mode to use. May be <code>null</code>.
   * @param aResHelper
   *        The resource manager to use. May not be <code>null</code>.
   * @return The newly created attachment instance. Never <code>null</code>.
   * @throws IOException
   *         In case something goes wrong during compression
   */
  @Nonnull
  public static WSS4JAttachment createOutgoingFileAttachment (@Nonnull final File aSrcFile,
                                                              @Nonnull final IMimeType aMimeType,
                                                              @Nullable final EAS4CompressionMode eCompressionMode,
                                                              @Nonnull final AS4ResourceHelper aResHelper) throws IOException
  {
    return createOutgoingFileAttachment (aSrcFile, null, aMimeType, eCompressionMode, aResHelper);
  }

  /**
   * Constructor. Performs compression internally.
   *
   * @param aSrcFile
   *        Source, uncompressed, unencrypted file.
   * @param sContentID
   *        Optional content ID or <code>null</code> to create a random one.
   * @param aMimeType
   *        Original mime type of the file.
   * @param eCompressionMode
   *        Optional compression mode to use. May be <code>null</code>.
   * @param aResHelper
   *        The resource manager to use. May not be <code>null</code>.
   * @return The newly created attachment instance. Never <code>null</code>.
   * @throws IOException
   *         In case something goes wrong during compression
   */
  @Nonnull
  public static WSS4JAttachment createOutgoingFileAttachment (@Nonnull final File aSrcFile,
                                                              @Nullable final String sContentID,
                                                              @Nonnull final IMimeType aMimeType,
                                                              @Nullable final EAS4CompressionMode eCompressionMode,
                                                              @Nonnull @WillNotClose final AS4ResourceHelper aResHelper) throws IOException
  {
    ValueEnforcer.notNull (aSrcFile, "File");
    ValueEnforcer.notNull (aMimeType, "MimeType");

    final WSS4JAttachment ret = new WSS4JAttachment (aResHelper, aMimeType.getAsString ());
    ret.setId (sContentID);
    _addOutgoingHeaders (ret, FilenameHelper.getWithoutPath (aSrcFile));

    // If the attachment has an compressionMode do it directly, so that
    // encryption later on works on the compressed content
    File aRealFile;
    if (eCompressionMode != null)
    {
      ret.setCompressionMode (eCompressionMode);

      // Create temporary file with compressed content
      aRealFile = aResHelper.createTempFile ();
      try (final OutputStream aOS = eCompressionMode.getCompressStream (FileHelper.getBufferedOutputStream (aRealFile)))
      {
        StreamHelper.copyInputStreamToOutputStream (FileHelper.getBufferedInputStream (aSrcFile), aOS);
      }
    }
    else
    {
      // No compression - use file as-is
      aRealFile = aSrcFile;
    }

    // Set a stream provider that can be read multiple times (opens a new
    // FileInputStream internally)
    ret.setSourceStreamProvider (HasInputStream.multiple ( () -> FileHelper.getBufferedInputStream (aRealFile)));
    return ret;
  }

  /**
   * Constructor. Performs compression internally.
   *
   * @param aSrcData
   *        Source in-memory data, uncompressed, unencrypted.
   * @param sContentID
   *        Optional content ID or <code>null</code> to create a random one.
   * @param sFilename
   *        Filename of the attachment. May not be <code>null</code>.
   * @param aMimeType
   *        Original mime type of the file. May not be <code>null</code>.
   * @param eCompressionMode
   *        Optional compression mode to use. May be <code>null</code>.
   * @param aResHelper
   *        The resource manager to use. May not be <code>null</code>.
   * @return The newly created attachment instance. Never <code>null</code>.
   * @throws IOException
   *         In case something goes wrong during compression
   */
  @Nonnull
  public static WSS4JAttachment createOutgoingFileAttachment (@Nonnull final byte [] aSrcData,
                                                              @Nullable final String sContentID,
                                                              @Nonnull @Nonempty final String sFilename,
                                                              @Nonnull final IMimeType aMimeType,
                                                              @Nullable final EAS4CompressionMode eCompressionMode,
                                                              @Nonnull final AS4ResourceHelper aResHelper) throws IOException
  {
    ValueEnforcer.notNull (aSrcData, "Data");
    ValueEnforcer.notEmpty (sFilename, "Filename");
    ValueEnforcer.notNull (aMimeType, "MimeType");

    final WSS4JAttachment ret = new WSS4JAttachment (aResHelper, aMimeType.getAsString ());
    ret.setId (sContentID);
    _addOutgoingHeaders (ret, sFilename);

    // If the attachment has an compressionMode do it directly, so that
    // encryption later on works on the compressed content
    if (eCompressionMode != null)
    {
      ret.setCompressionMode (eCompressionMode);

      // Create temporary file with compressed content
      final File aRealFile = aResHelper.createTempFile ();
      try (final OutputStream aOS = eCompressionMode.getCompressStream (FileHelper.getBufferedOutputStream (aRealFile)))
      {
        aOS.write (aSrcData);
      }
      ret.setSourceStreamProvider (HasInputStream.multiple ( () -> FileHelper.getBufferedInputStream (aRealFile)));
    }
    else
    {
      // No compression - use data as-is
      ret.setSourceStreamProvider (HasInputStream.multiple ( () -> new NonBlockingByteArrayInputStream (aSrcData)));
    }
    return ret;
  }

  /**
   * @param nBytes
   *        Size in bytes.
   * @return <code>true</code> if a payload of this size (&le; 64 KB) is kept
   *         in memory instead of being buffered to a temporary file.
   */
  public static boolean canBeKeptInMemory (final long nBytes)
  {
    return nBytes <= 64 * CGlobal.BYTES_PER_KILOBYTE;
  }

  /**
   * Create an attachment from an incoming MIME body part, either keeping the
   * payload in memory (small parts) or spooling it to a temporary file.
   *
   * @param aBodyPart
   *        The MIME body part to read. May not be <code>null</code>.
   * @param aResHelper
   *        The resource manager to use. May not be <code>null</code>.
   * @return The newly created attachment instance. Never <code>null</code>.
   * @throws MessagingException
   *         In case reading the body part fails
   * @throws IOException
   *         In case writing the temporary file fails
   */
  @Nonnull
  public static WSS4JAttachment createIncomingFileAttachment (@Nonnull final MimeBodyPart aBodyPart,
                                                              @Nonnull final AS4ResourceHelper aResHelper) throws MessagingException, IOException
  {
    ValueEnforcer.notNull (aBodyPart, "BodyPart");
    ValueEnforcer.notNull (aResHelper, "ResHelper");

    final WSS4JAttachment ret = new WSS4JAttachment (aResHelper, aBodyPart.getContentType ());

    {
      // Reference in Content-ID header is: "<ID>"
      // See
      // http://docs.oasis-open.org/wss-m/wss/v1.1.1/os/wss-SwAProfile-v1.1.1-os.html
      // chapter 5.2
      final String sRealContentID = StringHelper.trimStartAndEnd (aBodyPart.getContentID (), '<', '>');
      ret.setId (sRealContentID);
    }

    // NOTE(review): MimeBodyPart.getSize() may return -1 if the size is
    // unknown; -1 satisfies canBeKeptInMemory, so unknown-size parts take the
    // in-memory branch -- confirm this is intended.
    if (canBeKeptInMemory (aBodyPart.getSize ()))
    {
      // keep some small parts in memory
      final DataHandler aDH = aBodyPart.getDataHandler ();
      final DataSource aDS = aDH.getDataSource ();
      if (aDS != null)
      {
        // DataSource InputStreams can be retrieved over and over again
        ret.setSourceStreamProvider (HasInputStream.multiple ( () -> {
          try
          {
            return aDS.getInputStream ();
          }
          catch (final IOException ex)
          {
            throw new UncheckedIOException (ex);
          }
        }));
      }
      else
      {
        // Can only be read once
        ret.setSourceStreamProvider (HasInputStream.once ( () -> {
          try
          {
            return aDH.getInputStream ();
          }
          catch (final IOException ex)
          {
            throw new UncheckedIOException (ex);
          }
        }));
      }
    }
    else
    {
      // Write to temp file
      final File aTempFile = aResHelper.createTempFile ();
      try (final OutputStream aOS = FileHelper.getBufferedOutputStream (aTempFile))
      {
        aBodyPart.getDataHandler ().writeTo (aOS);
      }
      ret.setSourceStreamProvider (HasInputStream.multiple ( () -> FileHelper.getBufferedInputStream (aTempFile)));
    }

    // Convert all headers to attributes
    final Enumeration <Header> aEnum = aBodyPart.getAllHeaders ();
    while (aEnum.hasMoreElements ())
    {
      final Header aHeader = aEnum.nextElement ();
      ret.addHeader (aHeader.getName (), aHeader.getValue ());
    }

    // These headers are mandatory and overwrite headers from the MIME body part
    ret.addHeader (AttachmentUtils.MIME_HEADER_CONTENT_DESCRIPTION, "Attachment");
    ret.addHeader (AttachmentUtils.MIME_HEADER_CONTENT_ID, "<attachment=" + ret.getId () + '>');
    ret.addHeader (AttachmentUtils.MIME_HEADER_CONTENT_TYPE, ret.getMimeType ());
    return ret;
  }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.kotlin.idea.refactoring.safeDelete; import com.intellij.testFramework.TestDataPath; import org.jetbrains.kotlin.test.JUnit3RunnerWithInners; import org.jetbrains.kotlin.test.KotlinTestUtils; import org.jetbrains.kotlin.test.TestMetadata; import org.jetbrains.kotlin.test.TestRoot; import org.junit.runner.RunWith; /* * This class is generated by {@link org.jetbrains.kotlin.generators.tests.TestsPackage}. * DO NOT MODIFY MANUALLY. */ @SuppressWarnings("all") @TestRoot("idea/tests") @TestDataPath("$CONTENT_ROOT") @RunWith(JUnit3RunnerWithInners.class) public abstract class SafeDeleteTestGenerated extends AbstractSafeDeleteTest { @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/refactoring/safeDelete/deleteClass/kotlinClass") public static class KotlinClass extends AbstractSafeDeleteTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doClassTest, this, testDataFilePath); } @TestMetadata("class1.kt") public void testClass1() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/class1.kt"); } @TestMetadata("class2.kt") public void testClass2() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/class2.kt"); } @TestMetadata("classInString.kt") public void testClassInString() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/classInString.kt"); } @TestMetadata("classWithExternalConstructructorUsage.kt") public void testClassWithExternalConstructructorUsage() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/classWithExternalConstructructorUsage.kt"); } @TestMetadata("classWithInternalConstructructorUsage.kt") public void testClassWithInternalConstructructorUsage() throws Exception { 
runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/classWithInternalConstructructorUsage.kt"); }
    // NOTE(review): the fixture path above spells "Constructructor" — presumably the
    // on-disk test-data file carries the same typo, so the string must NOT be "fixed"
    // here without renaming the fixture. TODO confirm against testData/.
    //
    // NOTE(review): everything below looks auto-generated (one test method per fixture
    // file named by @TestMetadata, each delegating to a per-suite runTest helper);
    // confirm the generator before editing by hand.

    @TestMetadata("enumEntry.kt") public void testEnumEntry() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/enumEntry.kt"); }
    @TestMetadata("localClass1.kt") public void testLocalClass1() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/localClass1.kt"); }
    @TestMetadata("localClass2.kt") public void testLocalClass2() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/localClass2.kt"); }
    @TestMetadata("nestedClass1.kt") public void testNestedClass1() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/nestedClass1.kt"); }
    @TestMetadata("nestedClass2.kt") public void testNestedClass2() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/nestedClass2.kt"); }
    @TestMetadata("noUsages.kt") public void testNoUsages() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/noUsages.kt"); }
    @TestMetadata("trait1.kt") public void testTrait1() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/trait1.kt"); }
    @TestMetadata("trait2.kt") public void testTrait2() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/trait2.kt"); }
    @TestMetadata("unsafeImport.kt") public void testUnsafeImport() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClass/unsafeImport.kt"); }
}

// Safe-delete of a Kotlin class that has Java usages.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteClass/kotlinClassWithJava")
public static class KotlinClassWithJava extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doClassTestWithJava.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doClassTestWithJava, this, testDataFilePath);
    }

    @TestMetadata("classWithDelegationCalls.kt") public void testClassWithDelegationCalls() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/kotlinClassWithJava/classWithDelegationCalls.kt"); }
}

// Safe-delete of a Java class that has Kotlin usages.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteClass/javaClassWithKotlin")
public static class JavaClassWithKotlin extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doJavaClassTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doJavaClassTest, this, testDataFilePath);
    }

    @TestMetadata("ImportJavaClassToKotlin.java") public void testImportJavaClassToKotlin() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/javaClassWithKotlin/ImportJavaClassToKotlin.java"); }
    @TestMetadata("javaInterfaceInSuperTypeList.java") public void testJavaInterfaceInSuperTypeList() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/javaClassWithKotlin/javaInterfaceInSuperTypeList.java"); }
    @TestMetadata("javaInterfaceInSuperTypeListLast.java") public void testJavaInterfaceInSuperTypeListLast() throws Exception { runTest("testData/refactoring/safeDelete/deleteClass/javaClassWithKotlin/javaInterfaceInSuperTypeListLast.java"); }
}

// Safe-delete of Kotlin object declarations.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteObject/kotlinObject")
public static class KotlinObject extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doObjectTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doObjectTest, this, testDataFilePath);
    }

    @TestMetadata("anonymousObject.kt") public void testAnonymousObject() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/anonymousObject.kt"); }
    @TestMetadata("companionObject.kt") public void testCompanionObject() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/companionObject.kt"); }
    @TestMetadata("localObject1.kt") public void testLocalObject1() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/localObject1.kt"); }
    @TestMetadata("localObject2.kt") public void testLocalObject2() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/localObject2.kt"); }
    @TestMetadata("nestedObject1.kt") public void testNestedObject1() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/nestedObject1.kt"); }
    @TestMetadata("nestedObject2.kt") public void testNestedObject2() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/nestedObject2.kt"); }
    @TestMetadata("nestedObject3.kt") public void testNestedObject3() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/nestedObject3.kt"); }
    @TestMetadata("noUsages.kt") public void testNoUsages() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/noUsages.kt"); }
    @TestMetadata("object1.kt") public void testObject1() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/object1.kt"); }
    @TestMetadata("object2.kt") public void testObject2() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/object2.kt"); }
    @TestMetadata("unsafeImport.kt") public void testUnsafeImport() throws Exception { runTest("testData/refactoring/safeDelete/deleteObject/kotlinObject/unsafeImport.kt"); }
}

// Safe-delete of Kotlin functions (pure-Kotlin usages).
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteFunction/kotlinFunction")
public static class KotlinFunction extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doFunctionTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doFunctionTest, this, testDataFilePath);
    }

    @TestMetadata("fun1.kt") public void testFun1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/fun1.kt"); }
    @TestMetadata("fun2.kt") public void testFun2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/fun2.kt"); }
    @TestMetadata("funExt1.kt") public void testFunExt1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/funExt1.kt"); }
    @TestMetadata("funExt2.kt") public void testFunExt2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/funExt2.kt"); }
    @TestMetadata("implement1.kt") public void testImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/implement1.kt"); }
    @TestMetadata("implement2.kt") public void testImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/implement2.kt"); }
    @TestMetadata("localFun1.kt") public void testLocalFun1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/localFun1.kt"); }
    @TestMetadata("localFun2.kt") public void testLocalFun2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/localFun2.kt"); }
    @TestMetadata("localFunExt1.kt") public void testLocalFunExt1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/localFunExt1.kt"); }
    @TestMetadata("localFunExt2.kt") public void testLocalFunExt2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/localFunExt2.kt"); }
    @TestMetadata("noUsages.kt") public void testNoUsages() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/noUsages.kt"); }
    @TestMetadata("override1.kt") public void testOverride1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/override1.kt"); }
    @TestMetadata("override2.kt") public void testOverride2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/override2.kt"); }
    @TestMetadata("overrideAndImplement1.kt") public void testOverrideAndImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/overrideAndImplement1.kt"); }
    @TestMetadata("overrideAndImplement2.kt") public void testOverrideAndImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/overrideAndImplement2.kt"); }
    @TestMetadata("overrideAndImplement3.kt") public void testOverrideAndImplement3() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/overrideAndImplement3.kt"); }
    @TestMetadata("overrideWithUsages.kt") public void testOverrideWithUsages() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunction/overrideWithUsages.kt"); }
}

// Safe-delete of Kotlin functions that have Java usages/overrides.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava")
public static class KotlinFunctionWithJava extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doFunctionTestWithJava.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doFunctionTestWithJava, this, testDataFilePath);
    }

    @TestMetadata("funExt.kt") public void testFunExt() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/funExt.kt"); }
    @TestMetadata("implement1.kt") public void testImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/implement1.kt"); }
    @TestMetadata("implement2.kt") public void testImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/implement2.kt"); }
    @TestMetadata("implement3.kt") public void testImplement3() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/implement3.kt"); }
    @TestMetadata("implement4.kt") public void testImplement4() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/implement4.kt"); }
    @TestMetadata("override1.kt") public void testOverride1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/override1.kt"); }
    @TestMetadata("override2.kt") public void testOverride2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/override2.kt"); }
    @TestMetadata("override3.kt") public void testOverride3() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/override3.kt"); }
    @TestMetadata("overrideAndImplement1.kt") public void testOverrideAndImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/overrideAndImplement1.kt"); }
    @TestMetadata("overrideAndImplement2.kt") public void testOverrideAndImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/overrideAndImplement2.kt"); }
    @TestMetadata("secondaryConstructor.kt") public void testSecondaryConstructor() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/secondaryConstructor.kt"); }
    @TestMetadata("usageInOverrideToDelete.kt") public void testUsageInOverrideToDelete() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/kotlinFunctionWithJava/usageInOverrideToDelete.kt"); }
}

// Safe-delete of Java methods that participate in Kotlin hierarchies.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteFunction/javaFunctionWithKotlin")
public static class JavaFunctionWithKotlin extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doJavaMethodTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doJavaMethodTest, this, testDataFilePath);
    }

    @TestMetadata("mixedHierarchy1.kt") public void testMixedHierarchy1() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/javaFunctionWithKotlin/mixedHierarchy1.kt"); }
    @TestMetadata("mixedHierarchy2.kt") public void testMixedHierarchy2() throws Exception { runTest("testData/refactoring/safeDelete/deleteFunction/javaFunctionWithKotlin/mixedHierarchy2.kt"); }
}

// Safe-delete of Kotlin properties (pure-Kotlin usages).
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteProperty/kotlinProperty")
public static class KotlinProperty extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doPropertyTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doPropertyTest, this, testDataFilePath);
    }

    @TestMetadata("implement1.kt") public void testImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement1.kt"); }
    @TestMetadata("implement2.kt") public void testImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement2.kt"); }
    @TestMetadata("implement3.kt") public void testImplement3() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement3.kt"); }
    @TestMetadata("implement4.kt") public void testImplement4() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement4.kt"); }
    @TestMetadata("implement5.kt") public void testImplement5() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement5.kt"); }
    @TestMetadata("implement6.kt") public void testImplement6() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement6.kt"); }
    @TestMetadata("implement7.kt") public void testImplement7() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement7.kt"); }
    @TestMetadata("implement8.kt") public void testImplement8() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/implement8.kt"); }
    @TestMetadata("localVar.kt") public void testLocalVar() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/localVar.kt"); }
    @TestMetadata("noUsages.kt") public void testNoUsages() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/noUsages.kt"); }
    @TestMetadata("override1.kt") public void testOverride1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/override1.kt"); }
    @TestMetadata("override2.kt") public void testOverride2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/override2.kt"); }
    @TestMetadata("override3.kt") public void testOverride3() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/override3.kt"); }
    @TestMetadata("override4.kt") public void testOverride4() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/override4.kt"); }
    @TestMetadata("overrideAndImplement1.kt") public void testOverrideAndImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/overrideAndImplement1.kt"); }
    @TestMetadata("overrideAndImplement2.kt") public void testOverrideAndImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/overrideAndImplement2.kt"); }
    @TestMetadata("overrideAndImplement3.kt") public void testOverrideAndImplement3() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/overrideAndImplement3.kt"); }
    @TestMetadata("overrideAndImplement4.kt") public void testOverrideAndImplement4() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/overrideAndImplement4.kt"); }
    @TestMetadata("overrideWithUsages.kt") public void testOverrideWithUsages() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/overrideWithUsages.kt"); }
    @TestMetadata("property1.kt") public void testProperty1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/property1.kt"); }
    @TestMetadata("property2.kt") public void testProperty2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/property2.kt"); }
    @TestMetadata("propertyExt1.kt") public void testPropertyExt1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/propertyExt1.kt"); }
    @TestMetadata("propertyExt2.kt") public void testPropertyExt2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/propertyExt2.kt"); }
    @TestMetadata("propertyInLocalObject.kt") public void testPropertyInLocalObject() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinProperty/propertyInLocalObject.kt"); }
}

// Safe-delete of Kotlin properties that have Java usages/overrides.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava")
public static class KotlinPropertyWithJava extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doPropertyTestWithJava.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doPropertyTestWithJava, this, testDataFilePath);
    }

    @TestMetadata("implement1.kt") public void testImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/implement1.kt"); }
    @TestMetadata("implement2.kt") public void testImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/implement2.kt"); }
    @TestMetadata("implement3.kt") public void testImplement3() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/implement3.kt"); }
    @TestMetadata("implement4.kt") public void testImplement4() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/implement4.kt"); }
    @TestMetadata("override1.kt") public void testOverride1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/override1.kt"); }
    @TestMetadata("override2.kt") public void testOverride2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/override2.kt"); }
    @TestMetadata("override3.kt") public void testOverride3() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/override3.kt"); }
    @TestMetadata("override4.kt") public void testOverride4() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/override4.kt"); }
    @TestMetadata("overrideAndImplement1.kt") public void testOverrideAndImplement1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/overrideAndImplement1.kt"); }
    @TestMetadata("overrideAndImplement2.kt") public void testOverrideAndImplement2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/overrideAndImplement2.kt"); }
    @TestMetadata("propertyExt.kt") public void testPropertyExt() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/propertyExt.kt"); }
    @TestMetadata("usageInOverrideToDelete.kt") public void testUsageInOverrideToDelete() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/kotlinPropertyWithJava/usageInOverrideToDelete.kt"); }
}

// Safe-delete of Java fields/accessors that participate in Kotlin hierarchies.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteProperty/javaPropertyWithKotlin")
public static class JavaPropertyWithKotlin extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doJavaPropertyTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doJavaPropertyTest, this, testDataFilePath);
    }

    @TestMetadata("middleJava1.kt") public void testMiddleJava1() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/javaPropertyWithKotlin/middleJava1.kt"); }
    @TestMetadata("middleJava2.kt") public void testMiddleJava2() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/javaPropertyWithKotlin/middleJava2.kt"); }
    @TestMetadata("middleJava3.kt") public void testMiddleJava3() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/javaPropertyWithKotlin/middleJava3.kt"); }
    @TestMetadata("middleJava4.kt") public void testMiddleJava4() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/javaPropertyWithKotlin/middleJava4.kt"); }
    @TestMetadata("middleJava5.kt") public void testMiddleJava5() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/javaPropertyWithKotlin/middleJava5.kt"); }
    @TestMetadata("middleJava6.kt") public void testMiddleJava6() throws Exception { runTest("testData/refactoring/safeDelete/deleteProperty/javaPropertyWithKotlin/middleJava6.kt"); }
}

// Safe-delete of Kotlin type aliases.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteTypeAlias/kotlinTypeAlias")
public static class KotlinTypeAlias extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doTypeAliasTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTypeAliasTest, this, testDataFilePath);
    }

    @TestMetadata("simple.kt") public void testSimple() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeAlias/kotlinTypeAlias/simple.kt"); }
    @TestMetadata("used.kt") public void testUsed() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeAlias/kotlinTypeAlias/used.kt"); }
}

// Safe-delete of Kotlin type parameters (pure-Kotlin usages).
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter")
public static class KotlinTypeParameter extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doTypeParameterTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTypeParameterTest, this, testDataFilePath);
    }

    @TestMetadata("internalUsages1.kt") public void testInternalUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/internalUsages1.kt"); }
    @TestMetadata("internalUsages2.kt") public void testInternalUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/internalUsages2.kt"); }
    @TestMetadata("internalUsages3.kt") public void testInternalUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/internalUsages3.kt"); }
    @TestMetadata("internalUsages4.kt") public void testInternalUsages4() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/internalUsages4.kt"); }
    @TestMetadata("internalUsages5.kt") public void testInternalUsages5() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/internalUsages5.kt"); }
    @TestMetadata("safeUsagesWithConstraint1.kt") public void testSafeUsagesWithConstraint1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/safeUsagesWithConstraint1.kt"); }
    @TestMetadata("safeUsagesWithConstraint2.kt") public void testSafeUsagesWithConstraint2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/safeUsagesWithConstraint2.kt"); }
    @TestMetadata("subclass1.kt") public void testSubclass1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subclass1.kt"); }
    @TestMetadata("subclass2.kt") public void testSubclass2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subclass2.kt"); }
    @TestMetadata("subst1.kt") public void testSubst1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subst1.kt"); }
    @TestMetadata("subst2.kt") public void testSubst2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subst2.kt"); }
    @TestMetadata("subst3.kt") public void testSubst3() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subst3.kt"); }
    @TestMetadata("subst4.kt") public void testSubst4() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subst4.kt"); }
    @TestMetadata("subst5.kt") public void testSubst5() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subst5.kt"); }
    @TestMetadata("subst6.kt") public void testSubst6() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subst6.kt"); }
    @TestMetadata("subst7.kt") public void testSubst7() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameter/subst7.kt"); }
}

// Safe-delete of Kotlin type parameters that have Java usages.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava")
public static class KotlinTypeParameterWithJava extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doTypeParameterTestWithJava.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTypeParameterTestWithJava, this, testDataFilePath);
    }

    @TestMetadata("internalUsages1.kt") public void testInternalUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/internalUsages1.kt"); }
    @TestMetadata("internalUsages2.kt") public void testInternalUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/internalUsages2.kt"); }
    @TestMetadata("internalUsages3.kt") public void testInternalUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/internalUsages3.kt"); }
    @TestMetadata("internalUsages4.kt") public void testInternalUsages4() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/internalUsages4.kt"); }
    @TestMetadata("internalUsages5.kt") public void testInternalUsages5() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/internalUsages5.kt"); }
    @TestMetadata("rawType.kt") public void testRawType() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/rawType.kt"); }
    @TestMetadata("safeUsagesWithConstraint1.kt") public void testSafeUsagesWithConstraint1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/safeUsagesWithConstraint1.kt"); }
    @TestMetadata("safeUsagesWithConstraint2.kt") public void testSafeUsagesWithConstraint2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/safeUsagesWithConstraint2.kt"); }
    @TestMetadata("subclass1.kt") public void testSubclass1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subclass1.kt"); }
    @TestMetadata("subclass2.kt") public void testSubclass2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subclass2.kt"); }
    @TestMetadata("subst1.kt") public void testSubst1() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subst1.kt"); }
    @TestMetadata("subst2.kt") public void testSubst2() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subst2.kt"); }
    @TestMetadata("subst3.kt") public void testSubst3() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subst3.kt"); }
    @TestMetadata("subst4.kt") public void testSubst4() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subst4.kt"); }
    @TestMetadata("subst5.kt") public void testSubst5() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subst5.kt"); }
    @TestMetadata("subst6.kt") public void testSubst6() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subst6.kt"); }
    @TestMetadata("subst7.kt") public void testSubst7() throws Exception { runTest("testData/refactoring/safeDelete/deleteTypeParameter/kotlinTypeParameterWithJava/subst7.kt"); }
}

// Safe-delete of Kotlin value (function/constructor) parameters.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter")
public static class KotlinValueParameter extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doValueParameterTest.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doValueParameterTest, this, testDataFilePath);
    }

    @TestMetadata("dataClassComponent.kt") public void testDataClassComponent() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/dataClassComponent.kt"); }
    @TestMetadata("defaultParam1.kt") public void testDefaultParam1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/defaultParam1.kt"); }
    @TestMetadata("defaultParam2.kt") public void testDefaultParam2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/defaultParam2.kt"); }
    @TestMetadata("extNamedParam1.kt") public void testExtNamedParam1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/extNamedParam1.kt"); }
    @TestMetadata("extNamedParam2.kt") public void testExtNamedParam2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/extNamedParam2.kt"); }
    @TestMetadata("hierarchyWithSafeUsages1.kt") public void testHierarchyWithSafeUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithSafeUsages1.kt"); }
    @TestMetadata("hierarchyWithSafeUsages2.kt") public void testHierarchyWithSafeUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithSafeUsages2.kt"); }
    @TestMetadata("hierarchyWithSafeUsages3.kt") public void testHierarchyWithSafeUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithSafeUsages3.kt"); }
    @TestMetadata("hierarchyWithSafeUsages4.kt") public void testHierarchyWithSafeUsages4() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithSafeUsages4.kt"); }
    @TestMetadata("hierarchyWithSafeUsages5.kt") public void testHierarchyWithSafeUsages5() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithSafeUsages5.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages1.kt") public void testHierarchyWithUnsafeUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages1.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages2.kt") public void testHierarchyWithUnsafeUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages2.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages3.kt") public void testHierarchyWithUnsafeUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages3.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages4.kt") public void testHierarchyWithUnsafeUsages4() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages4.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages5.kt") public void testHierarchyWithUnsafeUsages5() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages5.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages6.kt") public void testHierarchyWithUnsafeUsages6() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages6.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages7.kt") public void testHierarchyWithUnsafeUsages7() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages7.kt"); }
    @TestMetadata("hierarchyWithUnsafeUsages8.kt") public void testHierarchyWithUnsafeUsages8() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/hierarchyWithUnsafeUsages8.kt"); }
    @TestMetadata("internalUsage1.kt") public void testInternalUsage1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/internalUsage1.kt"); }
    @TestMetadata("internalUsage2.kt") public void testInternalUsage2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/internalUsage2.kt"); }
    @TestMetadata("lambdaArg.kt") public void testLambdaArg() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/lambdaArg.kt"); }
    @TestMetadata("lambdaArgExt.kt") public void testLambdaArgExt() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/lambdaArgExt.kt"); }
    @TestMetadata("namedParam1.kt") public void testNamedParam1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/namedParam1.kt"); }
    @TestMetadata("namedParam2.kt") public void testNamedParam2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/namedParam2.kt"); }
    @TestMetadata("propertyParam1.kt") public void testPropertyParam1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/propertyParam1.kt"); }
    @TestMetadata("propertyParam2.kt") public void testPropertyParam2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/propertyParam2.kt"); }
    @TestMetadata("safeUsages1.kt") public void testSafeUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/safeUsages1.kt"); }
    @TestMetadata("safeUsages2.kt") public void testSafeUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/safeUsages2.kt"); }
    @TestMetadata("safeUsages3.kt") public void testSafeUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/safeUsages3.kt"); }
    @TestMetadata("safeUsagesExt1.kt") public void testSafeUsagesExt1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/safeUsagesExt1.kt"); }
    @TestMetadata("safeUsagesExt2.kt") public void testSafeUsagesExt2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/safeUsagesExt2.kt"); }
    @TestMetadata("setter.kt") public void testSetter() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameter/setter.kt"); }
}

// Safe-delete of Kotlin value parameters that have Java usages.
// (Class body continues past this chunk; the final method below is truncated here.)
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava")
public static class KotlinValueParameterWithJava extends AbstractSafeDeleteTest {
    // Dispatches each fixture through doValueParameterTestWithJava.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doValueParameterTestWithJava, this, testDataFilePath);
    }

    @TestMetadata("dataClassComponent.kt") public void testDataClassComponent() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/dataClassComponent.kt"); }
    @TestMetadata("hierarchyWithSafeUsages1.kt") public void testHierarchyWithSafeUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithSafeUsages1.kt"); }
    @TestMetadata("hierarchyWithSafeUsages2.kt") public void testHierarchyWithSafeUsages2() throws Exception {
runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithSafeUsages2.kt"); } @TestMetadata("hierarchyWithSafeUsages3.kt") public void testHierarchyWithSafeUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithSafeUsages3.kt"); } @TestMetadata("hierarchyWithSafeUsages4.kt") public void testHierarchyWithSafeUsages4() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithSafeUsages4.kt"); } @TestMetadata("hierarchyWithSafeUsages5.kt") public void testHierarchyWithSafeUsages5() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithSafeUsages5.kt"); } @TestMetadata("hierarchyWithUnsafeUsages1.kt") public void testHierarchyWithUnsafeUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithUnsafeUsages1.kt"); } @TestMetadata("hierarchyWithUnsafeUsages2.kt") public void testHierarchyWithUnsafeUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithUnsafeUsages2.kt"); } @TestMetadata("hierarchyWithUnsafeUsages3.kt") public void testHierarchyWithUnsafeUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithUnsafeUsages3.kt"); } @TestMetadata("hierarchyWithUnsafeUsages4.kt") public void testHierarchyWithUnsafeUsages4() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithUnsafeUsages4.kt"); } @TestMetadata("hierarchyWithUnsafeUsages5.kt") public void testHierarchyWithUnsafeUsages5() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/hierarchyWithUnsafeUsages5.kt"); 
} @TestMetadata("internalUsage1.kt") public void testInternalUsage1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/internalUsage1.kt"); } @TestMetadata("internalUsage2.kt") public void testInternalUsage2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/internalUsage2.kt"); } @TestMetadata("lambdaArg.kt") public void testLambdaArg() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/lambdaArg.kt"); } @TestMetadata("lambdaArgExt.kt") public void testLambdaArgExt() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/lambdaArgExt.kt"); } @TestMetadata("mixedHierarchy1.kt") public void testMixedHierarchy1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/mixedHierarchy1.kt"); } @TestMetadata("mixedHierarchy2.kt") public void testMixedHierarchy2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/mixedHierarchy2.kt"); } @TestMetadata("mixedHierarchy3.kt") public void testMixedHierarchy3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/mixedHierarchy3.kt"); } @TestMetadata("mixedHierarchyWithUnsafeUsages1.kt") public void testMixedHierarchyWithUnsafeUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/mixedHierarchyWithUnsafeUsages1.kt"); } @TestMetadata("mixedHierarchyWithUnsafeUsages2.kt") public void testMixedHierarchyWithUnsafeUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/mixedHierarchyWithUnsafeUsages2.kt"); } @TestMetadata("mixedHierarchyWithUnsafeUsages3.kt") public void 
testMixedHierarchyWithUnsafeUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/mixedHierarchyWithUnsafeUsages3.kt"); } @TestMetadata("propertyParam1.kt") public void testPropertyParam1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/propertyParam1.kt"); } @TestMetadata("propertyParam2.kt") public void testPropertyParam2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/propertyParam2.kt"); } @TestMetadata("safeUsages1.kt") public void testSafeUsages1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/safeUsages1.kt"); } @TestMetadata("safeUsages2.kt") public void testSafeUsages2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/safeUsages2.kt"); } @TestMetadata("safeUsages3.kt") public void testSafeUsages3() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/safeUsages3.kt"); } @TestMetadata("safeUsagesExt1.kt") public void testSafeUsagesExt1() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/safeUsagesExt1.kt"); } @TestMetadata("safeUsagesExt2.kt") public void testSafeUsagesExt2() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/kotlinValueParameterWithJava/safeUsagesExt2.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/refactoring/safeDelete/deleteValueParameter/javaParameterWithKotlin") public static class JavaParameterWithKotlin extends AbstractSafeDeleteTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doJavaParameterTest, this, testDataFilePath); } @TestMetadata("hierarchyWithoutConflict.java") public void 
testHierarchyWithoutConflict() throws Exception { runTest("testData/refactoring/safeDelete/deleteValueParameter/javaParameterWithKotlin/hierarchyWithoutConflict.java"); } } }
package com.huawei.esdk.demo.utils;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.security.KeyStore;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;

import org.apache.cxf.configuration.jsse.TLSClientParameters;
import org.apache.cxf.configuration.security.FiltersType;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.interceptor.LoggingInInterceptor;
import org.apache.cxf.interceptor.LoggingOutInterceptor;
import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
import org.apache.cxf.transport.http.HTTPConduit;
import org.apache.cxf.transports.http.configuration.HTTPClientPolicy;

import com.huawei.esdk.demo.gen.IVSProfessionalCommon;
import com.huawei.esdk.demo.gen.IVSProfessionalDeviceManager;
import com.huawei.esdk.demo.interceptor.MsgInInterceptor;
import com.huawei.esdk.demo.interceptor.MsgOutInterceptor;
import com.huawei.esdk.demo.keygen.PlatformKeyMgr;
import com.huawei.esdk.ivs.professional.local.impl.utils.AESCbc128Utils;
import com.huawei.esdk.ivs.professional.local.impl.utils.Base64Utils;
import com.huawei.esdk.platform.professional.local.impl.utils.PropertiesUtils;

/**
 * Creates and caches CXF JAX-WS client proxies for the eSDK demo services.
 *
 * <p>A proxy is built once per service interface and reused: {@link #getClient(Class)}
 * is synchronized and backed by a per-interface-name cache. Depending on the
 * {@code ssl.support} property and the configured server URL, the proxy's HTTP
 * conduit is configured either for plain HTTP or for TLS with a trust store
 * loaded from the classpath.
 *
 * <p>Thread-safety: the only mutable state is {@code clientMap}, which is accessed
 * exclusively from the synchronized {@code getClient} entry point.
 */
public abstract class ClientProvider {
    private static final Logger LOGGER = Logger.getLogger(ClientProvider.class.getName());

    /** Connection and receive timeout (milliseconds) applied to every conduit. */
    private static final long TIMEOUT_MILLIS = 60000L;

    /** Cache of fully configured client proxies, keyed by service interface name. */
    private static Map<String, Object> clientMap = new HashMap<String, Object>();

    /**
     * Returns a cached client proxy for the given service interface, creating and
     * configuring it on first use.
     *
     * @param clz service interface class; must be one of
     *            {@link IVSProfessionalDeviceManager}, {@link IVSProfessionalCommon}
     *            or {@link PlatformKeyMgr} for a service path to be resolved
     * @return the configured proxy, or {@code null} if the HTTP conduit could not
     *         be obtained during configuration
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public static synchronized Object getClient(Class clz) {
        Object clientObj = clientMap.get(clz.getName());
        if (null != clientObj) {
            return clientObj;
        }

        JaxWsProxyFactoryBean factory = new JaxWsProxyFactoryBean();

        // Base server URL; anything that is not http(s) is treated as unset.
        String url = PropertiesUtils.getValue("sdkserver.url");
        if (null == url || (!url.startsWith("http:") && !url.startsWith("https:"))) {
            url = "";
        }

        // Read the SSL switch once; when SSL is disabled, force a plain-http scheme.
        boolean sslSupport = Boolean.parseBoolean(PropertiesUtils.getValue("ssl.support"));
        if (!sslSupport) {
            url = url.replaceFirst("https", "http");
        }

        // Resolve the per-service path from configuration.
        String serviceUrl = "";
        if (clz.getName().equals(IVSProfessionalDeviceManager.class.getName())) {
            serviceUrl = PropertiesUtils.getValue("deviceMgr.service.url");
        } else if (clz.getName().equals(IVSProfessionalCommon.class.getName())) {
            serviceUrl = PropertiesUtils.getValue("commonMgr.service.url");
        } else if (clz.getName().equals(PlatformKeyMgr.class.getName())) {
            serviceUrl = PropertiesUtils.getValue("professional.key.service.url");
        }
        factory.setAddress(url + "/" + serviceUrl);

        Object service = factory.create(clz);

        if (sslSupport && url.startsWith("https")) {
            return configureSSL(service, clz);
        } else {
            return configureHttp(service, clz);
        }
    }

    /**
     * Configures a plain-HTTP conduit (interceptors, timeouts) and caches the proxy.
     *
     * @return the proxy, or {@code null} when no HTTP conduit is available
     */
    @SuppressWarnings("rawtypes")
    private static Object configureHttp(Object obj, Class clz) {
        Client client = ClientProxy.getClient(obj);
        addInterceptor(client);
        interceptorLoggingCtrl(client);

        HTTPConduit http = (HTTPConduit) client.getConduit();
        if (null == http) {
            return null;
        }
        configHttpClientPolicy(http);

        clientMap.put(clz.getName(), obj);
        return obj;
    }

    /**
     * Configures a TLS conduit: loads the trust store from the classpath (password
     * is AES/Base64-protected in configuration), installs trust managers and a
     * cipher-suite filter, then caches the proxy.
     *
     * <p>CN checking is disabled and the default hostname verifier is bypassed,
     * matching the original deployment expectations of the demo.
     *
     * @return the proxy (even if TLS setup failed and was only logged), or
     *         {@code null} when no HTTP conduit is available
     */
    @SuppressWarnings("rawtypes")
    private static Object configureSSL(Object obj, Class clz) {
        // Only propagate the JSSE debug switch when it is actually configured;
        // System.setProperty rejects a null value.
        String jsseDebug = PropertiesUtils.getValue("ssl.javax.net.debug");
        if (null != jsseDebug) {
            System.setProperty("javax.net.debug", jsseDebug);
        }

        Client client = ClientProxy.getClient(obj);
        addInterceptor(client);
        interceptorLoggingCtrl(client);

        HTTPConduit http = (HTTPConduit) client.getConduit();
        if (null == http) {
            return null;
        }

        InputStream is = null;
        try {
            // Reuse existing TLS parameters if the conduit already has some.
            TLSClientParameters tlsParams = http.getTlsClientParameters();
            if (null == tlsParams) {
                tlsParams = new TLSClientParameters();
                tlsParams.setSecureSocketProtocol(PropertiesUtils.getValue("ssl.secure.socket.protocol"));
            }

            KeyStore keyStore = KeyStore.getInstance(PropertiesUtils.getValue("ssl.keystore.type"),
                PropertiesUtils.getValue("ssl.keystore.provider"));

            // The store password is stored Base64-encoded and AES-CBC-128 encrypted.
            String storePassword = new String(
                AESCbc128Utils.decode(Base64Utils.getFromBASE64(PropertiesUtils.getValue("ssl.keystore.password"))),
                StandardCharsets.UTF_8);

            // Try the system class loader first, then the context class loader.
            is = (InputStream) ClassLoader.getSystemClassLoader()
                .getResourceAsStream(PropertiesUtils.getValue("ssl.keystore"));
            if (null == is) {
                is = Thread.currentThread()
                    .getContextClassLoader()
                    .getResourceAsStream(PropertiesUtils.getValue("ssl.keystore"));
            }
            keyStore.load(is, storePassword.toCharArray());

            TrustManagerFactory trustFactory =
                TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            trustFactory.init(keyStore);
            TrustManager[] trustManagers = trustFactory.getTrustManagers();

            SSLContext sslContext = SSLContext.getInstance(PropertiesUtils.getValue("ssl.secure.socket.protocol"));
            sslContext.init(null, trustManagers, new java.security.SecureRandom());
            SSLSocketFactory ssf = sslContext.getSocketFactory();

            tlsParams.setSSLSocketFactory(ssf);
            tlsParams.setTrustManagers(trustManagers);
            tlsParams.setUseHttpsURLConnectionDefaultHostnameVerifier(false);

            // Exclude weak/anonymous cipher suites; "Include" here is CXF's filter
            // terminology (patterns matched against enabled suites).
            FiltersType filtersTypes = new FiltersType();
            filtersTypes.getInclude().add(".*_EXPORT_.*");
            filtersTypes.getInclude().add(".*_EXPORT1024_.*");
            filtersTypes.getInclude().add(".*_WITH_DES_.*");
            filtersTypes.getInclude().add(".*_WITH_AES_.*");
            filtersTypes.getInclude().add(".*_WITH_NULL_.*");
            filtersTypes.getExclude().add(".*_DH_anon_.*");
            tlsParams.setCipherSuitesFilter(filtersTypes);

            // NOTE(review): CN checking deliberately disabled — the server certificate
            // host name is not verified. Confirm this is acceptable for production.
            tlsParams.setDisableCNCheck(true);

            http.setTlsClientParameters(tlsParams);
            configHttpClientPolicy(http);

            clientMap.put(clz.getName(), obj);
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, "Exception happened in configureSSL() ", e);
        } finally {
            if (null != is) {
                try {
                    is.close();
                } catch (IOException e) {
                    LOGGER.log(Level.WARNING, "Exception happened in configureSSL() ", e);
                }
            }
        }
        return obj;
    }

    /** Adds CXF wire-level logging interceptors when enabled by configuration. */
    private static void interceptorLoggingCtrl(Client client) {
        if (Boolean.parseBoolean(PropertiesUtils.getValue("cxf.logging.interceptor.on"))) {
            client.getOutInterceptors().add(new LoggingOutInterceptor());
            client.getInInterceptors().add(new LoggingInInterceptor());
        }
    }

    /** Applies the shared HTTP client policy: 60s timeouts, chunking disabled. */
    private static void configHttpClientPolicy(HTTPConduit http) {
        HTTPClientPolicy httpClientPolicy = new HTTPClientPolicy();
        httpClientPolicy.setConnectionTimeout(TIMEOUT_MILLIS);
        httpClientPolicy.setAllowChunking(false);
        httpClientPolicy.setReceiveTimeout(TIMEOUT_MILLIS);
        http.setClient(httpClientPolicy);
    }

    /** Installs the demo's message in/out interceptors on the client. */
    @SuppressWarnings("unchecked")
    private static void addInterceptor(Client client) {
        client.getOutInterceptors().add(new MsgOutInterceptor());
        client.getInInterceptors().add(new MsgInInterceptor());
    }
}
package org.motechproject.mds.builder.impl;

import javassist.ByteArrayClassPath;
import javassist.CannotCompileException;
import javassist.CtClass;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.IOUtils;
import org.motechproject.commons.sql.service.SqlDBManager;
import org.motechproject.mds.builder.EntityBuilder;
import org.motechproject.mds.builder.EntityInfrastructureBuilder;
import org.motechproject.mds.builder.EntityMetadataBuilder;
import org.motechproject.mds.builder.EnumBuilder;
import org.motechproject.mds.builder.MDSConstructor;
import org.motechproject.mds.config.MdsConfig;
import org.motechproject.mds.domain.ClassData;
import org.motechproject.mds.domain.ComboboxHolder;
import org.motechproject.mds.domain.Entity;
import org.motechproject.mds.domain.EntityType;
import org.motechproject.mds.domain.Field;
import org.motechproject.mds.domain.Type;
import org.motechproject.mds.enhancer.MdsJDOEnhancer;
import org.motechproject.mds.ex.entity.EntityCreationException;
import org.motechproject.mds.helper.ClassTableName;
import org.motechproject.mds.helper.EntitySorter;
import org.motechproject.mds.helper.MdsBundleHelper;
import org.motechproject.mds.javassist.JavassistLoader;
import org.motechproject.mds.javassist.MotechClassPool;
import org.motechproject.mds.repository.AllEntities;
import org.motechproject.mds.repository.MetadataHolder;
import org.motechproject.mds.util.ClassName;
import org.motechproject.mds.util.Constants;
import org.motechproject.mds.util.JavassistUtil;
import org.motechproject.mds.util.MDSClassLoader;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.jdo.PersistenceManagerFactory;
import javax.jdo.datastore.JDOConnection;
import javax.jdo.metadata.JDOMetadata;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * Default implementation of {@link org.motechproject.mds.builder.MDSConstructor} interface.
 *
 * <p>Drives the full entity-construction pipeline: it builds bytecode for every
 * non-draft entity (plus its history and trash companions), registers JDO metadata,
 * runs DataNucleus enhancement, and finally registers the enhanced classes and their
 * service infrastructure with the MDS class loader and class pool.
 */
@Service
public class MDSConstructorImpl implements MDSConstructor {

    private static final Logger LOGGER = LoggerFactory.getLogger(MDSConstructorImpl.class);

    private MdsConfig mdsConfig;
    private AllEntities allEntities;
    private EntityBuilder entityBuilder;
    private EntityInfrastructureBuilder infrastructureBuilder;
    private EntityMetadataBuilder metadataBuilder;
    private MetadataHolder metadataHolder;
    private BundleContext bundleContext;
    private EnumBuilder enumBuilder;
    private PersistenceManagerFactory persistenceManagerFactory;
    private SqlDBManager sqlDBManager;

    /**
     * Rebuilds, enhances and registers classes for all actual (non-draft) entities.
     *
     * <p>The steps are ordered deliberately: enums and interfaces must exist before
     * the entity classes that reference them; all classes must be defined before
     * JDO metadata is registered; and enhancement must complete before the enhanced
     * bytes are published to the class pool.
     *
     * @return {@code true} if at least one entity was processed
     */
    @Override
    public synchronized boolean constructEntities() {
        // To be able to register updated class, we need to reload class loader
        // and therefore add all the classes again
        MotechClassPool.clearEnhancedData();
        MDSClassLoader.reloadClassLoader();

        // we need a jdo enhancer and a temporary classLoader
        // to define classes in before enhancement
        MDSClassLoader tmpClassLoader = MDSClassLoader.getStandaloneInstance();
        MdsJDOEnhancer enhancer = createEnhancer(tmpClassLoader);
        JavassistLoader loader = new JavassistLoader(tmpClassLoader);

        // process only entities that are not drafts
        List<Entity> entities = allEntities.retrieveAll();
        filterEntities(entities);
        sortEntities(entities);

        // create enum for appropriate combobox fields
        for (Entity entity : entities) {
            buildEnum(loader, enhancer, entity);
        }

        // load entities interfaces
        for (Entity entity : entities) {
            buildInterfaces(loader, enhancer, entity);
        }

        // generate jdo metadata from scratch for our entities
        JDOMetadata jdoMetadata = metadataHolder.reloadMetadata();

        // First we build empty history and trash classes
        // (We don't have to generate it for main class,
        // since we just fetch fields from existing definition)
        for (Entity entity : entities) {
            if (entity.isRecordHistory()) {
                entityBuilder.prepareHistoryClass(entity);
            }
            entityBuilder.prepareTrashClass(entity);
        }

        // Build classes
        Map<String, ClassData> classDataMap = buildClasses(entities);

        List<Class> classes = new ArrayList<>();

        // We add the java classes to both
        // the temporary ClassLoader and enhancer
        for (Entity entity : entities) {
            String className = entity.getClassName();

            Class<?> definition = addClassData(loader, enhancer, classDataMap.get(className));
            if (entity.isRecordHistory()) {
                addClassData(loader, enhancer, classDataMap.get(ClassName.getHistoryClassName(className)));
            }
            addClassData(loader, enhancer, classDataMap.get(ClassName.getTrashClassName(className)));

            classes.add(definition);
            LOGGER.debug("Generated classes for {}", entity.getClassName());
        }

        // fields and methods can only be resolved once every class is defined
        for (Class<?> definition : classes) {
            loader.loadFieldsAndMethodsOfClass(definition);
        }

        // Prepare metadata
        buildMetadata(entities, jdoMetadata, classDataMap, classes);

        // after the classes are defined, we register their metadata
        enhancer.registerMetadata(jdoMetadata);

        // then, we commence with enhancement
        enhancer.enhance();

        // we register the enhanced class bytes
        // and build the infrastructure classes
        registerEnhancedClassBytes(entities, enhancer);

        metadataBuilder.fixEnhancerIssuesInMetadata(jdoMetadata);

        return CollectionUtils.isNotEmpty(entities);
    }

    /**
     * Publishes the enhanced bytecode of each entity (and its history/trash
     * companions) and builds its repository/service infrastructure.
     */
    private void registerEnhancedClassBytes(List<Entity> entities, MdsJDOEnhancer enhancer) {
        for (Entity entity : entities) {
            String className = entity.getClassName();
            LOGGER.debug("Registering {}", className);

            registerClass(enhancer, entity);
            if (entity.isRecordHistory()) {
                registerHistoryClass(enhancer, className);
            }
            registerTrashClass(enhancer, className);

            LOGGER.debug("Building infrastructure for {}", className);
            buildInfrastructure(entity);
        }
    }

    /**
     * Sorts the entity list in place: first by inheritance, then by 'has-a'
     * relations, so that dependencies are always built before their dependants.
     */
    private void sortEntities(List<Entity> entities) {
        List<Entity> byInheritance = EntitySorter.sortByInheritance(entities);
        List<Entity> byHasARelation = EntitySorter.sortByHasARelation(byInheritance);
        // for safe we clear entities list
        entities.clear();
        // for now the entities list will be sorted by inheritance and by 'has-a' relation
        entities.addAll(byHasARelation);
    }

    /**
     * Builds the main, history (when enabled) and trash class for every entity.
     *
     * @return class data keyed by fully qualified class name; insertion order is
     *         preserved so that later processing matches entity order
     */
    private Map<String, ClassData> buildClasses(List<Entity> entities) {
        Map<String, ClassData> classDataMap = new LinkedHashMap<>();

        for (Entity entity : entities) {
            ClassData classData = buildClass(entity);
            ClassData historyClassData = null;
            if (entity.isRecordHistory()) {
                historyClassData = entityBuilder.buildHistory(entity);
            }
            ClassData trashClassData = entityBuilder.buildTrash(entity);

            String className = entity.getClassName();
            classDataMap.put(className, classData);
            if (historyClassData != null) {
                classDataMap.put(ClassName.getHistoryClassName(className), historyClassData);
            }
            classDataMap.put(ClassName.getTrashClassName(className), trashClassData);
        }
        return classDataMap;
    }

    /**
     * Adds JDO metadata for each entity and its helper (history/trash) classes,
     * matching each entity to its already-defined class object.
     */
    private void buildMetadata(List<Entity> entities, JDOMetadata jdoMetadata,
                               Map<String, ClassData> classDataMap, List<Class> classes) {
        for (Entity entity : entities) {
            String className = entity.getClassName();

            // find the Class definition that belongs to this entity
            Class definition = null;
            for (Class clazz : classes) {
                if (clazz.getName().equals(className)) {
                    definition = clazz;
                    break;
                }
            }

            metadataBuilder.addEntityMetadata(jdoMetadata, entity, definition);
            if (entity.isRecordHistory()) {
                metadataBuilder.addHelperClassMetadata(jdoMetadata,
                        classDataMap.get(ClassName.getHistoryClassName(className)),
                        entity, EntityType.HISTORY, definition);
            }
            metadataBuilder.addHelperClassMetadata(jdoMetadata,
                    classDataMap.get(ClassName.getTrashClassName(className)),
                    entity, EntityType.TRASH, definition);
        }
    }

    /**
     * Builds or reuses enum classes for the entity's combobox fields. Read-only
     * (DDE) enums are loaded from their declaring bundle; UI-defined ones are
     * generated via the {@link EnumBuilder}.
     */
    private void buildEnum(JavassistLoader loader, MdsJDOEnhancer enhancer, Entity entity) {
        for (Field field : entity.getFields()) {
            Type type = field.getType();

            if (!type.isCombobox()) {
                continue;
            }

            ComboboxHolder holder = new ComboboxHolder(entity, field);

            if (holder.isEnum() || holder.isEnumCollection()) {
                if (field.isReadOnly()) {
                    // DDE enum: reuse the class already declared in the bundle
                    String enumName = holder.getEnumName();
                    Class<?> definition = loadClass(entity, enumName);
                    if (null != definition) {
                        MotechClassPool.registerEnum(enumName);

                        CtClass ctClass = MotechClassPool.getDefault().getOrNull(enumName);
                        if (null != ctClass) {
                            try {
                                ctClass.defrost();
                                byte[] bytecode = ctClass.toBytecode();
                                ClassData data = new ClassData(enumName, bytecode);

                                // register with the classloader so that we avoid issues with the persistence manager
                                MDSClassLoader.getInstance().safeDefineClass(data.getClassName(), data.getBytecode());

                                addClassData(loader, enhancer, data);
                            } catch (IOException | CannotCompileException e) {
                                // include the cause so the failure can actually be diagnosed
                                LOGGER.error("Could not load enum: {}", enumName, e);
                            }
                        }
                    }
                } else {
                    buildEnum(loader, enhancer, holder);
                }
            }
        }
    }

    /** Generates a new enum class for a UI-defined combobox and registers it. */
    private void buildEnum(JavassistLoader loader, MdsJDOEnhancer enhancer, ComboboxHolder holder) {
        ClassData data = enumBuilder.build(holder);

        ByteArrayClassPath classPath = new ByteArrayClassPath(data.getClassName(), data.getBytecode());
        MotechClassPool.getDefault().appendClassPath(classPath);
        MotechClassPool.registerEnhancedClassData(data);

        // register with the classloader so that we avoid issues with the persistence manager
        MDSClassLoader.getInstance().safeDefineClass(data.getClassName(), data.getBytecode());

        addClassData(loader, enhancer, data);
    }

    /**
     * Renames database columns for the given entity according to the provided
     * old-name to new-name mapping, in the main table and, where applicable, the
     * history and trash tables.
     *
     * @param entityId         id of the entity whose columns are renamed
     * @param fieldNameChanges map of old column name to new column name
     */
    @Override
    @Transactional
    public void updateFields(Long entityId, Map<String, String> fieldNameChanges) {
        Entity entity = allEntities.retrieveById(entityId);
        for (Map.Entry<String, String> change : fieldNameChanges.entrySet()) {
            String tableName = ClassTableName.getTableName(entity.getClassName(), entity.getModule(),
                    entity.getNamespace(), entity.getTableName(), null);
            updateFieldName(change.getKey(), change.getValue(), tableName);
            if (entity.isRecordHistory()) {
                updateFieldName(change.getKey(), change.getValue(),
                        ClassTableName.getTableName(entity, EntityType.HISTORY));
            }
            updateFieldName(change.getKey(), change.getValue(),
                    ClassTableName.getTableName(entity, EntityType.TRASH));
        }
    }

    /** Registers the enhanced history class bytes for the given entity class. */
    private void registerHistoryClass(MdsJDOEnhancer enhancer, String className) {
        String historyClassName = ClassName.getHistoryClassName(className);

        byte[] enhancedBytes = enhancer.getEnhancedBytes(historyClassName);
        ClassData classData = new ClassData(historyClassName, enhancedBytes);

        // register with the classloader so that we avoid issues with the persistence manager
        MDSClassLoader.getInstance().safeDefineClass(classData.getClassName(), classData.getBytecode());

        MotechClassPool.registerHistoryClassData(classData);
    }

    /** Registers the enhanced trash class bytes for the given entity class. */
    private void registerTrashClass(MdsJDOEnhancer enhancer, String className) {
        String trashClassName = ClassName.getTrashClassName(className);

        byte[] enhancedBytes = enhancer.getEnhancedBytes(trashClassName);
        ClassData classData = new ClassData(trashClassName, enhancedBytes);

        // register with the classloader so that we avoid issues with the persistence manager
        MDSClassLoader.getInstance().safeDefineClass(classData.getClassName(), classData.getBytecode());

        MotechClassPool.registerTrashClassData(classData);
    }

    /** Registers the enhanced main class bytes for the given entity. */
    private void registerClass(MdsJDOEnhancer enhancer, Entity entity) {
        byte[] enhancedBytes = enhancer.getEnhancedBytes(entity.getClassName());
        ClassData classData = new ClassData(entity, enhancedBytes);

        // register with the classloader so that we avoid issues with the persistence manager
        MDSClassLoader.getInstance().safeDefineClass(classData.getClassName(), classData.getBytecode());

        MotechClassPool.registerEnhancedClassData(classData);
    }

    /**
     * Defines the class in the temporary class loader and queues it for
     * enhancement.
     *
     * @return the defined (pre-enhancement) class object
     */
    private Class<?> addClassData(JavassistLoader loader, MdsJDOEnhancer enhancer, ClassData data) {
        Class<?> definition = loader.loadClass(data);
        enhancer.addClass(data);
        return definition;
    }

    /**
     * Builds class data for one entity: DDE classes are read from their declaring
     * bundle, user-defined ones are generated.
     *
     * @throws EntityCreationException if a DDE entity's declaring bundle cannot be found
     */
    private ClassData buildClass(Entity entity) {
        ClassData classData;

        if (entity.isDDE()) {
            // for DDE we load the class coming from the bundle
            Bundle declaringBundle = MdsBundleHelper.searchForBundle(bundleContext, entity);
            if (declaringBundle == null) {
                throw new EntityCreationException("Declaring bundle unavailable for entity " + entity.getClassName());
            }
            classData = entityBuilder.buildDDE(entity, declaringBundle);
        } else {
            classData = entityBuilder.build(entity);
        }

        return classData;
    }

    /**
     * Collects the interfaces implemented by a DDE entity class from its declaring
     * bundle and defines any that are not yet known to the MDS class loader.
     */
    private void buildInterfaces(JavassistLoader loader, MdsJDOEnhancer enhancer, Entity entity) {
        List<ClassData> interfaces = new LinkedList<>();

        if (entity.isDDE()) {
            Bundle declaringBundle = MdsBundleHelper.searchForBundle(bundleContext, entity);
            try {
                Class<?> definition = declaringBundle.loadClass(entity.getClassName());
                for (Class interfaceClass : definition.getInterfaces()) {
                    String classpath = JavassistUtil.toClassPath(interfaceClass.getName());
                    URL classResource = declaringBundle.getResource(classpath);
                    if (classResource != null) {
                        try (InputStream in = classResource.openStream()) {
                            interfaces.add(new ClassData(interfaceClass.getName(), IOUtils.toByteArray(in), true));
                        }
                    }
                }
            } catch (ClassNotFoundException e) {
                LOGGER.error("Class {} not found in {} bundle", entity.getClassName(),
                        declaringBundle.getSymbolicName());
            } catch (IOException ioExc) {
                LOGGER.error("Could not load interface for {} class", entity.getClassName());
            }
        }

        for (ClassData data : interfaces) {
            try {
                MDSClassLoader.getInstance().loadClass(data.getClassName());
            } catch (ClassNotFoundException e) {
                // interfaces should be defined in the MDS class loader only if it does not exist
                MDSClassLoader.getInstance().safeDefineClass(data.getClassName(), data.getBytecode());

                ByteArrayClassPath classPath = new ByteArrayClassPath(data.getClassName(), data.getBytecode());
                MotechClassPool.getDefault().appendClassPath(classPath);
                MotechClassPool.registerEnhancedClassData(data);

                addClassData(loader, enhancer, data);
            }
        }
    }

    /**
     * Builds repository/service infrastructure classes for the entity and, for
     * DDE service interfaces, registers the enhanced interface bytecode so the
     * weaving hook can add UI-defined lookups.
     */
    private void buildInfrastructure(Entity entity) {
        String className = entity.getClassName();

        List<ClassData> infrastructure = infrastructureBuilder.buildInfrastructure(entity);

        for (ClassData classData : infrastructure) {
            // if we have a DDE service registered, we register the enhanced bytecode
            // so that the weaving hook can weave the interface class and add lookups
            // coming from the UI
            if (classData.isInterfaceClass() && MotechClassPool.isServiceInterfaceRegistered(className)) {
                MotechClassPool.registerEnhancedClassData(classData);
            }
        }
    }

    /**
     * Removes drafts, not-ready DDE entities, and DDE entities whose class cannot
     * be loaded from their declaring bundle.
     */
    private void filterEntities(List<Entity> entities) {
        Iterator<Entity> it = entities.iterator();
        while (it.hasNext()) {
            Entity entity = it.next();

            if (!entity.isActualEntity() || isSkippedDDE(entity)) {
                it.remove();
            } else if (entity.isDDE()) {
                Class<?> definition = loadClass(entity, entity.getClassName());
                if (null == definition) {
                    it.remove();
                }
            }
        }
    }

    /** A DDE entity is skipped when its annotations have not been processed yet. */
    private boolean isSkippedDDE(Entity entity) {
        return entity.isDDE() && !MotechClassPool.isDDEReady(entity.getClassName());
    }

    /**
     * Renames a single column in the given table, going directly through the
     * datastore connection. MySQL needs {@code CHANGE <old> <new> <type>} (so the
     * current column type is looked up first); other databases use
     * {@code RENAME COLUMN <old> TO <new>}.
     *
     * <p>NOTE: the queries are assembled by string concatenation; the names come
     * from internal MDS metadata, not user input, but they are not escaped here.
     */
    private void updateFieldName(String oldName, String newName, String tableName) {
        LOGGER.info("Renaming column in {}: {} to {}", tableName, oldName, newName);

        boolean isMySqlDriver = sqlDBManager.getChosenSQLDriver().equals(Constants.Config.MYSQL_DRIVER_CLASSNAME);

        JDOConnection con = persistenceManagerFactory.getPersistenceManager().getDataStoreConnection();
        try {
            // 1) look up the current column type (required for the MySQL CHANGE syntax)
            Connection nativeCon = (Connection) con.getNativeConnection();
            String fieldType;

            StringBuilder fieldTypeQuery =
                    new StringBuilder("SELECT DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '");
            fieldTypeQuery.append(tableName);
            fieldTypeQuery.append("' AND COLUMN_NAME = '");
            fieldTypeQuery.append(oldName);
            fieldTypeQuery.append("';");

            // try-with-resources closes ResultSet and Statement (the original leaked both)
            try (Statement stmt = nativeCon.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
                 ResultSet resultSet = stmt.executeQuery(fieldTypeQuery.toString())) {
                resultSet.first();
                fieldType = resultSet.getString("DATA_TYPE");
            }

            // the native connection is released between the two statements, as before
            con.close();
            con = persistenceManagerFactory.getPersistenceManager().getDataStoreConnection();
            nativeCon = (Connection) con.getNativeConnection();

            // 2) run the rename
            StringBuilder updateQuery = new StringBuilder("ALTER TABLE ");
            updateQuery.append(getDatabaseValidName(tableName, isMySqlDriver));
            updateQuery.append(isMySqlDriver ? " CHANGE " : " RENAME COLUMN ");
            updateQuery.append(getDatabaseValidName(oldName, isMySqlDriver));
            updateQuery.append(isMySqlDriver ? " " : " TO ");
            updateQuery.append(getDatabaseValidName(newName, isMySqlDriver));
            if (isMySqlDriver) {
                updateQuery.append(" ");
                // MySQL requires the full column definition; restore the varchar length
                updateQuery.append("varchar".equals(fieldType) ? "varchar(255)" : fieldType);
            }
            updateQuery.append(";");

            try (Statement stmt = nativeCon.createStatement()) {
                stmt.executeUpdate(updateQuery.toString());
            }
        } catch (SQLException e) {
            // S1000 is raised when the column is already gone - treat as informational
            if ("S1000".equals(e.getSQLState())) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info(String.format("Column %s does not exist in %s", oldName, tableName), e);
                }
            } else {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error(String.format("Unable to rename column in %s: %s to %s",
                            tableName, oldName, newName), e);
                }
            }
        } finally {
            con.close();
        }
    }

    /** MySQL identifiers are used bare; other databases get double-quoted names. */
    private String getDatabaseValidName(String name, boolean isMySqlDriver) {
        return isMySqlDriver ? name : "\"".concat(name).concat("\"");
    }

    /** Creates a JDO enhancer backed by the DataNucleus configuration. */
    private MdsJDOEnhancer createEnhancer(ClassLoader enhancerClassLoader) {
        Properties config = mdsConfig.getDataNucleusProperties();
        return new MdsJDOEnhancer(config, enhancerClassLoader);
    }

    /**
     * Loads a class from the entity's declaring bundle.
     *
     * @return the class, or {@code null} when the bundle or class is unavailable
     *         (both cases are logged as warnings)
     */
    private Class<?> loadClass(Entity entity, String className) {
        Bundle declaringBundle = MdsBundleHelper.searchForBundle(bundleContext, entity);

        Class<?> definition = null;

        if (declaringBundle == null) {
            LOGGER.warn("Declaring bundle unavailable for entity {}", className);
        } else {
            try {
                definition = declaringBundle.loadClass(className);
            } catch (ClassNotFoundException e) {
                LOGGER.warn("Class declaration for {} not present in bundle {}",
                        className, declaringBundle.getSymbolicName());
            }
        }

        return definition;
    }

    @Autowired
    public void setSqlDBManager(SqlDBManager sqlDBManager) {
        this.sqlDBManager = sqlDBManager;
    }

    @Autowired
    public void setEntityBuilder(EntityBuilder entityBuilder) {
        this.entityBuilder = entityBuilder;
    }

    @Autowired
    public void setInfrastructureBuilder(EntityInfrastructureBuilder infrastructureBuilder) {
        this.infrastructureBuilder = infrastructureBuilder;
    }

    @Autowired
    public void setAllEntities(AllEntities allEntities) {
        this.allEntities = allEntities;
    }

    @Autowired
    public void setMetadataBuilder(EntityMetadataBuilder metadataBuilder) {
        this.metadataBuilder = metadataBuilder;
    }

    @Autowired
    public void setMdsConfig(MdsConfig mdsConfig) {
        this.mdsConfig = mdsConfig;
    }

    @Autowired
    public void setBundleContext(BundleContext bundleContext) {
        this.bundleContext = bundleContext;
    }

    @Autowired
    public void setMetadataHolder(MetadataHolder metadataHolder) {
        this.metadataHolder = metadataHolder;
    }

    @Autowired
    public void setEnumBuilder(EnumBuilder enumBuilder) {
        this.enumBuilder = enumBuilder;
    }

    @Autowired
    public void setPersistenceManagerFactory(PersistenceManagerFactory persistenceManagerFactory) {
        this.persistenceManagerFactory = persistenceManagerFactory;
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ecs.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * An object representing a constraint on task placement. For more information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-placement-constraints.html">Task Placement * Constraints</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. * </p> * <note> * <p> * If you're using the Fargate launch type, task placement constraints aren't supported. * </p> * </note> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ecs-2014-11-13/PlacementConstraint" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class PlacementConstraint implements Serializable, Cloneable, StructuredPojo { /** * <p> * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group is * running on a different container instance. Use <code>memberOf</code> to restrict the selection to a group of * valid candidates. * </p> */ private String type; /** * <p> * A cluster query language expression to apply to the constraint. The expression can have a maximum length of 2000 * characters. You can't specify an expression if the constraint type is <code>distinctInstance</code>. 
For more * information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html">Cluster query * language</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. * </p> */ private String expression; /** * <p> * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group is * running on a different container instance. Use <code>memberOf</code> to restrict the selection to a group of * valid candidates. * </p> * * @param type * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group * is running on a different container instance. Use <code>memberOf</code> to restrict the selection to a * group of valid candidates. * @see PlacementConstraintType */ public void setType(String type) { this.type = type; } /** * <p> * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group is * running on a different container instance. Use <code>memberOf</code> to restrict the selection to a group of * valid candidates. * </p> * * @return The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group * is running on a different container instance. Use <code>memberOf</code> to restrict the selection to a * group of valid candidates. * @see PlacementConstraintType */ public String getType() { return this.type; } /** * <p> * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group is * running on a different container instance. Use <code>memberOf</code> to restrict the selection to a group of * valid candidates. * </p> * * @param type * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group * is running on a different container instance. Use <code>memberOf</code> to restrict the selection to a * group of valid candidates. 
* @return Returns a reference to this object so that method calls can be chained together. * @see PlacementConstraintType */ public PlacementConstraint withType(String type) { setType(type); return this; } /** * <p> * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group is * running on a different container instance. Use <code>memberOf</code> to restrict the selection to a group of * valid candidates. * </p> * * @param type * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group * is running on a different container instance. Use <code>memberOf</code> to restrict the selection to a * group of valid candidates. * @see PlacementConstraintType */ public void setType(PlacementConstraintType type) { withType(type); } /** * <p> * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group is * running on a different container instance. Use <code>memberOf</code> to restrict the selection to a group of * valid candidates. * </p> * * @param type * The type of constraint. Use <code>distinctInstance</code> to ensure that each task in a particular group * is running on a different container instance. Use <code>memberOf</code> to restrict the selection to a * group of valid candidates. * @return Returns a reference to this object so that method calls can be chained together. * @see PlacementConstraintType */ public PlacementConstraint withType(PlacementConstraintType type) { this.type = type.toString(); return this; } /** * <p> * A cluster query language expression to apply to the constraint. The expression can have a maximum length of 2000 * characters. You can't specify an expression if the constraint type is <code>distinctInstance</code>. 
For more * information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html">Cluster query * language</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. * </p> * * @param expression * A cluster query language expression to apply to the constraint. The expression can have a maximum length * of 2000 characters. You can't specify an expression if the constraint type is * <code>distinctInstance</code>. For more information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html">Cluster * query language</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. */ public void setExpression(String expression) { this.expression = expression; } /** * <p> * A cluster query language expression to apply to the constraint. The expression can have a maximum length of 2000 * characters. You can't specify an expression if the constraint type is <code>distinctInstance</code>. For more * information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html">Cluster query * language</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. * </p> * * @return A cluster query language expression to apply to the constraint. The expression can have a maximum length * of 2000 characters. You can't specify an expression if the constraint type is * <code>distinctInstance</code>. For more information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html">Cluster * query language</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. */ public String getExpression() { return this.expression; } /** * <p> * A cluster query language expression to apply to the constraint. The expression can have a maximum length of 2000 * characters. You can't specify an expression if the constraint type is <code>distinctInstance</code>. 
For more * information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html">Cluster query * language</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. * </p> * * @param expression * A cluster query language expression to apply to the constraint. The expression can have a maximum length * of 2000 characters. You can't specify an expression if the constraint type is * <code>distinctInstance</code>. For more information, see <a * href="https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html">Cluster * query language</a> in the <i>Amazon Elastic Container Service Developer Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public PlacementConstraint withExpression(String expression) { setExpression(expression); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getType() != null) sb.append("Type: ").append(getType()).append(","); if (getExpression() != null) sb.append("Expression: ").append(getExpression()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof PlacementConstraint == false) return false; PlacementConstraint other = (PlacementConstraint) obj; if (other.getType() == null ^ this.getType() == null) return false; if (other.getType() != null && other.getType().equals(this.getType()) == false) return false; if (other.getExpression() == null ^ this.getExpression() == null) return false; if (other.getExpression() != null && other.getExpression().equals(this.getExpression()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode()); hashCode = prime * hashCode + ((getExpression() == null) ? 0 : getExpression().hashCode()); return hashCode; } @Override public PlacementConstraint clone() { try { return (PlacementConstraint) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.ecs.model.transform.PlacementConstraintMarshaller.getInstance().marshall(this, protocolMarshaller); } }
// Copyright 2020 The NATS Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at: // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package io.nats.client.impl; import io.nats.client.*; import io.nats.client.api.*; import io.nats.client.utils.TestBase; import org.junit.jupiter.api.function.Executable; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; import static io.nats.examples.jetstream.NatsJsUtils.printConsumerInfo; import static io.nats.examples.jetstream.NatsJsUtils.printStreamInfo; import static org.junit.jupiter.api.Assertions.*; public class JetStreamTestBase extends TestBase { public static final String JS_REPLY_TO = "$JS.ACK.test-stream.test-consumer.1.2.3.1605139610113260000"; public static final Duration DEFAULT_TIMEOUT = Duration.ofMillis(500); public NatsMessage getJsMessage(String replyTo) { return new NatsMessage.InternalMessageFactory("sid", "subj", replyTo, 0, false).getMessage(); } // ---------------------------------------------------------------------------------------------------- // Management // ---------------------------------------------------------------------------------------------------- public static StreamInfo createMemoryStream(JetStreamManagement jsm, String streamName, String... 
subjects) throws IOException, JetStreamApiException { StreamConfiguration sc = StreamConfiguration.builder() .name(streamName) .storageType(StorageType.Memory) .subjects(subjects).build(); return jsm.addStream(sc); } public static StreamInfo createMemoryStream(Connection nc, String streamName, String... subjects) throws IOException, JetStreamApiException { return createMemoryStream(nc.jetStreamManagement(), streamName, subjects); } public static StreamInfo createTestStream(Connection nc) throws IOException, JetStreamApiException { return createMemoryStream(nc, STREAM, SUBJECT); } public static StreamInfo createTestStream(JetStreamManagement jsm) throws IOException, JetStreamApiException { return createMemoryStream(jsm, STREAM, SUBJECT); } public static void debug(JetStreamManagement jsm, int n) throws IOException, JetStreamApiException { System.out.println("\n" + n + ". -------------------------------"); printStreamInfo(jsm.getStreamInfo(STREAM)); printConsumerInfo(jsm.getConsumerInfo(STREAM, DURABLE)); } public static <T extends Throwable> T assertThrowsPrint(Class<T> expectedType, Executable executable) { T t = org.junit.jupiter.api.Assertions.assertThrows(expectedType, executable); t.printStackTrace(); return t; } // ---------------------------------------------------------------------------------------------------- // Publish / Read // ---------------------------------------------------------------------------------------------------- public static void jsPublish(JetStream js, String subject, String prefix, int count) throws IOException, JetStreamApiException { for (int x = 1; x <= count; x++) { String data = prefix + x; js.publish(NatsMessage.builder() .subject(subject) .data(data.getBytes(StandardCharsets.US_ASCII)) .build() ); } } public static void jsPublish(JetStream js, String subject, int startId, int count) throws IOException, JetStreamApiException { for (int x = 0; x < count; x++) { 
js.publish(NatsMessage.builder().subject(subject).data((dataBytes(startId++))).build()); } } public static void jsPublish(JetStream js, String subject, int count) throws IOException, JetStreamApiException { jsPublish(js, subject, 1, count); } public static void jsPublish(Connection nc, String subject, int count) throws IOException, JetStreamApiException { jsPublish(nc.jetStream(), subject, 1, count); } public static void jsPublish(Connection nc, String subject, int startId, int count) throws IOException, JetStreamApiException { jsPublish(nc.jetStream(), subject, startId, count); } public static PublishAck jsPublish(JetStream js) throws IOException, JetStreamApiException { Message msg = NatsMessage.builder() .subject(SUBJECT) .data(DATA.getBytes(StandardCharsets.US_ASCII)) .build(); return js.publish(msg); } public static List<Message> readMessagesAck(JetStreamSubscription sub) throws InterruptedException { return readMessagesAck(sub, false); } public static List<Message> readMessagesAck(JetStreamSubscription sub, boolean noisy) throws InterruptedException { List<Message> messages = new ArrayList<>(); Message msg = sub.nextMessage(Duration.ofSeconds(1)); while (msg != null) { messages.add(msg); if (msg.isJetStream()) { if (noisy) { System.out.println("ACK " + new String(msg.getData())); } msg.ack(); } else if (msg.isStatusMessage()) { if (noisy) { System.out.println("STATUS " + msg.getStatus()); } } else if (noisy) { System.out.println("? 
" + new String(msg.getData()) + "?"); } msg = sub.nextMessage(Duration.ofSeconds(1)); } return messages; } public static List<Message> readMessages(Iterator<Message> list) { List<Message> messages = new ArrayList<>(); while (list.hasNext()) { messages.add(list.next()); } return messages; } // ---------------------------------------------------------------------------------------------------- // Validate / Assert // ---------------------------------------------------------------------------------------------------- public static void validateRedAndTotal(int expectedRed, int actualRed, int expectedTotal, int actualTotal) { validateRead(expectedRed, actualRed); validateTotal(expectedTotal, actualTotal); } public static void validateTotal(int expectedTotal, int actualTotal) { assertEquals(expectedTotal, actualTotal, "Total does not match"); } public static void validateRead(int expectedRed, int actualRed) { assertEquals(expectedRed, actualRed, "Read does not match"); } public static void assertSubscription(JetStreamSubscription sub, String stream, String consumer, String deliver, boolean isPullMode) { assertEquals(stream, ((NatsJetStreamSubscription)sub).getStream()); if (consumer == null) { assertNotNull(((NatsJetStreamSubscription)sub).getConsumer()); } else { assertEquals(consumer, ((NatsJetStreamSubscription) sub).getConsumer()); } if (deliver != null) { assertEquals(deliver, ((NatsJetStreamSubscription)sub).getDeliverSubject()); } boolean pm = ((NatsJetStreamSubscription)sub).isPullMode(); assertEquals(isPullMode, pm); // coverage assertTrue(sub.toString().contains("isPullMode=" + pm)); } public static void assertSameMessages(List<Message> l1, List<Message> l2) { assertEquals(l1.size(), l2.size()); List<String> data1 = l1.stream() .map(m -> new String(m.getData())) .collect(Collectors.toList()); List<String> data2 = l2.stream() .map(m -> new String(m.getData())) .collect(Collectors.toList()); assertEquals(data1, data2); } public static void 
assertAllJetStream(List<Message> messages) { for (Message m : messages) { assertIsJetStream(m); } } public static void assertIsJetStream(Message m) { assertTrue(m.isJetStream()); assertFalse(m.isStatusMessage()); assertNull(m.getStatus()); } public static void assertLastIsStatus(List<Message> messages, int code) { int lastIndex = messages.size() - 1; for (int x = 0; x < lastIndex; x++) { Message m = messages.get(x); assertTrue(m.isJetStream()); } assertIsStatus(messages.get(lastIndex), code); } public static void assertStarts408(List<Message> messages, int count408, int expectedJs) { for (int x = 0; x < count408; x++) { assertIsStatus(messages.get(x), 408); } int countedJs = 0; int lastIndex = messages.size() - 1; for (int x = count408; x <= lastIndex; x++) { Message m = messages.get(x); assertTrue(m.isJetStream()); countedJs++; } assertEquals(expectedJs, countedJs); } private static void assertIsStatus(Message statusMsg, int code) { assertFalse(statusMsg.isJetStream()); assertTrue(statusMsg.isStatusMessage()); assertNotNull(statusMsg.getStatus()); assertEquals(code, statusMsg.getStatus().getCode()); } public static void assertSource(JetStreamManagement jsm, String stream, Long msgCount, Long firstSeq) throws IOException, JetStreamApiException { sleep(1000); StreamInfo si = jsm.getStreamInfo(stream); assertConfig(stream, msgCount, firstSeq, si); } public static void assertMirror(JetStreamManagement jsm, String stream, String mirroring, Long msgCount, Long firstSeq) throws IOException, JetStreamApiException { sleep(1000); StreamInfo si = jsm.getStreamInfo(stream); MirrorInfo msi = si.getMirrorInfo(); assertNotNull(msi); assertEquals(mirroring, msi.getName()); assertConfig(stream, msgCount, firstSeq, si); } public static void assertConfig(String stream, Long msgCount, Long firstSeq, StreamInfo si) { StreamConfiguration sc = si.getConfiguration(); assertNotNull(sc); assertEquals(stream, sc.getName()); StreamState ss = si.getStreamState(); if (msgCount != null) { 
assertEquals(msgCount, ss.getMsgCount()); } if (firstSeq != null) { assertEquals(firstSeq, ss.getFirstSequence()); } } public static void assertStreamSource(MessageInfo info, String stream, int i) { String hval = info.getHeaders().get("Nats-Stream-Source").get(0); String[] parts = hval.split(" "); assertEquals(stream, parts[0]); assertEquals("" + i, parts[1]); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.transport; import java.io.IOException; import java.nio.ByteBuffer; import java.util.concurrent.TimeUnit; import com.google.common.primitives.Ints; import org.apache.cassandra.transport.ClientResourceLimits.Overload; import org.apache.cassandra.utils.concurrent.NonBlockingRateLimiter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.Channel; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.exceptions.OverloadedException; import org.apache.cassandra.metrics.ClientMetrics; import org.apache.cassandra.metrics.ClientMessageSizeMetrics; import org.apache.cassandra.net.AbstractMessageHandler; import org.apache.cassandra.net.FrameDecoder; import org.apache.cassandra.net.FrameDecoder.IntactFrame; import org.apache.cassandra.net.FrameEncoder; import org.apache.cassandra.net.ResourceLimits; import org.apache.cassandra.net.ResourceLimits.Limit; import org.apache.cassandra.net.ShareableBytes; import org.apache.cassandra.transport.Flusher.FlushItem.Framed; import org.apache.cassandra.transport.messages.ErrorMessage; import 
org.apache.cassandra.utils.NoSpamLogger; import static org.apache.cassandra.utils.MonotonicClock.approxTime; /** * Implementation of {@link AbstractMessageHandler} for processing CQL messages which comprise a {@link Message} wrapped * in an {@link Envelope}. This class is parameterized by a {@link Message} subtype, expected to be either * {@link Message.Request} or {@link Message.Response}. Most commonly, an instance for handling {@link Message.Request} * is created for each inbound CQL client connection. * * # Small vs large messages * Small messages are deserialized in place, and then handed off to a consumer for processing. * Large messages accumulate frames until all bytes for the envelope are received, then concatenate and deserialize the * frames on the event loop thread and pass them on to the same consumer. * * # Flow control (backpressure) * The size of an incoming message is explicit in the {@link Envelope.Header}. * * By default, every connection has 1MiB of exlusive permits available before needing to access the per-endpoint * and global reserves. By default, those reserves are sized proportionally to the heap - 2.5% of heap per-endpoint * and a 10% for the global reserve. * * Permits are held while CQL messages are processed and released after the response has been encoded into the * buffers of the response frame. * * A connection level option (THROW_ON_OVERLOAD) allows clients to choose the backpressure strategy when a connection * has exceeded the maximum number of allowed permits. The choices are to either pause reads from the incoming socket * and allow TCP backpressure to do the work, or to throw an explict exception and rely on the client to back off. 
*/ public class CQLMessageHandler<M extends Message> extends AbstractMessageHandler { private static final Logger logger = LoggerFactory.getLogger(CQLMessageHandler.class); private static final NoSpamLogger noSpamLogger = NoSpamLogger.getLogger(logger, 1L, TimeUnit.SECONDS); public static final int LARGE_MESSAGE_THRESHOLD = FrameEncoder.Payload.MAX_SIZE - 1; public static final TimeUnit RATE_LIMITER_DELAY_UNIT = TimeUnit.NANOSECONDS; private final Envelope.Decoder envelopeDecoder; private final Message.Decoder<M> messageDecoder; private final FrameEncoder.PayloadAllocator payloadAllocator; private final MessageConsumer<M> dispatcher; private final ErrorHandler errorHandler; private final boolean throwOnOverload; private final ProtocolVersion version; private final NonBlockingRateLimiter requestRateLimiter; long channelPayloadBytesInFlight; private int consecutiveMessageErrors = 0; interface MessageConsumer<M extends Message> { void accept(Channel channel, M message, Dispatcher.FlushItemConverter toFlushItem, Overload backpressure); } interface ErrorHandler { void accept(Throwable error); } CQLMessageHandler(Channel channel, ProtocolVersion version, FrameDecoder decoder, Envelope.Decoder envelopeDecoder, Message.Decoder<M> messageDecoder, MessageConsumer<M> dispatcher, FrameEncoder.PayloadAllocator payloadAllocator, int queueCapacity, ClientResourceLimits.ResourceProvider resources, OnHandlerClosed onClosed, ErrorHandler errorHandler, boolean throwOnOverload) { super(decoder, channel, LARGE_MESSAGE_THRESHOLD, queueCapacity, resources.endpointLimit(), resources.globalLimit(), resources.endpointWaitQueue(), resources.globalWaitQueue(), onClosed); this.envelopeDecoder = envelopeDecoder; this.messageDecoder = messageDecoder; this.payloadAllocator = payloadAllocator; this.dispatcher = dispatcher; this.errorHandler = errorHandler; this.throwOnOverload = throwOnOverload; this.version = version; this.requestRateLimiter = resources.requestRateLimiter(); } @Override public 
boolean process(FrameDecoder.Frame frame) throws IOException { // new frame, clean slate for processing errors consecutiveMessageErrors = 0; return super.process(frame); } /** * Checks limits on bytes in flight and the request rate limiter (if enabled), then takes one of three actions: * * 1.) If no limits are breached, process the request. * 2.) If a limit is breached, and the connection is configured to throw on overload, throw {@link OverloadedException}. * 3.) If a limit is breached, and the connection is not configurd to throw, process the request, and return false * to let the {@link FrameDecoder} know it should stop processing frames. * * If the connection is configured to throw {@link OverloadedException}, requests that breach the rate limit are * not counted against that limit. * * @return true if the {@link FrameDecoder} should continue to process incoming frames, and false if it should stop * processing them, effectively applying backpressure to clients * * @throws ErrorMessage.WrappedException with an {@link OverloadedException} if overload occurs and the * connection is configured to throw on overload */ protected boolean processOneContainedMessage(ShareableBytes bytes, Limit endpointReserve, Limit globalReserve) { ByteBuffer buf = bytes.get(); Envelope.Decoder.HeaderExtractionResult extracted = envelopeDecoder.extractHeader(buf); if (!extracted.isSuccess()) return handleProtocolException(extracted.error(), buf, extracted.streamId(), extracted.bodyLength()); Envelope.Header header = extracted.header(); if (header.version != version) { ProtocolException error = new ProtocolException(String.format("Invalid message version. 
Got %s but previous" + "messages on this connection had version %s", header.version, version)); return handleProtocolException(error, buf, header.streamId, header.bodySizeInBytes); } // max CQL message size defaults to 256mb, so should be safe to downcast int messageSize = Ints.checkedCast(header.bodySizeInBytes); if (throwOnOverload) { if (!acquireCapacity(header, endpointReserve, globalReserve)) { discardAndThrow(endpointReserve, globalReserve, buf, header, messageSize, Overload.BYTES_IN_FLIGHT); return true; } if (DatabaseDescriptor.getNativeTransportRateLimitingEnabled() && !requestRateLimiter.tryReserve()) { // We've already allocated against the bytes-in-flight limits, so release those resources. release(header); discardAndThrow(endpointReserve, globalReserve, buf, header, messageSize, Overload.REQUESTS); return true; } } else { Overload backpressure = Overload.NONE; if (!acquireCapacityAndQueueOnFailure(header, endpointReserve, globalReserve)) { if (processRequestAndUpdateMetrics(bytes, header, messageSize, Overload.BYTES_IN_FLIGHT)) { if (decoder.isActive()) ClientMetrics.instance.pauseConnection(); } backpressure = Overload.BYTES_IN_FLIGHT; } if (DatabaseDescriptor.getNativeTransportRateLimitingEnabled()) { // Reserve a permit even if we've already triggered backpressure on bytes in flight. long delay = requestRateLimiter.reserveAndGetDelay(RATE_LIMITER_DELAY_UNIT); if (backpressure == Overload.NONE && delay > 0) { if (processRequestAndUpdateMetrics(bytes, header, messageSize, Overload.REQUESTS)) { if (decoder.isActive()) ClientMetrics.instance.pauseConnection(); // Schedule a wakup here if we process successfully. The connection should be closing otherwise. scheduleConnectionWakeupTask(delay, RATE_LIMITER_DELAY_UNIT); } backpressure = Overload.REQUESTS; } } // If we triggered backpressure, make sure the caller stops processing frames after the request completes. 
    // Tail of the enclosing method (signature is above this chunk): stop reading from the
    // channel if backpressure was applied, otherwise account for and process the message.
    if (backpressure != Overload.NONE)
        return false;
    }

        return processRequestAndUpdateMetrics(bytes, header, messageSize, Overload.NONE);
    }

    // Accounts for the message against the channel's in-flight byte counter and the
    // received-message metrics, then hands the assembled request off for processing.
    private boolean processRequestAndUpdateMetrics(ShareableBytes bytes, Envelope.Header header, int messageSize, Overload backpressure)
    {
        channelPayloadBytesInFlight += messageSize;
        incrementReceivedMessageMetrics(messageSize);
        return processRequest(composeRequest(header, bytes), backpressure);
    }

    // Discards the current message (advancing the buffer past it) and responds to the
    // client with an OverloadedException instead of processing it.
    private void discardAndThrow(Limit endpointReserve, Limit globalReserve,
                                 ByteBuffer buf, Envelope.Header header, int messageSize,
                                 Overload overload)
    {
        ClientMetrics.instance.markRequestDiscarded();
        logOverload(endpointReserve, globalReserve, header, messageSize);

        OverloadedException exception = buildOverloadedException(endpointReserve, globalReserve, overload);
        handleError(exception, header);

        // Don't stop processing incoming messages, as we rely on the client to apply
        // backpressure when it receives OverloadedException, but discard this message
        // as we're responding with the overloaded error.
        incrementReceivedMessageMetrics(messageSize);
        buf.position(buf.position() + Envelope.Header.LENGTH + messageSize);
    }

    // Chooses the error text based on which limit was breached: the global request-rate
    // limiter vs. the bytes-in-flight reserves.
    private OverloadedException buildOverloadedException(Limit endpointReserve, Limit globalReserve, Overload overload)
    {
        return overload == Overload.REQUESTS
               ? new OverloadedException(String.format("Request breached global limit of %d requests/second. Server is " +
                                                       "currently in an overloaded state and cannot accept more requests.",
                                                       requestRateLimiter.getRate()))
               : new OverloadedException(String.format("Request breached limit on bytes in flight. (Endpoint: %d/%d bytes, Global: %d/%d bytes.) " +
                                                       "Server is currently in an overloaded state and cannot accept more requests.",
                                                       endpointReserve.using(), endpointReserve.limit(),
                                                       globalReserve.using(), globalReserve.limit()));
    }

    // Trace-level diagnostic for a discarded/overloaded request; cheap no-op unless
    // trace logging is enabled.
    private void logOverload(Limit endpointReserve, Limit globalReserve, Envelope.Header header, int messageSize)
    {
        logger.trace("Discarded request of size {} with {} bytes in flight on channel. " +
                     "Using {}/{} bytes of endpoint limit and {}/{} bytes of global limit. " +
                     "Global rate limiter: {} Header: {}",
                     messageSize, channelPayloadBytesInFlight,
                     endpointReserve.using(), endpointReserve.limit(),
                     globalReserve.using(), globalReserve.limit(),
                     requestRateLimiter, header);
    }

    // Returns true if processing of the current frame should continue, false if the
    // connection should be torn down (the error is escalated to a fatal one).
    private boolean handleProtocolException(ProtocolException exception,
                                            ByteBuffer buf,
                                            int streamId,
                                            long expectedMessageLength)
    {
        // hard fail if either :
        //  * the expectedMessageLength is < 0 as we're unable to skip the remainder
        //    of the Envelope and attempt to read the next one
        //  * we hit a run of errors in the same frame. Some errors are recoverable
        //    as they have no effect on subsequent Envelopes, in which case we attempt
        //    to continue processing. If we start seeing consecutive errors we assume
        //    that this is not the case and that the entire remaining frame is garbage.
        //    It's possible here that we fail hard when we could potentially not do
        //    (e.g. every Envelope has an invalid opcode, but is otherwise semantically
        //    intact), but this is a trade off.
        if (expectedMessageLength < 0 || ++consecutiveMessageErrors > DatabaseDescriptor.getConsecutiveMessageErrorsThreshold())
        {
            // transform the exception to a fatal one so the exception handler closes the channel
            if (!exception.isFatal())
                exception = ProtocolException.toFatalException(exception);
            handleError(exception, streamId);
            return false;
        }
        else
        {
            // exception should not be a fatal error or the exception handler will close the channel
            handleError(exception, streamId);
            // skip body
            buf.position(Math.min(buf.limit(), buf.position() + Envelope.Header.LENGTH + Ints.checkedCast(expectedMessageLength)));
            // continue processing frame
            return true;
        }
    }

    // Bumps per-handler counters and global client-message-size metrics; message size is
    // accounted inclusive of the envelope header.
    private void incrementReceivedMessageMetrics(int messageSize)
    {
        receivedCount++;
        receivedBytes += messageSize + Envelope.Header.LENGTH;
        ClientMessageSizeMetrics.bytesReceived.inc(messageSize + Envelope.Header.LENGTH);
        ClientMessageSizeMetrics.bytesReceivedPerRequest.update(messageSize + Envelope.Header.LENGTH);
    }

    // Wraps the message body (past the envelope header) in a retained, zero-copy ByteBuf
    // and advances the source buffer past the whole envelope.
    private Envelope composeRequest(Envelope.Header header, ShareableBytes bytes)
    {
        // extract body
        ByteBuffer buf = bytes.get();
        int idx = buf.position() + Envelope.Header.LENGTH;
        final int end = idx + Ints.checkedCast(header.bodySizeInBytes);
        ByteBuf body = Unpooled.wrappedBuffer(buf.slice());
        body.readerIndex(Envelope.Header.LENGTH);
        body.retain();
        buf.position(end);
        return new Envelope(header, body);
    }

    protected boolean processRequest(Envelope request)
    {
        return processRequest(request, Overload.NONE);
    }

    // Decodes the envelope into a CQL message and dispatches it. Returns whether frame
    // processing should continue; on repeated failures the error is escalated so the
    // exception handler closes the channel.
    protected boolean processRequest(Envelope request, Overload backpressure)
    {
        M message = null;
        try
        {
            message = messageDecoder.decode(channel, request);
            dispatcher.accept(channel, message, this::toFlushItem, backpressure);

            // successfully delivered a CQL message to the execution
            // stage, so reset the counter of consecutive errors
            consecutiveMessageErrors = 0;
            return true;
        }
        catch (Exception e)
        {
            if (message != null)
                request.release();

            boolean continueProcessing = true;

            // Indicate that an error was encountered. Initially, we can continue to
            // process the current frame, but if we keep catching errors, we assume that
            // the whole frame payload is no good, stop processing and close the connection.
            if(++consecutiveMessageErrors > DatabaseDescriptor.getConsecutiveMessageErrorsThreshold())
            {
                if (!(e instanceof ProtocolException))
                {
                    logger.debug("Error decoding CQL message", e);
                    e = new ProtocolException("Error encountered decoding CQL message: " + e.getMessage());
                }
                e = ProtocolException.toFatalException((ProtocolException) e);
                continueProcessing = false;
            }
            handleErrorAndRelease(e, request.header);
            return continueProcessing;
        }
    }

    /**
     * For "expected" errors this ensures we pass a WrappedException,
     * which contains a streamId, to the error handler. This makes
     * sure that wherever possible, the streamId is propagated back
     * to the client.
     * This also releases the capacity acquired for processing as
     * indicated by supplied header.
     */
    private void handleErrorAndRelease(Throwable t, Envelope.Header header)
    {
        release(header);
        handleError(t, header);
    }

    /**
     * For "expected" errors this ensures we pass a WrappedException,
     * which contains a streamId, to the error handler. This makes
     * sure that wherever possible, the streamId is propagated back
     * to the client.
     * This variant doesn't call release as it is intended for use
     * when an error occurs without any capacity being acquired.
     * Typically, this would be the result of an acquisition failure
     * if the THROW_ON_OVERLOAD option has been specified by the client.
     */
    private void handleError(Throwable t, Envelope.Header header)
    {
        handleError(t, header.streamId);
    }

    /**
     * For "expected" errors this ensures we pass a WrappedException,
     * which contains a streamId, to the error handler. This makes
     * sure that wherever possible, the streamId is propagated back
     * to the client.
     * This variant doesn't call release as it is intended for use
     * when an error occurs without any capacity being acquired.
     * Typically, this would be the result of an acquisition failure
     * if the THROW_ON_OVERLOAD option has been specified by the client.
     */
    private void handleError(Throwable t, int streamId)
    {
        errorHandler.accept(ErrorMessage.wrap(t, streamId));
    }

    /**
     * For use in the case where the error can't be mapped to a specific stream id,
     * such as a corrupted frame, or when extracting a CQL message from the frame's
     * payload fails. This does not attempt to release any resources, as these errors
     * should only occur before any capacity acquisition is attempted (e.g. on receipt
     * of a corrupt frame, or failure to extract a CQL message from the envelope).
     */
    private void handleError(Throwable t)
    {
        errorHandler.accept(t);
    }

    // Acts as a Dispatcher.FlushItemConverter
    private Framed toFlushItem(Channel channel, Message.Request request, Message.Response response)
    {
        // Returns a FlushItem.Framed instance which wraps a Consumer<FlushItem> that performs
        // the work of returning the capacity allocated for processing the request.
        // The Dispatcher will call this to obtain the FlushItem to enqueue with its Flusher once
        // a dispatched request has been processed.
        Envelope responseFrame = response.encode(request.getSource().header.version);
        int responseSize = envelopeSize(responseFrame.header);
        ClientMessageSizeMetrics.bytesSent.inc(responseSize);
        ClientMessageSizeMetrics.bytesSentPerResponse.update(responseSize);

        return new Framed(channel,
                          responseFrame,
                          request.getSource(),
                          payloadAllocator,
                          this::release);
    }

    // Releases the reserved capacity for a flushed item along with both its request and
    // response envelopes (reference-counted Netty buffers).
    private void release(Flusher.FlushItem<Envelope> flushItem)
    {
        release(flushItem.request.header);
        flushItem.request.release();
        flushItem.response.release();
    }

    // Returns the message's bytes to the endpoint/global reserves and decrements the
    // per-channel in-flight counter.
    private void release(Envelope.Header header)
    {
        releaseCapacity(Ints.checkedCast(header.bodySizeInBytes));
        channelPayloadBytesInFlight -= header.bodySizeInBytes;
    }

    /*
     * Handling of multi-frame large messages
     */

    // Handles the first frame of a message spanning multiple frames. Returns whether the
    // handler should keep consuming from the channel (false => backpressure applied).
    protected boolean processFirstFrameOfLargeMessage(IntactFrame frame, Limit endpointReserve, Limit globalReserve) throws IOException
    {
        ShareableBytes bytes = frame.contents;
        ByteBuffer buf = bytes.get();
        try
        {
            Envelope.Decoder.HeaderExtractionResult extracted = envelopeDecoder.extractHeader(buf);
            if (!extracted.isSuccess())
            {
                // Hard fail on any decoding error as we can't trust the subsequent frames of
                // the large message
                handleError(ProtocolException.toFatalException(extracted.error()));
                return false;
            }

            Envelope.Header header = extracted.header();
            // max CQL message size defaults to 256mb, so should be safe to downcast
            int messageSize = Ints.checkedCast(header.bodySizeInBytes);
            receivedBytes += buf.remaining();

            LargeMessage largeMessage = new LargeMessage(header);
            if (!acquireCapacity(header, endpointReserve, globalReserve))
            {
                // In the case of large messages, never stop processing incoming frames
                // as this will halt the client meaning no further frames will be sent,
                // leading to starvation.
                // If the throwOnOverload option is set, don't process the message once
                // read, return an error response to notify the client that resource
                // limits have been exceeded. If the option isn't set, the only thing we
                // can do is to consume the subsequent frames and process the message.
                // Large and small messages are never interleaved for a single client, so
                // we know that this client will finish sending the large message before
                // anything else. Other clients sending small messages concurrently will
                // be backpressured by the global resource limits. The server is still
                // vulnerable to overload by multiple clients sending large messages
                // concurrently.
                if (throwOnOverload)
                {
                    // Mark as overloaded so that we discard the message after consuming any subsequent frames.
                    ClientMetrics.instance.markRequestDiscarded();
                    logOverload(endpointReserve, globalReserve, header, messageSize);
                    largeMessage.markOverloaded(Overload.BYTES_IN_FLIGHT);
                }
            }
            else if (DatabaseDescriptor.getNativeTransportRateLimitingEnabled())
            {
                if (throwOnOverload)
                {
                    if (!requestRateLimiter.tryReserve())
                    {
                        ClientMetrics.instance.markRequestDiscarded();
                        logOverload(endpointReserve, globalReserve, header, messageSize);

                        // Mark as overloaded so that we discard the message after consuming any subsequent frames.
                        // (i.e. Request resources we may already have acquired above will be released.)
                        largeMessage.markOverloaded(Overload.REQUESTS);

                        this.largeMessage = largeMessage;
                        largeMessage.supply(frame);
                        return true;
                    }
                }
                else
                {
                    long delay = requestRateLimiter.reserveAndGetDelay(RATE_LIMITER_DELAY_UNIT);

                    if (delay > 0)
                    {
                        this.largeMessage = largeMessage;
                        largeMessage.markBackpressure(Overload.REQUESTS);
                        largeMessage.supply(frame);

                        // Pause the connection and schedule a wake-up once the rate
                        // limiter's delay has elapsed.
                        if (decoder.isActive())
                            ClientMetrics.instance.pauseConnection();
                        scheduleConnectionWakeupTask(delay, RATE_LIMITER_DELAY_UNIT);
                        return false;
                    }
                }
            }

            this.largeMessage = largeMessage;
            largeMessage.supply(frame);
            return true;
        }
        catch (Exception e)
        {
            throw new IOException("Error decoding CQL Message", e);
        }
    }

    // Short channel id used in log/error messages.
    protected String id()
    {
        return channel.id().asShortText();
    }

    // Re-activates a paused connection on this channel's event loop after the rate
    // limiter's delay has passed.
    private void scheduleConnectionWakeupTask(long waitLength, TimeUnit unit)
    {
        channel.eventLoop().schedule(() ->
        {
            try
            {
                // We might have already reactivated via another wake task.
                if (!decoder.isActive())
                {
                    decoder.reactivate();

                    // Only update the relevant metric if we've actually activated.
                    if (decoder.isActive())
                        ClientMetrics.instance.unpauseConnection();
                }
            }
            catch (Throwable t)
            {
                fatalExceptionCaught(t);
            }
        }, waitLength, unit);
    }

    @SuppressWarnings("BooleanMethodIsAlwaysInverted")
    private boolean acquireCapacityAndQueueOnFailure(Envelope.Header header, Limit endpointReserve, Limit globalReserve)
    {
        int bytesRequired = Ints.checkedCast(header.bodySizeInBytes);
        long currentTimeNanos = approxTime.now();
        return acquireCapacity(endpointReserve, globalReserve, bytesRequired, currentTimeNanos, Long.MAX_VALUE);
    }

    @SuppressWarnings("BooleanMethodIsAlwaysInverted")
    private boolean acquireCapacity(Envelope.Header header, Limit endpointReserve, Limit globalReserve)
    {
        int bytesRequired = Ints.checkedCast(header.bodySizeInBytes);
        return acquireCapacity(endpointReserve, globalReserve, bytesRequired) == ResourceLimits.Outcome.SUCCESS;
    }

    /*
     * Although it would be possible to recover when certain types of corrupt frame are encountered,
     * this could cause problems for clients as the payload may contain CQL messages from multiple
     * streams. Simply dropping the corrupt frame or returning an error response would not give the
     * client enough information to map back to inflight requests, leading to timeouts.
     * Instead, we need to fail fast, possibly dropping the connection whenever a corrupt frame is
     * encountered. Consequently, we terminate the connection (via a ProtocolException) whenever a
     * corrupt frame is encountered, regardless of its type.
     */
    protected void processCorruptFrame(FrameDecoder.CorruptFrame frame)
    {
        corruptFramesUnrecovered++;
        String error = String.format("%s invalid, unrecoverable CRC mismatch detected in frame %s. Read %d, Computed %d",
                                     id(), frame.isRecoverable() ? "body" : "header", frame.readCRC, frame.computedCRC);

        noSpamLogger.error(error);

        // If this is part of a multi-frame message, process it before passing control to the error handler.
        // This is so we can take care of any housekeeping associated with large messages.
        if (!frame.isSelfContained)
        {
            if (null == largeMessage) // first frame of a large message
                receivedBytes += frame.frameSize;
            else // subsequent frame of a large message
                processSubsequentFrameOfLargeMessage(frame);
        }

        handleError(ProtocolException.toFatalException(new ProtocolException(error)));
    }

    // Last-resort handler: discard any in-progress frame state and close the connection.
    protected void fatalExceptionCaught(Throwable cause)
    {
        decoder.discard();
        logger.warn("Unrecoverable exception caught in CQL message processing pipeline, closing the connection", cause);
        channel.close();
    }

    // Total on-the-wire size of an envelope: fixed header plus body.
    static int envelopeSize(Envelope.Header header)
    {
        return Envelope.Header.LENGTH + Ints.checkedCast(header.bodySizeInBytes);
    }

    // Accumulates the frames of a CQL message too large for a single transport frame and
    // processes (or discards) it once complete.
    private class LargeMessage extends AbstractMessageHandler.LargeMessage<Envelope.Header>
    {
        private static final long EXPIRES_AT = Long.MAX_VALUE;

        // Set when the message must be discarded (throwOnOverload) rather than processed.
        private Overload overload = Overload.NONE;
        // Set when the message was accepted but backpressure should be reported downstream.
        private Overload backpressure = Overload.NONE;

        private LargeMessage(Envelope.Header header)
        {
            super(envelopeSize(header), header, EXPIRES_AT, false);
        }

        // Stitches the accumulated frame buffers into a single composite envelope.
        // The header is only present in the first buffer, hence the readerIndex skip.
        private Envelope assembleFrame()
        {
            ByteBuf body = Unpooled.wrappedBuffer(buffers.stream()
                                                         .map(ShareableBytes::get)
                                                         .toArray(ByteBuffer[]::new));
            body.readerIndex(Envelope.Header.LENGTH);
            body.retain();
            return new Envelope(header, body);
        }

        /**
         * Used to indicate that a message should be dropped and not processed.
         * We do this on receipt of the first frame of a large message if sufficient capacity
         * cannot be acquired to process it and throwOnOverload is set for the connection.
         * In this case, the client has elected to shed load rather than apply backpressure
         * so we must ensure that subsequent frames are consumed from the channel. At that
         * point an error response is returned to the client, rather than processing the message.
         */
        private void markOverloaded(Overload overload)
        {
            this.overload = overload;
        }

        private void markBackpressure(Overload backpressure)
        {
            this.backpressure = backpressure;
        }

        protected void onComplete()
        {
            if (overload != Overload.NONE)
                handleErrorAndRelease(buildOverloadedException(endpointReserveCapacity, globalReserveCapacity, overload), header);
            else if (!isCorrupt)
                processRequest(assembleFrame(), backpressure);
        }

        protected void abort()
        {
            if (!isCorrupt)
                releaseBuffersAndCapacity(); // release resources if in normal state when abort() is invoked
        }
    }
}
/* * Copyright (C) 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.bluetoothlegatt; // // Created 15.12.2015 Khurshid Aliev // /** * For a given BLE device, this Activity provides the user interface to connect, display data, * and display GATT services and characteristics supported by the device. The Activity * communicates with {@code BluetoothLeService}, which in turn interacts with the * Bluetooth LE API. */ import android.app.Activity; import android.bluetooth.BluetoothGattCharacteristic; import android.bluetooth.BluetoothGattService; import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.ServiceConnection; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.AsyncTask; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.IBinder; import android.text.format.DateFormat; import android.util.Log; import android.view.View; import android.view.Window; import android.widget.CompoundButton; import android.widget.LinearLayout; import android.widget.ProgressBar; import android.widget.TextView; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; 
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.example.android.bluetoothlegatt.model.Weather;
import org.json.JSONException;

/**
 * For a given BLE device, this Activity provides the user interface to connect, display data,
 * and display GATT services and characteristics supported by the device. The Activity
 * communicates with {@code BluetoothLeService}, which in turn interacts with the
 * Bluetooth LE API.
 */
public class DeviceControlActivity extends Activity {
    private final static String TAG = DeviceControlActivity.class.getSimpleName();

    public static final String EXTRAS_DEVICE_NAME = "DEVICE_NAME";
    public static final String EXTRAS_DEVICE_ADDRESS = "DEVICE_ADDRESS";

    // NOTE(review): CONFIG_DESCRIPTOR, RGBFrame, sleep and HM_RX_TX are not referenced
    // anywhere in this class; HM_RX_TX is public so it may be used externally — kept.
    private static final UUID CONFIG_DESCRIPTOR = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");

    // Polling period (seconds) for the CSV-logging timer; overridden by the "timer" intent extra.
    long BLUETOOTH_TIMER = 15000;

    private TextView switchStatus, inDoor, outDoor;
    private TextView cityText, temp2, fara2_text, hum, celcius_text, fara_text, date_text, clock_text, humid_text;
    private ProgressBar vProgressBar, vProgressBar2;
    private ToggleButton mySwitch;
    private Timer timer, timer2;
    private TimerTask timer_humid, timer_temp;
    private SimpleDateFormat sdf;
    private LinearLayout layoutAnalog;
    private boolean flag = true;
    private LinearLayout ll, layoutDigital;
    private int count = 0;
    private FileWriter writer;
    private int[] RGBFrame = {0, 0, 0};
    private String mDeviceAddress, date = "", city = "Torino,IT", mDeviceName;
    float temp = 0;
    float humid = 0;
    private BluetoothLeService mBluetoothLeService;
    private boolean mConnected = false;
    private BluetoothGattCharacteristic characteristicTX;
    private BluetoothGattCharacteristic characteristicRX;
    private boolean humidity = true, sleep = true;
    public final static UUID HM_RX_TX = UUID.fromString(SampleGattAttributes.HM_RX_TX);
    private final String LIST_NAME = "NAME";
    private final String LIST_UUID = "UUID";
    final Handler handler = new Handler();

    // Code to manage Service lifecycle.
    private final ServiceConnection mServiceConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName componentName, IBinder service) {
            mBluetoothLeService = ((BluetoothLeService.LocalBinder) service).getService();
            if (!mBluetoothLeService.initialize()) {
                Log.e(TAG, "Unable to initialize Bluetooth");
                finish();
            }
            // Automatically connects to the device upon successful start-up initialization.
            // FIX: removed a stray, copy-pasted Log.e("Unable to initialize Bluetooth") that
            // was emitted unconditionally here even after a successful connect().
            mBluetoothLeService.connect(mDeviceAddress);
        }

        @Override
        public void onServiceDisconnected(ComponentName componentName) {
            mBluetoothLeService.disconnect();
            mBluetoothLeService = null;
        }
    };

    // Handles various events fired by the Service.
    // ACTION_GATT_CONNECTED: connected to a GATT server.
    // ACTION_GATT_DISCONNECTED: disconnected from a GATT server.
    // ACTION_GATT_SERVICES_DISCOVERED: discovered GATT services.
    // ACTION_DATA_AVAILABLE: received data from the device. This can be a result of read
    // or notification operations.
    private final BroadcastReceiver mGattUpdateReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            final String action = intent.getAction();
            if (BluetoothLeService.ACTION_GATT_CONNECTED.equals(action)) {
                mConnected = true;
                updateConnectionState(R.string.connected);
                invalidateOptionsMenu();
            } else if (BluetoothLeService.ACTION_GATT_DISCONNECTED.equals(action)) {
                mConnected = false;
                updateConnectionState(R.string.disconnected);
                invalidateOptionsMenu();
                clearUI();
            } else if (BluetoothLeService.ACTION_GATT_SERVICES_DISCOVERED.equals(action)) {
                // Show all the supported services and characteristics on the user interface.
                //Log.d("new", BluetoothLeService.UUID_HM_RX_TX.toString());
                Log.d("new", "service is connected");
                displayGattServices(mBluetoothLeService.getSupportedGattServices());
            } else if (BluetoothLeService.ACTION_DATA_AVAILABLE.equals(action)) {
                String sensedData = intent.getStringExtra(mBluetoothLeService.EXTRA_DATA);
                appendDataToBuffer(sensedData);
                Log.d("timer", "Data recieved: " + sensedData);
            }
        }
    };

    private void clearUI() {
        // mDataField.setText(R.string.no_data);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        //Remove title bar
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.gatt_services_characteristics_layout);
        layoutDigital = (LinearLayout) findViewById(R.id.layoutDigital);
        layoutAnalog = (LinearLayout) findViewById(R.id.layoutAnalog);
        inDoor = (TextView) findViewById(R.id.inDoorText);
        outDoor = (TextView) findViewById(R.id.outDoorText);
        vProgressBar = (ProgressBar) findViewById(R.id.vprogressbar);
        vProgressBar2 = (ProgressBar) findViewById(R.id.vprogressbar2);
        //start();
        final Intent intent = getIntent();
        mDeviceName = intent.getStringExtra(EXTRAS_DEVICE_NAME);
        // FIX: default was 0, which later made Timer.schedule(task, 0, 0) throw
        // IllegalArgumentException when the "timer" extra was absent. Default to 15s.
        BLUETOOTH_TIMER = intent.getIntExtra("timer", 15);
        inDoor.setText(mDeviceName);
        ((TextView) findViewById(R.id.appname_text)).setText(mDeviceName);
        mDeviceAddress = intent.getStringExtra(EXTRAS_DEVICE_ADDRESS);
        // NOTE(review): no explicit Locale — pattern is digit-only so this is benign,
        // but Locale.US would be the lint-clean choice.
        sdf = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss");
        File root = Environment.getExternalStorageDirectory();
        File gpxfile = new File(root, "sensor_data.csv");
        try {
            writer = new FileWriter(gpxfile);
            writeCsvHeader("Date", "Temperature", "Humidity");
        } catch (IOException e) {
            e.printStackTrace();
        }
        celcius_text = (TextView) findViewById(R.id.celcius_text);
        humid_text = (TextView) findViewById(R.id.humidity_text);
        clock_text = (TextView) findViewById(R.id.clock_text);
        fara_text = (TextView)
findViewById(R.id.fara_text); date_text = (TextView) findViewById(R.id.date_text); gattServiceIntent = new Intent(this, BluetoothLeService.class); Intent gattServiceIntent = new Intent(this, BluetoothLeService.class); bindService(gattServiceIntent, mServiceConnection, BIND_AUTO_CREATE); startTimer(); updateTimeThread(); cityText = (TextView) findViewById(R.id.cityText); temp2 = (TextView) findViewById(R.id.celcius2_text); hum = (TextView) findViewById(R.id.humidity2_text); fara2_text = (TextView) findViewById(R.id.fara2_text); initSwitch(); timerBluetooth.schedule(timerTask, 0, BLUETOOTH_TIMER*1000); Log.d("timer", "timer is set to" + BLUETOOTH_TIMER * 1000); } public void start() { // mContext is defined upper in code, I think it is not necessary to explain what is it isBound=bindService(gattServiceIntent, mServiceConnection, BIND_AUTO_CREATE); startService(gattServiceIntent); Log.d("timer", "Service Started"); } Intent gattServiceIntent; public void stop() { /* stopService(gattServiceIntent); if (isBound) unbindService(mServiceConnection); Log.d("timer", "Service Stopped");*/ } private void writeCsvHeader(String h1, String h2, String h3) throws IOException { String line = String.format("%s,%s,%s\n", h1, h2, h3); writer.write(line); } private void writeCsvData() throws IOException { String line = String.format("%s,%.0f,%.0f\n", resultDate, resultTemp, resultHumid); writer.write(line); } private void updateTimeThread() { Thread t = new Thread() { @Override public void run() { try { while (!isInterrupted()) { Thread.sleep(1000); runOnUiThread(new Runnable() { @Override public void run() { java.util.Date noteTS = Calendar.getInstance().getTime(); String time = "hh:mm aa"; // 12:00 clock_text.setText(DateFormat.format(time, noteTS)); String date = "EEE, MMM d"; // 01 January 2013 date_text.setText(DateFormat.format(date, noteTS)); } }); } } catch (InterruptedException e) { } } }; t.start(); } @Override protected void onResume() { super.onResume(); 
registerReceiver(mGattUpdateReceiver, makeGattUpdateIntentFilter()); if (mBluetoothLeService != null) { final boolean result = mBluetoothLeService.connect(mDeviceAddress); Log.d(TAG, "Connect request result=" + result); } task = new JSONWeatherTask(); task.execute(new String[]{city}); } JSONWeatherTask task; @Override protected void onPause() { super.onPause(); unregisterReceiver(mGattUpdateReceiver); flag = false; stoptimertask(); } boolean isBound = false; @Override protected void onDestroy() { super.onDestroy(); unbindService(mServiceConnection); mBluetoothLeService = null; try { writer.flush(); writer.close(); } catch (IOException e) { e.printStackTrace(); } flag = false; // task.cancel(true); } private void updateConnectionState(final int resourceId) { runOnUiThread(new Runnable() { @Override public void run() { //mConnectionState.setText(resourceId); } }); } private List<Byte> serialbuffer = new ArrayList<Byte>(); private void appendDataToBuffer(String data) { byte[] rawArray = data.getBytes(Charset.forName("ISO-8859-1")); // Latin1 for (int i = 0; i < rawArray.length; i++) serialbuffer.add(rawArray[i]); boolean finished = false; while (!finished) { while ((serialbuffer.size() > 0) && ((serialbuffer.get(0) & 0xFF) != 0xC0)) // UTF8: C3 80 serialbuffer.remove(0); if (serialbuffer.size() <= 0) break; int indexofstop = -1; for (int i = 1; i < serialbuffer.size(); i++) { if ((serialbuffer.get(i) & 0xFF) == 0xD8) // UTF8: C3 98 { indexofstop = i; break; } } if (indexofstop < 0) { // No more stop bytes inside buffer finished = true; } else { if (indexofstop > 2) { byte[] packet = new byte[indexofstop - 1]; for (int i = 0; i < indexofstop - 1; i++) packet[i] = serialbuffer.get(i + 1); displayData(packet); for (int i = 0; i < indexofstop + 1; i++) serialbuffer.remove(0); } } } } private void displayData(byte[] rawArray) { if (rawArray != null) { //byte[] rawArray = data.clone(); int len = rawArray.length; if (len >= 4) { int t = (((int) rawArray[0]) & 0xFF) << 9; 
//Take the first byte and shift it of 8 t |= (((int) rawArray[1]) & 0xFF) << 2; //Add a second byte. In total 14 bit int h = (((int) rawArray[2]) & 0xFF) << 9; h |= (((int) rawArray[3]) & 0xFF) << 2; float tc = Math.round((-46.85 + (175.72 / 65536.0) * (float) t)); celcius_text.setText(String.format("%.1f\u00B0C", tc)); vProgressBar.setProgress((int)tc+30); float farangeit = (tc) * (9 / 5) + 32; fara_text.setText(String.format("%.1f\u00B0F", farangeit)); temp = tc; float RH = Math.round((-6 + (125.0 / 65536.0) * (float) h));//Return the humidity humid_text.setText("HUMIDITY:" + " " + String.format("%.0f%%", RH)); humid = RH; date = sdf.format(new Date()); resultDate =date; resultTemp=temp; resultHumid=humid; //writeCsvData(date, temp, humid); } } } String resultDate="No data yet"; float resultTemp = 0; float resultHumid = 0; // Demonstrates how to iterate through the supported GATT Services/Characteristics. // In this sample, we populate the data structure that is bound to the ExpandableListView // on the UI. private void displayGattServices(List<BluetoothGattService> gattServices) { if (gattServices == null) return; String uuid = null; String unknownServiceString = getResources().getString(R.string.unknown_service); ArrayList<HashMap<String, String>> gattServiceData = new ArrayList<HashMap<String, String>>(); // Loops through available GATT Services. for (BluetoothGattService gattService : gattServices) { HashMap<String, String> currentServiceData = new HashMap<String, String>(); uuid = gattService.getUuid().toString(); currentServiceData.put( LIST_NAME, SampleGattAttributes.lookup(uuid, unknownServiceString)); // If the service exists for HM 10 Serial, say so. 
if (SampleGattAttributes.lookup(uuid, unknownServiceString) == "HM 10 Serial") { // isSerial.setText(" Yes, serial connection"); } else { //isSerial.setText(" No, serial connection"); } currentServiceData.put(LIST_UUID, uuid); gattServiceData.add(currentServiceData); // get characteristic when UUID matches RX/TX UUID characteristicTX = gattService.getCharacteristic(BluetoothLeService.UUID_HM_RX_TX); characteristicRX = gattService.getCharacteristic(BluetoothLeService.UUID_HM_RX_TX); } timer.schedule(timer_humid, 5000, 15000); // timer2.schedule(timer_temp, 1000, 15000); // } private static IntentFilter makeGattUpdateIntentFilter() { final IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(BluetoothLeService.ACTION_GATT_CONNECTED); intentFilter.addAction(BluetoothLeService.ACTION_GATT_DISCONNECTED); intentFilter.addAction(BluetoothLeService.ACTION_GATT_SERVICES_DISCOVERED); intentFilter.addAction(BluetoothLeService.ACTION_DATA_AVAILABLE); return intentFilter; } public void temp_update_timer_function(View view) { humidity = false; characteristicTX.setValue("D"); mBluetoothLeService.writeCharacteristic(characteristicTX); mBluetoothLeService.setCharacteristicNotification(characteristicRX, true); } public void humid_update_timer_funtion(View view) { humidity = true; } private static Integer shortSignedAtOffset(BluetoothGattCharacteristic characteristicRX, int offset) { Integer lowerByte = characteristicRX.getIntValue(BluetoothGattCharacteristic.FORMAT_UINT8, offset); Integer upperByte = characteristicRX.getIntValue(BluetoothGattCharacteristic.FORMAT_SINT8, offset + 1); // Note: interpret MSB as signed. 
return (upperByte << 8) + lowerByte; } private static Integer shortUnsignedAtOffset(BluetoothGattCharacteristic characteristicRX, int offset) { Integer lowerByte = characteristicRX.getIntValue(BluetoothGattCharacteristic.FORMAT_UINT8, offset); Integer upperByte = characteristicRX.getIntValue(BluetoothGattCharacteristic.FORMAT_UINT8, offset + 1); // Note: interpret MSB as unsigned. return (upperByte << 8) + lowerByte; } Timer timerBluetooth; public void startTimer() { timer = new Timer(); timerBluetooth = new Timer(); timer2 = new Timer(); initializeTimerTask(); } public void stoptimertask() { //stop the timer, if it's not already null if (timer != null) { timer.cancel(); timer = null; } if (timer2 != null) { timer2.cancel(); timer2 = null; } if (timerTask != null) { timerTask.cancel(); timerTask = null; } } TimerTask timerTask; public void initializeTimerTask() { timer_humid = new TimerTask() { public void run() { handler.post(new Runnable() { public void run() { Calendar calendar = Calendar.getInstance(); SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd:MMMM:yyyy HH:mm:ss a"); final String strDate = simpleDateFormat.format(calendar.getTime()); humid_update_timer_funtion(null); } }); } }; timerTask = new TimerTask() { public void run() { handler.post(new Runnable() { public void run() { try { writeCsvData(); } catch (IOException e) { e.printStackTrace(); } Log.d("timer", "stored in file"); } }); } }; timer_temp = new TimerTask() { public void run() { handler.post(new Runnable() { public void run() { Calendar calendar = Calendar.getInstance(); SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd:MMMM:yyyy HH:mm:ss a"); final String strDate = simpleDateFormat.format(calendar.getTime()); int duration = Toast.LENGTH_SHORT; temp_update_timer_function(null); } }); } }; } private class JSONWeatherTask extends AsyncTask<String, Weather, Weather> { @Override protected Weather doInBackground(String... 
params) { Weather weather = new Weather(); flag = true; while (flag == true && !isCancelled()) { while (isNetworkAvailable() == true && flag == true && !isCancelled()) { String data = ((new WeatherHttpClient()).getWeatherData(params[0])); Log.d("weather2",data); try { weather = JSONWeatherParser.getWeather(data); //weather.iconData = ((new WeatherHttpClient()).getImage(weather.currentCondition.getIcon())); } catch (JSONException e) { e.printStackTrace(); } publishProgress(weather); try { Thread.sleep(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } return weather; } @Override protected void onProgressUpdate(Weather... weather2) { super.onProgressUpdate(weather2); Weather weather = weather2[0]; if (weather!=null &&weather.location!=null) { ll = (LinearLayout) findViewById(R.id.internet_data); ll.setVisibility(View.VISIBLE); //cityText.setVisibility(View.VISIBLE); cityText.setText(weather.location.getCity() + "," + weather.location.getCountry()); temp2.setText("" + Math.round((weather.temperature.getTemp() - 273.15)) + (char) 0x00B0 + "C"); float farangeit2 = (Math.round((weather.temperature.getTemp() - 273.15))) * (9 / 5) + 32; fara2_text.setText(String.format("%.1f\u00B0F", farangeit2)); hum.setText("HUMIDITY: " + weather.currentCondition.getHumidity() + "%"); count++; Context context = getApplicationContext(); CharSequence text = "Times "; int temping = (int) (Math.round((weather.temperature.getTemp() - 273.15))); vProgressBar2.setProgress(temping+30); int duration = Toast.LENGTH_LONG; Toast toast = Toast.makeText(context, text + " " + count, duration); } } } private boolean isNetworkAvailable() { ConnectivityManager connectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo(); return activeNetworkInfo != null && activeNetworkInfo.isConnected(); } private void initSwitch() { switchStatus = (TextView) findViewById(R.id.switchStatus); 
mySwitch = (ToggleButton) findViewById(R.id.mySwitch); mySwitch.setChecked(true); mySwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked) switchDigital(); else switchAnalog(); } }); if (mySwitch.isChecked()) switchDigital(); else switchAnalog(); } private void switchDigital() { layoutDigital.setVisibility(View.VISIBLE); layoutAnalog.setVisibility(View.INVISIBLE); inDoor.setVisibility(View.INVISIBLE); outDoor.setVisibility(View.INVISIBLE); clock_text.setVisibility(View.VISIBLE); date_text.setVisibility(View.VISIBLE); switchStatus.setText("Digital"); } private void switchAnalog() { layoutDigital.setVisibility(View.INVISIBLE); layoutAnalog.setVisibility(View.VISIBLE); inDoor.setVisibility(View.VISIBLE); outDoor.setVisibility(View.VISIBLE); clock_text.setVisibility(View.INVISIBLE); date_text.setVisibility(View.INVISIBLE); switchStatus.setText("Analog"); } }
/*
 * Copyright (c) 2003, 2005, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package sun.management;

import java.io.File;
import java.io.InputStream;
import java.io.FileInputStream;
import java.io.BufferedInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.text.MessageFormat;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.MissingResourceException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import javax.management.remote.JMXConnectorServer;
import sun.management.jmxremote.ConnectorBootstrap;
import static sun.management.AgentConfigurationError.*;
import sun.misc.VMSupport;

/**
 * This Agent is started by the VM when -Dcom.sun.management.snmp
 * or -Dcom.sun.management.jmxremote is set. This class will be
 * loaded by the system class loader.
 */
public class Agent {
    // management properties
    // mgmtProps: lazily-built merge of config-file properties and system
    // properties (see getManagementProperties); guarded by the class lock.
    private static Properties mgmtProps;
    // messageRB: lazily-loaded resource bundle for error/warning text.
    private static ResourceBundle messageRB;

    // System/agent property names recognized by this agent.
    private static final String CONFIG_FILE =
        "com.sun.management.config.file";
    private static final String SNMP_PORT =
        "com.sun.management.snmp.port";
    private static final String JMXREMOTE =
        "com.sun.management.jmxremote";
    private static final String JMXREMOTE_PORT =
        "com.sun.management.jmxremote.port";
    private static final String ENABLE_THREAD_CONTENTION_MONITORING =
        "com.sun.management.enableThreadContentionMonitoring";
    private static final String LOCAL_CONNECTOR_ADDRESS_PROP =
        "com.sun.management.jmxremote.localConnectorAddress";
    private static final String SNMP_ADAPTOR_BOOTSTRAP_CLASS_NAME =
        "sun.management.snmp.AdaptorBootstrap";

    // invoked by -javaagent or -Dcom.sun.management.agent.class
    // Delegates to agentmain so both entry points behave identically.
    public static void premain(String args) throws Exception {
        agentmain(args);
    }

    // invoked by attach mechanism
    // Parses comma-separated "name=value" agent options (only
    // com.sun.management.* names are accepted), merges them over the
    // config-file properties, and starts the agent.
    public static void agentmain(String args) throws Exception {
        if (args == null || args.length() == 0) {
            args = JMXREMOTE;           // default to local management
        }

        // Parse agent options into properties
        Properties arg_props = new Properties();
        // NOTE(review): args can no longer be null here (defaulted above),
        // so this null check is redundant but harmless.
        if (args != null) {
            String[] options = args.split(",");
            for (int i=0; i<options.length; i++) {
                String[] option = options[i].split("=");
                if (option.length >= 1 && option.length <= 2) {
                    String name = option[0];
                    // "name" alone (no '=') means an empty value.
                    String value = (option.length == 1) ? "" : option[1];
                    if (name != null && name.length() > 0) {
                        // Assume that any com.sun.management.* options are okay
                        if (name.startsWith("com.sun.management.")) {
                            arg_props.setProperty(name, value);
                        } else {
                            error(INVALID_OPTION, name);
                        }
                    }
                }
            }
        }

        // Read properties from the config file
        Properties config_props = new Properties();
        String fname = arg_props.getProperty(CONFIG_FILE);
        readConfiguration(fname, config_props);

        // Arguments override config file
        config_props.putAll(arg_props);
        startAgent(config_props);
    }

    // Starts the requested subsystems: SNMP adaptor (if snmp.port is set),
    // remote JMX connector (if jmxremote.port is set), and the local JMX
    // connector (if either jmxremote property is set). All configuration
    // failures are funneled through error(...), which throws.
    private static void startAgent(Properties props) throws Exception {
        String snmpPort = props.getProperty(SNMP_PORT);
        String jmxremote = props.getProperty(JMXREMOTE);
        String jmxremotePort = props.getProperty(JMXREMOTE_PORT);

        // Enable optional monitoring functionality if requested
        final String enableThreadContentionMonitoring =
            props.getProperty(ENABLE_THREAD_CONTENTION_MONITORING);
        if (enableThreadContentionMonitoring != null) {
            ManagementFactory.getThreadMXBean().
                setThreadContentionMonitoringEnabled(true);
        }

        try {
            if (snmpPort != null) {
                loadSnmpAgent(snmpPort, props);
            }

            /*
             * If the jmxremote.port property is set then we start the
             * RMIConnectorServer for remote M&M.
             *
             * If the jmxremote or jmxremote.port properties are set then
             * we start a RMIConnectorServer for local M&M. The address
             * of this "local" server is exported as a counter to the jstat
             * instrumentation buffer.
             */
            if (jmxremote != null || jmxremotePort != null) {
                if (jmxremotePort != null) {
                    ConnectorBootstrap.initialize(jmxremotePort, props);
                }

                Properties agentProps = VMSupport.getAgentProperties();
                // start local connector if not started
                // System.out.println("local address : " +
                //     agentProps.get(LOCAL_CONNECTOR_ADDRESS_PROP));
                if (agentProps.get(LOCAL_CONNECTOR_ADDRESS_PROP) == null) {
                    JMXConnectorServer cs =
                        ConnectorBootstrap.startLocalConnectorServer();
                    String address = cs.getAddress().toString();
                    // Add the local connector address to the agent properties
                    agentProps.put(LOCAL_CONNECTOR_ADDRESS_PROP, address);

                    try {
                        // export the address to the instrumentation buffer
                        ConnectorAddressLink.export(address);
                    } catch (Exception x) {
                        // Connector server started but unable to export address
                        // to instrumentation buffer - non-fatal error.
                        warning(EXPORT_ADDRESS_FAILED, x.getMessage());
                    }
                }
            }
        } catch (AgentConfigurationError e) {
            error(e.getError(), e.getParams());
        } catch (Exception e) {
            error(e);
        }
    }

    // Builds the management properties: config-file values overridden by
    // system properties (system properties take precedence).
    public static Properties loadManagementProperties() {
        Properties props = new Properties();

        // Load the management properties from the config file

        String fname = System.getProperty(CONFIG_FILE);
        readConfiguration(fname, props);

        // management properties can be overridden by system properties
        // which take precedence
        props.putAll(System.getProperties());

        return props;
    }

    // Returns the cached management properties, loading them on first use.
    // Returns null when no management-related system property is set at all.
    // Synchronized so the lazy initialization of mgmtProps is race-free.
    public static synchronized Properties getManagementProperties() {
        if (mgmtProps == null) {
            String configFile = System.getProperty(CONFIG_FILE);
            String snmpPort = System.getProperty(SNMP_PORT);
            String jmxremote = System.getProperty(JMXREMOTE);
            String jmxremotePort = System.getProperty(JMXREMOTE_PORT);

            if (configFile == null && snmpPort == null &&
                jmxremote == null && jmxremotePort == null) {
                // return if out-of-the-management option is not specified
                return null;
            }
            mgmtProps = loadManagementProperties();
        }
        return mgmtProps;
    }

    // Starts the SNMP adaptor via reflection so the agent has no static
    // dependency on the (optional) SNMP packages. Any failure is surfaced
    // as UnsupportedOperationException; runtime failures from the adaptor
    // itself are unwrapped and rethrown.
    private static void loadSnmpAgent(String snmpPort, Properties props) {
        try {
            // invoke the following through reflection:
            //     AdaptorBootstrap.initialize(snmpPort, props);
            final Class<?> adaptorClass =
                Class.forName(SNMP_ADAPTOR_BOOTSTRAP_CLASS_NAME,true,null);
            final Method initializeMethod =
                adaptorClass.getMethod("initialize",
                    String.class, Properties.class);
            initializeMethod.invoke(null,snmpPort,props);
        } catch (ClassNotFoundException x) {
            // The SNMP packages are not present: throws an exception.
            throw new UnsupportedOperationException(
                "Unsupported management property: " + SNMP_PORT,x);
        } catch (NoSuchMethodException x) {
            // should not happen...
            throw new UnsupportedOperationException(
                "Unsupported management property: " + SNMP_PORT,x);
        } catch (InvocationTargetException x) {
            // Unwrap the adaptor's own failure so callers see the real cause.
            final Throwable cause = x.getCause();
            if (cause instanceof RuntimeException)
                throw (RuntimeException) cause;
            else if (cause instanceof Error)
                throw (Error) cause;
            // should not happen...
            throw new UnsupportedOperationException(
                "Unsupported management property: " + SNMP_PORT,cause);
        } catch (IllegalAccessException x) {
            // should not happen...
            throw new UnsupportedOperationException(
                "Unsupported management property: " + SNMP_PORT,x);
        }
    }

    // read config file and initialize the properties
    // When fname is null, defaults to
    // ${java.home}/lib/management/management.properties.
    // All I/O failures are reported through error(...), which throws.
    private static void readConfiguration(String fname, Properties p) {
        if (fname == null) {
            String home = System.getProperty("java.home");
            if (home == null) {
                throw new Error("Can't find java.home ??");
            }
            StringBuffer defaultFileName = new StringBuffer(home);
            defaultFileName.append(File.separator).append("lib");
            defaultFileName.append(File.separator).append("management");
            defaultFileName.append(File.separator).append("management.properties");
            // Set file name
            fname = defaultFileName.toString();
        }
        final File configFile = new File(fname);
        if (!configFile.exists()) {
            error(CONFIG_FILE_NOT_FOUND, fname);
        }

        InputStream in = null;
        try {
            in = new FileInputStream(configFile);
            BufferedInputStream bin = new BufferedInputStream(in);
            p.load(bin);
        } catch (FileNotFoundException e) {
            error(CONFIG_FILE_OPEN_FAILED, e.getMessage());
        } catch (IOException e) {
            error(CONFIG_FILE_OPEN_FAILED, e.getMessage());
        } catch (SecurityException e) {
            error(CONFIG_FILE_ACCESS_DENIED, fname);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    error(CONFIG_FILE_CLOSE_FAILED, fname);
                }
            }
        }
    }

    // Entry point used when the VM starts the agent itself. If
    // -Dcom.sun.management.agent.class=<classname>:<args> is set, that
    // class's premain(String) is invoked via reflection instead of the
    // default management startup.
    public static void startAgent() throws Exception {
        String prop = System.getProperty("com.sun.management.agent.class");

        // -Dcom.sun.management.agent.class not set so read management
        // properties and start agent
        if (prop == null) {
            // initialize management properties
            Properties props = getManagementProperties();
            if (props != null) {
                startAgent(props);
            }
            return;
        }

        // -Dcom.sun.management.agent.class=<agent classname>:<agent args>
        String[] values = prop.split(":");
        if (values.length < 1 || values.length > 2) {
            error(AGENT_CLASS_INVALID, "\"" + prop + "\"");
        }
        String cname = values[0];
        String args = (values.length == 2 ? values[1] : null);

        if (cname == null || cname.length() == 0) {
            error(AGENT_CLASS_INVALID, "\"" + prop + "\"");
        }

        if (cname != null) {
            try {
                // Instantiate the named class.
                // invoke the premain(String args) method
                Class<?> clz =
                    ClassLoader.getSystemClassLoader().loadClass(cname);
                Method premain = clz.getMethod("premain",
                                               new Class[] { String.class });
                premain.invoke(null, /* static */
                               new Object[] { args });
            } catch (ClassNotFoundException ex) {
                error(AGENT_CLASS_NOT_FOUND, "\"" + cname + "\"");
            } catch (NoSuchMethodException ex) {
                error(AGENT_CLASS_PREMAIN_NOT_FOUND, "\"" + cname + "\"");
            } catch (SecurityException ex) {
                error(AGENT_CLASS_ACCESS_DENIED);
            } catch (Exception ex) {
                String msg = (ex.getCause() == null
                                  ? ex.getMessage()
                                  : ex.getCause().getMessage());
                error(AGENT_CLASS_FAILED, msg);
            }
        }
    }

    // Reports a localized error message for the given resource key and
    // aborts by throwing RuntimeException.
    public static void error(String key) {
        String keyText = getText(key);
        System.err.print(getText("agent.err.error") + ": " + keyText);
        throw new RuntimeException(keyText);
    }

    // Variant taking detail parameters; joins them space-separated and
    // delegates to error(String, String).
    public static void error(String key, String[] params) {
        if (params == null || params.length == 0) {
            error(key);
        } else {
            StringBuffer message = new StringBuffer(params[0]);
            for (int i = 1; i < params.length; i++) {
                message.append(" " + params[i]);
            }
            error(key, message.toString());
        }
    }

    // Variant appending a free-form detail message after the localized text.
    public static void error(String key, String message) {
        String keyText = getText(key);
        System.err.print(getText("agent.err.error") + ": " + keyText);
        System.err.println(": " + message);
        throw new RuntimeException(keyText);
    }

    // Reports an unexpected exception (with stack trace) and rethrows it
    // wrapped in RuntimeException.
    public static void error(Exception e) {
        e.printStackTrace();
        System.err.println(getText(AGENT_EXCEPTION) + ": " + e.toString());
        throw new RuntimeException(e);
    }

    // Prints a localized warning; unlike error(...), does not throw.
    public static void warning(String key, String message) {
        System.err.print(getText("agent.err.warning") + ": " + getText(key));
        System.err.println(": " + message);
    }

    // Loads the agent's message resource bundle; a missing bundle is fatal.
    private static void initResource() {
        try {
            messageRB =
                ResourceBundle.getBundle("sun.management.resources.agent");
        } catch (MissingResourceException e) {
            throw new Error("Fatal: Resource for management agent is missing");
        }
    }

    // Looks up localized text for key; returns a placeholder string rather
    // than throwing when the key is absent.
    public static String getText(String key) {
        if (messageRB == null) {
            initResource();
        }
        try {
            return messageRB.getString(key);
        } catch (MissingResourceException e) {
            return "Missing management agent resource bundle: key = \"" +
                key + "\"";
        }
    }

    // MessageFormat variant: substitutes args into the localized pattern.
    public static String getText(String key, String... args) {
        if (messageRB == null) {
            initResource();
        }
        String format = messageRB.getString(key);
        if (format == null) {
            format = "missing resource key: key = \"" + key + "\", " +
                "arguments = \"{0}\", \"{1}\", \"{2}\"";
        }
        return MessageFormat.format(format, (Object[]) args);
    }
}
/*
 * Copyright 1999-2018 Alibaba Group Holding Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.csp.sentinel.slots.block.flow.param;

import com.alibaba.csp.sentinel.EntryType;
import com.alibaba.csp.sentinel.slotchain.ResourceWrapper;
import com.alibaba.csp.sentinel.slotchain.StringResourceWrapper;
import com.alibaba.csp.sentinel.slots.block.RuleConstant;
import com.alibaba.csp.sentinel.slots.statistic.cache.ConcurrentLinkedHashMapWrapper;
import com.alibaba.csp.sentinel.util.TimeUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Test cases for {@link ParamFlowChecker}.
 *
 * @author Eric Zhao
 */
public class ParamFlowCheckerTest {

    /**
     * When the rule's paramIdx points past the provided arguments, the check
     * must pass (no parameter to limit on).
     */
    @Test
    public void testHotParamCheckerPassCheckExceedArgs() {
        final String resourceName = "testHotParamCheckerPassCheckExceedArgs";
        final ResourceWrapper resourceWrapper =
            new StringResourceWrapper(resourceName, EntryType.IN);
        // Only one arg ("abc") is supplied below, so index 1 is out of range.
        int paramIdx = 1;

        ParamFlowRule rule = new ParamFlowRule();
        rule.setResource(resourceName);
        rule.setCount(10);
        rule.setParamIdx(paramIdx);

        assertTrue("The rule will pass if the paramIdx exceeds provided args",
            ParamFlowChecker.passCheck(resourceWrapper, rule, 1, "abc"));
    }

    /**
     * Rate-limiter mode with per-value exception items: valueB's threshold
     * is overridden to 0, so its very first acquisition is rejected while
     * valueA (global threshold 5) passes.
     */
    @Test
    public void testSingleValueCheckQpsWithExceptionItems() throws InterruptedException {
        final String resourceName = "testSingleValueCheckQpsWithExceptionItems";
        final ResourceWrapper resourceWrapper =
            new StringResourceWrapper(resourceName, EntryType.IN);
        // NOTE(review): presumably warms up TimeUtil's cached clock before
        // the timed checks below — confirm against TimeUtil's implementation.
        TimeUtil.currentTimeMillis();
        int paramIdx = 0;

        long globalThreshold = 5L;
        int thresholdB = 0;   // valueB: never allowed
        int thresholdD = 7;   // valueD: raised above the global threshold

        ParamFlowRule rule = new ParamFlowRule();
        rule.setResource(resourceName);
        rule.setCount(globalThreshold);
        rule.setParamIdx(paramIdx);
        rule.setControlBehavior(RuleConstant.CONTROL_BEHAVIOR_RATE_LIMITER);

        String valueA = "valueA";
        String valueB = "valueB";
        String valueC = "valueC";
        String valueD = "valueD";

        // Directly set parsed map for test.
        Map<Object, Integer> map = new HashMap<Object, Integer>();
        map.put(valueB, thresholdB);
        map.put(valueD, thresholdD);
        rule.setParsedHotItems(map);

        // Register a real metric for this resource so the checker has a
        // time-counter cache to work against.
        ParameterMetric metric = new ParameterMetric();
        ParameterMetricStorage.getMetricsMap().put(resourceWrapper.getName(), metric);
        metric.getRuleTimeCounterMap().put(rule,
            new ConcurrentLinkedHashMapWrapper<Object, AtomicLong>(4000));

        assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueA));
        assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueB));

        // NOTE(review): wall-clock sleep lets the rate-limiter window drain
        // before subsequent tests; timing-sensitive by design.
        TimeUnit.SECONDS.sleep(3);
    }

    /**
     * Thread-count mode with exception items, using a mocked ParameterMetric.
     * valueB/valueD carry per-value thresholds; the others use the global one.
     */
    @Test
    public void testSingleValueCheckThreadCountWithExceptionItems() {
        final String resourceName = "testSingleValueCheckThreadCountWithExceptionItems";
        final ResourceWrapper resourceWrapper =
            new StringResourceWrapper(resourceName, EntryType.IN);
        int paramIdx = 0;

        long globalThreshold = 5L;
        int thresholdB = 3;
        int thresholdD = 7;

        ParamFlowRule rule = new ParamFlowRule(resourceName)
            .setCount(globalThreshold)
            .setParamIdx(paramIdx)
            .setGrade(RuleConstant.FLOW_GRADE_THREAD);

        String valueA = "valueA";
        String valueB = "valueB";
        String valueC = "valueC";
        String valueD = "valueD";

        // Directly set parsed map for test.
        Map<Object, Integer> map = new HashMap<Object, Integer>();
        map.put(valueB, thresholdB);
        map.put(valueD, thresholdD);
        rule.setParsedHotItems(map);

        // Mock the metric so each value's "current thread count" is scripted.
        ParameterMetric metric = mock(ParameterMetric.class);
        when(metric.getThreadCount(paramIdx, valueA)).thenReturn(globalThreshold - 1);
        when(metric.getThreadCount(paramIdx, valueB)).thenReturn(globalThreshold - 1);
        when(metric.getThreadCount(paramIdx, valueC)).thenReturn(globalThreshold - 1);
        when(metric.getThreadCount(paramIdx, valueD)).thenReturn(globalThreshold + 1);
        ParameterMetricStorage.getMetricsMap().put(resourceWrapper.getName(), metric);

        assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueA));
        // valueB's exception threshold (3) is below its count (4) -> blocked.
        assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueB));
        assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueC));
        // valueD's exception threshold (7) is above its count (6) -> passes.
        assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueD));

        // Re-stub with new counts; valueD uses consecutive stubbing, so the
        // order of the two valueD assertions below is significant.
        when(metric.getThreadCount(paramIdx, valueA)).thenReturn(globalThreshold);
        when(metric.getThreadCount(paramIdx, valueB)).thenReturn(thresholdB - 1L);
        when(metric.getThreadCount(paramIdx, valueC)).thenReturn(globalThreshold + 1);
        when(metric.getThreadCount(paramIdx, valueD)).thenReturn(globalThreshold - 1)
            .thenReturn((long) thresholdD);

        assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueA));
        assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueB));
        assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueC));
        assertTrue(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueD));
        assertFalse(ParamFlowChecker.passSingleValueCheck(resourceWrapper, rule, 1, valueD));
    }

    /**
     * A collection argument is limited per element: with threshold 1 the
     * first pass over {a, B, Cc} succeeds and the immediate second is blocked.
     */
    @Test
    public void testPassLocalCheckForCollection() throws InterruptedException {
        final String resourceName = "testPassLocalCheckForCollection";
        final ResourceWrapper resourceWrapper =
            new StringResourceWrapper(resourceName, EntryType.IN);
        int paramIdx = 0;
        double globalThreshold = 1;

        ParamFlowRule rule = new ParamFlowRule(resourceName)
            .setParamIdx(paramIdx)
            .setCount(globalThreshold);

        String v1 = "a", v2 = "B", v3 = "Cc";
        List<String> list = Arrays.asList(v1, v2, v3);
        ParameterMetric metric = new ParameterMetric();
        ParameterMetricStorage.getMetricsMap().put(resourceWrapper.getName(), metric);
        metric.getRuleTimeCounterMap().put(rule,
            new ConcurrentLinkedHashMapWrapper<Object, AtomicLong>(4000));
        metric.getRuleTokenCounterMap().put(rule,
            new ConcurrentLinkedHashMapWrapper<Object, AtomicLong>(4000));

        assertTrue(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, list));
        assertFalse(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, list));
    }

    /**
     * Same per-element behavior for an array argument, under rate-limiter
     * control behavior.
     */
    @Test
    public void testPassLocalCheckForArray() throws InterruptedException {
        final String resourceName = "testPassLocalCheckForArray";
        final ResourceWrapper resourceWrapper =
            new StringResourceWrapper(resourceName, EntryType.IN);
        int paramIdx = 0;
        double globalThreshold = 1;

        ParamFlowRule rule = new ParamFlowRule(resourceName)
            .setParamIdx(paramIdx)
            .setControlBehavior(RuleConstant.CONTROL_BEHAVIOR_RATE_LIMITER)
            .setCount(globalThreshold);
        // NOTE(review): presumably warms up TimeUtil's cached clock — confirm.
        TimeUtil.currentTimeMillis();

        String v1 = "a", v2 = "B", v3 = "Cc";
        Object arr = new String[]{v1, v2, v3};
        ParameterMetric metric = new ParameterMetric();
        ParameterMetricStorage.getMetricsMap().put(resourceWrapper.getName(), metric);
        metric.getRuleTimeCounterMap().put(rule,
            new ConcurrentLinkedHashMapWrapper<Object, AtomicLong>(4000));

        assertTrue(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, arr));
        assertFalse(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, arr));
    }

    /**
     * A complex argument implementing ParamFlowArgument is limited on its
     * paramFlowKey() (here: the user's name) rather than on object identity.
     */
    @Test
    public void testPassLocalCheckForComplexParam() throws InterruptedException {
        // Local fixture type: its flow key is the name field.
        class User implements ParamFlowArgument {
            Integer id;
            String name;
            String address;

            public User(Integer id, String name, String address) {
                this.id = id;
                this.name = name;
                this.address = address;
            }

            @Override
            public Object paramFlowKey() {
                return name;
            }
        }

        final String resourceName = "testPassLocalCheckForComplexParam";
        final ResourceWrapper resourceWrapper =
            new StringResourceWrapper(resourceName, EntryType.IN);
        int paramIdx = 0;
        double globalThreshold = 1;

        ParamFlowRule rule = new ParamFlowRule(resourceName)
            .setParamIdx(paramIdx)
            .setCount(globalThreshold);

        Object[] args = new Object[]{new User(1, "Bob", "Hangzhou"), 10, "Demo"};
        ParameterMetric metric = new ParameterMetric();
        ParameterMetricStorage.getMetricsMap().put(resourceWrapper.getName(), metric);
        metric.getRuleTimeCounterMap().put(rule,
            new ConcurrentLinkedHashMapWrapper<Object, AtomicLong>(4000));
        metric.getRuleTokenCounterMap().put(rule,
            new ConcurrentLinkedHashMapWrapper<Object, AtomicLong>(4000));

        assertTrue(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, args));
        assertFalse(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, args));
    }

    // Reset shared metric storage so tests don't leak state into each other.
    @Before
    public void setUp() throws Exception {
        ParameterMetricStorage.getMetricsMap().clear();
    }

    @After
    public void tearDown() throws Exception {
        ParameterMetricStorage.getMetricsMap().clear();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs;

import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;

/**
 * This class is used to specify the setup of namenodes when instantiating
 * a MiniDFSCluster. It consists of a set of nameservices, each of which
 * may have one or more namenodes (in the case of HA)
 */
@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "Hive", "MapReduce", "Pig"})
@InterfaceStability.Unstable
public class MiniDFSNNTopology {
  private final List<NSConf> nameservices = Lists.newArrayList();
  private boolean federation;

  public MiniDFSNNTopology() {
  }

  /**
   * Set up a simple non-federated non-HA NN.
   */
  public static MiniDFSNNTopology simpleSingleNN(
      int nameNodePort, int nameNodeHttpPort) {
    return new MiniDFSNNTopology()
      .addNameservice(new MiniDFSNNTopology.NSConf(null)
        .addNN(new MiniDFSNNTopology.NNConf(null)
          .setHttpPort(nameNodeHttpPort)
          .setIpcPort(nameNodePort)));
  }

  /**
   * Set up an HA topology with a single HA nameservice.
   */
  public static MiniDFSNNTopology simpleHATopology() {
    return simpleHATopology(2);
  }

  /**
   * Set up an HA topology with a single HA nameservice.
   * @param nnCount of namenodes to use with the nameservice
   */
  public static MiniDFSNNTopology simpleHATopology(int nnCount) {
    MiniDFSNNTopology.NSConf nameservice =
        new MiniDFSNNTopology.NSConf("minidfs-ns");
    // NNs are named nn1..nnN (1-based) within the single nameservice.
    for (int i = 1; i <= nnCount; i++) {
      nameservice.addNN(new MiniDFSNNTopology.NNConf("nn" + i));
    }
    MiniDFSNNTopology topology =
        new MiniDFSNNTopology().addNameservice(nameservice);
    return topology;
  }

  /**
   * Set up federated cluster with the given number of nameservices, each
   * of which has only a single NameNode.
   */
  public static MiniDFSNNTopology simpleFederatedTopology(
      int numNameservices) {
    MiniDFSNNTopology topology = new MiniDFSNNTopology();
    // Nameservices are named ns1..nsN (1-based).
    for (int i = 1; i <= numNameservices; i++) {
      topology.addNameservice(new MiniDFSNNTopology.NSConf("ns" + i)
        .addNN(new MiniDFSNNTopology.NNConf(null)));
    }
    topology.setFederation(true);
    return topology;
  }

  /**
   * Set up federated cluster with the given nameservices, each
   * of which has only a single NameNode.
   *
   * @param nameservicesIds comma-separated list of nameservice IDs
   */
  public static MiniDFSNNTopology simpleFederatedTopology(String nameservicesIds) {
    MiniDFSNNTopology topology = new MiniDFSNNTopology();
    String[] nsIds = nameservicesIds.split(",");
    for (String nsId : nsIds) {
      topology.addNameservice(new MiniDFSNNTopology.NSConf(nsId)
        .addNN(new MiniDFSNNTopology.NNConf(null)));
    }
    topology.setFederation(true);
    return topology;
  }

  /**
   * Set up federated cluster with the given number of nameservices, each
   * of which has two NameNodes.
   */
  public static MiniDFSNNTopology simpleHAFederatedTopology(
      int numNameservices) {
    MiniDFSNNTopology topology = new MiniDFSNNTopology();
    // Note: unlike simpleFederatedTopology(int), nameservices here are
    // 0-based (ns0..ns{N-1}); callers depend on this naming.
    for (int i = 0; i < numNameservices; i++) {
      topology.addNameservice(new MiniDFSNNTopology.NSConf("ns" + i)
        .addNN(new MiniDFSNNTopology.NNConf("nn0"))
        .addNN(new MiniDFSNNTopology.NNConf("nn1")));
    }
    topology.setFederation(true);
    return topology;
  }

  /**
   * Mark this topology as federated (or not).
   * @return this, for call chaining
   */
  public MiniDFSNNTopology setFederation(boolean federation) {
    this.federation = federation;
    return this;
  }

  /**
   * Add a nameservice to this topology. The nameservice must contain
   * at least one NameNode.
   * @return this, for call chaining
   * @throws IllegalArgumentException if the nameservice has no NNs
   */
  public MiniDFSNNTopology addNameservice(NSConf nameservice) {
    Preconditions.checkArgument(!nameservice.getNNs().isEmpty(),
        "Must have at least one NN in a nameservice");
    this.nameservices.add(nameservice);
    return this;
  }

  /**
   * @return the total number of NameNodes across all nameservices
   */
  public int countNameNodes() {
    int count = 0;
    for (NSConf ns : nameservices) {
      // Use the accessor for consistency with the other traversals below.
      count += ns.getNNs().size();
    }
    return count;
  }

  /**
   * @return the single NameNode in the topology
   * @throws IllegalStateException if the topology has more than one NN
   */
  public NNConf getOnlyNameNode() {
    Preconditions.checkState(countNameNodes() == 1,
        "must have exactly one NN!");
    return nameservices.get(0).getNNs().get(0);
  }

  /**
   * @return true if this topology is federated (explicitly flagged, or
   * containing more than one nameservice)
   */
  public boolean isFederated() {
    return nameservices.size() > 1 || federation;
  }

  /**
   * @return true if at least one of the nameservices
   * in the topology has HA enabled.
   */
  public boolean isHA() {
    for (NSConf ns : nameservices) {
      if (ns.getNNs().size() > 1) {
        return true;
      }
    }
    return false;
  }

  /**
   * @return true if all of the NNs in the cluster have their HTTP
   * port specified to be non-ephemeral.
   */
  public boolean allHttpPortsSpecified() {
    for (NSConf ns : nameservices) {
      for (NNConf nn : ns.getNNs()) {
        if (nn.getHttpPort() == 0) {
          return false;
        }
      }
    }
    return true;
  }

  /**
   * @return true if all of the NNs in the cluster have their IPC
   * port specified to be non-ephemeral.
   */
  public boolean allIpcPortsSpecified() {
    for (NSConf ns : nameservices) {
      for (NNConf nn : ns.getNNs()) {
        if (nn.getIpcPort() == 0) {
          return false;
        }
      }
    }
    return true;
  }

  public List<NSConf> getNameservices() {
    return nameservices;
  }

  /**
   * Configuration of a single nameservice: an ID plus its NameNodes.
   */
  public static class NSConf {
    private final String id;
    private final List<NNConf> nns = Lists.newArrayList();

    public NSConf(String id) {
      this.id = id;
    }

    /** Add a NameNode to this nameservice; returns this for chaining. */
    public NSConf addNN(NNConf nn) {
      this.nns.add(nn);
      return this;
    }

    public String getId() {
      return id;
    }

    public List<NNConf> getNNs() {
      return nns;
    }
  }

  /**
   * Configuration of a single NameNode: its ID, ports (0 = ephemeral),
   * and optional cluster ID.
   */
  public static class NNConf {
    private final String nnId;
    private int httpPort;
    private int ipcPort;
    private String clusterId;

    public NNConf(String nnId) {
      this.nnId = nnId;
    }

    public String getNnId() {
      return nnId;
    }

    int getIpcPort() {
      return ipcPort;
    }

    int getHttpPort() {
      return httpPort;
    }

    String getClusterId() {
      return clusterId;
    }

    /** Set the HTTP port (0 means ephemeral); returns this for chaining. */
    public NNConf setHttpPort(int httpPort) {
      this.httpPort = httpPort;
      return this;
    }

    /** Set the IPC port (0 means ephemeral); returns this for chaining. */
    public NNConf setIpcPort(int ipcPort) {
      this.ipcPort = ipcPort;
      return this;
    }

    /** Set the cluster ID; returns this for chaining. */
    public NNConf setClusterId(String clusterId) {
      this.clusterId = clusterId;
      return this;
    }
  }
}
// Copyright 2016 Twitter. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.twitter.heron.scheduler.aurora; import java.io.File; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.bind.DatatypeConverter; import com.google.common.base.Optional; import com.twitter.heron.api.generated.TopologyAPI; import com.twitter.heron.common.basics.FileUtils; import com.twitter.heron.proto.scheduler.Scheduler; import com.twitter.heron.scheduler.UpdateTopologyManager; import com.twitter.heron.spi.common.Config; import com.twitter.heron.spi.common.Context; import com.twitter.heron.spi.common.Misc; import com.twitter.heron.spi.packing.PackingPlan; import com.twitter.heron.spi.packing.Resource; import com.twitter.heron.spi.scheduler.IScalable; import com.twitter.heron.spi.scheduler.IScheduler; import com.twitter.heron.spi.utils.Runtime; import com.twitter.heron.spi.utils.SchedulerUtils; import com.twitter.heron.spi.utils.TopologyUtils; public class AuroraScheduler implements IScheduler, IScalable { private static final Logger LOG = Logger.getLogger(AuroraLauncher.class.getName()); private Config config; private Config runtime; private AuroraController controller; private UpdateTopologyManager updateTopologyManager; @Override 
public void initialize(Config mConfig, Config mRuntime) { this.config = mConfig; this.runtime = mRuntime; this.controller = getController(); this.updateTopologyManager = new UpdateTopologyManager(runtime, Optional.<IScalable>of(this)); } /** * Get an AuroraControl basing on the config and runtime * * @return AuroraControl */ protected AuroraController getController() { return new AuroraController( Runtime.topologyName(runtime), Context.cluster(config), Context.role(config), Context.environ(config), Context.verbose(config)); } @Override public void close() { // Nothing to do here } @Override public boolean onSchedule(PackingPlan packing) { if (packing == null || packing.getContainers().isEmpty()) { LOG.severe("No container requested. Can't schedule"); return false; } LOG.info("Launching topology in aurora"); Map<String, String> auroraProperties = createAuroraProperties(packing); return controller.createJob(getHeronAuroraPath(), auroraProperties); } @Override public List<String> getJobLinks() { List<String> jobLinks = new ArrayList<>(); //Only the aurora job page is returned String jobLinkFormat = AuroraContext.getJobLinkTemplate(config); if (jobLinkFormat != null && !jobLinkFormat.isEmpty()) { String jobLink = Misc.substitute(config, jobLinkFormat); jobLinks.add(jobLink); } return jobLinks; } @Override public boolean onKill(Scheduler.KillTopologyRequest request) { return controller.killJob(); } @Override public boolean onRestart(Scheduler.RestartTopologyRequest request) { int containerId = request.getContainerIndex(); return controller.restartJob(containerId); } @Override public boolean onUpdate(Scheduler.UpdateTopologyRequest request) { try { updateTopologyManager.updateTopology( request.getCurrentPackingPlan(), request.getProposedPackingPlan()); } catch (ExecutionException | InterruptedException e) { LOG.log(Level.SEVERE, "Could not update topology for request: " + request, e); return false; } return true; } @Override public void 
addContainers(Set<PackingPlan.ContainerPlan> containersToAdd) {
  // Aurora only needs the count of new instances, not the plans themselves.
  controller.addContainers(containersToAdd.size());
}

@Override
public void removeContainers(Set<PackingPlan.ContainerPlan> containersToRemove) {
  controller.removeContainers(containersToRemove);
}

/**
 * Encode the JVM options
 *
 * <p>Base64-encodes the options and replaces '=' (Base64 padding) with the
 * token "&equals;" — presumably because '=' clashes with Aurora's key=value
 * property parsing; TODO confirm. The result is wrapped in double quotes.
 *
 * @return encoded string
 */
protected String formatJavaOpts(String javaOpts) {
  String javaOptsBase64 = DatatypeConverter.printBase64Binary(
      javaOpts.getBytes(Charset.forName("UTF-8")));

  return String.format("\"%s\"", javaOptsBase64.replace("=", "&equals;"));
}

// Path of the "heron.aurora" job template inside the Heron conf directory.
protected String getHeronAuroraPath() {
  return new File(Context.heronConf(config), "heron.aurora").getPath();
}

/**
 * Builds the variable map used to fill in the heron.aurora job template.
 *
 * @param packing packing plan of the topology being launched
 * @return mapping from template variable name to its value
 */
protected Map<String, String> createAuroraProperties(PackingPlan packing) {
  Map<String, String> auroraProperties = new HashMap<>();

  TopologyAPI.Topology topology = Runtime.topology(runtime);

  // Align the cpu, ram, disk to the maximal one
  Resource containerResource = SchedulerUtils.getMaxRequiredResource(packing);

  // Binaries and topology identity.
  auroraProperties.put("SANDBOX_EXECUTOR_BINARY", Context.executorSandboxBinary(config));
  auroraProperties.put("TOPOLOGY_NAME", topology.getName());
  auroraProperties.put("TOPOLOGY_ID", topology.getId());
  auroraProperties.put("TOPOLOGY_DEFINITION_FILE",
      FileUtils.getBaseName(Context.topologyDefinitionFile(config)));
  auroraProperties.put("STATEMGR_CONNECTION_STRING",
      Context.stateManagerConnectionString(config));
  auroraProperties.put("STATEMGR_ROOT_PATH", Context.stateManagerRootPath(config));
  auroraProperties.put("SANDBOX_TMASTER_BINARY", Context.tmasterSandboxBinary(config));
  auroraProperties.put("SANDBOX_STMGR_BINARY", Context.stmgrSandboxBinary(config));
  auroraProperties.put("SANDBOX_METRICSMGR_CLASSPATH",
      Context.metricsManagerSandboxClassPath(config));
  auroraProperties.put("INSTANCE_JVM_OPTS_IN_BASE64",
      formatJavaOpts(TopologyUtils.getInstanceJvmOptions(topology)));
  auroraProperties.put("TOPOLOGY_CLASSPATH",
      TopologyUtils.makeClassPath(topology, Context.topologyBinaryFile(config)));

  auroraProperties.put("SANDBOX_SYSTEM_YAML", Context.systemConfigSandboxFile(config));
  auroraProperties.put("COMPONENT_RAMMAP", Runtime.componentRamMap(runtime));
  auroraProperties.put("COMPONENT_JVM_OPTS_IN_BASE64",
      formatJavaOpts(TopologyUtils.getComponentJvmOptions(topology)));
  auroraProperties.put("TOPOLOGY_PACKAGE_TYPE", Context.topologyPackageType(config));
  auroraProperties.put("TOPOLOGY_BINARY_FILE",
      FileUtils.getBaseName(Context.topologyBinaryFile(config)));
  auroraProperties.put("HERON_SANDBOX_JAVA_HOME", Context.javaSandboxHome(config));
  auroraProperties.put("SANDBOX_SHELL_BINARY", Context.shellSandboxBinary(config));
  auroraProperties.put("SANDBOX_PYTHON_INSTANCE_BINARY",
      Context.pythonInstanceSandboxBinary(config));

  // Per-container resources, taken from the maximal container above.
  auroraProperties.put("CPUS_PER_CONTAINER", Double.toString(containerResource.getCpu()));
  auroraProperties.put("DISK_PER_CONTAINER", Long.toString(containerResource.getDisk()));
  auroraProperties.put("RAM_PER_CONTAINER", Long.toString(containerResource.getRam()));

  // One container beyond the user-requested count — presumably for the
  // topology master; confirm against the heron.aurora template.
  auroraProperties.put("NUM_CONTAINERS", (1 + TopologyUtils.getNumContainers(topology)) + "");

  auroraProperties.put("CLUSTER", Context.cluster(config));
  auroraProperties.put("ENVIRON", Context.environ(config));
  auroraProperties.put("ROLE", Context.role(config));
  auroraProperties.put("ISPRODUCTION", isProduction() + "");

  auroraProperties.put("SANDBOX_INSTANCE_CLASSPATH", Context.instanceSandboxClassPath(config));
  auroraProperties.put("SANDBOX_METRICS_YAML", Context.metricsSinksSandboxFile(config));

  // Scheduler classpath = scheduler + packing + state manager, ':'-joined.
  String completeSchedulerClassPath = new StringBuilder()
      .append(Context.schedulerSandboxClassPath(config)).append(":")
      .append(Context.packingSandboxClassPath(config)).append(":")
      .append(Context.stateManagerSandboxClassPath(config))
      .toString();

  auroraProperties.put("SANDBOX_SCHEDULER_CLASSPATH", completeSchedulerClassPath);

  String heronCoreReleasePkgURI = Context.corePackageUri(config);
  String topologyPkgURI = Runtime.topologyPackageUri(runtime).toString();

  auroraProperties.put("CORE_PACKAGE_URI", heronCoreReleasePkgURI);
  auroraProperties.put("TOPOLOGY_PACKAGE_URI", topologyPkgURI);

  return auroraProperties;
}

protected boolean isProduction() {
  // TODO (nlu): currently enforce environment to be "prod" for a Production job
  return "prod".equals(Context.environ(config));
}
}
// Copyright 2013 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.api.ads.adwords.awreporting.model.definitions;

import com.google.api.ads.adwords.awreporting.model.entities.ReportGender;
import com.google.api.ads.adwords.lib.jaxb.v201506.ReportDefinitionReportType;

import junit.framework.Assert;

import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Tests the Gender Performance report definition.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:aw-report-model-test-beans.xml")
public class ReportGenderDefinitionTest extends AbstractReportDefinitionTest<ReportGender> {

  /**
   * Constructor: wires the entity class, report type, and the CSV fixture.
   */
  public ReportGenderDefinitionTest() {
    super(ReportGender.class,
        ReportDefinitionReportType.GENDER_PERFORMANCE_REPORT,
        "src/test/resources/csv/gender.csv");
  }

  // Verifies every mapped column of the first row of gender.csv.
  @Override
  protected void testFirstEntry(ReportGender row) {
    Assert.assertEquals(1234567890L, row.getAccountId().longValue());
    Assert.assertEquals("2013-09-10", row.getDay());
    Assert.assertEquals("My account", row.getAccountDescriptiveName());
    Assert.assertEquals(0.00, row.getCost().doubleValue());
    Assert.assertEquals(0L, row.getClicks().longValue());
    Assert.assertEquals(0.00, row.getAvgCpc().doubleValue());
    Assert.assertEquals(0.00, row.getAvgCpm().doubleValue());
    Assert.assertEquals("USD", row.getCurrencyCode());
    Assert.assertEquals("Computers", row.getDevice());
    Assert.assertEquals("Headline", row.getClickType());
    Assert.assertEquals("Search Network", row.getAdNetwork());
    Assert.assertEquals("Search partners", row.getAdNetworkPartners());
    Assert.assertEquals("(GMT-08:00) Pacific Time", row.getAccountTimeZoneId());
    Assert.assertEquals(4545192429L, row.getAdGroupId().longValue());
    Assert.assertEquals("AdGroup1", row.getAdGroupName());
    Assert.assertEquals("enabled", row.getAdGroupStatus());
    Assert.assertEquals(12345678L, row.getCampaignId().longValue());
    Assert.assertEquals("Campaign1", row.getCampaignName());
    Assert.assertEquals("enabled", row.getCampaignStatus());
    Assert.assertEquals(0.00, row.getConversionValueBigDecimal().doubleValue());
    Assert.assertEquals(0.00, row.getCostPerConversionManyPerClick().doubleValue());
    Assert.assertEquals(0.00, row.getMaxCpc().doubleValue());
    Assert.assertEquals("ad group criteria", row.getCpcBidSource());
    Assert.assertEquals("Male", row.getCriteria());
    Assert.assertEquals("http://example.com/index/A", row.getCriteriaDestinationUrl());
    Assert.assertEquals("ClientName1", row.getCustomerDescriptiveName());
    Assert.assertEquals("Tuesday", row.getDayOfWeek());
    Assert.assertEquals(41224769349L, row.getCriterionId().longValue());
    Assert.assertEquals(false, row.isNegative());
    Assert.assertEquals(false, row.isRestrict());
    Assert.assertEquals("September", row.getMonthOfYear());
    Assert.assertEquals("My company name", row.getPrimaryCompanyName());
    Assert.assertEquals("enabled", row.getStatus());
    Assert.assertEquals(0L, row.getViewThroughConversions().longValue());
  }

  // Verifies every mapped column of the last row of gender.csv.
  @Override
  protected void testLastEntry(ReportGender row) {
    Assert.assertEquals(1234567890L, row.getAccountId().longValue());
    Assert.assertEquals("2013-09-10", row.getDay());
    Assert.assertEquals("My account", row.getAccountDescriptiveName());
    Assert.assertEquals(0.00, row.getCost().doubleValue());
    Assert.assertEquals(0L, row.getClicks().longValue());
    Assert.assertEquals(0.00, row.getAvgCpc().doubleValue());
    Assert.assertEquals(0.00, row.getAvgCpm().doubleValue());
    Assert.assertEquals("USD", row.getCurrencyCode());
    Assert.assertEquals("Computers", row.getDevice());
    Assert.assertEquals("Sitelink", row.getClickType());
    Assert.assertEquals("Search Network", row.getAdNetwork());
    Assert.assertEquals("Search partners", row.getAdNetworkPartners());
    Assert.assertEquals("(GMT-08:00) Pacific Time", row.getAccountTimeZoneId());
    Assert.assertEquals(4545190149L, row.getAdGroupId().longValue());
    Assert.assertEquals("AdGroup2", row.getAdGroupName());
    Assert.assertEquals("enabled", row.getAdGroupStatus());
    Assert.assertEquals(12345678L, row.getCampaignId().longValue());
    Assert.assertEquals("Campaign1", row.getCampaignName());
    Assert.assertEquals("enabled", row.getCampaignStatus());
    Assert.assertEquals(0.00, row.getConversionValueBigDecimal().doubleValue());
    Assert.assertEquals(0.00, row.getCostPerConversionManyPerClick().doubleValue());
    Assert.assertEquals(2.50, row.getMaxCpc().doubleValue());
    Assert.assertEquals("ad group criteria", row.getCpcBidSource());
    Assert.assertEquals("Undetermined", row.getCriteria());
    Assert.assertEquals("http://example.com/index/A", row.getCriteriaDestinationUrl());
    Assert.assertEquals("ClientName1", row.getCustomerDescriptiveName());
    Assert.assertEquals("Tuesday", row.getDayOfWeek());
    Assert.assertEquals(20115029265L, row.getCriterionId().longValue());
    Assert.assertEquals(false, row.isNegative());
    Assert.assertEquals(false, row.isRestrict());
    Assert.assertEquals("September", row.getMonthOfYear());
    Assert.assertEquals("My company name", row.getPrimaryCompanyName());
    Assert.assertEquals("enabled", row.getStatus());
    Assert.assertEquals(0L, row.getViewThroughConversions().longValue());
  }

  // Number of data rows expected in the CSV fixture.
  @Override
  protected int retrieveCsvEntries() {
    return 10;
  }

  // Columns the report definition must select for this report type.
  @Override
  protected String[] retrievePropertiesToBeSelected() {
    return new String[] {
        // Report
        "ExternalCustomerId",
        // ReportBase
        "AccountDescriptiveName",
        "AccountTimeZoneId",
        "CustomerDescriptiveName",
        "PrimaryCompanyName",
        "AccountCurrencyCode",
        "Date",
        "DayOfWeek",
        "Week",
        "Month",
        "MonthOfYear",
        "Quarter",
        "Year",
        "Cost",
        "Clicks",
        "Impressions",
        "Ctr",
        "AverageCpm",
        "AverageCpc",
        "AveragePosition",
        "Device",
        "ClickType",
        "AdNetworkType1",
        "AdNetworkType2",
        "ConversionsManyPerClick",
        "ConversionRateManyPerClick",
        "CostPerConversionManyPerClick",
        "ValuePerConversionManyPerClick",
        "ConvertedClicks",
        "ClickConversionRate",
        "CostPerConvertedClick",
        "ValuePerConvertedClick",
        "ConversionCategoryName",
        "ConversionTypeName",
        "ConversionValue",
        "ViewThroughConversions",
        // Specific to the Gender Performance report
        "ActiveViewCpm",
        "ActiveViewImpressions",
        "AdGroupId",
        "AdGroupName",
        "AdGroupStatus",
        "BidModifier",
        "BidType",
        "CampaignId",
        "CampaignName",
        "CampaignStatus",
        "ConversionTrackerId",
        "CpcBid",
        "CpcBidSource",
        "CpmBid",
        "CpmBidSource",
        "Criteria",
        "CriteriaDestinationUrl",
        "FinalAppUrls",
        "FinalMobileUrls",
        "FinalUrls",
        "GmailForwards",
        "GmailSaves",
        "GmailSecondaryClicks",
        "Id",
        "IsNegative",
        "IsRestrict",
        "Status",
        "TrackingUrlTemplate",
        "UrlCustomParameters"
    };
  }
}
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.dmn.engine.evaluate;

import static org.assertj.core.api.Assertions.assertThat;
import static org.camunda.bpm.dmn.engine.test.asserts.DmnEngineTestAssertions.assertThat;
import static org.camunda.bpm.engine.variable.Variables.createVariables;
import static org.junit.Assert.fail;

import java.util.List;
import java.util.Map;

import org.camunda.bpm.dmn.engine.DmnDecisionRequirementsGraph;
import org.camunda.bpm.dmn.engine.DmnDecisionResult;
import org.camunda.bpm.dmn.engine.DmnDecisionTableResult;
import org.camunda.bpm.dmn.engine.DmnEngineException;
import org.camunda.bpm.dmn.engine.impl.DmnEvaluationException;
import org.camunda.bpm.dmn.engine.test.DmnEngineTest;
import org.camunda.commons.utils.IoUtil;
import org.junit.Test;

/**
 * Evaluation tests for decisions with required (child/parent) decisions,
 * literal expressions, and collect/rule-order hit policies.
 *
 * <p>Fix: the expected-exception tests previously used try/catch without a
 * {@code fail()} after the evaluation call, so they silently passed when no
 * exception was thrown at all. Each such test now fails explicitly.
 */
public class DmnDecisionEvaluationTest extends DmnEngineTest {

  public static final String DMN_MULTI_LEVEL_MULTIPLE_INPUT_SINGLE_OUTPUT =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateMultiLevelDecisionsWithMultipleInputAndSingleOutput.dmn";
  public static final String DMN_DECISIONS_WITH_MULTIPLE_MATCHING_RULES =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDecisionsWithMultipleMatchingRules.groovy.dmn";
  public static final String DMN_DECISIONS_WITH_NO_MATCHING_RULE_IN_PARENT =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDecisionsWithNoMatchingRuleInParent.groovy.dmn";
  public static final String DMN_DECISIONS_WITH_MULTIPLE_MATCHING_RULES_MULTIPLE_OUTPUTS =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDecisionsWithMultipleMatchingRulesAndMultipleOutputs.groovy.dmn";
  public static final String DMN_SHARED_DECISIONS =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateSharedDecisions.dmn";
  public static final String DMN_DECISIONS_WITH_DIFFERENT_INPUT_OUTPUT_TYPES =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDecisionsWithDifferentInputAndOutputTypes.groovy.dmn";
  public static final String DMN_DECISIONS_WITH_DEFAULT_RULE_IN_CHILD =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDecisionsWithDefaultRuleInChild.groovy.dmn";
  public static final String DMN_DECISIONS_WITH_INVALID_INPUT_TYPE =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDecisionsWithInvalidInputTypeInParent.groovy.dmn";
  public static final String DMN_DECISIONS_WITH_PARENT_DECISION =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDecisionsWithParentDecision.dmn";
  public static final String DMN_DECISIONS_WITH_DISH_DECISON_EXAMPLE =
      "org/camunda/bpm/dmn/engine/evaluate/EvaluateDrdDishDecisionExample.dmn";
  public static final String DMN_DECISION_WITH_LITERAL_EXPRESSION =
      "org/camunda/bpm/dmn/engine/evaluate/DecisionWithLiteralExpression.dmn";
  public static final String DMN_DRG_WITH_LITERAL_EXPRESSION =
      "org/camunda/bpm/dmn/engine/evaluate/DrgWithLiteralExpression.dmn";
  public static final String DMN_DECISION_WITH_BEAN_INVOCATION_IN_LITERAL_EXPRESSION =
      "org/camunda/bpm/dmn/engine/evaluate/DecisionWithBeanInvocationInLiteralExpression.dmn";
  public static final String DRG_COLLECT_DMN =
      "org/camunda/bpm/dmn/engine/transform/DrgCollectTest.dmn";
  public static final String DRG_RULE_ORDER_DMN =
      "org/camunda/bpm/dmn/engine/transform/DrgRuleOrderTest.dmn";

  @Test
  public void shouldEvaluateDrdDishDecisionExample() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("Dish", DMN_DECISIONS_WITH_DISH_DECISON_EXAMPLE),
        createVariables()
          .putValue("temperature", 20)
          .putValue("dayType", "Weekend"));

    assertThat(results)
      .hasSingleResult()
      .containsEntry("desiredDish", "Steak");
  }

  @Test
  public void shouldEvaluateDecisionWithRequiredDecisionByKey() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_MULTI_LEVEL_MULTIPLE_INPUT_SINGLE_OUTPUT),
        createVariables()
          .putValue("xx", "xx")
          .putValue("yy", "yy")
          .putValue("zz", "zz")
          .putValue("ll", "ll")
          .asVariableContext());

    assertThat(results)
      .hasSingleResult()
      .containsEntry("aa", "aa");
  }

  @Test
  public void shouldFailDecisionEvaluationWithRequiredDecisionAndNoMatchingRuleInChildDecision() {
    try {
      dmnEngine.evaluateDecisionTable(
          parseDecisionFromFile("A", DMN_MULTI_LEVEL_MULTIPLE_INPUT_SINGLE_OUTPUT),
          createVariables()
            .putValue("xx", "pp")
            .putValue("yy", "yy")
            .putValue("zz", "zz")
            .putValue("ll", "ll")
            .asVariableContext());
      // Fix: previously the test passed silently when no exception was thrown.
      fail("expected DmnEvaluationException");
    } catch (DmnEvaluationException e) {
      assertThat(e)
        .hasMessageStartingWith("DMN-01002")
        .hasMessageContaining("Unable to evaluate expression for language 'juel': '${dd}'");
    }
  }

  @Test
  public void shouldFailDecisionEvaluationWithRequiredDecisionAndMissingInput() {
    try {
      dmnEngine.evaluateDecisionTable(
          parseDecisionFromFile("A", DMN_MULTI_LEVEL_MULTIPLE_INPUT_SINGLE_OUTPUT),
          createVariables()
            .putValue("xx", "xx")
            .putValue("yy", "yy")
            .putValue("zz", "zz")
            .asVariableContext());
      // Fix: previously the test passed silently when no exception was thrown.
      fail("expected DmnEvaluationException");
    } catch (DmnEvaluationException e) {
      assertThat(e)
        .hasMessageStartingWith("DMN-01002")
        .hasMessageContaining("Unable to evaluate expression for language 'juel': '${ll}'");
    }
  }

  @Test
  public void shouldEvaluateDecisionsWithRequiredDecisionAndMultipleMatchingRules() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_MULTIPLE_MATCHING_RULES),
        createVariables()
          .putValue("dd", 3)
          .putValue("ee", "ee")
          .asVariableContext());

    List<Map<String, Object>> resultList = results.getResultList();
    assertThat(resultList.get(0)).containsEntry("aa", "aa");
    assertThat(resultList.get(1)).containsEntry("aa", "aaa");
  }

  @Test
  public void shouldEvaluateDecisionsWithRequiredDecisionAndMultipleMatchingRulesMultipleOutputs() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_MULTIPLE_MATCHING_RULES_MULTIPLE_OUTPUTS),
        createVariables()
          .putValue("dd", "dd")
          .putValue("ee", "ee")
          .asVariableContext());

    List<Map<String, Object>> resultList = results.getResultList();
    assertThat(resultList.get(0)).containsEntry("aa", "aa");
    assertThat(resultList.get(1)).containsEntry("aa", "aaa");
  }

  @Test
  public void shouldEvaluateDecisionWithRequiredDecisionAndNoMatchingRuleInParentDecision() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_NO_MATCHING_RULE_IN_PARENT),
        createVariables()
          .putValue("dd", "dd")
          .putValue("ee", "ee")
          .asVariableContext());

    List<Map<String, Object>> resultList = results.getResultList();
    assertThat(resultList.size()).isEqualTo(0);
  }

  @Test
  public void shouldEvaluateDecisionsWithRequiredDecisionAndParentDecision() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_PARENT_DECISION),
        createVariables()
          .putValue("ff", true)
          .putValue("dd", 5)
          .asVariableContext());

    assertThat(results)
      .hasSingleResult()
      .containsEntry("aa", 7.0);
  }

  @Test
  public void shouldEvaluateSharedDecisions() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_SHARED_DECISIONS),
        createVariables()
          .putValue("ff", "ff")
          .asVariableContext());

    assertThat(results)
      .hasSingleResult()
      .containsEntry("aa", "aa");
  }

  @Test
  public void shouldEvaluateDecisionsWithDifferentInputAndOutputTypes() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_DIFFERENT_INPUT_OUTPUT_TYPES),
        createVariables()
          .putValue("dd", "5")
          .putValue("ee", 21)
          .asVariableContext());

    assertThat(results.get(0))
      .containsEntry("aa", 7.1);

    results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_DIFFERENT_INPUT_OUTPUT_TYPES),
        createVariables()
          .putValue("dd", "5")
          .putValue("ee", 2147483650L)
          .asVariableContext());

    assertThat(results.get(0))
      .containsEntry("aa", 7.0);
  }

  @Test
  public void shouldEvaluateDecisionsWithNoMatchingRuleAndDefaultRuleInParent() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_DIFFERENT_INPUT_OUTPUT_TYPES),
        createVariables()
          .putValue("dd", "7")
          .putValue("ee", 2147483650L)
          .asVariableContext());

    assertThat(results)
      .hasSingleResult()
      .containsEntry("aa", 7.2);
  }

  @Test
  public void shouldEvaluateDecisionsWithDefaultRuleInChildDecision() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_DEFAULT_RULE_IN_CHILD),
        createVariables()
          .putValue("dd", "7") // There is no rule in the table matching the input 7
          .asVariableContext());

    assertThat(results)
      .hasSingleResult()
      .containsEntry("aa", 7.0);
  }

  @Test
  public void shouldEvaluateDecisionsWithUserInputForParentDecision() {
    DmnDecisionTableResult results = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("A", DMN_DECISIONS_WITH_DIFFERENT_INPUT_OUTPUT_TYPES),
        createVariables()
          .putValue("bb", "bb")
          .putValue("dd", "7")
          .putValue("ee", 2147483650L)
          .asVariableContext());

    // input value provided by the user is overriden by the child decision
    assertThat(results)
      .hasSingleResult()
      .containsEntry("aa", 7.2);
  }

  @Test
  public void shouldEvaluateDecisionsWithInputTypeMisMatchInChildDecision() {
    try {
      dmnEngine.evaluateDecisionTable(
          parseDecisionFromFile("A", DMN_DECISIONS_WITH_DIFFERENT_INPUT_OUTPUT_TYPES),
          createVariables()
            .putValue("dd", "7")
            .putValue("ee", "abc")
            .asVariableContext());
      // Fix: previously the test passed silently when no exception was thrown.
      fail("expected DmnEngineException");
    } catch (DmnEngineException e) {
      assertThat(e)
        .hasMessageStartingWith("DMN-01005")
        .hasMessageContaining("Invalid value 'abc' for clause with type 'long'");
    }
  }

  @Test
  public void shouldEvaluateDecisionsWithInputTypeMisMatchInParentDecision() {
    try {
      dmnEngine.evaluateDecisionTable(
          parseDecisionFromFile("A", DMN_DECISIONS_WITH_INVALID_INPUT_TYPE),
          createVariables()
            .putValue("dd", 5)
            .asVariableContext());
      // Fix: previously the test passed silently when no exception was thrown.
      fail("expected DmnEngineException");
    } catch (DmnEngineException e) {
      assertThat(e)
        .hasMessageStartingWith("DMN-01005")
        .hasMessageContaining("Invalid value 'bb' for clause with type 'integer'");
    }
  }

  @Test
  public void shouldEvaluateDecisionWithLiteralExpression() {
    DmnDecisionResult result = dmnEngine.evaluateDecision(
        parseDecisionFromFile("decision", DMN_DECISION_WITH_LITERAL_EXPRESSION),
        createVariables()
          .putValue("a", 2)
          .putValue("b", 3));

    assertThat(result.getSingleResult().keySet()).containsOnly("c");
    assertThat(result.getSingleEntry())
      .isNotNull()
      .isEqualTo(5);
  }

  @Test
  public void shouldEvaluateDecisionsDrgWithLiteralExpression() {
    DmnDecisionTableResult result = dmnEngine.evaluateDecisionTable(
        parseDecisionFromFile("dish-decision", DMN_DRG_WITH_LITERAL_EXPRESSION),
        createVariables()
          .putValue("temperature", 31)
          .putValue("dayType", "WeekDay"));

    assertThat(result)
      .hasSingleResult()
      .containsEntry("desiredDish", "Light Salad");
  }

  @Test
  public void shouldEvaluateDecisionWithBeanInvocationInLiteralExpression() {
    DmnDecisionResult result = dmnEngine.evaluateDecision(
        parseDecisionFromFile("decision", DMN_DECISION_WITH_BEAN_INVOCATION_IN_LITERAL_EXPRESSION),
        createVariables()
          .putValue("x", 2)
          .putValue("bean", new TestBean(3)));

    assertThat(result.getSingleEntry())
      .isNotNull()
      .isEqualTo(6);
  }

  @Test
  public void shouldEvaluateDecisionWithCollectHitPolicyReturningAList() {
    DmnDecisionRequirementsGraph graph =
        dmnEngine.parseDecisionRequirementsGraph(IoUtil.fileAsStream(DRG_COLLECT_DMN));
    initVariables();
    variables.putValue("dayType", "WeekDay");

    DmnDecisionResult result = dmnEngine.evaluateDecision(graph.getDecision("dish-decision"), variables);

    assertThat(result.getSingleEntry())
      .isNotNull()
      .isEqualTo("Steak");
  }

  @Test
  public void shouldEvaluateDecisionWithRuleOrderHitPolicyReturningAList() {
    DmnDecisionRequirementsGraph graph =
        dmnEngine.parseDecisionRequirementsGraph(IoUtil.fileAsStream(DRG_RULE_ORDER_DMN));
    initVariables();
    variables.putValue("dayType", "WeekDay");

    DmnDecisionResult result = dmnEngine.evaluateDecision(graph.getDecision("dish-decision"), variables);

    assertThat(result.getSingleEntry())
      .isNotNull()
      .isEqualTo("Steak");
  }

}
package org.restler.spring.data.methods.associations; import com.fasterxml.jackson.annotation.JsonFilter; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.JsonSerializable; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.jsontype.TypeSerializer; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.ser.FilterProvider; import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter; import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; import org.restler.client.RestlerException; import org.restler.spring.data.proxy.ResourceProxy; import org.restler.spring.data.util.Placeholder; import org.restler.spring.data.util.Repositories; import org.restler.spring.data.util.ResourceHelper; import org.restler.util.Pair; import javax.persistence.Entity; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import java.io.IOException; import java.lang.reflect.Field; import java.util.*; import java.util.stream.Collectors; public class ResourcesAndAssociations { //class for creating filter @JsonFilter("filter properties by name") private class PropertyFilterMixIn {} private static final ObjectMapper objectMapper = new ObjectMapper(); static { objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); objectMapper.addMixIn(Object.class, PropertyFilterMixIn.class); } private final Repositories repositories; private final String baseUri; private final List<AssociatedResource> resources = new ArrayList<>(); private final List<Association> associations = new ArrayList<>(); private final Map<AssociatedResource, List<Association>> associationsByResource = new 
HashMap<>(); public ResourcesAndAssociations(Repositories repositories, String baseUri, Object resource) { this.repositories = repositories; this.baseUri = baseUri; fillResourcesAndAssociations(resource, new HashSet<>()); } public List<AssociatedResource> getResources() { return resources; } public List<Association> getAssociations() { return associations; } public List<Association> getAssociationsByResource(AssociatedResource resource) { List<Association> associations = associationsByResource.get(resource); if(associations != null) { return associations; } return new ArrayList<>(); } //recursive method, get child resources for some resource and associations between them private AssociatedResource fillResourcesAndAssociations(Object object, Set<Object> set) { Object objectAtStart = object; set.add(object); if(object instanceof ResourceProxy) { object = ((ResourceProxy) object).getObject(); } List<Field> associateFields = new ArrayList<>(); List<Pair<Field, Object>> children = getChildren(object). stream(). filter(o -> takeNullFields(associateFields, o)). filter(this::isResourceOrCollection). collect(Collectors.toList()); List<String> ignorableFields = new ArrayList<>(); AssociatedResourceState resourceState = (objectAtStart instanceof ResourceProxy) ? 
AssociatedResourceState.Update : AssociatedResourceState.Create; AssociatedResource currentResource = new AssociatedResource(objectAtStart, new ObjectNode(JsonNodeFactory.instance), associateFields, resourceState); resources.add(currentResource); for(Pair<Field, Object> child : children) { child.getFirstValue().setAccessible(true); if(!set.contains(child.getSecondValue())) { if (child.getSecondValue() instanceof Collection) { ((Collection) child.getSecondValue()).stream().filter(item -> !set.contains(item)).forEach(item -> { AssociatedResource childResource = fillResourcesAndAssociations(item, set); List<Association> associateResult = associate(currentResource, child.getFirstValue(), childResource); associateResult.forEach(associations::add); associateResult.forEach(this::addAssociationByResource); }); } else { AssociatedResource childResource = fillResourcesAndAssociations(child.getSecondValue(), set); List<Association> associateResult = associate(currentResource, child.getFirstValue(), childResource); associateResult.forEach(associations::add); associateResult.forEach(this::addAssociationByResource); } } ignorableFields.add(child.getFirstValue().getName()); associateFields.add(child.getFirstValue()); child.getFirstValue().setAccessible(false); } //filtering associations fields FilterProvider filters = new SimpleFilterProvider() .addFilter("filter properties by name", SimpleBeanPropertyFilter.serializeAllExcept(ignorableFields.toArray(new String[ignorableFields.size()]))); ObjectWriter writer = objectMapper.writer(filters); try { //creates base json body without associations ObjectNode node = (ObjectNode) objectMapper.readTree(writer.writeValueAsString(object)); currentResource.getObjectNode().setAll(node); } catch (IOException e) { throw new RestlerException("Can't convert object to json", e); } associations.forEach(a -> a.getSecondResource().addIdPlaceholder(a.getIdPlaceholder())); return currentResource; } //create associations for parent and child resources 
private List<Association> associate(AssociatedResource parent, Field childField, AssociatedResource resource) {
        // Pairs the parent's field (childField) with one of the child's null "back
        // reference" fields, based on the JPA relation annotations on both sides,
        // and emits the Association objects describing the link(s).
        // Throws RestlerException when no annotation combination matches.
        Object parentResource = parent.getResource();
        if(parentResource instanceof ResourceProxy) {
            parentResource = ((ResourceProxy)parentResource).getObject();
        }
        Object childResource = resource.getResource();
        if(childResource instanceof ResourceProxy) {
            childResource = ((ResourceProxy) childResource).getObject();
        }
        List<Association> result = new ArrayList<>();
        for(Field nullField : resource.getAssociateFields()) {
            //OneToOne oneToOneChild = nullField.getAnnotation(OneToOne.class);
            ManyToOne manyToOneChild = nullField.getAnnotation(ManyToOne.class);
            OneToMany oneToManyChild = nullField.getAnnotation(OneToMany.class);
            ManyToMany manyToManyChild = nullField.getAnnotation(ManyToMany.class);
            //OneToOne oneToOneParent = childField.getAnnotation(OneToOne.class);
            ManyToOne manyToOneParent = childField.getAnnotation(ManyToOne.class);
            OneToMany oneToManyParent = childField.getAnnotation(OneToMany.class);
            ManyToMany manyToManyParent = childField.getAnnotation(ManyToMany.class);
            // Case 1: child side is @ManyToOne, parent side is the matching @OneToMany.
            // The match is either via mappedBy or a naming convention (field named
            // after the parent's lower-cased simple class name).
            if(manyToOneChild != null) {
                if(oneToManyParent != null && (oneToManyParent.mappedBy().equals(nullField.getName()) || nullField.getName().equals(parentResource.getClass().getSimpleName().toLowerCase()))) {
                    // Parent may not have an id yet; a placeholder stands in until it does.
                    Optional<Object> id = Optional.ofNullable(ResourceHelper.getId(parent.getResource()));
                    Placeholder<Object> idPlaceholder = new Placeholder<>(id.orElse("{missing id}").toString());
                    result.add(new Association(resource, parent, new Pair<>(nullField.getName(), ResourceHelper.getUri(repositories, baseUri, parent.getResource(), idPlaceholder)), AssociationType.ManyToOne, idPlaceholder));
                    if(ResourceHelper.getId(resource.getResource()) != null) {
                        result.add(new Association(parent, resource, new Pair<>(childField.getName(), ResourceHelper.getUri(repositories, baseUri, resource.getResource())), AssociationType.OneToMany));
                    }
                    return result;
                }
            }
            // Case 2: child side is @OneToMany pointing back at the parent's field.
            if(oneToManyChild != null) {
                // NOTE(review): the naming-convention fallback compares against
                // resource.getClass() (the AssociatedResource wrapper), not the
                // unwrapped childResource as in the other branches — confirm intent.
                if(oneToManyChild.mappedBy().equals(childField.getName()) || childField.getName().equals(resource.getClass().getSimpleName().toLowerCase())) {
                    if(manyToOneParent != null) {
                        Optional<Object> id = Optional.ofNullable(ResourceHelper.getId(parent.getResource()));
                        Placeholder<Object> idPlaceholder = new Placeholder<>(id.orElse("{missing id}").toString());
                        result.add(new Association(parent, resource, new Pair<>(childField.getName(), ResourceHelper.getUri(repositories, baseUri, resource.getResource(), idPlaceholder)), AssociationType.ManyToOne, idPlaceholder));
                    }
                    if(ResourceHelper.getId(parent.getResource()) != null) {
                        result.add(new Association(resource, parent, new Pair<>(childField.getName(), ResourceHelper.getUri(repositories, baseUri, parent.getResource())), AssociationType.OneToMany));
                    }
                    return result;
                }
            }
            // Case 3: @ManyToMany on the child side; matched via mappedBy on either
            // side or the pluralized ("<name>s") naming convention.
            if(manyToManyChild != null) {
                if(manyToManyChild.mappedBy().equals(childField.getName())
                        || (childResource.getClass().getSimpleName().toLowerCase() + "s").equals(childField.getName())
                        || (manyToManyParent != null && manyToManyParent.mappedBy().equals(nullField.getName()))
                        || (parentResource.getClass().getSimpleName().toLowerCase() + "s").equals(nullField.getName())) {
                    if(manyToManyParent != null) {
                        Optional<Object> id = Optional.ofNullable(ResourceHelper.getId(parent.getResource()));
                        Placeholder<Object> idPlaceholder = new Placeholder<>(id.orElse("{missing id}").toString());
                        result.add(new Association(parent, resource, new Pair<>(childField.getName(), ResourceHelper.getUri(repositories, baseUri, resource.getResource(), idPlaceholder)), AssociationType.ManyToMany, idPlaceholder));
                    }
                    Optional<Object> id = Optional.ofNullable(ResourceHelper.getId(parent.getResource()));
                    Placeholder<Object> idPlaceholder = new Placeholder<>(id.orElse("{missing id}").toString());
                    result.add(new Association(resource, parent, new Pair<>(nullField.getName(), ResourceHelper.getUri(repositories, baseUri, parent.getResource(), idPlaceholder)), AssociationType.ManyToMany, idPlaceholder));
                    return result;
                }
            }
        }
        throw new RestlerException("Can't make association.");
    }

    // Appends the association to the per-resource index (keyed by first resource),
    // creating the bucket on first use.
    private void addAssociationByResource(Association association) {
        List<Association> resourceAssociations = associationsByResource.get(association.getFirstResource());
        if(resourceAssociations != null) {
            resourceAssociations.add(association);
        } else {
            resourceAssociations = new ArrayList<>();
            resourceAssociations.add(association);
            associationsByResource.put(association.getFirstResource(), resourceAssociations);
        }
    }

    // Reflectively reads every declared field of `object` (unwrapping a proxy first)
    // into (field, value) pairs. Note: only *declared* fields of the concrete class
    // are read; inherited fields are not included.
    private List<Pair<Field, Object>> getChildren(Object object) {
        List<Pair<Field, Object>> result = new ArrayList<>();
        if(object instanceof ResourceProxy) {
            object = ((ResourceProxy) object).getObject();
        }
        Class<?> argClass = object.getClass();
        Field[] fields = argClass.getDeclaredFields();
        for(Field field : fields) {
            try {
                field.setAccessible(true);
                Object fieldValue = field.get(object);
                result.add(new Pair<>(field, fieldValue));
                field.setAccessible(false);
            } catch (IllegalAccessException e) {
                throw new RestlerException("Can't get value from field", e);
            }
        }
        return result;
    }

    // True when the field value is a JPA @Entity (after proxy unwrapping) or any Collection.
    private boolean isResourceOrCollection(Pair<Field, Object> item) {
        Object value = item.getSecondValue();
        if(value instanceof ResourceProxy) {
            value = ((ResourceProxy)item.getSecondValue()).getObject();
        }
        return value.getClass().isAnnotationPresent(Entity.class) || value instanceof Collection;
    }

    // Filter predicate with a side effect: null-valued fields are recorded into
    // `nullFields` and excluded from the stream; non-null fields pass through.
    private boolean takeNullFields(List<Field> nullFields, Pair<Field, Object> object) {
        if(object.getSecondValue() == null) {
            nullFields.add(object.getFirstValue());
            return false;
        }
        return true;
    }
}
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.stage.destination.hdfs.writer; import com.google.common.annotations.VisibleForTesting; import com.streamsets.pipeline.api.Record; import com.streamsets.pipeline.api.StageException; import com.streamsets.pipeline.api.impl.Utils; import com.streamsets.pipeline.lib.hdfs.common.Errors; import org.apache.hadoop.fs.FileSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.Queue; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; public class ActiveRecordWriters { private static final Logger LOG = LoggerFactory.getLogger(ActiveRecordWriters.class); private static final boolean IS_TRACE_ENABLED = LOG.isTraceEnabled(); private static class DelayedRecordWriter implements Delayed { private final RecordWriter writer; public DelayedRecordWriter(RecordWriter writer) { this.writer = writer; } @Override public long getDelay(TimeUnit unit) { return unit.convert(writer.getExpiresOn() - System.currentTimeMillis(), TimeUnit.MILLISECONDS); } @Override public int compareTo(Delayed o) { long diff = writer.getExpiresOn() - ((DelayedRecordWriter)o).writer.getExpiresOn(); return (diff > 0) ? 1 : (diff < 0) ? 
-1 : 0; } public boolean equals(Delayed o) { return compareTo(o) == 0; } public RecordWriter getWriter() { return writer; } @Override public String toString() { return Utils.format("DelayedRecordWriter[path='{}' expiresInSecs='{}'", writer.getPath(), getDelay(TimeUnit.SECONDS)); } } private final RecordWriterManager manager; @VisibleForTesting Map<String, RecordWriter> writers; private Queue<DelayedRecordWriter> cutOffQueue; public ActiveRecordWriters(RecordWriterManager manager) { writers = new HashMap<>(); cutOffQueue = new DelayQueue<>(); this.manager = manager; } public void commitOldFiles(FileSystem fs) throws IOException, StageException { manager.commitOldFiles(fs); } public void purge() throws IOException, StageException { if (IS_TRACE_ENABLED) { LOG.trace("Purge"); } DelayedRecordWriter delayedWriter = cutOffQueue.poll(); while (delayedWriter != null) { if (!delayedWriter.getWriter().isClosed()) { if (IS_TRACE_ENABLED) { LOG.trace("Purging '{}'", delayedWriter.getWriter().getPath()); } //We are fine no lock on writer needed. synchronized (this) { writers.remove(delayedWriter.getWriter().getPath().toString()); } manager.commitWriter(delayedWriter.getWriter()); } delayedWriter = cutOffQueue.poll(); } } public RecordWriter get(Date now, Date recordDate, Record record) throws StageException, IOException { String path = manager.getPath(recordDate, record).toString(); RecordWriter writer = null; //We are fine no lock on writer needed. synchronized (this) { writer = writers.get(path); } if(writer != null && manager.shouldRoll(writer, record)) { release(writer, true); writer = null; } if (writer == null) { writer = manager.getWriter(now, recordDate, record); if (writer != null) { if (IS_TRACE_ENABLED) { LOG.trace("Got '{}'", writer.getPath()); } writer.setActiveRecordWriters(this); //We are fine no lock on writer needed. 
synchronized(this) { writers.put(path, writer); } cutOffQueue.add(new DelayedRecordWriter(writer)); } } return writer; } public RecordWriterManager getWriterManager() { return manager; } @VisibleForTesting public int getActiveWritersCount() { return cutOffQueue.size(); } //The whole function is synchronized because //the locks always have to taken in the following order //1. ActiveRecordWriters and 2. RecordWriter (if we need both of them) //or else we will get into a deadlock //For Ex: idle close thread calls this method //and the hdfsTarget (in the pipeline runnable thread), calls flushAll public synchronized void release(RecordWriter writer, boolean roll) throws StageException, IOException { writer.closeLock(); try { if (roll || writer.isIdleClosed() || manager.isOverThresholds(writer)) { if (IS_TRACE_ENABLED) { LOG.trace("Release '{}'", writer.getPath()); } writers.remove(writer.getPath().toString()); manager.commitWriter(writer); } } finally { writer.closeUnlock(); } purge(); } public synchronized void flushAll() throws StageException { if (IS_TRACE_ENABLED) { LOG.trace("Flush all '{}'", toString()); } for (RecordWriter writer : writers.values()) { if (!writer.isClosed()) { try { writer.flush(); } catch (IOException ex) { String msg = Utils.format("Flush failed on file : '{}'", writer.getPath().toString()); LOG.error(msg); throw new StageException(Errors.HADOOPFS_58, writer.getPath().toString(), ex); } } } } public synchronized void closeAll() throws StageException{ if (IS_TRACE_ENABLED) { LOG.trace("Close all '{}'", toString()); } if(writers != null) { for (RecordWriter writer : writers.values()) { writer.closeLock(); try { if (!writer.isClosed()) { manager.commitWriter(writer); } } catch (IOException ex) { String msg = Utils.format("Error closing writer {} : {}", writer, ex); LOG.warn(msg, ex); } finally { writer.closeUnlock(); } } } writers = null; cutOffQueue = null; } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2013 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.ascanrules; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.commons.collections.map.ReferenceMap; import org.apache.log4j.Logger; import org.parosproxy.paros.db.DatabaseException; import org.parosproxy.paros.model.HistoryReference; import org.parosproxy.paros.model.Model; import org.parosproxy.paros.network.HttpMalformedHeaderException; import org.parosproxy.paros.network.HttpMessage; public class PersistentXssUtils { private static int uniqueIndex; public static String PXSS_PREFIX = "zApPX"; public static String PXSS_POSTFIX = "sS"; private static Map<String, UserDataSource> map; private static Map<String, HashSet<Integer>> sourceToSinks; /** * A {@code Map} to cache the URIs used by source messages ({@code UserDataSource}). * * <p>The URIs will be different {@code String} objects (see {@code URI#toString()}) while * representing the same URI. This happens for each parameter attacked per source message which * would lead to multiple duplicated {@code String}s. 
* * @see #getCachedItem(Map, String) * @see UserDataSource#UserDataSource(HttpMessage, String) * @see org.apache.commons.httpclient.URI#toString() */ private static Map<String, String> cachedUris; /** * A {@code Map} to cache the parameter names used by source messages ({@code UserDataSource}). * * <p>The parameter names will be different {@code String} objects (see {@code Variant} * implementations) while representing the same parameter names. This happens for each parameter * attacked per source message which would lead to multiple duplicated {@code String}s. * * @see #getCachedItem(Map, String) * @see UserDataSource#UserDataSource(HttpMessage, String) * @see org.parosproxy.paros.core.scanner.Variant */ private static Map<String, String> cachedParams; private static Logger log = Logger.getLogger(PersistentXssUtils.class); static { reset(); } public static String getUniqueValue(HttpMessage msg, String param) { String uniqueVal = PXSS_PREFIX + uniqueIndex++ + PXSS_POSTFIX; map.put(uniqueVal, new UserDataSource(msg, param)); return uniqueVal; } public static void testForSink(HttpMessage msg) { String body = msg.getResponseBody().toString(); int start = body.indexOf(PXSS_PREFIX); while (start > 0) { int end = body.indexOf(PXSS_POSTFIX, start); if (end > 0) { String uniqueVal = body.substring(start, end + PXSS_POSTFIX.length()); UserDataSource source = map.get(uniqueVal); if (source != null) { setSinkForSource(source, msg); } start = body.indexOf(PXSS_PREFIX, end); } else { break; } } } public static void setSinkForSource(HttpMessage sourceMsg, String param, HttpMessage sinkMsg) { setSinkForSource(new UserDataSource(sourceMsg, param), sinkMsg); } private static void setSinkForSource(UserDataSource source, HttpMessage sinkMsg) { if (log.isDebugEnabled()) { log.debug( "setSinkForSource src=" + source.getUri() + " param=" + source.getParam() + " sink=" + sinkMsg.getRequestHeader().getURI()); } HashSet<Integer> sinks = sourceToSinks.get(source.toString()); if (sinks == 
null) { sinks = new HashSet<>(); } try { HistoryReference hRef = new HistoryReference( Model.getSingleton().getSession(), HistoryReference.TYPE_SCANNER_TEMPORARY, sinkMsg); sinks.add(Integer.valueOf(hRef.getHistoryId())); sourceToSinks.put(source.toString(), sinks); } catch (HttpMalformedHeaderException | DatabaseException e) { log.warn("Failed to persist HTTP message to database:", e); } } /** * Gets the IDs of the sink messages for the given message and parameter. * * @param sourceMsg the source message * @param param the parameter being tested * @return the IDs of the messages that match the given source message and parameter, {@code * null} if no matches * @see #getMessage(int) */ public static Set<Integer> getSinksIdsForSource(HttpMessage sourceMsg, String param) { UserDataSource source = new UserDataSource(sourceMsg, param); if (log.isDebugEnabled()) { log.debug( "getSinksIdsForSource src=" + source.getUri() + " param=" + param + " sinks=" + sourceToSinks.get(source.toString())); } return sourceToSinks.get(source.toString()); } /** Resets the state of {@code PersistentXssUtils}. */ @SuppressWarnings("unchecked") public static void reset() { uniqueIndex = 0; map = new HashMap<>(); sourceToSinks = new HashMap<>(); cachedUris = Collections.synchronizedMap(new ReferenceMap(ReferenceMap.SOFT, ReferenceMap.SOFT)); cachedParams = Collections.synchronizedMap(new ReferenceMap(ReferenceMap.SOFT, ReferenceMap.SOFT)); } /** * Gets the message with the given ID. 
* * @param sinkMsgId the ID of the message * @return the message with the given ID, or {@code null} if it was not possible to obtain the * message * @see #getSinksIdsForSource(HttpMessage, String) */ public static HttpMessage getMessage(int sinkMsgId) { try { return new HistoryReference(sinkMsgId).getHttpMessage(); } catch (HttpMalformedHeaderException | DatabaseException e) { log.warn("Failed to read HTTP message from database:", e); } return null; } private static String getCachedItem(Map<String, String> map, String item) { String cachedItem = map.get(item); if (cachedItem != null) { return cachedItem; } map.put(item, item); return item; } private static class UserDataSource { private final String uri; private final String param; private final String stringRepresentation; public UserDataSource(HttpMessage sourceMsg, String param) { super(); this.uri = getCachedItem(cachedUris, sourceMsg.getRequestHeader().getURI().toString()); this.param = getCachedItem(cachedParams, param); this.stringRepresentation = uri + "#" + param; } @Override public String toString() { return stringRepresentation; } public String getUri() { return uri; } public String getParam() { return param; } } }
package com.planet_ink.coffee_mud.Abilities.Skills; import com.planet_ink.coffee_mud.Abilities.StdAbility; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.TrackingLibrary; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2019-2022 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
// Tracking skill that leads the invoker to the nearest room owned by their clan.
// While active, nextDirection acts as a small state machine:
//   >= 0  -> keep walking in that direction along theTrail
//   -2    -> idle / waiting for the next look-triggered direction update
//   -1    -> trail exhausted without reaching a clan home
//   999   -> destination reached (verify and report)
//   -999  -> finished; skill is winding down
public class Skill_FindClanHome extends StdAbility
{
	@Override
	public String ID()
	{
		return "Skill_FindClanHome";
	}

	private final static String localizedName = CMLib.lang().L("Find Clan Home");

	@Override
	public String name()
	{
		return localizedName;
	}

	private final static String localizedStaticDisplay = CMLib.lang().L("(finding your clan home)");

	@Override
	public String displayText()
	{
		return localizedStaticDisplay;
	}

	@Override
	protected int canAffectCode()
	{
		return CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return 0;
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_OK_SELF;
	}

	private static final String[] triggerStrings = I(new String[] { "FINDCLANHOME" });

	@Override
	public String[] triggerStrings()
	{
		return triggerStrings;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_SKILL | Ability.DOMAIN_NATURELORE;
	}

	@Override
	public long flags()
	{
		return Ability.FLAG_TRACKING;
	}

	// Path of rooms from the invoker's location to the chosen clan-home room.
	protected List<Room> theTrail = null;
	// State machine value (see class comment); starts idle.
	public int nextDirection = -2;

	@Override
	public void unInvoke()
	{
		// Only tear down when still attached to a MOB.
		if(!(affected instanceof MOB))
			return;
		super.unInvoke();
	}

	// Per-tick driver: reports progress, auto-walks NPCs along the trail, and
	// finishes the skill when the trail ends or the destination is reached.
	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		if(!super.tick(ticking,tickID))
			return false;
		if(tickID==Tickable.TICKID_MOB)
		{
			if(nextDirection==-999)
				return true; // already winding down; nothing to do this tick
			if((theTrail==null)
			||(affected == null)
			||(!(affected instanceof MOB)))
				return false;
			final MOB mob=(MOB)affected;
			if(nextDirection==999)
			{
				// Arrived: confirm the room really is the clan home.
				if(isClanHome(mob,mob.location()))
					mob.tell(L("You feel like this is home."));
				else
					mob.tell(L("The trail home dries up here."));
				nextDirection=-2;
				unInvoke();
			}
			else
			if(nextDirection==-1)
			{
				// Trail ran out before reaching a clan home.
				if(!isClanHome(mob,mob.location()))
					mob.tell(L("The trail home dries up here."));
				nextDirection=-999;
				unInvoke();
			}
			else
			if(nextDirection>=0)
			{
				mob.tell(L("The way home seems to continue @x1.",CMLib.directions().getDirectionName(nextDirection)));
				if(mob.isMonster())
				{
					// NPCs walk themselves, but only within the same area.
					final Room nextRoom=mob.location().getRoomInDir(nextDirection);
					if((nextRoom!=null)&&(nextRoom.getArea()==mob.location().getArea()))
					{
						final int dir=nextDirection;
						nextDirection=-2;
						CMLib.tracking().walk(mob,dir,false,false);
					}
					else
						unInvoke();
				}
				else
					nextDirection=-2; // players move themselves; wait for next LOOK
			}
		}
		return true;
	}

	// Updates nextDirection whenever the affected MOB looks at their room, and
	// appends a "feels like home" note when examining a clan-home room.
	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		super.executeMsg(myHost,msg);
		if(!(affected instanceof MOB))
			return;
		final MOB mob=(MOB)affected;
		if((msg.amISource(mob))
		&&(msg.amITarget(mob.location()))
		&&(CMLib.flags().canBeSeenBy(mob.location(),mob))
		&&(msg.targetMinor()==CMMsg.TYP_LOOK))
			nextDirection=CMLib.tracking().trackNextDirectionFromHere(theTrail,mob.location(),false);
		else
		if((affected!=null)
		&&(affected instanceof MOB)
		&&(msg.target() instanceof Room)
		&&(msg.amISource((MOB)affected))
		&&((msg.sourceMinor()==CMMsg.TYP_LOOK)||(msg.sourceMinor()==CMMsg.TYP_EXAMINE)))
		{
			if((msg.tool()!=null)&&(msg.tool().ID().equals(ID())))
			{
				// Our own trailer message (added below) — just report the feeling.
				if(isClanHome((MOB)affected,(Room)msg.target()))
					((MOB)affected).tell(L("This place feels very clan homey."));
			}
			else
			if(isClanHome((MOB)affected,(Room)msg.target()))
			{
				// Append a follow-up LOOK message tagged with this skill as the tool.
				final CMMsg msg2=CMClass.getMsg(msg.source(),msg.target(),this,CMMsg.MSG_LOOK,CMMsg.NO_EFFECT,CMMsg.NO_EFFECT,null);
				msg.addTrailerMsg(msg2);
			}
		}
	}

	@Override
	public void affectPhyStats(final Physical affectedEnv, final PhyStats affectableStats)
	{
		// While finding home, the bearer cannot be tracked.
		affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_TRACK);
		super.affectPhyStats(affectedEnv, affectableStats);
	}

	// A room counts as a clan home when the mob has (weak) privileges there and
	// the land owner name resolves to an actual clan.
	private boolean isClanHome(final MOB mob, final Room room)
	{
		if(CMLib.law().doesHaveWeakPriviledgesHere(mob, room))
		{
			final String ownerName=CMLib.law().getLandOwnerName(room);
			if(CMLib.clans().getClanExact(ownerName)!=null)
				return true;
		}
		return false;
	}

	@Override
	public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
	{
		// Invoking any tracking skill cancels all other active tracking effects;
		// invoking with no arguments while tracking just stops tracking.
		final List<Ability> V=CMLib.flags().flaggedAffects(mob,Ability.FLAG_TRACKING);
		for(final Ability A : V)
			A.unInvoke();
		if(V.size()>0)
		{
			mob.tell(L("You stop tracking."));
			if(commands.size()==0)
				return true;
		}

		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		if(isClanHome(mob, mob.location()))
		{
			mob.tell(L("You already feel at home"));
			return true;
		}

		final boolean success=proficiencyCheck(mob,0,auto);
		final ArrayList<Room> rooms=new ArrayList<Room>();
		TrackingLibrary.TrackingFlags flags;
		flags = CMLib.tracking().newFlags()
				.plus(TrackingLibrary.TrackingFlag.NOEMPTYGRIDS)
				.plus(TrackingLibrary.TrackingFlag.NOAIR);
		// Search radius scales with expertise levels.
		final int range=60 + (2*super.getXLEVELLevel(mob))+(10*super.getXMAXRANGELevel(mob));
		final List<Room> checkSet=CMLib.tracking().getRadiantRooms(mob.location(),flags,range);
		for (final Room room : checkSet)
		{
			final Room R=CMLib.map().getRoom(room);
			if(isClanHome(mob,R))
				rooms.add(R);
		}
		if(rooms.size()>0)
			theTrail=CMLib.tracking().findTrailToAnyRoom(mob.location(),rooms,flags,range);

		if((success)&&(theTrail!=null))
		{
			final CMMsg msg=CMClass.getMsg(mob,null,this,CMMsg.MSG_QUIETMOVEMENT,auto?L("<S-NAME> begin(s) feeling <S-HIS-HER> way to <S-HIS-HER> clan home!"):L("<S-NAME> begin(s) heading to <S-HIS-HER> clan home."));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				// Attach a fresh copy as the active effect and seed its first direction.
				final Skill_FindClanHome newOne=(Skill_FindClanHome)this.copyOf();
				if(mob.fetchEffect(newOne.ID())==null)
					mob.addEffect(newOne);
				mob.recoverPhyStats();
				newOne.nextDirection=CMLib.tracking().trackNextDirectionFromHere(newOne.theTrail,mob.location(),false);
			}
		}
		else
			beneficialVisualFizzle(mob,null,L("<S-NAME> attempt(s) to find <S-HIS-HER> clan home, but fail(s)."));
		return success;
	}
}
/**
 * Copyright (C) 2006 Dragos Balan (dragos.balan@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 *
 */
package net.sf.reportengine.core.steps.crosstab;

import java.util.List;

import net.sf.reportengine.config.PivotData;
import net.sf.reportengine.components.CellProps;
import net.sf.reportengine.components.RowProps;
import net.sf.reportengine.config.DataColumn;
import net.sf.reportengine.config.GroupColumn;
import net.sf.reportengine.config.HorizAlign;
import net.sf.reportengine.config.SecondProcessDataColumn;
import net.sf.reportengine.config.SecondProcessDataColumnFromOriginalDataColumn;
import net.sf.reportengine.config.SecondProcessTotalColumn;
import net.sf.reportengine.core.steps.AbstractOutputInitStep;
import net.sf.reportengine.core.steps.ColumnHeaderOutputInitStep;
import net.sf.reportengine.core.steps.StepInput;
import net.sf.reportengine.core.steps.StepResult;
import net.sf.reportengine.util.StepIOKeys;
import net.sf.reportengine.util.CtMetadata;
import net.sf.reportengine.util.AlgoIOKeys;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static net.sf.reportengine.util.AlgoIOKeys.*;
import static net.sf.reportengine.util.StepIOKeys.*;

/**
 * displays the column headers for the crosstab report
 *
 * <p>Each header row is rendered left to right: first the group-column headers
 * (only populated on the last header row), then one cell per data column, where
 * data columns are plain crosstab value columns ({@code SecondProcessDataColumn},
 * possibly spanning several columns), total columns
 * ({@code SecondProcessTotalColumn}) or pass-through original columns
 * ({@code SecondProcessDataColumnFromOriginalDataColumn}).
 *
 * @author dragos balan
 * @since 0.4
 */
public class CrosstabHeaderOutputInitStep extends AbstractOutputInitStep<String>{

	/**
	 * the one and only logger
	 */
	private static final Logger LOGGER = LoggerFactory.getLogger(CrosstabHeaderOutputInitStep.class);

	/**
	 * Entry point: renders all header rows for the crosstab report.
	 * Always returns {@link StepResult#NO_RESULT}.
	 */
	public StepResult<String> init(StepInput stepInput){
		// outputTitle(getReportTitle(stepInput),
		//				getDataColumnsLength(stepInput) + getGroupColumnsLength(stepInput),
		//				getReportOutput(stepInput));
		outputHeaderRows(	stepInput,
							getCrosstabMetadata(stepInput),
							getDataColumns(stepInput),
							getGroupColumns(stepInput),
							getCrosstabData(stepInput));
		return StepResult.NO_RESULT;
	}

	/** Crosstab metadata (distinct header values, colspans, row count) from the step context. */
	protected CtMetadata getCrosstabMetadata(StepInput stepInput){
		return (CtMetadata)stepInput.getContextParam(CROSSTAB_METADATA);
	}

	/** The pivot (crosstab) data from the algorithm input. */
	public PivotData getCrosstabData(StepInput stepInput){
		return (PivotData)stepInput.getAlgoInput(CROSSTAB_DATA);
	}

	@Override
	public List<DataColumn> getDataColumns(StepInput stepInput){
		return (List<DataColumn>)stepInput.getContextParam(INTERNAL_DATA_COLS);
	}

	@Override
	public List<GroupColumn> getGroupColumns(StepInput stepInput){
		return (List<GroupColumn>)stepInput.getContextParam(INTERNAL_GROUP_COLS);
	}

	/**
	 * Renders every header row: group-column headers first, then each data column,
	 * dispatching on the concrete data-column type.
	 *
	 * @param stepInput  the step input/output context
	 * @param ctMetadata crosstab metadata (header row count, distinct values, colspans)
	 * @param dataCols   the (second-process) data columns to render headers for
	 * @param groupCols  the group columns (may be null/empty)
	 * @param ctData     the pivot data (supplies the calculator label for totals)
	 * @throws IllegalArgumentException on an unrecognized data-column type
	 */
	private void outputHeaderRows(	StepInput stepInput,
									CtMetadata ctMetadata,
									List<DataColumn> dataCols,
									List<GroupColumn> groupCols,
									PivotData ctData){
		//loop through all header rows
		for (int currHeaderRow = 0; currHeaderRow < ctMetadata.getHeaderRowsCount(); currHeaderRow++) {
			//reportOutput.startHeaderRow(new RowProps(currHeaderRow));
			outputOneValue(stepInput, ColumnHeaderOutputInitStep.START_HEADER_ROW_TEMPLATE, new RowProps(currHeaderRow));
			boolean isLastHeaderRow = currHeaderRow == ctMetadata.getHeaderRowsCount()-1;

			//1. handle grouping columns header first
			displayHeaderForGroupingCols(	stepInput,
											groupCols,
											isLastHeaderRow,
											currHeaderRow);

			//2. now loop through data columns
			int currentColumn = 0;
			while(currentColumn < dataCols.size()){
				DataColumn currentDataColumn = dataCols.get(currentColumn);

				//if this column is a column created during
				if(currentDataColumn instanceof SecondProcessDataColumn){
					// value column headers may span several columns -> skip the colspan
					int colspan = displayDataColumnHeader(	stepInput,
															(SecondProcessDataColumn)currentDataColumn,
															ctMetadata,
															currHeaderRow,
															isLastHeaderRow);
					currentColumn += colspan;
				}else{
					if(currentDataColumn instanceof SecondProcessTotalColumn){
						displayHeaderForTotalColumn(stepInput,
													(SecondProcessTotalColumn)currentDataColumn,
													ctMetadata,
													currHeaderRow,
													ctData.getCalculator().getLabel());
						currentColumn++;
					}else{
						if(currentDataColumn instanceof SecondProcessDataColumnFromOriginalDataColumn){
							displayHeaderForOriginalDataColumn(	stepInput,
																currentDataColumn,
																isLastHeaderRow,
																currHeaderRow);
							currentColumn++;
						}else{
							//no other type of data column is accepted
							throw new IllegalArgumentException("there's no handler for "+currentDataColumn.getClass());
						}
					}
				}
			}//end while

			//reportOutput.endHeaderRow();
			outputNoValue(stepInput, ColumnHeaderOutputInitStep.END_HEADER_ROW_TEMPLATE);
		}
	}

	/**
	 * displays the headers for group columns
	 *
	 * <p>Group-column headers appear only on the last header row; earlier rows get
	 * whitespace placeholder cells so columns stay aligned.
	 *
	 * @param groupCols the group columns (null/empty tolerated)
	 * @param isLastHeaderRow whether this is the last (bottom) header row
	 * @param rowNumber the current header row index
	 */
	private void displayHeaderForGroupingCols(	StepInput stepInput,
												List<GroupColumn> groupCols,
												boolean isLastHeaderRow,
												int rowNumber) {
		//if last header row write the normal column headers
		if(groupCols != null && groupCols.size() > 0){
			if(isLastHeaderRow){
				//for group columns only the last header row will contain something
				// the first will be empty
				for (int i = 0; i < groupCols.size(); i++) {
					CellProps cellProps = new CellProps.Builder(groupCols.get(i).getHeader()).colspan(1).horizAlign(HorizAlign.CENTER).rowNumber(rowNumber).build();
					//reportOutput.outputHeaderCell(cellProps);
					outputOneValue(	stepInput,
									ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
									cellProps);
				}
			}else{
				//first header rows will contain only spaces (for group headers):
				for (int i = 0; i < groupCols.size(); i++) {
					CellProps cellProps = new CellProps.Builder(CellProps.WHITESPACE).rowNumber(rowNumber).build();
					//reportOutput.outputDataCell(cellProps);
					outputOneValue(	stepInput,
									ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
									cellProps);
				}
			}
		}else{
			LOGGER.debug("no group columns headers found");
		}
	}

	/**
	 * displays the header for the original data columns
	 *
	 * <p>The header value is shown only on the last header row; earlier rows get a
	 * whitespace placeholder.
	 *
	 * @param currentDataColumn the column (must be a SecondProcessDataColumnFromOriginalDataColumn)
	 * @param isLastHeaderRow whether this is the last header row
	 * @param rowNumber the current header row index
	 */
	private void displayHeaderForOriginalDataColumn(	StepInput stepInput,
														DataColumn currentDataColumn,
														boolean isLastHeaderRow,
														int rowNumber) {
		//only on the last header row we display the header values for the original data columns
		if(isLastHeaderRow){
			SecondProcessDataColumnFromOriginalDataColumn originalDataColumn = (SecondProcessDataColumnFromOriginalDataColumn)currentDataColumn;
			CellProps cellProps = new CellProps.Builder(originalDataColumn.getHeader()).colspan(1).horizAlign(HorizAlign.CENTER).rowNumber(rowNumber).build();
			//reportOutput.outputHeaderCell(cellProps);
			outputOneValue(	stepInput,
							ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
							cellProps);
		}else{
			//first header rows will contain empty cells
			//reportOutput.outputDataCell(new CellProps.Builder(ReportOutput.WHITESPACE).rowNumber(rowNumber).build());
			outputOneValue(	stepInput,
							ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
							new CellProps.Builder(CellProps.WHITESPACE).rowNumber(rowNumber).build());
		}
	}

	/**
	 * displays the headers for data columns of type SecondProcessTotalColumn
	 *
	 * <p>While the row index lies inside the column's position array the distinct
	 * header value for that level is shown; the row just past the positions shows
	 * the total label; deeper rows show whitespace. A null position array marks the
	 * grand-total column (label on row 0, empty below).
	 *
	 * @param secondProcessTotalCol the total column
	 * @param ctMetadata crosstab metadata
	 * @param currHeaderRow the current header row index
	 * @param totalLabel the calculator's label (e.g. "Total")
	 */
	private void displayHeaderForTotalColumn(	StepInput stepInput,
												SecondProcessTotalColumn secondProcessTotalCol,
												CtMetadata ctMetadata,
												int currHeaderRow,
												String totalLabel) {
		int[] position = secondProcessTotalCol.getPosition();
		if(position != null){
			if(currHeaderRow < position.length){
				Object value = ctMetadata.getDistincValueFor(currHeaderRow, position[currHeaderRow]);
				CellProps cellProps = new CellProps.Builder(value).colspan(1).horizAlign(secondProcessTotalCol.getHorizAlign()).rowNumber(currHeaderRow).build();
				//reportOutput.outputHeaderCell(cellProps);
				outputOneValue(	stepInput,
								ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
								cellProps);
			}else{
				//if there's no position for this header row then this is a hard-coded "TOTAL"
				if(currHeaderRow == position.length){
					CellProps cellProps = new CellProps.Builder(totalLabel).horizAlign(HorizAlign.CENTER).rowNumber(currHeaderRow).build();
					//reportOutput.outputHeaderCell(cellProps);
					outputOneValue(	stepInput,
									ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
									cellProps);
				}else{
					//reportOutput.outputDataCell(new CellProps.Builder(ReportOutput.WHITESPACE).rowNumber(currHeaderRow).build());
					outputOneValue(	stepInput,
									ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
									new CellProps.Builder(CellProps.WHITESPACE).rowNumber(currHeaderRow).build());
				}
			}
		}else{
			//the only data column that has null positions is the grand total column
			if(currHeaderRow == 0){
				CellProps cellProps = new CellProps.Builder("Grand "+totalLabel).horizAlign(HorizAlign.LEFT).rowNumber(currHeaderRow).build();
				//reportOutput.outputHeaderCell(cellProps);
				outputOneValue(	stepInput,
								ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
								cellProps);
			}else{
				//reportOutput.outputDataCell(CellProps.EMPTY_CELL);
				outputOneValue(	stepInput,
								ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
								CellProps.buildEmptyCell());
			}
		}
	}

	/**
	 * displays the column header for objects of type SecondProcessDataColumn
	 *
	 * @param secondProcDataColumn the value column
	 * @param ctMetadata crosstab metadata
	 * @param currHeaderRow the current header row index
	 * @param isLastHeaderRow whether this is the last header row (colspan 1 there)
	 *
	 * @return the colspan
	 */
	private int displayDataColumnHeader(StepInput stepInput,
										SecondProcessDataColumn secondProcDataColumn,
										CtMetadata ctMetadata,
										int currHeaderRow,
										boolean isLastHeaderRow ) {
		int colspan = 1;
		if(!isLastHeaderRow){
			//for all rows except the last header row we read the colspan
			// NOTE(review): the colspan is read for the fixed level
			// headerRowsCount-2 regardless of currHeaderRow — confirm this is
			// intended (one might expect a level derived from currHeaderRow).
			colspan = ctMetadata.getColspanForLevel(ctMetadata.getHeaderRowsCount()-2);
		}
		Object value = ctMetadata.getDistincValueFor(currHeaderRow, secondProcDataColumn.getPosition()[currHeaderRow]);
		CellProps cellProps = new CellProps.Builder(value).colspan(colspan).horizAlign(secondProcDataColumn.getHorizAlign()).rowNumber(currHeaderRow).build();
		//reportOutput.outputHeaderCell(cellProps);
		outputOneValue(	stepInput,
						ColumnHeaderOutputInitStep.HEADER_CELL_TEMPLATE,
						cellProps);
		return colspan;
	}
}
package us.kbase.narrativejobservice.test;

import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

import us.kbase.auth.AuthToken;
import us.kbase.catalog.BuildLog;
import us.kbase.catalog.CatalogClient;
import us.kbase.catalog.GetBuildLogParams;
import us.kbase.catalog.RegisterRepoParams;
import us.kbase.catalog.ReleaseReview;
import us.kbase.catalog.SelectOneModuleParams;
import us.kbase.common.service.ServerException;
import us.kbase.common.service.UObject;
import us.kbase.narrativejobservice.CheckJobsParams;
import us.kbase.narrativejobservice.CheckJobsResults;
import us.kbase.narrativejobservice.GetJobLogsParams;
import us.kbase.narrativejobservice.JobState;
import us.kbase.narrativejobservice.JsonRpcError;
import us.kbase.narrativejobservice.LogLine;
import us.kbase.narrativejobservice.NarrativeJobServiceClient;
import us.kbase.narrativejobservice.NarrativeJobServiceServer;
import us.kbase.narrativejobservice.RunJobParams;
import us.kbase.workspace.CreateWorkspaceParams;
import us.kbase.workspace.GetModuleInfoParams;
import us.kbase.workspace.ModuleInfo;
import us.kbase.workspace.RegisterTypespecParams;
import us.kbase.workspace.WorkspaceClient;

/**
 * Manual driver for registering and promoting SDK modules in the KBase catalog
 * and for smoke-testing job submission through the NarrativeJobService.
 * Individual scenarios are enabled by uncommenting the corresponding lines in
 * {@link #main(String[])}; the helper methods below are therefore intentionally
 * kept even when currently unreferenced.
 */
public class CatalogRegForTests {
    private static AuthToken token;
    private static CatalogClient catalogClient;
    private static NarrativeJobServiceClient njsClient;
    private static WorkspaceClient wsCl;

    /**
     * Builds the catalog, NJS and workspace clients from the test configuration,
     * then runs whichever scenarios are uncommented below.
     */
    public static void main(String[] args) throws Exception {
        String catalogUrl = TesterUtils.loadConfig().get(
                NarrativeJobServiceServer.CFG_PROP_CATALOG_SRV_URL);
        token = TesterUtils.token(TesterUtils.props());
        catalogClient = new CatalogClient(new URL(catalogUrl), token);
        String njsUrl = TesterUtils.loadConfig().get(
                NarrativeJobServiceServer.CFG_PROP_SELF_EXTERNAL_URL);
        njsClient = new NarrativeJobServiceClient(new URL(njsUrl), token);
        String wsUrl = TesterUtils.loadConfig().get(
                NarrativeJobServiceServer.CFG_PROP_WORKSPACE_SRV_URL);
        wsCl = new WorkspaceClient(new URL(wsUrl), token);
        //
        //makeUserDeveloper("someone");  // token.getUserName()
        //registerModule("https://github.com/kbaseIncubator/onerepotest");
        //registerModule("https://github.com/kbasetest/njs_sdk_test_1");
        //registerModule("https://github.com/kbasetest/njs_sdk_test_2");
        //registerModule("https://github.com/kbasetest/njs_sdk_test_3");
        //promoteModuleToBetaRelease("njs_sdk_test_2", true, false);
        //submitTestJob();
        //registerModule("https://github.com/kbaseapps/DataPaletteService");
        //startDynamicService("DataPaletteService");
        //registerModule("https://github.com/kbaseapps/DataFileUtil");
        //registerModule("https://github.com/kbaseapps/AssemblyUtil");
        //registerModule("https://github.com/kbaseapps/SetAPI");
        //registerModule("https://github.com/kbaseapps/NarrativeService");
        //registerModule("https://github.com/kbaseapps/kb_ea_utils");
        //promoteModuleToBetaRelease("kb_ea_utils", true, true);
        //registerModule("https://github.com/kbaseapps/ReadsUtils");
        //submitAssemblyJob();
        //promoteModuleToBetaRelease("GenomeFileUtil", true, true);
        //migrateWorkspaceTypesToNext("KBaseReport");
    }

    /**
     * Grants developer rights to the given user.
     * It would work only if test user was added to catalog admins.
     */
    private static void makeUserDeveloper(String userName) throws Exception {
        catalogClient.approveDeveloper(userName);
    }

    /** Registers the HEAD commit of the given git repository in the catalog. */
    private static void registerModule(String gitUrl) throws Exception {
        registerModule(gitUrl, null);
    }

    /**
     * Registers the given git repository (optionally at a specific commit) in
     * the catalog and streams the build log to stdout until the registration
     * reaches a terminal state ("error" or "complete").
     */
    private static void registerModule(String gitUrl, String commit) throws Exception {
        String regId = catalogClient.registerRepo(new RegisterRepoParams().withGitUrl(gitUrl)
                .withGitCommitHash(commit));
        System.out.println("Registration ID for [" + gitUrl + "]: " + regId);
        List<String> logLines = new ArrayList<String>();
        System.out.println("Logs:");
        while (true) {
            BuildLog log = catalogClient.getParsedBuildLog(
                    new GetBuildLogParams().withRegistrationId(regId));
            // only print lines not seen on previous polls
            for (int i = logLines.size(); i < log.getLog().size(); i++) {
                String line = log.getLog().get(i).getContent().trim();
                logLines.add(line);
                System.out.println("[" + (i + 1) + "] " + line);
            }
            String state = log.getRegistration();
            if (state != null && (state.equals("error") || state.equals("complete"))) {
                break;
            }
            Thread.sleep(1000);
        }
    }

    /** Submits a small test job against the "onerepotest" dev module and prints its logs. */
    private static void submitTestJob() throws Exception {
        String moduleName = "onerepotest";
        String methodName = "send_data";
        String serviceVer = "dev";
        RunJobParams params = new RunJobParams().withMethod(
                moduleName + "." + methodName).withServiceVer(serviceVer).withAppId("myapp/foo")
                .withParams(Arrays.asList(UObject.fromJsonString(
                        "{\"genomeA\":\"myws.mygenome1\",\"genomeB\":\"myws.mygenome2\"}")));
        String jobId = njsClient.runJob(params);
        System.out.println("Job with ID=" + jobId + " was scheduled for App " +
                moduleName + "." + methodName);
        showJobLogs(jobId);
    }

    /**
     * Polls the given job (up to 60 times, 5s apart) until it finishes or an
     * error is reported, then prints the final job state and its full log.
     */
    private static void showJobLogs(String jobId) throws Exception {
        JobState ret = null;
        for (int i = 0; i < 60; i++) {
            try {
                CheckJobsResults retAll = njsClient.checkJobs(new CheckJobsParams().withJobIds(
                        Arrays.asList(jobId)).withWithJobParams(1L));
                ret = retAll.getJobStates().get(jobId);
                if (ret == null) {
                    // missing state means the check itself failed for this job
                    JsonRpcError error = retAll.getCheckError().get(jobId);
                    System.out.println("Error: " + error);
                    break;
                }
                System.out.println("Job finished: " + ret.getFinished() +
                        ", state=" + UObject.getMapper().writeValueAsString(ret));
                if (ret.getFinished() != null && ret.getFinished() == 1L) {
                    break;
                }
                Thread.sleep(5000);
            } catch (ServerException ex) {
                // surface server-side details before propagating
                System.out.println(ex.getData());
                throw ex;
            }
        }
        System.out.println("Job state: " + UObject.getMapper().writeValueAsString(ret));
        System.out.println("------------------------------------------------");
        System.out.println("Logs:");
        List<LogLine> lines = njsClient.getJobLogs(new GetJobLogsParams().withJobId(jobId)
                .withSkipLines(0L)).getLines();
        for (LogLine line : lines) {
            String lineText = line.getLine();
            System.out.println("LOG: " + lineText);
        }
    }

    /**
     * Submits an AssemblyUtil job that loads an E. coli reference assembly from
     * NCBI FTP into a test workspace (created best-effort) and prints its logs.
     */
    private static void submitAssemblyJob() throws Exception {
        String testWorkspace = "test_workspace";
        try {
            wsCl.createWorkspace(new CreateWorkspaceParams().withWorkspace(testWorkspace));
        } catch (Exception ex) {
            // workspace may already exist; proceed with the job either way
            System.out.println("Error creating workspace: " + ex.getMessage());
        }
        String moduleName = "AssemblyUtil";
        String methodName = "save_assembly_from_fasta";
        String serviceVer = "dev";
        RunJobParams params = new RunJobParams().withMethod(
                moduleName + "." + methodName).withServiceVer(serviceVer).withAppId("myapp/foo")
                .withParams(Arrays.asList(UObject.fromJsonString(
                        "{\"workspace_name\":\"" + testWorkspace + "\"," +
                        "\"assembly_name\":\"Assembly.1\"," +
                        "\"ftp_url\":\"ftp://ftp.ncbi.nlm.nih.gov/genomes/genbank/bacteria/Escherichia_coli/reference/GCA_000005845.2_ASM584v2/GCA_000005845.2_ASM584v2_genomic.fna.gz\"}")));
        String jobId = njsClient.runJob(params);
        System.out.println("Job with ID=" + jobId + " was scheduled for App " +
                moduleName + "." + methodName);
        showJobLogs(jobId);
    }

    /** Prints module info, its version timestamp and all type info for a workspace module. */
    private static void showWorkspaceTypes(String moduleName) throws Exception {
        ModuleInfo mi = wsCl.getModuleInfo(new GetModuleInfoParams().withMod(moduleName));
        System.out.println(mi);
        System.out.println("\nDate: " + new Date(mi.getVer()));
        System.out.println(wsCl.getAllTypeInfo(moduleName));
    }

    /**
     * Pushes the module's dev version to beta and/or requests + approves a release.
     * Release approval only works when the test user is a catalog admin.
     */
    private static void promoteModuleToBetaRelease(String moduleName,
            boolean beta, boolean release) throws Exception {
        if (beta) {
            catalogClient.pushDevToBeta(new SelectOneModuleParams().withModuleName(moduleName));
        }
        if (release) {
            catalogClient.requestRelease(new SelectOneModuleParams().withModuleName(moduleName));
            catalogClient.reviewReleaseRequest(new ReleaseReview().withModuleName(moduleName)
                    .withDecision("approved"));
        }
    }

    /**
     * Copies a module's typespec (plus any extra types) from the configured
     * workspace to the "next" environment and releases it there.
     * NOTE(review): the "<next-user>"/"<next-password>" placeholders must be
     * replaced with real credentials before running.
     */
    private static void migrateWorkspaceTypesToNext(String moduleName,
            String... types) throws Exception {
        ModuleInfo orig = wsCl.getModuleInfo(new GetModuleInfoParams().withMod(moduleName));
        String spec = orig.getSpec();
        List<String> newTypes = new ArrayList<String>(Arrays.asList(types));
        System.out.println("Adding types: " + newTypes);
        WorkspaceClient wsNext = new WorkspaceClient(
                new URL("https://next.kbase.us/services/ws"), "<next-user>", "<next-password>");
        wsNext.setIsInsecureHttpConnectionAllowed(true);
        wsNext.setAllSSLCertificatesTrusted(true);
        System.out.println("Registered types: " + wsNext.registerTypespec(
                new RegisterTypespecParams().withSpec(spec)
                        .withDryrun(0L).withNewTypes(newTypes)));
        wsNext.releaseModule(moduleName);
        ModuleInfo mi = wsNext.getModuleInfo(new GetModuleInfoParams().withMod(moduleName));
        System.out.println(mi);
        System.out.println("\nDate: " + new Date(mi.getVer()));
        System.out.println(wsNext.getAllTypeInfo(moduleName));
    }
}
/*
 * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.siddhi.core.aggregation;

import org.wso2.siddhi.core.config.SiddhiAppContext;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.state.MetaStateEvent;
import org.wso2.siddhi.core.event.state.StateEvent;
import org.wso2.siddhi.core.event.stream.MetaStreamEvent;
import org.wso2.siddhi.core.event.stream.StreamEvent;
import org.wso2.siddhi.core.exception.SiddhiAppCreationException;
import org.wso2.siddhi.core.executor.ExpressionExecutor;
import org.wso2.siddhi.core.executor.VariableExpressionExecutor;
import org.wso2.siddhi.core.query.input.stream.single.SingleStreamRuntime;
import org.wso2.siddhi.core.query.selector.GroupByKeyGenerator;
import org.wso2.siddhi.core.table.Table;
import org.wso2.siddhi.core.util.collection.operator.CompiledCondition;
import org.wso2.siddhi.core.util.collection.operator.IncrementalAggregateCompileCondition;
import org.wso2.siddhi.core.util.collection.operator.MatchingMetaInfoHolder;
import org.wso2.siddhi.core.util.parser.ExpressionParser;
import org.wso2.siddhi.core.util.parser.OperatorParser;
import org.wso2.siddhi.core.util.snapshot.SnapshotService;
import org.wso2.siddhi.core.util.statistics.LatencyTracker;
import org.wso2.siddhi.core.util.statistics.MemoryCalculable;
import org.wso2.siddhi.core.util.statistics.ThroughputTracker;
import org.wso2.siddhi.query.api.aggregation.TimePeriod;
import org.wso2.siddhi.query.api.aggregation.Within;
import org.wso2.siddhi.query.api.definition.AbstractDefinition;
import org.wso2.siddhi.query.api.definition.AggregationDefinition;
import org.wso2.siddhi.query.api.definition.Attribute;
import org.wso2.siddhi.query.api.definition.StreamDefinition;
import org.wso2.siddhi.query.api.expression.AttributeFunction;
import org.wso2.siddhi.query.api.expression.Expression;
import org.wso2.siddhi.query.api.expression.condition.Compare;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.wso2.siddhi.core.util.SiddhiConstants.UNKNOWN_STATE;

/**
 * Aggregation runtime managing aggregation operations for aggregation definition.
 */
public class AggregationRuntime implements MemoryCalculable {
    private final AggregationDefinition aggregationDefinition;
    // one incremental executor per granularity (e.g. SECONDS, MINUTES, ...)
    private final Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap;
    // one persistence table per granularity
    private final Map<TimePeriod.Duration, Table> aggregationTables;
    private final SiddhiAppContext siddhiAppContext;
    private final MetaStreamEvent tableMetaStreamEvent;
    private final MetaStreamEvent aggregateMetaSteamEvent;
    private final LatencyTracker latencyTrackerFind;
    private final ThroughputTracker throughputTrackerFind;
    private final List<List<ExpressionExecutor>> aggregateProcessingExecutorsList;
    private final List<GroupByKeyGenerator> groupByKeyGeneratorList;
    private List<TimePeriod.Duration> incrementalDurations;
    private SingleStreamRuntime singleStreamRuntime;
    private List<ExpressionExecutor> baseExecutors;
    private List<ExpressionExecutor> outputExpressionExecutors;
    private RecreateInMemoryData recreateInMemoryData;
    private boolean processingOnExternalTime;
    private boolean isFirstEventArrived;
    // timestamp of the last in-memory state refresh done from find(); -1 = never
    private long lastExecutorsRefreshedTime = -1;
    private IncrementalDataPurging incrementalDataPurging;
    private ExpressionExecutor shouldUpdateExpressionExecutor;

    /**
     * Wires together all the pre-built components of an incremental aggregation:
     * per-duration executors and tables, executors used for base/output values,
     * group-by key generators, purging, and statistics trackers. Also derives
     * the aggregate meta stream event from the aggregation definition's
     * attribute list.
     */
    public AggregationRuntime(AggregationDefinition aggregationDefinition,
                              Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
                              Map<TimePeriod.Duration, Table> aggregationTables,
                              SingleStreamRuntime singleStreamRuntime,
                              List<TimePeriod.Duration> incrementalDurations,
                              SiddhiAppContext siddhiAppContext,
                              List<ExpressionExecutor> baseExecutors,
                              MetaStreamEvent tableMetaStreamEvent,
                              List<ExpressionExecutor> outputExpressionExecutors,
                              LatencyTracker latencyTrackerFind, ThroughputTracker throughputTrackerFind,
                              RecreateInMemoryData recreateInMemoryData, boolean processingOnExternalTime,
                              List<List<ExpressionExecutor>> aggregateProcessingExecutorsList,
                              List<GroupByKeyGenerator> groupByKeyGeneratorList,
                              IncrementalDataPurging incrementalDataPurging,
                              ExpressionExecutor shouldUpdateExpressionExecutor) {
        this.aggregationDefinition = aggregationDefinition;
        this.incrementalExecutorMap = incrementalExecutorMap;
        this.aggregationTables = aggregationTables;
        this.incrementalDurations = incrementalDurations;
        this.siddhiAppContext = siddhiAppContext;
        this.singleStreamRuntime = singleStreamRuntime;
        this.baseExecutors = baseExecutors;
        this.tableMetaStreamEvent = tableMetaStreamEvent;
        this.outputExpressionExecutors = outputExpressionExecutors;
        this.latencyTrackerFind = latencyTrackerFind;
        this.throughputTrackerFind = throughputTrackerFind;
        this.recreateInMemoryData = recreateInMemoryData;
        this.processingOnExternalTime = processingOnExternalTime;
        this.aggregateProcessingExecutorsList = aggregateProcessingExecutorsList;
        this.groupByKeyGeneratorList = groupByKeyGeneratorList;
        this.incrementalDataPurging = incrementalDataPurging;
        this.shouldUpdateExpressionExecutor = shouldUpdateExpressionExecutor;

        aggregateMetaSteamEvent = new MetaStreamEvent();
        aggregationDefinition.getAttributeList().forEach(aggregateMetaSteamEvent::addOutputData);
    }

    // Attaches the given definition to the meta stream event and registers
    // every attribute of the definition as event data.
    private static void initMetaStreamEvent(MetaStreamEvent metaStreamEvent, AbstractDefinition inputDefinition) {
        metaStreamEvent.addInputDefinition(inputDefinition);
        metaStreamEvent.initializeAfterWindowData();
        inputDefinition.getAttributeList().forEach(metaStreamEvent::addData);
    }

    // Copies all attributes (name + type) from one stream definition to another.
    private static void cloneStreamDefinition(StreamDefinition originalStreamDefinition,
                                              StreamDefinition newStreamDefinition) {
        for (Attribute attribute : originalStreamDefinition.getAttributeList()) {
            newStreamDefinition.attribute(attribute.getName(), attribute.getType());
        }
    }

    /**
     * Builds a meta stream event extended with the additional attributes
     * (the _START/_END range columns). For a store query (single meta stream
     * event) a fresh meta event is created; for a join, the matching side's
     * meta event is reused and its definition cloned and extended.
     * NOTE: for the join case this mutates the MatchingMetaInfoHolder's
     * existing meta stream event in place (see caller's comment).
     */
    private static MetaStreamEvent createNewMetaStreamEventWithStartEnd(MatchingMetaInfoHolder matchingMetaInfoHolder,
                                                                        List<Attribute> additionalAttributes) {
        MetaStreamEvent metaStreamEventWithStartEnd;
        StreamDefinition streamDefinitionWithStartEnd = new StreamDefinition();

        if (matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1) {
            metaStreamEventWithStartEnd = new MetaStreamEvent();
        } else {
            metaStreamEventWithStartEnd = matchingMetaInfoHolder.getMetaStateEvent()
                    .getMetaStreamEvent(matchingMetaInfoHolder.getMatchingStreamEventIndex());
            cloneStreamDefinition((StreamDefinition) metaStreamEventWithStartEnd.getLastInputDefinition(),
                    streamDefinitionWithStartEnd);
        }
        streamDefinitionWithStartEnd.attribute(additionalAttributes.get(0).getName(),
                additionalAttributes.get(0).getType());
        streamDefinitionWithStartEnd.attribute(additionalAttributes.get(1).getName(),
                additionalAttributes.get(1).getType());
        initMetaStreamEvent(metaStreamEventWithStartEnd, streamDefinitionWithStartEnd);
        return metaStreamEventWithStartEnd;
    }

    /**
     * For store queries: wraps the start/end-extended meta event and the
     * original incoming meta event into a two-event MetaStateEvent so both
     * sides are addressable during condition compilation.
     */
    private static MatchingMetaInfoHolder alterMetaInfoHolderForStoreQuery(
            MetaStreamEvent newMetaStreamEventWithStartEnd, MatchingMetaInfoHolder matchingMetaInfoHolder) {
        MetaStateEvent metaStateEvent = new MetaStateEvent(2);
        MetaStreamEvent incomingMetaStreamEvent = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(0);
        metaStateEvent.addEvent(newMetaStreamEventWithStartEnd);
        metaStateEvent.addEvent(incomingMetaStreamEvent);

        return new MatchingMetaInfoHolder(metaStateEvent, 0, 1,
                newMetaStreamEventWithStartEnd.getLastInputDefinition(),
                incomingMetaStreamEvent.getLastInputDefinition(), UNKNOWN_STATE);
    }

    /**
     * Builds a MatchingMetaInfoHolder pairing the start/end-extended stream
     * meta event (index 0) with a TABLE-typed meta event for the aggregation
     * table definition (index 1); used to compile the "within" conditions.
     */
    private static MatchingMetaInfoHolder createNewStreamTableMetaInfoHolder(
            MetaStreamEvent metaStreamEventWithStartEnd, AbstractDefinition tableDefinition) {
        MetaStateEvent metaStateEvent = new MetaStateEvent(2);
        MetaStreamEvent metaStreamEventForTable = new MetaStreamEvent();
        metaStreamEventForTable.setEventType(MetaStreamEvent.EventType.TABLE);
        initMetaStreamEvent(metaStreamEventForTable, tableDefinition);
        metaStateEvent.addEvent(metaStreamEventWithStartEnd);
        metaStateEvent.addEvent(metaStreamEventForTable);

        return new MatchingMetaInfoHolder(metaStateEvent, 0, 1,
                metaStreamEventWithStartEnd.getLastInputDefinition(), tableDefinition, UNKNOWN_STATE);
    }

    public AggregationDefinition getAggregationDefinition() {
        return aggregationDefinition;
    }

    public SingleStreamRuntime getSingleStreamRuntime() {
        return singleStreamRuntime;
    }

    /**
     * Executes a compiled "retrieve aggregates" condition against the
     * per-duration tables and in-memory executors. Snapshotting is suppressed
     * for the duration of the call (ThreadLocal set/cleared in try/finally).
     * If no event has arrived yet, the in-memory state is lazily rebuilt from
     * the tables, throttled to at most once per second.
     */
    public StreamEvent find(StateEvent matchingEvent, CompiledCondition compiledCondition) {
        try {
            SnapshotService.getSkipSnapshotableThreadLocal().set(true);
            if (latencyTrackerFind != null && siddhiAppContext.isStatsEnabled()) {
                latencyTrackerFind.markIn();
                throughputTrackerFind.eventIn();
            }
            if (!isFirstEventArrived && (
                    lastExecutorsRefreshedTime == -1
                            || System.currentTimeMillis() - lastExecutorsRefreshedTime > 1000)) {
                recreateInMemoryData(false);
                lastExecutorsRefreshedTime = System.currentTimeMillis();
            }
            return ((IncrementalAggregateCompileCondition) compiledCondition).find(matchingEvent,
                    aggregationDefinition, incrementalExecutorMap, aggregationTables, incrementalDurations,
                    baseExecutors, outputExpressionExecutors, siddhiAppContext,
                    aggregateProcessingExecutorsList, groupByKeyGeneratorList, shouldUpdateExpressionExecutor);
        } finally {
            SnapshotService.getSkipSnapshotableThreadLocal().set(null);
            if (latencyTrackerFind != null && siddhiAppContext.isStatsEnabled()) {
                latencyTrackerFind.markOut();
            }
        }
    }

    /**
     * Compiles the join/store-query condition for reading aggregated data:
     * builds the per-table and in-memory "within" range conditions (based on
     * _START/_END and the AGG_TIMESTAMP or AGG_EVENT_TIMESTAMP column), the
     * `per` granularity executor, the `within` start/end-time executor, and the
     * final `on` condition, and bundles them into an
     * IncrementalAggregateCompileCondition. Both `per` and `within` are
     * mandatory and missing either raises SiddhiAppCreationException.
     */
    public CompiledCondition compileExpression(Expression expression, Within within, Expression per,
                                               MatchingMetaInfoHolder matchingMetaInfoHolder,
                                               List<VariableExpressionExecutor> variableExpressionExecutors,
                                               Map<String, Table> tableMap, String queryName,
                                               SiddhiAppContext siddhiAppContext) {
        Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
        CompiledCondition withinInMemoryCompileCondition;
        CompiledCondition onCompiledCondition;
        List<Attribute> additionalAttributes = new ArrayList<>();

        // Define additional attribute list
        additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
        additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));

        // Get table definition. Table definitions for all the tables used to persist aggregates are similar.
        // Therefore it's enough to get the definition from one table.
        AbstractDefinition tableDefinition = ((Table) aggregationTables.values().toArray()[0]).getTableDefinition();

        // Alter existing meta stream event or create new one if a meta stream doesn't exist
        // After calling this method the original MatchingMetaInfoHolder's meta stream event would be altered
        MetaStreamEvent newMetaStreamEventWithStartEnd = createNewMetaStreamEventWithStartEnd(matchingMetaInfoHolder,
                additionalAttributes);
        MatchingMetaInfoHolder alteredMatchingMetaInfoHolder = null;

        // Alter meta info holder to contain stream event and aggregate both when it's a store query
        if (matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1) {
            matchingMetaInfoHolder = alterMetaInfoHolderForStoreQuery(newMetaStreamEventWithStartEnd,
                    matchingMetaInfoHolder);
            alteredMatchingMetaInfoHolder = matchingMetaInfoHolder;
        }

        // Create new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and table meta event
        MatchingMetaInfoHolder streamTableMetaInfoHolderWithStartEnd = createNewStreamTableMetaInfoHolder(
                newMetaStreamEventWithStartEnd, tableDefinition);

        // Create per expression executor
        ExpressionExecutor perExpressionExecutor;
        if (per != null) {
            perExpressionExecutor = ExpressionParser.parseExpression(per, matchingMetaInfoHolder.getMetaStateEvent(),
                    matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors,
                    siddhiAppContext, false, 0, queryName);
            if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
                throw new SiddhiAppCreationException(
                        "Query " + queryName + "'s per value expected a string but found "
                                + perExpressionExecutor.getReturnType(),
                        per.getQueryContextStartIndex(), per.getQueryContextEndIndex());
            }
        } else {
            throw new SiddhiAppCreationException("Syntax Error: Aggregation join query must contain a `per` " +
                    "definition for granularity");
        }

        // Create within expression
        Expression timeFilterExpression;
        if (processingOnExternalTime) {
            timeFilterExpression = Expression.variable("AGG_EVENT_TIMESTAMP");
        } else {
            timeFilterExpression = Expression.variable("AGG_TIMESTAMP");
        }
        Expression withinExpression;
        Expression start = Expression.variable(additionalAttributes.get(0).getName());
        Expression end = Expression.variable(additionalAttributes.get(1).getName());
        // half-open range check: _START <= ts < _END
        Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL,
                timeFilterExpression);
        Expression compareWithEndTime = Compare.compare(timeFilterExpression, Compare.Operator.LESS_THAN, end);
        withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);

        // Create start and end time expression
        Expression startEndTimeExpression;
        ExpressionExecutor startTimeEndTimeExpressionExecutor;
        if (within != null) {
            if (within.getTimeRange().size() == 1) {
                startEndTimeExpression = new AttributeFunction("incrementalAggregator",
                        "startTimeEndTime", within.getTimeRange().get(0));
            } else { // within.getTimeRange().size() == 2
                startEndTimeExpression = new AttributeFunction("incrementalAggregator",
                        "startTimeEndTime", within.getTimeRange().get(0), within.getTimeRange().get(1));
            }
            startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression,
                    matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
                    variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
        } else {
            throw new SiddhiAppCreationException("Syntax Error : Aggregation read query must contain a `within` " +
                    "definition for filtering of aggregation data.");
        }

        // Create compile condition per each table used to persist aggregates.
        // These compile conditions are used to check whether the aggregates in tables are within the given duration.
        for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
            CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpression,
                    streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors, tableMap,
                    queryName);
            withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
        }

        // Create compile condition for in-memory data.
        // This compile condition is used to check whether the running aggregates (in-memory data)
        // are within given duration
        withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true),
                withinExpression, streamTableMetaInfoHolderWithStartEnd, siddhiAppContext,
                variableExpressionExecutors, tableMap, queryName);

        // On compile condition.
        // After finding all the aggregates belonging to within duration, the final on condition (given as
        // "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
        // This condition is used for that purpose.
        onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), expression,
                matchingMetaInfoHolder, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);

        return new IncrementalAggregateCompileCondition(withinTableCompiledConditions,
                withinInMemoryCompileCondition, onCompiledCondition, tableMetaStreamEvent, aggregateMetaSteamEvent,
                additionalAttributes, alteredMatchingMetaInfoHolder, perExpressionExecutor,
                startTimeEndTimeExpressionExecutor, processingOnExternalTime);
    }

    /** Triggers purging of expired incremental aggregation data. */
    public void startPurging() {
        incrementalDataPurging.executeIncrementalDataPurging();
    }

    /**
     * Rebuilds the in-memory aggregation state from the persisted tables.
     * When called with true (a real event arrived), each incremental executor
     * is first switched to processing mode and the arrival is remembered so
     * find() stops re-triggering refreshes.
     */
    public void recreateInMemoryData(boolean isEventArrived) {
        isFirstEventArrived = isEventArrived;
        if (isEventArrived) {
            for (Map.Entry<TimePeriod.Duration, IncrementalExecutor> durationIncrementalExecutorEntry :
                    this.incrementalExecutorMap.entrySet()) {
                durationIncrementalExecutorEntry.getValue().setProcessingExecutor(isEventArrived);
            }
        }
        recreateInMemoryData.recreateInMemoryData();
    }

    /** Feeds incoming events into the executor of the finest (first) granularity. */
    public void processEvents(ComplexEventChunk<StreamEvent> streamEventComplexEventChunk) {
        incrementalExecutorMap.get(incrementalDurations.get(0)).execute(streamEventComplexEventChunk);
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.lexer;

import com.intellij.codeInsight.completion.CompletionUtilCore;
import com.intellij.lang.HtmlScriptContentProvider;
import com.intellij.lang.Language;
import com.intellij.lang.LanguageHtmlScriptContentProvider;
import com.intellij.lang.html.HTMLLanguage;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.impl.source.tree.TreeUtil;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.xml.XmlTokenType;
import com.intellij.util.text.CharArrayUtil;
import com.intellij.xml.util.documentation.HtmlDescriptorsTable;
import consulo.lang.LanguageVersion;
import consulo.lang.util.LanguageVersionUtil;
import org.jetbrains.annotations.NonNls;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.HashMap;

/**
 * Base lexer for HTML-like languages that tracks, on top of a delegate XML lexer,
 * whether the lexer is currently inside a {@code <script>}/{@code <style>} tag (or an
 * event-handler / style attribute), so that embedded-language content can be lexed
 * separately.
 * <p>
 * The embedding state is packed into the high bits of {@link #getState()} (the
 * {@code SEEN_*} flags) while the delegate's own state occupies the low
 * {@link #BASE_STATE_MASK} bits, which keeps incremental relexing restartable.
 * <p>
 * FIX(review): the two tag/attribute-name folds below previously used the
 * locale-sensitive {@link String#toLowerCase()}; under e.g. a Turkish default locale
 * {@code "SCRIPT".toLowerCase()} does not equal {@code "script"}. They now use
 * {@code Locale.US}, consistent with {@link XmlAttributeValueHandler} in this class.
 *
 * @author Maxim.Mossienko
 */
public abstract class BaseHtmlLexer extends DelegateLexer {
  /** Language embedded in {@code <script>} tags by default; may be absent in the platform. */
  @Nullable
  public static final Language ourDefaultLanguage = Language.findLanguageByID("JavaScript");
  /** Language embedded in {@code <style>} tags by default; may be absent in the platform. */
  @Nullable
  public static final Language ourDefaultStyleLanguage = Language.findLanguageByID("CSS");

  /** Low bits reserved for the delegate lexer's own state. */
  protected static final int BASE_STATE_MASK = 0x3F;
  private static final int SEEN_STYLE = 0x40;
  private static final int SEEN_TAG = 0x80;
  private static final int SEEN_SCRIPT = 0x100;
  private static final int SEEN_ATTRIBUTE = 0x200;
  private static final int SEEN_CONTENT_TYPE = 0x400;
  private static final int SEEN_STYLESHEET_TYPE = 0x800;
  /** Number of bits used by the flags above; subclasses may shift their own state past this. */
  protected static final int BASE_STATE_SHIFT = 11;

  // Embedding state flags, mirrored into getState() so lexing is restartable.
  protected boolean seenTag;
  protected boolean seenAttribute;
  protected boolean seenStyle;
  protected boolean seenScript;
  /** Value of the last seen script {@code type=}/{@code language=} attribute, lowercased if case-insensitive. */
  @Nullable
  protected String scriptType = null;
  /** Value of the last seen style {@code type=} attribute, lowercased if case-insensitive. */
  @Nullable
  protected String styleType = null;
  private final boolean caseInsensitive;
  protected boolean seenContentType;
  protected boolean seenStylesheetType;

  // Cache of the delegate's buffer to avoid repeated getBufferSequence() calls per token.
  private CharSequence cachedBufferSequence;
  private Lexer lexerOfCacheBufferSequence;

  /** Token types that may be merged into one token by merging lexers built on top of this one. */
  static final TokenSet TOKENS_TO_MERGE = TokenSet.create(XmlTokenType.XML_COMMENT_CHARACTERS,
                                                          XmlTokenType.XML_WHITE_SPACE,
                                                          XmlTokenType.XML_REAL_WHITE_SPACE,
                                                          XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN,
                                                          XmlTokenType.XML_DATA_CHARACTERS,
                                                          XmlTokenType.XML_TAG_CHARACTERS);

  /** Callback invoked when the delegate produces a token of a registered type. */
  public interface TokenHandler {
    void handleElement(Lexer lexer);
  }

  /**
   * Reacts to XML name tokens: recognizes {@code script}/{@code style} tag names,
   * {@code on*} event-handler attributes and the {@code type}/{@code language}
   * attributes that select the embedded language.
   */
  public class XmlNameHandler implements TokenHandler {
    @NonNls
    private static final String TOKEN_SCRIPT = "script";
    @NonNls
    private static final String TOKEN_STYLE = "style";
    @NonNls
    private static final String TOKEN_ON = "on";

    @Override
    public void handleElement(Lexer lexer) {
      final CharSequence buffer;
      if (lexerOfCacheBufferSequence == lexer) {
        buffer = cachedBufferSequence;
      }
      else {
        cachedBufferSequence = lexer.getBufferSequence();
        buffer = cachedBufferSequence;
        lexerOfCacheBufferSequence = lexer;
      }
      final char firstCh = buffer.charAt(lexer.getTokenStart());

      // Inside a <script ...> start tag: watch for type= / language= attributes.
      if (seenScript && !seenTag) {
        seenContentType = false;
        if (((firstCh == 'l' || firstCh == 't') || (caseInsensitive && (firstCh == 'L' || firstCh == 'T')))) {
          @NonNls String name = TreeUtil.getTokenText(lexer);
          seenContentType = Comparing.strEqual("language", name, !caseInsensitive) || Comparing.strEqual("type", name, !caseInsensitive);
          return;
        }
      }
      // Inside a <style ...> start tag: watch for the type= attribute.
      if (seenStyle && !seenTag) {
        seenStylesheetType = false;
        if (firstCh == 't' || caseInsensitive && firstCh == 'T') {
          seenStylesheetType = Comparing.strEqual(TreeUtil.getTokenText(lexer), "type", !caseInsensitive);
          return;
        }
      }

      if (firstCh != 'o' && firstCh != 's' && (!caseInsensitive || (firstCh != 'S' && firstCh != 'O'))) {
        return; // optimization
      }

      String name = TreeUtil.getTokenText(lexer);
      if (caseInsensitive) {
        // Locale.US: tag/attribute names are ASCII; default-locale folding breaks under e.g. tr_TR.
        name = name.toLowerCase(Locale.US);
      }

      final boolean style = name.equals(TOKEN_STYLE);
      final int state = getState() & BASE_STATE_MASK;
      // "script" tag name, or an on* event-handler attribute known to the HTML descriptor table
      // (only when not in a tag-name position, and not a namespaced name like "xml:lang").
      final boolean script = name.equals(TOKEN_SCRIPT) ||
          ((name.startsWith(TOKEN_ON) && name.indexOf(':') == -1 && !isHtmlTagState(state) && HtmlDescriptorsTable.getAttributeDescriptor(name) != null));

      if (style || script) {
        // encountered tag name in end of tag
        if (seenTag) {
          if (isHtmlTagState(state)) {
            seenTag = false;
          }
          return;
        }
        seenStyle = style;
        seenScript = script;
        if (!isHtmlTagState(state)) {
          // Name occurred in attribute position -> embedded content is an attribute value.
          seenAttribute = true;
        }
      }
    }
  }

  /** Clears attribute-embedding state when an attribute value's closing quote is reached. */
  class XmlAttributeValueEndHandler implements TokenHandler {
    @Override
    public void handleElement(Lexer lexer) {
      if (seenAttribute) {
        seenStyle = false;
        seenScript = false;
        seenAttribute = false;
      }
      seenContentType = false;
      seenStylesheetType = false;
    }
  }

  /** Captures the value of a pending {@code type=}/{@code language=} attribute. */
  class XmlAttributeValueHandler implements TokenHandler {
    @Override
    public void handleElement(Lexer lexer) {
      if (seenContentType && seenScript && !seenAttribute) {
        @NonNls String mimeType = TreeUtil.getTokenText(lexer);
        scriptType = caseInsensitive ? mimeType.toLowerCase(Locale.US) : mimeType;
      }
      if (seenStylesheetType && seenStyle && !seenAttribute) {
        @NonNls String type = TreeUtil.getTokenText(lexer).trim();
        styleType = caseInsensitive ? type.toLowerCase(Locale.US) : type;
      }
    }
  }

  /**
   * @return the language registered for the current script MIME type, or {@code null}
   * when no type was seen or no language matches it.
   */
  @Nullable
  protected Language getScriptLanguage() {
    String mimeType = scriptType != null ? scriptType.trim() : null;
    if (mimeType == null) {
      return null;
    }
    Collection<Language> instancesByMimeType = Language.findInstancesByMimeType(mimeType);
    return instancesByMimeType.isEmpty() ? null : instancesByMimeType.iterator().next();
  }

  /**
   * Resolves the stylesheet language version for the current {@code style type=} value,
   * falling back to the default version of the default style language.
   */
  @Nullable
  protected LanguageVersion getStyleLanguageVersion() {
    if (ourDefaultStyleLanguage != null && styleType != null && !"text/css".equals(styleType)) {
      for (LanguageVersion languageVersion : ourDefaultStyleLanguage.getVersions()) {
        for (String mimeType : languageVersion.getMimeTypes()) {
          if (styleType.equals(mimeType)) {
            return languageVersion;
          }
        }
      }
    }
    //noinspection RequiredXAction
    return ourDefaultStyleLanguage == null ? null : LanguageVersionUtil.findDefaultVersion(ourDefaultStyleLanguage);
  }

  /** @return element type that should wrap the current script body, or {@code null}. */
  @Nullable
  protected IElementType getCurrentScriptElementType() {
    HtmlScriptContentProvider scriptContentProvider = findScriptContentProvider(scriptType);
    return scriptContentProvider == null ? null : scriptContentProvider.getScriptElementType();
  }

  /** @return element type that should wrap the current stylesheet body, or {@code null}. */
  @Nullable
  protected IElementType getCurrentStylesheetElementType() {
    LanguageVersion languageVersion = getStyleLanguageVersion();
    if (languageVersion != null) {
      for (EmbeddedTokenTypesProvider provider : EmbeddedTokenTypesProvider.EXTENSION_POINT_NAME.getExtensions()) {
        if (provider.isMyVersion(languageVersion)) {
          return provider.getElementType();
        }
      }
    }
    return null;
  }

  /**
   * Finds a script content provider for the given MIME type. A blank type falls back to
   * the default script language; unknown "...template..." types fall back to HTML.
   */
  @Nullable
  protected HtmlScriptContentProvider findScriptContentProvider(@Nullable String mimeType) {
    if (StringUtil.isEmpty(mimeType)) {
      return ourDefaultLanguage != null ? LanguageHtmlScriptContentProvider.getScriptContentProvider(ourDefaultLanguage) : null;
    }
    Collection<Language> instancesByMimeType = Language.findInstancesByMimeType(mimeType.trim());
    if (instancesByMimeType.isEmpty() && mimeType.contains("template")) {
      instancesByMimeType = Collections.singletonList(HTMLLanguage.INSTANCE);
    }
    for (Language language : instancesByMimeType) {
      HtmlScriptContentProvider scriptContentProvider = LanguageHtmlScriptContentProvider.getScriptContentProvider(language);
      if (scriptContentProvider != null) {
        return scriptContentProvider;
      }
    }
    return null;
  }

  /** On {@code >}: either drop attribute-embedding state or mark that we are now inside the tag body. */
  class XmlTagClosedHandler implements TokenHandler {
    @Override
    public void handleElement(Lexer lexer) {
      if (seenAttribute) {
        seenScript = false;
        seenStyle = false;
        seenAttribute = false;
      }
      else {
        if (seenStyle || seenScript) {
          seenTag = true;
        }
      }
    }
  }

  /** On {@code </} or {@code />}: reset all embedding state. */
  class XmlTagEndHandler implements TokenHandler {
    @Override
    public void handleElement(Lexer lexer) {
      seenStyle = false;
      seenScript = false;
      seenAttribute = false;
      seenContentType = false;
      seenStylesheetType = false;
      scriptType = null;
      styleType = null;
    }
  }

  private final HashMap<IElementType, TokenHandler> tokenHandlers = new HashMap<>();

  protected BaseHtmlLexer(Lexer _baseLexer, boolean _caseInsensitive) {
    super(_baseLexer);
    caseInsensitive = _caseInsensitive;

    XmlNameHandler value = new XmlNameHandler();
    tokenHandlers.put(XmlTokenType.XML_NAME, value);
    tokenHandlers.put(XmlTokenType.XML_TAG_NAME, value);
    tokenHandlers.put(XmlTokenType.XML_TAG_END, new XmlTagClosedHandler());
    tokenHandlers.put(XmlTokenType.XML_END_TAG_START, new XmlTagEndHandler());
    tokenHandlers.put(XmlTokenType.XML_EMPTY_ELEMENT_END, new XmlTagEndHandler());
    tokenHandlers.put(XmlTokenType.XML_ATTRIBUTE_VALUE_END_DELIMITER, new XmlAttributeValueEndHandler());
    tokenHandlers.put(XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN, new XmlAttributeValueHandler());
  }

  /**
   * Registers {@code value} for {@code elementType}; if a handler is already registered,
   * both are invoked in registration order.
   */
  protected void registerHandler(IElementType elementType, TokenHandler value) {
    final TokenHandler tokenHandler = tokenHandlers.get(elementType);

    if (tokenHandler != null) {
      final TokenHandler newHandler = value;
      value = new TokenHandler() {
        @Override
        public void handleElement(final Lexer lexer) {
          tokenHandler.handleElement(lexer);
          newHandler.handleElement(lexer);
        }
      };
    }

    tokenHandlers.put(elementType, value);
  }

  @Override
  public void start(@Nonnull final CharSequence buffer, final int startOffset, final int endOffset, final int initialState) {
    // Unpack our flags from the high bits, hand only the low bits to the delegate.
    initState(initialState);
    super.start(buffer, startOffset, endOffset, initialState & BASE_STATE_MASK);
  }

  private void initState(final int initialState) {
    seenScript = (initialState & SEEN_SCRIPT) != 0;
    seenStyle = (initialState & SEEN_STYLE) != 0;
    seenTag = (initialState & SEEN_TAG) != 0;
    seenAttribute = (initialState & SEEN_ATTRIBUTE) != 0;
    seenContentType = (initialState & SEEN_CONTENT_TYPE) != 0;
    seenStylesheetType = (initialState & SEEN_STYLESHEET_TYPE) != 0;
    lexerOfCacheBufferSequence = null;
    cachedBufferSequence = null;
  }

  /**
   * Advances the delegate past the current embedded region (script/style tag body or
   * attribute value) and returns the end offset of the embedment. The delegate is
   * restarted at the last safe position so the caller can continue lexing from there.
   */
  protected int skipToTheEndOfTheEmbeddment() {
    Lexer base = getDelegate();
    int tokenEnd = base.getTokenEnd();
    int lastState = 0;
    int lastStart = 0;
    final CharSequence buf = base.getBufferSequence();
    final char[] bufArray = CharArrayUtil.fromSequenceWithoutCopying(buf);

    if (seenTag) {
      FoundEnd:
      while (true) {
        FoundEndOfTag:
        while (base.getTokenType() != XmlTokenType.XML_END_TAG_START) {
          if (base.getTokenType() == XmlTokenType.XML_COMMENT_CHARACTERS) {
            // we should terminate on first occurence of </
            final int end = base.getTokenEnd();

            for (int i = base.getTokenStart(); i < end; ++i) {
              if ((bufArray != null ? bufArray[i] : buf.charAt(i)) == '<' && i + 1 < end && (bufArray != null ? bufArray[i + 1] : buf.charAt(i + 1)) == '/') {
                tokenEnd = i;
                lastStart = i - 1;
                lastState = 0;
                break FoundEndOfTag;
              }
            }
          }

          lastState = base.getState();
          tokenEnd = base.getTokenEnd();
          lastStart = base.getTokenStart();
          if (tokenEnd == getBufferEnd()) {
            break FoundEnd;
          }
          base.advance();
        }

        // check if next is script
        if (base.getTokenType() != XmlTokenType.XML_END_TAG_START) { // we are inside comment
          base.start(buf, lastStart + 1, getBufferEnd(), lastState);
          base.getTokenType();
          base.advance();
        }
        else {
          base.advance();
        }

        while (XmlTokenType.WHITESPACES.contains(base.getTokenType())) {
          base.advance();
        }

        if (base.getTokenType() == XmlTokenType.XML_NAME) {
          String name = TreeUtil.getTokenText(base);
          if (caseInsensitive) {
            // Locale.US for ASCII tag names; see class javadoc.
            name = name.toLowerCase(Locale.US);
          }
          if (endOfTheEmbeddment(name)) {
            break; // really found end
          }
        }
      }

      base.start(buf, lastStart, getBufferEnd(), lastState);
      base.getTokenType();
    }
    else if (seenAttribute) {
      while (true) {
        if (!isValidAttributeValueTokenType(base.getTokenType())) {
          break;
        }

        tokenEnd = base.getTokenEnd();
        lastState = base.getState();
        lastStart = base.getTokenStart();

        if (tokenEnd == getBufferEnd()) {
          break;
        }
        base.advance();
      }

      base.start(buf, lastStart, getBufferEnd(), lastState);
      base.getTokenType();
    }
    return tokenEnd;
  }

  /** @return true when {@code name} closes the current embedment ({@code script}/{@code style} or completion dummy). */
  protected boolean endOfTheEmbeddment(String name) {
    return (hasSeenScript() && XmlNameHandler.TOKEN_SCRIPT.equals(name)) ||
        (hasSeenStyle() && XmlNameHandler.TOKEN_STYLE.equals(name)) ||
        CompletionUtilCore.DUMMY_IDENTIFIER_TRIMMED.equalsIgnoreCase(name);
  }

  protected boolean isValidAttributeValueTokenType(final IElementType tokenType) {
    return tokenType == XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN ||
        tokenType == XmlTokenType.XML_ENTITY_REF_TOKEN ||
        tokenType == XmlTokenType.XML_CHAR_ENTITY_REF;
  }

  @Override
  public void advance() {
    super.advance();
    IElementType type = getDelegate().getTokenType();
    TokenHandler tokenHandler = tokenHandlers.get(type);
    if (tokenHandler != null) {
      tokenHandler.handleElement(this);
    }
  }

  @Override
  public int getState() {
    // Delegate state in the low bits, our embedding flags in the high bits.
    int state = super.getState();

    state |= ((seenScript) ? SEEN_SCRIPT : 0);
    state |= ((seenTag) ? SEEN_TAG : 0);
    state |= ((seenStyle) ? SEEN_STYLE : 0);
    state |= ((seenAttribute) ? SEEN_ATTRIBUTE : 0);
    state |= ((seenContentType) ? SEEN_CONTENT_TYPE : 0);
    state |= ((seenStylesheetType) ? SEEN_STYLESHEET_TYPE : 0);

    return state;
  }

  protected final boolean hasSeenStyle() {
    return seenStyle;
  }

  protected final boolean hasSeenAttribute() {
    return seenAttribute;
  }

  protected final boolean hasSeenTag() {
    return seenTag;
  }

  protected boolean hasSeenScript() {
    return seenScript;
  }

  /** @return true when {@code state} (delegate bits only) corresponds to a tag-name position. */
  protected abstract boolean isHtmlTagState(int state);
}
/*
 * Copyright (c) 2016. Veera Siva Sri Aditya Katam
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.katamaditya.apps.weather4u;

import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.text.format.Time;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Static helpers for reading user preferences and formatting weather data
 * (temperature, wind, dates, icon/art resources) for display.
 *
 * Created by VeeraSivaSriAditya on 6/28/2016.
 */
public class WeatherUtil {

    /** Returns the user's preferred location string from shared preferences. */
    public static String getPreferredLocation(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_location_key),
                context.getString(R.string.pref_location_default));
    }

    /** @return true when the user's unit preference is metric (the default). */
    public static boolean isMetric(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_units_key),
                context.getString(R.string.pref_units_metric))
                .equals(context.getString(R.string.pref_units_metric));
    }

    /** Returns the notification-frequency preference (defaults to every eight hours). */
    public static String getNotificationFrequency(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_notification_key),
                context.getString(R.string.pref_notification_eighthours));
    }

    /**
     * Formats a temperature for display, converting to Fahrenheit when the user
     * prefers imperial units. Data is stored in Celsius by default.
     *
     * @param context     Context used for preference and resource lookup
     * @param temperature temperature in degrees Celsius
     * @return localized, formatted temperature string
     */
    public static String formatTemperature(Context context, double temperature) {
        // FIX(review): removed the unused local `suffix` ("\u00B0"); the degree sign
        // is expected to come from the format_temperature resource itself.
        if (!isMetric(context)) {
            temperature = (temperature * 1.8) + 32;
        }

        // For presentation, assume the user doesn't care about tenths of a degree.
        return String.format(context.getString(R.string.format_temperature), temperature);
    }

    /** Formats a millisecond timestamp with the default locale's date format. */
    static String formatDate(long dateInMilliseconds) {
        Date date = new Date(dateInMilliseconds);
        return DateFormat.getDateInstance().format(date);
    }

    // Format used for storing dates in the database. ALso used for converting those strings
    // back into date objects for comparison/processing.
    public static final String DATE_FORMAT = "yyyyMMdd";

    /**
     * Helper method to convert the database representation of the date into something to display
     * to users. As classy and polished a user experience as "20140102" is, we can do better.
     *
     * @param context      Context to use for resource localization
     * @param dateInMillis The date in milliseconds
     * @return a user-friendly representation of the date.
     */
    public static String getFriendlyDayString(Context context, long dateInMillis) {
        // The day string for forecast uses the following logic:
        // For today: "Today, June 8"
        // For tomorrow: "Tomorrow"
        // For the next 5 days: "Wednesday" (just the day name)
        // For all days after that: "Mon Jun 8"

        Time time = new Time();
        time.setToNow();
        long currentTime = System.currentTimeMillis();
        int julianDay = Time.getJulianDay(dateInMillis, time.gmtoff);
        int currentJulianDay = Time.getJulianDay(currentTime, time.gmtoff);

        // If the date we're building the String for is today's date, the format
        // is "Today, June 24"
        if (julianDay == currentJulianDay) {
            String today = context.getString(R.string.today);
            int formatId = R.string.format_full_friendly_date;
            // FIX(review): the format arguments were previously passed to getString
            // *inside* an outer String.format(...) call with no arguments, i.e.
            // String.format(context.getString(formatId, today, ...)). getString(int,
            // Object...) already performs the substitution, and re-formatting the
            // resolved string with zero args breaks on any literal '%'. Call
            // getString with the arguments directly.
            return context.getString(formatId, today,
                    getFormattedMonthDay(context, dateInMillis));
        } else if ( julianDay < currentJulianDay + 7 ) {
            // If the input date is less than a week in the future, just return the day name.
            return getDayName(context, dateInMillis);
        } else {
            // Otherwise, use the form "Mon Jun 3"
            SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd");
            return shortenedDateFormat.format(dateInMillis);
        }
    }

    /**
     * Given a day, returns just the name to use for that day.
     * E.g "today", "tomorrow", "wednesday".
     *
     * @param context      Context to use for resource localization
     * @param dateInMillis The date in milliseconds
     * @return localized day name for the given date
     */
    public static String getDayName(Context context, long dateInMillis) {
        // If the date is today, return the localized version of "Today" instead of the actual
        // day name.
        Time t = new Time();
        t.setToNow();
        int julianDay = Time.getJulianDay(dateInMillis, t.gmtoff);
        int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff);
        if (julianDay == currentJulianDay) {
            return context.getString(R.string.today);
        } else if ( julianDay == currentJulianDay +1 ) {
            return context.getString(R.string.tomorrow);
        } else {
            // Otherwise, the format is just the day of the week (e.g "Wednesday".
            // (Removed an unused local Time instance that was created here.)
            SimpleDateFormat dayFormat = new SimpleDateFormat("EEEE");
            return dayFormat.format(dateInMillis);
        }
    }

    /**
     * Converts db date format to the format "Month day", e.g "June 24".
     *
     * @param context      Context to use for resource localization
     * @param dateInMillis The db formatted date string, expected to be of the form specified
     *                     in Utility.DATE_FORMAT
     * @return The day in the form of a string formatted "December 6"
     */
    public static String getFormattedMonthDay(Context context, long dateInMillis ) {
        // FIX(review): removed unused locals (a Time instance and a SimpleDateFormat
        // built from DATE_FORMAT) — only the "MMMM dd" formatter is actually used.
        SimpleDateFormat monthDayFormat = new SimpleDateFormat("MMMM dd");
        String monthDayString = monthDayFormat.format(dateInMillis);
        return monthDayString;
    }

    /**
     * Formats wind speed and direction for display, converting km/h to mph when the
     * user prefers imperial units.
     *
     * @param context   Context for preference and resource lookup
     * @param windSpeed wind speed in km/h (as delivered by the API)
     * @param degrees   wind direction in degrees (0 = north, clockwise)
     * @return formatted wind string, e.g. "Wind: 6 km/h NW"
     */
    public static String getFormattedWind(Context context, float windSpeed, float degrees) {
        int windFormat;
        if (WeatherUtil.isMetric(context)) {
            windFormat = R.string.format_wind_kmh;
        } else {
            windFormat = R.string.format_wind_mph;
            windSpeed = .621371192237334f * windSpeed;
        }

        // From wind direction in degrees, determine compass direction as a string (e.g NW)
        // You know what's fun, writing really long if/else statements with tons of possible
        // conditions. Seriously, try it!
        String direction = "Unknown";
        if (degrees >= 337.5 || degrees < 22.5) {
            direction = "N";
        } else if (degrees >= 22.5 && degrees < 67.5) {
            direction = "NE";
        } else if (degrees >= 67.5 && degrees < 112.5) {
            direction = "E";
        } else if (degrees >= 112.5 && degrees < 157.5) {
            direction = "SE";
        } else if (degrees >= 157.5 && degrees < 202.5) {
            direction = "S";
        } else if (degrees >= 202.5 && degrees < 247.5) {
            direction = "SW";
        } else if (degrees >= 247.5 && degrees < 292.5) {
            direction = "W";
        } else if (degrees >= 292.5 && degrees < 337.5) {
            direction = "NW";
        }
        return String.format(context.getString(windFormat), windSpeed, direction);
    }

    /**
     * Helper method to provide the icon resource id according to the weather condition id returned
     * by the OpenWeatherMap call.
     *
     * @param weatherId from OpenWeatherMap API response
     * @return resource id for the corresponding icon. -1 if no relation is found.
     */
    public static int getIconResourceForWeatherCondition(int weatherId) {
        // Based on weather code data found at:
        // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
        if (weatherId >= 200 && weatherId <= 232) {
            return R.drawable.ic_storm;
        } else if (weatherId >= 300 && weatherId <= 321) {
            return R.drawable.ic_light_rain;
        } else if (weatherId >= 500 && weatherId <= 504) {
            return R.drawable.ic_rain;
        } else if (weatherId == 511) {
            return R.drawable.ic_snow;
        } else if (weatherId >= 520 && weatherId <= 531) {
            return R.drawable.ic_rain;
        } else if (weatherId >= 600 && weatherId <= 622) {
            return R.drawable.ic_snow;
        } else if (weatherId >= 701 && weatherId <= 761) {
            return R.drawable.ic_fog;
        } else if (weatherId == 761 || weatherId == 781) {
            // NOTE(review): 761 is already captured by the 701-761 fog range above,
            // so only 781 (tornado) can reach this branch. Kept as-is to preserve behavior.
            return R.drawable.ic_storm;
        } else if (weatherId == 800) {
            return R.drawable.ic_clear;
        } else if (weatherId == 801) {
            return R.drawable.ic_light_clouds;
        } else if (weatherId >= 802 && weatherId <= 804) {
            return R.drawable.ic_cloudy;
        }
        return -1;
    }

    /**
     * Helper method to provide the art resource id according to the weather condition id returned
     * by the OpenWeatherMap call.
     *
     * @param weatherId from OpenWeatherMap API response
     * @return resource id for the corresponding icon. -1 if no relation is found.
     */
    public static int getArtResourceForWeatherCondition(int weatherId) {
        // Based on weather code data found at:
        // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
        if (weatherId >= 200 && weatherId <= 232) {
            return R.drawable.art_storm;
        } else if (weatherId >= 300 && weatherId <= 321) {
            return R.drawable.art_light_rain;
        } else if (weatherId >= 500 && weatherId <= 504) {
            return R.drawable.art_rain;
        } else if (weatherId == 511) {
            return R.drawable.art_snow;
        } else if (weatherId >= 520 && weatherId <= 531) {
            return R.drawable.art_rain;
        } else if (weatherId >= 600 && weatherId <= 622) {
            return R.drawable.art_snow;
        } else if (weatherId >= 701 && weatherId <= 761) {
            return R.drawable.art_fog;
        } else if (weatherId == 761 || weatherId == 781) {
            // NOTE(review): 761 is unreachable here (covered by the fog range above);
            // only 781 (tornado) selects the storm art. Kept as-is to preserve behavior.
            return R.drawable.art_storm;
        } else if (weatherId == 800) {
            return R.drawable.art_clear;
        } else if (weatherId == 801) {
            return R.drawable.art_light_clouds;
        } else if (weatherId >= 802 && weatherId <= 804) {
            return R.drawable.art_clouds;
        }
        return -1;
    }
}
package ca.uhn.fhir.parser; /* * #%L * HAPI FHIR - Core Library * %% * Copyright (C) 2014 - 2017 University Health Network * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.apache.commons.lang3.StringUtils.*; import java.util.*; import javax.xml.stream.events.StartElement; import javax.xml.stream.events.XMLEvent; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.tuple.Pair; import org.hl7.fhir.instance.model.api.*; import ca.uhn.fhir.context.*; import ca.uhn.fhir.context.BaseRuntimeChildDefinition.IMutator; import ca.uhn.fhir.model.api.*; import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt; import ca.uhn.fhir.model.base.resource.ResourceMetadataMap; import ca.uhn.fhir.model.primitive.*; import ca.uhn.fhir.parser.json.JsonLikeValue.ScalarType; import ca.uhn.fhir.parser.json.JsonLikeValue.ValueType; import ca.uhn.fhir.util.*; class ParserState<T> { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ParserState.class); private List<String> myComments = new ArrayList<String>(2); private final FhirContext myContext; private final IParserErrorHandler myErrorHandler; private final boolean myJsonMode; private T myObject; private final IParser myParser; private IBase myPreviousElement; private BaseState myState; private ParserState(IParser theParser, FhirContext theContext, boolean theJsonMode, 
IParserErrorHandler theErrorHandler) { myParser = theParser; myContext = theContext; myJsonMode = theJsonMode; myErrorHandler = theErrorHandler; } public void attributeValue(String theName, String theValue) throws DataFormatException { myState.attributeValue(theName, theValue); } public void commentPost(String theCommentText) { if (myPreviousElement != null) { myPreviousElement.getFormatCommentsPost().add(theCommentText); } } public void commentPre(String theCommentText) { if (myState.getCurrentElement() != null) { IBase element = myState.getCurrentElement(); element.getFormatCommentsPre().add(theCommentText); } } public boolean elementIsRepeating(String theChildName) { return myState.elementIsRepeating(theChildName); } public void endingElement() throws DataFormatException { myState.endingElement(); } public void enteringNewElement(String theNamespaceUri, String theName) throws DataFormatException { myState.enteringNewElement(theNamespaceUri, theName); } public void enteringNewElementExtension(StartElement theElem, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { myState.enteringNewElementExtension(theElem, theUrlAttr, theIsModifier, baseServerUrl); } public T getObject() { return myObject; } public boolean isPreResource() { return myState.isPreResource(); } private Object newContainedDt(IResource theTarget) { return ReflectionUtil.newInstance(theTarget.getStructureFhirVersionEnum().getVersionImplementation().getContainedType()); } @SuppressWarnings("unchecked") private void pop() { myPreviousElement = myState.getCurrentElement(); if (myState.myStack != null) { myState = myState.myStack; myState.wereBack(); } else { myObject = (T) myState.getCurrentElement(); myState = null; } } private void push(BaseState theState) { theState.setStack(myState); myState = theState; if (myComments.isEmpty() == false) { if (myState.getCurrentElement() != null) { myState.getCurrentElement().getFormatCommentsPre().addAll(myComments); myComments.clear(); } } } 
public void string(String theData) { myState.string(theData); } public boolean verifyNamespace(String theExpect, String theActual) { if (myJsonMode) { return true; } return StringUtils.equals(theExpect, theActual); } /** * Invoked after any new XML event is individually processed, containing a copy of the XML event. This is basically * intended for embedded XHTML content */ public void xmlEvent(XMLEvent theNextEvent) { if (myState != null) { myState.xmlEvent(theNextEvent); } } /** * @param theResourceType * May be null */ static <T extends IBaseResource> ParserState<T> getPreResourceInstance(IParser theParser, Class<T> theResourceType, FhirContext theContext, boolean theJsonMode, IParserErrorHandler theErrorHandler) throws DataFormatException { ParserState<T> retVal = new ParserState<T>(theParser, theContext, theJsonMode, theErrorHandler); if (theResourceType == null) { if (theContext.getVersion().getVersion().isRi()) { retVal.push(retVal.new PreResourceStateHl7Org(theResourceType)); } else { retVal.push(retVal.new PreResourceStateHapi(theResourceType)); } } else { if (IResource.class.isAssignableFrom(theResourceType)) { retVal.push(retVal.new PreResourceStateHapi(theResourceType)); } else { retVal.push(retVal.new PreResourceStateHl7Org(theResourceType)); } } return retVal; } static ParserState<TagList> getPreTagListInstance(IParser theParser, FhirContext theContext, boolean theJsonMode, IParserErrorHandler theErrorHandler) { ParserState<TagList> retVal = new ParserState<TagList>(theParser, theContext, theJsonMode, theErrorHandler); retVal.push(retVal.new PreTagListState()); return retVal; } private abstract class BaseState { private PreResourceState myPreResourceState; private BaseState myStack; public BaseState(PreResourceState thePreResourceState) { super(); myPreResourceState = thePreResourceState; } /** * @param theValue * The attribute value */ public void attributeValue(String theName, String theValue) throws DataFormatException { 
myErrorHandler.unknownAttribute(null, theName); } public boolean elementIsRepeating(String theChildName) { return false; } public void endingElement() throws DataFormatException { // ignore by default } /** * @param theNamespaceUri * The XML namespace (if XML) or null */ public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException { myErrorHandler.unknownElement(null, theLocalPart); } /** * Default implementation just handles undeclared extensions */ @SuppressWarnings("unused") public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { if (myPreResourceState != null && getCurrentElement() instanceof ISupportsUndeclaredExtensions) { ExtensionDt newExtension = new ExtensionDt(theIsModifier); newExtension.setUrl(theUrlAttr); ISupportsUndeclaredExtensions elem = (ISupportsUndeclaredExtensions) getCurrentElement(); elem.addUndeclaredExtension(newExtension); ExtensionState newState = new ExtensionState(myPreResourceState, newExtension); push(newState); } else { if (theIsModifier == false) { if (getCurrentElement() instanceof IBaseHasExtensions) { IBaseExtension<?, ?> ext = ((IBaseHasExtensions) getCurrentElement()).addExtension(); ext.setUrl(theUrlAttr); ParserState<T>.ExtensionState newState = new ExtensionState(myPreResourceState, ext); push(newState); } else { logAndSwallowUnexpectedElement("extension"); } } else { if (getCurrentElement() instanceof IBaseHasModifierExtensions) { IBaseExtension<?, ?> ext = ((IBaseHasModifierExtensions) getCurrentElement()).addModifierExtension(); ext.setUrl(theUrlAttr); ParserState<T>.ExtensionState newState = new ExtensionState(myPreResourceState, ext); push(newState); } else { logAndSwallowUnexpectedElement("modifierExtension"); } } } } protected IBase getCurrentElement() { return null; } public PreResourceState getPreResourceState() { return myPreResourceState; } public boolean isPreResource() { return false; } 
protected void logAndSwallowUnexpectedElement(String theLocalPart) { myErrorHandler.unknownElement(null, theLocalPart); push(new SwallowChildrenWholeState(getPreResourceState())); } public void setStack(BaseState theState) { myStack = theState; } /** * @param theData * The string value */ public void string(String theData) { // ignore by default } public void wereBack() { // allow an implementor to override } /** * @param theNextEvent * The XML event */ public void xmlEvent(XMLEvent theNextEvent) { // ignore } } private class ContainedResourcesStateHapi extends PreResourceState { public ContainedResourcesStateHapi(PreResourceState thePreResourcesState) { super(thePreResourcesState, ((IResource) thePreResourcesState.myInstance).getStructureFhirVersionEnum()); } @Override public void endingElement() throws DataFormatException { pop(); } @Override protected void populateTarget() { // nothing } @Override public void wereBack() { super.wereBack(); IResource res = (IResource) getCurrentElement(); assert res != null; if (res.getId() == null || res.getId().isEmpty()) { // If there is no ID, we don't keep the resource because it's useless (contained resources // need an ID to be referred to) myErrorHandler.containedResourceWithNoId(null); } else { if (!res.getId().isLocal()) { res.setId(new IdDt('#' + res.getId().getIdPart())); } getPreResourceState().getContainedResources().put(res.getId().getValueAsString(), res); } IResource preResCurrentElement = (IResource) getPreResourceState().getCurrentElement(); @SuppressWarnings("unchecked") List<IResource> containedResources = (List<IResource>) preResCurrentElement.getContained().getContainedResources(); containedResources.add(res); } } private class ContainedResourcesStateHl7Org extends PreResourceState { public ContainedResourcesStateHl7Org(PreResourceState thePreResourcesState) { super(thePreResourcesState, thePreResourcesState.myParentVersion); } @Override public void endingElement() throws DataFormatException { pop(); } 
	@Override
	protected void populateTarget() {
		// nothing
	}

	@Override
	public void wereBack() {
		super.wereBack();

		IBaseResource res = getCurrentElement();
		assert res != null;
		if (res.getIdElement() == null || res.getIdElement().isEmpty()) {
			// If there is no ID, we don't keep the resource because it's useless (contained resources
			// need an ID to be referred to)
			myErrorHandler.containedResourceWithNoId(null);
		} else {
			// Normalize to a local ("#...") id and index for later weaving
			res.getIdElement().setValue('#' + res.getIdElement().getIdPart());
			getPreResourceState().getContainedResources().put(res.getIdElement().getValue(), res);
		}

		// Attach the contained resource to the parent's "contained" child
		IBaseResource preResCurrentElement = getPreResourceState().getCurrentElement();
		RuntimeResourceDefinition def = myContext.getResourceDefinition(preResCurrentElement);
		def.getChildByName("contained").getMutator().addValue(preResCurrentElement, res);
	}

}

// Parses the content of an extension that is declared on the model class
// (via a runtime child-extension definition), as opposed to an undeclared one.
private class DeclaredExtensionState extends BaseState {

	private IBase myChildInstance;
	private RuntimeChildDeclaredExtensionDefinition myDefinition;
	private IBase myParentInstance;
	private PreResourceState myPreResourceState;

	public DeclaredExtensionState(PreResourceState thePreResourceState, RuntimeChildDeclaredExtensionDefinition theDefinition, IBase theParentInstance) {
		super(thePreResourceState);
		myPreResourceState = thePreResourceState;
		myDefinition = theDefinition;
		myParentInstance = theParentInstance;
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		if (theName.equals("url")) {
			// This can be ignored
			return;
		}
		super.attributeValue(theName, theValue);
	}

	@Override
	public void endingElement() throws DataFormatException {
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		BaseRuntimeElementDefinition<?> target = myDefinition.getChildByName(theLocalPart);
		if (target == null) {
			// Unknown child of the extension: report and discard the subtree
			myErrorHandler.unknownElement(null, theLocalPart);
			push(new SwallowChildrenWholeState(getPreResourceState()));
			return;
		}

		switch (target.getChildType()) {
		case COMPOSITE_DATATYPE: {
			// Instantiate the composite datatype value and descend into it
			BaseRuntimeElementCompositeDefinition<?> compositeTarget = (BaseRuntimeElementCompositeDefinition<?>) target;
			ICompositeType newChildInstance = (ICompositeType) compositeTarget.newInstance(myDefinition.getInstanceConstructorArguments());
			myDefinition.getMutator().addValue(myParentInstance, newChildInstance);
			ElementCompositeState newState = new ElementCompositeState(myPreResourceState, theLocalPart, compositeTarget, newChildInstance);
			push(newState);
			return;
		}
		case ID_DATATYPE:
		case PRIMITIVE_DATATYPE: {
			RuntimePrimitiveDatatypeDefinition primitiveTarget = (RuntimePrimitiveDatatypeDefinition) target;
			IPrimitiveType<?> newChildInstance = primitiveTarget.newInstance(myDefinition.getInstanceConstructorArguments());
			myDefinition.getMutator().addValue(myParentInstance, newChildInstance);
			PrimitiveState newState = new PrimitiveState(getPreResourceState(), newChildInstance);
			push(newState);
			return;
		}
		case PRIMITIVE_XHTML:
		case RESOURCE:
		case RESOURCE_BLOCK:
		case UNDECL_EXT:
		case EXTENSION_DECLARED:
		default:
			// Not valid inside a declared extension; silently ignored
			break;
		}
	}

	@Override
	public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) {
		// Nested declared extension: the holder instance is created lazily on first use
		RuntimeChildDeclaredExtensionDefinition declaredExtension = myDefinition.getChildExtensionForUrl(theUrlAttr);
		if (declaredExtension != null) {
			if (myChildInstance == null) {
				myChildInstance = myDefinition.newInstance();
				myDefinition.getMutator().addValue(myParentInstance, myChildInstance);
			}
			BaseState newState = new DeclaredExtensionState(getPreResourceState(), declaredExtension, myChildInstance);
			push(newState);
		} else {
			super.enteringNewElementExtension(theElement, theUrlAttr, theIsModifier, baseServerUrl);
		}
	}

	@Override
	protected IBase getCurrentElement() {
		return myParentInstance;
	}

}

// Workhorse state: populates one composite element (resource, datatype or
// backbone block) child-by-child according to its runtime definition.
private class ElementCompositeState extends BaseState {

	private BaseRuntimeElementCompositeDefinition<?> myDefinition;
	private IBase myInstance;
	// Names of non-repeating children already seen, to detect illegal repeats
	private Set<String> myParsedNonRepeatableNames = new
			HashSet<String>();
	private String myElementName;

	public ElementCompositeState(PreResourceState thePreResourceState, String theElementName, BaseRuntimeElementCompositeDefinition<?> theDef, IBase theInstance) {
		super(thePreResourceState);
		myDefinition = theDef;
		myInstance = theInstance;
		myElementName = theElementName;
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		if ("id".equals(theName)) {
			// Element id: route to whichever id mechanism the model supports
			if (myInstance instanceof IIdentifiableElement) {
				((IIdentifiableElement) myInstance).setElementSpecificId((theValue));
			} else if (myInstance instanceof IBaseElement) {
				((IBaseElement) myInstance).setId(theValue);
			} else if (myInstance instanceof IBaseResource) {
				new IdDt(theValue).applyTo((IBaseResource) myInstance);
			}
		} else if ("url".equals(theName) && myInstance instanceof ExtensionDt) {
			((ExtensionDt) myInstance).setUrl(theValue);
		} else {
			// Unknown attribute: report with the JSON-appropriate message if parsing JSON
			if (myJsonMode) {
				myErrorHandler.incorrectJsonType(null, myElementName, ValueType.OBJECT, null, ValueType.SCALAR, ScalarType.STRING);
			} else {
				myErrorHandler.unknownAttribute(null, theName);
			}
		}
	}

	@Override
	public boolean elementIsRepeating(String theChildName) {
		BaseRuntimeChildDefinition child = myDefinition.getChildByName(theChildName);
		if (child == null) {
			return false;
		}
		// Repeating when max cardinality exceeds one (or is unbounded)
		return child.getMax() > 1 || child.getMax() == Child.MAX_UNLIMITED;
	}

	@Override
	public void endingElement() {
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespace, String theChildName) throws DataFormatException {
		BaseRuntimeChildDefinition child = myDefinition.getChildByName(theChildName);
		if (child == null) {
			if (theChildName.equals("id")) {
				if (getCurrentElement() instanceof IIdentifiableElement) {
					push(new IdentifiableElementIdState(getPreResourceState(), (IIdentifiableElement) getCurrentElement()));
					return;
				}
			}
			/*
			 * This means we've found an element that doesn't exist on the structure.
			 * If the error handler doesn't throw
			 * an exception, swallow the element silently along with any child elements
			 */
			myErrorHandler.unknownElement(null, theChildName);
			push(new SwallowChildrenWholeState(getPreResourceState()));
			return;
		}

		// Reject a repeat of a child whose cardinality is 0..1 or 1..1
		if ((child.getMax() == 0 || child.getMax() == 1) && !myParsedNonRepeatableNames.add(theChildName)) {
			myErrorHandler.unexpectedRepeatingElement(null, theChildName);
			push(new SwallowChildrenWholeState(getPreResourceState()));
			return;
		}

		BaseRuntimeElementDefinition<?> target = child.getChildByName(theChildName);
		if (target == null) {
			// This is a bug with the structures and shouldn't happen..
			throw new DataFormatException("Found unexpected element '" + theChildName + "' in parent element '" + myDefinition.getName() + "'. Valid names are: " + child.getValidChildNames());
		}

		switch (target.getChildType()) {
		case COMPOSITE_DATATYPE: {
			BaseRuntimeElementCompositeDefinition<?> compositeTarget = (BaseRuntimeElementCompositeDefinition<?>) target;
			ICompositeType newChildInstance = (ICompositeType) compositeTarget.newInstance(child.getInstanceConstructorArguments());
			child.getMutator().addValue(myInstance, newChildInstance);
			ParserState<T>.ElementCompositeState newState = new ElementCompositeState(getPreResourceState(), theChildName, compositeTarget, newChildInstance);
			push(newState);
			return;
		}
		case ID_DATATYPE:
		case PRIMITIVE_DATATYPE: {
			RuntimePrimitiveDatatypeDefinition primitiveTarget = (RuntimePrimitiveDatatypeDefinition) target;
			IPrimitiveType<?> newChildInstance;
			newChildInstance = primitiveTarget.newInstance(child.getInstanceConstructorArguments());
			child.getMutator().addValue(myInstance, newChildInstance);
			PrimitiveState newState = new PrimitiveState(getPreResourceState(), newChildInstance);
			push(newState);
			return;
		}
		case RESOURCE_BLOCK: {
			// Backbone element (e.g. Patient.contact)
			RuntimeResourceBlockDefinition blockTarget = (RuntimeResourceBlockDefinition) target;
			IBase newBlockInstance = blockTarget.newInstance();
			child.getMutator().addValue(myInstance, newBlockInstance);
			ElementCompositeState newState = new ElementCompositeState(getPreResourceState(), theChildName, blockTarget, newBlockInstance);
			push(newState);
			return;
		}
		case PRIMITIVE_XHTML: {
			// Narrative (HAPI model): captured via raw XML events
			RuntimePrimitiveDatatypeNarrativeDefinition xhtmlTarget = (RuntimePrimitiveDatatypeNarrativeDefinition) target;
			XhtmlDt newDt = xhtmlTarget.newInstance();
			child.getMutator().addValue(myInstance, newDt);
			XhtmlState state = new XhtmlState(getPreResourceState(), newDt, true);
			push(state);
			return;
		}
		case PRIMITIVE_XHTML_HL7ORG: {
			RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition xhtmlTarget = (RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition) target;
			IBaseXhtml newDt = xhtmlTarget.newInstance();
			child.getMutator().addValue(myInstance, newDt);
			XhtmlStateHl7Org state = new XhtmlStateHl7Org(getPreResourceState(), newDt);
			push(state);
			return;
		}
		case CONTAINED_RESOURCES: {
			// HAPI model: reuse the existing contained-resources holder if present
			List<? extends IBase> values = child.getAccessor().getValues(myInstance);
			Object newDt;
			if (values == null || values.isEmpty() || values.get(0) == null) {
				newDt = newContainedDt((IResource) getPreResourceState().myInstance);
				child.getMutator().addValue(myInstance, (IBase) newDt);
			} else {
				newDt = values.get(0);
			}
			ContainedResourcesStateHapi state = new ContainedResourcesStateHapi(getPreResourceState());
			push(state);
			return;
		}
		case CONTAINED_RESOURCE_LIST: {
			ContainedResourcesStateHl7Org state = new ContainedResourcesStateHl7Org(getPreResourceState());
			push(state);
			return;
		}
		case RESOURCE: {
			// Inline resource (e.g. Bundle.entry.resource): pick the model-appropriate state
			if (myInstance instanceof IAnyResource || myInstance instanceof IBaseBackboneElement) {
				ParserState<T>.PreResourceStateHl7Org state = new PreResourceStateHl7Org(myInstance, child.getMutator(), null);
				push(state);
			} else {
				ParserState<T>.PreResourceStateHapi state = new PreResourceStateHapi(myInstance, child.getMutator(), null);
				push(state);
			}
			return;
		}
		case UNDECL_EXT:
		case EXTENSION_DECLARED: {
			// Throw an exception because this shouldn't happen here
			break;
		}
		}

		throw new DataFormatException("Illegal resource position: " + target.getChildType());
	}
	@Override
	public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) {
		RuntimeChildDeclaredExtensionDefinition declaredExtension = myDefinition.getDeclaredExtension(theUrlAttr, baseServerUrl);
		if (declaredExtension != null) {
			BaseState newState = new DeclaredExtensionState(getPreResourceState(), declaredExtension, myInstance);
			push(newState);
		} else {
			// Not declared on the model: fall back to undeclared-extension handling
			super.enteringNewElementExtension(theElement, theUrlAttr, theIsModifier, baseServerUrl);
		}
	}

	@Override
	protected IBase getCurrentElement() {
		return myInstance;
	}

}

// Captures the "id" child of an HL7.org-model element.
public class ElementIdState extends BaseState {

	private IBaseElement myElement;

	public ElementIdState(ParserState<T>.PreResourceState thePreResourceState, IBaseElement theElement) {
		super(thePreResourceState);
		myElement = theElement;
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		myElement.setId(theValue);
	}

	@Override
	public void endingElement() {
		pop();
	}

}

// Populates an undeclared extension: its value datatype or nested extensions.
private class ExtensionState extends BaseState {

	private IBaseExtension<?, ?> myExtension;

	public ExtensionState(PreResourceState thePreResourceState, IBaseExtension<?, ?> theExtension) {
		super(thePreResourceState);
		myExtension = theExtension;
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		if ("url".equals(theName)) {
			// The URL attribute is handled in the XML loop as a special case since it is "url" instead
			// of "value" like every single other place
			return;
		}
		if ("id".equals(theName)) {
			if (getCurrentElement() instanceof IBaseElement) {
				((IBaseElement) getCurrentElement()).setId(theValue);
				return;
			} else if (getCurrentElement() instanceof IIdentifiableElement) {
				((IIdentifiableElement) getCurrentElement()).setElementSpecificId(theValue);
				return;
			}
		}
		super.attributeValue(theName, theValue);
	}

	@Override
	public void endingElement() throws DataFormatException {
		// An extension may carry either a value or nested extensions, never both
		if (myExtension.getValue() != null &&
				myExtension.getExtension().size() > 0) {
			throw new DataFormatException("Extension (URL='" + myExtension.getUrl() + "') must not have both a value and other contained extensions");
		}
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		if (theLocalPart.equals("id")) {
			// Element id on the extension itself
			if (getCurrentElement() instanceof IBaseElement) {
				push(new ElementIdState(getPreResourceState(), (IBaseElement) getCurrentElement()));
				return;
			} else if (getCurrentElement() instanceof IIdentifiableElement) {
				push(new IdentifiableElementIdState(getPreResourceState(), (IIdentifiableElement) getCurrentElement()));
				return;
			}
		}

		// Otherwise expect a value[x] child, resolved against the
		// undeclared-extension definition
		BaseRuntimeElementDefinition<?> target = myContext.getRuntimeChildUndeclaredExtensionDefinition().getChildByName(theLocalPart);

		if (target != null) {
			switch (target.getChildType()) {
			case COMPOSITE_DATATYPE: {
				BaseRuntimeElementCompositeDefinition<?> compositeTarget = (BaseRuntimeElementCompositeDefinition<?>) target;
				ICompositeType newChildInstance = (ICompositeType) compositeTarget.newInstance();
				myExtension.setValue(newChildInstance);
				ElementCompositeState newState = new ElementCompositeState(getPreResourceState(), theLocalPart, compositeTarget, newChildInstance);
				push(newState);
				return;
			}
			case ID_DATATYPE:
			case PRIMITIVE_DATATYPE: {
				RuntimePrimitiveDatatypeDefinition primitiveTarget = (RuntimePrimitiveDatatypeDefinition) target;
				IPrimitiveType<?> newChildInstance = primitiveTarget.newInstance();
				myExtension.setValue(newChildInstance);
				PrimitiveState newState = new PrimitiveState(getPreResourceState(), newChildInstance);
				push(newState);
				return;
			}
			case CONTAINED_RESOURCES:
			case CONTAINED_RESOURCE_LIST:
			case EXTENSION_DECLARED:
			case PRIMITIVE_XHTML:
			case PRIMITIVE_XHTML_HL7ORG:
			case RESOURCE:
			case RESOURCE_BLOCK:
			case UNDECL_EXT:
				break;
			}
		}

		// We hit an invalid type for the extension
		myErrorHandler.unknownElement(null, theLocalPart);
		push(new SwallowChildrenWholeState(getPreResourceState()));
		return;
	}
	@Override
	protected IBaseExtension<?, ?> getCurrentElement() {
		return myExtension;
	}

}

// Captures the element-specific id of a HAPI-model element.
public class IdentifiableElementIdState extends BaseState {

	private IIdentifiableElement myElement;

	public IdentifiableElementIdState(ParserState<T>.PreResourceState thePreResourceState, IIdentifiableElement theElement) {
		super(thePreResourceState);
		myElement = theElement;
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		myElement.setElementSpecificId(theValue);
	}

	@Override
	public void endingElement() {
		pop();
	}

}

// Parses a HAPI-model resource's "meta" element into its metadata map
// (versionId, lastUpdated, security labels, profiles, tags).
private class MetaElementState extends BaseState {
	private ResourceMetadataMap myMap;

	public MetaElementState(ParserState<T>.PreResourceState thePreResourceState, ResourceMetadataMap theMap) {
		super(thePreResourceState);
		myMap = theMap;
	}

	@Override
	public void endingElement() throws DataFormatException {
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		if (theLocalPart.equals("versionId")) {
			push(new MetaVersionElementState(getPreResourceState(), myMap));
			// } else if (theLocalPart.equals("profile")) {
			//
		} else if (theLocalPart.equals("lastUpdated")) {
			InstantDt updated = new InstantDt();
			push(new PrimitiveState(getPreResourceState(), updated));
			myMap.put(ResourceMetadataKeyEnum.UPDATED, updated);
		} else if (theLocalPart.equals("security")) {
			// Security labels accumulate in a list stored directly in the metadata map
			@SuppressWarnings("unchecked")
			List<IBase> securityLabels = (List<IBase>) myMap.get(ResourceMetadataKeyEnum.SECURITY_LABELS);
			if (securityLabels == null) {
				securityLabels = new ArrayList<IBase>();
				myMap.put(ResourceMetadataKeyEnum.SECURITY_LABELS, securityLabels);
			}
			IBase securityLabel = myContext.getVersion().newCodingDt();
			BaseRuntimeElementCompositeDefinition<?> codinfDef = (BaseRuntimeElementCompositeDefinition<?>) myContext.getElementDefinition(securityLabel.getClass());
			push(new SecurityLabelElementStateHapi(getPreResourceState(), codinfDef, securityLabel));
			securityLabels.add(securityLabel);
		} else
		if (theLocalPart.equals("profile")) {
			// The profiles list is kept immutable; build a copy, add, re-store
			@SuppressWarnings("unchecked")
			List<IdDt> profiles = (List<IdDt>) myMap.get(ResourceMetadataKeyEnum.PROFILES);
			List<IdDt> newProfiles;
			if (profiles != null) {
				newProfiles = new ArrayList<IdDt>(profiles.size() + 1);
				newProfiles.addAll(profiles);
			} else {
				newProfiles = new ArrayList<IdDt>(1);
			}
			IdDt profile = new IdDt();
			push(new PrimitiveState(getPreResourceState(), profile));
			newProfiles.add(profile);
			myMap.put(ResourceMetadataKeyEnum.PROFILES, Collections.unmodifiableList(newProfiles));
		} else if (theLocalPart.equals("tag")) {
			TagList tagList = (TagList) myMap.get(ResourceMetadataKeyEnum.TAG_LIST);
			if (tagList == null) {
				tagList = new TagList();
				myMap.put(ResourceMetadataKeyEnum.TAG_LIST, tagList);
			}
			push(new TagState(tagList));
		} else {
			myErrorHandler.unknownElement(null, theLocalPart);
			push(new SwallowChildrenWholeState(getPreResourceState()));
			return;
		}
	}

}

// Captures meta/versionId into the resource metadata map.
private class MetaVersionElementState extends BaseState {

	private ResourceMetadataMap myMap;

	public MetaVersionElementState(ParserState<T>.PreResourceState thePreResourceState, ResourceMetadataMap theMap) {
		super(thePreResourceState);
		myMap = theMap;
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		myMap.put(ResourceMetadataKeyEnum.VERSION, theValue);
	}

	@Override
	public void endingElement() throws DataFormatException {
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		// versionId has no children
		myErrorHandler.unknownElement(null, theLocalPart);
		push(new SwallowChildrenWholeState(getPreResourceState()));
		return;
	}

}

// Base for states sitting at the root of a (possibly nested) resource:
// resolves the resource type, tracks contained resources, and post-processes
// the finished instance.
private abstract class PreResourceState extends BaseState {

	// Contained resources of this resource, keyed by local ("#...") id
	private Map<String, IBaseResource> myContainedResources;
	private IBaseResource myInstance;
	private FhirVersionEnum myParentVersion;
	private boolean myRequireResourceType = true;
	private Class<? extends IBaseResource> myResourceType;

	public PreResourceState(Class<?
			extends IBaseResource> theResourceType) {
		super(null);
		myResourceType = theResourceType;
		myContainedResources = new HashMap<String, IBaseResource>();
		if (theResourceType != null) {
			myParentVersion = myContext.getResourceDefinition(theResourceType).getStructureVersion();
		} else {
			// No fixed type: fall back to the context's default FHIR version
			myParentVersion = myContext.getVersion().getVersion();
		}
	}

	public PreResourceState(PreResourceState thePreResourcesState, FhirVersionEnum theParentVersion) {
		super(thePreResourcesState);
		Validate.notNull(theParentVersion);
		myParentVersion = theParentVersion;
		// Nested resources share the root's contained-resource index
		myContainedResources = thePreResourcesState.getContainedResources();
	}

	@Override
	public void endingElement() throws DataFormatException {
		stitchBundleCrossReferences();
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		BaseRuntimeElementDefinition<?> definition;
		if (myResourceType == null) {
			// No expected type: resolve by element name, honouring the parser's
			// preferred types first
			definition = null;
			if (myParser.getPreferTypes() != null) {
				for (Class<? extends IBaseResource> next : myParser.getPreferTypes()) {
					RuntimeResourceDefinition nextDef = myContext.getResourceDefinition(next);
					if (nextDef.getName().equals(theLocalPart)) {
						definition = nextDef;
					}
				}
			}
			if (definition == null) {
				definition = myContext.getResourceDefinition(myParentVersion, theLocalPart);
			}
			if ((definition == null)) {
				throw new DataFormatException("Element '" + theLocalPart + "' is not a known resource type, expected a resource at this position");
			}
		} else {
			definition = myContext.getResourceDefinition(myResourceType);
			if (!StringUtils.equals(theLocalPart, definition.getName())) {
				if (myRequireResourceType) {
					throw new DataFormatException(myContext.getLocalizer().getMessage(ParserState.class, "wrongResourceTypeFound", definition.getName(), theLocalPart));
				}
				// Type mismatch tolerated: re-resolve from the element name
				definition = myContext.getResourceDefinition(theLocalPart);
				if (!(definition instanceof RuntimeResourceDefinition)) {
					throw new DataFormatException("Element '" + theLocalPart + "' is not a resource, expected a resource at this position");
				}
			}
		}

		RuntimeResourceDefinition def = (RuntimeResourceDefinition) definition;
		if (!definition.getName().equals(theLocalPart) && definition.getName().equalsIgnoreCase(theLocalPart)) {
			// Case-insensitive match only: FHIR resource names are case sensitive
			throw new DataFormatException("Unknown resource type '" + theLocalPart + "': Resource names are case sensitive, found similar name: '" + definition.getName() + "'");
		}
		myInstance = def.newInstance();
		// Dispatch to the model-appropriate resource state
		if (myInstance instanceof IResource) {
			push(new ResourceStateHapi(getRootPreResourceState(), def, (IResource) myInstance));
		} else {
			push(new ResourceStateHl7Org(getRootPreResourceState(), def, myInstance));
		}
	}

	public Map<String, IBaseResource> getContainedResources() {
		return myContainedResources;
	}

	@Override
	protected IBaseResource getCurrentElement() {
		return myInstance;
	}

	private PreResourceState getRootPreResourceState() {
		if (getPreResourceState() != null) {
			return getPreResourceState();
		}
		return this;
	}

	@Override
	public boolean isPreResource() {
		return true;
	}

	// Subclasses deliver the finished instance to its destination (mutator etc.)
	protected abstract void populateTarget();

	private void postProcess() {
		// If a default type is registered for one of the declared profiles,
		// re-parse the resource into that more specific type
		if (myContext.hasDefaultTypeForProfile()) {
			IBaseMetaType meta = myInstance.getMeta();
			Class<? extends IBaseResource> wantedProfileType = null;
			String usedProfile = null;
			for (IPrimitiveType<String> next : meta.getProfile()) {
				if (isNotBlank(next.getValue())) {
					wantedProfileType = myContext.getDefaultTypeForProfile(next.getValue());
					if (wantedProfileType != null) {
						usedProfile = next.getValue();
						break;
					}
				}
			}

			if (wantedProfileType != null && !wantedProfileType.equals(myInstance.getClass())) {
				if (myResourceType == null || myResourceType.isAssignableFrom(wantedProfileType)) {
					ourLog.debug("Converting resource of type {} to type defined for profile \"{}\": {}", new Object[] { myInstance.getClass().getName(), usedProfile, wantedProfileType });

					/*
					 * This isn't the most efficient thing really.. If we want a specific
					 * type we just re-parse into that type.
					 * The problem is that we don't know
					 * until we've parsed the resource which type we want to use because the
					 * profile declarations are in the text of the resource itself.
					 *
					 * At some point it would be good to write code which can present a view
					 * of one type backed by another type and use that.
					 */
					IParser parser = myContext.newJsonParser();
					String asString = parser.encodeResourceToString(myInstance);
					myInstance = parser.parseResource(wantedProfileType, asString);
				}
			}
		}

		populateTarget();
	}

	// For Bundles: resolve entry fullUrl / resource ids into actual resource
	// objects so references within the bundle are populated after parsing.
	private void stitchBundleCrossReferences() {
		final boolean bundle = "Bundle".equals(myContext.getResourceDefinition(myInstance).getName());
		if (bundle) {
			FhirTerser t = myContext.newTerser();

			// Index entry resources by their fullUrl
			Map<String, IBaseResource> idToResource = new HashMap<String, IBaseResource>();
			List<IBase> entries = t.getValues(myInstance, "Bundle.entry", IBase.class);
			for (IBase nextEntry : entries) {
				IPrimitiveType<?> fullUrl = t.getSingleValueOrNull(nextEntry, "fullUrl", IPrimitiveType.class);
				if (fullUrl != null && isNotBlank(fullUrl.getValueAsString())) {
					IBaseResource resource = t.getSingleValueOrNull(nextEntry, "resource", IBaseResource.class);
					if (resource != null) {
						idToResource.put(fullUrl.getValueAsString(), resource);
					}
				}
			}

			/*
			 * Stitch together resource references
			 */
			List<IBaseResource> resources = t.getAllPopulatedChildElementsOfType(myInstance, IBaseResource.class);
			for (IBaseResource next : resources) {
				// Also index by unqualified-versionless "Type/id"
				IIdType id = next.getIdElement();
				if (id != null && id.isEmpty() == false) {
					String resName = myContext.getResourceDefinition(next).getName();
					IIdType idType = id.withResourceType(resName).toUnqualifiedVersionless();
					idToResource.put(idType.getValueAsString(), next);
				}
			}

			for (IBaseResource next : resources) {
				List<IBaseReference> refs = myContext.newTerser().getAllPopulatedChildElementsOfType(next, IBaseReference.class);
				for (IBaseReference nextRef : refs) {
					if (nextRef.isEmpty() == false && nextRef.getReferenceElement() != null) {
						IIdType unqualifiedVersionless =
								nextRef.getReferenceElement().toUnqualifiedVersionless();
						IBaseResource target = idToResource.get(unqualifiedVersionless.getValueAsString());
						if (target != null) {
							nextRef.setResource(target);
						}
					}
				}
			}

			/*
			 * Set resource IDs based on Bundle.entry.request.url
			 */
			List<Pair<String, IBaseResource>> urlsAndResources = BundleUtil.getBundleEntryUrlsAndResources(myContext, (IBaseBundle) myInstance);
			for (Pair<String, IBaseResource> pair : urlsAndResources) {
				if (pair.getRight() != null && isNotBlank(pair.getLeft()) && pair.getRight().getIdElement().isEmpty()) {
					// Only "urn:" style entry URLs are used as resource ids here
					if (pair.getLeft().startsWith("urn:")) {
						pair.getRight().setId(pair.getLeft());
					}
				}
			}

		}
	}

	// Walk the finished resource and point every local ("#id") reference at the
	// matching contained resource collected during parsing.
	protected void weaveContainedResources() {
		FhirTerser terser = myContext.newTerser();
		terser.visit(myInstance, new IModelVisitor() {

			@Override
			public void acceptElement(IBaseResource theResource, IBase theElement, List<String> thePathToElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition<?> theDefinition) {
				if (theElement instanceof BaseResourceReferenceDt) {
					// HAPI-model reference
					BaseResourceReferenceDt nextRef = (BaseResourceReferenceDt) theElement;
					String ref = nextRef.getReference().getValue();
					if (isNotBlank(ref)) {
						if (ref.startsWith("#")) {
							IResource target = (IResource) myContainedResources.get(ref);
							if (target != null) {
								ourLog.debug("Resource contains local ref {} in field {}", ref, thePathToElement);
								nextRef.setResource(target);
							} else {
								myErrorHandler.unknownReference(null, ref);
							}
						}
					}
				} else if (theElement instanceof IBaseReference) {
					// HL7.org-model reference
					IBaseReference nextRef = (IBaseReference) theElement;
					String ref = nextRef.getReferenceElement().getValue();
					if (isNotBlank(ref)) {
						if (ref.startsWith("#")) {
							IBaseResource target = myContainedResources.get(ref);
							if (target != null) {
								ourLog.debug("Resource contains local ref {} in field {}", ref, thePathToElement);
								nextRef.setResource(target);
							} else {
								myErrorHandler.unknownReference(null, ref);
							}
						}
					}
				}
			}
		});
	}

	@Override
	public void wereBack() {
		postProcess();
	}

}

private
class PreResourceStateHapi extends PreResourceState {
	// Optional destination for the parsed resource (e.g. a parent's child mutator)
	private IMutator myMutator;
	private Object myTarget;

	public PreResourceStateHapi(Class<? extends IBaseResource> theResourceType) {
		super(theResourceType);
		assert theResourceType == null || IResource.class.isAssignableFrom(theResourceType);
	}

	public PreResourceStateHapi(Object theTarget, IMutator theMutator, Class<? extends IBaseResource> theResourceType) {
		super(theResourceType);
		myTarget = theTarget;
		myMutator = theMutator;
		assert theResourceType == null || IResource.class.isAssignableFrom(theResourceType);
	}

	// @Override
	// public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
	// super.enteringNewElement(theNamespaceUri, theLocalPart);
	// populateTarget();
	// }

	@Override
	protected void populateTarget() {
		weaveContainedResources();
		if (myMutator != null) {
			myMutator.setValue(myTarget, getCurrentElement());
		}
	}

	@Override
	public void wereBack() {
		super.wereBack();

		// Re-qualify the resource id with resource type and metadata version
		IResource nextResource = (IResource) getCurrentElement();
		String version = ResourceMetadataKeyEnum.VERSION.get(nextResource);
		String resourceName = myContext.getResourceDefinition(nextResource).getName();
		String bundleIdPart = nextResource.getId().getIdPart();
		if (isNotBlank(bundleIdPart)) {
			// if (isNotBlank(entryBaseUrl)) {
			// nextResource.setId(new IdDt(entryBaseUrl, resourceName, bundleIdPart, version));
			// } else {
			IdDt previousId = nextResource.getId();
			nextResource.setId(new IdDt(null, resourceName, bundleIdPart, version));
			// Copy extensions
			if (!previousId.getAllUndeclaredExtensions().isEmpty()) {
				for (final ExtensionDt ext : previousId.getAllUndeclaredExtensions()) {
					nextResource.getId().addUndeclaredExtension(ext);
				}
			}
			// }
		}
	}

}

// HL7.org-model counterpart of PreResourceStateHapi.
private class PreResourceStateHl7Org extends PreResourceState {

	private IMutator myMutator;
	private Object myTarget;

	public PreResourceStateHl7Org(Class<?
			extends IBaseResource> theResourceType) {
		super(theResourceType);
	}

	public PreResourceStateHl7Org(Object theTarget, IMutator theMutator, Class<? extends IBaseResource> theResourceType) {
		super(theResourceType);
		myMutator = theMutator;
		myTarget = theTarget;
	}

	@Override
	protected void populateTarget() {
		weaveContainedResources();
		if (myMutator != null) {
			myMutator.setValue(myTarget, getCurrentElement());
		}
	}

	@Override
	public void wereBack() {
		super.wereBack();

		// Qualify the id as "Type/id[/_history/version]" once parsing completes
		if (getCurrentElement() instanceof IDomainResource) {
			IDomainResource elem = (IDomainResource) getCurrentElement();
			String resourceName = myContext.getResourceDefinition(elem).getName();
			String versionId = elem.getMeta().getVersionId();
			if (StringUtils.isBlank(elem.getIdElement().getIdPart())) {
				// Resource has no ID
			} else if (StringUtils.isNotBlank(versionId)) {
				elem.getIdElement().setValue(resourceName + "/" + elem.getIdElement().getIdPart() + "/_history/" + versionId);
			} else {
				elem.getIdElement().setValue(resourceName + "/" + elem.getIdElement().getIdPart());
			}
		}
	}

}

// Root state for parsing a standalone TagList document.
private class PreTagListState extends BaseState {

	private TagList myTagList;

	public PreTagListState() {
		super(null);
		myTagList = new TagList();
	}

	@Override
	public void endingElement() throws DataFormatException {
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		if (!TagList.ELEMENT_NAME_LC.equals(theLocalPart.toLowerCase())) {
			throw new DataFormatException("resourceType does not appear to be 'TagList', found: " + theLocalPart);
		}

		push(new TagListState(myTagList));
	}

	@Override
	protected IBase getCurrentElement() {
		return myTagList;
	}

	@Override
	public boolean isPreResource() {
		return true;
	}

}

// Populates a primitive datatype from its "value" attribute.
private class PrimitiveState extends BaseState {
	private IPrimitiveType<?> myInstance;

	public PrimitiveState(PreResourceState thePreResourceState, IPrimitiveType<?> theInstance) {
		super(thePreResourceState);
		myInstance = theInstance;
	}

	@Override
	public void attributeValue(String
theName, String theValue) throws DataFormatException { if ("value".equals(theName)) { if ("".equals(theValue)) { myErrorHandler.invalidValue(null, theValue, "Attribute values must not be empty (\"\")"); } else { try { myInstance.setValueAsString(theValue); } catch (DataFormatException e) { myErrorHandler.invalidValue(null, theValue, e.getMessage()); } catch (IllegalArgumentException e) { myErrorHandler.invalidValue(null, theValue, e.getMessage()); } } } else if ("id".equals(theName)) { if (myInstance instanceof IIdentifiableElement) { ((IIdentifiableElement) myInstance).setElementSpecificId(theValue); } else if (myInstance instanceof IBaseElement) { ((IBaseElement) myInstance).setId(theValue); } else if (myInstance instanceof IBaseResource) { new IdDt(theValue).applyTo((org.hl7.fhir.instance.model.api.IBaseResource) myInstance); } else { myErrorHandler.unknownAttribute(null, theName); } } else { myErrorHandler.unknownAttribute(null, theName); } } @Override public void endingElement() { pop(); } // @Override // public void enteringNewElementExtension(StartElement theElement, // String theUrlAttr) { // if (myInstance instanceof ISupportsUndeclaredExtensions) { // UndeclaredExtension ext = new UndeclaredExtension(theUrlAttr); // ((ISupportsUndeclaredExtensions) // myInstance).getUndeclaredExtensions().add(ext); // push(new ExtensionState(ext)); // } // } @Override public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException { myErrorHandler.unknownElement(null, theLocalPart); push(new SwallowChildrenWholeState(getPreResourceState())); return; } @Override protected IBase getCurrentElement() { return myInstance; } } private class ResourceStateHapi extends ElementCompositeState { private IResource myInstance; public ResourceStateHapi(PreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition<?> theDef, IResource theInstance) { super(thePreResourceState, theDef.getName(), theDef, theInstance); myInstance = 
				theInstance;
	}

	@Override
	public void enteringNewElement(String theNamespace, String theChildName) throws DataFormatException {
		// "id" and "meta" are routed to dedicated states; everything else is a
		// regular composite child
		if ("id".equals(theChildName)) {
			push(new PrimitiveState(getPreResourceState(), myInstance.getId()));
		} else if ("meta".equals(theChildName)) {
			push(new MetaElementState(getPreResourceState(), myInstance.getResourceMetadata()));
		} else {
			super.enteringNewElement(theNamespace, theChildName);
		}
	}

}

// Composite state for an HL7.org-model resource; no special-cased children.
private class ResourceStateHl7Org extends ElementCompositeState {

	public ResourceStateHl7Org(PreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition<?> theDef, IBaseResource theInstance) {
		super(thePreResourceState, theDef.getName(), theDef, theInstance);
	}

}

// Parses one meta/security Coding; the label itself was added to the metadata
// list by the caller (MetaElementState).
private class SecurityLabelElementStateHapi extends ElementCompositeState {

	public SecurityLabelElementStateHapi(ParserState<T>.PreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition<?> theDef, IBase codingDt) {
		super(thePreResourceState, theDef.getName(), theDef, codingDt);
	}

	@Override
	public void endingElement() throws DataFormatException {
		pop();
	}

}

// Discards an unexpected element and its whole subtree, tracking nesting depth
// so we pop exactly when the swallowed element closes.
private class SwallowChildrenWholeState extends BaseState {

	private int myDepth;

	public SwallowChildrenWholeState(PreResourceState thePreResourceState) {
		super(thePreResourceState);
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		// ignore
	}

	@Override
	public void endingElement() throws DataFormatException {
		myDepth--;
		if (myDepth < 0) {
			// The element that triggered this state has closed
			pop();
		}
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		myDepth++;
	}

	@Override
	public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) {
		myDepth++;
	}

}

// Parses the body of a TagList document: a sequence of "category" elements.
private class TagListState extends BaseState {

	private TagList myTagList;

	public TagListState(TagList theTagList) {
		super(null);
		myTagList = theTagList;
	}

	@Override
	public void endingElement() throws DataFormatException {
		pop();
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		if (TagList.ATTR_CATEGORY.equals(theLocalPart)) {
			push(new TagState(myTagList));
		} else {
			throw new DataFormatException("Unexpected element: " + theLocalPart);
		}
	}

	@Override
	protected IBase getCurrentElement() {
		return myTagList;
	}

}

// Parses a single tag (term/scheme/label, or DSTU2 code/system/display) and
// adds it to the tag list once the tag element closes.
private class TagState extends BaseState {

	private static final int LABEL = 2;
	private static final int NONE = 0;
	private static final int SCHEME = 3;
	private static final int TERM = 1;
	private String myLabel;
	private String myScheme;
	// Which sub-element we are currently inside (NONE = at the tag level)
	private int mySubState = 0;
	private TagList myTagList;
	private String myTerm;

	public TagState(TagList theTagList) {
		super(null);
		myTagList = theTagList;
	}

	@Override
	public void attributeValue(String theName, String theValue) throws DataFormatException {
		// Blank values are normalized to null
		String value = defaultIfBlank(theValue, null);

		switch (mySubState) {
		case TERM:
			myTerm = (value);
			break;
		case LABEL:
			myLabel = (value);
			break;
		case SCHEME:
			myScheme = (value);
			break;
		case NONE:
			// This handles JSON encoding, which is a bit weird
			enteringNewElement(null, theName);
			attributeValue(null, value);
			endingElement();
			break;
		}
	}

	@Override
	public void endingElement() throws DataFormatException {
		if (mySubState != NONE) {
			// Leaving a sub-element; return to the tag level
			mySubState = NONE;
		} else {
			// Leaving the tag itself: commit it if any component was set
			if (isNotEmpty(myScheme) || isNotBlank(myTerm) || isNotBlank(myLabel)) {
				myTagList.addTag(myScheme, myTerm, myLabel);
			}
			pop();
		}
	}

	@Override
	public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException {
		/*
		 * We allow for both the DSTU1 and DSTU2 names here
		 */
		if (Tag.ATTR_TERM.equals(theLocalPart) || "code".equals(theLocalPart)) {
			mySubState = TERM;
		} else if (Tag.ATTR_SCHEME.equals(theLocalPart) || "system".equals(theLocalPart)) {
			mySubState = SCHEME;
		} else if (Tag.ATTR_LABEL.equals(theLocalPart) || "display".equals(theLocalPart)) {
			mySubState = LABEL;
		} else {
			throw new DataFormatException("Unexpected element: " + theLocalPart);
		}
	}

}
private class XhtmlState extends BaseState { private int myDepth; private XhtmlDt myDt; private List<XMLEvent> myEvents = new ArrayList<XMLEvent>(); private boolean myIncludeOuterEvent; private XhtmlState(PreResourceState thePreResourceState, XhtmlDt theXhtmlDt, boolean theIncludeOuterEvent) throws DataFormatException { super(thePreResourceState); myDepth = 0; myDt = theXhtmlDt; myIncludeOuterEvent = theIncludeOuterEvent; } @Override public void attributeValue(String theName, String theValue) throws DataFormatException { if (myJsonMode) { myDt.setValueAsString(theValue); } else { // IGNORE - don't handle this as an error, we process these as XML events } } protected void doPop() { pop(); } @Override public void endingElement() throws DataFormatException { if (myJsonMode) { doPop(); return; } super.endingElement(); } @Override public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException { // IGNORE - don't handle this as an error, we process these as XML events } @Override protected IElement getCurrentElement() { return myDt; } public XhtmlDt getDt() { return myDt; } @Override public void xmlEvent(XMLEvent theEvent) { if (theEvent.isEndElement()) { myDepth--; } if (myIncludeOuterEvent || myDepth > 0) { myEvents.add(theEvent); } if (theEvent.isStartElement()) { myDepth++; } if (theEvent.isEndElement()) { if (myDepth == 0) { myDt.setValue(myEvents); doPop(); } } } } private class XhtmlStateHl7Org extends XhtmlState { private IBaseXhtml myHl7OrgDatatype; private XhtmlStateHl7Org(PreResourceState thePreResourceState, IBaseXhtml theHl7OrgDatatype) { super(thePreResourceState, new XhtmlDt(), true); myHl7OrgDatatype = theHl7OrgDatatype; } @Override public void doPop() { // TODO: this is not very efficient String value = getDt().getValueAsString(); myHl7OrgDatatype.setValueAsString(value); super.doPop(); } } }
/** * JBoss, Home of Professional Open Source * Copyright Red Hat, Inc., and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.aerogear.unifiedpush.rest.registry.applications; import java.util.Map; import java.util.UUID; import javax.inject.Inject; import javax.validation.ConstraintViolationException; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriBuilder; import org.jboss.aerogear.unifiedpush.api.PushApplication; import org.jboss.aerogear.unifiedpush.api.Variant; import org.jboss.aerogear.unifiedpush.dao.InstallationDao; import org.jboss.aerogear.unifiedpush.dao.PageResult; import org.jboss.aerogear.unifiedpush.dto.Count; import org.jboss.aerogear.unifiedpush.rest.AbstractBaseEndpoint; import org.jboss.aerogear.unifiedpush.service.PushApplicationService; import org.jboss.aerogear.unifiedpush.service.metrics.PushMessageMetricsService; import com.qmino.miredot.annotations.ReturnType; @Path("/applications") public class PushApplicationEndpoint extends AbstractBaseEndpoint { private static final int MAX_PAGE_SIZE = 25; private 
static final int DEFAULT_PAGE_SIZE = 8; @Inject private PushApplicationService pushAppService; @Inject private PushMessageMetricsService metricsService; @Inject private InstallationDao installationDao; /** * Create Push Application * * @param pushApp new {@link PushApplication} * @return created {@link PushApplication} * * @statuscode 201 The PushApplication Variant created successfully * @statuscode 400 The format of the client request was incorrect */ @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) @ReturnType("org.jboss.aerogear.unifiedpush.api.PushApplication") public Response registerPushApplication(PushApplication pushApp) { // some validation try { validateModelClass(pushApp); } catch (ConstraintViolationException cve) { // Build and return the 400 (Bad Request) response ResponseBuilder builder = createBadRequestResponse(cve.getConstraintViolations()); return builder.build(); } pushAppService.addPushApplication(pushApp); return Response.created(UriBuilder.fromResource(PushApplicationEndpoint.class).path(String.valueOf(pushApp.getPushApplicationID())).build()).entity(pushApp) .build(); } /** * List Push Applications * * @param page page number * @param pageSize number of items per page * @param includeDeviceCount put device count into response headers, default {@code false} * @param includeActivity put activity into response headers, default {@code false} * @return list of {@link PushApplication}s * * @responseheader total Total count of items * @responseheader activity_app_{pushApplicationID} Count number of messages for Push Application * @responseheader activity_variant_{variantID} Count number of messages for Variant * @responseheader deviceCount_app_{pushApplicationID} Count number of devices for Push Application * @responseheader deviceCount_variant_{variantID} Count number of devices for Variant */ @GET @Produces(MediaType.APPLICATION_JSON) 
@ReturnType("java.util.List<org.jboss.aerogear.unifiedpush.api.PushApplication>") public Response listAllPushApplications(@QueryParam("page") Integer page, @QueryParam("per_page") Integer pageSize, @QueryParam("includeDeviceCount") @DefaultValue("false") boolean includeDeviceCount, @QueryParam("includeActivity") @DefaultValue("false") boolean includeActivity) { if (pageSize != null) { pageSize = Math.min(MAX_PAGE_SIZE, pageSize); } else { pageSize = DEFAULT_PAGE_SIZE; } if (page == null) { page = 0; } final PageResult<PushApplication, Count> pageResult = getSearch().findAllPushApplicationsForDeveloper(page, pageSize); ResponseBuilder response = Response.ok(pageResult.getResultList()); response.header("total", pageResult.getAggregate().getCount()); for (PushApplication app : pageResult.getResultList()) { if (includeActivity) { putActivityIntoResponseHeaders(app, response); } if (includeDeviceCount) { putDeviceCountIntoResponseHeaders(app, response); } } return response.build(); } /** * Get Push Application. 
* * @param pushApplicationID id of {@link PushApplication} * @param includeDeviceCount boolean param to put device count into response headers, default {@code false} * @param includeActivity boolean param to put activity into response headers, default {@code false} * @return requested {@link PushApplication} * * @responseheader activity_app_{pushApplicationID} Count number of messages for Push Application * @responseheader activity_variant_{variantID} Count number of messages for Variant * @responseheader deviceCount_app_{pushApplicationID} Count number of devices for Push Application * @responseheader deviceCount_variant_{variantID} Count number of devices for Variant * * @statuscode 404 The requested PushApplication resource does not exist */ @GET @Path("/{pushAppID}") @Produces(MediaType.APPLICATION_JSON) @ReturnType("org.jboss.aerogear.unifiedpush.api.PushApplication") public Response findById( @PathParam("pushAppID") String pushApplicationID, @QueryParam("includeDeviceCount") @DefaultValue("false") boolean includeDeviceCount, @QueryParam("includeActivity") @DefaultValue("false") boolean includeActivity) { PushApplication pushApp = getSearch().findByPushApplicationIDForDeveloper(pushApplicationID); if (pushApp != null) { ResponseBuilder response = Response.ok(pushApp); if (includeActivity) { putActivityIntoResponseHeaders(pushApp, response); } if (includeDeviceCount) { putDeviceCountIntoResponseHeaders(pushApp, response); } return response.build(); } return Response.status(Status.NOT_FOUND).entity("Could not find requested PushApplicationEntity").build(); } private void putActivityIntoResponseHeaders(PushApplication app, ResponseBuilder response) { response.header("activity_app_" + app.getPushApplicationID(), metricsService.countMessagesForPushApplication(app.getPushApplicationID())); } private void putDeviceCountIntoResponseHeaders(PushApplication app, ResponseBuilder response) { long appCount = 0; for (Variant variant : app.getVariants()) { long variantCount 
= installationDao.getNumberOfDevicesForVariantID(variant.getVariantID()); appCount += variantCount; response.header("deviceCount_variant_" + variant.getVariantID(), variantCount); } response.header("deviceCount_app_" + app.getPushApplicationID(), appCount); } /** * Update Push Application * * @param pushApplicationID id of {@link PushApplication} * @param updatedPushApp new info of {@link PushApplication} * * @statuscode 204 The PushApplication updated successfully * @statuscode 400 The format of the client request was incorrect * @statuscode 404 The requested PushApplication resource does not exist */ @PUT @Path("/{pushAppID}") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) @ReturnType("java.lang.Void") public Response updatePushApplication(@PathParam("pushAppID") String pushApplicationID, PushApplication updatedPushApp) { PushApplication pushApp = getSearch().findByPushApplicationIDForDeveloper(pushApplicationID); if (pushApp != null) { // some validation try { validateModelClass(updatedPushApp); } catch (ConstraintViolationException cve) { // Build and return the 400 (Bad Request) response ResponseBuilder builder = createBadRequestResponse(cve.getConstraintViolations()); return builder.build(); } // update name/desc: pushApp.setDescription(updatedPushApp.getDescription()); pushApp.setName(updatedPushApp.getName()); pushAppService.updatePushApplication(pushApp); return Response.noContent().build(); } return Response.status(Status.NOT_FOUND).entity("Could not find requested PushApplicationEntity").build(); } /** * Reset MasterSecret for Push Application * * @param pushApplicationID id of {@link PushApplication} * @return updated {@link PushApplication} * * @statuscode 204 The MasterSecret for Push Application reset successfully * @statuscode 404 The requested PushApplication resource does not exist */ @PUT @Path("/{pushAppID}/reset") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) 
@ReturnType("org.jboss.aerogear.unifiedpush.api.PushApplication") public Response resetMasterSecret(@PathParam("pushAppID") String pushApplicationID) { //PushApplication pushApp = pushAppService.findByPushApplicationIDForDeveloper(pushApplicationID, extractUsername(request)); PushApplication pushApp = getSearch().findByPushApplicationIDForDeveloper(pushApplicationID); if (pushApp != null) { // generate the new 'masterSecret' and apply it: String newMasterSecret = UUID.randomUUID().toString(); pushApp.setMasterSecret(newMasterSecret); pushAppService.updatePushApplication(pushApp); return Response.ok(pushApp).build(); } return Response.status(Status.NOT_FOUND).entity("Could not find requested PushApplicationEntity").build(); } /** * Delete Push Application * * @param pushApplicationID id of {@link PushApplication} * * @statuscode 204 The PushApplication successfully deleted * @statuscode 404 The requested PushApplication resource does not exist */ @DELETE @Path("/{pushAppID}") @Produces(MediaType.APPLICATION_JSON) @ReturnType("java.lang.Void") public Response deletePushApplication(@PathParam("pushAppID") String pushApplicationID) { PushApplication pushApp = getSearch().findByPushApplicationIDForDeveloper(pushApplicationID); if (pushApp != null) { pushAppService.removePushApplication(pushApp); return Response.noContent().build(); } return Response.status(Status.NOT_FOUND).entity("Could not find requested PushApplicationEntity").build(); } /** * Count Push Applications * * @param pushApplicationID id of {@link PushApplication} * @return count number for each {@link org.jboss.aerogear.unifiedpush.api.VariantType} */ @GET @Path("/{pushAppID}/count") @Produces(MediaType.APPLICATION_JSON) @ReturnType("java.util.Map<java.lang.String, java.lang.Long>") public Response countInstallations(@PathParam("pushAppID") String pushApplicationID) { Map<String, Long> result = pushAppService.countInstallationsByType(pushApplicationID); return Response.ok(result).build(); } }
package testing; import org.junit.Test; import app_kvEcs.ECS; import client.KVStore; import junit.framework.TestCase; import common.messages.KVMessage; import common.messages.KVMessage.StatusType; public class AdditionalTest extends TestCase { private KVStore kvClient; private ECS ecs; @Test public void testServer_stopped() { ecs = new ECS(); ecs.initService(1); String add = ecs.metaData.get("1"); String[] ss = add.split(":"); kvClient = new KVStore(ss[0], Integer.valueOf(ss[1])); try { kvClient.connect(); } catch (Exception e) { } String key = "foo"; String value = "bar"; KVMessage response = null; Exception ex = null; try { response = kvClient.put(key, value); } catch (Exception e) { ex = e; } ecs.shutDown(); try { kvClient.disconnect(); } catch (Exception e) { System.out.println("Error! Cannot disconnect"); } try { Thread.sleep(1000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } assertTrue(ex == null && response.getStatus() == StatusType.SERVER_STOPPED); } public void testMetadataUpdate() { ecs = new ECS(); ecs.initService(2); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } ecs.start(); String add = ecs.metaData.get("1"); String[] ss = add.split(":"); kvClient = new KVStore(ss[0], Integer.valueOf(ss[1])); try { kvClient.connect(); } catch (Exception e) { } String key = "foo"; String value = "bar"; KVMessage response = null; Exception ex = null; try { response = kvClient.put(key, value); } catch (Exception e) { ex = e; } ecs.shutDown(); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { kvClient.disconnect(); } catch (Exception e) { System.out.println("Error! 
Cannot disconnect"); } if (response.getStatus() == StatusType.SERVER_NOT_RESPONSIBLE) assertTrue(ex == null && response.getMetaData() != null); } @Test public void testConsistentHash() { ecs = new ECS(); ecs.initService(2); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } ecs.start(); String add = ecs.metaData.get("1"); String[] ss = add.split(":"); kvClient = new KVStore(ss[0], Integer.valueOf(ss[1])); try { kvClient.connect(); } catch (Exception e) { } String key = "foo"; String value = "bar"; KVMessage response = null; Exception ex = null; try { response = kvClient.put(key, value); } catch (Exception e) { ex = e; } ecs.shutDown(); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { kvClient.disconnect(); } catch (Exception e) { System.out.println("Error! Cannot disconnect"); } if (response.getStatus() == StatusType.SERVER_NOT_RESPONSIBLE) { String newAd = response.getMetaData().get(key); String[] AD = newAd.split(":"); assertTrue(ex == null && Integer.valueOf(AD[1]) != Integer.valueOf(ss[1])); } } @Test public void testAddNode() { ecs = new ECS(); ecs.initService(2); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } ecs.start(); int Prior_Number_of_Nodes = ecs.metaData.getCircle().keySet().size(); ecs.addNode(); int Post_Number_of_Nodes = ecs.metaData.getCircle().keySet().size(); ecs.shutDown(); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { kvClient.disconnect(); } catch (Exception e) { System.out.println("Error! 
Cannot disconnect"); } assertTrue(Post_Number_of_Nodes == Prior_Number_of_Nodes + 1); } @Test public void testRemoveNode() { ecs = new ECS(); ecs.initService(3); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } ecs.start(); int Prior_Number_of_Nodes = ecs.metaData.getCircle().keySet().size(); ecs.removeNode(); int Post_Number_of_Nodes = ecs.metaData.getCircle().keySet().size(); ecs.shutDown(); // Wait for latency try { Thread.sleep(2000); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { kvClient.disconnect(); } catch (Exception e) { System.out.println("Error! Cannot disconnect"); } assertTrue(Post_Number_of_Nodes == Prior_Number_of_Nodes - 1); } }
/* * This file is generated by jOOQ. */ package generated.rx.async.vertx.tables.pojos; import generated.rx.async.vertx.tables.interfaces.ISomething; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import javax.annotation.Generated; /** * This class is generated by jOOQ. */ @Generated( value = { "http://www.jooq.org", "jOOQ version:3.10.1" }, comments = "This class is generated by jOOQ" ) @SuppressWarnings({ "all", "unchecked", "rawtypes" }) public class Something implements ISomething { private static final long serialVersionUID = 1988493579; private Integer someid; private String somestring; private Long somehugenumber; private Short somesmallnumber; private Integer someregularnumber; private Double somedouble; private String someenum; private JsonObject somejsonobject; private JsonArray somejsonarray; public Something() {} public Something(Something value) { this.someid = value.someid; this.somestring = value.somestring; this.somehugenumber = value.somehugenumber; this.somesmallnumber = value.somesmallnumber; this.someregularnumber = value.someregularnumber; this.somedouble = value.somedouble; this.someenum = value.someenum; this.somejsonobject = value.somejsonobject; this.somejsonarray = value.somejsonarray; } public Something( Integer someid, String somestring, Long somehugenumber, Short somesmallnumber, Integer someregularnumber, Double somedouble, String someenum, JsonObject somejsonobject, JsonArray somejsonarray ) { this.someid = someid; this.somestring = somestring; this.somehugenumber = somehugenumber; this.somesmallnumber = somesmallnumber; this.someregularnumber = someregularnumber; this.somedouble = somedouble; this.someenum = someenum; this.somejsonobject = somejsonobject; this.somejsonarray = somejsonarray; } @Override public Integer getSomeid() { return this.someid; } @Override public Something setSomeid(Integer someid) { this.someid = someid; return this; } @Override public String getSomestring() { return this.somestring; 
} @Override public Something setSomestring(String somestring) { this.somestring = somestring; return this; } @Override public Long getSomehugenumber() { return this.somehugenumber; } @Override public Something setSomehugenumber(Long somehugenumber) { this.somehugenumber = somehugenumber; return this; } @Override public Short getSomesmallnumber() { return this.somesmallnumber; } @Override public Something setSomesmallnumber(Short somesmallnumber) { this.somesmallnumber = somesmallnumber; return this; } @Override public Integer getSomeregularnumber() { return this.someregularnumber; } @Override public Something setSomeregularnumber(Integer someregularnumber) { this.someregularnumber = someregularnumber; return this; } @Override public Double getSomedouble() { return this.somedouble; } @Override public Something setSomedouble(Double somedouble) { this.somedouble = somedouble; return this; } @Override public String getSomeenum() { return this.someenum; } @Override public Something setSomeenum(String someenum) { this.someenum = someenum; return this; } @Override public JsonObject getSomejsonobject() { return this.somejsonobject; } @Override public Something setSomejsonobject(JsonObject somejsonobject) { this.somejsonobject = somejsonobject; return this; } @Override public JsonArray getSomejsonarray() { return this.somejsonarray; } @Override public Something setSomejsonarray(JsonArray somejsonarray) { this.somejsonarray = somejsonarray; return this; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; final Something other = (Something) obj; if (someid == null) { if (other.someid != null) return false; } else if (!someid.equals(other.someid)) return false; if (somestring == null) { if (other.somestring != null) return false; } else if (!somestring.equals(other.somestring)) return false; if (somehugenumber == null) { if (other.somehugenumber != null) return false; } 
else if (!somehugenumber.equals(other.somehugenumber)) return false; if (somesmallnumber == null) { if (other.somesmallnumber != null) return false; } else if (!somesmallnumber.equals(other.somesmallnumber)) return false; if (someregularnumber == null) { if (other.someregularnumber != null) return false; } else if (!someregularnumber.equals(other.someregularnumber)) return false; if (somedouble == null) { if (other.somedouble != null) return false; } else if (!somedouble.equals(other.somedouble)) return false; if (someenum == null) { if (other.someenum != null) return false; } else if (!someenum.equals(other.someenum)) return false; if (somejsonobject == null) { if (other.somejsonobject != null) return false; } else if (!somejsonobject.equals(other.somejsonobject)) return false; if (somejsonarray == null) { if (other.somejsonarray != null) return false; } else if (!somejsonarray.equals(other.somejsonarray)) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((this.someid == null) ? 0 : this.someid.hashCode()); result = prime * result + ((this.somestring == null) ? 0 : this.somestring.hashCode()); result = prime * result + ((this.somehugenumber == null) ? 0 : this.somehugenumber.hashCode()); result = prime * result + ((this.somesmallnumber == null) ? 0 : this.somesmallnumber.hashCode()); result = prime * result + ((this.someregularnumber == null) ? 0 : this.someregularnumber.hashCode()); result = prime * result + ((this.somedouble == null) ? 0 : this.somedouble.hashCode()); result = prime * result + ((this.someenum == null) ? 0 : this.someenum.hashCode()); result = prime * result + ((this.somejsonobject == null) ? 0 : this.somejsonobject.hashCode()); result = prime * result + ((this.somejsonarray == null) ? 
0 : this.somejsonarray.hashCode()); return result; } @Override public String toString() { StringBuilder sb = new StringBuilder("Something ("); sb.append(someid); sb.append(", ").append(somestring); sb.append(", ").append(somehugenumber); sb.append(", ").append(somesmallnumber); sb.append(", ").append(someregularnumber); sb.append(", ").append(somedouble); sb.append(", ").append(someenum); sb.append(", ").append(somejsonobject); sb.append(", ").append(somejsonarray); sb.append(")"); return sb.toString(); } // ------------------------------------------------------------------------- // FROM and INTO // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public void from(ISomething from) { setSomeid(from.getSomeid()); setSomestring(from.getSomestring()); setSomehugenumber(from.getSomehugenumber()); setSomesmallnumber(from.getSomesmallnumber()); setSomeregularnumber(from.getSomeregularnumber()); setSomedouble(from.getSomedouble()); setSomeenum(from.getSomeenum()); setSomejsonobject(from.getSomejsonobject()); setSomejsonarray(from.getSomejsonarray()); } /** * {@inheritDoc} */ @Override public <E extends ISomething> E into(E into) { into.from(this); return into; } public Something(io.vertx.core.json.JsonObject json) { fromJson(json); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gemstone.gemfire.internal.cache; // DO NOT modify this class. It was generated from LeafRegionEntry.cpp import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicLongFieldUpdater; import com.gemstone.gemfire.internal.cache.lru.EnableLRU; import com.gemstone.gemfire.internal.cache.persistence.DiskRecoveryStore; import com.gemstone.gemfire.internal.cache.lru.LRUClockNode; import com.gemstone.gemfire.internal.cache.lru.NewLRUClockHand; import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry; // macros whose definition changes this class: // disk: DISK // lru: LRU // stats: STATS // versioned: VERSIONED // offheap: OFFHEAP // One of the following key macros must be defined: // key object: KEY_OBJECT // key int: KEY_INT // key long: KEY_LONG // key uuid: KEY_UUID // key string1: KEY_STRING1 // key string2: KEY_STRING2 /** * Do not modify this class. It was generated. * Instead modify LeafRegionEntry.cpp and then run * bin/generateRegionEntryClasses.sh from the directory * that contains your build.xml. 
*/
// NOTE(review): generated file — code left byte-identical; only comments added.
public class VMThinDiskLRURegionEntryHeapLongKey extends VMThinDiskLRURegionEntryHeap {
  public VMThinDiskLRURegionEntryHeapLongKey (RegionEntryContext context,
      long key,
      Object value
      ) {
    super(context,
          (value instanceof RecoveredEntry ? null : value)
        );
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    this.key = key;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // common code
  protected int hash;
  private HashEntry<Object, Object> next;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  // Atomic updater for lastModified; avoids a per-entry AtomicLong object.
  private static final AtomicLongFieldUpdater<VMThinDiskLRURegionEntryHeapLongKey> lastModifiedUpdater
    = AtomicLongFieldUpdater.newUpdater(VMThinDiskLRURegionEntryHeapLongKey.class, "lastModified");
  private volatile Object value;
  @Override
  protected final Object getValueField() {
    return this.value;
  }
  @Override
  protected void setValueField(Object v) {
    this.value = v;
  }
  protected long getlastModifiedField() {
    return lastModifiedUpdater.get(this);
  }
  protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }
  /**
   * @see HashEntry#getEntryHash()
   */
  public final int getEntryHash() {
    return this.hash;
  }
  protected void setEntryHash(int v) {
    this.hash = v;
  }
  /**
   * @see HashEntry#getNextEntry()
   */
  public final HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }
  /**
   * @see HashEntry#setNextEntry
   */
  public final void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // disk code
  protected void initialize(RegionEntryContext drs, Object value) {
    boolean isBackup;
    if (drs instanceof LocalRegion) {
      isBackup = ((LocalRegion)drs).getDiskRegion().isBackup();
    } else if (drs instanceof PlaceHolderDiskRegion) {
      isBackup = true;
    } else {
      throw new IllegalArgumentException("expected a LocalRegion or PlaceHolderDiskRegion");
    }
    // Delay the initialization of DiskID if overflow only
    if (isBackup) {
      diskInitialize(drs, value);
    }
  }
  @Override
  public final synchronized int updateAsyncEntrySize(EnableLRU capacityController) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(), null);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private void diskInitialize(RegionEntryContext context, Object value) {
    DiskRecoveryStore drs = (DiskRecoveryStore)context;
    DiskStoreImpl ds = drs.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    //get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true/* is persistence */, ds.needsLinkedList());
    Helper.initialize(this, drs, value);
  }
  /**
   * DiskId
   *
   * @since 5.1
   */
  protected DiskId id;//= new DiskId();
  public DiskId getDiskId() {
    return this.id;
  }
  @Override
  void setDiskId(RegionEntry old) {
    this.id = ((AbstractDiskRegionEntry)old).getDiskId();
  }
  //  // inlining DiskId
  //  // always have these fields
  //  /**
  //   * id consists of
  //   * most significant
  //   * 1 byte = users bits
  //   * 2-8 bytes = oplog id
  //   * least significant.
  //   *
  //   * The highest bit in the oplog id part is set to 1 if the oplog id
  //   * is negative.
  //   * @todo this field could be an int for an overflow only region
  //   */
  //  private long id;
  //  /**
  //   * Length of the bytes on disk.
  //   * This is always set. If the value is invalid then it will be set to 0.
  //   * The most significant bit is used by overflow to mark it as needing to be written.
  //   */
  //  protected int valueLength = 0;
  //  // have intOffset or longOffset
  //  // intOffset
  //  /**
  //   * The position in the oplog (the oplog offset) where this entry's value is
  //   * stored
  //   */
  //  private volatile int offsetInOplog;
  //  // longOffset
  //  /**
  //   * The position in the oplog (the oplog offset) where this entry's value is
  //   * stored
  //   */
  //  private volatile long offsetInOplog;
  //  // have overflowOnly or persistence
  //  // overflowOnly
  //  // no fields
  //  // persistent
  //  /** unique entry identifier * */
  //  private long keyId;
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // lru code
  @Override
  public void setDelayedDiskId(LocalRegion r) {
    DiskStoreImpl ds = r.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    this.id = DiskId.createDiskId(maxOplogSize, false /* over flow only */, ds.needsLinkedList());
  }
  public final synchronized int updateEntrySize(EnableLRU capacityController) {
    return updateEntrySize(capacityController, _getValue()); // OFHEAP: _getValue ok w/o incing refcount because we are synced and only getting the size
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  public final synchronized int updateEntrySize(EnableLRU capacityController,
                                                Object value) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(),
        value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    //   if ( debug ) log( "updateEntrySize key=" + getKey()
    //                     + (_getValue() == Token.INVALID ? " invalid" :
    //                        (_getValue() == Token.LOCAL_INVALID ? "local_invalid" :
    //                         (_getValue()==null ? " evicted" : " valid")))
    //                     + " oldSize=" + oldSize
    //                     + " newSize=" + this.size );
    return delta;
  }
  public final boolean testRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }
  @Override
  public final void setRecentlyUsed() {
    setBits(RECENTLY_USED);
  }
  public final void unsetRecentlyUsed() {
    clearBits(~RECENTLY_USED);
  }
  public final boolean testEvicted() {
    return areAnyBitsSet(EVICTED);
  }
  public final void setEvicted() {
    setBits(EVICTED);
  }
  public final void unsetEvicted() {
    clearBits(~EVICTED);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Doubly-linked LRU clock list pointers and the cached entry size.
  private LRUClockNode nextLRU;
  private LRUClockNode prevLRU;
  private int size;
  public final void setNextLRUNode( LRUClockNode next ) {
    this.nextLRU = next;
  }
  public final LRUClockNode nextLRUNode() {
    return this.nextLRU;
  }
  public final void setPrevLRUNode( LRUClockNode prev ) {
    this.prevLRU = prev;
  }
  public final LRUClockNode prevLRUNode() {
    return this.prevLRU;
  }
  public final int getEntrySize() {
    return this.size;
  }
  protected final void setEntrySize(int size) {
    this.size = size;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  //@Override
  //public StringBuilder appendFieldsToString(final StringBuilder sb) {
  //  StringBuilder result = super.appendFieldsToString(sb);
  //  result.append("; prev=").append(this.prevLRU==null?"null":"not null");
  //  result.append("; next=").append(this.nextLRU==null?"null":"not null");
  //  return result;
  //}
  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // key code
  private final long key;
  @Override
  public final Object getKey() {
    return this.key;
  }
  @Override
  public boolean isKeyEqual(Object k) {
    if (k instanceof Long) {
      return ((Long) k).longValue() == this.key;
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
package jp.sourceforge.ea2ddl.dao.bsentity;

import java.io.Serializable;
import java.util.*;

import org.seasar.dbflute.Entity;
import org.seasar.dbflute.dbmeta.DBMeta;

import jp.sourceforge.ea2ddl.dao.allcommon.DBMetaInstanceHandler;

/**
 * The entity of t_objecttypes that the type is TABLE. <br />
 * <pre>
 * [primary-key]
 *     
 * 
 * [column]
 *     Object_Type, Description, DesignObject, ImageID
 * 
 * [sequence]
 *     
 * 
 * [identity]
 *     
 * 
 * [version-no]
 *     
 * 
 * [foreign-table]
 *     
 * 
 * [referrer-table]
 *     
 * 
 * [foreign-property]
 *     
 * 
 * [referrer-property]
 *     
 * </pre>
 * @author DBFlute(AutoGenerator)
 */
public abstract class BsTObjecttypes implements Entity, Serializable {

    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    /** Serial version UID. (Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // -----------------------------------------------------
    //                                                Column
    //                                                ------
    /** Object_Type: {UQ : VARCHAR(50)} */
    protected String _objectType;

    /** Description: {VARCHAR(255)} */
    protected String _description;

    /** DesignObject: {NotNull : BIT} */
    protected Boolean _designobject;

    /** ImageID: {SMALLINT} */
    protected java.lang.Integer _imageid;

    // -----------------------------------------------------
    //                                              Internal
    //                                              --------
    /** The attribute of entity modified properties. (for S2Dao) */
    protected EntityModifiedProperties _modifiedProperties = newEntityModifiedProperties();

    // ===================================================================================
    //                                                                          Table Name
    //                                                                          ==========
    public String getTableDbName() {
        return "t_objecttypes";
    }

    public String getTablePropertyName() { // as JavaBeansRule
        return "TObjecttypes";
    }

    // ===================================================================================
    //                                                                              DBMeta
    //                                                                              ======
    // Looks up the DBMeta singleton for this table by its DB name.
    public DBMeta getDBMeta() {
        return DBMetaInstanceHandler.findDBMeta(getTableDbName());
    }

    // ===================================================================================
    //                                                           Classification Classifying
    //                                                           ==========================

    // ===================================================================================
    //                                                         Classification Determination
    //                                                         ============================

    // ===================================================================================
    //                                                            Classification Name/Alias
    //                                                            =========================

    // ===================================================================================
    //                                                                     Foreign Property
    //                                                                     ================

    // ===================================================================================
    //                                                                    Referrer Property
    //                                                                    =================

    // ===================================================================================
    //                                                                       Determination
    //                                                                       =============
    // Always false: t_objecttypes declares no primary-key column (see the
    // class javadoc), so an entity can never carry a primary-key value.
    public boolean hasPrimaryKeyValue() {
        return false;
    }

    // ===================================================================================
    //                                                                 Modified Properties
    //                                                                 ===================
    public Set<String> getModifiedPropertyNames() {
        return _modifiedProperties.getPropertyNames();
    }

    protected EntityModifiedProperties newEntityModifiedProperties() {
        return new EntityModifiedProperties();
    }

    public void clearModifiedPropertyNames() {
        _modifiedProperties.clear();
    }

    public boolean hasModification() {
        return !_modifiedProperties.isEmpty();
    }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    /**
     * If the all-column value of the other is same as this one, returns true.
     * Uses instanceof (not getClass), so subclasses compare equal to each
     * other when all columns match — the generated all-column convention.
     * @param other Other entity. (Nullable)
     * @return Comparing result. If other is null, returns false.
     */
    public boolean equals(Object other) {
        if (other == null || !(other instanceof BsTObjecttypes)) { return false; }
        final BsTObjecttypes otherEntity = (BsTObjecttypes)other;
        if (!helpComparingValue(getObjectType(), otherEntity.getObjectType())) { return false; }
        if (!helpComparingValue(getDescription(), otherEntity.getDescription())) { return false; }
        if (!helpComparingValue(getDesignobject(), otherEntity.getDesignobject())) { return false; }
        if (!helpComparingValue(getImageid(), otherEntity.getImageid())) { return false; }
        return true;
    }

    // Null-safe equality: two nulls are equal; null never equals non-null.
    protected boolean helpComparingValue(Object value1, Object value2) {
        if (value1 == null && value2 == null) { return true; }
        return value1 != null && value2 != null && value1.equals(value2);
    }

    /**
     * Calculates hash-code from all columns.
     * Simple additive combination of column hash codes (generator style);
     * consistent with equals() above, which also compares all columns.
     * @return Hash-code from all-columns.
     */
    public int hashCode() {
        int result = 17;
        if (this.getObjectType() != null) { result = result + this.getObjectType().hashCode(); }
        if (this.getDescription() != null) { result = result + this.getDescription().hashCode(); }
        if (this.getDesignobject() != null) { result = result + this.getDesignobject().hashCode(); }
        if (this.getImageid() != null) { result = result + this.getImageid().hashCode(); }
        return result;
    }

    /**
     * @return The view string of columns. (NotNull)
     */
    public String toString() {
        String delimiter = ",";
        StringBuilder sb = new StringBuilder();
        sb.append(delimiter).append(getObjectType());
        sb.append(delimiter).append(getDescription());
        sb.append(delimiter).append(getDesignobject());
        sb.append(delimiter).append(getImageid());
        // Drop the leading delimiter, then wrap the whole thing in braces.
        if (sb.length() > 0) { sb.delete(0, delimiter.length()); }
        sb.insert(0, "{").append("}");
        return sb.toString();
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * Object_Type: {UQ : VARCHAR(50)} <br />
     * @return The value of the column 'Object_Type'. (Nullable)
     */
    public String getObjectType() {
        return _objectType;
    }

    /**
     * Object_Type: {UQ : VARCHAR(50)} <br />
     * @param objectType The value of the column 'Object_Type'. (Nullable)
     */
    public void setObjectType(String objectType) {
        _modifiedProperties.addPropertyName("objectType");
        this._objectType = objectType;
    }

    /**
     * Description: {VARCHAR(255)} <br />
     * @return The value of the column 'Description'. (Nullable)
     */
    public String getDescription() {
        return _description;
    }

    /**
     * Description: {VARCHAR(255)} <br />
     * @param description The value of the column 'Description'. (Nullable)
     */
    public void setDescription(String description) {
        _modifiedProperties.addPropertyName("description");
        this._description = description;
    }

    /**
     * DesignObject: {NotNull : BIT} <br />
     * @return The value of the column 'DesignObject'. (Nullable)
     */
    public Boolean getDesignobject() {
        return _designobject;
    }

    /**
     * DesignObject: {NotNull : BIT} <br />
     * @param designobject The value of the column 'DesignObject'. (Nullable)
     */
    public void setDesignobject(Boolean designobject) {
        _modifiedProperties.addPropertyName("designobject");
        this._designobject = designobject;
    }

    /**
     * ImageID: {SMALLINT} <br />
     * @return The value of the column 'ImageID'. (Nullable)
     */
    public java.lang.Integer getImageid() {
        return _imageid;
    }

    /**
     * ImageID: {SMALLINT} <br />
     * @param imageid The value of the column 'ImageID'. (Nullable)
     */
    public void setImageid(java.lang.Integer imageid) {
        _modifiedProperties.addPropertyName("imageid");
        this._imageid = imageid;
    }
}
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.python;

import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.rules.python.PythonTestUtils.assumesDefaultIsPY2;
import static com.google.devtools.build.lib.testutil.MoreAsserts.assertThrows;

import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.util.ConfigurationTestCase;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.TriState;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Tests for {@link PythonOptions} and {@link PythonConfiguration}. */
@RunWith(JUnit4.class)
public class PythonConfigurationTest extends ConfigurationTestCase {

  /** Parses the command line into a configuration and extracts its PythonOptions fragment. */
  private PythonOptions parsePythonOptions(String... cmdline) throws Exception {
    BuildConfiguration config = create(cmdline);
    return config.getOptions().get(PythonOptions.class);
  }

  @Test
  public void invalidTargetPythonValue_NotATargetValue() {
    // PY2AND3 is a srcs_version value, not a target version, so parsing must fail.
    OptionsParsingException expected =
        assertThrows(OptionsParsingException.class, () -> create("--force_python=PY2AND3"));
    assertThat(expected).hasMessageThat().contains("Not a valid Python major version");
  }

  @Test
  public void invalidTargetPythonValue_UnknownValue() {
    OptionsParsingException expected =
        assertThrows(
            OptionsParsingException.class,
            () -> create("--force_python=BEETLEJUICE"));
    assertThat(expected).hasMessageThat().contains("Not a valid Python major version");
  }

  @Test
  public void oldVersionFlagGatedByIncompatibleFlag() throws Exception {
    // With the incompatible flag off, --force_python still parses successfully...
    create("--incompatible_remove_old_python_version_api=false", "--force_python=PY2");
    // ...but with it on, using --force_python is an error.
    checkError(
        "`--force_python` is disabled by `--incompatible_remove_old_python_version_api`",
        "--incompatible_remove_old_python_version_api=true",
        "--force_python=PY2");
  }

  @Test
  public void py3IsDefaultFlagRequiresNewSemanticsFlag() throws Exception {
    checkError(
        "cannot enable `--incompatible_py3_is_default` without also enabling "
            + "`--incompatible_allow_python_version_transitions`",
        "--incompatible_allow_python_version_transitions=false",
        "--incompatible_py3_is_default=true");
  }

  @Test
  public void getDefaultPythonVersion() throws Exception {
    // --incompatible_py3_is_default requires --incompatible_allow_python_version_transitions
    PythonOptions withoutPy3IsDefaultOpts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_py3_is_default=false");
    PythonOptions withPy3IsDefaultOpts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_py3_is_default=true");
    assertThat(withoutPy3IsDefaultOpts.getDefaultPythonVersion()).isEqualTo(PythonVersion.PY2);
    assertThat(withPy3IsDefaultOpts.getDefaultPythonVersion()).isEqualTo(PythonVersion.PY3);
  }

  @Test
  public void getPythonVersion_FallBackOnDefaultPythonVersion() throws Exception {
    // Run it twice with two different values for the incompatible flag to confirm it's actually
    // reading getDefaultPythonVersion() and not some other source of default values. Note that
    // --incompatible_py3_is_default requires --incompatible_allow_python_version_transitions.
    PythonOptions py2Opts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_py3_is_default=false");
    PythonOptions py3Opts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_py3_is_default=true");
    assertThat(py2Opts.getPythonVersion()).isEqualTo(PythonVersion.PY2);
    assertThat(py3Opts.getPythonVersion()).isEqualTo(PythonVersion.PY3);
  }

  @Test
  public void getPythonVersion_NewFlagTakesPrecedence() throws Exception {
    assumesDefaultIsPY2();
    // --force_python is superseded by --python_version.
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_remove_old_python_version_api=false",
            "--force_python=PY2",
            "--python_version=PY3");
    assertThat(opts.getPythonVersion()).isEqualTo(PythonVersion.PY3);
  }

  @Test
  public void getPythonVersion_FallBackOnOldFlag() throws Exception {
    assumesDefaultIsPY2();
    // --force_python is used because --python_version is absent.
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_remove_old_python_version_api=false", "--force_python=PY3");
    assertThat(opts.getPythonVersion()).isEqualTo(PythonVersion.PY3);
  }

  @Test
  public void canTransitionPythonVersion_OldSemantics_Yes() throws Exception {
    assumesDefaultIsPY2();
    PythonOptions opts =
        parsePythonOptions("--incompatible_allow_python_version_transitions=false");
    assertThat(opts.canTransitionPythonVersion(PythonVersion.PY3)).isTrue();
  }

  @Test
  public void canTransitionPythonVersion_OldSemantics_NoBecauseAlreadySet() throws Exception {
    assumesDefaultIsPY2();
    // Under old semantics, once a version is set (via either flag), it cannot transition again.
    PythonOptions optsWithOldFlag =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=false",
            "--incompatible_remove_old_python_version_api=false",
            "--force_python=PY2");
    PythonOptions optsWithNewFlag =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=false", "--python_version=PY2");
    assertThat(optsWithOldFlag.canTransitionPythonVersion(PythonVersion.PY3)).isFalse();
    assertThat(optsWithNewFlag.canTransitionPythonVersion(PythonVersion.PY3)).isFalse();
  }

  @Test
  public void canTransitionPythonVersion_OldSemantics_NoBecauseNewValueSameAsDefault()
      throws Exception {
    assumesDefaultIsPY2();
    PythonOptions opts =
        parsePythonOptions("--incompatible_allow_python_version_transitions=false");
    assertThat(opts.canTransitionPythonVersion(PythonVersion.PY2)).isFalse();
  }

  @Test
  public void canTransitionPythonVersion_NewSemantics_Yes() throws Exception {
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true", "--python_version=PY3");
    assertThat(opts.canTransitionPythonVersion(PythonVersion.PY2)).isTrue();
  }

  @Test
  public void canTransitionPythonVersion_NewSemantics_NoBecauseSameAsCurrent() throws Exception {
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            // Set --force_python too, or else we fall into the "make --force_python consistent"
            // case.
            "--incompatible_remove_old_python_version_api=false",
            "--force_python=PY3",
            "--python_version=PY3");
    assertThat(opts.canTransitionPythonVersion(PythonVersion.PY3)).isFalse();
  }

  @Test
  public void canTransitionPythonVersion_NewApi_NoEvenWhenForcePythonDisagrees() throws Exception {
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_remove_old_python_version_api=false",
            // Test that even though --force_python's value isn't in sync, we don't transition
            // because getPythonVersion() would be unaffected by the transition.
            "--force_python=PY2",
            "--python_version=PY3");
    assertThat(opts.canTransitionPythonVersion(PythonVersion.PY3)).isFalse();
  }

  @Test
  public void setPythonVersion_OldApiEnabled() throws Exception {
    // When the old API is still enabled, setting the version updates BOTH flags.
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_remove_old_python_version_api=false",
            "--force_python=PY2",
            "--python_version=PY2");
    opts.setPythonVersion(PythonVersion.PY3);
    assertThat(opts.forcePython).isEqualTo(PythonVersion.PY3);
    assertThat(opts.pythonVersion).isEqualTo(PythonVersion.PY3);
  }

  @Test
  public void setPythonVersion_OldApiDisabled() throws Exception {
    // When the old API is removed, only the new flag is updated; forcePython stays null.
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_remove_old_python_version_api=true", "--python_version=PY2");
    opts.setPythonVersion(PythonVersion.PY3);
    assertThat(opts.forcePython).isNull();
    assertThat(opts.pythonVersion).isEqualTo(PythonVersion.PY3);
  }

  @Test
  public void getHost_CopiesMostValues() throws Exception {
    PythonOptions opts =
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_remove_old_python_version_api=true",
            "--incompatible_py3_is_default=true",
            "--incompatible_py2_outputs_are_suffixed=true",
            "--build_python_zip=true",
            "--incompatible_disallow_legacy_py_provider=true",
            "--incompatible_use_python_toolchains=true");
    PythonOptions hostOpts = (PythonOptions) opts.getHost();
    assertThat(hostOpts.incompatibleAllowPythonVersionTransitions).isTrue();
    assertThat(hostOpts.incompatibleRemoveOldPythonVersionApi).isTrue();
    assertThat(hostOpts.incompatiblePy3IsDefault).isTrue();
    assertThat(hostOpts.incompatiblePy2OutputsAreSuffixed).isTrue();
    assertThat(hostOpts.buildPythonZip).isEqualTo(TriState.YES);
    assertThat(hostOpts.incompatibleDisallowLegacyPyProvider).isTrue();
    assertThat(hostOpts.incompatibleUsePythonToolchains).isTrue();
  }

  @Test
  public void getHost_AppliesHostForcePython() throws Exception {
    assumesDefaultIsPY2();
    // --host_force_python overrides whatever version the target configuration uses.
    PythonOptions optsWithForcePythonFlag =
        parsePythonOptions(
            "--incompatible_remove_old_python_version_api=false",
            "--force_python=PY2",
            "--host_force_python=PY3");
    PythonOptions optsWithPythonVersionFlag =
        parsePythonOptions("--python_version=PY2", "--host_force_python=PY3");
    PythonOptions optsWithPy3IsDefaultFlag =
        // --incompatible_py3_is_default requires --incompatible_allow_python_version_transitions
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_py3_is_default=true",
            // It's more interesting to set the incompatible flag true and force host to PY2, than
            // it is to set the flag false and force host to PY3.
            "--host_force_python=PY2");
    PythonOptions hostOptsWithForcePythonFlag = (PythonOptions) optsWithForcePythonFlag.getHost();
    PythonOptions hostOptsWithPythonVersionFlag =
        (PythonOptions) optsWithPythonVersionFlag.getHost();
    PythonOptions hostOptsWithPy3IsDefaultFlag =
        (PythonOptions) optsWithPy3IsDefaultFlag.getHost();
    assertThat(hostOptsWithForcePythonFlag.getPythonVersion()).isEqualTo(PythonVersion.PY3);
    assertThat(hostOptsWithPythonVersionFlag.getPythonVersion()).isEqualTo(PythonVersion.PY3);
    assertThat(hostOptsWithPy3IsDefaultFlag.getPythonVersion()).isEqualTo(PythonVersion.PY2);
  }

  @Test
  public void getHost_Py3IsDefaultFlagChangesHost() throws Exception {
    assumesDefaultIsPY2();
    PythonOptions opts =
        // --incompatible_py3_is_default requires --incompatible_allow_python_version_transitions
        parsePythonOptions(
            "--incompatible_allow_python_version_transitions=true",
            "--incompatible_py3_is_default=true");
    PythonOptions hostOpts = (PythonOptions) opts.getHost();
    assertThat(hostOpts.getPythonVersion()).isEqualTo(PythonVersion.PY3);
  }

  @Test
  public void getNormalized_OldSemantics() throws Exception {
    // Under old semantics, normalization leaves the version unset.
    PythonOptions opts =
        parsePythonOptions("--incompatible_allow_python_version_transitions=false");
    PythonOptions normalizedOpts = (PythonOptions) opts.getNormalized();
    assertThat(normalizedOpts.pythonVersion).isNull();
  }

  @Test
  public void getNormalized_NewSemantics() throws Exception {
    assumesDefaultIsPY2();
    // Under new semantics, normalization makes the implicit default explicit.
    PythonOptions opts =
        parsePythonOptions("--incompatible_allow_python_version_transitions=true");
    PythonOptions normalizedOpts = (PythonOptions) opts.getNormalized();
    assertThat(normalizedOpts.pythonVersion).isEqualTo(PythonVersion.PY2);
  }
}
/*
 * Copyright (c) 2018, The Modern Way. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.themodernway.server.core.scripting;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;

import javax.script.ScriptEngine;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;

import org.slf4j.Logger;
import org.springframework.core.io.Resource;

import com.themodernway.common.api.java.util.CommonOps;
import com.themodernway.common.api.java.util.StringOps;
import com.themodernway.server.core.io.IO;
import com.themodernway.server.core.logging.LoggingOps;

/**
 * Default {@link IScriptingProvider} implementation.
 *
 * Registers one active {@link IScriptingProperties} per {@link ScriptType},
 * creates fresh {@link ScriptEngine}s on demand, and reports which of the
 * known script types have an installed engine.
 */
public class ScriptingProvider implements IScriptingProvider
{
    private static final Logger logger = LoggingOps.getLogger(ScriptingProvider.class);

    /** First registered (and activated) properties object per script type. */
    private final LinkedHashMap<ScriptType, IScriptingProperties> m_dict = new LinkedHashMap<>();

    /**
     * Registers the first {@link IScriptingProperties} seen for each type,
     * activating it if necessary; later duplicates for the same type are
     * logged and ignored.
     *
     * @param list the candidate properties objects, in priority order.
     */
    public ScriptingProvider(final List<IScriptingProperties> list)
    {
        for (final IScriptingProperties prop : list)
        {
            final ScriptType type = prop.getType();

            if (null == m_dict.get(type))
            {
                if (false == prop.isActive())
                {
                    prop.setActive(true);
                }
                m_dict.put(type, prop);

                if (logger.isInfoEnabled())
                {
                    logger.info(LoggingOps.THE_MODERN_WAY_MARKER, String.format("IScriptingProperties for type (%s) registered.", type.getValue()));
                }
            }
            else if (logger.isWarnEnabled())
            {
                logger.warn(LoggingOps.THE_MODERN_WAY_MARKER, String.format("IScriptingProperties for type (%s) ignored.", type.getValue()));
            }
        }
    }

    /** Returns a fresh engine for the given type, looked up by the type's value. */
    @Override
    public ScriptEngine engine(final ScriptType type) throws ScriptException
    {
        return getScriptEngineManager().getEngineByName(StringOps.requireTrimOrNull(type.getValue()));
    }

    /** Same as {@link #engine(ScriptType)}, resolved through the given class loader. */
    @Override
    public ScriptEngine engine(final ScriptType type, final ClassLoader loader) throws ScriptException
    {
        return getScriptEngineManager(CommonOps.requireNonNull(loader)).getEngineByName(StringOps.requireTrimOrNull(type.getValue()));
    }

    /**
     * Evaluates the resource's content in a fresh engine and returns the engine.
     *
     * @throws ScriptException wrapping any {@link IOException} from the resource.
     */
    @Override
    public ScriptEngine engine(final ScriptType type, final Resource resource) throws ScriptException
    {
        try
        {
            return engine(type, resource.getInputStream());
        }
        catch (final IOException e)
        {
            throw new ScriptException(e);
        }
    }

    /**
     * Evaluates the reader's content in a fresh engine and returns the engine.
     * The reader is closed on every exit path by try-with-resources; the
     * previous extra finally-close was redundant (it closed the reader a
     * second time) and has been removed.
     *
     * @throws ScriptException on evaluation failure or wrapping an {@link IOException}.
     */
    @Override
    public ScriptEngine engine(final ScriptType type, final Reader reader) throws ScriptException
    {
        try (Reader inputs = reader)
        {
            final ScriptEngine engine = engine(type);

            engine.eval(inputs);

            return engine;
        }
        catch (final IOException e)
        {
            throw new ScriptException(e);
        }
    }

    /** Evaluates the stream's content (decoded as UTF-8) in a fresh engine. */
    @Override
    public ScriptEngine engine(final ScriptType type, final InputStream stream) throws ScriptException
    {
        return engine(type, new InputStreamReader(stream, IO.UTF_8_CHARSET));
    }

    // Collects every name advertised by the manager's installed engine factories.
    private static HashSet<String> installedNames(final ScriptEngineManager manager)
    {
        final HashSet<String> look = new HashSet<>();

        for (final ScriptEngineFactory factory : manager.getEngineFactories())
        {
            look.addAll(factory.getNames());
        }
        return look;
    }

    // Intersects the installed names with the ScriptType enum (case-insensitive)
    // and returns the canonical type values as an unmodifiable list.
    private static List<String> matchedNames(final HashSet<String> look)
    {
        final HashSet<String> find = new HashSet<>();

        for (final ScriptType type : ScriptType.values())
        {
            for (final String name : look)
            {
                if (type.getValue().equalsIgnoreCase(name))
                {
                    find.add(type.getValue());
                }
            }
        }
        return CommonOps.toUnmodifiableList(find);
    }

    // Same intersection as matchedNames(), but returns the ScriptType constants.
    private static List<ScriptType> matchedTypes(final HashSet<String> look)
    {
        final HashSet<ScriptType> find = new HashSet<>();

        for (final ScriptType type : ScriptType.values())
        {
            for (final String name : look)
            {
                if (type.getValue().equalsIgnoreCase(name))
                {
                    find.add(type);
                }
            }
        }
        return CommonOps.toUnmodifiableList(find);
    }

    /** Names of known script languages with an engine visible to the given class loader. */
    @Override
    public List<String> getScriptingLanguageNames(final ClassLoader loader)
    {
        return matchedNames(installedNames(getScriptEngineManager(CommonOps.requireNonNull(loader))));
    }

    /** Names of known script languages with an engine on the default class path. */
    @Override
    public List<String> getScriptingLanguageNames()
    {
        return matchedNames(installedNames(getScriptEngineManager()));
    }

    /** Script types with an engine on the default class path. */
    @Override
    public List<ScriptType> getScriptingLanguageTypes()
    {
        return matchedTypes(installedNames(getScriptEngineManager()));
    }

    /** Script types with an engine visible to the given class loader. */
    @Override
    public List<ScriptType> getScriptingLanguageTypes(final ClassLoader loader)
    {
        return matchedTypes(installedNames(getScriptEngineManager(CommonOps.requireNonNull(loader))));
    }

    /** Closes all registered scripting properties. */
    @Override
    public void close() throws IOException
    {
        IO.close(m_dict.values());
    }

    @Override
    public ScriptEngineManager getScriptEngineManager()
    {
        return new ScriptEngineManager();
    }

    @Override
    public ScriptEngineManager getScriptEngineManager(final ClassLoader loader)
    {
        return new ScriptEngineManager(CommonOps.requireNonNull(loader));
    }

    @Override
    public ScriptingProxy proxy(final ScriptType type, final Resource resource) throws ScriptException
    {
        return new ScriptingProxy(type, resource);
    }

    @Override
    public ScriptingProxy proxy(final ScriptType type, final Reader reader) throws ScriptException
    {
        return new ScriptingProxy(type, reader);
    }

    @Override
    public ScriptingProxy proxy(final ScriptType type, final InputStream stream) throws ScriptException
    {
        return new ScriptingProxy(type, stream);
    }
}
/* Derby - Class org.apache.derbyTesting.unitTests.store.T_StreamFile Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.unitTests.store; import org.apache.derbyTesting.unitTests.harness.T_MultiThreadedIterations; import org.apache.derbyTesting.unitTests.harness.T_Fail; import org.apache.derby.impl.store.raw.data.*; import org.apache.derby.iapi.services.context.ContextService; import org.apache.derby.iapi.services.context.ContextManager; import org.apache.derby.iapi.services.locks.*; import org.apache.derby.iapi.services.property.PropertyUtil; import org.apache.derby.iapi.services.monitor.Monitor; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.iapi.services.io.Storable; import org.apache.derby.iapi.services.io.FormatIdOutputStream; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.store.raw.*; import org.apache.derby.iapi.store.raw.xact.RawTransaction; import org.apache.derby.iapi.store.raw.data.RawContainerHandle; import org.apache.derby.iapi.store.access.conglomerate.LogicalUndo; import org.apache.derby.iapi.types.DataValueDescriptor; import org.apache.derby.iapi.reference.Property; import org.apache.derby.iapi.services.io.FormatableBitSet; import 
org.apache.derby.iapi.services.io.DynamicByteArrayOutputStream; import java.io.*; import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.AccessController; import java.util.Properties; /** An Impl unittest for rawstore data that is based on the stream file */ public class T_StreamFile extends T_MultiThreadedIterations { private static final String testService = "streamFileTest"; static final String REC_001 = "McLaren"; static final String REC_002 = "Ferrari"; static final String REC_003 = "Benetton"; static final String REC_004 = "Prost"; static final String REC_005 = "Tyrell"; static final String REC_006 = "Derby, Natscape, Goatscape, the popular names"; static final String REC_007 = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"; static final String SP1 = "savepoint1"; static final String SP2 = "savepoint2"; static RawStoreFactory factory; static LockFactory lf; static long commonContainer = -1; static boolean testRollback; // initialize in start static final String TEST_ROLLBACK_OFF = "derby.RawStore.RollbackTestOff"; private static ContextService contextService; private T_Util t_util; public T_StreamFile() { super(); } /** @exception StandardException cannot startup the context service */ public void boot(boolean create, Properties startParams) throws StandardException { super.boot(create, startParams); contextService = getContextService(); } /* ** Methods required by T_Generic */ protected String getModuleToTestProtocolName() { return RawStoreFactory.MODULE; } /** Run the tests @exception T_Fail Unexpected behaviour from the API */ protected void setupTest() throws T_Fail { String rollbackOff = PropertyUtil.getSystemProperty(TEST_ROLLBACK_OFF); testRollback = 
!Boolean.valueOf(rollbackOff).booleanValue(); // don't automatic boot this service if it gets left around if (startParams == null) { startParams = new Properties(); } startParams.put(Property.NO_AUTO_BOOT, Boolean.TRUE.toString()); // remove the service directory to ensure a clean run startParams.put(Property.DELETE_ON_CREATE, Boolean.TRUE.toString()); // see if we are testing encryption startParams = T_Util.setEncryptionParam(startParams); try { factory = (RawStoreFactory) createPersistentService(getModuleToTestProtocolName(), testService, startParams); if (factory == null) { throw T_Fail.testFailMsg(getModuleToTestProtocolName() + " service not started."); } lf = factory.getLockFactory(); if (lf == null) { throw T_Fail.testFailMsg("LockFactory.MODULE not found"); } } catch (StandardException mse) { throw T_Fail.exceptionFail(mse); } t_util = new T_Util(factory, lf, contextService); commonContainer = commonContainer(); return; } /** * T_MultiThreadedIteration method * * @exception T_Fail Unexpected behaviour from the API */ protected void joinSetupTest() throws T_Fail { T_Fail.T_ASSERT(factory != null, "raw store factory not setup "); T_Fail.T_ASSERT(contextService != null, "Context service not setup "); T_Fail.T_ASSERT(commonContainer != -1, "common container not setup "); t_util = new T_Util(factory, lf, contextService); } protected T_MultiThreadedIterations newTestObject() { return new T_StreamFile(); } /** run the test @exception T_Fail Unexpected behaviour from the API */ protected void runTestSet() throws T_Fail { // get a utility helper ContextManager cm1 = contextService.newContextManager(); contextService.setCurrentContextManager(cm1); try { // boundry case: 1 row, 13 columns, string types SF001(1, 13, T_RowSource.STRING_ROW_TYPE, false); // boundry case: 1 rows, 1 null column, string types SF001(1, 1, T_RowSource.STRING_ROW_TYPE, false); // boundry case: 1000 rows, 1 null column, string types SF001(1000, 1, T_RowSource.STRING_ROW_TYPE, false); // boundry 
case: 1000 rows, 2 column (first null, second not null), string types SF001(1000, 2, T_RowSource.STRING_ROW_TYPE, false); // boundry case: 500 rows, 9 columns (first & last column null), string types SF001(500, 9, T_RowSource.STRING_ROW_TYPE, false); // 500 rows, 13 column, string type SF001(500, 13, T_RowSource.STRING_ROW_TYPE, false); // test error condition SF001(1000, 2, T_RowSource.STRING_ROW_TYPE, true); // The following test tests externalizable types, but we currently don't support it. // do, don't run the test yet. // 100 rows, 5 column, Integer object type //SF001(100, 5, T_RowSource.INTEGER_ROW_TYPE, false); // 100 rows, 1 column, Integer object type //SF001(100, 1, T_RowSource.INTEGER_ROW_TYPE, false); // SF002() tests are used to check performance of the stream file. // no need to run them regularly. //SF002(0); //SF002(1); } catch (StandardException se) { //Assume database is not active. DERBY-4856 thread dump cm1.cleanupOnError(se, false); throw T_Fail.exceptionFail(se); } finally { contextService.resetCurrentContextManager(cm1); } } /* * create a container that all threads can use */ private long commonContainer() throws T_Fail { ContextManager cm1 = contextService.newContextManager(); contextService.setCurrentContextManager(cm1); long cid; try { Transaction t = t_util.t_startTransaction(); cid = t_util.t_addContainer(t, 0); t_util.t_commit(t); t.close(); } catch (StandardException se) { //Assume database is not active. DERBY-4856 thread dump cm1.cleanupOnError(se, false); throw T_Fail.exceptionFail(se); } finally { contextService.resetCurrentContextManager(cm1); } return cid; } /* * create a stream container load with rowCount number of rows. * fetch it all back, and check to make sure all rows are correct. 
*/ protected void SF001(int rowCount, int columnCount, int columnType, boolean forceAbort) throws StandardException, T_Fail { Transaction t = t_util.t_startTransaction(); int segmentId = StreamContainerHandle.TEMPORARY_SEGMENT; Properties properties = new Properties(); properties.put(RawStoreFactory.STREAM_FILE_BUFFER_SIZE_PARAMETER, "16384"); // create row source T_RowSource testRowSource = null; if (forceAbort) testRowSource = new T_RowSource(rowCount, columnCount, columnType, forceAbort, t); else testRowSource = new T_RowSource(rowCount, columnCount, columnType, forceAbort, null); long startms = System.currentTimeMillis(); long containerId = t.addAndLoadStreamContainer(segmentId, properties, testRowSource); long endms = System.currentTimeMillis(); long time = endms - startms; REPORT("SF001 - write: " + time + "ms"); // open the container, and start fetching... StreamContainerHandle scHandle = t.openStreamContainer(segmentId, containerId, false); // set up the template row DataValueDescriptor template[] = null; template = testRowSource.getTemplate(); DataValueDescriptor readRow[] = null; readRow = testRowSource.getTemplate(); segmentId = StreamContainerHandle.TEMPORARY_SEGMENT; int fetchCount = 0; startms = System.currentTimeMillis(); while (scHandle.fetchNext(readRow)) { fetchCount++; // check to make sure the row is what we inserted. // this depends on T_RowSource generate identical rows. if (!readRow.toString().equals(template.toString())) throw T_Fail.testFailMsg("Record's value incorrect, expected :" + template.toString() + ": - got :" + readRow.toString()); } endms = System.currentTimeMillis(); time = endms - startms; // check to make sure we got the right number of rows. if (fetchCount != rowCount) throw T_Fail.testFailMsg("incorrect number of row fetched. 
Expecting " + rowCount + " rows, got " + fetchCount + ", rows instead."); REPORT("SF001 - fetch: " + time + "ms"); scHandle.close(); t_util.t_commit(t); t.close(); PASS("SF001, rowCount = " + rowCount + ", columnCount = " + columnCount + ", clumn type: " + columnType); } // this test test the rowSource over head. // when param set to 1, also gets the overhead for writeExternal for Storables protected void SF002(int param) throws StandardException, T_Fail { T_RowSource rowSource = new T_RowSource(500000, 13, 2, false, null); DynamicByteArrayOutputStream out = new DynamicByteArrayOutputStream(16384); FormatIdOutputStream logicalDataOut = new FormatIdOutputStream(out); long startms = System.currentTimeMillis(); System.out.println("starting rowSource test, time: " + startms); try { FormatableBitSet validColumns = rowSource.getValidColumns(); int numberFields = 0; if (validColumns != null) { for (int i = validColumns.size() - 1; i >= 0; i--) { if (validColumns.get(i)) { numberFields = i + 1; break; } } } DataValueDescriptor[] row = rowSource.getNextRowFromRowSource(); while (row != null) { if (SanityManager.DEBUG) { SanityManager.ASSERT(row != null, "RowSource returned null"); } int arrayPosition = -1; for (int i = 0; i < numberFields; i++) { // write each column out if (validColumns.get(i)) { arrayPosition++; DataValueDescriptor column = row[arrayPosition]; if (param == 1) { try { Storable sColumn = (Storable) column; if (!sColumn.isNull()) { sColumn.writeExternal(logicalDataOut); out.reset(); } } catch (IOException ioe) { throw T_Fail.exceptionFail(ioe); } } } } row = rowSource.getNextRowFromRowSource(); } } finally { } long endms = System.currentTimeMillis(); long time2 = endms - startms; if (param != 1) System.out.println("ended rowSource test, time: " + endms + ", time spent = " + time2); else System.out.println("------ writeExternal called....\n ended rowSource test, time: " + endms + ", time spent = " + time2); PASS("SF002"); } /** * Privileged lookup of the 
ContextService. Must be private so that user code * can't call this entry point. */ private static ContextService getContextService() { return AccessController.doPrivileged ( new PrivilegedAction<ContextService>() { public ContextService run() { return ContextService.getFactory(); } } ); } /** * Privileged startup. Must be private so that user code * can't call this entry point. */ private static Object createPersistentService( final String factoryInterface, final String serviceName, final Properties properties ) throws StandardException { try { return AccessController.doPrivileged ( new PrivilegedExceptionAction<Object>() { public Object run() throws StandardException { return Monitor.createPersistentService( factoryInterface, serviceName, properties ); } } ); } catch (PrivilegedActionException pae) { throw StandardException.plainWrapException( pae ); } } }
/* * Copyright 2015 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.bigtable.hbase; import static com.google.cloud.bigtable.hbase.test_env.SharedTestEnvRule.COLUMN_FAMILY; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Random; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; public class TestBasicOps extends AbstractTest { /** Happy path for a single value. 
*/ @Test public void testPutGetDelete() throws IOException { // Initialize byte[] rowKey = dataHelper.randomData("testrow-"); byte[] testQualifier = dataHelper.randomData("testQualifier-"); byte[] testValue = dataHelper.randomData("testValue-"); testPutGetDelete(true, rowKey, testQualifier, testValue); } /** Requirement 1.2 - Rowkey, family, qualifer, and value are byte[] */ @Test public void testBinaryPutGetDelete() throws IOException { // Initialize Random random = new Random(); byte[] rowKey = new byte[100]; random.nextBytes(rowKey); byte[] testQualifier = new byte[100]; random.nextBytes(testQualifier); byte[] testValue = new byte[100]; random.nextBytes(testValue); // TODO(carterpage) - test that column-family can work as raw binary // Put testPutGetDelete(true, rowKey, testQualifier, testValue); } /** * Requirement 1.9 - Referring to a column without the qualifier implicitly sets a special "empty" * qualifier. */ @Test public void testNullQualifier() throws IOException { // Initialize values Table table = getDefaultTable(); byte[] rowKey = dataHelper.randomData("testrow-"); byte[] testValue = dataHelper.randomData("testValue-"); // Insert value with null qualifier Put put = new Put(rowKey); put.addColumn(COLUMN_FAMILY, null, testValue); table.put(put); // This is treated the same as an empty String (which is just an empty byte array). Get get = new Get(rowKey); get.addColumn(COLUMN_FAMILY, Bytes.toBytes("")); Result result = table.get(get); Assert.assertEquals(1, result.size()); Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, null)); Assert.assertArrayEquals( testValue, CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, null))); // Get as a null. This should work. 
get = new Get(rowKey); get.addColumn(COLUMN_FAMILY, null); result = table.get(get); Assert.assertEquals(1, result.size()); Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, null)); Assert.assertArrayEquals( testValue, CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, null))); // This should return when selecting the whole family too. get = new Get(rowKey); get.addFamily(COLUMN_FAMILY); result = table.get(get); Assert.assertEquals(1, result.size()); Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, null)); Assert.assertArrayEquals( testValue, CellUtil.cloneValue(result.getColumnLatestCell(COLUMN_FAMILY, null))); // Delete Delete delete = new Delete(rowKey); delete.addColumns(COLUMN_FAMILY, null); table.delete(delete); // Confirm deleted Assert.assertFalse(table.exists(get)); table.close(); } /** * Requirement 2.4 - Maximum cell size is 10MB by default. Can be overriden using * hbase.client.keyvalue.maxsize property. * * <p>Cell size includes value and key info, so the value needs to a bit less than the max to * work. */ @Test @Category(KnownEmulatorGap.class) public void testPutGetBigValue() throws IOException { testPutGetDeleteExists((10 << 20) - 1024, false, true); // 10 MB - 1kB } /** * Test a put without a get. This will help allow us to see performance differences between put * alone and put/get. There are (or hopefully were, by the time this is read), performance issues * with testBigValue. The profile for put (uploading) is different from the profile for get * (downloading). We need a way to see where the issue is. */ @Test @Category(KnownEmulatorGap.class) public void testPutBigValue() throws IOException { testPutGetDeleteExists((10 << 20) - 1024, false, false); // 10 MB - 1kB } /** * Requirement 2.4 - Maximum cell size is 10MB by default. Can be overridden using * hbase.client.keyvalue.maxsize property. * * <p>Ensure the failure case. 
*/ @Test(expected = IllegalArgumentException.class) @Category(KnownEmulatorGap.class) public void testPutTooBigValue() throws IOException { testPutGetDeleteExists((10 << 20) + 1 + 4, true, true); // 10 MB + 1 } @Test @Category(KnownEmulatorGap.class) public void testPutAlmostTooBigValue() throws IOException { testPutGetDeleteExists(10 << 20, true, true); // 10 MB } @Test @Category(KnownEmulatorGap.class) /** Run a large value ten times for performance logging purposes */ public void testPutAlmostTooBigValueTenTimes() throws IOException { for (int i = 0; i < 10; i++) { long start = System.currentTimeMillis(); testPutGetDeleteExists(10 << 20, true, true); // 10 MB if (System.currentTimeMillis() - start > 5_000) { // If this is a slow connection, don't bother doing a performance test. break; } } } void testPutGetDeleteExists(int size, boolean removeMetadataSize, boolean doGet) throws IOException { // Initialize variables byte[] testRowKey = dataHelper.randomData("testrow-"); byte[] testQualifier = dataHelper.randomData("testQualifier-"); int valueSize = size; if (removeMetadataSize) { // looks like in hbase 2.0 Cell size increased by 4. TODO verify it. int metadataSize = (20 + 4 + testRowKey.length + COLUMN_FAMILY.length + testQualifier.length); valueSize -= metadataSize; } byte[] testValue = new byte[valueSize]; new Random().nextBytes(testValue); testPutGetDelete(doGet, testRowKey, testQualifier, testValue); } private void testPutGetDelete( boolean doGet, byte[] rowKey, byte[] testQualifier, byte[] testValue) throws IOException { Table table = getDefaultTable(); Stopwatch stopwatch = new Stopwatch(); // Put Put put = new Put(rowKey); put.addColumn(COLUMN_FAMILY, testQualifier, testValue); table.put(put); stopwatch.print("Put took %d ms"); // Get Get get = new Get(rowKey); get.addColumn(COLUMN_FAMILY, testQualifier); // Do the get on some tests, but not others. The rationale for that is to do performance // testing on large values. 
if (doGet) { Result result = table.get(get); stopwatch.print("Get took %d ms"); Assert.assertTrue(result.containsColumn(COLUMN_FAMILY, testQualifier)); List<Cell> cells = result.getColumnCells(COLUMN_FAMILY, testQualifier); Assert.assertEquals(1, cells.size()); Assert.assertTrue(Arrays.equals(testValue, CellUtil.cloneValue(cells.get(0)))); stopwatch.print("Verifying took %d ms"); } // Delete Delete delete = new Delete(rowKey); delete.addColumns(COLUMN_FAMILY, testQualifier); table.delete(delete); stopwatch.print("Delete took %d ms"); // Confirm deleted Assert.assertFalse(table.exists(get)); stopwatch.print("Exists took %d ms"); table.close(); stopwatch.print("close took %d ms"); } private class Stopwatch { long lastCheckin = System.currentTimeMillis(); private void print(String string) { long now = System.currentTimeMillis(); logger.info(string, now - lastCheckin); lastCheckin = now; } } }
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.videointelligence.v1p3beta1.stub; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoProgress; import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoRequest; import com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import 
com.google.common.collect.Lists; import com.google.longrunning.Operation; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link VideoIntelligenceServiceStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (videointelligence.googleapis.com) and default port (443) are * used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the total timeout of annotateVideo to 30 seconds: * * <pre>{@code * VideoIntelligenceServiceStubSettings.Builder videoIntelligenceServiceSettingsBuilder = * VideoIntelligenceServiceStubSettings.newBuilder(); * videoIntelligenceServiceSettingsBuilder * .annotateVideoSettings() * .setRetrySettings( * videoIntelligenceServiceSettingsBuilder * .annotateVideoSettings() * .getRetrySettings() * .toBuilder() * .setTotalTimeout(Duration.ofSeconds(30)) * .build()); * VideoIntelligenceServiceStubSettings videoIntelligenceServiceSettings = * videoIntelligenceServiceSettingsBuilder.build(); * }</pre> */ @BetaApi @Generated("by gapic-generator-java") public class VideoIntelligenceServiceStubSettings extends StubSettings<VideoIntelligenceServiceStubSettings> { /** The default scopes of the service. 
 */
  // NOTE(review): this file is emitted by gapic-generator-java; prefer regenerating
  // over hand-editing so local changes are not lost on the next generator run.
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  private final UnaryCallSettings<AnnotateVideoRequest, Operation> annotateVideoSettings;
  private final OperationCallSettings<
          AnnotateVideoRequest, AnnotateVideoResponse, AnnotateVideoProgress>
      annotateVideoOperationSettings;

  /** Returns the object with the settings used for calls to annotateVideo. */
  public UnaryCallSettings<AnnotateVideoRequest, Operation> annotateVideoSettings() {
    return annotateVideoSettings;
  }

  /** Returns the object with the settings used for calls to annotateVideo. */
  public OperationCallSettings<AnnotateVideoRequest, AnnotateVideoResponse, AnnotateVideoProgress>
      annotateVideoOperationSettings() {
    return annotateVideoOperationSettings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public VideoIntelligenceServiceStub createStub() throws IOException {
    // Only the gRPC transport is supported by this stub.
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcVideoIntelligenceServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "videointelligence.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "videointelligence.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(VideoIntelligenceServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected VideoIntelligenceServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    annotateVideoSettings = settingsBuilder.annotateVideoSettings().build();
    annotateVideoOperationSettings = settingsBuilder.annotateVideoOperationSettings().build();
  }

  /** Builder for VideoIntelligenceServiceStubSettings. */
  public static class Builder
      extends StubSettings.Builder<VideoIntelligenceServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<AnnotateVideoRequest, Operation> annotateVideoSettings;
    private final OperationCallSettings.Builder<
            AnnotateVideoRequest, AnnotateVideoResponse, AnnotateVideoProgress>
        annotateVideoOperationSettings;
    // Named retry policies shared by initDefaults(): which status codes are
    // retryable, and the backoff/timeout parameters to use when retrying.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(2.5)
              .setMaxRetryDelay(Duration.ofMillis(120000L))
              .setInitialRpcTimeout(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(600000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      annotateVideoSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      annotateVideoOperationSettings = OperationCallSettings.newBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(annotateVideoSettings);
      initDefaults(this);
    }

    protected Builder(VideoIntelligenceServiceStubSettings settings) {
      super(settings);

      annotateVideoSettings = settings.annotateVideoSettings.toBuilder();
      annotateVideoOperationSettings = settings.annotateVideoOperationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(annotateVideoSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the named retry policies above to the annotateVideo unary call and
    // to the long-running-operation polling settings.
    private static Builder initDefaults(Builder builder) {
      builder
          .annotateVideoSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .annotateVideoOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<AnnotateVideoRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(AnnotateVideoResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(AnnotateVideoProgress.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(20000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(45000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(86400000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to annotateVideo. */
    public UnaryCallSettings.Builder<AnnotateVideoRequest, Operation> annotateVideoSettings() {
      return annotateVideoSettings;
    }

    /** Returns the builder for the settings used for calls to annotateVideo. */
    @BetaApi(
        "The surface for use by generated code is not stable yet and may change in the future.")
    public OperationCallSettings.Builder<
            AnnotateVideoRequest, AnnotateVideoResponse, AnnotateVideoProgress>
        annotateVideoOperationSettings() {
      return annotateVideoOperationSettings;
    }

    @Override
    public VideoIntelligenceServiceStubSettings build() throws IOException {
      return new VideoIntelligenceServiceStubSettings(this);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.ldap.handlers.request; import java.util.Map; import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.directory.api.ldap.model.constants.SchemaConstants; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.exception.LdapAuthenticationException; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.api.ldap.model.exception.LdapInvalidDnException; import org.apache.directory.api.ldap.model.exception.LdapUnwillingToPerformException; import org.apache.directory.api.ldap.model.message.BindRequest; import org.apache.directory.api.ldap.model.message.BindResponse; import org.apache.directory.api.ldap.model.message.LdapResult; import org.apache.directory.api.ldap.model.message.ResultCodeEnum; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.api.util.StringConstants; import org.apache.directory.api.util.Strings; import org.apache.directory.server.core.api.CoreSession; import org.apache.directory.server.core.api.DirectoryService; import 
org.apache.directory.server.core.api.LdapPrincipal; import org.apache.directory.server.core.api.OperationEnum; import org.apache.directory.server.core.api.entry.ClonedServerEntry; import org.apache.directory.server.core.api.interceptor.context.BindOperationContext; import org.apache.directory.server.core.shared.DefaultCoreSession; import org.apache.directory.server.i18n.I18n; import org.apache.directory.server.ldap.LdapProtocolUtils; import org.apache.directory.server.ldap.LdapSession; import org.apache.directory.server.ldap.handlers.LdapRequestHandler; import org.apache.directory.server.ldap.handlers.sasl.MechanismHandler; import org.apache.directory.server.ldap.handlers.sasl.SaslConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A single reply MessageReceived handler for {@link BindRequest}s. * * Implements server-side of RFC 2222, sections 4.2 and 4.3. * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ public class BindRequestHandler extends LdapRequestHandler<BindRequest> { private static final Logger LOG = LoggerFactory.getLogger( BindRequestHandler.class ); /** A Hashed Adapter mapping SASL mechanisms to their handlers. */ private Map<String, MechanismHandler> handlers; /** * Set the mechanisms handler map. * * @param handlers The associations btween a machanism and its handler */ public void setSaslMechanismHandlers( Map<String, MechanismHandler> handlers ) { this.handlers = handlers; } /** * Handle the Simple authentication. 
     *
     * @param ldapSession The associated Session
     * @param bindRequest The BindRequest received
     * @throws Exception If the authentication cannot be done
     */
    // This will suppress PMD.EmptyCatchBlock warnings in this method
    @SuppressWarnings("PMD.EmptyCatchBlock")
    public void handleSimpleAuth( LdapSession ldapSession, BindRequest bindRequest ) throws Exception
    {
        DirectoryService directoryService = ldapServer.getDirectoryService();

        // if the user is already bound, we have to unbind him
        if ( ldapSession.isAuthenticated() )
        {
            // We already have a bound session for this user. We have to
            // abandon it first.
            ldapSession.getCoreSession().unbind();
        }

        // Set the status to SimpleAuthPending
        ldapSession.setSimpleAuthPending();

        // Now, bind the user

        // create a new Bind context, with a null session, as we don't have
        // any context yet.
        BindOperationContext bindContext = new BindOperationContext( null );

        // Stores the Dn of the user to check, and its password
        bindContext.setDn( bindRequest.getDn() );
        bindContext.setCredentials( bindRequest.getCredentials() );
        bindContext.setIoSession( ldapSession.getIoSession() );
        bindContext.setInterceptors( directoryService.getInterceptors( OperationEnum.BIND ) );

        // Stores the request controls into the operation context
        LdapProtocolUtils.setRequestControls( bindContext, bindRequest );

        try
        {
            /*
             * Referral handling as specified by RFC 3296 here:
             *
             *   http://www.faqs.org/rfcs/rfc3296.html
             *
             * See section 5.6.1 where if the bind principal Dn is a referral
             * we return an invalidCredentials result response.  Optionally we
             * could support delegated authentication in the future with this
             * potential.  See the following JIRA for more on this possibility:
             *
             *   https://issues.apache.org/jira/browse/DIRSERVER-1217
             *
             * NOTE: if this is done then this handler should extend the
             * a modified form of the ReferralAwareRequestHandler so it can
             * detect conditions where ancestors of the Dn are referrals
             * and delegate appropriately.
             */
            Entry principalEntry = null;

            try
            {
                // Admin-session lookup: used only to detect referrals; a
                // missing entry is not an error at this point.
                principalEntry = directoryService.getAdminSession().lookup( bindRequest.getDn() );
            }
            catch ( LdapException le )
            {
                // this is OK
            }

            if ( principalEntry == null )
            {
                // Fall through: the bind operation below will report the failure.
                LOG.info( "The {} principalDN cannot be found in the server : bind failure.",
                    bindRequest.getName() );
            }
            else if ( ( ( ClonedServerEntry ) principalEntry ).getOriginalEntry().contains(
                SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.REFERRAL_OC ) )
            {
                // Per RFC 3296 5.6.1: a referral principal yields invalidCredentials.
                LOG.info( "Bind principalDn points to referral." );
                LdapResult result = bindRequest.getResultResponse().getLdapResult();
                result.setDiagnosticMessage( "Bind principalDn points to referral." );
                result.setResultCode( ResultCodeEnum.INVALID_CREDENTIALS );
                ldapSession.getIoSession().write( bindRequest.getResultResponse() );
                return;
            }

            // TODO - might cause issues since lookups are not returning all
            // attributes right now - this is an optimization that can be
            // enabled later after determining whether or not this will cause
            // issues.
            // reuse the looked up entry so we don't incur another lookup
            // opContext.setEntry( principalEntry );

            // And call the OperationManager bind operation.
            bindContext.setInterceptors( directoryService.getInterceptors( OperationEnum.BIND ) );
            directoryService.getOperationManager().bind( bindContext );

            // As a result, store the created session in the Core Session
            CoreSession coreSession = bindContext.getSession();
            ldapSession.setCoreSession( coreSession );

            // Store the IoSession in the coreSession
            ( ( DefaultCoreSession ) coreSession ).setIoSession( bindContext.getIoSession() );

            // And set the current state accordingly
            if ( !ldapSession.getCoreSession().isAnonymous() )
            {
                ldapSession.setAuthenticated();
            }
            else
            {
                ldapSession.setAnonymous();
            }

            // Return the successful response
            bindRequest.getResultResponse().addAllControls( bindContext.getResponseControls() );
            sendBindSuccess( ldapSession, bindRequest, null );
        }
        catch ( Exception e )
        {
            // Something went wrong. Write back an error message
            // For BindRequest, it should be an InvalidCredentials,
            // no matter what kind of exception we got.
            ResultCodeEnum code = null;
            LdapResult result = bindRequest.getResultResponse().getLdapResult();

            if ( e instanceof LdapUnwillingToPerformException )
            {
                code = ResultCodeEnum.UNWILLING_TO_PERFORM;
                result.setResultCode( code );
            }
            else if ( e instanceof LdapInvalidDnException )
            {
                code = ResultCodeEnum.INVALID_DN_SYNTAX;
                result.setResultCode( code );
            }
            else
            {
                code = ResultCodeEnum.INVALID_CREDENTIALS;
                result.setResultCode( code );
            }

            String msg = code.toString() + ": Bind failed: " + e.getLocalizedMessage();

            if ( LOG.isDebugEnabled() )
            {
                msg += ":\n" + ExceptionUtils.getStackTrace( e );
                msg += "\n\nBindRequest = \n" + bindRequest.toString();
            }

            Dn dn = null;

            if ( e instanceof LdapAuthenticationException )
            {
                dn = ( ( LdapAuthenticationException ) e ).getResolvedDn();
            }

            // Only reveal the matched Dn for result codes where RFC allows it.
            if ( ( dn != null )
                && ( ( code == ResultCodeEnum.NO_SUCH_OBJECT )
                    || ( code == ResultCodeEnum.ALIAS_PROBLEM )
                    || ( code == ResultCodeEnum.INVALID_DN_SYNTAX )
                    || ( code == ResultCodeEnum.ALIAS_DEREFERENCING_PROBLEM ) ) )
            {
                result.setMatchedDn( dn );
            }

            result.setDiagnosticMessage( msg );
            bindRequest.getResultResponse().addAllControls( bindContext.getResponseControls() );
            ldapSession.getIoSession().write( bindRequest.getResultResponse() );
        }
        finally
        {
            // Reset LDAP session bind status to anonymous if authentication failed
            if ( !ldapSession.isAuthenticated() )
            {
                ldapSession.setAnonymous();
            }
        }
    }


    /**
     * Check if the mechanism exists.
     */
    private boolean checkMechanism( String saslMechanism ) throws Exception
    {
        // Guard clause:  Reject unsupported SASL mechanisms.
        if ( !ldapServer.getSupportedMechanisms().contains( saslMechanism ) )
        {
            LOG.error( I18n.err( I18n.ERR_160, saslMechanism ) );
            return false;
        }
        else
        {
            return true;
        }
    }


    /**
     * For challenge/response exchange, generate the challenge.
     * If the exchange is complete then send bind success.
* * @param ldapSession * @param ss * @param bindRequest */ private void generateSaslChallengeOrComplete( LdapSession ldapSession, SaslServer ss, BindRequest bindRequest ) throws Exception { LdapResult result = bindRequest.getResultResponse().getLdapResult(); // SaslServer will throw an exception if the credentials are null. if ( bindRequest.getCredentials() == null ) { bindRequest.setCredentials( StringConstants.EMPTY_BYTES ); } try { // Compute the challenge byte[] tokenBytes = ss.evaluateResponse( bindRequest.getCredentials() ); if ( ss.isComplete() ) { // This is the end of the C/R exchange if ( tokenBytes != null ) { /* * There may be a token to return to the client. We set it here * so it will be returned in a SUCCESS message, after an LdapContext * has been initialized for the client. */ ldapSession.putSaslProperty( SaslConstants.SASL_CREDS, tokenBytes ); } LdapPrincipal ldapPrincipal = ( LdapPrincipal ) ldapSession .getSaslProperty( SaslConstants.SASL_AUTHENT_USER ); if ( ldapPrincipal != null ) { DirectoryService ds = ldapSession.getLdapServer().getDirectoryService(); String saslMechanism = bindRequest.getSaslMechanism(); byte[] password = null; if ( ldapPrincipal.getUserPasswords() != null ) { password = ldapPrincipal.getUserPasswords()[0]; } CoreSession userSession = ds.getSession( ldapPrincipal.getDn(), password, saslMechanism, null ); // Set the user session into the ldap session ldapSession.setCoreSession( userSession ); // Store the IoSession in the coreSession ( ( DefaultCoreSession ) userSession ).setIoSession( ldapSession.getIoSession() ); } // Mark the user as authenticated ldapSession.setAuthenticated(); // Call the cleanup method for the selected mechanism MechanismHandler handler = ( MechanismHandler ) ldapSession .getSaslProperty( SaslConstants.SASL_MECH_HANDLER ); handler.cleanup( ldapSession ); // Return the successful response sendBindSuccess( ldapSession, bindRequest, tokenBytes ); } else { // The SASL bind must continue, we are sending 
the computed challenge LOG.info( "Continuation token had length " + tokenBytes.length ); // Build the response result.setResultCode( ResultCodeEnum.SASL_BIND_IN_PROGRESS ); BindResponse resp = bindRequest.getResultResponse(); // Store the challenge resp.setServerSaslCreds( tokenBytes ); // Switch to SASLAuthPending ldapSession.setSaslAuthPending(); // And write back the response ldapSession.getIoSession().write( resp ); LOG.debug( "Returning final authentication data to client to complete context." ); } } catch ( SaslException se ) { sendInvalidCredentials( ldapSession, bindRequest, se ); } } /** * Send back an AUTH-METH-NOT-SUPPORTED error message to the client */ private void sendAuthMethNotSupported( LdapSession ldapSession, BindRequest bindRequest ) { // First, r-einit the state to Anonymous, and clear the // saslProperty map ldapSession.clearSaslProperties(); ldapSession.setAnonymous(); // And send the response to the client LdapResult bindResult = bindRequest.getResultResponse().getLdapResult(); bindResult.setResultCode( ResultCodeEnum.AUTH_METHOD_NOT_SUPPORTED ); bindResult.setDiagnosticMessage( ResultCodeEnum.AUTH_METHOD_NOT_SUPPORTED.toString() + ": " + bindRequest.getSaslMechanism() + " is not a supported mechanism." ); // Write back the error ldapSession.getIoSession().write( bindRequest.getResultResponse() ); } /** * Send back an INVALID-CREDENTIAL error message to the user. If we have an exception * as a third argument, then send back the associated message to the client. 
*/ private void sendInvalidCredentials( LdapSession ldapSession, BindRequest bindRequest, Exception e ) { LdapResult result = bindRequest.getResultResponse().getLdapResult(); String message = ""; if ( e != null ) { message = ResultCodeEnum.INVALID_CREDENTIALS + ": " + e.getLocalizedMessage(); } else { message = ResultCodeEnum.INVALID_CREDENTIALS.toString(); } LOG.error( message ); result.setResultCode( ResultCodeEnum.INVALID_CREDENTIALS ); result.setDiagnosticMessage( message ); // Reinitialize the state to Anonymous and clear the sasl properties ldapSession.clearSaslProperties(); ldapSession.setAnonymous(); // Write back the error response ldapSession.getIoSession().write( bindRequest.getResultResponse() ); } /** * Send a SUCCESS message back to the client. */ private void sendBindSuccess( LdapSession ldapSession, BindRequest bindRequest, byte[] tokenBytes ) { // Return the successful response BindResponse response = bindRequest.getResultResponse(); response.getLdapResult().setResultCode( ResultCodeEnum.SUCCESS ); response.setServerSaslCreds( tokenBytes ); if ( !ldapSession.getCoreSession().isAnonymous() ) { // If we have not been asked to authenticate as Anonymous, authenticate the user ldapSession.setAuthenticated(); } else { // Otherwise, switch back to Anonymous ldapSession.setAnonymous(); } // Clean the SaslProperties, we don't need them anymore MechanismHandler handler = ( MechanismHandler ) ldapSession.getSaslProperty( SaslConstants.SASL_MECH_HANDLER ); if ( handler != null ) { handler.cleanup( ldapSession ); } ldapSession.getIoSession().write( response ); LOG.debug( "Returned SUCCESS message: {}.", response ); } private void handleSaslAuthPending( LdapSession ldapSession, BindRequest bindRequest ) throws Exception { // First, check that we have the same mechanism String saslMechanism = bindRequest.getSaslMechanism(); // The empty mechanism is also a request for a new Bind session if ( Strings.isEmpty( saslMechanism ) || !ldapSession.getSaslProperty( 
SaslConstants.SASL_MECH ).equals( saslMechanism ) ) { sendAuthMethNotSupported( ldapSession, bindRequest ); return; } // We have already received a first BindRequest, and sent back some challenge. // First, check if the mechanism is the same MechanismHandler mechanismHandler = handlers.get( saslMechanism ); if ( mechanismHandler == null ) { String message = I18n.err( I18n.ERR_161, saslMechanism ); // Clear the saslProperties, and move to the anonymous state ldapSession.clearSaslProperties(); ldapSession.setAnonymous(); LOG.error( message ); throw new IllegalArgumentException( message ); } // Get the previously created SaslServer instance SaslServer ss = mechanismHandler.handleMechanism( ldapSession, bindRequest ); generateSaslChallengeOrComplete( ldapSession, ss, bindRequest ); } /** * Handle the SASL authentication. If the mechanism is known, we are * facing three cases : * <ul> * <li>The user does not has a session yet</li> * <li>The user already has a session</li> * <li>The user has started a SASL negotiation</li> * </lu><br/> * * In the first case, we initiate a SaslBind session, which will be used all * along the negotiation.<br/> * In the second case, we first have to unbind the user, and initiate a new * SaslBind session.<br/> * In the third case, we have sub cases : * <ul> * <li>The mechanism is not provided : that means the user want to reset the * current negotiation. We move back to an Anonymous state</li> * <li>The mechanism is provided : the user is initializing a new negotiation * with another mechanism. The current SaslBind session is reinitialized</li> * <li></li> * </ul><br/> * * @param ldapSession The associated Session * @param bindRequest The BindRequest received * @throws Exception If the authentication cannot be done */ public void handleSaslAuth( LdapSession ldapSession, BindRequest bindRequest ) throws Exception { String saslMechanism = bindRequest.getSaslMechanism(); // Case #2 : the user does have a session. 
We have to unbind him if ( ldapSession.isAuthenticated() ) { // We already have a bound session for this user. We have to // close the previous session first. ldapSession.getCoreSession().unbind(); // Reset the status to Anonymous ldapSession.setAnonymous(); // Clean the sasl properties ldapSession.clearSaslProperties(); // Now we can continue as if the client was Anonymous from the beginning } // case #1 : The user does not have a session. if ( ldapSession.isAnonymous() ) { // fist check that the mechanism exists if ( !checkMechanism( saslMechanism ) ) { // get out ! sendAuthMethNotSupported( ldapSession, bindRequest ); return; } // Store the mechanism in the ldap session ldapSession.putSaslProperty( SaslConstants.SASL_MECH, saslMechanism ); // Get the handler for this mechanism MechanismHandler mechanismHandler = handlers.get( saslMechanism ); // Store the mechanism handler in the salsProperties ldapSession.putSaslProperty( SaslConstants.SASL_MECH_HANDLER, mechanismHandler ); // Initialize the mechanism specific data mechanismHandler.init( ldapSession ); // Get the SaslServer instance which manage the C/R exchange SaslServer ss = mechanismHandler.handleMechanism( ldapSession, bindRequest ); // We have to generate a challenge generateSaslChallengeOrComplete( ldapSession, ss, bindRequest ); // And get back return; } else if ( ldapSession.isAuthPending() ) { try { handleSaslAuthPending( ldapSession, bindRequest ); } catch ( SaslException se ) { sendInvalidCredentials( ldapSession, bindRequest, se ); } return; } } /** * Deal with a received BindRequest * * @param ldapSession The current session * @param bindRequest The received BindRequest * @throws Exception If the authentication cannot be handled */ public void handle( LdapSession ldapSession, BindRequest bindRequest ) throws Exception { LOG.debug( "Received: {}", bindRequest ); // Guard clause: LDAP version 3 if ( !bindRequest.getVersion3() ) { LOG.error( I18n.err( I18n.ERR_162 ) ); LdapResult bindResult = 
bindRequest.getResultResponse().getLdapResult(); bindResult.setResultCode( ResultCodeEnum.PROTOCOL_ERROR ); bindResult.setDiagnosticMessage( I18n.err( I18n.ERR_163 ) ); ldapSession.getIoSession().write( bindRequest.getResultResponse() ); return; } // Deal with the two kinds of authentication : Simple and SASL if ( bindRequest.isSimple() ) { handleSimpleAuth( ldapSession, bindRequest ); } else { handleSaslAuth( ldapSession, bindRequest ); } } }
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.appservice.v2018_02_01.implementation; import com.microsoft.azure.arm.resources.models.implementation.GroupableResourceCoreImpl; import com.microsoft.azure.management.appservice.v2018_02_01.AppServicePlan; import rx.Observable; import com.microsoft.azure.management.appservice.v2018_02_01.AppServicePlanPatchResource; import org.joda.time.DateTime; import com.microsoft.azure.management.appservice.v2018_02_01.StatusOptions; import com.microsoft.azure.management.appservice.v2018_02_01.HostingEnvironmentProfile; import com.microsoft.azure.management.appservice.v2018_02_01.ProvisioningState; import com.microsoft.azure.management.appservice.v2018_02_01.SkuDescription; import rx.functions.Func1; class AppServicePlanImpl extends GroupableResourceCoreImpl<AppServicePlan, AppServicePlanInner, AppServicePlanImpl, AppServiceManager> implements AppServicePlan, AppServicePlan.Definition, AppServicePlan.Update { private AppServicePlanPatchResource updateParameter; AppServicePlanImpl(String name, AppServicePlanInner inner, AppServiceManager manager) { super(name, inner, manager); this.updateParameter = new AppServicePlanPatchResource(); } @Override public Observable<AppServicePlan> createResourceAsync() { AppServicePlansInner client = this.manager().inner().appServicePlans(); return client.createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner()) .map(new Func1<AppServicePlanInner, AppServicePlanInner>() { @Override public AppServicePlanInner call(AppServicePlanInner resource) { resetCreateUpdateParameters(); return resource; } }) .map(innerToFluentMap(this)); } @Override public Observable<AppServicePlan> updateResourceAsync() { AppServicePlansInner client = this.manager().inner().appServicePlans(); return 
client.updateAsync(this.resourceGroupName(), this.name(), this.updateParameter) .map(new Func1<AppServicePlanInner, AppServicePlanInner>() { @Override public AppServicePlanInner call(AppServicePlanInner resource) { resetCreateUpdateParameters(); return resource; } }) .map(innerToFluentMap(this)); } @Override protected Observable<AppServicePlanInner> getInnerAsync() { AppServicePlansInner client = this.manager().inner().appServicePlans(); return client.getByResourceGroupAsync(this.resourceGroupName(), this.name()); } @Override public boolean isInCreateMode() { return this.inner().id() == null; } private void resetCreateUpdateParameters() { this.updateParameter = new AppServicePlanPatchResource(); } @Override public DateTime freeOfferExpirationTime() { return this.inner().freeOfferExpirationTime(); } @Override public String geoRegion() { return this.inner().geoRegion(); } @Override public HostingEnvironmentProfile hostingEnvironmentProfile() { return this.inner().hostingEnvironmentProfile(); } @Override public Boolean hyperV() { return this.inner().hyperV(); } @Override public Boolean isSpot() { return this.inner().isSpot(); } @Override public Boolean isXenon() { return this.inner().isXenon(); } @Override public String kind() { return this.inner().kind(); } @Override public Integer maximumElasticWorkerCount() { return this.inner().maximumElasticWorkerCount(); } @Override public Integer maximumNumberOfWorkers() { return this.inner().maximumNumberOfWorkers(); } @Override public Integer numberOfSites() { return this.inner().numberOfSites(); } @Override public Boolean perSiteScaling() { return this.inner().perSiteScaling(); } @Override public ProvisioningState provisioningState() { return this.inner().provisioningState(); } @Override public Boolean reserved() { return this.inner().reserved(); } @Override public String resourceGroup() { return this.inner().resourceGroup(); } @Override public SkuDescription sku() { return this.inner().sku(); } @Override public DateTime 
spotExpirationTime() { return this.inner().spotExpirationTime(); } @Override public StatusOptions status() { return this.inner().status(); } @Override public String subscription() { return this.inner().subscription(); } @Override public Integer targetWorkerCount() { return this.inner().targetWorkerCount(); } @Override public Integer targetWorkerSizeId() { return this.inner().targetWorkerSizeId(); } @Override public String workerTierName() { return this.inner().workerTierName(); } @Override public AppServicePlanImpl withSku(SkuDescription sku) { this.inner().withSku(sku); return this; } @Override public AppServicePlanImpl withFreeOfferExpirationTime(DateTime freeOfferExpirationTime) { if (isInCreateMode()) { this.inner().withFreeOfferExpirationTime(freeOfferExpirationTime); } else { this.updateParameter.withFreeOfferExpirationTime(freeOfferExpirationTime); } return this; } @Override public AppServicePlanImpl withHostingEnvironmentProfile(HostingEnvironmentProfile hostingEnvironmentProfile) { if (isInCreateMode()) { this.inner().withHostingEnvironmentProfile(hostingEnvironmentProfile); } else { this.updateParameter.withHostingEnvironmentProfile(hostingEnvironmentProfile); } return this; } @Override public AppServicePlanImpl withHyperV(Boolean hyperV) { if (isInCreateMode()) { this.inner().withHyperV(hyperV); } else { this.updateParameter.withHyperV(hyperV); } return this; } @Override public AppServicePlanImpl withIsSpot(Boolean isSpot) { if (isInCreateMode()) { this.inner().withIsSpot(isSpot); } else { this.updateParameter.withIsSpot(isSpot); } return this; } @Override public AppServicePlanImpl withIsXenon(Boolean isXenon) { if (isInCreateMode()) { this.inner().withIsXenon(isXenon); } else { this.updateParameter.withIsXenon(isXenon); } return this; } @Override public AppServicePlanImpl withKind(String kind) { if (isInCreateMode()) { this.inner().withKind(kind); } else { this.updateParameter.withKind(kind); } return this; } @Override public AppServicePlanImpl 
withMaximumElasticWorkerCount(Integer maximumElasticWorkerCount) { if (isInCreateMode()) { this.inner().withMaximumElasticWorkerCount(maximumElasticWorkerCount); } else { this.updateParameter.withMaximumElasticWorkerCount(maximumElasticWorkerCount); } return this; } @Override public AppServicePlanImpl withPerSiteScaling(Boolean perSiteScaling) { if (isInCreateMode()) { this.inner().withPerSiteScaling(perSiteScaling); } else { this.updateParameter.withPerSiteScaling(perSiteScaling); } return this; } @Override public AppServicePlanImpl withReserved(Boolean reserved) { if (isInCreateMode()) { this.inner().withReserved(reserved); } else { this.updateParameter.withReserved(reserved); } return this; } @Override public AppServicePlanImpl withSpotExpirationTime(DateTime spotExpirationTime) { if (isInCreateMode()) { this.inner().withSpotExpirationTime(spotExpirationTime); } else { this.updateParameter.withSpotExpirationTime(spotExpirationTime); } return this; } @Override public AppServicePlanImpl withTargetWorkerCount(Integer targetWorkerCount) { if (isInCreateMode()) { this.inner().withTargetWorkerCount(targetWorkerCount); } else { this.updateParameter.withTargetWorkerCount(targetWorkerCount); } return this; } @Override public AppServicePlanImpl withTargetWorkerSizeId(Integer targetWorkerSizeId) { if (isInCreateMode()) { this.inner().withTargetWorkerSizeId(targetWorkerSizeId); } else { this.updateParameter.withTargetWorkerSizeId(targetWorkerSizeId); } return this; } @Override public AppServicePlanImpl withWorkerTierName(String workerTierName) { if (isInCreateMode()) { this.inner().withWorkerTierName(workerTierName); } else { this.updateParameter.withWorkerTierName(workerTierName); } return this; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2.provider.http5; import java.io.File; import java.io.IOException; import java.net.ProxySelector; import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; import org.apache.commons.vfs2.Capability; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.apache.commons.vfs2.UserAuthenticationData; import org.apache.commons.vfs2.UserAuthenticator; import org.apache.commons.vfs2.provider.AbstractOriginatingFileProvider; import org.apache.commons.vfs2.provider.GenericFileName; import org.apache.commons.vfs2.util.UserAuthenticatorUtils; import org.apache.hc.client5.http.auth.AuthCache; import 
org.apache.hc.client5.http.auth.AuthScope; import org.apache.hc.client5.http.auth.UsernamePasswordCredentials; import org.apache.hc.client5.http.classic.HttpClient; import org.apache.hc.client5.http.config.RequestConfig; import org.apache.hc.client5.http.cookie.BasicCookieStore; import org.apache.hc.client5.http.cookie.Cookie; import org.apache.hc.client5.http.cookie.CookieStore; import org.apache.hc.client5.http.impl.auth.BasicAuthCache; import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider; import org.apache.hc.client5.http.impl.auth.BasicScheme; import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; import org.apache.hc.client5.http.impl.classic.HttpClients; import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder; import org.apache.hc.client5.http.impl.routing.DefaultProxyRoutePlanner; import org.apache.hc.client5.http.impl.routing.SystemDefaultRoutePlanner; import org.apache.hc.client5.http.io.HttpClientConnectionManager; import org.apache.hc.client5.http.protocol.HttpClientContext; import org.apache.hc.client5.http.routing.HttpRoutePlanner; import org.apache.hc.client5.http.ssl.DefaultHostnameVerifier; import org.apache.hc.client5.http.ssl.NoopHostnameVerifier; import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory; import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactoryBuilder; import org.apache.hc.client5.http.ssl.TrustAllStrategy; import org.apache.hc.core5.http.ConnectionReuseStrategy; import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.HttpHeaders; import org.apache.hc.core5.http.HttpHost; import org.apache.hc.core5.http.HttpRequest; import org.apache.hc.core5.http.HttpResponse; import org.apache.hc.core5.http.impl.DefaultConnectionReuseStrategy; import org.apache.hc.core5.http.io.SocketConfig; import org.apache.hc.core5.http.message.BasicHeader; import org.apache.hc.core5.http.protocol.HttpContext; import org.apache.hc.core5.http.ssl.TLS; import 
org.apache.hc.core5.ssl.SSLContextBuilder;

/**
 * <code>FileProvider</code> implementation using HttpComponents HttpClient v5 library.
 *
 * @since 2.5.0
 */
public class Http5FileProvider extends AbstractOriginatingFileProvider {

    /** Authenticator information. */
    static final UserAuthenticationData.Type[] AUTHENTICATOR_TYPES =
            new UserAuthenticationData.Type[] {
                    UserAuthenticationData.USERNAME,
                    UserAuthenticationData.PASSWORD
            };

    /** FileProvider capabilities */
    static final Collection<Capability> capabilities =
            Collections.unmodifiableCollection(
                    Arrays.asList(
                            Capability.GET_TYPE,
                            Capability.READ_CONTENT,
                            Capability.URI,
                            Capability.GET_LAST_MODIFIED,
                            Capability.ATTRIBUTES,
                            Capability.RANDOM_ACCESS_READ,
                            Capability.DIRECTORY_READ_CONTENT
                            )
                    );

    /**
     * Constructs a new provider.
     */
    public Http5FileProvider() {
        // NOTE: redundant super() call removed; the implicit no-arg super
        // constructor invocation is identical.
        setFileNameParser(Http5FileNameParser.getInstance());
    }

    @Override
    public FileSystemConfigBuilder getConfigBuilder() {
        return Http5FileSystemConfigBuilder.getInstance();
    }

    @Override
    public Collection<Capability> getCapabilities() {
        return capabilities;
    }

    @Override
    protected FileSystem doCreateFileSystem(final FileName name, final FileSystemOptions fileSystemOptions)
            throws FileSystemException {
        final GenericFileName rootName = (GenericFileName) name;

        UserAuthenticationData authData = null;
        HttpClient httpClient = null;
        HttpClientContext httpClientContext = null;

        try {
            final Http5FileSystemConfigBuilder builder = Http5FileSystemConfigBuilder.getInstance();
            authData = UserAuthenticatorUtils.authenticate(fileSystemOptions, AUTHENTICATOR_TYPES);
            httpClientContext = createHttpClientContext(builder, rootName, fileSystemOptions, authData);
            httpClient = createHttpClient(builder, rootName, fileSystemOptions);
        } finally {
            // Always scrub collected credentials, even if client construction failed
            UserAuthenticatorUtils.cleanup(authData);
        }

        return new Http5FileSystem(rootName, fileSystemOptions, httpClient, httpClientContext);
    }

    /**
     * Create an {@link HttpClient} object for an http5 file system.
* * @param builder Configuration options builder for http4 provider * @param rootName The root path * @param fileSystemOptions The file system options * @return an {@link HttpClient} object * @throws FileSystemException if an error occurs. */ protected HttpClient createHttpClient(final Http5FileSystemConfigBuilder builder, final GenericFileName rootName, final FileSystemOptions fileSystemOptions) throws FileSystemException { return createHttpClientBuilder(builder, rootName, fileSystemOptions).build(); } /** * Create an {@link HttpClientBuilder} object. Invoked by {@link #createHttpClient(Http5FileSystemConfigBuilder, GenericFileName, FileSystemOptions)}. * * @param builder Configuration options builder for HTTP4 provider * @param rootName The root path * @param fileSystemOptions The FileSystem options * @return an {@link HttpClientBuilder} object * @throws FileSystemException if an error occurs */ protected HttpClientBuilder createHttpClientBuilder(final Http5FileSystemConfigBuilder builder, final GenericFileName rootName, final FileSystemOptions fileSystemOptions) throws FileSystemException { final List<Header> defaultHeaders = new ArrayList<>(); defaultHeaders.add(new BasicHeader(HttpHeaders.USER_AGENT, builder.getUserAgent(fileSystemOptions))); final ConnectionReuseStrategy connectionReuseStrategy = builder.isKeepAlive(fileSystemOptions) ? 
DefaultConnectionReuseStrategy.INSTANCE : new ConnectionReuseStrategy() { @Override public boolean keepAlive( final HttpRequest request, final HttpResponse response, final HttpContext context) { return false; } }; final HttpClientBuilder httpClientBuilder = HttpClients.custom() .setRoutePlanner(createHttpRoutePlanner(builder, fileSystemOptions)) .setConnectionManager(createConnectionManager(builder, fileSystemOptions)) .setConnectionReuseStrategy(connectionReuseStrategy) .setDefaultRequestConfig(createDefaultRequestConfig(builder, fileSystemOptions)) .setDefaultHeaders(defaultHeaders) .setDefaultCookieStore(createDefaultCookieStore(builder, fileSystemOptions)); if (!builder.getFollowRedirect(fileSystemOptions)) { httpClientBuilder.disableRedirectHandling(); } return httpClientBuilder; } /** * Create {@link SSLContext} for HttpClient. Invoked by {@link #createHttpClientBuilder(Http5FileSystemConfigBuilder, GenericFileName, FileSystemOptions)}. * * @param builder Configuration options builder for HTTP4 provider * @param fileSystemOptions The FileSystem options * @return a {@link SSLContext} for HttpClient * @throws FileSystemException if an error occurs */ protected SSLContext createSSLContext(final Http5FileSystemConfigBuilder builder, final FileSystemOptions fileSystemOptions) throws FileSystemException { try { final SSLContextBuilder sslContextBuilder = new SSLContextBuilder(); File keystoreFileObject = null; final String keystoreFile = builder.getKeyStoreFile(fileSystemOptions); if (keystoreFile != null && !keystoreFile.isEmpty()) { keystoreFileObject = new File(keystoreFile); } if (keystoreFileObject != null && keystoreFileObject.exists()) { final String keystorePass = builder.getKeyStorePass(fileSystemOptions); final char[] keystorePassChars = (keystorePass != null) ? 
keystorePass.toCharArray() : null; sslContextBuilder.loadTrustMaterial(keystoreFileObject, keystorePassChars, TrustAllStrategy.INSTANCE); } else { sslContextBuilder.loadTrustMaterial(TrustAllStrategy.INSTANCE); } return sslContextBuilder.build(); } catch (final KeyStoreException e) { throw new FileSystemException("Keystore error. " + e.getMessage(), e); } catch (final KeyManagementException e) { throw new FileSystemException("Cannot retrieve keys. " + e.getMessage(), e); } catch (final NoSuchAlgorithmException e) { throw new FileSystemException("Algorithm error. " + e.getMessage(), e); } catch (final CertificateException e) { throw new FileSystemException("Certificate error. " + e.getMessage(), e); } catch (final IOException e) { throw new FileSystemException("Cannot open key file. " + e.getMessage(), e); } } /** * Create an {@link HttpClientContext} object for an http4 file system. * * @param builder Configuration options builder for http4 provider * @param rootName The root path * @param fileSystemOptions The FileSystem options * @param authData The <code>UserAuthentiationData</code> object * @return an {@link HttpClientContext} object * @throws FileSystemException if an error occurs */ protected HttpClientContext createHttpClientContext(final Http5FileSystemConfigBuilder builder, final GenericFileName rootName, final FileSystemOptions fileSystemOptions, final UserAuthenticationData authData) throws FileSystemException { final HttpClientContext clientContext = HttpClientContext.create(); final BasicCredentialsProvider credsProvider = new BasicCredentialsProvider(); clientContext.setCredentialsProvider(credsProvider); final String username = UserAuthenticatorUtils.toString(UserAuthenticatorUtils.getData(authData, UserAuthenticationData.USERNAME, UserAuthenticatorUtils.toChar(rootName.getUserName()))); final char[] password = UserAuthenticatorUtils.getData(authData, UserAuthenticationData.PASSWORD, UserAuthenticatorUtils.toChar(rootName.getPassword())); if 
(username != null && !username.isEmpty()) { // -1 for any port credsProvider.setCredentials(new AuthScope(rootName.getHostName(), -1), new UsernamePasswordCredentials(username, password)); } final HttpHost proxyHost = getProxyHttpHost(builder, fileSystemOptions); if (proxyHost != null) { final UserAuthenticator proxyAuth = builder.getProxyAuthenticator(fileSystemOptions); if (proxyAuth != null) { final UserAuthenticationData proxyAuthData = UserAuthenticatorUtils.authenticate(proxyAuth, new UserAuthenticationData.Type[] { UserAuthenticationData.USERNAME, UserAuthenticationData.PASSWORD }); if (proxyAuthData != null) { final UsernamePasswordCredentials proxyCreds = new UsernamePasswordCredentials( UserAuthenticatorUtils.toString( UserAuthenticatorUtils.getData(authData, UserAuthenticationData.USERNAME, null)), UserAuthenticatorUtils.getData(authData, UserAuthenticationData.PASSWORD, null)); // -1 for any port credsProvider.setCredentials(new AuthScope(proxyHost.getHostName(), -1), proxyCreds); } if (builder.isPreemptiveAuth(fileSystemOptions)) { final AuthCache authCache = new BasicAuthCache(); final BasicScheme basicAuth = new BasicScheme(); authCache.put(proxyHost, basicAuth); clientContext.setAuthCache(authCache); } } } return clientContext; } private HttpClientConnectionManager createConnectionManager(final Http5FileSystemConfigBuilder builder, final FileSystemOptions fileSystemOptions) throws FileSystemException { final SocketConfig socketConfig = SocketConfig .custom() .setSoTimeout(builder.getSoTimeout(fileSystemOptions), TimeUnit.MILLISECONDS) .build(); final String[] tlsVersions = builder.getTlsVersions(fileSystemOptions).split("\\s*,\\s*"); final TLS[] tlsArray = new TLS[tlsVersions.length]; for (int i = 0; i < tlsVersions.length; i++) { tlsArray[i] = TLS.valueOf(tlsVersions[i]); } final SSLConnectionSocketFactory sslSocketFactory = SSLConnectionSocketFactoryBuilder.create() .setSslContext(createSSLContext(builder, fileSystemOptions)) 
.setHostnameVerifier(createHostnameVerifier(builder, fileSystemOptions)) .setTlsVersions(tlsArray) .build(); return PoolingHttpClientConnectionManagerBuilder.create() .setSSLSocketFactory(sslSocketFactory) .setMaxConnTotal(builder.getMaxTotalConnections(fileSystemOptions)) .setMaxConnPerRoute(builder.getMaxConnectionsPerHost(fileSystemOptions)) .setDefaultSocketConfig(socketConfig) .build(); } private RequestConfig createDefaultRequestConfig(final Http5FileSystemConfigBuilder builder, final FileSystemOptions fileSystemOptions) { return RequestConfig.custom() .setConnectTimeout(builder.getConnectionTimeout(fileSystemOptions), TimeUnit.MILLISECONDS) .build(); } private HttpRoutePlanner createHttpRoutePlanner(final Http5FileSystemConfigBuilder builder, final FileSystemOptions fileSystemOptions) { final HttpHost proxyHost = getProxyHttpHost(builder, fileSystemOptions); if (proxyHost != null) { return new DefaultProxyRoutePlanner(proxyHost); } return new SystemDefaultRoutePlanner(ProxySelector.getDefault()); } private HttpHost getProxyHttpHost(final Http5FileSystemConfigBuilder builder, final FileSystemOptions fileSystemOptions) { final String proxyHost = builder.getProxyHost(fileSystemOptions); final int proxyPort = builder.getProxyPort(fileSystemOptions); if (proxyHost != null && proxyHost.length() > 0 && proxyPort > 0) { return new HttpHost(proxyHost, proxyPort); } return null; } private CookieStore createDefaultCookieStore(final Http5FileSystemConfigBuilder builder, final FileSystemOptions fileSystemOptions) { final CookieStore cookieStore = new BasicCookieStore(); final Cookie[] cookies = builder.getCookies(fileSystemOptions); if (cookies != null) { for (final Cookie cookie : cookies) { cookieStore.addCookie(cookie); } } return cookieStore; } private HostnameVerifier createHostnameVerifier(final Http5FileSystemConfigBuilder builder, final FileSystemOptions fileSystemOptions) throws FileSystemException { if (!builder.isHostnameVerificationEnabled(fileSystemOptions)) 
{ return NoopHostnameVerifier.INSTANCE; } return new DefaultHostnameVerifier(); } }
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.Queue; import java.util.RandomAccess; import java.util.Set; import java.util.SortedSet; import javax.annotation.Nullable; /** * This class contains static utility methods that operate on or return objects * of type {@code Iterable}. Except as noted, each method has a corresponding * {@link Iterator}-based method in the {@link Iterators} class. * * <p><i>Performance notes:</i> Unless otherwise noted, all of the iterables * produced in this class are <i>lazy</i>, which means that their iterators * only advance the backing iteration when absolutely necessary. * * <p>See the Guava User Guide article on <a href= * "http://code.google.com/p/guava-libraries/wiki/CollectionUtilitiesExplained#Iterables"> * {@code Iterables}</a>. 
 *
 * @author Kevin Bourrillion
 * @author Jared Levy
 * @since 2.0 (imported from Google Collections Library)
 */
@GwtCompatible(emulated = true)
public final class Iterables {
  // Static-utility holder; never instantiated.
  private Iterables() {}

  /** Returns an unmodifiable view of {@code iterable}. */
  public static <T> Iterable<T> unmodifiableIterable(
      final Iterable<T> iterable) {
    checkNotNull(iterable);
    if (iterable instanceof UnmodifiableIterable ||
        iterable instanceof ImmutableCollection) {
      // Already unmodifiable (or immutable) — no need to wrap again.
      return iterable;
    }
    return new UnmodifiableIterable<T>(iterable);
  }

  /**
   * Simply returns its argument.
   *
   * @deprecated no need to use this
   * @since 10.0
   */
  @Deprecated
  public static <E> Iterable<E> unmodifiableIterable(
      ImmutableCollection<E> iterable) {
    return checkNotNull(iterable);
  }

  /** View wrapper whose iterators reject {@code remove()}. */
  private static final class UnmodifiableIterable<T> extends FluentIterable<T> {
    private final Iterable<T> iterable;

    private UnmodifiableIterable(Iterable<T> iterable) {
      this.iterable = iterable;
    }

    @Override
    public Iterator<T> iterator() {
      return Iterators.unmodifiableIterator(iterable.iterator());
    }

    @Override
    public String toString() {
      return iterable.toString();
    }
    // no equals and hashCode; it would break the contract!
  }

  /**
   * Returns the number of elements in {@code iterable}.
   */
  public static int size(Iterable<?> iterable) {
    // Collections know their size; anything else requires a full traversal.
    return (iterable instanceof Collection)
        ? ((Collection<?>) iterable).size()
        : Iterators.size(iterable.iterator());
  }

  /**
   * Returns {@code true} if {@code iterable} contains any object for which {@code equals(element)}
   * is true.
   */
  public static boolean contains(Iterable<?> iterable, @Nullable Object element) {
    if (iterable instanceof Collection) {
      Collection<?> collection = (Collection<?>) iterable;
      try {
        return collection.contains(element);
      } catch (NullPointerException e) {
        // Null-hostile or type-restricted collections cannot contain the element.
        return false;
      } catch (ClassCastException e) {
        return false;
      }
    }
    return Iterators.contains(iterable.iterator(), element);
  }

  /**
   * Removes, from an iterable, every element that belongs to the provided
   * collection.
   *
   * <p>This method calls {@link Collection#removeAll} if {@code iterable} is a
   * collection, and {@link Iterators#removeAll} otherwise.
   *
   * @param removeFrom the iterable to (potentially) remove elements from
   * @param elementsToRemove the elements to remove
   * @return {@code true} if any element was removed from {@code iterable}
   */
  public static boolean removeAll(
      Iterable<?> removeFrom, Collection<?> elementsToRemove) {
    return (removeFrom instanceof Collection)
        ? ((Collection<?>) removeFrom).removeAll(checkNotNull(elementsToRemove))
        : Iterators.removeAll(removeFrom.iterator(), elementsToRemove);
  }

  /**
   * Removes, from an iterable, every element that does not belong to the
   * provided collection.
   *
   * <p>This method calls {@link Collection#retainAll} if {@code iterable} is a
   * collection, and {@link Iterators#retainAll} otherwise.
   *
   * @param removeFrom the iterable to (potentially) remove elements from
   * @param elementsToRetain the elements to retain
   * @return {@code true} if any element was removed from {@code iterable}
   */
  public static boolean retainAll(
      Iterable<?> removeFrom, Collection<?> elementsToRetain) {
    return (removeFrom instanceof Collection)
        ? ((Collection<?>) removeFrom).retainAll(checkNotNull(elementsToRetain))
        : Iterators.retainAll(removeFrom.iterator(), elementsToRetain);
  }

  /**
   * Removes, from an iterable, every element that satisfies the provided
   * predicate.
   *
   * @param removeFrom the iterable to (potentially) remove elements from
   * @param predicate a predicate that determines whether an element should
   *     be removed
   * @return {@code true} if any elements were removed from the iterable
   *
   * @throws UnsupportedOperationException if the iterable does not support
   *     {@code remove()}.
   * @since 2.0
   */
  public static <T> boolean removeIf(
      Iterable<T> removeFrom, Predicate<? super T> predicate) {
    // Random-access lists get a compaction pass that avoids O(n^2) removal cost.
    if (removeFrom instanceof RandomAccess && removeFrom instanceof List) {
      return removeIfFromRandomAccessList(
          (List<T>) removeFrom, checkNotNull(predicate));
    }
    return Iterators.removeIf(removeFrom.iterator(), predicate);
  }

  private static <T> boolean removeIfFromRandomAccessList(
      List<T> list, Predicate<? super T> predicate) {
    // Note: Not all random access lists support set() so we need to deal with
    // those that don't and attempt the slower remove() based solution.
    int from = 0;
    int to = 0;

    // Compact kept elements toward the front; 'to' is the write cursor.
    for (; from < list.size(); from++) {
      T element = list.get(from);
      if (!predicate.apply(element)) {
        if (from > to) {
          try {
            list.set(to, element);
          } catch (UnsupportedOperationException e) {
            // Fixed-structure list (e.g. supports set() elsewhere but not here):
            // fall back to element-by-element removal for the rest.
            slowRemoveIfForRemainingElements(list, predicate, to, from);
            return true;
          }
        }
        to++;
      }
    }

    // Clear the tail of any remaining items
    list.subList(to, list.size()).clear();
    return from != to;
  }

  private static <T> void slowRemoveIfForRemainingElements(List<T> list,
      Predicate<? super T> predicate, int to, int from) {
    // Here we know that:
    // * (to < from) and that both are valid indices.
    // * Everything with (index < to) should be kept.
    // * Everything with (to <= index < from) should be removed.
    // * The element with (index == from) should be kept.
    // * Everything with (index > from) has not been checked yet.

    // Check from the end of the list backwards (minimize expected cost of
    // moving elements when remove() is called). Stop before 'from' because
    // we already know that should be kept.
    for (int n = list.size() - 1; n > from; n--) {
      if (predicate.apply(list.get(n))) {
        list.remove(n);
      }
    }

    // And now remove everything in the range [to, from) (going backwards).
    for (int n = from - 1; n >= to; n--) {
      list.remove(n);
    }
  }

  /**
   * Determines whether two iterables contain equal elements in the same order.
   * More specifically, this method returns {@code true} if {@code iterable1}
   * and {@code iterable2} contain the same number of elements and every element
   * of {@code iterable1} is equal to the corresponding element of
   * {@code iterable2}.
   */
  public static boolean elementsEqual(
      Iterable<?> iterable1, Iterable<?> iterable2) {
    return Iterators.elementsEqual(iterable1.iterator(), iterable2.iterator());
  }

  /**
   * Returns a string representation of {@code iterable}, with the format
   * {@code [e1, e2, ..., en]}.
   */
  public static String toString(Iterable<?> iterable) {
    return Iterators.toString(iterable.iterator());
  }

  /**
   * Returns the single element contained in {@code iterable}.
   *
   * @throws NoSuchElementException if the iterable is empty
   * @throws IllegalArgumentException if the iterable contains multiple
   *     elements
   */
  public static <T> T getOnlyElement(Iterable<T> iterable) {
    return Iterators.getOnlyElement(iterable.iterator());
  }

  /**
   * Returns the single element contained in {@code iterable}, or {@code
   * defaultValue} if the iterable is empty.
   *
   * @throws IllegalArgumentException if the iterator contains multiple
   *     elements
   */
  public static <T> T getOnlyElement(
      Iterable<? extends T> iterable, @Nullable T defaultValue) {
    return Iterators.getOnlyElement(iterable.iterator(), defaultValue);
  }

  /**
   * Copies an iterable's elements into an array.
   *
   * @param iterable the iterable to copy
   * @return a newly-allocated array into which all the elements of the iterable
   *     have been copied
   */
  static Object[] toArray(Iterable<?> iterable) {
    return toCollection(iterable).toArray();
  }

  /**
   * Converts an iterable into a collection. If the iterable is already a
   * collection, it is returned. Otherwise, an {@link java.util.ArrayList} is
   * created with the contents of the iterable in the same iteration order.
   */
  private static <E> Collection<E> toCollection(Iterable<E> iterable) {
    return (iterable instanceof Collection)
        ? (Collection<E>) iterable
        : Lists.newArrayList(iterable.iterator());
  }

  /**
   * Adds all elements in {@code iterable} to {@code collection}.
   *
   * @return {@code true} if {@code collection} was modified as a result of this
   *     operation.
   */
  public static <T> boolean addAll(
      Collection<T> addTo, Iterable<? extends T> elementsToAdd) {
    if (elementsToAdd instanceof Collection) {
      Collection<? extends T> c = Collections2.cast(elementsToAdd);
      return addTo.addAll(c);
    }
    return Iterators.addAll(addTo, elementsToAdd.iterator());
  }

  /**
   * Returns the number of elements in the specified iterable that equal the
   * specified object. This implementation avoids a full iteration when the
   * iterable is a {@link Multiset} or {@link Set}.
   *
   * @see Collections#frequency
   */
  public static int frequency(Iterable<?> iterable, @Nullable Object element) {
    if ((iterable instanceof Multiset)) {
      return ((Multiset<?>) iterable).count(element);
    }
    if ((iterable instanceof Set)) {
      // A Set contains at most one occurrence of any element.
      return ((Set<?>) iterable).contains(element) ? 1 : 0;
    }
    return Iterators.frequency(iterable.iterator(), element);
  }

  /**
   * Returns an iterable whose iterators cycle indefinitely over the elements of
   * {@code iterable}.
   *
   * <p>That iterator supports {@code remove()} if {@code iterable.iterator()}
   * does. After {@code remove()} is called, subsequent cycles omit the removed
   * element, which is no longer in {@code iterable}. The iterator's
   * {@code hasNext()} method returns {@code true} until {@code iterable} is
   * empty.
   *
   * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an
   * infinite loop. You should use an explicit {@code break} or be certain that
   * you will eventually remove all the elements.
   *
   * <p>To cycle over the iterable {@code n} times, use the following:
   * {@code Iterables.concat(Collections.nCopies(n, iterable))}
   */
  public static <T> Iterable<T> cycle(final Iterable<T> iterable) {
    checkNotNull(iterable);
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.cycle(iterable);
      }

      @Override
      public String toString() {
        return iterable.toString() + " (cycled)";
      }
    };
  }

  /**
   * Returns an iterable whose iterators cycle indefinitely over the provided
   * elements.
   *
   * <p>After {@code remove} is invoked on a generated iterator, the removed
   * element will no longer appear in either that iterator or any other iterator
   * created from the same source iterable. That is, this method behaves exactly
   * as {@code Iterables.cycle(Lists.newArrayList(elements))}. The iterator's
   * {@code hasNext} method returns {@code true} until all of the original
   * elements have been removed.
   *
   * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an
   * infinite loop. You should use an explicit {@code break} or be certain that
   * you will eventually remove all the elements.
   *
   * <p>To cycle over the elements {@code n} times, use the following:
   * {@code Iterables.concat(Collections.nCopies(n, Arrays.asList(elements)))}
   */
  public static <T> Iterable<T> cycle(T... elements) {
    return cycle(Lists.newArrayList(elements));
  }

  /**
   * Combines two iterables into a single iterable. The returned iterable has an
   * iterator that traverses the elements in {@code a}, followed by the elements
   * in {@code b}. The source iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the
   * corresponding input iterator supports it.
   */
  @SuppressWarnings("unchecked")
  public static <T> Iterable<T> concat(
      Iterable<? extends T> a, Iterable<? extends T> b) {
    checkNotNull(a);
    checkNotNull(b);
    return concat(Arrays.asList(a, b));
  }

  /**
   * Combines three iterables into a single iterable. The returned iterable has
   * an iterator that traverses the elements in {@code a}, followed by the
   * elements in {@code b}, followed by the elements in {@code c}. The source
   * iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the
   * corresponding input iterator supports it.
   */
  @SuppressWarnings("unchecked")
  public static <T> Iterable<T> concat(Iterable<? extends T> a,
      Iterable<? extends T> b, Iterable<? extends T> c) {
    checkNotNull(a);
    checkNotNull(b);
    checkNotNull(c);
    return concat(Arrays.asList(a, b, c));
  }

  /**
   * Combines four iterables into a single iterable. The returned iterable has
   * an iterator that traverses the elements in {@code a}, followed by the
   * elements in {@code b}, followed by the elements in {@code c}, followed by
   * the elements in {@code d}. The source iterators are not polled until
   * necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the
   * corresponding input iterator supports it.
   */
  @SuppressWarnings("unchecked")
  public static <T> Iterable<T> concat(Iterable<? extends T> a,
      Iterable<? extends T> b, Iterable<? extends T> c,
      Iterable<? extends T> d) {
    checkNotNull(a);
    checkNotNull(b);
    checkNotNull(c);
    checkNotNull(d);
    return concat(Arrays.asList(a, b, c, d));
  }

  /**
   * Combines multiple iterables into a single iterable. The returned iterable
   * has an iterator that traverses the elements of each iterable in
   * {@code inputs}. The input iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the
   * corresponding input iterator supports it.
   *
   * @throws NullPointerException if any of the provided iterables is null
   */
  public static <T> Iterable<T> concat(Iterable<? extends T>... inputs) {
    // copyOf rejects null inputs eagerly, giving the documented NPE.
    return concat(ImmutableList.copyOf(inputs));
  }

  /**
   * Combines multiple iterables into a single iterable. The returned iterable
   * has an iterator that traverses the elements of each iterable in
   * {@code inputs}. The input iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the
   * corresponding input iterator supports it. The methods of the returned
   * iterable may throw {@code NullPointerException} if any of the input
   * iterators is null.
   */
  public static <T> Iterable<T> concat(
      final Iterable<? extends Iterable<? extends T>> inputs) {
    checkNotNull(inputs);
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.concat(iterators(inputs));
      }
    };
  }

  /**
   * Returns an iterator over the iterators of the given iterables.
   */
  private static <T> UnmodifiableIterator<Iterator<? extends T>> iterators(
      Iterable<? extends Iterable<? extends T>> iterables) {
    final Iterator<? extends Iterable<? extends T>> iterableIterator =
        iterables.iterator();
    return new UnmodifiableIterator<Iterator<? extends T>>() {
      @Override
      public boolean hasNext() {
        return iterableIterator.hasNext();
      }

      @Override
      public Iterator<? extends T> next() {
        return iterableIterator.next().iterator();
      }
    };
  }

  /**
   * Divides an iterable into unmodifiable sublists of the given size (the final
   * iterable may be smaller). For example, partitioning an iterable containing
   * {@code [a, b, c, d, e]} with a partition size of 3 yields {@code
   * [[a, b, c], [d, e]]} -- an outer iterable containing two inner lists of
   * three and two elements, all in the original order.
   *
   * <p>Iterators returned by the returned iterable do not support the {@link
   * Iterator#remove()} method. The returned lists implement {@link
   * RandomAccess}, whether or not the input list does.
   *
   * <p><b>Note:</b> if {@code iterable} is a {@link List}, use {@link
   * Lists#partition(List, int)} instead.
   *
   * @param iterable the iterable to return a partitioned view of
   * @param size the desired size of each partition (the last may be smaller)
   * @return an iterable of unmodifiable lists containing the elements of {@code
   *     iterable} divided into partitions
   * @throws IllegalArgumentException if {@code size} is nonpositive
   */
  public static <T> Iterable<List<T>> partition(
      final Iterable<T> iterable, final int size) {
    checkNotNull(iterable);
    checkArgument(size > 0);
    return new FluentIterable<List<T>>() {
      @Override
      public Iterator<List<T>> iterator() {
        return Iterators.partition(iterable.iterator(), size);
      }
    };
  }

  /**
   * Divides an iterable into unmodifiable sublists of the given size, padding
   * the final iterable with null values if necessary. For example, partitioning
   * an iterable containing {@code [a, b, c, d, e]} with a partition size of 3
   * yields {@code [[a, b, c], [d, e, null]]} -- an outer iterable containing
   * two inner lists of three elements each, all in the original order.
   *
   * <p>Iterators returned by the returned iterable do not support the {@link
   * Iterator#remove()} method.
   *
   * @param iterable the iterable to return a partitioned view of
   * @param size the desired size of each partition
   * @return an iterable of unmodifiable lists containing the elements of {@code
   *     iterable} divided into partitions (the final iterable may have
   *     trailing null elements)
   * @throws IllegalArgumentException if {@code size} is nonpositive
   */
  public static <T> Iterable<List<T>> paddedPartition(
      final Iterable<T> iterable, final int size) {
    checkNotNull(iterable);
    checkArgument(size > 0);
    return new FluentIterable<List<T>>() {
      @Override
      public Iterator<List<T>> iterator() {
        return Iterators.paddedPartition(iterable.iterator(), size);
      }
    };
  }

  /**
   * Returns the elements of {@code unfiltered} that satisfy a predicate. The
   * resulting iterable's iterator does not support {@code remove()}.
   */
  public static <T> Iterable<T> filter(
      final Iterable<T> unfiltered, final Predicate<? super T> predicate) {
    checkNotNull(unfiltered);
    checkNotNull(predicate);
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.filter(unfiltered.iterator(), predicate);
      }
    };
  }

  /**
   * Returns {@code true} if any element in {@code iterable} satisfies the predicate.
   */
  public static <T> boolean any(
      Iterable<T> iterable, Predicate<? super T> predicate) {
    return Iterators.any(iterable.iterator(), predicate);
  }

  /**
   * Returns {@code true} if every element in {@code iterable} satisfies the
   * predicate. If {@code iterable} is empty, {@code true} is returned.
   */
  public static <T> boolean all(
      Iterable<T> iterable, Predicate<? super T> predicate) {
    return Iterators.all(iterable.iterator(), predicate);
  }

  /**
   * Returns the first element in {@code iterable} that satisfies the given
   * predicate; use this method only when such an element is known to exist. If
   * it is possible that <i>no</i> element will match, use {@link #tryFind} or
   * {@link #find(Iterable, Predicate, Object)} instead.
   *
   * @throws NoSuchElementException if no element in {@code iterable} matches
   *     the given predicate
   */
  public static <T> T find(Iterable<T> iterable,
      Predicate<? super T> predicate) {
    return Iterators.find(iterable.iterator(), predicate);
  }

  /**
   * Returns the first element in {@code iterable} that satisfies the given
   * predicate, or {@code defaultValue} if none found. Note that this can
   * usually be handled more naturally using {@code
   * tryFind(iterable, predicate).or(defaultValue)}.
   *
   * @since 7.0
   */
  public static <T> T find(Iterable<? extends T> iterable,
      Predicate<? super T> predicate, @Nullable T defaultValue) {
    return Iterators.find(iterable.iterator(), predicate, defaultValue);
  }

  /**
   * Returns an {@link Optional} containing the first element in {@code
   * iterable} that satisfies the given predicate, if such an element exists.
   *
   * <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code
   * null}. If {@code null} is matched in {@code iterable}, a
   * NullPointerException will be thrown.
   *
   * @since 11.0
   */
  public static <T> Optional<T> tryFind(Iterable<T> iterable,
      Predicate<? super T> predicate) {
    return Iterators.tryFind(iterable.iterator(), predicate);
  }

  /**
   * Returns the index in {@code iterable} of the first element that satisfies
   * the provided {@code predicate}, or {@code -1} if the Iterable has no such
   * elements.
   *
   * <p>More formally, returns the lowest index {@code i} such that
   * {@code predicate.apply(Iterables.get(iterable, i))} returns {@code true},
   * or {@code -1} if there is no such index.
   *
   * @since 2.0
   */
  public static <T> int indexOf(
      Iterable<T> iterable, Predicate<? super T> predicate) {
    return Iterators.indexOf(iterable.iterator(), predicate);
  }

  /**
   * Returns an iterable that applies {@code function} to each element of {@code
   * fromIterable}.
   *
   * <p>The returned iterable's iterator supports {@code remove()} if the
   * provided iterator does. After a successful {@code remove()} call,
   * {@code fromIterable} no longer contains the corresponding element.
   *
   * <p>If the input {@code Iterable} is known to be a {@code List} or other
   * {@code Collection}, consider {@link Lists#transform} and {@link
   * Collections2#transform}.
   */
  public static <F, T> Iterable<T> transform(final Iterable<F> fromIterable,
      final Function<? super F, ? extends T> function) {
    checkNotNull(fromIterable);
    checkNotNull(function);
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.transform(fromIterable.iterator(), function);
      }
    };
  }

  /**
   * Returns the element at the specified position in an iterable.
   *
   * @param position position of the element to return
   * @return the element at the specified position in {@code iterable}
   * @throws IndexOutOfBoundsException if {@code position} is negative or
   *     greater than or equal to the size of {@code iterable}
   */
  public static <T> T get(Iterable<T> iterable, int position) {
    checkNotNull(iterable);
    if (iterable instanceof List) {
      return ((List<T>) iterable).get(position);
    }

    if (iterable instanceof Collection) {
      // Can check both ends
      Collection<T> collection = (Collection<T>) iterable;
      Preconditions.checkElementIndex(position, collection.size());
    } else {
      // Can only check the lower end
      checkNonnegativeIndex(position);
    }
    return Iterators.get(iterable.iterator(), position);
  }

  /** Rejects a negative index; the upper bound is checked elsewhere (if at all). */
  private static void checkNonnegativeIndex(int position) {
    if (position < 0) {
      throw new IndexOutOfBoundsException(
          "position cannot be negative: " + position);
    }
  }

  /**
   * Returns the element at the specified position in an iterable or a default
   * value otherwise.
   *
   * @param position position of the element to return
   * @param defaultValue the default value to return if {@code position} is
   *     greater than or equal to the size of the iterable
   * @return the element at the specified position in {@code iterable} or
   *     {@code defaultValue} if {@code iterable} contains fewer than
   *     {@code position + 1} elements.
   * @throws IndexOutOfBoundsException if {@code position} is negative
   * @since 4.0
   */
  public static <T> T get(Iterable<? extends T> iterable, int position,
      @Nullable T defaultValue) {
    checkNotNull(iterable);
    checkNonnegativeIndex(position);

    try {
      return get(iterable, position);
    } catch (IndexOutOfBoundsException e) {
      // position >= size(iterable): fall back to the caller-supplied default.
      return defaultValue;
    }
  }

  /**
   * Returns the first element in {@code iterable} or {@code defaultValue} if
   * the iterable is empty. The {@link Iterators} analog to this method is
   * {@link Iterators#getNext}.
   *
   * @param defaultValue the default value to return if the iterable is empty
   * @return the first element of {@code iterable} or the default value
   * @since 7.0
   */
  public static <T> T getFirst(Iterable<? extends T> iterable,
      @Nullable T defaultValue) {
    return Iterators.getNext(iterable.iterator(), defaultValue);
  }

  /**
   * Returns the last element of {@code iterable}.
   *
   * @return the last element of {@code iterable}
   * @throws NoSuchElementException if the iterable is empty
   */
  public static <T> T getLast(Iterable<T> iterable) {
    // TODO(kevinb): Support a concurrently modified collection?
    if (iterable instanceof List) {
      List<T> list = (List<T>) iterable;
      if (list.isEmpty()) {
        throw new NoSuchElementException();
      }
      return getLastInNonemptyList(list);
    }

    /*
     * TODO(kevinb): consider whether this "optimization" is worthwhile. Users
     * with SortedSets tend to know they are SortedSets and probably would not
     * call this method.
     */
    if (iterable instanceof SortedSet) {
      SortedSet<T> sortedSet = (SortedSet<T>) iterable;
      return sortedSet.last();
    }

    return Iterators.getLast(iterable.iterator());
  }

  /**
   * Returns the last element of {@code iterable} or {@code defaultValue} if
   * the iterable is empty.
   *
   * @param defaultValue the value to return if {@code iterable} is empty
   * @return the last element of {@code iterable} or the default value
   * @since 3.0
   */
  public static <T> T getLast(Iterable<? extends T> iterable,
      @Nullable T defaultValue) {
    if (iterable instanceof Collection) {
      Collection<? extends T> collection = Collections2.cast(iterable);
      if (collection.isEmpty()) {
        return defaultValue;
      }
    }

    if (iterable instanceof List) {
      List<? extends T> list = Lists.cast(iterable);
      return getLastInNonemptyList(list);
    }

    /*
     * TODO(kevinb): consider whether this "optimization" is worthwhile. Users
     * with SortedSets tend to know they are SortedSets and probably would not
     * call this method.
     */
    if (iterable instanceof SortedSet) {
      SortedSet<? extends T> sortedSet = Sets.cast(iterable);
      return sortedSet.last();
    }

    return Iterators.getLast(iterable.iterator(), defaultValue);
  }

  /** Precondition: {@code list} is non-empty (callers check before dispatching here). */
  private static <T> T getLastInNonemptyList(List<T> list) {
    return list.get(list.size() - 1);
  }

  /**
   * Returns a view of {@code iterable} that skips its first
   * {@code numberToSkip} elements. If {@code iterable} contains fewer than
   * {@code numberToSkip} elements, the returned iterable skips all of its
   * elements.
   *
   * <p>Modifications to the underlying {@link Iterable} before a call to
   * {@code iterator()} are reflected in the returned iterator. That is, the
   * iterator skips the first {@code numberToSkip} elements that exist when the
   * {@code Iterator} is created, not when {@code skip()} is called.
   *
   * <p>The returned iterable's iterator supports {@code remove()} if the
   * iterator of the underlying iterable supports it. Note that it is
   * <i>not</i> possible to delete the last skipped element by immediately
   * calling {@code remove()} on that iterator, as the {@code Iterator}
   * contract states that a call to {@code remove()} before a call to
   * {@code next()} will throw an {@link IllegalStateException}.
   *
   * @since 3.0
   */
  public static <T> Iterable<T> skip(final Iterable<T> iterable,
      final int numberToSkip) {
    checkNotNull(iterable);
    checkArgument(numberToSkip >= 0, "number to skip cannot be negative");

    if (iterable instanceof List) {
      final List<T> list = (List<T>) iterable;
      return new FluentIterable<T>() {
        @Override
        public Iterator<T> iterator() {
          // TODO(kevinb): Support a concurrently modified collection?
          return (numberToSkip >= list.size())
              ? Iterators.<T>emptyIterator()
              : list.subList(numberToSkip, list.size()).iterator();
        }
      };
    }

    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        final Iterator<T> iterator = iterable.iterator();

        Iterators.advance(iterator, numberToSkip);

        /*
         * We can't just return the iterator because an immediate call to its
         * remove() method would remove one of the skipped elements instead of
         * throwing an IllegalStateException.
         */
        return new Iterator<T>() {
          boolean atStart = true;

          @Override
          public boolean hasNext() {
            return iterator.hasNext();
          }

          @Override
          public T next() {
            if (!hasNext()) {
              throw new NoSuchElementException();
            }

            try {
              return iterator.next();
            } finally {
              atStart = false;
            }
          }

          @Override
          public void remove() {
            if (atStart) {
              throw new IllegalStateException();
            }
            iterator.remove();
          }
        };
      }
    };
  }

  /**
   * Creates an iterable with the first {@code limitSize} elements of the given
   * iterable. If the original iterable does not contain that many elements, the
   * returned iterator will have the same behavior as the original iterable. The
   * returned iterable's iterator supports {@code remove()} if the original
   * iterator does.
   *
   * @param iterable the iterable to limit
   * @param limitSize the maximum number of elements in the returned iterator
   * @throws IllegalArgumentException if {@code limitSize} is negative
   * @since 3.0
   */
  public static <T> Iterable<T> limit(
      final Iterable<T> iterable, final int limitSize) {
    checkNotNull(iterable);
    checkArgument(limitSize >= 0, "limit is negative");
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.limit(iterable.iterator(), limitSize);
      }
    };
  }

  /**
   * Returns a view of the supplied iterable that wraps each generated
   * {@link Iterator} through {@link Iterators#consumingIterator(Iterator)}.
   *
   * <p>Note: If {@code iterable} is a {@link Queue}, the returned iterable will
   * get entries from {@link Queue#remove()} since {@link Queue}'s iteration
   * order is undefined. Calling {@link Iterator#hasNext()} on a generated
   * iterator from the returned iterable may cause an item to be immediately
   * dequeued for return on a subsequent call to {@link Iterator#next()}.
   *
   * @param iterable the iterable to wrap
   * @return a view of the supplied iterable that wraps each generated iterator
   *     through {@link Iterators#consumingIterator(Iterator)}; for queues,
   *     an iterable that generates iterators that return and consume the
   *     queue's elements in queue order
   *
   * @see Iterators#consumingIterator(Iterator)
   * @since 2.0
   */
  public static <T> Iterable<T> consumingIterable(final Iterable<T> iterable) {
    if (iterable instanceof Queue) {
      return new FluentIterable<T>() {
        @Override
        public Iterator<T> iterator() {
          return new ConsumingQueueIterator<T>((Queue<T>) iterable);
        }
      };
    }

    checkNotNull(iterable);

    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.consumingIterator(iterable.iterator());
      }
    };
  }

  /** Iterator that drains a {@link Queue} via {@code remove()}, ending when it is empty. */
  private static class ConsumingQueueIterator<T> extends AbstractIterator<T> {
    private final Queue<T> queue;

    private ConsumingQueueIterator(Queue<T> queue) {
      this.queue = queue;
    }

    @Override
    public T computeNext() {
      try {
        return queue.remove();
      } catch (NoSuchElementException e) {
        // Queue exhausted — signal end of iteration.
        return endOfData();
      }
    }
  }

  // Methods only in Iterables, not in Iterators

  /**
   * Determines if the given iterable contains no elements.
   *
   * <p>There is no precise {@link Iterator} equivalent to this method, since
   * one can only ask an iterator whether it has any elements <i>remaining</i>
   * (which one does using {@link Iterator#hasNext}).
   *
   * @return {@code true} if the iterable contains no elements
   */
  public static boolean isEmpty(Iterable<?> iterable) {
    if (iterable instanceof Collection) {
      return ((Collection<?>) iterable).isEmpty();
    }
    return !iterable.iterator().hasNext();
  }

  /**
   * Returns an iterable over the merged contents of all given
   * {@code iterables}. Equivalent entries will not be de-duplicated.
   *
   * <p>Callers must ensure that the source {@code iterables} are in
   * non-descending order as this method does not sort its input.
   *
   * <p>For any equivalent elements across all {@code iterables}, it is
   * undefined which element is returned first.
   *
   * @since 11.0
   */
  @Beta
  public static <T> Iterable<T> mergeSorted(
      final Iterable<? extends Iterable<? extends T>> iterables,
      final Comparator<? super T> comparator) {
    checkNotNull(iterables, "iterables");
    checkNotNull(comparator, "comparator");
    Iterable<T> iterable = new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.mergeSorted(
            Iterables.transform(iterables, Iterables.<T>toIterator()),
            comparator);
      }
    };
    return new UnmodifiableIterable<T>(iterable);
  }

  // TODO(user): Is this the best place for this? Move to fluent functions?
  // Useful as a public method?
  private static <T> Function<Iterable<? extends T>, Iterator<? extends T>>
      toIterator() {
    return new Function<Iterable<? extends T>, Iterator<? extends T>>() {
      @Override
      public Iterator<? extends T> apply(Iterable<? extends T> iterable) {
        return iterable.iterator();
      }
    };
  }
}
/* * Copyright 2006-2012 Amazon Technologies, Inc. or its affiliates. * Amazon, Amazon.com and Carbonado are trademarks or registered trademarks * of Amazon Technologies, Inc. or its affiliates. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amazon.carbonado.sequence; import com.amazon.carbonado.FetchException; import com.amazon.carbonado.PersistException; import com.amazon.carbonado.Repository; import com.amazon.carbonado.RepositoryException; import com.amazon.carbonado.Storage; import com.amazon.carbonado.Transaction; /** * General purpose implementation of a sequence value generator. * * @author Brian S O'Neill * @author bcastill * @see com.amazon.carbonado.Sequence * @see StoredSequence * @since 1.2 */ public class SequenceValueGenerator extends AbstractSequenceValueProducer { public static final int DEFAULT_RESERVE_AMOUNT = 100; public static final int DEFAULT_INITIAL_VALUE = 1; public static final int DEFAULT_INCREMENT = 1; private final Repository mRepository; private final Storage<StoredSequence> mStorage; private final StoredSequence mStoredSequence; private final int mIncrement; private final int mReserveAmount; private boolean mHasReservedValues; private long mNextValue; /** * Construct a new SequenceValueGenerator which might create persistent * sequence data if it does not exist. The initial sequence value is one, * and the increment is one. 
* * @param repo repository to persist sequence data * @param name name of sequence */ public SequenceValueGenerator(Repository repo, String name) throws RepositoryException { this(repo, name, DEFAULT_INITIAL_VALUE, DEFAULT_INCREMENT); } /** * Construct a new SequenceValueGenerator which might create persistent * sequence data if it does not exist. * * @param repo repository to persist sequence data * @param name name of sequence * @param initialValue initial sequence value, if sequence needs to be created * @param increment amount to increment sequence by */ public SequenceValueGenerator(Repository repo, String name, long initialValue, int increment) throws RepositoryException { this(repo, name, initialValue, increment, DEFAULT_RESERVE_AMOUNT); } /** * Construct a new SequenceValueGenerator which might create persistent * sequence data if it does not exist. * * @param repo repository to persist sequence data * @param name name of sequence * @param initialValue initial sequence value, if sequence needs to be created * @param increment amount to increment sequence by * @param reserveAmount amount of sequence values to reserve */ public SequenceValueGenerator(Repository repo, String name, long initialValue, int increment, int reserveAmount) throws RepositoryException { if (repo == null || name == null || increment < 1 || reserveAmount < 1) { throw new IllegalArgumentException(); } mRepository = repo; mIncrement = increment; mReserveAmount = reserveAmount; mStorage = repo.storageFor(StoredSequence.class); mStoredSequence = mStorage.prepare(); mStoredSequence.setName(name); Transaction txn = repo.enterTopTransaction(null); txn.setForUpdate(true); try { if (!mStoredSequence.tryLoad()) { // Create a new sequence. mStoredSequence.setInitialValue(initialValue); // Start as small as possible to allow signed long comparisons to work. mStoredSequence.setNextValue(Long.MIN_VALUE); // Try to transfer values from a deprecated sequence. 
com.amazon.carbonado.spi.StoredSequence oldSequence; try { oldSequence = repo .storageFor(com.amazon.carbonado.spi.StoredSequence.class).prepare(); oldSequence.setName(name); if (oldSequence.tryLoad()) { mStoredSequence.setInitialValue(oldSequence.getInitialValue()); mStoredSequence.setNextValue(oldSequence.getNextValue()); } else { oldSequence = null; } } catch (RepositoryException e) { // Okay, perhaps no old sequence. oldSequence = null; } if (mStoredSequence.tryInsert()) { if (oldSequence != null) { try { // Get rid of deprecated sequence. oldSequence.tryDelete(); } catch (RepositoryException e) { // Oh well. } } } else { // A race condition likely. Load again. mStoredSequence.load(); } } txn.commit(); } finally { txn.exit(); } } /** * Reset the sequence. * * @param initialValue first value produced by sequence */ public void reset(int initialValue) throws FetchException, PersistException { synchronized (mStoredSequence) { Transaction txn = mRepository.enterTopTransaction(null); txn.setForUpdate(true); try { boolean doUpdate = mStoredSequence.tryLoad(); mStoredSequence.setInitialValue(initialValue); // Start as small as possible to allow signed long comparisons to work. mStoredSequence.setNextValue(Long.MIN_VALUE); if (doUpdate) { mStoredSequence.update(); } else { mStoredSequence.insert(); } txn.commit(); mHasReservedValues = false; } finally { txn.exit(); } } } /** * Returns the next value from the sequence, which may wrap negative if all * positive values are exhausted. When sequence wraps back to initial * value, the sequence is fully exhausted, and an exception is thrown to * indicate this. * * <p>Note: this method throws PersistException even for fetch failures * since this method is called by insert operations. Insert operations can * only throw a PersistException. * * @throws PersistException for fetch/persist failure or if sequence is exhausted. 
*/ public long nextLongValue() throws PersistException { try { synchronized (mStoredSequence) { return nextUnadjustedValue() + Long.MIN_VALUE + mStoredSequence.getInitialValue(); } } catch (FetchException e) { throw e.toPersistException(); } } /** * Returns the next value from the sequence, which may wrap negative if all * positive values are exhausted. When sequence wraps back to initial * value, the sequence is fully exhausted, and an exception is thrown to * indicate this. * * <p>Note: this method throws PersistException even for fetch failures * since this method is called by insert operations. Insert operations can * only throw a PersistException. * * @throws PersistException for fetch/persist failure or if sequence is * exhausted for int values. */ @Override public int nextIntValue() throws PersistException { try { synchronized (mStoredSequence) { long initial = mStoredSequence.getInitialValue(); if (initial >= 0x100000000L) { throw new PersistException ("Sequence initial value too large to support 32-bit ints: " + mStoredSequence.getName() + ", initial: " + initial); } long next = nextUnadjustedValue(); if (next >= Long.MIN_VALUE + 0x100000000L) { // Everytime we throw this exception, a long sequence value // has been lost. This seems fairly benign. throw new PersistException ("Sequence exhausted for 32-bit ints: " + mStoredSequence.getName() + ", next: " + (next + Long.MIN_VALUE + initial)); } return (int) (next + Long.MIN_VALUE + initial); } } catch (FetchException e) { throw e.toPersistException(); } } /** * Allow any unused reserved values to be returned for re-use. If the * repository is shared by other processes, then reserved values might not * be returnable. * * <p>This method should be called during the shutdown process of a * repository, although calling it does not invalidate this * SequenceValueGenerator. If getNextValue is called again, it will reserve * values again. 
* * @return true if reserved values were returned */ public boolean returnReservedValues() throws FetchException, PersistException { synchronized (mStoredSequence) { if (mHasReservedValues) { Transaction txn = mRepository.enterTopTransaction(null); txn.setForUpdate(true); try { // Compare known StoredSequence with current persistent // one. If same, then reserved values can be returned. StoredSequence current = mStorage.prepare(); current.setName(mStoredSequence.getName()); if (current.tryLoad() && current.equals(mStoredSequence)) { mStoredSequence.setNextValue(mNextValue + mIncrement); mStoredSequence.update(); txn.commit(); mHasReservedValues = false; return true; } } finally { txn.exit(); } } } return false; } // Assumes caller has synchronized on mStoredSequence private long nextUnadjustedValue() throws FetchException, PersistException { if (mHasReservedValues) { long next = mNextValue + mIncrement; mNextValue = next; if (next < mStoredSequence.getNextValue()) { return next; } mHasReservedValues = false; } Transaction txn = mRepository.enterTopTransaction(null); txn.setForUpdate(true); try { // Assume that StoredSequence is stale, so reload. mStoredSequence.load(); long next = mStoredSequence.getNextValue(); long nextStored = next + mReserveAmount * mIncrement; if (next >= 0 && nextStored < 0) { // Wrapped around. There might be just a few values left. long avail = (Long.MAX_VALUE - next) / mIncrement; if (avail > 0) { nextStored = next + avail * mIncrement; } else { // Throw a PersistException since sequences are applied during // insert operations, and inserts can only throw PersistExceptions. throw new PersistException ("Sequence exhausted: " + mStoredSequence.getName()); } } mStoredSequence.setNextValue(nextStored); mStoredSequence.update(); txn.commit(); mNextValue = next; mHasReservedValues = true; return next; } finally { txn.exit(); } } }
/* * Copyright LWJGL. All rights reserved. * License terms: https://www.lwjgl.org/license * MACHINE GENERATED FILE, DO NOT EDIT */ package org.lwjgl.opengl; import java.nio.*; import org.lwjgl.system.*; import static org.lwjgl.system.Checks.*; import static org.lwjgl.system.JNI.*; import static org.lwjgl.system.MemoryStack.*; import static org.lwjgl.system.MemoryUtil.*; /** * Native bindings to the <a target="_blank" href="https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_bindless_texture.txt">ARB_bindless_texture</a> extension. * * <p>This extension allows OpenGL applications to access texture objects in shaders without first binding each texture to one of a limited number of texture * image units. Using this extension, an application can query a 64-bit unsigned integer texture handle for each texture that it wants to access and then * use that handle directly in GLSL or assembly-based shaders. The ability to access textures without having to bind and/or re-bind them is similar to the * capability provided by the {@link NVShaderBufferLoad NV_shader_buffer_load} extension that allows shaders to access buffer objects without binding them. In both cases, * these extensions significantly reduce the amount of API and internal GL driver overhead needed to manage resource bindings.</p> * * <p>This extension also provides similar capability for the image load, store, and atomic functionality provided by OpenGL 4.2 and the * {@link ARBShaderImageLoadStore ARB_shader_image_load_store} and {@link EXTShaderImageLoadStore EXT_shader_image_load_store} extensions, where a texture can be accessed without first binding it to an * image unit. An image handle can be extracted from a texture object using an API with a set of parameters similar to those for * {@link EXTShaderImageLoadStore#glBindImageTextureEXT BindImageTextureEXT}.</p> * * <p>This extension adds no new data types to GLSL. 
Instead, it uses existing sampler and image data types and allows them to be populated with texture and * image handles. This extension does permit sampler and image data types to be used in more contexts than in unextended GLSL 4.00. In particular, sampler * and image types may be used as shader inputs/outputs, temporary variables, and uniform block members, and may be assigned to by shader code. * Constructors are provided to convert unsigned integer values to and from sampler and image data types. Additionally, new APIs are provided to load * values for sampler and image uniforms with 64-bit handle inputs. The use of existing integer-based Uniform* APIs is still permitted, in which case the * integer specified will identify a texture image or image unit. For samplers and images with values specified as texture image or image units, the GL * implemenation will translate the unit number to an internal handle as required.</p> * * <p>To access texture or image resources using handles, the handles must first be made resident. Accessing a texture or image by handle without first * making it resident can result in undefined results, including program termination. Since the amount of texture memory required by an application may * exceed the amount of memory available to the system, this extension provides API calls allowing applications to manage overall texture memory * consumption by making a texture resident and non-resident as required.</p> * * <p>Requires {@link GL40 OpenGL 4.0}.</p> */ public class ARBBindlessTexture { static { GL.initialize(); } /** Accepted by the {@code type} parameter of VertexAttribLPointer. */ public static final int GL_UNSIGNED_INT64_ARB = 0x140F; protected ARBBindlessTexture() { throw new UnsupportedOperationException(); } // --- [ glGetTextureHandleARB ] --- /** * Creates a texture handle using the current state of the texture named {@code texture}, including any embedded sampler state. 
See * {@link #glGetTextureSamplerHandleARB GetTextureSamplerHandleARB} for details. * * @param texture the texture object */ @NativeType("GLuint64") public static native long glGetTextureHandleARB(@NativeType("GLuint") int texture); // --- [ glGetTextureSamplerHandleARB ] --- /** * Creates a texture handle using the current non-sampler state from the texture named {@code texture} and the sampler state from the sampler object * {@code sampler}. In both cases, a 64-bit unsigned integer handle is returned. The error {@link GL11#GL_INVALID_VALUE INVALID_VALUE} is generated if {@code texture} is zero or is * not the name of an existing texture object or if {@code sampler} is zero or is not the name of an existing sampler object. The error * {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated if the texture object {@code texture} is not complete. If an error occurs, a handle of zero is returned. * * <p>The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated if the border color (taken from the embedded sampler for GetTextureHandleARB or from the {@code sampler} * for GetTextureSamplerHandleARB) is not one of the following allowed values. If the texture's base internal format is signed or unsigned integer, allowed * values are (0,0,0,0), (0,0,0,1), (1,1,1,0), and (1,1,1,1). If the base internal format is not integer, allowed values are (0.0,0.0,0.0,0.0), * (0.0,0.0,0.0,1.0), (1.0,1.0,1.0,0.0), and (1.0,1.0,1.0,1.0).</p> * * <p>The handle for each texture or texture/sampler pair is unique; the same handle will be returned if GetTextureHandleARB is called multiple times for the * same texture or if GetTextureSamplerHandleARB is called multiple times for the same texture/sampler pair.</p> * * <p>When a texture object is referenced by one or more texture handles, the texture parameters of the object may not be changed, and the size and format of * the images in the texture object may not be re-specified. 
The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated if the functions TexImage*, CopyTexImage*, * CompressedTexImage*, TexBuffer*, or TexParameter* are called to modify a texture object referenced by one or more texture handles. The contents of the * images in a texture object may still be updated via commands such as TexSubImage*, CopyTexSubImage*, and CompressedTexSubImage*, and by rendering to a * framebuffer object, even if the texture object is referenced by one or more texture handles.</p> * * <p>The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated by {@link GL15C#glBufferData BufferData} if it is called to modify a buffer object bound to a buffer texture while that * texture object is referenced by one or more texture handles. The contents of the buffer object may still be updated via buffer update commands such as * {@link GL15C#glBufferSubData BufferSubData} and MapBuffer*, or via the texture update commands, even if the buffer is bound to a texture while that buffer texture object is * referenced by one or more texture handles.</p> * * <p>When a sampler object is referenced by one or more texture handles, the sampler parameters of the object may not be changed. The error * {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated when calling SamplerParameter* functions to modify a sampler object referenced by one or more texture handles.</p> * * @param texture the texture object * @param sampler the sampler object */ @NativeType("GLuint64") public static native long glGetTextureSamplerHandleARB(@NativeType("GLuint") int texture, @NativeType("GLuint") int sampler); // --- [ glMakeTextureHandleResidentARB ] --- /** * Make a texture handle resident, so that it is accessible to shaders for texture mapping operations. * * <p>While the texture handle is resident, it may be used in texture mapping operations. 
If a shader attempts to perform a texture mapping operation using a * handle that is not resident, the results of that operation are undefined and may lead to application termination. When a texture handle is resident, the * texture it references is also considered resident for the purposes of the {@link GL11#glAreTexturesResident AreTexturesResident} command. The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is * generated if {@code handle} is not a valid texture handle, or if {@code handle} is already resident in the current GL context.</p> * * @param handle the texture handle */ public static native void glMakeTextureHandleResidentARB(@NativeType("GLuint64") long handle); // --- [ glMakeTextureHandleNonResidentARB ] --- /** * Makes a texture handle inaccessible to shaders. * * <p>The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated if {@code handle} is not a valid texture handle, or if {@code handle} is not resident in the current GL * context.</p> * * @param handle the texture handle */ public static native void glMakeTextureHandleNonResidentARB(@NativeType("GLuint64") long handle); // --- [ glGetImageHandleARB ] --- /** * Creates and returns an image handle for level {@code level} of the texture named {@code texture}. If {@code layered} is {@link GL11#GL_TRUE TRUE}, a handle is created * for the entire texture level. If {@code layered} is {@link GL11#GL_FALSE FALSE}, a handle is created for only the layer {@code layer} of the texture level. * {@code format} specifies a format used to interpret the texels of the image when used for image loads, stores, and atomics, and has the same meaning as * the {@code format} parameter of {@link EXTShaderImageLoadStore#glBindImageTextureEXT BindImageTextureEXT}. A 64-bit unsigned integer handle is returned if the command succeeds; otherwise, zero is * returned. 
* * <p>The error {@link GL11#GL_INVALID_VALUE INVALID_VALUE} is generated by GetImageHandleARB if:</p> * * <ul> * <li>{@code texture} is zero or not the name of an existing texture object;</li> * <li>the image for the texture level {@code level} doesn't exist (i.e., has a size of zero in {@code texture}); or</li> * <li>{@code layered} is FALSE and {@code layer} is greater than or equal to the number of layers in the image at level {@code level}.</li> * </ul> * * <p>The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated by GetImageHandleARB if:</p> * * <ul> * <li>the texture object {@code texture} is not complete (section 3.9.14);</li> * <li>{@code layered} is TRUE and the texture is not a three-dimensional, one-dimensional array, two dimensional array, cube map, or cube map array * texture.</li> * </ul> * * <p>When a texture object is referenced by one or more image handles, the texture parameters of the object may not be changed, and the size and format of * the images in the texture object may not be re-specified. The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated when calling TexImage*, CopyTexImage*, * CompressedTexImage*, TexBuffer*, or TexParameter* functions while a texture object is referenced by one or more image handles. The contents of the * images in a texture object may still be updated via commands such as TexSubImage*, CopyTexSubImage*, and CompressedTexSubImage*, and by rendering to a * framebuffer object, even if the texture object is referenced by one or more image handles.</p> * * <p>The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated by {@link GL15C#glBufferData BufferData} if it is called to modify a buffer object bound to a buffer texture while that texture * object is referenced by one or more image handles. 
The contents of the buffer object may still be updated via buffer update commands such as * {@link GL15C#glBufferSubData BufferSubData} and MapBuffer*, or via the texture update commands, even if the buffer is bound to a texture while that buffer texture object is * referenced by one or more image handles.</p> * * <p>The handle returned for each combination of {@code texture}, {@code level}, {@code layered}, {@code layer}, and {@code format} is unique; the same * handle will be returned if GetImageHandleARB is called multiple times with the same parameters.</p> * * @param texture the texture object * @param level the texture level * @param layered the layered flag * @param layer the texture layer * @param format the texture format */ @NativeType("GLuint64") public static native long glGetImageHandleARB(@NativeType("GLuint") int texture, @NativeType("GLint") int level, @NativeType("GLboolean") boolean layered, @NativeType("GLint") int layer, @NativeType("GLenum") int format); // --- [ glMakeImageHandleResidentARB ] --- /** * Makes an image handle resident, so that it is accessible to shaders for image loads, stores, and atomic operations. * * <p>{@code access} specifies whether the texture bound to the image handle will be treated as {@link GL15#GL_READ_ONLY READ_ONLY}, {@link GL15#GL_WRITE_ONLY WRITE_ONLY}, or {@link GL15#GL_READ_WRITE READ_WRITE}. If a * shader reads from an image handle made resident as {@link GL15#GL_WRITE_ONLY WRITE_ONLY}, or writes to an image handle made resident as {@link GL15#GL_READ_ONLY READ_ONLY}, the results of that * shader operation are undefined and may lead to application termination. The error {@link GL11#GL_INVALID_OPERATION INVALID_OPERATION} is generated if {@code handle} is not a valid * image handle, or if {@code handle} is already resident in the current GL context.</p> * * <p>While the image handle is resident, it may be used in image load, store, and atomic operations. 
If a shader attempts to perform an image operation using * a handle that is not resident, the results of that operation are undefined and may lead to application termination. When an image handle is resident, * the texture it references is not necessarily considered resident for the purposes of the {@link GL11#glAreTexturesResident AreTexturesResident} command.</p> * * @param handle the image handle * @param access the access type. One of:<br><table><tr><td>{@link GL15#GL_READ_ONLY READ_ONLY}</td><td>{@link GL15#GL_WRITE_ONLY WRITE_ONLY}</td><td>{@link GL15#GL_READ_WRITE READ_WRITE}</td></tr></table> */ public static native void glMakeImageHandleResidentARB(@NativeType("GLuint64") long handle, @NativeType("GLenum") int access); // --- [ glMakeImageHandleNonResidentARB ] --- /** * Makes an image handle inaccessible to shaders. * * @param handle the image handle */ public static native void glMakeImageHandleNonResidentARB(@NativeType("GLuint64") long handle); // --- [ glUniformHandleui64ARB ] --- /** * Loads a 64-bit unsigned integer handle into a uniform location corresponding to sampler or image variable types. * * @param location the uniform location * @param value the handle value */ public static native void glUniformHandleui64ARB(@NativeType("GLint") int location, @NativeType("GLuint64") long value); // --- [ glUniformHandleui64vARB ] --- /** * Unsafe version of: {@link #glUniformHandleui64vARB UniformHandleui64vARB} * * @param count the number of handles to load */ public static native void nglUniformHandleui64vARB(int location, int count, long values); /** * Loads {@code count} 64-bit unsigned integer handles into a uniform location corresponding to sampler or image variable types. 
* * @param location the uniform location * @param values a buffer from which to load the handles */ public static void glUniformHandleui64vARB(@NativeType("GLint") int location, @NativeType("GLuint64 const *") LongBuffer values) { nglUniformHandleui64vARB(location, values.remaining(), memAddress(values)); } // --- [ glProgramUniformHandleui64ARB ] --- /** * DSA version of {@link #glUniformHandleui64ARB UniformHandleui64ARB}. * * @param program the program object * @param location the uniform location * @param value the handle value */ public static native void glProgramUniformHandleui64ARB(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64") long value); // --- [ glProgramUniformHandleui64vARB ] --- /** * Unsafe version of: {@link #glProgramUniformHandleui64vARB ProgramUniformHandleui64vARB} * * @param count the number of handles to load */ public static native void nglProgramUniformHandleui64vARB(int program, int location, int count, long values); /** * DSA version of {@link #glUniformHandleui64vARB UniformHandleui64vARB}. * * @param program the program object * @param location the uniform location * @param values a buffer from which to load the handles */ public static void glProgramUniformHandleui64vARB(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64 const *") LongBuffer values) { nglProgramUniformHandleui64vARB(program, location, values.remaining(), memAddress(values)); } // --- [ glIsTextureHandleResidentARB ] --- /** * Returns {@link GL11#GL_TRUE TRUE} if the specified texture handle is resident in the current context. * * @param handle the texture handle */ @NativeType("GLboolean") public static native boolean glIsTextureHandleResidentARB(@NativeType("GLuint64") long handle); // --- [ glIsImageHandleResidentARB ] --- /** * Returns {@link GL11#GL_TRUE TRUE} if the specified image handle is resident in the current context. 
* * @param handle the image handle */ @NativeType("GLboolean") public static native boolean glIsImageHandleResidentARB(@NativeType("GLuint64") long handle); // --- [ glVertexAttribL1ui64ARB ] --- /** * Specifies the 64-bit unsigned integer handle value of a generic vertex attribute. * * @param index the index of the generic vertex attribute to be modified * @param x the handle value */ public static native void glVertexAttribL1ui64ARB(@NativeType("GLuint") int index, @NativeType("GLuint64") long x); // --- [ glVertexAttribL1ui64vARB ] --- /** Unsafe version of: {@link #glVertexAttribL1ui64vARB VertexAttribL1ui64vARB} */ public static native void nglVertexAttribL1ui64vARB(int index, long v); /** * Pointer version of {@link #glVertexAttribL1ui64ARB VertexAttribL1ui64ARB}. * * @param index the index of the generic vertex attribute to be modified * @param v the vertex attribute buffer */ public static void glVertexAttribL1ui64vARB(@NativeType("GLuint") int index, @NativeType("GLuint64 const *") LongBuffer v) { if (CHECKS) { check(v, 1); } nglVertexAttribL1ui64vARB(index, memAddress(v)); } // --- [ glGetVertexAttribLui64vARB ] --- /** Unsafe version of: {@link #glGetVertexAttribLui64vARB GetVertexAttribLui64vARB} */ public static native void nglGetVertexAttribLui64vARB(int index, int pname, long params); /** * Returns the 64-bit unsigned integer handle value of a generic vertex attribute parameter. * * @param index the generic vertex attribute index * @param pname the parameter to query * @param params a buffer in which to place the returned data */ public static void glGetVertexAttribLui64vARB(@NativeType("GLuint") int index, @NativeType("GLenum") int pname, @NativeType("GLuint64 *") LongBuffer params) { if (CHECKS) { check(params, 1); } nglGetVertexAttribLui64vARB(index, pname, memAddress(params)); } /** * Returns the 64-bit unsigned integer handle value of a generic vertex attribute parameter. 
* * @param index the generic vertex attribute index * @param pname the parameter to query */ @NativeType("void") public static long glGetVertexAttribLui64ARB(@NativeType("GLuint") int index, @NativeType("GLenum") int pname) { MemoryStack stack = stackGet(); int stackPointer = stack.getPointer(); try { LongBuffer params = stack.callocLong(1); nglGetVertexAttribLui64vARB(index, pname, memAddress(params)); return params.get(0); } finally { stack.setPointer(stackPointer); } } /** Array version of: {@link #glUniformHandleui64vARB UniformHandleui64vARB} */ public static void glUniformHandleui64vARB(@NativeType("GLint") int location, @NativeType("GLuint64 const *") long[] values) { long __functionAddress = GL.getICD().glUniformHandleui64vARB; if (CHECKS) { check(__functionAddress); } callPV(location, values.length, values, __functionAddress); } /** Array version of: {@link #glProgramUniformHandleui64vARB ProgramUniformHandleui64vARB} */ public static void glProgramUniformHandleui64vARB(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64 const *") long[] values) { long __functionAddress = GL.getICD().glProgramUniformHandleui64vARB; if (CHECKS) { check(__functionAddress); } callPV(program, location, values.length, values, __functionAddress); } /** Array version of: {@link #glVertexAttribL1ui64vARB VertexAttribL1ui64vARB} */ public static void glVertexAttribL1ui64vARB(@NativeType("GLuint") int index, @NativeType("GLuint64 const *") long[] v) { long __functionAddress = GL.getICD().glVertexAttribL1ui64vARB; if (CHECKS) { check(__functionAddress); check(v, 1); } callPV(index, v, __functionAddress); } /** Array version of: {@link #glGetVertexAttribLui64vARB GetVertexAttribLui64vARB} */ public static void glGetVertexAttribLui64vARB(@NativeType("GLuint") int index, @NativeType("GLenum") int pname, @NativeType("GLuint64 *") long[] params) { long __functionAddress = GL.getICD().glGetVertexAttribLui64vARB; if (CHECKS) { 
check(__functionAddress); check(params, 1); } callPV(index, pname, params, __functionAddress); } }
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.directory.model;

import java.io.Serializable;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the ListSchemaExtensions operation: identifies the directory whose schema
 * extensions should be listed, with optional pagination controls.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ds-2015-04-16/ListSchemaExtensions" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListSchemaExtensionsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The identifier of the directory from which to retrieve the schema extension information. */
    private String directoryId;

    /**
     * The {@code ListSchemaExtensions.NextToken} value from a previous call to
     * {@code ListSchemaExtensions}; null on the first call.
     */
    private String nextToken;

    /** The maximum number of items to return. */
    private Integer limit;

    /**
     * Sets the identifier of the directory from which to retrieve the schema extension information.
     *
     * @param directoryId
     *        the directory identifier
     */
    public void setDirectoryId(String directoryId) {
        this.directoryId = directoryId;
    }

    /**
     * Returns the identifier of the directory from which to retrieve the schema extension information.
     *
     * @return the directory identifier
     */
    public String getDirectoryId() {
        return this.directoryId;
    }

    /**
     * Fluent variant of {@link #setDirectoryId(String)}.
     *
     * @param directoryId
     *        the directory identifier
     * @return this request, so that method calls can be chained together
     */
    public ListSchemaExtensionsRequest withDirectoryId(String directoryId) {
        setDirectoryId(directoryId);
        return this;
    }

    /**
     * Sets the pagination token obtained from a previous {@code ListSchemaExtensions} call.
     *
     * @param nextToken
     *        the {@code ListSchemaExtensions.NextToken} value from a previous call; pass null if
     *        this is the first call
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token obtained from a previous {@code ListSchemaExtensions} call.
     *
     * @return the {@code ListSchemaExtensions.NextToken} value, or null if this is the first call
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        the {@code ListSchemaExtensions.NextToken} value from a previous call; pass null if
     *        this is the first call
     * @return this request, so that method calls can be chained together
     */
    public ListSchemaExtensionsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Sets the maximum number of items to return.
     *
     * @param limit
     *        the maximum number of items to return
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * Returns the maximum number of items to return.
     *
     * @return the maximum number of items to return
     */
    public Integer getLimit() {
        return this.limit;
    }

    /**
     * Fluent variant of {@link #setLimit(Integer)}.
     *
     * @param limit
     *        the maximum number of items to return
     * @return this request, so that method calls can be chained together
     */
    public ListSchemaExtensionsRequest withLimit(Integer limit) {
        setLimit(limit);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging. Only
     * non-null fields are rendered.
     *
     * @return a string representation of this object
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getDirectoryId() != null)
            out.append("DirectoryId: ").append(getDirectoryId()).append(",");
        if (getNextToken() != null)
            out.append("NextToken: ").append(getNextToken()).append(",");
        if (getLimit() != null)
            out.append("Limit: ").append(getLimit());
        return out.append("}").toString();
    }

    /** Null-tolerant equality helper used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ListSchemaExtensionsRequest))
            return false;

        ListSchemaExtensionsRequest that = (ListSchemaExtensionsRequest) obj;
        return fieldEquals(getDirectoryId(), that.getDirectoryId())
                && fieldEquals(getNextToken(), that.getNextToken())
                && fieldEquals(getLimit(), that.getLimit());
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation; null fields contribute 0.
        int result = 1;
        result = 31 * result + (getDirectoryId() == null ? 0 : getDirectoryId().hashCode());
        result = 31 * result + (getNextToken() == null ? 0 : getNextToken().hashCode());
        result = 31 * result + (getLimit() == null ? 0 : getLimit().hashCode());
        return result;
    }

    @Override
    public ListSchemaExtensionsRequest clone() {
        return (ListSchemaExtensionsRequest) super.clone();
    }

}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/privacy/dlp/v2beta2/dlp.proto

package com.google.privacy.dlp.v2beta2;

/**
 * <pre>
 * Redact a given value. For example, if used with an `InfoTypeTransformation`
 * transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the
 * output would be 'My phone number is '.
 * </pre>
 *
 * Protobuf type {@code google.privacy.dlp.v2beta2.RedactConfig}
 */
// NOTE: RedactConfig carries no fields of its own — the message acts purely as
// a marker/configuration tag, so all state below is unknown-field plumbing.
public  final class RedactConfig extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2beta2.RedactConfig)
    RedactConfigOrBuilder {
private static final long serialVersionUID = 0L;
  // Use RedactConfig.newBuilder() to construct.
  private RedactConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private RedactConfig() {
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: since the message defines no fields, every
  // tag except the terminator (0) is preserved as an unknown field.
  private RedactConfig(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownFieldProto3(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      // Wrap I/O failures in the protobuf exception type expected by callers,
      // preserving the cause.
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Runs even on failure so the partially-built message is consistent.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.privacy.dlp.v2beta2.DlpProto.internal_static_google_privacy_dlp_v2beta2_RedactConfig_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2beta2.DlpProto.internal_static_google_privacy_dlp_v2beta2_RedactConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2beta2.RedactConfig.class, com.google.privacy.dlp.v2beta2.RedactConfig.Builder.class);
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Only unknown fields can carry data for this message.
    unknownFields.writeTo(output);
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2beta2.RedactConfig)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2beta2.RedactConfig other = (com.google.privacy.dlp.v2beta2.RedactConfig) obj;

    // With no declared fields, equality reduces to unknown-field equality.
    boolean result = true;
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2beta2.RedactConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.privacy.dlp.v2beta2.RedactConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when starting from the shared default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Redact a given value. For example, if used with an `InfoTypeTransformation`
   * transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the
   * output would be 'My phone number is '.
   * </pre>
   *
   * Protobuf type {@code google.privacy.dlp.v2beta2.RedactConfig}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2beta2.RedactConfig)
      com.google.privacy.dlp.v2beta2.RedactConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.privacy.dlp.v2beta2.DlpProto.internal_static_google_privacy_dlp_v2beta2_RedactConfig_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2beta2.DlpProto.internal_static_google_privacy_dlp_v2beta2_RedactConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2beta2.RedactConfig.class, com.google.privacy.dlp.v2beta2.RedactConfig.Builder.class);
    }

    // Construct using com.google.privacy.dlp.v2beta2.RedactConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No repeated/message fields exist, so there is nothing to force-build.
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.privacy.dlp.v2beta2.DlpProto.internal_static_google_privacy_dlp_v2beta2_RedactConfig_descriptor;
    }

    public com.google.privacy.dlp.v2beta2.RedactConfig getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2beta2.RedactConfig.getDefaultInstance();
    }

    public com.google.privacy.dlp.v2beta2.RedactConfig build() {
      com.google.privacy.dlp.v2beta2.RedactConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public com.google.privacy.dlp.v2beta2.RedactConfig buildPartial() {
      com.google.privacy.dlp.v2beta2.RedactConfig result = new com.google.privacy.dlp.v2beta2.RedactConfig(this);
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2beta2.RedactConfig) {
        return mergeFrom((com.google.privacy.dlp.v2beta2.RedactConfig)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.privacy.dlp.v2beta2.RedactConfig other) {
      if (other == com.google.privacy.dlp.v2beta2.RedactConfig.getDefaultInstance()) return this;
      // Only unknown fields can differ between instances of this message.
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.privacy.dlp.v2beta2.RedactConfig parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.privacy.dlp.v2beta2.RedactConfig) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed, even when an error occurred.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2beta2.RedactConfig)
  }

  // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2beta2.RedactConfig)
  private static final com.google.privacy.dlp.v2beta2.RedactConfig DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2beta2.RedactConfig();
  }

  public static com.google.privacy.dlp.v2beta2.RedactConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<RedactConfig>
      PARSER = new com.google.protobuf.AbstractParser<RedactConfig>() {
    public RedactConfig parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RedactConfig(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<RedactConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RedactConfig> getParserForType() {
    return PARSER;
  }

  public com.google.privacy.dlp.v2beta2.RedactConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
package org.sana.android.db;

import java.util.List;

import org.sana.android.db.SanaDB.BinarySQLFormat;

import android.content.ContentResolver;
import android.content.ContentValues;
import android.database.Cursor;
import android.net.Uri;
import android.text.TextUtils;
import android.util.Log;

/**
 * Data access object for mapping binary objects to encounter elements, i.e.
 * observations.
 *
 * @author Sana Development Team
 */
public class BinaryDAO {

    private static final String TAG = BinaryDAO.class.getSimpleName();

    /** Fallback MIME type used when no explicit type is supplied. */
    public static final String DEFAULT_MIME = "application/octet-stream";

    /**
     * Removes the entry for a binary object.
     *
     * @param cr a content resolver
     * @param uri the mapping row to delete
     * @return the number of rows deleted
     */
    public static int delete(ContentResolver cr, Uri uri) {
        int result = cr.delete(uri, null, null);
        Log.d(TAG, "Result: " + result + ", deleted: " + uri);
        return result;
    }

    /**
     * Removes the file entry only for a binary object in this table. The
     * mapping row is kept, but its file reference is cleared first so it
     * never points at a deleted row.
     *
     * @param cr a content resolver
     * @param uri the mapping row whose file entry should be removed
     * @return the number of file rows deleted, or 0 if no file was mapped
     */
    public static int deleteFile(ContentResolver cr, Uri uri) {
        Uri fUri = queryFile(cr, uri);
        // Guard: queryFile() returns null when no file Uri is stored;
        // previously this fell through to a NullPointerException.
        if (fUri == null) {
            Log.d(TAG, "No file entry to delete for: " + uri);
            return 0;
        }
        // Clear the reference before deleting the file row.
        update(cr, uri, null);
        int result = cr.delete(fUri, null, null);
        Log.d(TAG, "Result: " + result + ", deleted: " + fUri);
        return result;
    }

    /**
     * Inserts a new record for a binary object.
     *
     * @param cr A content resolver
     * @param encounterId The encounter identifier
     * @param elementId The encounter element identifier
     * @param fileUri For locating the binary object
     * @param mime The MIME type of the binary object
     * @return A Uri for locating the new entry or null if unsuccessful.
     */
    public static Uri insert(ContentResolver cr, String encounterId,
            String elementId, Uri fileUri, String mime) {
        ContentValues values = new ContentValues();
        values.put(BinarySQLFormat.ENCOUNTER_ID, encounterId);
        values.put(BinarySQLFormat.ELEMENT_ID, elementId);
        values.put(BinarySQLFormat.CONTENT, fileUri.toString());
        values.put(BinarySQLFormat.MIME, mime);
        Uri result = cr.insert(BinarySQLFormat.CONTENT_URI, values);
        Log.d(TAG, "Result: " + result);
        return result;
    }

    /**
     * Inserts a new record for a binary object with no file uri or mime type
     * set.
     *
     * @param cr A content resolver
     * @param encounterId The encounter identifier
     * @param elementId The encounter element identifier
     * @return A Uri for locating the new entry or null if unsuccessful.
     */
    public static Uri insert(ContentResolver cr, String encounterId,
            String elementId) {
        ContentValues values = new ContentValues();
        values.put(BinarySQLFormat.ENCOUNTER_ID, encounterId);
        values.put(BinarySQLFormat.ELEMENT_ID, elementId);
        Uri result = cr.insert(BinarySQLFormat.CONTENT_URI, values);
        Log.d(TAG, "Result: " + result);
        return result;
    }

    /**
     * Returns the Uri stored for an (encounter, element) mapping.
     *
     * @param cr A content resolver
     * @param encounter The encounter identifier
     * @param element The encounter element identifier
     * @return The Uri or null.
     */
    public static Uri query(ContentResolver cr, String encounter,
            String element) {
        Uri result = null;
        String[] projection = BinaryProvider.PROJ_ID;
        String selection = BinaryProvider.OBS_WHERE;
        String[] selArgs = new String[]{ encounter, element };
        Cursor c = null;
        try {
            c = cr.query(BinarySQLFormat.CONTENT_URI, projection, selection,
                    selArgs, null);
            // ContentResolver.query() may return null; guard before moving.
            if (c != null && c.moveToFirst()) {
                // NOTE(review): this reads the CONTENT column while selecting
                // the PROJ_ID projection — verify PROJ_ID actually includes
                // CONTENT, otherwise getColumnIndex() returns -1 and this
                // always lands in the catch block.
                result = Uri.parse(c.getString(
                        c.getColumnIndex(BinarySQLFormat.CONTENT)));
            }
        } catch (Exception e) {
            // Best-effort lookup: log and fall through to a null result.
            Log.e(TAG, e.toString());
        } finally {
            if (c != null)
                c.close();
        }
        Log.d(TAG, "Result: " + result + ", For(encounter,element): ("
                + encounter + "," + element + ")");
        return result;
    }

    /**
     * Returns the Uri for the binary file object.
     *
     * @param cr A content resolver
     * @param uri The row to query
     * @return The Uri or null.
     */
    public static Uri queryFile(ContentResolver cr, Uri uri) {
        Uri result = null;
        String[] projection = BinaryProvider.PROJ_ITEM_CONTENT;
        Cursor c = null;
        try {
            c = cr.query(uri, projection, null, null, null);
            // ContentResolver.query() may return null; guard before moving.
            if (c != null && c.moveToFirst()) {
                result = Uri.parse(c.getString(
                        c.getColumnIndex(BinarySQLFormat.CONTENT)));
            }
        } catch (Exception e) {
            // Best-effort lookup: log and fall through to a null result.
            Log.e(TAG, e.toString());
        } finally {
            if (c != null)
                c.close();
        }
        Log.d(TAG, "Result: " + result + ", From: " + uri);
        return result;
    }

    /**
     * Updates, or creates if absent, the file Uri and MIME type stored for an
     * (encounter, element) mapping.
     *
     * @param cr A content resolver
     * @param encounterId The encounter identifier
     * @param elementId The encounter element identifier
     * @param fileUri The new Uri for the binary object, may be null
     * @param mime The MIME type; defaults to {@link #DEFAULT_MIME} when empty
     * @return The Uri of the updated or newly created row.
     */
    public static Uri updateOrCreate(ContentResolver cr, String encounterId,
            String elementId, Uri fileUri, String mime) {
        int result = 0;
        Uri uri = query(cr, encounterId, elementId);
        mime = (TextUtils.isEmpty(mime)) ? BinaryDAO.DEFAULT_MIME : mime;
        if (uri == null) {
            uri = BinaryDAO.insert(cr, encounterId, elementId, fileUri, mime);
        } else {
            // An empty string clears the stored file reference.
            String newFile = (fileUri != null) ? fileUri.toString() : "";
            ContentValues values = new ContentValues();
            values.put(BinarySQLFormat.CONTENT, newFile);
            values.put(BinarySQLFormat.MIME, mime);
            result = cr.update(uri, values, null, null);
        }
        Log.d(TAG, "Result: " + result + ", Updated: " + uri + ", with: "
                + fileUri);
        return uri;
    }

    /**
     * Updates the file Uri string for a row.
     *
     * @param cr A content resolver
     * @param uri The row to update
     * @param fileUri The new Uri for the binary object; null clears the value.
     * @return 1 if successful, otherwise 0.
     */
    public static int update(ContentResolver cr, Uri uri, Uri fileUri) {
        // An empty string clears the stored file reference.
        String newFile = (fileUri != null) ? fileUri.toString() : "";
        ContentValues values = new ContentValues();
        values.put(BinarySQLFormat.CONTENT, newFile);
        int result = cr.update(uri, values, null, null);
        Log.d(TAG, "Result: " + result + ", Updated: " + uri + ", with: "
                + fileUri);
        return result;
    }

    /**
     * Returns a unique identifier for the entry mapped to the Uri.
     *
     * @param uri The entry to get a UUID for.
     * @return The uuid or null
     */
    public static String getUUID(Uri uri) {
        if (uri == null)
            return null;
        // Guard: a Uri with fewer than two path segments previously threw
        // IndexOutOfBoundsException; the documented contract is "uuid or null".
        List<String> segments = uri.getPathSegments();
        return segments.size() > 1 ? segments.get(1) : null;
    }

    /**
     * Builds the observation Uri for an element within an encounter by
     * appending the element id to the encounter Uri.
     *
     * @param encounter the encounter Uri
     * @param element the element identifier to append
     * @return the combined observation Uri
     */
    public static Uri obsUri(Uri encounter, String element) {
        return encounter.buildUpon().appendPath(element).build();
    }
}
package com.sedmelluq.discord.lavaplayer.tools.io;

import com.sedmelluq.discord.lavaplayer.tools.Units;
import com.sedmelluq.discord.lavaplayer.track.info.AudioTrackInfoBuilder;
import com.sedmelluq.discord.lavaplayer.track.info.AudioTrackInfoProvider;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import org.apache.http.Header;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.sedmelluq.discord.lavaplayer.tools.io.HttpClientTools.getHeaderValue;
import static com.sedmelluq.discord.lavaplayer.tools.io.HttpClientTools.isSuccessWithContent;

/**
 * Use an HTTP endpoint as a stream, where the connection resetting is handled gracefully by reopening the connection
 * and using a closed stream will just reopen the connection.
 */
public class PersistentHttpStream extends SeekableInputStream implements AutoCloseable {
  private static final Logger log = LoggerFactory.getLogger(PersistentHttpStream.class);

  // Maximum forward distance (bytes) passed to the superclass as the skip
  // threshold; presumably distances beyond this favor a hard seek (a fresh
  // ranged request) over draining the open stream — TODO confirm against
  // SeekableInputStream.
  private static final long MAX_SKIP_DISTANCE = 512L * 1024L;

  private final HttpInterface httpInterface;
  protected final URI contentUrl;
  // HTTP status code from the most recent connection attempt.
  private int lastStatusCode;
  // Open response and its body stream for the current connection; both are
  // null whenever the stream is disconnected.
  private CloseableHttpResponse currentResponse;
  private InputStream currentContent;
  // Absolute read position within the remote resource; used to build the
  // Range header when reconnecting.
  protected long position;

  /**
   * @param httpInterface The HTTP interface to use for requests
   * @param contentUrl The URL of the resource
   * @param contentLength The length of the resource in bytes
   */
  public PersistentHttpStream(HttpInterface httpInterface, URI contentUrl, Long contentLength) {
    super(contentLength == null ? Units.CONTENT_LENGTH_UNKNOWN : contentLength, MAX_SKIP_DISTANCE);

    this.httpInterface = httpInterface;
    this.contentUrl = contentUrl;
    this.position = 0;
  }

  /**
   * Connect and return status code or return last status code if already connected. This causes the internal status
   * code checker to be disabled, so non-success status codes will be returned instead of being thrown as they would
   * be otherwise.
   *
   * @return The status code when connecting to the URL
   * @throws IOException On IO error
   */
  public int checkStatusCode() throws IOException {
    connect(true);
    return lastStatusCode;
  }

  /**
   * @return An HTTP response if one is currently open.
   */
  public HttpResponse getCurrentResponse() {
    return currentResponse;
  }

  // Subclasses may override to connect to a different URL than contentUrl.
  protected URI getConnectUrl() {
    return contentUrl;
  }

  // Subclasses may override to disable Range-header based resuming.
  protected boolean useHeadersForRange() {
    return true;
  }

  // Returns false to signal "retry" for 5xx when retries remain; throws for any
  // other non-success status; returns true when the response is acceptable.
  private static boolean validateStatusCode(HttpResponse response, boolean returnOnServerError) {
    int statusCode = response.getStatusLine().getStatusCode();

    if (returnOnServerError && statusCode >= HttpStatus.SC_INTERNAL_SERVER_ERROR) {
      return false;
    } else if (!isSuccessWithContent(statusCode)) {
      throw new RuntimeException("Not success status code: " + statusCode);
    }

    return true;
  }

  // Builds the GET request, resuming from the current position via a Range
  // header when mid-stream.
  private HttpGet getConnectRequest() {
    HttpGet request = new HttpGet(getConnectUrl());

    if (position > 0 && useHeadersForRange()) {
      request.setHeader(HttpHeaders.RANGE, "bytes=" + position + "-");
    }

    return request;
  }

  // No-op when already connected. Otherwise makes up to two attempts: the
  // first allows a retry on server error (i > 0), the second does not.
  private void connect(boolean skipStatusCheck) throws IOException {
    if (currentResponse == null) {
      for (int i = 1; i >= 0; i--) {
        if (attemptConnect(skipStatusCheck, i > 0)) {
          break;
        }
      }
    }
  }

  // Executes one connection attempt; returns false only when the status check
  // decided the attempt should be retried.
  private boolean attemptConnect(boolean skipStatusCheck, boolean retryOnServerError) throws IOException {
    currentResponse = httpInterface.execute(getConnectRequest());
    lastStatusCode = currentResponse.getStatusLine().getStatusCode();

    if (!skipStatusCheck && !validateStatusCode(currentResponse, retryOnServerError)) {
      return false;
    }

    if (currentResponse.getEntity() == null) {
      // Body-less response: expose an empty stream with a zero length.
      currentContent = EmptyInputStream.INSTANCE;
      contentLength = 0;
      return true;
    }

    currentContent = new BufferedInputStream(currentResponse.getEntity().getContent());

    if (contentLength == Units.CONTENT_LENGTH_UNKNOWN) {
      // Learn the length from the response when the caller did not supply one.
      Header header = currentResponse.getFirstHeader("Content-Length");

      if (header != null) {
        contentLength = Long.parseLong(header.getValue());
      }
    }

    return true;
  }

  // Rethrows non-retriable exceptions; for retriable ones, closes the
  // connection so the caller's retry path reconnects (resuming at `position`).
  private void handleNetworkException(IOException exception, boolean attemptReconnect) throws IOException {
    if (!attemptReconnect || !HttpClientTools.isRetriableNetworkException(exception)) {
      throw exception;
    }

    close();

    log.debug("Encountered retriable exception on url {}.", contentUrl, exception);
  }

  // Single-byte read with at most one transparent reconnect; the recursive
  // call passes attemptReconnect=false so a second failure propagates.
  private int internalRead(boolean attemptReconnect) throws IOException {
    connect(false);

    try {
      int result = currentContent.read();
      if (result >= 0) {
        position++;
      }
      return result;
    } catch (IOException e) {
      handleNetworkException(e, attemptReconnect);
      return internalRead(false);
    }
  }

  @Override
  public int read() throws IOException {
    return internalRead(true);
  }

  // Buffered read with at most one transparent reconnect (see internalRead).
  private int internalRead(byte[] b, int off, int len, boolean attemptReconnect) throws IOException {
    connect(false);

    try {
      int result = currentContent.read(b, off, len);
      if (result >= 0) {
        position += result;
      }
      return result;
    } catch (IOException e) {
      handleNetworkException(e, attemptReconnect);
      return internalRead(b, off, len, false);
    }
  }

  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    return internalRead(b, off, len, true);
  }

  // Skip with at most one transparent reconnect (see internalRead).
  private long internalSkip(long n, boolean attemptReconnect) throws IOException {
    connect(false);

    try {
      long result = currentContent.skip(n);
      if (result >= 0) {
        position += result;
      }
      return result;
    } catch (IOException e) {
      handleNetworkException(e, attemptReconnect);
      return internalSkip(n, false);
    }
  }

  @Override
  public long skip(long n) throws IOException {
    return internalSkip(n, true);
  }

  // available() with at most one transparent reconnect (see internalRead).
  private int internalAvailable(boolean attemptReconnect) throws IOException {
    connect(false);

    try {
      return currentContent.available();
    } catch (IOException e) {
      handleNetworkException(e, attemptReconnect);
      return internalAvailable(false);
    }
  }

  @Override
  public int available() throws IOException {
    return internalAvailable(true);
  }

  @Override
  public synchronized void reset() throws IOException {
    throw new IOException("mark/reset not supported");
  }

  @Override
  public boolean markSupported() {
    return false;
  }

  // Closes the current response (and with it the body stream); a later read
  // simply reconnects. Failure to close is logged, not propagated.
  @Override
  public void close() throws IOException {
    if (currentResponse != null) {
      try {
        currentResponse.close();
      } catch (IOException e) {
        log.debug("Failed to close response.", e);
      }

      currentResponse = null;
      currentContent = null;
    }
  }

  /**
   * Detach from the current connection, making sure not to close the connection when the stream is closed.
   */
  public void releaseConnection() {
    if (currentContent != null) {
      try {
        currentContent.close();
      } catch (IOException e) {
        log.debug("Failed to close response stream.", e);
      }
    }

    // NOTE(review): the response reference is dropped without closing it —
    // presumably ownership passes to whoever holds getCurrentResponse();
    // verify against callers.
    currentResponse = null;
    currentContent = null;
  }

  @Override
  public long getPosition() {
    return position;
  }

  // Hard seek: drop the connection and record the new position; the next read
  // reconnects with a Range header starting there.
  @Override
  protected void seekHard(long position) throws IOException {
    close();

    this.position = position;
  }

  // Hard seeking requires a known content length.
  @Override
  public boolean canSeekHard() {
    return contentLength != Units.CONTENT_LENGTH_UNKNOWN;
  }

  @Override
  public List<AudioTrackInfoProvider> getTrackInfoProviders() {
    if (currentResponse != null) {
      return Collections.singletonList(createIceCastHeaderProvider());
    } else {
      return Collections.emptyList();
    }
  }

  // Builds track info from IceCast/SHOUTcast "icy-*" response headers,
  // falling back to icy-url for the title when icy-description is absent.
  private AudioTrackInfoProvider createIceCastHeaderProvider() {
    AudioTrackInfoBuilder builder = AudioTrackInfoBuilder.empty()
        .setTitle(getHeaderValue(currentResponse, "icy-description"))
        .setAuthor(getHeaderValue(currentResponse, "icy-name"));

    if (builder.getTitle() == null) {
      builder.setTitle(getHeaderValue(currentResponse, "icy-url"));
    }

    return builder;
  }
}
package com.obdobion.funnel.parameters; import java.io.File; import java.io.IOException; import java.text.ParseException; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.obdobion.algebrain.Equ; import com.obdobion.argument.CmdLine; import com.obdobion.argument.ICmdLine; import com.obdobion.argument.type.ByteCLA; import com.obdobion.argument.type.CmdLineCLA; import com.obdobion.argument.type.WildFiles; import com.obdobion.funnel.AppContext; import com.obdobion.funnel.FunnelDataProvider; import com.obdobion.funnel.FunnelDataPublisher; import com.obdobion.funnel.aggregation.Aggregate; import com.obdobion.funnel.aggregation.AggregateCount; import com.obdobion.funnel.columns.ColumnHelper; import com.obdobion.funnel.columns.FormatPart; import com.obdobion.funnel.columns.HeaderHelper; import com.obdobion.funnel.columns.HeaderOutHelper; import com.obdobion.funnel.columns.OutputFormatHelper; import com.obdobion.funnel.orderby.Filler; import com.obdobion.funnel.orderby.KeyHelper; import com.obdobion.funnel.orderby.KeyPart; import com.obdobion.funnel.orderby.KeyType; import com.obdobion.funnel.provider.AbstractInputCache; import com.obdobion.funnel.provider.ProviderFactory; import com.obdobion.funnel.publisher.PublisherFactory; /** * <p> * FunnelContext class. 
 * </p>
 *
 * @author Chris DeGreef fedupforone@gmail.com
 */
public class FunnelContext
{
    static final private Logger logger = LoggerFactory.getLogger(FunnelContext.class);

    /*
     * Logs every JVM system property. line.separator is logged at debug level as
     * raw byte values (it is unprintable), java.version at debug level, and
     * everything else at trace level.
     */
    static private void showSystemParameters()
    {
        @SuppressWarnings("unchecked")
        final Enumeration<String> pEnumerator = (Enumeration<String>) System.getProperties().propertyNames();
        while (pEnumerator.hasMoreElements())
        {
            final String name = pEnumerator.nextElement();

            // if ("java.library.path".equalsIgnoreCase(name)
            // || "java.endorsed.dirs".equalsIgnoreCase(name)
            // || "sun.boot.library.path".equalsIgnoreCase(name)
            // || "java.class.path".equalsIgnoreCase(name)
            // || "java.home".equalsIgnoreCase(name)
            // || "java.ext.dirs".equalsIgnoreCase(name)
            // || "sun.boot.class.path".equalsIgnoreCase(name))
            // continue;

            if ("line.separator".equalsIgnoreCase(name))
            {
                // NOTE(review): getBytes() uses the platform default charset here -
                // acceptable for a 1-2 byte line separator, but worth confirming.
                final byte[] ls = System.getProperties().getProperty(name).getBytes();
                if (ls.length == 1)
                    logger.debug("JVM: {}={}", name, ls[0]);
                else
                    logger.debug("JVM: {}={} {}", name, ls[0], ls[1]);
                continue;
            }
            if ("java.version".equalsIgnoreCase(name))
            {
                logger.debug("Java version: {}", System.getProperties().getProperty(name));
                continue;
            }
            logger.trace("JVM: {}={}", name, System.getProperties().getProperty(name));
        }
    }

    final FunnelSortContext     fsc;              // parsed command-line / sort configuration wrapper
    String[]                    args;
    private int                 inputFileIndex;   // index of the input file currently being read
    public String               specDirectory;
    public FunnelDataProvider   provider;         // created from this context by ProviderFactory
    public FunnelDataPublisher  publisher;        // created from this context by PublisherFactory
    public AbstractInputCache   inputCache;
    public KeyHelper            keyHelper;
    public OutputFormatHelper   formatOutHelper;
    public HeaderOutHelper      headerOutHelper;
    public ColumnHelper         columnHelper;
    public HeaderHelper         headerHelper;
    public long                 comparisonCounter; // presumably maintained by the sort engine - not set in this class chunk
    private long                duplicateCount;   // accumulated via outputCounters()
    private long                writeCount;       // accumulated via outputCounters()
    private long                unselectedCount;  // accumulated via inputCounters()
    private long                recordCount;      // accumulated via inputCounters()
    private List<KeyPart>       keys;             // sort keys derived from the --orderBy definitions

    /**
     * <p>
     * Constructor for FunnelContext.
     * </p>
     *
     * @param cfg a {@link com.obdobion.funnel.AppContext} object.
     * @param _args a {@link java.lang.String} object.
     * @throws java.io.IOException if any.
     * @throws java.text.ParseException if any.
     */
    public FunnelContext(final AppContext cfg, final String... _args) throws IOException, ParseException
    {
        logger.info("================ BEGIN ===================");
        logger.debug("Funnel {}", cfg.version);
        final CmdLine parser = new CmdLine(null,
                "Funnel is a sort / copy / merge utility.\n\nVersion "
                        + cfg.version
                        + ". The log4j configuration file is "
                        + cfg.log4jConfigFileName
                        + ".");
        if (cfg.specPath != null)
        {
            for (int p = 0; p < cfg.specPath.length; p++)
            {
                parser.addDefaultIncludeDirectory(new File(cfg.specPath[p]));
            }
        }
        fsc = new FunnelSortContext(parser, _args);
        // A usage (--help) run or a version request only parses the command line and
        // stops; no provider/publisher is created in those cases.
        if (isUsageRun())
            return;
        if (isVersion())
        {
            logger.info("version {}", cfg.version);
            System.out.println("Funnel " + cfg.version);
            return;
        }
        /*
         * The parser would normally apply defaults but the generator does not
         * provide for java code to be executed for default values.
         */
        if (fsc.workDirectory == null)
            fsc.workDirectory = new File(System.getProperty("java.io.tmpdir"));
        try
        {
            // Echo the full command line to the log for diagnostics.
            final StringBuilder sb = new StringBuilder();
            sb.append("commandline:");
            for (final String arg : _args)
                sb.append(" ").append(arg);
            logger.info(sb.toString());

            showSystemParameters();
            postParseAnalysis();
            showParameters();
            provider = ProviderFactory.create(this);
            publisher = PublisherFactory.create(this);
            logger.debug("============= INITIALIZED ================");
        } catch (final ParseException pe)
        {
            // logger.fatal(pe.getMessage());
            // NOTE(review): fillInStackTrace() overwrites the trace captured at the
            // original throw site with this catch site - confirm that is intended.
            pe.fillInStackTrace();
            throw pe;
        }
    }

    /**
     * <p>
     * getAggregateByName. Looks up an aggregate by name; matching is
     * case-insensitive, and null is returned when there is no match (or no
     * aggregates at all).
     * </p>
     *
     * @param name a {@link java.lang.String} object.
     * @return a {@link com.obdobion.funnel.aggregation.Aggregate} object.
     */
    public Aggregate getAggregateByName(final String name)
    {
        if (getAggregates() != null)
            for (final Aggregate agg : getAggregates())
                if (agg.name.equalsIgnoreCase(name))
                    return agg;
        return null;
    }

    /**
     * <p>
     * getAggregates.
     * </p>
     *
     * @return a {@link java.util.List} object.
*/ public List<Aggregate> getAggregates() { return fsc.aggregates; } /** * <p> * getCopyOrder. * </p> * * @return a {@link com.obdobion.funnel.parameters.CopyOrder} object. */ public CopyOrder getCopyOrder() { return fsc.copyOrder; } /** * <p> * getCsv. * </p> * * @return a {@link com.obdobion.funnel.parameters.CSVDef} object. */ public CSVDef getCsv() { return fsc.csv; } /** * <p> * getDepth. * </p> * * @return a int. */ public int getDepth() { return fsc.depth; } /** * <p> * Getter for the field <code>duplicateCount</code>. * </p> * * @return a long. */ public long getDuplicateCount() { return duplicateCount; } /** * <p> * getDuplicateDisposition. * </p> * * @return a {@link com.obdobion.funnel.parameters.DuplicateDisposition} * object. */ public DuplicateDisposition getDuplicateDisposition() { return fsc.duplicateDisposition; } /** * <p> * getEndOfRecordDelimiterIn. * </p> * * @return an array of byte. */ public byte[] getEndOfRecordDelimiterIn() { return fsc.endOfRecordDelimiterIn; } /** * <p> * getEndOfRecordDelimiterOut. * </p> * * @return an array of byte. */ public byte[] getEndOfRecordDelimiterOut() { return fsc.endOfRecordDelimiterOut; } /** * <p> * getFixedRecordLengthIn. * </p> * * @return a int. */ public int getFixedRecordLengthIn() { return fsc.fixedRecordLengthIn; } /** * <p> * getFixedRecordLengthOut. * </p> * * @return a int. */ public int getFixedRecordLengthOut() { return fsc.fixedRecordLengthOut; } /** * <p> * getFormatOutDefs. * </p> * * @return a {@link java.util.List} object. */ public List<FormatPart> getFormatOutDefs() { return fsc.formatOutDefs; } /** * <p> * getHeaderInDefs. * </p> * * @return a {@link java.util.List} object. */ public List<KeyPart> getHeaderInDefs() { return fsc.headerInDefs; } /** * <p> * getHeaderOutDefs. * </p> * * @return a {@link java.util.List} object. */ public List<FormatPart> getHeaderOutDefs() { return fsc.headerOutDefs; } /** * <p> * getHexDumps. * </p> * * @return a {@link java.util.List} object. 
*/ public List<HexDump> getHexDumps() { return fsc.hexDumps; } /** * <p> * getInputColumnDefs. * </p> * * @return a {@link java.util.List} object. */ public List<KeyPart> getInputColumnDefs() { return fsc.inputColumnDefs; } /** * <p> * getInputFile. * </p> * * @param fileNumber a int. * @return a {@link java.io.File} object. * @throws java.text.ParseException if any. * @throws java.io.IOException if any. */ public File getInputFile(final int fileNumber) throws ParseException, IOException { return fsc.inputFiles.files().get(fileNumber); } /** * <p> * getInputFiles. * </p> * * @return a {@link com.obdobion.argument.type.WildFiles} object. */ public WildFiles getInputFiles() { return fsc.inputFiles; } /** * <p> * Getter for the field <code>keys</code>. * </p> * * @return a {@link java.util.List} object. */ public List<KeyPart> getKeys() { return keys; } /** * <p> * getMaximumNumberOfRows. * </p> * * @return a long. */ public long getMaximumNumberOfRows() { return fsc.maximumNumberOfRows; } private List<OrderBy> getOrderBys() { return fsc.orderBys; } /** * <p> * getOutputFile. * </p> * * @return a {@link java.io.File} object. */ public File getOutputFile() { return fsc.outputFile; } /** * <p> * Getter for the field <code>recordCount</code>. * </p> * * @return a long. */ public long getRecordCount() { return recordCount; } /** * <p> * getStopEqu. * </p> * * @return a {@link java.util.List} object. */ public List<Equ> getStopEqu() { return fsc.stopEqu; } /** * <p> * Getter for the field <code>unselectedCount</code>. * </p> * * @return a long. */ public long getUnselectedCount() { return unselectedCount; } /** * <p> * getWhereEqu. * </p> * * @return a {@link java.util.List} object. */ public List<Equ> getWhereEqu() { return fsc.whereEqu; } /** * <p> * getWorkDirectory. * </p> * * @return a {@link java.io.File} object. */ public File getWorkDirectory() { return fsc.workDirectory; } /** * <p> * Getter for the field <code>writeCount</code>. * </p> * * @return a long. 
*/ public long getWriteCount() { return writeCount; } /** * <p> * inputCounters. * </p> * * @param p_unselectedCount a long. * @param p_recordCount a long. */ public void inputCounters(final long p_unselectedCount, final long p_recordCount) { unselectedCount += p_unselectedCount; recordCount += p_recordCount; } /** * <p> * inputFileCount. * </p> * * @return a int. * @throws java.text.ParseException if any. * @throws java.io.IOException if any. */ public int inputFileCount() throws ParseException, IOException { if (getInputFiles() == null) return 0; return getInputFiles().files().size(); } /** * <p> * inputFileIndex. * </p> * * @return a int. */ public int inputFileIndex() { return inputFileIndex; } /** * <p> * isAggregating. * </p> * * @return a boolean. */ public boolean isAggregating() { return getAggregates() != null && !getAggregates().isEmpty(); } /** * <p> * isCacheInput. * </p> * * @return a boolean. */ public boolean isCacheInput() { return !fsc.noCacheInput; } /** * <p> * isCacheWork. * </p> * * @return a boolean. */ public boolean isCacheWork() { return !fsc.diskWork; } /** * <p> * isDiskWork. * </p> * * @return a boolean. */ public boolean isDiskWork() { return fsc.diskWork; } /** * <p> * isHexDumping. * </p> * * @return a boolean. */ public boolean isHexDumping() { return fsc.hexDumps != null; } /** * <p> * isInPlaceSort. * </p> * * @return a boolean. */ public boolean isInPlaceSort() { return fsc.inPlaceSort; } /** * <p> * isMultisourceInput. * </p> * * @return a boolean. * @throws java.text.ParseException if any. * @throws java.io.IOException if any. */ public boolean isMultisourceInput() throws ParseException, IOException { return getInputFiles() != null && getInputFiles().files().size() > 1; } /** * <p> * isNoCacheInput. * </p> * * @return a boolean. */ public boolean isNoCacheInput() { return fsc.noCacheInput; } /** * <p> * isSyntaxOnly. * </p> * * @return a boolean. */ public boolean isSyntaxOnly() { return fsc.syntaxOnly; } /** * <p> * isSysin. 
* </p> * * @return a boolean. */ public boolean isSysin() { return !(fsc.getParser().arg("--inputfilename").isParsed()); } /** * <p> * isSysout. * </p> * * @return a boolean. * @throws java.text.ParseException if any. * @throws java.io.IOException if any. */ public boolean isSysout() throws ParseException, IOException { if (isMultisourceInput() && isInPlaceSort()) return false; return getOutputFile() == null; } /** * <p> * isUsageRun. * </p> * * @return a boolean. */ public boolean isUsageRun() { return ((CmdLine) fsc.getParser()).isUsageRun(); } /** * <p> * isUserSpecifiedOrder. * </p> * * @return a boolean. */ public boolean isUserSpecifiedOrder() { return getOrderBys() == null || getOrderBys().isEmpty(); } /** * <p> * isVariableLengthInput. * </p> * * @return a boolean. */ public boolean isVariableLengthInput() { return fsc.getParser().arg("--variableIn").isParsed() || !(fsc.getParser().arg("--fixedIn").isParsed()); } /** * <p> * isVariableLengthOutput. * </p> * * @return a boolean. */ public boolean isVariableLengthOutput() { return fsc.getParser().arg("--variableOutput").isParsed(); } /** * <p> * isVersion. * </p> * * @return a boolean. */ public boolean isVersion() { return fsc.version; } /** * <p> * outputCounters. * </p> * * @param p_duplicateCount a long. * @param p_writeCount a long. 
*/ public void outputCounters(final long p_duplicateCount, final long p_writeCount) { duplicateCount += p_duplicateCount; writeCount += p_writeCount; } private void postParseAggregation() throws ParseException { if (getAggregates() != null) { final List<String> aggregateNamesFoundSoFar = new ArrayList<>(); for (final Aggregate agg : getAggregates()) { if (aggregateNamesFoundSoFar.contains(agg.name)) throw new ParseException("aggregate \"" + agg.name + "\" must have a unique name", 0); aggregateNamesFoundSoFar.add(agg.name); if (agg instanceof AggregateCount) continue; if (columnHelper.exists(agg.name)) throw new ParseException("aggregate \"" + agg.name + "\" is already defined as a column", 0); if (agg.columnName != null) { if (!columnHelper.exists(agg.columnName)) throw new ParseException("aggregate \"" + agg.name + "\" must reference a defined column: " + agg.columnName, 0); final KeyPart col = columnHelper.get(agg.columnName); if ((col.isNumeric() && !agg.supportsNumber()) || (col.isDate() && !agg.supportsDate()) || (!col.isNumeric() && !col.isDate())) throw new ParseException("aggregate \"" + agg.name + "\" must reference a numeric or date column: " + agg.columnName + " (" + col.typeName + ")", 0); if (agg.equation != null) throw new ParseException("aggregate \"" + agg.name + "\" columnName and --equ are mutually exclusive", 0); } } } } private void postParseAnalysis() throws ParseException, IOException { columnHelper = new ColumnHelper(); keyHelper = new KeyHelper(); formatOutHelper = new OutputFormatHelper(columnHelper, headerHelper); headerHelper = new HeaderHelper(); headerOutHelper = new HeaderOutHelper(headerHelper); postParseInputFile(); postParseHeaderIn(); postParseHeaderOut(); postParseInputColumns(); postParseOrderBy(); postParseHexDumps(); postParseAggregation(); postParseFormatOut(); postParseOutputFile(); postParseEolIn(); postParseEolOut(); postParseCSV(); postParseFixed(); } private void postParseCSV() { /* * Create a CSV parser if needed. 
*/ if (fsc.getParser().arg("--csv").isParsed()) { getCsv().format = getCsv().predefinedFormat.getFormat(); logger.debug("defining the CSV parser based on \"{}\"", getCsv().predefinedFormat.name()); final ICmdLine csvParser = ((CmdLineCLA) fsc.getParser().arg("--csv")).templateCmdLine; if (csvParser.arg("--commentMarker").isParsed()) getCsv().format = getCsv().format.withCommentMarker((char) getCsv().commentMarker); if (csvParser.arg("--delimiter").isParsed()) getCsv().format = getCsv().format.withDelimiter((char) getCsv().delimiter); if (csvParser.arg("--escape").isParsed()) getCsv().format = getCsv().format.withEscape((char) getCsv().escape); if (csvParser.arg("--ignoreEmptyLines").isParsed()) getCsv().format = getCsv().format.withIgnoreEmptyLines(getCsv().ignoreEmptyLines); if (csvParser.arg("--ignoreSurroundingSpaces").isParsed()) getCsv().format = getCsv().format.withIgnoreSurroundingSpaces(getCsv().ignoreSurroundingSpaces); if (csvParser.arg("--nullString").isParsed()) getCsv().format = getCsv().format.withNullString(getCsv().nullString); if (csvParser.arg("--quote").isParsed()) getCsv().format = getCsv().format.withQuote((char) getCsv().quote); } } private void postParseEolIn() { if (getEndOfRecordDelimiterIn() == null) fsc.endOfRecordDelimiterIn = System.lineSeparator().getBytes(); } private void postParseEolOut() { if (getEndOfRecordDelimiterOut() == null) fsc.endOfRecordDelimiterOut = getEndOfRecordDelimiterIn(); } private void postParseFixed() throws ParseException { if (getFixedRecordLengthOut() > 0 && isVariableLengthOutput()) throw new ParseException("--fixedOut and --variableOutput are mutually exclusive parameters", 0); if (isVariableLengthOutput()) return; if (getFixedRecordLengthOut() == 0) fsc.fixedRecordLengthOut = getFixedRecordLengthIn(); } private void postParseFormatOut() throws ParseException { if (getFormatOutDefs() != null) { if (getCsv() != null) { throw new ParseException("--csv and --format are mutually exclusive parameters", 0); } for 
(final FormatPart kdef : getFormatOutDefs()) { try { if (kdef.offset == -1) // unspecified kdef.offset = 0; if (kdef.columnName != null) if (!columnHelper.exists(kdef.columnName)) { if (!headerHelper.exists(kdef.columnName)) { if (getAggregateByName(kdef.columnName) == null) throw new ParseException("--formatOut must be a defined column or header: " + kdef.columnName, 0); throw new ParseException( "--formatOut must be a defined column, aggregates can only be used within --equ: " + kdef.columnName, 0); } } if (kdef.columnName != null && kdef.equation != null) throw new ParseException("--formatOut columnName and --equ are mutually exclusive", 0); if (kdef.format != null && kdef.equation == null) throw new ParseException("--formatOut --format is only valid with --equ", 0); if (kdef.equation != null) { if (kdef.length == 255) throw new ParseException("--formatOut --length is required when --equ is specified", 0); } formatOutHelper.add(kdef); } catch (final Exception e) { throw new ParseException(e.getMessage(), 0); } } } } private void postParseHeaderIn() throws ParseException { headerHelper.setWaitingForInput(false); if (getHeaderInDefs() != null) { headerHelper.setWaitingForInput(true); // headerInDefs.size() > 1 // || (headerInDefs.get(0).columnName != null || // headerInDefs.get(0).equation != null) /* * This may be overridden in the postParseHeaderOut method. */ headerOutHelper.setWaitingToWrite(true); KeyPart previousColDef = null; for (final KeyPart colDef : getHeaderInDefs()) { try { /* * Provide a default length when the format is specified and * the length is not. 
*/ if (colDef.length == KeyHelper.MAX_KEY_SIZE && colDef.parseFormat != null && colDef.parseFormat.length() > 0) { colDef.length = colDef.parseFormat.length(); logger.debug("column \"{}\" length set to {} because of format", colDef.columnName, colDef.length); } if (getCsv() != null) throw new ParseException("headerIn not supported for csv files", 0); if (colDef.offset == -1) // unspecified { if (previousColDef != null) colDef.offset = previousColDef.offset + previousColDef.length; else colDef.offset = 0; } if (!(colDef instanceof Filler)) { if (headerHelper.exists(colDef.columnName)) throw new ParseException("headerIn must be unique: " + colDef.columnName, 0); headerHelper.add(colDef); } previousColDef = colDef; } catch (final Exception e) { throw new ParseException(e.getMessage(), 0); } } } } private void postParseHeaderOut() throws ParseException { if (getHeaderOutDefs() != null) { if (getCsv() != null) { throw new ParseException("--csv and --headerOut are mutually exclusive parameters", 0); } /* * --headerOut(), no args, means to suppress the headerIn from being * written. 
*/ headerOutHelper.setWaitingToWrite(getHeaderOutDefs().size() > 1 || (getHeaderOutDefs().get(0).columnName != null || getHeaderOutDefs().get(0).equation != null)); for (final FormatPart kdef : getHeaderOutDefs()) { try { if (kdef.offset == -1) // unspecified kdef.offset = 0; if (kdef.columnName != null) if (!headerHelper.exists(kdef.columnName)) { throw new ParseException( "--headerOut must be a defined headerIn: " + kdef.columnName, 0); } if (kdef.columnName != null && kdef.equation != null) throw new ParseException("--headerOut columnName and --equ are mutually exclusive", 0); if (kdef.format != null && kdef.equation == null) throw new ParseException("--headerOut --format is only valid with --equ", 0); if (kdef.equation != null) { if (kdef.length == 255) throw new ParseException("--headerOut --length is required when --equ is specified", 0); } headerOutHelper.add(kdef); } catch (final Exception e) { throw new ParseException(e.getMessage(), 0); } } } } private void postParseHexDumps() throws ParseException { /* * Convert OrderBys into sort keys */ if (getHexDumps() != null && !getHexDumps().isEmpty()) { if (getHexDumps().size() == 1 && getHexDumps().get(0).columnName == null) /* * Full record dump */ return; if (getAggregates() != null) throw new ParseException("HexDump with aggregate processing is not supported", 0); if (!isVariableLengthOutput() && (getFixedRecordLengthIn() > 0 || getFixedRecordLengthOut() > 0)) throw new ParseException("HexDump is only valid with variableOutput", 0); if (isInPlaceSort()) throw new ParseException("HexDump is not valid with --replace", 0); for (final HexDump hexDump : getHexDumps()) { if (!columnHelper.exists(hexDump.columnName)) throw new ParseException("HexDump must be a defined column: " + hexDump.columnName, 0); final KeyPart column = columnHelper.get(hexDump.columnName); if (KeyType.String != column.typeName && KeyType.Byte != column.typeName) throw new ParseException("HexDump can only be on String or Byte columns: " + 
hexDump.columnName, 0); } } } private void postParseInputColumns() throws ParseException { if (getInputColumnDefs() != null) { KeyPart previousColDef = null; for (final KeyPart colDef : getInputColumnDefs()) { try { /* * Provide a default length when the format is specified and * the length is not. */ if (colDef.length == KeyHelper.MAX_KEY_SIZE && colDef.parseFormat != null && colDef.parseFormat.length() > 0) { colDef.length = colDef.parseFormat.length(); logger.debug("column \"{}\" length set to {} because of format", colDef.columnName, colDef.length); } /* * Compute an offset if one was not specified. But only for * non-csv files since offset is not part of the csv * specification. */ if (getCsv() == null) if (colDef.offset == -1) // unspecified { if (previousColDef != null) colDef.offset = previousColDef.offset + previousColDef.length; else colDef.offset = 0; } /* * Since the parameter is 1-relative, an arbitrary decision, * we have to subtract one from them before they can be * used. 
*/ if (colDef.csvFieldNumber > 0) { colDef.csvFieldNumber--; colDef.offset = 0; } if (!(colDef instanceof Filler)) { if (headerHelper.exists(colDef.columnName)) throw new ParseException("columnsIn must be unique from headerIn: " + colDef.columnName, 0); columnHelper.add(colDef); } previousColDef = colDef; } catch (final Exception e) { throw new ParseException(e.getMessage(), 0); } } } } private void postParseInputFile() throws ParseException, IOException { if (!isSysin() && (getInputFiles() == null || getInputFiles().files() == null || getInputFiles().files().size() == 0)) { final StringBuilder sb = new StringBuilder(); sb.append("file not found"); if (getInputFiles() != null) { sb.append(": "); sb.append(getInputFiles().toString()); } throw new ParseException(sb.toString(), 0); } } private void postParseOrderBy() throws ParseException { /* * Convert OrderBys into sort keys */ if (getOrderBys() != null && !getOrderBys().isEmpty()) { if (keys == null) keys = new ArrayList<>(); for (final OrderBy orderBy : getOrderBys()) { if (!columnHelper.exists(orderBy.columnName)) throw new ParseException("OrderBy must be a defined column: " + orderBy.columnName, 0); final KeyPart col = columnHelper.get(orderBy.columnName); final KeyPart newKey = col.newCopy(); newKey.direction = orderBy.direction; keys.add(newKey); } } if (keys == null) keyHelper.setUpAsCopy(this); /* * Check for cvs keys on a non-cvs file */ if (keys != null && getCsv() == null) for (final KeyPart kdef : keys) { if (kdef.isCsv()) { throw new ParseException("unexpected CSV key (--field) on a non-CSV file", 0); } } /* * Check for non-cvs keys on a cvs file */ if (keys != null && getCsv() != null) for (final KeyPart kdef : keys) { if (!kdef.isCsv()) { throw new ParseException("only CSV keys (--field) allowed on a CSV file", 0); } } /* * Check for duplicate csv keys */ if (keys != null && getCsv() != null) for (final KeyPart k1 : keys) { for (final KeyPart k2 : keys) { if (k1 != k2 && k1.csvFieldNumber == 
k2.csvFieldNumber) { throw new ParseException( "sorting on the same field (--field " + k2.csvFieldNumber + ") is not allowed", 0); } } } if (keys != null) for (final KeyPart kdef : keys) { try { keyHelper.add(kdef, columnHelper); } catch (final Exception e) { throw new ParseException(e.getMessage(), 0); } } } private void postParseOutputFile() throws ParseException, IOException { if (isInPlaceSort() && getOutputFile() != null) throw new ParseException("--replace and --outputFile are mutually exclusive parameters", 0); if (isInPlaceSort() && isSysin()) throw new ParseException("--replace requires --inputFile, redirection or piped input is not allowed", 0); /* * -r is how the input file is replaced. If we set the outputFile here * it then becomes impossible to sort to the command line (sysout). */ if (isInPlaceSort()) fsc.outputFile = getInputFile(0); } /** * <p> * reset. * </p> * * @throws java.io.IOException if any. * @throws java.text.ParseException if any. */ public void reset() throws IOException, ParseException { if (provider != null) provider.reset(); if (publisher != null) publisher.reset(); } /** * <p> * setDepth. * </p> * * @param optimalFunnelDepth a int. */ public void setDepth(final int optimalFunnelDepth) { fsc.depth = optimalFunnelDepth; } /** * <p> * setInputFiles. * </p> * * @param wildFiles a {@link com.obdobion.argument.type.WildFiles} object. */ public void setInputFiles(final WildFiles wildFiles) { fsc.inputFiles = wildFiles; } /** * <p> * setOutputFile. * </p> * * @param outputFile a {@link java.io.File} object. 
*/ public void setOutputFile(final File outputFile) { fsc.outputFile = outputFile; } /** * */ void showParameters() throws IOException, ParseException { if (isSysin()) showParametersLog(true, "input is SYSIN"); else for (final File file : getInputFiles().files()) showParametersLog(true, "inputFilename = {}", file.getAbsolutePath()); if (isCacheInput()) showParametersLog(false, "input caching enabled"); if (isSysout()) showParametersLog(true, "output is SYSOUT"); else if (isInPlaceSort()) showParametersLog(true, "outputFilename= input file name"); else showParametersLog(true, "outputFilename= {}", getOutputFile().getAbsolutePath()); if (isCacheWork()) showParametersLog(false, "work files are cached in memory"); else if (getWorkDirectory() != null) showParametersLog(false, "work directory= {}", getWorkDirectory().getAbsolutePath()); if (specDirectory != null) showParametersLog(false, "specification include path is {}", specDirectory); if (getFixedRecordLengthIn() > 0) { final StringBuilder sb = new StringBuilder(); sb.append("FixedIn = ").append(getFixedRecordLengthIn()); if (isVariableLengthOutput()) sb.append(" adding VLR delimiters on output"); showParametersLog(false, sb.toString()); } else { if (getMaximumNumberOfRows() != Long.MAX_VALUE) showParametersLog(false, "max rows= {}", getMaximumNumberOfRows()); final StringBuilder bytes = new StringBuilder(); bytes.append("in:"); for (int b = 0; b < getEndOfRecordDelimiterIn().length; b++) { bytes.append(" "); bytes.append(ByteCLA.asLiteral(getEndOfRecordDelimiterIn()[b])); } if (getFixedRecordLengthOut() == 0) { bytes.append(", out:"); for (int b = 0; b < getEndOfRecordDelimiterOut().length; b++) { bytes.append(" "); bytes.append(ByteCLA.asLiteral(getEndOfRecordDelimiterOut()[b])); } } showParametersLog(false, "End of line delimeter {}", bytes.toString()); if (getCsv() != null) { final StringBuilder csvMsg = new StringBuilder(); csvMsg.append("csv: "); if (getCsv().header) csvMsg.append("has header"); else 
csvMsg.append("no header"); showParametersLog(false, csvMsg.toString()); } } if (getFixedRecordLengthOut() > 0) { final StringBuilder sb = new StringBuilder(); sb.append("FixedOut = ").append(getFixedRecordLengthOut()); showParametersLog(false, sb.toString()); } showParametersLog(false, "power = {}", getDepth()); if (getDuplicateDisposition() != DuplicateDisposition.Original) showParametersLog(false, "dups = {}", getDuplicateDisposition().name()); for (final String colName : columnHelper.getNames()) { final KeyPart col = columnHelper.get(colName); if (getCsv() == null) showParametersLog(false, "col \"{}\" {} offset {} length {} {}", col.columnName, col.typeName, col.offset, col.length, (col.parseFormat == null ? "" : " format " + col.parseFormat)); else showParametersLog(false, "col {} {} csvField {} {}", col.columnName, col.typeName, col.csvFieldNumber, (col.parseFormat == null ? "" : " format " + col.parseFormat)); } for (final String colName : headerHelper.getNames()) { final KeyPart col = headerHelper.get(colName); showParametersLog(false, "headerIn \"{}\" {} offset {} length {} {}", col.columnName, col.typeName, col.offset, col.length, (col.parseFormat == null ? "" : " format " + col.parseFormat)); } if (getAggregates() != null) for (final Aggregate agg : getAggregates()) { if (agg instanceof AggregateCount) showParametersLog(false, "aggregate \"count\""); else showParametersLog(false, "aggregate \"{}\" {}", agg.name, (agg.columnName == null ? 
agg.equation.toString() : agg.columnName)); } if (getWhereEqu() != null) { for (final Equ equ : getWhereEqu()) { showParametersLog(true, "where \"{}\"", equ.toString()); } } if (getStopEqu() != null) { for (final Equ equ : getStopEqu()) { showParametersLog(true, "stopWhen \"{}\"", equ.toString()); } } if (keys == null) logger.debug("process = {} order", getCopyOrder().name()); else for (final KeyPart def : keys) { showParametersLog(false, "orderBy {} {}", def.columnName, def.direction.name()); } if (getFormatOutDefs() != null) for (final FormatPart outDef : getFormatOutDefs()) { final StringBuilder sb = new StringBuilder(); sb.append("format "); if (outDef.columnName != null) sb.append("\"").append(outDef.columnName).append("\""); if (outDef.equation != null) sb.append("\"").append(outDef.equation.toString()).append("\""); if (outDef.typeName != null) sb.append(" ").append(outDef.typeName.name()); if (outDef.format != null) sb.append(" format \"").append(outDef.format).append("\""); if (outDef.filler != 0x00) sb.append(" fill=").append(ByteCLA.asLiteral(outDef.filler)); if (outDef.length != 255) sb.append(" length ").append(outDef.length); if (outDef.offset != 0) sb.append(" offset ").append(outDef.offset); if (outDef.size != 255) sb.append(" size ").append(outDef.size); showParametersLog(false, sb.toString()); if (outDef.equation != null) { try { logger.trace("\n{}", outDef.equation.showRPN()); } catch (final Exception e) { logger.warn("algebrain", e); } } } if (getHeaderOutDefs() != null) for (final FormatPart outDef : getHeaderOutDefs()) { final StringBuilder sb = new StringBuilder(); sb.append("headerOut "); if (outDef.columnName != null) sb.append("\"").append(outDef.columnName).append("\""); if (outDef.equation != null) sb.append("\"").append(outDef.equation.toString()).append("\""); if (outDef.typeName != null) sb.append(" ").append(outDef.typeName.name()); if (outDef.format != null) sb.append(" format \"").append(outDef.format).append("\""); if 
(outDef.filler != 0x00) sb.append(" fill=").append(ByteCLA.asLiteral(outDef.filler)); if (outDef.length != 255) sb.append(" length ").append(outDef.length); if (outDef.offset != 0) sb.append(" offset ").append(outDef.offset); if (outDef.size != 255) sb.append(" size ").append(outDef.size); showParametersLog(false, sb.toString()); if (outDef.equation != null) { try { logger.trace("\n{}", outDef.equation.showRPN()); } catch (final Exception e) { logger.warn("algebrain", e); } } } } private void showParametersLog(final boolean forceInfo, final String message, final Object... parms) { if (forceInfo || isSyntaxOnly()) logger.info(message, parms); else logger.debug(message, parms); } /** * <p> * startNextInput. * </p> * * @return a boolean. * @throws java.text.ParseException if any. * @throws java.io.IOException if any. */ public boolean startNextInput() throws ParseException, IOException { /* * Has the last input file been read? Then return false. */ if (inputFileIndex() >= (inputFileCount() - 1)) return false; inputFileIndex++; return true; } /** * <p> * stopIsTrue. * </p> * * @return a boolean. * @throws java.lang.Exception if any. */ public boolean stopIsTrue() throws Exception { if (getStopEqu() == null) return false; for (final Equ equ : getStopEqu()) { /* * All of the stop equations must be true. */ final Object result = equ.evaluate(); if (result == null) return false; if (!(result instanceof Boolean)) throw new Exception("--stopWhen clause must evaluate to true or false"); if (!((Boolean) result).booleanValue()) return false; } return true; } /** * <p> * whereIsTrue. * </p> * * @return a boolean. * @throws java.lang.Exception if any. */ public boolean whereIsTrue() throws Exception { if (getWhereEqu() == null) return true; for (final Equ equ : getWhereEqu()) { /* * All of the where equations must be true. 
*/ final Object result = equ.evaluate(); if (result == null) return false; if (!(result instanceof Boolean)) throw new Exception("--where clause must evaluate to true or false"); if (!((Boolean) result).booleanValue()) return false; } return true; } }