text
stringlengths
7
1.01M
package vn.bhxh.bhxhmail.mail; import android.support.annotation.VisibleForTesting; import android.text.TextUtils; import android.text.util.Rfc822Token; import android.text.util.Rfc822Tokenizer; import android.util.Log; import org.apache.james.mime4j.MimeException; import org.apache.james.mime4j.codec.EncoderUtil; import org.apache.james.mime4j.dom.address.Mailbox; import org.apache.james.mime4j.dom.address.MailboxList; import org.apache.james.mime4j.field.address.AddressBuilder; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.regex.Pattern; import static vn.bhxh.bhxhmail.mail.K9MailLib.LOG_TAG; public class Address implements Serializable { private static final Pattern ATOM = Pattern.compile("^(?:[a-zA-Z0-9!#$%&'*+\\-/=?^_`{|}~]|\\s)+$"); /** * Immutable empty {@link Address} array */ private static final Address[] EMPTY_ADDRESS_ARRAY = new Address[0]; private String mAddress; private String mPersonal; public Address(Address address) { mAddress = address.mAddress; mPersonal = address.mPersonal; } public Address(String address, String personal) { this(address, personal, true); } public Address(String address) { this(address, null, true); } private Address(String address, String personal, boolean parse) { if (parse) { Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(address); if (tokens.length > 0) { Rfc822Token token = tokens[0]; mAddress = token.getAddress(); String name = token.getName(); if (!TextUtils.isEmpty(name)) { /* * Don't use the "personal" argument if "address" is of the form: * James Bond <james.bond@mi6.uk> * * See issue 2920 */ mPersonal = name; } else { mPersonal = (personal == null) ? 
null : personal.trim(); } } else { // This should be an error } } else { mAddress = address; mPersonal = personal; } } public String getAddress() { return mAddress; } public String getHostname() { int hostIdx = mAddress.lastIndexOf("@"); if (hostIdx == -1) { return null; } return mAddress.substring(hostIdx+1); } public void setAddress(String address) { this.mAddress = address; } public String getPersonal() { return mPersonal; } public void setPersonal(String personal) { if ("".equals(personal)) { personal = null; } if (personal != null) { personal = personal.trim(); } this.mPersonal = personal; } /** * Parse a comma separated list of email addresses in human readable format and return an * array of Address objects, RFC-822 encoded. * * @param addressList * @return An array of 0 or more Addresses. */ public static Address[] parseUnencoded(String addressList) { List<Address> addresses = new ArrayList<Address>(); if (!TextUtils.isEmpty(addressList)) { Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(addressList); for (Rfc822Token token : tokens) { String address = token.getAddress(); if (!TextUtils.isEmpty(address)) { addresses.add(new Address(token.getAddress(), token.getName(), false)); } } } return addresses.toArray(EMPTY_ADDRESS_ARRAY); } /** * Parse a comma separated list of addresses in RFC-822 format and return an * array of Address objects. * * @param addressList * @return An array of 0 or more Addresses. 
*/ public static Address[] parse(String addressList) { if (TextUtils.isEmpty(addressList)) { return EMPTY_ADDRESS_ARRAY; } List<Address> addresses = new ArrayList<Address>(); try { MailboxList parsedList = AddressBuilder.DEFAULT.parseAddressList(addressList).flatten(); for (int i = 0, count = parsedList.size(); i < count; i++) { org.apache.james.mime4j.dom.address.Address address = parsedList.get(i); if (address instanceof Mailbox) { Mailbox mailbox = (Mailbox)address; addresses.add(new Address(mailbox.getLocalPart() + "@" + mailbox.getDomain(), mailbox.getName(), false)); } else { Log.e(LOG_TAG, "Unknown address type from Mime4J: " + address.getClass().toString()); } } } catch (MimeException pe) { Log.e(LOG_TAG, "MimeException in Address.parse()", pe); //but we do an silent failover : we just use the given string as name with empty address addresses.add(new Address(null, addressList, false)); } return addresses.toArray(EMPTY_ADDRESS_ARRAY); } @Override public boolean equals(Object o) { if (o instanceof Address) { Address other = (Address) o; if (mPersonal != null && other.mPersonal != null && !mPersonal.equals(other.mPersonal)) { return false; } return mAddress.equals(other.mAddress); } return super.equals(o); } @Override public int hashCode() { int hash = mAddress.hashCode(); if (mPersonal != null) { hash += 3 * mPersonal.hashCode(); } return hash; } @Override public String toString() { if (!TextUtils.isEmpty(mPersonal)) { return quoteAtoms(mPersonal) + " <" + mAddress + ">"; } else { return mAddress; } } public static String toString(Address[] addresses) { if (addresses == null) { return null; } return TextUtils.join(", ", addresses); } public String toEncodedString() { if (!TextUtils.isEmpty(mPersonal)) { return EncoderUtil.encodeAddressDisplayName(mPersonal) + " <" + mAddress + ">"; } else { return mAddress; } } public static String toEncodedString(Address[] addresses) { if (addresses == null) { return null; } StringBuilder sb = new StringBuilder(); for (int i 
= 0; i < addresses.length; i++) { sb.append(addresses[i].toEncodedString()); if (i < addresses.length - 1) { sb.append(','); } } return sb.toString(); } /** * Unpacks an address list previously packed with packAddressList() * @param addressList Packed address list. * @return Unpacked list. */ public static Address[] unpack(String addressList) { if (addressList == null) { return new Address[] { }; } List<Address> addresses = new ArrayList<Address>(); int length = addressList.length(); int pairStartIndex = 0; int pairEndIndex = 0; int addressEndIndex = 0; while (pairStartIndex < length) { pairEndIndex = addressList.indexOf(",\u0000", pairStartIndex); if (pairEndIndex == -1) { pairEndIndex = length; } addressEndIndex = addressList.indexOf(";\u0000", pairStartIndex); String address = null; String personal = null; if (addressEndIndex == -1 || addressEndIndex > pairEndIndex) { address = addressList.substring(pairStartIndex, pairEndIndex); } else { address = addressList.substring(pairStartIndex, addressEndIndex); personal = addressList.substring(addressEndIndex + 2, pairEndIndex); } addresses.add(new Address(address, personal, false)); pairStartIndex = pairEndIndex + 2; } return addresses.toArray(new Address[addresses.size()]); } /** * Packs an address list into a String that is very quick to read * and parse. Packed lists can be unpacked with unpackAddressList() * The packed list is a ",\u0000" separated list of: * address;\u0000personal * @param addresses Array of addresses to pack. * @return Packed addresses. 
*/ public static String pack(Address[] addresses) { if (addresses == null) { return null; } StringBuilder sb = new StringBuilder(); for (int i = 0, count = addresses.length; i < count; i++) { Address address = addresses[i]; sb.append(address.getAddress()); String personal = address.getPersonal(); if (personal != null) { sb.append(";\u0000"); // Escape quotes in the address part on the way in personal = personal.replaceAll("\"", "\\\""); sb.append(personal); } if (i < count - 1) { sb.append(",\u0000"); } } return sb.toString(); } /** * Quote a string, if necessary, based upon the definition of an "atom," as defined by RFC2822 * (http://tools.ietf.org/html/rfc2822#section-3.2.4). Strings that consist purely of atoms are * left unquoted; anything else is returned as a quoted string. * @param text String to quote. * @return Possibly quoted string. */ public static String quoteAtoms(final String text) { if (ATOM.matcher(text).matches()) { return text; } else { return quoteString(text); } } /** * Ensures that the given string starts and ends with the double quote character. The string is not modified in any way except to add the * double quote character to start and end if it's not already there. * sample -> "sample" * "sample" -> "sample" * ""sample"" -> ""sample"" * "sample"" -> "sample" * sa"mp"le -> "sa"mp"le" * "sa"mp"le" -> "sa"mp"le" * (empty string) -> "" * " -> """ * @param s * @return */ @VisibleForTesting static String quoteString(String s) { if (s == null) { return null; } if (!s.matches("^\".*\"$")) { return "\"" + s + "\""; } else { return s; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.functions.utils; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang.StringUtils; import org.apache.pulsar.client.api.SubscriptionInitialPosition; import org.apache.pulsar.common.functions.ConsumerConfig; import org.apache.pulsar.common.functions.FunctionConfig; import org.apache.pulsar.common.functions.ProducerConfig; import org.apache.pulsar.common.functions.Resources; import org.apache.pulsar.common.functions.WindowConfig; import org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.util.ObjectMapperFactory; import org.apache.pulsar.functions.proto.Function; import org.apache.pulsar.functions.proto.Function.FunctionDetails; import java.io.File; import java.lang.reflect.Type; import java.net.MalformedURLException; import java.util.Collection; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import static org.apache.commons.lang.StringUtils.isBlank; import static 
org.apache.commons.lang.StringUtils.isNotBlank; import static org.apache.commons.lang.StringUtils.isNotEmpty; import static org.apache.commons.lang3.StringUtils.isEmpty; import static org.apache.pulsar.common.functions.Utils.BUILTIN; import static org.apache.pulsar.common.util.ClassLoaderUtils.loadJar; @Slf4j public class FunctionConfigUtils { static final Integer MAX_PENDING_ASYNC_REQUESTS_DEFAULT = Integer.valueOf(1000); static final Boolean FORWARD_SOURCE_MESSAGE_PROPERTY_DEFAULT = Boolean.TRUE; private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.create(); public static FunctionDetails convert(FunctionConfig functionConfig, ClassLoader classLoader) throws IllegalArgumentException { boolean isBuiltin = !org.apache.commons.lang3.StringUtils.isEmpty(functionConfig.getJar()) && functionConfig.getJar().startsWith(org.apache.pulsar.common.functions.Utils.BUILTIN); Class<?>[] typeArgs = null; if (functionConfig.getRuntime() == FunctionConfig.Runtime.JAVA) { if (classLoader != null) { try { typeArgs = FunctionCommon.getFunctionTypes(functionConfig, classLoader); } catch (ClassNotFoundException | NoClassDefFoundError e) { throw new IllegalArgumentException( String.format("Function class %s must be in class path", functionConfig.getClassName()), e); } } } FunctionDetails.Builder functionDetailsBuilder = FunctionDetails.newBuilder(); // Setup source Function.SourceSpec.Builder sourceSpecBuilder = Function.SourceSpec.newBuilder(); if (functionConfig.getInputs() != null) { functionConfig.getInputs().forEach((topicName -> { sourceSpecBuilder.putInputSpecs(topicName, Function.ConsumerSpec.newBuilder() .setIsRegexPattern(false) .build()); })); } if (functionConfig.getTopicsPattern() != null && !functionConfig.getTopicsPattern().isEmpty()) { sourceSpecBuilder.putInputSpecs(functionConfig.getTopicsPattern(), Function.ConsumerSpec.newBuilder() .setIsRegexPattern(true) .build()); } if (functionConfig.getCustomSerdeInputs() != null) { 
functionConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> { sourceSpecBuilder.putInputSpecs(topicName, Function.ConsumerSpec.newBuilder() .setSerdeClassName(serdeClassName) .setIsRegexPattern(false) .build()); }); } if (functionConfig.getCustomSchemaInputs() != null) { functionConfig.getCustomSchemaInputs().forEach((topicName, conf) -> { try { ConsumerConfig consumerConfig = OBJECT_MAPPER.readValue(conf, ConsumerConfig.class); sourceSpecBuilder.putInputSpecs(topicName, Function.ConsumerSpec.newBuilder() .setSchemaType(consumerConfig.getSchemaType()) .putAllSchemaProperties(consumerConfig.getSchemaProperties()) .putAllConsumerProperties(consumerConfig.getConsumerProperties()) .setIsRegexPattern(false) .build()); } catch (JsonProcessingException e) { throw new IllegalArgumentException(String.format("Incorrect custom schema inputs ,Topic %s ", topicName)); } }); } if (functionConfig.getInputSpecs() != null) { functionConfig.getInputSpecs().forEach((topicName, consumerConf) -> { Function.ConsumerSpec.Builder bldr = Function.ConsumerSpec.newBuilder() .setIsRegexPattern(consumerConf.isRegexPattern()); if (isNotBlank(consumerConf.getSchemaType())) { bldr.setSchemaType(consumerConf.getSchemaType()); } else if (isNotBlank(consumerConf.getSerdeClassName())) { bldr.setSerdeClassName(consumerConf.getSerdeClassName()); } if (consumerConf.getReceiverQueueSize() != null) { bldr.setReceiverQueueSize(Function.ConsumerSpec.ReceiverQueueSize.newBuilder() .setValue(consumerConf.getReceiverQueueSize()).build()); } if (consumerConf.getSchemaProperties() != null) { bldr.putAllSchemaProperties(consumerConf.getSchemaProperties()); } if (consumerConf.getCryptoConfig() != null) { bldr.setCryptoSpec(CryptoUtils.convert(consumerConf.getCryptoConfig())); } bldr.putAllConsumerProperties(consumerConf.getConsumerProperties()); bldr.setPoolMessages(consumerConf.isPoolMessages()); sourceSpecBuilder.putInputSpecs(topicName, bldr.build()); }); } // Set subscription type 
Function.SubscriptionType subType; if ((functionConfig.getRetainOrdering() != null && functionConfig.getRetainOrdering()) || FunctionConfig.ProcessingGuarantees.EFFECTIVELY_ONCE.equals(functionConfig.getProcessingGuarantees())) { subType = Function.SubscriptionType.FAILOVER; } else if (functionConfig.getRetainKeyOrdering() != null && functionConfig.getRetainKeyOrdering()) { subType = Function.SubscriptionType.KEY_SHARED; } else { subType = Function.SubscriptionType.SHARED; } sourceSpecBuilder.setSubscriptionType(subType); // Set subscription name if (isNotBlank(functionConfig.getSubName())) { sourceSpecBuilder.setSubscriptionName(functionConfig.getSubName()); } // Set subscription position Function.SubscriptionPosition subPosition; if (functionConfig.getSubscriptionPosition() == SubscriptionInitialPosition.Earliest) { subPosition = Function.SubscriptionPosition.EARLIEST; } else { subPosition = Function.SubscriptionPosition.LATEST; } sourceSpecBuilder.setSubscriptionPosition(subPosition); if (typeArgs != null) { sourceSpecBuilder.setTypeClassName(typeArgs[0].getName()); } if (functionConfig.getTimeoutMs() != null) { sourceSpecBuilder.setTimeoutMs(functionConfig.getTimeoutMs()); // We use negative acks for fast tracking failures sourceSpecBuilder.setNegativeAckRedeliveryDelayMs(functionConfig.getTimeoutMs()); } if (functionConfig.getCleanupSubscription() != null) { sourceSpecBuilder.setCleanupSubscription(functionConfig.getCleanupSubscription()); } else { sourceSpecBuilder.setCleanupSubscription(true); } functionDetailsBuilder.setSource(sourceSpecBuilder); // Setup sink Function.SinkSpec.Builder sinkSpecBuilder = Function.SinkSpec.newBuilder(); if (functionConfig.getOutput() != null) { sinkSpecBuilder.setTopic(functionConfig.getOutput()); } if (!StringUtils.isBlank(functionConfig.getOutputSerdeClassName())) { sinkSpecBuilder.setSerDeClassName(functionConfig.getOutputSerdeClassName()); } if (!StringUtils.isBlank(functionConfig.getOutputSchemaType())) { 
sinkSpecBuilder.setSchemaType(functionConfig.getOutputSchemaType()); } if (functionConfig.getForwardSourceMessageProperty() == Boolean.TRUE) { sinkSpecBuilder.setForwardSourceMessageProperty(functionConfig.getForwardSourceMessageProperty()); } if (functionConfig.getCustomSchemaOutputs() != null && functionConfig.getOutput() != null) { String conf = functionConfig.getCustomSchemaOutputs().get(functionConfig.getOutput()); try { if (StringUtils.isNotEmpty(conf)) { ConsumerConfig consumerConfig = OBJECT_MAPPER.readValue(conf, ConsumerConfig.class); sinkSpecBuilder.putAllSchemaProperties(consumerConfig.getSchemaProperties()); sinkSpecBuilder.putAllConsumerProperties(consumerConfig.getConsumerProperties()); } } catch (JsonProcessingException e) { throw new IllegalArgumentException(String.format("Incorrect custom schema outputs ,Topic %s ", functionConfig.getOutput())); } } if (typeArgs != null) { sinkSpecBuilder.setTypeClassName(typeArgs[1].getName()); } if (functionConfig.getProducerConfig() != null) { ProducerConfig producerConf = functionConfig.getProducerConfig(); Function.ProducerSpec.Builder pbldr = Function.ProducerSpec.newBuilder(); if (producerConf.getMaxPendingMessages() != null) { pbldr.setMaxPendingMessages(producerConf.getMaxPendingMessages()); } if (producerConf.getMaxPendingMessagesAcrossPartitions() != null) { pbldr.setMaxPendingMessagesAcrossPartitions(producerConf.getMaxPendingMessagesAcrossPartitions()); } if (producerConf.getUseThreadLocalProducers() != null) { pbldr.setUseThreadLocalProducers(producerConf.getUseThreadLocalProducers()); } if (producerConf.getCryptoConfig() != null) { pbldr.setCryptoSpec(CryptoUtils.convert(producerConf.getCryptoConfig())); } if (producerConf.getBatchBuilder() != null) { pbldr.setBatchBuilder(producerConf.getBatchBuilder()); } sinkSpecBuilder.setProducerSpec(pbldr.build()); } functionDetailsBuilder.setSink(sinkSpecBuilder); if (functionConfig.getTenant() != null) { 
functionDetailsBuilder.setTenant(functionConfig.getTenant()); } if (functionConfig.getNamespace() != null) { functionDetailsBuilder.setNamespace(functionConfig.getNamespace()); } if (functionConfig.getName() != null) { functionDetailsBuilder.setName(functionConfig.getName()); } if (functionConfig.getLogTopic() != null) { functionDetailsBuilder.setLogTopic(functionConfig.getLogTopic()); } if (functionConfig.getRuntime() != null) { functionDetailsBuilder.setRuntime(FunctionCommon.convertRuntime(functionConfig.getRuntime())); } if (functionConfig.getProcessingGuarantees() != null) { functionDetailsBuilder.setProcessingGuarantees( FunctionCommon.convertProcessingGuarantee(functionConfig.getProcessingGuarantees())); } if (functionConfig.getRetainKeyOrdering() != null) { functionDetailsBuilder.setRetainKeyOrdering(functionConfig.getRetainKeyOrdering()); } if (functionConfig.getRetainOrdering() != null) { functionDetailsBuilder.setRetainOrdering(functionConfig.getRetainOrdering()); } if (functionConfig.getMaxMessageRetries() != null && functionConfig.getMaxMessageRetries() >= 0) { Function.RetryDetails.Builder retryBuilder = Function.RetryDetails.newBuilder(); retryBuilder.setMaxMessageRetries(functionConfig.getMaxMessageRetries()); if (isNotEmpty(functionConfig.getDeadLetterTopic())) { retryBuilder.setDeadLetterTopic(functionConfig.getDeadLetterTopic()); } functionDetailsBuilder.setRetryDetails(retryBuilder); } Map<String, Object> configs = new HashMap<>(); if (functionConfig.getUserConfig() != null) { configs.putAll(functionConfig.getUserConfig()); } // windowing related WindowConfig windowConfig = functionConfig.getWindowConfig(); if (windowConfig != null) { windowConfig.setActualWindowFunctionClassName(functionConfig.getClassName()); configs.put(WindowConfig.WINDOW_CONFIG_KEY, windowConfig); // set class name to window function executor functionDetailsBuilder.setClassName("org.apache.pulsar.functions.windowing.WindowFunctionExecutor"); } else { if 
(functionConfig.getClassName() != null) { functionDetailsBuilder.setClassName(functionConfig.getClassName()); } } if (!configs.isEmpty()) { functionDetailsBuilder.setUserConfig(new Gson().toJson(configs)); } if (functionConfig.getSecrets() != null && !functionConfig.getSecrets().isEmpty()) { functionDetailsBuilder.setSecretsMap(new Gson().toJson(functionConfig.getSecrets())); } if (functionConfig.getAutoAck() != null) { functionDetailsBuilder.setAutoAck(functionConfig.getAutoAck()); } else { functionDetailsBuilder.setAutoAck(true); } if (functionConfig.getParallelism() != null) { functionDetailsBuilder.setParallelism(functionConfig.getParallelism()); } else { functionDetailsBuilder.setParallelism(1); } // use default resources if resources not set Resources resources = Resources.mergeWithDefault(functionConfig.getResources()); Function.Resources.Builder bldr = Function.Resources.newBuilder(); bldr.setCpu(resources.getCpu()); bldr.setRam(resources.getRam()); bldr.setDisk(resources.getDisk()); functionDetailsBuilder.setResources(bldr); if (!StringUtils.isEmpty(functionConfig.getRuntimeFlags())) { functionDetailsBuilder.setRuntimeFlags(functionConfig.getRuntimeFlags()); } functionDetailsBuilder.setComponentType(FunctionDetails.ComponentType.FUNCTION); if (!StringUtils.isEmpty(functionConfig.getCustomRuntimeOptions())) { functionDetailsBuilder.setCustomRuntimeOptions(functionConfig.getCustomRuntimeOptions()); } if (isBuiltin) { String builtin = functionConfig.getJar().replaceFirst("^builtin://", ""); functionDetailsBuilder.setBuiltin(builtin); } return functionDetailsBuilder.build(); } public static FunctionConfig convertFromDetails(FunctionDetails functionDetails) { FunctionConfig functionConfig = new FunctionConfig(); functionConfig.setTenant(functionDetails.getTenant()); functionConfig.setNamespace(functionDetails.getNamespace()); functionConfig.setName(functionDetails.getName()); functionConfig.setParallelism(functionDetails.getParallelism()); 
functionConfig.setProcessingGuarantees(FunctionCommon.convertProcessingGuarantee(functionDetails.getProcessingGuarantees())); Map<String, ConsumerConfig> consumerConfigMap = new HashMap<>(); for (Map.Entry<String, Function.ConsumerSpec> input : functionDetails.getSource().getInputSpecsMap().entrySet()) { ConsumerConfig consumerConfig = new ConsumerConfig(); if (isNotEmpty(input.getValue().getSerdeClassName())) { consumerConfig.setSerdeClassName(input.getValue().getSerdeClassName()); } if (isNotEmpty(input.getValue().getSchemaType())) { consumerConfig.setSchemaType(input.getValue().getSchemaType()); } if (input.getValue().hasReceiverQueueSize()) { consumerConfig.setReceiverQueueSize(input.getValue().getReceiverQueueSize().getValue()); } if (input.getValue().hasCryptoSpec()) { consumerConfig.setCryptoConfig(CryptoUtils.convertFromSpec(input.getValue().getCryptoSpec())); } consumerConfig.setRegexPattern(input.getValue().getIsRegexPattern()); consumerConfig.setSchemaProperties(input.getValue().getSchemaPropertiesMap()); consumerConfig.setPoolMessages(input.getValue().getPoolMessages()); consumerConfigMap.put(input.getKey(), consumerConfig); } functionConfig.setInputSpecs(consumerConfigMap); if (!isEmpty(functionDetails.getSource().getSubscriptionName())) { functionConfig.setSubName(functionDetails.getSource().getSubscriptionName()); } functionConfig.setRetainOrdering(functionDetails.getRetainOrdering()); functionConfig.setRetainKeyOrdering(functionDetails.getRetainKeyOrdering()); functionConfig.setCleanupSubscription(functionDetails.getSource().getCleanupSubscription()); functionConfig.setAutoAck(functionDetails.getAutoAck()); if (functionDetails.getSource().getTimeoutMs() != 0) { functionConfig.setTimeoutMs(functionDetails.getSource().getTimeoutMs()); } if (!isEmpty(functionDetails.getSink().getTopic())) { functionConfig.setOutput(functionDetails.getSink().getTopic()); } if (!isEmpty(functionDetails.getSink().getSerDeClassName())) { 
functionConfig.setOutputSerdeClassName(functionDetails.getSink().getSerDeClassName()); } if (!isEmpty(functionDetails.getSink().getSchemaType())) { functionConfig.setOutputSchemaType(functionDetails.getSink().getSchemaType()); } if (functionDetails.getSink().getProducerSpec() != null) { Function.ProducerSpec spec = functionDetails.getSink().getProducerSpec(); ProducerConfig producerConfig = new ProducerConfig(); if (spec.getMaxPendingMessages() != 0) { producerConfig.setMaxPendingMessages(spec.getMaxPendingMessages()); } if (spec.getMaxPendingMessagesAcrossPartitions() != 0) { producerConfig.setMaxPendingMessagesAcrossPartitions(spec.getMaxPendingMessagesAcrossPartitions()); } if (spec.hasCryptoSpec()) { producerConfig.setCryptoConfig(CryptoUtils.convertFromSpec(spec.getCryptoSpec())); } if (spec.getBatchBuilder() != null) { producerConfig.setBatchBuilder(spec.getBatchBuilder()); } producerConfig.setUseThreadLocalProducers(spec.getUseThreadLocalProducers()); functionConfig.setProducerConfig(producerConfig); } if (!isEmpty(functionDetails.getLogTopic())) { functionConfig.setLogTopic(functionDetails.getLogTopic()); } if (functionDetails.getSink().getForwardSourceMessageProperty()) { functionConfig.setForwardSourceMessageProperty(functionDetails.getSink().getForwardSourceMessageProperty()); } functionConfig.setRuntime(FunctionCommon.convertRuntime(functionDetails.getRuntime())); if (functionDetails.hasRetryDetails()) { functionConfig.setMaxMessageRetries(functionDetails.getRetryDetails().getMaxMessageRetries()); if (!isEmpty(functionDetails.getRetryDetails().getDeadLetterTopic())) { functionConfig.setDeadLetterTopic(functionDetails.getRetryDetails().getDeadLetterTopic()); } } Map<String, Object> userConfig; if (!isEmpty(functionDetails.getUserConfig())) { Type type = new TypeToken<Map<String, Object>>() { }.getType(); userConfig = new Gson().fromJson(functionDetails.getUserConfig(), type); } else { userConfig = new HashMap<>(); } if 
(userConfig.containsKey(WindowConfig.WINDOW_CONFIG_KEY)) { WindowConfig windowConfig = new Gson().fromJson( (new Gson().toJson(userConfig.get(WindowConfig.WINDOW_CONFIG_KEY))), WindowConfig.class); userConfig.remove(WindowConfig.WINDOW_CONFIG_KEY); functionConfig.setClassName(windowConfig.getActualWindowFunctionClassName()); functionConfig.setWindowConfig(windowConfig); } else { functionConfig.setClassName(functionDetails.getClassName()); } functionConfig.setUserConfig(userConfig); if (!isEmpty(functionDetails.getSecretsMap())) { Type type = new TypeToken<Map<String, Object>>() { }.getType(); Map<String, Object> secretsMap = new Gson().fromJson(functionDetails.getSecretsMap(), type); functionConfig.setSecrets(secretsMap); } if (functionDetails.hasResources()) { Resources resources = new Resources(); resources.setCpu(functionDetails.getResources().getCpu()); resources.setRam(functionDetails.getResources().getRam()); resources.setDisk(functionDetails.getResources().getDisk()); functionConfig.setResources(resources); } if (!isEmpty(functionDetails.getRuntimeFlags())) { functionConfig.setRuntimeFlags(functionDetails.getRuntimeFlags()); } if (!isEmpty(functionDetails.getCustomRuntimeOptions())) { functionConfig.setCustomRuntimeOptions(functionDetails.getCustomRuntimeOptions()); } return functionConfig; } public static void inferMissingArguments(FunctionConfig functionConfig, boolean forwardSourceMessagePropertyEnabled) { if (StringUtils.isEmpty(functionConfig.getName())) { org.apache.pulsar.common.functions.Utils.inferMissingFunctionName(functionConfig); } if (StringUtils.isEmpty(functionConfig.getTenant())) { org.apache.pulsar.common.functions.Utils.inferMissingTenant(functionConfig); } if (StringUtils.isEmpty(functionConfig.getNamespace())) { org.apache.pulsar.common.functions.Utils.inferMissingNamespace(functionConfig); } if (functionConfig.getParallelism() == null) { functionConfig.setParallelism(1); } if (functionConfig.getMaxPendingAsyncRequests() == null) { 
functionConfig.setMaxPendingAsyncRequests(MAX_PENDING_ASYNC_REQUESTS_DEFAULT); } if (forwardSourceMessagePropertyEnabled) { if (functionConfig.getForwardSourceMessageProperty() == null) { functionConfig.setForwardSourceMessageProperty(FORWARD_SOURCE_MESSAGE_PROPERTY_DEFAULT); } } else { // if worker disables forward source message property, we don't need to set the default value. functionConfig.setForwardSourceMessageProperty(null); } if (functionConfig.getJar() != null) { functionConfig.setRuntime(FunctionConfig.Runtime.JAVA); } else if (functionConfig.getPy() != null) { functionConfig.setRuntime(FunctionConfig.Runtime.PYTHON); } else if (functionConfig.getGo() != null) { functionConfig.setRuntime(FunctionConfig.Runtime.GO); } WindowConfig windowConfig = functionConfig.getWindowConfig(); if (windowConfig != null) { WindowConfigUtils.inferMissingArguments(windowConfig); functionConfig.setAutoAck(false); } } private static void doJavaChecks(FunctionConfig functionConfig, ClassLoader clsLoader) { try { Class functionClass = clsLoader.loadClass(functionConfig.getClassName()); if (!org.apache.pulsar.functions.api.Function.class.isAssignableFrom(functionClass) && !java.util.function.Function.class.isAssignableFrom(functionClass) && !org.apache.pulsar.functions.api.WindowFunction.class.isAssignableFrom(functionClass)) { throw new IllegalArgumentException( String.format("Function class %s does not implement the correct interface", functionClass.getName())); } } catch (ClassNotFoundException | NoClassDefFoundError e) { throw new IllegalArgumentException( String.format("Function class %s must be in class path", functionConfig.getClassName()), e); } Class<?>[] typeArgs; try { typeArgs = FunctionCommon.getFunctionTypes(functionConfig, clsLoader); } catch (ClassNotFoundException | NoClassDefFoundError e) { throw new IllegalArgumentException( String.format("Function class %s must be in class path", functionConfig.getClassName()), e); } // inputs use default schema, so there is 
no check needed there // Check if the Input serialization/deserialization class exists in jar or already loaded and that it // implements SerDe class if (functionConfig.getCustomSerdeInputs() != null) { functionConfig.getCustomSerdeInputs().forEach((topicName, inputSerializer) -> { ValidatorUtils.validateSerde(inputSerializer, typeArgs[0], clsLoader, true); }); } // Check if the Input serialization/deserialization class exists in jar or already loaded and that it // implements SerDe class if (functionConfig.getCustomSchemaInputs() != null) { functionConfig.getCustomSchemaInputs().forEach((topicName, conf) -> { ConsumerConfig consumerConfig; try { consumerConfig = OBJECT_MAPPER.readValue(conf, ConsumerConfig.class); } catch (JsonProcessingException e) { throw new IllegalArgumentException(String.format("Topic %s has an incorrect schema Info", topicName)); } ValidatorUtils.validateSchema(consumerConfig.getSchemaType(), typeArgs[0], clsLoader, true); }); } // Check if the Input serialization/deserialization class exists in jar or already loaded and that it // implements Schema or SerDe classes if (functionConfig.getInputSpecs() != null) { functionConfig.getInputSpecs().forEach((topicName, conf) -> { // Need to make sure that one and only one of schema/serde is set if (!isEmpty(conf.getSchemaType()) && !isEmpty(conf.getSerdeClassName())) { throw new IllegalArgumentException( "Only one of schemaType or serdeClassName should be set in inputSpec"); } if (!isEmpty(conf.getSerdeClassName())) { ValidatorUtils.validateSerde(conf.getSerdeClassName(), typeArgs[0], clsLoader, true); } if (!isEmpty(conf.getSchemaType())) { ValidatorUtils.validateSchema(conf.getSchemaType(), typeArgs[0], clsLoader, true); } if (conf.getCryptoConfig() != null) { ValidatorUtils.validateCryptoKeyReader(conf.getCryptoConfig(), clsLoader, false); } }); } if (Void.class.equals(typeArgs[1])) { return; } // One and only one of outputSchemaType and outputSerdeClassName should be set if 
(!isEmpty(functionConfig.getOutputSerdeClassName()) && !isEmpty(functionConfig.getOutputSchemaType())) { throw new IllegalArgumentException( "Only one of outputSchemaType or outputSerdeClassName should be set"); } if (!isEmpty(functionConfig.getOutputSchemaType())) { ValidatorUtils.validateSchema(functionConfig.getOutputSchemaType(), typeArgs[1], clsLoader, false); } if (!isEmpty(functionConfig.getOutputSerdeClassName())) { ValidatorUtils.validateSerde(functionConfig.getOutputSerdeClassName(), typeArgs[1], clsLoader, false); } if (functionConfig.getProducerConfig() != null && functionConfig.getProducerConfig().getCryptoConfig() != null) { ValidatorUtils.validateCryptoKeyReader(functionConfig.getProducerConfig().getCryptoConfig(), clsLoader, true); } } private static void doPythonChecks(FunctionConfig functionConfig) { if (functionConfig.getProcessingGuarantees() == FunctionConfig.ProcessingGuarantees.EFFECTIVELY_ONCE) { throw new RuntimeException("Effectively-once processing guarantees not yet supported in Python"); } if (functionConfig.getWindowConfig() != null) { throw new IllegalArgumentException("There is currently no support windowing in python"); } if (functionConfig.getMaxMessageRetries() != null && functionConfig.getMaxMessageRetries() >= 0) { throw new IllegalArgumentException("Message retries not yet supported in python"); } if (functionConfig.getRetainKeyOrdering() != null && functionConfig.getRetainKeyOrdering()) { throw new IllegalArgumentException("Retain Key Orderering not yet supported in python"); } } private static void doGolangChecks(FunctionConfig functionConfig) { if (functionConfig.getProcessingGuarantees() == FunctionConfig.ProcessingGuarantees.EFFECTIVELY_ONCE) { throw new RuntimeException("Effectively-once processing guarantees not yet supported in Go function"); } if (functionConfig.getWindowConfig() != null) { throw new IllegalArgumentException("Windowing is not supported in Go function yet"); } if (functionConfig.getMaxMessageRetries() 
!= null && functionConfig.getMaxMessageRetries() >= 0) { throw new IllegalArgumentException("Message retries not yet supported in Go function"); } if (functionConfig.getRetainKeyOrdering() != null && functionConfig.getRetainKeyOrdering()) { throw new IllegalArgumentException("Retain Key Orderering not yet supported in Go function"); } } private static void verifyNoTopicClash(Collection<String> inputTopics, String outputTopic) throws IllegalArgumentException { if (inputTopics.contains(outputTopic)) { throw new IllegalArgumentException( String.format("Output topic %s is also being used as an input topic (topics must be one or the other)", outputTopic)); } } private static void doCommonChecks(FunctionConfig functionConfig) { if (isEmpty(functionConfig.getTenant())) { throw new IllegalArgumentException("Function tenant cannot be null"); } if (isEmpty(functionConfig.getNamespace())) { throw new IllegalArgumentException("Function namespace cannot be null"); } if (isEmpty(functionConfig.getName())) { throw new IllegalArgumentException("Function name cannot be null"); } // go doesn't need className if (functionConfig.getRuntime() == FunctionConfig.Runtime.PYTHON || functionConfig.getRuntime() == FunctionConfig.Runtime.JAVA){ if (isEmpty(functionConfig.getClassName())) { throw new IllegalArgumentException("Function classname cannot be null"); } } Collection<String> allInputTopics = collectAllInputTopics(functionConfig); if (allInputTopics.isEmpty()) { throw new IllegalArgumentException("No input topic(s) specified for the function"); } for (String topic : allInputTopics) { if (!TopicName.isValid(topic)) { throw new IllegalArgumentException(String.format("Input topic %s is invalid", topic)); } } if (!isEmpty(functionConfig.getOutput())) { if (!TopicName.isValid(functionConfig.getOutput())) { throw new IllegalArgumentException(String.format("Output topic %s is invalid", functionConfig.getOutput())); } } if (!isEmpty(functionConfig.getLogTopic())) { if 
(!TopicName.isValid(functionConfig.getLogTopic())) { throw new IllegalArgumentException(String.format("LogTopic topic %s is invalid", functionConfig.getLogTopic())); } } if (!isEmpty(functionConfig.getDeadLetterTopic())) { if (!TopicName.isValid(functionConfig.getDeadLetterTopic())) { throw new IllegalArgumentException(String.format("DeadLetter topic %s is invalid", functionConfig.getDeadLetterTopic())); } } if (functionConfig.getParallelism() != null && functionConfig.getParallelism() <= 0) { throw new IllegalArgumentException("Function parallelism must be a positive number"); } // Ensure that topics aren't being used as both input and output verifyNoTopicClash(allInputTopics, functionConfig.getOutput()); WindowConfig windowConfig = functionConfig.getWindowConfig(); if (windowConfig != null) { // set auto ack to false since windowing framework is responsible // for acking and not the function framework if (functionConfig.getAutoAck() != null && functionConfig.getAutoAck()) { throw new IllegalArgumentException("Cannot enable auto ack when using windowing functionality"); } WindowConfigUtils.validate(windowConfig); } if (functionConfig.getResources() != null) { ResourceConfigUtils.validate(functionConfig.getResources()); } if (functionConfig.getTimeoutMs() != null && functionConfig.getTimeoutMs() <= 0) { throw new IllegalArgumentException("Function timeout must be a positive number"); } if (functionConfig.getTimeoutMs() != null && functionConfig.getProcessingGuarantees() != null && functionConfig.getProcessingGuarantees() != FunctionConfig.ProcessingGuarantees.ATLEAST_ONCE) { throw new IllegalArgumentException("Message timeout can only be specified with processing guarantee is " + FunctionConfig.ProcessingGuarantees.ATLEAST_ONCE.name()); } if (functionConfig.getMaxMessageRetries() != null && functionConfig.getMaxMessageRetries() >= 0 && functionConfig.getProcessingGuarantees() == FunctionConfig.ProcessingGuarantees.EFFECTIVELY_ONCE) { throw new 
IllegalArgumentException("MaxMessageRetries and Effectively once don't gel well"); } if ((functionConfig.getMaxMessageRetries() == null || functionConfig.getMaxMessageRetries() < 0) && !org.apache.commons.lang3.StringUtils.isEmpty(functionConfig.getDeadLetterTopic())) { throw new IllegalArgumentException("Dead Letter Topic specified, however max retries is set to infinity"); } if (functionConfig.getRetainKeyOrdering() != null && functionConfig.getRetainKeyOrdering() && functionConfig.getProcessingGuarantees() != null && functionConfig.getProcessingGuarantees() == FunctionConfig.ProcessingGuarantees.EFFECTIVELY_ONCE) { throw new IllegalArgumentException("When effectively once processing guarantee is specified, retain Key ordering cannot be set"); } if (functionConfig.getRetainKeyOrdering() != null && functionConfig.getRetainKeyOrdering() && functionConfig.getRetainOrdering() != null && functionConfig.getRetainOrdering()) { throw new IllegalArgumentException("Only one of retain ordering or retain key ordering can be set"); } if (!isEmpty(functionConfig.getPy()) && !org.apache.pulsar.common.functions.Utils.isFunctionPackageUrlSupported(functionConfig.getPy()) && functionConfig.getPy().startsWith(BUILTIN)) { if (!new File(functionConfig.getPy()).exists()) { throw new IllegalArgumentException("The supplied python file does not exist"); } } if (!isEmpty(functionConfig.getGo()) && !org.apache.pulsar.common.functions.Utils.isFunctionPackageUrlSupported(functionConfig.getGo()) && functionConfig.getGo().startsWith(BUILTIN)) { if (!new File(functionConfig.getGo()).exists()) { throw new IllegalArgumentException("The supplied go file does not exist"); } } if (functionConfig.getInputSpecs() != null) { functionConfig.getInputSpecs().forEach((topicName, conf) -> { // receiver queue size should be >= 0 if (conf.getReceiverQueueSize() != null && conf.getReceiverQueueSize() < 0) { throw new IllegalArgumentException( "Receiver queue size should be >= zero"); } if 
(conf.getCryptoConfig() != null && isBlank(conf.getCryptoConfig().getCryptoKeyReaderClassName())) { throw new IllegalArgumentException( "CryptoKeyReader class name required"); } }); } if (functionConfig.getProducerConfig() != null && functionConfig.getProducerConfig().getCryptoConfig() != null) { if (isBlank(functionConfig.getProducerConfig().getCryptoConfig().getCryptoKeyReaderClassName())) { throw new IllegalArgumentException("CryptoKeyReader class name required"); } if (functionConfig.getProducerConfig().getCryptoConfig().getEncryptionKeys() == null || functionConfig.getProducerConfig().getCryptoConfig().getEncryptionKeys().length == 0) { throw new IllegalArgumentException("Must provide encryption key name for crypto key reader"); } } } private static Collection<String> collectAllInputTopics(FunctionConfig functionConfig) { List<String> retval = new LinkedList<>(); if (functionConfig.getInputs() != null) { retval.addAll(functionConfig.getInputs()); } if (functionConfig.getTopicsPattern() != null) { retval.add(functionConfig.getTopicsPattern()); } if (functionConfig.getCustomSerdeInputs() != null) { retval.addAll(functionConfig.getCustomSerdeInputs().keySet()); } if (functionConfig.getCustomSchemaInputs() != null) { retval.addAll(functionConfig.getCustomSchemaInputs().keySet()); } if (functionConfig.getInputSpecs() != null) { retval.addAll(functionConfig.getInputSpecs().keySet()); } return retval; } public static ClassLoader validate(FunctionConfig functionConfig, File functionPackageFile) { doCommonChecks(functionConfig); if (functionConfig.getRuntime() == FunctionConfig.Runtime.JAVA) { ClassLoader classLoader = null; if (functionPackageFile != null) { try { classLoader = loadJar(functionPackageFile); } catch (MalformedURLException e) { throw new IllegalArgumentException("Corrupted Jar File", e); } } else if (!isEmpty(functionConfig.getJar())) { File jarFile = new File(functionConfig.getJar()); if (!jarFile.exists()) { throw new IllegalArgumentException("Jar 
file does not exist"); } try { classLoader = loadJar(jarFile); } catch (Exception e) { throw new IllegalArgumentException("Corrupted Jar File", e); } } else { throw new IllegalArgumentException("Function Package is not provided"); } doJavaChecks(functionConfig, classLoader); return classLoader; } else if (functionConfig.getRuntime() == FunctionConfig.Runtime.GO) { doGolangChecks(functionConfig); return null; } else if (functionConfig.getRuntime() == FunctionConfig.Runtime.PYTHON){ doPythonChecks(functionConfig); return null; } else { throw new IllegalArgumentException("Function language runtime is either not set or cannot be determined"); } } public static void validateJavaFunction(FunctionConfig functionConfig, ClassLoader classLoader) { doCommonChecks(functionConfig); doJavaChecks(functionConfig, classLoader); } public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) { FunctionConfig mergedConfig = existingConfig.toBuilder().build(); if (!existingConfig.getTenant().equals(newConfig.getTenant())) { throw new IllegalArgumentException("Tenants differ"); } if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) { throw new IllegalArgumentException("Namespaces differ"); } if (!existingConfig.getName().equals(newConfig.getName())) { throw new IllegalArgumentException("Function Names differ"); } if (!StringUtils.isEmpty(newConfig.getClassName())) { mergedConfig.setClassName(newConfig.getClassName()); } if (!StringUtils.isEmpty(newConfig.getJar())) { mergedConfig.setJar(newConfig.getJar()); } if (newConfig.getInputSpecs() == null) { newConfig.setInputSpecs(new HashMap<>()); } if (mergedConfig.getInputSpecs() == null) { mergedConfig.setInputSpecs(new HashMap<>()); } if (newConfig.getInputs() != null) { newConfig.getInputs().forEach((topicName -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder().isRegexPattern(false).build()); })); } if (newConfig.getTopicsPattern() != null && 
!newConfig.getTopicsPattern().isEmpty()) { newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder() .isRegexPattern(true) .build()); } if (newConfig.getCustomSerdeInputs() != null) { newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() .serdeClassName(serdeClassName) .isRegexPattern(false) .build()); }); } if (newConfig.getCustomSchemaInputs() != null) { newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() .schemaType(schemaClassname) .isRegexPattern(false) .build()); }); } if (!newConfig.getInputSpecs().isEmpty()) { newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> { if (!existingConfig.getInputSpecs().containsKey(topicName)) { throw new IllegalArgumentException("Input Topics cannot be altered"); } if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) { throw new IllegalArgumentException("isRegexPattern for input topic " + topicName + " cannot be altered"); } mergedConfig.getInputSpecs().put(topicName, consumerConfig); }); } if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName().equals(existingConfig.getOutputSerdeClassName())) { throw new IllegalArgumentException("Output Serde mismatch"); } if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType().equals(existingConfig.getOutputSchemaType())) { throw new IllegalArgumentException("Output Schema mismatch"); } if (!StringUtils.isEmpty(newConfig.getLogTopic())) { mergedConfig.setLogTopic(newConfig.getLogTopic()); } if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees().equals(existingConfig.getProcessingGuarantees())) { throw new IllegalArgumentException("Processing Guarantees cannot be altered"); } if (newConfig.getRetainOrdering() != null 
&& !newConfig.getRetainOrdering().equals(existingConfig.getRetainOrdering())) { throw new IllegalArgumentException("Retain Ordering cannot be altered"); } if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering().equals(existingConfig.getRetainKeyOrdering())) { throw new IllegalArgumentException("Retain Key Ordering cannot be altered"); } if (!StringUtils.isEmpty(newConfig.getOutput())) { mergedConfig.setOutput(newConfig.getOutput()); } if (newConfig.getUserConfig() != null) { mergedConfig.setUserConfig(newConfig.getUserConfig()); } if (newConfig.getSecrets() != null) { mergedConfig.setSecrets(newConfig.getSecrets()); } if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) { throw new IllegalArgumentException("Runtime cannot be altered"); } if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) { throw new IllegalArgumentException("AutoAck cannot be altered"); } if (newConfig.getMaxMessageRetries() != null) { mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries()); } if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) { mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic()); } if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName().equals(existingConfig.getSubName())) { throw new IllegalArgumentException("Subscription Name cannot be altered"); } if (newConfig.getParallelism() != null) { mergedConfig.setParallelism(newConfig.getParallelism()); } if (newConfig.getResources() != null) { mergedConfig.setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources())); } if (newConfig.getWindowConfig() != null) { mergedConfig.setWindowConfig(newConfig.getWindowConfig()); } if (newConfig.getTimeoutMs() != null) { mergedConfig.setTimeoutMs(newConfig.getTimeoutMs()); } if (newConfig.getCleanupSubscription() != null) { 
mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription()); } if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) { mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags()); } if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) { mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions()); } return mergedConfig; } }
/*
 * Copyright 2012-2016, the original author or authors.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.flipkart.flux.shard;

import java.util.List;
import java.util.Objects;

/* Mapper Class to read the shard configuration from Config File as Json
 * @author amitkumar.o
 */
public class MasterSlavePairList {

    // Master/slave shard pairs read from the JSON config file.
    // May be null when constructed via the no-arg constructor (Jackson-style binding).
    private List<ShardPairModel> shardPairModelList;

    public MasterSlavePairList(List<ShardPairModel> shardPairModelList) {
        this.shardPairModelList = shardPairModelList;
    }

    // No-arg constructor for JSON deserialization frameworks.
    public MasterSlavePairList() {
    }

    public List<ShardPairModel> getShardPairModelList() {
        return shardPairModelList;
    }

    public void setShardPairModelList(List<ShardPairModel> shardPairModelList) {
        this.shardPairModelList = shardPairModelList;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof MasterSlavePairList)) return false;

        MasterSlavePairList that = (MasterSlavePairList) o;

        // FIX: use Objects.equals so a null list (possible via the no-arg constructor
        // or setter) no longer throws NullPointerException.
        return Objects.equals(getShardPairModelList(), that.getShardPairModelList());
    }

    @Override
    public int hashCode() {
        // Objects.hashCode returns 0 for null and list.hashCode() otherwise,
        // so non-null hashes are unchanged from the previous implementation.
        return Objects.hashCode(getShardPairModelList());
    }

    @Override
    public String toString() {
        return "MasterSlavePairList{" +
                "shardPairModelList=" + shardPairModelList +
                '}';
    }
}
package com.wasu.demo59;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Application entry point. {@code @SpringBootApplication} enables component
 * scanning, auto-configuration and configuration-property support for this package.
 */
@SpringBootApplication
public class Demo59Application {

    /**
     * Bootstraps the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        // Equivalent to the static SpringApplication.run(Demo59Application.class, args),
        // spelled out via an explicit SpringApplication instance.
        SpringApplication application = new SpringApplication(Demo59Application.class);
        application.run(args);
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.repository.kdr; import java.util.Date; import java.util.Hashtable; import java.util.List; import java.util.Map; import org.pentaho.di.core.Const; import org.pentaho.di.core.ProgressMonitorListener; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.logging.LogLevel; import org.pentaho.di.core.plugins.DatabasePluginType; import org.pentaho.di.core.plugins.JobEntryPluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.plugins.StepPluginType; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.repository.LongObjectId; import org.pentaho.di.repository.ObjectId; import 
org.pentaho.di.repository.kdr.delegates.KettleDatabaseRepositoryConnectionDelegate;

// Helper that creates or upgrades the table schema of a Kettle database repository.
// All state is captured from the repository instance at construction time.
public class KettleDatabaseRepositoryCreationHelper {

  // Repository whose schema is created/upgraded.
  private KettleDatabaseRepository repository;
  // Log channel taken from the repository.
  private LogChannelInterface log;
  // Connection metadata of the repository database.
  private DatabaseMeta databaseMeta;
  // Live database connection used to run the DDL/DML.
  private Database database;
  // Plugin registry singleton (used when populating lookup tables).
  private PluginRegistry pluginRegistry;

  // Captures the repository's database, metadata and log channel so the
  // schema-creation methods can operate without further wiring.
  public KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ) {
    this.repository = repository;
    this.databaseMeta = this.repository.getDatabaseMeta();
    this.database = this.repository.getDatabase();

    this.log = repository.getLog();
    this.pluginRegistry = PluginRegistry.getInstance();
  }

  /**
   * Create or upgrade repository tables & fields, populate lookup tables, ...
   *
   * @param monitor
   *          The progress monitor to use, or null if no monitor is present.
   * @param upgrade
   *          True if you want to upgrade the repository, false if you want to create it.
   * @param statements
   *          the list of statements to populate
   * @param dryrun
   *          true if we don't actually execute the statements
   *
   * @throws KettleException
   *           in case something goes wrong!
   */
  public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade,
    List<String> statements, boolean dryrun ) throws KettleException {

    RowMetaInterface table;
    String sql;
    String tablename;
    String schemaTable;
    String indexname;
    String[] keyfield;
    String user[], pass[], code[], desc[];

    int KEY = 9; // integer, no need for bigint!

    log.logBasic( "Starting to create or modify the repository tables..." );
    String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository...";
    if ( monitor != null ) {
      // 31 = number of sub-tasks reported below; keeps the progress bar proportional.
      monitor.beginTask( message, 31 );
    }

    repository.connectionDelegate.setAutoCommit( true );

    // ////////////////////////////////////////////////////////////////////////////////
    // R_LOG
    //
    // Log the operations we do in the repository.
// table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE, ValueMetaInterface.TYPE_DATE ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? 
"Upgrade" : "Creation" ) + " of the Kettle repository" ); } // //////////////////////////////////////////////////////////////////////////////// // R_VERSION // // Let's start with the version table // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, ValueMetaInterface.TYPE_INTEGER, 3, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, ValueMetaInterface.TYPE_INTEGER, 3, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, ValueMetaInterface.TYPE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Const.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } // Insert an extra record in R_VERSION every time we pass here... // try { // if the table doesn't exist, don't try to grab an ID from it... 
LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } // //////////////////////////////////////////////////////////////////////////////// // R_DATABASE_TYPE // // Create table... 
// boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Const.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { // // Populate... // updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_DATABASE_CONTYPE // // Create table... 
// boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } // If it's creating the table, go ahead and populate below... // ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { // // Populate with data... 
// code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_NOTE // // Create table... 
// R_NOTE: note-pad annotations with their GUI geometry, font and color attributes.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_NOTE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
// GUI geometry of the note on the canvas.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
// Font attributes.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
// Font, background and border colors as RGB components.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_DATABASE
//
// Create table...
// R_DATABASE: stored database connection definitions (host, db name, port,
// credentials, tablespaces); ID_DATABASE_TYPE / ID_DATABASE_CONTYPE reference
// the lookup tables created above.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_DATABASE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_PORT, ValueMetaInterface.TYPE_INTEGER, 7, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_USERNAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_DATABASE_ATTRIBUTE
//
// Create table...
// R_DATABASE_ATTRIBUTE: free-form key/value attributes per database connection,
// with a unique lookup index on (ID_DATABASE, CODE).
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false,
  KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
  try {
    // Index name is derived from the (quote-stripped) table name.
    indexname = "IDX_"
      + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_AK";
    keyfield = new String[] {
      KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE,
      KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_DIRECTORY
//
// Create table...
// R_DIRECTORY: repository directory tree (parent pointer + name), with a unique
// lookup index on (ID_DIRECTORY_PARENT, DIRECTORY_NAME).
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
  try {
    indexname = "IDX_"
      + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_AK";
    keyfield = new String[] {
      KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT,
      KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_TRANSFORMATION
//
// Create table...
// R_TRANSFORMATION: one row per stored transformation — identity/location,
// description/version, logging configuration, max-date settings and audit info.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
// References to the steps used for read/write/input/output/update statistics.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
// Logging configuration.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
// Max-date (incremental load) settings.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, ValueMetaInterface.TYPE_NUMBER, 12, 2 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, ValueMetaInterface.TYPE_NUMBER, 12, 2 ) );
// Audit columns.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta(
  new ValueMeta( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
sql = database.getDDL( schemaTable, table, null, false,
  KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}

// In case of an update, the added column R_TRANSFORMATION.ID_DIRECTORY == NULL!!!
//
// Upgrade fix-up: move any NULL directory references to the root directory (0).
if ( database.checkTableExists( schemaTable ) ) {
  sql = "SELECT * FROM " + schemaTable + " WHERE "
    + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL";
  // Only probe for a single row: we just need to know whether any NULL exists.
  List<Object[]> rows = database.getRows( sql, 1 );
  if ( rows != null && rows.size() > 0 ) {
    sql = "UPDATE " + schemaTable + " SET "
      + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE "
      + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL";
    statements.add( sql );
    if ( !dryrun ) {
      database.execStatement( sql );
    }
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_TRANS_ATTRIBUTE
//
// Create table...
// R_TRANS_ATTRIBUTE: numbered key/value attributes per transformation, with a
// unique lookup index on (ID_TRANSFORMATION, CODE, NR).
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_INTEGER, 18, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false,
  KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
  try {
    indexname = "IDX_TRANS_ATTRIBUTE_LOOKUP";
    keyfield = new String[] {
      KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION,
      KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE,
      KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_JOB_ATTRIBUTE
//
// Create table...
// R_JOB_ATTRIBUTE: job-side mirror of R_TRANS_ATTRIBUTE, with a unique lookup
// index on (ID_JOB, CODE, NR).
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_INTEGER, 18, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false,
  KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
  try {
    indexname = "IDX_JOB_ATTRIBUTE_LOOKUP";
    keyfield = new String[] {
      KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB,
      KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE,
      KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_DEPENDENCY
//
// Create table...
// R_DEPENDENCY: table/field dependencies of a transformation on a database.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_PARTITION_SCHEMA
//
// Create table...
// R_PARTITION_SCHEMA: named partition schemas (static or dynamic per-slave).
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false,
  KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_PARTITION
//
// Create table...
// R_PARTITION: individual partition IDs belonging to a partition schema.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_PARTITION;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_TRANS_PARTITION_SCHEMA
//
// Create table...
// R_TRANS_PARTITION_SCHEMA: many-to-many link between transformations and
// partition schemas.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA,
  ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA,
  ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
sql = database.getDDL( schemaTable, table, null, false,
  KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_CLUSTER
//
// Create table...
// R_CLUSTER: cluster schema definitions (base port, socket buffer/flush/compression
// settings, dynamic flag).
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_CLUSTER;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
// Numeric-looking settings are stored as strings so they may contain variables.
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_SLAVE
//
// Create table...
// R_SLAVE: slave server definitions (host, port, credentials, proxy settings,
// master flag).
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_SLAVE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_PORT, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_USERNAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_SLAVE_MASTER, ValueMetaInterface.TYPE_BOOLEAN ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_CLUSTER_SLAVE
//
// Create table...
// R_CLUSTER_SLAVE: many-to-many link between clusters and slave servers.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
sql = database.getDDL( schemaTable, table, null, false,
  KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_TRANS_SLAVE
//
// Create table...
// R_TRANS_SLAVE: many-to-many link between transformations and slave servers.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_TRANS_CLUSTER
//
// Create table...
// R_TRANS_CLUSTER : link table transformation <-> cluster schema.
// Uses locals (table, tablename, schemaTable, sql, statements, database, databaseMeta,
// log, monitor, dryrun, KEY) declared earlier in the enclosing method.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
// Empty DDL means the physical table already matches the definition above.
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

//
// R_TRANS_HOP
//
// Create table...
// R_TRANS_HOP : one row per hop (step-from -> step-to link) in a transformation.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false );
if ( !Const.isEmpty( sql ) ) {
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// /////////////////////////////////////////////////////////////////////////////
// R_TRANS_STEP_CONDITION : link table step <-> condition (no technical key).
//
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
// Note: null technical key here — this is a pure link table.
sql = database.getDDL( schemaTable, table, null, false, null, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// /////////////////////////////////////////////////////////////////////////////
// R_CONDITION : one row per (possibly nested) filter condition.
//
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_CONDITION;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_NEGATED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// /////////////////////////////////////////////////////////////////////////////
// R_VALUE : constant values referenced from conditions.
//
tablename = KettleDatabaseRepository.TABLE_R_VALUE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table = new RowMeta();
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_VALUE_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_VALUE_IS_NULL, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_STEP_TYPE : lookup table of step types; (re)populated below via updateStepTypes().
//
boolean ok_step_type = true;
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
// NOTE(review): literal "ID_STEP_TYPE" here instead of the FIELD_ constant used elsewhere —
// presumably equivalent; verify against KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE.
sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false );
create = false;
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  // "create" records whether the DDL is a CREATE (fresh table) rather than an ALTER;
  // updateStepTypes() below uses it.
  create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0;
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( ok_step_type ) {
  updateStepTypes( statements, dryrun, create );
  if ( log.isDetailed() ) {
    log.logDetailed( "Populated table " + schemaTable );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_STEP : one row per step of a transformation.
//
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_STEP;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_COPIES, ValueMetaInterface.TYPE_INTEGER, 3, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_STEP_ATTRIBUTE
//
// Create table...
// R_STEP_ATTRIBUTE : key/value attribute storage per step (code + nr -> numeric/string value).
tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table = new RowMeta();
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_INTEGER, 18, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
  try {
    // Lookup index on (ID_STEP, CODE, NR); the second boolean argument makes it unique.
    indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LOOKUP";
    keyfield = new String[] {
      KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE,
      KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_STEP_DATABASE
//
// Keeps the links between transformation steps and databases.
// That way investigating dependencies becomes easier to program.
//
// Create table...
// R_STEP_DATABASE : link table step <-> database connection (no technical key);
// two non-unique lookup indexes, one per lookup direction.
tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table = new RowMeta();
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, null, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
  try {
    indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU1";
    keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
  try {
    indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU2";
    keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_TRANS_NOTE : link table transformation <-> note (no technical key).
//
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, null, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_LOGLEVEL : lookup table of log levels, populated from the LogLevel enum below.
//
boolean ok_loglevel = true;
tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table = new RowMeta();
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false );
create = false;
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0;
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( ok_loglevel ) {
  //
  // Populate with data...
  //
  code = LogLevel.logLogLevelCodes();
  desc = LogLevel.getLogLevelDescriptions();
  if ( !dryrun ) {
    database.prepareInsert( table, null, tablename );
  }
  // Index 0 is skipped; presumably it is the "no logging" entry — TODO confirm against LogLevel.
  for ( int i = 1; i < code.length; i++ ) {
    RowMetaAndData lookup = null;
    if ( upgrade ) {
      // On upgrade, only insert levels that are not already present.
      lookup =
        database.getOneRow( "SELECT "
          + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable
          + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" );
    }
    if ( lookup == null ) {
      ObjectId nextid = new LongObjectId( i );
      if ( !create ) {
        // Existing table: take the next free id instead of the loop index.
        nextid = repository.connectionDelegate.getNextLoglevelID();
      }
      RowMetaAndData tableData = new RowMetaAndData();
      tableData.addValue( new ValueMeta(
        KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, ValueMetaInterface.TYPE_INTEGER ), nextid );
      tableData.addValue( new ValueMeta(
        KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, ValueMetaInterface.TYPE_STRING ), code[i] );
      tableData.addValue( new ValueMeta(
        KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), desc[i] );
      if ( dryrun ) {
        sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null );
        statements.add( sql );
      } else {
        database.setValuesInsert( tableData.getRowMeta(), tableData.getData() );
        database.insertRow();
      }
    }
  }
  try {
    if ( !dryrun ) {
      database.closeInsert();
    }
    if ( log.isDetailed() ) {
      log.logDetailed( "Populated table " + schemaTable );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_LOG
//
// Create table...
// R_LOG : one row per saved logging configuration.
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_LOG;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_ID_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_LOGTYPE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_FILENAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_ADD_DATE, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_ADD_TIME, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_JOB : one row per job stored in the repository.
//
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_JOB;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_NAME, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_CREATED_USER, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, ValueMetaInterface.TYPE_DATE, 20, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, ValueMetaInterface.TYPE_BOOLEAN, 0, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); // 255 max length for now.
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_JOBENTRY_DATABASE
//
// Keeps the links between job entries and databases.
// That way investigating dependencies becomes easier to program.
//
// Create table...
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
// FIX(review): removed a redundant preceding getDDL() call that passed
// KettleDatabaseRepository.FIELD_JOB_ID_JOB (the R_JOB key, not a column of this table) as the
// technical key; its result was immediately overwritten by the call below. Like its sibling
// R_STEP_DATABASE, this is a pure link table and therefore has no technical key (null).
sql = database.getDDL( schemaTable, table, null, false, null, false );
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
  try {
    indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU1";
    keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
  try {
    indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LU2";
    keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, };
    if ( !database.checkIndexExists( schemaTable, keyfield ) ) {
      sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false );
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable );
        }
      }
    }
  } catch ( KettleException kdbe ) {
    // Ignore this one: index is not properly detected, it already exists...
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_JOBENTRY_TYPE : lookup table of job entry types; (re)populated via updateJobEntryTypes().
//
boolean ok_jobentry_type = true;
table = new RowMeta();
tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE;
schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
if ( monitor != null ) {
  monitor.subTask( "Checking table " + schemaTable );
}
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
table.addValueMeta( new ValueMeta(
  KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING,
  KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false );
create = false;
if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table...
{
  // "create" tells updateJobEntryTypes() whether this was a fresh CREATE rather than an ALTER.
  create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0;
  statements.add( sql );
  if ( !dryrun ) {
    if ( log.isDetailed() ) {
      log.logDetailed( "executing SQL statements: " + Const.CR + sql );
    }
    database.execStatements( sql );
    if ( log.isDetailed() ) {
      log.logDetailed( "Created or altered table " + schemaTable );
    }
  }
} else {
  if ( log.isDetailed() ) {
    log.logDetailed( "Table " + schemaTable + " is OK." );
  }
}
if ( ok_jobentry_type ) {
  //
  // Populate with data...
  //
  updateJobEntryTypes( statements, dryrun, create );
  if ( log.isDetailed() ) {
    log.logDetailed( "Populated table " + schemaTable );
  }
}
if ( monitor != null ) {
  monitor.worked( 1 );
}

// ////////////////////////////////////////////////////////////////////////////////
//
// R_JOBENTRY
//
// Create table...
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table... { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOBENTRY_COPY // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, ValueMetaInterface.TYPE_INTEGER, 4, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table... 
{ statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOBENTRY_ATTRIBUTE // // Create table... table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, ValueMetaInterface.TYPE_INTEGER, 6, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, ValueMetaInterface.TYPE_NUMBER, 13, 2 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( 
!Const.isEmpty( sql ) ) // Doesn't exist: create the table... { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = "IDX_" + schemaTable.replace( databaseMeta.getStartQuote(), "" ).replace( databaseMeta.getEndQuote(), "" ) + "_LOOKUP"; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { // Ignore this one: index is not properly detected, it already exists... } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOB_HOP // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table... { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_JOB_NOTE // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table... { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // // R_TRANS_LOCK // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, ValueMetaInterface.TYPE_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // // R_JOB_LOCK // // Create table... 
table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, ValueMetaInterface.TYPE_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // ///////////////////////////////////////////////////////////////////////////////// // // MetaStore tables... 
// // ///////////////////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////////////////// // // R_NAMESPACE // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_ELEMENT_TYPE // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_ELEMENT // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } // //////////////////////////////////////////////////////////////////////////////// // // R_ELEMENT_ATTRIBUTE // table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Const.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } // ///////////////////////////////////////////////////////////////////////////////// // // User tables... 
// // ///////////////////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////////////////// // // R_USER // // Keep a mapping between the user login and the object id // Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); // Create table... // boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_ID_USER, ValueMetaInterface.TYPE_INTEGER, KEY, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_LOGIN, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_PASSWORD, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_NAME, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, ValueMetaInterface.TYPE_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMeta( KettleDatabaseRepository.FIELD_USER_ENABLED, ValueMetaInterface.TYPE_BOOLEAN, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Const.isEmpty( sql ) ) // Doesn't exist: create the table... 
{ create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_user ) { // // Populate with data... // user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; // prof = new String[] { "Administrator", "Read-only" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_ID_USER, ValueMetaInterface.TYPE_INTEGER ), nextid ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_LOGIN, ValueMetaInterface.TYPE_STRING ), user[i] ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_PASSWORD, ValueMetaInterface.TYPE_STRING ), password ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_NAME, ValueMetaInterface.TYPE_STRING ), code[i] ); tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), desc[i] ); 
tableData.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_USER_ENABLED, ValueMetaInterface.TYPE_BOOLEAN ), Boolean.TRUE ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData ); database.insertRow(); } users.put( user[i], nextid ); } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } log.logBasic( ( upgrade ? "Upgraded" : "Created" ) + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." ); } /** * Update the list in R_STEP_TYPE using the StepLoader StepPlugin entries * * @throws KettleException * if the update didn't go as planned. */ public List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ) throws KettleException { synchronized ( repository ) { // We should only do an update if something has changed... // List<PluginInterface> plugins = pluginRegistry.getPlugins( StepPluginType.class ); for ( int i = 0; i < plugins.size(); i++ ) { PluginInterface sp = plugins.get( i ); ObjectId id = null; if ( !create ) { id = repository.stepDelegate.getStepTypeID( sp.getIds()[0] ); } if ( id == null ) // Not found, we need to add this one... { // We need to add this one ... 
id = new LongObjectId( i + 1 ); if ( !create ) { id = repository.connectionDelegate.getNextStepTypeID(); } RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER ), id ); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, ValueMetaInterface.TYPE_STRING ), sp .getIds()[0] ); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), sp.getName() ); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, ValueMetaInterface.TYPE_STRING ), sp.getDescription() ); if ( dryrun ) { String sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_STEP_TYPE, table.getRowMeta(), table .getData(), null ); statements.add( sql ); } else { database.prepareInsert( table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_STEP_TYPE ); database.setValuesInsert( table ); database.insertRow(); database.closeInsert(); } } } } return statements; } /** * Update the list in R_DATABASE_TYPE using the database plugin entries * * @throws KettleException * if the update didn't go as planned. */ public List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ) throws KettleException { synchronized ( repository ) { // We should only do an update if something has changed... // List<PluginInterface> plugins = pluginRegistry.getPlugins( DatabasePluginType.class ); for ( int i = 0; i < plugins.size(); i++ ) { PluginInterface plugin = plugins.get( i ); ObjectId id = null; if ( !create ) { id = repository.databaseDelegate.getDatabaseTypeID( plugin.getIds()[0] ); } if ( id == null ) // Not found, we need to add this one... { // We need to add this one ... 
id = new LongObjectId( i + 1 ); if ( !create ) { id = repository.connectionDelegate.getNextDatabaseTypeID(); } RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, ValueMetaInterface.TYPE_INTEGER ), id ); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, ValueMetaInterface.TYPE_STRING ), plugin.getIds()[0] ); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), plugin.getName() ); if ( dryrun ) { String sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_DATABASE_TYPE, table.getRowMeta(), table .getData(), null ); statements.add( sql ); } else { database.prepareInsert( table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_DATABASE_TYPE ); database.setValuesInsert( table ); database.insertRow(); database.closeInsert(); } } } } return statements; } /** * Update the list in R_JOBENTRY_TYPE * * @param create * * @exception KettleException * if something went wrong during the update. */ public void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ) throws KettleException { synchronized ( repository ) { // We should only do an update if something has changed... PluginRegistry registry = PluginRegistry.getInstance(); List<PluginInterface> jobPlugins = registry.getPlugins( JobEntryPluginType.class ); for ( int i = 0; i < jobPlugins.size(); i++ ) { PluginInterface jobPlugin = jobPlugins.get( i ); String type_desc = jobPlugin.getIds()[0]; String type_desc_long = jobPlugin.getName(); ObjectId id = null; if ( !create ) { id = repository.jobEntryDelegate.getJobEntryTypeID( type_desc ); } if ( id == null ) // Not found, we need to add this one... { // We need to add this one ... 
id = new LongObjectId( i + 1 ); if ( !create ) { id = repository.connectionDelegate.getNextJobEntryTypeID(); } RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, ValueMetaInterface.TYPE_INTEGER ), id ); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, ValueMetaInterface.TYPE_STRING ), type_desc ); table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), type_desc_long ); if ( dryrun ) { String sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE, table.getRowMeta(), table .getData(), null ); statements.add( sql ); } else { database.prepareInsert( table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE ); database.setValuesInsert( table ); database.insertRow(); database.closeInsert(); } } } } } }
// Pixel Paint // John Irle // 21 March 2019 import javafx.application.Application; import javafx.scene.Scene; import javafx.scene.control.Button; import javafx.scene.control.ColorPicker; import javafx.scene.layout.*; import javafx.geometry.Insets; import javafx.scene.paint.Color; import javafx.stage.FileChooser; import javafx.scene.layout.HBox; import javafx.stage.Stage; import java.io.File; import java.io.PrintWriter; import java.util.Scanner; public class PixelPaint extends Application { private Pixel[][] grid = new Pixel[36][36]; static ColorPicker colorPicker = new ColorPicker(Color.BLACK); public void start(Stage primaryStage) { primaryStage.setResizable(false); GridPane pane = new GridPane(); pane.setPadding(new Insets(0, 15, 15, 15 )); // Initialize grid for (int i = 0; i < 32; i++) { for (int j = 0; j < 32; j++) { pane.add(grid[i][j] = new Pixel(), j, i); } } FileChooser fileChooser = new FileChooser(); fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("Text Files (.txt)", "*.txt")); Button btSave = new Button("Save"); Button btLoad = new Button("Load"); btSave.setOnMouseClicked(e -> { try { File file = fileChooser.showSaveDialog(primaryStage); PrintWriter f = new PrintWriter(file); for (int i = 0; i < 32; i++) { for (int j = 0; j < 32; j++) { Color current = grid[i][j].getColor(); f.print((int) (current.getRed() * 255) + " " + (int) (current.getGreen() * 255) + " " + (int) (current.getBlue() * 255) + '\n'); } } f.close(); } catch (Exception ex) { ex.printStackTrace(); } }); btLoad.setOnMouseClicked(e -> { File file = fileChooser.showOpenDialog(primaryStage); try { Scanner input = new Scanner(file); for (int i = 0; i < 32; i++) { for (int j = 0; j < 32; j++) { int R = input.nextInt(); int G = input.nextInt(); int B = input.nextInt(); Color color = Color.rgb(R, G, B); BackgroundFill backgroundFill = new BackgroundFill(color, CornerRadii.EMPTY, Insets.EMPTY); grid[i][j].setBackground(new Background(backgroundFill)); 
grid[i][j].setColor(color); } } } catch (Exception ex) { ex.printStackTrace(); } }); HBox controls = new HBox(); controls.setPadding(new Insets(5, 0, 5, 15)); controls.setSpacing(10); controls.getChildren().addAll(colorPicker, btSave, btLoad); BorderPane borderPane = new BorderPane(); borderPane.setTop(controls); borderPane.setCenter(pane); Scene scene = new Scene(borderPane, 800, 800); primaryStage.setTitle("Pixel Paint"); primaryStage.setScene(scene); primaryStage.show(); } public static void main(String[] args) { launch(args); } }
package com.adaptris.core.http.apache5; import com.adaptris.core.AdaptrisMessage; import com.adaptris.core.MetadataCollection; import com.adaptris.core.MetadataElement; import com.adaptris.core.http.client.RequestHeaderProvider; import com.adaptris.core.metadata.MetadataFilter; import com.adaptris.core.util.Args; import com.thoughtworks.xstream.annotations.XStreamAlias; import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase; import javax.validation.Valid; import javax.validation.constraints.NotNull; /** * Implementation of {@link RequestHeaderProvider} that applies {@link AdaptrisMessage} metadata as headers using a * {@link MetadataFilter}. * * @config apache-http-metadata-request-headers * */ @XStreamAlias("apache-http5-metadata-request-headers") public class MetadataRequestHeaders implements RequestHeaderProvider<HttpUriRequestBase> { @NotNull @Valid private MetadataFilter filter; public MetadataRequestHeaders() { } public MetadataRequestHeaders(MetadataFilter mf) { this(); setFilter(mf); } @Override public HttpUriRequestBase addHeaders(AdaptrisMessage msg, HttpUriRequestBase target) { MetadataCollection metadataSubset = getFilter().filter(msg); for (MetadataElement me : metadataSubset) { target.addHeader(me.getKey(), me.getValue()); } return target; } public MetadataFilter getFilter() { return filter; } public void setFilter(MetadataFilter filter) { this.filter = Args.notNull(filter, "metadata filter"); } }
/*******************************************************************************
 * Copyright 2014 NIFTY Corporation All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * *****************************************************************************
 *
 *  NIFTY Cloud SDK for Java
 *  API Version: 1.18
 *  Date: 2014-05-23 17:00:00
 *
 */
package com.nifty.cloud.sdk.sample;

import static java.lang.System.err;
import static java.lang.System.out;

import java.util.List;

import com.nifty.cloud.sdk.ClientConfiguration;
import com.nifty.cloud.sdk.NiftyClientException;
import com.nifty.cloud.sdk.Protocol;
import com.nifty.cloud.sdk.auth.BasicCredentials;
import com.nifty.cloud.sdk.auth.Credentials;
import com.nifty.cloud.sdk.dashboard.NiftyDashboardClient;
import com.nifty.cloud.sdk.dashboard.model.DescribeResourcesRequest;
import com.nifty.cloud.sdk.dashboard.model.DescribeResourcesResult;
import com.nifty.cloud.sdk.dashboard.model.ResourceInstance;
import com.nifty.cloud.sdk.dashboard.model.Resources;
import com.nifty.cloud.sdk.dashboard.model.ResourceElasticIp;

/**
 * DescribeResources sample.
 *
 */
public class DescribeResourcesSample {

    /**
     * Sample that executes DescribeResources.
     * Set the required parameters before running.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        /************************************************************************
         * Set the AccessKey and SecretAccessKey.
         * The keys are obtained from the NIFTY Cloud control panel.
         ***********************************************************************/
        String accessKey = "<Input Your AccessKey>";
        String secretKey = "<Input Your SecretAccessKey>";

        /************************************************************************
         * Create a credentials instance from the given AccessKey and SecretAccessKey.
         ***********************************************************************/
        Credentials credential = new BasicCredentials(accessKey, secretKey);

        /************************************************************************
         * Create a client configuration instance.
         * Set values below to change the configuration.
         * The following items can be configured:
         *  - user agent
         *  - protocol
         *  - proxy server address
         *  - proxy server port number
         *  - proxy user name
         *  - proxy password
         *  - maximum retry count
         *  - maximum number of connections
         *  - TCP socket timeout in seconds
         *  - connection timeout in seconds
         *  - send buffer size / receive buffer size
         *  - SignatureVersion
         *  - SignatureMethod
         *  - HTTP method
         ***********************************************************************/
        ClientConfiguration config = new ClientConfiguration();
        // config.setUserAgent("NIFTY Cloud API Java SDK");
        // config.setProtocol(Protocol.HTTPS);
        // config.setProxyHost("proxy_host");
        // config.setProxyPort(0);
        // config.setProxyUsername("proxy_username");
        // config.setProxyPassword("proxy_password");
        // config.setMaxErrorRetry(3);
        // config.setMaxConnections(50);
        // config.setSocketTimeout(30);
        // config.setConnectionTimeout(30);
        // config.setSocketSendBufferSizeHint(0, 0);
        // config.setSignatureVersion(SignatureVersion.Version_2);
        // config.setSignatureMethod(SignatureMethod.HmacSHA256);
        // config.setRequestMethod("GET");

        /************************************************************************
         * Create a dashboard-category client from the credentials and configuration.
         * Use the NiftyDashboardAsyncClient class for asynchronous execution.
         ***********************************************************************/
        NiftyDashboardClient client = new NiftyDashboardClient(credential, config);

        /************************************************************************
         * Specify the endpoint here to change the target URL.
         ***********************************************************************/
        // client.setEndpoint("endpoint_url");

        /************************************************************************
         * Set the request parameters and execute DescribeResources.
         ***********************************************************************/
        DescribeResourcesRequest request = new DescribeResourcesRequest();

        invokeDescribeResources(client, request);
    }

    /**
     * DescribeResources execution sample.
     * Executes DescribeResources with the given request and prints the result.
     *
     * @param client client
     * @param request request
     */
    public static void invokeDescribeResources(NiftyDashboardClient client, DescribeResourcesRequest request) {
        try {
            DescribeResourcesResult result = client.describeResources(request);

            out.println("DescribeResources Action Response");
            out.println("=============================================================================");
            Resources resource = result.getResource();
            // Every field is optional in the response, hence the null check per field.
            if (resource.getInstances() != null) {
                out.println("Instances");
                List<ResourceInstance> instances = resource.getInstances();
                for (ResourceInstance instance : instances) {
                    if (instance.getType() != null) {
                        out.println(" Type : " + instance.getType());
                    }
                    if (instance.getCount() != null) {
                        out.println(" Count : " + instance.getCount());
                    }
                }
            }
            if (resource.getDynamicIpCount() != null) {
                out.println("DynamicIpCount : " + resource.getDynamicIpCount());
            }
            if (resource.getCustomizeImageCount() != null) {
                out.println("CustomizeImageCount : " + resource.getCustomizeImageCount());
            }
            if (resource.getAddDiskCount() != null) {
                out.println("AddDiskCount : " + resource.getAddDiskCount());
            }
            if (resource.getAddDiskTotalSize() != null) {
                out.println("AddDiskTotalSize : " + resource.getAddDiskTotalSize());
            }
            if (resource.getNetworkFlowAmount() != null) {
                out.println("NetworkFlowAmount : " + resource.getNetworkFlowAmount());
            }
            if (resource.getSecurityGroupCount() != null) {
                out.println("SecurityGroupCount : " + resource.getSecurityGroupCount());
            }
            if (resource.getLoadBalancerCount() != null) {
                out.println("LoadBalancerCount : " + resource.getLoadBalancerCount());
            }
            if (resource.getElasticIps() != null) {
                out.println("ElasticIps");
                List<ResourceElasticIp> elasticIps = resource.getElasticIps();
                for (ResourceElasticIp elasticIp : elasticIps) {
                    if (elasticIp.getType() != null) {
                        out.println(" Type : " + elasticIp.getType());
                    }
                    if (elasticIp.getCount() != null) {
                        out.println(" Count : " + elasticIp.getCount());
                    }
                }
            }
            if (resource.getSslCertCount() != null) {
                out.println("SslCertCount : " + resource.getSslCertCount());
            }
            if (resource.getMonitoringRuleCount() != null) {
                out.println("MonitoringRuleCount : " + resource.getMonitoringRuleCount());
            }
            if (resource.getAutoScaleCount() != null) {
                out.println("AutoScaleCount : " + resource.getAutoScaleCount());
            }
            if (resource.getPrivateLanCount() != null) {
                out.println("PrivateLanCount : " + resource.getPrivateLanCount());
            }
            if (resource.getPremiumSupports() != null) {
                out.println("PremiumSupports");
                List<String> premiumSupports = resource.getPremiumSupports();
                for (String premiumSupport : premiumSupports) {
                    out.println(" SupportName : " + premiumSupport);
                }
            }
            out.println("=============================================================================");
        } catch (NiftyClientException ex) {
            err.println("Message: " + ex.getMessage());
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.platform; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteAtomicSequence; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.configuration.PlatformConfiguration; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.binary.BinaryRawReaderEx; import org.apache.ignite.internal.binary.BinaryRawWriterEx; import org.apache.ignite.internal.logger.platform.PlatformLogger; import org.apache.ignite.internal.processors.GridProcessorAdapter; import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; import org.apache.ignite.internal.processors.datastreamer.DataStreamerImpl; import org.apache.ignite.internal.processors.datastructures.GridCacheAtomicLongImpl; import org.apache.ignite.internal.processors.platform.binary.PlatformBinaryProcessor; import 
org.apache.ignite.internal.processors.platform.cache.PlatformCache; import org.apache.ignite.internal.processors.platform.cache.PlatformCacheExtension; import org.apache.ignite.internal.processors.platform.cache.affinity.PlatformAffinity; import org.apache.ignite.internal.processors.platform.cache.store.PlatformCacheStore; import org.apache.ignite.internal.processors.platform.cluster.PlatformClusterGroup; import org.apache.ignite.internal.processors.platform.compute.PlatformCompute; import org.apache.ignite.internal.processors.platform.datastreamer.PlatformDataStreamer; import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicLong; import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicReference; import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicSequence; import org.apache.ignite.internal.processors.platform.dotnet.PlatformDotNetCacheStore; import org.apache.ignite.internal.processors.platform.events.PlatformEvents; import org.apache.ignite.internal.processors.platform.memory.PlatformMemory; import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream; import org.apache.ignite.internal.processors.platform.messaging.PlatformMessaging; import org.apache.ignite.internal.processors.platform.services.PlatformServices; import org.apache.ignite.internal.processors.platform.transactions.PlatformTransactions; import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils; import org.apache.ignite.internal.processors.platform.utils.PlatformUtils; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteFuture; import org.jetbrains.annotations.Nullable; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; 
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * GridGain platform processor.
 *
 * Bridges the Java kernal and the native platform runtime: notifies the native
 * gateway on start/stop/reconnect, creates platform wrappers (cache, compute,
 * messaging, events, services, data structures, ...) and manages the lifecycle
 * of platform-side cache stores registered before/after processor start.
 */
@SuppressWarnings({"ConditionalExpressionWithIdenticalBranches", "unchecked"})
public class PlatformProcessorImpl extends GridProcessorAdapter implements PlatformProcessor {
    /** Start latch. */
    private final CountDownLatch startLatch = new CountDownLatch(1);

    /** Stores pending initialization. Stores registered before start() are parked here. */
    private final Collection<StoreInfo> pendingStores =
        Collections.newSetFromMap(new ConcurrentHashMap<StoreInfo, Boolean>());

    /** Lock for store lifecycle operations. */
    private final ReadWriteLock storeLock = new ReentrantReadWriteLock();

    /** Logger. */
    @SuppressWarnings("FieldCanBeLocal")
    private final IgniteLogger log;

    /** Context. */
    private final PlatformContext platformCtx;

    /** Interop configuration. */
    private final PlatformConfigurationEx interopCfg;

    /** Extensions. Indexed by extension ID (see prepareExtensions). */
    private final PlatformPluginExtension[] extensions;

    /** Whether processor is started. Guarded by storeLock. */
    private boolean started;

    /** Whether processor if stopped (or stopping). */
    private volatile boolean stopped;

    /** Cache extensions. Indexed by extension ID (see prepareCacheExtensions). */
    private final PlatformCacheExtension[] cacheExts;

    /** Cluster restart flag for the reconnect callback. */
    private volatile boolean clusterRestarted;

    /**
     * Constructor.
     *
     * @param ctx Kernal context.
     */
    public PlatformProcessorImpl(GridKernalContext ctx) {
        super(ctx);

        log = ctx.log(PlatformProcessorImpl.class);

        PlatformConfiguration interopCfg0 = ctx.config().getPlatformConfiguration();

        assert interopCfg0 != null : "Must be checked earlier during component creation.";

        if (!(interopCfg0 instanceof PlatformConfigurationEx))
            throw new IgniteException("Unsupported platform configuration: " + interopCfg0.getClass().getName());

        interopCfg = (PlatformConfigurationEx)interopCfg0;

        if (!F.isEmpty(interopCfg.warnings())) {
            for (String w : interopCfg.warnings())
                U.warn(log, w);
        }

        platformCtx = new PlatformContextImpl(ctx, interopCfg.gate(), interopCfg.memory(), interopCfg.platform());

        // Initialize cache extensions (if any).
        cacheExts = prepareCacheExtensions(interopCfg.cacheExtensions());

        if (interopCfg.logger() != null)
            interopCfg.logger().setContext(platformCtx);

        // Initialize extensions (if any).
        extensions = prepareExtensions(ctx.plugins().extensions(PlatformPluginExtension.class));
    }

    /** {@inheritDoc} */
    @Override public void start(boolean activeOnStart) throws IgniteCheckedException {
        // Hand the instance name to the native side via shared memory.
        try (PlatformMemory mem = platformCtx.memory().allocate()) {
            PlatformOutputStream out = mem.output();

            BinaryRawWriterEx writer = platformCtx.writer(out);

            writer.writeString(ctx.igniteInstanceName());

            out.synchronize();

            platformCtx.gateway().onStart(this, mem.pointer());
        }

        // At this moment all necessary native libraries must be loaded, so we can process with store creation.
        storeLock.writeLock().lock();

        try {
            for (StoreInfo store : pendingStores)
                registerStore0(store.store, store.convertBinary);

            pendingStores.clear();

            started = true;
        }
        finally {
            storeLock.writeLock().unlock();
        }

        // Add Interop node attributes.
        ctx.addNodeAttribute(PlatformUtils.ATTR_PLATFORM, interopCfg.platform());
    }

    /** {@inheritDoc} */
    @Override public void onKernalStop(boolean cancel) {
        startLatch.countDown();
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        if (platformCtx != null) {
            // Set the flag before notifying the gateway so registerStore() fails fast.
            stopped = true;

            platformCtx.gateway().onStop();
        }
    }

    /** {@inheritDoc} */
    @Override public Ignite ignite() {
        return ctx.grid();
    }

    /** {@inheritDoc} */
    @Override public long environmentPointer() {
        return platformCtx.gateway().environmentPointer();
    }

    /** {@inheritDoc} */
    @Override public void releaseStart() {
        startLatch.countDown();
    }

    /** {@inheritDoc} */
    @Override public void awaitStart() throws IgniteCheckedException {
        U.await(startLatch);
    }

    /** {@inheritDoc} */
    @Override public PlatformContext context() {
        return platformCtx;
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy cache(@Nullable String name) throws IgniteCheckedException {
        IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().cache(name);

        if (cache == null)
            throw new IllegalArgumentException("Cache doesn't exist: " + name);

        return createPlatformCache(cache);
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy createCache(@Nullable String name) throws IgniteCheckedException {
        IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createCache(name);

        assert cache != null;

        return createPlatformCache(cache);
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy getOrCreateCache(@Nullable String name) throws IgniteCheckedException {
        IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateCache(name);

        assert cache != null;

        return createPlatformCache(cache);
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy createCacheFromConfig(long memPtr) throws IgniteCheckedException {
        BinaryRawReaderEx reader = platformCtx.reader(platformCtx.memory().get(memPtr));
        CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader);

        // The boolean following the config indicates whether a near configuration was also serialized.
        IgniteCacheProxy cache = reader.readBoolean()
            ? (IgniteCacheProxy)ctx.grid().createCache(cfg, PlatformConfigurationUtils.readNearConfiguration(reader))
            : (IgniteCacheProxy)ctx.grid().createCache(cfg);

        return createPlatformCache(cache);
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy getOrCreateCacheFromConfig(long memPtr) throws IgniteCheckedException {
        BinaryRawReaderEx reader = platformCtx.reader(platformCtx.memory().get(memPtr));
        CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader);

        IgniteCacheProxy cache = reader.readBoolean()
            ? (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg,
                PlatformConfigurationUtils.readNearConfiguration(reader))
            : (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg);

        return createPlatformCache(cache);
    }

    /** {@inheritDoc} */
    @Override public void destroyCache(@Nullable String name) throws IgniteCheckedException {
        ctx.grid().destroyCache(name);
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy affinity(@Nullable String name) throws IgniteCheckedException {
        return proxy(new PlatformAffinity(platformCtx, ctx, name));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy dataStreamer(@Nullable String cacheName, boolean keepBinary)
        throws IgniteCheckedException {
        IgniteDataStreamer ldr = ctx.dataStream().dataStreamer(cacheName);

        // NOTE(review): keepBinary(true) is forced on the underlying loader regardless of the
        // keepBinary argument; the argument only reaches the platform wrapper — confirm intended.
        ldr.keepBinary(true);

        return proxy(new PlatformDataStreamer(platformCtx, cacheName, (DataStreamerImpl)ldr, keepBinary));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy transactions() {
        return proxy(new PlatformTransactions(platformCtx));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy projection() throws IgniteCheckedException {
        return proxy(new PlatformClusterGroup(platformCtx, ctx.grid().cluster()));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy compute(PlatformTargetProxy grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp.unwrap();

        return proxy(new PlatformCompute(platformCtx, grp0.projection(), PlatformUtils.ATTR_PLATFORM));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy message(PlatformTargetProxy grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp.unwrap();

        return proxy(new PlatformMessaging(platformCtx, grp0.projection().ignite().message(grp0.projection())));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy events(PlatformTargetProxy grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp.unwrap();

        return proxy(new PlatformEvents(platformCtx, grp0.projection().ignite().events(grp0.projection())));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy services(PlatformTargetProxy grp) {
        PlatformClusterGroup grp0 = (PlatformClusterGroup)grp.unwrap();

        return proxy(new PlatformServices(platformCtx, grp0.projection().ignite().services(grp0.projection()), false));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy extensions() {
        // Always null — individual extensions are resolved via extension(int) instead.
        return null;
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy extension(int id) {
        if (extensions != null && id < extensions.length) {
            PlatformPluginExtension ext = extensions[id];

            if (ext != null)
                return proxy(ext.createTarget());
        }

        throw new IgniteException("Platform extension is not registered [id=" + id + ']');
    }

    /** {@inheritDoc} */
    @Override public void registerStore(PlatformCacheStore store, boolean convertBinary)
        throws IgniteCheckedException {
        storeLock.readLock().lock();

        try {
            if (stopped)
                throw new IgniteCheckedException("Failed to initialize interop store because node is stopping: " +
                    store);

            // Before start() completes, stores are parked in pendingStores and
            // initialized later from start() under the write lock.
            if (started)
                registerStore0(store, convertBinary);
            else
                pendingStores.add(new StoreInfo(store, convertBinary));
        }
        finally {
            storeLock.readLock().unlock();
        }
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy atomicLong(String name, long initVal, boolean create) throws IgniteException {
        GridCacheAtomicLongImpl atomicLong = (GridCacheAtomicLongImpl)ignite().atomicLong(name, initVal, create);

        if (atomicLong == null)
            return null;

        return proxy(new PlatformAtomicLong(platformCtx, atomicLong));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy atomicSequence(String name, long initVal, boolean create)
        throws IgniteException {
        IgniteAtomicSequence atomicSeq = ignite().atomicSequence(name, initVal, create);

        if (atomicSeq == null)
            return null;

        return proxy(new PlatformAtomicSequence(platformCtx, atomicSeq));
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy atomicReference(String name, long memPtr, boolean create)
        throws IgniteException {
        PlatformAtomicReference ref = PlatformAtomicReference.createInstance(platformCtx, name, memPtr, create);

        return ref != null ? proxy(ref) : null;
    }

    /** {@inheritDoc} */
    @Override public void onDisconnected(IgniteFuture<?> reconnectFut) throws IgniteCheckedException {
        platformCtx.gateway().onClientDisconnected();

        // 1) onReconnected is called on all grid components.
        // 2) After all of grid components have completed their reconnection, reconnectFut is completed.
        reconnectFut.listen(new CI1<IgniteFuture<?>>() {
            @Override public void apply(IgniteFuture<?> future) {
                platformCtx.gateway().onClientReconnected(clusterRestarted);
            }
        });
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<?> onReconnected(boolean clusterRestarted) throws IgniteCheckedException {
        // Save the flag value for callback of reconnectFut.
        this.clusterRestarted = clusterRestarted;

        return null;
    }

    /** {@inheritDoc} */
    @Override public void getIgniteConfiguration(long memPtr) {
        PlatformOutputStream stream = platformCtx.memory().get(memPtr).output();
        BinaryRawWriterEx writer = platformCtx.writer(stream);

        PlatformConfigurationUtils.writeIgniteConfiguration(writer, ignite().configuration());

        stream.synchronize();
    }

    /** {@inheritDoc} */
    @Override public void getCacheNames(long memPtr) {
        PlatformOutputStream stream = platformCtx.memory().get(memPtr).output();
        BinaryRawWriterEx writer = platformCtx.writer(stream);

        Collection<String> names = ignite().cacheNames();

        writer.writeInt(names.size());

        for (String name : names)
            writer.writeString(name);

        stream.synchronize();
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy createNearCache(@Nullable String cacheName, long memPtr) {
        NearCacheConfiguration cfg = getNearCacheConfiguration(memPtr);

        IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createNearCache(cacheName, cfg);

        return createPlatformCache(cache);
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy getOrCreateNearCache(@Nullable String cacheName, long memPtr) {
        NearCacheConfiguration cfg = getNearCacheConfiguration(memPtr);

        IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateNearCache(cacheName, cfg);

        return createPlatformCache(cache);
    }

    /**
     * Creates new platform cache.
     */
    private PlatformTargetProxy createPlatformCache(IgniteCacheProxy cache) {
        return proxy(new PlatformCache(platformCtx, cache, false, cacheExts));
    }

    /** {@inheritDoc} */
    @Override public boolean loggerIsLevelEnabled(int level) {
        IgniteLogger log = ctx.grid().log();

        switch (level) {
            case PlatformLogger.LVL_TRACE:
                return log.isTraceEnabled();
            case PlatformLogger.LVL_DEBUG:
                return log.isDebugEnabled();
            case PlatformLogger.LVL_INFO:
                return log.isInfoEnabled();
            case PlatformLogger.LVL_WARN:
                return true;
            case PlatformLogger.LVL_ERROR:
                return true;
            default:
                assert false;
        }

        return false;
    }

    /** {@inheritDoc} */
    @Override public void loggerLog(int level, String message, String category, String errorInfo) {
        IgniteLogger log = ctx.grid().log();

        if (category != null)
            log = log.getLogger(category);

        Throwable err = errorInfo == null ? null : new IgniteException("Platform error:" + errorInfo);

        switch (level) {
            case PlatformLogger.LVL_TRACE:
                log.trace(message);
                break;
            case PlatformLogger.LVL_DEBUG:
                log.debug(message);
                break;
            case PlatformLogger.LVL_INFO:
                log.info(message);
                break;
            case PlatformLogger.LVL_WARN:
                log.warning(message, err);
                break;
            case PlatformLogger.LVL_ERROR:
                log.error(message, err);
                break;
            default:
                assert false;
        }
    }

    /** {@inheritDoc} */
    @Override public PlatformTargetProxy binaryProcessor() {
        return proxy(new PlatformBinaryProcessor(platformCtx));
    }

    /**
     * Gets the near cache config.
     *
     * @param memPtr Memory pointer.
     * @return Near config.
     */
    private NearCacheConfiguration getNearCacheConfiguration(long memPtr) {
        assert memPtr != 0;

        BinaryRawReaderEx reader = platformCtx.reader(platformCtx.memory().get(memPtr));

        return PlatformConfigurationUtils.readNearConfiguration(reader);
    }

    /**
     * Internal store initialization routine.
     *
     * @param store Store.
     * @param convertBinary Convert binary flag.
     * @throws IgniteCheckedException If failed.
     */
    private void registerStore0(PlatformCacheStore store, boolean convertBinary) throws IgniteCheckedException {
        if (store instanceof PlatformDotNetCacheStore) {
            PlatformDotNetCacheStore store0 = (PlatformDotNetCacheStore)store;

            store0.initialize(ctx, convertBinary);
        }
        else
            throw new IgniteCheckedException("Unsupported interop store: " + store);
    }

    /**
     * Prepare cache extensions.
     *
     * Builds a sparse array indexed by extension ID; duplicate or negative IDs are rejected.
     *
     * @param cacheExts Original extensions.
     * @return Prepared extensions.
     */
    private static PlatformCacheExtension[] prepareCacheExtensions(Collection<PlatformCacheExtension> cacheExts) {
        if (!F.isEmpty(cacheExts)) {
            int maxExtId = 0;

            Map<Integer, PlatformCacheExtension> idToExt = new HashMap<>();

            for (PlatformCacheExtension cacheExt : cacheExts) {
                if (cacheExt == null)
                    throw new IgniteException("Platform cache extension cannot be null.");

                if (cacheExt.id() < 0)
                    throw new IgniteException("Platform cache extension ID cannot be negative: " + cacheExt);

                PlatformCacheExtension oldCacheExt = idToExt.put(cacheExt.id(), cacheExt);

                if (oldCacheExt != null)
                    throw new IgniteException("Platform cache extensions cannot have the same ID [" +
                        "id=" + cacheExt.id() + ", first=" + oldCacheExt + ", second=" + cacheExt + ']');

                if (cacheExt.id() > maxExtId)
                    maxExtId = cacheExt.id();
            }

            PlatformCacheExtension[] res = new PlatformCacheExtension[maxExtId + 1];

            for (PlatformCacheExtension cacheExt : cacheExts)
                res[cacheExt.id()]= cacheExt;

            return res;
        }
        else
            //noinspection ZeroLengthArrayAllocation
            return new PlatformCacheExtension[0];
    }

    /**
     * Prepare extensions.
     *
     * Same ID-indexing scheme as prepareCacheExtensions, for plugin extensions.
     *
     * @param exts Original extensions.
     * @return Prepared extensions.
     */
    private static PlatformPluginExtension[] prepareExtensions(PlatformPluginExtension[] exts) {
        if (!F.isEmpty(exts)) {
            int maxExtId = 0;

            Map<Integer, PlatformPluginExtension> idToExt = new HashMap<>();

            for (PlatformPluginExtension ext : exts) {
                if (ext == null)
                    throw new IgniteException("Platform extension cannot be null.");

                if (ext.id() < 0)
                    throw new IgniteException("Platform extension ID cannot be negative: " + ext);

                PlatformPluginExtension oldCacheExt = idToExt.put(ext.id(), ext);

                if (oldCacheExt != null)
                    throw new IgniteException("Platform extensions cannot have the same ID [" +
                        "id=" + ext.id() + ", first=" + oldCacheExt + ", second=" + ext + ']');

                if (ext.id() > maxExtId)
                    maxExtId = ext.id();
            }

            PlatformPluginExtension[] res = new PlatformPluginExtension[maxExtId + 1];

            for (PlatformPluginExtension ext : exts)
                res[ext.id()]= ext;

            return res;
        }
        else
            //noinspection ZeroLengthArrayAllocation
            return new PlatformPluginExtension[0];
    }

    /**
     * Wraps target in a proxy.
     */
    private PlatformTargetProxy proxy(PlatformTarget target) {
        return new PlatformTargetProxyImpl(target, platformCtx);
    }

    /**
     * Store and manager pair.
     */
    private static class StoreInfo {
        /** Store. */
        private final PlatformCacheStore store;

        /** Convert binary flag. */
        private final boolean convertBinary;

        /**
         * Constructor.
         *
         * @param store Store.
         * @param convertBinary Convert binary flag.
         */
        private StoreInfo(PlatformCacheStore store, boolean convertBinary) {
            this.store = store;
            this.convertBinary = convertBinary;
        }
    }
}
package gov.healthit.chpl.scheduler.job.xmlgenerator;

import java.util.List;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

import gov.healthit.chpl.domain.compliance.DirectReviewNonConformity;

/**
 * Serializes {@link DirectReviewNonConformity} objects to XML via StAX.
 */
public class DirectReviewNonConformityXmlGenerator extends XmlGenerator {

    /**
     * Writes a list of non-conformities under {@code rootNodeName}.
     * Writes nothing when the list is null (an empty list still produces the wrapper element).
     *
     * @param nonconformities items to serialize; may be null
     * @param rootNodeName name of the wrapping element
     * @param sw destination writer
     * @throws XMLStreamException on any writer failure
     */
    public static void add(List<DirectReviewNonConformity> nonconformities, String rootNodeName, XMLStreamWriter sw)
            throws XMLStreamException {
        if (nonconformities != null) {
            sw.writeStartElement(rootNodeName);
            for (DirectReviewNonConformity nonconformity : nonconformities) {
                add(nonconformity, "nonConformity", sw);
            }
            sw.writeEndElement();
        }
    }

    /**
     * Writes a single non-conformity under {@code rootNodeName}.
     *
     * @param nonconformity item to serialize; must not be null
     * @param rootNodeName name of the wrapping element
     * @param sw destination writer
     * @throws XMLStreamException on any writer failure
     */
    public static void add(DirectReviewNonConformity nonconformity, String rootNodeName, XMLStreamWriter sw)
            throws XMLStreamException {
        sw.writeStartElement(rootNodeName);
        // Date-typed fields are rendered via their toString(); the repeated
        // null-check ternaries were factored into asString().
        createSimpleElement(asString(nonconformity.getCapApprovalDate()), "capApprovalDate", sw);
        createSimpleElement(asString(nonconformity.getCapEndDate()), "capEndDate", sw);
        createSimpleElement(asString(nonconformity.getCapMustCompleteDate()), "capMustCompleteDate", sw);
        createSimpleElement(asString(nonconformity.getCapStartDate()), "capStartDate", sw);
        createSimpleElement(nonconformity.getCreated(), "created", sw);
        createSimpleElement(asString(nonconformity.getDateOfDetermination()), "dateOfDetermination", sw);
        DeveloperAssociatedListingXmlGenerator.add(nonconformity.getDeveloperAssociatedListings(),
                "developerAssociatedListings", sw);
        createSimpleElement(nonconformity.getLastUpdated(), "lastUpdated", sw);
        createSimpleElement(nonconformity.getNonConformityFindings(), "nonConformityFindings", sw);
        createSimpleElement(nonconformity.getNonConformityStatus(), "nonConformityStatus", sw);
        createSimpleElement(nonconformity.getNonConformitySummary(), "nonConformitySummary", sw);
        createSimpleElement(nonconformity.getNonConformityType(), "nonConformityType", sw);
        createSimpleElement(nonconformity.getRequirement(), "requirement", sw);
        createSimpleElement(nonconformity.getResolution(), "resolution", sw);
        sw.writeEndElement();
    }

    /**
     * Null-safe toString: returns {@code null} for a null value, preserving the original
     * "omit element content when absent" behavior.
     */
    private static String asString(Object value) {
        return value == null ? null : value.toString();
    }
}
/*
 * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ram.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.ram.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * InvalidStateTransitionException JSON Unmarshaller
 *
 * NOTE: generated code (aws-java-sdk-code-generator) — do not hand-edit; regenerate instead.
 * The exception carries no modeled members, so unmarshalling only consumes the JSON subtree
 * and returns an empty exception instance.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InvalidStateTransitionExceptionUnmarshaller extends EnhancedJsonErrorUnmarshaller {

    private InvalidStateTransitionExceptionUnmarshaller() {
        super(com.amazonaws.services.ram.model.InvalidStateTransitionException.class, "InvalidStateTransitionException");
    }

    /**
     * Consumes the JSON object for this error from the parsing context and returns a new
     * exception instance, or null when the current token is a JSON null.
     */
    @Override
    public com.amazonaws.services.ram.model.InvalidStateTransitionException unmarshallFromContext(JsonUnmarshallerContext context) throws Exception {
        com.amazonaws.services.ram.model.InvalidStateTransitionException invalidStateTransitionException = new com.amazonaws.services.ram.model.InvalidStateTransitionException(
                null);

        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            return null;
        }

        // Walk tokens until this object's subtree is fully consumed. No fields are
        // extracted (the FIELD_NAME/START_OBJECT branch is intentionally empty because
        // the exception has no modeled members); the END_* branch detects when we have
        // climbed back to the depth at which parsing started.
        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
            } else if (token == END_ARRAY || token == END_OBJECT) {
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return invalidStateTransitionException;
    }

    private static InvalidStateTransitionExceptionUnmarshaller instance;

    // Lazily-initialized singleton accessor; not synchronized (generated-code convention —
    // worst case under races is a redundant, stateless instance).
    public static InvalidStateTransitionExceptionUnmarshaller getInstance() {
        if (instance == null)
            instance = new InvalidStateTransitionExceptionUnmarshaller();
        return instance;
    }
}
package com.cisco.axl.api._8;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the AXL {@code LCredentialPolicy} complex type.
 *
 * <p>All elements are optional (schema {@code minOccurs="0"}) and are carried as raw
 * strings exactly as they appear on the wire; numeric and boolean fields are not
 * converted. The {@code uuid} value is an XML attribute rather than an element.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LCredentialPolicy", propOrder = {
    "name",
    "failedLogon",
    "resetFailedLogonAttempts",
    "lockoutDuration",
    "credChangeDuration",
    "credExpiresAfter",
    "minCredLength",
    "prevCredStoredNum",
    "inactiveDaysAllowed",
    "expiryWarningDays",
    "trivialCredCheck"
})
public class LCredentialPolicy {

    /** Policy name (schema type String255). */
    protected String name;
    /** Number of failed logons before lockout (schema type XInteger). */
    protected String failedLogon;
    /** Minutes after which the failed-logon counter resets (XInteger). */
    protected String resetFailedLogonAttempts;
    /** Lockout duration in minutes (XInteger). */
    protected String lockoutDuration;
    /** Minimum time between credential changes (XInteger). */
    protected String credChangeDuration;
    /** Credential lifetime before expiry (XInteger). */
    protected String credExpiresAfter;
    /** Minimum credential length (XInteger). */
    protected String minCredLength;
    /** Number of previous credentials kept to prevent reuse (XInteger). */
    protected String prevCredStoredNum;
    /** Allowed days of inactivity (XInteger). */
    protected String inactiveDaysAllowed;
    /** Days of warning before expiry (XInteger). */
    protected String expiryWarningDays;
    /** Whether trivial credentials are rejected (schema type boolean). */
    protected String trivialCredCheck;
    /** Record identifier (schema type XUUID), mapped to an XML attribute. */
    @XmlAttribute
    protected String uuid;

    /** @return the policy name, or null when absent */
    public String getName() {
        return name;
    }

    /** @param value the policy name (may be null) */
    public void setName(String value) {
        this.name = value;
    }

    /** @return the failed-logon threshold, or null when absent */
    public String getFailedLogon() {
        return failedLogon;
    }

    /** @param value the failed-logon threshold (may be null) */
    public void setFailedLogon(String value) {
        this.failedLogon = value;
    }

    /** @return the failed-logon reset interval, or null when absent */
    public String getResetFailedLogonAttempts() {
        return resetFailedLogonAttempts;
    }

    /** @param value the failed-logon reset interval (may be null) */
    public void setResetFailedLogonAttempts(String value) {
        this.resetFailedLogonAttempts = value;
    }

    /** @return the lockout duration, or null when absent */
    public String getLockoutDuration() {
        return lockoutDuration;
    }

    /** @param value the lockout duration (may be null) */
    public void setLockoutDuration(String value) {
        this.lockoutDuration = value;
    }

    /** @return the minimum credential-change interval, or null when absent */
    public String getCredChangeDuration() {
        return credChangeDuration;
    }

    /** @param value the minimum credential-change interval (may be null) */
    public void setCredChangeDuration(String value) {
        this.credChangeDuration = value;
    }

    /** @return the credential expiry period, or null when absent */
    public String getCredExpiresAfter() {
        return credExpiresAfter;
    }

    /** @param value the credential expiry period (may be null) */
    public void setCredExpiresAfter(String value) {
        this.credExpiresAfter = value;
    }

    /** @return the minimum credential length, or null when absent */
    public String getMinCredLength() {
        return minCredLength;
    }

    /** @param value the minimum credential length (may be null) */
    public void setMinCredLength(String value) {
        this.minCredLength = value;
    }

    /** @return the stored-previous-credentials count, or null when absent */
    public String getPrevCredStoredNum() {
        return prevCredStoredNum;
    }

    /** @param value the stored-previous-credentials count (may be null) */
    public void setPrevCredStoredNum(String value) {
        this.prevCredStoredNum = value;
    }

    /** @return the allowed inactive days, or null when absent */
    public String getInactiveDaysAllowed() {
        return inactiveDaysAllowed;
    }

    /** @param value the allowed inactive days (may be null) */
    public void setInactiveDaysAllowed(String value) {
        this.inactiveDaysAllowed = value;
    }

    /** @return the expiry warning days, or null when absent */
    public String getExpiryWarningDays() {
        return expiryWarningDays;
    }

    /** @param value the expiry warning days (may be null) */
    public void setExpiryWarningDays(String value) {
        this.expiryWarningDays = value;
    }

    /** @return the trivial-credential-check flag, or null when absent */
    public String getTrivialCredCheck() {
        return trivialCredCheck;
    }

    /** @param value the trivial-credential-check flag (may be null) */
    public void setTrivialCredCheck(String value) {
        this.trivialCredCheck = value;
    }

    /** @return the record UUID attribute, or null when absent */
    public String getUuid() {
        return uuid;
    }

    /** @param value the record UUID attribute (may be null) */
    public void setUuid(String value) {
        this.uuid = value;
    }
}
package ca.itinerum.android; import android.annotation.SuppressLint; import android.os.Bundle; import android.support.v4.app.DialogFragment; import android.support.v7.widget.AppCompatTextView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import com.facebook.drawee.backends.pipeline.Fresco; import com.facebook.drawee.view.SimpleDraweeView; import com.facebook.imagepipeline.request.ImageRequestBuilder; import butterknife.BindView; import butterknife.ButterKnife; import ca.itinerum.android.BuildConfig; import ca.itinerum.android.R; /** * Created by stewjacks on 2016-08-23. */ public class AboutDialog extends DialogFragment { @BindView(R.id.message) AppCompatTextView mMessage; @BindView(R.id.version) AppCompatTextView mVersion; @BindView(R.id.brand_logo) ImageView mBrandLogo; @BindView(R.id.avatar) SimpleDraweeView mAvatar; private String mMessageText; @SuppressLint("StringFormatInvalid") @Override public void onStart() { super.onStart(); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.dialog_about, container, false); ButterKnife.bind(this, v); Bundle bundle = getArguments(); if (bundle.containsKey("local_image")) { mAvatar.setController(Fresco.newDraweeControllerBuilder() .setImageRequest( ImageRequestBuilder.newBuilderWithResourceId(bundle.getInt("local_image")) .build()) .build()); } else mAvatar.setImageURI(bundle.getString("remote_image")); mBrandLogo.setVisibility(bundle.getBoolean("show_brand", false) ? View.VISIBLE : View.GONE); mMessageText = bundle.getString("message"); mMessage.setText(mMessageText); mVersion.setText(String.format(getString(R.string.about_version), BuildConfig.VERSION_NAME)); return v; } }
package com.blunderer.materialdesignlibrary.activities;

import android.content.Intent;
import android.os.Bundle;
import android.support.v4.view.ViewPager;
import android.view.View;

import com.blunderer.materialdesignlibrary.R;
import com.blunderer.materialdesignlibrary.adapters.ViewPagerAdapter;
import com.blunderer.materialdesignlibrary.handlers.ViewPagerHandler;
import com.blunderer.materialdesignlibrary.models.ViewPagerItem;
import com.blunderer.materialdesignlibrary.views.ToolbarSearch;
import com.viewpagerindicator.CirclePageIndicator;

import java.util.List;

/**
 * Base activity that hosts a {@link ViewPager} populated from the subclass-provided
 * {@link ViewPagerHandler}. Optionally shows a circle page indicator and mirrors the
 * current page's title into the action bar.
 */
public abstract class ViewPagerActivity extends AActivity implements
        com.blunderer.materialdesignlibrary.interfaces.ViewPager {

    protected ViewPager mViewPager;
    protected CirclePageIndicator mViewPagerIndicator;
    // Pages supplied by getViewPagerHandler(); null when the handler provides none.
    private List<ViewPagerItem> mViewPagerItems;

    // Keeps the action bar title in sync with the selected page.
    private final ViewPager.OnPageChangeListener mOnPageChangeListener = new ViewPager
            .OnPageChangeListener() {

        @Override
        public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        }

        @Override
        public void onPageSelected(int position) {
            replaceTitle(mViewPagerItems.get(position).getTitle());
        }

        @Override
        public void onPageScrollStateChanged(int state) {
        }

    };

    /**
     * Routes activity results: toolbar-search results go to the superclass; anything else
     * is forwarded to the fragment of the currently visible page (fragments nested in a
     * pager do not receive results automatically).
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == ToolbarSearch.SEARCH_REQUEST_CODE) {
            super.onActivityResult(requestCode, resultCode, data);
        } else if (mViewPagerItems != null && mViewPagerItems.size() > 0 && mViewPager != null) {
            int tabPosition = mViewPager.getCurrentItem();
            if (tabPosition >= 0 && tabPosition < mViewPagerItems.size()) {
                mViewPagerItems.get(tabPosition).getFragment()
                        .onActivityResult(requestCode, resultCode, data);
            }
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState, R.layout.mdl_activity_view_pager);

        ViewPagerHandler handler = getViewPagerHandler();
        if (handler != null && handler.getViewPagerItems() != null) {
            mViewPagerItems = handler.getViewPagerItems();
        }

        if (mViewPagerItems != null && mViewPagerItems.size() > 0) {
            mViewPager = (ViewPager) findViewById(R.id.viewpager);
            mViewPager.setAdapter(new ViewPagerAdapter(getSupportFragmentManager(),
                    mViewPagerItems));

            int defaultViewPagerItemSelectedPosition = defaultViewPagerPageSelectedPosition();
            if (defaultViewPagerItemSelectedPosition >= 0
                    && defaultViewPagerItemSelectedPosition < mViewPagerItems.size()) {
                mViewPager.setCurrentItem(defaultViewPagerItemSelectedPosition);
            } else defaultViewPagerItemSelectedPosition = 0;
            // Out-of-range default falls back to position 0 without calling
            // setCurrentItem — the pager already starts at page 0.

            showIndicator(mViewPager);

            replaceTitle(mViewPagerItems
                    .get(defaultViewPagerItemSelectedPosition).getTitle());
        }
    }

    // Attaches the page-change listener either directly to the pager or through the
    // indicator (the indicator must own the listener so it can also observe paging).
    // NOTE(review): setOnPageChangeListener is deprecated in later support libraries in
    // favor of addOnPageChangeListener — confirm against the support-lib version in use.
    private void showIndicator(ViewPager pager) {
        if (!showViewPagerIndicator()) {
            pager.setOnPageChangeListener(mOnPageChangeListener);
        } else {
            mViewPagerIndicator = (CirclePageIndicator) findViewById(R.id.viewpagerindicator);
            mViewPagerIndicator.setViewPager(pager);
            mViewPagerIndicator.setVisibility(View.VISIBLE);
            mViewPagerIndicator.setOnPageChangeListener(mOnPageChangeListener);
        }
    }

    // Updates the action bar title when the subclass opted in.
    private void replaceTitle(String title) {
        if (replaceActionBarTitleByViewPagerPageTitle()) getSupportActionBar().setTitle(title);
    }

    /** @return whether the circle page indicator should be shown. */
    public abstract boolean showViewPagerIndicator();

    /** @return whether the action bar title should follow the selected page's title. */
    public abstract boolean replaceActionBarTitleByViewPagerPageTitle();

}
package org.jzy3d.debugGL; import org.jzy3d.analysis.AWTAbstractAnalysis; import org.jzy3d.analysis.AnalysisLauncher; import org.jzy3d.chart.factories.AWTChartFactory; import org.jzy3d.colors.Color; import org.jzy3d.colors.ColorMapper; import org.jzy3d.colors.colormaps.ColorMapRainbow; import org.jzy3d.debugGL.tracers.DebugGLChart2d; import org.jzy3d.debugGL.tracers.DebugGLChart3d; import org.jzy3d.maths.Range; import org.jzy3d.maths.Rectangle; import org.jzy3d.plot3d.builder.Mapper; import org.jzy3d.plot3d.builder.SurfaceBuilder; import org.jzy3d.plot3d.primitives.Shape; import org.jzy3d.plot3d.rendering.canvas.Quality; public class DebugGL_Demo extends AWTAbstractAnalysis { public static void main(String[] args) throws Exception { DebugGL_Demo d = new DebugGL_Demo(); AnalysisLauncher.open(d, new Rectangle(300, 0, 800, 800)); DebugGLChart3d debugChart = new DebugGLChart3d(d.getChart(), new AWTChartFactory()); debugChart.open(new Rectangle(0, 0, 300, 300)); DebugGLChart2d debugChart2d = new DebugGLChart2d(d.getChart()); /* * debugChart2d.watch("near", Color.RED, c->c.getView().getCamera().getNear()); * debugChart2d.watch("far", Color.BLUE, c->c.getView().getCamera().getFar()); * debugChart2d.watch("radius", Color.GREEN, * c->c.getView().getCamera().getRenderingSphereRadius()); */ debugChart2d.watch("viewpoint.x", Color.RED, c -> c.getView().getViewPoint().x); debugChart2d.watch("viewpoint.y", Color.BLUE, c -> c.getView().getViewPoint().y); debugChart2d.watch("viewpoint.z", Color.GREEN, c -> c.getView().getViewPoint().z); debugChart2d.open(new Rectangle(0, 300, 300, 300)); } public DebugGL_Demo() {} @Override public void init() { // Define a function to plot Mapper mapper = new Mapper() { @Override public double f(double x, double y) { return x * Math.sin(x * y); } }; // Define range and precision for the function to plot Range range = new Range(-3, 3); int steps = 80; // Create the object to represent the function over the given range. 
final Shape surface = new SurfaceBuilder().orthonormal(mapper, range, steps); surface.setColorMapper(new ColorMapper(new ColorMapRainbow(), surface.getBounds().getZmin(), surface.getBounds().getZmax(), new Color(1, 1, 1, .5f))); surface.setFaceDisplayed(true); surface.setWireframeDisplayed(false); // Create a chart chart = initializeChart(Quality.Advanced); chart.getScene().getGraph().add(surface); chart.addKeyboardCameraController(); } }
package com.liyun.qa.edu.appium; import io.appium.java_client.android.AndroidDriver; import org.openqa.selenium.remote.DesiredCapabilities; import org.testng.annotations.BeforeClass; import java.net.MalformedURLException; import java.net.URL; /** * Demo 应用测试 * * @author Li Yun * @date 2020/8/4 10:28 */ public class DemoTest { private AndroidDriver driver; @BeforeClass public void setUp() throws MalformedURLException { DesiredCapabilities desiredCapabilities = new DesiredCapabilities(); desiredCapabilities.setCapability("platformName", "Android"); desiredCapabilities.setCapability("deviceName", "华为畅享"); desiredCapabilities.setCapability("appPackage", "com.liyun.android.demo"); desiredCapabilities.setCapability("appActivity", ".activity.ReceiveActivity"); desiredCapabilities.setCapability("unicodeKeyboard", "true"); desiredCapabilities.setCapability("resetKeyboard", "true"); URL remoteUrl = new URL("http://localhost:4723/wd/hub"); driver = new AndroidDriver(remoteUrl, desiredCapabilities); } }
package com.example.bozhilun.android.siswatch.view;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.os.Handler;
import android.os.Message;
import android.util.AttributeSet;
import android.view.View;

import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;

/**
 * Created by Administrator on 2017/10/28.
 *
 * Animated double-wave view (used on the login screen): two mirrored quad-Bezier
 * waves scroll in opposite directions, driven by a {@link Timer} posting to a
 * {@link Handler} that advances the offsets and invalidates the view.
 */
public class LoginWaveView extends View {

    private Paint mLeftPaint;
    private Paint mRightPaint;

    Path leftPath;
    Path rightPath;

    // Control/anchor points of each wave.
    ArrayList<Point> mLeftPoints;
    ArrayList<Point> mRightPoints;

    // Width and height of one full wave.
    float waveWidth;
    float waveHeight;

    // View dimensions.
    float viewWidth;
    float viewHeight;

    // Baseline: half the view height.
    float levelHeight;

    // Accumulated scroll offsets (wrap at waveWidth).
    float leftTotalLen;
    float rightTotalLen;

    // Per-tick scroll increments; the two waves move at different speeds.
    float leftMoveLen = 1.0f;
    float rightMoveLen = 0.5f;

    // Tick interval in ms — smaller is faster.
    private long speed = 10;

    private boolean isMeasured = false;
    private Task mTask;
    private Timer mTimer;

    @SuppressLint("HandlerLeak")
    private Handler handler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            // Advance both offsets, wrapping at one wave width, then redraw.
            leftTotalLen += leftMoveLen;
            if (leftTotalLen > waveWidth) {
                leftTotalLen = 0;
            }
            rightTotalLen += rightMoveLen;
            if (rightTotalLen > waveWidth) {
                rightTotalLen = 0;
            }
            invalidate();
        }
    };

    public LoginWaveView(Context context) {
        super(context);
        init();
    }

    public LoginWaveView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public LoginWaveView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    // One-time paint/path/point-list setup shared by all constructors.
    private void init() {
        mLeftPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mLeftPaint.setStyle(Paint.Style.FILL_AND_STROKE);
        mLeftPaint.setColor(Color.parseColor("#1ebae3"));

        mRightPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mRightPaint.setStyle(Paint.Style.FILL_AND_STROKE);
        mRightPaint.setColor(Color.parseColor("#1ebae3"));

        leftPath = new Path();
        rightPath = new Path();

        mLeftPoints = new ArrayList<>();
        mRightPoints = new ArrayList<>();
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        if (!isMeasured) {
            isMeasured = true;
            viewHeight = getMeasuredHeight();
            viewWidth = getMeasuredWidth();

            levelHeight = viewHeight / 2;
            // One wave spans the full view width.
            // NOTE(review): the original comment claimed the wave is 4x the view width,
            // but the code uses viewWidth * 1.0f — the comment was stale.
            waveWidth = viewWidth * 1.0f;
            // Wave amplitude is a third of the baseline height.
            waveHeight = levelHeight / 3f;

            System.out.println("lh " + levelHeight + "wh " + waveHeight + "ww " + waveWidth);

            float x = 0;
            float y = 0;
            // Nine points: odd indices sit on the baseline, even indices alternate
            // above/below it by twice the amplitude; x advances by a quarter wave.
            for (int i = 1; i <= 9; i++) {
                if (i % 2 == 1) {
                    y = levelHeight;
                } else {
                    if (i % 4 == 0) {
                        y = levelHeight + waveHeight * 2;
                    } else {
                        y = levelHeight - waveHeight * 2;
                    }
                }
                x = waveWidth / 4 * (i - 1);
                mLeftPoints.add(new Point(x, y));
            }

            // Second wave: same shape shifted one wave width to the left.
            for (Point point : mLeftPoints) {
                Point rightPoint = new Point(point.x - waveWidth, point.y);
                mRightPoints.add(rightPoint);
            }
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // Left wave: drop to the fill bottom, rise to the baseline, trace four
        // quadratic segments, then close the filled region.
        leftPath.reset();
        leftPath.moveTo(mLeftPoints.get(0).x - leftTotalLen, levelHeight + waveHeight * 3);
        leftPath.lineTo(mLeftPoints.get(0).x - leftTotalLen, levelHeight);
        for (int i = 0; i < 4; i++) {
            leftPath.quadTo(mLeftPoints.get(1 + 2 * i).x - leftTotalLen,
                    mLeftPoints.get(1 + 2 * i).y,
                    mLeftPoints.get(2 + 2 * i).x - leftTotalLen,
                    mLeftPoints.get(2 + 2 * i).y);
        }
        leftPath.lineTo(mLeftPoints.get(8).x - leftTotalLen, levelHeight + waveHeight * 3);
        leftPath.close();
        canvas.drawPath(leftPath, mLeftPaint);

        // Right wave: same construction, scrolled in the opposite direction.
        rightPath.reset();
        rightPath.moveTo(mRightPoints.get(0).x + rightTotalLen, levelHeight + waveHeight * 3);
        rightPath.lineTo(mRightPoints.get(0).x + rightTotalLen, levelHeight);
        for (int i = 0; i < 4; i++) {
            rightPath.quadTo(mRightPoints.get(1 + 2 * i).x + rightTotalLen,
                    mRightPoints.get(1 + 2 * i).y,
                    mRightPoints.get(2 + 2 * i).x + rightTotalLen,
                    mRightPoints.get(2 + 2 * i).y);
        }
        rightPath.lineTo(mRightPoints.get(8).x + rightTotalLen, levelHeight + waveHeight * 3);
        rightPath.close();
        canvas.drawPath(rightPath, mRightPaint);
    }

    /**
     * Starts the wave animation. Callers must eventually call {@link #stopMove()}
     * (detach now also stops it automatically).
     */
    public void startMove() {
        try {
            if (mTimer != null) {
                mTimer.cancel();
                mTimer = null;
            }
            if (mTask != null) {
                mTask.cancel();
                mTask = null;
            }
            mTimer = new Timer();
            mTask = new Task(handler);
            mTimer.schedule(mTask, 0, speed);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Stops the wave animation and releases the timer. */
    public void stopMove() {
        if (mTimer != null) {
            mTimer.cancel();
            mTimer = null;
        }
        if (mTask != null) {
            mTask.cancel();
            mTask = null;
        }
    }

    /**
     * BUG FIX: the Timer kept running (and posting to the Handler) after the view left
     * the window, leaking the timer thread and the view. Stop it on detach.
     */
    @Override
    protected void onDetachedFromWindow() {
        stopMove();
        super.onDetachedFromWindow();
    }

    // Periodic tick that pings the handler to advance the animation.
    class Task extends TimerTask {
        Handler mHandler;

        public Task(Handler handler) {
            mHandler = handler;
        }

        @Override
        public void run() {
            // BUG FIX: previously used the enclosing view's handler field directly,
            // leaving the injected mHandler dead; use the handler this task was given.
            mHandler.sendMessage(mHandler.obtainMessage());
        }
    }

    // Simple immutable-by-convention 2d point.
    class Point {
        private float x;
        private float y;

        public Point(float x, float y) {
            this.x = x;
            this.y = y;
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.type;

import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.function.BlockIndex;
import com.facebook.presto.spi.function.BlockPosition;
import com.facebook.presto.spi.function.Convention;
import com.facebook.presto.spi.function.FunctionDependency;
import com.facebook.presto.spi.function.ScalarFunction;
import com.facebook.presto.spi.function.SqlType;
import com.facebook.presto.spi.type.StandardTypes;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.lang.invoke.MethodHandle;

import static com.facebook.presto.spi.InvocationConvention.InvocationArgumentConvention.BLOCK_POSITION;
import static com.facebook.presto.spi.InvocationConvention.InvocationArgumentConvention.NEVER_NULL;
import static com.facebook.presto.spi.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL;
import static com.facebook.presto.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static com.facebook.presto.spi.type.IntegerType.INTEGER;
import static com.google.common.base.Throwables.throwIfInstanceOf;

/**
 * Tests that {@code @FunctionDependency} resolves the "add" function to a MethodHandle
 * matching the requested invocation convention: once with the plain NEVER_NULL/NEVER_NULL
 * convention and once with a BLOCK_POSITION argument, each binding to a different "add"
 * overload below.
 */
public class TestConventionDependencies
        extends AbstractTestFunctions
{
    @BeforeClass
    public void setUp()
    {
        // Register both the dependent functions and the "add" target they resolve.
        registerParametricScalar(RegularConvention.class);
        registerParametricScalar(BlockPositionConvention.class);
        registerParametricScalar(Add.class);
    }

    @Test
    public void testConventionDependencies()
    {
        assertFunction("regular_convention(1, 1)", INTEGER, 2);
        assertFunction("regular_convention(50, 10)", INTEGER, 60);
        assertFunction("regular_convention(1, 0)", INTEGER, 1);
        assertFunction("block_position_convention(ARRAY [1, 2, 3])", INTEGER, 6);
        assertFunction("block_position_convention(ARRAY [25, 0, 5])", INTEGER, 30);
        assertFunction("block_position_convention(ARRAY [56, 275, 36])", INTEGER, 367);
    }

    @ScalarFunction("regular_convention")
    public static class RegularConvention
    {
        // The injected MethodHandle binds to Add.add(long, long) because the requested
        // convention is (NEVER_NULL, NEVER_NULL) -> FAIL_ON_NULL.
        @SqlType(StandardTypes.INTEGER)
        public static long testRegularConvention(
                @FunctionDependency(name = "add",
                        returnType = StandardTypes.INTEGER,
                        argumentTypes = {StandardTypes.INTEGER, StandardTypes.INTEGER},
                        convention = @Convention(arguments = {NEVER_NULL, NEVER_NULL}, result = FAIL_ON_NULL)) MethodHandle function,
                @SqlType(StandardTypes.INTEGER) long left,
                @SqlType(StandardTypes.INTEGER) long right)
        {
            try {
                // invokeExact requires the call-site signature to match the handle exactly.
                return (long) function.invokeExact(left, right);
            }
            catch (Throwable t) {
                throwIfInstanceOf(t, Error.class);
                throwIfInstanceOf(t, PrestoException.class);
                throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
            }
        }
    }

    @ScalarFunction("block_position_convention")
    public static class BlockPositionConvention
    {
        // The injected MethodHandle binds to Add.addBlockPosition(long, Block, int)
        // because the second argument is requested with the BLOCK_POSITION convention.
        // NOTE(review): the array element type string is "array(int)" — confirm it
        // resolves the same as "array(integer)" in this Presto version.
        @SqlType(StandardTypes.INTEGER)
        public static long testBlockPositionConvention(
                @FunctionDependency(
                        name = "add",
                        returnType = StandardTypes.INTEGER,
                        argumentTypes = {StandardTypes.INTEGER, StandardTypes.INTEGER},
                        convention = @Convention(arguments = {NEVER_NULL, BLOCK_POSITION}, result = FAIL_ON_NULL)) MethodHandle function,
                @SqlType("array(int)") Block array)
        {
            long sum = 0;
            // Accumulate by passing (runningSum, block, position) to the handle.
            for (int i = 0; i < array.getPositionCount(); i++) {
                try {
                    sum = (long) function.invokeExact(sum, array, i);
                }
                catch (Throwable t) {
                    throwIfInstanceOf(t, Error.class);
                    throwIfInstanceOf(t, PrestoException.class);
                    throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
                }
            }
            return sum;
        }
    }

    @ScalarFunction("add")
    public static class Add
    {
        // Plain-value overload; Math.addExact on the int-narrowed values traps overflow.
        @SqlType(StandardTypes.INTEGER)
        public static long add(
                @SqlType(StandardTypes.INTEGER) long left,
                @SqlType(StandardTypes.INTEGER) long right)
        {
            return Math.addExact((int) left, (int) right);
        }

        // Block-position overload: reads the second operand out of the block directly.
        @SqlType(StandardTypes.INTEGER)
        public static long addBlockPosition(
                @SqlType(StandardTypes.INTEGER) long first,
                @BlockPosition @SqlType(value = StandardTypes.INTEGER, nativeContainerType = long.class) Block block,
                @BlockIndex int position)
        {
            return Math.addExact((int) first, (int) INTEGER.getLong(block, position));
        }
    }
}
/*
 * Copyright (c) 2016 IRCCloud, Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.irccloud.android;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.os.Build;
import android.text.TextUtils;
import android.util.Log;

import com.codebutler.android_websockets.HybiParser;
import com.crashlytics.android.Crashlytics;
import com.datatheorem.android.trustkit.TrustKit;

import org.apache.http.conn.ssl.StrictHostnameVerifier;

import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URL;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.zip.GZIPInputStream;

import javax.net.SocketFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;

import okhttp3.Headers;
import okhttp3.internal.http.StatusLine;

/**
 * Minimal hand-rolled HTTP(S) GET client built on raw sockets.
 *
 * Supports: happy-eyeballs-style multi-address connect racing, an optional
 * HTTP proxy via CONNECT, TLS with TrustKit pinning and strict hostname
 * verification, gzip response bodies, and following a single 301 redirect.
 * Subclasses override {@link #onStreamConnected}, {@link #onFetchComplete}
 * and {@link #onFetchFailed} to consume the result.
 *
 * NOTE(review): mSocket/mThread are mutated from several threads without
 * synchronization — presumably tolerated by the connect-race design; confirm.
 */
@TargetApi(8)
public class HTTPFetcher {
    // Cap on concurrent connect attempts across ALL HTTPFetcher instances
    // (mSocketThreads is static).
    private static final int MAX_THREADS = 6;
    private static final String TAG = "HTTPFetcher";

    protected URL mURI;
    protected Socket mSocket;
    protected Thread mThread;
    protected String mProxyHost;
    protected int mProxyPort;
    protected boolean isCancelled;

    // Explicit cipher allow-list applied to TLS sockets (best effort; see
    // the IllegalArgumentException catches below for older Android).
    private static final String ENABLED_CIPHERS[] = {
            "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
            "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
            "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
            "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
            "TLS_DHE_RSA_WITH_AES_128_CBC_SHA",
            "TLS_DHE_RSA_WITH_AES_256_CBC_SHA",
            "TLS_ECDHE_RSA_WITH_RC4_128_SHA",
            "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
            "TLS_RSA_WITH_AES_128_CBC_SHA",
            "TLS_RSA_WITH_AES_256_CBC_SHA",
            "SSL_RSA_WITH_3DES_EDE_CBC_SHA",
            "SSL_RSA_WITH_RC4_128_SHA",
            "SSL_RSA_WITH_RC4_128_MD5",
    };

    private static final String ENABLED_PROTOCOLS[] = {
            "TLSv1.2", "TLSv1.1", "TLSv1"
    };

    /**
     * @param uri URL to fetch. Proxy settings are read from the standard
     *            http.proxyHost/http.proxyPort system properties; a proxy of
     *            localhost/127.0.0.1 is ignored.
     */
    public HTTPFetcher(URL uri) {
        mURI = uri;
        mProxyHost = System.getProperty("http.proxyHost", null);
        try {
            mProxyPort = Integer.parseInt(System.getProperty("http.proxyPort", "8080"));
        } catch (NumberFormatException e) {
            mProxyPort = -1;
        }

        if (mProxyHost != null && mProxyHost.length() > 0 && (mProxyHost.equalsIgnoreCase("localhost") || mProxyHost.equalsIgnoreCase("127.0.0.1")))
            mProxyHost = null;
    }

    /** Flags the fetch as cancelled; in-flight connect attempts check this flag. */
    public void cancel() {
        Crashlytics.log(Log.INFO, TAG, "HTTP request cancelled");
        isCancelled = true;
    }

    // All in-flight connect threads process-wide (enforces MAX_THREADS)...
    private static final ArrayList<Thread> mSocketThreads = new ArrayList<>();
    // ...and the subset belonging to this fetcher (detects "all attempts failed").
    private final ArrayList<Thread> mCurrentSocketThreads = new ArrayList<>();
    private int mAddressCount;
    private int mAttempts;

    /**
     * One connect attempt to a single resolved address. The first attempt to
     * succeed claims mSocket and runs the HTTP exchange; later winners close
     * their socket. When every attempt has failed, onFetchFailed() fires.
     */
    private class ConnectRunnable implements Runnable {
        private SocketFactory mSocketFactory;
        private InetSocketAddress mAddress;

        ConnectRunnable(SocketFactory factory, InetSocketAddress address) {
            mSocketFactory = factory;
            mAddress = address;
        }

        @Override
        public void run() {
            try {
                Crashlytics.log(Log.INFO, TAG, "Connecting to address: " + mAddress.getAddress() + " port: " + mAddress.getPort() + " (attempt " + mAttempts + ")");
                Socket socket = mSocketFactory.createSocket();
                socket.connect(mAddress, 30000);
                if (mSocket == null) {
                    // First successful attempt wins the race.
                    mSocket = socket;
                    Crashlytics.log(Log.INFO, TAG, "Connected to " + mAddress.getAddress() + " (attempt " + mAttempts + ")");
                    if (mURI.getProtocol().equals("https")) {
                        SSLSocket s = (SSLSocket) mSocket;
                        try {
                            s.setEnabledProtocols(ENABLED_PROTOCOLS);
                        } catch (IllegalArgumentException e) {
                            //Not supported on older Android versions
                        }
                        try {
                            s.setEnabledCipherSuites(ENABLED_CIPHERS);
                        } catch (IllegalArgumentException e) {
                            //Not supported on older Android versions
                        }
                    }
                    mThread = Thread.currentThread();
                    http_thread();
                } else {
                    // Another attempt already won; discard this socket.
                    socket.close();
                }
            } catch (Exception ex) {
                ex.printStackTrace();
                if (mSocket == null) {
                    NetworkConnection.printStackTraceToCrashlytics(ex);
                }
            }
            mSocketThreads.remove(Thread.currentThread());
            mCurrentSocketThreads.remove(Thread.currentThread());
            // Last attempt for this fetcher finished with no socket -> failure.
            if (mSocket == null && mCurrentSocketThreads.size() == 0 && mAttempts == mAddressCount) {
                Crashlytics.log(Log.ERROR, TAG, "Failed to connect after " + mAttempts + " attempts");
                onFetchFailed();
            }
        }
    }

    /**
     * Starts the fetch asynchronously. No-op if a fetch thread is already
     * running. Either connects through the configured proxy, or resolves all
     * addresses for the host and races connect attempts 300ms apart.
     */
    public void connect() {
        if (mThread != null && mThread.isAlive()) {
            return;
        }

        mThread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    if (isCancelled)
                        return;
                    Crashlytics.log(Log.INFO, TAG, "Requesting: " + mURI);
                    int port = (mURI.getPort() != -1) ? mURI.getPort() : (mURI.getProtocol().equals("https") ? 443 : 80);

                    SocketFactory factory = mURI.getProtocol().equals("https") ? getSSLSocketFactory() : SocketFactory.getDefault();

                    if (mProxyHost != null && mProxyHost.length() > 0 && mProxyPort > 0) {
                        // Proxy path: plain socket to the proxy; TLS (if any)
                        // is layered on after the CONNECT in http_thread().
                        Crashlytics.log(Log.INFO, TAG, "Connecting to proxy: " + mProxyHost + " port: " + mProxyPort);
                        mSocket = SocketFactory.getDefault().createSocket(mProxyHost, mProxyPort);
                        mThread = new Thread(new Runnable() {
                            @SuppressLint("NewApi")
                            public void run() {
                                http_thread();
                            }
                        });
                        mThread.setName("http-stream-thread");
                        mThread.start();
                    } else {
                        // Direct path: race one connect thread per resolved
                        // address, staggered by 300ms.
                        InetAddress[] addresses = InetAddress.getAllByName(mURI.getHost());
                        mAddressCount = addresses.length;
                        for (InetAddress address : addresses) {
                            if (mSocket == null && !isCancelled) {
                                if (mSocketThreads.size() >= MAX_THREADS) {
                                    Crashlytics.log(Log.INFO, TAG, "Waiting for other HTTP requests to complete before continuing");
                                    while (mSocketThreads.size() >= MAX_THREADS) {
                                        Thread.sleep(1000);
                                    }
                                }
                                Thread t = new Thread(new ConnectRunnable(factory, new InetSocketAddress(address, port)));
                                mSocketThreads.add(t);
                                mCurrentSocketThreads.add(t);
                                mAttempts++;
                                t.start();
                                Thread.sleep(300);
                            } else {
                                break;
                            }
                        }
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        });
        mThread.start();
    }

    /**
     * Performs the HTTP exchange on the already-connected mSocket: optional
     * proxy CONNECT + TLS upgrade, hostname verification, the GET request,
     * status/header parsing, optional gzip unwrapping, and the redirect case.
     */
    private void http_thread() {
        try {
            mThread.setName("http-stream-thread");
            int port = (mURI.getPort() != -1) ? mURI.getPort() : (mURI.getProtocol().equals("https") ? 443 : 80);

            String path = TextUtils.isEmpty(mURI.getPath()) ? "/" : mURI.getPath();
            if (!TextUtils.isEmpty(mURI.getQuery())) {
                path += "?" + mURI.getQuery();
            }

            PrintWriter out = new PrintWriter(mSocket.getOutputStream());

            if (mProxyHost != null && mProxyHost.length() > 0 && mProxyPort > 0) {
                // Tunnel through the proxy first.
                out.print("CONNECT " + mURI.getHost() + ":" + port + " HTTP/1.0\r\n");
                out.print("\r\n");
                out.flush();
                HybiParser.HappyDataInputStream stream = new HybiParser.HappyDataInputStream(mSocket.getInputStream());

                // Read HTTP response status line.
                String statusLineString = readLine(stream);
                if (statusLineString == null) {
                    throw new Exception("Received no reply from server.");
                } else {
                    StatusLine statusLine = StatusLine.parse(statusLineString);
                    if (statusLine.code != HttpURLConnection.HTTP_OK) {
                        throw new Exception(statusLine.toString());
                    }
                }

                // Read HTTP response headers (discarded; loop until blank line).
                while (!TextUtils.isEmpty(readLine(stream)));

                if (mURI.getProtocol().equals("https")) {
                    // Upgrade the tunneled socket to TLS.
                    mSocket = getSSLSocketFactory().createSocket(mSocket, mURI.getHost(), port, false);
                    SSLSocket s = (SSLSocket) mSocket;
                    try {
                        s.setEnabledProtocols(ENABLED_PROTOCOLS);
                    } catch (IllegalArgumentException e) {
                        //Not supported on older Android versions
                    }
                    try {
                        s.setEnabledCipherSuites(ENABLED_CIPHERS);
                    } catch (IllegalArgumentException e) {
                        //Not supported on older Android versions
                    }
                    out = new PrintWriter(mSocket.getOutputStream());
                }
            }

            if (mURI.getProtocol().equals("https")) {
                // Strict hostname check against the negotiated TLS session.
                SSLSocket s = (SSLSocket) mSocket;
                StrictHostnameVerifier verifier = new StrictHostnameVerifier();
                if (!verifier.verify(mURI.getHost(), s.getSession()))
                    throw new SSLException("Hostname mismatch");
            }

            Crashlytics.log(Log.DEBUG, TAG, "Sending HTTP request");
            out.print("GET " + path + " HTTP/1.0\r\n");
            out.print("Host: " + mURI.getHost() + "\r\n");
            // Only send the session cookie to the IRCCloud host itself.
            if (mURI.getHost().equals(NetworkConnection.IRCCLOUD_HOST) && NetworkConnection.getInstance().session != null && NetworkConnection.getInstance().session.length() > 0)
                out.print("Cookie: session=" + NetworkConnection.getInstance().session + "\r\n");
            out.print("Connection: close\r\n");
            out.print("Accept-Encoding: gzip\r\n");
            out.print("User-Agent: " + NetworkConnection.getInstance().useragent + "\r\n");
            out.print("\r\n");
            out.flush();

            HybiParser.HappyDataInputStream stream = new HybiParser.HappyDataInputStream(mSocket.getInputStream());

            // Read HTTP response status line.
            String statusLineString = readLine(stream);
            StatusLine statusLine;

            if (statusLineString == null) {
                throw new Exception("Received no reply from server.");
            } else {
                Crashlytics.log(Log.DEBUG, TAG, "Got HTTP response: " + statusLineString);
                statusLine = StatusLine.parse(statusLineString);
                // Accept 200 and 301 (redirect handled below); anything else fails.
                if (statusLine.code != HttpURLConnection.HTTP_OK && statusLine.code != HttpURLConnection.HTTP_MOVED_PERM) {
                    Crashlytics.log(Log.ERROR, TAG, "Failure: " + mURI + ": " + statusLine.toString());
                    throw new Exception(statusLine.toString());
                }
            }

            // Read HTTP response headers.
            String line;
            boolean gzipped = false;
            while (!TextUtils.isEmpty(line = readLine(stream))) {
                int index = line.indexOf(":");
                Headers header = new Headers.Builder().add(line.substring(0, index).trim(), line.substring(index + 1)).build();
                if (header.name(0).equalsIgnoreCase("content-encoding") && header.value(0).equalsIgnoreCase("gzip"))
                    gzipped = true;
                if (statusLine.code == HttpURLConnection.HTTP_MOVED_PERM && header.name(0).equalsIgnoreCase("location")) {
                    // Follow the 301: tear down this connection and restart
                    // the whole fetch against the new URL.
                    Crashlytics.log(Log.INFO, TAG, "Redirecting to: " + header.value(0));
                    mURI = new URL(header.value(0));
                    mSocket.close();
                    mSocket = null;
                    mThread = null;
                    connect();
                    return;
                }
            }

            if (gzipped)
                onStreamConnected(new GZIPInputStream(mSocket.getInputStream()));
            else
                onStreamConnected(mSocket.getInputStream());

            onFetchComplete();
        } catch (Exception ex) {
            NetworkConnection.printStackTraceToCrashlytics(ex);
            onFetchFailed();
        }
    }

    /** Hook: body was consumed successfully. */
    protected void onFetchComplete() {
    }

    /** Hook: connect or exchange failed (also fired on cancellation paths that error out). */
    protected void onFetchFailed() {
    }

    /** Hook: the (possibly gzip-unwrapped) response body stream is ready to read. */
    protected void onStreamConnected(InputStream stream) throws Exception {
    }

    // Can't use BufferedReader because it buffers past the HTTP data.
    // Reads a CRLF-terminated line byte-by-byte; returns null on EOF.
    private String readLine(HybiParser.HappyDataInputStream reader) throws IOException {
        int readChar = reader.read();
        if (readChar == -1) {
            return null;
        }
        StringBuilder string = new StringBuilder("");
        while (readChar != '\n') {
            if (readChar != '\r') {
                string.append((char) readChar);
            }
            readChar = reader.read();
            if (readChar == -1) {
                return null;
            }
        }
        return string.toString();
    }

    /** TLS socket factory wired to TrustKit's pinned trust manager for this host. */
    private SSLSocketFactory getSSLSocketFactory() throws NoSuchAlgorithmException, KeyManagementException {
        SSLContext context = SSLContext.getInstance("TLS");
        TrustManager[] trustManagers = null;
        trustManagers = new TrustManager[1];
        trustManagers[0] = TrustKit.getInstance().getTrustManager(mURI.getHost());
        context.init(null, trustManagers, null);
        return context.getSocketFactory();
    }
}
package com.example.findacar.activites;

import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentTransaction;

import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;

import com.example.findacar.R;
import com.example.findacar.fragments.ListResultsFragment;
import com.example.findacar.model.CarService;
import com.example.findacar.service.ConnectionReceiver;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;

/**
 * Shows the list of car services found for a search. The services, search
 * location and pickup/return times arrive via Intent extras (the service
 * list is JSON-encoded with Gson). A "view map" button forwards the same
 * data to {@link MapsActivity}. Connectivity changes are observed via a
 * {@link ConnectionReceiver} registered for the activity's resumed lifetime.
 */
public class SearchResultsActivity extends AppCompatActivity {

    private double currentLocationX;
    private double currentLocationY;
    private ConnectionReceiver connectionReceiver;
    private Button btnViewMap;
    private Gson gson = new Gson();

    // Raw date/time strings as passed by the search screen.
    String pickupDateTime;
    String returnDateTime;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Fix: call through to super FIRST (framework requirement), and drop
        // the unused "email" extra that was previously read before super.
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_s_results);

        Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setDisplayShowHomeEnabled(true);
        // Append the searched place to the default title.
        String res = getSupportActionBar().getTitle().toString();
        getSupportActionBar().setTitle(res + ": " + getIntent().getStringExtra("place"));
        getSupportActionBar().setElevation(0);

        pickupDateTime = getIntent().getStringExtra("pickUp");
        returnDateTime = getIntent().getStringExtra("return");
        currentLocationX = getIntent().getDoubleExtra("currentLocationX", 0);
        currentLocationY = getIntent().getDoubleExtra("currentLocationY", 0);

        // Decode the list of services from its JSON representation.
        String gsonS = getIntent().getStringExtra("services");
        Type type = new TypeToken<List<CarService>>() {
        }.getType();
        final ArrayList<CarService> services = gson.fromJson(gsonS, type);

        // NOTE(review): fragment created via non-default constructor — the
        // framework cannot recreate it after process death; consider a
        // newInstance(...) factory with an arguments Bundle.
        Fragment fragment = new ListResultsFragment(services);
        FragmentTransaction ft = getSupportFragmentManager().beginTransaction().setTransition((FragmentTransaction.TRANSIT_FRAGMENT_OPEN))
                .replace(R.id.search_results, fragment);
        ft.commit();

        btnViewMap = findViewById(R.id.btnViewMap);
        btnViewMap.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Re-serialize the services and forward everything to the map.
                Intent intent = new Intent(SearchResultsActivity.this, MapsActivity.class);
                intent.putExtra("services", gson.toJson(services));
                intent.putExtra("currentLocationX", currentLocationX);
                intent.putExtra("currentLocationY", currentLocationY);
                startActivity(intent);
            }
        });
    }

    /** Toolbar up-arrow behaves like the hardware back button. */
    @Override
    public boolean onSupportNavigateUp() {
        onBackPressed();
        return true;
    }

    @Override
    protected void onResume() {
        // Listen for connectivity changes only while the activity is visible.
        IntentFilter intentFilter = new IntentFilter();
        intentFilter.addAction("android.net.conn.CONNECTIVITY_CHANGE");
        connectionReceiver = new ConnectionReceiver();
        registerReceiver(connectionReceiver, intentFilter);
        super.onResume();
    }

    @Override
    protected void onPause() {
        // Paired with the registration in onResume().
        unregisterReceiver(connectionReceiver);
        super.onPause();
    }
}
package mod.pianomanu.blockcarpentry.model;

import com.mojang.datafixers.util.Pair;
import mod.pianomanu.blockcarpentry.bakedmodels.FrameBakedModel;
import mod.pianomanu.blockcarpentry.bakedmodels.IllusionBlockBakedModel;
import net.minecraft.client.renderer.model.*;
import net.minecraft.client.renderer.texture.AtlasTexture;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.client.model.IModelConfiguration;
import net.minecraftforge.client.model.geometry.IModelGeometry;

import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import java.util.function.Function;

/**
 * Forge model geometry for the illusion block: baking simply produces a new
 * {@link IllusionBlockBakedModel} (all configuration/transform arguments are
 * ignored — the baked model handles its own rendering).
 */
public class IllusionBlockModelGeometry implements IModelGeometry<IllusionBlockModelGeometry> {
    @Override
    public IBakedModel bake(IModelConfiguration owner, ModelBakery bakery, Function<Material, TextureAtlasSprite> spriteGetter, IModelTransform modelTransform, ItemOverrideList overrides, ResourceLocation modelLocation) {
        return new IllusionBlockBakedModel();
    }

    @Override
    public Collection<Material> getTextures(IModelConfiguration owner, Function<ResourceLocation, IUnbakedModel> modelGetter, Set<Pair<String, String>> missingTextureErrors) {
        // Only the shared frame texture from the block atlas is required.
        return Collections.singletonList(new Material(AtlasTexture.LOCATION_BLOCKS_TEXTURE, FrameBakedModel.TEXTURE));
    }
}
package com.alipay.api.domain;

import java.util.List;

import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;

/**
 * Unified content-moderation submission model ("Jieyuhua" content review API).
 *
 * @author auto create
 * @since 1.0, 2021-05-07 20:39:26
 */
public class AlipayFincoreComplianceRcsmartContentSubmitModel extends AlipayObject {

    private static final long serialVersionUID = 2453239474525537632L;

    /**
     * app_name is the calling system's name (in English); together with the
     * assigned app_token it is used to verify the legitimacy of the caller.
     */
    @ApiField("app_name")
    private String appName;

    /**
     * app_token is the secret assigned by the system; together with app_name
     * it is used to verify the legitimacy of the calling system.
     */
    @ApiField("app_token")
    private String appToken;

    /**
     * Business code. One biz_code represents the current business and may
     * contain multiple scene_codes (a one-to-many relationship).
     */
    @ApiField("biz_code")
    private String bizCode;

    /**
     * List of file descriptors. The business material type is omitted by
     * default and only passed in specially agreed scenarios.
     */
    @ApiListField("file_info_list")
    @ApiField("file_info")
    private List<FileInfo> fileInfoList;

    /**
     * Request id. Forms a unique key with app_name to guarantee idempotency
     * of the business request; also used afterwards to fetch the detailed
     * risk-analysis result for this request.
     */
    @ApiField("request_id")
    private String requestId;

    /**
     * Scene code corresponding to the content-review business scenario.
     */
    @ApiField("scene_code")
    private String sceneCode;

    public String getAppName() {
        return this.appName;
    }
    public void setAppName(String appName) {
        this.appName = appName;
    }

    public String getAppToken() {
        return this.appToken;
    }
    public void setAppToken(String appToken) {
        this.appToken = appToken;
    }

    public String getBizCode() {
        return this.bizCode;
    }
    public void setBizCode(String bizCode) {
        this.bizCode = bizCode;
    }

    public List<FileInfo> getFileInfoList() {
        return this.fileInfoList;
    }
    public void setFileInfoList(List<FileInfo> fileInfoList) {
        this.fileInfoList = fileInfoList;
    }

    public String getRequestId() {
        return this.requestId;
    }
    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    public String getSceneCode() {
        return this.sceneCode;
    }
    public void setSceneCode(String sceneCode) {
        this.sceneCode = sceneCode;
    }
}
package com.shilin.gulimall.member.controller;

import java.util.Arrays;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.shilin.gulimall.member.entity.MemberCollectSubjectEntity;
import com.shilin.gulimall.member.service.MemberCollectSubjectService;
import com.shilin.common.utils.PageUtils;
import com.shilin.common.utils.R;

/**
 * CRUD endpoints for the special-topic activities a member has collected
 * (bookmarked).
 *
 * @author shilin
 * @email g1335026358@gmail.com
 * @date 2020-10-08 19:43:40
 */
@RestController
@RequestMapping("member/membercollectsubject")
public class MemberCollectSubjectController {
    @Autowired
    private MemberCollectSubjectService memberCollectSubjectService;

    /**
     * Paged list; paging/filter parameters are passed as a free-form map.
     */
    @RequestMapping("/list")
    public R list(@RequestParam Map<String, Object> params) {
        PageUtils page = memberCollectSubjectService.queryPage(params);

        return R.ok().put("page", page);
    }

    /**
     * Fetch a single record by id.
     */
    @RequestMapping("/info/{id}")
    public R info(@PathVariable("id") Long id) {
        MemberCollectSubjectEntity memberCollectSubject = memberCollectSubjectService.getById(id);

        return R.ok().put("memberCollectSubject", memberCollectSubject);
    }

    /**
     * Create a new record.
     */
    @RequestMapping("/save")
    public R save(@RequestBody MemberCollectSubjectEntity memberCollectSubject) {
        memberCollectSubjectService.save(memberCollectSubject);

        return R.ok();
    }

    /**
     * Update an existing record by its id.
     */
    @RequestMapping("/update")
    public R update(@RequestBody MemberCollectSubjectEntity memberCollectSubject) {
        memberCollectSubjectService.updateById(memberCollectSubject);

        return R.ok();
    }

    /**
     * Batch delete by ids.
     */
    @RequestMapping("/delete")
    public R delete(@RequestBody Long[] ids) {
        memberCollectSubjectService.removeByIds(Arrays.asList(ids));

        return R.ok();
    }
}
package edu.olezha.sandbox.core.bit;

/**
 * Small demo of the bitwise XOR operator: for each value in [7, 15) it
 * prints the XOR with 5, first in decimal and then in binary notation.
 */
public class BitwiseXor {

    public static void main(String[] args) {
        final int mask = 5;
        for (int value = 7; value < 15; value++) {
            System.out.println("---");
            int result = value ^ mask;
            // Decimal form of the expression.
            System.out.println(value + " ^ " + mask + " = " + result);
            // Same expression rendered in base 2.
            String lhs = Integer.toBinaryString(value);
            String rhs = Integer.toBinaryString(mask);
            String res = Integer.toBinaryString(result);
            System.out.println(lhs + " ^ " + rhs + " = " + res);
        }
    }
}
package de.brockhausag.gruntr.auth;

import de.brockhausag.gruntr.data.dto.CreateUserDto;
import de.brockhausag.gruntr.data.entities.UserEntity;
import de.brockhausag.gruntr.repositories.UserRepository;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.HashMap;
import java.util.Map;

/**
 * Unit tests for {@link GruntrUserDetailsService}. The {@link UserRepository}
 * is mocked with an in-memory map acting as the user "database", so user
 * creation and lookup can be exercised without a real persistence layer.
 */
@SpringBootTest(classes = {GruntrUserDetailsService.class})
@RunWith(SpringJUnit4ClassRunner.class)
public class GruntrUserDetailsServiceTest {

    private static final String USER_NAME = "TestUser";
    private static final String USER_PASSWORD = "Password";
    private static final UserRole DEFAULT_USER_ROLE = UserRole.ROLE_USER;

    // Fake user store backing the mocked repository, keyed by user name.
    private Map<String, UserEntity> mockUserDb = new HashMap<>();

    @MockBean
    UserRepository userRepository;

    @Autowired
    GruntrUserDetailsService userDetailsService;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    /** Builds a valid registration DTO with matching passwords. */
    private CreateUserDto getCreateUserDto() {
        CreateUserDto dto = new CreateUserDto();
        dto.setUserName(USER_NAME);
        dto.setPassword(USER_PASSWORD);
        dto.setMatchingPassword(USER_PASSWORD);
        return dto;
    }

    /** Wires save/findByUserName on the mock to the in-memory map. */
    @Before
    public void setupMock() {
        Mockito.when(userRepository.save(Mockito.any(UserEntity.class))).thenAnswer(invocation -> {
            UserEntity entity = (UserEntity) invocation.getArguments()[0];
            mockUserDb.put(entity.getUserName(), entity);
            return entity;
        });
        Mockito.when(userRepository.findByUserName(Mockito.any(String.class))).thenAnswer(invocation -> {
            String name = (String) invocation.getArguments()[0];
            return mockUserDb.get(name);
        });
    }

    @Test
    public void loadUserByUsernameSuccess() {
        CreateUserDto dto = getCreateUserDto();
        userDetailsService.create(dto, DEFAULT_USER_ROLE);
        UserDetails userDetails = userDetailsService.loadUserByUsername(USER_NAME);
        Assert.assertNotNull(userDetails);
        Assert.assertEquals(USER_NAME, userDetails.getUsername());
    }

    @Test
    public void loadByUsernameNotFound() throws UsernameNotFoundException {
        // Unknown user must surface as UsernameNotFoundException.
        expectedException.expect(UsernameNotFoundException.class);
        userDetailsService.loadUserByUsername("TotalBullshitNotExisting");
    }

    @Test
    public void create() {
        CreateUserDto dto = getCreateUserDto();
        userDetailsService.create(dto, DEFAULT_USER_ROLE);
        UserEntity entity = userRepository.findByUserName(USER_NAME);
        Assert.assertNotNull(entity);
        Assert.assertEquals(USER_NAME, entity.getUserName());
    }

    @Test
    public void createHashesPassword() {
        // The stored hash must not equal the plain-text password.
        CreateUserDto dto = getCreateUserDto();
        userDetailsService.create(dto, DEFAULT_USER_ROLE);
        UserEntity entity = userRepository.findByUserName(USER_NAME);
        Assert.assertNotEquals(USER_PASSWORD, entity.getPasswordHash());
    }

    @Test
    public void createSetsDefaultUserRole() {
        CreateUserDto dto = getCreateUserDto();
        userDetailsService.create(dto, DEFAULT_USER_ROLE);
        UserEntity entity = userRepository.findByUserName(USER_NAME);
        Assert.assertEquals(DEFAULT_USER_ROLE, entity.getRole());
    }

    @Test
    public void createSetsAdminRole() {
        // The role parameter must be honored, not hard-coded to ROLE_USER.
        CreateUserDto dto = getCreateUserDto();
        userDetailsService.create(dto, UserRole.ROLE_ADMIN);
        UserEntity entity = userRepository.findByUserName(USER_NAME);
        Assert.assertEquals(UserRole.ROLE_ADMIN, entity.getRole());
    }
}
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation.  All Rights Reserved.  Licensed under the MIT License.  See License in the project root for license information.
// ------------------------------------------------------------------------------

package com.microsoft.graph.requests.extensions;
import com.microsoft.graph.http.IRequestBuilder;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.concurrency.ICallback;
import com.microsoft.graph.models.extensions.TeamsTemplate;
import java.util.Arrays;
import java.util.EnumSet;
import com.microsoft.graph.http.IBaseCollectionPage;

// **NOTE** This file was generated by a tool and any changes will be overwritten.

/**
 * The interface for the Teams Template Collection Request.
 */
public interface ITeamsTemplateCollectionRequest {

    /**
     * Gets the collection page asynchronously
     *
     * @param callback the callback invoked with the resulting collection page
     */
    void get(final ICallback<ITeamsTemplateCollectionPage> callback);

    /**
     * Gets the collection page synchronously
     *
     * @return the collection page
     * @throws ClientException if the request fails
     */
    ITeamsTemplateCollectionPage get() throws ClientException;

    /**
     * Adds a new TeamsTemplate to the collection asynchronously
     *
     * @param newTeamsTemplate the template to create
     * @param callback the callback invoked with the created template
     */
    void post(final TeamsTemplate newTeamsTemplate, final ICallback<TeamsTemplate> callback);

    /**
     * Adds a new TeamsTemplate to the collection synchronously
     *
     * @param newTeamsTemplate the template to create
     * @return the created template
     * @throws ClientException if the request fails
     */
    TeamsTemplate post(final TeamsTemplate newTeamsTemplate) throws ClientException;

    /**
     * Sets the expand clause for the request
     *
     * @param value the expand clause
     * @return the updated request
     */
    ITeamsTemplateCollectionRequest expand(final String value);

    /**
     * Sets the select clause for the request
     *
     * @param value the select clause
     * @return the updated request
     */
    ITeamsTemplateCollectionRequest select(final String value);

    /**
     * Sets the top value for the request
     *
     * @param value the max number of items to return
     * @return the updated request
     */
    ITeamsTemplateCollectionRequest top(final int value);

    /**
     * Sets the skip value for the request
     *
     * @param value of the number of items to skip
     * @return the updated request
     */
    ITeamsTemplateCollectionRequest skip(final int value);

    /**
     * Sets the skip token value for the request
     *
     * @param skipToken value for pagination
     *
     * @return the updated request
     */
    ITeamsTemplateCollectionRequest skipToken(String skipToken);
}
package net.minecraft.client.resources.model;

import java.util.List;
import net.minecraft.client.renderer.block.model.BakedQuad;
import net.minecraft.client.renderer.block.model.ItemCameraTransforms;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.util.EnumFacing;

/**
 * Baked model for blocks rendered by a built-in (code-driven) renderer
 * rather than by quads. All quad/texture accessors return null since the
 * renderer never uses them; only the camera transforms carry data.
 */
public class BuiltInModel implements IBakedModel {
    // Made final: assigned once in the constructor and never mutated.
    private final ItemCameraTransforms cameraTransforms;

    public BuiltInModel(ItemCameraTransforms p_i46086_1_) {
        this.cameraTransforms = p_i46086_1_;
    }

    /** No per-face quads — rendering is done by the built-in renderer. */
    @Override
    public List<BakedQuad> getFaceQuads(EnumFacing p_177551_1_) {
        return null;
    }

    /** No general quads — rendering is done by the built-in renderer. */
    @Override
    public List<BakedQuad> getGeneralQuads() {
        return null;
    }

    @Override
    public boolean isAmbientOcclusion() {
        return false;
    }

    @Override
    public boolean isGui3d() {
        return true;
    }

    /** Marks this model as handled by a built-in renderer. */
    @Override
    public boolean isBuiltInRenderer() {
        return true;
    }

    @Override
    public TextureAtlasSprite getParticleTexture() {
        return null;
    }

    @Override
    public ItemCameraTransforms getItemCameraTransforms() {
        return this.cameraTransforms;
    }
}
package uk.bl.wa.hadoop.indexer.mdx;

/*
 * #%L
 * warc-hadoop-indexer
 * %%
 * Copyright (C) 2013 - 2018 The webarchive-discovery project contributors
 * %%
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as
 * published by the Free Software Foundation, either version 2 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public
 * License along with this program. If not, see
 * <http://www.gnu.org/licenses/gpl-2.0.html>.
 * #L%
 */

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.OutputLogFilter;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import uk.bl.wa.hadoop.mapreduce.mdx.MDX;
import uk.bl.wa.hadoop.mapreduce.mdx.MDXSeqMerger;

/**
 * Integration test for the WARC-to-MDX MapReduce jobs. Spins up an in-process
 * mini HDFS + MapReduce cluster, stages two test WARC files into it, runs the
 * WARCMDXGenerator job, checks the emitted MDX sequence file, and then runs
 * the MDXSeqMerger over the job's output.
 */
public class WARCMDXGeneratorIntegrationTest {

    private static final Log log = LogFactory
            .getLog(WARCMDXGeneratorIntegrationTest.class);

    // Test cluster:
    private MiniDFSCluster dfsCluster = null;
    private MiniMRCluster mrCluster = null;

    // Input files: two revisit-record WARCs from the warc-indexer test data.
    public final static String[] testWarcs = new String[] {
            "gov.uk-revisit-warcs/BL-20140325121225068-00000-32090~opera~8443.warc.gz",
            "gov.uk-revisit-warcs/BL-20140325122341434-00000-32090~opera~8443.warc.gz" };

    // HDFS paths used by the jobs:
    private final Path input = new Path("inputs");
    private final Path output = new Path("outputs");
    private final Path outputMerged = new Path("outputs-merged");

    // Exported results, copied out of HDFS onto the local filesystem:
    public static File outputSeq = new File("target/test.seq");
    public static File outputMergedSeq = new File("target/test-merged.seq");

    /**
     * Starts the mini DFS and MR clusters and copies the test WARCs into HDFS.
     */
    @Before
    public void setUp() throws Exception {
        // Print out the full config for debugging purposes:
        // Config index_conf = ConfigFactory.load();
        // LOG.debug(index_conf.root().render());

        log.warn("Spinning up test cluster...");
        // make sure the log folder exists,
        // otherwise the test will fail
        new File("target/test-logs").mkdirs();
        //
        System.setProperty("hadoop.log.dir", "target/test-logs");
        System.setProperty("javax.xml.parsers.SAXParserFactory",
                "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl");
        //
        Configuration conf = new Configuration();
        // Keep the mini-DFS state under target/ so "mvn clean" removes it.
        System.setProperty("test.build.data",
                new File("target/mini-dfs").getAbsolutePath());
        dfsCluster = new MiniDFSCluster(conf, 1, true, null);
        dfsCluster.getFileSystem().makeQualified(input);
        dfsCluster.getFileSystem().makeQualified(output);
        //
        mrCluster = new MiniMRCluster(1, getFileSystem().getUri().toString(), 1);

        // prepare for tests
        for (String filename : testWarcs) {
            copyFileToTestCluster(getFileSystem(), input,
                    "../warc-indexer/src/test/resources/", filename);
        }

        log.warn("Spun up test cluster.");
    }

    /** Convenience accessor for the mini cluster's HDFS. */
    protected FileSystem getFileSystem() throws IOException {
        return dfsCluster.getFileSystem();
    }

    /**
     * Copies a local test resource (prefix + filename) into the cluster's
     * HDFS under the given input path, preserving the relative filename.
     */
    public static void copyFileToTestCluster(FileSystem fs, Path input,
            String prefix, String filename) throws IOException {
        Path targetPath = new Path(input, filename);
        File sourceFile = new File(prefix + filename);
        log.info("Copying " + filename + " into cluster at "
                + targetPath.toUri() + "...");
        FSDataOutputStream os = fs.create(targetPath);
        InputStream is = new FileInputStream(sourceFile);
        IOUtils.copy(is, os);
        is.close();
        os.close();
        log.info("Copy completed.");
    }

    /**
     * Writes the given paths, one per line, into a temporary text file used
     * as the job's input-list file. The file is deleted on JVM exit.
     */
    public static File writeInputFile(Path[] inputFiles) throws Exception {
        // Make a list:
        File tmpInputsFile = File.createTempFile("inputs", ".txt");
        tmpInputsFile.deleteOnExit();
        Writer s = new FileWriter(tmpInputsFile);
        for (Path p : inputFiles) {
            s.write(p.toString() + "\n");
        }
        s.close();
        return tmpInputsFile;
    }

    /**
     * Runs the WARCMDXGenerator job over the staged WARCs, copies the single
     * output sequence file to the local FS, counts the MDX records in it
     * (expects 114), and finally exercises the MDXSeqMerger on the output.
     */
    @SuppressWarnings("deprecation")
    @Test
    public void testMDXGenerator() throws Exception {
        // prepare for test
        // createTextInputFile();

        log.info("Checking input file is present...");
        // Check that the input file is present:
        Path[] inputFiles = FileUtil.stat2Paths(getFileSystem()
                .listStatus(new Path(input, "gov.uk-revisit-warcs/"),
                        new OutputLogFilter()));
        Assert.assertEquals(2, inputFiles.length);

        // Create a file of the inputs
        File tmpInputsFile = writeInputFile(inputFiles);

        // Set up arguments for the job:
        String[] args = { "-i", tmpInputsFile.getAbsolutePath(), "-o",
                this.output.getName() };

        // Set up the WARCIndexerRunner
        WARCMDXGenerator wir = new WARCMDXGenerator();

        // run job
        // Job configuration:
        log.info("Setting up job config...");
        JobConf jobConf = this.mrCluster.createJobConf();
        jobConf.setInt(WARCMDXGenerator.WARC_HADOOP_NUM_REDUCERS, 1);
        jobConf.set("mapred.child.java.opts", "-Xmx512m");
        wir.createJobConf(jobConf, args);
        log.info("Running job...");
        JobClient.runJob(jobConf);
        log.info("Job finished, checking the results...");

        // check the output exists
        Path[] outputFiles = FileUtil.stat2Paths(getFileSystem().listStatus(
                output, new OutputLogFilter()));
        // Default is 1 reducers (as knitting together multiple sequence files
        // is not a mere matter of concatenation):
        Assert.assertEquals(1, outputFiles.length);

        // Copy the output out of HDFS and onto local FS:
        FileOutputStream fout = new FileOutputStream(outputSeq);
        for (Path output : outputFiles) {
            log.info(" --- output : " + output);
            if (getFileSystem().isFile(output)) {
                InputStream is = getFileSystem().open(output);
                IOUtils.copy(is, fout);
            } else {
                log.info(" --- ...skipping directory...");
            }
            fout.flush();
        }
        fout.close();

        // Check contents of the output:
        Configuration config = new Configuration();
        Path path = new Path(outputSeq.getAbsolutePath());
        SequenceFile.Reader reader = new SequenceFile.Reader(
                FileSystem.get(config), path, config);
        // Key/value classes are recorded in the sequence file header, so
        // instances are created reflectively.
        WritableComparable key = (WritableComparable) reader.getKeyClass()
                .newInstance();
        Writable value = (Writable) reader.getValueClass().newInstance();
        MDX mdx;
        int counter = 0;
        while (reader.next(key, value)) {
            mdx = new MDX(value.toString());
            System.out.println("Key is: " + key + " record_type: "
                    + mdx.getRecordType() + " SURT: " + mdx.getUrlAsSURT());
            counter++;
        }
        // Expected number of MDX records from the two test WARCs:
        assertEquals(114, counter);
        reader.close();

        // Now test the MDXSeqMerger
        testSeqMerger(outputFiles);
    }

    /**
     * Runs the MDXSeqMerger job (one reducer) over the generator's output and
     * copies the merged sequence file to the local FS. No assertions beyond
     * the job completing successfully.
     */
    private void testSeqMerger(Path[] inputFiles) throws Exception {
        // Create a file of the inputs
        File tmpInputsFile = writeInputFile(inputFiles);

        // Set up arguments for the job:
        String[] args = { "-i", tmpInputsFile.getAbsolutePath(), "-o",
                this.outputMerged.getName(), "-r", "1" };

        // Set up the WARCIndexerRunner
        MDXSeqMerger msm = new MDXSeqMerger();

        // run job
        log.info("Setting up job config...");
        JobConf jobConf = this.mrCluster.createJobConf();
        msm.createJobConf(jobConf, args);
        log.info("Running job...");
        JobClient.runJob(jobConf);
        log.info("Job finished, checking the results...");

        // Copy the output out of HDFS and onto local FS:
        FileOutputStream fout = new FileOutputStream(outputMergedSeq);
        Path[] outputFiles = FileUtil.stat2Paths(getFileSystem()
                .listStatus(outputMerged, new OutputLogFilter()));
        for (Path output : outputFiles) {
            log.info(" --- output : " + output);
            if (getFileSystem().isFile(output)) {
                InputStream is = getFileSystem().open(output);
                IOUtils.copy(is, fout);
            } else {
                log.info(" --- ...skipping directory...");
            }
            fout.flush();
        }
        fout.close();
    }

    /** Shuts down both mini clusters, tolerating a partially-failed setUp. */
    @After
    public void tearDown() throws Exception {
        log.warn("Tearing down test cluster...");
        if (dfsCluster != null) {
            dfsCluster.shutdown();
            dfsCluster = null;
        }
        if (mrCluster != null) {
            mrCluster.shutdown();
            mrCluster = null;
        }
        log.warn("Torn down test cluster.");
    }
}
/*
 * Copyright 2015-2016 USEF Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Contains the classes used to create MQTT messages for the transport service.
 */
package nl.energieprojecthoogdalem.messageservice.transportservice.mqttmessages;
package com.luban.common.listenner; import com.luban.common.quartz.utils.QuartzManager; import com.luban.common.service.JobService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.CommandLineRunner; import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; @Component @Order(value = 1) public class ScheduleJobInitListener implements CommandLineRunner { @Autowired JobService scheduleJobService; @Autowired QuartzManager quartzManager; @Override public void run(String... arg0) throws Exception { try { scheduleJobService.initSchedule(); } catch (Exception e) { e.printStackTrace(); } } }
package pro.lukasgorny.services;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;

import pro.lukasgorny.dto.Player;
import pro.lukasgorny.messages.Messages;

/**
 * Created by Łukasz "Husar" Górny on 2017-07-11.
 */
public class PlayerValidationService {

    /**
     * Validates that a player object is complete enough to be filtered:
     * non-null, has an account id, and has a non-empty match list.
     *
     * @param player the player to validate; may be null (rejected)
     * @throws IllegalArgumentException if any precondition fails, with the
     *         corresponding {@link Messages} text
     */
    public void validate(final Player player) throws IllegalArgumentException {
        // Guava's Optional.fromNullable(...).isPresent() is just a null
        // check; validate the argument directly and avoid the repeated
        // .get() calls of the previous version. Behavior is unchanged:
        // checkArgument still throws IllegalArgumentException.
        Preconditions.checkArgument(player != null, Messages.CANNOT_FILTER_PLAYER_NULL);
        Preconditions.checkArgument(player.getAccountId() != null, Messages.CANNOT_FILTER_PLAYER_NULL);
        Preconditions.checkArgument(player.getMatches() != null, Messages.PLAYER_HAS_NO_MATCHES_PLAYED);
        Preconditions.checkArgument(!player.getMatches().isEmpty(), Messages.PLAYER_HAS_NO_MATCHES_PLAYED);
    }
}
package com.planet_ink.coffee_mud.core.collections;
import java.util.*;
/*
   Copyright 2016-2020 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
/**
 * A ListIterator facade that walks a ListIterator of K while presenting each
 * element as an L, produced on demand by a FullConverter that receives both
 * the element and its index. Traversal and removal pass straight through to
 * the wrapped iterator; insertion and replacement are rejected.
 */
public class FullConvertingListIterator<K,L> implements ListIterator<L>
{
	private final ListIterator<K>		wrapped;
	private final FullConverter<K,L>	xlator;

	public FullConvertingListIterator(final ListIterator<K> i, final FullConverter<K,L> conv)
	{
		wrapped = i;
		xlator = conv;
	}

	@Override
	public void add(final L arg0)
	{
		// Mutation other than remove() is unsupported.
		// NOTE(review): the ListIterator contract suggests
		// UnsupportedOperationException here; IllegalArgumentException is
		// kept to preserve existing behavior.
		throw new java.lang.IllegalArgumentException();
	}

	@Override
	public boolean hasNext()
	{
		return wrapped.hasNext();
	}

	@Override
	public boolean hasPrevious()
	{
		return wrapped.hasPrevious();
	}

	@Override
	public L next()
	{
		// Capture the index before advancing, so the converter sees the
		// position of the element being returned.
		final int index = wrapped.nextIndex();
		return xlator.convert(index, wrapped.next());
	}

	@Override
	public int nextIndex()
	{
		return wrapped.nextIndex();
	}

	@Override
	public L previous()
	{
		final int index = wrapped.previousIndex();
		return xlator.convert(index, wrapped.previous());
	}

	@Override
	public int previousIndex()
	{
		return wrapped.previousIndex();
	}

	@Override
	public void remove()
	{
		wrapped.remove();
	}

	@Override
	public void set(final L arg0)
	{
		// see add(): this view rejects replacement as well
		throw new java.lang.IllegalArgumentException();
	}
}
package io.joern.fuzzyc2cpg.parsetreetoast; import static org.junit.Assert.assertTrue; import io.joern.fuzzyc2cpg.ast.expressions.Expression; import io.joern.fuzzyc2cpg.ast.langc.statements.blockstarters.ElseStatement; import io.joern.fuzzyc2cpg.ast.langc.statements.blockstarters.IfStatement; import io.joern.fuzzyc2cpg.ast.logical.statements.BlockStarter; import io.joern.fuzzyc2cpg.ast.logical.statements.CompoundStatement; import io.joern.fuzzyc2cpg.ast.expressions.Condition; import org.junit.Test; public class IfNestingTests { @Test public void ifBlockCompound() { String input = "if(foo){}"; CompoundStatement compound = (CompoundStatement) FunctionContentTestUtil .parseAndWalk(input); assertFirstChildIsIfStatement(compound); } @Test public void ifBlockNoCompound() { String input = "if(foo) bar();"; CompoundStatement compound = (CompoundStatement) FunctionContentTestUtil .parseAndWalk(input); assertFirstChildIsIfStatement(compound); } @Test public void nestedIfBlocksNoCompound() { String input = "if(foo) if(fooAgain) bar();"; CompoundStatement compound = (CompoundStatement) FunctionContentTestUtil .parseAndWalk(input); IfStatement ifStatement = (IfStatement) compound.getStatements().get(0); IfStatement innerStatement = (IfStatement) ifStatement.getStatement(); assertFirstChildIsIfStatement(compound); assertTrue(innerStatement.getCondition() != null); } @Test public void conditionString() { String input = "if(foo){}"; CompoundStatement item = (CompoundStatement) FunctionContentTestUtil .parseAndWalk(input); BlockStarter starter = (BlockStarter) item.getStatements().get(0); Expression condition = ((Condition)starter.getCondition()).getExpression(); assertTrue(condition.getEscapedCodeStr().equals("foo")); } @Test public void ifElse() { String input = "if(foo) lr->f = stdin; else lr->f = fopen(pathname, \"r\");"; CompoundStatement compound = (CompoundStatement) FunctionContentTestUtil .parseAndWalk(input); assertFirstChildIsIfStatement(compound); 
assertFirstIfHasElse(compound); } @Test public void ifElseChain() { String input = "if(foo1) bar1(); else if(foo2) bar2(); else if(foo3) bar3();"; CompoundStatement compound = (CompoundStatement) FunctionContentTestUtil .parseAndWalk(input); IfStatement ifItem = (IfStatement) compound.getStatements().get(0); for (int i = 0; i < 2; i++) { assertHasElse(ifItem); ifItem = (IfStatement) ifItem.getElseNode().getStatement(); } } @Test public void ifInElse() { String input = "if (foo1){} else { if (foo2) { foo(); } }"; CompoundStatement compound = (CompoundStatement) FunctionContentTestUtil .parseAndWalk(input); IfStatement ifItem = (IfStatement) compound.getStatements().get(0); assertFirstChildIsIfStatement(compound); assertFirstIfHasElse(compound); ElseStatement elseNode = ifItem.getElseNode(); CompoundStatement innerCompound = (CompoundStatement) elseNode .getStatement(); assertTrue(innerCompound.getChildCount() == 1); IfStatement innerIf = (IfStatement) innerCompound.getChild(0); assertTrue(innerIf.getCondition() != null); } private void assertFirstChildIsIfStatement(CompoundStatement compound) { IfStatement ifStatement = (IfStatement) compound.getStatements().get(0); assertTrue(compound.getStatements().size() == 1); assertTrue(ifStatement.getCondition() != null); } private void assertFirstIfHasElse(CompoundStatement compound) { IfStatement ifItem = (IfStatement) compound.getStatements().get(0); assertHasElse(ifItem); } private void assertHasElse(IfStatement ifItem) { ElseStatement elseNode = ifItem.getElseNode(); assertTrue(elseNode != null); assertTrue(elseNode.getChild(0) != null); } }
package com.github.binarywang.wxpay.bean.result;

import com.thoughtworks.xstream.annotations.XStreamAlias;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;

/**
 * Result object carrying the sandbox signing key (deserialized from the
 * {@code <xml>} response).
 *
 * <pre>
 * Created by BinaryWang on 2017/6/18.
 * </pre>
 *
 * @author <a href="https://github.com/binarywang">Binary Wang</a>
 */
@Data
@EqualsAndHashCode(callSuper = true)
@NoArgsConstructor
@XStreamAlias("xml")
public class WxPaySandboxSignKeyResult extends WxPayBaseResult {
  /**
   * <pre>
   * Sandbox signing key.
   * field: sandbox_signkey
   * required: no
   * example: 013467007045764
   * type: String(32)
   * description: the sandbox signing key returned by the service
   * </pre>
   */
  @XStreamAlias("sandbox_signkey")
  private String sandboxSignKey;
}
/* * Copyright (c) 2010-2019 Nathan Rajlich * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following * conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.URI; import java.net.URISyntaxException; import java.nio.ByteBuffer; import org.java_websocket.WebSocket; import org.java_websocket.client.WebSocketClient; import org.java_websocket.enums.Opcode; /** * This example shows how to send fragmented frames.<br> * For information on when to used fragmented frames see http://tools.ietf.org/html/rfc6455#section-5.4<br> * Fragmented and normal messages can not be mixed. * One is however allowed to mix them with control messages like ping/pong. 
* * @see WebSocket#sendFragmentedFrame(Opcode, ByteBuffer, boolean) **/ public class FragmentedFramesExample { public static void main( String[] args ) throws URISyntaxException , IOException , InterruptedException { // WebSocketImpl.DEBUG = true; // will give extra output WebSocketClient websocket = new ExampleClient( new URI( "ws://localhost:8887" )); if( !websocket.connectBlocking() ) { System.err.println( "Could not connect to the server." ); return; } System.out.println( "This example shows how to send fragmented(continuous) messages." ); BufferedReader stdin = new BufferedReader( new InputStreamReader( System.in ) ); while ( websocket.isOpen() ) { System.out.println( "Please type in a loooooong line(which then will be send in 2 byte fragments):" ); String longline = stdin.readLine(); ByteBuffer longelinebuffer = ByteBuffer.wrap( longline.getBytes() ); longelinebuffer.rewind(); for( int position = 2 ; ; position += 2 ) { if( position < longelinebuffer.capacity() ) { longelinebuffer.limit( position ); websocket.sendFragmentedFrame( Opcode.TEXT, longelinebuffer, false );// when sending binary data one should use Opcode.BINARY assert ( longelinebuffer.remaining() == 0 ); // after calling sendFragmentedFrame one may reuse the buffer given to the method immediately } else { longelinebuffer.limit( longelinebuffer.capacity() ); websocket.sendFragmentedFrame( Opcode.TEXT, longelinebuffer, true );// sending the last frame break; } } System.out.println( "You can not type in the next long message or press Ctr-C to exit." ); } System.out.println( "FragmentedFramesExample terminated" ); } }
package ru.otus.core.model; import java.util.HashSet; import java.util.Objects; import java.util.Set; @Entity @Table(name = "users") public class User { @Id @GeneratedValue(strategy = GenerationType.SEQUENCE) @Column(name = "id") private long id; @Column(name = "name") private String name; @OneToOne(targetEntity = Address.class, cascade = CascadeType.ALL, fetch = FetchType.LAZY) @JoinColumn(name = "address_id", referencedColumnName = "id") private Address address; @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY, mappedBy = "user", orphanRemoval = true) private Set<Phone> phones = new HashSet<>(); public User() { } public User(long id, String name) { this.id = id; this.name = name; } public long getId() { return id; } public void setId(long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public Address getAddress() { return address; } public void setAddress(Address address) { this.address = address; } public Set<Phone> getPhones() { return phones; } public void setPhones(Set<Phone> phones) { this.phones = phones; } @Override public String toString() { return "User{" + "id=" + id + ", name='" + name + '\'' + '}'; } @Override public int hashCode() { return Objects.hash(id, name); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) return false; User objB = (User) obj; return this.id == objB.id && this.name.equals(objB.name); } }
package io.fizz.chat.user.domain;

import io.fizz.common.Utils;
import io.fizz.common.domain.ApplicationId;
import io.fizz.common.domain.Url;
import io.fizz.common.domain.UserId;

import java.util.*;

/**
 * Domain entity for a chat user within an application.
 *
 * The identity fields (appId, userId) and profile fields (nick, status
 * message, profile URL) are immutable; only the push token (tokenFCM) is
 * mutable through {@link #setToken(String)} / {@link #clearToken()}.
 */
public class User {

    /**
     * Fluent builder for {@link User}. Missing nick/status message default
     * to empty values; appId and userId are validated by the User
     * constructor itself, so build() throws if they were not supplied.
     */
    public static class Builder {
        private ApplicationId appId;
        private UserId userId;
        private Nick nick;
        private StatusMessage statusMessage;
        private Url profileUrl;

        public Builder setAppId(final ApplicationId aAppId) {
            this.appId = aAppId;
            return this;
        }

        public Builder setUserId(final UserId aUserId) {
            this.userId = aUserId;
            return this;
        }

        public Builder setNick(final Nick aNick) {
            this.nick = aNick;
            return this;
        }

        public Builder setStatusMessage(final StatusMessage aStatusMessage) {
            this.statusMessage = aStatusMessage;
            return this;
        }

        public Builder setProfileUrl(Url aProfileUrl) {
            this.profileUrl = aProfileUrl;
            return this;
        }

        /**
         * Builds the User, substituting empty nick/status message when unset.
         * Note: profileUrl may remain null; the User constructor permits it.
         */
        public User build() {
            if (Objects.isNull(nick)) {
                nick = new Nick("");
            }
            if (Objects.isNull(statusMessage)) {
                statusMessage = new StatusMessage("");
            }
            return new User(appId, userId, nick, statusMessage, profileUrl);
        }
    }

    private final UserId userId;
    private final ApplicationId appId;
    private final Nick nick;
    private final StatusMessage statusMessage;
    private final Url profileUrl;
    // Mutable push-notification token; null when cleared or never set.
    private String tokenFCM;

    /**
     * Creates a user.
     *
     * @param aAppId application the user belongs to; required
     * @param aUserId user identity; required
     * @param aNick display name; not validated here
     * @param aStatusMessage status text; not validated here
     * @param aProfileUrl profile image/page URL; may be null
     * @throws IllegalArgumentException (presumably, via Utils.assertRequiredArgument
     *         — confirm) with "invalid_app_id" / "invalid_user_id" when a
     *         required argument is missing
     */
    public User(final ApplicationId aAppId,
                final UserId aUserId,
                final Nick aNick,
                final StatusMessage aStatusMessage,
                final Url aProfileUrl) {
        Utils.assertRequiredArgument(aAppId, "invalid_app_id");
        Utils.assertRequiredArgument(aUserId, "invalid_user_id");

        this.appId = aAppId;
        this.userId = aUserId;
        this.nick = aNick;
        this.statusMessage = aStatusMessage;
        this.profileUrl = aProfileUrl;
    }

    public ApplicationId appId() {
        return appId;
    }

    public UserId userId() {
        return userId;
    }

    public Nick nick() {
        return nick;
    }

    public StatusMessage statusMessage() {
        return statusMessage;
    }

    public Url profileUrl() {
        return profileUrl;
    }

    /**
     * Sets the push token. The token must be 1..4096 characters long
     * ("invalid_push_token" otherwise).
     */
    public void setToken(final String aToken) {
        Utils.assertRequiredArgumentLength(aToken, 1, 4096, "invalid_push_token");
        tokenFCM = aToken;
    }

    /** Removes the push token (token() returns null afterwards). */
    public void clearToken() {
        tokenFCM = null;
    }

    public String token() {
        return tokenFCM;
    }
}
package org.bian.dto;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.bian.dto.BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceAnalysis;
import org.bian.dto.BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceRecord;
import org.bian.dto.BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceReport;
import org.bian.dto.CRITSystemDevelopmentProjectCreateInputModelITSystemDevelopmentProjectInstanceRecord;

import javax.validation.Valid;

/**
 * BQFunctionalSpecificationRetrieveOutputModel
 *
 * Output payload for the BIAN "Functional Specification" retrieve operation.
 * Pure data-transfer object: fields with getters/setters only, serialized
 * via Jackson. NOTE(review): appears to be generated from a BIAN/Swagger
 * model — confirm before hand-editing.
 */
public class BQFunctionalSpecificationRetrieveOutputModel {

  private CRITSystemDevelopmentProjectCreateInputModelITSystemDevelopmentProjectInstanceRecord iTSystemDevelopmentProjectInstanceRecord = null;

  private BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceRecord functionalSpecificationInstanceRecord = null;

  private String functionalSpecificationRetrieveActionTaskReference = null;

  private Object functionalSpecificationRetrieveActionTaskRecord = null;

  private String functionalSpecificationRetrieveActionResponse = null;

  private BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceReport functionalSpecificationInstanceReport = null;

  private BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceAnalysis functionalSpecificationInstanceAnalysis = null;

  /**
   * Get iTSystemDevelopmentProjectInstanceRecord
   * @return iTSystemDevelopmentProjectInstanceRecord
   **/
  public CRITSystemDevelopmentProjectCreateInputModelITSystemDevelopmentProjectInstanceRecord getITSystemDevelopmentProjectInstanceRecord() {
    return iTSystemDevelopmentProjectInstanceRecord;
  }

  // Explicit property name: standard bean naming would not map the
  // getITSystem... accessor back to the leading-lowercase "iTSystem..."
  // property, hence the annotation on this setter only (presumably —
  // verify against the serializer configuration).
  @JsonProperty("iTSystemDevelopmentProjectInstanceRecord")
  public void setITSystemDevelopmentProjectInstanceRecord(CRITSystemDevelopmentProjectCreateInputModelITSystemDevelopmentProjectInstanceRecord iTSystemDevelopmentProjectInstanceRecord) {
    this.iTSystemDevelopmentProjectInstanceRecord = iTSystemDevelopmentProjectInstanceRecord;
  }

  /**
   * Get functionalSpecificationInstanceRecord
   * @return functionalSpecificationInstanceRecord
   **/
  public BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceRecord getFunctionalSpecificationInstanceRecord() {
    return functionalSpecificationInstanceRecord;
  }

  public void setFunctionalSpecificationInstanceRecord(BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceRecord functionalSpecificationInstanceRecord) {
    this.functionalSpecificationInstanceRecord = functionalSpecificationInstanceRecord;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::ISO20022andUNCEFACT::Identifier  general-info: Reference to a Functional Specification instance retrieve service call
   * @return functionalSpecificationRetrieveActionTaskReference
   **/
  public String getFunctionalSpecificationRetrieveActionTaskReference() {
    return functionalSpecificationRetrieveActionTaskReference;
  }

  public void setFunctionalSpecificationRetrieveActionTaskReference(String functionalSpecificationRetrieveActionTaskReference) {
    this.functionalSpecificationRetrieveActionTaskReference = functionalSpecificationRetrieveActionTaskReference;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Binary  general-info: The retrieve service call consolidated processing record
   * @return functionalSpecificationRetrieveActionTaskRecord
   **/
  public Object getFunctionalSpecificationRetrieveActionTaskRecord() {
    return functionalSpecificationRetrieveActionTaskRecord;
  }

  public void setFunctionalSpecificationRetrieveActionTaskRecord(Object functionalSpecificationRetrieveActionTaskRecord) {
    this.functionalSpecificationRetrieveActionTaskRecord = functionalSpecificationRetrieveActionTaskRecord;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text  general-info: Details of the retrieve action service response (lists returned reports)
   * @return functionalSpecificationRetrieveActionResponse
   **/
  public String getFunctionalSpecificationRetrieveActionResponse() {
    return functionalSpecificationRetrieveActionResponse;
  }

  public void setFunctionalSpecificationRetrieveActionResponse(String functionalSpecificationRetrieveActionResponse) {
    this.functionalSpecificationRetrieveActionResponse = functionalSpecificationRetrieveActionResponse;
  }

  /**
   * Get functionalSpecificationInstanceReport
   * @return functionalSpecificationInstanceReport
   **/
  public BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceReport getFunctionalSpecificationInstanceReport() {
    return functionalSpecificationInstanceReport;
  }

  public void setFunctionalSpecificationInstanceReport(BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceReport functionalSpecificationInstanceReport) {
    this.functionalSpecificationInstanceReport = functionalSpecificationInstanceReport;
  }

  /**
   * Get functionalSpecificationInstanceAnalysis
   * @return functionalSpecificationInstanceAnalysis
   **/
  public BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceAnalysis getFunctionalSpecificationInstanceAnalysis() {
    return functionalSpecificationInstanceAnalysis;
  }

  public void setFunctionalSpecificationInstanceAnalysis(BQFunctionalSpecificationRetrieveOutputModelFunctionalSpecificationInstanceAnalysis functionalSpecificationInstanceAnalysis) {
    this.functionalSpecificationInstanceAnalysis = functionalSpecificationInstanceAnalysis;
  }

}
/*
 * The MIT License
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Jean-Baptiste Quenot, Tom Huybrechts
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson;

import jenkins.util.SystemProperties;
import com.google.common.collect.Lists;
import hudson.Plugin.DummyImpl;
import hudson.PluginWrapper.Dependency;
import hudson.model.Hudson;
import jenkins.util.AntClassLoader;
import hudson.util.CyclicGraphDetector;
import hudson.util.CyclicGraphDetector.CycleDetectedException;
import hudson.util.IOUtils;
import hudson.util.MaskingClassLoader;
import hudson.util.VersionNumber;
import jenkins.ClassLoaderReflectionToolkit;
import jenkins.ExtensionFilter;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.Expand;
import org.apache.tools.ant.taskdefs.Zip;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.types.PatternSet;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ZipFileSet;
import org.apache.tools.ant.types.resources.MappedResourceCollection;
import org.apache.tools.ant.util.GlobPatternMapper;
import org.apache.tools.zip.ZipEntry;
import org.apache.tools.zip.ZipExtraField;
import org.apache.tools.zip.ZipOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jenkinsci.bytecode.Transformer;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import javax.annotation.Nonnull;

import static org.apache.commons.io.FilenameUtils.getBaseName;

/**
 * The "classic" {@link PluginStrategy}: loads a plugin from an archive
 * (.hpi/.jpi, or a linked .hpl/.jpl pointing at a manifest), builds its
 * classloader chain (dependency loader, optional masking, Ant-based jar
 * loader), and instantiates/starts the {@link Plugin} implementation.
 */
public class ClassicPluginStrategy implements PluginStrategy {

    /**
     * Filter for jar files.
     */
    private static final FilenameFilter JAR_FILTER = new FilenameFilter() {
        public boolean accept(File dir,String name) {
            return name.endsWith(".jar");
        }
    };

    // Back-reference used for work dir resolution, plugin lookup, and the bytecode transformer.
    private PluginManager pluginManager;

    /**
     * All the plugins eventually delegate this classloader to load core, servlet APIs, and SE runtime.
     * Packages listed in a plugin's "Global-Mask-Classes" manifest attribute are added to this mask
     * in {@link #createPluginWrapper(File)}.
     */
    private final MaskingClassLoader coreClassLoader = new MaskingClassLoader(getClass().getClassLoader());

    public ClassicPluginStrategy(PluginManager pluginManager) {
        this.pluginManager = pluginManager;
    }

    /**
     * Computes the plugin short name from the archive's manifest
     * (reading the linked manifest for .hpl/.jpl archives).
     */
    @Override
    public String getShortName(File archive) throws IOException {
        Manifest manifest;
        if (isLinked(archive)) {
            manifest = loadLinkedManifest(archive);
        } else {
            try (JarFile jf = new JarFile(archive, false)) {
                manifest = jf.getManifest();
            }
        }
        return PluginWrapper.computeShortName(manifest, archive.getName());
    }

    /** True if the archive is a "linked" plugin (.hpl/.jpl), i.e. a pointer file used during development. */
    private static boolean isLinked(File archive) {
        return archive.getName().endsWith(".hpl") || archive.getName().endsWith(".jpl");
    }

    /**
     * Loads the manifest a .hpl/.jpl file refers to. The file either IS a manifest
     * (first line starts with "Manifest-Version:") or contains, on its first line,
     * a path to the real manifest (resolved relative to the archive).
     */
    private static Manifest loadLinkedManifest(File archive) throws IOException {
        // resolve the .hpl file to the location of the manifest file
        try {
            // Locate the manifest
            String firstLine;
            FileInputStream manifestHeaderInput = new FileInputStream(archive);
            try {
                firstLine = IOUtils.readFirstLine(manifestHeaderInput, "UTF-8");
            } finally {
                manifestHeaderInput.close();
            }
            if (firstLine.startsWith("Manifest-Version:")) {
                // this is the manifest already
            } else {
                // indirection
                archive = resolve(archive, firstLine);
            }

            // Read the manifest
            FileInputStream manifestInput = new FileInputStream(archive);
            try {
                return new Manifest(manifestInput);
            } finally {
                manifestInput.close();
            }
        } catch (IOException e) {
            throw new IOException("Failed to load " + archive, e);
        }
    }

    /**
     * Builds a {@link PluginWrapper} for the given archive: explodes it if needed,
     * reads the manifest, computes the classpath and (optional/mandatory) dependency
     * lists, registers any global class masks, and assembles the classloader chain.
     */
    @Override
    public PluginWrapper createPluginWrapper(File archive) throws IOException {
        final Manifest manifest;
        URL baseResourceURL = null;

        File expandDir = null; // if .hpi, this is the directory where war is expanded
        boolean isLinked = isLinked(archive);
        if (isLinked) {
            manifest = loadLinkedManifest(archive);
        } else {
            if (archive.isDirectory()) {// already expanded
                expandDir = archive;
            } else {
                File f = pluginManager.getWorkDir();
                expandDir = new File(f == null ? archive.getParentFile() : f, getBaseName(archive.getName()));
                explode(archive, expandDir);
            }

            File manifestFile = new File(expandDir, PluginWrapper.MANIFEST_FILENAME);
            if (!manifestFile.exists()) {
                throw new IOException(
                        "Plugin installation failed. No manifest at "
                                + manifestFile);
            }
            try (FileInputStream fin = new FileInputStream(manifestFile)) {
                manifest = new Manifest(fin);
            }
        }

        final Attributes atts = manifest.getMainAttributes();

        // TODO: define a mechanism to hide classes
        // String export = manifest.getMainAttributes().getValue("Export");

        List<File> paths = new ArrayList<File>();
        if (isLinked) {
            parseClassPath(manifest, archive, paths, "Libraries", ",");
            parseClassPath(manifest, archive, paths, "Class-Path", " +"); // backward compatibility

            baseResourceURL = resolve(archive,atts.getValue("Resource-Path")).toURI().toURL();
        } else {
            File classes = new File(expandDir, "WEB-INF/classes");
            if (classes.exists())
                paths.add(classes);
            File lib = new File(expandDir, "WEB-INF/lib");
            File[] libs = lib.listFiles(JAR_FILTER);
            if (libs != null)
                paths.addAll(Arrays.asList(libs));

            baseResourceURL = expandDir.toPath().toUri().toURL();
        }
        File disableFile = new File(archive.getPath() + ".disabled");
        if (disableFile.exists()) {
            LOGGER.info("Plugin " + archive.getName() + " is disabled");
        }

        // compute dependencies
        List<PluginWrapper.Dependency> dependencies = new ArrayList<PluginWrapper.Dependency>();
        List<PluginWrapper.Dependency> optionalDependencies = new ArrayList<PluginWrapper.Dependency>();
        String v = atts.getValue("Plugin-Dependencies");
        if (v != null) {
            for (String s : v.split(",")) {
                PluginWrapper.Dependency d = new PluginWrapper.Dependency(s);
                if (d.optional) {
                    optionalDependencies.add(d);
                } else {
                    dependencies.add(d);
                }
            }
        }
        // append implied dependencies on detached plugins (optional, so old plugins still load)
        fix(atts,optionalDependencies);

        // Register global classpath mask. This is useful for hiding JavaEE APIs that you might see from the container,
        // such as database plugin for JPA support. The Mask-Classes attribute is insufficient because those classes
        // also need to be masked by all the other plugins that depend on the database plugin.
        String masked = atts.getValue("Global-Mask-Classes");
        if(masked!=null) {
            for (String pkg : masked.trim().split("[ \t\r\n]+"))
                coreClassLoader.add(pkg);
        }

        ClassLoader dependencyLoader = new DependencyClassLoader(coreClassLoader, archive, Util.join(dependencies,optionalDependencies));
        dependencyLoader = getBaseClassLoader(atts, dependencyLoader);

        return new PluginWrapper(pluginManager, archive, manifest, baseResourceURL,
                createClassLoader(paths, dependencyLoader, atts), disableFile, dependencies, optionalDependencies);
    }

    /**
     * Appends implied (detached-plugin) dependencies for the plugin described by
     * the manifest attributes to the given optional-dependency list.
     * Falls back to "Hudson-Version" when "Jenkins-Version" is absent.
     */
    private static void fix(Attributes atts, List<PluginWrapper.Dependency> optionalDependencies) {
        String pluginName = atts.getValue("Short-Name");
        String jenkinsVersion = atts.getValue("Jenkins-Version");
        if (jenkinsVersion==null)
            jenkinsVersion = atts.getValue("Hudson-Version");
        optionalDependencies.addAll(getImpliedDependencies(pluginName, jenkinsVersion));
    }

    /**
     * Returns all the plugin dependencies that are implicit based on a particular Jenkins version
     * @since 2.0
     */
    @Nonnull
    public static List<PluginWrapper.Dependency> getImpliedDependencies(String pluginName, String jenkinsVersion) {
        List<PluginWrapper.Dependency> out = new ArrayList<>();
        for (DetachedPlugin detached : DETACHED_LIST) {
            // don't fix the dependency for itself, or else we'll have a cycle
            if (detached.shortName.equals(pluginName)) {
                continue;
            }
            if (BREAK_CYCLES.contains(pluginName + '/' + detached.shortName)) {
                LOGGER.log(Level.FINE, "skipping implicit dependency {0} → {1}", new Object[] {pluginName, detached.shortName});
                continue;
            }
            // some earlier versions of maven-hpi-plugin apparently puts "null" as a literal in Hudson-Version. watch out for them.
            if (jenkinsVersion == null || jenkinsVersion.equals("null") || new VersionNumber(jenkinsVersion).compareTo(detached.splitWhen) <= 0) {
                out.add(new PluginWrapper.Dependency(detached.shortName + ':' + detached.requiredVersion));
                LOGGER.log(Level.FINE, "adding implicit dependency {0} → {1} because of {2}", new Object[] {pluginName, detached.shortName, jenkinsVersion});
            }
        }
        return out;
    }

    /** @deprecated use {@link #createClassLoader(List, ClassLoader, Attributes)} which honors manifest attributes. */
    @Deprecated
    protected ClassLoader createClassLoader(List<File> paths, ClassLoader parent) throws IOException {
        return createClassLoader( paths, parent, null );
    }

    /**
     * Creates the classloader that can load all the specified jar files and delegate to the given parent.
     * If the manifest sets "PluginFirstClassLoader" to true, a plugin-first (child-first) loader is used
     * instead of the default parent-first {@link AntClassLoader2}.
     */
    protected ClassLoader createClassLoader(List<File> paths, ClassLoader parent, Attributes atts) throws IOException {
        if (atts != null) {
            String usePluginFirstClassLoader =
                    atts.getValue( "PluginFirstClassLoader" );
            if (Boolean.valueOf( usePluginFirstClassLoader )) {
                PluginFirstClassLoader classLoader = new PluginFirstClassLoader();
                classLoader.setParentFirst( false );
                classLoader.setParent( parent );
                classLoader.addPathFiles( paths );
                return classLoader;
            }
        }

        AntClassLoader2 classLoader = new AntClassLoader2(parent);
        classLoader.addPathFiles(paths);
        return classLoader;
    }

    /**
     * Get the list of all plugins that have ever been {@link DetachedPlugin detached} from Jenkins core.
     * @return A {@link List} of {@link DetachedPlugin}s.
     */
    @Restricted(NoExternalUse.class)
    public static @Nonnull List<DetachedPlugin> getDetachedPlugins() {
        return DETACHED_LIST;
    }

    /**
     * Get the list of plugins that have been detached since a specific Jenkins release version.
     * @param since The Jenkins version.
     * @return A {@link List} of {@link DetachedPlugin}s.
     */
    @Restricted(NoExternalUse.class)
    public static @Nonnull List<DetachedPlugin> getDetachedPlugins(@Nonnull VersionNumber since) {
        List<DetachedPlugin> detachedPlugins = new ArrayList<>();
        for (DetachedPlugin detachedPlugin : DETACHED_LIST) {
            if (!detachedPlugin.getSplitWhen().isOlderThan(since)) {
                detachedPlugins.add(detachedPlugin);
            }
        }
        return detachedPlugins;
    }

    /**
     * Is the named plugin a plugin that was detached from Jenkins at some point in the past.
     * @param pluginId The plugin ID.
     * @return {@code true} if the plugin is a plugin that was detached from Jenkins at some
     * point in the past, otherwise {@code false}.
     */
    @Restricted(NoExternalUse.class)
    public static boolean isDetachedPlugin(@Nonnull String pluginId) {
        for (DetachedPlugin detachedPlugin : DETACHED_LIST) {
            if (detachedPlugin.getShortName().equals(pluginId)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Information about plugins that were originally in the core.
     * <p>
     * A detached plugin is one that has any of the following characteristics:
     * <ul>
     * <li>
     * Was an existing plugin that at some time previously bundled with the Jenkins war file.
     * </li>
     * <li>
     * Was previously code in jenkins core that was split out to a separate plugin (but may not have
     * ever been bundled in a jenkins war file - i.e. it gets split after this 2.0 update).
     * </li>
     * </ul>
     */
    @Restricted(NoExternalUse.class)
    public static final class DetachedPlugin {
        private final String shortName;
        /**
         * Plugins built for this Jenkins version (and earlier) will automatically be assumed to have
         * this plugin in its dependency.
         *
         * When core/pom.xml version is 1.123-SNAPSHOT when the code is removed, then this value should
         * be "1.123.*" (because 1.124 will be the first version that doesn't include the removed code.)
         */
        private final VersionNumber splitWhen;
        private final String requiredVersion;

        private DetachedPlugin(String shortName, String splitWhen, String requiredVersion) {
            this.shortName = shortName;
            this.splitWhen = new VersionNumber(splitWhen);
            this.requiredVersion = requiredVersion;
        }

        /**
         * Get the short name of the plugin.
         * @return The short name of the plugin.
         */
        public String getShortName() {
            return shortName;
        }

        /**
         * Get the Jenkins version from which the plugin was detached.
         * @return The Jenkins version from which the plugin was detached.
         */
        public VersionNumber getSplitWhen() {
            return splitWhen;
        }

        /**
         * Gets the minimum required version for the current version of Jenkins.
         *
         * @return the minimum required version for the current version of Jenkins.
         * @since 2.16
         */
        public VersionNumber getRequiredVersion() {
            return new VersionNumber(requiredVersion);
        }
    }

    // Chronological registry of everything ever detached from core; consulted by
    // getImpliedDependencies() to graft implicit dependencies onto older plugins.
    private static final List<DetachedPlugin> DETACHED_LIST = Collections.unmodifiableList(Arrays.asList(
            new DetachedPlugin("maven-plugin", "1.296", "1.296"),
            new DetachedPlugin("subversion", "1.310", "1.0"),
            new DetachedPlugin("cvs", "1.340", "0.1"),
            new DetachedPlugin("ant", "1.430.*", "1.0"),
            new DetachedPlugin("javadoc", "1.430.*", "1.0"),
            new DetachedPlugin("external-monitor-job", "1.467.*", "1.0"),
            new DetachedPlugin("ldap", "1.467.*", "1.0"),
            new DetachedPlugin("pam-auth", "1.467.*", "1.0"),
            new DetachedPlugin("mailer", "1.493.*", "1.2"),
            new DetachedPlugin("matrix-auth", "1.535.*", "1.0.2"),
            new DetachedPlugin("windows-slaves", "1.547.*", "1.0"),
            new DetachedPlugin("antisamy-markup-formatter", "1.553.*", "1.0"),
            new DetachedPlugin("matrix-project", "1.561.*", "1.0"),
            new DetachedPlugin("junit", "1.577.*", "1.0"),
            new DetachedPlugin("bouncycastle-api", "2.16.*", "2.16.0")
    ));

    /** Implicit dependencies that are known to be unnecessary and which must be cut out to prevent a dependency cycle among bundled plugins.
     * Entries are "dependerShortName/detachedShortName". */
    private static final Set<String> BREAK_CYCLES = new HashSet<String>(Arrays.asList(
            "script-security/matrix-auth",
            "script-security/windows-slaves",
            "script-security/antisamy-markup-formatter",
            "script-security/matrix-project",
            "credentials/matrix-auth",
            "credentials/windows-slaves"
    ));

    /**
     * Computes the classloader that takes the class masking into account.
     *
     * <p>
     * This mechanism allows plugins to have their own versions for libraries that core bundles.
     */
    private ClassLoader getBaseClassLoader(Attributes atts, ClassLoader base) {
        String masked = atts.getValue("Mask-Classes");
        if(masked!=null)
            base = new MaskingClassLoader(base, masked.trim().split("[ \t\r\n]+"));
        return base;
    }

    // Intentionally a no-op in this strategy.
    public void initializeComponents(PluginWrapper plugin) {
    }

    /**
     * Discovers extension components of the given type via all registered
     * {@link ExtensionFinder}s, then filters them through {@link ExtensionFilter}.
     * When looking for ExtensionFinders themselves, only Sezpoz is used to avoid recursion.
     */
    public <T> List<ExtensionComponent<T>> findComponents(Class<T> type, Hudson hudson) {

        List<ExtensionFinder> finders;
        if (type==ExtensionFinder.class) {
            // Avoid infinite recursion of using ExtensionFinders to find ExtensionFinders
            finders = Collections.<ExtensionFinder>singletonList(new ExtensionFinder.Sezpoz());
        } else {
            finders = hudson.getExtensionList(ExtensionFinder.class);
        }

        /**
         * See {@link ExtensionFinder#scout(Class, Hudson)} for the dead lock issue and what this does.
         */
        if (LOGGER.isLoggable(Level.FINER))
            LOGGER.log(Level.FINER,"Scout-loading ExtensionList: "+type, new Throwable());
        for (ExtensionFinder finder : finders) {
            finder.scout(type, hudson);
        }

        List<ExtensionComponent<T>> r = Lists.newArrayList();
        for (ExtensionFinder finder : finders) {
            try {
                r.addAll(finder.find(type, hudson));
            } catch (AbstractMethodError e) {
                // backward compatibility
                for (T t : finder.findExtensions(type, hudson))
                    r.add(new ExtensionComponent<T>(t));
            }
        }

        List<ExtensionComponent<T>> filtered = Lists.newArrayList();
        for (ExtensionComponent<T> e : r) {
            if (ExtensionFilter.isAllowed(type,e))
                filtered.add(e);
        }
        return filtered;
    }

    /**
     * Instantiates the wrapper's {@link Plugin} class (or a {@link DummyImpl} if none is declared),
     * wires it to the servlet context, and starts it. The plugin classloader is temporarily
     * installed as the thread context classloader for the duration.
     */
    public void load(PluginWrapper wrapper) throws IOException {
        // override the context classloader. This no longer makes sense,
        // but it is left for the backward compatibility
        ClassLoader old = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(wrapper.classLoader);
        try {
            String className = wrapper.getPluginClass();
            if(className==null) {
                // use the default dummy instance
                wrapper.setPlugin(new DummyImpl());
            } else {
                try {
                    Class<?> clazz = wrapper.classLoader.loadClass(className);
                    Object o = clazz.newInstance();
                    if(!(o instanceof Plugin)) {
                        throw new IOException(className+" doesn't extend from hudson.Plugin");
                    }
                    wrapper.setPlugin((Plugin) o);
                } catch (LinkageError | ClassNotFoundException e) {
                    throw new IOException("Unable to load " + className + " from " + wrapper.getShortName(),e);
                } catch (IllegalAccessException | InstantiationException e) {
                    throw new IOException("Unable to create instance of " + className + " from " + wrapper.getShortName(),e);
                }
            }

            // initialize plugin
            try {
                Plugin plugin = wrapper.getPlugin();
                plugin.setServletContext(pluginManager.context);
                startPlugin(wrapper);
            } catch(Throwable t) {
                // gracefully handle any error in plugin.
                throw new IOException("Failed to initialize",t);
            }
        } finally {
            Thread.currentThread().setContextClassLoader(old);
        }
    }

    /** Invokes {@link Plugin#start()}; extracted so subclasses/tests can intercept startup. */
    public void startPlugin(PluginWrapper plugin) throws Exception {
        plugin.getPlugin().start();
    }

    /**
     * Invalidates the cached transitive-dependency list of the depender's
     * {@link DependencyClassLoader} so it is recomputed on next use.
     */
    @Override
    public void updateDependency(PluginWrapper depender, PluginWrapper dependee) {
        DependencyClassLoader classLoader = findAncestorDependencyClassLoader(depender.classLoader);
        if (classLoader != null) {
            classLoader.updateTransientDependencies();
            LOGGER.log(Level.INFO, "Updated dependency of {0}", depender.getShortName());
        }
    }

    /**
     * Walks the classloader parent chain (including Ant's "configured parent")
     * to find the nearest {@link DependencyClassLoader}, or null if none.
     */
    private DependencyClassLoader findAncestorDependencyClassLoader(ClassLoader classLoader)
    {
        for (; classLoader != null; classLoader = classLoader.getParent()) {
            if (classLoader instanceof DependencyClassLoader) {
                return (DependencyClassLoader)classLoader;
            }

            if (classLoader instanceof AntClassLoader) {
                // AntClassLoaders hold parents not only as AntClassLoader#getParent()
                // but also as AntClassLoader#getConfiguredParent()
                DependencyClassLoader ret = findAncestorDependencyClassLoader(
                        ((AntClassLoader)classLoader).getConfiguredParent()
                );
                if (ret != null) {
                    return ret;
                }
            }
        }
        return null;
    }

    /** Resolves {@code relative} against the directory containing {@code base}; absolute paths pass through. */
    private static File resolve(File base, String relative) {
        File rel = new File(relative);
        if(rel.isAbsolute())
            return rel;
        else
            return new File(base.getParentFile(),relative);
    }

    /**
     * Parses a manifest classpath-style attribute into files (resolved against the archive),
     * expanding "*" wildcards via an Ant {@link FileSet} scan.
     */
    private static void parseClassPath(Manifest manifest, File archive, List<File> paths, String attributeName, String separator) throws IOException {
        String classPath = manifest.getMainAttributes().getValue(attributeName);
        if(classPath==null) return; // attribute not found
        for (String s : classPath.split(separator)) {
            File file = resolve(archive, s);
            if(file.getName().contains("*")) {
                // handle wildcard
                FileSet fs = new FileSet();
                File dir = file.getParentFile();
                fs.setDir(dir);
                fs.setIncludes(file.getName());
                for( String included : fs.getDirectoryScanner(new Project()).getIncludedFiles() ) {
                    paths.add(new File(dir,included));
                }
            } else {
                if(!file.exists())
                    throw new IOException("No such file: "+file);
                paths.add(file);
            }
        }
    }

    /**
     * Explodes the plugin into a directory, if necessary.
     */
    private static void explode(File archive, File destDir) throws IOException {
        destDir.mkdirs();

        // timestamp check
        File explodeTime = new File(destDir,".timestamp2");
        if(explodeTime.exists() && explodeTime.lastModified()==archive.lastModified())
            return; // no need to expand

        // delete the contents so that old files won't interfere with new files
        Util.deleteRecursive(destDir);

        try {
            Project prj = new Project();
            unzipExceptClasses(archive, destDir, prj);
            createClassJarFromWebInfClasses(archive, destDir, prj);
        } catch (BuildException x) {
            throw new IOException("Failed to expand " + archive,x);
        }

        try {
            new FilePath(explodeTime).touch(archive.lastModified());
        } catch (InterruptedException e) {
            throw new AssertionError(e); // impossible
        }
    }

    /**
     * Repackage classes directory into a jar file to make it remoting friendly.
     * The remoting layer can cache jar files but not class files.
     */
    private static void createClassJarFromWebInfClasses(File archive, File destDir, Project prj) throws IOException {
        File classesJar = new File(destDir, "WEB-INF/lib/classes.jar");

        ZipFileSet zfs = new ZipFileSet();
        zfs.setProject(prj);
        zfs.setSrc(archive);
        zfs.setIncludes("WEB-INF/classes/");

        MappedResourceCollection mapper = new MappedResourceCollection();
        mapper.add(zfs);

        GlobPatternMapper gm = new GlobPatternMapper();
        gm.setFrom("WEB-INF/classes/*");
        gm.setTo("*");
        mapper.add(gm);

        final long dirTime = archive.lastModified();
        // this ZipOutputStream is reused and not created for each directory
        try (ZipOutputStream wrappedZOut = new ZipOutputStream(new NullOutputStream()) {
            @Override
            public void putNextEntry(ZipEntry ze) throws IOException {
                ze.setTime(dirTime+1999);   // roundup
                super.putNextEntry(ze);
            }
        }) {
            Zip z = new Zip() {
                /**
                 * Forces the fixed timestamp for directories to make sure
                 * classes.jar always get a consistent checksum.
                 */
                protected void zipDir(Resource dir, ZipOutputStream zOut, String vPath,
                                      int mode, ZipExtraField[] extra)
                        throws IOException {
                    // use wrappedZOut instead of zOut
                    super.zipDir(dir,wrappedZOut,vPath,mode,extra);
                }
            };
            z.setProject(prj);
            z.setTaskType("zip");
            classesJar.getParentFile().mkdirs();
            z.setDestFile(classesJar);
            z.add(mapper);
            z.execute();
        }
    }

    /** Unpacks the archive into destDir, skipping WEB-INF/classes/ (which is jarred up separately). */
    private static void unzipExceptClasses(File archive, File destDir, Project prj) {
        Expand e = new Expand();
        e.setProject(prj);
        e.setTaskType("unzip");
        e.setSrc(archive);
        e.setDest(destDir);
        PatternSet p = new PatternSet();
        p.setExcludes("WEB-INF/classes/");
        e.addPatternset(p);
        e.execute();
    }

    /**
     * Used to load classes from dependency plugins.
     */
    final class DependencyClassLoader extends ClassLoader {
        /**
         * This classloader is created for this plugin. Useful during debugging.
         */
        private final File _for;

        // Declared (direct) dependencies of the plugin, optional ones included.
        private List<Dependency> dependencies;

        /**
         * Topologically sorted list of transient dependencies.
         */
        private volatile List<PluginWrapper> transientDependencies;

        public DependencyClassLoader(ClassLoader parent, File archive, List<Dependency> dependencies) {
            super(parent);
            this._for = archive;
            this.dependencies = dependencies;
        }

        private void updateTransientDependencies() {
            // This will be recalculated at the next time.
            transientDependencies = null;
        }

        // Lazily computes (and caches) the topologically sorted transitive closure
        // of active dependency plugins, used for FAST_LOOKUP resolution.
        private List<PluginWrapper> getTransitiveDependencies() {
            if (transientDependencies==null) {
                CyclicGraphDetector<PluginWrapper> cgd = new CyclicGraphDetector<PluginWrapper>() {
                    @Override
                    protected List<PluginWrapper> getEdges(PluginWrapper pw) {
                        List<PluginWrapper> dep = new ArrayList<PluginWrapper>();
                        for (Dependency d : pw.getDependencies()) {
                            PluginWrapper p = pluginManager.getPlugin(d.shortName);
                            if (p!=null && p.isActive())
                                dep.add(p);
                        }
                        return dep;
                    }
                };

                try {
                    for (Dependency d : dependencies) {
                        PluginWrapper p = pluginManager.getPlugin(d.shortName);
                        if (p!=null && p.isActive())
                            cgd.run(Collections.singleton(p));
                    }
                } catch (CycleDetectedException e) {
                    throw new AssertionError(e);    // such error should have been reported earlier
                }

                transientDependencies = cgd.getSorted();
            }
            return transientDependencies;
        }

//        public List<PluginWrapper> getDependencyPluginWrappers() {
//            List<PluginWrapper> r = new ArrayList<PluginWrapper>();
//            for (Dependency d : dependencies) {
//                PluginWrapper w = pluginManager.getPlugin(d.shortName);
//                if (w!=null)    r.add(w);
//            }
//            return r;
//        }

        @Override
        protected Class<?> findClass(String name) throws ClassNotFoundException {
            if (PluginManager.FAST_LOOKUP) {
                for (PluginWrapper pw : getTransitiveDependencies()) {
                    try {
                        Class<?> c = ClassLoaderReflectionToolkit._findLoadedClass(pw.classLoader, name);
                        if (c!=null)    return c;
                        return ClassLoaderReflectionToolkit._findClass(pw.classLoader, name);
                    } catch (ClassNotFoundException e) {
                        //not found. try next
                    }
                }
            } else {
                for (Dependency dep : dependencies) {
                    PluginWrapper p = pluginManager.getPlugin(dep.shortName);
                    if(p!=null)
                        try {
                            return p.classLoader.loadClass(name);
                        } catch (ClassNotFoundException _) {
                            // try next
                        }
                }
            }

            throw new ClassNotFoundException(name);
        }

        @Override
        protected Enumeration<URL> findResources(String name) throws IOException {
            HashSet<URL> result = new HashSet<URL>();

            if (PluginManager.FAST_LOOKUP) {
                    for (PluginWrapper pw : getTransitiveDependencies()) {
                        Enumeration<URL> urls = ClassLoaderReflectionToolkit._findResources(pw.classLoader, name);
                        while (urls != null && urls.hasMoreElements())
                            result.add(urls.nextElement());
                    }
            } else {
                for (Dependency dep : dependencies) {
                    PluginWrapper p = pluginManager.getPlugin(dep.shortName);
                    if (p!=null) {
                        Enumeration<URL> urls = p.classLoader.getResources(name);
                        while (urls != null && urls.hasMoreElements())
                            result.add(urls.nextElement());
                    }
                }
            }

            return Collections.enumeration(result);
        }

        @Override
        protected URL findResource(String name) {
            if (PluginManager.FAST_LOOKUP) {
                    for (PluginWrapper pw : getTransitiveDependencies()) {
                        URL url = ClassLoaderReflectionToolkit._findResource(pw.classLoader, name);
                        if (url!=null)    return url;
                    }
            } else {
                for (Dependency dep : dependencies) {
                    PluginWrapper p = pluginManager.getPlugin(dep.shortName);
                    if(p!=null) {
                        URL url = p.classLoader.getResource(name);
                        if (url!=null)
                            return url;
                    }
                }
            }

            return null;
        }
    }

    /**
     * {@link AntClassLoader} with a few methods exposed, {@link Closeable} support, and {@link Transformer} support.
     */
    private final class AntClassLoader2 extends AntClassLoader implements Closeable {
        // Reflective grab of AntClassLoader's private path list, needed by findResource below.
        private final Vector pathComponents;

        private AntClassLoader2(ClassLoader parent) {
            super(parent,true);

            try {
                Field $pathComponents = AntClassLoader.class.getDeclaredField("pathComponents");
                $pathComponents.setAccessible(true);
                pathComponents = (Vector)$pathComponents.get(this);
            } catch (NoSuchFieldException | IllegalAccessException e) {
                throw new Error(e);
            }
        }

        public void addPathFiles(Collection<File> paths) throws IOException {
            for (File f : paths)
                addPathFile(f);
        }

        public void close() throws IOException {
            cleanup();
        }

        /**
         * As of 1.8.0, {@link AntClassLoader} doesn't implement {@link #findResource(String)}
         * in any meaningful way, which breaks fast lookup. Implement it properly.
         */
        @Override
        protected URL findResource(String name) {
            URL url = null;

            // try and load from this loader if the parent either didn't find
            // it or wasn't consulted.
            Enumeration e = pathComponents.elements();
            while (e.hasMoreElements() && url == null) {
                File pathComponent = (File) e.nextElement();
                url = getResourceURL(pathComponent, name);
                if (url != null) {
                    log("Resource " + name
                        + " loaded from ant loader", Project.MSG_DEBUG);
                }
            }

            return url;
        }

        // Runs every class through the compatibility bytecode Transformer unless disabled.
        @Override
        protected Class defineClassFromData(File container, byte[] classData, String classname) throws IOException {
            if (!DISABLE_TRANSFORMER)
                classData = pluginManager.getCompatibilityTransformer().transform(classname, classData, this);
            return super.defineClassFromData(container, classData, classname);
        }
    }

    public static boolean useAntClassLoader = SystemProperties.getBoolean(ClassicPluginStrategy.class.getName()+".useAntClassLoader");
    private static final Logger LOGGER = Logger.getLogger(ClassicPluginStrategy.class.getName());
    public static boolean DISABLE_TRANSFORMER = SystemProperties.getBoolean(ClassicPluginStrategy.class.getName()+".noBytecodeTransformer");
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.cpp; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.AbstractAction; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.ActionExecutionException; import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ActionOwner; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.Artifact.MiddlemanExpander; import com.google.devtools.build.lib.actions.ArtifactResolver; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.Executor; import com.google.devtools.build.lib.actions.PackageRootResolutionException; import com.google.devtools.build.lib.actions.PackageRootResolver; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.extra.CppCompileInfo; import 
com.google.devtools.build.lib.actions.extra.ExtraActionInfo; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.PerLabelOptions; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.collect.CollectionUtils; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.events.EventKind; import com.google.devtools.build.lib.profiler.Profiler; import com.google.devtools.build.lib.profiler.ProfilerTask; import com.google.devtools.build.lib.rules.apple.AppleConfiguration; import com.google.devtools.build.lib.rules.apple.Platform; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration; import com.google.devtools.build.lib.rules.cpp.CppCompileActionContext.Reply; import com.google.devtools.build.lib.rules.cpp.CppConfiguration.Tool; import com.google.devtools.build.lib.util.DependencySet; import com.google.devtools.build.lib.util.FileType; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.OS; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.util.ShellEscaper; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import 
java.util.Set; import java.util.UUID; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; /** * Action that represents some kind of C++ compilation step. */ @ThreadCompatible public class CppCompileAction extends AbstractAction implements IncludeScannable { /** * Represents logic that determines which artifacts, if any, should be added to the actual inputs * for each included file (in addition to the included file itself) */ public interface IncludeResolver { /** * Returns the set of files to be added for an included file (as returned in the .d file) */ Collection<Artifact> getInputsForIncludedFile( Artifact includedFile, ArtifactResolver artifactResolver); } public static final IncludeResolver VOID_INCLUDE_RESOLVER = new IncludeResolver() { @Override public Collection<Artifact> getInputsForIncludedFile(Artifact includedFile, ArtifactResolver artifactResolver) { return ImmutableList.of(); } }; private static final int VALIDATION_DEBUG = 0; // 0==none, 1==warns/errors, 2==all private static final boolean VALIDATION_DEBUG_WARN = VALIDATION_DEBUG >= 1; /** * A string constant for the c compilation action. */ public static final String C_COMPILE = "c-compile"; /** * A string constant for the c++ compilation action. */ public static final String CPP_COMPILE = "c++-compile"; /** * A string constant for the c++ header parsing. */ public static final String CPP_HEADER_PARSING = "c++-header-parsing"; /** * A string constant for the c++ header preprocessing. */ public static final String CPP_HEADER_PREPROCESSING = "c++-header-preprocessing"; /** * A string constant for the c++ module compilation action. * Note: currently we don't support C module compilation. */ public static final String CPP_MODULE_COMPILE = "c++-module-compile"; /** * A string constant for the assembler actions. 
*/ public static final String ASSEMBLE = "assemble"; public static final String PREPROCESS_ASSEMBLE = "preprocess-assemble"; private final BuildConfiguration configuration; protected final Artifact outputFile; private final Label sourceLabel; private final Artifact dwoFile; private final Artifact optionalSourceFile; private final NestedSet<Artifact> mandatoryInputs; private final boolean shouldScanIncludes; private final CppCompilationContext context; private final Collection<PathFragment> extraSystemIncludePrefixes; private final Iterable<IncludeScannable> lipoScannables; private final CppCompileCommandLine cppCompileCommandLine; private final boolean usePic; @VisibleForTesting final CppConfiguration cppConfiguration; protected final Class<? extends CppCompileActionContext> actionContext; private final IncludeResolver includeResolver; /** * Identifier for the actual execution time behavior of the action. * * <p>Required because the behavior of this class can be modified by injecting code in the * constructor or by inheritance, and we want to have different cache keys for those. */ private final UUID actionClassId; // This can be read/written from multiple threads, and so accesses should be synchronized. @GuardedBy("this") private boolean inputsKnown = false; /** * Set when the action prepares for execution. Used to preserve state between preparation and * execution. */ private Collection<Artifact> additionalInputs = null; /** * Creates a new action to compile C/C++ source files. * * @param owner the owner of the action, usually the configured target that * emitted it * @param sourceFile the source file that should be compiled. {@code mandatoryInputs} must * contain this file * @param shouldScanIncludes a boolean indicating whether scanning of {@code sourceFile} * is to be performed looking for inclusions. 
   * @param sourceLabel the label of the rule the source file is generated by
   * @param mandatoryInputs any additional files that need to be present for the
   *        compilation to succeed, can be empty but not null, for example, extra sources for FDO.
   * @param outputFile the object file that is written as result of the
   *        compilation, or the fake object for {@link FakeCppCompileAction}s
   * @param dotdFile the .d file that is generated as a side-effect of
   *        compilation
   * @param gcnoFile the coverage notes that are written in coverage mode, can
   *        be null
   * @param dwoFile the .dwo output file where debug information is stored for Fission
   *        builds (null if Fission mode is disabled)
   * @param optionalSourceFile an additional optional source file (null if unneeded)
   * @param configuration the build configurations
   * @param context the compilation context
   * @param copts options for the compiler
   * @param coptsFilter regular expression to remove options from {@code copts}
   */
  protected CppCompileAction(ActionOwner owner,
      // TODO(bazel-team): Eventually we will remove 'features'; all functionality in 'features'
      // will be provided by 'featureConfiguration'.
      ImmutableList<String> features,
      FeatureConfiguration featureConfiguration,
      CcToolchainFeatures.Variables variables,
      Artifact sourceFile,
      boolean shouldScanIncludes,
      Label sourceLabel,
      NestedSet<Artifact> mandatoryInputs,
      Artifact outputFile,
      DotdFile dotdFile,
      @Nullable Artifact gcnoFile,
      @Nullable Artifact dwoFile,
      Artifact optionalSourceFile,
      BuildConfiguration configuration,
      CppConfiguration cppConfiguration,
      CppCompilationContext context,
      Class<? extends CppCompileActionContext> actionContext,
      ImmutableList<String> copts,
      ImmutableList<String> pluginOpts,
      Predicate<String> coptsFilter,
      ImmutableList<PathFragment> extraSystemIncludePrefixes,
      @Nullable String fdoBuildStamp,
      IncludeResolver includeResolver,
      Iterable<IncludeScannable> lipoScannables,
      UUID actionClassId,
      boolean usePic,
      RuleContext ruleContext) {
    // Outputs: object file, optional .d file, optional coverage notes, optional .dwo file.
    super(owner,
        createInputs(mandatoryInputs, context.getCompilationPrerequisites(), optionalSourceFile),
        CollectionUtils.asListWithoutNulls(outputFile,
            (dotdFile == null ? null : dotdFile.artifact()), gcnoFile, dwoFile));
    this.configuration = configuration;
    this.sourceLabel = sourceLabel;
    this.outputFile = Preconditions.checkNotNull(outputFile);
    this.dwoFile = dwoFile;
    this.optionalSourceFile = optionalSourceFile;
    this.context = context;
    this.extraSystemIncludePrefixes = extraSystemIncludePrefixes;
    this.includeResolver = includeResolver;
    this.cppConfiguration = cppConfiguration;
    // inputsKnown begins as the logical negation of shouldScanIncludes.
    // When scanning includes, the inputs begin as not known, and become
    // known after inclusion scanning. When *not* scanning includes,
    // the inputs are as declared, hence known, and remain so.
    this.shouldScanIncludes = shouldScanIncludes;
    this.inputsKnown = !shouldScanIncludes;
    this.cppCompileCommandLine = new CppCompileCommandLine(
        sourceFile, dotdFile, copts, coptsFilter, pluginOpts, features, featureConfiguration,
        variables, fdoBuildStamp);
    this.actionContext = actionContext;
    this.lipoScannables = lipoScannables;
    this.actionClassId = actionClassId;
    this.usePic = usePic;
    // We do not need to include the middleman artifact since it is a generated
    // artifact and will definitely exist prior to this action execution.
    this.mandatoryInputs = mandatoryInputs;
    verifyIncludePaths(ruleContext);
  }

  /**
   * Verifies that the include paths of this action are within the limits of the execution root.
   */
  private void verifyIncludePaths(RuleContext ruleContext) {
    if (ruleContext == null) {
      return;
    }
    Iterable<PathFragment> ignoredDirs = getValidationIgnoredDirs();
    // We currently do not check the output of:
    // - getQuoteIncludeDirs(): those only come from includes attributes, and are checked in
    //   CcCommon.getIncludeDirsFromIncludesAttribute().
    // - getBuiltinIncludeDirs(): while in practice this doesn't happen, bazel can be configured
    //   to use an absolute system root, in which case the builtin include dirs might be absolute.
    for (PathFragment include : Iterables.concat(getIncludeDirs(), getSystemIncludeDirs())) {
      // Ignore headers from built-in include directories.
      if (FileSystemUtils.startsWithAny(include, ignoredDirs)) {
        continue;
      }
      // Reject absolute paths, and relative paths whose ".." segments escape the execution root
      // (normalizing against the empty fragment leaves a non-normalized path in that case).
      if (include.isAbsolute()
          || !PathFragment.EMPTY_FRAGMENT.getRelative(include).normalize().isNormalized()) {
        ruleContext.ruleError(
            "The include path '" + include + "' references a path outside of the execution root.");
      }
    }
  }

  /**
   * Assembles the action's input set from the optional source file (if any), the compilation
   * prerequisites of the context, and the mandatory inputs.
   */
  private static NestedSet<Artifact> createInputs(
      NestedSet<Artifact> mandatoryInputs,
      Set<Artifact> prerequisites,
      Artifact optionalSourceFile) {
    NestedSetBuilder<Artifact> builder = NestedSetBuilder.stableOrder();
    if (optionalSourceFile != null) {
      builder.add(optionalSourceFile);
    }
    builder.addAll(prerequisites);
    builder.addTransitive(mandatoryInputs);
    return builder.build();
  }

  /**
   * Whether we should do "include scanning". Note that this does *not* mean whether we should parse
   * the .d files to determine which include files were used during compilation. Instead, this means
   * whether we should a) run the pre-execution include scanner (see {@code IncludeScanningContext})
   * if one exists and b) whether the action inputs should be modified to match the results of that
   * pre-execution scanning and (if enabled) again after execution to match the results of the .d
   * file parsing.
   *
   * <p>This does *not* have anything to do with "hdrs_check".
   */
  public boolean shouldScanIncludes() {
    return shouldScanIncludes;
  }

  @Override
  public List<PathFragment> getBuiltInIncludeDirectories() {
    return cppConfiguration.getBuiltInIncludeDirectories();
  }

  @Nullable
  @Override
  public Artifact getBuiltInIncludeFile() {
    return cppConfiguration.getBuiltInIncludeFile();
  }

  /** Returns the host system name as reported by the C++ toolchain configuration. */
  public String getHostSystemName() {
    return cppConfiguration.getHostSystemName();
  }

  @Override
  public NestedSet<Artifact> getMandatoryInputs() {
    return mandatoryInputs;
  }

  @Override
  public synchronized boolean inputsKnown() {
    return inputsKnown;
  }

  /**
   * Returns the list of additional inputs found by dependency discovery, during action preparation,
   * and clears the stored list. {@link #prepare} must be called before this method is called, on
   * each action execution.
   */
  public Collection<? extends ActionInput> getAdditionalInputs() {
    Collection<? extends ActionInput> result = Preconditions.checkNotNull(additionalInputs);
    additionalInputs = null;
    return result;
  }

  @Override
  public boolean discoversInputs() {
    return true;
  }

  @Nullable
  @Override
  public Collection<Artifact> discoverInputs(ActionExecutionContext actionExecutionContext)
      throws ActionExecutionException, InterruptedException {
    Executor executor = actionExecutionContext.getExecutor();
    Collection<Artifact> initialResult;
    try {
      // Run the pre-execution include scanner (if the strategy provides one).
      initialResult = executor.getContext(actionContext)
          .findAdditionalInputs(this, actionExecutionContext);
    } catch (ExecException e) {
      throw e.toActionExecutionException("Include scanning of rule '" + getOwner().getLabel() + "'",
          executor.getVerboseFailures(), this);
    }
    if (initialResult == null) {
      // We will find inputs during execution. Store an empty list to show we did try to discover
      // inputs and return null to inform the caller that inputs will be discovered later.
      this.additionalInputs = ImmutableList.of();
      return null;
    }
    this.additionalInputs = initialResult;
    // In some cases, execution backends need extra files for each included file. Add them
    // to the set of inputs the caller may need to be aware of.
    Collection<Artifact> result = new HashSet<>();
    ArtifactResolver artifactResolver =
        executor.getContext(IncludeScanningContext.class).getArtifactResolver();
    for (Artifact artifact : initialResult) {
      result.addAll(includeResolver.getInputsForIncludedFile(artifact, artifactResolver));
    }
    for (Artifact artifact : getInputs()) {
      result.addAll(includeResolver.getInputsForIncludedFile(artifact, artifactResolver));
    }
    if (result.isEmpty()) {
      // No resolver contributions; reuse the scanner's collection directly.
      result = initialResult;
    } else {
      result.addAll(initialResult);
    }
    return result;
  }

  @Override
  public Artifact getPrimaryInput() {
    return getSourceFile();
  }

  @Override
  public Artifact getPrimaryOutput() {
    return getOutputFile();
  }

  /**
   * Returns the path of the c/cc source for gcc.
   */
  public final Artifact getSourceFile() {
    return cppCompileCommandLine.sourceFile;
  }

  /**
   * Returns the path where gcc should put its result.
   */
  public Artifact getOutputFile() {
    return outputFile;
  }

  /**
   * Returns the path of the debug info output file (when debug info is
   * spliced out of the .o file via fission).
   */
  @Nullable
  Artifact getDwoFile() {
    return dwoFile;
  }

  /** Returns the exec path of the object file; overridable (e.g. for fake compile actions). */
  protected PathFragment getInternalOutputFile() {
    return outputFile.getExecPath();
  }

  @VisibleForTesting
  public List<String> getPluginOpts() {
    return cppCompileCommandLine.pluginOpts;
  }

  Collection<PathFragment> getExtraSystemIncludePrefixes() {
    return extraSystemIncludePrefixes;
  }

  @Override
  public Map<Artifact, Artifact> getLegalGeneratedScannerFileMap() {
    Map<Artifact, Artifact> legalOuts = new HashMap<>();

    for (Artifact a : context.getDeclaredIncludeSrcs()) {
      if (!a.isSourceArtifact()) {
        // Generated header without a pre-grepped counterpart: mapped to null.
        legalOuts.put(a, null);
      }
    }
    for (Pair<Artifact, Artifact> pregreppedSrcs : context.getPregreppedHeaders()) {
      Artifact hdr = pregreppedSrcs.getFirst();
      Preconditions.checkState(!hdr.isSourceArtifact(), hdr);
      legalOuts.put(hdr, pregreppedSrcs.getSecond());
    }
    return Collections.unmodifiableMap(legalOuts);
  }

  /**
   * Returns the path where gcc should put the discovered dependency
   * information.
   */
  public DotdFile getDotdFile() {
    return cppCompileCommandLine.dotdFile;
  }

  @Override
  public String describeStrategy(Executor executor) {
    return executor.getContext(actionContext).strategyLocality();
  }

  @VisibleForTesting
  public CppCompilationContext getContext() {
    return context;
  }

  @Override
  public List<PathFragment> getQuoteIncludeDirs() {
    return context.getQuoteIncludeDirs();
  }

  @Override
  public List<PathFragment> getIncludeDirs() {
    ImmutableList.Builder<PathFragment> result = ImmutableList.builder();
    result.addAll(context.getIncludeDirs());
    for (String opt : cppCompileCommandLine.copts) {
      if (opt.startsWith("-I") && opt.length() > 2) {
        // We insist on the combined form "-Idir".
        result.add(new PathFragment(opt.substring(2)));
      }
    }
    return result.build();
  }

  @Override
  public List<PathFragment> getSystemIncludeDirs() {
    // TODO(bazel-team): parsing the command line flags here couples us to gcc-style compiler
    // command lines; use a different way to specify system includes (for example through a
    // system_includes attribute in cc_toolchain); note that that would disallow users from
    // specifying system include paths via the copts attribute.
    // Currently, this works together with the include_paths features because getCommandLine() will
    // get the system include paths from the CppCompilationContext instead.
    ImmutableList.Builder<PathFragment> result = ImmutableList.builder();
    List<String> compilerOptions = getCompilerOptions();
    for (int i = 0; i < compilerOptions.size(); i++) {
      String opt = compilerOptions.get(i);
      if (opt.startsWith("-isystem")) {
        if (opt.length() > 8) {
          // Combined form "-isystemdir".
          result.add(new PathFragment(opt.substring(8).trim()));
        } else if (i + 1 < compilerOptions.size()) {
          // Separate form "-isystem dir": consume the following option as the path.
          i++;
          result.add(new PathFragment(compilerOptions.get(i)));
        } else {
          System.err.println("WARNING: dangling -isystem flag in options for " + prettyPrint());
        }
      }
    }
    return result.build();
  }

  @Override
  public List<String> getCmdlineIncludes() {
    ImmutableList.Builder<String> cmdlineIncludes = ImmutableList.builder();
    List<String> args = getArgv();
    for (Iterator<String> argi = args.iterator(); argi.hasNext();) {
      String arg = argi.next();
      if (arg.equals("-include") && argi.hasNext()) {
        cmdlineIncludes.add(argi.next());
      }
    }
    return cmdlineIncludes.build();
  }

  @Override
  public Artifact getMainIncludeScannerSource() {
    // When compiling a module map, scan the module's first header rather than the map itself.
    return CppFileTypes.CPP_MODULE_MAP.matches(getSourceFile().getPath())
        ? Iterables.getFirst(context.getHeaderModuleSrcs(), null)
        : getSourceFile();
  }

  @Override
  public Collection<Artifact> getIncludeScannerSources() {
    NestedSetBuilder<Artifact> builder = NestedSetBuilder.stableOrder();
    // For every header module we use for the build we need the set of sources that it can
    // reference.
    builder.addTransitive(context.getTransitiveHeaderModuleSrcs());
    if (CppFileTypes.CPP_MODULE_MAP.matches(getSourceFile().getPath())) {
      // If this is an action that compiles the header module itself, the source we build is the
      // module map, and we need to include-scan all headers that are referenced in the module map.
      // We need to do include scanning as long as we want to support building code bases that are
      // not fully strict layering clean.
      builder.addTransitive(context.getHeaderModuleSrcs());
    } else {
      builder.add(getSourceFile());
    }
    return builder.build().toCollection();
  }

  @Override
  public Iterable<IncludeScannable> getAuxiliaryScannables() {
    return lipoScannables;
  }

  /**
   * Returns the list of "-D" arguments that should be used by this gcc
   * invocation. Only used for testing.
   */
  @VisibleForTesting
  public ImmutableCollection<String> getDefines() {
    return context.getDefines();
  }

  /**
   * Returns an (immutable) map of environment key, value pairs to be
   * provided to the C++ compiler.
   */
  public ImmutableMap<String, String> getEnvironment() {
    Map<String, String> environment =
        new LinkedHashMap<>(configuration.getDefaultShellEnvironment());

    if (configuration.isCodeCoverageEnabled()) {
      environment.put("PWD", "/proc/self/cwd");
    }
    // TODO(bazel-team): Handle at the level of crosstool (feature) templates instead of in this
    // compile action. This will also prevent the need for apple host system and target platform
    // evaluation here.
    AppleConfiguration appleConfiguration = configuration.getFragment(AppleConfiguration.class);
    if (CppConfiguration.MAC_SYSTEM_NAME.equals(getHostSystemName())) {
      environment.putAll(appleConfiguration.appleHostSystemEnv());
    }
    if (Platform.isApplePlatform(cppConfiguration.getTargetCpu())) {
      environment.putAll(appleConfiguration.appleTargetPlatformEnv(
          Platform.forTargetCpu(cppConfiguration.getTargetCpu())));
    }
    // TODO(bazel-team): Check (crosstool) host system name instead of using OS.getCurrent.
    if (OS.getCurrent() == OS.WINDOWS) {
      // TODO(bazel-team): Both GCC and clang rely on their execution directories being on
      // PATH, otherwise they fail to find dependent DLLs (and they fail silently...). On
      // the other hand, Windows documentation says that the directory of the executable
      // is always searched for DLLs first. Not sure what to make of it.
      // Other options are to forward the system path (brittle), or to add a PATH field to
      // the crosstool file.
      //
      // @see com.google.devtools.build.lib.rules.cpp.CppLinkAction#getEnvironment
      environment.put("PATH",
          cppConfiguration.getToolPathFragment(Tool.GCC).getParentDirectory().getPathString());
    }
    return ImmutableMap.copyOf(environment);
  }

  /**
   * Returns a new, mutable list of command and arguments (argv) to be passed
   * to the gcc subprocess.
   */
  public final List<String> getArgv() {
    return getArgv(getInternalOutputFile());
  }

  protected final List<String> getArgv(PathFragment outputFile) {
    return cppCompileCommandLine.getArgv(outputFile);
  }

  @Override
  public ExtraActionInfo.Builder getExtraActionInfo() {
    CppCompileInfo.Builder info = CppCompileInfo.newBuilder();
    info.setTool(cppConfiguration.getToolPathFragment(Tool.GCC).getPathString());
    for (String option : getCompilerOptions()) {
      info.addCompilerOption(option);
    }
    info.setOutputFile(outputFile.getExecPathString());
    info.setSourceFile(getSourceFile().getExecPathString());
    if (inputsKnown()) {
      info.addAllSourcesAndHeaders(Artifact.toExecPaths(getInputs()));
    } else {
      // Inputs have not been discovered yet: report the source plus all declared headers.
      info.addSourcesAndHeaders(getSourceFile().getExecPathString());
      info.addAllSourcesAndHeaders(
          Artifact.toExecPaths(context.getDeclaredIncludeSrcs()));
    }
    return super.getExtraActionInfo()
        .setExtension(CppCompileInfo.cppCompileInfo, info.build());
  }

  /**
   * Returns the compiler options.
   */
  @VisibleForTesting
  public List<String> getCompilerOptions() {
    return cppCompileCommandLine.getCompilerOptions();
  }

  /**
   * Enforce that the includes actually visited during the compile were properly
   * declared in the rules.
* * <p>The technique is to walk through all of the reported includes that gcc * emits into the .d file, and verify that they came from acceptable * relative include directories. This is done in two steps: * * <p>First, each included file is stripped of any include path prefix from * {@code quoteIncludeDirs} to produce an effective relative include dir+name. * * <p>Second, the remaining directory is looked up in {@code declaredIncludeDirs}, * a list of acceptable dirs. This list contains a set of dir fragments that * have been calculated by the configured target to be allowable for inclusion * by this source. If no match is found, an error is reported and an exception * is thrown. * * @throws ActionExecutionException iff there was an undeclared dependency */ @VisibleForTesting public void validateInclusions( Iterable<Artifact> inputsForValidation, MiddlemanExpander middlemanExpander, EventHandler eventHandler) throws ActionExecutionException { IncludeProblems errors = new IncludeProblems(); IncludeProblems warnings = new IncludeProblems(); Set<Artifact> allowedIncludes = new HashSet<>(); for (Artifact input : mandatoryInputs) { if (input.isMiddlemanArtifact()) { middlemanExpander.expand(input, allowedIncludes); } allowedIncludes.add(input); } if (optionalSourceFile != null) { allowedIncludes.add(optionalSourceFile); } Iterable<PathFragment> ignoreDirs = getValidationIgnoredDirs(); // Copy the sets to hash sets for fast contains checking. // Avoid immutable sets here to limit memory churn. 
Set<PathFragment> declaredIncludeDirs = Sets.newHashSet(context.getDeclaredIncludeDirs()); Set<PathFragment> warnIncludeDirs = Sets.newHashSet(context.getDeclaredIncludeWarnDirs()); Set<Artifact> declaredIncludeSrcs = Sets.newHashSet(context.getDeclaredIncludeSrcs()); for (Artifact input : inputsForValidation) { if (context.getCompilationPrerequisites().contains(input) || allowedIncludes.contains(input)) { continue; // ignore our fixed source in mandatoryInput: we just want includes } // Ignore headers from built-in include directories. if (FileSystemUtils.startsWithAny(input.getExecPath(), ignoreDirs)) { continue; } if (!isDeclaredIn(input, declaredIncludeDirs, declaredIncludeSrcs)) { // This call can never match the declared include sources (they would be matched above). // There are no declared include sources we need to warn about, so use an empty set here. if (isDeclaredIn(input, warnIncludeDirs, ImmutableSet.<Artifact>of())) { warnings.add(input.getPath().toString()); } else { errors.add(input.getPath().toString()); } } } if (VALIDATION_DEBUG_WARN) { synchronized (System.err) { if (VALIDATION_DEBUG >= 2 || errors.hasProblems() || warnings.hasProblems()) { if (errors.hasProblems()) { System.err.println("ERROR: Include(s) were not in declared srcs:"); } else if (warnings.hasProblems()) { System.err.println("WARN: Include(s) were not in declared srcs:"); } else { System.err.println("INFO: Include(s) were OK for '" + getSourceFile() + "', declared srcs:"); } for (Artifact a : context.getDeclaredIncludeSrcs()) { System.err.println(" '" + a.toDetailString() + "'"); } System.err.println(" or under declared dirs:"); for (PathFragment f : Sets.newTreeSet(context.getDeclaredIncludeDirs())) { System.err.println(" '" + f + "'"); } System.err.println(" or under declared warn dirs:"); for (PathFragment f : Sets.newTreeSet(context.getDeclaredIncludeWarnDirs())) { System.err.println(" '" + f + "'"); } System.err.println(" with prefixes:"); for (PathFragment dirpath : 
context.getQuoteIncludeDirs()) { System.err.println(" '" + dirpath + "'"); } } } } if (warnings.hasProblems()) { eventHandler.handle( new Event(EventKind.WARNING, getOwner().getLocation(), warnings.getMessage(this, getSourceFile()), Label.print(getOwner().getLabel()))); } errors.assertProblemFree(this, getSourceFile()); } private Iterable<PathFragment> getValidationIgnoredDirs() { List<PathFragment> cxxSystemIncludeDirs = cppConfiguration.getBuiltInIncludeDirectories(); return Iterables.concat( cxxSystemIncludeDirs, extraSystemIncludePrefixes, context.getSystemIncludeDirs()); } /** * Returns true if an included artifact is declared in a set of allowed * include directories. The simple case is that the artifact's parent * directory is contained in the set, or is empty. * * <p>This check also supports a wildcard suffix of '**' for the cases where the * calculations are inexact. * * <p>It also handles unseen non-nested-package subdirs by walking up the path looking * for matches. */ private static boolean isDeclaredIn( Artifact input, Set<PathFragment> declaredIncludeDirs, Set<Artifact> declaredIncludeSrcs) { // First check if it's listed in "srcs". If so, then its declared & OK. if (declaredIncludeSrcs.contains(input)) { return true; } // If it's a derived artifact, then it MUST be listed in "srcs" as checked above. // We define derived here as being not source and not under the include link tree. if (!input.isSourceArtifact() && !input.getRoot().getExecPath().getBaseName().equals("include")) { return false; } // Need to do dir/package matching: first try a quick exact lookup. PathFragment includeDir = input.getRootRelativePath().getParentDirectory(); if (includeDir.segmentCount() == 0 || declaredIncludeDirs.contains(includeDir)) { return true; // OK: quick exact match. } // Not found in the quick lookup: try the wildcards. 
for (PathFragment declared : declaredIncludeDirs) { if (declared.getBaseName().equals("**")) { if (includeDir.startsWith(declared.getParentDirectory())) { return true; // OK: under a wildcard dir. } } } // Still not found: see if it is in a subdir of a declared package. Path root = input.getRoot().getPath(); for (Path dir = input.getPath().getParentDirectory();;) { if (dir.getRelative("BUILD").exists()) { return false; // Bad: this is a sub-package, not a subdir of a declared package. } dir = dir.getParentDirectory(); if (dir.equals(root)) { return false; // Bad: at the top, give up. } if (declaredIncludeDirs.contains(dir.relativeTo(root))) { return true; // OK: found under a declared dir. } } } /** * Recalculates this action's live input collection, including sources, middlemen. * * @throws ActionExecutionException iff any errors happen during update. */ @VisibleForTesting @ThreadCompatible public final synchronized void updateActionInputs(NestedSet<Artifact> discoveredInputs) throws ActionExecutionException { inputsKnown = false; NestedSetBuilder<Artifact> inputs = NestedSetBuilder.stableOrder(); Profiler.instance().startTask(ProfilerTask.ACTION_UPDATE, this); try { inputs.addTransitive(mandatoryInputs); if (optionalSourceFile != null) { inputs.add(optionalSourceFile); } inputs.addAll(context.getCompilationPrerequisites()); inputs.addTransitive(discoveredInputs); inputsKnown = true; } finally { Profiler.instance().completeTask(ProfilerTask.ACTION_UPDATE); synchronized (this) { setInputs(inputs.build()); } } } private DependencySet processDepset(Path execRoot, CppCompileActionContext.Reply reply) throws IOException { DotdFile dotdFile = getDotdFile(); Preconditions.checkNotNull(dotdFile); DependencySet depSet = new DependencySet(execRoot); // artifact() is null if we are using in-memory .d files. We also want to prepare for the // case where we expected an in-memory .d file, but we did not get an appropriate response. // Perhaps we produced the file locally. 
if (dotdFile.artifact() != null || reply == null) { return depSet.read(dotdFile.getPath()); } else { // This is an in-memory .d file. return depSet.process(reply.getContents()); } } /** * Returns a collection with additional input artifacts relevant to the action by reading the * dynamically-discovered dependency information from the .d file after the action has run. * * <p>Artifacts are considered inputs but not "mandatory" inputs. * * @param reply the reply from the compilation. * @throws ActionExecutionException iff the .d is missing (when required), malformed, or has * unresolvable included artifacts. */ @VisibleForTesting @ThreadCompatible public NestedSet<Artifact> discoverInputsFromDotdFiles( Path execRoot, ArtifactResolver artifactResolver, Reply reply) throws ActionExecutionException { NestedSetBuilder<Artifact> inputs = NestedSetBuilder.stableOrder(); if (getDotdFile() == null) { return inputs.build(); } try { // Read .d file. DependencySet depSet = processDepset(execRoot, reply); // Determine prefixes of allowed absolute inclusions. CppConfiguration toolchain = cppConfiguration; List<PathFragment> systemIncludePrefixes = new ArrayList<>(); for (PathFragment includePath : toolchain.getBuiltInIncludeDirectories()) { if (includePath.isAbsolute()) { systemIncludePrefixes.add(includePath); } } systemIncludePrefixes.addAll(extraSystemIncludePrefixes); // Check inclusions. IncludeProblems problems = new IncludeProblems(); Map<PathFragment, Artifact> allowedDerivedInputsMap = getAllowedDerivedInputsMap(); for (PathFragment execPath : depSet.getDependencies()) { if (execPath.isAbsolute()) { // Absolute includes from system paths are ignored. if (FileSystemUtils.startsWithAny(execPath, systemIncludePrefixes)) { continue; } // Since gcc is given only relative paths on the command line, // non-system include paths here should never be absolute. If they // are, it's probably due to a non-hermetic #include, & we should stop // the build with an error. 
if (execPath.startsWith(execRoot.asFragment())) { execPath = execPath.relativeTo(execRoot.asFragment()); // funky but tolerable path } else { problems.add(execPath.getPathString()); continue; } } Artifact artifact = allowedDerivedInputsMap.get(execPath); if (artifact == null) { artifact = artifactResolver.resolveSourceArtifact(execPath); } if (artifact != null) { inputs.add(artifact); // In some cases, execution backends need extra files for each included file. Add them // to the set of actual inputs. inputs.addAll(includeResolver.getInputsForIncludedFile(artifact, artifactResolver)); } else { // Abort if we see files that we can't resolve, likely caused by // undeclared includes or illegal include constructs. problems.add(execPath.getPathString()); } } problems.assertProblemFree(this, getSourceFile()); } catch (IOException e) { // Some kind of IO or parse exception--wrap & rethrow it to stop the build. throw new ActionExecutionException("error while parsing .d file", e, this, false); } return inputs.build(); } @Override public Iterable<Artifact> resolveInputsFromCache( ArtifactResolver artifactResolver, PackageRootResolver resolver, Collection<PathFragment> inputPaths) throws PackageRootResolutionException { // Note that this method may trigger a violation of the desirable invariant that getInputs() // is a superset of getMandatoryInputs(). See bug about an "action not in canonical form" // error message and the integration test test_crosstool_change_and_failure(). Map<PathFragment, Artifact> allowedDerivedInputsMap = getAllowedDerivedInputsMap(); List<Artifact> inputs = new ArrayList<>(); List<PathFragment> unresolvedPaths = new ArrayList<>(); for (PathFragment execPath : inputPaths) { Artifact artifact = allowedDerivedInputsMap.get(execPath); if (artifact != null) { inputs.add(artifact); } else { // Remember this execPath, we will try to resolve it as a source artifact. 
unresolvedPaths.add(execPath); } } Map<PathFragment, Artifact> resolvedArtifacts = artifactResolver.resolveSourceArtifacts(unresolvedPaths, resolver); if (resolvedArtifacts == null) { // We are missing some dependencies. We need to rerun this update later. return null; } for (PathFragment execPath : unresolvedPaths) { Artifact artifact = resolvedArtifacts.get(execPath); // If PathFragment cannot be resolved into the artifact - ignore it. This could happen if // rule definition has changed and action no longer depends on, e.g., additional source file // in the separate package and that package is no longer referenced anywhere else. // It is safe to ignore such paths because dependency checker would identify change in inputs // (ignored path was used before) and will force action execution. if (artifact != null) { inputs.add(artifact); } } return inputs; } @Override public synchronized void updateInputs(Iterable<Artifact> inputs) { inputsKnown = true; synchronized (this) { setInputs(inputs); } } private Map<PathFragment, Artifact> getAllowedDerivedInputsMap() { Map<PathFragment, Artifact> allowedDerivedInputMap = new HashMap<>(); addToMap(allowedDerivedInputMap, mandatoryInputs); addToMap(allowedDerivedInputMap, context.getDeclaredIncludeSrcs()); addToMap(allowedDerivedInputMap, context.getCompilationPrerequisites()); Artifact artifact = getSourceFile(); if (!artifact.isSourceArtifact()) { allowedDerivedInputMap.put(artifact.getExecPath(), artifact); } return allowedDerivedInputMap; } private void addToMap(Map<PathFragment, Artifact> map, Iterable<Artifact> artifacts) { for (Artifact artifact : artifacts) { if (!artifact.isSourceArtifact()) { map.put(artifact.getExecPath(), artifact); } } } @Override protected String getRawProgressMessage() { return "Compiling " + getSourceFile().prettyPrint(); } /** * Return the directories in which to look for headers (pertains to headers * not specifically listed in {@code declaredIncludeSrcs}). 
   * The return value
   * may contain duplicate elements.
   */
  public NestedSet<PathFragment> getDeclaredIncludeDirs() {
    return context.getDeclaredIncludeDirs();
  }

  /**
   * Return the directories in which to look for headers and issue a warning.
   * (pertains to headers not specifically listed in {@code
   * declaredIncludeSrcs}). The return value may contain duplicate elements.
   */
  public NestedSet<PathFragment> getDeclaredIncludeWarnDirs() {
    return context.getDeclaredIncludeWarnDirs();
  }

  /**
   * Return explicit header files (i.e., header files explicitly listed). The
   * return value may contain duplicate elements.
   */
  public NestedSet<Artifact> getDeclaredIncludeSrcs() {
    return context.getDeclaredIncludeSrcs();
  }

  /**
   * Return explicit header files (i.e., header files explicitly listed) in an order
   * that is stable between builds.
   */
  protected final List<PathFragment> getDeclaredIncludeSrcsInStableOrder() {
    List<PathFragment> paths = new ArrayList<>();
    for (Artifact declaredIncludeSrc : context.getDeclaredIncludeSrcs()) {
      paths.add(declaredIncludeSrc.getExecPath());
    }
    Collections.sort(paths); // Order is not important, but stability is.
    return paths;
  }

  @Override
  public ResourceSet estimateResourceConsumption(Executor executor) {
    return executor.getContext(actionContext).estimateResourceConsumption(this);
  }

  @VisibleForTesting
  public Class<? extends CppCompileActionContext> getActionContext() {
    return actionContext;
  }

  /**
   * Estimate resource consumption when this action is executed locally.
   */
  public ResourceSet estimateResourceConsumptionLocal() {
    // We use a local compile, so much of the time is spent waiting for IO,
    // but there is still significant CPU; hence we estimate 50% cpu usage.
    return ResourceSet.createWithRamCpuIo(/*memoryMb=*/200, /*cpuUsage=*/0.5, /*ioUsage=*/0.0);
  }

  @Override
  public String computeKey() {
    Fingerprint f = new Fingerprint();
    f.addUUID(actionClassId);
    f.addStrings(getArgv());

    /*
     * getArgv() above captures all changes which affect the compilation
     * command and hence the contents of the object file. But we need to
     * also make sure that we reexecute the action if any of the fields
     * that affect whether validateIncludes() will report an error or warning
     * have changed, otherwise we might miss some errors.
     */
    f.addPaths(context.getDeclaredIncludeDirs());
    f.addPaths(context.getDeclaredIncludeWarnDirs());
    f.addPaths(getDeclaredIncludeSrcsInStableOrder());
    f.addPaths(getExtraSystemIncludePrefixes());
    return f.hexDigestAndReset();
  }

  @Override
  @ThreadCompatible
  public void execute(
      ActionExecutionContext actionExecutionContext)
          throws ActionExecutionException, InterruptedException {
    Executor executor = actionExecutionContext.getExecutor();
    CppCompileActionContext.Reply reply;
    try {
      reply = executor.getContext(actionContext).execWithReply(this, actionExecutionContext);
    } catch (ExecException e) {
      throw e.toActionExecutionException("C++ compilation of rule '" + getOwner().getLabel() + "'",
          executor.getVerboseFailures(), this);
    }
    // Outputs must be stable regardless of input contents; create empty .gcno files if needed.
    ensureCoverageNotesFilesExist();

    // This is the .d file scanning part.
    IncludeScanningContext scanningContext = executor.getContext(IncludeScanningContext.class);
    NestedSet<Artifact> discoveredInputs =
        discoverInputsFromDotdFiles(
            executor.getExecRoot(), scanningContext.getArtifactResolver(), reply);
    reply = null; // Clear in-memory .d files early.

    // Post-execute "include scanning", which modifies the action inputs to match what the compile
    // action actually used by incorporating the results of .d file parsing.
    //
    // We enable this when "include scanning" itself is enabled, or when hdrs_check is set to loose
    // or warn, as otherwise the action might be missing inputs that the compiler used and rebuilds
    // become incorrect.
    //
    // Note that this effectively disables post-execute "include scanning" in Bazel, because
    // hdrs_check is forced to "strict" and "include scanning" is forced to off.
    boolean usesStrictHdrsChecks = context.getDeclaredIncludeDirs().isEmpty()
        && context.getDeclaredIncludeWarnDirs().isEmpty();
    if (shouldScanIncludes() || !usesStrictHdrsChecks) {
      updateActionInputs(discoveredInputs);
    }

    // hdrs_check: This cannot be switched off, because doing so would allow for incorrect builds.
    validateInclusions(
        discoveredInputs, actionExecutionContext.getMiddlemanExpander(),
        executor.getEventHandler());
  }

  /**
   * Gcc only creates ".gcno" files if the compilation unit is non-empty.
   * To ensure that the set of outputs for a CppCompileAction remains consistent
   * and doesn't vary dynamically depending on the _contents_ of the input files,
   * we create empty ".gcno" files if gcc didn't create them.
   */
  private void ensureCoverageNotesFilesExist() throws ActionExecutionException {
    for (Artifact output : getOutputs()) {
      if (CppFileTypes.COVERAGE_NOTES.matches(output.getFilename()) // ".gcno"
          && !output.getPath().exists()) {
        try {
          FileSystemUtils.createEmptyFile(output.getPath());
        } catch (IOException e) {
          throw new ActionExecutionException(
              "Error creating file '" + output.getPath() + "': " + e.getMessage(), e, this, false);
        }
      }
    }
  }

  /**
   * Provides list of include files needed for performing extra actions on this action when run
   * remotely. The list of include files is created by performing a header scan on the known input
   * files.
*/ @Override public Iterable<Artifact> getInputFilesForExtraAction( ActionExecutionContext actionExecutionContext) throws ActionExecutionException, InterruptedException { Collection<Artifact> scannedIncludes = actionExecutionContext.getExecutor().getContext(actionContext) .getScannedIncludeFiles(this, actionExecutionContext); // Use a set to eliminate duplicates. ImmutableSet.Builder<Artifact> result = ImmutableSet.builder(); return result.addAll(getInputs()).addAll(scannedIncludes).build(); } @Override public String getMnemonic() { return "CppCompile"; } @Override public String describeKey() { StringBuilder message = new StringBuilder(); message.append(getProgressMessage()); message.append('\n'); message.append(" Command: "); message.append( ShellEscaper.escapeString(cppConfiguration.getLdExecutable().getPathString())); message.append('\n'); // Outputting one argument per line makes it easier to diff the results. for (String argument : ShellEscaper.escapeAll(getArgv())) { message.append(" Argument: "); message.append(argument); message.append('\n'); } for (PathFragment path : context.getDeclaredIncludeDirs()) { message.append(" Declared include directory: "); message.append(ShellEscaper.escapeString(path.getPathString())); message.append('\n'); } for (PathFragment path : getDeclaredIncludeSrcsInStableOrder()) { message.append(" Declared include source: "); message.append(ShellEscaper.escapeString(path.getPathString())); message.append('\n'); } for (PathFragment path : getExtraSystemIncludePrefixes()) { message.append(" Extra system include prefix: "); message.append(ShellEscaper.escapeString(path.getPathString())); message.append('\n'); } return message.toString(); } /** * The compile command line for the enclosing C++ compile action. 
*/ public final class CppCompileCommandLine { private final Artifact sourceFile; private final DotdFile dotdFile; private final List<String> copts; private final Predicate<String> coptsFilter; private final List<String> pluginOpts; private final Collection<String> features; private final FeatureConfiguration featureConfiguration; private final CcToolchainFeatures.Variables variables; // The value of the BUILD_FDO_TYPE macro to be defined on command line @Nullable private final String fdoBuildStamp; public CppCompileCommandLine( Artifact sourceFile, DotdFile dotdFile, ImmutableList<String> copts, Predicate<String> coptsFilter, ImmutableList<String> pluginOpts, Collection<String> features, FeatureConfiguration featureConfiguration, CcToolchainFeatures.Variables variables, @Nullable String fdoBuildStamp) { this.sourceFile = Preconditions.checkNotNull(sourceFile); this.dotdFile = CppFileTypes.mustProduceDotdFile(sourceFile.getPath().toString()) ? Preconditions.checkNotNull(dotdFile) : null; this.copts = Preconditions.checkNotNull(copts); this.coptsFilter = coptsFilter; this.pluginOpts = Preconditions.checkNotNull(pluginOpts); this.features = Preconditions.checkNotNull(features); this.featureConfiguration = featureConfiguration; this.variables = variables; this.fdoBuildStamp = fdoBuildStamp; } protected List<String> getArgv(PathFragment outputFile) { List<String> commandLine = new ArrayList<>(); // first: The command name. commandLine.add(cppConfiguration.getToolPathFragment(Tool.GCC).getPathString()); // second: The compiler options. commandLine.addAll(getCompilerOptions()); // third: The file to compile! commandLine.add("-c"); commandLine.add(sourceFile.getExecPathString()); // finally: The output file. (Prefixed with -o). 
commandLine.add("-o"); commandLine.add(outputFile.getPathString()); return commandLine; } private String getActionName() { PathFragment sourcePath = sourceFile.getExecPath(); if (CppFileTypes.CPP_MODULE_MAP.matches(sourcePath)) { return CPP_MODULE_COMPILE; } else if (CppFileTypes.CPP_HEADER.matches(sourcePath)) { // TODO(bazel-team): Handle C headers that probably don't work in C++ mode. if (featureConfiguration.isEnabled(CppRuleClasses.PARSE_HEADERS)) { return CPP_HEADER_PARSING; } else if (featureConfiguration.isEnabled(CppRuleClasses.PREPROCESS_HEADERS)) { return CPP_HEADER_PREPROCESSING; } else { // CcCommon.collectCAndCppSources() ensures we do not add headers to // the compilation artifacts unless either 'parse_headers' or // 'preprocess_headers' is set. throw new IllegalStateException(); } } else if (CppFileTypes.C_SOURCE.matches(sourcePath)) { return C_COMPILE; } else if (CppFileTypes.CPP_SOURCE.matches(sourcePath)) { return CPP_COMPILE; } else if (CppFileTypes.ASSEMBLER.matches(sourcePath)) { return ASSEMBLE; } else if (CppFileTypes.ASSEMBLER_WITH_C_PREPROCESSOR.matches(sourcePath)) { return PREPROCESS_ASSEMBLE; } // CcLibraryHelper ensures CppCompileAction only gets instantiated for supported file types. throw new IllegalStateException(); } public List<String> getCompilerOptions() { List<String> options = new ArrayList<>(); CppConfiguration toolchain = cppConfiguration; // pluginOpts has to be added before defaultCopts because -fplugin must precede -plugin-arg. 
options.addAll(pluginOpts); addFilteredOptions(options, toolchain.getCompilerOptions(features)); String sourceFilename = sourceFile.getExecPathString(); if (CppFileTypes.C_SOURCE.matches(sourceFilename)) { addFilteredOptions(options, toolchain.getCOptions()); } if (CppFileTypes.CPP_SOURCE.matches(sourceFilename) || CppFileTypes.CPP_HEADER.matches(sourceFilename) || CppFileTypes.CPP_MODULE_MAP.matches(sourceFilename)) { addFilteredOptions(options, toolchain.getCxxOptions(features)); } for (String warn : cppConfiguration.getCWarns()) { options.add("-W" + warn); } for (String define : context.getDefines()) { options.add("-D" + define); } // Stamp FDO builds with FDO subtype string if (fdoBuildStamp != null) { options.add("-D" + CppConfiguration.FDO_STAMP_MACRO + "=\"" + fdoBuildStamp + "\""); } // TODO(bazel-team): This needs to be before adding getUnfilteredCompilerOptions() and after // adding the warning flags until all toolchains are migrated; currently toolchains use the // unfiltered compiler options to inject include paths, which is superseded by the feature // configuration; on the other hand toolchains switch off warnings for the layering check // that will be re-added by the feature flags. addFilteredOptions(options, featureConfiguration.getCommandLine(getActionName(), variables)); // Users don't expect the explicit copts to be filtered by coptsFilter, add them verbatim. // Make sure these are added after the options from the feature configuration, so that // those options can be overriden. options.addAll(copts); // Unfiltered compiler options contain system include paths. These must be added after // the user provided options, otherwise users adding include paths will not pick up their // own include paths first. options.addAll(toolchain.getUnfilteredCompilerOptions(features)); // GCC gives randomized names to symbols which are defined in // an anonymous namespace but have external linkage. 
To make // computation of these deterministic, we want to override the // default seed for the random number generator. It's safe to use // any value which differs for all translation units; we use the // path to the object file. options.add("-frandom-seed=" + outputFile.getExecPathString()); // Add the options of --per_file_copt, if the label or the base name of the source file // matches the specified regular expression filter. for (PerLabelOptions perLabelOptions : cppConfiguration.getPerFileCopts()) { if ((sourceLabel != null && perLabelOptions.isIncluded(sourceLabel)) || perLabelOptions.isIncluded(sourceFile)) { options.addAll(perLabelOptions.getOptions()); } } // Enable <object>.d file generation. if (dotdFile != null) { // Gcc options: // -MD turns on .d file output as a side-effect (doesn't imply -E) // -MM[D] enables user includes only, not system includes // -MF <name> specifies the dotd file name // Issues: // -M[M] alone subverts actual .o output (implies -E) // -M[M]D alone breaks some of the .d naming assumptions // This combination gets user and system includes with specified name: // -MD -MF <name> options.add("-MD"); options.add("-MF"); options.add(dotdFile.getSafeExecPath().getPathString()); } if (FileType.contains(outputFile, CppFileTypes.ASSEMBLER, CppFileTypes.PIC_ASSEMBLER)) { options.add("-S"); } else if (FileType.contains(outputFile, CppFileTypes.PREPROCESSED_C, CppFileTypes.PREPROCESSED_CPP, CppFileTypes.PIC_PREPROCESSED_C, CppFileTypes.PIC_PREPROCESSED_CPP)) { options.add("-E"); } if (cppConfiguration.useFission()) { options.add("-gsplit-dwarf"); } if (usePic) { options.add("-fPIC"); } return options; } // For each option in 'in', add it to 'out' unless it is matched by the 'coptsFilter' regexp. private void addFilteredOptions(List<String> out, List<String> in) { Iterables.addAll(out, Iterables.filter(in, coptsFilter)); } } /** * A reference to a .d file. 
 There are two modes:
 * <ol>
 * <li>an Artifact that represents a real on-disk file
 * <li>just an execPath that refers to a virtual .d file that is not written to disk
 * </ol>
 */
public static class DotdFile {
  // Exactly one of {artifact, execPath} is non-null, chosen by the constructor used.
  private final Artifact artifact;
  private final PathFragment execPath;

  /** Wraps a real on-disk .d file. */
  public DotdFile(Artifact artifact) {
    this.artifact = artifact;
    this.execPath = null;
  }

  /** Wraps a virtual .d file that is never written to disk. */
  public DotdFile(PathFragment execPath) {
    this.artifact = null;
    this.execPath = execPath;
  }

  /**
   * @return the Artifact, or null if this wraps only a virtual exec path
   */
  public Artifact artifact() {
    return artifact;
  }

  /**
   * @return Gets the execPath regardless of whether this is a real Artifact
   */
  public PathFragment getSafeExecPath() {
    return execPath == null ? artifact.getExecPath() : execPath;
  }

  /**
   * @return the on-disk location of the .d file or null
   *
   * NOTE(review): unlike getSafeExecPath(), this dereferences {@code artifact}
   * without a null check, so it throws NPE (rather than returning null) in the
   * virtual exec-path mode — confirm callers only use it in the Artifact mode.
   */
  public Path getPath() {
    return artifact.getPath();
  }
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.infinispan.remote;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.apache.camel.BindToRegistry;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.infinispan.InfinispanConstants;
import org.apache.camel.component.infinispan.InfinispanOperation;
import org.apache.camel.component.infinispan.InfinispanProducerTestSupport;
import org.apache.camel.spring.boot.CamelAutoConfiguration;
import org.apache.camel.test.spring.junit5.CamelSpringBootTest;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.infinispan.client.hotrod.ServerStatistics;
import org.infinispan.commons.api.BasicCache;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.test.annotation.DirtiesContext;

import java.util.function.BiFunction;

/**
 * Integration test for the Infinispan remote (Hot Rod) producer endpoint.
 * Most producer cases are inherited from {@link InfinispanProducerTestSupport};
 * this class adds the STATS-operation test and wires up the routes/beans.
 */
@DirtiesContext
@CamelSpringBootTest
@SpringBootTest(
        classes = {
                CamelAutoConfiguration.class,
                InfinispanRemoteProducerIT.class
        }
)
public class InfinispanRemoteProducerIT extends InfinispanRemoteTestSupport
        implements InfinispanProducerTestSupport {

    /**
     * Remapping function referenced by the "direct:compute" route
     * (looked up from the registry as {@code #mappingFunction}).
     */
    @Bean
    public BiFunction<String, String, String> mappingFunction() {
        return (k, v) -> v + "replay";
    }

    /**
     * Puts two entries through the producer, then issues a STATS operation and
     * asserts the server reports exactly those two entries in the cache.
     */
    @Test
    public void statsOperation() {
        fluentTemplate()
                .to("direct:start")
                .withHeader(InfinispanConstants.KEY, InfinispanProducerTestSupport.KEY_ONE)
                .withHeader(InfinispanConstants.VALUE, InfinispanProducerTestSupport.VALUE_ONE)
                .withHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUT)
                .send();

        assertEquals(InfinispanProducerTestSupport.VALUE_ONE,
                getCache().get(InfinispanProducerTestSupport.KEY_ONE));

        fluentTemplate()
                .to("direct:start")
                .withHeader(InfinispanConstants.KEY, InfinispanProducerTestSupport.KEY_TWO)
                .withHeader(InfinispanConstants.VALUE, InfinispanProducerTestSupport.VALUE_TWO)
                .withHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUT)
                .send();

        assertEquals(InfinispanProducerTestSupport.VALUE_TWO,
                getCache().get(InfinispanProducerTestSupport.KEY_TWO));

        assertEquals(
                2,
                fluentTemplate()
                        .to("direct:start")
                        .withHeader(InfinispanConstants.OPERATION, InfinispanOperation.STATS)
                        .request(ServerStatistics.class)
                        .getIntStatistic(ServerStatistics.CURRENT_NR_OF_ENTRIES));
    }

    // *****************************
    // Test support / wiring
    // *****************************

    @BeforeEach
    protected void beforeEach() {
        // cleanup the default test cache before each run
        getCache().clear();
    }

    // Implements InfinispanProducerTestSupport.getCache() by delegating to the
    // remote-test superclass; required even though it only calls super.
    @Override
    public BasicCache<Object, Object> getCache() {
        return super.getCache();
    }

    @Override
    public BasicCache<Object, Object> getCache(String name) {
        return InfinispanRemoteTestSupport.getCacheByName(name);
    }

    /**
     * Routes used by the inherited producer tests:
     * plain producer, compute with registry remapping function, and an
     * endpoint-configured explicit PUT.
     */
    @Bean
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:start")
                        .toF("infinispan:%s", getCacheName());
                from("direct:compute")
                        .toF("infinispan:%s?remappingFunction=#mappingFunction", getCacheName());
                from("direct:explicitput")
                        .toF("infinispan:%s?operation=PUT&key=a&value=3", getCacheName());
            }
        };
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.util.ui;

import com.bulenkov.iconloader.IconLoader;
import com.bulenkov.iconloader.util.EmptyIcon;
import com.bulenkov.iconloader.util.Pair;
import com.bulenkov.iconloader.util.SystemInfo;
import com.bulenkov.iconloader.util.UIUtil;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.plaf.UIResource;
import java.awt.*;

/**
 * HiDPI scaling utilities: a process-wide UI scale factor (derived from the
 * system font size) plus factory helpers that produce pre-scaled dimensions,
 * insets, fonts, borders and icons.
 *
 * @author Konstantin Bulenkov
 */
public class JBUI {
  // Current UI scale; 1.0f means no scaling. Only ever holds one of the
  // quantized steps assigned in setScaleFactor(), which is why exact float
  // comparisons against those steps are safe below.
  private static float scaleFactor = 1.0f;

  static {
    // Computed once at class load.
    calculateScaleFactor();
  }

  private static void calculateScaleFactor() {
    // macOS scales at the OS level; never apply a manual factor there.
    if (SystemInfo.isMac) {
      scaleFactor = 1.0f;
      return;
    }

    // A "hidpi" system property set to anything but "true" disables scaling.
    if (System.getProperty("hidpi") != null && !"true".equalsIgnoreCase(System.getProperty("hidpi"))) {
      scaleFactor = 1.0f;
      return;
    }

    UIUtil.initSystemFontData();
    Pair<String, Integer> fdata = UIUtil.getSystemFontData();
    int size;
    if (fdata != null) {
      size = fdata.getSecond();
    } else {
      // Fall back to the Swing label font when system font data is unavailable.
      size = Fonts.label().getSize();
    }
    // Scale is the ratio of the system font size to the default font size.
    setScaleFactor(size / UIUtil.DEF_SYSTEM_FONT_SIZE);
  }

  /**
   * Quantizes {@code scale} to the supported steps (1.0, 1.25, 1.5, 1.75, 2.0)
   * and applies it, also propagating the scale to the icon loader.
   * No-op when -Dhidpi=false or when the quantized value is unchanged.
   */
  public static void setScaleFactor(float scale) {
    final String hidpi = System.getProperty("hidpi");
    if ("false".equalsIgnoreCase(hidpi)) {
      return;
    }

    if (scale < 1.25f) scale = 1.0f;
    else if (scale < 1.5f) scale = 1.25f;
    else if (scale < 1.75f) scale = 1.5f;
    else if (scale < 2f) scale = 1.75f;
    else scale = 2.0f;

    if (SystemInfo.isLinux && scale == 1.25f) {
      // Default UI font size for Unity and Gnome is 15. Scaling factor 1.25f works badly on Linux
      scale = 1f;
    }

    if (scaleFactor == scale) {
      return;
    }

    scaleFactor = scale;
    IconLoader.setScale(scale);
  }

  /** Scales an integer pixel value, rounding to the nearest int. */
  public static int scale(int i) {
    return Math.round(scaleFactor * i);
  }

  /**
   * Scales a font size. Uses hand-tuned ratios for the 1.25/1.75 steps
   * (1.34f / 1.67f) instead of the raw factor.
   */
  public static int scaleFontSize(int fontSize) {
    if (scaleFactor == 1.25f) return (int)(fontSize * 1.34f);
    if (scaleFactor == 1.75f) return (int)(fontSize * 1.67f);
    return scale(fontSize);
  }

  public static JBDimension size(int width, int height) {
    return new JBDimension(width, height);
  }

  public static JBDimension size(int widthAndHeight) {
    return new JBDimension(widthAndHeight, widthAndHeight);
  }

  /**
   * Wraps a Dimension as a JBDimension. A JBDimension created under a different
   * scale is first un-scaled back to its original values so it is not scaled twice;
   * UIResource-ness is preserved in that case.
   */
  public static JBDimension size(Dimension size) {
    if (size instanceof JBDimension) {
      final JBDimension jbSize = (JBDimension)size;
      if (jbSize.originalScale == scale(1f)) {
        return jbSize;
      }
      final JBDimension newSize = new JBDimension((int)(jbSize.width / jbSize.originalScale),
                                                  (int)(jbSize.height / jbSize.originalScale));
      return size instanceof UIResource ? newSize.asUIResource() : newSize;
    }
    return new JBDimension(size.width, size.height);
  }

  public static JBInsets insets(int top, int left, int bottom, int right) {
    return new JBInsets(top, left, bottom, right);
  }

  public static JBInsets insets(int all) {
    return insets(all, all, all, all);
  }

  public static JBInsets insets(int topBottom, int leftRight) {
    return insets(topBottom, leftRight, topBottom, leftRight);
  }

  public static JBInsets emptyInsets() {
    return new JBInsets(0, 0, 0, 0);
  }

  public static JBInsets insetsTop(int t) {
    return insets(t, 0, 0, 0);
  }

  public static JBInsets insetsLeft(int l) {
    return insets(0, l, 0, 0);
  }

  public static JBInsets insetsBottom(int b) {
    return insets(0, 0, b, 0);
  }

  public static JBInsets insetsRight(int r) {
    return insets(0, 0, 0, r);
  }

  /** An empty icon whose side is {@code i} scaled pixels. */
  public static EmptyIcon emptyIcon(int i) {
    return (EmptyIcon)EmptyIcon.create(scale(i));
  }

  public static JBDimension emptySize() {
    return new JBDimension(0, 0);
  }

  /** Scales a float value (no rounding). */
  public static float scale(float f) {
    return f * scaleFactor;
  }

  public static JBInsets insets(Insets insets) {
    return JBInsets.create(insets);
  }

  /** @return true when a scale factor above 1.0 is in effect */
  public static boolean isHiDPI() {
    return scaleFactor > 1.0f;
  }

  /** Factories for pre-scaled fonts based on the Swing label font. */
  public static class Fonts {
    public static JBFont label() {
      return JBFont.create(UIManager.getFont("Label.font"), false);
    }

    public static JBFont label(float size) {
      return label().deriveFont(scale(size));
    }

    public static JBFont smallFont() {
      return label().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.SMALL));
    }

    public static JBFont miniFont() {
      return label().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.MINI));
    }

    public static JBFont create(String fontFamily, int size) {
      return JBFont.create(new Font(fontFamily, Font.PLAIN, size));
    }
  }

  /** Factories for empty (padding-only) borders with pre-scaled offsets. */
  public static class Borders {
    public static JBEmptyBorder empty(int top, int left, int bottom, int right) {
      return new JBEmptyBorder(top, left, bottom, right);
    }

    public static JBEmptyBorder empty(int topAndBottom, int leftAndRight) {
      return empty(topAndBottom, leftAndRight, topAndBottom, leftAndRight);
    }

    public static JBEmptyBorder emptyTop(int offset) {
      return empty(offset, 0, 0, 0);
    }

    public static JBEmptyBorder emptyLeft(int offset) {
      return empty(0, offset, 0, 0);
    }

    public static JBEmptyBorder emptyBottom(int offset) {
      return empty(0, 0, offset, 0);
    }

    public static JBEmptyBorder emptyRight(int offset) {
      return empty(0, 0, 0, offset);
    }

    public static JBEmptyBorder empty() {
      return empty(0, 0, 0, 0);
    }

    public static Border empty(int offsets) {
      return empty(offsets, offsets, offsets, offsets);
    }

    // Ported from IntelliJ but disabled here: depends on classes this library does not ship.
//    public static Border customLine(Color color, int top, int left, int bottom, int right) {
//      return new CustomLineBorder(color, insets(top, left, bottom, right));
//    }
//
//    public static Border customLine(Color color, int thickness) {
//      return customLine(color, thickness, thickness, thickness, thickness);
//    }
//
//    public static Border customLine(Color color) {
//      return customLine(color, 1);
//    }
//
//    public static Border merge(@Nullable Border source, @NotNull Border extra, boolean extraIsOutside) {
//      if (source == null) return extra;
//      return new CompoundBorder(extraIsOutside ? extra : source, extraIsOutside? source : extra);
//    }
  }

  // Disabled for the same reason: BorderLayoutPanel is not available in this port.
//  public static class Panels {
//    public static BorderLayoutPanel simplePanel() {
//      return new BorderLayoutPanel();
//    }
//
//    public static BorderLayoutPanel simplePanel(Component comp) {
//      return simplePanel().addToCenter(comp);
//    }
//
//    public static BorderLayoutPanel simplePanel(int hgap, int vgap) {
//      return new BorderLayoutPanel(hgap, vgap);
//    }
//  }
}
package com.example.li.springboot_poi_demo.utils;

import cn.afterturn.easypoi.excel.ExcelExportUtil;
import cn.afterturn.easypoi.excel.ExcelImportUtil;
import cn.afterturn.easypoi.excel.entity.ExportParams;
import cn.afterturn.easypoi.excel.entity.ImportParams;
import cn.afterturn.easypoi.excel.entity.enmus.ExcelType;
import cn.afterturn.easypoi.excel.imports.ExcelImportService;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.ss.usermodel.Workbook;
import org.springframework.web.multipart.MultipartFile;

import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URLEncoder;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;

/**
 * Excel export/import helpers built on easypoi.
 *
 * <p>All public methods report failures as {@link IOException}. Fixed in this
 * revision: the underlying exception is now preserved as the cause (previously
 * only {@code e.getMessage()} survived), the import file stream is closed via
 * try-with-resources, and the workbook is closed after download.
 */
public class ExcelUtils {

    /**
     * Export a list of beans as an XLSX download.
     *
     * @param list           row data
     * @param title          sheet title
     * @param sheetName      sheet name
     * @param pojoClass      row bean type
     * @param fileName       download file name (without extension)
     * @param isCreateHeader whether to create header rows
     * @param response       servlet response the workbook is written to
     */
    public static void exportExcel(List<?> list, String title, String sheetName, Class<?> pojoClass,
                                   String fileName, boolean isCreateHeader, HttpServletResponse response)
            throws IOException {
        ExportParams exportParams = new ExportParams(title, sheetName, ExcelType.XSSF);
        exportParams.setCreateHeadRows(isCreateHeader);
        defaultExport(list, pojoClass, fileName, response, exportParams);
    }

    /**
     * Export a list of beans as an XLSX download with default header creation.
     *
     * @param list      row data
     * @param title     sheet title
     * @param sheetName sheet name
     * @param pojoClass row bean type
     * @param fileName  download file name (without extension)
     * @param response  servlet response the workbook is written to
     */
    public static void exportExcel(List<?> list, String title, String sheetName, Class<?> pojoClass,
                                   String fileName, HttpServletResponse response) throws IOException {
        defaultExport(list, pojoClass, fileName, response, new ExportParams(title, sheetName, ExcelType.XSSF));
    }

    /**
     * Export a list of beans using caller-supplied export parameters.
     *
     * @param list         row data
     * @param pojoClass    row bean type
     * @param fileName     download file name (without extension)
     * @param exportParams easypoi export parameters
     * @param response     servlet response the workbook is written to
     */
    public static void exportExcel(List<?> list, Class<?> pojoClass, String fileName,
                                   ExportParams exportParams, HttpServletResponse response) throws IOException {
        defaultExport(list, pojoClass, fileName, response, exportParams);
    }

    /**
     * Export sheet-definition maps (easypoi multi-sheet map format) as a download.
     *
     * @param list     sheet/row data maps
     * @param fileName download file name (without extension)
     * @param response servlet response the workbook is written to
     */
    public static void exportExcel(List<Map<String, Object>> list, String fileName,
                                   HttpServletResponse response) throws IOException {
        defaultExport(list, fileName, response);
    }

    /** Builds the workbook for the bean-list variants and streams it to the client. */
    private static void defaultExport(List<?> list, Class<?> pojoClass, String fileName,
                                      HttpServletResponse response, ExportParams exportParams) throws IOException {
        Workbook workbook = ExcelExportUtil.exportExcel(exportParams, pojoClass, list);
        downLoadExcel(fileName, response, workbook);
    }

    /** Builds the workbook for the map-list variant and streams it to the client. */
    private static void defaultExport(List<Map<String, Object>> list, String fileName,
                                      HttpServletResponse response) throws IOException {
        // NOTE(review): this path builds an HSSF (.xls) workbook, but downLoadExcel()
        // always names the download ".xlsx" — confirm whether ExcelType.XSSF was intended.
        Workbook workbook = ExcelExportUtil.exportExcel(list, ExcelType.HSSF);
        downLoadExcel(fileName, response, workbook);
    }

    /**
     * Streams a finished workbook to the client as an attachment.
     *
     * @param fileName download file name (without extension)
     * @param response servlet response to write to
     * @param workbook workbook to send; closed before returning
     */
    private static void downLoadExcel(String fileName, HttpServletResponse response, Workbook workbook)
            throws IOException {
        try {
            response.setCharacterEncoding("UTF-8");
            response.setHeader("content-Type", "application/vnd.ms-excel");
            // URL-encode the file name so non-ASCII names survive the download header.
            response.setHeader("Content-Disposition",
                    "attachment;filename=" + URLEncoder.encode(fileName + "." + ExcelTypeEnum.XLSX.getValue(), "UTF-8"));
            workbook.write(response.getOutputStream());
        } catch (Exception e) {
            // Preserve the original exception as the cause instead of discarding it.
            throw new IOException(e.getMessage(), e);
        } finally {
            try {
                workbook.close();
            } catch (Exception ignored) {
                // Best effort: the payload was already written (or the write above failed).
            }
        }
    }

    /**
     * Import rows from an Excel file on disk.
     *
     * @param filePath   path to the Excel file; blank path returns null
     * @param titleRows  number of title rows
     * @param headerRows number of header rows
     * @param pojoClass  row bean type
     * @param <T>        row bean type
     * @return the imported rows, or null when {@code filePath} is blank
     */
    public static <T> List<T> importExcel(String filePath, Integer titleRows, Integer headerRows,
                                          Class<T> pojoClass) throws IOException {
        if (StringUtils.isBlank(filePath)) {
            return null;
        }
        ImportParams params = new ImportParams();
        params.setTitleRows(titleRows);
        params.setHeadRows(headerRows);
        params.setNeedSave(true);
        params.setSaveUrl("/excel/");
        // try-with-resources closes the file stream (it previously leaked).
        try (InputStream in = new FileInputStream(new File(filePath))) {
            return (new MyExcelImportService()).importExcelByIs(in, pojoClass, params, false).getList();
        } catch (NoSuchElementException e) {
            // easypoi signals an empty template this way.
            throw new IOException("模板不能为空", e);
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Import rows from an uploaded file, assuming one title row and one header row.
     *
     * @param file      uploaded Excel file
     * @param pojoClass row bean type
     * @param <T>       row bean type
     * @return the imported rows, or null when {@code file} is null
     */
    public static <T> List<T> importExcel(MultipartFile file, Class<T> pojoClass) throws IOException {
        return importExcel(file, 1, 1, pojoClass);
    }

    /**
     * Import rows from an uploaded file without content verification.
     *
     * @param file       uploaded Excel file
     * @param titleRows  number of title rows
     * @param headerRows number of header rows
     * @param pojoClass  row bean type
     * @param <T>        row bean type
     * @return the imported rows, or null when {@code file} is null
     */
    public static <T> List<T> importExcel(MultipartFile file, Integer titleRows, Integer headerRows,
                                          Class<T> pojoClass) throws IOException {
        return importExcel(file, titleRows, headerRows, false, pojoClass);
    }

    /**
     * Import rows from an uploaded file.
     *
     * @param file       uploaded Excel file
     * @param titleRows  number of title rows
     * @param headerRows number of header rows
     * @param needVerfiy whether easypoi should verify cell content
     * @param pojoClass  row bean type
     * @param <T>        row bean type
     * @return the imported rows, or null when {@code file} is null
     */
    public static <T> List<T> importExcel(MultipartFile file, Integer titleRows, Integer headerRows,
                                          boolean needVerfiy, Class<T> pojoClass) throws IOException {
        if (file == null) {
            return null;
        }
        try {
            return importExcel(file.getInputStream(), titleRows, headerRows, needVerfiy, pojoClass);
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Import rows from an input stream.
     *
     * @param inputStream stream of Excel data; null returns null
     * @param titleRows   number of title rows
     * @param headerRows  number of header rows
     * @param needVerfiy  whether easypoi should verify cell content
     * @param pojoClass   row bean type
     * @param <T>         row bean type
     * @return the imported rows, or null when {@code inputStream} is null
     */
    public static <T> List<T> importExcel(InputStream inputStream, Integer titleRows, Integer headerRows,
                                          boolean needVerfiy, Class<T> pojoClass) throws IOException {
        if (inputStream == null) {
            return null;
        }
        ImportParams params = new ImportParams();
        params.setTitleRows(titleRows);
        params.setHeadRows(headerRows);
        params.setSaveUrl("/excel/");
        params.setNeedSave(true);
        params.setNeedVerify(needVerfiy);
        try {
            return (new MyExcelImportService()).importExcelByIs(inputStream, pojoClass, params, false).getList();
        } catch (NoSuchElementException e) {
            throw new IOException("excel文件不能为空", e);
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Excel file-type extensions used when naming downloads.
     */
    enum ExcelTypeEnum {
        XLS("xls"),
        XLSX("xlsx");

        private String value;

        ExcelTypeEnum(String value) {
            this.value = value;
        }

        public String getValue() {
            return value;
        }

        public void setValue(String value) {
            this.value = value;
        }
    }
}
/*
 * Copyright 2015 Eiren 'Eirenliel' Rain and GreenCubes.org
 * authors
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated
 * documentation files (the "Software"), to deal in the
 * Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute,
 * sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do
 * so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall
 * be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
 * OR OTHER DEALINGS IN THE SOFTWARE.
 */
package org.greencubes.nbt;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.greencubes.util.Util;

/**
 * NBT tag holding a single {@code float} payload (tag id 5).
 */
public class NBTTagFloat extends NBTBase implements NBTNumber {

	// The float payload. Public mutable field, matching the other NBT tag classes.
	public float a;

	public NBTTagFloat() {
	}

	public NBTTagFloat(float paramFloat) {
		this.a = paramFloat;
	}

	/** Writes the payload as a 4-byte float. */
	@Override
	void write(DataOutput paramDataOutput) throws IOException {
		paramDataOutput.writeFloat(this.a);
	}

	/** Reads the payload as a 4-byte float. */
	@Override
	void read(DataInput paramDataInput) throws IOException {
		this.a = paramDataInput.readFloat();
	}

	@Override
	public byte getId() {
		return 5;
	}

	@Override
	public byte getIdMinecraft() {
		return 5;
	}

	@Override
	public String dump() {
		return Float.toString(this.a);
	}

	@Override
	public String toString() {
		return Float.toString(this.a);
	}

	@Override
	public NBTTagFloat clone() {
		return new NBTTagFloat(this.a);
	}

	/**
	 * Value equality with another tag.
	 *
	 * <p>Fix: compares via {@link Float#floatToIntBits(float)} so this method is
	 * consistent with {@link #hashCode()}, which already hashes the bit pattern.
	 * The previous {@code ==} comparison treated {@code 0.0f} and {@code -0.0f}
	 * as equal while they hash differently, and a NaN payload as unequal to
	 * itself while hashing identically.
	 */
	@Override
	public boolean equals(NBTBase tag) {
		if(tag instanceof NBTTagFloat)
			return Float.floatToIntBits(((NBTTagFloat) tag).a) == Float.floatToIntBits(this.a);
		return false;
	}

	@Override
	public StringBuilder structureDump(StringBuilder store, int depth) {
		if(store == null)
			store = new StringBuilder();
		Util.dump(store, depth, "Float(" + getId() + "," + getName() + "): " + a + "\n");
		return store;
	}

	@Override
	public int hashCode() {
		return Float.floatToIntBits(a);
	}

	@Override
	public int getIntValue() {
		return (int) a;
	}

	@Override
	public long getLongValue() {
		return (long) a;
	}

	@Override
	public short getShortValue() {
		return (short) a;
	}

	@Override
	public boolean getBooleanValue() {
		// NOTE(review): NaN != 0 is true, so a NaN payload reads as true — confirm intended.
		return a != 0;
	}

	@Override
	public byte getByteValue() {
		return (byte) a;
	}

	@Override
	public float getFloatValue() {
		return a;
	}

	@Override
	public double getDoubleValue() {
		return a;
	}
}
package kotlin.sequences;

import java.util.Iterator;
import kotlin.Metadata;
import kotlin.jvm.functions.Function2;

// Decompiler output: a compiler-generated synthetic class. Judging from its name it
// appears to back an inlined `Sequence { ... }` object inside the `sequence { }`
// builder — TODO confirm against the Kotlin stdlib source. The type parameter <T>
// and the imports for Sequence/SequencesKt were lost during decompilation, so this
// file is reference material rather than compilable Java; do not hand-edit the
// @Metadata payload below (it encodes Kotlin signature data).
@Metadata(bv = {1, 0, 3}, d1 = {"\u0000\u0011\n\u0000\n\u0002\u0018\u0002\n\u0000\n\u0002\u0010(\n\u0000*\u0001\u0000\b\n\u0018\u00002\b\u0012\u0004\u0012\u00028\u00000\u0001J\u000f\u0010\u0002\u001a\b\u0012\u0004\u0012\u00028\u00000\u0003H–\u0002¨\u0006\u0004¸\u0006\u0000"}, d2 = {"kotlin/sequences/SequencesKt__SequencesKt$Sequence$1", "Lkotlin/sequences/Sequence;", "iterator", "", "kotlin-stdlib"}, k = 1, mv = {1, 1, 16})
/* compiled from: Sequences.kt */
public final class SequencesKt__SequenceBuilderKt$sequence$$inlined$Sequence$1 implements Sequence<T> {

    // Captured block passed to the builder; marked synthetic by the decompiler.
    final /* synthetic */ Function2 $block$inlined;

    public SequencesKt__SequenceBuilderKt$sequence$$inlined$Sequence$1(Function2 function2) {
        this.$block$inlined = function2;
    }

    // Delegates to SequencesKt.iterator, building a fresh iterator per call.
    public Iterator<T> iterator() {
        return SequencesKt.iterator(this.$block$inlined);
    }
}
/*
 * Copyright 2016-2018 shardingsphere.io.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * </p>
 */

package io.shardingsphere.core.integrate.type.ms;

import io.shardingsphere.core.common.base.AbstractSQLTest;
import io.shardingsphere.core.constant.DatabaseType;
import io.shardingsphere.core.constant.SQLType;
import io.shardingsphere.core.integrate.jaxb.SQLShardingRule;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.Collection;
import java.util.List;

/**
 * Parameterized integration test that replays every DML and DDL case against the
 * master-slave-only setup provided by {@code AbstractMasterSlaveOnlyTest}.
 */
@RunWith(Parameterized.class)
public class MasterSlaveOnlyDMLAndDDLTest extends AbstractMasterSlaveOnlyTest {

    public MasterSlaveOnlyDMLAndDDLTest(final String caseName, final String sql, final DatabaseType databaseType, final List<SQLShardingRule> shardingRules) {
        super(caseName, sql, databaseType, shardingRules);
    }

    /** Supplies one parameter row per DML and DDL test case. */
    @Parameterized.Parameters(name = "{0}In{2}")
    public static Collection<Object[]> dataParameters() {
        return AbstractSQLTest.dataParameters(SQLType.DML, SQLType.DDL);
    }

    /** Runs {@code importDataSet()} (from the base class) before each case. */
    @Before
    public void cleanAndInitTable() throws Exception {
        importDataSet();
    }
}
package module6;

import java.util.Arrays;
import java.util.Objects;

/**
 * Demonstrates in-place selection sort on an int array.
 */
public class SelectionSort {

    public static void main(String[] args) {
        int[] test = {7, 16, 66, 43, 97, 51};
        selectionSort(test);
        System.out.println(Arrays.toString(test));
    }

    /**
     * Sorts the given array in ascending order using selection sort
     * (O(n^2) comparisons, at most n-1 swaps, in place).
     * Widened from private to public so callers and tests can reuse it;
     * the old private access offered no protection worth keeping in a demo class.
     *
     * @param values the array to sort in place; must not be null (empty and
     *               single-element arrays are no-ops)
     * @throws NullPointerException if {@code values} is null
     */
    public static void selectionSort(int[] values) {
        Objects.requireNonNull(values, "values");
        for (int i = 0; i < values.length - 1; i++) {
            // find the index of the smallest remaining element
            int min = i;
            for (int j = i + 1; j < values.length; j++) {
                if (values[min] > values[j]) {
                    min = j;
                }
            }
            if (min != i) { // skip the pointless self-swap
                swap(values, min, i);
            }
        }
    }

    /** Swaps the elements at indices {@code a} and {@code b}. */
    private static void swap(int[] values, int a, int b) {
        int x = values[b];
        values[b] = values[a];
        values[a] = x;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.web.api.dto; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.ClassUtils; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.action.Action; import org.apache.nifi.action.component.details.ComponentDetails; import org.apache.nifi.action.component.details.ExtensionDetails; import org.apache.nifi.action.component.details.FlowChangeExtensionDetails; import org.apache.nifi.action.component.details.FlowChangeRemoteProcessGroupDetails; import org.apache.nifi.action.component.details.RemoteProcessGroupDetails; import org.apache.nifi.action.details.ActionDetails; import org.apache.nifi.action.details.ConfigureDetails; import org.apache.nifi.action.details.ConnectDetails; import org.apache.nifi.action.details.FlowChangeConfigureDetails; import org.apache.nifi.action.details.FlowChangeConnectDetails; import org.apache.nifi.action.details.FlowChangeMoveDetails; import org.apache.nifi.action.details.FlowChangePurgeDetails; import org.apache.nifi.action.details.MoveDetails; import org.apache.nifi.action.details.PurgeDetails; import org.apache.nifi.annotation.behavior.Restricted; import org.apache.nifi.annotation.behavior.Restriction; 
import org.apache.nifi.annotation.behavior.Stateful; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.DeprecationNotice; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.authorization.AccessPolicy; import org.apache.nifi.authorization.Authorizer; import org.apache.nifi.authorization.AuthorizerCapabilityDetection; import org.apache.nifi.authorization.Group; import org.apache.nifi.authorization.RequestAction; import org.apache.nifi.authorization.Resource; import org.apache.nifi.authorization.User; import org.apache.nifi.authorization.resource.Authorizable; import org.apache.nifi.authorization.resource.ComponentAuthorizable; import org.apache.nifi.authorization.resource.OperationAuthorizable; import org.apache.nifi.authorization.user.NiFiUser; import org.apache.nifi.authorization.user.NiFiUserUtils; import org.apache.nifi.bundle.Bundle; import org.apache.nifi.bundle.BundleCoordinate; import org.apache.nifi.bundle.BundleDetails; import org.apache.nifi.cluster.coordination.heartbeat.NodeHeartbeat; import org.apache.nifi.cluster.coordination.node.NodeConnectionStatus; import org.apache.nifi.cluster.event.NodeEvent; import org.apache.nifi.cluster.manager.StatusMerger; import org.apache.nifi.cluster.protocol.NodeIdentifier; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDependency; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.components.state.StateMap; import org.apache.nifi.components.validation.ValidationState; import org.apache.nifi.components.validation.ValidationStatus; import org.apache.nifi.connectable.Connectable; import org.apache.nifi.connectable.ConnectableType; import org.apache.nifi.connectable.Connection; import org.apache.nifi.connectable.Funnel; import org.apache.nifi.connectable.Port; 
import org.apache.nifi.connectable.Position; import org.apache.nifi.controller.ActiveThreadInfo; import org.apache.nifi.controller.ComponentNode; import org.apache.nifi.controller.ControllerService; import org.apache.nifi.controller.Counter; import org.apache.nifi.controller.FlowController; import org.apache.nifi.controller.ProcessorNode; import org.apache.nifi.controller.ReportingTaskNode; import org.apache.nifi.controller.Snippet; import org.apache.nifi.controller.Template; import org.apache.nifi.controller.ThreadDetails; import org.apache.nifi.controller.flow.FlowManager; import org.apache.nifi.controller.label.Label; import org.apache.nifi.controller.queue.DropFlowFileState; import org.apache.nifi.controller.queue.DropFlowFileStatus; import org.apache.nifi.controller.queue.FlowFileQueue; import org.apache.nifi.controller.queue.FlowFileSummary; import org.apache.nifi.controller.queue.ListFlowFileState; import org.apache.nifi.controller.queue.ListFlowFileStatus; import org.apache.nifi.controller.queue.LoadBalanceStrategy; import org.apache.nifi.controller.queue.LocalQueuePartitionDiagnostics; import org.apache.nifi.controller.queue.QueueDiagnostics; import org.apache.nifi.controller.queue.QueueSize; import org.apache.nifi.controller.queue.RemoteQueuePartitionDiagnostics; import org.apache.nifi.controller.repository.FlowFileRecord; import org.apache.nifi.controller.repository.claim.ContentClaim; import org.apache.nifi.controller.repository.claim.ResourceClaim; import org.apache.nifi.controller.service.ControllerServiceNode; import org.apache.nifi.controller.service.ControllerServiceProvider; import org.apache.nifi.controller.state.SortedStateUtils; import org.apache.nifi.controller.status.ConnectionStatus; import org.apache.nifi.controller.status.PortStatus; import org.apache.nifi.controller.status.ProcessGroupStatus; import org.apache.nifi.controller.status.ProcessorStatus; import org.apache.nifi.controller.status.RemoteProcessGroupStatus; import 
org.apache.nifi.controller.status.analytics.ConnectionStatusPredictions; import org.apache.nifi.controller.status.analytics.StatusAnalytics; import org.apache.nifi.controller.status.history.GarbageCollectionHistory; import org.apache.nifi.controller.status.history.GarbageCollectionStatus; import org.apache.nifi.diagnostics.GarbageCollection; import org.apache.nifi.diagnostics.StorageUsage; import org.apache.nifi.diagnostics.SystemDiagnostics; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFilePrioritizer; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.groups.ProcessGroup; import org.apache.nifi.groups.ProcessGroupCounts; import org.apache.nifi.groups.RemoteProcessGroup; import org.apache.nifi.groups.RemoteProcessGroupCounts; import org.apache.nifi.history.History; import org.apache.nifi.nar.ExtensionManager; import org.apache.nifi.nar.NarClassLoadersHolder; import org.apache.nifi.parameter.Parameter; import org.apache.nifi.parameter.ParameterContext; import org.apache.nifi.parameter.ParameterDescriptor; import org.apache.nifi.parameter.ParameterReferenceManager; import org.apache.nifi.processor.Processor; import org.apache.nifi.processor.Relationship; import org.apache.nifi.provenance.lineage.ComputeLineageResult; import org.apache.nifi.provenance.lineage.ComputeLineageSubmission; import org.apache.nifi.provenance.lineage.LineageEdge; import org.apache.nifi.provenance.lineage.LineageNode; import org.apache.nifi.provenance.lineage.ProvenanceEventLineageNode; import org.apache.nifi.registry.ComponentVariableRegistry; import org.apache.nifi.registry.VariableDescriptor; import org.apache.nifi.registry.flow.FlowRegistry; import org.apache.nifi.registry.flow.VersionControlInformation; import org.apache.nifi.registry.flow.VersionedComponent; import org.apache.nifi.registry.flow.VersionedFlowState; import org.apache.nifi.registry.flow.VersionedFlowStatus; import 
org.apache.nifi.registry.flow.VersionedProcessGroup; import org.apache.nifi.registry.flow.diff.DifferenceType; import org.apache.nifi.registry.flow.diff.FlowComparison; import org.apache.nifi.registry.flow.diff.FlowDifference; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedComponent; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedConnection; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedControllerService; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedFunnel; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedLabel; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedPort; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedProcessGroup; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedProcessor; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedRemoteGroupPort; import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedRemoteProcessGroup; import org.apache.nifi.registry.variable.VariableRegistryUpdateRequest; import org.apache.nifi.registry.variable.VariableRegistryUpdateStep; import org.apache.nifi.remote.PublicPort; import org.apache.nifi.remote.RemoteGroupPort; import org.apache.nifi.reporting.Bulletin; import org.apache.nifi.reporting.BulletinRepository; import org.apache.nifi.reporting.ReportingTask; import org.apache.nifi.scheduling.SchedulingStrategy; import org.apache.nifi.util.FlowDifferenceFilters; import org.apache.nifi.util.FormatUtils; import org.apache.nifi.web.FlowModification; import org.apache.nifi.web.Revision; import org.apache.nifi.web.api.dto.action.ActionDTO; import org.apache.nifi.web.api.dto.action.HistoryDTO; import org.apache.nifi.web.api.dto.action.component.details.ComponentDetailsDTO; import org.apache.nifi.web.api.dto.action.component.details.ExtensionDetailsDTO; import org.apache.nifi.web.api.dto.action.component.details.RemoteProcessGroupDetailsDTO; import 
org.apache.nifi.web.api.dto.action.details.ActionDetailsDTO; import org.apache.nifi.web.api.dto.action.details.ConfigureDetailsDTO; import org.apache.nifi.web.api.dto.action.details.ConnectDetailsDTO; import org.apache.nifi.web.api.dto.action.details.MoveDetailsDTO; import org.apache.nifi.web.api.dto.action.details.PurgeDetailsDTO; import org.apache.nifi.web.api.dto.diagnostics.ClassLoaderDiagnosticsDTO; import org.apache.nifi.web.api.dto.diagnostics.ConnectionDiagnosticsDTO; import org.apache.nifi.web.api.dto.diagnostics.ConnectionDiagnosticsSnapshotDTO; import org.apache.nifi.web.api.dto.diagnostics.ControllerServiceDiagnosticsDTO; import org.apache.nifi.web.api.dto.diagnostics.GCDiagnosticsSnapshotDTO; import org.apache.nifi.web.api.dto.diagnostics.GarbageCollectionDiagnosticsDTO; import org.apache.nifi.web.api.dto.diagnostics.JVMControllerDiagnosticsSnapshotDTO; import org.apache.nifi.web.api.dto.diagnostics.JVMDiagnosticsDTO; import org.apache.nifi.web.api.dto.diagnostics.JVMDiagnosticsSnapshotDTO; import org.apache.nifi.web.api.dto.diagnostics.JVMFlowDiagnosticsSnapshotDTO; import org.apache.nifi.web.api.dto.diagnostics.JVMSystemDiagnosticsSnapshotDTO; import org.apache.nifi.web.api.dto.diagnostics.LocalQueuePartitionDTO; import org.apache.nifi.web.api.dto.diagnostics.ProcessorDiagnosticsDTO; import org.apache.nifi.web.api.dto.diagnostics.RemoteQueuePartitionDTO; import org.apache.nifi.web.api.dto.diagnostics.RepositoryUsageDTO; import org.apache.nifi.web.api.dto.diagnostics.ThreadDumpDTO; import org.apache.nifi.web.api.dto.flow.FlowBreadcrumbDTO; import org.apache.nifi.web.api.dto.flow.FlowDTO; import org.apache.nifi.web.api.dto.flow.ProcessGroupFlowDTO; import org.apache.nifi.web.api.dto.provenance.lineage.LineageDTO; import org.apache.nifi.web.api.dto.provenance.lineage.LineageRequestDTO; import org.apache.nifi.web.api.dto.provenance.lineage.LineageRequestDTO.LineageRequestType; import org.apache.nifi.web.api.dto.provenance.lineage.LineageResultsDTO; 
import org.apache.nifi.web.api.dto.provenance.lineage.ProvenanceLinkDTO; import org.apache.nifi.web.api.dto.provenance.lineage.ProvenanceNodeDTO; import org.apache.nifi.web.api.dto.status.ConnectionStatisticsDTO; import org.apache.nifi.web.api.dto.status.ConnectionStatisticsSnapshotDTO; import org.apache.nifi.web.api.dto.status.ConnectionStatusDTO; import org.apache.nifi.web.api.dto.status.ConnectionStatusPredictionsSnapshotDTO; import org.apache.nifi.web.api.dto.status.ConnectionStatusSnapshotDTO; import org.apache.nifi.web.api.dto.status.PortStatusDTO; import org.apache.nifi.web.api.dto.status.PortStatusSnapshotDTO; import org.apache.nifi.web.api.dto.status.ProcessGroupStatusDTO; import org.apache.nifi.web.api.dto.status.ProcessGroupStatusSnapshotDTO; import org.apache.nifi.web.api.dto.status.ProcessorStatusDTO; import org.apache.nifi.web.api.dto.status.ProcessorStatusSnapshotDTO; import org.apache.nifi.web.api.dto.status.RemoteProcessGroupStatusDTO; import org.apache.nifi.web.api.dto.status.RemoteProcessGroupStatusSnapshotDTO; import org.apache.nifi.web.api.entity.AccessPolicyEntity; import org.apache.nifi.web.api.entity.AccessPolicySummaryEntity; import org.apache.nifi.web.api.entity.AffectedComponentEntity; import org.apache.nifi.web.api.entity.AllowableValueEntity; import org.apache.nifi.web.api.entity.BulletinEntity; import org.apache.nifi.web.api.entity.ComponentReferenceEntity; import org.apache.nifi.web.api.entity.ConnectionStatusSnapshotEntity; import org.apache.nifi.web.api.entity.ControllerServiceEntity; import org.apache.nifi.web.api.entity.FlowBreadcrumbEntity; import org.apache.nifi.web.api.entity.ParameterContextReferenceEntity; import org.apache.nifi.web.api.entity.ParameterEntity; import org.apache.nifi.web.api.entity.PortEntity; import org.apache.nifi.web.api.entity.PortStatusSnapshotEntity; import org.apache.nifi.web.api.entity.ProcessGroupEntity; import org.apache.nifi.web.api.entity.ProcessGroupStatusSnapshotEntity; import 
org.apache.nifi.web.api.entity.ProcessorEntity; import org.apache.nifi.web.api.entity.ProcessorStatusSnapshotEntity; import org.apache.nifi.web.api.entity.RemoteProcessGroupEntity; import org.apache.nifi.web.api.entity.RemoteProcessGroupStatusSnapshotEntity; import org.apache.nifi.web.api.entity.TenantEntity; import org.apache.nifi.web.api.entity.VariableEntity; import org.apache.nifi.web.controller.ControllerFacade; import org.apache.nifi.web.revision.RevisionManager; import javax.ws.rs.WebApplicationException; import java.text.Collator; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TimeZone; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; public final class DtoFactory { @SuppressWarnings("rawtypes") private final static Comparator<Class> CLASS_NAME_COMPARATOR = new Comparator<Class>() { @Override public int compare(final Class class1, final Class class2) { return Collator.getInstance(Locale.US).compare(class1.getSimpleName(), class2.getSimpleName()); } }; public static final String SENSITIVE_VALUE_MASK = "********"; private BulletinRepository bulletinRepository; private ControllerServiceProvider controllerServiceProvider; private EntityFactory entityFactory; private Authorizer authorizer; private ExtensionManager extensionManager; public ControllerConfigurationDTO createControllerConfigurationDto(final ControllerFacade controllerFacade) { final ControllerConfigurationDTO dto = new 
ControllerConfigurationDTO();
        dto.setMaxTimerDrivenThreadCount(controllerFacade.getMaxTimerDrivenThreadCount());
        dto.setMaxEventDrivenThreadCount(controllerFacade.getMaxEventDrivenThreadCount());
        return dto;
    }

    /**
     * Creates a FlowConfigurationDTO carrying flow-level settings: the auto-refresh
     * interval, the configured authorizer's capability flags, the server's current
     * time/offset, and the default back-pressure thresholds.
     *
     * @param autoRefreshInterval auto refresh interval as a time-duration string
     * @param defaultBackPressureObjectThreshold default object-count back-pressure threshold
     * @param defaultBackPressureDataSizeThreshold default data-size back-pressure threshold
     * @return dto
     */
    public FlowConfigurationDTO createFlowConfigurationDto(final String autoRefreshInterval, final Long defaultBackPressureObjectThreshold, final String defaultBackPressureDataSizeThreshold) {
        final FlowConfigurationDTO dto = new FlowConfigurationDTO();

        // get the refresh interval, normalized to whole seconds
        final long refreshInterval = FormatUtils.getTimeDuration(autoRefreshInterval, TimeUnit.SECONDS);
        dto.setAutoRefreshIntervalSeconds(refreshInterval);

        // record which capabilities the configured authorizer provides
        dto.setSupportsManagedAuthorizer(AuthorizerCapabilityDetection.isManagedAuthorizer(authorizer));
        dto.setSupportsConfigurableUsersAndGroups(AuthorizerCapabilityDetection.isConfigurableUserGroupProvider(authorizer));
        dto.setSupportsConfigurableAuthorizer(AuthorizerCapabilityDetection.isConfigurableAccessPolicyProvider(authorizer));

        // current server time and its offset from UTC (in milliseconds)
        final Date now = new Date();
        dto.setTimeOffset(TimeZone.getDefault().getOffset(now.getTime()));
        dto.setCurrentTime(now);

        dto.setDefaultBackPressureDataSizeThreshold(defaultBackPressureDataSizeThreshold);
        dto.setDefaultBackPressureObjectThreshold(defaultBackPressureObjectThreshold);

        return dto;
    }

    /**
     * Creates an ActionDTO for the specified Action.
*
     * @param action action
     * @return dto
     */
    public ActionDTO createActionDto(final Action action) {
        final ActionDTO actionDto = new ActionDTO();
        actionDto.setId(action.getId());
        actionDto.setSourceId(action.getSourceId());
        actionDto.setSourceName(action.getSourceName());
        actionDto.setSourceType(action.getSourceType().toString());
        actionDto.setTimestamp(action.getTimestamp());
        actionDto.setUserIdentity(action.getUserIdentity());
        actionDto.setOperation(action.getOperation().toString());
        // nested detail objects are mapped by their own (null-safe) converters
        actionDto.setActionDetails(createActionDetailsDto(action.getActionDetails()));
        actionDto.setComponentDetails(createComponentDetailsDto(action.getComponentDetails()));

        return actionDto;
    }

    /**
     * Creates an ActionDetailsDTO for the specified ActionDetails.
     *
     * @param actionDetails details
     * @return dto, or {@code null} when {@code actionDetails} is {@code null}
     */
    private ActionDetailsDTO createActionDetailsDto(final ActionDetails actionDetails) {
        if (actionDetails == null) {
            return null;
        }

        // map each known FlowChange* subtype onto its DTO counterpart
        if (actionDetails instanceof FlowChangeConfigureDetails) {
            final ConfigureDetailsDTO configureDetails = new ConfigureDetailsDTO();
            configureDetails.setName(((ConfigureDetails) actionDetails).getName());
            configureDetails.setPreviousValue(((ConfigureDetails) actionDetails).getPreviousValue());
            configureDetails.setValue(((ConfigureDetails) actionDetails).getValue());
            return configureDetails;
        } else if (actionDetails instanceof FlowChangeConnectDetails) {
            final ConnectDetailsDTO connectDetails = new ConnectDetailsDTO();
            connectDetails.setSourceId(((ConnectDetails) actionDetails).getSourceId());
            connectDetails.setSourceName(((ConnectDetails) actionDetails).getSourceName());
            connectDetails.setSourceType(((ConnectDetails) actionDetails).getSourceType().toString());
            connectDetails.setRelationship(((ConnectDetails) actionDetails).getRelationship());
            connectDetails.setDestinationId(((ConnectDetails) actionDetails).getDestinationId());
            connectDetails.setDestinationName(((ConnectDetails) actionDetails).getDestinationName());
            connectDetails.setDestinationType(((ConnectDetails) actionDetails).getDestinationType().toString());
            return connectDetails;
        } else if (actionDetails instanceof FlowChangeMoveDetails) {
            final MoveDetailsDTO moveDetails = new MoveDetailsDTO();
            moveDetails.setPreviousGroup(((MoveDetails) actionDetails).getPreviousGroup());
            moveDetails.setPreviousGroupId(((MoveDetails) actionDetails).getPreviousGroupId());
            moveDetails.setGroup(((MoveDetails) actionDetails).getGroup());
            moveDetails.setGroupId(((MoveDetails) actionDetails).getGroupId());
            return moveDetails;
        } else if (actionDetails instanceof FlowChangePurgeDetails) {
            final PurgeDetailsDTO purgeDetails = new PurgeDetailsDTO();
            purgeDetails.setEndDate(((PurgeDetails) actionDetails).getEndDate());
            return purgeDetails;
        } else {
            // unknown subtype: fail loudly rather than emit a partial DTO
            throw new WebApplicationException(new IllegalArgumentException(String.format("Unrecognized type of action details encountered %s during serialization.", actionDetails.toString())));
        }
    }

    /**
     * Creates a ComponentDetailsDTO for the specified ComponentDetails.
     *
     * @param componentDetails details
     * @return dto, or {@code null} when {@code componentDetails} is {@code null}
     */
    private ComponentDetailsDTO createComponentDetailsDto(final ComponentDetails componentDetails) {
        if (componentDetails == null) {
            return null;
        }

        if (componentDetails instanceof FlowChangeExtensionDetails) {
            final ExtensionDetailsDTO processorDetails = new ExtensionDetailsDTO();
            processorDetails.setType(((ExtensionDetails) componentDetails).getType());
            return processorDetails;
        } else if (componentDetails instanceof FlowChangeRemoteProcessGroupDetails) {
            final RemoteProcessGroupDetailsDTO remoteProcessGroupDetails = new RemoteProcessGroupDetailsDTO();
            remoteProcessGroupDetails.setUri(((RemoteProcessGroupDetails) componentDetails).getUri());
            return remoteProcessGroupDetails;
        } else {
            // unknown subtype: fail loudly rather than emit a partial DTO
            throw new WebApplicationException(new IllegalArgumentException(String.format("Unrecognized type of component details encountered %s during serialization. ", componentDetails.toString())));
        }
    }

    /**
     * Creates a HistoryDTO from the specified History.
     *
     * @param history history
     * @return dto
     */
    public HistoryDTO createHistoryDto(final History history) {
        final HistoryDTO historyDto = new HistoryDTO();
        historyDto.setTotal(history.getTotal());
        historyDto.setLastRefreshed(history.getLastRefreshed());
        return historyDto;
    }

    /**
     * Creates a ComponentStateDTO for the given component and its state.
     *
     * @param componentId component id
     * @param componentClass the component's class, used to look up its {@code @Stateful} description
     * @param localState local state
     * @param clusterState cluster state
     * @return dto
     */
    public ComponentStateDTO createComponentStateDTO(final String componentId, final Class<?> componentClass, final StateMap localState, final StateMap clusterState) {
        final ComponentStateDTO dto = new ComponentStateDTO();
        dto.setComponentId(componentId);
        dto.setStateDescription(getStateDescription(componentClass));
        dto.setLocalState(createStateMapDTO(Scope.LOCAL, localState));
        dto.setClusterState(createStateMapDTO(Scope.CLUSTER, clusterState));
        return dto;
    }

    /**
     * Gets the description of the state this component persists, taken from the
     * component class's {@code @Stateful} annotation.
     *
     * @param componentClass the component class
     * @return state description, or {@code null} when the class is not annotated
     */
    private String getStateDescription(final Class<?> componentClass) {
        final Stateful capabilityDesc = componentClass.getAnnotation(Stateful.class);
        if (capabilityDesc != null) {
            return capabilityDesc.description();
        } else {
            return null;
        }
    }

    /**
     * Creates a StateMapDTO for the given scope and state map.
*
     * @param scope the scope
     * @param stateMap the state map
     * @return dto, or {@code null} when {@code stateMap} is {@code null}
     */
    public StateMapDTO createStateMapDTO(final Scope scope, final StateMap stateMap) {
        if (stateMap == null) {
            return null;
        }

        final StateMapDTO dto = new StateMapDTO();
        dto.setScope(scope.toString());

        // sort entries by key for a stable order
        final TreeMap<String, String> sortedState = new TreeMap<>(SortedStateUtils.getKeyComparator());
        final Map<String, String> state = stateMap.toMap();
        sortedState.putAll(state);

        int count = 0;
        final List<StateEntryDTO> stateEntries = new ArrayList<>();
        final Set<Map.Entry<String, String>> entrySet = sortedState.entrySet();
        // copy at most MAX_COMPONENT_STATE_ENTRIES entries into the DTO
        for (final Iterator<Entry<String, String>> iter = entrySet.iterator(); iter.hasNext() && count++ < SortedStateUtils.MAX_COMPONENT_STATE_ENTRIES;) {
            final Map.Entry<String, String> entry = iter.next();
            final StateEntryDTO entryDTO = new StateEntryDTO();
            entryDTO.setKey(entry.getKey());
            entryDTO.setValue(entry.getValue());
            stateEntries.add(entryDTO);
        }

        // total reflects the full state size even when the list above was truncated
        dto.setTotalEntryCount(state.size());
        dto.setState(stateEntries);

        return dto;
    }

    /**
     * Creates CounterDTOs for each Counter specified.
     *
     * @param counterDtos dtos
     * @return dto
     */
    public CountersSnapshotDTO createCountersDto(final Collection<CounterDTO> counterDtos) {
        final CountersSnapshotDTO dto = new CountersSnapshotDTO();
        dto.setCounters(counterDtos);
        dto.setGenerated(new Date());
        return dto;
    }

    /**
     * Creates a CounterDTO from the specified Counter.
     *
     * @param counter counter
     * @return dto
     */
    public CounterDTO createCounterDto(final Counter counter) {
        final CounterDTO dto = new CounterDTO();
        dto.setId(counter.getIdentifier());
        dto.setContext(counter.getContext());
        dto.setName(counter.getName());
        dto.setValueCount(counter.getValue());
        dto.setValue(FormatUtils.formatCount(counter.getValue()));
        return dto;
    }

    /**
     * Creates a PositionDTO from the specified position.
     *
     * @param position position
     * @return dto
     */
    public PositionDTO createPositionDto(final Position position) {
        return new PositionDTO(position.getX(), position.getY());
    }

    // a drop request is terminal once it completed, was canceled, or failed
    private boolean isDropRequestComplete(final DropFlowFileState state) {
        return DropFlowFileState.COMPLETE.equals(state) || DropFlowFileState.CANCELED.equals(state) || DropFlowFileState.FAILURE.equals(state);
    }

    /**
     * Creates a DropRequestDTO from the specified flow file status.
     *
     * @param dropRequest dropRequest
     * @return dto
     */
    public DropRequestDTO createDropRequestDTO(final DropFlowFileStatus dropRequest) {
        final DropRequestDTO dto = new DropRequestDTO();
        dto.setId(dropRequest.getRequestIdentifier());
        dto.setSubmissionTime(new Date(dropRequest.getRequestSubmissionTime()));
        dto.setLastUpdated(new Date(dropRequest.getLastUpdated()));
        dto.setState(dropRequest.getState().toString());
        dto.setFailureReason(dropRequest.getFailureReason());
        dto.setFinished(isDropRequestComplete(dropRequest.getState()));

        // dropped so far: raw count/size plus a pre-formatted "count / size" display string
        final QueueSize dropped = dropRequest.getDroppedSize();
        dto.setDroppedCount(dropped.getObjectCount());
        dto.setDroppedSize(dropped.getByteCount());
        dto.setDropped(FormatUtils.formatCount(dropped.getObjectCount()) + " / " + FormatUtils.formatDataSize(dropped.getByteCount()));

        final QueueSize current = dropRequest.getCurrentSize();
        dto.setCurrentCount(current.getObjectCount());
        dto.setCurrentSize(current.getByteCount());
        dto.setCurrent(FormatUtils.formatCount(current.getObjectCount()) + " / " + FormatUtils.formatDataSize(current.getByteCount()));

        final QueueSize original =
dropRequest.getOriginalSize();
        dto.setOriginalCount(original.getObjectCount());
        dto.setOriginalSize(original.getByteCount());
        dto.setOriginal(FormatUtils.formatCount(original.getObjectCount()) + " / " + FormatUtils.formatDataSize(original.getByteCount()));

        if (isDropRequestComplete(dropRequest.getState())) {
            dto.setPercentCompleted(100);
        } else {
            // NOTE(review): integer division; also divides by zero when the original
            // queue size was 0 — confirm upstream guarantees a non-empty queue here
            dto.setPercentCompleted((dropped.getObjectCount() * 100) / original.getObjectCount());
        }

        return dto;
    }

    // a listing request is terminal once it completed, was canceled, or failed
    private boolean isListingRequestComplete(final ListFlowFileState state) {
        return ListFlowFileState.COMPLETE.equals(state) || ListFlowFileState.CANCELED.equals(state) || ListFlowFileState.FAILURE.equals(state);
    }

    // copies a QueueSize (object count + byte count) into its DTO form
    private QueueSizeDTO createQueueSizeDTO(final QueueSize queueSize) {
        final QueueSizeDTO dto = new QueueSizeDTO();
        dto.setByteCount(queueSize.getByteCount());
        dto.setObjectCount(queueSize.getObjectCount());
        return dto;
    }

    /**
     * Creates a ListingRequestDTO from the specified ListFlowFileStatus.
     *
     * @param listingRequest listingRequest
     * @return dto
     */
    public ListingRequestDTO createListingRequestDTO(final ListFlowFileStatus listingRequest) {
        final ListingRequestDTO dto = new ListingRequestDTO();
        dto.setId(listingRequest.getRequestIdentifier());
        dto.setSubmissionTime(new Date(listingRequest.getRequestSubmissionTime()));
        dto.setLastUpdated(new Date(listingRequest.getLastUpdated()));
        dto.setState(listingRequest.getState().toString());
        dto.setFailureReason(listingRequest.getFailureReason());
        dto.setFinished(isListingRequestComplete(listingRequest.getState()));
        dto.setMaxResults(listingRequest.getMaxResults());
        dto.setPercentCompleted(listingRequest.getCompletionPercentage());
        dto.setQueueSize(createQueueSizeDTO(listingRequest.getQueueSize()));

        // flow file summaries are only attached once the listing has finished
        if (isListingRequestComplete(listingRequest.getState())) {
            final List<FlowFileSummary> flowFileSummaries = listingRequest.getFlowFileSummaries();
            if (flowFileSummaries != null) {
                final Date now = new Date();
                final List<FlowFileSummaryDTO> summaryDtos = new ArrayList<>(flowFileSummaries.size());
                for (final FlowFileSummary summary : flowFileSummaries) {
                    summaryDtos.add(createFlowFileSummaryDTO(summary, now));
                }
                dto.setFlowFileSummaries(summaryDtos);
            }
        }

        return dto;
    }

    /**
     * Creates a FlowFileSummaryDTO from the specified FlowFileSummary.
     *
     * @param summary summary
     * @param now the reference time used to compute durations and ages
     * @return dto
     */
    public FlowFileSummaryDTO createFlowFileSummaryDTO(final FlowFileSummary summary, final Date now) {
        final FlowFileSummaryDTO dto = new FlowFileSummaryDTO();
        dto.setUuid(summary.getUuid());
        dto.setFilename(summary.getFilename());
        dto.setPenalized(summary.isPenalized());

        // clamp to 0 when the penalty has already expired
        final long penaltyExpiration = summary.getPenaltyExpirationMillis() - now.getTime();
        dto.setPenaltyExpiresIn(penaltyExpiration>=0?penaltyExpiration:0);

        dto.setPosition(summary.getPosition());
        dto.setSize(summary.getSize());

        final long queuedDuration = now.getTime() - summary.getLastQueuedTime();
        dto.setQueuedDuration(queuedDuration);

        final long age = now.getTime() - summary.getLineageStartDate();
        dto.setLineageDuration(age);

        return dto;
    }

    /**
     * Creates a FlowFileDTO from the specified FlowFileRecord.
* * @param record record * @return dto */ public FlowFileDTO createFlowFileDTO(final FlowFileRecord record) { final Date now = new Date(); final FlowFileDTO dto = new FlowFileDTO(); dto.setUuid(record.getAttribute(CoreAttributes.UUID.key())); dto.setFilename(record.getAttribute(CoreAttributes.FILENAME.key())); dto.setPenalized(record.isPenalized()); final long penaltyExpiration = record.getPenaltyExpirationMillis() - now.getTime(); dto.setPenaltyExpiresIn(penaltyExpiration>=0?penaltyExpiration:0); dto.setSize(record.getSize()); dto.setAttributes(record.getAttributes()); final long queuedDuration = now.getTime() - record.getLastQueueDate(); dto.setQueuedDuration(queuedDuration); final long age = now.getTime() - record.getLineageStartDate(); dto.setLineageDuration(age); final ContentClaim contentClaim = record.getContentClaim(); if (contentClaim != null) { final ResourceClaim resourceClaim = contentClaim.getResourceClaim(); dto.setContentClaimSection(resourceClaim.getSection()); dto.setContentClaimContainer(resourceClaim.getContainer()); dto.setContentClaimIdentifier(resourceClaim.getId()); dto.setContentClaimOffset(contentClaim.getOffset() + record.getContentClaimOffset()); dto.setContentClaimFileSizeBytes(record.getSize()); dto.setContentClaimFileSize(FormatUtils.formatDataSize(record.getSize())); } return dto; } /** * Creates a ConnectionDTO from the specified Connection. 
 *
 * @param connection connection
 * @return dto, or null when the connection is null
 */
public ConnectionDTO createConnectionDto(final Connection connection) {
    if (connection == null) {
        return null;
    }
    final ConnectionDTO dto = new ConnectionDTO();
    dto.setId(connection.getIdentifier());
    dto.setParentGroupId(connection.getProcessGroup().getIdentifier());
    final List<PositionDTO> bendPoints = new ArrayList<>();
    for (final Position bendPoint : connection.getBendPoints()) {
        bendPoints.add(createPositionDto(bendPoint));
    }
    dto.setBends(bendPoints);
    dto.setName(connection.getName());
    dto.setLabelIndex(connection.getLabelIndex());
    dto.setzIndex(connection.getZIndex());
    dto.setSource(createConnectableDto(connection.getSource()));
    dto.setDestination(createConnectableDto(connection.getDestination()));
    dto.setVersionedComponentId(connection.getVersionedComponentId().orElse(null));
    final FlowFileQueue flowFileQueue = connection.getFlowFileQueue();
    dto.setBackPressureObjectThreshold(flowFileQueue.getBackPressureObjectThreshold());
    dto.setBackPressureDataSizeThreshold(flowFileQueue.getBackPressureDataSizeThreshold());
    dto.setFlowFileExpiration(flowFileQueue.getFlowFileExpiration());
    dto.setPrioritizers(new ArrayList<String>());
    for (final FlowFilePrioritizer comparator : flowFileQueue.getPriorities()) {
        dto.getPrioritizers().add(comparator.getClass().getCanonicalName());
    }
    // Selected relationships: the ANONYMOUS relationship (used by ports) is skipped,
    // so ports end up with no relationships populated.
    for (final Relationship selectedRelationship : connection.getRelationships()) {
        if (!Relationship.ANONYMOUS.equals(selectedRelationship)) {
            if (dto.getSelectedRelationships() == null) {
                // TreeSet with a US-locale collator gives a stable, case-aware sort order.
                dto.setSelectedRelationships(new TreeSet<String>(Collator.getInstance(Locale.US)));
            }
            dto.getSelectedRelationships().add(selectedRelationship.getName());
        }
    }
    // Available relationships of the source component; again the ANONYMOUS relationship is skipped.
    for (final Relationship availableRelationship : connection.getSource().getRelationships()) {
        if (!Relationship.ANONYMOUS.equals(availableRelationship)) {
            if (dto.getAvailableRelationships() == null) {
                dto.setAvailableRelationships(new TreeSet<String>(Collator.getInstance(Locale.US)));
            }
            dto.getAvailableRelationships().add(availableRelationship.getName());
        }
    }
    final LoadBalanceStrategy loadBalanceStrategy = flowFileQueue.getLoadBalanceStrategy();
    dto.setLoadBalancePartitionAttribute(flowFileQueue.getPartitioningAttribute());
    dto.setLoadBalanceStrategy(loadBalanceStrategy.name());
    dto.setLoadBalanceCompression(flowFileQueue.getLoadBalanceCompression().name());
    // Derive the human-facing load-balance status from the strategy and the queue's live state.
    if (loadBalanceStrategy == LoadBalanceStrategy.DO_NOT_LOAD_BALANCE) {
        dto.setLoadBalanceStatus(ConnectionDTO.LOAD_BALANCE_NOT_CONFIGURED);
    } else if (flowFileQueue.isActivelyLoadBalancing()) {
        dto.setLoadBalanceStatus(ConnectionDTO.LOAD_BALANCE_ACTIVE);
    } else {
        dto.setLoadBalanceStatus(ConnectionDTO.LOAD_BALANCE_INACTIVE);
    }
    return dto;
}

/**
 * Creates a ConnectableDTO from the specified Connectable.
 *
 * @param connectable connectable
 * @return dto, or null when the connectable is null
 */
public ConnectableDTO createConnectableDto(final Connectable connectable) {
    if (connectable == null) {
        return null;
    }
    boolean isAuthorized = connectable.isAuthorized(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser());
    final ConnectableDTO dto = new ConnectableDTO();
    dto.setId(connectable.getIdentifier());
    // Unauthorized users see the identifier in place of the (potentially sensitive) name.
    dto.setName(isAuthorized ? connectable.getName() : connectable.getIdentifier());
    dto.setType(connectable.getConnectableType().name());
    dto.setVersionedComponentId(connectable.getVersionedComponentId().orElse(null));
    if (connectable instanceof RemoteGroupPort) {
        // Remote ports report the state of the remote target rather than the local component.
        final RemoteGroupPort remoteGroupPort = (RemoteGroupPort) connectable;
        final RemoteProcessGroup remoteGroup = remoteGroupPort.getRemoteProcessGroup();
        dto.setGroupId(remoteGroup.getIdentifier());
        dto.setRunning(remoteGroupPort.isTargetRunning());
        dto.setTransmitting(remoteGroupPort.isRunning());
        dto.setExists(remoteGroupPort.getTargetExists());
        if (isAuthorized) {
            dto.setComments(remoteGroup.getComments());
        }
    } else {
        dto.setGroupId(connectable.getProcessGroup().getIdentifier());
        dto.setRunning(connectable.isRunning());
        if (isAuthorized) {
            dto.setComments(connectable.getComments());
        }
    }
    return dto;
}

/**
 * Creates a LabelDTO from the specified Label.
 *
 * @param label label
 * @return dto, or null when the label is null
 */
public LabelDTO createLabelDto(final Label label) {
    if (label == null) {
        return null;
    }
    final LabelDTO dto = new LabelDTO();
    dto.setId(label.getIdentifier());
    dto.setPosition(createPositionDto(label.getPosition()));
    dto.setStyle(label.getStyle());
    dto.setHeight(label.getSize().getHeight());
    dto.setWidth(label.getSize().getWidth());
    dto.setLabel(label.getValue());
    dto.setParentGroupId(label.getProcessGroup().getIdentifier());
    dto.setVersionedComponentId(label.getVersionedComponentId().orElse(null));
    return dto;
}

/**
 * Creates a {@link UserDTO} from the specified {@link User}.
 *
 * @param user user
 * @param groups group memberships to attach to the DTO
 * @param accessPolicies access-policy summaries to attach to the DTO
 * @return dto, or null when the user is null
 */
public UserDTO createUserDto(final User user, final Set<TenantEntity> groups, final Set<AccessPolicySummaryEntity> accessPolicies) {
    if (user == null) {
        return null;
    }
    final UserDTO dto = new UserDTO();
    dto.setId(user.getIdentifier());
    dto.setUserGroups(groups);
    dto.setIdentity(user.getIdentity());
    dto.setConfigurable(AuthorizerCapabilityDetection.isUserConfigurable(authorizer, user));
    dto.setAccessPolicies(accessPolicies);
    return dto;
}

/**
 * Creates a {@link TenantDTO} from the specified {@link User}.
 *
 * @param user user
 * @return dto, or null when the user is null
 */
public TenantDTO createTenantDTO(User user) {
    if (user == null) {
        return null;
    }
    final TenantDTO dto = new TenantDTO();
    dto.setId(user.getIdentifier());
    dto.setIdentity(user.getIdentity());
    dto.setConfigurable(AuthorizerCapabilityDetection.isUserConfigurable(authorizer, user));
    return dto;
}

/**
 * Creates a {@link UserGroupDTO} from the specified {@link Group}.
 *
 * @param userGroup user group
 * @param users member users to attach to the DTO
 * @param accessPolicies access-policy summaries; converted to full AccessPolicyEntity values below
 * @return dto, or null when the group is null
 */
public UserGroupDTO createUserGroupDto(final Group userGroup, Set<TenantEntity> users, final Set<AccessPolicySummaryEntity> accessPolicies) {
    if (userGroup == null) {
        return null;
    }
    // convert to access policies to handle backward compatibility due to incorrect
    // type in the UserGroupDTO
    final Set<AccessPolicyEntity> policies = accessPolicies.stream().map(summaryEntity -> {
        final AccessPolicyDTO policy = new AccessPolicyDTO();
        policy.setId(summaryEntity.getId());
        // Policy details are only copied when the caller has read permission on the policy.
        if (summaryEntity.getPermissions().getCanRead()) {
            final AccessPolicySummaryDTO summary = summaryEntity.getComponent();
            policy.setResource(summary.getResource());
            policy.setAction(summary.getAction());
            policy.setConfigurable(summary.getConfigurable());
            policy.setComponentReference(summary.getComponentReference());
        }
        return entityFactory.createAccessPolicyEntity(policy, summaryEntity.getRevision(), summaryEntity.getPermissions());
    }).collect(Collectors.toSet());
    final UserGroupDTO dto = new UserGroupDTO();
    dto.setId(userGroup.getIdentifier());
    dto.setUsers(users);
    dto.setIdentity(userGroup.getName());
    dto.setConfigurable(AuthorizerCapabilityDetection.isGroupConfigurable(authorizer, userGroup));
    dto.setAccessPolicies(policies);
    return dto;
}

/**
 * Creates a {@link TenantDTO} from the specified {@link Group}.
 *
 * @param userGroup user group
 * @return dto, or null when the group is null
 */
public TenantDTO createTenantDTO(Group userGroup) {
    if (userGroup == null) {
        return null;
    }
    final TenantDTO dto = new TenantDTO();
    dto.setId(userGroup.getIdentifier());
    dto.setIdentity(userGroup.getName());
    dto.setConfigurable(AuthorizerCapabilityDetection.isGroupConfigurable(authorizer, userGroup));
    return dto;
}

/**
 * Creates a FunnelDTO from the specified Funnel.
 *
 * @param funnel funnel
 * @return dto, or null when the funnel is null
 */
public FunnelDTO createFunnelDto(final Funnel funnel) {
    if (funnel == null) {
        return null;
    }
    final FunnelDTO dto = new FunnelDTO();
    dto.setId(funnel.getIdentifier());
    dto.setPosition(createPositionDto(funnel.getPosition()));
    dto.setParentGroupId(funnel.getProcessGroup().getIdentifier());
    dto.setVersionedComponentId(funnel.getVersionedComponentId().orElse(null));
    return dto;
}

/**
 * Creates a SnippetDTO from the specified Snippet.
* * @param snippet snippet * @return dto */ public SnippetDTO createSnippetDto(final Snippet snippet) { final SnippetDTO dto = new SnippetDTO(); dto.setId(snippet.getId()); dto.setParentGroupId(snippet.getParentGroupId()); // populate the snippet contents ids dto.setConnections(mapRevisionToDto(snippet.getConnections())); dto.setFunnels(mapRevisionToDto(snippet.getFunnels())); dto.setInputPorts(mapRevisionToDto(snippet.getInputPorts())); dto.setLabels(mapRevisionToDto(snippet.getLabels())); dto.setOutputPorts(mapRevisionToDto(snippet.getOutputPorts())); dto.setProcessGroups(mapRevisionToDto(snippet.getProcessGroups())); dto.setProcessors(mapRevisionToDto(snippet.getProcessors())); dto.setRemoteProcessGroups(mapRevisionToDto(snippet.getRemoteProcessGroups())); return dto; } private Map<String, RevisionDTO> mapRevisionToDto(final Map<String, Revision> revisionMap) { final Map<String, RevisionDTO> dtos = new HashMap<>(revisionMap.size()); for (final Map.Entry<String, Revision> entry : revisionMap.entrySet()) { final Revision revision = entry.getValue(); final RevisionDTO revisionDto = new RevisionDTO(); revisionDto.setClientId(revision.getClientId()); revisionDto.setVersion(revision.getVersion()); dtos.put(entry.getKey(), revisionDto); } return dtos; } /** * Creates a TemplateDTO from the specified template. 
 *
 * @param template template
 * @return dto, or null when the template is null
 */
public TemplateDTO createTemplateDTO(final Template template) {
    if (template == null) {
        return null;
    }
    // Copy only the summary fields of the stored details; the flow contents are intentionally omitted.
    final TemplateDTO original = template.getDetails();
    final TemplateDTO copy = new TemplateDTO();
    copy.setId(original.getId());
    copy.setGroupId(template.getProcessGroup().getIdentifier());
    copy.setName(original.getName());
    copy.setDescription(original.getDescription());
    copy.setTimestamp(original.getTimestamp());
    copy.setUri(original.getUri());
    copy.setEncodingVersion(original.getEncodingVersion());
    return copy;
}

/**
 * Creates a RemoteProcessGroupStatusDTO (including its aggregate snapshot) from the specified status.
 *
 * @param remoteProcessGroup the component, used only to derive the validation status
 * @param remoteProcessGroupStatus the status values to copy
 * @return dto
 */
public RemoteProcessGroupStatusDTO createRemoteProcessGroupStatusDto(final RemoteProcessGroup remoteProcessGroup, final RemoteProcessGroupStatus remoteProcessGroupStatus) {
    final RemoteProcessGroupStatusDTO dto = new RemoteProcessGroupStatusDTO();
    dto.setId(remoteProcessGroupStatus.getId());
    dto.setGroupId(remoteProcessGroupStatus.getGroupId());
    dto.setTargetUri(remoteProcessGroupStatus.getTargetUri());
    dto.setName(remoteProcessGroupStatus.getName());
    dto.setTransmissionStatus(remoteProcessGroupStatus.getTransmissionStatus().toString());
    dto.setStatsLastRefreshed(new Date());
    dto.setValidationStatus(getRemoteProcessGroupValidationStatus(remoteProcessGroup).name());
    final RemoteProcessGroupStatusSnapshotDTO snapshot = new RemoteProcessGroupStatusSnapshotDTO();
    dto.setAggregateSnapshot(snapshot);
    snapshot.setId(remoteProcessGroupStatus.getId());
    snapshot.setGroupId(remoteProcessGroupStatus.getGroupId());
    snapshot.setName(remoteProcessGroupStatus.getName());
    snapshot.setTargetUri(remoteProcessGroupStatus.getTargetUri());
    snapshot.setTransmissionStatus(remoteProcessGroupStatus.getTransmissionStatus().toString());
    snapshot.setActiveThreadCount(remoteProcessGroupStatus.getActiveThreadCount());
    snapshot.setFlowFilesSent(remoteProcessGroupStatus.getSentCount());
    snapshot.setBytesSent(remoteProcessGroupStatus.getSentContentSize());
    snapshot.setFlowFilesReceived(remoteProcessGroupStatus.getReceivedCount());
    snapshot.setBytesReceived(remoteProcessGroupStatus.getReceivedContentSize());
    StatusMerger.updatePrettyPrintedFields(snapshot);
    return dto;
}

// INVALID when the remote group has an authorization issue or any validation failure; VALID otherwise.
private ValidationStatus getRemoteProcessGroupValidationStatus(RemoteProcessGroup remoteProcessGroup) {
    final boolean hasAuthIssue = remoteProcessGroup.getAuthorizationIssue() != null && !remoteProcessGroup.getAuthorizationIssue().isEmpty();
    final Collection<ValidationResult> validationResults = remoteProcessGroup.validate();
    final boolean hasValidationIssue = validationResults != null && !validationResults.isEmpty();
    return hasAuthIssue || hasValidationIssue ? ValidationStatus.INVALID : ValidationStatus.VALID;
}

/**
 * Creates a ProcessGroupStatusDTO with only the aggregate snapshot populated — no per-component
 * breakdowns (see createProcessGroupStatusDto for the full recursive variant).
 *
 * @param processGroupStatus status values to copy
 * @return dto
 */
public ProcessGroupStatusDTO createConciseProcessGroupStatusDto(final ProcessGroupStatus processGroupStatus) {
    final ProcessGroupStatusDTO processGroupStatusDto = new ProcessGroupStatusDTO();
    processGroupStatusDto.setId(processGroupStatus.getId());
    processGroupStatusDto.setName(processGroupStatus.getName());
    processGroupStatusDto.setStatsLastRefreshed(new Date());
    final ProcessGroupStatusSnapshotDTO snapshot = new ProcessGroupStatusSnapshotDTO();
    processGroupStatusDto.setAggregateSnapshot(snapshot);
    snapshot.setId(processGroupStatus.getId());
    snapshot.setName(processGroupStatus.getName());
    if (processGroupStatus.getVersionedFlowState() != null) {
        snapshot.setVersionedFlowState(processGroupStatus.getVersionedFlowState().name());
    }
    snapshot.setFlowFilesQueued(processGroupStatus.getQueuedCount());
    snapshot.setBytesQueued(processGroupStatus.getQueuedContentSize());
    snapshot.setBytesRead(processGroupStatus.getBytesRead());
    snapshot.setBytesWritten(processGroupStatus.getBytesWritten());
    snapshot.setFlowFilesIn(processGroupStatus.getInputCount());
    snapshot.setBytesIn(processGroupStatus.getInputContentSize());
    snapshot.setFlowFilesOut(processGroupStatus.getOutputCount());
    snapshot.setBytesOut(processGroupStatus.getOutputContentSize());
    snapshot.setFlowFilesTransferred(processGroupStatus.getFlowFilesTransferred());
    snapshot.setBytesTransferred(processGroupStatus.getBytesTransferred());
    snapshot.setFlowFilesSent(processGroupStatus.getFlowFilesSent());
    snapshot.setBytesSent(processGroupStatus.getBytesSent());
    snapshot.setFlowFilesReceived(processGroupStatus.getFlowFilesReceived());
    snapshot.setBytesReceived(processGroupStatus.getBytesReceived());
    snapshot.setActiveThreadCount(processGroupStatus.getActiveThreadCount());
    snapshot.setTerminatedThreadCount(processGroupStatus.getTerminatedThreadCount());
    StatusMerger.updatePrettyPrintedFields(snapshot);
    return processGroupStatusDto;
}

/**
 * Creates a fully-populated ProcessGroupStatusDTO: the aggregate snapshot plus per-component
 * snapshot entities (processors, connections, child groups — recursively —, remote groups, ports),
 * each paired with the permissions of the corresponding live component.
 *
 * @param processGroup the live group, used to look up components for permission checks
 * @param processGroupStatus status values to copy
 * @return dto
 */
public ProcessGroupStatusDTO createProcessGroupStatusDto(final ProcessGroup processGroup, final ProcessGroupStatus processGroupStatus) {
    final ProcessGroupStatusDTO processGroupStatusDto = createConciseProcessGroupStatusDto(processGroupStatus);
    final ProcessGroupStatusSnapshotDTO snapshot = processGroupStatusDto.getAggregateSnapshot();
    // processor status
    final Collection<ProcessorStatusSnapshotEntity> processorStatusSnapshotEntities = new ArrayList<>();
    snapshot.setProcessorStatusSnapshots(processorStatusSnapshotEntities);
    final Collection<ProcessorStatus> processorStatusCollection = processGroupStatus.getProcessorStatus();
    if (processorStatusCollection != null) {
        for (final ProcessorStatus processorStatus : processorStatusCollection) {
            final ProcessorStatusDTO processorStatusDto = createProcessorStatusDto(processorStatus);
            final ProcessorNode processor = processGroup.findProcessor(processorStatusDto.getId());
            final PermissionsDTO processorPermissions = createPermissionsDto(processor);
            processorStatusSnapshotEntities.add(entityFactory.createProcessorStatusSnapshotEntity(processorStatusDto.getAggregateSnapshot(), processorPermissions));
        }
    }
    // connection status
    final Collection<ConnectionStatusSnapshotEntity> connectionStatusDtoCollection = new ArrayList<>();
    snapshot.setConnectionStatusSnapshots(connectionStatusDtoCollection);
    final Collection<ConnectionStatus> connectionStatusCollection = processGroupStatus.getConnectionStatus();
    if (connectionStatusCollection != null) {
        for (final ConnectionStatus connectionStatus : connectionStatusCollection) {
            final ConnectionStatusDTO connectionStatusDto = createConnectionStatusDto(connectionStatus);
            final Connection connection = processGroup.findConnection(connectionStatusDto.getId());
            final PermissionsDTO connectionPermissions = createPermissionsDto(connection);
            connectionStatusDtoCollection.add(entityFactory.createConnectionStatusSnapshotEntity(connectionStatusDto.getAggregateSnapshot(), connectionPermissions));
        }
    }
    // local child process groups (recursive)
    final Collection<ProcessGroupStatusSnapshotEntity> childProcessGroupStatusDtoCollection = new ArrayList<>();
    snapshot.setProcessGroupStatusSnapshots(childProcessGroupStatusDtoCollection);
    final Collection<ProcessGroupStatus> childProcessGroupStatusCollection = processGroupStatus.getProcessGroupStatus();
    if (childProcessGroupStatusCollection != null) {
        for (final ProcessGroupStatus childProcessGroupStatus : childProcessGroupStatusCollection) {
            final ProcessGroupStatusDTO childProcessGroupStatusDto = createProcessGroupStatusDto(processGroup, childProcessGroupStatus);
            final ProcessGroup childProcessGroup = processGroup.findProcessGroup(childProcessGroupStatusDto.getId());
            final PermissionsDTO childProcessGroupPermissions = createPermissionsDto(childProcessGroup);
            childProcessGroupStatusDtoCollection.add(entityFactory.createProcessGroupStatusSnapshotEntity(childProcessGroupStatusDto.getAggregateSnapshot(), childProcessGroupPermissions));
        }
    }
    // remote child process groups
    final Collection<RemoteProcessGroupStatusSnapshotEntity> childRemoteProcessGroupStatusDtoCollection = new ArrayList<>();
    snapshot.setRemoteProcessGroupStatusSnapshots(childRemoteProcessGroupStatusDtoCollection);
    final Collection<RemoteProcessGroupStatus> childRemoteProcessGroupStatusCollection = processGroupStatus.getRemoteProcessGroupStatus();
    if (childRemoteProcessGroupStatusCollection != null) {
        for (final RemoteProcessGroupStatus childRemoteProcessGroupStatus : childRemoteProcessGroupStatusCollection) {
            final RemoteProcessGroup remoteProcessGroup = processGroup.findRemoteProcessGroup(childRemoteProcessGroupStatus.getId());
            final RemoteProcessGroupStatusDTO childRemoteProcessGroupStatusDto = createRemoteProcessGroupStatusDto(remoteProcessGroup, childRemoteProcessGroupStatus);
            final PermissionsDTO remoteProcessGroupPermissions = createPermissionsDto(remoteProcessGroup);
            childRemoteProcessGroupStatusDtoCollection.add(entityFactory.createRemoteProcessGroupStatusSnapshotEntity(childRemoteProcessGroupStatusDto.getAggregateSnapshot(), remoteProcessGroupPermissions));
        }
    }
    // input ports
    final Collection<PortStatusSnapshotEntity> inputPortStatusDtoCollection = new ArrayList<>();
    snapshot.setInputPortStatusSnapshots(inputPortStatusDtoCollection);
    final Collection<PortStatus> inputPortStatusCollection = processGroupStatus.getInputPortStatus();
    if (inputPortStatusCollection != null) {
        for (final PortStatus portStatus : inputPortStatusCollection) {
            final PortStatusDTO portStatusDto = createPortStatusDto(portStatus);
            final Port inputPort = processGroup.findInputPort(portStatus.getId());
            final PermissionsDTO inputPortPermissions = createPermissionsDto(inputPort);
            inputPortStatusDtoCollection.add(entityFactory.createPortStatusSnapshotEntity(portStatusDto.getAggregateSnapshot(), inputPortPermissions));
        }
    }
    // output ports
    final Collection<PortStatusSnapshotEntity> outputPortStatusDtoCollection = new ArrayList<>();
    snapshot.setOutputPortStatusSnapshots(outputPortStatusDtoCollection);
    final Collection<PortStatus> outputPortStatusCollection = processGroupStatus.getOutputPortStatus();
    if (outputPortStatusCollection != null) {
        for (final PortStatus portStatus : outputPortStatusCollection) {
            final PortStatusDTO portStatusDto = createPortStatusDto(portStatus);
            final Port outputPort = processGroup.findOutputPort(portStatus.getId());
            final PermissionsDTO outputPortPermissions =
createPermissionsDto(outputPort); outputPortStatusDtoCollection.add(entityFactory.createPortStatusSnapshotEntity(portStatusDto.getAggregateSnapshot(), outputPortPermissions)); } } return processGroupStatusDto; } public ConnectionStatusDTO createConnectionStatusDto(final ConnectionStatus connectionStatus) { final ConnectionStatusDTO connectionStatusDto = new ConnectionStatusDTO(); connectionStatusDto.setGroupId(connectionStatus.getGroupId()); connectionStatusDto.setId(connectionStatus.getId()); connectionStatusDto.setName(connectionStatus.getName()); connectionStatusDto.setSourceId(connectionStatus.getSourceId()); connectionStatusDto.setSourceName(connectionStatus.getSourceName()); connectionStatusDto.setDestinationId(connectionStatus.getDestinationId()); connectionStatusDto.setDestinationName(connectionStatus.getDestinationName()); connectionStatusDto.setStatsLastRefreshed(new Date()); final ConnectionStatusSnapshotDTO snapshot = new ConnectionStatusSnapshotDTO(); connectionStatusDto.setAggregateSnapshot(snapshot); snapshot.setId(connectionStatus.getId()); snapshot.setGroupId(connectionStatus.getGroupId()); snapshot.setName(connectionStatus.getName()); snapshot.setSourceName(connectionStatus.getSourceName()); snapshot.setDestinationName(connectionStatus.getDestinationName()); snapshot.setFlowFilesQueued(connectionStatus.getQueuedCount()); snapshot.setBytesQueued(connectionStatus.getQueuedBytes()); snapshot.setFlowFilesIn(connectionStatus.getInputCount()); snapshot.setBytesIn(connectionStatus.getInputBytes()); snapshot.setFlowFilesOut(connectionStatus.getOutputCount()); snapshot.setBytesOut(connectionStatus.getOutputBytes()); ConnectionStatusPredictions predictions = connectionStatus.getPredictions(); ConnectionStatusPredictionsSnapshotDTO predictionsDTO = null; if (predictions != null) { predictionsDTO = new ConnectionStatusPredictionsSnapshotDTO(); } if (connectionStatus.getBackPressureObjectThreshold() > 0) { snapshot.setPercentUseCount(Math.min(100, 
StatusMerger.getUtilization(connectionStatus.getQueuedCount(), connectionStatus.getBackPressureObjectThreshold()))); if (predictionsDTO != null) { snapshot.setPredictions(predictionsDTO); predictionsDTO.setPredictionIntervalSeconds(((Long) (predictions.getPredictionIntervalMillis() / 1000L)).intValue()); predictionsDTO.setPredictedMillisUntilCountBackpressure(predictions.getPredictedTimeToCountBackpressureMillis()); predictionsDTO.setPredictedCountAtNextInterval(predictions.getNextPredictedQueuedCount()); predictionsDTO.setPredictedPercentCount(predictions.getPredictedPercentCount()); predictionsDTO.setPredictedPercentBytes(predictions.getPredictedPercentBytes()); predictionsDTO.setPredictionIntervalSeconds(((Long) (predictions.getPredictionIntervalMillis() / 1000L)).intValue()); } } if (connectionStatus.getBackPressureBytesThreshold() > 0) { snapshot.setPercentUseBytes(Math.min(100, StatusMerger.getUtilization(connectionStatus.getQueuedBytes(), connectionStatus.getBackPressureBytesThreshold()))); if (predictionsDTO != null) { snapshot.setPredictions(predictionsDTO); predictionsDTO.setPredictionIntervalSeconds(((Long) (predictions.getPredictionIntervalMillis() / 1000L)).intValue()); predictionsDTO.setPredictedMillisUntilBytesBackpressure(predictions.getPredictedTimeToBytesBackpressureMillis()); predictionsDTO.setPredictedBytesAtNextInterval(predictions.getNextPredictedQueuedBytes()); predictionsDTO.setPredictedPercentCount(predictions.getPredictedPercentCount()); predictionsDTO.setPredictedPercentBytes(predictions.getPredictedPercentBytes()); predictionsDTO.setPredictionIntervalSeconds(((Long) (predictions.getPredictionIntervalMillis() / 1000L)).intValue()); } } StatusMerger.updatePrettyPrintedFields(snapshot); return connectionStatusDto; } public ConnectionStatisticsDTO createConnectionStatisticsDto(final Connection connection, final StatusAnalytics statusAnalytics) { final ConnectionStatisticsDTO connectionStatisticsDTO = new ConnectionStatisticsDTO(); 
connectionStatisticsDTO.setId(connection.getIdentifier()); // continuation: body of createConnectionStatisticsDto, whose header precedes this chunk
connectionStatisticsDTO.setStatsLastRefreshed(new Date());
final ConnectionStatisticsSnapshotDTO snapshot = new ConnectionStatisticsSnapshotDTO();
connectionStatisticsDTO.setAggregateSnapshot(snapshot);
snapshot.setId(connection.getIdentifier());
// NOTE(review): assumes the analytics map always contains these keys — a missing key would NPE on unboxing; confirm StatusAnalytics contract.
Map<String,Long> predictions = statusAnalytics.getPredictions();
snapshot.setPredictedMillisUntilBytesBackpressure(predictions.get("timeToBytesBackpressureMillis"));
snapshot.setPredictedMillisUntilCountBackpressure(predictions.get("timeToCountBackpressureMillis"));
snapshot.setPredictedBytesAtNextInterval(predictions.get("nextIntervalBytes"));
snapshot.setPredictedCountAtNextInterval(predictions.get("nextIntervalCount").intValue());
snapshot.setPredictedPercentBytes(predictions.get("nextIntervalPercentageUseBytes").intValue());
snapshot.setPredictedPercentCount(predictions.get("nextIntervalPercentageUseCount").intValue());
snapshot.setPredictionIntervalMillis(predictions.get("intervalTimeMillis"));
return connectionStatisticsDTO;
}

/**
 * Creates a ProcessorStatusDTO (including its aggregate snapshot) from the specified status.
 *
 * @param procStatus status values to copy
 * @return dto
 */
public ProcessorStatusDTO createProcessorStatusDto(final ProcessorStatus procStatus) {
    final ProcessorStatusDTO dto = new ProcessorStatusDTO();
    dto.setId(procStatus.getId());
    dto.setGroupId(procStatus.getGroupId());
    dto.setName(procStatus.getName());
    dto.setStatsLastRefreshed(new Date());
    dto.setRunStatus(procStatus.getRunStatus().toString());
    final ProcessorStatusSnapshotDTO snapshot = new ProcessorStatusSnapshotDTO();
    dto.setAggregateSnapshot(snapshot);
    snapshot.setId(procStatus.getId());
    snapshot.setGroupId(procStatus.getGroupId());
    snapshot.setName(procStatus.getName());
    snapshot.setFlowFilesOut(procStatus.getOutputCount());
    snapshot.setBytesOut(procStatus.getOutputBytes());
    snapshot.setFlowFilesIn(procStatus.getInputCount());
    snapshot.setBytesIn(procStatus.getInputBytes());
    snapshot.setBytesRead(procStatus.getBytesRead());
    snapshot.setBytesWritten(procStatus.getBytesWritten());
    snapshot.setTaskCount(procStatus.getInvocations());
    snapshot.setTasksDurationNanos(procStatus.getProcessingNanos());
    snapshot.setTasksDuration(FormatUtils.formatHoursMinutesSeconds(procStatus.getProcessingNanos(), TimeUnit.NANOSECONDS));
    // determine the run status
    snapshot.setRunStatus(procStatus.getRunStatus().toString());
    snapshot.setExecutionNode(procStatus.getExecutionNode().toString());
    snapshot.setActiveThreadCount(procStatus.getActiveThreadCount());
    snapshot.setTerminatedThreadCount(procStatus.getTerminatedThreadCount());
    snapshot.setType(procStatus.getType());
    StatusMerger.updatePrettyPrintedFields(snapshot);
    return dto;
}

/**
 * Creates a compact run-status view of a processor: id, name, thread count, run status,
 * and validation errors — used for bulk run-status queries.
 *
 * @param processor the live processor node (for name and validation errors)
 * @param processorStatus the status snapshot (for run status and thread count)
 * @return dto
 */
public ProcessorRunStatusDetailsDTO createProcessorRunStatusDetailsDto(final ProcessorNode processor, final ProcessorStatus processorStatus) {
    final ProcessorRunStatusDetailsDTO dto = new ProcessorRunStatusDetailsDTO();
    dto.setId(processor.getIdentifier());
    dto.setName(processor.getName());
    dto.setActiveThreadCount(processorStatus.getActiveThreadCount());
    dto.setRunStatus(processorStatus.getRunStatus().name());
    dto.setValidationErrors(convertValidationErrors(processor.getValidationErrors()));
    return dto;
}

// Renders validation results as strings; null stays null, empty stays an empty (shared immutable) set.
private Set<String> convertValidationErrors(final Collection<ValidationResult> validationErrors) {
    if (validationErrors == null) {
        return null;
    }
    if (validationErrors.isEmpty()) {
        return Collections.emptySet();
    }
    final Set<String> errors = new HashSet<>(validationErrors.size());
    for (final ValidationResult result : validationErrors) {
        errors.add(result.toString());
    }
    return errors;
}

/**
 * Creates a PortStatusDTO for the specified PortStatus.
 *
 * @param portStatus status
 * @return dto
 */
public PortStatusDTO createPortStatusDto(final PortStatus portStatus) {
    final PortStatusDTO dto = new PortStatusDTO();
    dto.setId(portStatus.getId());
    dto.setGroupId(portStatus.getGroupId());
    dto.setName(portStatus.getName());
    dto.setRunStatus(portStatus.getRunStatus().toString());
    dto.setTransmitting(portStatus.isTransmitting());
    dto.setStatsLastRefreshed(new Date());
    final PortStatusSnapshotDTO snapshot = new PortStatusSnapshotDTO();
    dto.setAggregateSnapshot(snapshot);
    snapshot.setId(portStatus.getId());
    snapshot.setGroupId(portStatus.getGroupId());
    snapshot.setName(portStatus.getName());
    snapshot.setRunStatus(portStatus.getRunStatus().toString());
    snapshot.setActiveThreadCount(portStatus.getActiveThreadCount());
    snapshot.setFlowFilesOut(portStatus.getOutputCount());
    snapshot.setBytesOut(portStatus.getOutputBytes());
    snapshot.setFlowFilesIn(portStatus.getInputCount());
    snapshot.setBytesIn(portStatus.getInputBytes());
    StatusMerger.updatePrettyPrintedFields(snapshot);
    return dto;
}

/**
 * Copies the specified snippet.
 *
 * @param originalSnippet snippet
 * @return a new FlowSnippetDTO containing copies of every component in the original
 */
public FlowSnippetDTO copySnippetContents(final FlowSnippetDTO originalSnippet) {
    final FlowSnippetDTO copySnippet = new FlowSnippetDTO();
    // Each component type is copied individually via the matching copy(...) overload.
    if (originalSnippet.getConnections() != null) {
        for (final ConnectionDTO connection : originalSnippet.getConnections()) {
            copySnippet.getConnections().add(copy(connection));
        }
    }
    if (originalSnippet.getInputPorts() != null) {
        for (final PortDTO port : originalSnippet.getInputPorts()) {
            copySnippet.getInputPorts().add(copy(port));
        }
    }
    if (originalSnippet.getOutputPorts() != null) {
        for (final PortDTO port : originalSnippet.getOutputPorts()) {
            copySnippet.getOutputPorts().add(copy(port));
        }
    }
    if (originalSnippet.getProcessGroups() != null) {
        for (final ProcessGroupDTO processGroup : originalSnippet.getProcessGroups()) {
            // Process groups are copied deeply (the boolean requests copying of contents).
            copySnippet.getProcessGroups().add(copy(processGroup, true));
        }
    }
    if (originalSnippet.getProcessors() != null) {
        for (final ProcessorDTO processor : originalSnippet.getProcessors()) {
            copySnippet.getProcessors().add(copy(processor));
        }
    }
    if (originalSnippet.getLabels() != null) {
        for (final LabelDTO label : originalSnippet.getLabels()) {
            copySnippet.getLabels().add(copy(label));
        }
    }
    if (originalSnippet.getFunnels() != null) {
        for (final FunnelDTO funnel : originalSnippet.getFunnels()) {
            copySnippet.getFunnels().add(copy(funnel));
        }
    }
    if (originalSnippet.getRemoteProcessGroups() != null) {
        for (final RemoteProcessGroupDTO remoteGroup : originalSnippet.getRemoteProcessGroups()) {
            copySnippet.getRemoteProcessGroups().add(copy(remoteGroup));
        }
    }
    if (originalSnippet.getControllerServices() != null) {
        for (final ControllerServiceDTO controllerService : originalSnippet.getControllerServices()) {
            copySnippet.getControllerServices().add(copy(controllerService));
        }
    }
    return copySnippet;
}

/**
 * Creates a PortDTO from the specified Port.
 *
 * @param port port
 * @return dto
 */
public PortDTO createPortDto(final Port port) {
    if (port == null) {
        return null;
    }

    final PortDTO dto = new PortDTO();
    dto.setId(port.getIdentifier());
    dto.setPosition(createPositionDto(port.getPosition()));
    dto.setName(port.getName());
    dto.setComments(port.getComments());
    dto.setConcurrentlySchedulableTaskCount(port.getMaxConcurrentTasks());
    dto.setParentGroupId(port.getProcessGroup().getIdentifier());
    dto.setState(port.getScheduledState().toString());
    dto.setType(port.getConnectableType().name());
    dto.setVersionedComponentId(port.getVersionedComponentId().orElse(null));

    // if this port is remotely accessible, determine if its actually connected to another nifi
    if (port instanceof PublicPort) {
        final PublicPort publicPort = (PublicPort) port;
        dto.setAllowRemoteAccess(true);
        dto.setTransmitting(publicPort.isTransmitting());
        dto.setGroupAccessControl(publicPort.getGroupAccessControl());
        dto.setUserAccessControl(publicPort.getUserAccessControl());
    }

    // surface current validation problems as plain strings on the DTO
    final Collection<ValidationResult> validationErrors = port.getValidationErrors();
    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }
        dto.setValidationErrors(errors);
    }

    return dto;
}

/**
 * Creates a ParameterContextDTO, including the process groups bound to the context
 * and one parameter entity per parameter in the context.
 *
 * @param parameterContext the context to marshal
 * @param revisionManager used to look up revisions of the bound process groups
 * @return dto
 */
public ParameterContextDTO createParameterContextDto(final ParameterContext parameterContext, final RevisionManager revisionManager) {
    final ParameterContextDTO dto = new ParameterContextDTO();
    dto.setId(parameterContext.getIdentifier());
    dto.setName(parameterContext.getName());
    dto.setDescription(parameterContext.getDescription());

    // every process group that is bound to this context, as full entities
    final Set<ProcessGroupEntity> boundGroups = new HashSet<>();
    for (final ProcessGroup processGroup : parameterContext.getParameterReferenceManager().getProcessGroupsBound(parameterContext)) {
        final ProcessGroupDTO processGroupDto = createConciseProcessGroupDto(processGroup);
        final RevisionDTO revisionDto = createRevisionDTO(revisionManager.getRevision(processGroup.getIdentifier()));
        final PermissionsDTO permissionsDto = createPermissionsDto(processGroup);
        final ProcessGroupEntity processGroupEntity = entityFactory.createProcessGroupEntity(processGroupDto, revisionDto, permissionsDto, null, null);
        boundGroups.add(processGroupEntity);
    }
    dto.setBoundProcessGroups(boundGroups);

    // LinkedHashSet preserves the iteration order of the context's parameter map
    final Set<ParameterEntity> parameterEntities = new LinkedHashSet<>();
    for (final Parameter parameter : parameterContext.getParameters().values()) {
        parameterEntities.add(createParameterEntity(parameterContext, parameter, revisionManager));
    }
    dto.setParameters(parameterEntities);

    return dto;
}

/**
 * Wraps a Parameter in a ParameterEntity; canWrite reflects writability of all
 * components referencing the parameter.
 */
public ParameterEntity createParameterEntity(final ParameterContext parameterContext, final Parameter parameter, final RevisionManager revisionManager) {
    final ParameterDTO dto = createParameterDto(parameterContext, parameter, revisionManager);
    final ParameterEntity entity = new ParameterEntity();
    entity.setParameter(dto);
    final boolean canWrite = isWritable(dto.getReferencingComponents());
    entity.setCanWrite(canWrite);
    return entity;
}

/**
 * Creates a ParameterDTO; sensitive parameter values are replaced with the mask.
 */
public ParameterDTO createParameterDto(final ParameterContext parameterContext, final Parameter parameter, final RevisionManager revisionManager) {
    final ParameterDescriptor descriptor = parameter.getDescriptor();

    final ParameterDTO dto = new ParameterDTO();
    dto.setName(descriptor.getName());
    dto.setDescription(descriptor.getDescription());
    dto.setSensitive(descriptor.isSensitive());
    if (parameter.getValue() != null) {
        // never expose sensitive values; continued on next line
        dto.setValue(descriptor.isSensitive() ?
SENSITIVE_VALUE_MASK : parameter.getValue());
    }

    // every processor and controller service referencing this parameter
    final ParameterReferenceManager parameterReferenceManager = parameterContext.getParameterReferenceManager();
    final Set<ComponentNode> referencingComponents = new HashSet<>();
    referencingComponents.addAll(parameterReferenceManager.getProcessorsReferencing(parameterContext, descriptor.getName()));
    referencingComponents.addAll(parameterReferenceManager.getControllerServicesReferencing(parameterContext, descriptor.getName()));

    final Set<AffectedComponentEntity> referencingComponentEntities = createAffectedComponentEntities(referencingComponents, revisionManager);
    dto.setReferencingComponents(referencingComponentEntities);

    return dto;
}

/**
 * Creates a ReportingTaskDTO from the specified ReportingTaskNode, masking
 * sensitive property values and attaching any current validation errors.
 */
public ReportingTaskDTO createReportingTaskDto(final ReportingTaskNode reportingTaskNode) {
    final BundleCoordinate bundleCoordinate = reportingTaskNode.getBundleCoordinate();
    // bundles with the same group/id coordinate, i.e. other versions of this extension
    final List<Bundle> compatibleBundles = extensionManager.getBundles(reportingTaskNode.getCanonicalClassName()).stream().filter(bundle -> {
        final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
        return bundleCoordinate.getGroup().equals(coordinate.getGroup()) && bundleCoordinate.getId().equals(coordinate.getId());
    }).collect(Collectors.toList());

    final ReportingTaskDTO dto = new ReportingTaskDTO();
    dto.setId(reportingTaskNode.getIdentifier());
    dto.setName(reportingTaskNode.getName());
    dto.setType(reportingTaskNode.getCanonicalClassName());
    dto.setBundle(createBundleDto(bundleCoordinate));
    dto.setSchedulingStrategy(reportingTaskNode.getSchedulingStrategy().name());
    dto.setSchedulingPeriod(reportingTaskNode.getSchedulingPeriod());
    dto.setState(reportingTaskNode.getScheduledState().name());
    dto.setActiveThreadCount(reportingTaskNode.getActiveThreadCount());
    dto.setAnnotationData(reportingTaskNode.getAnnotationData());
    dto.setComments(reportingTaskNode.getComments());
    dto.setPersistsState(reportingTaskNode.getReportingTask().getClass().isAnnotationPresent(Stateful.class));
    dto.setRestricted(reportingTaskNode.isRestricted());
    dto.setDeprecated(reportingTaskNode.isDeprecated());
    dto.setExtensionMissing(reportingTaskNode.isExtensionMissing());
    dto.setMultipleVersionsAvailable(compatibleBundles.size() > 1);

    // default scheduling period per supported strategy
    final Map<String, String> defaultSchedulingPeriod = new HashMap<>();
    defaultSchedulingPeriod.put(SchedulingStrategy.TIMER_DRIVEN.name(), SchedulingStrategy.TIMER_DRIVEN.getDefaultSchedulingPeriod());
    defaultSchedulingPeriod.put(SchedulingStrategy.CRON_DRIVEN.name(), SchedulingStrategy.CRON_DRIVEN.getDefaultSchedulingPeriod());
    dto.setDefaultSchedulingPeriod(defaultSchedulingPeriod);

    // sort a copy of the properties (alphabetically, US collation)
    final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
        @Override
        public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
            return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
        }
    });
    sortedProperties.putAll(reportingTaskNode.getRawPropertyValues());

    // get the property order from the reporting task: declared descriptors first,
    // then any remaining (dynamic) properties in sorted order
    final ReportingTask reportingTask = reportingTaskNode.getReportingTask();
    final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
    final List<PropertyDescriptor> descriptors = reportingTask.getPropertyDescriptors();
    if (descriptors != null && !descriptors.isEmpty()) {
        for (final PropertyDescriptor descriptor : descriptors) {
            orderedProperties.put(descriptor, null);
        }
    }
    orderedProperties.putAll(sortedProperties);

    // build the descriptor and property dtos
    dto.setDescriptors(new LinkedHashMap<>());
    dto.setProperties(new LinkedHashMap<>());
    for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();

        // store the property descriptor
        dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, null));

        // determine the property value - don't include sensitive properties
        String propertyValue = entry.getValue();
        if (propertyValue != null && descriptor.isSensitive()) {
            propertyValue = SENSITIVE_VALUE_MASK;
        } else if (propertyValue == null && descriptor.getDefaultValue() != null) {
            propertyValue = descriptor.getDefaultValue();
        }

        // set the property value
        dto.getProperties().put(descriptor.getName(), propertyValue);
    }

    // bounded wait so a slow validation cycle cannot block the response
    final ValidationStatus validationStatus = reportingTaskNode.getValidationStatus(1, TimeUnit.MILLISECONDS);
    dto.setValidationStatus(validationStatus.name());

    // add the validation errors
    final Collection<ValidationResult> validationErrors = reportingTaskNode.getValidationErrors();
    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }
        dto.setValidationErrors(errors);
    }

    return dto;
}

/**
 * Creates a ControllerServiceDTO from the specified ControllerServiceNode,
 * masking sensitive property values.
 */
public ControllerServiceDTO createControllerServiceDto(final ControllerServiceNode controllerServiceNode) {
    final BundleCoordinate bundleCoordinate = controllerServiceNode.getBundleCoordinate();
    // other versions of the same extension bundle
    final List<Bundle> compatibleBundles = extensionManager.getBundles(controllerServiceNode.getCanonicalClassName()).stream().filter(bundle -> {
        final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
        return bundleCoordinate.getGroup().equals(coordinate.getGroup()) && bundleCoordinate.getId().equals(coordinate.getId());
    }).collect(Collectors.toList());

    final ControllerServiceDTO dto = new ControllerServiceDTO();
    dto.setId(controllerServiceNode.getIdentifier());
    // a controller-level service has no parent group; continued on next line
    dto.setParentGroupId(controllerServiceNode.getProcessGroup() == null ?
null : controllerServiceNode.getProcessGroup().getIdentifier());
    dto.setName(controllerServiceNode.getName());
    dto.setType(controllerServiceNode.getCanonicalClassName());
    dto.setBundle(createBundleDto(bundleCoordinate));
    dto.setControllerServiceApis(createControllerServiceApiDto(controllerServiceNode.getControllerServiceImplementation().getClass()));
    dto.setState(controllerServiceNode.getState().name());
    dto.setAnnotationData(controllerServiceNode.getAnnotationData());
    dto.setComments(controllerServiceNode.getComments());
    dto.setPersistsState(controllerServiceNode.getControllerServiceImplementation().getClass().isAnnotationPresent(Stateful.class));
    dto.setRestricted(controllerServiceNode.isRestricted());
    dto.setDeprecated(controllerServiceNode.isDeprecated());
    dto.setExtensionMissing(controllerServiceNode.isExtensionMissing());
    dto.setMultipleVersionsAvailable(compatibleBundles.size() > 1);
    dto.setVersionedComponentId(controllerServiceNode.getVersionedComponentId().orElse(null));

    // sort a copy of the properties
    final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
        @Override
        public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
            return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
        }
    });
    sortedProperties.putAll(controllerServiceNode.getRawPropertyValues());

    // get the property order from the controller service: declared descriptors
    // first, then remaining (dynamic) properties in sorted order
    final ControllerService controllerService = controllerServiceNode.getControllerServiceImplementation();
    final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
    final List<PropertyDescriptor> descriptors = controllerService.getPropertyDescriptors();
    if (descriptors != null && !descriptors.isEmpty()) {
        for (final PropertyDescriptor descriptor : descriptors) {
            orderedProperties.put(descriptor, null);
        }
    }
    orderedProperties.putAll(sortedProperties);

    // build the descriptor and property dtos
    dto.setDescriptors(new LinkedHashMap<>());
    dto.setProperties(new LinkedHashMap<>());
    for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();

        // store the property descriptor
        final String groupId = controllerServiceNode.getProcessGroup() == null ? null : controllerServiceNode.getProcessGroup().getIdentifier();
        dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, groupId));

        // determine the property value - don't include sensitive properties
        String propertyValue = entry.getValue();
        if (propertyValue != null && descriptor.isSensitive()) {
            propertyValue = SENSITIVE_VALUE_MASK;
        } else if (propertyValue == null && descriptor.getDefaultValue() != null) {
            propertyValue = descriptor.getDefaultValue();
        }

        // set the property value
        dto.getProperties().put(descriptor.getName(), propertyValue);
    }

    // bounded wait so a slow validation cycle cannot block the response
    dto.setValidationStatus(controllerServiceNode.getValidationStatus(1, TimeUnit.MILLISECONDS).name());

    // add the validation errors
    final Collection<ValidationResult> validationErrors = controllerServiceNode.getValidationErrors();
    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }
        dto.setValidationErrors(errors);
    }

    return dto;
}

/**
 * Creates a DTO describing a component that references a controller service.
 * Supports processors, controller services and reporting tasks; state, property
 * and error details depend on the concrete node type.
 */
public ControllerServiceReferencingComponentDTO createControllerServiceReferencingComponentDTO(final ComponentNode component) {
    final ControllerServiceReferencingComponentDTO dto = new ControllerServiceReferencingComponentDTO();
    dto.setId(component.getIdentifier());
    dto.setName(component.getName());

    String processGroupId = null;
    List<PropertyDescriptor> propertyDescriptors = null;
    Collection<ValidationResult> validationErrors = null;
    if (component instanceof ProcessorNode) {
        final ProcessorNode node = ((ProcessorNode) component);
        dto.setGroupId(node.getProcessGroup().getIdentifier());
        dto.setState(node.getScheduledState().name());
        dto.setActiveThreadCount(node.getActiveThreadCount());
        dto.setType(node.getComponentType());
        dto.setReferenceType(Processor.class.getSimpleName());

        propertyDescriptors = node.getProcessor().getPropertyDescriptors();
        validationErrors = node.getValidationErrors();
        processGroupId = node.getProcessGroup().getIdentifier();
    } else if (component instanceof ControllerServiceNode) {
        final ControllerServiceNode node = ((ControllerServiceNode) component);
        dto.setState(node.getState().name());
        dto.setType(node.getComponentType());
        dto.setReferenceType(ControllerService.class.getSimpleName());

        propertyDescriptors = node.getControllerServiceImplementation().getPropertyDescriptors();
        validationErrors = node.getValidationErrors();
        // controller-level services have no process group
        processGroupId = node.getProcessGroup() == null ? null : node.getProcessGroup().getIdentifier();
    } else if (component instanceof ReportingTaskNode) {
        final ReportingTaskNode node = ((ReportingTaskNode) component);
        dto.setState(node.getScheduledState().name());
        dto.setActiveThreadCount(node.getActiveThreadCount());
        dto.setType(node.getComponentType());
        dto.setReferenceType(ReportingTask.class.getSimpleName());

        propertyDescriptors = node.getReportingTask().getPropertyDescriptors();
        validationErrors = node.getValidationErrors();
        processGroupId = null;
    }

    // ensure descriptors is non null
    if (propertyDescriptors == null) {
        propertyDescriptors = new ArrayList<>();
    }

    // process properties unconditionally since dynamic properties are available here and not in getPropertyDescriptors
    final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
        @Override
        public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
            return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
        }
    });
    sortedProperties.putAll(component.getRawPropertyValues());

    // declared descriptors first, then remaining (dynamic) properties in sorted order
    final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
    for (final PropertyDescriptor descriptor : propertyDescriptors) {
        orderedProperties.put(descriptor, null);
    }
    orderedProperties.putAll(sortedProperties);

    // build the descriptor and property dtos
    dto.setDescriptors(new LinkedHashMap<String, PropertyDescriptorDTO>());
    dto.setProperties(new LinkedHashMap<String, String>());
    for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();

        // store the property descriptor
        dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, processGroupId));

        // determine the property value - don't include sensitive properties
        String propertyValue = entry.getValue();
        if (propertyValue != null && descriptor.isSensitive()) {
            propertyValue = SENSITIVE_VALUE_MASK;
        }

        // set the property value
        dto.getProperties().put(descriptor.getName(), propertyValue);
    }

    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }
        dto.setValidationErrors(errors);
    }

    return dto;
}

/**
 * Creates a RemoteProcessGroupPortDTO from the specified RemoteGroupPort,
 * including its batch settings. Returns null when the port is null.
 */
public RemoteProcessGroupPortDTO createRemoteProcessGroupPortDto(final RemoteGroupPort port) {
    if (port == null) {
        return null;
    }

    final RemoteProcessGroupPortDTO dto = new RemoteProcessGroupPortDTO();
    dto.setId(port.getIdentifier());
    dto.setGroupId(port.getRemoteProcessGroup().getIdentifier());
    dto.setTargetId(port.getTargetIdentifier());
    dto.setName(port.getName());
    dto.setComments(port.getComments());
    dto.setTransmitting(port.isRunning());
    dto.setTargetRunning(port.isTargetRunning());
    dto.setConcurrentlySchedulableTaskCount(port.getMaxConcurrentTasks());
    dto.setUseCompression(port.isUseCompression());
    dto.setExists(port.getTargetExists());
    dto.setVersionedComponentId(port.getVersionedComponentId().orElse(null));

    final BatchSettingsDTO batchDTO = new BatchSettingsDTO();
    batchDTO.setCount(port.getBatchCount());
    batchDTO.setSize(port.getBatchSize());
    batchDTO.setDuration(port.getBatchDuration());
    dto.setBatchSettings(batchDTO);

    // determine if this port is currently connected to another component locally
    if (ConnectableType.REMOTE_OUTPUT_PORT.equals(port.getConnectableType())) {
        dto.setConnected(!port.getConnections().isEmpty());
    } else {
        dto.setConnected(port.hasIncomingConnection());
    }

    return dto;
}

/**
 * Creates a RemoteProcessGroupDTO from the specified RemoteProcessGroup.
 *
 * @param group group
 * @return dto
 */
public RemoteProcessGroupDTO createRemoteProcessGroupDto(final RemoteProcessGroup group) {
    if (group == null) {
        return null;
    }

    final Set<RemoteProcessGroupPortDTO> inputPorts = new HashSet<>();
    final Set<RemoteProcessGroupPortDTO> outputPorts = new HashSet<>();

    // tally remote input ports by whether they are connected and running
    int activeRemoteInputPortCount = 0;
    int inactiveRemoteInputPortCount = 0;
    for (final Port port : group.getInputPorts()) {
        inputPorts.add(createRemoteProcessGroupPortDto((RemoteGroupPort) port));

        if (port.hasIncomingConnection()) {
            if (port.isRunning()) {
                activeRemoteInputPortCount++;
            } else {
                inactiveRemoteInputPortCount++;
            }
        }
    }

    // tally remote output ports by whether they have outbound connections and are running
    int activeRemoteOutputPortCount = 0;
    int inactiveRemoteOutputPortCount = 0;
    for (final Port port : group.getOutputPorts()) {
        outputPorts.add(createRemoteProcessGroupPortDto((RemoteGroupPort) port));

        if (!port.getConnections().isEmpty()) {
            if (port.isRunning()) {
                activeRemoteOutputPortCount++;
            } else {
                inactiveRemoteOutputPortCount++;
            }
        }
    }

    final RemoteProcessGroupContentsDTO contents = new RemoteProcessGroupContentsDTO();
    contents.setInputPorts(inputPorts);
    contents.setOutputPorts(outputPorts);

    final RemoteProcessGroupDTO dto = new RemoteProcessGroupDTO();
    dto.setId(group.getIdentifier());
    dto.setName(group.getName());
    dto.setPosition(createPositionDto(group.getPosition()));
    dto.setComments(group.getComments());
    dto.setTransmitting(group.isTransmitting());
    dto.setCommunicationsTimeout(group.getCommunicationsTimeout());
    dto.setYieldDuration(group.getYieldDuration());
    dto.setParentGroupId(group.getProcessGroup().getIdentifier());
    dto.setTargetUris(group.getTargetUris());
    dto.setFlowRefreshed(group.getLastRefreshTime());
    dto.setContents(contents);
    dto.setTransportProtocol(group.getTransportProtocol().name());
    dto.setProxyHost(group.getProxyHost());
    dto.setProxyPort(group.getProxyPort());
    dto.setProxyUser(group.getProxyUser());
    // never return the real proxy password
    if (!StringUtils.isEmpty(group.getProxyPassword())) {
        dto.setProxyPassword(SENSITIVE_VALUE_MASK);
    }

    // only specify the secure flag if we know the target system has site to site enabled
    if (group.isSiteToSiteEnabled()) {
        dto.setTargetSecure(group.getSecureFlag());
    }

    if (group.getAuthorizationIssue() != null) {
        dto.setAuthorizationIssues(Arrays.asList(group.getAuthorizationIssue()));
    }

    final Collection<ValidationResult> validationErrors = group.validate();
    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }
        dto.setValidationErrors(errors);
    }

    dto.setLocalNetworkInterface(group.getNetworkInterface());

    dto.setActiveRemoteInputPortCount(activeRemoteInputPortCount);
    dto.setInactiveRemoteInputPortCount(inactiveRemoteInputPortCount);
    dto.setActiveRemoteOutputPortCount(activeRemoteOutputPortCount);
    dto.setInactiveRemoteOutputPortCount(inactiveRemoteOutputPortCount);
    dto.setVersionedComponentId(group.getVersionedComponentId().orElse(null));

    final RemoteProcessGroupCounts counts = group.getCounts();
    if (counts != null) {
        dto.setInputPortCount(counts.getInputPortCount());
        dto.setOutputPortCount(counts.getOutputPortCount());
    }

    return dto;
}

/**
 * Creates a FlowBreadcrumbEntity from the specified parent ProcessGroup.
* * @param group group * @return dto */ private FlowBreadcrumbEntity createBreadcrumbEntity(final ProcessGroup group) { if (group == null) { return null; } final FlowBreadcrumbDTO dto = createBreadcrumbDto(group); final PermissionsDTO permissions = createPermissionsDto(group); final FlowBreadcrumbEntity entity = entityFactory.createFlowBreadcrumbEntity(dto, permissions); if (group.getParent() != null) { entity.setParentBreadcrumb(createBreadcrumbEntity(group.getParent())); } return entity; } /** * Creates a FlowBreadcrumbDTO from the specified parent ProcessGroup. * * @param group group * @return dto */ private FlowBreadcrumbDTO createBreadcrumbDto(final ProcessGroup group) { if (group == null) { return null; } final FlowBreadcrumbDTO dto = new FlowBreadcrumbDTO(); dto.setId(group.getIdentifier()); dto.setName(group.getName()); final VersionControlInformationDTO versionControlInformation = createVersionControlInformationDto(group); dto.setVersionControlInformation(versionControlInformation); return dto; } public ComponentReferenceDTO createComponentReferenceDto(final Authorizable authorizable) { if (authorizable == null || !(authorizable instanceof ComponentAuthorizable)) { return null; } final ComponentAuthorizable componentAuthorizable = (ComponentAuthorizable) authorizable; final ComponentReferenceDTO dto = new ComponentReferenceDTO(); dto.setId(componentAuthorizable.getIdentifier()); dto.setParentGroupId(componentAuthorizable.getProcessGroupIdentifier()); dto.setName(authorizable.getResource().getName()); return dto; } public AccessPolicySummaryDTO createAccessPolicySummaryDto(final AccessPolicy accessPolicy, final ComponentReferenceEntity componentReference) { if (accessPolicy == null) { return null; } final AccessPolicySummaryDTO dto = new AccessPolicySummaryDTO(); dto.setId(accessPolicy.getIdentifier()); dto.setResource(accessPolicy.getResource()); dto.setAction(accessPolicy.getAction().toString()); 
dto.setConfigurable(AuthorizerCapabilityDetection.isAccessPolicyConfigurable(authorizer, accessPolicy)); dto.setComponentReference(componentReference); return dto; } public AccessPolicyDTO createAccessPolicyDto(final AccessPolicy accessPolicy, final Set<TenantEntity> userGroups, final Set<TenantEntity> users, final ComponentReferenceEntity componentReference) { if (accessPolicy == null) { return null; } final AccessPolicyDTO dto = new AccessPolicyDTO(); dto.setUserGroups(userGroups); dto.setUsers(users); dto.setId(accessPolicy.getIdentifier()); dto.setResource(accessPolicy.getResource()); dto.setAction(accessPolicy.getAction().toString()); dto.setConfigurable(AuthorizerCapabilityDetection.isAccessPolicyConfigurable(authorizer, accessPolicy)); dto.setComponentReference(componentReference); return dto; } /** * Creates the PermissionsDTO based on the specified Authorizable. * * @param authorizable authorizable * @return dto */ public PermissionsDTO createPermissionsDto(final Authorizable authorizable) { return createPermissionsDto(authorizable, NiFiUserUtils.getNiFiUser()); } /** * Creates the PermissionsDTO based on the specified Authorizable for the given user * * @param authorizable authorizable * @param user the NiFi User for which the Permissions are being created * @return dto */ public PermissionsDTO createPermissionsDto(final Authorizable authorizable, final NiFiUser user) { final PermissionsDTO dto = new PermissionsDTO(); dto.setCanRead(authorizable.isAuthorized(authorizer, RequestAction.READ, user)); dto.setCanWrite(authorizable.isAuthorized(authorizer, RequestAction.WRITE, user)); return dto; } public AffectedComponentEntity createAffectedComponentEntity(final ProcessorEntity processorEntity) { if (processorEntity == null) { return null; } final AffectedComponentEntity component = new AffectedComponentEntity(); component.setBulletins(processorEntity.getBulletins()); component.setId(processorEntity.getId()); 
component.setPermissions(processorEntity.getPermissions()); component.setPosition(processorEntity.getPosition()); component.setRevision(processorEntity.getRevision()); component.setUri(processorEntity.getUri()); final ProcessorDTO processorDto = processorEntity.getComponent(); final AffectedComponentDTO componentDto = new AffectedComponentDTO(); if (componentDto == null) { componentDto.setId(processorEntity.getId()); componentDto.setName(processorEntity.getId()); } else { componentDto.setId(processorDto.getId()); componentDto.setName(processorDto.getName()); componentDto.setProcessGroupId(processorDto.getParentGroupId()); componentDto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_PROCESSOR); componentDto.setState(processorDto.getState()); componentDto.setValidationErrors(processorDto.getValidationErrors()); } component.setComponent(componentDto); return component; } public AffectedComponentEntity createAffectedComponentEntity(final PortEntity portEntity, final String referenceType) { if (portEntity == null) { return null; } final AffectedComponentEntity component = new AffectedComponentEntity(); component.setBulletins(portEntity.getBulletins()); component.setId(portEntity.getId()); component.setPermissions(portEntity.getPermissions()); component.setPosition(portEntity.getPosition()); component.setRevision(portEntity.getRevision()); component.setUri(portEntity.getUri()); final PortDTO portDto = portEntity.getComponent(); final AffectedComponentDTO componentDto = new AffectedComponentDTO(); if (componentDto == null) { componentDto.setId(portEntity.getId()); componentDto.setName(portEntity.getId()); } else { componentDto.setId(portDto.getId()); componentDto.setName(portDto.getName()); componentDto.setProcessGroupId(portDto.getParentGroupId()); componentDto.setReferenceType(referenceType); componentDto.setState(portDto.getState()); componentDto.setValidationErrors(portDto.getValidationErrors()); } component.setComponent(componentDto); return component; } public 
AffectedComponentEntity createAffectedComponentEntity(final ControllerServiceEntity serviceEntity) {
    if (serviceEntity == null) {
        return null;
    }

    final AffectedComponentEntity component = new AffectedComponentEntity();
    component.setBulletins(serviceEntity.getBulletins());
    component.setId(serviceEntity.getId());
    component.setPermissions(serviceEntity.getPermissions());
    component.setPosition(serviceEntity.getPosition());
    component.setRevision(serviceEntity.getRevision());
    component.setUri(serviceEntity.getUri());

    final ControllerServiceDTO serviceDto = serviceEntity.getComponent();
    final AffectedComponentDTO componentDto = new AffectedComponentDTO();
    // no component payload (e.g. caller lacks read permission): fall back to entity fields
    if (serviceDto == null) {
        componentDto.setId(serviceEntity.getId());
        componentDto.setName(serviceEntity.getId());
        componentDto.setProcessGroupId(serviceEntity.getParentGroupId());
    } else {
        componentDto.setId(serviceDto.getId());
        componentDto.setName(serviceDto.getName());
        componentDto.setProcessGroupId(serviceDto.getParentGroupId());
        componentDto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_CONTROLLER_SERVICE);
        componentDto.setState(serviceDto.getState());
        componentDto.setValidationErrors(serviceDto.getValidationErrors());
    }
    component.setComponent(componentDto);

    return component;
}

/**
 * Creates an AffectedComponentEntity for a remote group port, borrowing
 * permissions/revision/uri from the enclosing remote process group entity.
 */
public AffectedComponentEntity createAffectedComponentEntity(final RemoteProcessGroupPortDTO remotePortDto, final String referenceType, final RemoteProcessGroupEntity rpgEntity) {
    if (remotePortDto == null) {
        return null;
    }

    final AffectedComponentEntity component = new AffectedComponentEntity();
    component.setId(remotePortDto.getId());
    component.setPermissions(rpgEntity.getPermissions());
    component.setRevision(rpgEntity.getRevision());
    component.setUri(rpgEntity.getUri());

    final AffectedComponentDTO componentDto = new AffectedComponentDTO();
    componentDto.setId(remotePortDto.getId());
    componentDto.setName(remotePortDto.getName());
    componentDto.setProcessGroupId(remotePortDto.getGroupId());
    componentDto.setReferenceType(referenceType);
    componentDto.setState(remotePortDto.isTransmitting() ? "Running" : "Stopped");
    component.setComponent(componentDto);

    return component;
}

/**
 * Creates an AffectedComponentDTO directly from a live component node
 * (processor or controller service), including current validation errors.
 */
public AffectedComponentDTO createAffectedComponentDto(final ComponentNode component) {
    final AffectedComponentDTO dto = new AffectedComponentDTO();
    dto.setId(component.getIdentifier());
    dto.setName(component.getName());
    dto.setProcessGroupId(component.getProcessGroupIdentifier());

    if (component instanceof ProcessorNode) {
        final ProcessorNode node = ((ProcessorNode) component);
        dto.setState(node.getDesiredState().name());
        dto.setActiveThreadCount(node.getActiveThreadCount());
        dto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_PROCESSOR);
    } else if (component instanceof ControllerServiceNode) {
        final ControllerServiceNode node = ((ControllerServiceNode) component);
        dto.setState(node.getState().name());
        dto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_CONTROLLER_SERVICE);
    }

    final Collection<ValidationResult> validationErrors = component.getValidationErrors();
    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }

        dto.setValidationErrors(errors);
    }

    return dto;
}

/**
 * Creates a ComponentValidationResultDTO combining the component's current
 * validation errors with the supplied (e.g. hypothetical) validation results.
 */
public ComponentValidationResultDTO createComponentValidationResultDto(final ComponentNode component, final ValidationState validationResults) {
    final ComponentValidationResultDTO dto = new ComponentValidationResultDTO();
    dto.setId(component.getIdentifier());
    dto.setName(component.getName());
    dto.setProcessGroupId(component.getProcessGroupIdentifier());

    if (component instanceof ProcessorNode) {
        final ProcessorNode node = ((ProcessorNode) component);
        dto.setState(node.getScheduledState().name());
        dto.setActiveThreadCount(node.getActiveThreadCount());
        dto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_PROCESSOR);
    } else if (component instanceof ControllerServiceNode) {
        final ControllerServiceNode node = ((ControllerServiceNode) component);
        dto.setState(node.getState().name());
        dto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_CONTROLLER_SERVICE);
    }

    final Collection<ValidationResult> validationErrors = component.getValidationErrors();
    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }

        dto.setValidationErrors(errors);
        dto.setCurrentlyValid(false);
    } else {
        dto.setCurrentlyValid(true);
    }

    final List<String> resultantValidationErrors = validationResults.getValidationErrors().stream()
        .map(ValidationResult::toString)
        .collect(Collectors.toList());

    dto.setResultantValidationErrors(resultantValidationErrors);
    dto.setResultsValid(resultantValidationErrors.isEmpty());

    return dto;
}

/**
 * Creates a ProcessGroupDTO from the specified ProcessGroup.
 *
 * @param group group
 * @return dto
 */
public ProcessGroupDTO createProcessGroupDto(final ProcessGroup group) {
    return createProcessGroupDto(group, false);
}

/**
 * Creates a ProcessGroupFlowDTO: the group's flow contents plus breadcrumb,
 * parent group id and (optional) parameter-context reference.
 */
public ProcessGroupFlowDTO createProcessGroupFlowDto(final ProcessGroup group, final ProcessGroupStatus groupStatus, final RevisionManager revisionManager,
                                                     final Function<ProcessGroup, List<BulletinEntity>> getProcessGroupBulletins) {
    final ProcessGroupFlowDTO dto = new ProcessGroupFlowDTO();
    dto.setId(group.getIdentifier());
    dto.setLastRefreshed(new Date());
    dto.setBreadcrumb(createBreadcrumbEntity(group));
    dto.setFlow(createFlowDto(group, groupStatus, revisionManager, getProcessGroupBulletins));

    final ProcessGroup parent = group.getParent();
    if (parent != null) {
        dto.setParentGroupId(parent.getIdentifier());
    }

    final ParameterContext parameterContext = group.getParameterContext();
    if (parameterContext != null) {
        dto.setParameterContext(entityFactory.createParameterReferenceEntity(createParameterContextReference(parameterContext), createPermissionsDto(parameterContext)));
    }

    return dto;
}

public
ParameterContextReferenceDTO createParameterContextReference(final ParameterContext parameterContext) {
    if (parameterContext == null) {
        return null;
    }

    // lightweight id/name reference only — no parameters or bound groups
    final ParameterContextReferenceDTO dto = new ParameterContextReferenceDTO();
    dto.setId(parameterContext.getIdentifier());
    dto.setName(parameterContext.getName());
    return dto;
}

/**
 * Creates a FlowDTO for the given group, resolving each pruned snippet entry
 * back to the live component so the full DTO (with status, revision and
 * permissions) can be marshalled. Returns null when the snippet is null.
 */
public FlowDTO createFlowDto(final ProcessGroup group, final ProcessGroupStatus groupStatus, final FlowSnippetDTO snippet, final RevisionManager revisionManager,
                             final Function<ProcessGroup, List<BulletinEntity>> getProcessGroupBulletins) {
    if (snippet == null) {
        return null;
    }

    final FlowDTO flow = new FlowDTO();

    for (final ConnectionDTO snippetConnection : snippet.getConnections()) {
        final Connection connection = group.getConnection(snippetConnection.getId());

        // marshal the actual connection as the snippet is pruned
        final ConnectionDTO dto = createConnectionDto(connection);
        final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(connection.getIdentifier()));
        final PermissionsDTO accessPolicy = createPermissionsDto(connection);
        final ConnectionStatusDTO status = getComponentStatus(
            () -> groupStatus.getConnectionStatus().stream().filter(connectionStatus -> connection.getIdentifier().equals(connectionStatus.getId())).findFirst().orElse(null),
            this::createConnectionStatusDto
        );
        flow.getConnections().add(entityFactory.createConnectionEntity(dto, revision, accessPolicy, status));
    }

    for (final FunnelDTO snippetFunnel : snippet.getFunnels()) {
        final Funnel funnel = group.getFunnel(snippetFunnel.getId());

        // marshal the actual funnel as the snippet is pruned
        final FunnelDTO dto = createFunnelDto(funnel);
        final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(funnel.getIdentifier()));
        final PermissionsDTO accessPolicy = createPermissionsDto(funnel);
        flow.getFunnels().add(entityFactory.createFunnelEntity(dto, revision, accessPolicy));
    }

    for (final PortDTO snippetInputPort : snippet.getInputPorts()) {
        final Port
inputPort = group.getInputPort(snippetInputPort.getId()); // marshal the actual port as the snippet is pruned final PortDTO dto = createPortDto(inputPort); final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(inputPort.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(inputPort); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(inputPort)); final PortStatusDTO status = getComponentStatus( () -> groupStatus.getInputPortStatus().stream().filter(inputPortStatus -> inputPort.getIdentifier().equals(inputPortStatus.getId())).findFirst().orElse(null), inputPortStatus -> createPortStatusDto(inputPortStatus) ); final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(inputPort.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList()); flow.getInputPorts().add(entityFactory.createPortEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities)); } for (final PortDTO snippetOutputPort : snippet.getOutputPorts()) { final Port outputPort = group.getOutputPort(snippetOutputPort.getId()); // marshal the actual port as the snippet is pruned final PortDTO dto = createPortDto(outputPort); final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(outputPort.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(outputPort); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(outputPort)); final PortStatusDTO status = getComponentStatus( () -> groupStatus.getOutputPortStatus().stream().filter(outputPortStatus -> outputPort.getIdentifier().equals(outputPortStatus.getId())).findFirst().orElse(null), outputPortStatus -> createPortStatusDto(outputPortStatus) ); final List<BulletinDTO> bulletins = 
createBulletinDtos(bulletinRepository.findBulletinsForSource(outputPort.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList()); flow.getOutputPorts().add(entityFactory.createPortEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities)); } for (final LabelDTO snippetLabel : snippet.getLabels()) { final Label label = group.getLabel(snippetLabel.getId()); // marshal the actual label as the snippet is pruned final LabelDTO dto = createLabelDto(label); final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(label.getIdentifier())); final PermissionsDTO accessPolicy = createPermissionsDto(label); flow.getLabels().add(entityFactory.createLabelEntity(dto, revision, accessPolicy)); } for (final ProcessGroupDTO snippetProcessGroup : snippet.getProcessGroups()) { final ProcessGroup processGroup = group.getProcessGroup(snippetProcessGroup.getId()); // marshal the actual group as the snippet is pruned final ProcessGroupDTO dto = createProcessGroupDto(processGroup); final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(processGroup.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(processGroup); final ProcessGroupStatusDTO status = getComponentStatus( () -> groupStatus.getProcessGroupStatus().stream().filter(processGroupStatus -> processGroup.getIdentifier().equals(processGroupStatus.getId())).findFirst().orElse(null), processGroupStatus -> createConciseProcessGroupStatusDto(processGroupStatus) ); final List<BulletinEntity> bulletins = getProcessGroupBulletins.apply(processGroup); flow.getProcessGroups().add(entityFactory.createProcessGroupEntity(dto, revision, permissions, status, bulletins)); } for (final ProcessorDTO snippetProcessor : snippet.getProcessors()) { final ProcessorNode processor = group.getProcessor(snippetProcessor.getId()); // 
marshal the actual processor as the snippet is pruned final ProcessorDTO dto = createProcessorDto(processor); final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(processor.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(processor); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(processor)); final ProcessorStatusDTO status = getComponentStatus( () -> groupStatus.getProcessorStatus().stream().filter(processorStatus -> processor.getIdentifier().equals(processorStatus.getId())).findFirst().orElse(null), processorStatus -> createProcessorStatusDto(processorStatus) ); final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(processor.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList()); flow.getProcessors().add(entityFactory.createProcessorEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities)); } for (final RemoteProcessGroupDTO snippetRemoteProcessGroup : snippet.getRemoteProcessGroups()) { final RemoteProcessGroup remoteProcessGroup = group.getRemoteProcessGroup(snippetRemoteProcessGroup.getId()); // marshal the actual rpm as the snippet is pruned final RemoteProcessGroupDTO dto = createRemoteProcessGroupDto(remoteProcessGroup); final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(remoteProcessGroup.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(remoteProcessGroup); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(remoteProcessGroup)); final RemoteProcessGroupStatusDTO status = getComponentStatus( () -> groupStatus.getRemoteProcessGroupStatus().stream().filter(rpgStatus -> remoteProcessGroup.getIdentifier().equals(rpgStatus.getId())).findFirst().orElse(null), 
remoteProcessGroupStatus -> createRemoteProcessGroupStatusDto(remoteProcessGroup, remoteProcessGroupStatus) ); final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(remoteProcessGroup.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList()); flow.getRemoteProcessGroups().add(entityFactory.createRemoteProcessGroupEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities)); } return flow; } private <T, S> T getComponentStatus(final Supplier<S> getComponentStatus, final Function<S, T> convertToDto) { final T statusDTO; final S status = getComponentStatus.get(); if (status != null) { statusDTO = convertToDto.apply(status); } else { statusDTO = null; } return statusDTO; } public FlowDTO createFlowDto(final ProcessGroup group, final ProcessGroupStatus groupStatus, final RevisionManager revisionManager, final Function<ProcessGroup, List<BulletinEntity>> getProcessGroupBulletins) { final FlowDTO dto = new FlowDTO(); for (final ProcessorNode procNode : group.getProcessors()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(procNode.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(procNode); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(procNode)); final ProcessorStatusDTO status = getComponentStatus( () -> groupStatus.getProcessorStatus().stream().filter(processorStatus -> procNode.getIdentifier().equals(processorStatus.getId())).findFirst().orElse(null), processorStatus -> createProcessorStatusDto(processorStatus) ); final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(procNode.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, 
permissions.getCanRead())).collect(Collectors.toList()); dto.getProcessors().add(entityFactory.createProcessorEntity(createProcessorDto(procNode), revision, permissions, operatePermissions, status, bulletinEntities)); } for (final Connection connNode : group.getConnections()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(connNode.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(connNode); final ConnectionStatusDTO status = getComponentStatus( () -> groupStatus.getConnectionStatus().stream().filter(connectionStatus -> connNode.getIdentifier().equals(connectionStatus.getId())).findFirst().orElse(null), connectionStatus -> createConnectionStatusDto(connectionStatus) ); dto.getConnections().add(entityFactory.createConnectionEntity(createConnectionDto(connNode), revision, permissions, status)); } for (final Label label : group.getLabels()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(label.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(label); dto.getLabels().add(entityFactory.createLabelEntity(createLabelDto(label), revision, permissions)); } for (final Funnel funnel : group.getFunnels()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(funnel.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(funnel); dto.getFunnels().add(entityFactory.createFunnelEntity(createFunnelDto(funnel), revision, permissions)); } for (final ProcessGroup childGroup : group.getProcessGroups()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(childGroup.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(childGroup); final ProcessGroupStatusDTO status = getComponentStatus( () -> groupStatus.getProcessGroupStatus().stream().filter(processGroupStatus -> childGroup.getIdentifier().equals(processGroupStatus.getId())).findFirst().orElse(null), processGroupStatus -> 
createConciseProcessGroupStatusDto(processGroupStatus) ); final List<BulletinEntity> bulletins = getProcessGroupBulletins.apply(childGroup); dto.getProcessGroups().add(entityFactory.createProcessGroupEntity(createProcessGroupDto(childGroup), revision, permissions, status, bulletins)); } for (final RemoteProcessGroup rpg : group.getRemoteProcessGroups()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(rpg.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(rpg); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(rpg)); final RemoteProcessGroupStatusDTO status = getComponentStatus( () -> groupStatus.getRemoteProcessGroupStatus().stream().filter(remoteProcessGroupStatus -> rpg.getIdentifier().equals(remoteProcessGroupStatus.getId())).findFirst().orElse(null), remoteProcessGroupStatus -> createRemoteProcessGroupStatusDto(rpg, remoteProcessGroupStatus) ); final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(rpg.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList()); dto.getRemoteProcessGroups().add(entityFactory.createRemoteProcessGroupEntity(createRemoteProcessGroupDto(rpg), revision, permissions, operatePermissions, status, bulletinEntities)); } for (final Port inputPort : group.getInputPorts()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(inputPort.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(inputPort); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(inputPort)); final PortStatusDTO status = getComponentStatus( () -> groupStatus.getInputPortStatus().stream().filter(inputPortStatus -> inputPort.getIdentifier().equals(inputPortStatus.getId())).findFirst().orElse(null), inputPortStatus -> 
createPortStatusDto(inputPortStatus) ); final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(inputPort.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList()); dto.getInputPorts().add(entityFactory.createPortEntity(createPortDto(inputPort), revision, permissions, operatePermissions, status, bulletinEntities)); } for (final Port outputPort : group.getOutputPorts()) { final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(outputPort.getIdentifier())); final PermissionsDTO permissions = createPermissionsDto(outputPort); final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(outputPort)); final PortStatusDTO status = getComponentStatus( () -> groupStatus.getOutputPortStatus().stream().filter(outputPortStatus -> outputPort.getIdentifier().equals(outputPortStatus.getId())).findFirst().orElse(null), outputPortStatus -> createPortStatusDto(outputPortStatus) ); final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(outputPort.getIdentifier())); final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList()); dto.getOutputPorts().add(entityFactory.createPortEntity(createPortDto(outputPort), revision, permissions, operatePermissions, status, bulletinEntities)); } return dto; } /** * Creates a ProcessGroupDTO from the specified ProcessGroup. 
 * @param group group
 * @param recurse recurse
 * @return dto
 */
public ProcessGroupDTO createProcessGroupDto(final ProcessGroup group, final boolean recurse) {
    final ProcessGroupDTO dto = createConciseProcessGroupDto(group);
    dto.setContents(createProcessGroupContentsDto(group, recurse));
    return dto;
}

/**
 * Creates a ProcessGroupDTO from the specified ProcessGroup.
 *
 * @param group group
 * @return dto
 */
private ProcessGroupDTO createConciseProcessGroupDto(final ProcessGroup group) {
    if (group == null) {
        return null;
    }

    final ProcessGroupDTO dto = new ProcessGroupDTO();
    dto.setId(group.getIdentifier());
    dto.setPosition(createPositionDto(group.getPosition()));
    dto.setComments(group.getComments());
    dto.setName(group.getName());
    dto.setVersionedComponentId(group.getVersionedComponentId().orElse(null));
    dto.setVersionControlInformation(createVersionControlInformationDto(group));
    dto.setFlowfileConcurrency(group.getFlowFileConcurrency().name());
    dto.setFlowfileOutboundPolicy(group.getFlowFileOutboundPolicy().name());

    final ParameterContext parameterContext = group.getParameterContext();
    if (parameterContext != null) {
        dto.setParameterContext(entityFactory.createParameterReferenceEntity(createParameterContextReference(parameterContext), createPermissionsDto(parameterContext)));
    }

    // Flatten the group's variable registry into a simple name -> value map.
    final Map<String, String> variables = group.getVariableRegistry().getVariableMap().entrySet().stream()
        .collect(Collectors.toMap(entry -> entry.getKey().getName(), Entry::getValue));
    dto.setVariables(variables);

    final ProcessGroup parentGroup = group.getParent();
    if (parentGroup != null) {
        dto.setParentGroupId(parentGroup.getIdentifier());
    }

    final ProcessGroupCounts counts = group.getCounts();
    dto.setRunningCount(counts.getRunningCount());
    dto.setStoppedCount(counts.getStoppedCount());
    dto.setInvalidCount(counts.getInvalidCount());
    dto.setDisabledCount(counts.getDisabledCount());
    dto.setLocalInputPortCount(counts.getLocalInputPortCount());
    dto.setLocalOutputPortCount(counts.getLocalOutputPortCount());
    dto.setPublicInputPortCount(counts.getPublicInputPortCount());
    dto.setPublicOutputPortCount(counts.getPublicOutputPortCount());
    dto.setActiveRemotePortCount(counts.getActiveRemotePortCount());
    dto.setInactiveRemotePortCount(counts.getInactiveRemotePortCount());
    dto.setUpToDateCount(counts.getUpToDateCount());
    dto.setLocallyModifiedCount(counts.getLocallyModifiedCount());
    dto.setStaleCount(counts.getStaleCount());
    dto.setLocallyModifiedAndStaleCount(counts.getLocallyModifiedAndStaleCount());
    dto.setSyncFailureCount(counts.getSyncFailureCount());

    return dto;
}

/**
 * Converts the differences of a flow comparison into DTOs grouped by the
 * component they belong to, skipping differences that are not meaningful to
 * present to the user.
 */
public Set<ComponentDifferenceDTO> createComponentDifferenceDtos(final FlowComparison comparison, final FlowManager flowManager) {
    final Map<ComponentDifferenceDTO, List<DifferenceDTO>> differencesByComponent = new HashMap<>();
    final Map<String, VersionedProcessGroup> versionedGroups = flattenProcessGroups(comparison.getFlowA().getContents());

    for (final FlowDifference difference : comparison.getDifferences()) {
        // Ignore these as local differences for now because we can't do anything with it
        if (difference.getDifferenceType() == DifferenceType.BUNDLE_CHANGED) {
            continue;
        }

        // Ignore differences that are the result of the Versioned Flow not having a Scheduled State and the newer flow being "ENABLED". We do this because
        // Scheduled State was not always part of the Versioned Flow - it was always assumed to be ENABLED. We don't want flows that were previously stored in this
        // format to now be considered different than the local flow.
        if (FlowDifferenceFilters.isScheduledStateNew(difference)) {
            continue;
        }

        // Ignore differences for adding remote ports
        if (FlowDifferenceFilters.isAddedOrRemovedRemotePort(difference)) {
            continue;
        }

        // Ignore name changes to public ports
        if (FlowDifferenceFilters.isPublicPortNameChange(difference)) {
            continue;
        }

        if (FlowDifferenceFilters.isIgnorableVersionedFlowCoordinateChange(difference)) {
            continue;
        }

        if (FlowDifferenceFilters.isNewPropertyWithDefaultValue(difference, flowManager)) {
            continue;
        }

        // Look up the versioned group that contains component A so the auto-terminated
        // relationship filter can inspect its surroundings.
        final VersionedComponent componentA = difference.getComponentA();
        final VersionedProcessGroup relevantProcessGroup = componentA == null ? null : versionedGroups.get(componentA.getGroupIdentifier());
        if (relevantProcessGroup != null && FlowDifferenceFilters.isNewRelationshipAutoTerminatedAndDefaulted(difference, relevantProcessGroup, flowManager)) {
            continue;
        }

        final ComponentDifferenceDTO componentDiff = createComponentDifference(difference);
        final List<DifferenceDTO> differences = differencesByComponent.computeIfAbsent(componentDiff, key -> new ArrayList<>());

        final DifferenceDTO dto = new DifferenceDTO();
        dto.setDifferenceType(difference.getDifferenceType().getDescription());
        dto.setDifference(difference.getDescription());

        differences.add(dto);
    }

    for (final Map.Entry<ComponentDifferenceDTO, List<DifferenceDTO>> entry : differencesByComponent.entrySet()) {
        entry.getKey().setDifferences(entry.getValue());
    }

    return differencesByComponent.keySet();
}

// Flattens a versioned process-group tree into a map keyed by group identifier.
private Map<String, VersionedProcessGroup> flattenProcessGroups(final VersionedProcessGroup group) {
    final Map<String, VersionedProcessGroup> flattened = new HashMap<>();
    flattenProcessGroups(group, flattened);
    return flattened;
}

// Recursive helper for the flattening above.
private void flattenProcessGroups(final VersionedProcessGroup group, final Map<String, VersionedProcessGroup> flattened) {
    flattened.put(group.getIdentifier(), group);

    for (final VersionedProcessGroup child : group.getProcessGroups()) {
        flattenProcessGroups(child, flattened);
    }
}

private
/**
 * Builds the ComponentDifferenceDTO for a single FlowDifference, preferring the
 * live (instantiated) side of the comparison when it is available.
 */
ComponentDifferenceDTO createComponentDifference(final FlowDifference difference) {
    VersionedComponent component = difference.getComponentA();
    if (component == null || difference.getComponentB() instanceof InstantiatedVersionedComponent) {
        component = difference.getComponentB();
    }

    final ComponentDifferenceDTO dto = new ComponentDifferenceDTO();
    dto.setComponentName(component.getName());
    dto.setComponentType(component.getComponentType().toString());

    if (component instanceof InstantiatedVersionedComponent) {
        final InstantiatedVersionedComponent instantiatedComponent = (InstantiatedVersionedComponent) component;
        dto.setComponentId(instantiatedComponent.getInstanceId());
        dto.setProcessGroupId(instantiatedComponent.getInstanceGroupId());
    } else {
        dto.setComponentId(component.getIdentifier());
        // BUG FIX: previously dto.setProcessGroupId(dto.getProcessGroupId()) — a
        // self-assignment that always left the group id null. Use the versioned
        // component's own group identifier (same accessor used when filtering
        // differences in createComponentDifferenceDtos).
        dto.setProcessGroupId(component.getGroupIdentifier());
    }

    return dto;
}

/**
 * Builds the version-control information DTO for a group, or null when the
 * group is not under version control.
 */
public VersionControlInformationDTO createVersionControlInformationDto(final ProcessGroup group) {
    if (group == null) {
        return null;
    }

    final VersionControlInformation versionControlInfo = group.getVersionControlInformation();
    if (versionControlInfo == null) {
        return null;
    }

    final VersionControlInformationDTO dto = new VersionControlInformationDTO();
    dto.setGroupId(group.getIdentifier());
    dto.setRegistryId(versionControlInfo.getRegistryIdentifier());
    dto.setRegistryName(versionControlInfo.getRegistryName());
    dto.setBucketId(versionControlInfo.getBucketIdentifier());
    dto.setBucketName(versionControlInfo.getBucketName());
    dto.setFlowId(versionControlInfo.getFlowIdentifier());
    dto.setFlowName(versionControlInfo.getFlowName());
    dto.setFlowDescription(versionControlInfo.getFlowDescription());
    dto.setVersion(versionControlInfo.getVersion());

    final VersionedFlowStatus status = versionControlInfo.getStatus();
    final VersionedFlowState state = status.getState();
    dto.setState(state == null ? null : state.name());
    dto.setStateExplanation(status.getStateExplanation());

    return dto;
}

/**
 * Builds a mapping from versioned-component instance ids to live component ids
 * for the given instantiated group, recursively including child groups and the
 * ports of remote process groups.
 */
public Map<String, String> createVersionControlComponentMappingDto(final InstantiatedVersionedProcessGroup group) {
    final Map<String, String> mapping = new HashMap<>();
    mapping.put(group.getInstanceId(), group.getIdentifier());

    group.getProcessors().stream()
        .map(proc -> (InstantiatedVersionedProcessor) proc)
        .forEach(proc -> mapping.put(proc.getInstanceId(), proc.getIdentifier()));

    group.getFunnels().stream()
        .map(funnel -> (InstantiatedVersionedFunnel) funnel)
        .forEach(funnel -> mapping.put(funnel.getInstanceId(), funnel.getIdentifier()));

    group.getInputPorts().stream()
        .map(port -> (InstantiatedVersionedPort) port)
        .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));

    group.getOutputPorts().stream()
        .map(port -> (InstantiatedVersionedPort) port)
        .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));

    group.getControllerServices().stream()
        .map(service -> (InstantiatedVersionedControllerService) service)
        .forEach(service -> mapping.put(service.getInstanceId(), service.getIdentifier()));

    group.getLabels().stream()
        .map(label -> (InstantiatedVersionedLabel) label)
        .forEach(label -> mapping.put(label.getInstanceId(), label.getIdentifier()));

    group.getConnections().stream()
        .map(conn -> (InstantiatedVersionedConnection) conn)
        .forEach(conn -> mapping.put(conn.getInstanceId(), conn.getIdentifier()));

    group.getRemoteProcessGroups().stream()
        .map(rpg -> (InstantiatedVersionedRemoteProcessGroup) rpg)
        .forEach(rpg -> {
            mapping.put(rpg.getInstanceId(), rpg.getIdentifier());

            if (rpg.getInputPorts() != null) {
                rpg.getInputPorts().stream()
                    .map(port -> (InstantiatedVersionedRemoteGroupPort) port)
                    .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));
            }

            if (rpg.getOutputPorts() != null) {
                rpg.getOutputPorts().stream()
                    .map(port -> (InstantiatedVersionedRemoteGroupPort) port)
                    .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));
            }
        });

    group.getProcessGroups().stream()
        .map(child -> (InstantiatedVersionedProcessGroup) child)
        .forEach(child -> {
            final Map<String, String> childMapping = createVersionControlComponentMappingDto(child);
            mapping.putAll(childMapping);
        });

    return mapping;
}

/**
 * Creates a ProcessGroupContentDTO from the specified ProcessGroup.
 *
 * @param group group
 * @param recurse recurse
 * @return dto
 */
private FlowSnippetDTO createProcessGroupContentsDto(final ProcessGroup group, final boolean recurse) {
    if (group == null) {
        return null;
    }

    final FlowSnippetDTO dto = new FlowSnippetDTO();

    for (final ProcessorNode procNode : group.getProcessors()) {
        dto.getProcessors().add(createProcessorDto(procNode));
    }

    for (final Connection connNode : group.getConnections()) {
        dto.getConnections().add(createConnectionDto(connNode));
    }

    for (final Label label : group.getLabels()) {
        dto.getLabels().add(createLabelDto(label));
    }

    for (final Funnel funnel : group.getFunnels()) {
        dto.getFunnels().add(createFunnelDto(funnel));
    }

    for (final ProcessGroup childGroup : group.getProcessGroups()) {
        if (recurse) {
            dto.getProcessGroups().add(createProcessGroupDto(childGroup, recurse));
        } else {
            dto.getProcessGroups().add(createConciseProcessGroupDto(childGroup));
        }
    }

    for (final RemoteProcessGroup remoteProcessGroup : group.getRemoteProcessGroups()) {
        dto.getRemoteProcessGroups().add(createRemoteProcessGroupDto(remoteProcessGroup));
    }

    for (final Port inputPort : group.getInputPorts()) {
        dto.getInputPorts().add(createPortDto(inputPort));
    }

    for (final Port outputPort : group.getOutputPorts()) {
        dto.getOutputPorts().add(createPortDto(outputPort));
    }

    return dto;
}

// True when the type is annotated @Restricted.
private boolean isRestricted(final Class<?> cls) {
    return cls.isAnnotationPresent(Restricted.class);
}

// Returns the general usage-restriction text, or null when absent or blank.
private String getUsageRestriction(final Class<?> cls) {
    final Restricted restricted = cls.getAnnotation(Restricted.class);

    if (restricted == null) {
        return null;
    }

    if (StringUtils.isBlank(restricted.value())) {
        return null;
    }

    return restricted.value();
}

// Maps each explicit @Restriction to a DTO; null when none are declared.
private Set<ExplicitRestrictionDTO> getExplicitRestrictions(final Class<?> cls) {
    final Restricted restricted = cls.getAnnotation(Restricted.class);

    if (restricted == null) {
        return null;
    }

    final Restriction[] restrictions = restricted.restrictions();

    if (restrictions == null || restrictions.length == 0) {
        return null;
    }

    return Arrays.stream(restrictions).map(restriction -> {
        final RequiredPermissionDTO requiredPermission = new RequiredPermissionDTO();
        requiredPermission.setId(restriction.requiredPermission().getPermissionIdentifier());
        requiredPermission.setLabel(restriction.requiredPermission().getPermissionLabel());

        final ExplicitRestrictionDTO usageRestriction = new ExplicitRestrictionDTO();
        usageRestriction.setRequiredPermission(requiredPermission);
        usageRestriction.setExplanation(restriction.explanation());
        return usageRestriction;
    }).collect(Collectors.toSet());
}

// Returns the @DeprecationNotice reason, or null when the type is not deprecated.
private String getDeprecationReason(final Class<?> cls) {
    final DeprecationNotice deprecationNotice = cls.getAnnotation(DeprecationNotice.class);
    return deprecationNotice == null ?
null : deprecationNotice.reason(); } public Set<AffectedComponentEntity> createAffectedComponentEntities(final Set<ComponentNode> affectedComponents, final RevisionManager revisionManager) { return affectedComponents.stream() .map(component -> createAffectedComponentEntity(component, revisionManager)) .collect(Collectors.toSet()); } public AffectedComponentEntity createAffectedComponentEntity(final ComponentNode componentNode, final RevisionManager revisionManager) { final AffectedComponentDTO affectedComponent = createAffectedComponentDto(componentNode); final PermissionsDTO permissions = createPermissionsDto(componentNode); final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(componentNode.getIdentifier())); final ProcessGroupNameDTO groupNameDto = new ProcessGroupNameDTO(); groupNameDto.setId(componentNode.getProcessGroupIdentifier()); groupNameDto.setName(componentNode.getProcessGroupIdentifier()); ProcessGroup processGroup = null; if (componentNode instanceof ProcessorNode) { processGroup = ((ProcessorNode) componentNode).getProcessGroup(); } else if (componentNode instanceof ControllerServiceNode) { processGroup = ((ControllerServiceNode) componentNode).getProcessGroup(); } if (processGroup != null) { final boolean authorized = processGroup.isAuthorized(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser()); if (authorized) { groupNameDto.setName(processGroup.getName()); } } return entityFactory.createAffectedComponentEntity(affectedComponent, revision, permissions, groupNameDto); } public VariableRegistryDTO createVariableRegistryDto(final ProcessGroup processGroup, final RevisionManager revisionManager) { final ComponentVariableRegistry variableRegistry = processGroup.getVariableRegistry(); final List<String> variableNames = variableRegistry.getVariableMap().keySet().stream() .map(VariableDescriptor::getName) .collect(Collectors.toList()); final Set<VariableEntity> variableEntities = new LinkedHashSet<>(); for (final String 
variableName : variableNames) { final VariableDTO variableDto = new VariableDTO(); variableDto.setName(variableName); variableDto.setValue(variableRegistry.getVariableValue(variableName)); variableDto.setProcessGroupId(processGroup.getIdentifier()); final Set<AffectedComponentEntity> affectedComponentEntities = createAffectedComponentEntities(processGroup.getComponentsAffectedByVariable(variableName), revisionManager); variableDto.setAffectedComponents(affectedComponentEntities); final boolean canWrite = isWritable(affectedComponentEntities); final VariableEntity variableEntity = new VariableEntity(); variableEntity.setVariable(variableDto); variableEntity.setCanWrite(canWrite); variableEntities.add(variableEntity); } final VariableRegistryDTO registryDto = new VariableRegistryDTO(); registryDto.setProcessGroupId(processGroup.getIdentifier()); registryDto.setVariables(variableEntities); return registryDto; } private boolean isWritable(final Collection<AffectedComponentEntity> affectedComponentEntities) { for (final AffectedComponentEntity affectedComponent : affectedComponentEntities) { final PermissionsDTO permissions = affectedComponent.getPermissions(); if (!permissions.getCanRead() || !permissions.getCanWrite()) { return false; } } return true; } public VariableRegistryUpdateRequestDTO createVariableRegistryUpdateRequestDto(final VariableRegistryUpdateRequest request) { final VariableRegistryUpdateRequestDTO dto = new VariableRegistryUpdateRequestDTO(); dto.setComplete(request.isComplete()); dto.setFailureReason(request.getFailureReason()); dto.setLastUpdated(request.getLastUpdated()); dto.setProcessGroupId(request.getProcessGroupId()); dto.setRequestId(request.getRequestId()); dto.setSubmissionTime(request.getSubmissionTime()); final List<VariableRegistryUpdateStepDTO> updateSteps = new ArrayList<>(); updateSteps.add(createVariableRegistryUpdateStepDto(request.getIdentifyRelevantComponentsStep())); 
updateSteps.add(createVariableRegistryUpdateStepDto(request.getStopProcessorsStep())); updateSteps.add(createVariableRegistryUpdateStepDto(request.getDisableServicesStep())); updateSteps.add(createVariableRegistryUpdateStepDto(request.getApplyUpdatesStep())); updateSteps.add(createVariableRegistryUpdateStepDto(request.getEnableServicesStep())); updateSteps.add(createVariableRegistryUpdateStepDto(request.getStartProcessorsStep())); dto.setUpdateSteps(updateSteps); dto.setAffectedComponents(new HashSet<>(request.getAffectedComponents().values())); return dto; } public VariableRegistryUpdateStepDTO createVariableRegistryUpdateStepDto(final VariableRegistryUpdateStep step) { final VariableRegistryUpdateStepDTO dto = new VariableRegistryUpdateStepDTO(); dto.setComplete(step.isComplete()); dto.setDescription(step.getDescription()); dto.setFailureReason(step.getFailureReason()); return dto; } public VariableRegistryDTO populateAffectedComponents(final VariableRegistryDTO variableRegistry, final ProcessGroup group, final RevisionManager revisionManager) { if (!group.getIdentifier().equals(variableRegistry.getProcessGroupId())) { throw new IllegalArgumentException("Variable Registry does not have the same Group ID as the given Process Group"); } final Set<VariableEntity> variableEntities = new LinkedHashSet<>(); if (variableRegistry.getVariables() != null) { for (final VariableEntity inputEntity : variableRegistry.getVariables()) { final VariableEntity entity = new VariableEntity(); final VariableDTO inputDto = inputEntity.getVariable(); final VariableDTO variableDto = new VariableDTO(); variableDto.setName(inputDto.getName()); variableDto.setValue(inputDto.getValue()); variableDto.setProcessGroupId(group.getIdentifier()); final Set<AffectedComponentEntity> affectedComponentEntities = createAffectedComponentEntities(group.getComponentsAffectedByVariable(variableDto.getName()), revisionManager); boolean canWrite = true; for (final AffectedComponentEntity affectedComponent : 
affectedComponentEntities) { final PermissionsDTO permissions = affectedComponent.getPermissions(); if (!permissions.getCanRead() || !permissions.getCanWrite()) { canWrite = false; break; } } variableDto.setAffectedComponents(affectedComponentEntities); entity.setCanWrite(canWrite); entity.setVariable(inputDto); variableEntities.add(entity); } } final VariableRegistryDTO registryDto = new VariableRegistryDTO(); registryDto.setProcessGroupId(group.getIdentifier()); registryDto.setVariables(variableEntities); return registryDto; } /** * Gets the capability description from the specified class. */ private String getCapabilityDescription(final Class<?> cls) { final CapabilityDescription capabilityDesc = cls.getAnnotation(CapabilityDescription.class); return capabilityDesc == null ? null : capabilityDesc.value(); } /** * Gets the tags from the specified class. */ private Set<String> getTags(final Class<?> cls) { final Set<String> tags = new HashSet<>(); final Tags tagsAnnotation = cls.getAnnotation(Tags.class); if (tagsAnnotation != null) { for (final String tag : tagsAnnotation.value()) { tags.add(tag); } } if (cls.isAnnotationPresent(Restricted.class)) { tags.add("restricted"); } return tags; } /** * Creates a bundle DTO from the specified class. 
 *
 * @param coordinate bundle coordinates
 * @return dto
 */
public BundleDTO createBundleDto(final BundleCoordinate coordinate) {
    final BundleDTO dto = new BundleDTO();
    dto.setGroup(coordinate.getGroup());
    dto.setArtifact(coordinate.getId());
    dto.setVersion(coordinate.getVersion());
    return dto;
}

/**
 * Determines the controller-service APIs implemented by the given class.
 *
 * @param cls the class to inspect
 * @return one DTO per controller-service interface the class implements
 *         (excluding ControllerService itself), or null when the class is
 *         not a controller service at all
 */
private List<ControllerServiceApiDTO> createControllerServiceApiDto(final Class cls) {
    final Set<Class> serviceApis = new HashSet<>();
    // if this is a controller service
    if (ControllerService.class.isAssignableFrom(cls)) {
        // gather all of its interfaces to determine the controller service APIs it implements
        final List<Class<?>> interfaces = ClassUtils.getAllInterfaces(cls);
        for (final Class i : interfaces) {
            // keep every controller-service interface except ControllerService itself
            if (ControllerService.class.isAssignableFrom(i) && !ControllerService.class.equals(i)) {
                serviceApis.add(i);
            }
        }
        final List<ControllerServiceApiDTO> dtos = new ArrayList<>();
        for (final Class serviceApi : serviceApis) {
            // resolve the bundle that provides each API so clients can match implementations
            final Bundle bundle = extensionManager.getBundle(serviceApi.getClassLoader());
            final BundleCoordinate bundleCoordinate = bundle.getBundleDetails().getCoordinate();
            final ControllerServiceApiDTO dto = new ControllerServiceApiDTO();
            dto.setType(serviceApi.getName());
            dto.setBundle(createBundleDto(bundleCoordinate));
            dtos.add(dto);
        }
        return dtos;
    } else {
        return null;
    }
}

/**
 * Gets the DocumentedTypeDTOs from the specified classes.
 *
 * @param classes classes
 * @param bundleGroupFilter if specified, must be member of bundle group
 * @param bundleArtifactFilter if specified, must be member of bundle artifact
 * @param typeFilter if specified, type must match
 * @return dtos
 */
public Set<DocumentedTypeDTO> fromDocumentedTypes(final Map<Class, Bundle> classes, final String bundleGroupFilter, final String bundleArtifactFilter, final String typeFilter) {
    final Set<DocumentedTypeDTO> types = new LinkedHashSet<>();
    // sort by class name so the resulting (insertion-ordered) set is stable for clients
    final List<Class> sortedClasses = new ArrayList<>(classes.keySet());
    Collections.sort(sortedClasses, CLASS_NAME_COMPARATOR);
    for (final Class cls : sortedClasses) {
        final Bundle bundle = classes.get(cls);
        final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
        // only include classes that meet the criteria if specified
        if (bundleGroupFilter != null && !bundleGroupFilter.equals(coordinate.getGroup())) {
            continue;
        }
        if (bundleArtifactFilter != null && !bundleArtifactFilter.equals(coordinate.getId())) {
            continue;
        }
        if (typeFilter != null && !typeFilter.equals(cls.getName())) {
            continue;
        }
        final DocumentedTypeDTO dto = new DocumentedTypeDTO();
        dto.setType(cls.getName());
        dto.setBundle(createBundleDto(coordinate));
        dto.setControllerServiceApis(createControllerServiceApiDto(cls));
        dto.setDescription(getCapabilityDescription(cls));
        dto.setRestricted(isRestricted(cls));
        dto.setUsageRestriction(getUsageRestriction(cls));
        dto.setExplicitRestrictions(getExplicitRestrictions(cls));
        dto.setDeprecationReason(getDeprecationReason(cls));
        dto.setTags(getTags(cls));
        types.add(dto);
    }
    return types;
}

/**
 * Gets the DocumentedTypeDTOs from the specified classes.
 *
 * @param classes classes
 * @param bundleGroupFilter if specified, must be member of bundle group
 * @param bundleArtifactFilter if specified, must be member of bundle artifact
 * @param typeFilter if specified, type must match
 * @return dtos
 */
public Set<DocumentedTypeDTO> fromDocumentedTypes(final Set<Class> classes, final String bundleGroupFilter, final String bundleArtifactFilter, final String typeFilter) {
    // resolve each class to its providing bundle, then delegate to the Map-based overload
    final Map<Class, Bundle> classBundles = new HashMap<>();
    for (final Class cls : classes) {
        classBundles.put(cls, extensionManager.getBundle(cls.getClassLoader()));
    }
    return fromDocumentedTypes(classBundles, bundleGroupFilter, bundleArtifactFilter, typeFilter);
}

/**
 * Creates a ProcessorDTO from the specified ProcessorNode.
 *
 * @param node node
 * @return dto, or null when {@code node} is null
 */
public ProcessorDTO createProcessorDto(final ProcessorNode node) {
    if (node == null) {
        return null;
    }
    final BundleCoordinate bundleCoordinate = node.getBundleCoordinate();
    // find every installed bundle sharing the node's bundle group/artifact; more
    // than one match means alternate versions of this processor are available
    final List<Bundle> compatibleBundles = extensionManager.getBundles(node.getCanonicalClassName()).stream().filter(bundle -> {
        final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
        return bundleCoordinate.getGroup().equals(coordinate.getGroup()) && bundleCoordinate.getId().equals(coordinate.getId());
    }).collect(Collectors.toList());
    final ProcessorDTO dto = new ProcessorDTO();
    dto.setId(node.getIdentifier());
    dto.setPosition(createPositionDto(node.getPosition()));
    dto.setStyle(node.getStyle());
    dto.setParentGroupId(node.getProcessGroup().getIdentifier());
    dto.setInputRequirement(node.getInputRequirement().name());
    dto.setPersistsState(node.getProcessor().getClass().isAnnotationPresent(Stateful.class));
    dto.setRestricted(node.isRestricted());
    dto.setDeprecated(node.isDeprecated());
    dto.setExecutionNodeRestricted(node.isExecutionNodeRestricted());
    dto.setExtensionMissing(node.isExtensionMissing());
    dto.setMultipleVersionsAvailable(compatibleBundles.size() > 1);
    dto.setVersionedComponentId(node.getVersionedComponentId().orElse(null));
    dto.setType(node.getCanonicalClassName());
    dto.setBundle(createBundleDto(bundleCoordinate));
    dto.setName(node.getName());
    dto.setState(node.getScheduledState().toString());
    // build the relationship dtos
    final List<RelationshipDTO> relationships = new ArrayList<>();
    for (final Relationship rel : node.getRelationships()) {
        final RelationshipDTO relationshipDTO = new RelationshipDTO();
        relationshipDTO.setDescription(rel.getDescription());
        relationshipDTO.setName(rel.getName());
        relationshipDTO.setAutoTerminate(node.isAutoTerminated(rel));
        relationships.add(relationshipDTO);
    }
    // sort the relationships by name, using a US-locale collator for stable ordering
    relationships.sort(new Comparator<RelationshipDTO>() {
        @Override
        public int compare(final RelationshipDTO r1, final RelationshipDTO r2) {
            return Collator.getInstance(Locale.US).compare(r1.getName(), r2.getName());
        }
    });
    // set the relationships
    dto.setRelationships(relationships);
    dto.setDescription(getCapabilityDescription(node.getClass()));
    dto.setSupportsParallelProcessing(!node.isTriggeredSerially());
    dto.setSupportsEventDriven(node.isEventDrivenSupported());
    dto.setSupportsBatching(node.isSessionBatchingSupported());
    dto.setConfig(createProcessorConfigDto(node));
    final ValidationStatus validationStatus = node.getValidationStatus();
    dto.setValidationStatus(validationStatus.name());
    // validation errors are only reported when present; an empty/null collection
    // leaves dto.validationErrors unset
    final Collection<ValidationResult> validationErrors = node.getValidationErrors();
    if (validationErrors != null && !validationErrors.isEmpty()) {
        final List<String> errors = new ArrayList<>();
        for (final ValidationResult validationResult : validationErrors) {
            errors.add(validationResult.toString());
        }
        dto.setValidationErrors(errors);
    }
    return dto;
}

/**
 * Creates a BulletinBoardDTO for the specified bulletins.
 *
 * @param bulletins bulletins; NOTE: this list is sorted in place
 * @return dto
 */
public BulletinBoardDTO createBulletinBoardDto(final List<BulletinEntity> bulletins) {
    // sort the bulletins oldest-first by timestamp; null entities and null
    // timestamps sort to the end
    Collections.sort(bulletins, new Comparator<BulletinEntity>() {
        @Override
        public int compare(final BulletinEntity bulletin1, final BulletinEntity bulletin2) {
            if (bulletin1 == null && bulletin2 == null) {
                return 0;
            } else if (bulletin1 == null) {
                return 1;
            } else if (bulletin2 == null) {
                return -1;
            }
            final Date timestamp1 = bulletin1.getTimestamp();
            final Date timestamp2 = bulletin2.getTimestamp();
            if (timestamp1 == null && timestamp2 == null) {
                return 0;
            } else if (timestamp1 == null) {
                return 1;
            } else if (timestamp2 == null) {
                return -1;
            } else {
                return timestamp1.compareTo(timestamp2);
            }
        }
    });
    // create the bulletin board, stamped with the generation time
    final BulletinBoardDTO bulletinBoard = new BulletinBoardDTO();
    bulletinBoard.setBulletins(bulletins);
    bulletinBoard.setGenerated(new Date());
    return bulletinBoard;
}

/**
 * Creates BulletinDTOs for the specified Bulletins.
 *
 * @param bulletins bulletin
 * @return dto
 */
public List<BulletinDTO> createBulletinDtos(final List<Bulletin> bulletins) {
    final List<BulletinDTO> bulletinDtos = new ArrayList<>(bulletins.size());
    for (final Bulletin bulletin : bulletins) {
        bulletinDtos.add(createBulletinDto(bulletin));
    }
    return bulletinDtos;
}

/**
 * Creates a BulletinDTO for the specified Bulletin.
 *
 * @param bulletin bulletin
 * @return dto
 */
public BulletinDTO createBulletinDto(final Bulletin bulletin) {
    final BulletinDTO dto = new BulletinDTO();
    dto.setId(bulletin.getId());
    dto.setNodeAddress(bulletin.getNodeAddress());
    dto.setTimestamp(bulletin.getTimestamp());
    dto.setGroupId(bulletin.getGroupId());
    dto.setSourceId(bulletin.getSourceId());
    dto.setSourceName(bulletin.getSourceName());
    dto.setCategory(bulletin.getCategory());
    dto.setLevel(bulletin.getLevel());
    dto.setMessage(bulletin.getMessage());
    return dto;
}

/**
 * Creates a ProvenanceEventNodeDTO for the specified ProvenanceEventLineageNode.
 *
 * @param node node
 * @return dto
 */
public ProvenanceNodeDTO createProvenanceEventNodeDTO(final ProvenanceEventLineageNode node) {
    final ProvenanceNodeDTO dto = new ProvenanceNodeDTO();
    dto.setId(node.getIdentifier());
    // lineage graphs distinguish event nodes from flowfile nodes by this type tag
    dto.setType("EVENT");
    dto.setEventType(node.getEventType().toString());
    dto.setTimestamp(new Date(node.getTimestamp()));
    dto.setMillis(node.getTimestamp());
    dto.setFlowFileUuid(node.getFlowFileUuid());
    dto.setParentUuids(node.getParentUuids());
    dto.setChildUuids(node.getChildUuids());
    return dto;
}

/**
 * Creates a FlowFileNodeDTO for the specified LineageNode.
 *
 * @param node node
 * @return dto
 */
public ProvenanceNodeDTO createFlowFileNodeDTO(final LineageNode node) {
    final ProvenanceNodeDTO dto = new ProvenanceNodeDTO();
    dto.setId(node.getIdentifier());
    dto.setType("FLOWFILE");
    dto.setTimestamp(new Date(node.getTimestamp()));
    dto.setMillis(node.getTimestamp());
    dto.setFlowFileUuid(node.getFlowFileUuid());
    return dto;
}

/**
 * Creates a ProvenanceLinkDTO for the specified LineageEdge.
 *
 * @param edge edge
 * @return dto
 */
public ProvenanceLinkDTO createProvenanceLinkDTO(final LineageEdge edge) {
    final LineageNode source = edge.getSource();
    final LineageNode target = edge.getDestination();
    final ProvenanceLinkDTO dto = new ProvenanceLinkDTO();
    // the link's time reflects the destination node of the edge
    dto.setTimestamp(new Date(target.getTimestamp()));
    dto.setMillis(target.getTimestamp());
    dto.setFlowFileUuid(edge.getUuid());
    dto.setSourceId(source.getIdentifier());
    dto.setTargetId(target.getIdentifier());
    return dto;
}

/**
 * Creates a LineageDTO for the specified Lineage.
 *
 * @param computeLineageSubmission submission
 * @return dto
 */
public LineageDTO createLineageDto(final ComputeLineageSubmission computeLineageSubmission) {
    // build the lineage dto
    final LineageDTO dto = new LineageDTO();
    final LineageRequestDTO requestDto = new LineageRequestDTO();
    final LineageResultsDTO resultsDto = new LineageResultsDTO();
    // include the original request and results
    dto.setRequest(requestDto);
    dto.setResults(resultsDto);
    // rebuild the request from the submission object
    switch (computeLineageSubmission.getLineageComputationType()) {
        case EXPAND_CHILDREN:
            requestDto.setEventId(computeLineageSubmission.getExpandedEventId());
            requestDto.setLineageRequestType(LineageRequestType.CHILDREN);
            break;
        case EXPAND_PARENTS:
            requestDto.setEventId(computeLineageSubmission.getExpandedEventId());
            requestDto.setLineageRequestType(LineageRequestType.PARENTS);
            break;
        case FLOWFILE_LINEAGE:
            final Collection<String> uuids = computeLineageSubmission.getLineageFlowFileUuids();
            // only a single-flowfile lineage carries its uuid back on the request
            if (uuids.size() == 1) {
                requestDto.setUuid(uuids.iterator().next());
            }
            requestDto.setEventId(computeLineageSubmission.getExpandedEventId());
            requestDto.setLineageRequestType(LineageRequestType.FLOWFILE);
            break;
    }
    // include lineage details
    dto.setId(computeLineageSubmission.getLineageIdentifier());
    dto.setSubmissionTime(computeLineageSubmission.getSubmissionTime());
    // create the results dto
    final ComputeLineageResult results = computeLineageSubmission.getResult();
    dto.setFinished(results.isFinished());
    dto.setPercentCompleted(results.getPercentComplete());
    dto.setExpiration(results.getExpiration());
    final List<LineageNode> nodes = results.getNodes();
    final List<LineageEdge> edges = results.getEdges();
    final List<ProvenanceNodeDTO> nodeDtos = new ArrayList<>();
    // node dtos are only produced once the computation has finished
    if (results.isFinished()) {
        // create the node dto's
        for (final LineageNode node : nodes) {
            switch (node.getNodeType()) {
                case FLOWFILE_NODE:
                    nodeDtos.add(createFlowFileNodeDTO(node));
                    break;
                case PROVENANCE_EVENT_NODE:
                    nodeDtos.add(createProvenanceEventNodeDTO((ProvenanceEventLineageNode) node));
                    break;
            }
        }
    }
    resultsDto.setNodes(nodeDtos);
    // include any errors
    if (results.getError() != null) {
        final Set<String> errors = new HashSet<>();
        errors.add(results.getError());
        resultsDto.setErrors(errors);
    }
    // create the link dto's
    final List<ProvenanceLinkDTO> linkDtos = new ArrayList<>();
    for (final LineageEdge edge : edges) {
        linkDtos.add(createProvenanceLinkDTO(edge));
    }
    resultsDto.setLinks(linkDtos);
    return dto;
}

/**
 * Creates a SystemDiagnosticsDTO for the specified SystemDiagnostics.
 *
 * @param sysDiagnostics diags
 * @return dto
 */
public SystemDiagnosticsDTO createSystemDiagnosticsDto(final SystemDiagnostics sysDiagnostics) {
    final SystemDiagnosticsDTO dto = new SystemDiagnosticsDTO();
    final SystemDiagnosticsSnapshotDTO snapshot = new SystemDiagnosticsSnapshotDTO();
    dto.setAggregateSnapshot(snapshot);
    snapshot.setStatsLastRefreshed(new Date(sysDiagnostics.getCreationTimestamp()));
    // processors
    snapshot.setAvailableProcessors(sysDiagnostics.getAvailableProcessors());
    snapshot.setProcessorLoadAverage(sysDiagnostics.getProcessorLoadAverage());
    // threads
    snapshot.setDaemonThreads(sysDiagnostics.getDaemonThreads());
    snapshot.setTotalThreads(sysDiagnostics.getTotalThreads());
    // heap: each metric is reported both human-readable and in raw bytes
    snapshot.setMaxHeap(FormatUtils.formatDataSize(sysDiagnostics.getMaxHeap()));
    snapshot.setMaxHeapBytes(sysDiagnostics.getMaxHeap());
    snapshot.setTotalHeap(FormatUtils.formatDataSize(sysDiagnostics.getTotalHeap()));
    snapshot.setTotalHeapBytes(sysDiagnostics.getTotalHeap());
    snapshot.setUsedHeap(FormatUtils.formatDataSize(sysDiagnostics.getUsedHeap()));
    snapshot.setUsedHeapBytes(sysDiagnostics.getUsedHeap());
    snapshot.setFreeHeap(FormatUtils.formatDataSize(sysDiagnostics.getFreeHeap()));
    snapshot.setFreeHeapBytes(sysDiagnostics.getFreeHeap());
    // -1 signals the utilization figure is unavailable
    if (sysDiagnostics.getHeapUtilization() != -1) {
        snapshot.setHeapUtilization(FormatUtils.formatUtilization(sysDiagnostics.getHeapUtilization()));
    }
    // non heap
    snapshot.setMaxNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getMaxNonHeap()));
    snapshot.setMaxNonHeapBytes(sysDiagnostics.getMaxNonHeap());
    snapshot.setTotalNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getTotalNonHeap()));
    snapshot.setTotalNonHeapBytes(sysDiagnostics.getTotalNonHeap());
    snapshot.setUsedNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getUsedNonHeap()));
    snapshot.setUsedNonHeapBytes(sysDiagnostics.getUsedNonHeap());
    snapshot.setFreeNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getFreeNonHeap()));
    snapshot.setFreeNonHeapBytes(sysDiagnostics.getFreeNonHeap());
    if (sysDiagnostics.getNonHeapUtilization() != -1) {
        snapshot.setNonHeapUtilization(FormatUtils.formatUtilization(sysDiagnostics.getNonHeapUtilization()));
    }
    // flow file disk usage
    final SystemDiagnosticsSnapshotDTO.StorageUsageDTO flowFileRepositoryStorageUsageDto = createStorageUsageDTO(null, sysDiagnostics.getFlowFileRepositoryStorageUsage());
    snapshot.setFlowFileRepositoryStorageUsage(flowFileRepositoryStorageUsageDto);
    // content disk usage
    final Set<SystemDiagnosticsSnapshotDTO.StorageUsageDTO> contentRepositoryStorageUsageDtos = new LinkedHashSet<>();
    snapshot.setContentRepositoryStorageUsage(contentRepositoryStorageUsageDtos);
    for (final Map.Entry<String, StorageUsage> entry : sysDiagnostics.getContentRepositoryStorageUsage().entrySet()) {
        contentRepositoryStorageUsageDtos.add(createStorageUsageDTO(entry.getKey(), entry.getValue()));
    }
    // provenance disk usage
    final Set<SystemDiagnosticsSnapshotDTO.StorageUsageDTO> provenanceRepositoryStorageUsageDtos = new LinkedHashSet<>();
    snapshot.setProvenanceRepositoryStorageUsage(provenanceRepositoryStorageUsageDtos);
    for (final Map.Entry<String, StorageUsage> entry : sysDiagnostics.getProvenanceRepositoryStorageUsage().entrySet()) {
        provenanceRepositoryStorageUsageDtos.add(createStorageUsageDTO(entry.getKey(), entry.getValue()));
    }
    // garbage collection
    final Set<SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO> garbageCollectionDtos = new LinkedHashSet<>();
    snapshot.setGarbageCollection(garbageCollectionDtos);
    for (final Map.Entry<String, GarbageCollection> entry : sysDiagnostics.getGarbageCollection().entrySet()) {
        garbageCollectionDtos.add(createGarbageCollectionDTO(entry.getKey(), entry.getValue()));
    }
    // version info
    final SystemDiagnosticsSnapshotDTO.VersionInfoDTO versionInfoDto = createVersionInfoDTO();
    snapshot.setVersionInfo(versionInfoDto);
    // uptime
    snapshot.setUptime(FormatUtils.formatHoursMinutesSeconds(sysDiagnostics.getUptime(), TimeUnit.MILLISECONDS));
    return dto;
}

/**
 * Creates a StorageUsageDTO from the specified StorageUsage.
 *
 * @param identifier id (may be null, e.g. for the single flowfile repository)
 * @param storageUsage usage
 * @return dto
 */
public SystemDiagnosticsSnapshotDTO.StorageUsageDTO createStorageUsageDTO(final String identifier, final StorageUsage storageUsage) {
    final SystemDiagnosticsSnapshotDTO.StorageUsageDTO dto = new SystemDiagnosticsSnapshotDTO.StorageUsageDTO();
    dto.setIdentifier(identifier);
    dto.setFreeSpace(FormatUtils.formatDataSize(storageUsage.getFreeSpace()));
    dto.setTotalSpace(FormatUtils.formatDataSize(storageUsage.getTotalSpace()));
    dto.setUsedSpace(FormatUtils.formatDataSize(storageUsage.getUsedSpace()));
    dto.setFreeSpaceBytes(storageUsage.getFreeSpace());
    dto.setTotalSpaceBytes(storageUsage.getTotalSpace());
    dto.setUsedSpaceBytes(storageUsage.getUsedSpace());
    dto.setUtilization(FormatUtils.formatUtilization(storageUsage.getDiskUtilization()));
    return dto;
}

/**
 * Creates a GarbageCollectionDTO from the specified GarbageCollection.
 *
 * @param name name
 * @param garbageCollection gc
 * @return dto
 */
public SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO createGarbageCollectionDTO(final String name, final GarbageCollection garbageCollection) {
    final SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO dto = new SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO();
    dto.setName(name);
    dto.setCollectionCount(garbageCollection.getCollectionCount());
    // collection time is reported both formatted and as raw milliseconds
    dto.setCollectionTime(FormatUtils.formatHoursMinutesSeconds(garbageCollection.getCollectionTime(), TimeUnit.MILLISECONDS));
    dto.setCollectionMillis(garbageCollection.getCollectionTime());
    return dto;
}

/**
 * Creates a VersionInfoDTO describing the running JVM, the operating system,
 * and — when the framework bundle is available — the NiFi version and build info.
 *
 * @return dto
 */
public SystemDiagnosticsSnapshotDTO.VersionInfoDTO createVersionInfoDTO() {
    final SystemDiagnosticsSnapshotDTO.VersionInfoDTO dto = new SystemDiagnosticsSnapshotDTO.VersionInfoDTO();
    dto.setJavaVendor(System.getProperty("java.vendor"));
    dto.setJavaVersion(System.getProperty("java.version"));
    dto.setOsName(System.getProperty("os.name"));
    dto.setOsVersion(System.getProperty("os.version"));
    dto.setOsArchitecture(System.getProperty("os.arch"));
    final Bundle frameworkBundle = NarClassLoadersHolder.getInstance().getFrameworkBundle();
    if (frameworkBundle != null) {
        final BundleDetails frameworkDetails = frameworkBundle.getBundleDetails();
        dto.setNiFiVersion(frameworkDetails.getCoordinate().getVersion());
        // Get build info
        dto.setBuildTag(frameworkDetails.getBuildTag());
        dto.setBuildRevision(frameworkDetails.getBuildRevision());
        dto.setBuildBranch(frameworkDetails.getBuildBranch());
        dto.setBuildTimestamp(frameworkDetails.getBuildTimestampDate());
    }
    return dto;
}

/**
 * Creates a ResourceDTO from the specified Resource.
 *
 * @param resource resource
 * @return dto
 */
public ResourceDTO createResourceDto(final Resource resource) {
    final ResourceDTO dto = new ResourceDTO();
    dto.setIdentifier(resource.getIdentifier());
    dto.setName(resource.getName());
    return dto;
}

/**
 * Creates a ProcessorDiagnosticsDTO from the given Processor and status information with some additional supporting information
 *
 * @param procNode the processor to create diagnostics for
 * @param procStatus the status of given processor
 * @param bulletinRepo the bulletin repository
 * @param flowController flowController
 * @param serviceEntityFactory function for creating a ControllerServiceEntity from a given ID
 * @return ProcessorDiagnosticsDTO for the given Processor
 */
public ProcessorDiagnosticsDTO createProcessorDiagnosticsDto(final ProcessorNode procNode, final ProcessorStatus procStatus, final BulletinRepository bulletinRepo,
        final FlowController flowController, final Function<String, ControllerServiceEntity> serviceEntityFactory) {
    final ProcessorDiagnosticsDTO procDiagnostics = new ProcessorDiagnosticsDTO();
    procDiagnostics.setClassLoaderDiagnostics(createClassLoaderDiagnosticsDto(procNode));
    // connection diagnostics for both directions of the processor's connections
    procDiagnostics.setIncomingConnections(procNode.getIncomingConnections().stream()
        .map(this::createConnectionDiagnosticsDto)
        .collect(Collectors.toSet()));
    procDiagnostics.setOutgoingConnections(procNode.getConnections().stream()
        .map(this::createConnectionDiagnosticsDto)
        .collect(Collectors.toSet()));
    procDiagnostics.setJvmDiagnostics(createJvmDiagnosticsDto(flowController));
    procDiagnostics.setProcessor(createProcessorDto(procNode));
    procDiagnostics.setProcessorStatus(createProcessorStatusDto(procStatus));
    procDiagnostics.setThreadDumps(createThreadDumpDtos(procNode));
    // diagnostics for every controller service referenced by the processor's properties
    final Set<ControllerServiceDiagnosticsDTO> referencedServiceDiagnostics = createReferencedServiceDiagnostics(procNode.getEffectivePropertyValues(),
        flowController.getControllerServiceProvider(), serviceEntityFactory);
    procDiagnostics.setReferencedControllerServices(referencedServiceDiagnostics);
    return procDiagnostics;
}

/**
 * Builds diagnostics for each controller service referenced by the given property values.
 * Properties that are not service references, have no value, or whose service cannot be
 * resolved are skipped silently.
 *
 * @param properties the component's effective property values
 * @param serviceProvider used to resolve service ids to nodes
 * @param serviceEntityFactory converts a service id to a ControllerServiceEntity
 * @return diagnostics for every resolvable referenced service
 */
private Set<ControllerServiceDiagnosticsDTO> createReferencedServiceDiagnostics(final Map<PropertyDescriptor, String> properties,
        final ControllerServiceProvider serviceProvider, final Function<String, ControllerServiceEntity> serviceEntityFactory) {
    final Set<ControllerServiceDiagnosticsDTO> referencedServiceDiagnostics = new HashSet<>();
    for (final Map.Entry<PropertyDescriptor, String> entry : properties.entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        // only properties that reference a controller service are of interest
        if (descriptor.getControllerServiceDefinition() == null) {
            continue;
        }
        final String serviceId = entry.getValue();
        if (serviceId == null) {
            continue;
        }
        final ControllerServiceNode serviceNode = serviceProvider.getControllerServiceNode(serviceId);
        if (serviceNode == null) {
            continue;
        }
        final ControllerServiceDiagnosticsDTO serviceDiagnostics = createControllerServiceDiagnosticsDto(serviceNode, serviceEntityFactory, serviceProvider);
        if (serviceDiagnostics != null) {
            referencedServiceDiagnostics.add(serviceDiagnostics);
        }
    }
    return referencedServiceDiagnostics;
}

/**
 * Creates a ControllerServiceDiagnosticsDTO from the given Controller Service with some additional supporting information
 *
 * @param serviceNode the controller service to create diagnostics for
 * @param serviceEntityFactory a function to convert a controller service id to a controller service entity
 * @param serviceProvider the controller service provider
 * @return ControllerServiceDiagnosticsDTO for the given Controller Service
 */
public ControllerServiceDiagnosticsDTO createControllerServiceDiagnosticsDto(final ControllerServiceNode serviceNode,
        final Function<String, ControllerServiceEntity> serviceEntityFactory, final ControllerServiceProvider serviceProvider) {
    final ControllerServiceDiagnosticsDTO serviceDiagnostics = new ControllerServiceDiagnosticsDTO();
    final ControllerServiceEntity serviceEntity = serviceEntityFactory.apply(serviceNode.getIdentifier());
    serviceDiagnostics.setControllerService(serviceEntity);
    serviceDiagnostics.setClassLoaderDiagnostics(createClassLoaderDiagnosticsDto(serviceNode));
    return serviceDiagnostics;
}

/**
 * Class-loader diagnostics for a controller service: prefers the service's
 * per-instance class loader, falling back to its implementation class's loader.
 */
private ClassLoaderDiagnosticsDTO createClassLoaderDiagnosticsDto(final ControllerServiceNode serviceNode) {
    ClassLoader componentClassLoader = extensionManager.getInstanceClassLoader(serviceNode.getIdentifier());
    if (componentClassLoader == null) {
        componentClassLoader = serviceNode.getControllerServiceImplementation().getClass().getClassLoader();
    }
    return createClassLoaderDiagnosticsDto(componentClassLoader);
}

/**
 * Class-loader diagnostics for a processor: prefers the processor's
 * per-instance class loader, falling back to its class's loader.
 */
private ClassLoaderDiagnosticsDTO createClassLoaderDiagnosticsDto(final ProcessorNode procNode) {
    ClassLoader componentClassLoader = extensionManager.getInstanceClassLoader(procNode.getIdentifier());
    if (componentClassLoader == null) {
        componentClassLoader = procNode.getProcessor().getClass().getClassLoader();
    }
    return createClassLoaderDiagnosticsDto(componentClassLoader);
}

/**
 * Describes a class loader (its providing bundle, when known) and, recursively,
 * its parent class-loader chain.
 */
private ClassLoaderDiagnosticsDTO createClassLoaderDiagnosticsDto(final ClassLoader classLoader) {
    final ClassLoaderDiagnosticsDTO dto = new ClassLoaderDiagnosticsDTO();
    final Bundle bundle = extensionManager.getBundle(classLoader);
    if (bundle != null) {
        dto.setBundle(createBundleDto(bundle.getBundleDetails().getCoordinate()));
    }
    final ClassLoader parentClassLoader = classLoader.getParent();
    if (parentClassLoader != null) {
        dto.setParentClassLoader(createClassLoaderDiagnosticsDto(parentClassLoader));
    }
    return dto;
}

/**
 * Diagnostics for a single connection: the connection DTO plus a snapshot of
 * its queue state.
 */
private ConnectionDiagnosticsDTO createConnectionDiagnosticsDto(final Connection connection) {
    final ConnectionDiagnosticsDTO dto = new ConnectionDiagnosticsDTO();
    dto.setConnection(createConnectionDto(connection));
    dto.setAggregateSnapshot(createConnectionDiagnosticsSnapshotDto(connection));
    return dto;
}

private ConnectionDiagnosticsSnapshotDTO createConnectionDiagnosticsSnapshotDto(final Connection connection) {
    final
    // (continuation of createConnectionDiagnosticsSnapshotDto) Snapshot of the
    // connection's queue: totals plus per-partition (local and remote) detail.
    ConnectionDiagnosticsSnapshotDTO dto = new ConnectionDiagnosticsSnapshotDTO();
    final QueueDiagnostics queueDiagnostics = connection.getFlowFileQueue().getQueueDiagnostics();
    final FlowFileQueue queue = connection.getFlowFileQueue();
    final QueueSize totalSize = queue.size();
    dto.setTotalByteCount(totalSize.getByteCount());
    dto.setTotalFlowFileCount(totalSize.getObjectCount());
    final LocalQueuePartitionDiagnostics localDiagnostics = queueDiagnostics.getLocalQueuePartitionDiagnostics();
    dto.setLocalQueuePartition(createLocalQueuePartitionDto(localDiagnostics));
    // remote partitions only exist for load-balanced queues; null means none
    final List<RemoteQueuePartitionDiagnostics> remoteDiagnostics = queueDiagnostics.getRemoteQueuePartitionDiagnostics();
    if (remoteDiagnostics != null) {
        final List<RemoteQueuePartitionDTO> remoteDiagnosticsDtos = remoteDiagnostics.stream()
            .map(this::createRemoteQueuePartitionDto)
            .collect(Collectors.toList());
        dto.setRemoteQueuePartitions(remoteDiagnosticsDtos);
    }
    return dto;
}

/**
 * Snapshot of the local queue partition: active, in-flight (unacknowledged),
 * and swapped flowfiles, plus derived totals and penalization flags.
 */
private LocalQueuePartitionDTO createLocalQueuePartitionDto(final LocalQueuePartitionDiagnostics queueDiagnostics) {
    final LocalQueuePartitionDTO dto = new LocalQueuePartitionDTO();
    final QueueSize activeSize = queueDiagnostics.getActiveQueueSize();
    dto.setActiveQueueByteCount(activeSize.getByteCount());
    dto.setActiveQueueFlowFileCount(activeSize.getObjectCount());
    final QueueSize inFlightSize = queueDiagnostics.getUnacknowledgedQueueSize();
    dto.setInFlightByteCount(inFlightSize.getByteCount());
    dto.setInFlightFlowFileCount(inFlightSize.getObjectCount());
    final QueueSize swapSize = queueDiagnostics.getSwapQueueSize();
    dto.setSwapByteCount(swapSize.getByteCount());
    dto.setSwapFlowFileCount(swapSize.getObjectCount());
    dto.setSwapFiles(queueDiagnostics.getSwapFileCount());
    // totals are the sum of the three partitions above
    dto.setTotalByteCount(activeSize.getByteCount() + inFlightSize.getByteCount() + swapSize.getByteCount());
    dto.setTotalFlowFileCount(activeSize.getObjectCount() + inFlightSize.getObjectCount() + swapSize.getObjectCount());
    dto.setAllActiveQueueFlowFilesPenalized(queueDiagnostics.isAllActiveFlowFilesPenalized());
    dto.setAnyActiveQueueFlowFilesPenalized(queueDiagnostics.isAnyActiveFlowFilePenalized());
    return dto;
}

/**
 * Snapshot of a remote (load-balanced) queue partition; same breakdown as the
 * local partition, keyed by the owning node's identifier.
 */
private RemoteQueuePartitionDTO createRemoteQueuePartitionDto(final RemoteQueuePartitionDiagnostics queueDiagnostics) {
    final RemoteQueuePartitionDTO dto = new RemoteQueuePartitionDTO();
    dto.setNodeIdentifier(queueDiagnostics.getNodeIdentifier());
    final QueueSize activeSize = queueDiagnostics.getActiveQueueSize();
    dto.setActiveQueueByteCount(activeSize.getByteCount());
    dto.setActiveQueueFlowFileCount(activeSize.getObjectCount());
    final QueueSize inFlightSize = queueDiagnostics.getUnacknowledgedQueueSize();
    dto.setInFlightByteCount(inFlightSize.getByteCount());
    dto.setInFlightFlowFileCount(inFlightSize.getObjectCount());
    final QueueSize swapSize = queueDiagnostics.getSwapQueueSize();
    dto.setSwapByteCount(swapSize.getByteCount());
    dto.setSwapFlowFileCount(swapSize.getObjectCount());
    dto.setSwapFiles(queueDiagnostics.getSwapFileCount());
    dto.setTotalByteCount(activeSize.getByteCount() + inFlightSize.getByteCount() + swapSize.getByteCount());
    dto.setTotalFlowFileCount(activeSize.getObjectCount() + inFlightSize.getObjectCount() + swapSize.getObjectCount());
    return dto;
}

/**
 * JVM-level diagnostics for the node: an aggregate snapshot plus cluster state.
 */
private JVMDiagnosticsDTO createJvmDiagnosticsDto(final FlowController flowController) {
    final JVMDiagnosticsDTO dto = new JVMDiagnosticsDTO();
    dto.setAggregateSnapshot(createJvmDiagnosticsSnapshotDto(flowController));
    dto.setClustered(flowController.isClustered());
    dto.setConnected(flowController.isConnected());
    return dto;
}

private JVMDiagnosticsSnapshotDTO createJvmDiagnosticsSnapshotDto(final FlowController flowController) {
    final JVMDiagnosticsSnapshotDTO dto = new JVMDiagnosticsSnapshotDTO();
    final JVMControllerDiagnosticsSnapshotDTO controllerDiagnosticsDto = new JVMControllerDiagnosticsSnapshotDTO();
    final JVMFlowDiagnosticsSnapshotDTO flowDiagnosticsDto = new JVMFlowDiagnosticsSnapshotDTO();
    final
JVMSystemDiagnosticsSnapshotDTO systemDiagnosticsDto = new JVMSystemDiagnosticsSnapshotDTO(); dto.setControllerDiagnostics(controllerDiagnosticsDto); dto.setFlowDiagnosticsDto(flowDiagnosticsDto); dto.setSystemDiagnosticsDto(systemDiagnosticsDto); final SystemDiagnostics systemDiagnostics = flowController.getSystemDiagnostics(); // flow-related information final Set<BundleDTO> bundlesLoaded = extensionManager.getAllBundles().stream() .map(bundle -> bundle.getBundleDetails().getCoordinate()) .sorted((a, b) -> a.getCoordinate().compareTo(b.getCoordinate())) .map(this::createBundleDto) .collect(Collectors.toCollection(LinkedHashSet::new)); flowDiagnosticsDto.setActiveEventDrivenThreads(flowController.getActiveEventDrivenThreadCount()); flowDiagnosticsDto.setActiveTimerDrivenThreads(flowController.getActiveTimerDrivenThreadCount()); flowDiagnosticsDto.setBundlesLoaded(bundlesLoaded); flowDiagnosticsDto.setTimeZone(System.getProperty("user.timezone")); flowDiagnosticsDto.setUptime(FormatUtils.formatHoursMinutesSeconds(systemDiagnostics.getUptime(), TimeUnit.MILLISECONDS)); // controller-related information controllerDiagnosticsDto.setClusterCoordinator(flowController.isClusterCoordinator()); controllerDiagnosticsDto.setPrimaryNode(flowController.isPrimary()); controllerDiagnosticsDto.setMaxEventDrivenThreads(flowController.getMaxEventDrivenThreadCount()); controllerDiagnosticsDto.setMaxTimerDrivenThreads(flowController.getMaxTimerDrivenThreadCount()); // system-related information systemDiagnosticsDto.setMaxOpenFileDescriptors(systemDiagnostics.getMaxOpenFileHandles()); systemDiagnosticsDto.setOpenFileDescriptors(systemDiagnostics.getOpenFileHandles()); systemDiagnosticsDto.setPhysicalMemoryBytes(systemDiagnostics.getTotalPhysicalMemory()); systemDiagnosticsDto.setPhysicalMemory(FormatUtils.formatDataSize(systemDiagnostics.getTotalPhysicalMemory())); final NumberFormat percentageFormat = NumberFormat.getPercentInstance(); percentageFormat.setMaximumFractionDigits(2); 
final Set<RepositoryUsageDTO> contentRepoUsage = new HashSet<>(); for (final Map.Entry<String, StorageUsage> entry : systemDiagnostics.getContentRepositoryStorageUsage().entrySet()) { final String repoName = entry.getKey(); final StorageUsage usage = entry.getValue(); final RepositoryUsageDTO usageDto = new RepositoryUsageDTO(); usageDto.setName(repoName); usageDto.setFileStoreHash(DigestUtils.sha256Hex(flowController.getContentRepoFileStoreName(repoName))); usageDto.setFreeSpace(FormatUtils.formatDataSize(usage.getFreeSpace())); usageDto.setFreeSpaceBytes(usage.getFreeSpace()); usageDto.setTotalSpace(FormatUtils.formatDataSize(usage.getTotalSpace())); usageDto.setTotalSpaceBytes(usage.getTotalSpace()); final double usedPercentage = (usage.getTotalSpace() - usage.getFreeSpace()) / (double) usage.getTotalSpace(); final String utilization = percentageFormat.format(usedPercentage); usageDto.setUtilization(utilization); contentRepoUsage.add(usageDto); } final Set<RepositoryUsageDTO> provRepoUsage = new HashSet<>(); for (final Map.Entry<String, StorageUsage> entry : systemDiagnostics.getProvenanceRepositoryStorageUsage().entrySet()) { final String repoName = entry.getKey(); final StorageUsage usage = entry.getValue(); final RepositoryUsageDTO usageDto = new RepositoryUsageDTO(); usageDto.setName(repoName); usageDto.setFileStoreHash(DigestUtils.sha256Hex(flowController.getProvenanceRepoFileStoreName(repoName))); usageDto.setFreeSpace(FormatUtils.formatDataSize(usage.getFreeSpace())); usageDto.setFreeSpaceBytes(usage.getFreeSpace()); usageDto.setTotalSpace(FormatUtils.formatDataSize(usage.getTotalSpace())); usageDto.setTotalSpaceBytes(usage.getTotalSpace()); final double usedPercentage = (usage.getTotalSpace() - usage.getFreeSpace()) / (double) usage.getTotalSpace(); final String utilization = percentageFormat.format(usedPercentage); usageDto.setUtilization(utilization); provRepoUsage.add(usageDto); } final RepositoryUsageDTO flowFileRepoUsage = new RepositoryUsageDTO(); 
for (final Map.Entry<String, StorageUsage> entry : systemDiagnostics.getProvenanceRepositoryStorageUsage().entrySet()) { final String repoName = entry.getKey(); final StorageUsage usage = entry.getValue(); flowFileRepoUsage.setName(repoName); flowFileRepoUsage.setFileStoreHash(DigestUtils.sha256Hex(flowController.getFlowRepoFileStoreName())); flowFileRepoUsage.setFreeSpace(FormatUtils.formatDataSize(usage.getFreeSpace())); flowFileRepoUsage.setFreeSpaceBytes(usage.getFreeSpace()); flowFileRepoUsage.setTotalSpace(FormatUtils.formatDataSize(usage.getTotalSpace())); flowFileRepoUsage.setTotalSpaceBytes(usage.getTotalSpace()); final double usedPercentage = (usage.getTotalSpace() - usage.getFreeSpace()) / (double) usage.getTotalSpace(); final String utilization = percentageFormat.format(usedPercentage); flowFileRepoUsage.setUtilization(utilization); } systemDiagnosticsDto.setContentRepositoryStorageUsage(contentRepoUsage); systemDiagnosticsDto.setCpuCores(systemDiagnostics.getAvailableProcessors()); systemDiagnosticsDto.setCpuLoadAverage(systemDiagnostics.getProcessorLoadAverage()); systemDiagnosticsDto.setFlowFileRepositoryStorageUsage(flowFileRepoUsage); systemDiagnosticsDto.setMaxHeapBytes(systemDiagnostics.getMaxHeap()); systemDiagnosticsDto.setMaxHeap(FormatUtils.formatDataSize(systemDiagnostics.getMaxHeap())); systemDiagnosticsDto.setProvenanceRepositoryStorageUsage(provRepoUsage); // Create the Garbage Collection History info final GarbageCollectionHistory gcHistory = flowController.getGarbageCollectionHistory(); final List<GarbageCollectionDiagnosticsDTO> gcDiagnostics = new ArrayList<>(); for (final String memoryManager : gcHistory.getMemoryManagerNames()) { final List<GarbageCollectionStatus> statuses = gcHistory.getGarbageCollectionStatuses(memoryManager); final List<GCDiagnosticsSnapshotDTO> gcSnapshots = new ArrayList<>(); for (final GarbageCollectionStatus status : statuses) { final GCDiagnosticsSnapshotDTO snapshotDto = new GCDiagnosticsSnapshotDTO(); 
                // One snapshot per sampled GC status for this memory manager.
                snapshotDto.setTimestamp(status.getTimestamp());
                snapshotDto.setCollectionCount(status.getCollectionCount());
                snapshotDto.setCollectionMillis(status.getCollectionMillis());
                gcSnapshots.add(snapshotDto);
            }

            // Newest snapshots first.
            gcSnapshots.sort(Comparator.comparing(GCDiagnosticsSnapshotDTO::getTimestamp).reversed());

            final GarbageCollectionDiagnosticsDTO gcDto = new GarbageCollectionDiagnosticsDTO();
            gcDto.setMemoryManagerName(memoryManager);
            gcDto.setSnapshots(gcSnapshots);
            gcDiagnostics.add(gcDto);
        }

        systemDiagnosticsDto.setGarbageCollectionDiagnostics(gcDiagnostics);

        return dto;
    }

    /**
     * Builds one ThreadDumpDTO for each currently active thread of the given processor,
     * based on a freshly captured set of thread details.
     *
     * @param procNode the processor whose active threads are to be dumped
     * @return thread dump DTOs, possibly empty, never null
     */
    private List<ThreadDumpDTO> createThreadDumpDtos(final ProcessorNode procNode) {
        final List<ThreadDumpDTO> threadDumps = new ArrayList<>();

        final List<ActiveThreadInfo> activeThreads = procNode.getActiveThreads(ThreadDetails.capture());
        for (final ActiveThreadInfo threadInfo : activeThreads) {
            final ThreadDumpDTO dto = new ThreadDumpDTO();
            dto.setStackTrace(threadInfo.getStackTrace());
            dto.setThreadActiveMillis(threadInfo.getActiveMillis());
            dto.setThreadName(threadInfo.getThreadName());
            dto.setTaskTerminated(threadInfo.isTerminated());
            threadDumps.add(dto);
        }

        return threadDumps;
    }

    /**
     * Creates a ProcessorConfigDTO from the specified ProcessorNode.
     *
     * @param procNode node
     * @return dto
     */
    public ProcessorConfigDTO createProcessorConfigDto(final ProcessorNode procNode) {
        if (procNode == null) {
            return null;
        }

        final ProcessorConfigDTO dto = new ProcessorConfigDTO();

        // sort a copy of the properties (by descriptor name, US collation)
        final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
            @Override
            public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
                return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
            }
        });
        sortedProperties.putAll(procNode.getRawPropertyValues());

        // get the property order from the processor: declared descriptors first (in the
        // processor's own order), then any remaining (e.g. dynamic) properties sorted by name
        final Processor processor = procNode.getProcessor();
        final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
        final List<PropertyDescriptor> descriptors = processor.getPropertyDescriptors();
        if (descriptors != null && !descriptors.isEmpty()) {
            for (final PropertyDescriptor descriptor : descriptors) {
                orderedProperties.put(descriptor, null);
            }
        }
        orderedProperties.putAll(sortedProperties);

        // build the descriptor and property dtos
        dto.setDescriptors(new LinkedHashMap<String, PropertyDescriptorDTO>());
        dto.setProperties(new LinkedHashMap<String, String>());
        for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
            final PropertyDescriptor descriptor = entry.getKey();

            // store the property descriptor
            dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, procNode.getProcessGroup().getIdentifier()));

            // determine the property value - don't include sensitive properties
            String propertyValue = entry.getValue();
            if (propertyValue != null && descriptor.isSensitive()) {
                propertyValue = SENSITIVE_VALUE_MASK;
            } else if (propertyValue == null && descriptor.getDefaultValue() != null) {
                propertyValue = descriptor.getDefaultValue();
            }

            // set the property value
            dto.getProperties().put(descriptor.getName(), propertyValue);
        }

        // scheduling / execution configuration
        dto.setSchedulingPeriod(procNode.getSchedulingPeriod());
        dto.setPenaltyDuration(procNode.getPenalizationPeriod());
        dto.setYieldDuration(procNode.getYieldPeriod());
        dto.setRunDurationMillis(procNode.getRunDuration(TimeUnit.MILLISECONDS));
        dto.setConcurrentlySchedulableTaskCount(procNode.getMaxConcurrentTasks());
        dto.setLossTolerant(procNode.isLossTolerant());
        dto.setComments(procNode.getComments());
        dto.setBulletinLevel(procNode.getBulletinLevel().name());
        dto.setSchedulingStrategy(procNode.getSchedulingStrategy().name());
        dto.setExecutionNode(procNode.getExecutionNode().name());
        dto.setAnnotationData(procNode.getAnnotationData());

        // set up the default values for concurrent tasks and scheduling period
        final Map<String, String> defaultConcurrentTasks = new HashMap<>();
        defaultConcurrentTasks.put(SchedulingStrategy.TIMER_DRIVEN.name(), String.valueOf(SchedulingStrategy.TIMER_DRIVEN.getDefaultConcurrentTasks()));
        defaultConcurrentTasks.put(SchedulingStrategy.EVENT_DRIVEN.name(), String.valueOf(SchedulingStrategy.EVENT_DRIVEN.getDefaultConcurrentTasks()));
        defaultConcurrentTasks.put(SchedulingStrategy.CRON_DRIVEN.name(), String.valueOf(SchedulingStrategy.CRON_DRIVEN.getDefaultConcurrentTasks()));
        dto.setDefaultConcurrentTasks(defaultConcurrentTasks);

        final Map<String, String> defaultSchedulingPeriod = new HashMap<>();
        defaultSchedulingPeriod.put(SchedulingStrategy.TIMER_DRIVEN.name(), SchedulingStrategy.TIMER_DRIVEN.getDefaultSchedulingPeriod());
        defaultSchedulingPeriod.put(SchedulingStrategy.CRON_DRIVEN.name(), SchedulingStrategy.CRON_DRIVEN.getDefaultSchedulingPeriod());
        dto.setDefaultSchedulingPeriod(defaultSchedulingPeriod);

        return dto;
    }

    /**
     * Creates a PropertyDescriptorDTO from the specified PropertyDescriptor.
     *
     * @param propertyDescriptor descriptor
     * @param groupId the Identifier of the Process Group that the component belongs to
     * @return dto
     */
    public PropertyDescriptorDTO createPropertyDescriptorDto(final PropertyDescriptor propertyDescriptor, final String groupId) {
        if (propertyDescriptor == null) {
            return null;
        }

        final PropertyDescriptorDTO dto = new PropertyDescriptorDTO();
        dto.setName(propertyDescriptor.getName());
        dto.setDisplayName(propertyDescriptor.getDisplayName());
        dto.setRequired(propertyDescriptor.isRequired());
        dto.setSensitive(propertyDescriptor.isSensitive());
        dto.setDynamic(propertyDescriptor.isDynamic());
        dto.setDescription(propertyDescriptor.getDescription());
        dto.setDefaultValue(propertyDescriptor.getDefaultValue());
        dto.setSupportsEl(propertyDescriptor.isExpressionLanguageSupported());

        // to support legacy/deprecated method .expressionLanguageSupported(true)
        String description = propertyDescriptor.isExpressionLanguageSupported()
                && propertyDescriptor.getExpressionLanguageScope().equals(ExpressionLanguageScope.NONE)
                ? "true (undefined scope)" : propertyDescriptor.getExpressionLanguageScope().getDescription();
        dto.setExpressionLanguageScope(description);

        // set the identifies controller service is applicable
        if (propertyDescriptor.getControllerServiceDefinition() != null) {
            final Class serviceClass = propertyDescriptor.getControllerServiceDefinition();
            final Bundle serviceBundle = extensionManager.getBundle(serviceClass.getClassLoader());

            dto.setIdentifiesControllerService(serviceClass.getName());
            dto.setIdentifiesControllerServiceBundle(createBundleDto(serviceBundle.getBundleDetails().getCoordinate()));
        }

        final Class<? extends ControllerService> serviceDefinition = propertyDescriptor.getControllerServiceDefinition();
        if (propertyDescriptor.getAllowableValues() == null) {
            if (serviceDefinition == null) {
                dto.setAllowableValues(null);
            } else {
                // property identifies a controller service: the allowable values are the
                // services of that type visible to the group; unauthorized ones show only the id
                final List<AllowableValueEntity> allowableValues = new ArrayList<>();
                final List<String> controllerServiceIdentifiers = new ArrayList<>(controllerServiceProvider.getControllerServiceIdentifiers(serviceDefinition, groupId));
                Collections.sort(controllerServiceIdentifiers, Collator.getInstance(Locale.US));
                for (final String serviceIdentifier : controllerServiceIdentifiers) {
                    final ControllerServiceNode service = controllerServiceProvider.getControllerServiceNode(serviceIdentifier);
                    final boolean isServiceAuthorized = service.isAuthorized(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser());
                    final String displayName = isServiceAuthorized ? service.getName() : serviceIdentifier;

                    final AllowableValueDTO allowableValue = new AllowableValueDTO();
                    allowableValue.setDisplayName(displayName);
                    allowableValue.setValue(serviceIdentifier);
                    allowableValues.add(entityFactory.createAllowableValueEntity(allowableValue, isServiceAuthorized));
                }
                dto.setAllowableValues(allowableValues);
            }
        } else {
            final List<AllowableValueEntity> allowableValues = new ArrayList<>();
            for (final AllowableValue allowableValue : propertyDescriptor.getAllowableValues()) {
                final AllowableValueDTO allowableValueDto = new AllowableValueDTO();
                allowableValueDto.setDisplayName(allowableValue.getDisplayName());
                allowableValueDto.setValue(allowableValue.getValue());
                allowableValueDto.setDescription(allowableValue.getDescription());
                allowableValues.add(entityFactory.createAllowableValueEntity(allowableValueDto, true));
            }
            dto.setAllowableValues(allowableValues);
        }

        // Add any dependencies
        final Set<PropertyDependency> dependencies = propertyDescriptor.getDependencies();
        final List<PropertyDependencyDTO> dependencyDtos = dependencies.stream()
            .map(this::createPropertyDependencyDto)
            .collect(Collectors.toList());
        dto.setDependencies(dependencyDtos);

        return dto;
    }

    /** Maps a single property dependency to its DTO form. */
    private PropertyDependencyDTO createPropertyDependencyDto(final PropertyDependency dependency) {
        final PropertyDependencyDTO dto = new PropertyDependencyDTO();
        dto.setPropertyName(dependency.getPropertyName());
        dto.setDependentValues(dependency.getDependentValues());
        return dto;
    }

    // Copy methods

    /** Field-by-field copy of a label DTO (style map is shallow-copied). */
    public LabelDTO copy(final LabelDTO original) {
        final LabelDTO copy = new LabelDTO();
        copy.setId(original.getId());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setLabel(original.getLabel());
        copy.setStyle(copy(original.getStyle()));
        copy.setPosition(original.getPosition());
        copy.setWidth(original.getWidth());
        copy.setHeight(original.getHeight());
        copy.setVersionedComponentId(original.getVersionedComponentId());

        return copy;
    }

    /** Field-by-field copy of a controller service DTO (collections shallow-copied). */
    public ControllerServiceDTO copy(final ControllerServiceDTO original) {
        final ControllerServiceDTO copy = new ControllerServiceDTO();
        copy.setAnnotationData(original.getAnnotationData());
        copy.setControllerServiceApis(original.getControllerServiceApis());
        copy.setComments(original.getComments());
        copy.setCustomUiUrl(original.getCustomUiUrl());
        copy.setDescriptors(copy(original.getDescriptors()));
        copy.setId(original.getId());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setName(original.getName());
        copy.setProperties(copy(original.getProperties()));
        copy.setReferencingComponents(copy(original.getReferencingComponents()));
        copy.setState(original.getState());
        copy.setType(original.getType());
        copy.setBundle(copy(original.getBundle()));
        copy.setExtensionMissing(original.getExtensionMissing());
        copy.setMultipleVersionsAvailable(original.getMultipleVersionsAvailable());
        copy.setPersistsState(original.getPersistsState());
        copy.setValidationErrors(copy(original.getValidationErrors()));
        copy.setValidationStatus(original.getValidationStatus());
        copy.setVersionedComponentId(original.getVersionedComponentId());

        return copy;
    }

    public FunnelDTO copy(final
            FunnelDTO original) {
        final FunnelDTO copy = new FunnelDTO();
        copy.setId(original.getId());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setPosition(original.getPosition());
        copy.setVersionedComponentId(original.getVersionedComponentId());

        return copy;
    }

    // null-tolerant shallow copy helpers for the collection-valued DTO fields
    private <T> List<T> copy(final List<T> original) {
        if (original == null) {
            return null;
        } else {
            return new ArrayList<>(original);
        }
    }

    private <T> List<T> copy(final Collection<T> original) {
        if (original == null) {
            return null;
        } else {
            return new ArrayList<>(original);
        }
    }

    private <T> Set<T> copy(final Set<T> original) {
        if (original == null) {
            return null;
        } else {
            // LinkedHashSet to preserve the original iteration order
            return new LinkedHashSet<>(original);
        }
    }

    private <S, T> Map<S, T> copy(final Map<S, T> original) {
        if (original == null) {
            return null;
        } else {
            // LinkedHashMap to preserve the original iteration order
            return new LinkedHashMap<>(original);
        }
    }

    /** Null-tolerant copy of a bundle DTO. */
    public BundleDTO copy(final BundleDTO original) {
        if (original == null) {
            return null;
        }

        final BundleDTO copy = new BundleDTO();
        copy.setGroup(original.getGroup());
        copy.setArtifact(original.getArtifact());
        copy.setVersion(original.getVersion());

        return copy;
    }

    /** Field-by-field copy of a processor DTO, including its config. */
    public ProcessorDTO copy(final ProcessorDTO original) {
        final ProcessorDTO copy = new ProcessorDTO();
        copy.setConfig(copy(original.getConfig()));
        copy.setPosition(original.getPosition());
        copy.setId(original.getId());
        copy.setName(original.getName());
        copy.setDescription(original.getDescription());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setRelationships(copy(original.getRelationships()));
        copy.setState(original.getState());
        copy.setStyle(copy(original.getStyle()));
        copy.setType(original.getType());
        copy.setBundle(copy(original.getBundle()));
        copy.setSupportsParallelProcessing(original.getSupportsParallelProcessing());
        copy.setSupportsEventDriven(original.getSupportsEventDriven());
        copy.setSupportsBatching(original.getSupportsBatching());
        copy.setPersistsState(original.getPersistsState());
        copy.setExecutionNodeRestricted(original.isExecutionNodeRestricted());
        copy.setExtensionMissing(original.getExtensionMissing());
        copy.setMultipleVersionsAvailable(original.getMultipleVersionsAvailable());
        copy.setValidationErrors(copy(original.getValidationErrors()));
        copy.setValidationStatus(original.getValidationStatus());
        copy.setVersionedComponentId(original.getVersionedComponentId());

        return copy;
    }

    /** Field-by-field copy of a processor config DTO. */
    private ProcessorConfigDTO copy(final ProcessorConfigDTO original) {
        final ProcessorConfigDTO copy = new ProcessorConfigDTO();
        copy.setAnnotationData(original.getAnnotationData());
        copy.setAutoTerminatedRelationships(copy(original.getAutoTerminatedRelationships()));
        copy.setComments(original.getComments());
        copy.setSchedulingStrategy(original.getSchedulingStrategy());
        copy.setExecutionNode(original.getExecutionNode());
        copy.setConcurrentlySchedulableTaskCount(original.getConcurrentlySchedulableTaskCount());
        copy.setCustomUiUrl(original.getCustomUiUrl());
        copy.setDescriptors(copy(original.getDescriptors()));
        copy.setProperties(copy(original.getProperties()));
        copy.setSchedulingPeriod(original.getSchedulingPeriod());
        copy.setPenaltyDuration(original.getPenaltyDuration());
        copy.setYieldDuration(original.getYieldDuration());
        copy.setRunDurationMillis(original.getRunDurationMillis());
        copy.setBulletinLevel(original.getBulletinLevel());
        copy.setDefaultConcurrentTasks(original.getDefaultConcurrentTasks());
        copy.setDefaultSchedulingPeriod(original.getDefaultSchedulingPeriod());
        copy.setLossTolerant(original.isLossTolerant());

        return copy;
    }

    /** Field-by-field copy of a connection DTO. */
    public ConnectionDTO copy(final ConnectionDTO original) {
        final ConnectionDTO copy = new ConnectionDTO();
        copy.setAvailableRelationships(copy(original.getAvailableRelationships()));
        copy.setDestination(original.getDestination());
        copy.setPosition(original.getPosition());
        copy.setId(original.getId());
        copy.setName(original.getName());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setSelectedRelationships(copy(original.getSelectedRelationships()));
        copy.setFlowFileExpiration(original.getFlowFileExpiration());
        copy.setBackPressureObjectThreshold(original.getBackPressureObjectThreshold());
        copy.setBackPressureDataSizeThreshold(original.getBackPressureDataSizeThreshold());
        copy.setPrioritizers(copy(original.getPrioritizers()));
        copy.setSource(original.getSource());
        copy.setzIndex(original.getzIndex());
        copy.setLabelIndex(original.getLabelIndex());
        copy.setBends(copy(original.getBends()));
        copy.setLoadBalancePartitionAttribute(original.getLoadBalancePartitionAttribute());
        copy.setLoadBalanceStrategy(original.getLoadBalanceStrategy());
        copy.setLoadBalanceCompression(original.getLoadBalanceCompression());
        copy.setLoadBalanceStatus(original.getLoadBalanceStatus());
        copy.setVersionedComponentId(original.getVersionedComponentId());

        return copy;
    }

    /** Field-by-field copy of a bulletin DTO. */
    public BulletinDTO copy(final BulletinDTO original) {
        final BulletinDTO copy = new BulletinDTO();
        copy.setId(original.getId());
        copy.setTimestamp(original.getTimestamp());
        copy.setGroupId(original.getGroupId());
        copy.setSourceId(original.getSourceId());
        copy.setSourceName(original.getSourceName());
        copy.setCategory(original.getCategory());
        copy.setLevel(original.getLevel());
        copy.setMessage(original.getMessage());
        copy.setNodeAddress(original.getNodeAddress());

        return copy;
    }

    /** Field-by-field copy of a port DTO (access control lists shallow-copied). */
    public PortDTO copy(final PortDTO original) {
        final PortDTO copy = new PortDTO();
        copy.setPosition(original.getPosition());
        copy.setId(original.getId());
        copy.setName(original.getName());
        copy.setComments(original.getComments());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setState(original.getState());
        copy.setType(original.getType());
        copy.setTransmitting(original.isTransmitting());
        copy.setConcurrentlySchedulableTaskCount(original.getConcurrentlySchedulableTaskCount());
        copy.setUserAccessControl(copy(original.getUserAccessControl()));
        copy.setGroupAccessControl(copy(original.getGroupAccessControl()));
        copy.setValidationErrors(copy(original.getValidationErrors()));
        copy.setVersionedComponentId(original.getVersionedComponentId());
        copy.setAllowRemoteAccess(original.getAllowRemoteAccess());

        return copy;
    }

    /** Field-by-field copy of a remote process group port DTO, including batch settings. */
    public RemoteProcessGroupPortDTO copy(final RemoteProcessGroupPortDTO original) {
        final RemoteProcessGroupPortDTO copy = new RemoteProcessGroupPortDTO();
        copy.setId(original.getId());
        copy.setTargetId(original.getTargetId());
        copy.setGroupId(original.getGroupId());
        copy.setName(original.getName());
        copy.setComments(original.getComments());
        copy.setConnected(original.isConnected());
        copy.setTargetRunning(original.isTargetRunning());
        copy.setTransmitting(original.isTransmitting());
        copy.setConcurrentlySchedulableTaskCount(original.getConcurrentlySchedulableTaskCount());
        copy.setUseCompression(original.getUseCompression());
        copy.setExists(original.getExists());
        copy.setVersionedComponentId(original.getVersionedComponentId());

        final BatchSettingsDTO batchOrg = original.getBatchSettings();
        if (batchOrg != null) {
            final BatchSettingsDTO batchCopy = new BatchSettingsDTO();
            batchCopy.setCount(batchOrg.getCount());
            batchCopy.setSize(batchOrg.getSize());
            batchCopy.setDuration(batchOrg.getDuration());
            copy.setBatchSettings(batchCopy);
        }

        return copy;
    }

    /**
     * Copies a process group DTO; when {@code deep} is true the contained flow snippet
     * is recursively copied as well.
     */
    public ProcessGroupDTO copy(final ProcessGroupDTO original, final boolean deep) {
        final ProcessGroupDTO copy = new ProcessGroupDTO();
        copy.setComments(original.getComments());
        copy.setContents(copy(original.getContents(), deep));
        copy.setPosition(original.getPosition());
        copy.setId(original.getId());
        copy.setLocalInputPortCount(original.getLocalInputPortCount());
        copy.setPublicInputPortCount(original.getPublicInputPortCount());
        copy.setInvalidCount(original.getInvalidCount());
        copy.setName(original.getName());
        copy.setVersionControlInformation(copy(original.getVersionControlInformation()));
        copy.setParameterContext(copy(original.getParameterContext()));
        copy.setLocalOutputPortCount(original.getLocalOutputPortCount());
        copy.setPublicOutputPortCount(original.getPublicOutputPortCount());
        copy.setOutputPortCount(original.getOutputPortCount());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setVersionedComponentId(original.getVersionedComponentId());
        copy.setFlowfileConcurrency(original.getFlowfileConcurrency());
        copy.setFlowfileOutboundPolicy(original.getFlowfileOutboundPolicy());
        copy.setRunningCount(original.getRunningCount());
        copy.setStoppedCount(original.getStoppedCount());
        copy.setDisabledCount(original.getDisabledCount());
        copy.setActiveRemotePortCount(original.getActiveRemotePortCount());
        copy.setInactiveRemotePortCount(original.getInactiveRemotePortCount());
        copy.setUpToDateCount(original.getUpToDateCount());
        copy.setLocallyModifiedCount(original.getLocallyModifiedCount());
        copy.setStaleCount(original.getStaleCount());
        copy.setLocallyModifiedAndStaleCount(original.getLocallyModifiedAndStaleCount());
        copy.setSyncFailureCount(original.getSyncFailureCount());

        if (original.getVariables() != null) {
            copy.setVariables(new HashMap<>(original.getVariables()));
        }

        return copy;
    }

    /** Null-tolerant copy of a parameter context reference entity and its component. */
    public ParameterContextReferenceEntity copy(final ParameterContextReferenceEntity original) {
        if (original == null) {
            return null;
        }

        final ParameterContextReferenceEntity copy = new ParameterContextReferenceEntity();
        copy.setId(original.getId());
        copy.setPermissions(copy(original.getPermissions()));
        if (original.getComponent() != null) {
            final ParameterContextReferenceDTO dtoOriginal = original.getComponent();

            final ParameterContextReferenceDTO dtoCopy = new ParameterContextReferenceDTO();
            dtoCopy.setId(dtoOriginal.getId());
            dtoCopy.setName(dtoOriginal.getName());
            copy.setComponent(dtoCopy);
        }

        return copy;
    }

    /** Null-tolerant copy of a permissions DTO. */
    public PermissionsDTO copy(final PermissionsDTO original) {
        if (original == null) {
            return null;
        }

        final PermissionsDTO copy = new PermissionsDTO();
        copy.setCanRead(original.getCanRead());
        copy.setCanWrite(original.getCanWrite());

        return copy;
    }

    /** Null-tolerant copy of version control information. */
    public VersionControlInformationDTO copy(final VersionControlInformationDTO original) {
        if (original == null) {
            return
                    null;
        }

        final VersionControlInformationDTO copy = new VersionControlInformationDTO();
        copy.setRegistryId(original.getRegistryId());
        copy.setRegistryName(original.getRegistryName());
        copy.setBucketId(original.getBucketId());
        copy.setBucketName(original.getBucketName());
        copy.setFlowId(original.getFlowId());
        copy.setFlowName(original.getFlowName());
        copy.setFlowDescription(original.getFlowDescription());
        copy.setVersion(original.getVersion());
        copy.setState(original.getState());
        copy.setStateExplanation(original.getStateExplanation());

        return copy;
    }

    /** Field-by-field copy of a remote process group DTO, deep-copying its port contents. */
    public RemoteProcessGroupDTO copy(final RemoteProcessGroupDTO original) {
        final RemoteProcessGroupContentsDTO originalContents = original.getContents();

        final RemoteProcessGroupContentsDTO copyContents = new RemoteProcessGroupContentsDTO();
        if (originalContents.getInputPorts() != null) {
            final Set<RemoteProcessGroupPortDTO> inputPorts = new HashSet<>();
            for (final RemoteProcessGroupPortDTO port : originalContents.getInputPorts()) {
                inputPorts.add(copy(port));
            }
            copyContents.setInputPorts(inputPorts);
        }
        if (originalContents.getOutputPorts() != null) {
            final Set<RemoteProcessGroupPortDTO> outputPorts = new HashSet<>();
            for (final RemoteProcessGroupPortDTO port : originalContents.getOutputPorts()) {
                outputPorts.add(copy(port));
            }
            copyContents.setOutputPorts(outputPorts);
        }

        final RemoteProcessGroupDTO copy = new RemoteProcessGroupDTO();
        copy.setComments(original.getComments());
        copy.setPosition(original.getPosition());
        copy.setId(original.getId());
        copy.setCommunicationsTimeout(original.getCommunicationsTimeout());
        copy.setYieldDuration(original.getYieldDuration());
        copy.setName(original.getName());
        copy.setInputPortCount(original.getInputPortCount());
        copy.setOutputPortCount(original.getOutputPortCount());
        copy.setActiveRemoteInputPortCount(original.getActiveRemoteInputPortCount());
        copy.setInactiveRemoteInputPortCount(original.getInactiveRemoteInputPortCount());
        copy.setActiveRemoteOutputPortCount(original.getActiveRemoteOutputPortCount());
        copy.setInactiveRemoteOutputPortCount(original.getInactiveRemoteOutputPortCount());
        copy.setParentGroupId(original.getParentGroupId());
        copy.setTargetUris(original.getTargetUris());
        copy.setTransportProtocol(original.getTransportProtocol());
        copy.setProxyHost(original.getProxyHost());
        copy.setProxyPort(original.getProxyPort());
        copy.setProxyUser(original.getProxyUser());
        copy.setProxyPassword(original.getProxyPassword());
        copy.setLocalNetworkInterface(original.getLocalNetworkInterface());
        copy.setVersionedComponentId(original.getVersionedComponentId());
        copy.setContents(copyContents);

        return copy;
    }

    /** Builds a connectable DTO for a local input/output port of the given type. */
    public ConnectableDTO createConnectableDto(final PortDTO port, final ConnectableType type) {
        final ConnectableDTO connectable = new ConnectableDTO();
        connectable.setGroupId(port.getParentGroupId());
        connectable.setId(port.getId());
        connectable.setName(port.getName());
        connectable.setType(type.name());
        connectable.setVersionedComponentId(port.getVersionedComponentId());
        return connectable;
    }

    /** Builds a connectable DTO for a processor. */
    public ConnectableDTO createConnectableDto(final ProcessorDTO processor) {
        final ConnectableDTO connectable = new ConnectableDTO();
        connectable.setGroupId(processor.getParentGroupId());
        connectable.setId(processor.getId());
        connectable.setName(processor.getName());
        connectable.setType(ConnectableType.PROCESSOR.name());
        connectable.setVersionedComponentId(processor.getVersionedComponentId());
        return connectable;
    }

    /** Builds a connectable DTO for a funnel (funnels have no name). */
    public ConnectableDTO createConnectableDto(final FunnelDTO funnel) {
        final ConnectableDTO connectable = new ConnectableDTO();
        connectable.setGroupId(funnel.getParentGroupId());
        connectable.setId(funnel.getId());
        connectable.setType(ConnectableType.FUNNEL.name());
        connectable.setVersionedComponentId(funnel.getVersionedComponentId());
        return connectable;
    }

    /** Builds a connectable DTO for a remote group port of the given type. */
    public ConnectableDTO createConnectableDto(final RemoteProcessGroupPortDTO remoteGroupPort, final ConnectableType type) {
        final ConnectableDTO
connectable = new ConnectableDTO(); connectable.setGroupId(remoteGroupPort.getGroupId()); connectable.setId(remoteGroupPort.getId()); connectable.setName(remoteGroupPort.getName()); connectable.setType(type.name()); connectable.setVersionedComponentId(connectable.getVersionedComponentId()); return connectable; } /** * * @param original orig * @param deep if <code>true</code>, all Connections, ProcessGroups, Ports, Processors, etc. will be copied. If <code>false</code>, the copy will have links to the same objects referenced by * <code>original</code>. * * @return dto */ private FlowSnippetDTO copy(final FlowSnippetDTO original, final boolean deep) { final FlowSnippetDTO copy = new FlowSnippetDTO(); final Set<ConnectionDTO> connections = new LinkedHashSet<>(); final Set<ProcessGroupDTO> groups = new LinkedHashSet<>(); final Set<PortDTO> inputPorts = new LinkedHashSet<>(); final Set<PortDTO> outputPorts = new LinkedHashSet<>(); final Set<LabelDTO> labels = new LinkedHashSet<>(); final Set<ProcessorDTO> processors = new LinkedHashSet<>(); final Set<RemoteProcessGroupDTO> remoteProcessGroups = new LinkedHashSet<>(); final Set<FunnelDTO> funnels = new LinkedHashSet<>(); final Set<ControllerServiceDTO> controllerServices = new LinkedHashSet<>(); if (deep) { for (final ProcessGroupDTO group : original.getProcessGroups()) { groups.add(copy(group, deep)); } for (final PortDTO port : original.getInputPorts()) { inputPorts.add(copy(port)); } for (final PortDTO port : original.getOutputPorts()) { outputPorts.add(copy(port)); } for (final LabelDTO label : original.getLabels()) { labels.add(copy(label)); } for (final ProcessorDTO processor : original.getProcessors()) { processors.add(copy(processor)); } for (final RemoteProcessGroupDTO remoteGroup : original.getRemoteProcessGroups()) { remoteProcessGroups.add(copy(remoteGroup)); } for (final FunnelDTO funnel : original.getFunnels()) { funnels.add(copy(funnel)); } for (final ConnectionDTO connection : original.getConnections()) { 
                connections.add(copy(connection));
            }
            for (final ControllerServiceDTO controllerService : original.getControllerServices()) {
                controllerServices.add(copy(controllerService));
            }
        } else {
            // shallow copy: reference the same component DTOs, null-tolerant
            if (original.getConnections() != null) {
                connections.addAll(copy(original.getConnections()));
            }
            if (original.getProcessGroups() != null) {
                groups.addAll(copy(original.getProcessGroups()));
            }
            if (original.getInputPorts() != null) {
                inputPorts.addAll(copy(original.getInputPorts()));
            }
            if (original.getOutputPorts() != null) {
                outputPorts.addAll(copy(original.getOutputPorts()));
            }
            if (original.getLabels() != null) {
                labels.addAll(copy(original.getLabels()));
            }
            if (original.getProcessors() != null) {
                processors.addAll(copy(original.getProcessors()));
            }
            if (original.getRemoteProcessGroups() != null) {
                remoteProcessGroups.addAll(copy(original.getRemoteProcessGroups()));
            }
            if (original.getFunnels() != null) {
                funnels.addAll(copy(original.getFunnels()));
            }
            if (original.getControllerServices() != null) {
                controllerServices.addAll(copy(original.getControllerServices()));
            }
        }

        copy.setConnections(connections);
        copy.setProcessGroups(groups);
        copy.setInputPorts(inputPorts);
        copy.setLabels(labels);
        copy.setOutputPorts(outputPorts);
        copy.setProcessors(processors);
        copy.setRemoteProcessGroups(remoteProcessGroups);
        copy.setFunnels(funnels);
        copy.setControllerServices(controllerServices);

        return copy;
    }

    /**
     * Factory method for creating a new RevisionDTO based on this controller.
     *
     * @param lastMod mod
     * @return dto
     */
    public RevisionDTO createRevisionDTO(final FlowModification lastMod) {
        final Revision revision = lastMod.getRevision();

        // create the dto
        final RevisionDTO revisionDTO = new RevisionDTO();
        revisionDTO.setVersion(revision.getVersion());
        revisionDTO.setClientId(revision.getClientId());
        revisionDTO.setLastModifier(lastMod.getLastModifier());

        return revisionDTO;
    }

    /** Creates a RevisionDTO from a bare Revision (no last-modifier information). */
    public RevisionDTO createRevisionDTO(final Revision revision) {
        final RevisionDTO dto = new RevisionDTO();
        dto.setVersion(revision.getVersion());
        dto.setClientId(revision.getClientId());
        return dto;
    }

    /**
     * Builds a NodeDTO describing a cluster node: identity, connection status, roles,
     * heartbeat-derived metrics (connected nodes only) and its recent events, newest first.
     */
    public NodeDTO createNodeDTO(final NodeIdentifier nodeId, final NodeConnectionStatus status, final NodeHeartbeat nodeHeartbeat, final List<NodeEvent> events, final Set<String> roles) {
        final NodeDTO nodeDto = new NodeDTO();

        // populate node dto
        nodeDto.setNodeId(nodeId.getId());
        nodeDto.setAddress(nodeId.getApiAddress());
        nodeDto.setApiPort(nodeId.getApiPort());
        nodeDto.setStatus(status.getState().name());
        nodeDto.setRoles(roles);
        if (status.getConnectionRequestTime() != null) {
            final Date connectionRequested = new Date(status.getConnectionRequestTime());
            nodeDto.setConnectionRequested(connectionRequested);
        }

        // only connected nodes have heartbeats
        if (nodeHeartbeat != null) {
            final Date heartbeat = new Date(nodeHeartbeat.getTimestamp());
            nodeDto.setHeartbeat(heartbeat);
            nodeDto.setNodeStartTime(new Date(nodeHeartbeat.getSystemStartTime()));
            nodeDto.setActiveThreadCount(nodeHeartbeat.getActiveThreadCount());
            nodeDto.setQueued(FormatUtils.formatCount(nodeHeartbeat.getFlowFileCount()) + " / " + FormatUtils.formatDataSize(nodeHeartbeat.getFlowFileBytes()));
        }

        // populate node events, sorted newest first
        final List<NodeEvent> nodeEvents = new ArrayList<>(events);
        Collections.sort(nodeEvents, new Comparator<NodeEvent>() {
            @Override
            public int compare(final NodeEvent event1, final NodeEvent event2) {
                return new Date(event2.getTimestamp()).compareTo(new Date(event1.getTimestamp()));
            }
        });

        // create the node event dtos
        final List<NodeEventDTO> nodeEventDtos = new ArrayList<>();
        for (final NodeEvent event : nodeEvents) {
            // create node event dto
            final NodeEventDTO nodeEventDto = new NodeEventDTO();
            nodeEventDtos.add(nodeEventDto);

            // populate node event dto
            nodeEventDto.setMessage(event.getMessage());
            nodeEventDto.setCategory(event.getSeverity().name());
            nodeEventDto.setTimestamp(new Date(event.getTimestamp()));
        }
        nodeDto.setEvents(nodeEventDtos);

        return nodeDto;
    }

    /** Maps a flow registry to its DTO form. */
    public RegistryDTO createRegistryDto(FlowRegistry registry) {
        final RegistryDTO dto = new RegistryDTO();
        dto.setDescription(registry.getDescription());
        dto.setId(registry.getIdentifier());
        dto.setName(registry.getName());
        dto.setUri(registry.getURL());
        return dto;
    }

    /* setters */
    public void setControllerServiceProvider(final ControllerServiceProvider controllerServiceProvider) {
        this.controllerServiceProvider = controllerServiceProvider;
    }

    public void setAuthorizer(final Authorizer authorizer) {
        this.authorizer = authorizer;
    }

    public void setEntityFactory(final EntityFactory entityFactory) {
        this.entityFactory = entityFactory;
    }

    public void setBulletinRepository(BulletinRepository bulletinRepository) {
        this.bulletinRepository = bulletinRepository;
    }

    public void setExtensionManager(ExtensionManager extensionManager) {
        this.extensionManager = extensionManager;
    }
}
/*
 * Copyright 2008-present MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Original Work: Apache License, Version 2.0, Copyright 2017 Hans-Peter Grahsl.
 */
package com.mongodb.kafka.connect.sink.processor.field.projection;

import static com.mongodb.kafka.connect.sink.MongoSinkTopicConfig.KEY_PROJECTION_LIST_CONFIG;
import static com.mongodb.kafka.connect.sink.MongoSinkTopicConfig.KEY_PROJECTION_TYPE_CONFIG;
import static com.mongodb.kafka.connect.sink.MongoSinkTopicConfig.VALUE_PROJECTION_LIST_CONFIG;
import static com.mongodb.kafka.connect.sink.MongoSinkTopicConfig.VALUE_PROJECTION_TYPE_CONFIG;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.kafka.common.config.AbstractConfig;

import org.bson.BsonDocument;

import com.mongodb.kafka.connect.sink.MongoSinkTopicConfig;
import com.mongodb.kafka.connect.sink.processor.PostProcessor;

/**
 * Base class for post processors that project fields of the sink document, driven by the
 * key/value projection type (blacklist/whitelist) and field-list configuration.
 */
public abstract class FieldProjector extends PostProcessor {
    // Splits a comma separated field list, tolerating whitespace around the commas.
    private static final String FIELD_LIST_SPLIT_EXPR = "\\s*,\\s*";
    static final String SINGLE_WILDCARD = "*";
    static final String DOUBLE_WILDCARD = "**";
    static final String SUB_FIELD_DOT_SEPARATOR = ".";

    // The (possibly wildcarded) field paths this projector applies to.
    private final Set<String> fields;

    public FieldProjector(final MongoSinkTopicConfig config, final Set<String> fields) {
        super(config);
        this.fields = fields;
    }

    public Set<String> getFields() {
        return fields;
    }

    /** Applies the projection for {@code field} to {@code doc} in place. */
    protected abstract void doProjection(String field, BsonDocument doc);

    /** Builds the projection field set from the key projection type/list configuration. */
    protected static Set<String> getKeyFields(final AbstractConfig config) {
        return buildProjectionList(
                config.getString(KEY_PROJECTION_TYPE_CONFIG),
                config.getString(KEY_PROJECTION_LIST_CONFIG));
    }

    /** Builds the projection field set from the value projection type/list configuration. */
    protected static Set<String> getValueFields(final AbstractConfig config) {
        return buildProjectionList(
                config.getString(VALUE_PROJECTION_TYPE_CONFIG),
                config.getString(VALUE_PROJECTION_LIST_CONFIG));
    }

    // Expands the configured field list according to the projection type; an unrecognized
    // projection type yields an empty set (i.e. no projection).
    private static Set<String> buildProjectionList(
            final String projectionType, final String fieldList) {
        if (projectionType.equalsIgnoreCase(
                MongoSinkTopicConfig.FieldProjectionType.BLACKLIST.name())) {
            return new HashSet<>(toList(fieldList));
        } else if (projectionType.equalsIgnoreCase(
                MongoSinkTopicConfig.FieldProjectionType.WHITELIST.name())) {
            // NOTE: for sub document notation all left prefix bound paths are created
            // which allows for easy recursion mechanism to whitelist nested doc fields
            // (e.g. "a.b.c" also adds "a.b" and "a")
            HashSet<String> whitelistExpanded = new HashSet<>();
            List<String> fields = toList(fieldList);

            for (String f : fields) {
                String entry = f;
                whitelistExpanded.add(entry);
                while (entry.contains(".")) {
                    entry = entry.substring(0, entry.lastIndexOf("."));
                    if (!entry.isEmpty()) {
                        whitelistExpanded.add(entry);
                    }
                }
            }
            return whitelistExpanded;
        } else {
            return new HashSet<>();
        }
    }

    // Splits the comma separated list, dropping empty entries (so "" yields an empty list).
    private static List<String> toList(final String value) {
        return Arrays.stream(value.trim().split(FIELD_LIST_SPLIT_EXPR))
                .filter(s -> !s.isEmpty())
                .collect(Collectors.toList());
    }
}
package se.vidstige.jadb;

import se.vidstige.jadb.managers.Bash;

import java.io.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Represents a single Android device known to the ADB server and provides
 * shell execution and file push/pull operations over the ADB protocol.
 */
public class JadbDevice {
    /** Connection state as reported by the ADB host service. */
    public enum State {
        Unknown,
        Offline,
        Device,
        Recovery,
        BootLoader
    };

    // null serial means "any device" — see createAny() and getTransport().
    private final String serial;
    private final ITransportFactory transportFactory;

    // NOTE(review): the 'type' argument is accepted but unused; kept to preserve the
    // package-visible constructor signature for existing callers.
    JadbDevice(String serial, String type, ITransportFactory tFactory) {
        this.serial = serial;
        this.transportFactory = tFactory;
    }

    /** Creates a device that targets whatever single device the ADB server knows about. */
    static JadbDevice createAny(JadbConnection connection) {
        return new JadbDevice(connection);
    }

    private JadbDevice(ITransportFactory tFactory) {
        serial = null;
        this.transportFactory = tFactory;
    }

    /** Maps the textual state returned by "get-state" onto a {@link State} constant. */
    private State convertState(String type) {
        switch (type) {
            case "device":     return State.Device;
            case "offline":    return State.Offline;
            case "bootloader": return State.BootLoader;
            case "recovery":   return State.Recovery;
            default:           return State.Unknown;
        }
    }

    /**
     * Opens a transport to the ADB server and switches it to this device.
     * The caller is responsible for closing the returned transport.
     */
    private Transport getTransport() throws IOException, JadbException {
        Transport transport = transportFactory.createTransport();
        if (serial == null) {
            transport.send("host:transport-any");
            transport.verifyResponse();
        } else {
            transport.send("host:transport:" + serial);
            transport.verifyResponse();
        }
        return transport;
    }

    public String getSerial() {
        return serial;
    }

    /** Queries the ADB server for this device's current connection state. */
    public State getState() throws IOException, JadbException {
        Transport transport = transportFactory.createTransport();
        if (serial == null) {
            transport.send("host:get-state");
            transport.verifyResponse();
        } else {
            transport.send("host-serial:" + serial + ":get-state");
            transport.verifyResponse();
        }
        State state = convertState(transport.readString());
        transport.close();
        return state;
    }

    /** <p>Execute a shell command.</p>
     *
     * <p>For Lollipop and later see: {@link #execute(String, String...)}</p>
     *
     * @param command main command to run. E.g. "ls"
     * @param args arguments to the command.
     * @return combined stdout/stderr stream; the caller owns it (and the underlying transport).
     * @throws IOException
     * @throws JadbException
     */
    public InputStream executeShell(String command, String... args) throws IOException, JadbException {
        Transport transport = getTransport();
        StringBuilder shellLine = buildCmdLine(command, args);
        send(transport, "shell:" + shellLine.toString());
        return new AdbFilterInputStream(new BufferedInputStream(transport.getInputStream()));
    }

    /**
     * Executes a shell command, copying its combined output to {@code output} (if non-null).
     *
     * @deprecated Use InputStream executeShell(String command, String... args) method instead. Together with
     * Stream.copy(in, out), it is possible to achieve the same effect.
     */
    @Deprecated
    public void executeShell(OutputStream output, String command, String... args) throws IOException, JadbException {
        Transport transport = getTransport();
        StringBuilder shellLine = buildCmdLine(command, args);
        send(transport, "shell:" + shellLine.toString());
        if (output != null) {
            AdbFilterOutputStream out = new AdbFilterOutputStream(output);
            try {
                transport.readResponseTo(out);
            } finally {
                out.close();
            }
        }
    }

    /** <p>Execute a command with raw binary output.</p>
     *
     * <p>Support for this command was added in Lollipop (Android 5.0), and is the recommended way to transmit binary
     * data with that version or later. For earlier versions of Android, use
     * {@link #executeShell(String, String...)}.</p>
     *
     * @param command main command to run, e.g. "screencap"
     * @param args arguments to the command, e.g. "-p".
     * @return combined stdout/stderr stream.
     * @throws IOException
     * @throws JadbException
     */
    public InputStream execute(String command, String... args) throws IOException, JadbException {
        Transport transport = getTransport();
        StringBuilder shellLine = buildCmdLine(command, args);
        send(transport, "exec:" + shellLine.toString());
        return new BufferedInputStream(transport.getInputStream());
    }

    /**
     * Builds a command line string from the command and its arguments.
     *
     * @param command the command.
     * @param args the list of arguments, each quoted for a POSIX shell.
     * @return the command line.
     */
    private StringBuilder buildCmdLine(String command, String... args) {
        StringBuilder shellLine = new StringBuilder(command);
        for (String arg : args) {
            shellLine.append(" ");
            shellLine.append(Bash.quote(arg));
        }
        return shellLine;
    }

    /** Lists the entries of a remote directory via the sync "LIST" request. */
    public List<RemoteFile> list(String remotePath) throws IOException, JadbException {
        Transport transport = getTransport();
        SyncTransport sync = transport.startSync();
        sync.send("LIST", remotePath);

        List<RemoteFile> result = new ArrayList<RemoteFile>();
        for (RemoteFileRecord dent = sync.readDirectoryEntry(); dent != RemoteFileRecord.DONE; dent = sync.readDirectoryEntry()) {
            result.add(dent);
        }
        return result;
    }

    /** Permission bits used for pushed files (rw-rw-r--). */
    private int getMode(File file) {
        //noinspection OctalInteger
        return 0664;
    }

    /** Pushes the contents of {@code source} to {@code remote} using the sync "SEND" request. */
    public void push(InputStream source, long lastModified, int mode, RemoteFile remote) throws IOException, JadbException {
        Transport transport = getTransport();
        SyncTransport sync = transport.startSync();
        sync.send("SEND", remote.getPath() + "," + Integer.toString(mode));

        sync.sendStream(source);

        sync.sendStatus("DONE", (int) lastModified);
        sync.verifyStatus();
    }

    /** Pushes a local file to the device. */
    public void push(File local, RemoteFile remote) throws IOException, JadbException {
        // Fixed: try-with-resources closes the stream even when push() throws;
        // the original only closed it on the success path (resource leak).
        try (FileInputStream fileStream = new FileInputStream(local)) {
            push(fileStream, local.lastModified(), getMode(local), remote);
        }
    }

    /** Pulls {@code remote} from the device, writing its contents to {@code destination}. */
    public void pull(RemoteFile remote, OutputStream destination) throws IOException, JadbException {
        Transport transport = getTransport();
        try {
            SyncTransport sync = transport.startSync();
            sync.send("RECV", remote.getPath());

            sync.readChunksTo(destination);
        } catch (Exception e) {
            // Close the transport on failure before propagating.
            if (transport != null) transport.close();
            throw e;
        }
    }

    /** Pulls a remote file into a local file. */
    public void pull(RemoteFile remote, File local) throws IOException, JadbException {
        // try-with-resources is equivalent to the original null-checked finally block.
        try (FileOutputStream fileStream = new FileOutputStream(local)) {
            pull(remote, fileStream);
        }
    }

    /** Sends a command on an already-selected transport and verifies the OKAY response. */
    private void send(Transport transport, String command) throws IOException, JadbException {
        transport.send(command);
        transport.verifyResponse();
    }

    @Override
    public String toString() {
        return "Android Device with serial " + serial;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((serial == null) ? 0 : serial.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        JadbDevice other = (JadbDevice) obj;
        if (serial == null) {
            if (other.serial != null)
                return false;
        } else if (!serial.equals(other.serial))
            return false;
        return true;
    }
}
/*
 * This file was automatically generated by EvoSuite
 * Sat Nov 28 18:27:42 GMT 2020
 *
 * NOTE(review): machine-generated regression test — do not hand-edit test logic;
 * regenerate with EvoSuite instead.
 */

package Newzgrabber;

import org.junit.Test;
import static org.junit.Assert.*;
import static org.evosuite.runtime.EvoAssertions.*;
import Newzgrabber.Base64Decoder;
import Newzgrabber.BufferedCustomInputStream;
import Newzgrabber.Newzgrabber;
import java.io.ByteArrayInputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import org.evosuite.runtime.EvoRunner;
import org.evosuite.runtime.EvoRunnerParameters;
import org.junit.runner.RunWith;

@RunWith(EvoRunner.class) @EvoRunnerParameters(mockJVMNonDeterminism = true, useVFS = true, useVNET = true, resetStaticState = true, separateClassLoader = true, useJEE = true)
public class Base64Decoder_ESTest extends Base64Decoder_ESTest_scaffolding {

  /**
   * Test case number: 0
   * Coverage entropy=0.6365141682948128
   *
   * Verifies that decoding with a null input stream raises NullPointerException.
   */
  @Test(timeout = 4000)
  public void test0()  throws Throwable  {
      PipedInputStream pipedInputStream0 = new PipedInputStream();
      PipedOutputStream pipedOutputStream0 = new PipedOutputStream(pipedInputStream0);
      Base64Decoder base64Decoder0 = new Base64Decoder((BufferedCustomInputStream) null, pipedOutputStream0);
      // Undeclared exception!
      try {
        base64Decoder0.decodeStream();
        fail("Expecting exception: NullPointerException");

      } catch(NullPointerException e) {
         //
         // no message in exception (getMessage() returned null)
         //
         verifyException("Newzgrabber.Base64Decoder", e);
      }
  }

  /**
   * Test case number: 1
   * Coverage entropy=0.6365141682948128
   *
   * Decodes from an (effectively empty) byte stream in verbose mode and checks
   * that the progress flag stays unset.
   */
  @Test(timeout = 4000)
  public void test1()  throws Throwable  {
      byte[] byteArray0 = new byte[8];
      // Negative offset/length — ByteArrayInputStream tolerates these until read.
      ByteArrayInputStream byteArrayInputStream0 = new ByteArrayInputStream(byteArray0, (byte) (-56), (-5081));
      BufferedCustomInputStream bufferedCustomInputStream0 = new BufferedCustomInputStream(byteArrayInputStream0, false);
      Base64Decoder base64Decoder0 = new Base64Decoder(bufferedCustomInputStream0, (OutputStream) null);
      Newzgrabber.verbose = true;
      base64Decoder0.decodeStream();
      assertFalse(base64Decoder0.ProgressSet);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jclouds.softlayer.parse;

import java.util.Set;

import javax.ws.rs.Consumes;
import javax.ws.rs.core.MediaType;

import org.jclouds.date.internal.SimpleDateFormatDateService;
import org.jclouds.softlayer.domain.VirtualGuest;
import org.jclouds.softlayer.internal.BaseSoftLayerParseTest;
import org.testng.annotations.Test;

import com.google.common.collect.ImmutableSet;

/**
 * Unit test verifying that the JSON fixture {@code /account_list.json} parses into the
 * expected set of {@link VirtualGuest} domain objects.
 */
@Test(groups = "unit")
public class VirtualGuestsParseTest extends BaseSoftLayerParseTest<Set<VirtualGuest>> {

   // JSON fixture consumed by the base parse test.
   @Override
   public String resource() {
      return "/account_list.json";
   }

   // Expected parse result; every field value below mirrors the fixture contents.
   @Override
   @Consumes(MediaType.APPLICATION_JSON)
   public Set<VirtualGuest> expected() {
      return ImmutableSet.of(
            VirtualGuest.builder()
                  .accountId(278184)
                  .createDate(new SimpleDateFormatDateService().iso8601DateParse("2013-07-26T14:08:21.552-07:00"))
                  .dedicatedAccountHostOnly(false)
                  .domain("test.com")
                  .fullyQualifiedDomainName("my.test.com")
                  .hostname("my")
                  .id(3001812)
                  .lastVerifiedDate(null)
                  .maxCpu(1)
                  .maxCpuUnits("CORE")
                  .maxMemory(1024)
                  .metricPollDate(null)
                  .modifyDate(new SimpleDateFormatDateService().iso8601DateParse("2013-07-26T14:10:21.552-07:00"))
                  .privateNetworkOnlyFlag(false)
                  .startCpus(1)
                  .statusId(1001)
                  .uuid("92102aff-93c9-05f1-b3f2-50787e865344")
                  .primaryBackendIpAddress("10.32.23.74")
                  .primaryIpAddress("174.37.252.118")
                  .billingItemId(0)
                  .operatingSystem(null)
                  .datacenter(null)
                  .powerState(null)
                  .softwareLicense(null)
                  .build());
   }
}
package cloud.jgo.jjdom.dom.nodes;

/**
 * DOM node type representing a comment in the document tree.
 */
public interface Comment extends Node {
	// TODO: incomplete ("da completare") — comment-specific members still to be defined
}
/**
 * Recursive factorial.
 *
 * <p>The original snippet was not valid Java and had the parameter replaced by
 * literals ({@code factorial(n = 2)}, {@code if (2 <= 1)}, {@code 2 * factorial(1)}),
 * so it always computed 2!. This version restores the intended general recursion.
 */
class Factorial {

    /**
     * Computes n! for n &gt;= 0 (values of n &lt;= 1 return 1).
     *
     * @param n the number whose factorial is computed; results overflow int for n &gt; 12
     * @return n factorial
     */
    static int factorial(int n) {
        if (n <= 1) {
            return 1; // base case: 0! == 1! == 1
        }
        return n * factorial(n - 1);
    }
}
package Model;

import com.google.common.collect.ForwardingSet;

import java.util.HashSet;
import java.util.Set;

/**
 * A set of {@link GroupData} backed by a {@link HashSet}, with copy-on-write style
 * helpers ({@code withAdded}, {@code without}, {@code withEdited}) that return a
 * modified copy and leave this instance untouched.
 */
public class Groups extends ForwardingSet<GroupData> {

    // Backing set all ForwardingSet operations delegate to.
    private Set<GroupData> delegate;

    /** Creates an empty group set. */
    public Groups() {
        delegate = new HashSet<>();
    }

    /** Creates an independent copy of {@code groups}. */
    public Groups(Groups groups) {
        delegate = new HashSet<GroupData>(groups.delegate);
    }

    @Override
    protected Set<GroupData> delegate() {
        return delegate;
    }

    /** Returns a copy of this set with {@code group} added. */
    public Groups withAdded(GroupData group) {
        Groups copy = new Groups(this);
        copy.add(group);
        return copy;
    }

    /** Returns a copy of this set with {@code group} removed. */
    public Groups without(GroupData group) {
        Groups copy = new Groups(this);
        copy.remove(group);
        return copy;
    }

    /** Returns a copy of this set where {@code editedGroup} is replaced by {@code group}. */
    public Groups withEdited(GroupData group, GroupData editedGroup) {
        Groups copy = new Groups(this);
        copy.remove(editedGroup);
        copy.add(group);
        return copy;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.cosmos.models; import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; /** The UniqueKeyPolicy model. */ @Fluent public final class UniqueKeyPolicy { @JsonIgnore private final ClientLogger logger = new ClientLogger(UniqueKeyPolicy.class); /* * List of unique keys on that enforces uniqueness constraint on documents * in the collection in the Azure Cosmos DB service. */ @JsonProperty(value = "uniqueKeys") private List<UniqueKey> uniqueKeys; /** * Get the uniqueKeys property: List of unique keys on that enforces uniqueness constraint on documents in the * collection in the Azure Cosmos DB service. * * @return the uniqueKeys value. */ public List<UniqueKey> uniqueKeys() { return this.uniqueKeys; } /** * Set the uniqueKeys property: List of unique keys on that enforces uniqueness constraint on documents in the * collection in the Azure Cosmos DB service. * * @param uniqueKeys the uniqueKeys value to set. * @return the UniqueKeyPolicy object itself. */ public UniqueKeyPolicy withUniqueKeys(List<UniqueKey> uniqueKeys) { this.uniqueKeys = uniqueKeys; return this; } /** * Validates the instance. * * @throws IllegalArgumentException thrown if the instance is not valid. */ public void validate() { if (uniqueKeys() != null) { uniqueKeys().forEach(e -> e.validate()); } } }
package com.example.admin.mybledemo.ui;

import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.le.ScanFilter;
import android.content.Context;
import android.graphics.drawable.ColorDrawable;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.PopupWindow;
import android.widget.SeekBar;
import android.widget.TextView;

import com.example.admin.mybledemo.R;

import cn.com.heaton.blelibrary.ble.Ble;
import cn.com.heaton.blelibrary.ble.Options;

/**
 * Custom view that lets the user configure BLE scan filters (device name/address
 * and RSSI threshold) through a popup window, notifying a {@link FilterListener}
 * when the filter settings change.
 */
public class FilterView extends BaseFrameLayout {
    private static final String TAG = "FilterView";
    private Context context;
    // Lazily created the first time show() is called.
    private PopupWindow popupWindow;
    private InputMethodManager imms;
    private TextView tvFilters;
    private ImageView ivExpand, ivClear;
    private FilterListener filterListener;

    public FilterView(@NonNull Context context) {
        super(context);
    }

    public FilterView(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        this.context = context;
    }

    // NOTE(review): this constructor does not assign this.context — presumably only the
    // two-argument constructor is used from XML inflation; confirm before relying on it.
    public FilterView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    /** Registers the listener that receives filter-change callbacks. */
    public void init(FilterListener filterListener){
        this.filterListener = filterListener;
    }

    @Override
    protected int layoutId() {
        return R.layout.layout_filterview;
    }

    @Override
    protected void bindData() {
        tvFilters = mView.findViewById(R.id.tv_filters);
        ivExpand = mView.findViewById(R.id.iv_expand);
        ivClear = mView.findViewById(R.id.iv_clear);
        // Default RSSI filter label shown before the user changes anything.
        tvFilters.setText("-50dBm");
    }

    @Override
    protected void bindListener() {
        super.bindListener();
        // Tapping the expand icon toggles the filter popup.
        ivExpand.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                show();
            }
        });
        // Tapping the clear icon cancels the current filters.
        ivClear.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                filterListener.onCancel();
            }
        });
    }

    /**
     * Sets the background alpha of the host activity's window
     * (per the original comment: the larger the value, the higher the transparency).
     *
     * @param bgAlpha window alpha to apply
     */
    public void backgroundAlpha(Activity context, float bgAlpha) {
        WindowManager.LayoutParams lp = context.getWindow().getAttributes();
        lp.alpha = bgAlpha;
        context.getWindow().addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
        context.getWindow().setAttributes(lp);
    }

    /**
     * Toggles the filter popup. On first call, inflates the popup layout and wires up
     * the name/address text watcher, clear button, and RSSI seek bar; afterwards it
     * simply shows or dismisses the existing popup.
     */
    public void show(){
        if (popupWindow != null && imms != null) {
            imms.toggleSoftInput(0, InputMethodManager.SHOW_FORCED);
        }
        if (popupWindow == null){
            imms = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
            View layout = LayoutInflater.from(context).inflate(R.layout.filter_window, null);
            popupWindow = new PopupWindow(layout,ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, true);
//            popupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE);
            imms.toggleSoftInput(0, InputMethodManager.SHOW_FORCED);
            popupWindow.setBackgroundDrawable(new ColorDrawable(0xb0000000));
            popupWindow.setOutsideTouchable(true);
            EditText etNameAddress = layout.findViewById(R.id.et_name_address);
            ImageView ivClear = layout.findViewById(R.id.iv_clear);
            SeekBar sbRssi = layout.findViewById(R.id.sb_rssi);
            TextView tvRssi = layout.findViewById(R.id.tv_rssi);
            // Rebuild the BLE scan filter on every keystroke in the name/address field.
            etNameAddress.addTextChangedListener(new TextWatcher() {
                @Override
                public void beforeTextChanged(CharSequence s, int start, int count, int after) {
                }

                @Override
                public void onTextChanged(CharSequence s, int start, int before, int count) {
                }

                @Override
                public void afterTextChanged(Editable s) {
                    Options options = Ble.options();
                    Log.e(TAG, "afterTextChanged: "+s.toString());
                    // ScanFilter requires API 21 (Lollipop).
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        String add_name = s.toString();
                        if (!TextUtils.isEmpty(add_name)){
                            // A valid MAC address filters by address, anything else by name.
                            boolean isAddress = BluetoothAdapter.checkBluetoothAddress(add_name);
                            ScanFilter.Builder builder = new ScanFilter.Builder();
                            if (isAddress){
                                builder.setDeviceAddress(add_name);
                            }else {
                                builder.setDeviceName(add_name);
                            }
                            options.setScanFilter(builder.build());
                        }else {
                            // Clear the filter (original comment: 清除过滤).
                            options.setScanFilter(null);
                        }
                        filterListener.onAddressNameChanged(add_name);
                    }
                }
            });
            ivClear.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    etNameAddress.setText("");
                }
            });
            sbRssi.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                @Override
                public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                }

                @Override
                public void onStartTrackingTouch(SeekBar seekBar) {
                }

                @Override
                public void onStopTrackingTouch(SeekBar seekBar) {
                    // Only commit the RSSI threshold once the user releases the thumb.
                    filterListener.onRssiChanged(seekBar.getProgress());
                }
            });
        }
        if (!popupWindow.isShowing()) {
//            popupWindow.showAtLocation(mView, Gravity.BOTTOM, 0, 0);
            popupWindow.showAsDropDown(mView);
        } else {
            popupWindow.dismiss();
        }
    }

    /** Callbacks fired when the user changes filter settings. */
    public interface FilterListener {
        void onAddressNameChanged(String addressOrName);
        void onRssiChanged(int rssi);
        void onCancel();
    }
}
package org.reflections;

import org.reflections.adapters.MetadataAdapter;
import org.reflections.scanners.Scanner;
import org.reflections.serializers.Serializer;

import java.net.URL;
import java.util.Set;
import java.util.concurrent.ExecutorService;

/**
 * Configuration is used to create a configured instance of {@link Reflections}
 * <p>it is preferred to use {@link org.reflections.util.ConfigurationBuilder}
 */
public interface Configuration {

    /** the scanner instances used for scanning different metadata */
    Set<Scanner> getScanners();

    /** the urls to be scanned */
    Set<URL> getUrls();

    /** the metadata adapter used to fetch metadata from classes */
    @SuppressWarnings({"RawUseOfParameterizedType"})
    MetadataAdapter getMetadataAdapter();

    /** the fully qualified name filter used to filter types to be scanned;
     * returns true when the given fqn should be scanned */
    boolean acceptsInput(String inputFqn);

    /** executor service used to scan files. if null, scanning is done in a simple for loop */
    ExecutorService getExecutorService();

    /** the default serializer to use when saving Reflections */
    Serializer getSerializer();

    /** get class loaders, might be used for resolving methods/fields */
    /*@Nullable*/ ClassLoader[] getClassLoaders();
}
package com.github.herowzz.simlogmonitor.collect;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.Socket;
import java.net.SocketAddress;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

import org.slf4j.LoggerFactory;

import com.github.herowzz.simlogmonitor.manager.ChannelManager;

import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;

/**
 * Handles one client connection of the log-collecting socket server: reads serialized
 * logback {@link ILoggingEvent}s from the socket, replays them through the local logger
 * context, and broadcasts a formatted line to all websocket channels.
 *
 * SECURITY NOTE(review): this uses native Java deserialization (ObjectInputStream) on
 * data received from the network. If untrusted peers can connect, consider installing
 * an ObjectInputFilter or switching to a non-native wire format.
 */
public class ColletSocketNode implements Runnable {

	Socket socket;
	LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
	ObjectInputStream ois;
	SocketAddress remoteSocketAddress;
	Logger logger;
	// Set when the stream could not be opened or the connection is being torn down.
	boolean closed = false;
	// Owning server, notified when this node shuts down.
	ColletSocketServer socketServer;

	public ColletSocketNode(ColletSocketServer socketServer, Socket socket) {
		this.socketServer = socketServer;
		this.socket = socket;
		remoteSocketAddress = socket.getRemoteSocketAddress();
		logger = context.getLogger(ColletSocketNode.class);
	}

	/**
	 * Read loop: deserializes logging events until EOF/socket error, replaying each
	 * through the matching local logger and broadcasting it over websockets.
	 */
	public void run() {
		try {
			ois = new ObjectInputStream(new BufferedInputStream(socket.getInputStream()));
		} catch (Exception e) {
			logger.error("Could not open ObjectInputStream to " + socket, e);
			// NOTE(review): on failure only 'closed' is set; the socket itself is not
			// closed here — confirm the server cleans it up via socketNodeClosing().
			closed = true;
		}
		ILoggingEvent event;
		Logger remoteLogger;
		try {
			while (!closed) {
				// Blocks until the next serialized event arrives.
				event = (ILoggingEvent) ois.readObject();
				remoteLogger = context.getLogger(event.getLoggerName());
				// Replay the remote event through the local appender chain.
				remoteLogger.callAppenders(event);
				String message = event.getFormattedMessage();
				LocalDateTime dateTime = new Timestamp(event.getTimeStamp()).toLocalDateTime();
				// NOTE(review): formatter could be cached as a static final constant.
				String dateTimeStr = dateTime.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss,SSS"));
				StringBuilder logMsgBuild = new StringBuilder();
				logMsgBuild.append("[").append(dateTimeStr).append("] [").append(event.getLevel()).append("] ").append(message);
				TextWebSocketFrame tws = new TextWebSocketFrame(logMsgBuild.toString());
				// Broadcast the formatted log line to every connected websocket client.
				ChannelManager.sendAll(tws);
			}
		} catch (java.io.EOFException e) {
			logger.info("Caught java.io.EOFException closing connection.");
		} catch (java.net.SocketException e) {
			logger.info("Caught java.net.SocketException closing connection.");
		} catch (IOException e) {
			logger.info("Caught java.io.IOException: " + e);
			logger.info("Closing connection.");
		} catch (Exception e) {
			logger.error("Unexpected exception. Closing connection.", e);
		}

		// Notify the owning server, then release the stream.
		socketServer.socketNodeClosing(this);
		close();
	}

	/** Idempotently closes the input stream (no-op if already closed). */
	void close() {
		if (closed) {
			return;
		}
		closed = true;
		if (ois != null) {
			try {
				ois.close();
			} catch (IOException e) {
				logger.warn("Could not close connection.", e);
			} finally {
				ois = null;
			}
		}
	}

	@Override
	public String toString() {
		return this.getClass().getName() + remoteSocketAddress.toString();
	}
}
package net.minecraft.world.gen.feature.structure;

import com.mojang.serialization.Codec;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.util.Direction;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.ChunkPos;
import net.minecraft.util.math.MutableBoundingBox;
import net.minecraft.util.registry.DynamicRegistries;
import net.minecraft.world.IBlockReader;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.gen.ChunkGenerator;
import net.minecraft.world.gen.feature.NoFeatureConfig;
import net.minecraft.world.gen.feature.template.TemplateManager;

/**
 * Nether fossil structure definition.
 *
 * NOTE(review): decompiled/obfuscated-mapping code — the func_/p_ names come from the
 * mapping tool; comments below describe only what the visible code demonstrably does.
 */
public class NetherFossilStructure extends Structure<NoFeatureConfig> {
   public NetherFossilStructure(Codec<NoFeatureConfig> p_i232105_1_) {
      super(p_i232105_1_);
   }

   public Structure.IStartFactory<NoFeatureConfig> getStartFactory() {
      return NetherFossilStructure.Start::new;
   }

   public static class Start extends MarginedStructureStart<NoFeatureConfig> {
      public Start(Structure<NoFeatureConfig> p_i232106_1_, int p_i232106_2_, int p_i232106_3_, MutableBoundingBox p_i232106_4_, int p_i232106_5_, long p_i232106_6_) {
         super(p_i232106_1_, p_i232106_2_, p_i232106_3_, p_i232106_4_, p_i232106_5_, p_i232106_6_);
      }

      public void func_230364_a_(DynamicRegistries p_230364_1_, ChunkGenerator p_230364_2_, TemplateManager p_230364_3_, int p_230364_4_, int p_230364_5_, Biome p_230364_6_, NoFeatureConfig p_230364_7_) {
         ChunkPos chunkpos = new ChunkPos(p_230364_4_, p_230364_5_);
         // Random x/z within the chunk; y starts at a random height above sea level.
         int i = chunkpos.getXStart() + this.rand.nextInt(16);
         int j = chunkpos.getZStart() + this.rand.nextInt(16);
         int k = p_230364_2_.getSeaLevel();
         int l = k + this.rand.nextInt(p_230364_2_.getMaxBuildHeight() - 2 - k);
         IBlockReader iblockreader = p_230364_2_.func_230348_a_(i, j);

         // Scan downward (until sea level) for an air block sitting on soul sand or a
         // solid top face — a valid surface to place the fossil on.
         for(BlockPos.Mutable blockpos$mutable = new BlockPos.Mutable(i, l, j); l > k; --l) {
            BlockState blockstate = iblockreader.getBlockState(blockpos$mutable);
            blockpos$mutable.move(Direction.DOWN);
            BlockState blockstate1 = iblockreader.getBlockState(blockpos$mutable);
            if (blockstate.isAir() && (blockstate1.isIn(Blocks.SOUL_SAND) || blockstate1.isSolidSide(iblockreader, blockpos$mutable, Direction.UP))) {
               break;
            }
         }

         // Only place the fossil when a valid spot above sea level was found.
         if (l > k) {
            NetherFossilStructures.func_236994_a_(p_230364_3_, this.components, this.rand, new BlockPos(i, l, j));
            this.recalculateStructureSize();
         }
      }
   }
}
import java.util.Scanner; public class Timings { public static void display_1() // displays the time schedule for Route 1 { System.out.println("1 Saharanpur 10:00"); System.out.println("2 Shahpur 10:14"); System.out.println("3 Rampur 10:22"); System.out.println("4 Shafipur 10:30"); System.out.println("5 Roorkee(Civil lines) 10:38"); System.out.println("6 Mohanpura 10:45"); System.out.println("7 Akashdeep Enclave 10:51"); System.out.println("8 Talhedi 11:00"); System.out.println("The given schedule repeats itself every 10 minutes.."); } public static void display_2() // displays the time schedule for Route 2 { System.out.println("1 Brahmpuri 09:30"); System.out.println("2 Haridwar 09:38"); System.out.println("3 Devpura 09:45"); System.out.println("4 Mayapura 09:52"); System.out.println("5 Patanjali Yogpeeth 10:03"); System.out.println("6 Adarsh Nagar 10:12"); System.out.println("7 IITR(Gate no. 1) 10:20"); System.out.println("8 Roorkee(Civil lines) 10:22"); System.out.println("9 Mohanpura 10:29"); System.out.println("10 Akashdeep Enclave 10:35"); System.out.println("11 Talhedi 10:44"); System.out.println("The given schedule repeats itself after every 10 minutes."); } public static void timings_main(){ // asks for route and then displays the timings Scanner input = new Scanner(System.in); System.out.println("METRO TIMINGS\nRoute 1: Saharanpur to Talhedi"); System.out.println("Route 2: Brahmpuri to Talhedi"); System.out.println("Enter Route number"); int m = input.nextInt(); System.out.println("Sr. 
Station Time"); System.out.println("no name "); switch(m) { case 1: display_1(); break; case 2: display_2(); break; default:System.out.println("\nEnter correct Route number!"); } Scanner in = new Scanner(System.in); int a=0; System.out.println("\nPress any non zero integer to Return to main menu."); a = in.nextInt(); if(a !=0) Metro_Etiquettes.mainMenu(); else{ System.out.print("Wrong choice!\nSystem is terminating.....\n..\nTerminated."); } }// closes timings_main } // closes Timings (class)
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.runtime.instructions.spark.functions;

import java.util.ArrayList;
import java.util.Iterator;

import org.apache.spark.api.java.function.FlatMapFunction;

import scala.Tuple2;

import org.apache.sysml.runtime.matrix.data.FrameBlock;

/**
 * Spark flat-map function converting a (row offset, FrameBlock) pair into textual
 * IJV cell triples ("row col value", 1-based), emitting frame meta data lines with
 * negative row indices (-1 num-distinct, -2 missing-value) for the first block.
 */
public class ConvertFrameBlockToIJVLines implements FlatMapFunction<Tuple2<Long,FrameBlock>, String> {
	private static final long serialVersionUID = 1803516615963340115L;

	@Override
	public Iterable<String> call(Tuple2<Long, FrameBlock> kv)
		throws Exception
	{
		long rowoffset = kv._1;
		FrameBlock block = kv._2;
		ArrayList<String> cells = new ArrayList<String>();

		//write frame meta data (only from the block holding row 1, i.e. exactly once)
		if( rowoffset == 1 ) {
			for( int j=0; j<block.getNumColumns(); j++ )
				if( !block.isColumnMetadataDefault(j) ) {
					cells.add("-1 " + (j+1) + " " + block.getColumnMetadata(j).getNumDistinct());
					cells.add("-2 " + (j+1) + " " + block.getColumnMetadata(j).getMvValue());
				}
		}

		//convert frame block to list of ijv cell triples
		//(single StringBuilder reused across cells to avoid per-cell allocation)
		StringBuilder sb = new StringBuilder();
		Iterator<String[]> iter = block.getStringRowIterator();
		for( int i=0; iter.hasNext(); i++ ) { //for all rows
			String rowIndex = Long.toString(rowoffset + i);
			String[] row = iter.next();
			for( int j=0; j<row.length; j++ ) {
				//null cells are skipped (sparse output)
				if( row[j] != null ) {
					sb.append( rowIndex );
					sb.append(' ');
					sb.append( j+1 );
					sb.append(' ');
					sb.append( row[j] );
					cells.add( sb.toString() );
					sb.setLength(0);
				}
			}
		}

		return cells;
	}
}
/* * Copyright 2017 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apicurio.hub.editing.metrics; import java.io.IOException; /** * Interface used to report metrics information for the WebSocket based editing component. * @author eric.wittmann@gmail.com */ public interface IEditingMetrics { /** * Returns the current state of the metrics. This information is typically presented * via a REST API or servlet. * @return metrics information */ public String getCurrentMetricsInfo() throws IOException; /** * Indicates that a web socket was connected. * @param designId * @param user */ public void socketConnected(String designId, String user); /** * Indicates that an editing session was created. * @param designId */ public void editingSessionCreated(String designId); /** * Indicates that a content command was received. * @param designId */ public void contentCommand(String designId); /** * Indicates that a user wants to "undo" a command/content version. * @param designId * @param contentVersion */ public void undoCommand(String designId, long contentVersion); /** * Indicates that a user wants to "redo" a command/content version. * @param designId * @param contentVersion */ public void redoCommand(String designId, long contentVersion); }
/* * Copyright (c) 2021, the hapjs-platform Project Contributors * SPDX-License-Identifier: Apache-2.0 */ package org.hapjs.component.constants; import android.content.res.Configuration; import android.text.TextUtils; import android.util.Log; import org.hapjs.card.sdk.utils.CardThemeUtils; import org.hapjs.common.json.JSONObject; import org.hapjs.common.utils.DisplayUtil; import org.hapjs.common.utils.FloatUtil; import org.hapjs.render.Page; import org.hapjs.render.vdom.DocAnimator; import org.hapjs.runtime.HapEngine; import org.json.JSONException; public class Attributes { private static final String TAG = "Attributes"; private Attributes() { } public static int getPageOpenEnterAnimation(JSONObject animationObj, int defValue) { if (animationObj == null || !animationObj.has(PageAnimation.ACTION_OPEN_ENTER)) { return defValue; } String temp = null; try { Object obj = animationObj.get(PageAnimation.ACTION_OPEN_ENTER); if (obj instanceof String) { temp = obj.toString().trim(); } } catch (JSONException e) { Log.e(TAG, "getPageOpenEnterAnimation: ", e); return defValue; } if (PageAnimation.SLIDE.equalsIgnoreCase(temp)) { return DocAnimator.TYPE_PAGE_OPEN_ENTER; } else if (PageAnimation.NONE.equalsIgnoreCase(temp)) { // 0表示无动画 return 0; } return defValue; } public static int getPageCloseEnterAnimation(JSONObject animationObj, int defValue) { if (animationObj == null || !animationObj.has(PageAnimation.ACTION_CLOSE_ENTER)) { return defValue; } String temp = null; try { Object obj = animationObj.get(PageAnimation.ACTION_CLOSE_ENTER); if (obj instanceof String) { temp = obj.toString().trim(); } } catch (JSONException e) { Log.e(TAG, "getPageCloseEnterAnimation: ", e); return defValue; } if (PageAnimation.SLIDE.equalsIgnoreCase(temp)) { return DocAnimator.TYPE_PAGE_CLOSE_ENTER; } else if (PageAnimation.NONE.equalsIgnoreCase(temp)) { // 0表示无动画 return 0; } return defValue; } public static int getPageCloseExitAnimation(JSONObject animationObj, int defValue) { if (animationObj == 
null || !animationObj.has(PageAnimation.ACTION_CLOSE_EXIT)) { return defValue; } String temp = null; try { Object obj = animationObj.get(PageAnimation.ACTION_CLOSE_EXIT); if (obj instanceof String) { temp = obj.toString().trim(); } } catch (JSONException e) { Log.e(TAG, "getPageCloseExitAnimation: ", e); return defValue; } if (PageAnimation.SLIDE.equalsIgnoreCase(temp)) { return DocAnimator.TYPE_PAGE_CLOSE_EXIT; } else if (PageAnimation.NONE.equalsIgnoreCase(temp)) { // 0表示无动画 return 0; } return defValue; } public static int getPageOpenExitAnimation(JSONObject animationObj, int defValue) { if (animationObj == null || !animationObj.has(PageAnimation.ACTION_OPEN_EXIT)) { return defValue; } String temp = null; try { Object obj = animationObj.get(PageAnimation.ACTION_OPEN_EXIT); if (obj instanceof String) { temp = obj.toString().trim(); } } catch (JSONException e) { Log.e(TAG, "getPageOpenExitAnimation: ", e); return defValue; } if (PageAnimation.SLIDE.equalsIgnoreCase(temp)) { return DocAnimator.TYPE_PAGE_OPEN_EXIT; } else if (PageAnimation.NONE.equalsIgnoreCase(temp)) { // 0表示无动画 return 0; } return defValue; } public static int getInt(HapEngine hapEngine, Object value) { return getInt(hapEngine, value, 0); } public static int getInt(HapEngine hapEngine, Object value, int defValue) { return Math.round(getFloat(hapEngine, value, defValue)); } public static float getFloat(HapEngine hapEngine, Object value) { return getFloat(hapEngine, value, FloatUtil.UNDEFINED); } public static float getFloat(HapEngine hapEngine, Object value, float defValue) { if (value == null || "".equals(value)) { return defValue; } String temp = value.toString().trim(); if (temp.startsWith(CardThemeUtils.KEY_THEME)) { String themeValue = CardThemeUtils.getThemeValue(temp); if (!TextUtils.isEmpty(themeValue)) { temp = themeValue; } else { return defValue; } } try { // px if (temp.endsWith(Unit.PX)) { temp = temp.substring(0, temp.length() - Unit.PX.length()); float result = Float.parseFloat(temp); 
if (hapEngine == null) { return defValue; } return DisplayUtil.getRealPxByWidth(result, hapEngine.getDesignWidth()); } // dp if (temp.endsWith(Unit.DP)) { temp = temp.substring(0, temp.length() - Unit.DP.length()); float result = Float.parseFloat(temp); if (hapEngine == null) { return defValue; } return DisplayUtil.dip2Pixel(hapEngine.getContext(), (int) result); } // default return Float.parseFloat(temp); } catch (Exception e) { Log.e(TAG, "Attribute get float error: " + temp, e); } return defValue; } public static boolean isSpecificAttributes(String value) { if (TextUtils.isEmpty(value)) { return false; } if (value.endsWith(Unit.PX)) { return true; } else if (value.endsWith(Unit.DP)) { return true; } else { return value.startsWith(CardThemeUtils.KEY_THEME); } } public static double getDouble(Object value) { return getDouble(value, Double.NaN); } public static double getDouble(Object value, double defValue) { if (value == null || "".equals(value)) { return defValue; } String temp = value.toString().trim(); try { return Double.parseDouble(temp); } catch (Exception e) { Log.e(TAG, "Attribute get double error: " + temp, e); } return defValue; } public static String getString(Object value) { return getString(value, null); } public static String getString(Object value, String defValue) { if (value == null || "".equals(value)) { return defValue; } if (value instanceof String) { String value1 = (String) value; if (value1.startsWith(CardThemeUtils.KEY_THEME)) { String themeValue = CardThemeUtils.getThemeValue(value1); if (!TextUtils.isEmpty(themeValue)) { return themeValue; } return defValue; } return value1; } return value.toString(); } public static long getLong(Object value) { return getLong(value, 0L); } public static long getLong(Object value, long defValue) { if (value == null || "".equals(value)) { return defValue; } String temp = value.toString().trim(); try { return Long.parseLong(temp); } catch (Exception e) { Log.e(TAG, "Attribute get long error: " + temp, e); 
} return 0; } public static boolean getBoolean(Object value, Boolean defValue) { if (value == null || "".equals(value)) { return defValue; } if (TextUtils.equals("false", value.toString())) { return false; } else if (TextUtils.equals("true", value.toString())) { return true; } return defValue; } public static float getPercent(Object value, float defValue) { if (value == null || "".equals(value)) { return defValue; } String temp = value.toString().trim(); if (temp.endsWith(Unit.PERCENT)) { temp = temp.substring(0, temp.length() - Unit.PERCENT.length()); } try { return Float.parseFloat(temp) / 100; } catch (Exception e) { Log.e(TAG, "Attribute get percent error: " + temp, e); } return defValue; } public static float getEm(Object value, float defValue) { if (value == null || "".equals(value)) { return defValue; } String temp = value.toString().trim(); if (temp.endsWith(Unit.EM)) { temp = temp.substring(0, temp.length() - Unit.EM.length()); } try { return Float.parseFloat(temp); } catch (Exception e) { Log.e(TAG, "Attribute get em error: " + temp, e); } return defValue; } public static float getCm(Object value, float defValue) { if (value == null || "".equals(value)) { return defValue; } String temp = value.toString().trim(); if (temp.endsWith(Unit.CM)) { temp = temp.substring(0, temp.length() - Unit.CM.length()); } try { return Float.parseFloat(temp); } catch (Exception e) { Log.e(TAG, "Attribute get cm error: " + temp, e); } return defValue; } public static int getFontSize(HapEngine hapEngine, Page page, Object value) { return getFontSize(hapEngine, page, value, 0); } public static int getFontSize(HapEngine hapEngine, Page page, Object value, int defValue) { if (hapEngine == null || page == null) { return defValue; } float size = getFloat(hapEngine, value, defValue); if (page.isTextSizeAdjustAuto()) { Configuration configuration = hapEngine.getContext().getResources().getConfiguration(); size *= configuration.fontScale; } return Math.round(size); } public interface 
Style { String ID = "id"; String TARGET = "target"; String WIDTH = "width"; String HEIGHT = "height"; String MIN_WIDTH = "minWidth"; String MIN_HEIGHT = "minHeight"; String MAX_WIDTH = "maxWidth"; String MAX_HEIGHT = "maxHeight"; String AUTO = "auto"; String NONE = "none"; String MIN_CONTENT = "minContent"; String MAX_CONTENT = "maxContent"; String FIT_CONTENT = "fitContent"; String PADDING = "padding"; String PADDING_LEFT = "paddingLeft"; String PADDING_TOP = "paddingTop"; String PADDING_RIGHT = "paddingRight"; String PADDING_BOTTOM = "paddingBottom"; String MARGIN = "margin"; String MARGIN_AUTO = "auto"; String MARGIN_LEFT = "marginLeft"; String MARGIN_TOP = "marginTop"; String MARGIN_RIGHT = "marginRight"; String MARGIN_BOTTOM = "marginBottom"; String BORDER_WIDTH = "borderWidth"; String BORDER_LEFT_WIDTH = "borderLeftWidth"; String BORDER_TOP_WIDTH = "borderTopWidth"; String BORDER_RIGHT_WIDTH = "borderRightWidth"; String BORDER_BOTTOM_WIDTH = "borderBottomWidth"; String BORDER_COLOR = "borderColor"; String BORDER_LEFT_COLOR = "borderLeftColor"; String BORDER_TOP_COLOR = "borderTopColor"; String BORDER_RIGHT_COLOR = "borderRightColor"; String BORDER_BOTTOM_COLOR = "borderBottomColor"; String BORDER_STYLE = "borderStyle"; String BORDER_RADIUS = "borderRadius"; String BORDER_TOP_LEFT_RADIUS = "borderTopLeftRadius"; String BORDER_TOP_RIGHT_RADIUS = "borderTopRightRadius"; String BORDER_BOTTOM_LEFT_RADIUS = "borderBottomLeftRadius"; String BORDER_BOTTOM_RIGHT_RADIUS = "borderBottomRightRadius"; String BACKGROUND_COLOR = "backgroundColor"; String BACKGROUND_IMAGE = "backgroundImage"; String BACKGROUND_SIZE = "backgroundSize"; String BACKGROUND_REPEAT = "backgroundRepeat"; String BACKGROUND_POSITION = "backgroundPosition"; String BACKGROUND = "background"; String OPACITY = "opacity"; String DISPLAY = "display"; String SHOW = "show"; String VISIBILITY = "visibility"; String POSITION = "position"; String LEFT = "left"; String TOP = "top"; String RIGHT = "right"; String 
BOTTOM = "bottom"; String FLEX = "flex"; String FLEX_GROW = "flexGrow"; String FLEX_SHRINK = "flexShrink"; String FLEX_BASIS = "flexBasis"; String FILTER = "filter"; String ALIGN_SELF = "alignSelf"; String FLEX_DIRECTION = "flexDirection"; String JUSTIFY_CONTENT = "justifyContent"; String ALIGN_ITEMS = "alignItems"; String FLEX_WRAP = "flexWrap"; String ALIGN_CONTENT = "alignContent"; String VIDEO_FULLSCREEN_CONTAINER = "enablevideofullscreencontainer"; String OVERFLOW = "overflow"; String HREF = "href"; String LINES = "lines"; String LINE_HEIGHT = "lineHeight"; String COLOR = "color"; String FONT_SIZE = "fontSize"; String FONT_STYLE = "fontStyle"; String FONT_WEIGHT = "fontWeight"; String TEXT_DECORATION = "textDecoration"; String TEXT_ALIGN = "textAlign"; String PLACEHOLDER = "placeholder"; String PLACEHOLDER_COLOR = "placeholderColor"; String TYPE = "type"; String TEXT_OVERFLOW = "textOverflow"; String TEXT_INDENT = "textIndent"; String NAME = "name"; String VALUE = "value"; String CONTENT = "content"; @Deprecated String RESIZE_MODE = "resizeMode"; String OBJECT_FIT = "objectFit"; String ALT_OBJECT_FIT = "altObjectFit"; String SRC = "src"; String ALT = "alt"; String INDEX = "index"; String AUTO_PLAY = "autoplay"; String SCREEN_ORIENTATION = "screenOrientation"; String ORIENTATION = "orientation"; String PERCENT = "percent"; String STROKE_WIDTH = "strokeWidth"; String MIN = "min"; String MAX = "max"; String STEP = "step"; String ENABLE = "enable"; String SELECTED_COLOR = "selectedColor"; String START = "start"; String END = "end"; String RANGE = "range"; String SELECTED = "selected"; String PROGRESS_COLOR = "progressColor"; String OFFSET = "offset"; String REFRESHING = "refreshing"; String ENABLE_REFRESH = "enableRefresh"; String ANIMATION_DURATION = "animationDuration"; String ANIMATION_TIMING_FUNCTION = "animationTimingFunction"; String ANIMATION_DELAY = "animationDelay"; String ANIMATION_ITERATION_COUNT = "animationIterationCount"; String ANIMATION_FILL_MODE = 
"animationFillMode"; String ANIMATION_KEYFRAMES = "animationKeyframes"; String ANIMATION_DIRECTION = "animationDirection"; String PAGE_ANIMATION_KEYFRAMES = "pageAnimationKeyframes"; String TRANSFORM = "transform"; String TRANSFORM_ORIGIN = "transformOrigin"; String PAGE_ANIMATION_ORIGIN = "pageTransformOrigin"; String TRANSITION_PROPERTY = "transitionProperty"; String TRANSITION_DURATION = "transitionDuration"; String TRANSITION_TIMING_FUNCTION = "transitionTimingFunction"; String TRANSITION_DELAY = "transitionDelay"; String ALL = "all"; String SCROLL_PAGE = "scrollpage"; String COLUMNS = "columns"; String COLUMN_SPAN = "columnSpan"; String DIRECTION = "direction"; String ENABLE_SWIPE = "enableswipe"; String ACTIVE = "active"; String DISABLED = "disabled"; String FOCUSABLE = "focusable"; String CHECKED = "checked"; String DESCENDANT_FOCUSABILITY = "descendantfocusability"; String MODE = "mode"; String ARIA_LABEL = "ariaLabel"; String ARIA_UNFOCUSABLE = "ariaUnfocusable"; String FORCE_DARK = "forcedark"; String AUTO_FOCUS = "autofocus"; } public interface Unit { // when add new unit must check isSpecificAttributes function String PX = "px"; String PERCENT = "%"; String EM = "em"; String CM = "cm"; String DP = "dp"; } public interface TextType { String TEXT = "text"; String HTML = "html"; } public interface InputType { String BUTTON = "button"; String TEXT = "text"; String CHECK_BOX = "checkbox"; String DATE = "date"; String TIME = "time"; String EMAIL = "email"; String NUMBER = "number"; String PASSWORD = "password"; String TELEPHONE = "tel"; } public interface AutoComplete { String ON = "on"; String OFF = "off"; } public interface PickerType { String DATE = "date"; String TIME = "time"; String TEXT = "text"; } public interface PlayCount { String ONCE = "1"; String INFINITE = "infinite"; } public interface ProgressType { String HORIZONTAL = "horizontal"; String CIRCULAR = "circular"; } public interface Event { // common String CLICK = "click"; String FOCUS = 
"focus"; String BLUR = "blur"; String LONGPRESS = "longpress"; String CHANGE = "change"; String RESIZE = "resize"; // list String SCROLL = "scroll"; String SCROLL_BOTTOM = "scrollbottom"; String SCROLL_TOP = "scrolltop"; String SCROLL_END = "scrollend"; String SCROLL_TOUCH_UP = "scrolltouchup"; // refresh String REFRESH = "refresh"; String APPEAR = "appear"; String DISAPPEAR = "disappear"; String SWIPE = "swipe"; String FULLSCREEN_CHANGE = "fullscreenchange"; // touch String TOUCH_START = "touchstart"; String TOUCH_MOVE = "touchmove"; String TOUCH_END = "touchend"; String TOUCH_CANCEL = "touchcancel"; String TOUCH_CLICK = CLICK; String TOUCH_LONG_PRESS = LONGPRESS; // KeyEvent String KEY_EVENT = "key"; String KEY_EVENT_PAGE = "pagekey"; // animation String ANIMATION_START = "animationstart"; String ANIMATION_END = "animationend"; String ANIMATION_ITERATION = "animationiteration"; } public interface EventParams { String IS_FROM_USER = "isFromUser"; } public interface Display { String FLEX = "flex"; String NONE = "none"; } public interface Visibility { String VISIBLE = "visible"; String HIDDEN = "hidden"; } public interface Position { String FIXED = "fixed"; String RELATIVE = "relative"; String ABSOLUTE = "absolute"; } public interface Align { String AUTO = "auto"; String FLEX_START = "flex-start"; String CENTER = "center"; String FLEX_END = "flex-end"; String STRETCH = "stretch"; String BASELINE = "baseline"; String SPACE_BETWEEN = "space-between"; String SPACE_AROUND = "space-around"; } public interface TextOverflow { String CLIP = "clip"; String ELLIPSIS = "ellipsis"; String STRING = "string"; } public interface Mode { String SCROLLABLE = "scrollable"; String FIXED = "fixed"; } public interface ImageMode { String NONE = "none"; String CONTAIN = "contain"; String COVER = "cover"; } public interface RepeatMode { // default String REPEAT = "repeat"; String REPEAT_X = "repeat-x"; String REPEAT_Y = "repeat-y"; String REPEAT_NONE = "no-repeat"; } public interface 
ObjectFit { String CONTAIN = "contain"; String COVER = "cover"; String FILL = "fill"; String NONE = "none"; String SCALE_DOWN = "scale-down"; } public interface PositionMode { // default String TOP_LEFT = "0px 0px"; } public interface PageAnimation { String ACTION_OPEN_ENTER = "openEnter"; String ACTION_CLOSE_ENTER = "closeEnter"; String ACTION_OPEN_EXIT = "openExit"; String ACTION_CLOSE_EXIT = "closeExit"; String SLIDE = "slide"; // TODO: fade animation need to be defined at the standard conference String FADE = "fade"; String NONE = "none"; } public interface DescendantFocusabilityType { String BEFORE = "before"; String AFTER = "after"; String BLOCK = "block"; } public interface OverflowType { String VISIBLE = "visible"; String HIDDEN = "hidden"; } }
package org.flybird.rock.mapper;

import java.util.List;
import org.apache.ibatis.annotations.Param;
import org.flybird.rock.model.CmsPrefrenceArea;
import org.flybird.rock.model.CmsPrefrenceAreaExample;

/**
 * MyBatis mapper for the CMS preference-area table.
 *
 * <p>This follows the standard MyBatis Generator mapper shape (Example-based
 * criteria queries, selective vs. full updates, BLOB-aware variants); the
 * SQL lives in the corresponding mapper XML.
 */
public interface CmsPrefrenceAreaMapper {
    /** Counts rows matching the example criteria. */
    long countByExample(CmsPrefrenceAreaExample example);

    /** Deletes all rows matching the example criteria; returns rows affected. */
    int deleteByExample(CmsPrefrenceAreaExample example);

    /** Deletes the row with the given primary key; returns rows affected. */
    int deleteByPrimaryKey(Long id);

    /** Inserts a full record (all columns, including nulls). */
    int insert(CmsPrefrenceArea record);

    /** Inserts only the non-null fields of the record. */
    int insertSelective(CmsPrefrenceArea record);

    /** Selects matching rows including BLOB columns. */
    List<CmsPrefrenceArea> selectByExampleWithBLOBs(CmsPrefrenceAreaExample example);

    /** Selects matching rows excluding BLOB columns. */
    List<CmsPrefrenceArea> selectByExample(CmsPrefrenceAreaExample example);

    /** Selects a single row by primary key, or null if absent. */
    CmsPrefrenceArea selectByPrimaryKey(Long id);

    /** Updates non-null fields of matching rows; returns rows affected. */
    int updateByExampleSelective(@Param("record") CmsPrefrenceArea record,
            @Param("example") CmsPrefrenceAreaExample example);

    /** Updates all fields (including BLOBs) of matching rows. */
    int updateByExampleWithBLOBs(@Param("record") CmsPrefrenceArea record,
            @Param("example") CmsPrefrenceAreaExample example);

    /** Updates all non-BLOB fields of matching rows. */
    int updateByExample(@Param("record") CmsPrefrenceArea record,
            @Param("example") CmsPrefrenceAreaExample example);

    /** Updates non-null fields of the row with the record's primary key. */
    int updateByPrimaryKeySelective(CmsPrefrenceArea record);

    /** Updates all fields (including BLOBs) of the row with the record's key. */
    int updateByPrimaryKeyWithBLOBs(CmsPrefrenceArea record);

    /** Updates all non-BLOB fields of the row with the record's key. */
    int updateByPrimaryKey(CmsPrefrenceArea record);
}
package org.enodeframework.tests.EventHandlers;

import org.enodeframework.annotation.Event;
import org.enodeframework.annotation.Priority;
import org.enodeframework.annotation.Subscribe;
import org.enodeframework.common.io.AsyncTaskResult;
import org.enodeframework.tests.Domain.Event1;
import org.enodeframework.tests.TestClasses.CommandAndEventServiceTest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;

/**
 * Test event handler (priority 3) for {@link Event1}. Records its own class
 * name in {@code CommandAndEventServiceTest.HandlerTypes} under key 1 so the
 * test can assert handler invocation order.
 */
@Priority(3)
@Event
public class Handler2 {

    // SLF4J loggers are thread-safe; one static instance per class is the idiom
    // (was a per-instance field).
    private static final Logger logger = LoggerFactory.getLogger(Handler2.class);

    /**
     * Handles Event1. Method name kept as-is: it is discovered via the
     * {@code @Subscribe} annotation, not by name, but renaming is avoided to
     * stay consistent with the sibling handlers in this test package.
     *
     * @param evnt the domain event being handled
     * @return always {@link AsyncTaskResult#Success}
     */
    @Subscribe
    public AsyncTaskResult HandleAsync(Event1 evnt) {
        logger.info("event1 handled by handler2.");
        CommandAndEventServiceTest.HandlerTypes
                .computeIfAbsent(1, k -> new ArrayList<>())
                .add(getClass().getName());
        return AsyncTaskResult.Success;
    }
}
package com.huotu.huotao.sayhi;

import com.huotu.huotao.sayhi.bean.BaseBean;
import com.huotu.huotao.sayhi.bean.TaskResultBeam;

import java.util.Map;

import retrofit2.Call;
import retrofit2.http.FieldMap;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.POST;

/**
 * Retrofit API surface for the "sayhi" backend. All endpoints POST
 * form-encoded fields to AjaxHandler.aspx, selecting the operation via the
 * {@code action} query parameter.
 *
 * Created by Administrator on 2017/2/21.
 */
public interface ApiDefine {

    /** Registers this device with the backend ({@code action=adddevice}). */
    @FormUrlEncoded
    @POST("AjaxHandler.aspx?action=adddevice")
    Call<BaseBean> addDevice(@FieldMap Map<String, String> params);

    /** Fetches task info for this device ({@code action=gettaskinfo}). */
    @FormUrlEncoded
    @POST("AjaxHandler.aspx?action=gettaskinfo")
    Call<TaskResultBeam> getTaskInfo(@FieldMap Map<String, String> params);

    /** Reports task location status ({@code action=updatelocationstatus}). */
    @FormUrlEncoded
    @POST("AjaxHandler.aspx?action=updatelocationstatus")
    Call<BaseBean> updateTaskLocationStatus(@FieldMap Map<String, String> params);

    /** Marks a task as running ({@code action=updatetaskstatusrun}). */
    @FormUrlEncoded
    @POST("AjaxHandler.aspx?action=updatetaskstatusrun")
    Call<BaseBean> UpdateTaskStatusRun(@FieldMap Map<String, String> params);
}
/**
 * Activity-related services (活动相关).
 */
/**
 * @since 2.1.0
 * @author <a href="mailto:sikai.wang@elanking.com">sikai.wang</a>
 * @version 2016-07-25
 */
package com.lanking.uxb.service.activity;
package io.nuls.crosschain.nuls.utils.thread;

import io.nuls.base.data.Transaction;
import io.nuls.crosschain.nuls.model.bo.Chain;
import io.nuls.crosschain.nuls.utils.TxUtil;

/**
 * Runnable that forwards a verifier-init transaction to
 * {@link TxUtil#handleNewCtx} for the given chain, allowing the handling to
 * run on a worker thread. The third argument to handleNewCtx is always null
 * here.
 */
public class VerifierInitTxHandler implements Runnable {

    private final Chain chain;
    private final Transaction transaction;

    /**
     * @param chain chain context the transaction belongs to
     * @param transaction the transaction to process
     */
    public VerifierInitTxHandler(Chain chain, Transaction transaction) {
        this.chain = chain;
        this.transaction = transaction;
    }

    @Override
    public void run() {
        TxUtil.handleNewCtx(transaction, chain, null);
    }
}
/* * Copyright 2021 Brackeys IDE contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.brackeys.ui.language.xml.lexer; import org.jetbrains.annotations.NotNull; @SuppressWarnings("all") /** * This class is a scanner generated by * <a href="http://www.jflex.de/">JFlex</a> 1.8.2 * from the specification file <tt>xml.flex</tt> */ public class XmlLexer { /** This character denotes the end of file. */ public static final int YYEOF = -1; /** Initial size of the lookahead buffer. */ private static final int ZZ_BUFFERSIZE = 16384; // Lexical states. 
public static final int YYINITIAL = 0; public static final int DOC_TYPE = 2; public static final int COMMENT = 4; public static final int START_TAG_NAME = 6; public static final int END_TAG_NAME = 8; public static final int BEFORE_TAG_ATTRIBUTES = 10; public static final int TAG_ATTRIBUTES = 12; public static final int ATTRIBUTE_VALUE_START = 14; public static final int ATTRIBUTE_VALUE_DQ = 16; public static final int ATTRIBUTE_VALUE_SQ = 18; public static final int PROCESSING_INSTRUCTION = 20; public static final int TAG_CHARACTERS = 22; public static final int C_COMMENT_START = 24; public static final int C_COMMENT_END = 26; public static final int CDATA = 28; /** * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l * at the beginning of a line * l is of the form l = 2*k, k a non negative integer */ private static final int ZZ_LEXSTATE[] = { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14 }; /** * Top-level table for translating characters to character classes */ private static final int [] ZZ_CMAP_TOP = zzUnpackcmap_top(); private static final String ZZ_CMAP_TOP_PACKED_0 = "\1\0\1\u0100\1\u0200\1\u0300\1\u0400\1\u0500\1\u0600\1\u0700"+ "\1\u0800\1\u0900\1\u0a00\1\u0b00\1\u0c00\1\u0d00\1\u0e00\1\u0f00"+ "\1\u1000\1\u0100\1\u1100\1\u1200\1\u1300\1\u0100\1\u1400\1\u1500"+ "\1\u1600\1\u1700\1\u1800\1\u1900\1\u1a00\1\u1b00\1\u0100\1\u1c00"+ "\1\u1d00\1\u1e00\12\u1f00\1\u2000\1\u2100\1\u2200\1\u1f00\1\u2300"+ "\1\u2400\2\u1f00\31\u0100\1\u2500\121\u0100\1\u2600\4\u0100\1\u2700"+ "\1\u0100\1\u2800\1\u2900\1\u2a00\1\u2b00\1\u2c00\1\u2d00\53\u0100"+ "\1\u2e00\41\u1f00\1\u0100\1\u2f00\1\u3000\1\u0100\1\u3100\1\u3200"+ "\1\u3300\1\u3400\1\u1f00\1\u3500\1\u3600\1\u3700\1\u3800\1\u0100"+ "\1\u3900\1\u3a00\1\u3b00\1\u3c00\1\u3d00\1\u3e00\1\u3f00\1\u1f00"+ "\1\u4000\1\u4100\1\u4200\1\u4300\1\u4400\1\u4500\1\u4600\1\u4700"+ 
"\1\u4800\1\u4900\1\u4a00\1\u4b00\1\u1f00\1\u4c00\1\u4d00\1\u4e00"+ "\1\u1f00\3\u0100\1\u4f00\1\u5000\1\u5100\12\u1f00\4\u0100\1\u5200"+ "\17\u1f00\2\u0100\1\u5300\41\u1f00\2\u0100\1\u5400\1\u5500\2\u1f00"+ "\1\u5600\1\u5700\27\u0100\1\u5800\2\u0100\1\u5900\45\u1f00\1\u0100"+ "\1\u5a00\1\u5b00\11\u1f00\1\u5c00\27\u1f00\1\u5d00\1\u5e00\1\u5f00"+ "\1\u6000\11\u1f00\1\u6100\1\u6200\5\u1f00\1\u6300\1\u6400\4\u1f00"+ "\1\u6500\21\u1f00\246\u0100\1\u6600\20\u0100\1\u6700\1\u6800\25\u0100"+ "\1\u6900\34\u0100\1\u6a00\14\u1f00\2\u0100\1\u6b00\u0e05\u1f00"; private static int [] zzUnpackcmap_top() { int [] result = new int[4352]; int offset = 0; offset = zzUnpackcmap_top(ZZ_CMAP_TOP_PACKED_0, offset, result); return result; } private static int zzUnpackcmap_top(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); do result[j++] = value; while (--count > 0); } return j; } /** * Second-level tables for translating characters to character classes */ private static final int [] ZZ_CMAP_BLOCKS = zzUnpackcmap_blocks(); private static final String ZZ_CMAP_BLOCKS_PACKED_0 = "\11\0\2\1\1\0\2\1\22\0\1\1\1\2\1\3"+ "\1\4\1\5\1\0\1\6\1\7\2\10\3\0\1\11"+ "\1\12\1\13\12\14\1\15\1\16\1\17\1\0\1\20"+ "\1\21\1\0\1\22\1\23\1\24\1\25\1\26\1\27"+ "\1\30\1\31\1\32\2\30\1\33\1\34\1\30\1\35"+ "\1\36\2\30\1\37\1\40\1\41\2\30\1\42\1\43"+ "\1\30\1\44\1\45\1\46\1\0\1\15\1\0\1\47"+ "\1\50\1\51\1\52\1\26\1\27\1\53\1\31\1\32"+ "\2\30\1\54\1\55\1\56\1\57\1\60\1\61\1\30"+ "\1\62\1\63\1\64\2\30\1\42\1\43\1\30\1\0"+ "\1\10\10\0\1\65\44\0\1\30\12\0\1\30\4\0"+ "\1\30\5\0\27\30\1\0\37\30\1\0\u01ca\30\4\0"+ "\14\30\16\0\5\30\7\0\1\30\1\0\1\30\201\0"+ "\5\30\1\0\2\30\2\0\4\30\1\0\1\30\6\0"+ "\1\30\1\0\3\30\1\0\1\30\1\0\24\30\1\0"+ "\123\30\1\0\213\30\10\0\246\30\1\0\46\30\2\0"+ "\1\30\6\0\51\30\107\0\33\30\4\0\4\30\55\0"+ 
"\53\30\43\0\2\30\1\0\143\30\1\0\1\30\17\0"+ "\2\30\7\0\2\30\12\0\3\30\2\0\1\30\20\0"+ "\1\30\1\0\36\30\35\0\131\30\13\0\1\30\30\0"+ "\41\30\11\0\2\30\4\0\1\30\5\0\26\30\4\0"+ "\1\30\11\0\1\30\3\0\1\30\27\0\31\30\7\0"+ "\13\30\65\0\25\30\1\0\10\30\106\0\66\30\3\0"+ "\1\30\22\0\1\30\7\0\12\30\17\0\20\30\4\0"+ "\10\30\2\0\2\30\2\0\26\30\1\0\7\30\1\0"+ "\1\30\3\0\4\30\3\0\1\30\20\0\1\30\15\0"+ "\2\30\1\0\3\30\16\0\2\30\12\0\1\30\10\0"+ "\6\30\4\0\2\30\2\0\26\30\1\0\7\30\1\0"+ "\2\30\1\0\2\30\1\0\2\30\37\0\4\30\1\0"+ "\1\30\23\0\3\30\20\0\11\30\1\0\3\30\1\0"+ "\26\30\1\0\7\30\1\0\2\30\1\0\5\30\3\0"+ "\1\30\22\0\1\30\17\0\2\30\27\0\1\30\13\0"+ "\10\30\2\0\2\30\2\0\26\30\1\0\7\30\1\0"+ "\2\30\1\0\5\30\3\0\1\30\36\0\2\30\1\0"+ "\3\30\17\0\1\30\21\0\1\30\1\0\6\30\3\0"+ "\3\30\1\0\4\30\3\0\2\30\1\0\1\30\1\0"+ "\2\30\3\0\2\30\3\0\3\30\3\0\14\30\26\0"+ "\1\30\64\0\10\30\1\0\3\30\1\0\27\30\1\0"+ "\20\30\3\0\1\30\32\0\3\30\5\0\2\30\36\0"+ "\1\30\4\0\10\30\1\0\3\30\1\0\27\30\1\0"+ "\12\30\1\0\5\30\3\0\1\30\40\0\1\30\1\0"+ "\2\30\17\0\2\30\22\0\10\30\1\0\3\30\1\0"+ "\51\30\2\0\1\30\20\0\1\30\5\0\3\30\10\0"+ "\3\30\30\0\6\30\5\0\22\30\3\0\30\30\1\0"+ "\11\30\1\0\1\30\2\0\7\30\72\0\60\30\1\0"+ "\2\30\14\0\7\30\72\0\2\30\1\0\1\30\1\0"+ "\5\30\1\0\30\30\1\0\1\30\1\0\12\30\1\0"+ "\2\30\11\0\1\30\2\0\5\30\1\0\1\30\25\0"+ "\4\30\40\0\1\30\77\0\10\30\1\0\44\30\33\0"+ "\5\30\163\0\53\30\24\0\1\30\20\0\6\30\4\0"+ "\4\30\3\0\1\30\3\0\2\30\7\0\3\30\4\0"+ "\15\30\14\0\1\30\21\0\46\30\1\0\1\30\5\0"+ "\1\30\2\0\53\30\1\0\115\30\1\0\4\30\2\0"+ "\7\30\1\0\1\30\1\0\4\30\2\0\51\30\1\0"+ "\4\30\2\0\41\30\1\0\4\30\2\0\7\30\1\0"+ "\1\30\1\0\4\30\2\0\17\30\1\0\71\30\1\0"+ "\4\30\2\0\103\30\45\0\20\30\20\0\126\30\2\0"+ "\6\30\3\0\u016c\30\2\0\21\30\1\0\32\30\5\0"+ "\113\30\6\0\10\30\7\0\15\30\1\0\4\30\16\0"+ "\22\30\16\0\22\30\16\0\15\30\1\0\3\30\17\0"+ "\64\30\43\0\1\30\4\0\1\30\103\0\131\30\7\0"+ "\5\30\2\0\42\30\1\0\1\30\5\0\106\30\12\0"+ "\37\30\61\0\36\30\2\0\5\30\13\0\54\30\4\0"+ 
"\32\30\66\0\27\30\11\0\65\30\122\0\1\30\135\0"+ "\57\30\21\0\7\30\67\0\36\30\15\0\2\30\12\0"+ "\54\30\32\0\44\30\51\0\3\30\12\0\44\30\2\0"+ "\11\30\7\0\53\30\2\0\3\30\51\0\4\30\1\0"+ "\6\30\1\0\2\30\3\0\1\30\5\0\300\30\100\0"+ "\26\30\2\0\6\30\2\0\46\30\2\0\6\30\2\0"+ "\10\30\1\0\1\30\1\0\1\30\1\0\1\30\1\0"+ "\37\30\2\0\65\30\1\0\7\30\1\0\1\30\3\0"+ "\3\30\1\0\7\30\3\0\4\30\2\0\6\30\4\0"+ "\15\30\5\0\3\30\1\0\7\30\53\0\2\65\107\0"+ "\1\30\15\0\1\30\20\0\15\30\145\0\1\30\4\0"+ "\1\30\2\0\12\30\1\0\1\30\3\0\5\30\6\0"+ "\1\30\1\0\1\30\1\0\1\30\1\0\4\30\1\0"+ "\13\30\2\0\4\30\5\0\5\30\4\0\1\30\64\0"+ "\2\30\u017b\0\57\30\1\0\57\30\1\0\205\30\6\0"+ "\4\30\3\0\2\30\14\0\46\30\1\0\1\30\5\0"+ "\1\30\2\0\70\30\7\0\1\30\20\0\27\30\11\0"+ "\7\30\1\0\7\30\1\0\7\30\1\0\7\30\1\0"+ "\7\30\1\0\7\30\1\0\7\30\1\0\7\30\120\0"+ "\1\30\325\0\2\30\52\0\5\30\5\0\2\30\4\0"+ "\126\30\6\0\3\30\1\0\132\30\1\0\4\30\5\0"+ "\53\30\1\0\136\30\21\0\33\30\65\0\306\30\112\0"+ "\360\30\20\0\215\30\103\0\56\30\2\0\15\30\3\0"+ "\20\30\12\0\2\30\24\0\57\30\20\0\37\30\2\0"+ "\106\30\61\0\11\30\2\0\147\30\2\0\65\30\2\0"+ "\5\30\60\0\13\30\1\0\3\30\1\0\4\30\1\0"+ "\27\30\35\0\64\30\16\0\62\30\76\0\6\30\3\0"+ "\1\30\1\0\2\30\13\0\34\30\12\0\27\30\31\0"+ "\35\30\7\0\57\30\34\0\1\30\20\0\5\30\1\0"+ "\12\30\12\0\5\30\1\0\51\30\27\0\3\30\1\0"+ "\10\30\24\0\27\30\3\0\1\30\3\0\62\30\1\0"+ "\1\30\3\0\2\30\2\0\5\30\2\0\1\30\1\0"+ "\1\30\30\0\3\30\2\0\13\30\7\0\3\30\14\0"+ "\6\30\2\0\6\30\2\0\6\30\11\0\7\30\1\0"+ "\7\30\1\0\53\30\1\0\14\30\10\0\163\30\35\0"+ "\244\30\14\0\27\30\4\0\61\30\4\0\156\30\2\0"+ "\152\30\46\0\7\30\14\0\5\30\5\0\1\30\1\0"+ "\12\30\1\0\15\30\1\0\5\30\1\0\1\30\1\0"+ "\2\30\1\0\2\30\1\0\154\30\41\0\153\30\22\0"+ "\100\30\2\0\66\30\50\0\14\30\164\0\5\30\1\0"+ "\207\30\44\0\32\30\6\0\32\30\13\0\131\30\3\0"+ "\6\30\2\0\6\30\2\0\6\30\2\0\3\30\43\0"+ "\14\30\1\0\32\30\1\0\23\30\1\0\2\30\1\0"+ "\17\30\2\0\16\30\42\0\173\30\205\0\35\30\3\0"+ "\61\30\57\0\40\30\15\0\24\30\1\0\10\30\6\0"+ 
"\46\30\12\0\36\30\2\0\44\30\4\0\10\30\60\0"+ "\236\30\22\0\44\30\4\0\44\30\4\0\50\30\10\0"+ "\64\30\234\0\67\30\11\0\26\30\12\0\10\30\230\0"+ "\6\30\2\0\1\30\1\0\54\30\1\0\2\30\3\0"+ "\1\30\2\0\27\30\12\0\27\30\11\0\37\30\101\0"+ "\23\30\1\0\2\30\12\0\26\30\12\0\32\30\106\0"+ "\70\30\6\0\2\30\100\0\1\30\17\0\4\30\1\0"+ "\3\30\1\0\35\30\52\0\35\30\3\0\35\30\43\0"+ "\10\30\1\0\34\30\33\0\66\30\12\0\26\30\12\0"+ "\23\30\15\0\22\30\156\0\111\30\67\0\63\30\15\0"+ "\63\30\15\0\44\30\334\0\35\30\12\0\1\30\10\0"+ "\26\30\232\0\27\30\14\0\65\30\113\0\55\30\40\0"+ "\31\30\32\0\44\30\35\0\1\30\13\0\43\30\3\0"+ "\1\30\14\0\60\30\16\0\4\30\25\0\1\30\1\0"+ "\1\30\43\0\22\30\1\0\31\30\124\0\7\30\1\0"+ "\1\30\1\0\4\30\1\0\17\30\1\0\12\30\7\0"+ "\57\30\46\0\10\30\2\0\2\30\2\0\26\30\1\0"+ "\7\30\1\0\2\30\1\0\5\30\3\0\1\30\22\0"+ "\1\30\14\0\5\30\236\0\65\30\22\0\4\30\24\0"+ "\1\30\40\0\60\30\24\0\2\30\1\0\1\30\270\0"+ "\57\30\51\0\4\30\44\0\60\30\24\0\1\30\73\0"+ "\53\30\15\0\1\30\107\0\33\30\345\0\54\30\164\0"+ "\100\30\37\0\1\30\240\0\10\30\2\0\47\30\20\0"+ "\1\30\1\0\1\30\34\0\1\30\12\0\50\30\7\0"+ "\1\30\25\0\1\30\13\0\56\30\23\0\1\30\42\0"+ "\71\30\7\0\11\30\1\0\45\30\21\0\1\30\61\0"+ "\36\30\160\0\7\30\1\0\2\30\1\0\46\30\25\0"+ "\1\30\31\0\6\30\1\0\2\30\1\0\40\30\16\0"+ "\1\30\u0147\0\23\30\15\0\232\30\346\0\304\30\274\0"+ "\57\30\321\0\107\30\271\0\71\30\7\0\37\30\161\0"+ "\36\30\22\0\60\30\20\0\4\30\37\0\25\30\5\0"+ "\23\30\260\0\100\30\200\0\113\30\5\0\1\30\102\0"+ "\15\30\100\0\2\30\1\0\1\30\34\0\370\30\10\0"+ "\363\30\15\0\37\30\61\0\3\30\21\0\4\30\10\0"+ "\u018c\30\4\0\153\30\5\0\15\30\3\0\11\30\7\0"+ "\12\30\146\0\125\30\1\0\107\30\1\0\2\30\2\0"+ "\1\30\2\0\2\30\2\0\4\30\1\0\14\30\1\0"+ "\1\30\1\0\7\30\1\0\101\30\1\0\4\30\2\0"+ "\10\30\1\0\7\30\1\0\34\30\1\0\4\30\1\0"+ "\5\30\1\0\1\30\3\0\7\30\1\0\u0154\30\2\0"+ "\31\30\1\0\31\30\1\0\37\30\1\0\31\30\1\0"+ "\37\30\1\0\31\30\1\0\37\30\1\0\31\30\1\0"+ "\37\30\1\0\31\30\1\0\10\30\64\0\55\30\12\0"+ 
"\7\30\20\0\1\30\u0171\0\54\30\24\0\305\30\73\0"+ "\104\30\7\0\1\30\264\0\4\30\1\0\33\30\1\0"+ "\2\30\1\0\1\30\2\0\1\30\1\0\12\30\1\0"+ "\4\30\1\0\1\30\1\0\1\30\6\0\1\30\4\0"+ "\1\30\1\0\1\30\1\0\1\30\1\0\3\30\1\0"+ "\2\30\1\0\1\30\2\0\1\30\1\0\1\30\1\0"+ "\1\30\1\0\1\30\1\0\1\30\1\0\2\30\1\0"+ "\1\30\2\0\4\30\1\0\7\30\1\0\4\30\1\0"+ "\4\30\1\0\1\30\1\0\12\30\1\0\21\30\5\0"+ "\3\30\1\0\5\30\1\0\21\30\104\0\327\30\51\0"+ "\65\30\13\0\336\30\2\0\u0182\30\16\0\u0131\30\37\0"+ "\36\30\342\0"; private static int [] zzUnpackcmap_blocks() { int [] result = new int[27648]; int offset = 0; offset = zzUnpackcmap_blocks(ZZ_CMAP_BLOCKS_PACKED_0, offset, result); return result; } private static int zzUnpackcmap_blocks(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); do result[j++] = value; while (--count > 0); } return j; } /** * Translates DFA states to action switch labels. 
*/ private static final int [] ZZ_ACTION = zzUnpackAction(); private static final String ZZ_ACTION_PACKED_0 = "\1\1\11\0\1\2\4\0\1\1\1\3\5\1\4\4"+ "\1\5\3\4\4\6\1\7\1\10\2\11\1\12\1\13"+ "\1\11\1\14\1\11\1\15\1\16\1\17\1\16\1\20"+ "\1\21\1\22\2\20\1\23\1\24\2\23\1\2\1\25"+ "\1\4\3\26\3\27\1\6\1\27\1\30\1\1\7\0"+ "\1\31\1\32\1\33\7\0\1\31\1\34\1\20\2\0"+ "\1\35\3\0\1\36\10\0\1\37\4\0\1\40\1\0"+ "\1\41\1\42\1\43\2\0\1\44\2\0\1\45\2\0"+ "\1\46\1\4\6\0\1\47\1\50\4\0\1\51\1\52"; private static int [] zzUnpackAction() { int [] result = new int[138]; int offset = 0; offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); return result; } private static int zzUnpackAction(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); do result[j++] = value; while (--count > 0); } return j; } /** * Translates a state to a row index in the transition table */ private static final int [] ZZ_ROWMAP = zzUnpackRowMap(); private static final String ZZ_ROWMAP_PACKED_0 = "\0\0\0\66\0\154\0\242\0\330\0\u010e\0\u0144\0\u017a"+ "\0\u01b0\0\u01e6\0\u021c\0\u0252\0\u0288\0\u02be\0\u02f4\0\u032a"+ "\0\u0360\0\u0396\0\u03cc\0\u0402\0\u0438\0\u046e\0\u0396\0\u04a4"+ "\0\u03cc\0\u04da\0\u0396\0\u0510\0\u0546\0\u057c\0\u0396\0\u03cc"+ "\0\u05b2\0\u05e8\0\u0396\0\u0396\0\u0396\0\u03cc\0\u061e\0\u0396"+ "\0\u0654\0\u068a\0\u06c0\0\u0396\0\u06f6\0\u0396\0\u072c\0\u0762"+ "\0\u0396\0\u0396\0\u0798\0\u07ce\0\u0396\0\u0396\0\u03cc\0\u0804"+ "\0\u083a\0\u0396\0\u0870\0\u0396\0\u03cc\0\u06c0\0\u0396\0\u03cc"+ "\0\u08a6\0\u08dc\0\u0912\0\u0396\0\u0948\0\u097e\0\u09b4\0\u09ea"+ "\0\u0a20\0\u0a56\0\u0a8c\0\u0ac2\0\u0af8\0\u0b2e\0\u0396\0\u04a4"+ "\0\u04da\0\u0b64\0\u0b9a\0\u0bd0\0\u0c06\0\u0c3c\0\u0396\0\u0396"+ "\0\u0c72\0\u0ca8\0\u0cde\0\u0396\0\u0d14\0\u0d4a\0\u0d80\0\u0396"+ 
"\0\u0db6\0\u0dec\0\u0e22\0\u0e58\0\u0e8e\0\u0ec4\0\u0efa\0\u0f30"+ "\0\u0f66\0\u0f9c\0\u0fd2\0\u1008\0\u103e\0\u0396\0\u1074\0\u0396"+ "\0\u0396\0\u0396\0\u10aa\0\u10e0\0\u0396\0\u1116\0\u114c\0\u0396"+ "\0\u1182\0\u11b8\0\u0396\0\u11ee\0\u1224\0\u125a\0\u1290\0\u12c6"+ "\0\u12fc\0\u1332\0\u0396\0\u0396\0\u1368\0\u139e\0\u13d4\0\u140a"+ "\0\u0396\0\u0396"; private static int [] zzUnpackRowMap() { int [] result = new int[138]; int offset = 0; offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); return result; } private static int zzUnpackRowMap(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int high = packed.charAt(i++) << 16; result[j++] = high | packed.charAt(i++); } return j; } /** * The transition table of the DFA */ private static final int [] ZZ_TRANS = zzUnpackTrans(); private static final String ZZ_TRANS_PACKED_0 = "\1\20\1\21\2\20\2\22\1\23\10\20\1\24\25\20"+ "\1\25\17\20\1\26\1\27\1\21\1\27\1\30\2\27"+ "\1\31\1\32\10\27\1\33\10\27\1\34\4\27\1\35"+ "\1\36\20\27\1\35\1\27\1\36\2\27\1\21\6\37"+ "\1\40\2\37\1\41\5\37\1\42\1\43\23\37\1\44"+ "\21\37\1\45\1\21\4\45\1\46\6\45\1\47\1\45"+ "\1\50\2\45\22\47\3\45\16\47\1\21\1\45\1\21"+ "\4\45\1\46\6\45\1\47\1\45\1\51\2\45\22\47"+ "\3\45\16\47\1\21\1\45\1\52\4\45\1\46\4\45"+ "\1\53\4\45\1\54\44\45\1\52\1\55\1\21\1\55"+ "\1\56\3\55\1\56\3\55\1\53\3\55\1\45\1\54"+ "\44\55\1\57\1\60\1\21\1\60\1\61\3\60\1\62"+ "\3\60\1\63\4\60\1\54\44\60\1\64\3\65\1\66"+ "\2\65\1\67\36\65\1\70\26\65\1\67\1\66\35\65"+ "\1\70\20\65\20\71\1\72\1\73\44\71\1\74\1\21"+ "\4\74\1\75\4\74\1\76\3\74\1\50\1\54\44\74"+ "\1\21\6\77\1\100\2\77\1\101\10\77\22\102\2\77"+ "\1\103\16\102\7\77\1\100\2\77\1\101\10\77\22\102"+ "\2\77\1\104\16\102\1\77\46\22\1\105\17\22\1\20"+ "\1\0\2\20\3\0\10\20\1\0\25\20\1\25\20\20"+ "\1\0\1\21\63\0\1\21\72\0\1\106\10\0\1\107"+ "\4\0\22\107\3\0\1\110\3\107\2\111\1\107\1\112"+ 
"\2\107\1\113\3\107\3\0\1\114\10\0\1\115\1\0"+ "\1\116\3\0\1\117\22\116\3\0\16\116\1\0\1\20"+ "\1\0\4\20\1\0\10\20\1\0\25\20\1\25\21\20"+ "\1\21\2\20\3\0\10\20\1\0\25\20\1\25\17\20"+ "\1\26\3\120\1\65\62\120\7\121\1\65\56\121\40\0"+ "\1\122\22\0\1\122\43\0\1\123\22\0\1\123\44\0"+ "\1\124\33\0\1\125\56\0\1\126\74\0\2\47\1\0"+ "\2\47\4\0\22\47\3\0\16\47\14\0\1\127\53\0"+ "\1\52\63\0\1\52\20\0\1\130\45\0\1\55\1\0"+ "\1\55\1\0\3\55\1\0\3\55\1\0\3\55\2\0"+ "\46\55\1\21\1\55\1\0\3\55\1\0\3\55\1\0"+ "\3\55\2\0\44\55\1\57\1\60\1\0\11\60\1\131"+ "\4\60\1\0\46\60\1\0\11\60\1\131\4\60\1\130"+ "\46\60\1\21\11\60\1\131\4\60\1\0\44\60\1\64"+ "\5\0\1\65\60\0\20\71\1\0\1\132\64\71\1\72"+ "\45\71\11\0\1\133\55\0\2\102\3\0\1\102\1\0"+ "\1\102\1\0\1\102\1\0\1\102\5\0\22\102\3\0"+ "\17\102\20\0\1\134\113\0\1\135\33\0\1\136\25\0"+ "\1\137\34\0\2\107\1\0\2\107\1\140\3\0\22\107"+ "\3\0\16\107\12\0\2\107\1\0\2\107\1\140\3\0"+ "\22\107\3\0\6\107\1\141\2\107\1\142\4\107\12\0"+ "\2\107\1\0\2\107\1\140\3\0\22\107\3\0\14\107"+ "\1\143\1\107\12\0\2\107\1\0\2\107\1\140\3\0"+ "\22\107\3\0\1\107\1\144\14\107\12\0\2\107\1\0"+ "\2\107\1\140\3\0\22\107\3\0\15\107\1\145\12\0"+ "\1\146\13\0\1\147\16\0\1\150\5\0\1\147\30\0"+ "\1\151\4\0\22\151\3\0\16\151\12\0\2\116\1\0"+ "\2\116\4\0\22\116\3\0\16\116\35\0\1\152\20\0"+ "\1\152\33\0\1\153\24\0\1\153\54\0\1\154\22\0"+ "\1\154\5\0\1\155\15\0\1\156\56\0\1\157\32\0"+ "\1\160\21\0\13\60\1\131\4\60\1\0\45\60\20\71"+ "\1\0\45\71\20\0\1\156\65\0\1\161\61\0\1\136"+ "\1\0\1\162\63\0\1\163\5\0\6\163\17\0\4\163"+ "\24\0\2\107\1\0\2\107\1\140\3\0\22\107\3\0"+ "\11\107\1\143\4\107\12\0\2\107\1\0\2\107\1\140"+ "\3\0\22\107\3\0\10\107\1\164\5\107\12\0\2\107"+ "\1\0\2\107\1\162\3\0\22\107\3\0\16\107\12\0"+ "\2\107\1\0\2\107\1\140\3\0\22\107\3\0\13\107"+ "\1\141\2\107\12\0\2\107\1\0\2\107\1\140\3\0"+ "\22\107\3\0\10\107\1\111\5\107\12\0\1\165\111\0"+ "\1\166\21\0\1\166\32\0\1\167\52\0\2\151\1\0"+ "\2\151\4\0\22\151\3\0\16\151\34\0\1\170\20\0"+ 
"\1\170\44\0\1\171\20\0\1\171\51\0\1\172\22\0"+ "\1\172\22\0\1\173\56\0\1\174\70\0\1\163\1\0"+ "\1\162\3\0\6\163\17\0\4\163\24\0\2\107\1\0"+ "\2\107\1\140\3\0\22\107\3\0\13\107\1\143\2\107"+ "\25\0\1\175\24\0\1\175\41\0\1\176\72\0\1\177"+ "\61\0\1\200\50\0\1\155\6\0\1\156\105\0\1\201"+ "\22\0\1\201\24\0\1\202\67\0\1\203\24\0\1\203"+ "\50\0\1\204\20\0\1\204\53\0\1\205\62\0\1\206"+ "\63\0\1\207\21\0\1\207\27\0\1\210\71\0\1\211"+ "\103\0\1\212\21\0"; private static int [] zzUnpackTrans() { int [] result = new int[5184]; int offset = 0; offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); return result; } private static int zzUnpackTrans(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); value--; do result[j++] = value; while (--count > 0); } return j; } /** Error code for "Unknown internal scanner error". */ private static final int ZZ_UNKNOWN_ERROR = 0; /** Error code for "could not match input". */ private static final int ZZ_NO_MATCH = 1; /** Error code for "pushback value was too large". */ private static final int ZZ_PUSHBACK_2BIG = 2; /** * Error messages for {@link #ZZ_UNKNOWN_ERROR}, {@link #ZZ_NO_MATCH}, and * {@link #ZZ_PUSHBACK_2BIG} respectively. 
*/
  private static final String ZZ_ERROR_MSG[] = {
    "Unknown internal scanner error",
    "Error: could not match input",
    "Error: pushback value was too large"
  };

  /**
   * ZZ_ATTRIBUTE[aState] contains the attributes of state {@code aState}
   *
   * <p>Run-length encoded like the other generated tables. {@code advance()} reads bit 0 of an
   * entry as "accepting state" (record the action) and bit 3 as "accept and stop scanning"
   * (see the {@code zzAttributes & 1} / {@code & 8} checks there).
   */
  private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();

  /* Packed attribute table: a sequence of (count, value) character pairs. */
  private static final String ZZ_ATTRIBUTE_PACKED_0 =
    "\1\1\11\0\1\1\4\0\2\1\1\11\4\1\1\11"+
    "\3\1\1\11\3\1\1\11\3\1\3\11\2\1\1\11"+
    "\3\1\1\11\1\1\1\11\2\1\2\11\2\1\2\11"+
    "\3\1\1\11\1\1\1\11\2\1\1\11\4\1\1\11"+
    "\1\1\7\0\2\1\1\11\7\0\2\11\1\1\2\0"+
    "\1\11\3\0\1\11\10\0\1\1\4\0\1\11\1\0"+
    "\3\11\2\0\1\11\2\0\1\11\2\0\1\11\1\1"+
    "\6\0\2\11\4\0\2\11";

  /** Expands the packed attribute table into one int per DFA state (138 states). */
  private static int [] zzUnpackAttribute() {
    int [] result = new int[138];
    int offset = 0;
    offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
    return result;
  }

  /**
   * Decodes one run-length encoded string into {@code result} starting at {@code offset}:
   * each (count, value) char pair expands to {@code count} copies of {@code value}.
   *
   * @return the next free index in {@code result}
   */
  private static int zzUnpackAttribute(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }

  /** Input device. */
  private java.io.Reader zzReader;

  /** Current state of the DFA. */
  private int zzState;

  /** Current lexical state. */
  private int zzLexicalState = YYINITIAL;

  /**
   * This buffer contains the current text to be matched and is the source of the {@link #yytext()}
   * string.
   */
  private char zzBuffer[] = new char[ZZ_BUFFERSIZE];

  /** Text position at the last accepting state. */
  private int zzMarkedPos;

  /** Current text position in the buffer. */
  private int zzCurrentPos;

  /** Marks the beginning of the {@link #yytext()} string in the buffer. */
  private int zzStartRead;

  /** Marks the last character in the buffer, that has been read from input. */
  private int zzEndRead;

  /**
   * Whether the scanner is at the end of file.
   * @see #yyatEOF
   */
  private boolean zzAtEOF;

  /**
   * The number of occupied positions in {@link #zzBuffer} beyond {@link #zzEndRead}.
   *
   * <p>When a lead/high surrogate has been read from the input stream into the final
   * {@link #zzBuffer} position, this will have a value of 1; otherwise, it will have a value of 0.
   */
  private int zzFinalHighSurrogate = 0;

  /** Number of newlines encountered up to the start of the matched text. */
  private int yyline;

  /** Number of characters from the last newline up to the start of the matched text. */
  private int yycolumn;

  /** Number of characters up to the start of the matched text. */
  private long yychar;

  /** Whether the scanner is currently at the beginning of a line. */
  @SuppressWarnings("unused")
  private boolean zzAtBOL = true;

  /** Whether the user-EOF-code has already been executed. */
  @SuppressWarnings("unused")
  private boolean zzEOFDone;

  /* user code: */
  /** Start offset of the current token, in chars from the beginning of the input. */
  public final int getTokenStart() {
    return (int) yychar;
  }

  /** End offset (exclusive) of the current token. */
  public final int getTokenEnd() {
    return getTokenStart() + yylength();
  }


  /**
   * Creates a new scanner
   *
   * @param in  the java.io.Reader to read input from.
   */
  public XmlLexer(java.io.Reader in) {
    this.zzReader = in;
  }


  /**
   * Translates raw input code points to DFA table row
   */
  private static int zzCMap(int input) {
    int offset = input & 255;
    // Code points <= 255 index the block table directly; larger ones go through the
    // two-level ZZ_CMAP_TOP / ZZ_CMAP_BLOCKS lookup.
    return offset == input ? ZZ_CMAP_BLOCKS[offset] : ZZ_CMAP_BLOCKS[ZZ_CMAP_TOP[input >> 8] | offset];
  }


  /**
   * Refills the input buffer.
   *
   * @return {@code false} iff there was new input.
   * @exception java.io.IOException  if any I/O-Error occurs
   */
  private boolean zzRefill() throws java.io.IOException {

    /* first: make room (if you can) */
    if (zzStartRead > 0) {
      zzEndRead += zzFinalHighSurrogate;
      zzFinalHighSurrogate = 0;
      System.arraycopy(zzBuffer, zzStartRead, zzBuffer, 0, zzEndRead - zzStartRead);

      /* translate stored positions */
      zzEndRead -= zzStartRead;
      zzCurrentPos -= zzStartRead;
      zzMarkedPos -= zzStartRead;
      zzStartRead = 0;
    }

    /* is the buffer big enough? */
    if (zzCurrentPos >= zzBuffer.length - zzFinalHighSurrogate) {
      /* if not: blow it up */
      char newBuffer[] = new char[zzBuffer.length * 2];
      System.arraycopy(zzBuffer, 0, newBuffer, 0, zzBuffer.length);
      zzBuffer = newBuffer;
      zzEndRead += zzFinalHighSurrogate;
      zzFinalHighSurrogate = 0;
    }

    /* fill the buffer with new input */
    int requested = zzBuffer.length - zzEndRead;
    int numRead = zzReader.read(zzBuffer, zzEndRead, requested);

    /* not supposed to occur according to specification of java.io.Reader */
    if (numRead == 0) {
      throw new java.io.IOException(
          "Reader returned 0 characters. See JFlex examples/zero-reader for a workaround.");
    }
    if (numRead > 0) {
      zzEndRead += numRead;
      if (Character.isHighSurrogate(zzBuffer[zzEndRead - 1])) {
        if (numRead == requested) { // We requested too few chars to encode a full Unicode character
          --zzEndRead;
          zzFinalHighSurrogate = 1;
        } else {                    // There is room in the buffer for at least one more char
          int c = zzReader.read();  // Expecting to read a paired low surrogate char
          if (c == -1) {
            // Input ended on an unpaired high surrogate; keep it and report EOF not yet reached.
            return true;
          } else {
            zzBuffer[zzEndRead++] = (char)c;
          }
        }
      }
      /* potentially more input available */
      return false;
    }

    /* numRead < 0 ==> end of stream */
    return true;
  }


  /**
   * Closes the input reader.
   *
   * @throws java.io.IOException if the reader could not be closed.
   */
  public final void yyclose() throws java.io.IOException {
    zzAtEOF = true;           // indicate end of file
    zzEndRead = zzStartRead;  // invalidate buffer

    if (zzReader != null) {
      zzReader.close();
    }
  }


  /**
   * Resets the scanner to read from a new input stream.
   *
   * <p>Does not close the old reader.
   *
   * <p>All internal variables are reset, the old input stream <b>cannot</b> be reused (internal
   * buffer is discarded and lost). Lexical state is set to {@code ZZ_INITIAL}.
   *
   * <p>Internal scan buffer is resized down to its initial length, if it has grown.
   *
   * @param reader The new input stream.
   */
  public final void yyreset(java.io.Reader reader) {
    zzReader = reader;
    zzEOFDone = false;
    yyResetPosition();
    zzLexicalState = YYINITIAL;
    // Shrink the scan buffer back to its initial size if a previous input made it grow.
    if (zzBuffer.length > ZZ_BUFFERSIZE) {
      zzBuffer = new char[ZZ_BUFFERSIZE];
    }
  }

  /**
   * Resets the input position.
   */
  private final void yyResetPosition() {
      zzAtBOL = true;
      zzAtEOF = false;
      zzCurrentPos = 0;
      zzMarkedPos = 0;
      zzStartRead = 0;
      zzEndRead = 0;
      zzFinalHighSurrogate = 0;
      yyline = 0;
      yycolumn = 0;
      yychar = 0L;
  }


  /**
   * Returns whether the scanner has reached the end of the reader it reads from.
   *
   * @return whether the scanner has reached EOF.
   */
  public final boolean yyatEOF() {
    return zzAtEOF;
  }


  /**
   * Returns the current lexical state.
   *
   * @return the current lexical state.
   */
  public final int yystate() {
    return zzLexicalState;
  }


  /**
   * Enters a new lexical state.
   *
   * @param newState the new lexical state
   */
  public final void yybegin(int newState) {
    zzLexicalState = newState;
  }


  /**
   * Returns the text matched by the current regular expression.
   *
   * @return the matched text.
   */
  public final String yytext() {
    return new String(zzBuffer, zzStartRead, zzMarkedPos-zzStartRead);
  }


  /**
   * Returns the character at the given position from the matched text.
   *
   * <p>It is equivalent to {@code yytext().charAt(pos)}, but faster.
   *
   * @param position the position of the character to fetch. A value from 0 to {@code yylength()-1}.
   *
   * @return the character at {@code position}.
   */
  public final char yycharat(int position) {
    return zzBuffer[zzStartRead + position];
  }


  /**
   * How many characters were matched.
   *
   * @return the length of the matched text region.
   */
  public final int yylength() {
    return zzMarkedPos-zzStartRead;
  }


  /**
   * Reports an error that occurred while scanning.
   *
   * <p>In a well-formed scanner (no or only correct usage of {@code yypushback(int)} and a
   * match-all fallback rule) this method will only be called with things that
   * "Can't Possibly Happen".
   *
   * <p>If this method is called, something is seriously wrong (e.g.
a JFlex bug producing a faulty
   * scanner etc.).
   *
   * <p>Usual syntax/scanner level error handling should be done in error fallback rules.
   *
   * @param errorCode the code of the error message to display.
   */
  private static void zzScanError(int errorCode) {
    String message;
    try {
      message = ZZ_ERROR_MSG[errorCode];
    } catch (ArrayIndexOutOfBoundsException e) {
      // Unknown error code: fall back to the generic message rather than propagate.
      message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
    }

    // Scanner-internal failures are unrecoverable by callers: surface as an Error.
    throw new Error(message);
  }


  /**
   * Pushes the specified amount of characters back into the input stream.
   *
   * <p>They will be read again by the next call of the scanning method.
   *
   * @param number the number of characters to be read again. This number must not be greater than
   *     {@link #yylength()}.
   */
  public void yypushback(int number)  {
    if ( number > yylength() )
      zzScanError(ZZ_PUSHBACK_2BIG);

    zzMarkedPos -= number;
  }


  /**
   * Resumes scanning until the next regular expression is matched, the end of input is encountered
   * or an I/O-Error occurs.
   *
   * @return the next token.
   * @exception java.io.IOException if any I/O-Error occurs.
*/ @NotNull public XmlToken advance() throws java.io.IOException { int zzInput; int zzAction; // cached fields: int zzCurrentPosL; int zzMarkedPosL; int zzEndReadL = zzEndRead; char[] zzBufferL = zzBuffer; int [] zzTransL = ZZ_TRANS; int [] zzRowMapL = ZZ_ROWMAP; int [] zzAttrL = ZZ_ATTRIBUTE; while (true) { zzMarkedPosL = zzMarkedPos; yychar+= zzMarkedPosL-zzStartRead; boolean zzR = false; int zzCh; int zzCharCount; for (zzCurrentPosL = zzStartRead ; zzCurrentPosL < zzMarkedPosL ; zzCurrentPosL += zzCharCount ) { zzCh = Character.codePointAt(zzBufferL, zzCurrentPosL, zzMarkedPosL); zzCharCount = Character.charCount(zzCh); switch (zzCh) { case '\u000B': // fall through case '\u000C': // fall through case '\u0085': // fall through case '\u2028': // fall through case '\u2029': yyline++; yycolumn = 0; zzR = false; break; case '\r': yyline++; yycolumn = 0; zzR = true; break; case '\n': if (zzR) zzR = false; else { yyline++; yycolumn = 0; } break; default: zzR = false; yycolumn += zzCharCount; } } if (zzR) { // peek one character ahead if it is // (if we have counted one line too much) boolean zzPeek; if (zzMarkedPosL < zzEndReadL) zzPeek = zzBufferL[zzMarkedPosL] == '\n'; else if (zzAtEOF) zzPeek = false; else { boolean eof = zzRefill(); zzEndReadL = zzEndRead; zzMarkedPosL = zzMarkedPos; zzBufferL = zzBuffer; if (eof) zzPeek = false; else zzPeek = zzBufferL[zzMarkedPosL] == '\n'; } if (zzPeek) yyline--; } zzAction = -1; zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL; zzState = ZZ_LEXSTATE[zzLexicalState]; // set up zzAction for empty match case: int zzAttributes = zzAttrL[zzState]; if ( (zzAttributes & 1) == 1 ) { zzAction = zzState; } zzForAction: { while (true) { if (zzCurrentPosL < zzEndReadL) { zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL, zzEndReadL); zzCurrentPosL += Character.charCount(zzInput); } else if (zzAtEOF) { zzInput = YYEOF; break zzForAction; } else { // store back cached positions zzCurrentPos = zzCurrentPosL; zzMarkedPos = 
zzMarkedPosL; boolean eof = zzRefill(); // get translated positions and possibly new buffer zzCurrentPosL = zzCurrentPos; zzMarkedPosL = zzMarkedPos; zzBufferL = zzBuffer; zzEndReadL = zzEndRead; if (eof) { zzInput = YYEOF; break zzForAction; } else { zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL, zzEndReadL); zzCurrentPosL += Character.charCount(zzInput); } } int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMap(zzInput) ]; if (zzNext == -1) break zzForAction; zzState = zzNext; zzAttributes = zzAttrL[zzState]; if ( (zzAttributes & 1) == 1 ) { zzAction = zzState; zzMarkedPosL = zzCurrentPosL; if ( (zzAttributes & 8) == 8 ) break zzForAction; } } } // store back cached position zzMarkedPos = zzMarkedPosL; if (zzInput == YYEOF && zzStartRead == zzCurrentPos) { zzAtEOF = true; { return XmlToken.EOF; } } else { switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) { case 1: { return XmlToken.XML_DATA_CHARACTERS; } // fall through case 43: break; case 2: { return XmlToken.XML_PI_TARGET; } // fall through case 44: break; case 3: { return XmlToken.WHITESPACE; } // fall through case 45: break; case 4: { return XmlToken.BAD_CHARACTER; } // fall through case 46: break; case 5: { yybegin(YYINITIAL); return XmlToken.XML_DOCTYPE_END; } // fall through case 47: break; case 6: { return XmlToken.XML_COMMENT_CHARACTERS; } // fall through case 48: break; case 7: { int loc = getTokenStart(); char prev = zzBuffer[loc - 1]; char prevPrev = zzBuffer[loc - 2]; if (prev == '-' && prevPrev == '-') { yybegin(YYINITIAL); return XmlToken.BAD_CHARACTER; } return XmlToken.XML_COMMENT_CHARACTERS; } // fall through case 49: break; case 8: { yybegin(C_COMMENT_START); return XmlToken.XML_CONDITIONAL_COMMENT_START; } // fall through case 50: break; case 9: { yybegin(YYINITIAL); yypushback(1); break; } // fall through case 51: break; case 10: { yybegin(BEFORE_TAG_ATTRIBUTES); return XmlToken.XML_TAG_NAME; } // fall through case 52: break; case 11: { return XmlToken.XML_START_TAG_START; } // 
fall through case 53: break; case 12: { yybegin(TAG_ATTRIBUTES); return XmlToken.WHITESPACE; } // fall through case 54: break; case 13: { yybegin(YYINITIAL); return XmlToken.XML_TAG_END; } // fall through case 55: break; case 14: { return XmlToken.XML_ATTR_NAME; } // fall through case 56: break; case 15: { yybegin(ATTRIBUTE_VALUE_START); yypushback(1); } // fall through case 57: break; case 16: { yybegin(TAG_ATTRIBUTES); return XmlToken.XML_ATTRIBUTE_VALUE_TOKEN; } // fall through case 58: break; case 17: { yybegin(ATTRIBUTE_VALUE_DQ); return XmlToken.XML_ATTRIBUTE_VALUE_START_DELIMITER; } // fall through case 59: break; case 18: { yybegin(ATTRIBUTE_VALUE_SQ); return XmlToken.XML_ATTRIBUTE_VALUE_START_DELIMITER; } // fall through case 60: break; case 19: { return XmlToken.XML_ATTRIBUTE_VALUE_TOKEN; } // fall through case 61: break; case 20: { yybegin(TAG_ATTRIBUTES); return XmlToken.XML_ATTRIBUTE_VALUE_END_DELIMITER; } // fall through case 62: break; case 21: { yybegin(YYINITIAL); return XmlToken.XML_PI_END; } // fall through case 63: break; case 22: { return XmlToken.XML_TAG_CHARACTERS; } // fall through case 64: break; case 23: { yybegin(COMMENT); return XmlToken.XML_COMMENT_CHARACTERS; } // fall through case 65: break; case 24: { yybegin(COMMENT); return XmlToken.XML_CONDITIONAL_COMMENT_END; } // fall through case 66: break; case 25: { return XmlToken.XML_END_TAG_START; } // fall through case 67: break; case 26: { yybegin(START_TAG_NAME); yypushback(yylength()); } // fall through case 68: break; case 27: { yybegin(PROCESSING_INSTRUCTION); return XmlToken.XML_PI_START; } // fall through case 69: break; case 28: { yybegin(YYINITIAL); return XmlToken.XML_EMPTY_ELEMENT_END; } // fall through case 70: break; case 29: { yybegin(COMMENT); return XmlToken.XML_CONDITIONAL_COMMENT_START_END; } // fall through case 71: break; case 30: { return XmlToken.XML_ENTITY_REF_TOKEN; } // fall through case 72: break; case 31: { yybegin(END_TAG_NAME); yypushback(yylength()); } // 
fall through case 73: break; case 32: { yybegin(YYINITIAL); return XmlToken.XML_COMMENT_END; } // fall through case 74: break; case 33: { yybegin(C_COMMENT_END); return XmlToken.XML_CONDITIONAL_COMMENT_END_START; } // fall through case 75: break; case 34: { yybegin(YYINITIAL); return XmlToken.XML_CDATA_END; } // fall through case 76: break; case 35: { return XmlToken.XML_CHAR_ENTITY_REF; } // fall through case 77: break; case 36: { yybegin(COMMENT); return XmlToken.XML_COMMENT_START; } // fall through case 78: break; case 37: { return XmlToken.XML_TAG_NAME; } // fall through case 79: break; case 38: { yybegin(YYINITIAL); return XmlToken.BAD_CHARACTER; } // fall through case 80: break; case 39: { return XmlToken.XML_DOCTYPE_PUBLIC; } // fall through case 81: break; case 40: { return XmlToken.XML_DOCTYPE_SYSTEM; } // fall through case 82: break; case 41: { yybegin(DOC_TYPE); return XmlToken.XML_DOCTYPE_START; } // fall through case 83: break; case 42: { yybegin(CDATA); return XmlToken.XML_CDATA_START; } // fall through case 84: break; default: zzScanError(ZZ_NO_MATCH); } } } } }
package com.andersenlab.crm.convertservice;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;

import java.util.List;
import java.util.Locale;

/**
 * A service interface for type conversion. This is the entry point into the convert system.
 * To perform a type conversion, call {@link #convert(Object, Class)} or one of the
 * collection/page variants below; the {@code *WithLocale} overloads additionally thread a
 * {@link Locale} through to the conversion.
 */
public interface ConversionService {

    /**
     * Converts the given source to the specified target.
     *
     * @param source the source object to convert
     * @param target the target type to convert to
     * @param <S>    the type of source object
     * @param <T>    the type of object, obtained as a result of conversion
     * @return the converted object
     */
    <S, T> T convert(S source, Class<T> target);

    /**
     * Converts the given source to the specified target, making {@code locale} available to
     * the conversion (presumably for locale-dependent fields — NOTE(review): confirm against
     * the registered converter implementations).
     *
     * @param source the source object to convert
     * @param target the target type to convert to
     * @param locale the locale made available to the conversion
     * @param <S>    the type of source object
     * @param <T>    the type of object, obtained as a result of conversion
     * @return the converted object
     */
    <S, T> T convertWithLocale(S source, Class<T> target, Locale locale);

    /**
     * Converts a list of objects to a list of objects of type T.
     *
     * @param source the iterable of objects to convert
     * @param target the target type
     * @param <S>    the type of source objects
     * @param <T>    the type of objects, obtained as a result of conversion
     * @return the list of converted object
     */
    <S, T> List<T> convertToList(Iterable<S> source, Class<T> target);

    /**
     * Locale-aware variant of {@link #convertToList(Iterable, Class)}.
     *
     * @param source the iterable of objects to convert
     * @param target the target type
     * @param locale the locale made available to the conversion
     * @param <S>    the type of source objects
     * @param <T>    the type of objects, obtained as a result of conversion
     * @return the list of converted objects
     */
    <S, T> List<T> convertToListWithLocale(Iterable<S> source, Class<T> target, Locale locale);

    /**
     * Converts the content of a {@link Page} to type {@code T}, producing a new page.
     * The {@link Pageable} presumably supplies the paging metadata of the resulting page —
     * NOTE(review): confirm how implementations combine it with {@code source}'s own metadata.
     *
     * @param pageable the paging information for the resulting page
     * @param source   the page whose content is to be converted
     * @param target   the target element type
     * @param <S>      the type of source elements
     * @param <T>      the type of elements, obtained as a result of conversion
     * @return the page of converted elements
     */
    <S, T> Page<T> convertToPage(Pageable pageable, Page<S> source, Class<T> target);

    /**
     * Locale-aware variant of {@link #convertToPage(Pageable, Page, Class)}.
     *
     * @param pageable the paging information for the resulting page
     * @param source   the page whose content is to be converted
     * @param target   the target element type
     * @param locale   the locale made available to the conversion
     * @param <S>      the type of source elements
     * @param <T>      the type of elements, obtained as a result of conversion
     * @return the page of converted elements
     */
    <S, T> Page<T> convertToPageWithLocale(Pageable pageable, Page<S> source, Class<T> target, Locale locale);
}
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * AdSenseSettingsFontSize.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

package com.google.api.ads.dfp.axis.v201702;

/**
 * Axis 1.x type-safe "enum" wrapper for the {@code AdSenseSettings.FontSize} XML type
 * (values: DEFAULT, SMALL, MEDIUM, LARGE).
 *
 * <p>GENERATED CODE — do not edit by hand; regenerate from the WSDL instead.
 * Instances are canonical: the protected constructor registers each instance in
 * {@code _table_}, {@code fromValue} looks instances up there, and {@code readResolve}
 * funnels deserialized copies back to the registered instance, which is why identity
 * {@code equals} below is sufficient.
 */
public class AdSenseSettingsFontSize implements java.io.Serializable {
    // The wire value of this instance (e.g. "SMALL").
    private java.lang.String _value_;
    // Registry of all instances keyed by wire value; populated by the constructor when the
    // static constants below are initialized. Raw HashMap as emitted by the Axis generator.
    private static java.util.HashMap _table_ = new java.util.HashMap();

    // Constructor — stores the wire value and registers this instance in the lookup table.
    protected AdSenseSettingsFontSize(java.lang.String value) {
        _value_ = value;
        _table_.put(_value_,this);
    }

    public static final java.lang.String _DEFAULT = "DEFAULT";
    public static final java.lang.String _SMALL = "SMALL";
    public static final java.lang.String _MEDIUM = "MEDIUM";
    public static final java.lang.String _LARGE = "LARGE";

    public static final AdSenseSettingsFontSize DEFAULT = new AdSenseSettingsFontSize(_DEFAULT);
    public static final AdSenseSettingsFontSize SMALL = new AdSenseSettingsFontSize(_SMALL);
    public static final AdSenseSettingsFontSize MEDIUM = new AdSenseSettingsFontSize(_MEDIUM);
    public static final AdSenseSettingsFontSize LARGE = new AdSenseSettingsFontSize(_LARGE);

    /** Returns the wire value of this constant (e.g. "DEFAULT"). */
    public java.lang.String getValue() { return _value_;}

    /**
     * Looks up the canonical instance for the given wire value.
     *
     * @throws java.lang.IllegalArgumentException if no instance is registered for {@code value}
     *         (note: thrown without a message, as generated)
     */
    public static AdSenseSettingsFontSize fromValue(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        AdSenseSettingsFontSize enumeration = (AdSenseSettingsFontSize)
            _table_.get(value);
        if (enumeration==null) throw new java.lang.IllegalArgumentException();
        return enumeration;
    }

    /** Alias for {@link #fromValue(java.lang.String)}. */
    public static AdSenseSettingsFontSize fromString(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        return fromValue(value);
    }

    // Identity comparison suffices because readResolve() guarantees one instance per value.
    public boolean equals(java.lang.Object obj) {return (obj == this);}
    public int hashCode() { return toString().hashCode();}
    public java.lang.String toString() { return _value_;}
    // Replaces a freshly-deserialized copy with the canonical registered instance.
    public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}

    /** Axis serialization hook: serializes instances as XML enumeration values. */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumSerializer(
            _javaType, _xmlType);
    }

    /** Axis deserialization hook: resolves XML enumeration values to instances. */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumDeserializer(
            _javaType, _xmlType);
    }

    // Type metadata — binds this class to its XML schema type for the Axis runtime.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(AdSenseSettingsFontSize.class);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201702", "AdSenseSettings.FontSize"));
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.perf; import javax.jms.ConnectionFactory; import javax.jms.DeliveryMode; import javax.jms.Destination; import javax.jms.JMSException; /** * */ public class KahaDBDurableTransactedTopicTest extends KahaDBDurableTopicTest { @Override protected void setUp() throws Exception { //this.initialConsumerDelay = 10 * 1000; super.setUp(); } @Override protected PerfProducer createProducer(ConnectionFactory fac, Destination dest, int number, byte[] payload) throws JMSException { PerfProducer result = new PerfProducer(fac, dest, payload, true); result.setDeliveryMode(DeliveryMode.PERSISTENT); return result; } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.ui.laf.intellij;

import com.intellij.ide.ui.laf.darcula.ui.DarculaSpinnerUI;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.LafIconLookup;
import org.intellij.lang.annotations.MagicConstant;

import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.basic.BasicArrowButton;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.geom.Area;
import java.awt.geom.Path2D;
import java.awt.geom.Rectangle2D;

import static com.intellij.ide.ui.laf.darcula.DarculaUIUtil.maximize;
import static com.intellij.util.ui.JBUI.*;

/**
 * Spinner UI delegate for the Windows-style IntelliJ look-and-feel.
 * Paints a flat editor area plus a pair of custom up/down arrow buttons
 * (20px wide) on the right, with rollover/pressed states driven by
 * {@code Button.intellij.native.*} UIManager colors.
 */
public class WinIntelliJSpinnerUI extends DarculaSpinnerUI {
  // Client property toggled by the editor mouse listener; read by painters elsewhere in the LaF.
  static final String HOVER_PROPERTY = "JSpinner.hover";
  static final int BUTTON_WIDTH = 20;
  static final int SPINNER_HEIGHT = 22;

  // Tracks hover over both the spinner and its editor; installed/removed as a pair.
  private MouseListener editorMouseListener;

  @SuppressWarnings({"MethodOverridesStaticMethodOfSuperclass", "UnusedDeclaration"})
  public static ComponentUI createUI(JComponent c) {
    return new WinIntelliJSpinnerUI();
  }

  @Override
  protected void installDefaults() {
    super.installDefaults();
    // Painting is done entirely in paint(); the component must not fill its own background.
    spinner.setOpaque(false);
  }

  @Override
  protected void installListeners() {
    super.installListeners();
    editorMouseListener = new MouseAdapter() {
      @Override
      public void mouseEntered(MouseEvent e) {
        setHover(Boolean.TRUE);
      }

      @Override
      public void mouseExited(MouseEvent e) {
        setHover(Boolean.FALSE);
      }

      // Only record hover state while enabled, then repaint to reflect it.
      private void setHover(Boolean value) {
        if (spinner.isEnabled()) {
          spinner.putClientProperty(HOVER_PROPERTY, value);
          spinner.repaint();
        }
      }
    };
    // Listen on both the spinner itself and the editor's focus component,
    // so hovering the text field also counts as hovering the spinner.
    spinner.addMouseListener(editorMouseListener);
    getEditorFocusOwner(spinner).addMouseListener(editorMouseListener);
  }

  @Override
  protected void uninstallListeners() {
    super.uninstallListeners();
    if (editorMouseListener != null) {
      spinner.removeMouseListener(editorMouseListener);
      getEditorFocusOwner(spinner).removeMouseListener(editorMouseListener);
    }
  }

  // Returns the editor's first child (the focusable text component).
  // NOTE(review): synchronizes on the AWT tree lock while reading the child; keep as-is.
  private static Component getEditorFocusOwner(JSpinner spinner) {
    synchronized (spinner.getEditor().getTreeLock()) {
      return spinner.getEditor().getComponent(0);
    }
  }

  /**
   * Paints the spinner background: parent background when opaque, then the
   * editor-area fill (excluding a 1px frame and the button column on the right).
   * Disabled spinners are painted translucent (alpha 0.35).
   */
  @Override
  public void paint(Graphics g, JComponent c) {
    Graphics2D g2 = (Graphics2D)g.create();
    try {
      g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
      g2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);

      Rectangle r = new Rectangle(c.getWidth(), c.getHeight());

      Container parent = c.getParent();
      if (c.isOpaque() && parent != null) {
        g2.setColor(parent.getBackground());
        g2.fill(r);
      }

      // Reserve 1px on top/left/bottom and the full button column on the right.
      JBInsets.removeFrom(r, insets(1, 1, 1, BUTTON_WIDTH));
      JBInsets.removeFrom(r, c.getInsets());

      g2.setColor(c.isEnabled() ? c.getBackground() : UIManager.getColor("Button.background"));
      if (!c.isEnabled()) {
        g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.35f));
      }
      g2.fill(r);
    } finally {
      g2.dispose();
    }
  }

  /**
   * Creates one of the two arrow buttons. The anonymous subclass does all of
   * its own painting: clip setup, state-dependent background, triangle icon,
   * then a 1px border drawn as an even-odd path (outer minus inner rect).
   */
  @Override
  protected JButton createButton(@MagicConstant(intValues = {SwingConstants.NORTH, SwingConstants.SOUTH}) int direction, String name) {
    JButton button = new BasicArrowButton(direction) {
      // Icon name resolves to "spinnerUpTriangle" / "spinnerDownTriangle" in the LaF icon lookup.
      private final String iconName = "spinner" + (getDirection() == SwingConstants.NORTH ? "Up" : "Down") + "Triangle";

      @Override
      public void paint(Graphics g) {
        Graphics2D g2 = (Graphics2D)g.create();
        try {
          g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
          g2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);

          int bw = scale(1);
          ButtonModel bm = getModel();

          // set clip: hide the left 1px strip unless hovered/pressed, and hide
          // the edge shared with the sibling button while the sibling is hovered
          // (avoids a double border between the two buttons).
          Area clip = new Area(g2.getClip());
          if (!bm.isRollover() && !bm.isPressed()){
            clip.subtract(new Area(new Rectangle2D.Double(0, 0, bw, getHeight())));
          }
          if (getDirection() == SwingConstants.NORTH && prevButton.getModel().isRollover()) {
            clip.subtract(new Area(new Rectangle(0, getHeight() - bw, getWidth() - bw, bw)));
          } else if (getDirection() == SwingConstants.SOUTH && nextButton.getModel().isRollover()) {
            clip.subtract(new Area(new Rectangle(0, 0, getWidth() - bw, bw)));
          }
          g2.setClip(clip);

          // paint background: pressed > rollover > normal; disabled is dimmed.
          Rectangle outerRect = new Rectangle(0, getDirection() == SwingConstants.NORTH ? bw : 0,
                                             getWidth() - bw, getHeight() - bw);
          if (spinner.isEnabled()) {
            if (bm.isPressed()) {
              g2.setColor(UIManager.getColor("Button.intellij.native.pressedBackgroundColor"));
            } else if (bm.isRollover()) {
              g2.setColor(UIManager.getColor("Button.intellij.native.focusedBackgroundColor"));
            } else {
              g2.setColor(UIManager.getColor("Button.background"));
            }
          } else {
            g2.setColor(UIManager.getColor("Button.background"));
            g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.47f));
          }
          g2.fill(outerRect);

          // paint icon
          Icon icon = LafIconLookup.getIcon(iconName, false, false, isEnabled());
          icon.paintIcon(this, g2, scale(5), scale(3));

          // paint border, using the same state priority as the background.
          if (spinner.isEnabled()) {
            if (bm.isPressed()) {
              g2.setColor(UIManager.getColor("Button.intellij.native.pressedBorderColor"));
            } else if (bm.isRollover()) {
              g2.setColor(UIManager.getColor("Button.intellij.native.focusedBorderColor"));
            } else {
              g2.setColor(UIManager.getColor("Button.intellij.native.borderColor"));
            }
          } else {
            g2.setColor(UIManager.getColor("Button.intellij.native.borderColor"));
          }

          // Even-odd fill of (outer - inner) yields a 1px border frame.
          Path2D border = new Path2D.Float(Path2D.WIND_EVEN_ODD);
          border.append(outerRect, false);

          Rectangle innerRect = new Rectangle(outerRect);
          JBInsets.removeFrom(innerRect, JBUI.insets(1));
          border.append(innerRect, false);
          g2.fill(border);
        } finally {
          g2.dispose();
        }
      }

      @Override
      public Dimension getPreferredSize() {
        Insets si = spinner.getInsets();
        // Up button is 1px taller (13 vs 12) so the shared edge overlaps cleanly.
        return JBUI.size(BUTTON_WIDTH + si.right, (getDirection() == SwingConstants.NORTH) ? 13 : 12);
      }
    };

    button.setName(name);
    button.setRolloverEnabled(true);
    button.setOpaque(false);
    if (direction == SwingConstants.NORTH) {
      installNextButtonListeners(button);
      button.setBorder(Borders.empty(2, 1, 1, 2));
    } else {
      installPreviousButtonListeners(button);
      button.setBorder(Borders.empty(1, 1, 2, 2));
    }
    return button;
  }

  /**
   * Wraps the default layout, then re-positions the arrow buttons flush-right
   * (overlapping by 1px at their shared edge) and vertically centers the editor
   * in the remaining space.
   */
  @Override
  protected LayoutManager createLayout() {
    return new LayoutManagerDelegate(super.createLayout()) {
      @Override
      public void layoutContainer(Container parent) {
        super.layoutContainer(parent);
        Rectangle bounds = parent.getBounds();
        Dimension nextButtonSize = nextButton.getPreferredSize();
        Dimension prevButtonSize = prevButton.getPreferredSize();
        // Split the available height proportionally to the preferred heights,
        // minus the 1px overlap between the two buttons.
        nextButtonSize.height = bounds.height * nextButtonSize.height /
                                (nextButtonSize.height + prevButtonSize.height - scale(1));
        nextButton.setBounds(bounds.width - nextButtonSize.width, 0, nextButtonSize.width, nextButtonSize.height);
        prevButton.setBounds(bounds.width - prevButtonSize.width, nextButtonSize.height - scale(1),
                             prevButtonSize.width, bounds.height - nextButtonSize.height + scale(1));

        JComponent editor = spinner.getEditor();
        if (editor != null) {
          int w = spinner.getWidth();
          int h = spinner.getHeight();
          Insets i = spinner.getInsets();
          Insets m = editorMargins();
          int editorHeight = editor.getPreferredSize().height;
          int editorOffset = (h - i.top - i.bottom - m.top - m.bottom - editorHeight) / 2;
          editor.setBounds(i.left + m.left, i.top + m.top + editorOffset,
                           w - (i.left + m.left + scale(BUTTON_WIDTH) + m.right + i.right), editorHeight);
        }
      }
    };
  }

  @Override
  protected JComponent createEditor() {
    JComponent editor = super.createEditor();
    editor.setBorder(Borders.empty(1, 0));
    editor.setOpaque(false);
    return editor;
  }

  @Override
  protected void replaceEditor(JComponent oldEditor, JComponent newEditor) {
    super.replaceEditor(oldEditor, newEditor);
    // Keep replacement editors consistent with createEditor().
    newEditor.setBorder(Borders.empty(1, 0));
    newEditor.setOpaque(false);
  }

  /**
   * Computes the spinner's preferred/minimum size: at least tall enough for the
   * buttons and wide enough for editor + margins + button column.
   */
  protected Dimension getSizeWithButtons(Insets i, Dimension size) {
    int iconWidth = scale(BUTTON_WIDTH) + i.right;
    int iconHeight = scale(SPINNER_HEIGHT) + i.top + i.bottom;

    Dimension minSize = new Dimension(i.left + MINIMUM_WIDTH.get() + i.right, iconHeight);
    size = maximize(size, minSize);

    Dimension editorSize = spinner.getEditor() != null ? spinner.getEditor().getPreferredSize() : emptySize();
    Insets m = editorMargins();

    return new Dimension(Math.max(size.width, i.left + m.left + editorSize.width + m.right + iconWidth),
                         Math.max(size.height, i.top + m.top + editorSize.height + m.bottom + i.bottom));
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.aliyuncs.cdn.model.v20180510;

import java.util.List;
import com.aliyuncs.AcsResponse;
import com.aliyuncs.cdn.transform.v20180510.DescribeL2VipsByDomainResponseUnmarshaller;
import com.aliyuncs.transform.UnmarshallerContext;

/**
 * Response model for the CDN {@code DescribeL2VipsByDomain} API: the request
 * id, the queried domain name, and the list of L2 VIP addresses.
 *
 * @author auto create
 * @version 
 */
public class DescribeL2VipsByDomainResponse extends AcsResponse {

	// Server-assigned identifier of this API request.
	private String requestId;

	// Domain the VIPs were queried for.
	private String domainName;

	// L2 VIP addresses returned by the service.
	private List<String> vips;

	public String getRequestId() {
		return requestId;
	}

	public void setRequestId(final String requestId) {
		this.requestId = requestId;
	}

	public String getDomainName() {
		return domainName;
	}

	public void setDomainName(final String domainName) {
		this.domainName = domainName;
	}

	public List<String> getVips() {
		return vips;
	}

	public void setVips(final List<String> vips) {
		this.vips = vips;
	}

	/** Populates this instance from the raw response via the generated unmarshaller. */
	@Override
	public DescribeL2VipsByDomainResponse getInstance(UnmarshallerContext context) {
		return DescribeL2VipsByDomainResponseUnmarshaller.unmarshall(this, context);
	}
}
package com.gokuai.base; /** * Created by Brandon on 2017/5/24. */ public class NetConfig { //Http post 数据格式 public static final String POST_DEFAULT_FORM_TYPE = "post_default_form_type"; public static final String POST_JSON_TYPE = "post_json_type"; }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.gora.cassandra.query;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import me.prettyprint.cassandra.serializers.StringSerializer;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.specific.SpecificFixed;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.query.Query;
import org.apache.gora.query.impl.ResultBase;
import org.apache.gora.store.DataStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Iterates over a {@link CassandraResultSet}, materializing each Cassandra row
 * into the current Avro {@link Persistent} bean.
 *
 * <p>NOTE(review): {@link #setResultSet} and {@link #setReverseMap} must be
 * called before iteration begins — nothing here null-checks them.</p>
 */
public class CassandraResult<K, T extends Persistent> extends ResultBase<K, T> {

  public static final Logger LOG = LoggerFactory.getLogger(CassandraResult.class);

  // Index of the NEXT row to load; incremented once per nextInner() call.
  private int rowNumber;

  private CassandraResultSet<K> cassandraResultSet;

  /**
   * Maps Cassandra columns to Avro fields.
   * Key format is "family:columnName" (see updatePersistent).
   */
  private Map<String, String> reverseMap;

  public CassandraResult(DataStore<K, T> dataStore, Query<K, T> query) {
    super(dataStore, query);
  }

  /**
   * Advances to the next row. Loads the row at the current index (if any)
   * into {@code this.persistent}, then increments the index.
   *
   * @return true while the just-consumed position was within the result set
   */
  @Override
  protected boolean nextInner() throws IOException {
    if (this.rowNumber < this.cassandraResultSet.size()) {
      updatePersistent();
    }
    ++this.rowNumber;
    // True iff updatePersistent() ran above (index was in range before the increment).
    return (this.rowNumber <= this.cassandraResultSet.size());
  }


  /**
   * Load key/value pair from Cassandra row to Avro record.
   * @throws IOException
   */
  @SuppressWarnings("unchecked")
  private void updatePersistent() throws IOException {
    CassandraRow<K> cassandraRow = this.cassandraResultSet.get(this.rowNumber);

    // load key
    this.key = cassandraRow.getKey();

    // load value
    Schema schema = this.persistent.getSchema();
    List<Field> fields = schema.getFields();

    for (CassandraColumn cassandraColumn: cassandraRow) {

      // get field name: reverse-map "family:column" back to the Avro field.
      String family = cassandraColumn.getFamily();
      String fieldName = this.reverseMap.get(family + ":" + StringSerializer.get().fromByteBuffer(cassandraColumn.getName()));

      // get field
      int pos = this.persistent.getFieldIndex(fieldName);
      Field field = fields.get(pos);

      // get value: the column deserializes itself against the Avro field schema.
      cassandraColumn.setField(field);
      Object value = cassandraColumn.getValue();

      this.persistent.put(pos, value);
      // this field does not need to be written back to the store
      this.persistent.clearDirty(pos);
    }

  }


  @Override
  public void close() throws IOException {
    // TODO Auto-generated method stub
  }

  /**
   * Fraction of rows consumed so far.
   * NOTE(review): divides by the result-set size — an empty result set would
   * yield NaN/∞ here; confirm callers never ask for progress on empty results.
   */
  @Override
  public float getProgress() throws IOException {
    return (((float) this.rowNumber) / this.cassandraResultSet.size());
  }


  public void setResultSet(CassandraResultSet<K> cassandraResultSet) {
    this.cassandraResultSet = cassandraResultSet;
  }

  public void setReverseMap(Map<String, String> reverseMap) {
    this.reverseMap = reverseMap;
  }

}
/******************************************************************************* * Copyright 2019 Fabrizio Pastore, Leonardo Mariani * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package regressionTestManager.tcSpecifications; import regressionTestManager.ioInvariantParser.Variable; public class TcSpecificationEquals extends TcSpecificationSingle implements TcSpecification { public TcSpecificationEquals( Variable variable, Object value) { super( variable, value); } public String toString(){ return variable+" == "+value; } }
package cl.uchile.dcc.scrabble.types;

import cl.uchile.dcc.scrabble.utils.BinaryUtils;
import cl.uchile.dcc.scrabble.utils.NumberUtils;

import java.util.Objects;

/**
 * Represents an Float (decimal) type for Scrabble, utilizing a Java double value.
 *
 * <p>Arithmetic uses double dispatch: {@code a.subtract(b)} forwards to
 * {@code b.subtractSFloat(a)}, so the {@code subtractX}/{@code divideX}
 * methods intentionally compute with reversed operand order (see comments
 * below) — the net result is still {@code a - b} / {@code a / b}.</p>
 */
public class SFloat implements ISType, IConcatenable, IArithmeticOperable, Comparable<IArithmeticOperable> {
    // Wrapped numeric payload.
    private double value;

    /**
     * SFloat Constructor
     * @param value double to represent
     */
    public SFloat(double value){
        this.value = value;
    }

    /**
     * Acts like a getValue()
     * @return value of internal double
     */
    public double toDouble(){
        return this.value;
    }

    /**
     * Setter for internal double
     * @param value New value of internal double
     */
    public void setDouble(double value) {
        this.value = value;
    }

    /**
     * @param obj Object to check for equality
     * @return true iff the other object is an SFloat, SInt or SBinary such as its internal value is the same
     * as of this SFloat internal double. false otherwise
     */
    @Override
    public boolean equals(Object obj){
        if (obj instanceof SFloat){
            SFloat o = (SFloat) obj;
            return this.toDouble() == o.toDouble();
        } else if (obj instanceof SInt) {
            SInt o = (SInt) obj;
            return this.toDouble() == (double) o.toInt();
        } else if (obj instanceof SBinary) {
            SBinary o = (SBinary) obj;
            // SBinary compares via its integer interpretation.
            return this.toDouble() == (double) o.asSInt().toInt();
        } else {
            return false;
        }
    }

    // Constant per-class hash: required because equals() spans SFloat/SInt/SBinary,
    // so equal objects of different classes must not hash by value.
    @Override
    public int hashCode(){
        return Objects.hashCode(SFloat.class);
    }

    /** @return String representation of the internal double*/
    @Override
    public String toString(){
        return String.valueOf(this.toDouble());
    }

    /** @return a copy of this SFloat */
    public SFloat copy() {
        return new SFloat(this.toDouble());
    }

    @Override
    public SString asSString(){
        return new SString(this.toString());
    }

    // Concatenation is dispatched from the SString side: result is other + this.
    @Override
    public SString concatenateWithSString(SString other) {
        return new SString(other.toString() + this.toString());
    }

    // Double dispatch entry point: delegates to the concrete addX overload on 'other'.
    @Override
    public IArithmeticOperable add(IArithmeticOperable other) {
        return other.addSFloat(this);
    }

    @Override
    public SFloat addSInt(SInt other) {
        return new SFloat(this.toDouble() + other.toInt());
    }

    @Override
    public SFloat addSFloat(SFloat other) {
        return new SFloat(this.toDouble() + other.toDouble());
    }

    /**
     * Extra implementation: Operates with a SFloat, and returns a SFloat.
     * {@inheritDoc}
     */
    @Override
    public SFloat addSBinary(SBinary other) {
        return new SFloat(this.toDouble() + other.asSInt().toInt());
    }

    @Override
    public IArithmeticOperable subtract(IArithmeticOperable other) {
        return other.subtractSFloat(this);
    }

    // Reversed operands on purpose: invoked as b.subtractSInt(a) for a - b,
    // so -b + a == a - b.
    @Override
    public SFloat subtractSInt(SInt other) {
        return new SFloat(-this.toDouble() + other.toInt());
    }

    @Override
    public SFloat subtractSFloat(SFloat other) {
        return new SFloat(-this.toDouble() + other.toDouble());
    }

    /**
     * Extra implementation: Operates with a SFloat, and returns a SFloat.
     * {@inheritDoc}
     */
    @Override
    public SFloat subtractSBinary(SBinary other) {
        return new SFloat(-this.toDouble() + other.asSInt().toInt());
    }

    @Override
    public IArithmeticOperable multiply(IArithmeticOperable other) {
        return other.multiplySFloat(this);
    }

    @Override
    public SFloat multiplySInt(SInt other) {
        return new SFloat(this.toDouble() * other.toInt());
    }

    @Override
    public SFloat multiplySFloat(SFloat other) {
        return new SFloat(this.toDouble() * other.toDouble());
    }

    /**
     * Extra implementation: Operates with a SFloat, and returns a SFloat.
     * {@inheritDoc}
     */
    @Override
    public SFloat multiplySBinary(SBinary other) {
        return new SFloat(this.toDouble() * other.asSInt().toInt());
    }

    @Override
    public IArithmeticOperable divide(IArithmeticOperable other) {
        return other.divideSFloat(this);
    }

    // Reversed operands on purpose: invoked as b.divideSInt(a) for a / b,
    // hence other / this.
    @Override
    public SFloat divideSInt(SInt other) {
        return new SFloat(other.toInt() / this.toDouble());
    }

    @Override
    public SFloat divideSFloat(SFloat other) {
        return new SFloat(other.toDouble() / this.toDouble());
    }

    /**
     * Extra implementation: Operates with a SFloat, and returns a SFloat.
     * {@inheritDoc}
     */
    @Override
    public SFloat divideSBinary(SBinary other) {
        return new SFloat(other.asSInt().toInt() / this.toDouble());
    }

    @Override
    public double asNumber(){
        return this.toDouble();
    }

    // Orders by numeric value; delegates comparison to the shared NumberUtils helper.
    @Override
    public int compareTo(IArithmeticOperable other){
        double thisNumber = this.asNumber();
        double otherNumber = other.asNumber();
        return NumberUtils.compareTo(thisNumber, otherNumber);
    }
}
package habuma;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

/**
 * Smoke test: boots the full Spring application context.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class StreamingApplicationTests {

	/** Passes iff the application context starts without throwing. */
	@Test
	public void contextLoads() {
	}

}
package com.atguigu.gmall.wms.entity;

import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import java.io.Serializable;
import java.util.Date;
import lombok.Data;

/**
 * Warehouse information entity, mapped to table {@code wms_ware_info}.
 * Getters/setters/equals/hashCode/toString are generated by Lombok's
 * {@code @Data}.
 *
 * @author anlin
 * @email anlin@atguigu.com
 * @date 2020-02-05 11:33:16
 */
@ApiModel
@Data
@TableName("wms_ware_info")
public class WareInfoEntity implements Serializable {
	private static final long serialVersionUID = 1L;

	/**
	 * Primary key id.
	 */
	@TableId
	@ApiModelProperty(name = "id",value = "id")
	private Long id;
	/**
	 * Warehouse name.
	 */
	@ApiModelProperty(name = "name",value = "仓库名")
	private String name;
	/**
	 * Warehouse address.
	 */
	@ApiModelProperty(name = "address",value = "仓库地址")
	private String address;
	/**
	 * Area code of the warehouse's region.
	 */
	@ApiModelProperty(name = "areacode",value = "区域编码")
	private String areacode;

}
// // Decompiled by Procyon v0.5.36 // package org.mudebug.prapr.reloc.commons.beanutils.converters; import java.math.BigDecimal; import org.mudebug.prapr.reloc.commons.beanutils.ConversionException; import org.mudebug.prapr.reloc.commons.beanutils.Converter; public final class BigDecimalConverter implements Converter { private Object defaultValue; private boolean useDefault; public BigDecimalConverter() { this.defaultValue = null; this.useDefault = true; this.defaultValue = null; this.useDefault = false; } public BigDecimalConverter(final Object defaultValue) { this.defaultValue = null; this.useDefault = true; this.defaultValue = defaultValue; this.useDefault = true; } public Object convert(final Class type, final Object value) { if (value == null) { if (this.useDefault) { return this.defaultValue; } throw new ConversionException("No value specified"); } else { if (value instanceof BigDecimal) { return value; } try { return new BigDecimal(value.toString()); } catch (Exception e) { if (this.useDefault) { return this.defaultValue; } throw new ConversionException(e); } } } }
package com.vladsch.flexmark.html;

import com.vladsch.flexmark.html.renderer.LinkResolverContext;
import com.vladsch.flexmark.html.renderer.ResolvedLink;
import com.vladsch.flexmark.util.ast.Node;
import org.jetbrains.annotations.NotNull;

/**
 * Strategy interface for rewriting links while rendering a document to HTML.
 */
public interface LinkResolver {
    /**
     * Resolves (possibly rewrites) a link for the given node.
     *
     * @param node    the AST node the link belongs to
     * @param context rendering context for the current document
     * @param link    the link as resolved so far
     * @return the resolved link; may be the input unchanged
     */
    @NotNull ResolvedLink resolveLink(@NotNull Node node, @NotNull LinkResolverContext context, @NotNull ResolvedLink link);
}
/* Copyright 2002-2021 CS GROUP
 * Licensed to CS GROUP (CS) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * CS licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.orekit.utils.units;

import org.orekit.errors.OrekitException;
import org.orekit.errors.OrekitMessages;

/** Converter between units.
 * <p>
 * Instances of this class are immutable.
 * </p>
 * @author Luc Maisonobe
 * @since 11.0
 */
public class UnitsConverter {

    /** Identity converter. */
    public static final UnitsConverter IDENTITY =
                    new UnitsConverter(Unit.ONE, Unit.ONE);

    /** Percents to units converter. */
    public static final UnitsConverter PERCENTS_TO_UNIT =
                    new UnitsConverter(Unit.PERCENT, Unit.ONE);

    /** Arcseconds to radians converter. */
    public static final UnitsConverter ARC_SECONDS_TO_RADIANS =
                    new UnitsConverter(Unit.parse("as"), Unit.RADIAN);

    /** Milli arcseconds to radians converter. */
    public static final UnitsConverter MILLI_ARC_SECONDS_TO_RADIANS =
                    new UnitsConverter(Unit.parse("mas"), Unit.RADIAN);

    /** Milli seconds to seconds converter. */
    public static final UnitsConverter MILLI_SECONDS_TO_SECONDS =
                    new UnitsConverter(Unit.parse("ms"), Unit.SECOND);

    /** Days to seconds converter. */
    public static final UnitsConverter DAYS_TO_SECONDS =
                    new UnitsConverter(Unit.DAY, Unit.SECOND);

    /** Kilometres to metres converter. */
    public static final UnitsConverter KILOMETRES_TO_METRES =
                    new UnitsConverter(Unit.KILOMETRE, Unit.METRE);

    /** Square kilometres to square metres converter. */
    public static final UnitsConverter KILOMETRES_2_TO_METRES_2 =
                    new UnitsConverter(Unit.parse("km²"), Unit.parse("m²"));

    /** km³/s² to m³/s² converter. */
    public static final UnitsConverter KM3_P_S2_TO_M3_P_S2 =
                    new UnitsConverter(Unit.parse("km³/s²"), Unit.parse("m³/s²"));

    /** Source unit. */
    private final Unit from;

    /** Destination unit. */
    private final Unit to;

    /** Conversion factor. */
    private final double factor;

    /** Simple constructor.
     * @param from source unit
     * @param to destination unit
     */
    public UnitsConverter(final Unit from, final Unit to) {
        // Guard first: conversion only makes sense between commensurable units.
        if (!from.sameDimension(to)) {
            throw new OrekitException(OrekitMessages.INCOMPATIBLE_UNITS,
                                      from.getName(), to.getName());
        }
        this.from   = from;
        this.to     = to;
        this.factor = from.getScale() / to.getScale();
    }

    /** Get the source unit.
     * @return source unit
     */
    public Unit getFrom() {
        return from;
    }

    /** Get the destination unit.
     * @return destination unit
     */
    public Unit getTo() {
        return to;
    }

    /** Convert a value.
     * @param value value in the {@link #getFrom() source unit}
     * @return value converted in the {@link #getTo() destination unit}
     */
    public double convert(final double value) {
        return value * factor;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return from.getName() + " → " + to.getName();
    }

}
package com.telq.sdk.exceptions.httpExceptions.clientSide; public class ApiCredentialsException extends Exception { public ApiCredentialsException(String message) { super("Error with your Api Credentials [" + message + "]"); } }
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.base;

import android.os.Handler;
import android.os.Looper;
import android.os.Process;

import org.chromium.base.annotations.CalledByNative;

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;

/**
 * Helper methods to deal with threading related tasks.
 */
public class ThreadUtils {

    // Guards sWillOverride and sUiThreadHandler.
    private static final Object sLock = new Object();

    private static boolean sWillOverride;

    // Lazily initialized in getUiThreadHandler(); may be pre-set via setUiThread().
    private static Handler sUiThreadHandler;

    private static boolean sThreadAssertsDisabled;

    /**
     * Declares that the UI thread will be set explicitly via setUiThread();
     * lazy initialization to the main looper is then a programming error.
     */
    public static void setWillOverrideUiThread() {
        synchronized (sLock) {
            sWillOverride = true;
        }
    }

    /**
     * Binds the "UI thread" to the given looper (null resets it for tests).
     * Throws if a different looper was already installed.
     */
    @VisibleForTesting
    public static void setUiThread(Looper looper) {
        synchronized (sLock) {
            if (looper == null) {
                // Used to reset the looper after tests.
                sUiThreadHandler = null;
                return;
            }
            if (sUiThreadHandler != null && sUiThreadHandler.getLooper() != looper) {
                throw new RuntimeException("UI thread looper is already set to "
                        + sUiThreadHandler.getLooper() + " (Main thread looper is "
                        + Looper.getMainLooper() + "), cannot set to new looper " + looper);
            } else {
                sUiThreadHandler = new Handler(looper);
            }
        }
    }

    // Returns the UI-thread handler, lazily falling back to the main looper
    // unless an explicit override was promised (then falling back is an error).
    private static Handler getUiThreadHandler() {
        synchronized (sLock) {
            if (sUiThreadHandler == null) {
                if (sWillOverride) {
                    throw new RuntimeException("Did not yet override the UI thread");
                }
                sUiThreadHandler = new Handler(Looper.getMainLooper());
            }
            return sUiThreadHandler;
        }
    }

    /**
     * Run the supplied Runnable on the main thread. The method will block until the Runnable
     * completes.
     *
     * @param r The Runnable to run.
     */
    public static void runOnUiThreadBlocking(final Runnable r) {
        if (runningOnUiThread()) {
            // Already on the UI thread: run inline to avoid deadlocking on task.get().
            r.run();
        } else {
            FutureTask<Void> task = new FutureTask<Void>(r, null);
            postOnUiThread(task);
            try {
                task.get();
            } catch (Exception e) {
                throw new RuntimeException("Exception occurred while waiting for runnable", e);
            }
        }
    }

    /**
     * Run the supplied Callable on the main thread, wrapping any exceptions in a RuntimeException.
     * The method will block until the Callable completes.
     *
     * @param c The Callable to run
     * @return The result of the callable
     */
    @VisibleForTesting
    public static <T> T runOnUiThreadBlockingNoException(Callable<T> c) {
        try {
            return runOnUiThreadBlocking(c);
        } catch (ExecutionException e) {
            throw new RuntimeException("Error occurred waiting for callable", e);
        }
    }

    /**
     * Run the supplied Callable on the main thread, The method will block until the Callable
     * completes.
     *
     * @param c The Callable to run
     * @return The result of the callable
     * @throws ExecutionException c's exception
     */
    public static <T> T runOnUiThreadBlocking(Callable<T> c) throws ExecutionException {
        FutureTask<T> task = new FutureTask<T>(c);
        runOnUiThread(task);
        try {
            return task.get();
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted waiting for callable", e);
        }
    }

    /**
     * Run the supplied FutureTask on the main thread. The method will block only if the current
     * thread is the main thread.
     *
     * @param task The FutureTask to run
     * @return The queried task (to aid inline construction)
     */
    public static <T> FutureTask<T> runOnUiThread(FutureTask<T> task) {
        if (runningOnUiThread()) {
            task.run();
        } else {
            postOnUiThread(task);
        }
        return task;
    }

    /**
     * Run the supplied Callable on the main thread. The method will block only if the current
     * thread is the main thread.
     *
     * @param c The Callable to run
     * @return A FutureTask wrapping the callable to retrieve results
     */
    public static <T> FutureTask<T> runOnUiThread(Callable<T> c) {
        return runOnUiThread(new FutureTask<T>(c));
    }

    /**
     * Run the supplied Runnable on the main thread. The method will block only if the current
     * thread is the main thread.
     *
     * @param r The Runnable to run
     */
    public static void runOnUiThread(Runnable r) {
        if (runningOnUiThread()) {
            r.run();
        } else {
            getUiThreadHandler().post(r);
        }
    }

    /**
     * Post the supplied FutureTask to run on the main thread. The method will not block, even if
     * called on the UI thread.
     *
     * @param task The FutureTask to run
     * @return The queried task (to aid inline construction)
     */
    public static <T> FutureTask<T> postOnUiThread(FutureTask<T> task) {
        getUiThreadHandler().post(task);
        return task;
    }

    /**
     * Post the supplied Runnable to run on the main thread. The method will not block, even if
     * called on the UI thread.
     *
     * @param task The Runnable to run
     */
    public static void postOnUiThread(Runnable task) {
        getUiThreadHandler().post(task);
    }

    /**
     * Post the supplied Runnable to run on the main thread after the given amount of time. The
     * method will not block, even if called on the UI thread.
     *
     * @param task The Runnable to run
     * @param delayMillis The delay in milliseconds until the Runnable will be run
     */
    @VisibleForTesting
    public static void postOnUiThreadDelayed(Runnable task, long delayMillis) {
        getUiThreadHandler().postDelayed(task, delayMillis);
    }

    /**
     * Throw an exception (when DCHECKs are enabled) if currently not running on the UI thread.
     *
     * Can be disabled by setThreadAssertsDisabledForTesting(true).
     */
    public static void assertOnUiThread() {
        if (sThreadAssertsDisabled) return;

        assert runningOnUiThread() : "Must be called on the UI thread.";
    }

    /**
     * Throw an exception (regardless of build) if currently not running on the UI thread.
     *
     * Can be disabled by setThreadAssertsEnabledForTesting(false).
     *
     * @see #assertOnUiThread()
     */
    public static void checkUiThread() {
        if (!sThreadAssertsDisabled && !runningOnUiThread()) {
            throw new IllegalStateException("Must be called on the UI thread.");
        }
    }

    /**
     * Throw an exception (when DCHECKs are enabled) if currently running on the UI thread.
     *
     * Can be disabled by setThreadAssertsDisabledForTesting(true).
     */
    public static void assertOnBackgroundThread() {
        if (sThreadAssertsDisabled) return;

        assert !runningOnUiThread() : "Must be called on a thread other than UI.";
    }

    /**
     * Disables thread asserts.
     *
     * Can be used by tests where code that normally runs multi-threaded is going to run
     * single-threaded for the test (otherwise asserts that are valid in production would fail in
     * those tests).
     */
    public static void setThreadAssertsDisabledForTesting(boolean disabled) {
        sThreadAssertsDisabled = disabled;
    }

    /**
     * @return true iff the current thread is the main (UI) thread.
     */
    public static boolean runningOnUiThread() {
        return getUiThreadHandler().getLooper() == Looper.myLooper();
    }

    /** @return the looper backing the UI-thread handler. */
    public static Looper getUiThreadLooper() {
        return getUiThreadHandler().getLooper();
    }

    /**
     * Set thread priority to audio.
     */
    @CalledByNative
    public static void setThreadPriorityAudio(int tid) {
        Process.setThreadPriority(tid, Process.THREAD_PRIORITY_AUDIO);
    }

    /**
     * Checks whether Thread priority is THREAD_PRIORITY_AUDIO or not.
     * @param tid Thread id.
     * @return true for THREAD_PRIORITY_AUDIO and false otherwise.
     */
    @CalledByNative
    private static boolean isThreadPriorityAudio(int tid) {
        return Process.getThreadPriority(tid) == Process.THREAD_PRIORITY_AUDIO;
    }
}
package com.space.bluu.services;

import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.os.Build;
import android.util.Log;

import androidx.annotation.NonNull;
import androidx.core.app.NotificationCompat;

import com.space.bluu.R;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;

import java.util.Map;
import java.util.Random;

/**
 * Receives FCM push messages and surfaces them as local notifications.
 *
 * Data payloads take precedence over notification payloads; both paths funnel
 * into a single notification builder so channel id and icon stay consistent.
 */
public class MyFirebaseMessagingService extends FirebaseMessagingService {

    // Retained for source compatibility only; it was never assigned, so the old
    // onNewToken() dereference of it was a guaranteed NullPointerException.
    Intent intent;

    private static final String TAG = "FirebaseService";

    // Single channel id shared by both notification paths. The old code used
    // "Messages" in one path and "messages" in the other, silently creating two
    // separate notification channels.
    private static final String NOTIFICATION_CHANNEL_ID = "Messages";

    /**
     * Dispatches an incoming message: a non-empty data payload wins, otherwise
     * the notification payload is used. Guards against getNotification() being
     * null (the old code dereferenced it unconditionally).
     */
    @Override
    public void onMessageReceived(@NonNull RemoteMessage remoteMessage) {
        super.onMessageReceived(remoteMessage);
        if (!remoteMessage.getData().isEmpty()) {
            showNotification(remoteMessage.getData());
        } else if (remoteMessage.getNotification() != null) {
            showNotification(remoteMessage.getNotification().getTitle(),
                    remoteMessage.getNotification().getBody());
        } else {
            Log.w(TAG, "Received message with neither data nor notification payload");
        }
    }

    /** Shows a notification built from a data payload's "title"/"body" entries. */
    private void showNotification(Map<String, String> data) {
        showNotification(data.get("title"), data.get("body"));
    }

    /**
     * Creates the notification channel (on O+) and posts the notification.
     * Shared by both payload paths so styling cannot drift apart again.
     *
     * @param title notification title (may be null)
     * @param body  notification body text (may be null)
     */
    private void showNotification(String title, String body) {
        NotificationManager notificationManager =
                (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationChannel notificationChannel = new NotificationChannel(
                    NOTIFICATION_CHANNEL_ID, "Messages", NotificationManager.IMPORTANCE_HIGH);
            notificationChannel.setDescription("Native Channel Communications");
            notificationChannel.enableLights(true);
            notificationChannel.setLightColor(Color.BLUE);
            notificationManager.createNotificationChannel(notificationChannel);
        }

        NotificationCompat.Builder notificationBuilder =
                new NotificationCompat.Builder(this, NOTIFICATION_CHANNEL_ID);
        notificationBuilder.setAutoCancel(true)
                .setDefaults(Notification.DEFAULT_ALL)
                .setWhen(System.currentTimeMillis())
                // Unified on the launcher icon; the old text-payload path used
                // R.drawable.launch_background, which is not a valid status-bar icon.
                .setSmallIcon(R.mipmap.ic_launcher)
                .setContentTitle(title)
                .setContentText(body)
                .setContentInfo("Info");

        // Random id so successive messages do not replace each other.
        notificationManager.notify(new Random().nextInt(), notificationBuilder.build());
    }

    /**
     * Called when FCM issues a new registration token. The old implementation
     * overwrote the parameter with intent.getStringExtra("Token") on a field
     * that was never assigned — an NPE on every token refresh — and discarded
     * the real token. Log the token FCM actually provided instead.
     */
    @Override
    public void onNewToken(@NonNull String token) {
        super.onNewToken(token);
        Log.d(TAG, "onNewToken: " + token);
    }
}
/*
 * Copyright by Zoltán Cseresnyés, Ruman Gerst
 *
 * Research Group Applied Systems Biology - Head: Prof. Dr. Marc Thilo Figge
 * https://www.leibniz-hki.de/en/applied-systems-biology.html
 * HKI-Center for Systems Biology of Infection
 * Leibniz Institute for Natural Product Research and Infection Biology - Hans Knöll Institute (HKI)
 * Adolf-Reichwein-Straße 23, 07745 Jena, Germany
 *
 * The project code is licensed under BSD 2-Clause.
 * See the LICENSE file provided with the code for the full license.
 */

package org.hkijena.jipipe.extensions.imagejalgorithms.ij1.morphology;

import ij.ImagePlus;
import ij.ImageStack;
import ij.plugin.ImageCalculator;
import ij.plugin.filter.RankFilters;
import org.hkijena.jipipe.api.JIPipeDocumentation;
import org.hkijena.jipipe.api.JIPipeIssueReport;
import org.hkijena.jipipe.api.JIPipeNode;
import org.hkijena.jipipe.api.JIPipeProgressInfo;
import org.hkijena.jipipe.api.data.JIPipeDefaultMutableSlotConfiguration;
import org.hkijena.jipipe.api.nodes.*;
import org.hkijena.jipipe.api.nodes.categories.ImagesNodeTypeCategory;
import org.hkijena.jipipe.api.parameters.JIPipeParameter;
import org.hkijena.jipipe.extensions.imagejdatatypes.datatypes.greyscale.ImagePlusGreyscaleData;
import org.hkijena.jipipe.extensions.imagejdatatypes.datatypes.greyscale.ImagePlusGreyscaleMaskData;
import org.hkijena.jipipe.extensions.imagejdatatypes.util.ImageJUtils;

/**
 * JIPipe node computing the internal morphological gradient of a greyscale image:
 * the image minus its erosion (local minimum of the given radius). Higher-dimensional
 * images are processed slice by slice.
 */
@JIPipeDocumentation(name = "Internal gradient 2D", description = "Applies an internal gradient filter that subtracts the local minimum (erosion) from the input image. " + "If higher-dimensional data is provided, the filter is applied to each 2D slice.")
@JIPipeNode(menuPath = "Morphology", nodeTypeCategory = ImagesNodeTypeCategory.class)
@JIPipeInputSlot(value = ImagePlusGreyscaleData.class, slotName = "Input")
@JIPipeOutputSlot(value = ImagePlusGreyscaleData.class, slotName = "Output")
public class MorphologyInternalGradient2DAlgorithm extends JIPipeSimpleIteratingAlgorithm {

    // Radius (pixels) of the local-minimum / erosion rank filter
    private double radius = 1;

    /**
     * Instantiates a new node type.
     *
     * @param info the info
     */
    public MorphologyInternalGradient2DAlgorithm(JIPipeNodeInfo info) {
        super(info, JIPipeDefaultMutableSlotConfiguration.builder().addInputSlot("Input", "", ImagePlusGreyscaleData.class)
                .addOutputSlot("Output", "", ImagePlusGreyscaleData.class, "Input")
                .allowOutputSlotInheritance(true)
                .seal()
                .build());
    }

    /**
     * Copy constructor.
     *
     * @param other the other
     */
    public MorphologyInternalGradient2DAlgorithm(MorphologyInternalGradient2DAlgorithm other) {
        super(other);
        this.radius = other.radius;
    }

    @Override
    public boolean supportsParallelization() {
        return true;
    }

    // Mutates img in place: subtracts a MIN-rank-filtered (eroded) copy from it.
    private void applyInternalGradient(ImagePlus img) {
        // Erode the original image
        ImagePlus eroded = ImageJUtils.duplicate(img);
        RankFilters erosionFilter = new RankFilters();
        erosionFilter.rank(eroded.getProcessor(), radius, RankFilters.MIN); //TODO: Set element to octagon

        // Apply image calculator; "Subtract" writes the result into img (the first operand)
        ImageCalculator calculator = new ImageCalculator();
        calculator.run("Subtract", img, eroded);
    }

    @Override
    protected void runIteration(JIPipeDataBatch dataBatch, JIPipeProgressInfo progressInfo) {
        ImagePlus img = dataBatch.getInputData(getFirstInputSlot(), ImagePlusGreyscaleData.class, progressInfo).getImage();
        // Rebuild the stack slice-by-slice, each slice replaced by its internal gradient
        ImageStack stack = new ImageStack(img.getWidth(), img.getHeight(), img.getProcessor().getColorModel());
        ImageJUtils.forEachIndexedSlice(img, (imp, index) -> {
            progressInfo.log("Slice " + index + "/" + img.getStackSize());
            ImagePlus slice = new ImagePlus("slice", imp.duplicate());
            applyInternalGradient(slice);
            stack.addSlice("slice" + index, slice.getProcessor());
        }, progressInfo);
        // NOTE(review): the title "Segmented Image" looks like copy-paste boilerplate
        // from a segmentation node — the result is a gradient image, not a segmentation.
        ImagePlus result = new ImagePlus("Segmented Image", stack);
        result.setDimensions(img.getNChannels(), img.getNSlices(), img.getNFrames());
        result.copyScale(img);
        // NOTE(review): the output is wrapped as ImagePlusGreyscaleMaskData although the
        // output slot is declared as ImagePlusGreyscaleData — presumably the subtype is
        // accepted by the framework, but a gradient is not a mask; confirm intent.
        dataBatch.addOutputData(getFirstOutputSlot(), new ImagePlusGreyscaleMaskData(result), progressInfo);
    }

    @Override
    public void reportValidity(JIPipeIssueReport report) {
        // Radius must be > 0 (lower bound exclusive, upper bound unbounded)
        report.resolve("Radius").checkIfWithin(this, radius, 0, Double.POSITIVE_INFINITY, false, true);
    }

    @JIPipeDocumentation(name = "Radius", description = "Radius of the local minimum / erosion filter")
    @JIPipeParameter("radius")
    public double getRadius() {
        return radius;
    }

    @JIPipeParameter("radius")
    public void setRadius(double radius) {
        this.radius = radius;
    }
}
package com.budwk.starter.common.openapi.enums; /** * @author wizzer@qq.com */ public enum ParamIn { DEFAULT(""), HEADER("header"), QUERY("query"), PATH("path"), COOKIE("cookie"); private String value; private ParamIn(String value) { this.value = value; } public String toString() { return String.valueOf(this.value); } }
package agents.phujus; import agents.phujus.PhuJusAgent; import framework.IAgent; import framework.IAgentProvider; import utils.Random; public class PhuJusAgentProvider implements IAgentProvider { @Override public IAgent getAgent() { return new PhuJusAgent(); } @Override public String getAlias() { return "PhuJusAgent"; } }
package com.stackroute.configuration;

import com.stackroute.domain.Notifications;
import org.springframework.amqp.core.*;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.support.converter.Jackson2JsonMessageConverter;
import org.springframework.amqp.support.converter.MessageConverter;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;

/**
 * Messaging configuration: declares two RabbitMQ queue/exchange/binding triples
 * (one for the question-and-answer service, one for the recommendation query
 * service), a JSON message converter, and a Redis template for Notifications.
 *
 * NOTE(review): bean names are derived from the @Bean method names, and the
 * Binding factory methods below disambiguate between the two Queue and two
 * DirectExchange beans by parameter NAME (queue/queue1, exchange/exchange1) —
 * renaming any of these methods or parameters would change the wiring.
 */
@Configuration
//Configuration class for rabbitmq
public class RabbitmqConfig {

    // Queue, exchange and routing key for the question-and-answer service (jsd.*)
    @Value("${jsd.rabbitmq.queue}")
    private String queueName;
    @Value("${jsd.rabbitmq.exchange}")
    private String exchange;
    @Value("${jsd.rabbitmq.routingkey}")
    private String routingKey;

    // Queue, exchange and routing key for the recommendation query service (jst.*)
    @Value("${jst.rabbitmq.queue}")
    private String queueName1;
    @Value("${jst.rabbitmq.exchange}")
    private String exchange1;
    @Value("${jst.rabbitmq.routingkey}")
    private String routingKey1;

    // Connection factory for the Redis database (default host/port settings)
    @Bean
    JedisConnectionFactory jedisConnectionFactory() {
        return new JedisConnectionFactory();
    }

    // RedisTemplate used to store Notifications objects in Redis
    // (no explicit serializers configured, so Spring's defaults apply)
    @Bean
    RedisTemplate<String, Notifications> redisTemplate() {
        RedisTemplate<String,Notifications> redisTemplate = new RedisTemplate<>();
        redisTemplate.setConnectionFactory(jedisConnectionFactory());
        return redisTemplate;
    }

    // Non-durable queue for the question-and-answer service
    @Bean
    Queue queue() {
        return new Queue(queueName, false);
    }

    // Direct exchange for the question-and-answer service
    @Bean
    DirectExchange exchange() {
        return new DirectExchange(exchange);
    }

    // Binds the Q&A queue to its exchange under the jsd routing key
    // (parameters injected by name — see class-level note)
    @Bean
    Binding binding(Queue queue, DirectExchange exchange) {
        return BindingBuilder.bind(queue).to(exchange).with(routingKey);
    }

    // Non-durable queue for the recommendation query service
    @Bean
    Queue queue1() {
        return new Queue(queueName1, false);
    }

    // Direct exchange for the recommendation query service
    @Bean
    DirectExchange exchange1() {
        return new DirectExchange(exchange1);
    }

    // Binds the recommendation queue to its exchange under the jst routing key
    @Bean
    Binding binding1(Queue queue1, DirectExchange exchange1) {
        return BindingBuilder.bind(queue1).to(exchange1).with(routingKey1);
    }

    // JSON (Jackson) converter used for (de)serialization of message DTOs
    @Bean
    public MessageConverter jsonMessageConverter() {
        return new Jackson2JsonMessageConverter();
    }

    // RabbitTemplate used for message transfer, wired with the JSON converter
    @Bean
    public RabbitTemplate rabbitTemplate(ConnectionFactory connectionFactory) {
        final RabbitTemplate rabbitTemplate = new RabbitTemplate(connectionFactory);
        rabbitTemplate.setMessageConverter(jsonMessageConverter());
        return rabbitTemplate;
    }
}
/*
 *
 */
package net.community.chest.xml.transform;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.net.URISyntaxException;
import java.net.URL;

import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

import org.xml.sax.InputSource;

import net.community.chest.io.IOCopier;

/**
 * <P>Copyright 2010 as per GPLv2</P>
 *
 * Helpers for resolving the underlying I/O channels of {@link StreamSource} /
 * {@link StreamResult} objects and for running a {@link Transformer} against
 * plain I/O types.
 *
 * @author Lyor G.
 * @since May 12, 2010 9:37:30 AM
 */
public final class TransformerUtil {
    private TransformerUtil () {
        // no instance
    }

    /**
     * Verifies that a non-<code>null</code> {@link Transformer} was supplied —
     * shared guard for all the <code>transform</code> overloads below.
     *
     * @param t The {@link Transformer} to check
     * @param ioTarget The I/O object being transformed - used only in the error message
     * @return The (non-<code>null</code>) transformer
     * @throws TransformerException If no transformer instance was provided
     */
    private static Transformer requireTransformer (final Transformer t, final Object ioTarget) throws TransformerException {
        if (null == t)
            throw new TransformerException("transform(" + ioTarget + ") no " + Transformer.class.getSimpleName() + " instance");
        return t;
    }

    /**
     * Attempts to extract some useful I/O input from a {@link StreamSource}
     * using the following <U>order</U> and moving to next in line if the
     * previous option yielded a <code>null</code></BR>
     * <UL>
     *      <LI>{@link StreamSource#getInputStream()}</LI>
     *      <LI>{@link StreamSource#getReader()}</LI>
     *      <LI>{@link StreamSource#getSystemId()} as a {@link URL}</LI>
     * </UL>
     * @param src The {@link StreamSource} instance
     * @param maxTimeout Timeout (sec.) to use in case need to open a URL
     * @return The {@link Closeable} instance - may be <code>null</code> if
     * no source set
     * @throws TransformerException If failed to open/use an available source
     */
    public static final Closeable resolveInput (final StreamSource src, final int maxTimeout) throws TransformerException {
        if (null == src)
            return null;

        // 1st choice: an explicitly supplied InputStream
        final InputStream in=src.getInputStream();
        if (in != null)
            return in;

        // 2nd choice: an explicitly supplied Reader
        final Reader r=src.getReader();
        if (r != null)
            return r;

        // last resort: interpret the system-id as a URL and open it for reading
        final String sysId=src.getSystemId();
        if ((sysId != null) && (sysId.length() > 0)) {
            try {
                return IOCopier.openURLForRead(sysId, maxTimeout);
            } catch(URISyntaxException e) {
                throw new TransformerException("resolveInput(URI=" + sysId + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
            } catch(IOException e) {
                // consistent diagnostics: getName() everywhere (the old code used
                // e.getClass() here, which prints "class X" instead of "X")
                throw new TransformerException("resolveInput(URI=" + sysId + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
            }
        }

        return null;
    }

    /**
     * Wraps the available input of a {@link StreamSource} as a SAX
     * {@link InputSource}, using the same resolution order as
     * {@link #resolveInput(StreamSource, int)} (stream, then reader, then
     * system-id).
     *
     * @param src The {@link StreamSource} instance
     * @return The {@link InputSource} - may be <code>null</code> if no source set
     */
    public static final InputSource resolveInputSource (final StreamSource src) {
        if (null == src)
            return null;

        final InputStream in=src.getInputStream();
        if (in != null)
            return new InputSource(in);

        final Reader r=src.getReader();
        if (r != null)
            return new InputSource(r);

        final String sysId=src.getSystemId();
        if ((sysId != null) && (sysId.length() > 0))
            return new InputSource(sysId);

        return null;
    }

    /**
     * Attempts to extract some useful I/O output stream from the {@link StreamResult}
     * using the following <U>order</U> and moving to next in line if the
     * previous option yielded a <code>null</code></BR>
     * <UL>
     *      <LI>{@link StreamResult#getOutputStream()}</LI>
     *      <LI>{@link StreamResult#getWriter()}</LI>
     *      <LI>{@link StreamResult#getSystemId()} as a {@link URL}</LI>
     * </UL>
     * @param res The {@link StreamResult} instance - ignored if <code>null</code>
     * @param maxTimeout Timeout (sec.) to use in case need to open a URL
     * @return The {@link Closeable} instance - may be <code>null</code> if
     * no result set
     * @throws TransformerException If failed to open/use an available source
     */
    public static final Closeable resolveOutput (final StreamResult res, final int maxTimeout) throws TransformerException {
        if (null == res)
            return null;

        final OutputStream out=res.getOutputStream();
        if (out != null)
            return out;

        final Writer w=res.getWriter();
        if (w != null)
            return w;

        final String sysId=res.getSystemId();
        if ((sysId != null) && (sysId.length() > 0)) {
            try {
                return IOCopier.openURLForWrite(sysId, maxTimeout);
            } catch(URISyntaxException e) {
                throw new TransformerException("resolveOutput(URI=" + sysId + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
            } catch(IOException e) {
                // consistent diagnostics: getName() everywhere (see resolveInput)
                throw new TransformerException("resolveOutput(URI=" + sysId + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
            }
        }

        return null;
    }

    /**
     * Transforms a {@link File} input (<code>null</code> passed through as a
     * <code>null</code> {@link Source}).
     * @throws TransformerException If no transformer or transformation failed
     */
    public static final void transform (File in, Result out, Transformer t) throws TransformerException {
        requireTransformer(t, in).transform((null == in) ? null : new StreamSource(in), out);
    }

    /**
     * Transforms an {@link InputStream} input.
     * @throws TransformerException If no transformer or transformation failed
     */
    public static final void transform (InputStream in, Result out, Transformer t) throws TransformerException {
        requireTransformer(t, in).transform((null == in) ? null : new StreamSource(in), out);
    }

    /**
     * Transforms a {@link Reader} input.
     * @throws TransformerException If no transformer or transformation failed
     */
    public static final void transform (Reader in, Result out, Transformer t) throws TransformerException {
        requireTransformer(t, in).transform((null == in) ? null : new StreamSource(in), out);
    }

    /**
     * Transforms into a {@link File} output.
     * @throws TransformerException If no transformer or transformation failed
     */
    public static final void transform (Source in, File out, Transformer t) throws TransformerException {
        requireTransformer(t, out).transform(in, (null == out) ? null : new StreamResult(out));
    }

    /**
     * Transforms into an {@link OutputStream} output.
     * @throws TransformerException If no transformer or transformation failed
     */
    public static final void transform (Source in, OutputStream out, Transformer t) throws TransformerException {
        requireTransformer(t, out).transform(in, (null == out) ? null : new StreamResult(out));
    }

    /**
     * Transforms into a {@link Writer} output.
     * @throws TransformerException If no transformer or transformation failed
     */
    public static final void transform (Source in, Writer out, Transformer t) throws TransformerException {
        requireTransformer(t, out).transform(in, (null == out) ? null : new StreamResult(out));
    }
}
package api; public class ClassExtendsRemoved { }
/** * Copyright (c) 2008-2019 Bird Dog Games, Inc. * * This file is part of Ardor3D. * * Ardor3D is free software: you can redistribute it and/or modify it * under the terms of its license which may be found in the accompanying * LICENSE file or at <https://git.io/fjRmv>. */ package com.ardor3d.tool.editor.particle.swing.panel; import java.awt.Color; import java.awt.Dimension; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.File; import java.net.MalformedURLException; import java.util.Iterator; import java.util.List; import java.util.concurrent.Callable; import java.util.logging.Level; import java.util.logging.Logger; import java.util.prefs.Preferences; import javax.swing.AbstractAction; import javax.swing.DefaultListModel; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JFileChooser; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.ListSelectionModel; import javax.swing.ScrollPaneConstants; import javax.swing.SwingConstants; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import com.ardor3d.extension.effect.particle.AnimationEntry; import com.ardor3d.extension.effect.particle.ParticleFactory; import com.ardor3d.extension.effect.particle.ParticleInfluence; import com.ardor3d.extension.effect.particle.ParticlePoints; import com.ardor3d.extension.effect.particle.ParticleSystem; import com.ardor3d.extension.effect.particle.ParticleSystem.ParticleType; import com.ardor3d.extension.effect.particle.RampEntry; import com.ardor3d.image.Texture; import 
com.ardor3d.image.Texture.WrapMode; import com.ardor3d.image.TextureStoreFormat; import com.ardor3d.math.type.ReadOnlyColorRGBA; import com.ardor3d.renderer.state.BlendState; import com.ardor3d.renderer.state.RenderState; import com.ardor3d.renderer.state.RenderState.StateType; import com.ardor3d.renderer.state.TextureState; import com.ardor3d.tool.editor.particle.swing.ParticleEditorFrame; import com.ardor3d.tool.editor.swing.widget.ValuePanel; import com.ardor3d.tool.editor.swing.widget.Vector3Panel; import com.ardor3d.util.GameTaskQueueManager; import com.ardor3d.util.TextureManager; import com.ardor3d.util.resource.URLResourceSource; public abstract class ParticleAppearancePanel extends ParticleEditPanel { private static final Logger logger = Logger.getLogger(ParticleAppearancePanel.class.getName()); private static final long serialVersionUID = 1L; private File _newTexture = null; private final JCheckBox _additiveBlendingBox; private final JComboBox<ParticleType> _geomTypeBox; private final JCheckBox _velocityAlignedBox; private final JCheckBox _cameraAlignedBox; private final Vector3Panel _parentDirectionLeftPanel; private final Vector3Panel _parentDirectionUpPanel; private final JLabel _imageLabel = new JLabel(); private final JList<RampEntry> _rampList; private final DefaultListModel<RampEntry> _rampModel = new DefaultListModel<RampEntry>(); private final JButton _rampAddButton = makeListButton("Add"); private final JButton _rampRemoveButton = makeListButton("Remove"); private final JButton _rampEditButton = makeListButton("Edit"); private final JButton _rampMoveUpButton = makeListButton("/\\"); private final JButton _rampMoveDownButton = makeListButton("\\/"); private final JList<AnimationEntry> _animList; private final DefaultListModel<AnimationEntry> _animModel = new DefaultListModel<AnimationEntry>(); private final JButton _animAddButton = makeListButton("Add"); private final JButton _animRemoveButton = makeListButton("Remove"); private final JButton 
_animEditButton = makeListButton("Edit"); private final JButton _animMoveUpButton = makeListButton("/\\"); private final JButton _animMoveDownButton = makeListButton("\\/"); private final Preferences _prefs; private final JFileChooser _textureChooser = new JFileChooser(); private final JPanel _texturePanel; private final ValuePanel _texPanel, _startTexPanel; public ParticleAppearancePanel(final Preferences prefs) { super(); _prefs = prefs; setLayout(new GridBagLayout()); _geomTypeBox = new JComboBox<ParticleType>( new ParticleType[] { ParticleType.Triangle, ParticleType.Line, ParticleType.Point }); _geomTypeBox.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { changeParticleType((ParticleType) _geomTypeBox.getSelectedItem()); } }); _parentDirectionLeftPanel = new Vector3Panel(-1.0, 1.0, 0.1); _parentDirectionLeftPanel.setBorder(createTitledBorder(" particle left ")); _parentDirectionLeftPanel.addChangeListener(new ChangeListener() { public void stateChanged(final ChangeEvent e) { getEdittedParticles().setFacingLeftVector(_parentDirectionLeftPanel.getValue()); } }); _parentDirectionLeftPanel.setVisible(false); _parentDirectionUpPanel = new Vector3Panel(-1.0, 1.0, 0.1); _parentDirectionUpPanel.setBorder(createTitledBorder(" particle up ")); _parentDirectionUpPanel.addChangeListener(new ChangeListener() { public void stateChanged(final ChangeEvent e) { getEdittedParticles().setFacingUpVector(_parentDirectionUpPanel.getValue()); } }); _parentDirectionUpPanel.setVisible(false); _velocityAlignedBox = new JCheckBox(new AbstractAction("Align with Velocity") { private static final long serialVersionUID = 1L; public void actionPerformed(final ActionEvent e) { getEdittedParticles().setVelocityAligned(_velocityAlignedBox.isSelected()); } }); _velocityAlignedBox.setFont(new Font("Arial", Font.BOLD, 13)); _cameraAlignedBox = new JCheckBox(new AbstractAction("Align with Camera") { private static final long serialVersionUID = 1L; public 
void actionPerformed(final ActionEvent e) { getEdittedParticles().setCameraFacing(_cameraAlignedBox.isSelected()); updateVisibleControls(); } }); _cameraAlignedBox.setFont(new Font("Arial", Font.BOLD, 13)); _rampList = new JList<RampEntry>(_rampModel); _rampList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); _rampList.addListSelectionListener(new ListSelectionListener() { public void valueChanged(final ListSelectionEvent e) { final int selected = _rampList.getSelectedIndex(); _rampRemoveButton.setEnabled(selected > 0 && selected < _rampModel.getSize() - 1); _rampEditButton.setEnabled(selected != -1); _rampMoveUpButton.setEnabled(selected > 1 && selected < _rampModel.getSize() - 1); _rampMoveDownButton.setEnabled(selected < _rampModel.getSize() - 2 && selected > 0); } }); _rampList.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(final MouseEvent e) { if (e.getClickCount() > 1) { _rampEditButton.doClick(); e.consume(); } } }); _rampAddButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { new Thread() { @Override public void run() { final RampEntry entry = new RampEntry(); getEdittedParticles().getRamp().addEntry(entry); showEditWindow(entry); updateRampModel(); _rampList.setSelectedValue(entry, true); } }.start(); } }); _rampEditButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { new Thread() { @Override public void run() { final int index = _rampList.getSelectedIndex(); final RampEntry entry = _rampList.getSelectedValue(); showEditWindow(entry); updateRampModel(); _rampList.setSelectedIndex(index); }; }.start(); } }); _rampRemoveButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { final RampEntry entry = _rampList.getSelectedValue(); getEdittedParticles().getRamp().removeEntry(entry); updateRampModel(); } }); _rampMoveUpButton.addActionListener(new ActionListener() { public void 
actionPerformed(final ActionEvent e) { final int index = _rampList.getSelectedIndex(); final RampEntry entry = _rampList.getSelectedValue(); getEdittedParticles().getRamp().removeEntry(entry); getEdittedParticles().getRamp().addEntry(index - 2, entry); updateRampModel(); _rampList.setSelectedValue(entry, true); } }); _rampMoveDownButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { final int index = _rampList.getSelectedIndex(); final RampEntry entry = _rampList.getSelectedValue(); getEdittedParticles().getRamp().removeEntry(entry); getEdittedParticles().getRamp().addEntry(index, entry); updateRampModel(); _rampList.setSelectedValue(entry, true); } }); _rampRemoveButton.setEnabled(false); _rampEditButton.setEnabled(false); _rampMoveUpButton.setEnabled(false); _rampMoveDownButton.setEnabled(false); final JPanel geomPanel = new JPanel(new GridBagLayout()); geomPanel.setBorder(createTitledBorder("PARTICLE GEOMETRY")); geomPanel.add(createBoldLabel("Type:"), new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(5, 5, 0, 0), 0, 0)); geomPanel.add(_geomTypeBox, new GridBagConstraints(1, 0, 2, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 5, 0), 0, 0)); geomPanel.add(_cameraAlignedBox, new GridBagConstraints(1, 1, 2, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 0), 0, 0)); geomPanel.add(_velocityAlignedBox, new GridBagConstraints(1, 2, 2, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 0), 0, 0)); geomPanel.add(_parentDirectionUpPanel, new GridBagConstraints(1, 3, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 0), 0, 0)); geomPanel.add(_parentDirectionLeftPanel, new GridBagConstraints(2, 3, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 0), 0, 0)); geomPanel.add(new JLabel(""), new GridBagConstraints(3, 0, 
1, 1, 1.0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 5), 0, 0)); final JPanel rampPanel = new JPanel(new GridBagLayout()); rampPanel.setBorder(createTitledBorder("APPEARANCE TIMELINE")); rampPanel.add( new JScrollPane(_rampList, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED), new GridBagConstraints(1, 0, 1, 6, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(5, 5, 5, 5), 0, 0)); rampPanel.add(_rampAddButton, new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); rampPanel.add(_rampRemoveButton, new GridBagConstraints(0, 1, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); rampPanel.add(_rampEditButton, new GridBagConstraints(0, 2, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); rampPanel.add(_rampMoveUpButton, new GridBagConstraints(0, 3, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); rampPanel.add(_rampMoveDownButton, new GridBagConstraints(0, 4, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); _animList = new JList<AnimationEntry>(_animModel); _animList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); _animList.addListSelectionListener(new ListSelectionListener() { public void valueChanged(final ListSelectionEvent e) { final int selected = _animList.getSelectedIndex(); _animRemoveButton.setEnabled(selected != -1); _animEditButton.setEnabled(selected != -1); _animMoveUpButton.setEnabled(selected > 0); _animMoveDownButton.setEnabled(selected != -1 && selected < _animModel.getSize() - 1); } }); _animList.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(final MouseEvent e) { if (e.getClickCount() > 1) { 
_animEditButton.doClick(); e.consume(); } } }); _animAddButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { new Thread() { @Override public void run() { final AnimationEntry entry = new AnimationEntry(); getEdittedParticles().getTexAnimation().addEntry(entry); showEditWindow(entry); updateAnimModel(); _animList.setSelectedValue(entry, true); } }.start(); } }); _animEditButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { new Thread() { @Override public void run() { final int index = _animList.getSelectedIndex(); final AnimationEntry entry = _animList.getSelectedValue(); showEditWindow(entry); updateAnimModel(); _animList.setSelectedIndex(index); }; }.start(); } }); _animRemoveButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { final AnimationEntry entry = _animList.getSelectedValue(); getEdittedParticles().getTexAnimation().removeEntry(entry); updateAnimModel(); } }); _animMoveUpButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { final int index = _animList.getSelectedIndex(); final AnimationEntry entry = _animList.getSelectedValue(); getEdittedParticles().getTexAnimation().removeEntry(entry); getEdittedParticles().getTexAnimation().addEntry(index - 1, entry); updateAnimModel(); _animList.setSelectedValue(entry, true); } }); _animMoveDownButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { final int index = _animList.getSelectedIndex(); final AnimationEntry entry = _animList.getSelectedValue(); getEdittedParticles().getTexAnimation().removeEntry(entry); getEdittedParticles().getTexAnimation().addEntry(index + 1, entry); updateAnimModel(); _animList.setSelectedValue(entry, true); } }); _animRemoveButton.setEnabled(false); _animEditButton.setEnabled(false); _animMoveUpButton.setEnabled(false); _animMoveDownButton.setEnabled(false); final 
JPanel animPanel = new JPanel(new GridBagLayout()); animPanel.setBorder(createTitledBorder("ANIMATION TIMELINE")); animPanel.add( new JScrollPane(_animList, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED), new GridBagConstraints(1, 0, 1, 6, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(5, 5, 5, 5), 0, 0)); animPanel.add(_animAddButton, new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); animPanel.add(_animRemoveButton, new GridBagConstraints(0, 1, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); animPanel.add(_animEditButton, new GridBagConstraints(0, 2, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); animPanel.add(_animMoveUpButton, new GridBagConstraints(0, 3, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); animPanel.add(_animMoveDownButton, new GridBagConstraints(0, 4, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 5), 0, 0)); _additiveBlendingBox = new JCheckBox(new AbstractAction("Additive Blending") { private static final long serialVersionUID = 1L; public void actionPerformed(final ActionEvent e) { updateBlendState(_additiveBlendingBox.isSelected()); } }); _additiveBlendingBox.setFont(new Font("Arial", Font.BOLD, 13)); final JPanel blendPanel = new JPanel(new GridBagLayout()); blendPanel.setBorder(createTitledBorder("PARTICLE BLENDING")); blendPanel.add(_additiveBlendingBox, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0)); final JLabel textureLabel = createBoldLabel("Texture Image:"); final JButton changeTextureButton = new JButton(new AbstractAction("Browse...") { private static final long 
serialVersionUID = 1L; public void actionPerformed(final ActionEvent e) { changeTexture(); } }); changeTextureButton.setFont(new Font("Arial", Font.BOLD, 12)); changeTextureButton.setMargin(new Insets(2, 2, 2, 2)); final JButton clearTextureButton = new JButton(new AbstractAction("Clear") { private static final long serialVersionUID = 1L; public void actionPerformed(final ActionEvent e) { ((TextureState) getEdittedParticles().getLocalRenderState(StateType.Texture)).setTexture(null); _imageLabel.setIcon(null); } }); clearTextureButton.setFont(new Font("Arial", Font.BOLD, 12)); clearTextureButton.setMargin(new Insets(2, 2, 2, 2)); _imageLabel.setBackground(Color.lightGray); _imageLabel.setMaximumSize(new Dimension(128, 128)); _imageLabel.setMinimumSize(new Dimension(0, 0)); _imageLabel.setHorizontalAlignment(SwingConstants.CENTER); _imageLabel.setOpaque(false); _texturePanel = new JPanel(new GridBagLayout()); _texturePanel.setBorder(createTitledBorder("PARTICLE TEXTURE")); _texturePanel.add(textureLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0)); _texturePanel.add(changeTextureButton, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, new Insets(0, 5, 5, 5), 0, 0)); _texturePanel.add(clearTextureButton, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, new Insets(0, 5, 5, 5), 0, 0)); _texturePanel.add(_imageLabel, new GridBagConstraints(1, 0, 1, 3, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0)); _texPanel = new ValuePanel("Sub Images: ", "", 1, Integer.MAX_VALUE, 1); _texPanel.addChangeListener(new ChangeListener() { public void stateChanged(final ChangeEvent e) { getEdittedParticles().setTexQuantity(_texPanel.getIntValue()); } }); _texturePanel.add(_texPanel, new GridBagConstraints(0, 3, 2, 1, 1.0, 0.0, 
GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0)); _startTexPanel = new ValuePanel("Start Index: ", "", 0, Integer.MAX_VALUE, 1); _startTexPanel.addChangeListener(new ChangeListener() { public void stateChanged(final ChangeEvent e) { getEdittedParticles().setStartTexIndex(_startTexPanel.getIntValue()); } }); _texturePanel.add(_startTexPanel, new GridBagConstraints(0, 4, 2, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0)); add(geomPanel, new GridBagConstraints(0, 1, 1, 1, 1.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 5, 10), 0, 0)); add(_texturePanel, new GridBagConstraints(0, 2, 1, 1, 1.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(5, 10, 5, 10), 0, 0)); add(blendPanel, new GridBagConstraints(0, 3, 1, 1, 1.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(5, 10, 5, 10), 0, 0)); add(rampPanel, new GridBagConstraints(0, 4, 1, 1, 1.0, 1.0, GridBagConstraints.NORTH, GridBagConstraints.HORIZONTAL, new Insets(5, 10, 5, 10), 0, 0)); add(animPanel, new GridBagConstraints(0, 5, 1, 1, 1.0, 1.0, GridBagConstraints.NORTH, GridBagConstraints.HORIZONTAL, new Insets(5, 10, 5, 10), 0, 0)); final String tdir = _prefs.get("texture_dir", null); if (tdir != null) { _textureChooser.setCurrentDirectory(new File(tdir)); } } protected JButton makeListButton(final String text) { final JButton button = new JButton(text); button.setMargin(new Insets(2, 2, 2, 2)); return button; } protected void showEditWindow(final RampEntry entry) { final RampEntryEditDialog dialog = new RampEntryEditDialog(entry); dialog.setLocationRelativeTo(ParticleAppearancePanel.this); dialog.setModal(true); dialog.setVisible(true); dialog.toFront(); } protected void showEditWindow(final AnimationEntry entry) { final AnimationEntryEditDialog dialog = new AnimationEntryEditDialog(entry); 
dialog.setLocationRelativeTo(ParticleAppearancePanel.this); dialog.setModal(true); dialog.setVisible(true); dialog.toFront(); } protected void updateRampModel() { _rampModel.clear(); _rampModel.addElement(new StartRamp(getEdittedParticles())); final Iterator<RampEntry> it = getEdittedParticles().getRamp().getEntries(); while (it.hasNext()) { final RampEntry e = it.next(); _rampModel.addElement(e); } _rampModel.addElement(new EndRamp(getEdittedParticles())); } protected void updateAnimModel() { _animModel.clear(); final Iterator<AnimationEntry> it = getEdittedParticles().getTexAnimation().getEntries(); while (it.hasNext()) { final AnimationEntry e = it.next(); _animModel.addElement(e); } } private void changeParticleType(final ParticleType newType) { if (getEdittedParticles() == null) { return; } final ParticleType oldType = getEdittedParticles().getParticleType(); if (newType == oldType) { return; } final ParticleSystem oldGeom = getEdittedParticles(); ParticleSystem newGeom; if (newType == ParticleSystem.ParticleType.Point) { final ParticlePoints pPoints = (ParticlePoints) ParticleFactory.buildParticles(oldGeom.getName(), oldGeom.getNumParticles(), ParticleType.Point); newGeom = pPoints; pPoints.setPointSize(5); } else if (newType == ParticleSystem.ParticleType.Line) { newGeom = ParticleFactory.buildParticles(oldGeom.getName(), oldGeom.getNumParticles(), ParticleType.Line); } else { newGeom = ParticleFactory.buildParticles(oldGeom.getName(), oldGeom.getNumParticles(), newType); } // copy appearance parameters newGeom.setVelocityAligned(oldGeom.isVelocityAligned()); newGeom.setStartColor(oldGeom.getStartColor()); newGeom.setEndColor(oldGeom.getEndColor()); newGeom.setStartTexIndex(oldGeom.getStartTexIndex()); newGeom.setStartSize(oldGeom.getStartSize()); newGeom.setEndSize(oldGeom.getEndSize()); newGeom.setStartMass(oldGeom.getStartMass()); newGeom.setEndMass(oldGeom.getEndMass()); newGeom.setStartSpin(oldGeom.getStartSpin()); 
newGeom.setEndSpin(oldGeom.getEndSpin()); newGeom.setRamp(oldGeom.getRamp()); newGeom.setTexQuantity(oldGeom.getTexQuantity()); // copy origin parameters newGeom.setTransform(oldGeom.getTransform()); newGeom.setOriginOffset(oldGeom.getOriginOffset()); newGeom.setParticleEmitter(oldGeom.getParticleEmitter()); // copy emission parameters newGeom.setRotateWithScene(oldGeom.isRotateWithScene()); newGeom.setEmissionDirection(oldGeom.getEmissionDirection()); newGeom.setMinimumAngle(oldGeom.getMinimumAngle()); newGeom.setMaximumAngle(oldGeom.getMaximumAngle()); newGeom.setInitialVelocity(oldGeom.getInitialVelocity()); // copy flow parameters newGeom.setControlFlow(oldGeom.getParticleController().isControlFlow()); newGeom.setReleaseRate(oldGeom.getReleaseRate()); newGeom.setReleaseVariance(oldGeom.getReleaseVariance()); newGeom.setRepeatType(oldGeom.getParticleController().getRepeatType()); // copy world parameters newGeom.setSpeed(oldGeom.getParticleController().getSpeed()); newGeom.setMinimumLifeTime(oldGeom.getMinimumLifeTime()); newGeom.setMaximumLifeTime(oldGeom.getMaximumLifeTime()); newGeom.getParticleController().setPrecision(oldGeom.getParticleController().getPrecision()); // copy influence parameters final List<ParticleInfluence> infs = oldGeom.getInfluences(); if (infs != null) { for (final ParticleInfluence inf : infs) { newGeom.addInfluence(inf); } } // copy render states for (final StateType type : StateType.values) { final RenderState rs = oldGeom.getLocalRenderState(type); if (rs != null) { newGeom.setRenderState(rs); } } // warm up newGeom.warmUp(60); requestParticleSystemOverwrite(newGeom); } protected abstract void requestParticleSystemOverwrite(ParticleSystem newParticles); private void changeTexture() { try { final int result = _textureChooser.showOpenDialog(this); if (result == JFileChooser.CANCEL_OPTION) { return; } final File textFile = _textureChooser.getSelectedFile(); _prefs.put("texture_dir", textFile.getParent()); _newTexture = textFile; 
GameTaskQueueManager.getManager(ParticleEditorFrame.GLOBAL_CONTEXT).render(new Callable<Object>() { public Object call() throws Exception { loadApplyTexture(); return null; } }); final ImageIcon icon = new ImageIcon(getToolkit().createImage(textFile.getAbsolutePath())); _imageLabel.setIcon(icon); validate(); } catch (final Exception ex) { logger.logp(Level.SEVERE, this.getClass().toString(), "changeTexture()", "Exception", ex); } } private void loadApplyTexture() throws MalformedURLException { final TextureState ts = (TextureState) getEdittedParticles().getLocalRenderState(StateType.Texture); // XXX: Needed? // TextureManager.clearCache(); ts.setTexture(TextureManager.load(new URLResourceSource(_newTexture.toURI().toURL()), Texture.MinificationFilter.BilinearNearestMipMap, TextureStoreFormat.GuessCompressedFormat, true)); ts.getTexture().setWrap(WrapMode.BorderClamp); ts.setEnabled(true); getEdittedParticles().setRenderState(ts); _newTexture = null; } private void updateBlendState(final boolean additive) { BlendState blend = (BlendState) getEdittedParticles().getLocalRenderState(StateType.Blend); if (blend == null) { blend = new BlendState(); blend.setBlendEnabled(true); blend.setSourceFunction(BlendState.SourceFunction.SourceAlpha); blend.setTestEnabled(true); blend.setTestFunction(BlendState.TestFunction.GreaterThan); getEdittedParticles().setRenderState(blend); } blend.setDestinationFunction( additive ? 
BlendState.DestinationFunction.One : BlendState.DestinationFunction.OneMinusSourceAlpha); } @Override public void updateWidgets() { updateRampModel(); final ParticleSystem system = getEdittedParticles(); _geomTypeBox.setSelectedItem(system.getParticleType()); _velocityAlignedBox.setSelected(system.isVelocityAligned()); _parentDirectionLeftPanel.setValue(system.getFacingLeftVector()); _parentDirectionUpPanel.setValue(system.getFacingUpVector()); _cameraAlignedBox.setSelected(system.isCameraFacing()); _texPanel.setValue(system.getTexQuantity()); _startTexPanel.setValue(system.getStartTexIndex()); updateVisibleControls(); final BlendState as = (BlendState) system.getLocalRenderState(StateType.Blend); _additiveBlendingBox .setSelected(as == null || as.getDestinationFunctionRGB() == BlendState.DestinationFunction.One); if (getTexturePanel().isVisible()) { Texture tex = null; try { tex = ((TextureState) system.getLocalRenderState(StateType.Texture)).getTexture(); if (tex != null) { if (tex.getTextureKey() != null && tex.getTextureKey().getSource() != null) { _imageLabel .setIcon(new ImageIcon(((URLResourceSource) tex.getTextureKey().getSource()).getURL())); } } else { _imageLabel.setIcon(null); } } catch (final Exception e) { logger.warning("image: " + tex + " : " + tex != null ? 
tex.getTextureKey().getSource().toString() : ""); } } } public JCheckBox getAdditiveBlendingBox() { return _additiveBlendingBox; } public JPanel getTexturePanel() { return _texturePanel; } private void updateVisibleControls() { final boolean selected = _cameraAlignedBox.isSelected(); _parentDirectionUpPanel.setVisible(!selected); _parentDirectionLeftPanel.setVisible(!selected); } public class StartRamp extends RampEntry { private final ParticleSystem particles; public StartRamp(final ParticleSystem particles) { super(-1); this.particles = particles; setColor(particles.getStartColor()); setSize(particles.getStartSize()); setMass(particles.getStartMass()); setSpin(particles.getStartSpin()); } @Override public String toString() { return "START: " + super.toString(); } @Override public void setSize(final double size) { super.setSize(size); particles.setStartSize(size); } @Override public void setMass(final double mass) { super.setMass(mass); particles.setStartMass(mass); } @Override public void setSpin(final double spin) { super.setSpin(spin); particles.setStartSpin(spin); } @Override public void setColor(final ReadOnlyColorRGBA color) { super.setColor(color); particles.setStartColor(color); } } public class EndRamp extends RampEntry { private final ParticleSystem particles; public EndRamp(final ParticleSystem particles) { super(-1); this.particles = particles; setColor(particles.getEndColor()); setSize(particles.getEndSize()); setMass(particles.getEndMass()); setSpin(particles.getEndSpin()); } @Override public String toString() { return "END: " + super.toString(); } @Override public void setSize(final double size) { super.setSize(size); particles.setEndSize(size); } @Override public void setMass(final double mass) { super.setMass(mass); particles.setEndMass(mass); } @Override public void setSpin(final double spin) { super.setSpin(spin); particles.setEndSpin(spin); } @Override public void setColor(final ReadOnlyColorRGBA color) { super.setColor(color); 
particles.setEndColor(color); } } }
package cn.iocoder.mall.productservice.rpc.spu;

import cn.iocoder.common.framework.vo.CommonResult;
import cn.iocoder.common.framework.vo.PageResult;
import cn.iocoder.mall.productservice.rpc.spu.dto.*;

import java.util.Collection;
import java.util.List;

/**
 * Product SPU RPC interface.
 *
 * <p>All operations return their payload wrapped in {@link CommonResult}, the
 * project-wide RPC envelope (result code + data).
 */
public interface ProductSpuRpc {

    /**
     * Creates a product SPU (together with its SKUs, per the request DTO's name).
     *
     * @param createDTO creation request for the SPU and its SKUs
     * @return the id of the newly created product SPU
     */
    CommonResult<Integer> createProductSpu(ProductSpuAndSkuCreateReqDTO createDTO);

    /**
     * Updates a product SPU (together with its SKUs, per the request DTO's name).
     *
     * @param updateDTO update request for the SPU and its SKUs
     * @return whether the update succeeded
     */
    CommonResult<Boolean> updateProductSpu(ProductSpuAndSkuUpdateReqDTO updateDTO);

    /**
     * Gets a single product SPU.
     *
     * @param productSpuId product SPU id
     * @return the product SPU
     */
    CommonResult<ProductSpuRespDTO> getProductSpu(Integer productSpuId);

    /**
     * Gets a list of product SPUs by their ids.
     *
     * @param productSpuIds product SPU ids
     * @return the matching product SPUs
     */
    CommonResult<List<ProductSpuRespDTO>> listProductSpus(Collection<Integer> productSpuIds);

    /**
     * Gets a page of product SPUs.
     *
     * @param pageDTO paged query conditions
     * @return one page of product SPUs
     */
    CommonResult<PageResult<ProductSpuRespDTO>> pageProductSpu(ProductSpuPageReqDTO pageDTO);

    /**
     * Gets product SPU ids in sequence, for cursor-style iteration.
     *
     * @param lastSpuId the last SPU id seen by the caller (cursor position)
     * @param limit maximum number of ids to return
     * @return the next batch of product SPU ids
     */
    CommonResult<List<Integer>> listProductSpuIds(Integer lastSpuId, Integer limit);

    /**
     * Gets the detail view of a product SPU.
     *
     * @param productSpuId product SPU id
     * @param fields which parts of the detail to populate
     *               (NOTE(review): presumably field/section names understood by the
     *               implementation — confirm the accepted values with the impl class)
     * @return the product SPU detail
     */
    CommonResult<ProductSpuDetailRespDTO> getProductSpuDetail(Integer productSpuId, Collection<String> fields);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.vectorized;

import java.util.*;

import org.apache.commons.lang.NotImplementedException;

import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow;
import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.MapData;
import org.apache.spark.sql.types.*;
import org.apache.spark.unsafe.types.CalendarInterval;
import org.apache.spark.unsafe.types.UTF8String;

/**
 * This class is the in memory representation of rows as they are streamed through operators. It
 * is designed to maximize CPU efficiency and not storage footprint. Since it is expected that
 * each operator allocates one of these objects, the storage footprint on the task is negligible.
 *
 * The layout is a columnar with values encoded in their native format. Each RowBatch contains
 * a horizontal partitioning of the data, split into columns.
 *
 * The ColumnarBatch supports either on heap or offheap modes with (mostly) the identical API.
 *
 * TODO:
 *  - There are many TODOs for the existing APIs. They should throw a not implemented exception.
 *  - Compaction: The batch and columns should be able to compact based on a selection vector.
 */
public final class ColumnarBatch {
  private static final int DEFAULT_BATCH_SIZE = 4 * 1024;
  private static MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.ON_HEAP;

  private final StructType schema;
  private final int capacity;
  private int numRows;
  private final ColumnVector[] columns;

  // True if the row is filtered.
  private final boolean[] filteredRows;

  // Column indices that cannot have null values.
  private final Set<Integer> nullFilteredColumns;

  // Total number of rows that have been filtered.
  private int numRowsFiltered = 0;

  // Staging row returned from getRow.
  final Row row;

  /** Allocates a batch with the default row capacity ({@code DEFAULT_BATCH_SIZE}). */
  public static ColumnarBatch allocate(StructType schema, MemoryMode memMode) {
    return new ColumnarBatch(schema, DEFAULT_BATCH_SIZE, memMode);
  }

  /** Allocates a batch with the default row capacity and the default memory mode (on-heap). */
  public static ColumnarBatch allocate(StructType type) {
    return new ColumnarBatch(type, DEFAULT_BATCH_SIZE, DEFAULT_MEMORY_MODE);
  }

  /** Allocates a batch with an explicit row capacity. */
  public static ColumnarBatch allocate(StructType schema, MemoryMode memMode, int maxRows) {
    return new ColumnarBatch(schema, maxRows, memMode);
  }

  /**
   * Called to close all the columns in this batch. It is not valid to access the data after
   * calling this. This must be called at the end to clean up memory allocations.
   */
  public void close() {
    for (ColumnVector c: columns) {
      c.close();
    }
  }

  /**
   * Adapter class to interop with existing components that expect internal row. A lot of
   * performance is lost with this translation.
   */
  public static final class Row extends InternalRow {
    protected int rowId;
    private final ColumnarBatch parent;
    // NOTE(review): computed in both ctors but never read in this class — presumably kept
    // for subclasses or future use; confirm before removing.
    private final int fixedLenRowSize;
    private final ColumnVector[] columns;

    // Ctor used if this is a top level row.
    private Row(ColumnarBatch parent) {
      this.parent = parent;
      this.fixedLenRowSize = UnsafeRow.calculateFixedPortionByteSize(parent.numCols());
      this.columns = parent.columns;
    }

    // Ctor used if this is a struct. No parent batch, so markFiltered() would NPE here.
    protected Row(ColumnVector[] columns) {
      this.parent = null;
      this.fixedLenRowSize = UnsafeRow.calculateFixedPortionByteSize(columns.length);
      this.columns = columns;
    }

    /**
     * Marks this row as being filtered out. This means a subsequent iteration over the rows
     * in this batch will not include this row.
     */
    public void markFiltered() {
      parent.markFiltered(rowId);
    }

    /** Returns the backing column vectors of this row. */
    public ColumnVector[] columns() { return columns; }

    @Override
    public int numFields() { return columns.length; }

    @Override
    /**
     * Revisit this. This is expensive. This is currently only used in test paths.
     */
    public InternalRow copy() {
      GenericMutableRow row = new GenericMutableRow(columns.length);
      for (int i = 0; i < numFields(); i++) {
        if (isNullAt(i)) {
          row.setNullAt(i);
        } else {
          DataType dt = columns[i].dataType();
          if (dt instanceof BooleanType) {
            row.setBoolean(i, getBoolean(i));
          } else if (dt instanceof IntegerType) {
            row.setInt(i, getInt(i));
          } else if (dt instanceof LongType) {
            row.setLong(i, getLong(i));
          } else if (dt instanceof FloatType) {
            row.setFloat(i, getFloat(i));
          } else if (dt instanceof DoubleType) {
            row.setDouble(i, getDouble(i));
          } else if (dt instanceof StringType) {
            row.update(i, getUTF8String(i));
          } else if (dt instanceof BinaryType) {
            row.update(i, getBinary(i));
          } else if (dt instanceof DecimalType) {
            DecimalType t = (DecimalType)dt;
            row.setDecimal(i, getDecimal(i, t.precision(), t.scale()), t.precision());
          } else if (dt instanceof DateType) {
            // Dates are stored as ints (days since epoch per Spark convention — see DateType).
            row.setInt(i, getInt(i));
          } else {
            throw new RuntimeException("Not implemented. " + dt);
          }
        }
      }
      return row;
    }

    @Override
    public boolean anyNull() {
      throw new NotImplementedException();
    }

    @Override
    public boolean isNullAt(int ordinal) { return columns[ordinal].isNullAt(rowId); }

    @Override
    public boolean getBoolean(int ordinal) { return columns[ordinal].getBoolean(rowId); }

    @Override
    public byte getByte(int ordinal) { return columns[ordinal].getByte(rowId); }

    @Override
    public short getShort(int ordinal) { return columns[ordinal].getShort(rowId); }

    @Override
    public int getInt(int ordinal) { return columns[ordinal].getInt(rowId); }

    @Override
    public long getLong(int ordinal) { return columns[ordinal].getLong(rowId); }

    @Override
    public float getFloat(int ordinal) { return columns[ordinal].getFloat(rowId); }

    @Override
    public double getDouble(int ordinal) { return columns[ordinal].getDouble(rowId); }

    @Override
    public Decimal getDecimal(int ordinal, int precision, int scale) {
      return columns[ordinal].getDecimal(rowId, precision, scale);
    }

    @Override
    public UTF8String getUTF8String(int ordinal) {
      return columns[ordinal].getUTF8String(rowId);
    }

    @Override
    public byte[] getBinary(int ordinal) {
      return columns[ordinal].getBinary(rowId);
    }

    @Override
    public CalendarInterval getInterval(int ordinal) {
      // Intervals are stored as two child columns: months (int) and microseconds (long).
      final int months = columns[ordinal].getChildColumn(0).getInt(rowId);
      final long microseconds = columns[ordinal].getChildColumn(1).getLong(rowId);
      return new CalendarInterval(months, microseconds);
    }

    @Override
    public InternalRow getStruct(int ordinal, int numFields) {
      return columns[ordinal].getStruct(rowId);
    }

    @Override
    public ArrayData getArray(int ordinal) {
      return columns[ordinal].getArray(rowId);
    }

    @Override
    public MapData getMap(int ordinal) {
      throw new NotImplementedException();
    }

    @Override
    public Object get(int ordinal, DataType dataType) {
      throw new NotImplementedException();
    }
  }

  /**
   * Returns an iterator over the rows in this batch. This skips rows that are filtered out.
   */
  public Iterator<Row> rowIterator() {
    final int maxRows = ColumnarBatch.this.numRows();
    // A single staging Row is reused for every next() call; callers must not hold on to it.
    final Row row = new Row(this);
    return new Iterator<Row>() {
      int rowId = 0;

      @Override
      public boolean hasNext() {
        while (rowId < maxRows && ColumnarBatch.this.filteredRows[rowId]) {
          ++rowId;
        }
        return rowId < maxRows;
      }

      @Override
      public Row next() {
        while (rowId < maxRows && ColumnarBatch.this.filteredRows[rowId]) {
          ++rowId;
        }
        if (rowId >= maxRows) {
          throw new NoSuchElementException();
        }
        row.rowId = rowId++;
        return row;
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  /**
   * Resets the batch for writing.
   */
  public void reset() {
    for (int i = 0; i < numCols(); ++i) {
      columns[i].reset();
    }
    // Only clear the filter flags when at least one row was actually filtered.
    if (this.numRowsFiltered > 0) {
      Arrays.fill(filteredRows, false);
    }
    this.numRows = 0;
    this.numRowsFiltered = 0;
  }

  /**
   * Sets the number of rows that are valid. Additionally, marks all rows as "filtered" if one or
   * more of their attributes are part of a non-nullable column.
   */
  public void setNumRows(int numRows) {
    assert(numRows <= this.capacity);
    this.numRows = numRows;

    for (int ordinal : nullFilteredColumns) {
      // Skip the per-row scan entirely when the column has no nulls at all.
      if (columns[ordinal].numNulls != 0) {
        for (int rowId = 0; rowId < numRows; rowId++) {
          if (!filteredRows[rowId] && columns[ordinal].isNullAt(rowId)) {
            filteredRows[rowId] = true;
            ++numRowsFiltered;
          }
        }
      }
    }
  }

  /**
   * Returns the number of columns that make up this batch.
   */
  public int numCols() { return columns.length; }

  /**
   * Returns the number of rows for read, including filtered rows.
   */
  public int numRows() { return numRows; }

  /**
   * Returns the number of valid rows.
   */
  public int numValidRows() {
    assert(numRowsFiltered <= numRows);
    return numRows - numRowsFiltered;
  }

  /**
   * Returns the max capacity (in number of rows) for this batch.
   */
  public int capacity() { return capacity; }

  /**
   * Returns the column at `ordinal`.
   */
  public ColumnVector column(int ordinal) { return columns[ordinal]; }

  /**
   * Sets (replaces) the column at `ordinal` with column. This can be used to do very efficient
   * projections.
   */
  public void setColumn(int ordinal, ColumnVector column) {
    if (column instanceof OffHeapColumnVector) {
      throw new NotImplementedException("Need to ref count columns.");
    }
    columns[ordinal] = column;
  }

  /**
   * Returns the row in this batch at `rowId`. Returned row is reused across calls.
   */
  public ColumnarBatch.Row getRow(int rowId) {
    assert(rowId >= 0);
    assert(rowId < numRows);
    row.rowId = rowId;
    return row;
  }

  /**
   * Marks this row as being filtered out. This means a subsequent iteration over the rows
   * in this batch will not include this row.
   */
  public void markFiltered(int rowId) {
    assert(!filteredRows[rowId]);
    filteredRows[rowId] = true;
    ++numRowsFiltered;
  }

  /**
   * Marks a given column as non-nullable. Any row that has a NULL value for the corresponding
   * attribute is filtered out.
   */
  public void filterNullsInColumn(int ordinal) {
    nullFilteredColumns.add(ordinal);
  }

  // Private: construct via the allocate(...) static factories.
  private ColumnarBatch(StructType schema, int maxRows, MemoryMode memMode) {
    this.schema = schema;
    this.capacity = maxRows;
    this.columns = new ColumnVector[schema.size()];
    this.nullFilteredColumns = new HashSet<>();
    this.filteredRows = new boolean[maxRows];

    for (int i = 0; i < schema.fields().length; ++i) {
      StructField field = schema.fields()[i];
      columns[i] = ColumnVector.allocate(maxRows, field.dataType(), memMode);
    }
    this.row = new Row(this);
  }
}
package org.jboss.resteasy.reactive.server.core; public class BlockingOperationSupport { private static volatile IOThreadDetector ioThreadDetector; //TODO: move away from a static public static void setIoThreadDetector(IOThreadDetector ioThreadDetector) { BlockingOperationSupport.ioThreadDetector = ioThreadDetector; } public static boolean isBlockingAllowed() { if (ioThreadDetector == null) { return true; } return ioThreadDetector.isBlockingAllowed(); } public interface IOThreadDetector { boolean isBlockingAllowed(); } }
package tiralabra.datastructure;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import org.junit.Test;

/**
 * Unit tests for {@link MyPriorityQueue}: ordering, size bookkeeping, emptiness,
 * capacity growth, and polling from an empty queue.
 */
public class MyPriorityQueueTest {

    @Test
    public void returnsItemsInCorrectOrder() {
        MyPriorityQueue<Integer> q = new MyPriorityQueue<>();
        q.add(4);
        q.add(2);
        q.add(8);
        q.add(-1);
        q.add(5);
        // Min-queue: elements must come out in ascending order.
        assertEquals(-1, (int) q.poll());
        assertEquals(2, (int) q.poll());
        assertEquals(4, (int) q.poll());
        assertEquals(5, (int) q.poll());
        assertEquals(8, (int) q.poll());
    }

    @Test
    public void returnsSizeCorrectly() {
        MyPriorityQueue<Integer> q = new MyPriorityQueue<>();
        q.add(4);
        q.add(2);
        q.add(8);
        q.add(-1);
        q.add(5);
        // Size must decrease by one per poll.
        for (int i = 0; i < 5; i++) {
            assertEquals(5 - i, q.size());
            q.poll();
        }
        assertEquals(0, q.size());
    }

    @Test
    public void isEmptyWorksCorrectly() {
        MyPriorityQueue<Integer> q = new MyPriorityQueue<>();
        assertTrue(q.isEmpty());
        q.add(4);
        q.add(2);
        assertFalse(q.isEmpty());
        q.poll();
        q.poll();
        assertTrue(q.isEmpty());
    }

    @Test
    public void canAddManyElements() {
        // Exercises internal growth beyond any small initial capacity.
        MyPriorityQueue<Integer> q = new MyPriorityQueue<>();
        for (int i = 0; i < 100; i++) {
            q.add(i);
        }
        for (int i = 0; i < 100; i++) {
            assertEquals(i, (int) q.poll());
        }
    }

    @Test
    public void pollOnEmptyQueueReturnsNull() {
        MyPriorityQueue<Integer> q = new MyPriorityQueue<>();
        assertNull(q.poll());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mini2Dx.beanutils; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.io.StreamCorruptedException; import java.util.List; import java.util.Map; /** * <p>The metadata describing an individual property of a DynaBean.</p> * * <p>The meta contains an <em>optional</em> content type property ({@link #getContentType}) * for use by mapped and iterated properties. * A mapped or iterated property may choose to indicate the type it expects. * The DynaBean implementation may choose to enforce this type on its entries. * Alternatively, an implementatin may choose to ignore this property. * All keys for maps must be of type String so no meta data is needed for map keys.</p> * * @version $Id: DynaProperty.java 1546738 2013-11-30 16:24:19Z oheger $ */ public class DynaProperty implements Serializable { // ----------------------------------------------------------- Constants /* * There are issues with serializing primitive class types on certain JVM versions * (including java 1.3). * This class uses a custom serialization implementation that writes an integer * for these primitive class. 
* This list of constants are the ones used in serialization. * If these values are changed, then older versions will no longer be read correctly */ private static final int BOOLEAN_TYPE = 1; private static final int BYTE_TYPE = 2; private static final int CHAR_TYPE = 3; private static final int DOUBLE_TYPE = 4; private static final int FLOAT_TYPE = 5; private static final int INT_TYPE = 6; private static final int LONG_TYPE = 7; private static final int SHORT_TYPE = 8; // ----------------------------------------------------------- Constructors /** * Construct a property that accepts any data type. * * @param name Name of the property being described */ public DynaProperty(String name) { this(name, Object.class); } /** * Construct a property of the specified data type. * * @param name Name of the property being described * @param type Java class representing the property data type */ public DynaProperty(String name, Class<?> type) { super(); this.name = name; this.type = type; if (type != null && type.isArray()) { this.contentType = type.getComponentType(); } } /** * Construct an indexed or mapped <code>DynaProperty</code> that supports (pseudo)-introspection * of the content type. * * @param name Name of the property being described * @param type Java class representing the property data type * @param contentType Class that all indexed or mapped elements are instances of */ public DynaProperty(String name, Class<?> type, Class<?> contentType) { super(); this.name = name; this.type = type; this.contentType = contentType; } // ------------------------------------------------------------- Properties /** Property name */ protected String name = null; /** * Get the name of this property. 
* @return the name of the property */ public String getName() { return (this.name); } /** Property type */ protected transient Class<?> type = null; /** * <p>Gets the Java class representing the data type of the underlying property * values.</p> * * <p>There are issues with serializing primitive class types on certain JVM versions * (including java 1.3). * Therefore, this field <strong>must not be serialized using the standard methods</strong>.</p> * * <p><strong>Please leave this field as <code>transient</code></strong></p> * * @return the property type */ public Class<?> getType() { return (this.type); } /** The <em>(optional)</em> type of content elements for indexed <code>DynaProperty</code> */ protected transient Class<?> contentType; /** * Gets the <em>(optional)</em> type of the indexed content for <code>DynaProperty</code>'s * that support this feature. * * <p>There are issues with serializing primitive class types on certain JVM versions * (including java 1.3). * Therefore, this field <strong>must not be serialized using the standard methods</strong>.</p> * * @return the Class for the content type if this is an indexed <code>DynaProperty</code> * and this feature is supported. Otherwise null. */ public Class<?> getContentType() { return contentType; } // --------------------------------------------------------- Public Methods /** * Does this property represent an indexed value (ie an array or List)? * * @return <code>true</code> if the property is indexed (i.e. is a List or * array), otherwise <code>false</code> */ public boolean isIndexed() { if (type == null) { return (false); } else if (type.isArray()) { return (true); } else if (List.class.isAssignableFrom(type)) { return (true); } else { return (false); } } /** * Does this property represent a mapped value (ie a Map)? 
* * @return <code>true</code> if the property is a Map * otherwise <code>false</code> */ public boolean isMapped() { if (type == null) { return (false); } else { return (Map.class.isAssignableFrom(type)); } } /** * Checks this instance against the specified Object for equality. Overrides the * default refererence test for equality provided by {@link java.lang.Object#equals(Object)} * @param obj The object to compare to * @return <code>true</code> if object is a dyna property with the same name * type and content type, otherwise <code>false</code> * @since 1.8.0 */ @Override public boolean equals(final Object obj) { boolean result = false; result = (obj == this); if ((!result) && obj instanceof DynaProperty) { final DynaProperty that = (DynaProperty) obj; result = ((this.name == null) ? (that.name == null) : (this.name.equals(that.name))) && ((this.type == null) ? (that.type == null) : (this.type.equals(that.type))) && ((this.contentType == null) ? (that.contentType == null) : (this.contentType.equals(that.contentType))); } return result; } /** * @return the hashcode for this dyna property * @see java.lang.Object#hashCode * @since 1.8.0 */ @Override public int hashCode() { int result = 1; result = result * 31 + ((name == null) ? 0 : name.hashCode()); result = result * 31 + ((type == null) ? 0 : type.hashCode()); result = result * 31 + ((contentType == null) ? 0 : contentType.hashCode()); return result; } /** * Return a String representation of this Object. * @return a String representation of the dyna property */ @Override public String toString() { StringBuilder sb = new StringBuilder("DynaProperty[name="); sb.append(this.name); sb.append(",type="); sb.append(this.type); if (isMapped() || isIndexed()) { sb.append(" <").append(this.contentType).append(">"); } sb.append("]"); return (sb.toString()); } // --------------------------------------------------------- Serialization helper methods /** * Writes this object safely. 
* There are issues with serializing primitive class types on certain JVM versions * (including java 1.3). * This method provides a workaround. */ private void writeObject(ObjectOutputStream out) throws IOException { writeAnyClass(this.type,out); if (isMapped() || isIndexed()) { writeAnyClass(this.contentType,out); } // write out other values out.defaultWriteObject(); } /** * Write a class using safe encoding to workaround java 1.3 serialization bug. */ private void writeAnyClass(Class<?> clazz, ObjectOutputStream out) throws IOException { // safely write out any class int primitiveType = 0; if (Boolean.TYPE.equals(clazz)) { primitiveType = BOOLEAN_TYPE; } else if (Byte.TYPE.equals(clazz)) { primitiveType = BYTE_TYPE; } else if (Character.TYPE.equals(clazz)) { primitiveType = CHAR_TYPE; } else if (Double.TYPE.equals(clazz)) { primitiveType = DOUBLE_TYPE; } else if (Float.TYPE.equals(clazz)) { primitiveType = FLOAT_TYPE; } else if (Integer.TYPE.equals(clazz)) { primitiveType = INT_TYPE; } else if (Long.TYPE.equals(clazz)) { primitiveType = LONG_TYPE; } else if (Short.TYPE.equals(clazz)) { primitiveType = SHORT_TYPE; } if (primitiveType == 0) { // then it's not a primitive type out.writeBoolean(false); out.writeObject(clazz); } else { // we'll write out a constant instead out.writeBoolean(true); out.writeInt(primitiveType); } } /** * Reads field values for this object safely. * There are issues with serializing primitive class types on certain JVM versions * (including java 1.3). * This method provides a workaround. * * @throws StreamCorruptedException when the stream data values are outside expected range */ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { this.type = readAnyClass(in); if (isMapped() || isIndexed()) { this.contentType = readAnyClass(in); } // read other values in.defaultReadObject(); } /** * Reads a class using safe encoding to workaround java 1.3 serialization bug. 
*/ private Class<?> readAnyClass(ObjectInputStream in) throws IOException, ClassNotFoundException { // read back type class safely if (in.readBoolean()) { // it's a type constant switch (in.readInt()) { case BOOLEAN_TYPE: return Boolean.TYPE; case BYTE_TYPE: return Byte.TYPE; case CHAR_TYPE: return Character.TYPE; case DOUBLE_TYPE: return Double.TYPE; case FLOAT_TYPE: return Float.TYPE; case INT_TYPE: return Integer.TYPE; case LONG_TYPE: return Long.TYPE; case SHORT_TYPE: return Short.TYPE; default: // something's gone wrong throw new StreamCorruptedException( "Invalid primitive type. " + "Check version of beanutils used to serialize is compatible."); } } else { // it's another class return ((Class<?>) in.readObject()); } } }
/***************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. ****************************************************************/ package org.apache.cayenne.modeler.editor.dbentity; import org.apache.cayenne.map.DbEntity; import org.apache.cayenne.map.DbRelationship; import org.apache.cayenne.map.event.DbEntityListener; import org.apache.cayenne.map.event.EntityEvent; import org.apache.cayenne.modeler.Application; import org.apache.cayenne.modeler.ProjectController; import org.apache.cayenne.modeler.action.ActionManager; import org.apache.cayenne.modeler.action.CopyAttributeRelationshipAction; import org.apache.cayenne.modeler.action.CreateAttributeAction; import org.apache.cayenne.modeler.action.CreateObjEntityFromDbAction; import org.apache.cayenne.modeler.action.CreateRelationshipAction; import org.apache.cayenne.modeler.action.CutAttributeRelationshipAction; import org.apache.cayenne.modeler.action.DbEntityCounterpartAction; import org.apache.cayenne.modeler.action.DbEntitySyncAction; import org.apache.cayenne.modeler.action.PasteAction; import org.apache.cayenne.modeler.action.RemoveAttributeRelationshipAction; import org.apache.cayenne.modeler.event.DbEntityDisplayListener; import 
org.apache.cayenne.modeler.event.EntityDisplayEvent;
import org.apache.cayenne.modeler.pref.ComponentGeometry;
import org.apache.cayenne.modeler.util.CayenneAction;
import org.apache.cayenne.modeler.util.ModelerUtil;
import org.apache.cayenne.swing.components.image.FilteredIconFactory;
import org.slf4j.LoggerFactory;

import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.JSplitPane;
import javax.swing.JToolBar;
import java.awt.BorderLayout;

/**
 * Combines DbEntityAttributeTab and DbEntityRelationshipTab in JSplitPane.
 */
public class DbEntityAttributeRelationshipTab extends JPanel implements DbEntityDisplayListener, DbEntityListener {

    public DbEntityAttributePanel attributePanel;
    public DbEntityRelationshipPanel relationshipPanel;
    public JButton resolve = new CayenneAction.CayenneToolbarButton(null, 0);

    private JSplitPane splitPane;
    private ProjectController mediator;
    private CutAttributeRelationshipAction cut;
    private RemoveAttributeRelationshipAction remove;
    private CopyAttributeRelationshipAction copy;
    private JToolBar toolBar;

    // Direct references to the sync and counterpart buttons so that
    // currentDbEntityChanged() can toggle them without relying on brittle
    // toolbar component indices that break when the toolbar layout changes.
    private JButton syncButton;
    private JButton counterpartButton;

    /**
     * Builds the split attribute/relationship view and registers this tab as a
     * DbEntity display listener on the mediator.
     *
     * @param mediator the project controller driving this editor tab
     */
    public DbEntityAttributeRelationshipTab(ProjectController mediator) {
        this.mediator = mediator;

        init();
        initToolBar();

        mediator.addDbEntityDisplayListener(this);
    }

    /** Lays out the attribute and relationship panels in a vertical split pane. */
    private void init() {
        this.setLayout(new BorderLayout());

        attributePanel = new DbEntityAttributePanel(mediator, this);
        relationshipPanel = new DbEntityRelationshipPanel(mediator, this);

        splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, attributePanel, relationshipPanel);
        splitPane.setOneTouchExpandable(true);
        splitPane.setResizeWeight(0.5);

        try {
            // Persist the divider position across modeler sessions.
            ComponentGeometry geometry = new ComponentGeometry(
                    this.getClass(), "dbEntityAttrRelTab/splitPane/divider");
            geometry.bindIntProperty(splitPane, JSplitPane.DIVIDER_LOCATION_PROPERTY, -1);
        } catch (Exception ex) {
            LoggerFactory.getLogger(getClass()).error("Cannot bind divider property", ex);
        }

        add(splitPane);
    }

    /** Assembles the toolbar of attribute/relationship actions. */
    private void initToolBar() {
        toolBar = new JToolBar();
        toolBar.setFloatable(false);
        ActionManager actionManager = Application.getInstance().getActionManager();

        toolBar.add(actionManager.getAction(CreateAttributeAction.class).buildButton(1));
        toolBar.add(actionManager.getAction(CreateRelationshipAction.class).buildButton(3));
        toolBar.addSeparator();

        toolBar.add(actionManager.getAction(CreateObjEntityFromDbAction.class).buildButton(1));
        // Capture these two buttons; their enabled state is driven by
        // currentDbEntityChanged() depending on the selected entity's mapping.
        syncButton = actionManager.getAction(DbEntitySyncAction.class).buildButton(2);
        toolBar.add(syncButton);
        counterpartButton = actionManager.getAction(DbEntityCounterpartAction.class).buildButton(3);
        toolBar.add(counterpartButton);
        toolBar.addSeparator();

        Icon ico = ModelerUtil.buildIcon("icon-edit.png");
        resolve.setToolTipText("Database Mapping");
        resolve.setIcon(ico);
        resolve.setDisabledIcon(FilteredIconFactory.createDisabledIcon(ico));
        // Container.add returns the component, so this disables "resolve" itself.
        toolBar.add(resolve).setEnabled(false);

        cut = actionManager.getAction(CutAttributeRelationshipAction.class);
        remove = actionManager.getAction(RemoveAttributeRelationshipAction.class);
        copy = actionManager.getAction(CopyAttributeRelationshipAction.class);

        toolBar.addSeparator();
        toolBar.add(remove.buildButton());

        toolBar.addSeparator();
        toolBar.add(cut.buildButton(1));
        toolBar.add(copy.buildButton(2));
        toolBar.add(actionManager.getAction(PasteAction.class).buildButton(3));

        add(toolBar, BorderLayout.NORTH);
    }

    /**
     * Refreshes the enabled state of the remove/cut/copy actions for the current
     * selection, and of the "resolve" button when relationships are selected.
     *
     * @param params the currently selected attributes or relationships
     */
    public void updateActions(Object[] params) {
        ModelerUtil.updateActions(
                params.length,
                RemoveAttributeRelationshipAction.class,
                CutAttributeRelationshipAction.class,
                CopyAttributeRelationshipAction.class);

        // The resolve (database mapping) button is only relevant to relationships.
        if (params instanceof DbRelationship[]) {
            resolve.setEnabled(params.length > 0);
        }
    }

    public JButton getResolve() {
        return resolve;
    }

    public JSplitPane getSplitPane() {
        return splitPane;
    }

    public DbEntityAttributePanel getAttributePanel() {
        return attributePanel;
    }

    public DbEntityRelationshipPanel getRelationshipPanel() {
        return relationshipPanel;
    }

    /** Forwards entity change events to the relationship panel. */
    public void dbEntityChanged(EntityEvent e) {
        relationshipPanel.dbEntityChanged(e);
    }

    /** Forwards entity addition events to the relationship panel. */
    public void dbEntityAdded(EntityEvent e) {
        relationshipPanel.dbEntityAdded(e);
    }

    /** Forwards entity removal events to the relationship panel. */
    public void dbEntityRemoved(EntityEvent e) {
        relationshipPanel.dbEntityRemoved(e);
    }

    /**
     * Enables the sync and counterpart buttons only when the displayed DbEntity
     * has ObjEntities mapped to it.
     */
    public void currentDbEntityChanged(EntityDisplayEvent e) {
        DbEntity entity = (DbEntity) e.getEntity();
        if (entity == null) {
            // Selection was cleared; previously this caused an NPE. Leave the
            // toolbar state untouched, matching the other no-entity paths.
            return;
        }

        boolean hasMappedEntities = !entity.getDataMap().getMappedEntities(entity).isEmpty();
        syncButton.setEnabled(hasMappedEntities);
        counterpartButton.setEnabled(hasMappedEntities);
    }
}