gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.HasBinary;
import net.starlark.java.eval.Starlark;
import net.starlark.java.syntax.Location;
import net.starlark.java.syntax.TokenKind;

/** A struct-like Info (provider instance) for providers defined in Starlark. */
public final class StarlarkInfo extends StructImpl implements HasBinary {

  private final Provider provider;

  // For a n-element info, the table contains n key strings, sorted,
  // followed by the n corresponding legal Starlark values.
  private final Object[] table;

  // A format string with one %s placeholder for the missing field name.
  // If null, uses the default format specified by the provider.
  // TODO(adonovan): make the provider determine the error message
  // (but: this has implications for struct+struct, the equivalence
  // relation, and other observable behaviors).
  // Perhaps it should be a property of the StarlarkInfo instance, but
  // defined by a subclass?
  @Nullable private final String unknownFieldError;

  // TODO(adonovan): restrict type of provider to StarlarkProvider?
  // Do we ever need StarlarkInfos of BuiltinProviders? Such BuiltinProviders could
  // be moved to Starlark using bzl builtins injection.
  // Alternatively: what about this implementation is specific to StarlarkProvider?
  // It's really just a "generic" or "dynamic" representation of a struct,
  // analogous to reflection versus generated message classes in the protobuf world.
  // The efficient table algorithms would be a nice addition to the Starlark
  // interpreter, to allow other clients to define their own fast structs
  // (or to define a standard one). See also comments at Info about upcoming clean-ups.
  private StarlarkInfo(
      Provider provider,
      Object[] table,
      @Nullable Location loc,
      @Nullable String unknownFieldError) {
    super(loc);
    this.provider = provider;
    this.table = table;
    this.unknownFieldError = unknownFieldError;
  }

  @Override
  public Provider getProvider() {
    return provider;
  }

  // Converts a map to a table of sorted keys followed by corresponding values.
  private static Object[] toTable(Map<String, Object> values) {
    int n = values.size();
    Object[] table = new Object[n + n];
    int i = 0;
    for (Map.Entry<String, Object> e : values.entrySet()) {
      table[i] = e.getKey();
      table[n + i] = Starlark.checkValid(e.getValue());
      i++;
    }
    // Sort keys, permuting values in parallel.
    if (n > 1) {
      sortPairs(table, 0, n - 1);
    }
    return table;
  }

  /**
   * Constructs a StarlarkInfo from an array of alternating key/value pairs as provided by
   * Starlark.fastcall. Checks that each key is provided at most once, and is defined by the
   * optional schema, which must be sorted. This optimized zero-allocation function exists solely
   * for the StarlarkProvider constructor.
   */
  static StarlarkInfo createFromNamedArgs(
      Provider provider, Object[] table, @Nullable ImmutableList<String> schema, Location loc)
      throws EvalException {
    // Permute fastcall form (k, v, ..., k, v) into table form (k, k, ..., v, v).
    permute(table);

    int n = table.length >> 1; // number of K/V pairs

    // Sort keys, permuting values in parallel.
    if (n > 1) {
      sortPairs(table, 0, n - 1);
    }

    // Check for duplicate keys, which are now adjacent.
    for (int i = 0; i < n - 1; i++) {
      if (table[i].equals(table[i + 1])) {
        throw Starlark.errorf(
            "got multiple values for parameter %s in call to instantiate provider %s",
            table[i], provider.getPrintableName());
      }
    }

    // Check that schema is a superset of the table's keys.
    if (schema != null) {
      List<String> unexpected = unexpectedKeys(schema, table, n);
      if (unexpected != null) {
        throw Starlark.errorf(
            "got unexpected field%s '%s' in call to instantiate provider %s",
            unexpected.size() > 1 ? "s" : "",
            Joiner.on("', '").join(unexpected),
            provider.getPrintableName());
      }
    }

    return new StarlarkInfo(provider, table, loc, /*unknownFieldError=*/ null);
  }

  // Permutes array elements from alternating keys/values form,
  // (as used by fastcall's named array) into keys-then-corresponding-values form,
  // as used by StarlarkInfo.table.
  // The permutation preserves the key/value association but not the order of keys.
  static void permute(Object[] named) {
    int n = named.length >> 1; // number of K/V pairs

    // Thanks to Murali Ganapathy for the algorithm.
    // See https://play.golang.org/p/QOKnrj_bIwk.
    //
    // i and j are the indices bracketing successive pairs of cells,
    // working from the outside to the middle.
    //
    //   i                  j
    //   [KV]KVKVKVKVKVKV[KV]
    //     i              j
    //   KK[KV]KVKVKVKV[KV]VV
    //       i          j
    //   KKKK[KV]KVKV[KV]VVVV
    //   etc...
    for (int i = 0; i < n - 1; i += 2) {
      int j = named.length - i;
      // rotate two pairs [KV]...[kv] -> [Kk]...[vV]
      Object tmp = named[i + 1];
      named[i + 1] = named[j - 2];
      named[j - 2] = named[j - 1];
      named[j - 1] = tmp;
    }
    // reverse lower half containing keys: [KkvV] -> [kKvV]
    for (int i = 0; i < n >> 1; i++) {
      Object tmp = named[n - 1 - i];
      named[n - 1 - i] = named[i];
      named[i] = tmp;
    }
  }

  // Sorts non-empty slice a[lo:hi] (inclusive) in place.
  // Elements a[n:2n) are permuted the same way as a[0:n),
  // where n = a.length / 2. The lower half must be strings.
  // Precondition: 0 <= lo <= hi < n.
  static void sortPairs(Object[] a, int lo, int hi) {
    String pivot = (String) a[lo + (hi - lo) / 2];

    int i = lo;
    int j = hi;
    while (i <= j) {
      while (((String) a[i]).compareTo(pivot) < 0) {
        i++;
      }
      while (((String) a[j]).compareTo(pivot) > 0) {
        j--;
      }
      if (i <= j) {
        int n = a.length >> 1;
        swap(a, i, j);
        swap(a, i + n, j + n);
        i++;
        j--;
      }
    }
    if (lo < j) {
      sortPairs(a, lo, j);
    }
    if (i < hi) {
      sortPairs(a, i, hi);
    }
  }

  private static void swap(Object[] a, int i, int j) {
    Object tmp = a[i];
    a[i] = a[j];
    a[j] = tmp;
  }

  // Returns the list of keys in table[0:n) not defined by the schema,
  // or null on success.
  // Allocates no memory on success.
  // Both table[0:n) and schema are sorted lists of strings.
  @Nullable
  private static List<String> unexpectedKeys(ImmutableList<String> schema, Object[] table, int n) {
    int si = 0;
    List<String> unexpected = null;
    table:
    for (int ti = 0; ti < n; ti++) {
      String t = (String) table[ti];
      while (si < schema.size()) {
        String s = schema.get(si++);
        int cmp = s.compareTo(t);
        if (cmp == 0) {
          // table key matches schema
          continue table;
        } else if (cmp > 0) {
          // Table contains unexpected key t (schema has already advanced past it).
          if (unexpected == null) {
            unexpected = new ArrayList<>();
          }
          unexpected.add(t);
          // Record t only once, and rewind the cursor so this schema key
          // is still available for comparison against the next table key.
          si--;
          continue table;
        }
        // skip over schema key not provided by table
      }
      // Schema exhausted: t (and every later table key) is unexpected.
      if (unexpected == null) {
        unexpected = new ArrayList<>();
      }
      unexpected.add(t);
    }
    return unexpected;
  }

  @Override
  public ImmutableCollection<String> getFieldNames() {
    // TODO(adonovan): opt: can we avoid allocating three objects?
    @SuppressWarnings("unchecked")
    List<String> keys = (List<String>) (List<?>) Arrays.asList(table).subList(0, table.length / 2);
    return ImmutableList.copyOf(keys);
  }

  /** Returns the per-instance error message, if specified, or the provider's message otherwise. */
  @Override
  public String getErrorMessageForUnknownField(String name) {
    return unknownFieldError != null
        ? String.format(unknownFieldError, name) + allAttributesSuffix()
        : super.getErrorMessageForUnknownField(name);
  }

  @Override
  public boolean isImmutable() {
    // If the provider is not yet exported, the hash code of the object is subject to change.
    if (!provider.isExported()) {
      return false;
    }
    for (int i = table.length / 2; i < table.length; i++) {
      if (!Starlark.isImmutable(table[i])) {
        return false;
      }
    }
    return true;
  }

  @Override
  public Object getValue(String name) {
    int n = table.length / 2;
    int i = Arrays.binarySearch(table, 0, n, name);
    if (i < 0) {
      return null;
    }
    return table[n + i];
  }

  /**
   * Creates a schemaless provider instance with the given provider type and field values.
   *
   * <p>{@code loc} is the creation location for this instance. Built-in provider instances may use
   * {@link Location#BUILTIN}, which is the default if null.
   */
  public static StarlarkInfo create(
      Provider provider, Map<String, Object> values, @Nullable Location loc) {
    return new StarlarkInfo(provider, toTable(values), loc, /*unknownFieldError=*/ null);
  }

  /**
   * Creates a schemaless provider instance with the given provider type, field values, and
   * unknown-field error message.
   *
   * <p>This is used to create structs for special purposes, such as {@code ctx.attr} and the {@code
   * native} module. The creation location will be {@link Location#BUILTIN}.
   *
   * <p>{@code unknownFieldError} is a string format, as for {@link
   * Provider#getErrorMessageFormatForUnknownField}.
   *
   * @deprecated Do not use this method. Instead, create a new subclass of {@link BuiltinProvider}
   *     with the desired error message format, and create a corresponding {@link NativeInfo}
   *     subclass.
   */
  // TODO(bazel-team): Make the special structs that need a custom error message use a different
  // provider (subclassing BuiltinProvider) and a different StructImpl implementation. Then remove
  // this functionality, thereby saving a string pointer field for the majority of providers that
  // don't need it. However, this is tricky: if the error message is a property of the provider,
  // then each flavor of struct must have a distinct provider of a unique class, and this would be
  // observable to Starlark code. What would be their names: "struct", or something else? Should
  // struct+struct fail when different flavors are mixed (as happens today when adding info
  // instances of different providers)? Or should it return a new struct picking the provider of one
  // operand arbitrarily (as it does today for custom error strings)? Or ignore providers and return
  // a plain old struct, always? Or only if they differ? Or should we abolish struct+struct
  // altogether? In other words, the advice in the @deprecated tag above is not compatible.
  //
  // brandjon notes: nearly all the uses of custom errors are for objects that properly should be
  // Structures but not structs. They only leveraged the struct machinery for historical reasons and
  // convenience.
  // For instance, ctx.attr should have a custom error message, but should not support concatenation
  // (it fails today but only because you can't produce two ctx.attr's that don't have common
  // fields). It also should not support to_json().
  // It's possible someone was crazy enough to take ctx.attr.to_json(), but we can probably break
  // that case without causing too much trouble.
  // If we migrate all these cases of non-providers away, whatever is left should be happy to use a
  // default error message, and we can eliminate this extra detail.
  @Deprecated
  public static StarlarkInfo createWithCustomMessage(
      Provider provider, Map<String, Object> values, String unknownFieldError) {
    Preconditions.checkNotNull(unknownFieldError);
    return new StarlarkInfo(provider, toTable(values), Location.BUILTIN, unknownFieldError);
  }

  @Override
  public StarlarkInfo binaryOp(TokenKind op, Object that, boolean thisLeft) throws EvalException {
    if (op == TokenKind.PLUS && that instanceof StarlarkInfo) {
      return thisLeft
          ? plus(this, (StarlarkInfo) that) //
          : plus((StarlarkInfo) that, this);
    }
    return null;
  }

  private static StarlarkInfo plus(StarlarkInfo x, StarlarkInfo y) throws EvalException {
    Provider xprov = x.provider;
    Provider yprov = y.provider;
    if (!xprov.equals(yprov)) {
      throw Starlark.errorf(
          "Cannot use '+' operator on instances of different providers (%s and %s)",
          xprov.getPrintableName(), yprov.getPrintableName());
    }

    // ztable = merge(x.table, y.table)
    int xsize = x.table.length / 2;
    int ysize = y.table.length / 2;
    int zsize = xsize + ysize;
    Object[] ztable = new Object[zsize + zsize];
    int xi = 0;
    int yi = 0;
    int zi = 0;
    while (xi < xsize && yi < ysize) {
      String xk = (String) x.table[xi];
      String yk = (String) y.table[yi];
      int cmp = xk.compareTo(yk);
      if (cmp < 0) {
        ztable[zi] = xk;
        ztable[zi + zsize] = x.table[xi + xsize];
        xi++;
      } else if (cmp > 0) {
        ztable[zi] = yk;
        ztable[zi + zsize] = y.table[yi + ysize];
        yi++;
      } else {
        throw Starlark.errorf("cannot add struct instances with common field '%s'", xk);
      }
      zi++;
    }
    while (xi < xsize) {
      ztable[zi] = x.table[xi];
      ztable[zi + zsize] = x.table[xi + xsize];
      xi++;
      zi++;
    }
    while (yi < ysize) {
      ztable[zi] = y.table[yi];
      ztable[zi + zsize] = y.table[yi + ysize];
      yi++;
      zi++;
    }

    return new StarlarkInfo(xprov, ztable, Location.BUILTIN, x.unknownFieldError);
  }
}
package org.hive2hive.core.security;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.Arrays;

import javax.crypto.SecretKey;

import org.hive2hive.core.H2HJUnitTest;
import org.hive2hive.core.security.EncryptionUtil.AES_KEYLENGTH;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Unit tests for {@link PasswordUtil}: salt generation (random and fixed),
 * password-based AES key derivation, hash generation, and password validation.
 */
public class PasswordUtilTest extends H2HJUnitTest {

    @BeforeClass
    public static void initTest() throws Exception {
        testClass = PasswordUtilTest.class;
        beforeClass();
    }

    @Test
    public void generateSaltTest() {
        byte[][] salt = new byte[100][];
        for (int i = 0; i < salt.length; i++) {
            // test salt generation
            salt[i] = PasswordUtil.generateRandomSalt();
            assertNotNull(salt[i]);
            assertTrue(salt[i].length == PasswordUtil.SALT_BIT_SIZE / 8);
            logger.debug("Generated Salt: {}.", EncryptionUtil.byteToHex(salt[i]));

            // test whether salts are random
            for (int j = 0; j < i; j++) {
                assertFalse(Arrays.equals(salt[i], salt[j]));
            }
        }
    }

    @Test
    public void generateFixedSaltTest() {
        // test for different input
        byte[][] input = new byte[5][];
        for (int i = 0; i < input.length; i++) {
            input[i] = generateRandomString(15).getBytes();
            logger.debug("Random Input: {}.", EncryptionUtil.byteToHex(input[i]));

            byte[][] fixedSalt = new byte[10][];
            for (int j = 0; j < fixedSalt.length; j++) {
                // test fixed salt generation
                fixedSalt[j] = PasswordUtil.generateFixedSalt(input[i]);
                assertNotNull(fixedSalt[j]);
                assertTrue(fixedSalt[j].length == PasswordUtil.SALT_BIT_SIZE / 8);
                logger.debug("Generated Fixed Salt: {}.", EncryptionUtil.byteToHex(fixedSalt[j]));

                // test whether salts are equal
                for (int k = 0; k < j; k++) {
                    assertTrue(Arrays.equals(fixedSalt[k], fixedSalt[j]));
                }
            }
        }
    }

    @Test
    public void generateAESKeyFromPasswordTest() {
        // test all key sizes
        AES_KEYLENGTH[] sizes = EncryptionUtilTest.getAESKeySizes();
        for (int s = 0; s < sizes.length; s++) {
            // test various UserPasswords
            for (int i = 0; i < 3; i++) {
                String randomPW = generateRandomString(20);
                String randomPIN = generateRandomString(6);
                logger.debug("Testing {}-bit AES key generation from user password and PIN:", sizes[s].value());
                logger.debug("Random PW: {}.", randomPW);
                logger.debug("Random PIN: {}.", randomPIN);

                // test the generation process multiple times to ensure consistent result
                SecretKey[] aesKey = new SecretKey[3];
                for (int j = 0; j < aesKey.length; j++) {
                    // generate AES key
                    aesKey[j] = PasswordUtil.generateAESKeyFromPassword(randomPW, randomPIN, sizes[s]);
                    assertNotNull(aesKey[j]);
                    assertNotNull(aesKey[j].getEncoded());
                    assertTrue(aesKey[j].getEncoded().length == sizes[s].value() / 8);
                    logger.debug("Generated {}-bit AES key: {}.", sizes[s].value(),
                            EncryptionUtil.byteToHex(aesKey[j].getEncoded()));

                    // test whether generated AES passwords are equal
                    for (int k = 0; k < j; k++) {
                        assertTrue(Arrays.equals(aesKey[k].getEncoded(), aesKey[j].getEncoded()));
                    }
                }
            }
        }
    }

    @Test
    public void generateHashTest() {
        // test various passwords
        char[][] password = new char[5][];
        for (int i = 0; i < password.length; i++) {
            // set a random password and salt
            password[i] = generateRandomString(20).toCharArray();
            byte[] salt = PasswordUtil.generateRandomSalt();
            logger.debug("Tested Password: {}.", String.valueOf(password[i]));

            // test hash generation
            byte[] hash = PasswordUtil.generateHash(password[i], salt);
            assertNotNull(hash);
            assertTrue(hash.length == PasswordUtil.HASH_BIT_SIZE / 8);
            // FIX: this message previously said "Generated Salt" but logs the hash.
            logger.debug("Generated Hash: {}.", EncryptionUtil.byteToHex(hash));

            // test if hash outcome stays always the same with the same password and salt
            for (int j = 0; j < 10; j++) {
                assertTrue(Arrays.equals(hash, PasswordUtil.generateHash(password[i], salt)));
            }

            // test if hash outcome changes with other password or salt
            for (int j = 0; j < 10; j++) {
                // assure new parameters
                char[] otherPW;
                do {
                    otherPW = generateRandomString(20).toCharArray();
                } while (Arrays.equals(otherPW, password[i]));
                byte[] otherSalt;
                do {
                    otherSalt = PasswordUtil.generateRandomSalt();
                } while (Arrays.equals(otherSalt, salt));

                assertFalse(Arrays.equals(hash, PasswordUtil.generateHash(password[i], otherSalt)));
                assertFalse(Arrays.equals(hash, PasswordUtil.generateHash(otherPW, salt)));
                assertFalse(Arrays.equals(hash, PasswordUtil.generateHash(otherPW, otherSalt)));
            }
        }
    }

    @Test
    public void validatePasswordTest() {
        // test various passwords
        char[][] password = new char[20][];
        for (int i = 0; i < password.length; i++) {
            // set a random password and salt
            password[i] = generateRandomString(20).toCharArray();
            byte[] salt = PasswordUtil.generateRandomSalt();
            logger.debug("Validating password '{}' with salt '{}'.", String.valueOf(password[i]),
                    EncryptionUtil.byteToHex(salt));

            // generate hash
            byte[] hash = PasswordUtil.generateHash(password[i], salt);

            // validate password
            boolean isValid = PasswordUtil.validatePassword(password[i], salt, hash);
            assertTrue(isValid);

            // test validation with wrong password, salt or hash
            for (int j = 0; j < 3; j++) {
                // assure new parameters
                char[] otherPW;
                do {
                    otherPW = generateRandomString(20).toCharArray();
                } while (Arrays.equals(otherPW, password[i]));
                byte[] otherSalt;
                do {
                    otherSalt = PasswordUtil.generateRandomSalt();
                } while (Arrays.equals(otherSalt, salt));
                byte[] otherHash = null;
                do {
                    otherHash = PasswordUtil.generateHash(generateRandomString(20).toCharArray(),
                            PasswordUtil.generateRandomSalt());
                } while (Arrays.equals(otherHash, hash));

                assertFalse(PasswordUtil.validatePassword(otherPW, salt, hash));
                assertFalse(PasswordUtil.validatePassword(password[i], otherSalt, hash));
                assertFalse(PasswordUtil.validatePassword(password[i], salt, otherHash));
                assertFalse(PasswordUtil.validatePassword(otherPW, otherSalt, hash));
                assertFalse(PasswordUtil.validatePassword(password[i], otherSalt, otherHash));
                assertFalse(PasswordUtil.validatePassword(otherPW, salt, otherHash));
                assertFalse(PasswordUtil.validatePassword(otherPW, otherSalt, otherHash));
            }
        }
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 */
package com.microsoft.azure.management.containerregistry.implementation;

import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.azure.management.apigeneration.LangDefinition;
import com.microsoft.azure.management.containerregistry.ProvisioningState;
import com.microsoft.azure.management.containerregistry.Registry;
import com.microsoft.azure.management.containerregistry.Webhook;
import com.microsoft.azure.management.containerregistry.WebhookAction;
import com.microsoft.azure.management.containerregistry.WebhookEventInfo;
import com.microsoft.azure.management.containerregistry.WebhookStatus;
import com.microsoft.azure.management.resources.fluentcore.arm.Region;
import com.microsoft.azure.management.resources.fluentcore.arm.ResourceUtils;
import com.microsoft.azure.management.resources.fluentcore.arm.models.implementation.ExternalChildResourceImpl;
import com.microsoft.azure.management.resources.fluentcore.utils.PagedListConverter;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import rx.Completable;
import rx.Observable;
import rx.functions.Func1;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

/**
 * Implementation for Webhook.
 */
@LangDefinition
public class WebhookImpl
        extends ExternalChildResourceImpl<Webhook, WebhookInner, RegistryImpl, Registry>
        implements
        Webhook,
        Webhook.WebhookDefinition<Registry.DefinitionStages.WithCreate>,
        Webhook.UpdateDefinition<Registry.Update>,
        Webhook.UpdateResource<Registry.Update>,
        Webhook.Update {

    private WebhookCreateParametersInner webhookCreateParametersInner;
    private WebhookUpdateParametersInner webhookUpdateParametersInner;

    private Map<String, String> tags;
    private Map<String, String> customHeaders;
    private String serviceUri;
    private boolean isInCreateMode;

    private ContainerRegistryManager containerRegistryManager;
    private String resourceGroupName;
    private String registryName;

    /**
     * Creates an instance of external child resource in-memory.
     *
     * @param name the name of this external child resource
     * @param parent reference to the parent of this external child resource
     * @param innerObject reference to the inner object representing this external child resource
     * @param containerRegistryManager reference to the container registry manager that accesses web hook operations
     */
    WebhookImpl(String name, RegistryImpl parent, WebhookInner innerObject, ContainerRegistryManager containerRegistryManager) {
        super(name, parent, innerObject);
        this.containerRegistryManager = containerRegistryManager;
        if (parent != null) {
            this.resourceGroupName = parent.resourceGroupName();
            this.registryName = parent.name();
        }
        this.initCreateUpdateParams();
    }

    /**
     * Creates an instance of external child resource in-memory.
     *
     * @param resourceGroupName the resource group name
     * @param registryName the registry name
     * @param name the name of this external child resource
     * @param innerObject reference to the inner object representing this external child resource
     * @param containerRegistryManager reference to the container registry manager that accesses web hook operations
     */
    WebhookImpl(String resourceGroupName, String registryName, String name, WebhookInner innerObject, ContainerRegistryManager containerRegistryManager) {
        super(name, null, innerObject);
        this.containerRegistryManager = containerRegistryManager;
        this.resourceGroupName = resourceGroupName;
        this.registryName = registryName;
        this.initCreateUpdateParams();
    }

    private void initCreateUpdateParams() {
        this.webhookCreateParametersInner = null;
        this.webhookUpdateParametersInner = null;
        this.isInCreateMode = false;
    }

    @Override
    public String id() {
        return this.inner().id();
    }

    @Override
    public String type() {
        return this.inner().type();
    }

    @Override
    public String regionName() {
        return this.inner().location();
    }

    @Override
    public Region region() {
        return Region.findByLabelOrName(this.regionName());
    }

    @Override
    public Map<String, String> tags() {
        Map<String, String> tags = this.inner().getTags();
        if (tags == null) {
            tags = new TreeMap<>();
        }
        return Collections.unmodifiableMap(tags);
    }

    @Override
    public boolean isEnabled() {
        return this.inner().status().equals(WebhookStatus.ENABLED);
    }

    @Override
    public String scope() {
        return this.inner().scope();
    }

    @Override
    public String serviceUri() {
        return this.serviceUri;
    }

    @Override
    public Map<String, String> customHeaders() {
        return Collections.unmodifiableMap(this.customHeaders);
    }

    @Override
    public Collection<WebhookAction> triggers() {
        return Collections.unmodifiableCollection(this.inner().actions());
    }

    @Override
    public ProvisioningState provisioningState() {
        return this.inner().provisioningState();
    }

    @Override
    public String parentId() {
        return ResourceUtils.parentResourceIdFromResourceId(this.id());
    }

    @Override
    public void enable() {
        this.update()
            .withDefaultStatus(WebhookStatus.ENABLED)
            .apply();
    }

    @Override
    public Completable enableAsync() {
        return this.update()
            .withDefaultStatus(WebhookStatus.ENABLED)
            .applyAsync().toCompletable();
    }

    @Override
    public void disable() {
        this.update()
            .withDefaultStatus(WebhookStatus.DISABLED)
            .apply();
    }

    @Override
    public Completable disableAsync() {
        return this.update()
            .withDefaultStatus(WebhookStatus.DISABLED)
            .applyAsync().toCompletable();
    }

    @Override
    public String ping() {
        return this.containerRegistryManager.inner().webhooks()
            .ping(this.resourceGroupName, this.registryName, name()).id();
    }

    @Override
    public Observable<String> pingAsync() {
        return this.containerRegistryManager.inner().webhooks()
            .pingAsync(this.resourceGroupName, this.registryName, name())
            .map(new Func1<EventInfoInner, String>() {
                @Override
                public String call(EventInfoInner eventInfoInner) {
                    return eventInfoInner.id();
                }
            });
    }

    @Override
    public PagedList<WebhookEventInfo> listEvents() {
        final WebhookImpl self = this;
        final PagedListConverter<EventInner, WebhookEventInfo> converter = new PagedListConverter<EventInner, WebhookEventInfo>() {
            @Override
            public WebhookEventInfo typeConvert(EventInner inner) {
                return new WebhookEventInfoImpl(inner);
            }
        };
        return converter.convert(this.containerRegistryManager.inner().webhooks()
            .listEvents(self.resourceGroupName, self.registryName, self.name()));
    }

    @Override
    public Observable<WebhookEventInfo> listEventsAsync() {
        final WebhookImpl self = this;

        return this.containerRegistryManager.inner().webhooks()
            .listEventsAsync(self.resourceGroupName, self.registryName, self.name())
            .flatMap(new Func1<Page<EventInner>, Observable<EventInner>>() {
                @Override
                public Observable<EventInner> call(Page<EventInner> eventInnerPage) {
                    return Observable.from(eventInnerPage.items());
                }
            }).map(new Func1<EventInner, WebhookEventInfo>() {
                @Override
                public WebhookEventInfo call(EventInner inner) {
                    return new WebhookEventInfoImpl(inner);
                }
            });
    }

    @Override
    public Observable<Webhook> createAsync() {
        final WebhookImpl self = this;
        if (webhookCreateParametersInner != null) {
            return this.containerRegistryManager.inner().webhooks()
                .createAsync(self.resourceGroupName, this.registryName, this.name(), this.webhookCreateParametersInner)
                .map(new Func1<WebhookInner, WebhookImpl>() {
                    @Override
                    public WebhookImpl call(WebhookInner inner) {
                        self.webhookCreateParametersInner = null;
                        self.setInner(inner);
                        return self;
                    }
                }).flatMap(new Func1<WebhookImpl, Observable<Webhook>>() {
                    @Override
                    public Observable<Webhook> call(WebhookImpl webhook) {
                        return self.setCallbackConfigAsync();
                    }
                });
        } else {
            return Observable.just(this).map(new Func1<WebhookImpl, Webhook>() {
                @Override
                public Webhook call(WebhookImpl webhook) {
                    return webhook;
                }
            });
        }
    }

    WebhookImpl setCallbackConfig(CallbackConfigInner callbackConfigInner) {
        this.serviceUri = callbackConfigInner.serviceUri();
        this.customHeaders = callbackConfigInner.customHeaders() != null ? callbackConfigInner.customHeaders() : new HashMap<String, String>();
        return this;
    }

    Observable<Webhook> setCallbackConfigAsync() {
        final WebhookImpl self = this;
        return this.containerRegistryManager.inner().webhooks()
            .getCallbackConfigAsync(self.resourceGroupName, self.registryName, self.name())
            .map(new Func1<CallbackConfigInner, Webhook>() {
                @Override
                public Webhook call(CallbackConfigInner callbackConfigInner) {
                    setCallbackConfig(callbackConfigInner);
                    return self;
                }
            });
    }

    @Override
    public Observable<Webhook> updateAsync() {
        final WebhookImpl self = this;
        if (webhookUpdateParametersInner != null) {
            return this.containerRegistryManager.inner().webhooks()
                .updateAsync(self.resourceGroupName, self.registryName, self.name(), self.webhookUpdateParametersInner)
                .map(new Func1<WebhookInner, WebhookImpl>() {
                    @Override
                    public WebhookImpl call(WebhookInner inner) {
                        self.setInner(inner);
                        self.webhookUpdateParametersInner = null;
                        return self;
                    }
                }).flatMap(new Func1<WebhookImpl, Observable<Webhook>>() {
                    @Override
                    public Observable<Webhook> call(WebhookImpl webhook) {
                        return self.setCallbackConfigAsync();
                    }
                });
        } else {
            return Observable.just(this).map(new Func1<WebhookImpl, Webhook>() {
                @Override
                public Webhook call(WebhookImpl webhook) {
                    return webhook;
                }
            });
        }
    }

    @Override
    public Observable<Void> deleteAsync() {
        return this.containerRegistryManager.inner().webhooks()
            .deleteAsync(this.resourceGroupName, this.registryName, this.name());
    }

    @Override
    protected Observable<WebhookInner> getInnerAsync() {
        final WebhookImpl self = this;
        final WebhooksInner webhooksInner = this.containerRegistryManager.inner().webhooks();
        return webhooksInner.getAsync(this.resourceGroupName, this.registryName, this.name())
            .flatMap(new Func1<WebhookInner, Observable<CallbackConfigInner>>() {
                @Override
                public Observable<CallbackConfigInner> call(WebhookInner webhookInner) {
                    self.setInner(webhookInner);
                    return webhooksInner.getCallbackConfigAsync(self.resourceGroupName, self.registryName, self.name());
                }
            }).map(new Func1<CallbackConfigInner, WebhookInner>() {
                @Override
                public WebhookInner call(CallbackConfigInner callbackConfigInner) {
                    return setCallbackConfig(callbackConfigInner).inner();
                }
            });
    }

    @Override
    public Webhook apply() {
        return this.applyAsync().toBlocking().last();
    }

    @Override
    public Observable<Webhook> applyAsync() {
        return this.updateAsync();
    }

    @Override
    public ServiceFuture<Webhook> applyAsync(ServiceCallback<Webhook> callback) {
        return ServiceFuture.fromBody(this.updateAsync(), callback);
    }

    @Override
    public WebhookImpl update() {
        setCreateMode(false);
        return this;
    }

    @Override
    public RegistryImpl attach() {
        return this.parent();
    }

    WebhookImpl setCreateMode(boolean isInCreateMode) {
        this.isInCreateMode = isInCreateMode;
        if (this.isInCreateMode && parent() != null) {
            this.webhookCreateParametersInner = new WebhookCreateParametersInner().withLocation(parent().regionName());
        } else {
            this.webhookUpdateParametersInner = new WebhookUpdateParametersInner();
        }
        return this;
    }

    @Override
    public WebhookImpl withTags(Map<String, String> tags) {
        if (tags != null) {
            this.tags = null;
            ensureValidTags();
            // FIX: copy from the supplied map; previously this iterated
            // inner().getTags(), silently discarding the caller's tags.
            for (Map.Entry<String, String> entry : tags.entrySet()) {
                this.tags.put(entry.getKey(), entry.getValue());
            }
        }
        return this;
    }

    @Override
    public WebhookImpl withTag(String key, String value) {
        if (key != null && value != null) {
            ensureValidTags().put(key, value);
        }
        return this;
    }

    @Override
    public WebhookImpl withoutTag(String key) {
        if (key != null && this.tags != null) {
            this.tags.remove(key);
        }
        return this;
    }

    @Override
    public WebhookImpl withTriggerWhen(WebhookAction... webhookActions) {
        if (webhookActions != null) {
            if (this.isInCreateMode) {
                ensureWebhookCreateParametersInner().withActions(Arrays.asList(webhookActions));
            } else {
                ensureWebhookUpdateParametersInner().withActions(Arrays.asList(webhookActions));
            }
        }
        return this;
    }

    @Override
    public WebhookImpl withServiceUri(String serviceUri) {
        if (serviceUri != null) {
            if (this.isInCreateMode) {
                ensureWebhookCreateParametersInner().withServiceUri(serviceUri);
            } else {
                ensureWebhookUpdateParametersInner().withServiceUri(serviceUri);
            }
        }
        return this;
    }

    @Override
    public WebhookImpl withCustomHeader(String name, String value) {
        if (name != null && value != null) {
            ensureValidCustomHeaders().put(name, value);
        }
        return this;
    }

    @Override
    public WebhookImpl withCustomHeaders(Map<String, String> customHeaders) {
        if (customHeaders != null) {
            this.customHeaders = null;
            ensureValidCustomHeaders();
            // FIX: copy from the supplied map; previously this iterated
            // inner().getTags(), populating headers with the resource's tags.
            for (Map.Entry<String, String> entry : customHeaders.entrySet()) {
                this.customHeaders.put(entry.getKey(), entry.getValue());
            }
        }
        return this;
    }

    @Override
    public WebhookImpl withRepositoriesScope(String repositoriesScope) {
        if (repositoriesScope != null) {
            if (this.isInCreateMode) {
                ensureWebhookCreateParametersInner().withScope(repositoriesScope);
            } else {
                ensureWebhookUpdateParametersInner().withScope(repositoriesScope);
            }
        }
        return this;
    }

    @Override
    public WebhookImpl withDefaultStatus(WebhookStatus defaultStatus) {
        if (defaultStatus != null) {
            if (this.isInCreateMode) {
                ensureWebhookCreateParametersInner().withStatus(defaultStatus);
            } else {
                ensureWebhookUpdateParametersInner().withStatus(defaultStatus);
            }
        }
        return this;
    }

    private WebhookCreateParametersInner ensureWebhookCreateParametersInner() {
        if (this.webhookCreateParametersInner == null && parent() != null) {
            this.webhookCreateParametersInner = new WebhookCreateParametersInner().withLocation(parent().regionName());
        }
        return this.webhookCreateParametersInner;
    }

    private WebhookUpdateParametersInner ensureWebhookUpdateParametersInner() {
        if (this.webhookUpdateParametersInner == null && parent() != null) {
            this.webhookUpdateParametersInner = new WebhookUpdateParametersInner();
        }
        return this.webhookUpdateParametersInner;
    }

    private Map<String, String> ensureValidTags() {
        if (this.tags == null) {
            this.tags = new HashMap<>();
            if (this.isInCreateMode) {
                this.ensureWebhookCreateParametersInner().withTags(this.tags);
            } else {
                this.ensureWebhookUpdateParametersInner().withTags(this.tags);
            }
        }
        return this.tags;
    }

    private Map<String, String> ensureValidCustomHeaders() {
        if (this.customHeaders == null) {
            this.customHeaders = new HashMap<>();
            if (this.isInCreateMode) {
                this.ensureWebhookCreateParametersInner().withCustomHeaders(this.customHeaders);
            } else {
                this.ensureWebhookUpdateParametersInner().withCustomHeaders(this.customHeaders);
            }
        }
        return this.customHeaders;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.test; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.MockConsumer; import org.apache.kafka.clients.consumer.OffsetResetStrategy; import org.apache.kafka.clients.producer.MockProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.metrics.Sensor; import org.apache.kafka.common.record.TimestampType; import org.apache.kafka.common.serialization.ByteArraySerializer; import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.Serializer; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.StreamsMetrics; import org.apache.kafka.streams.processor.StateStore; import org.apache.kafka.streams.processor.TaskId; import org.apache.kafka.streams.processor.TopologyBuilder; import org.apache.kafka.streams.processor.internals.InternalProcessorContext; import org.apache.kafka.streams.processor.internals.ProcessorContextImpl; import org.apache.kafka.streams.processor.internals.ProcessorRecordContext; import 
org.apache.kafka.streams.processor.internals.ProcessorStateManager; import org.apache.kafka.streams.processor.internals.ProcessorTopology; import org.apache.kafka.streams.processor.internals.StateDirectory; import org.apache.kafka.streams.processor.internals.StreamTask; import org.apache.kafka.streams.state.KeyValueStore; import org.apache.kafka.streams.state.internals.ThreadCache; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.concurrent.atomic.AtomicLong; /** * This class makes it easier to write tests to verify the behavior of topologies created with a {@link TopologyBuilder}. * You can test simple topologies that have a single processor, or very complex topologies that have multiple sources, processors, * and sinks. And because it starts with a {@link TopologyBuilder}, you can create topologies specific to your tests or you * can use and test code you already have that uses a builder to create topologies. Best of all, the class works without a real * Kafka broker, so the tests execute very quickly with very little overhead. * <p> * Using the ProcessorTopologyTestDriver in tests is easy: simply instantiate the driver with a {@link StreamsConfig} and a * TopologyBuilder, use the driver to supply an input message to the topology, and then use the driver to read and verify any * messages output by the topology. * <p> * Although the driver doesn't use a real Kafka broker, it does simulate Kafka {@link org.apache.kafka.clients.consumer.Consumer}s * and {@link org.apache.kafka.clients.producer.Producer}s that read and write raw {@code byte[]} messages. You can either deal * with messages that have {@code byte[]} keys and values, or you can supply the {@link Serializer}s and {@link Deserializer}s * that the driver can use to convert the keys and values into objects. 
* * <h2>Driver setup</h2> * <p> * In order to create a ProcessorTopologyTestDriver instance, you need a TopologyBuilder and a {@link StreamsConfig}. The * configuration needs to be representative of what you'd supply to the real topology, so that means including several key * properties. For example, the following code fragment creates a configuration that specifies a local Kafka broker list * (which is needed but not used), a timestamp extractor, and default serializers and deserializers for string keys and values: * * <pre> * StringSerializer strSerializer = new StringSerializer(); * StringDeserializer strDeserializer = new StringDeserializer(); * Properties props = new Properties(); * props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9091"); * props.setProperty(StreamsConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG, CustomTimestampExtractor.class.getName()); * props.setProperty(StreamsConfig.KEY_SERIALIZER_CLASS_CONFIG, strSerializer.getClass().getName()); * props.setProperty(StreamsConfig.KEY_DESERIALIZER_CLASS_CONFIG, strDeserializer.getClass().getName()); * props.setProperty(StreamsConfig.VALUE_SERIALIZER_CLASS_CONFIG, strSerializer.getClass().getName()); * props.setProperty(StreamsConfig.VALUE_DESERIALIZER_CLASS_CONFIG, strDeserializer.getClass().getName()); * StreamsConfig config = new StreamsConfig(props); * TopologyBuilder builder = ... * ProcessorTopologyTestDriver driver = new ProcessorTopologyTestDriver(config, builder); * </pre> * * <h2>Processing messages</h2> * <p> * Your test can supply new input records on any of the topics that the topology's sources consume. Here's an example of an * input message on the topic named {@code input-topic}: * * <pre> * driver.process("input-topic", "key1", "value1", strSerializer, strSerializer); * </pre> * * Immediately, the driver will pass the input message through to the appropriate source that consumes the named topic, * and will invoke the processor(s) downstream of the source. 
If your topology's processors forward messages to sinks, * your test can then consume these output messages to verify they match the expected outcome. For example, if our topology * should have generated 2 messages on {@code output-topic-1} and 1 message on {@code output-topic-2}, then our test can * obtain these messages using the {@link #readOutput(String, Deserializer, Deserializer)} method: * * <pre> * ProducerRecord<String, String> record1 = driver.readOutput("output-topic-1", strDeserializer, strDeserializer); * ProducerRecord<String, String> record2 = driver.readOutput("output-topic-1", strDeserializer, strDeserializer); * ProducerRecord<String, String> record3 = driver.readOutput("output-topic-2", strDeserializer, strDeserializer); * </pre> * * Again, our example topology generates messages with string keys and values, so we supply our string deserializer instance * for use on both the keys and values. Your test logic can then verify whether these output records are correct. * <p> * Finally, when completed, make sure your tests {@link #close()} the driver to release all resources and * {@link org.apache.kafka.streams.processor.Processor}s. * * <h2>Processor state</h2> * <p> * Some processors use Kafka {@link StateStore state storage}, so this driver class provides the {@link #getStateStore(String)} * and {@link #getKeyValueStore(String)} methods so that your tests can check the underlying state store(s) used by your * topology's processors. In our previous example, after we supplied a single input message and checked the three output messages, * our test could also check the key value store to verify the processor correctly added, removed, or updated internal state. * Or, our test might have pre-populated some state <em>before</em> submitting the input message, and verified afterward that the * processor(s) correctly updated the state. 
*/ public class ProcessorTopologyTestDriver { private final Serializer<byte[]> bytesSerializer = new ByteArraySerializer(); private final String applicationId = "test-driver-application"; private final TaskId id; private final ProcessorTopology topology; private final StreamTask task; private final MockConsumer<byte[], byte[]> consumer; private final MockProducer<byte[], byte[]> producer; private final MockConsumer<byte[], byte[]> restoreStateConsumer; private final Map<String, TopicPartition> partitionsByTopic = new HashMap<>(); private final Map<TopicPartition, AtomicLong> offsetsByTopicPartition = new HashMap<>(); private final Map<String, Queue<ProducerRecord<byte[], byte[]>>> outputRecordsByTopic = new HashMap<>(); /** * Create a new test driver instance. * @param config the stream configuration for the topology * @param builder the topology builder that will be used to create the topology instance * @param storeNames the optional names of the state stores that are used by the topology */ public ProcessorTopologyTestDriver(StreamsConfig config, TopologyBuilder builder, String... storeNames) { id = new TaskId(0, 0); topology = builder.setApplicationId("ProcessorTopologyTestDriver").build(null); // Set up the consumer and producer ... consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); producer = new MockProducer<byte[], byte[]>(true, bytesSerializer, bytesSerializer) { @Override public List<PartitionInfo> partitionsFor(String topic) { return Collections.singletonList(new PartitionInfo(topic, 0, null, null, null)); } }; restoreStateConsumer = createRestoreConsumer(id, storeNames); // Set up all of the topic+partition information and subscribe the consumer to each ... 
for (String topic : topology.sourceTopics()) { TopicPartition tp = new TopicPartition(topic, 1); partitionsByTopic.put(topic, tp); offsetsByTopicPartition.put(tp, new AtomicLong()); } consumer.assign(offsetsByTopicPartition.keySet()); task = new StreamTask(id, applicationId, partitionsByTopic.values(), topology, consumer, producer, restoreStateConsumer, config, new StreamsMetrics() { @Override public Sensor addLatencySensor(String scopeName, String entityName, String operationName, String... tags) { return null; } @Override public void recordLatency(Sensor sensor, long startNs, long endNs) { // do nothing } }, new StateDirectory(applicationId, TestUtils.tempDirectory().getPath()), new ThreadCache(1024 * 1024)); } /** * Send an input message with the given key and value on the specified topic to the topology, and then commit the messages. * * @param topicName the name of the topic on which the message is to be sent * @param key the raw message key * @param value the raw message value */ public void process(String topicName, byte[] key, byte[] value) { TopicPartition tp = partitionsByTopic.get(topicName); if (tp == null) { throw new IllegalArgumentException("Unexpected topic: " + topicName); } // Add the record ... long offset = offsetsByTopicPartition.get(tp).incrementAndGet(); task.addRecords(tp, records(new ConsumerRecord<byte[], byte[]>(tp.topic(), tp.partition(), offset, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, key, value))); producer.clear(); // Process the record ... task.process(); ((InternalProcessorContext) task.context()).setRecordContext(new ProcessorRecordContext(0L, offset, tp.partition(), topicName)); task.commit(); // Capture all the records sent to the producer ... 
for (ProducerRecord<byte[], byte[]> record : producer.history()) { Queue<ProducerRecord<byte[], byte[]>> outputRecords = outputRecordsByTopic.get(record.topic()); if (outputRecords == null) { outputRecords = new LinkedList<>(); outputRecordsByTopic.put(record.topic(), outputRecords); } outputRecords.add(record); } } /** * Send an input message with the given key and value on the specified topic to the topology. * * @param topicName the name of the topic on which the message is to be sent * @param key the raw message key * @param value the raw message value * @param keySerializer the serializer for the key * @param valueSerializer the serializer for the value */ public <K, V> void process(String topicName, K key, V value, Serializer<K> keySerializer, Serializer<V> valueSerializer) { process(topicName, keySerializer.serialize(topicName, key), valueSerializer.serialize(topicName, value)); } /** * Read the next record from the given topic. These records were output by the topology during the previous calls to * {@link #process(String, byte[], byte[])}. * * @param topic the name of the topic * @return the next record on that topic, or null if there is no record available */ public ProducerRecord<byte[], byte[]> readOutput(String topic) { Queue<ProducerRecord<byte[], byte[]>> outputRecords = outputRecordsByTopic.get(topic); if (outputRecords == null) return null; return outputRecords.poll(); } /** * Read the next record from the given topic. These records were output by the topology during the previous calls to * {@link #process(String, byte[], byte[])}. 
* * @param topic the name of the topic * @param keyDeserializer the deserializer for the key type * @param valueDeserializer the deserializer for the value type * @return the next record on that topic, or null if there is no record available */ public <K, V> ProducerRecord<K, V> readOutput(String topic, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) { ProducerRecord<byte[], byte[]> record = readOutput(topic); if (record == null) return null; K key = keyDeserializer.deserialize(record.topic(), record.key()); V value = valueDeserializer.deserialize(record.topic(), record.value()); return new ProducerRecord<K, V>(record.topic(), record.partition(), key, value); } private Iterable<ConsumerRecord<byte[], byte[]>> records(ConsumerRecord<byte[], byte[]> record) { return Collections.singleton(record); } /** * Get the {@link StateStore} with the given name. The name should have been supplied via * {@link #ProcessorTopologyTestDriver(StreamsConfig, TopologyBuilder, String...) this object's constructor}, and is * presumed to be used by a Processor within the topology. * <p> * This is often useful in test cases to pre-populate the store before the test case instructs the topology to * {@link #process(String, byte[], byte[]) process an input message}, and/or to check the store afterward. * * @param name the name of the store * @return the state store, or null if no store has been registered with the given name * @see #getKeyValueStore(String) */ public StateStore getStateStore(String name) { return ((ProcessorContextImpl) task.context()).getStateMgr().getStore(name); } /** * Get the {@link KeyValueStore} with the given name. The name should have been supplied via * {@link #ProcessorTopologyTestDriver(StreamsConfig, TopologyBuilder, String...) this object's constructor}, and is * presumed to be used by a Processor within the topology. 
* <p> * This is often useful in test cases to pre-populate the store before the test case instructs the topology to * {@link #process(String, byte[], byte[]) process an input message}, and/or to check the store afterward. * <p> * * @param name the name of the store * @return the key value store, or null if no {@link KeyValueStore} has been registered with the given name * @see #getStateStore(String) */ @SuppressWarnings("unchecked") public <K, V> KeyValueStore<K, V> getKeyValueStore(String name) { StateStore store = getStateStore(name); return store instanceof KeyValueStore ? (KeyValueStore<K, V>) getStateStore(name) : null; } /** * Close the driver, its topology, and all processors. */ public void close() { task.close(); } /** * Utility method that creates the {@link MockConsumer} used for restoring state, which should not be done by this * driver object unless this method is overwritten with a functional consumer. * * @param id the ID of the stream task * @param storeNames the names of the stores that this * @return the mock consumer; never null */ protected MockConsumer<byte[], byte[]> createRestoreConsumer(TaskId id, String... storeNames) { MockConsumer<byte[], byte[]> consumer = new MockConsumer<byte[], byte[]>(OffsetResetStrategy.LATEST) { @Override public synchronized void seekToEnd(Collection<TopicPartition> partitions) { // do nothing ... } @Override public synchronized void seekToBeginning(Collection<TopicPartition> partitions) { // do nothing ... } @Override public synchronized long position(TopicPartition partition) { // do nothing ... return 0L; } }; // For each store name ... for (String storeName : storeNames) { String topicName = ProcessorStateManager.storeChangelogTopic(applicationId, storeName); // Set up the restore-state topic ... // consumer.subscribe(new TopicPartition(topicName, 1)); // Set up the partition that matches the ID (which is what ProcessorStateManager expects) ... 
List<PartitionInfo> partitionInfos = new ArrayList<>(); partitionInfos.add(new PartitionInfo(topicName , id.partition, null, null, null)); consumer.updatePartitions(topicName, partitionInfos); consumer.updateEndOffsets(Collections.singletonMap(new TopicPartition(topicName, id.partition), 0L)); } return consumer; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.Bytes; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; /** * Immutable POJO class for representing a table name. * Which is of the form: * &lt;table namespace&gt;:&lt;table qualifier&gt; * * Two special namespaces: * * 1. hbase - system namespace, used to contain hbase internal tables * 2. default - tables with no explicit specified namespace will * automatically fall into this namespace. * * ie * * a) foo:bar, means namespace=foo and qualifier=bar * b) bar, means namespace=default and qualifier=bar * c) default:bar, means namespace=default and qualifier=bar * * <p> * Internally, in this class, we cache the instances to limit the number of objects and * make the "equals" faster. We try to minimize the number of objects created of * the number of array copy to check if we already have an instance of this TableName. 
The code * is not optimize for a new instance creation but is optimized to check for existence. * </p> */ @InterfaceAudience.Public @InterfaceStability.Evolving public final class TableName implements Comparable<TableName> { /** See {@link #createTableNameIfNecessary(ByteBuffer, ByteBuffer)} */ private static final Set<TableName> tableCache = new CopyOnWriteArraySet<TableName>(); /** Namespace delimiter */ //this should always be only 1 byte long public final static char NAMESPACE_DELIM = ':'; // A non-capture group so that this can be embedded. // regex is a bit more complicated to support nuance of tables // in default namespace //Allows only letters, digits and '_' public static final String VALID_NAMESPACE_REGEX = "(?:[a-zA-Z_0-9]+)"; //Allows only letters, digits, '_', '-' and '.' public static final String VALID_TABLE_QUALIFIER_REGEX = "(?:[a-zA-Z_0-9][a-zA-Z_0-9-.]*)"; //Concatenation of NAMESPACE_REGEX and TABLE_QUALIFIER_REGEX, //with NAMESPACE_DELIM as delimiter public static final String VALID_USER_TABLE_REGEX = "(?:(?:(?:"+VALID_NAMESPACE_REGEX+"\\"+NAMESPACE_DELIM+")?)" + "(?:"+VALID_TABLE_QUALIFIER_REGEX+"))"; /** The hbase:meta table's name. */ public static final TableName META_TABLE_NAME = valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "meta"); /** The Namespace table's name. */ public static final TableName NAMESPACE_TABLE_NAME = valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "namespace"); public static final String OLD_META_STR = ".META."; public static final String OLD_ROOT_STR = "-ROOT-"; /** * TableName for old -ROOT- table. It is used to read/process old WALs which have * ROOT edits. */ public static final TableName OLD_ROOT_TABLE_NAME = getADummyTableName(OLD_ROOT_STR); /** * TableName for old .META. table. Used in testing. 
*/ public static final TableName OLD_META_TABLE_NAME = getADummyTableName(OLD_META_STR); private final byte[] name; private final String nameAsString; private final byte[] namespace; private final String namespaceAsString; private final byte[] qualifier; private final String qualifierAsString; private final boolean systemTable; private final int hashCode; /** * Check passed byte array, "tableName", is legal user-space table name. * @return Returns passed <code>tableName</code> param * @throws IllegalArgumentException if passed a tableName is null or * is made of other than 'word' characters or underscores: i.e. * <code>[a-zA-Z_0-9.-:]</code>. The ':' is used to delimit the namespace * from the table name and can be used for nothing else. * * Namespace names can only contain 'word' characters * <code>[a-zA-Z_0-9]</code> or '_' * * Qualifier names can only contain 'word' characters * <code>[a-zA-Z_0-9]</code> or '_', '.' or '-'. * The name may not start with '.' or '-'. * * Valid fully qualified table names: * foo:bar, namespace=>foo, table=>bar * org:foo.bar, namespace=org, table=>foo.bar */ public static byte [] isLegalFullyQualifiedTableName(final byte[] tableName) { if (tableName == null || tableName.length <= 0) { throw new IllegalArgumentException("Name is null or empty"); } int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(tableName, (byte) NAMESPACE_DELIM); if (namespaceDelimIndex == 0 || namespaceDelimIndex == -1){ isLegalTableQualifierName(tableName); } else { isLegalNamespaceName(tableName, 0, namespaceDelimIndex); isLegalTableQualifierName(tableName, namespaceDelimIndex + 1, tableName.length); } return tableName; } public static byte [] isLegalTableQualifierName(final byte[] qualifierName){ isLegalTableQualifierName(qualifierName, 0, qualifierName.length); return qualifierName; } /** * Qualifier names can only contain 'word' characters * <code>[a-zA-Z_0-9]</code> or '_', '.' or '-'. * The name may not start with '.' or '-'. 
* * @param qualifierName byte array containing the qualifier name * @param start start index * @param end end index (exclusive) */ public static void isLegalTableQualifierName(final byte[] qualifierName, int start, int end){ if(end - start < 1) { throw new IllegalArgumentException("Table qualifier must not be empty"); } if (qualifierName[start] == '.' || qualifierName[start] == '-') { throw new IllegalArgumentException("Illegal first character <" + qualifierName[0] + "> at 0. Namespaces can only start with alphanumeric " + "characters': i.e. [a-zA-Z_0-9]: " + Bytes.toString(qualifierName)); } for (int i = start; i < end; i++) { if (Character.isLetterOrDigit(qualifierName[i]) || qualifierName[i] == '_' || qualifierName[i] == '-' || qualifierName[i] == '.') { continue; } throw new IllegalArgumentException("Illegal character code:" + qualifierName[i] + ", <" + (char) qualifierName[i] + "> at " + i + ". User-space table qualifiers can only contain " + "'alphanumeric characters': i.e. [a-zA-Z_0-9-.]: " + Bytes.toString(qualifierName, start, end)); } } public static void isLegalNamespaceName(byte[] namespaceName) { isLegalNamespaceName(namespaceName, 0, namespaceName.length); } /** * Valid namespace characters are [a-zA-Z_0-9] */ public static void isLegalNamespaceName(byte[] namespaceName, int offset, int length) { for (int i = offset; i < length; i++) { if (Character.isLetterOrDigit(namespaceName[i])|| namespaceName[i] == '_') { continue; } throw new IllegalArgumentException("Illegal character <" + namespaceName[i] + "> at " + i + ". Namespaces can only contain " + "'alphanumeric characters': i.e. 
[a-zA-Z_0-9]: " + Bytes.toString(namespaceName, offset, length)); } } public byte[] getName() { return name; } public String getNameAsString() { return nameAsString; } public byte[] getNamespace() { return namespace; } public String getNamespaceAsString() { return namespaceAsString; } public byte[] getQualifier() { return qualifier; } public String getQualifierAsString() { return qualifierAsString; } public byte[] toBytes() { return name; } public boolean isSystemTable() { return systemTable; } @Override public String toString() { return nameAsString; } /** * * @throws IllegalArgumentException See {@link #valueOf(byte[])} */ private TableName(ByteBuffer namespace, ByteBuffer qualifier) throws IllegalArgumentException { this.qualifier = new byte[qualifier.remaining()]; qualifier.duplicate().get(this.qualifier); this.qualifierAsString = Bytes.toString(this.qualifier); if (qualifierAsString.equals(OLD_ROOT_STR)) { throw new IllegalArgumentException(OLD_ROOT_STR + " has been deprecated."); } if (qualifierAsString.equals(OLD_META_STR)) { throw new IllegalArgumentException(OLD_META_STR + " no longer exists. The table has been " + "renamed to " + META_TABLE_NAME); } if (Bytes.equals(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME, namespace)) { // Using the same objects: this will make the comparison faster later this.namespace = NamespaceDescriptor.DEFAULT_NAMESPACE_NAME; this.namespaceAsString = NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR; this.systemTable = false; // The name does not include the namespace when it's the default one. 
this.nameAsString = qualifierAsString; this.name = this.qualifier; } else { if (Bytes.equals(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME, namespace)) { this.namespace = NamespaceDescriptor.SYSTEM_NAMESPACE_NAME; this.namespaceAsString = NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR; this.systemTable = true; } else { this.namespace = new byte[namespace.remaining()]; namespace.duplicate().get(this.namespace); this.namespaceAsString = Bytes.toString(this.namespace); this.systemTable = false; } this.nameAsString = namespaceAsString + NAMESPACE_DELIM + qualifierAsString; this.name = Bytes.toBytes(nameAsString); } this.hashCode = nameAsString.hashCode(); isLegalNamespaceName(this.namespace); isLegalTableQualifierName(this.qualifier); } /** * This is only for the old and meta tables. */ private TableName(String qualifier) { this.qualifier = Bytes.toBytes(qualifier); this.qualifierAsString = qualifier; this.namespace = NamespaceDescriptor.SYSTEM_NAMESPACE_NAME; this.namespaceAsString = NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR; this.systemTable = true; // WARNING: nameAsString is different than name for old meta & root! // This is by design. this.nameAsString = namespaceAsString + NAMESPACE_DELIM + qualifierAsString; this.name = this.qualifier; this.hashCode = nameAsString.hashCode(); } /** * Check that the object does not exist already. There are two reasons for creating the objects * only once: * 1) With 100K regions, the table names take ~20MB. * 2) Equals becomes much faster as it's resolved with a reference and an int comparison. */ private static TableName createTableNameIfNecessary(ByteBuffer bns, ByteBuffer qns) { for (TableName tn : tableCache) { if (Bytes.equals(tn.getQualifier(), qns) && Bytes.equals(tn.getNamespace(), bns)) { return tn; } } TableName newTable = new TableName(bns, qns); if (tableCache.add(newTable)) { // Adds the specified element if it is not already present return newTable; } // Someone else added it. Let's find it. 
for (TableName tn : tableCache) { if (Bytes.equals(tn.getQualifier(), qns) && Bytes.equals(tn.getNamespace(), bns)) { return tn; } } // this should never happen. throw new IllegalStateException(newTable + " was supposed to be in the cache"); } /** * It is used to create table names for old META, and ROOT table. * These tables are not really legal tables. They are not added into the cache. * @return a dummy TableName instance (with no validation) for the passed qualifier */ private static TableName getADummyTableName(String qualifier) { return new TableName(qualifier); } public static TableName valueOf(String namespaceAsString, String qualifierAsString) { if (namespaceAsString == null || namespaceAsString.length() < 1) { namespaceAsString = NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR; } for (TableName tn : tableCache) { if (qualifierAsString.equals(tn.getQualifierAsString()) && namespaceAsString.equals(tn.getNameAsString())) { return tn; } } return createTableNameIfNecessary( ByteBuffer.wrap(Bytes.toBytes(namespaceAsString)), ByteBuffer.wrap(Bytes.toBytes(qualifierAsString))); } /** * @throws IllegalArgumentException if fullName equals old root or old meta. Some code * depends on this. The test is buried in the table creation to save on array comparison * when we're creating a standard table object that will be in the cache. 
*/ public static TableName valueOf(byte[] fullName) throws IllegalArgumentException{ for (TableName tn : tableCache) { if (Arrays.equals(tn.getName(), fullName)) { return tn; } } int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(fullName, (byte) NAMESPACE_DELIM); if (namespaceDelimIndex < 0) { return createTableNameIfNecessary( ByteBuffer.wrap(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME), ByteBuffer.wrap(fullName)); } else { return createTableNameIfNecessary( ByteBuffer.wrap(fullName, 0, namespaceDelimIndex), ByteBuffer.wrap(fullName, namespaceDelimIndex + 1, fullName.length - (namespaceDelimIndex + 1))); } } /** * @throws IllegalArgumentException if fullName equals old root or old meta. Some code * depends on this. */ public static TableName valueOf(String name) { for (TableName tn : tableCache) { if (name.equals(tn.getNameAsString())) { return tn; } } int namespaceDelimIndex = name.indexOf(NAMESPACE_DELIM); byte[] nameB = Bytes.toBytes(name); if (namespaceDelimIndex < 0) { return createTableNameIfNecessary( ByteBuffer.wrap(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME), ByteBuffer.wrap(nameB)); } else { return createTableNameIfNecessary( ByteBuffer.wrap(nameB, 0, namespaceDelimIndex), ByteBuffer.wrap(nameB, namespaceDelimIndex + 1, nameB.length - (namespaceDelimIndex + 1))); } } public static TableName valueOf(byte[] namespace, byte[] qualifier) { if (namespace == null || namespace.length < 1) { namespace = NamespaceDescriptor.DEFAULT_NAMESPACE_NAME; } for (TableName tn : tableCache) { if (Arrays.equals(tn.getQualifier(), namespace) && Arrays.equals(tn.getNamespace(), namespace)) { return tn; } } return createTableNameIfNecessary( ByteBuffer.wrap(namespace), ByteBuffer.wrap(qualifier)); } public static TableName valueOf(ByteBuffer namespace, ByteBuffer qualifier) { if (namespace == null || namespace.remaining() < 1) { return createTableNameIfNecessary( ByteBuffer.wrap(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME), qualifier); } return 
createTableNameIfNecessary(namespace, qualifier); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TableName tableName = (TableName) o; return o.hashCode() == hashCode && nameAsString.equals(tableName.nameAsString); } @Override public int hashCode() { return hashCode; } /** * For performance reasons, the ordering is not lexicographic. */ @Override public int compareTo(TableName tableName) { if (this == tableName) return 0; if (this.hashCode < tableName.hashCode()) { return -1; } if (this.hashCode > tableName.hashCode()) { return 1; } return this.nameAsString.compareTo(tableName.getNameAsString()); } /** * Get the appropriate row comparator for this table. * * @return The comparator. */ public KVComparator getRowComparator() { if(TableName.META_TABLE_NAME.equals(this)) { return KeyValue.META_COMPARATOR; } return KeyValue.COMPARATOR; } }
package com.google.ratel.deps.jackson.core.io; import java.io.*; /** * Since JDK does not come with UTF-32/UCS-4, let's implement a simple * decoder to use. */ public class UTF32Reader extends BaseReader { protected final boolean _bigEndian; /** * Although input is fine with full Unicode set, Java still uses * 16-bit chars, so we may have to split high-order chars into * surrogate pairs. */ protected char _surrogate = NULL_CHAR; /** * Total read character count; used for error reporting purposes */ protected int _charCount = 0; /** * Total read byte count; used for error reporting purposes */ protected int _byteCount = 0; protected final boolean _managedBuffers; /* /********************************************************** /* Life-cycle /********************************************************** */ public UTF32Reader(IOContext ctxt, InputStream in, byte[] buf, int ptr, int len, boolean isBigEndian) { super(ctxt, in, buf, ptr, len); _bigEndian = isBigEndian; _managedBuffers = (in != null); } /* /********************************************************** /* Public API /********************************************************** */ @Override public int read(char[] cbuf, int start, int len) throws IOException { // Already EOF? if (_buffer == null) { return -1; } if (len < 1) { return len; } // Let's then ensure there's enough room... if (start < 0 || (start+len) > cbuf.length) { reportBounds(cbuf, start, len); } len += start; int outPtr = start; // Ok, first; do we have a surrogate from last round? if (_surrogate != NULL_CHAR) { cbuf[outPtr++] = _surrogate; _surrogate = NULL_CHAR; // No need to load more, already got one char } else { /* Note: we'll try to avoid blocking as much as possible. As a * result, we only need to get 4 bytes for a full char. */ int left = (_length - _ptr); if (left < 4) { if (!loadMore(left)) { // (legal) EOF? 
return -1; } } } main_loop: while (outPtr < len) { int ptr = _ptr; int ch; if (_bigEndian) { ch = (_buffer[ptr] << 24) | ((_buffer[ptr+1] & 0xFF) << 16) | ((_buffer[ptr+2] & 0xFF) << 8) | (_buffer[ptr+3] & 0xFF); } else { ch = (_buffer[ptr] & 0xFF) | ((_buffer[ptr+1] & 0xFF) << 8) | ((_buffer[ptr+2] & 0xFF) << 16) | (_buffer[ptr+3] << 24); } _ptr += 4; // Does it need to be split to surrogates? // (also, we can and need to verify illegal chars) if (ch > 0xFFFF) { // need to split into surrogates? if (ch > LAST_VALID_UNICODE_CHAR) { reportInvalid(ch, outPtr-start, "(above "+Integer.toHexString(LAST_VALID_UNICODE_CHAR)+") "); } ch -= 0x10000; // to normalize it starting with 0x0 cbuf[outPtr++] = (char) (0xD800 + (ch >> 10)); // hmmh. can this ever be 0? (not legal, at least?) ch = (0xDC00 | (ch & 0x03FF)); // Room for second part? if (outPtr >= len) { // nope _surrogate = (char) ch; break main_loop; } } cbuf[outPtr++] = (char) ch; if (_ptr >= _length) { break main_loop; } } len = outPtr - start; _charCount += len; return len; } /* /********************************************************** /* Internal methods /********************************************************** */ private void reportUnexpectedEOF(int gotBytes, int needed) throws IOException { int bytePos = _byteCount + gotBytes; int charPos = _charCount; throw new CharConversionException("Unexpected EOF in the middle of a 4-byte UTF-32 char: got " +gotBytes+", needed "+needed+", at char #"+charPos+", byte #"+bytePos+")"); } private void reportInvalid(int value, int offset, String msg) throws IOException { int bytePos = _byteCount + _ptr - 1; int charPos = _charCount + offset; throw new CharConversionException("Invalid UTF-32 character 0x" +Integer.toHexString(value)+msg+" at char #"+charPos+", byte #"+bytePos+")"); } /** * @param available Number of "unused" bytes in the input buffer * * @return True, if enough bytes were read to allow decoding of at least * one full character; false if EOF was encountered 
instead. */ private boolean loadMore(int available) throws IOException { _byteCount += (_length - available); // Bytes that need to be moved to the beginning of buffer? if (available > 0) { if (_ptr > 0) { for (int i = 0; i < available; ++i) { _buffer[i] = _buffer[_ptr+i]; } _ptr = 0; } _length = available; } else { /* Ok; here we can actually reasonably expect an EOF, * so let's do a separate read right away: */ _ptr = 0; int count = (_in == null) ? -1 : _in.read(_buffer); if (count < 1) { _length = 0; if (count < 0) { // -1 if (_managedBuffers) { freeBuffers(); // to help GC? } return false; } // 0 count is no good; let's err out reportStrangeStream(); } _length = count; } /* Need at least 4 bytes; if we don't get that many, it's an * error. */ while (_length < 4) { int count = (_in == null) ? -1 : _in.read(_buffer, _length, _buffer.length - _length); if (count < 1) { if (count < 0) { // -1, EOF... no good! if (_managedBuffers) { freeBuffers(); // to help GC? } reportUnexpectedEOF(_length, 4); } // 0 count is no good; let's err out reportStrangeStream(); } _length += count; } return true; } }
/*
 * Copyright (C) 2014-2016 LinkedIn Corp. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of the
 * License at  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed
 * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied.
 */

package gobblin.util;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;
import java.util.List;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.Properties;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Queues;
import com.google.common.io.BaseEncoding;

import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

import gobblin.configuration.ConfigurationKeys;
import gobblin.configuration.State;
import gobblin.util.deprecation.DeprecationUtils;
import gobblin.util.executors.ScalingThreadPoolExecutor;
import gobblin.writer.DataWriter;


/**
 * A utility class for working with Hadoop.
 */
@Slf4j
public class HadoopUtils {

  // Characters that may not appear in an HDFS path component: whitespace, ':', '\'.
  public static final String HDFS_ILLEGAL_TOKEN_REGEX = "[\\s:\\\\]";

  /**
   * A {@link Collection} of all known {@link FileSystem} schemes that do not support atomic renames or copies.
   *
   * <p>
   * The following important properties are useful to remember when writing code that is compatible with S3:
   * <ul>
   *  <li>Renames are not atomic, and require copying the entire source file to the destination file</li>
   *  <li>Writes to S3 using {@link FileSystem#create(Path)} will first go to the local filesystem, when the stream
   *  is closed the local file will be uploaded to S3</li>
   * </ul>
   * </p>
   */
  public static final Collection<String> FS_SCHEMES_NON_ATOMIC =
      ImmutableSortedSet.orderedBy(String.CASE_INSENSITIVE_ORDER).add("s3").add("s3a").add("s3n").build();

  // Config key limiting queries-per-second against a throttled FileSystem.
  public static final String MAX_FILESYSTEM_QPS = "filesystem.throttling.max.filesystem.qps";
  // Legacy spellings of MAX_FILESYSTEM_QPS still honored via DeprecationUtils.
  private static final List<String> DEPRECATED_KEYS = Lists.newArrayList("gobblin.copy.max.filesystem.qps");

  /**
   * Creates a new {@link Configuration} pre-populated with S3 credentials (from environment
   * variables, if present) and the Gobblin SFTP filesystem mapping.
   */
  public static Configuration newConfiguration() {
    Configuration conf = new Configuration();

    // Explicitly check for S3 environment variables, so that Hadoop can access s3 and s3n URLs.
    // h/t https://github.com/apache/spark/blob/master/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
    String awsAccessKeyId = System.getenv("AWS_ACCESS_KEY_ID");
    String awsSecretAccessKey = System.getenv("AWS_SECRET_ACCESS_KEY");
    if (awsAccessKeyId != null && awsSecretAccessKey != null) {
      conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
      conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
      conf.set("fs.s3n.awsAccessKeyId", awsAccessKeyId);
      conf.set("fs.s3n.awsSecretAccessKey", awsSecretAccessKey);
    }

    // Add a new custom filesystem mapping
    conf.set("fs.sftp.impl", "gobblin.source.extractor.extract.sftp.SftpLightWeightFileSystem");
    conf.set("fs.sftp.impl.disable.cache", "true");
    return conf;
  }

  /**
   * @deprecated Use {@link FileListUtils#listFilesRecursively(FileSystem, Path)}.
   */
  @Deprecated
  public static List<FileStatus> listStatusRecursive(FileSystem fileSystem, Path path) throws IOException {
    List<FileStatus> results = Lists.newArrayList();
    walk(results, fileSystem, path);
    return results;
  }

  /**
   * Get the path as a string without schema or authority.
   *
   * E.g. Converts sftp://user/data/file.txt to /user/data/file.txt
   */
  public static String toUriPath(Path path) {
    return path.toUri().getPath();
  }

  /**
   * A wrapper around {@link FileSystem#delete(Path, boolean)} which throws {@link IOException} if the given
   * {@link Path} exists, and {@link FileSystem#delete(Path, boolean)} returns False.
   */
  public static void deletePath(FileSystem fs, Path f, boolean recursive) throws IOException {
    if (fs.exists(f) && !fs.delete(f, recursive)) {
      throw new IOException("Failed to delete: " + f);
    }
  }

  /**
   * A wrapper around {@link FileSystem#delete(Path, boolean)} that only deletes a given {@link Path} if it is present
   * on the given {@link FileSystem}.
   */
  public static void deleteIfExists(FileSystem fs, Path path, boolean recursive) throws IOException {
    if (fs.exists(path)) {
      deletePath(fs, path, recursive);
    }
  }

  /**
   * Deletes the given {@link Path}, then walks up the directory tree deleting each ancestor
   * directory that has been left empty, stopping at the first non-empty (or missing) ancestor.
   */
  public static void deletePathAndEmptyAncestors(FileSystem fs, Path f, boolean recursive) throws IOException {
    deletePath(fs, f, recursive);
    Path parent = f.getParent();
    while (parent != null) {
      if (fs.exists(parent) && fs.listStatus(parent).length == 0) {
        deletePath(fs, parent, true);
        parent = parent.getParent();
      } else {
        break;
      }
    }
  }

  /**
   * A wrapper around {@link FileSystem#rename(Path, Path)} which throws {@link IOException} if
   * {@link FileSystem#rename(Path, Path)} returns False.
   */
  public static void renamePath(FileSystem fs, Path oldName, Path newName) throws IOException {
    renamePath(fs, oldName, newName, false);
  }

  /**
   * A wrapper around {@link FileSystem#rename(Path, Path)} which throws {@link IOException} if
   * {@link FileSystem#rename(Path, Path)} returns False.
   */
  public static void renamePath(FileSystem fs, Path oldName, Path newName, boolean overwrite) throws IOException {
    if (!fs.exists(oldName)) {
      throw new FileNotFoundException(String.format("Failed to rename %s to %s: src not found", oldName, newName));
    }
    if (fs.exists(newName)) {
      if (overwrite) {
        if (!fs.delete(newName, true)) {
          throw new IOException(
              String.format("Failed to delete %s while renaming %s to %s", newName, oldName, newName));
        }
      } else {
        throw new FileAlreadyExistsException(
            String.format("Failed to rename %s to %s: dst already exists", oldName, newName));
      }
    }
    if (!fs.rename(oldName, newName)) {
      throw new IOException(String.format("Failed to rename %s to %s", oldName, newName));
    }
  }

  /**
   * Moves a src {@link Path} from a srcFs {@link FileSystem} to a dst {@link Path} on a dstFs {@link FileSystem}. If
   * the srcFs and the dstFs have the same scheme, and neither of them are S3 schemes, then the {@link Path} is simply
   * renamed. Otherwise, the data is copied from the src {@link Path} to the dst {@link Path}. So this method can
   * handle copying data between different {@link FileSystem} implementations.
   *
   * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
   * @param src the source {@link Path} which will be moved
   * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
   * @param dst the {@link Path} to move data to
   */
  public static void movePath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, Configuration conf)
      throws IOException {
    movePath(srcFs, src, dstFs, dst, false, conf);
  }

  /**
   * Moves a src {@link Path} from a srcFs {@link FileSystem} to a dst {@link Path} on a dstFs {@link FileSystem}. If
   * the srcFs and the dstFs have the same scheme, and neither of them are S3 schemes, then the {@link Path} is simply
   * renamed. Otherwise, the data is copied from the src {@link Path} to the dst {@link Path}. So this method can
   * handle copying data between different {@link FileSystem} implementations.
   *
   * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
   * @param src the source {@link Path} which will be moved
   * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
   * @param dst the {@link Path} to move data to
   * @param overwrite true if the destination should be overwritten; otherwise, false
   */
  public static void movePath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean overwrite,
      Configuration conf) throws IOException {

    if (srcFs.getUri().getScheme().equals(dstFs.getUri().getScheme()) && !FS_SCHEMES_NON_ATOMIC
        .contains(srcFs.getUri().getScheme()) && !FS_SCHEMES_NON_ATOMIC.contains(dstFs.getUri().getScheme())) {
      renamePath(srcFs, src, dst);
    } else {
      // Cross-filesystem (or non-atomic) move: copy with deleteSource=true.
      copyPath(srcFs, src, dstFs, dst, true, overwrite, conf);
    }
  }

  /**
   * Copies data from a src {@link Path} to a dst {@link Path}.
   *
   * <p>
   * This method should be used in preference to
   * {@link FileUtil#copy(FileSystem, Path, FileSystem, Path, boolean, boolean, Configuration)}, which does not handle
   * clean up of incomplete files if there is an error while copying data.
   * </p>
   *
   * <p>
   * TODO this method does not handle cleaning up any local files leftover by writing to S3.
   * </p>
   *
   * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
   * @param src the {@link Path} to copy from the source {@link FileSystem}
   * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
   * @param dst the {@link Path} to copy data to
   */
  public static void copyPath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, Configuration conf)
      throws IOException {
    copyPath(srcFs, src, dstFs, dst, false, false, conf);
  }

  /**
   * Copies data from a src {@link Path} to a dst {@link Path}.
   *
   * <p>
   * This method should be used in preference to
   * {@link FileUtil#copy(FileSystem, Path, FileSystem, Path, boolean, boolean, Configuration)}, which does not handle
   * clean up of incomplete files if there is an error while copying data.
   * </p>
   *
   * <p>
   * TODO this method does not handle cleaning up any local files leftover by writing to S3.
   * </p>
   *
   * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
   * @param src the {@link Path} to copy from the source {@link FileSystem}
   * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
   * @param dst the {@link Path} to copy data to
   * @param overwrite true if the destination should be overwritten; otherwise, false
   */
  public static void copyPath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean overwrite,
      Configuration conf) throws IOException {
    copyPath(srcFs, src, dstFs, dst, false, overwrite, conf);
  }

  // Core copy implementation: delegates to FileUtil.copy but deletes any partially
  // written destination on failure, preserving the original exception.
  private static void copyPath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean deleteSource,
      boolean overwrite, Configuration conf) throws IOException {

    Preconditions.checkArgument(srcFs.exists(src),
        String.format("Cannot copy from %s to %s because src does not exist", src, dst));
    Preconditions.checkArgument(overwrite || !dstFs.exists(dst),
        String.format("Cannot copy from %s to %s because dst exists", src, dst));

    try {
      if (!FileUtil.copy(srcFs, src, dstFs, dst, deleteSource, overwrite, conf)) {
        throw new IOException(String.format("Failed to copy %s to %s", src, dst));
      }
    } catch (Throwable t1) {
      try {
        deleteIfExists(dstFs, dst, true);
      } catch (Throwable t2) {
        // Do nothing: best-effort cleanup; the original failure t1 is rethrown below.
      }
      throw t1;
    }
  }

  /**
   * Copies a src {@link Path} from a srcFs {@link FileSystem} to a dst {@link Path} on a dstFs {@link FileSystem}. If
   * either the srcFs or dstFs are S3 {@link FileSystem}s (as dictated by {@link #FS_SCHEMES_NON_ATOMIC}) then data is directly
   * copied from the src to the dst. Otherwise data is first copied to a tmp {@link Path}, which is then renamed to the
   * dst.
   *
   * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
   * @param src the {@link Path} to copy from the source {@link FileSystem}
   * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
   * @param dst the {@link Path} to copy data to
   * @param tmp the temporary {@link Path} to use when copying data
   * @param overwriteDst true if the destination and tmp path should be overwritten, false otherwise
   */
  public static void copyFile(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, Path tmp, boolean overwriteDst,
      Configuration conf) throws IOException {

    Preconditions.checkArgument(srcFs.isFile(src),
        String.format("Cannot copy from %s to %s because src is not a file", src, dst));

    if (FS_SCHEMES_NON_ATOMIC.contains(srcFs.getUri().getScheme())
        || FS_SCHEMES_NON_ATOMIC.contains(dstFs.getUri().getScheme())) {
      copyFile(srcFs, src, dstFs, dst, overwriteDst, conf);
    } else {
      copyFile(srcFs, src, dstFs, tmp, overwriteDst, conf);
      try {
        boolean renamed = false;
        if (overwriteDst && dstFs.exists(dst)) {
          try {
            deletePath(dstFs, dst, true);
          } finally {
            // NOTE(review): the rename runs even if the delete throws, so tmp is
            // promoted (or the rename itself fails) before the delete's exception
            // propagates — presumably intentional to avoid losing the copied data.
            renamePath(dstFs, tmp, dst);
            renamed = true;
          }
        }
        if (!renamed) {
          renamePath(dstFs, tmp, dst);
        }
      } finally {
        // Always clear the tmp path, whether or not the promotion succeeded.
        deletePath(dstFs, tmp, true);
      }
    }
  }

  /**
   * Copy a file from a srcFs {@link FileSystem} to a dstFs {@link FileSystem}. The src {@link Path} must be a file,
   * that is {@link FileSystem#isFile(Path)} must return true for src.
   *
   * <p>
   * If overwrite is specified to true, this method may delete the dst directory even if the copy from src to dst fails.
   * </p>
   *
   * @param srcFs the src {@link FileSystem} to copy the file from
   * @param src the src {@link Path} to copy
   * @param dstFs the destination {@link FileSystem} to write to
   * @param dst the destination {@link Path} to write to
   * @param overwrite true if the dst {@link Path} should be overwritten, false otherwise
   */
  public static void copyFile(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean overwrite,
      Configuration conf) throws IOException {

    Preconditions.checkArgument(srcFs.isFile(src),
        String.format("Cannot copy from %s to %s because src is not a file", src, dst));
    Preconditions.checkArgument(overwrite || !dstFs.exists(dst),
        String.format("Cannot copy from %s to %s because dst exists", src, dst));

    try (InputStream in = srcFs.open(src); OutputStream out = dstFs.create(dst, overwrite)) {
      IOUtils.copyBytes(in, out, conf, false);
    } catch (Throwable t1) {
      try {
        deleteIfExists(dstFs, dst, true);
      } catch (Throwable t2) {
        // Do nothing: best-effort cleanup; the original failure t1 is rethrown below.
      }
      throw t1;
    }
  }

  // Recursive helper for listStatusRecursive: accumulates only file (non-directory) statuses.
  private static void walk(List<FileStatus> results, FileSystem fileSystem, Path path) throws IOException {
    for (FileStatus status : fileSystem.listStatus(path)) {
      if (!status.isDirectory()) {
        results.add(status);
      } else {
        walk(results, fileSystem, status.getPath());
      }
    }
  }

  /**
   * This method is an additive implementation of the {@link FileSystem#rename(Path, Path)} method. It moves all the
   * files/directories under 'from' path to the 'to' path without overwriting existing directories in the 'to' path.
   *
   * <p>
   * The rename operation happens at the first non-existent sub-directory. If a directory at destination path already
   * exists, it recursively tries to move sub-directories. If all the sub-directories also exist at the destination,
   * a file level move is done
   * </p>
   *
   * @param fileSystem on which the data needs to be moved
   * @param from path of the data to be moved
   * @param to path of the data to be moved
   */
  public static void renameRecursively(FileSystem fileSystem, Path from, Path to) throws IOException {

    // Renames go through a rate-limited view of the filesystem to avoid overwhelming the NameNode.
    FileSystem throttledFS = getOptionallyThrottledFileSystem(fileSystem, 10000);

    ExecutorService executorService = ScalingThreadPoolExecutor.newScalingThreadPool(1, 100, 100,
        ExecutorsUtils.newThreadFactory(Optional.of(log), Optional.of("rename-thread-%d")));
    Queue<Future<?>> futures = Queues.newConcurrentLinkedQueue();

    try {
      if (!fileSystem.exists(from)) {
        return;
      }
      futures.add(executorService
          .submit(new RenameRecursively(throttledFS, fileSystem.getFileStatus(from), to, executorService, futures)));
      // Drain until quiescent: each task may enqueue more futures for sub-directories.
      while (!futures.isEmpty()) {
        try {
          futures.poll().get();
        } catch (ExecutionException | InterruptedException ee) {
          throw new IOException(ee.getCause());
        }
      }
    } finally {
      ExecutorsUtils.shutdownExecutorService(executorService, Optional.of(log), 1, TimeUnit.SECONDS);
    }
  }

  /**
   * Calls {@link #getOptionallyThrottledFileSystem(FileSystem, int)} parsing the qps from the input {@link State}
   * at key {@link #MAX_FILESYSTEM_QPS}.
   * @throws IOException
   */
  public static FileSystem getOptionallyThrottledFileSystem(FileSystem fs, State state) throws IOException {
    DeprecationUtils.renameDeprecatedKeys(state, MAX_FILESYSTEM_QPS, DEPRECATED_KEYS);

    if (state.contains(MAX_FILESYSTEM_QPS)) {
      return getOptionallyThrottledFileSystem(fs, state.getPropAsInt(MAX_FILESYSTEM_QPS));
    }
    return fs;
  }

  /**
   * Get a throttled {@link FileSystem} that limits the number of queries per second to a {@link FileSystem}. If
   * the input qps is <= 0, no such throttling will be performed.
   * @throws IOException
   */
  public static FileSystem getOptionallyThrottledFileSystem(FileSystem fs, int qpsLimit) throws IOException {
    // Avoid double-throttling: if any decorator in the lineage already rate-controls, return as-is.
    if (fs instanceof Decorator) {
      for (Object obj : DecoratorUtils.getDecoratorLineage(fs)) {
        if (obj instanceof RateControlledFileSystem) {
          // Already rate controlled
          return fs;
        }
      }
    }

    if (qpsLimit > 0) {
      try {
        RateControlledFileSystem newFS = new RateControlledFileSystem(fs, qpsLimit);
        newFS.startRateControl();
        return newFS;
      } catch (ExecutionException ee) {
        throw new IOException("Could not create throttled FileSystem.", ee);
      }
    }
    return fs;
  }

  // Task for renameRecursively: attempts a whole-tree rename; on collision with an
  // existing directory, fans out one task per child. Collided files are skipped.
  @AllArgsConstructor
  private static class RenameRecursively implements Runnable {

    private final FileSystem fileSystem;
    private final FileStatus from;
    private final Path to;
    private final ExecutorService executorService;
    private final Queue<Future<?>> futures;

    @Override
    public void run() {
      try {

        // Attempt to move safely if directory, unsafely if file (for performance, files are much less likely to collide on target)
        boolean moveSucessful = this.from.isDirectory() ? safeRenameIfNotExists(this.fileSystem, this.from.getPath(), this.to)
            : unsafeRenameIfNotExists(this.fileSystem, this.from.getPath(), this.to);

        if (!moveSucessful) {
          if (this.from.isDirectory()) {
            for (FileStatus fromFile : this.fileSystem.listStatus(this.from.getPath())) {

              // NOTE(review): relative path is derived via string surgery on the URIs —
              // assumes the child path string literally extends the parent's; verify
              // against filesystems that normalize URIs differently.
              Path relativeFilePath = new Path(StringUtils.substringAfter(fromFile.getPath().toString(),
                  this.from.getPath().toString() + Path.SEPARATOR));

              Path toFilePath = new Path(this.to, relativeFilePath);

              this.futures.add(this.executorService.submit(
                  new RenameRecursively(this.fileSystem, fromFile, toFilePath, this.executorService, this.futures)));
            }
          } else {
            log.info(String.format("File already exists %s. Will not rewrite", this.to));
          }
        }

      } catch (IOException ioe) {
        // Runnable cannot throw checked exceptions; the caller unwraps this from ExecutionException.
        throw new RuntimeException(ioe);
      }
    }
  }

  /**
   * Renames from to to if to doesn't exist in a thread-safe way. This method is necessary because
   * {@link FileSystem#rename} is inconsistent across file system implementations, e.g. in some of them rename(foo, bar)
   * will create bar/foo if bar already existed, but it will only create bar if it didn't.
   *
   * <p>
   * The thread-safety is only guaranteed among calls to this method. An external modification to the relevant
   * target directory could still cause unexpected results in the renaming.
   * </p>
   *
   * @param fs filesystem where rename will be executed.
   * @param from origin {@link Path}.
   * @param to target {@link Path}.
   * @return true if rename succeeded, false if the target already exists.
   * @throws IOException if rename failed for reasons other than target exists.
   */
  public synchronized static boolean safeRenameIfNotExists(FileSystem fs, Path from, Path to) throws IOException {
    return unsafeRenameIfNotExists(fs, from, to);
  }

  /**
   * Renames from to to if to doesn't exist in a non-thread-safe way.
   *
   * @param fs filesystem where rename will be executed.
   * @param from origin {@link Path}.
   * @param to target {@link Path}.
   * @return true if rename succeeded, false if the target already exists.
   * @throws IOException if rename failed for reasons other than target exists.
   */
  public static boolean unsafeRenameIfNotExists(FileSystem fs, Path from, Path to) throws IOException {
    if (!fs.exists(to)) {
      if (!fs.exists(to.getParent())) {
        fs.mkdirs(to.getParent());
      }

      if (!fs.rename(from, to)) {
        throw new IOException(String.format("Failed to rename %s to %s.", from, to));
      }
      return true;
    }
    return false;
  }

  /**
   * A thread safe variation of {@link #renamePath(FileSystem, Path, Path)} which can be used in
   * multi-threaded/multi-mapper environment. The rename operation always happens at file level hence directories are
   * not overwritten under the 'to' path.
   *
   * <p>
   * If the contents of destination 'to' path is not expected to be modified concurrently, use
   * {@link #renamePath(FileSystem, Path, Path)} which is faster and more optimized
   * </p>
   *
   * <b>NOTE: This does not seem to be working for all {@link FileSystem} implementations. Use
   * {@link #renameRecursively(FileSystem, Path, Path)}</b>
   *
   * @param fileSystem on which the data needs to be moved
   * @param from path of the data to be moved
   * @param to path of the data to be moved
   *
   */
  public static void safeRenameRecursively(FileSystem fileSystem, Path from, Path to) throws IOException {

    for (FileStatus fromFile : FileListUtils.listFilesRecursively(fileSystem, from)) {

      Path relativeFilePath =
          new Path(StringUtils.substringAfter(fromFile.getPath().toString(), from.toString() + Path.SEPARATOR));

      Path toFilePath = new Path(to, relativeFilePath);

      if (!fileSystem.exists(toFilePath)) {
        if (!fileSystem.rename(fromFile.getPath(), toFilePath)) {
          throw new IOException(String.format("Failed to rename %s to %s.", fromFile.getPath(), toFilePath));
        }
        log.info(String.format("Renamed %s to %s", fromFile.getPath(), toFilePath));
      } else {
        log.info(String.format("File already exists %s. Will not rewrite", toFilePath));
      }
    }
  }

  /**
   * Creates a {@link Configuration} seeded by {@link #newConfiguration()} plus every property in the given state.
   */
  public static Configuration getConfFromState(State state) {
    Configuration conf = newConfiguration();
    for (String propName : state.getPropertyNames()) {
      conf.set(propName, state.getProp(propName));
    }
    return conf;
  }

  /**
   * Creates a {@link Configuration} seeded by {@link #newConfiguration()} plus every given property.
   */
  public static Configuration getConfFromProperties(Properties properties) {
    Configuration conf = newConfiguration();
    for (String propName : properties.stringPropertyNames()) {
      conf.set(propName, properties.getProperty(propName));
    }
    return conf;
  }

  /**
   * Converts a {@link Configuration} into a {@link State}, copying every key/value entry.
   */
  public static State getStateFromConf(Configuration conf) {
    State state = new State();
    for (Entry<String, String> entry : conf) {
      state.setProp(entry.getKey(), entry.getValue());
    }
    return state;
  }

  /**
   * Set the group associated with a given path.
   *
   * @param fs the {@link FileSystem} instance used to perform the file operation
   * @param path the given path
   * @param group the group associated with the path
   * @throws IOException
   */
  public static void setGroup(FileSystem fs, Path path, String group) throws IOException {
    // Owner is preserved; only the group changes.
    fs.setOwner(path, fs.getFileStatus(path).getOwner(), group);
  }

  /**
   * Serialize a {@link Writable} object into a string.
   *
   * @param writable the {@link Writable} object to be serialized
   * @return a string serialized from the {@link Writable} object (base64-encoded Writable bytes)
   * @throws IOException if there's something wrong with the serialization
   */
  public static String serializeToString(Writable writable) throws IOException {
    try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream)) {
      writable.write(dataOutputStream);
      return BaseEncoding.base64().encode(byteArrayOutputStream.toByteArray());
    }
  }

  /**
   * Deserialize a {@link Writable} object from a string.
   *
   * @param writableClass the {@link Writable} implementation class
   * @param serializedWritableStr the string containing a serialized {@link Writable} object
   * @return a {@link Writable} deserialized from the string
   * @throws IOException if there's something wrong with the deserialization
   */
  public static Writable deserializeFromString(Class<? extends Writable> writableClass, String serializedWritableStr)
      throws IOException {
    return deserializeFromString(writableClass, serializedWritableStr, new Configuration());
  }

  /**
   * Deserialize a {@link Writable} object from a string.
   *
   * @param writableClass the {@link Writable} implementation class
   * @param serializedWritableStr the string containing a serialized {@link Writable} object
   * @param configuration a {@link Configuration} object containing Hadoop configuration properties
   * @return a {@link Writable} deserialized from the string
   * @throws IOException if there's something wrong with the deserialization
   */
  public static Writable deserializeFromString(Class<? extends Writable> writableClass, String serializedWritableStr,
      Configuration configuration) throws IOException {
    // Inverse of serializeToString: base64-decode, then readFields on a fresh instance.
    byte[] writableBytes = BaseEncoding.base64().decode(serializedWritableStr);

    try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(writableBytes);
        DataInputStream dataInputStream = new DataInputStream(byteArrayInputStream)) {
      Writable writable = ReflectionUtils.newInstance(writableClass, configuration);
      writable.readFields(dataInputStream);
      return writable;
    }
  }

  /**
   * Given a {@link FsPermission} objects, set a key, value pair in the given {@link State} for the writer to
   * use when creating files. This method should be used in conjunction with {@link #deserializeWriterFilePermissions(State, int, int)}.
   */
  public static void serializeWriterFilePermissions(State state, int numBranches, int branchId,
      FsPermission fsPermissions) {
    serializeFsPermissions(state,
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PERMISSIONS, numBranches, branchId),
        fsPermissions);
  }

  /**
   * Given a {@link FsPermission} objects, set a key, value pair in the given {@link State} for the writer to
   * use when creating directories. This method should be used in conjunction with {@link #deserializeWriterDirPermissions(State, int, int)}.
   */
  public static void serializeWriterDirPermissions(State state, int numBranches, int branchId,
      FsPermission fsPermissions) {
    serializeFsPermissions(state,
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_DIR_PERMISSIONS, numBranches, branchId),
        fsPermissions);
  }

  /**
   * Helper method that serializes a {@link FsPermission} object.
   */
  private static void serializeFsPermissions(State state, String key, FsPermission fsPermissions) {
    // Stored as a 4-digit octal string (e.g. "0755") to match PERMISSION_PARSING_RADIX on read.
    state.setProp(key, String.format("%04o", fsPermissions.toShort()));
  }

  /**
   * Given a {@link String} in octal notation, set a key, value pair in the given {@link State} for the writer to
   * use when creating files. This method should be used in conjunction with {@link #deserializeWriterFilePermissions(State, int, int)}.
   */
  public static void setWriterFileOctalPermissions(State state, int numBranches, int branchId,
      String octalPermissions) {
    state.setProp(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PERMISSIONS, numBranches, branchId),
        octalPermissions);
  }

  /**
   * Given a {@link String} in octal notation, set a key, value pair in the given {@link State} for the writer to
   * use when creating directories. This method should be used in conjunction with {@link #deserializeWriterDirPermissions(State, int, int)}.
   */
  public static void setWriterDirOctalPermissions(State state, int numBranches, int branchId, String octalPermissions) {
    state.setProp(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_DIR_PERMISSIONS, numBranches, branchId),
        octalPermissions);
  }

  /**
   * Deserializes a {@link FsPermission}s object that should be used when a {@link DataWriter} is writing a file.
   */
  public static FsPermission deserializeWriterFilePermissions(State state, int numBranches, int branchId) {
    return new FsPermission(state.getPropAsShortWithRadix(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PERMISSIONS, numBranches, branchId),
        FsPermission.getDefault().toShort(), ConfigurationKeys.PERMISSION_PARSING_RADIX));
  }

  /**
   * Deserializes a {@link FsPermission}s object that should be used when a {@link DataWriter} is creating directories.
   */
  public static FsPermission deserializeWriterDirPermissions(State state, int numBranches, int branchId) {
    return new FsPermission(state.getPropAsShortWithRadix(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_DIR_PERMISSIONS, numBranches, branchId),
        FsPermission.getDefault().toShort(), ConfigurationKeys.PERMISSION_PARSING_RADIX));
  }

  /**
   * Get {@link FsPermission} from a {@link State} object.
   *
   * @param props A {@link State} containing properties.
   * @param propName The property name for the permission. If not contained in the given state,
   * defaultPermission will be used.
   * @param defaultPermission default permission if propName is not contained in props.
   * @return An {@link FsPermission} object.
   */
  public static FsPermission deserializeFsPermission(State props, String propName, FsPermission defaultPermission) {
    short mode = props.getPropAsShortWithRadix(propName, defaultPermission.toShort(),
        ConfigurationKeys.PERMISSION_PARSING_RADIX);
    return new FsPermission(mode);
  }

  /**
   * Remove illegal HDFS path characters from the given path. Illegal characters will be replaced
   * with the given substitute.
   */
  public static String sanitizePath(String path, String substitute) {
    // The substitute itself must be free of illegal characters, or sanitization would reintroduce them.
    Preconditions.checkArgument(substitute.replaceAll(HDFS_ILLEGAL_TOKEN_REGEX, "").equals(substitute),
        "substitute contains illegal characters: " + substitute);

    return path.replaceAll(HDFS_ILLEGAL_TOKEN_REGEX, substitute);
  }

  /**
   * Remove illegal HDFS path characters from the given path. Illegal characters will be replaced
   * with the given substitute.
   */
  public static Path sanitizePath(Path path, String substitute) {
    return new Path(sanitizePath(path.toString(), substitute));
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.eviction;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import java.io.File;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;

import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.EvictionAction;
import org.apache.geode.cache.EvictionAlgorithm;
import org.apache.geode.cache.EvictionAttributes;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.util.ObjectSizer;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.OSProcess;
import org.apache.geode.internal.SharedLibrary;
import org.apache.geode.internal.cache.BucketRegion;
import org.apache.geode.internal.cache.CachedDeserializableFactory;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.RegionMap;
import org.apache.geode.internal.cache.TestNonSizerObject;
import org.apache.geode.internal.cache.TestObjectSizerImpl;
import org.apache.geode.internal.cache.entries.AbstractLRURegionEntry;
import org.apache.geode.internal.size.ReflectionSingleObjectSizer;
import org.apache.geode.internal.size.Sizeable;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.cache.CacheTestCase;
import org.apache.geode.test.junit.categories.EvictionTest;

/**
 * Verifies that heap-LRU eviction accounts for per-entry sizes correctly, both without an
 * {@link ObjectSizer} and with custom sizers for standard and user-defined key/value objects.
 *
 * <p>Expected sizes are computed from the object header size plus payload, rounded up to the
 * JVM word size, plus the eviction controller's per-entry overhead.
 */
@Category({EvictionTest.class})
@SuppressWarnings("serial")
public class EvictionObjectSizerDUnitTest extends CacheTestCase {

  // Entry-count limit; only used by the LRU_ENTRY branch of createPartitionedRegion.
  private static final int maxEnteries = 20;

  // Memory limit in MB; only used by the LRU_MEMORY branch of createPartitionedRegion.
  private static final int maxSizeInMb = 20;

  private static Cache cache;

  private static Region region;

  @After
  public void tearDown() throws Exception {
    if (cache != null) {
      cache.close();
    }
    cache = null;
    region = null;
  }

  /**
   * Without object sizer
   */
  @Test
  public void testWithoutObjectSizerForHeapLRU() throws Exception {
    prepareScenario(EvictionAlgorithm.LRU_HEAP, null);

    // Size of overhead=
    // 49(HeapLRUCapacityController)((PartitionedRegion)region).getEvictionController()).getPerEntryOverhead()
    // Size of Integer key= 0 (it is inlined)
    // Size of Byte Array(1 MB)= 1024 * 1024 + 8 (byte object) + 4 ( size of object) + 4 (rounded up
    // to nearest word)
    // = 1048592
    // Total Size of each entry should be= 1048592
    putData("PR1", 2, 1);
    int keySize = 0;
    int valueSize =
        SharedLibrary.getObjectHeaderSize() + 4 /* array length */ + (1024 * 1024) /* bytes */;
    valueSize = (int) ReflectionSingleObjectSizer.roundUpSize(valueSize);
    int entrySize = keySize + valueSize
        + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
            .getPerEntryOverhead();
    verifySize("PR1", 2, entrySize);
    assertEquals(2 * entrySize,
        ((PartitionedRegion) region).getEvictionController().getCounters().getCounter());
  }

  /**
   * With object sizer for standard objects.Key -Integer Value ByteArray
   */
  @Test
  public void testObjectSizerForHeapLRU_StandardObjects() throws Exception {
    prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());

    // Size of overhead= 49
    // Size of Integer key= 0(inlined)
    // Size of Byte Array(1 MB) + overhead (16 bytes)= 1048592 + 16
    // Total Size of each entry should be= 1048592
    putData("PR1", 2, 1);
    {
      int keySize = 0;
      int valueSize =
          SharedLibrary.getObjectHeaderSize() + 4 /* array length */ + (1024 * 1024) /* bytes */;
      valueSize = (int) ReflectionSingleObjectSizer.roundUpSize(valueSize);
      int entrySize = keySize + valueSize
          + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
              .getPerEntryOverhead();
      verifySize("PR1", 2, entrySize);
    }

    // Size of overhead= 49
    // Size of Integer key= 0(inlined)
    // Size of Byte Array(2 MB) + overhead= 2097152 + 16
    // Total Size of each entry should be= 2097201
    {
      putData("PR1", 2, 2);
      int keySize = 0;
      int valueSize = SharedLibrary.getObjectHeaderSize() + 4 /* array length */
          + (1024 * 1024 * 2) /* bytes */;
      valueSize = (int) ReflectionSingleObjectSizer.roundUpSize(valueSize);
      int entrySize = keySize + valueSize
          + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
              .getPerEntryOverhead();
      verifySize("PR1", 2, entrySize);
    }
  }

  /**
   * With object sizer for customized value object implementing ObjectSizer .Key -Integer Value
   * TestNonSizerObject
   */
  @Test
  public void testObjectSizerForHeapLRU_CustomizedNonSizerObject() throws Exception {
    prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());

    // Size of overhead= 49
    // Size of Integer key= 0(inlined)
    // Size of byte array 0 + size of overhead(16)
    // Total Size of each entry should be= 54
    putCustomizedData(1, new byte[0]);
    {
      int keySize = 0;
      int valueSize = SharedLibrary.getObjectHeaderSize() + 4 /* array length */ + 0 /* bytes */;
      valueSize = (int) ReflectionSingleObjectSizer.roundUpSize(valueSize);
      int entrySize = keySize + valueSize
          + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
              .getPerEntryOverhead();
      assertEquals(entrySize, getSizeOfCustomizedData(1));
    }

    // Size of overhead= 49
    // Size of Integer key= 0(inlined)
    // Size of byte array 4 + size of overhead(12)
    // Total Size of each entry should be= 59
    putCustomizedData(2, new byte[4]);
    {
      int keySize = 0;
      int valueSize = SharedLibrary.getObjectHeaderSize() + 4 /* array length */ + 4 /* bytes */;
      valueSize = (int) ReflectionSingleObjectSizer.roundUpSize(valueSize);
      int entrySize = keySize + valueSize
          + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
              .getPerEntryOverhead();
      assertEquals(entrySize, getSizeOfCustomizedData(2));
    }
  }

  /**
   * With object sizer for customized value object implementing ObjectSizer .Key -Integer Value
   * TestObjectSizerImpl
   */
  @Test
  public void testObjectSizerForHeapLRU_CustomizedSizerObject() throws Exception {
    prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());

    // Size of overhead= 49
    // Size of Integer key= 0(inlined)
    // Size of TestObjectSizerImpl= 160 (serialized size), changed to 156 because package changed to
    // org.apache.geode
    // Total Size of entry should be= 71
    putCustomizedData(1, new TestObjectSizerImpl());
    int expected = (0 + 156 + (Sizeable.PER_OBJECT_OVERHEAD * 2)
        + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
            .getPerEntryOverhead());
    assertEquals(expected, getSizeOfCustomizedData(1));
    assertEquals(expected,
        ((PartitionedRegion) region).getEvictionController().getCounters().getCounter());
  }

  /**
   * With object sizer for customized key and value objects.
   */
  @Test
  public void testObjectSizerForHeapLRU_CustomizedSizerObjects() throws Exception {
    prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());

    // Size of overhead= 49
    // Size of TestNonSizerObject key= 1(customized)
    // Size of TestObjectSizerImpl= 160 (serialized size), changed to 156 because package changed to
    // org.apache.geode
    // Total Size of entry should be= 72
    putCustomizedObjects(new TestNonSizerObject("1"), new TestObjectSizerImpl());
    int expected = (1 + 156 + (Sizeable.PER_OBJECT_OVERHEAD * 2)
        + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
            .getPerEntryOverhead());
    assertEquals(expected, getSizeOfCustomizedObject(new TestNonSizerObject("1")));
    assertEquals(expected,
        ((PartitionedRegion) region).getEvictionController().getCounters().getCounter());
  }

  // Creates the cache and the "PR1" partitioned region used by every test.
  private void prepareScenario(EvictionAlgorithm evictionAlgorithm, ObjectSizer sizer) {
    createMyCache();
    createPartitionedRegion(true, evictionAlgorithm, "PR1", 1, 1, 10000, sizer);
  }

  private void createMyCache() {
    Properties props = new Properties();
    DistributedSystem ds = getSystem(props);
    assertNotNull(ds);
    // Reconnect to get a fresh distributed system for this test.
    ds.disconnect();
    ds = getSystem(props);
    cache = CacheFactory.create(ds);
    cache.getResourceManager().setEvictionHeapPercentage(50);
  }

  private static void createPartitionedRegion(boolean setEvictionOn,
      EvictionAlgorithm evictionAlgorithm, String regionName, int totalNoOfBuckets,
      int evictionAction, int evictorInterval, ObjectSizer sizer) {
    final AttributesFactory factory = new AttributesFactory();
    PartitionAttributesFactory partitionAttributesFactory = new PartitionAttributesFactory()
        .setRedundantCopies(totalNoOfBuckets == 4 ? 0 : 1).setTotalNumBuckets(totalNoOfBuckets);
    factory.setConcurrencyChecksEnabled(false);
    factory.setPartitionAttributes(partitionAttributesFactory.create());
    if (setEvictionOn) {
      // evictionAction == 1 means LOCAL_DESTROY; anything else overflows to disk.
      if (evictionAlgorithm.isLRUHeap()) {
        factory.setEvictionAttributes(EvictionAttributes.createLRUHeapAttributes(sizer,
            evictionAction == 1 ? EvictionAction.LOCAL_DESTROY
                : EvictionAction.OVERFLOW_TO_DISK));
      } else if (evictionAlgorithm.isLRUMemory()) {
        factory.setEvictionAttributes(EvictionAttributes.createLRUMemoryAttributes(maxSizeInMb,
            sizer, evictionAction == 1 ? EvictionAction.LOCAL_DESTROY
                : EvictionAction.OVERFLOW_TO_DISK));
      } else {
        factory.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(maxEnteries,
            evictionAction == 1 ? EvictionAction.LOCAL_DESTROY
                : EvictionAction.OVERFLOW_TO_DISK));
      }
      if (evictionAction == 2) {
        // Overflow-to-disk needs a dedicated disk store per JVM.
        factory.setDiskSynchronous(true);
        final File[] diskDirs = new File[1];
        diskDirs[0] =
            new File("Partitioned_Region_Eviction/" + "LogFile" + "_" + OSProcess.getId());
        diskDirs[0].mkdirs();
        factory.setDiskStoreName(cache.createDiskStoreFactory().setDiskDirs(diskDirs)
            .create("EvictionObjectSizerDUnitTest").getName());
      }
    }
    region = cache.createRegion(regionName, factory.create());
    assertNotNull(region);
    LogWriterUtils.getLogWriter().info("Partitioned Region created Successfully :" + region);
  }

  /**
   * returns data size in bytes
   */
  private static int putData(final String regionName, final int noOfElememts,
      final int sizeOfElement) {
    int result = 0;
    final Region pr = cache.getRegion(regionName);
    for (int counter = 1; counter <= noOfElememts; counter++) {
      byte[] baValue = new byte[sizeOfElement * 1024 * 1024];
      int baSize = CachedDeserializableFactory.getByteSize(baValue);
      result += baSize;
      // Integer.valueOf instead of the deprecated new Integer(int) constructor.
      pr.put(Integer.valueOf(counter), baValue);
    }
    return result;
  }

  // Asserts that every entry in every non-empty local bucket has the expected sized entry size.
  private static void verifySize(String regionName, int noOfElememts, int entrySize) {
    final Region pr = cache.getRegion(regionName);
    for (final Iterator i =
        ((PartitionedRegion) pr).getDataStore().getAllLocalBuckets().iterator(); i.hasNext();) {
      final Map.Entry entry = (Map.Entry) i.next();
      final BucketRegion bucketRegion = (BucketRegion) entry.getValue();
      if (bucketRegion == null) {
        continue;
      } else {
        RegionMap map = bucketRegion.getRegionMap();
        if (map == null || map.size() == 0) {
          continue;
        }
        LogWriterUtils.getLogWriter()
            .info("Checking for entry in bucket region: " + bucketRegion);
        for (int counter = 1; counter <= noOfElememts; counter++) {
          assertEquals(entrySize,
              ((AbstractLRURegionEntry) map.getEntry(Integer.valueOf(counter))).getEntrySize());
        }
      }
    }
  }

  private void putCustomizedData(int counter, Object object) {
    final Region pr = cache.getRegion("PR1");
    pr.put(Integer.valueOf(counter), object);
  }

  private void putCustomizedObjects(Object key, Object value) {
    final Region pr = cache.getRegion("PR1");
    pr.put(key, value);
  }

  /**
   * Returns the sized entry size recorded for the given integer key, or 0 if the key is not
   * present in any local bucket.
   */
  private int getSizeOfCustomizedData(int counter) {
    final Region pr = cache.getRegion("PR1");
    for (final Iterator i =
        ((PartitionedRegion) pr).getDataStore().getAllLocalBuckets().iterator(); i.hasNext();) {
      final Map.Entry entry = (Map.Entry) i.next();
      final BucketRegion bucketRegion = (BucketRegion) entry.getValue();
      if (bucketRegion == null) {
        continue;
      }
      RegionMap map = bucketRegion.getRegionMap();
      // Only the bucket that hosts the key has an entry for it. Skip buckets that do not,
      // instead of dereferencing a null entry (mirrors getSizeOfCustomizedObject below).
      AbstractLRURegionEntry re = (AbstractLRURegionEntry) map.getEntry(Integer.valueOf(counter));
      if (re != null) {
        return re.getEntrySize();
      }
    }
    return 0;
  }

  /**
   * Returns the sized entry size recorded for the given key object, or 0 if the key is not
   * present in any local bucket.
   */
  private int getSizeOfCustomizedObject(Object object) {
    final Region pr = cache.getRegion("PR1");
    for (final Iterator i =
        ((PartitionedRegion) pr).getDataStore().getAllLocalBuckets().iterator(); i.hasNext();) {
      final Map.Entry entry = (Map.Entry) i.next();
      final BucketRegion bucketRegion = (BucketRegion) entry.getValue();
      if (bucketRegion == null) {
        continue;
      } else {
        RegionMap map = bucketRegion.getRegionMap();
        AbstractLRURegionEntry re = (AbstractLRURegionEntry) map.getEntry(object);
        if (re != null) {
          return re.getEntrySize();
        }
      }
    }
    return 0;
  }
}
/*
 * Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
 * license agreements.  See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.  Crate licenses
 * this file to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.  You may
 * obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * However, if you have executed another commercial license agreement
 * with Crate these terms will supersede the license and you may use the
 * software solely pursuant to the terms of the relevant commercial agreement.
 */

package io.crate.analyze;

import io.crate.metadata.doc.DocTableInfo;
import io.crate.sql.SqlFormatter;
import io.crate.sql.tree.CreateTable;
import io.crate.test.integration.CrateDummyClusterServiceUnitTest;
import io.crate.testing.SQLExecutor;
import org.hamcrest.Matchers;
import org.junit.Test;

/**
 * Tests that {@code MetadataToASTNodeResolver} can rebuild a {@code CREATE TABLE} statement from
 * a table's metadata. Each test creates a table via {@link SQLExecutor}, resolves the
 * {@link DocTableInfo} back into a {@link CreateTable} AST node, and compares the formatted SQL
 * against the expected statement (including all default table settings in the WITH clause).
 */
public class MetadataToASTNodeResolverTest extends CrateDummyClusterServiceUnitTest {

    @Override
    protected boolean enableWarningsCheck() {
        // The statements under test intentionally use deprecated types/settings.
        return false;
    }

    // Verifies type mapping for all primitive, array and object column types.
    @Test
    public void testBuildCreateTableColumns() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("create table doc.test (" +
                      " bools boolean," +
                      " bytes byte," +
                      " strings string," +
                      " shorts short," +
                      " floats float," +
                      " doubles double," +
                      " ints integer," +
                      " longs long," +
                      " timestamp timestamp with time zone," +
                      " ip_addr ip," +
                      " arr_simple array(string)," +
                      " arr_geo_point array(geo_point)," +
                      " arr_obj array(object(strict) as (" +
                      " col_1 long," +
                      " col_2 string" +
                      " ))," +
                      " obj object as (" +
                      " col_1 long," +
                      " col_2 string" +
                      " )" +
                      ") " +
                      "clustered into 5 shards " +
                      "with (" +
                      " number_of_replicas = '0-all'," +
                      " \"merge.scheduler.max_thread_count\" = 1" +
                      ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("doc.test");

        CreateTable node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"doc\".\"test\" (\n" +
                     " \"bools\" BOOLEAN,\n" +
                     " \"bytes\" CHAR,\n" +
                     " \"strings\" TEXT,\n" +
                     " \"shorts\" SMALLINT,\n" +
                     " \"floats\" REAL,\n" +
                     " \"doubles\" DOUBLE PRECISION,\n" +
                     " \"ints\" INTEGER,\n" +
                     " \"longs\" BIGINT,\n" +
                     " \"timestamp\" TIMESTAMP WITH TIME ZONE,\n" +
                     " \"ip_addr\" IP,\n" +
                     " \"arr_simple\" ARRAY(TEXT),\n" +
                     " \"arr_geo_point\" ARRAY(GEO_POINT),\n" +
                     " \"arr_obj\" ARRAY(OBJECT(STRICT) AS (\n" +
                     " \"col_1\" BIGINT,\n" +
                     " \"col_2\" TEXT\n" +
                     " )),\n" +
                     " \"obj\" OBJECT(DYNAMIC) AS (\n" +
                     " \"col_1\" BIGINT,\n" +
                     " \"col_2\" TEXT\n" +
                     " )\n" +
                     ")\n" +
                     "CLUSTERED INTO 5 SHARDS\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " \"merge.scheduler.max_thread_count\" = 1,\n" +
                     " number_of_replicas = '0-all',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    // Verifies that a composite PRIMARY KEY constraint is reconstructed.
    @Test
    public void testBuildCreateTablePrimaryKey() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("create table myschema.test (" +
                      " pk_col_one long," +
                      " pk_col_two long," +
                      " primary key (pk_col_one, pk_col_two)" +
                      ") " +
                      "clustered into 5 shards " +
                      "with (" +
                      " number_of_replicas = '0-all'," +
                      " column_policy = 'strict'," +
                      " \"merge.scheduler.max_thread_count\" = 1" +
                      ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("myschema.test");

        CreateTable node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"myschema\".\"test\" (\n" +
                     " \"pk_col_one\" BIGINT,\n" +
                     " \"pk_col_two\" BIGINT,\n" +
                     " PRIMARY KEY (\"pk_col_one\", \"pk_col_two\")\n" +
                     ")\n" +
                     "CLUSTERED INTO 5 SHARDS\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " \"merge.scheduler.max_thread_count\" = 1,\n" +
                     " number_of_replicas = '0-all',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    // Verifies NOT NULL and fulltext index options on a column, plus CLUSTERED BY from the pk.
    @Test
    public void testBuildCreateTableNotNull() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("create table myschema.test (" +
                      " col_a string," +
                      " col_b string not null index using fulltext," +
                      " primary key (col_a)" +
                      ") " +
                      "clustered into 5 shards " +
                      "with (" +
                      " number_of_replicas = '0-all'," +
                      " column_policy = 'strict'," +
                      " \"merge.scheduler.max_thread_count\" = 1" +
                      ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("myschema.test");

        CreateTable node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"myschema\".\"test\" (\n" +
                     " \"col_a\" TEXT,\n" +
                     " \"col_b\" TEXT NOT NULL INDEX USING FULLTEXT WITH (\n" +
                     " analyzer = 'standard'\n" +
                     " ),\n" +
                     " PRIMARY KEY (\"col_a\")\n" +
                     ")\n" +
                     "CLUSTERED BY (\"col_a\") INTO 5 SHARDS\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " \"merge.scheduler.max_thread_count\" = 1,\n" +
                     " number_of_replicas = '0-all',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    // Verifies both column-level and table-level CHECK constraints are reconstructed.
    @Test
    public void testBuildCreateTableCheckConstraints() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("create table doc.test (" +
                      " floats float constraint test_floats_check check (floats != -1)," +
                      " shorts short," +
                      " constraint test_shorts_check check (shorts >= 0)" +
                      ") " +
                      "clustered into 5 shards " +
                      "with (" +
                      " number_of_replicas = '0-all'" +
                      ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("doc.test");

        CreateTable node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"doc\".\"test\" (\n" +
                     " \"floats\" REAL,\n" +
                     " \"shorts\" SMALLINT,\n" +
                     " CONSTRAINT test_floats_check CHECK(\"floats\" <> - 1),\n" +
                     " CONSTRAINT test_shorts_check CHECK(\"shorts\" >= 0)\n" +
                     ")\n" +
                     "CLUSTERED INTO 5 SHARDS\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " number_of_replicas = '0-all',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    // Verifies CLUSTERED BY and PARTITIONED BY clauses are reconstructed.
    @Test
    public void testBuildCreateTableClusteredByPartitionedBy() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addPartitionedTable("create table myschema.test (" +
                                 " id long," +
                                 " partition_column string," +
                                 " cluster_column string" +
                                 ") " +
                                 "partitioned by (partition_column) " +
                                 "clustered by (cluster_column) into 5 shards " +
                                 "with (" +
                                 " number_of_replicas = '0-all'," +
                                 " \"merge.scheduler.max_thread_count\" = 1" +
                                 ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("myschema.test");

        CreateTable node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"myschema\".\"test\" (\n" +
                     " \"id\" BIGINT,\n" +
                     " \"partition_column\" TEXT,\n" +
                     " \"cluster_column\" TEXT\n" +
                     ")\n" +
                     "CLUSTERED BY (\"cluster_column\") INTO 5 SHARDS\n" +
                     "PARTITIONED BY (\"partition_column\")\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " \"merge.scheduler.max_thread_count\" = 1,\n" +
                     " number_of_replicas = '0-all',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    // Verifies named composite fulltext/plain indexes; note that a "plain" index is printed as
    // FULLTEXT with the 'keyword' analyzer, and composite index columns may be reordered.
    @Test
    public void testBuildCreateTableIndexes() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("create table myschema.test (" +
                      " id long," +
                      " col_a string," +
                      " col_b string index using fulltext," +
                      " col_c string index off," +
                      " col_d object as (" +
                      " a string" +
                      " )," +
                      " index col_a_col_b_ft using fulltext (col_a, col_b) with (" +
                      " analyzer= 'english'" +
                      " )," +
                      " index col_d_a_ft using fulltext (col_d['a']) with (" +
                      " analyzer= 'custom_analyzer'" +
                      " )," +
                      " index col_a_col_b_plain using plain (col_a, col_b)" +
                      ") " +
                      "clustered into 5 shards " +
                      "with (" +
                      " number_of_replicas = '0-all'," +
                      " \"merge.scheduler.max_thread_count\" = 1" +
                      ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("myschema.test");

        CreateTable node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"myschema\".\"test\" (\n" +
                     " \"id\" BIGINT,\n" +
                     " \"col_a\" TEXT,\n" +
                     " \"col_b\" TEXT INDEX USING FULLTEXT WITH (\n" +
                     " analyzer = 'standard'\n" +
                     " ),\n" +
                     " \"col_c\" TEXT INDEX OFF,\n" +
                     " \"col_d\" OBJECT(DYNAMIC) AS (\n" +
                     " \"a\" TEXT\n" +
                     " ),\n" +
                     " INDEX \"col_a_col_b_ft\" USING FULLTEXT (\"col_b\", \"col_a\") WITH (\n" +
                     " analyzer = 'english'\n" +
                     " ),\n" +
                     " INDEX \"col_d_a_ft\" USING FULLTEXT (\"col_d\"['a']) WITH (\n" +
                     " analyzer = 'custom_analyzer'\n" +
                     " ),\n" +
                     " INDEX \"col_a_col_b_plain\" USING FULLTEXT (\"col_b\", \"col_a\") WITH (\n" +
                     " analyzer = 'keyword'\n" +
                     " )\n" +
                     ")\n" +
                     "CLUSTERED INTO 5 SHARDS\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " \"merge.scheduler.max_thread_count\" = 1,\n" +
                     " number_of_replicas = '0-all',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    // Verifies per-column STORAGE WITH (columnstore = ...) options are reconstructed.
    @Test
    public void testBuildCreateTableStorageDefinitions() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("create table myschema.test (" +
                      " s string storage with (columnstore =false)" +
                      ") " +
                      "clustered into 5 shards " +
                      "with (" +
                      " number_of_replicas = '0-all'," +
                      " column_policy = 'strict'," +
                      " \"merge.scheduler.max_thread_count\" = 1" +
                      ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("myschema.test");

        CreateTable node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"myschema\".\"test\" (\n" +
                     " \"s\" TEXT STORAGE WITH (\n" +
                     " columnstore = false\n" +
                     " )\n" +
                     ")\n" +
                     "CLUSTERED INTO 5 SHARDS\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " \"merge.scheduler.max_thread_count\" = 1,\n" +
                     " number_of_replicas = '0-all',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    // Verifies DEFAULT clauses: constant expressions are folded (1 + 1 -> 2) and
    // timestamp defaults are normalized/cast as stored in metadata.
    @Test
    public void testBuildCreateTableColumnDefaultClause() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("CREATE TABLE test (" +
                      " col1 TEXT," +
                      " col2 INTEGER DEFAULT 1 + 1," +
                      " col3 TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP(3)," +
                      " col4 TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP(3)" +
                      ") with (" +
                      " \"merge.scheduler.max_thread_count\" = 1" +
                      ")")
            .build();
        DocTableInfo tableInfo = e.resolveTableInfo("test");

        CreateTable<?> node = MetadataToASTNodeResolver.resolveCreateTable(tableInfo);
        assertEquals("CREATE TABLE IF NOT EXISTS \"doc\".\"test\" (\n" +
                     " \"col1\" TEXT,\n" +
                     " \"col2\" INTEGER DEFAULT 2,\n" +
                     " \"col3\" TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp(3),\n" +
                     " \"col4\" TIMESTAMP WITHOUT TIME ZONE DEFAULT _cast(current_timestamp(3), 'timestamp without time zone')\n" +
                     ")\n" +
                     "CLUSTERED INTO 4 SHARDS\n" +
                     "WITH (\n" +
                     " \"allocation.max_retries\" = 5,\n" +
                     " \"blocks.metadata\" = false,\n" +
                     " \"blocks.read\" = false,\n" +
                     " \"blocks.read_only\" = false,\n" +
                     " \"blocks.read_only_allow_delete\" = false,\n" +
                     " \"blocks.write\" = false,\n" +
                     " codec = 'default',\n" +
                     " column_policy = 'strict',\n" +
                     " \"mapping.total_fields.limit\" = 1000,\n" +
                     " max_ngram_diff = 1,\n" +
                     " max_shingle_diff = 3,\n" +
                     " \"merge.scheduler.max_thread_count\" = 1,\n" +
                     " number_of_replicas = '0-1',\n" +
                     " \"routing.allocation.enable\" = 'all',\n" +
                     " \"routing.allocation.total_shards_per_node\" = -1,\n" +
                     " \"store.type\" = 'fs',\n" +
                     " \"translog.durability\" = 'REQUEST',\n" +
                     " \"translog.flush_threshold_size\" = 536870912,\n" +
                     " \"translog.sync_interval\" = 5000,\n" +
                     " \"unassigned.node_left.delayed_timeout\" = 60000,\n" +
                     " \"write.wait_for_active_shards\" = '1'\n" +
                     ")",
            SqlFormatter.formatSql(node));
    }

    @Test
    public void test_varchar_with_length_limit_is_printed_as_varchar_with_length_in_show_create_table() throws Exception {
        SQLExecutor e = SQLExecutor.builder(clusterService)
            .addTable("create table tbl (name varchar(10))")
            .build();
        DocTableInfo table = e.resolveTableInfo("tbl");
        CreateTable<?> node = MetadataToASTNodeResolver.resolveCreateTable(table);
        assertThat(SqlFormatter.formatSql(node), Matchers.containsString("\"name\" VARCHAR(10)"));
    }
}
package com.microsoft.azure.search.samples.demo;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.microsoft.azure.search.samples.client.SearchIndexClient;
import com.microsoft.azure.search.samples.index.ComplexIndexField;
import com.microsoft.azure.search.samples.index.IndexDefinition;
import com.microsoft.azure.search.samples.index.IndexField;
import com.microsoft.azure.search.samples.index.SimpleIndexField;
import com.microsoft.azure.search.samples.index.Suggester;
import com.microsoft.azure.search.samples.options.SearchOptions;
import com.microsoft.azure.search.samples.options.SuggestOptions;
import com.microsoft.azure.search.samples.results.IndexBatchOperationResult;
import com.microsoft.azure.search.samples.results.IndexBatchResult;
import com.microsoft.azure.search.samples.results.SearchResult;
import com.microsoft.azure.search.samples.results.SuggestHit;
import com.microsoft.azure.search.samples.results.SuggestResult;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static com.microsoft.azure.search.samples.demo.Address.CITY;
import static com.microsoft.azure.search.samples.demo.Address.STATE;
import static com.microsoft.azure.search.samples.demo.Address.STREET_ADDRESS;
import static com.microsoft.azure.search.samples.demo.Address.ZIP_CODE;
import static com.microsoft.azure.search.samples.demo.Hotel.CATEGORY;
import static com.microsoft.azure.search.samples.demo.Hotel.HOTEL_ID;
import static com.microsoft.azure.search.samples.demo.Hotel.HOTEL_NAME;
import static com.microsoft.azure.search.samples.demo.Hotel.LAST_RENOVATION_DATE;
import static com.microsoft.azure.search.samples.demo.Hotel.PARKING_INCLUDED;
import static com.microsoft.azure.search.samples.demo.Hotel.RATING;
import static com.microsoft.azure.search.samples.demo.Hotel.ROOMS;
import static com.microsoft.azure.search.samples.demo.Room.BASE_RATE;
import static com.microsoft.azure.search.samples.demo.Room.BED_OPTIONS;
import static com.microsoft.azure.search.samples.demo.Room.DESCRIPTION;
import static com.microsoft.azure.search.samples.demo.Room.DESCRIPTION_FR;
import static com.microsoft.azure.search.samples.demo.Room.SLEEPS_COUNT;
import static com.microsoft.azure.search.samples.demo.Room.SMOKING_ALLOWED;
import static com.microsoft.azure.search.samples.demo.Room.TAGS;
import static com.microsoft.azure.search.samples.demo.Room.TYPE;

/**
 * Demo of Azure Search operations against a "hotels" index: index (re)creation,
 * document ingestion, simple and full-featured search, key lookup, and
 * suggestions. Every method calls the service through {@link SearchIndexClient}
 * and writes its results to stdout; nothing is returned to callers.
 */
class DemoOperations {
    // Deserializes the bundled sample hotel JSON resources; Jdk8Module adds
    // java.util.Optional support to Jackson.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().registerModule(new Jdk8Module());
    private static final String INDEX_NAME = "hotels";
    private SearchIndexClient client;

    /**
     * @param serviceName Azure Search service name
     * @param apiKey admin API key used by the client for all calls
     */
    DemoOperations(String serviceName, String apiKey) {
        this.client = new SearchIndexClient(serviceName, INDEX_NAME, apiKey);
    }

    // Indexes may be created via the management UI in portal.azure.com or via APIs. In addition to field
    // details index definitions include options for custom scoring, suggesters and more
    void createIndex() throws IOException {
        // Typical application initialization may createIndex an index if it doesn't exist. Deleting an index
        // on initialization is a sample-only thing to do
        client.deleteIndexIfExists();
        if (!client.doesIndexExist()) {
            List<IndexField> fields = Arrays.asList(
                    SimpleIndexField.builder(HOTEL_ID, "Edm.String")
                            .key(true).filterable(true).build(),
                    SimpleIndexField.builder(HOTEL_NAME, "Edm.String")
                            .searchable(true).build(),
                    SimpleIndexField.builder(DESCRIPTION, "Edm.String")
                            .searchable(true).build(),
                    // French description gets a language-specific analyzer.
                    SimpleIndexField.builder(DESCRIPTION_FR, "Edm.String")
                            .searchable(true).analyzer("fr.lucene").build(),
                    SimpleIndexField.builder(CATEGORY, "Edm.String")
                            .searchable(true).filterable(true).sortable(true).facetable(true).build(),
                    SimpleIndexField.builder(TAGS, "Collection(Edm.String)")
                            .searchable(true).filterable(true).facetable(true).build(),
                    SimpleIndexField.builder(PARKING_INCLUDED, "Edm.Boolean")
                            .filterable(true).facetable(true).build(),
                    SimpleIndexField.builder(SMOKING_ALLOWED, "Edm.Boolean")
                            .filterable(true).facetable(true).build(),
                    SimpleIndexField.builder(LAST_RENOVATION_DATE, "Edm.DateTimeOffset")
                            .filterable(true).sortable(true).facetable(true).build(),
                    SimpleIndexField.builder(RATING, "Edm.Double")
                            .filterable(true).sortable(true).facetable(true).build(),
                    defineAddressField(),
                    defineRoomsField());
            // Suggester "sg" powers type-ahead over the hotel name field (see suggest()).
            Suggester suggester = Suggester.create("sg", "analyzingInfixMatching",
                    Collections.singletonList(HOTEL_NAME));
            client.createIndex(IndexDefinition.create(INDEX_NAME, fields, Collections.singletonList(suggester)));
        }
    }

    // "Address" complex sub-object (last arg false => single object, not a collection).
    private ComplexIndexField defineAddressField() {
        return ComplexIndexField
                .create("Address", Arrays.asList(
                        SimpleIndexField
                                .builder(STREET_ADDRESS, "Edm.String")
                                .searchable(true)
                                .build(),
                        SimpleIndexField
                                .builder(CITY, "Edm.String")
                                .searchable(true)
                                .build(),
                        SimpleIndexField
                                .builder(STATE, "Edm.String")
                                .searchable(true)
                                .build(),
                        SimpleIndexField
                                .builder(ZIP_CODE, "Edm.String")
                                .searchable(true)
                                .build()
                ), false);
    }

    // "Rooms" complex sub-object (last arg true => collection: a hotel has many rooms).
    private ComplexIndexField defineRoomsField() {
        return ComplexIndexField
                .create("Rooms", Arrays.asList(
                        SimpleIndexField
                                .builder(DESCRIPTION, "Edm.String")
                                .searchable(true)
                                .analyzer("en.lucene")
                                .build(),
                        SimpleIndexField
                                .builder(DESCRIPTION_FR, "Edm.String")
                                .searchable(true)
                                .analyzer("fr.lucene")
                                .build(),
                        SimpleIndexField
                                .builder(TYPE, "Edm.String")
                                .searchable(true)
                                .build(),
                        SimpleIndexField
                                .builder(BASE_RATE, "Edm.Double")
                                .filterable(true)
                                .facetable(true)
                                .build(),
                        SimpleIndexField
                                .builder(BED_OPTIONS, "Edm.String")
                                .searchable(true)
                                .build(),
                        SimpleIndexField
                                .builder(SLEEPS_COUNT, "Edm.Int32")
                                .filterable(true)
                                .facetable(true)
                                .build(),
                        SimpleIndexField
                                .builder(SMOKING_ALLOWED, "Edm.Boolean")
                                .filterable(true)
                                .facetable(true)
                                .build(),
                        SimpleIndexField
                                .builder(TAGS, "Collection(Edm.String)")
                                .searchable(true)
                                .filterable(true)
                                .facetable(true)
                                .build()
                ), true);
    }

    void indexData() throws IOException {
        // In this case we createIndex sample data in-memory. Typically this will come from another database, file or
        // API and will be turned into objects with the desired shape for indexing
        List<IndexOperation> ops = new ArrayList<>();
        // Sample documents are classpath resources named after their ids.
        for (String id : new String[] { "hotel1", "hotel10","hotel11", "hotel12", "hotel13"}) {
            Hotel hotel = OBJECT_MAPPER.readValue(getClass().getResource("/" + id), Hotel.class);
            ops.add(IndexOperation.uploadOperation(hotel));
        }
        // Mix in a delete to demonstrate batches with heterogeneous operations.
        ops.add(IndexOperation.deleteOperation(HOTEL_ID, "1"));
        IndexBatchResult result = client.indexBatch(ops);
        // HTTP 207 = multi-status: some operations in the batch may have failed.
        if (result.status() != null && result.status() == 207) {
            System.out.print("handle partial success, check individual client status/error message");
        }
        for (IndexBatchOperationResult r : result.value()) {
            System.out.printf("Operation for id: %s, success: %s\n", r.key(), r.status());
        }
    }

    /** Minimal search: one term, hit count requested, results printed. */
    void searchSimple() throws IOException {
        SearchOptions options = SearchOptions.builder().includeCount(true).build();
        SearchResult result = client.search("Lobby", options);
        System.out.printf("Found %s hits\n", result.count());
        for (SearchResult.SearchHit hit : result.hits()) {
            System.out.printf("\tid: %s, name: %s, score: %s\n",
                    hit.document().get(HOTEL_ID), hit.document().get(HOTEL_NAME), hit.score());
        }
    }

    /** Exercises filter, orderBy, select, facets, highlighting, top, minimumCoverage in one query. */
    void searchAllFeatures() throws IOException {
        SearchOptions options = SearchOptions.builder()
                .includeCount(true)
                .filter("Rooms/all(r: r/BaseRate lt 260)")
                .orderBy(LAST_RENOVATION_DATE + " desc")
                .select(HOTEL_ID + "," + DESCRIPTION + "," + LAST_RENOVATION_DATE)
                .searchFields(ROOMS + "/" + DESCRIPTION)
                .facets(Arrays.asList(TAGS, RATING))
                .highlight(HOTEL_NAME)
                .highlightPreTag("*pre*")
                .highlightPostTag("*post*")
                .top(10)
                .requireAllTerms(true)
                .minimumCoverage(0.75).build();
        SearchResult result = client.search("Mountain", options);
        // list search hits
        // NOTE(review): the "name:" placeholder below is fed DESCRIPTION, not HOTEL_NAME
        // (HOTEL_NAME is not in the select list) — label/value mismatch in the demo output.
        System.out.printf("Found %s hits, coverage: %s\n", result.count(),
                result.coverage() == null ? "-" : result.coverage());
        for (SearchResult.SearchHit hit : result.hits()) {
            System.out.printf("\tid: %s, name: %s, LastRenovationDate: %s\n",
                    hit.document().get(HOTEL_ID), hit.document().get(DESCRIPTION),
                    hit.document().get(LAST_RENOVATION_DATE));
        }
        // list facets
        for (String field : Objects.requireNonNull(result.facets()).keySet()) {
            System.out.println(field + ":");
            for (SearchResult.FacetValue value : Objects.requireNonNull(result.facets()).get(field)) {
                if (value.value() != null) {
                    // Discrete facet value.
                    System.out.printf("\t%s: %s\n", value.value(), value.count());
                } else {
                    // Range facet: either bound may be open.
                    System.out.printf("\t%s-%s: %s\n",
                            value.from() == null ? "min" : value.from(),
                            value.to() == null ? "max" : value.to(), value.count());
                }
            }
        }
    }

    /** Fetches a single document by key and prints a few of its fields. */
    void lookup() throws IOException {
        Map<String, Object> document = client.lookup("10");
        System.out.println("Document lookup, key='10'");
        System.out.printf("\tname: %s\n", document.get(HOTEL_NAME));
        System.out.printf("\trenovated: %s\n", document.get(LAST_RENOVATION_DATE));
        System.out.printf("\trating: %s\n", document.get(RATING));
    }

    /** Fuzzy type-ahead suggestions for the partial term "res" via suggester "sg". */
    void suggest() throws IOException {
        SuggestOptions options = SuggestOptions.builder().fuzzy(true).build();
        SuggestResult result = client.suggest("res", "sg", options);
        System.out.println("Suggest results, coverage: " + result.coverage());
        for (SuggestHit hit : result.hits()) {
            System.out.printf("\ttext: %s (id: %s)\n", hit.text(), hit.document().get(HOTEL_ID));
        }
    }
}
<%# Copyright 2013-2017 the original author or authors. This file is part of the JHipster project, see https://jhipster.github.io/ for more information. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -%> <%# Template for the generated UserService: activation, password reset, user CRUD and scheduled cleanup. EJS conditionals on databaseType / authenticationType / searchEngine / enableSocialSignIn select which code is emitted. %> package <%=packageName%>.service; <% if (databaseType == 'sql' || databaseType == 'mongodb') { %> import <%=packageName%>.domain.Authority;<% } %> import <%=packageName%>.domain.User;<% if (databaseType == 'sql' || databaseType == 'mongodb') { %> import <%=packageName%>.repository.AuthorityRepository;<% if (authenticationType == 'session') { %> import <%=packageName%>.repository.PersistentTokenRepository;<% } %><% } %> import <%=packageName%>.config.Constants; import <%=packageName%>.repository.UserRepository;<% if (searchEngine == 'elasticsearch') { %> import <%=packageName%>.repository.search.UserSearchRepository;<% } %> import <%=packageName%>.security.AuthoritiesConstants; import <%=packageName%>.security.SecurityUtils; import <%=packageName%>.service.util.RandomUtil; import <%=packageName%>.service.dto.UserDTO; import org.slf4j.Logger; import org.slf4j.LoggerFactory;<% if (databaseType == 'sql' || databaseType == 'mongodb') { %> import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.scheduling.annotation.Scheduled;<% } %> import org.springframework.security.crypto.password.PasswordEncoder; <%_ if (databaseType == 'sql' && authenticationType == 'oauth2') { _%> import
org.springframework.security.oauth2.provider.token.store.JdbcTokenStore; <%_ } _%> import org.springframework.stereotype.Service;<% if (databaseType == 'sql') { %> import org.springframework.transaction.annotation.Transactional;<% } %> <%_ if ((databaseType == 'sql' || databaseType == 'mongodb') && authenticationType == 'session') { _%> import java.time.LocalDate; <%_ } _%> import java.time.Instant; <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> import java.time.temporal.ChronoUnit; <%_ } _%> import java.util.*; import java.util.stream.Collectors; /** * Service class for managing users. */ @Service<% if (databaseType == 'sql') { %> @Transactional<% } %> public class UserService { private final Logger log = LoggerFactory.getLogger(UserService.class); private final UserRepository userRepository; private final PasswordEncoder passwordEncoder; <%_ if (enableSocialSignIn) { _%> private final SocialService socialService; <%_ } _%> <%_ if (databaseType == 'sql' && authenticationType == 'oauth2') { _%> public final JdbcTokenStore jdbcTokenStore; <%_ } _%> <%_ if (searchEngine == 'elasticsearch') { _%> private final UserSearchRepository userSearchRepository; <%_ } _%> <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> <%_ if (authenticationType == 'session') { _%> private final PersistentTokenRepository persistentTokenRepository; <%_ } _%> private final AuthorityRepository authorityRepository; <%_ } _%> public UserService(UserRepository userRepository, PasswordEncoder passwordEncoder<% if (enableSocialSignIn) { %>, SocialService socialService<% } %><% if (databaseType == 'sql' && authenticationType == 'oauth2') { %>, JdbcTokenStore jdbcTokenStore<% } %><% if (searchEngine == 'elasticsearch') { %>, UserSearchRepository userSearchRepository<% } %><% if (databaseType == 'sql' || databaseType == 'mongodb') { %><% if (authenticationType == 'session') { %>, PersistentTokenRepository persistentTokenRepository<% } %>, AuthorityRepository
authorityRepository<% } %>) { this.userRepository = userRepository; this.passwordEncoder = passwordEncoder; <%_ if (enableSocialSignIn) { _%> this.socialService = socialService; <%_ } _%> <%_ if (databaseType == 'sql' && authenticationType == 'oauth2') { _%> this.jdbcTokenStore = jdbcTokenStore; <%_ } _%> <%_ if (searchEngine == 'elasticsearch') { _%> this.userSearchRepository = userSearchRepository; <%_ } _%> <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> <%_ if (authenticationType == 'session') { _%> this.persistentTokenRepository = persistentTokenRepository; <%_ } _%> this.authorityRepository = authorityRepository; <%_ } _%> } public Optional<User> activateRegistration(String key) { log.debug("Activating user for activation key {}", key); return userRepository.findOneByActivationKey(key) .map(user -> { // activate given user for the registration key. user.setActivated(true); user.setActivationKey(null); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (searchEngine == 'elasticsearch') { _%> userSearchRepository.save(user); <%_ } _%> log.debug("Activated user: {}", user); return user; }); } public Optional<User> completePasswordReset(String newPassword, String key) { log.debug("Reset user password for reset key {}", key); return userRepository.findOneByResetKey(key) .filter(user -> user.getResetDate().isAfter(Instant.now().minusSeconds(86400))) .map(user -> { user.setPassword(passwordEncoder.encode(newPassword)); user.setResetKey(null); user.setResetDate(null); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> return user; }); } public Optional<User> requestPasswordReset(String mail) { return userRepository.findOneByEmail(mail) .filter(User::getActivated) .map(user -> { user.setResetKey(RandomUtil.generateResetKey()); user.setResetDate(Instant.now()); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') {
_%> userRepository.save(user); <%_ } _%> return user; }); } <%# Self-registration path: the created user starts deactivated and carries an activation key. %> public User createUser(String login, String password, String firstName, String lastName, String email<% if (databaseType == 'sql' || databaseType == 'mongodb') { %>, String imageUrl<% } %>, String langKey) { User newUser = new User();<% if (databaseType == 'sql' || databaseType == 'mongodb') { %> Authority authority = authorityRepository.findOne(AuthoritiesConstants.USER); Set<Authority> authorities = new HashSet<>();<% } %><% if (databaseType == 'cassandra') { %> newUser.setId(UUID.randomUUID().toString()); Set<String> authorities = new HashSet<>();<% } %> String encryptedPassword = passwordEncoder.encode(password); newUser.setLogin(login); // new user gets initially a generated password newUser.setPassword(encryptedPassword); newUser.setFirstName(firstName); newUser.setLastName(lastName); newUser.setEmail(email); <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> newUser.setImageUrl(imageUrl); <%_ } _%> newUser.setLangKey(langKey); // new user is not active newUser.setActivated(false); // new user gets registration key newUser.setActivationKey(RandomUtil.generateActivationKey()); <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> authorities.add(authority); <%_ } _%> <%_ if (databaseType == 'cassandra') { _%> authorities.add(AuthoritiesConstants.USER); <%_ } _%> newUser.setAuthorities(authorities); userRepository.save(newUser);<% if (searchEngine == 'elasticsearch') { %> userSearchRepository.save(newUser);<% } %> log.debug("Created Information for User: {}", newUser); return newUser; } <%# Admin-creation path: the created user starts activated, with a reset key so a password can be set. %> public User createUser(UserDTO userDTO) { User user = new User();<% if (databaseType == 'cassandra') { %> user.setId(UUID.randomUUID().toString());<% } %> user.setLogin(userDTO.getLogin()); user.setFirstName(userDTO.getFirstName()); user.setLastName(userDTO.getLastName()); user.setEmail(userDTO.getEmail()); <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%>
user.setImageUrl(userDTO.getImageUrl()); <%_ } _%> if (userDTO.getLangKey() == null) { user.setLangKey("<%= nativeLanguage %>"); // default language } else { user.setLangKey(userDTO.getLangKey()); } <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> if (userDTO.getAuthorities() != null) { Set<Authority> authorities = new HashSet<>(); userDTO.getAuthorities().forEach( authority -> authorities.add(authorityRepository.findOne(authority)) ); user.setAuthorities(authorities); } <%_ } _%> <%_ if (databaseType == 'cassandra') { _%> user.setAuthorities(userDTO.getAuthorities()); <%_ } _%> String encryptedPassword = passwordEncoder.encode(RandomUtil.generatePassword()); user.setPassword(encryptedPassword); user.setResetKey(RandomUtil.generateResetKey()); user.setResetDate(Instant.now()); user.setActivated(true); userRepository.save(user);<% if (searchEngine == 'elasticsearch') { %> userSearchRepository.save(user);<% } %> log.debug("Created Information for User: {}", user); return user; } /** * Update basic information (first name, last name, email, language) for the current user.
* * @param firstName first name of user * @param lastName last name of user * @param email email id of user * @param langKey language key <%_ if (databaseType == 'mongodb' || databaseType == 'sql') { _%> * @param imageUrl image URL of user <%_ } _%> */ public void updateUser(String firstName, String lastName, String email, String langKey<% if (databaseType === 'mongodb' || databaseType === 'sql') { %>, String imageUrl<% } %>) { userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(user -> { user.setFirstName(firstName); user.setLastName(lastName); user.setEmail(email); user.setLangKey(langKey); <%_ if (databaseType === 'mongodb' || databaseType === 'sql') { _%> user.setImageUrl(imageUrl); <%_ } _%> <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (searchEngine == 'elasticsearch') { _%> userSearchRepository.save(user); <%_ } _%> log.debug("Changed Information for User: {}", user); }); } /** * Update all information for a specific user, and return the modified user.
* * @param userDTO user to update * @return updated user */ public Optional<UserDTO> updateUser(UserDTO userDTO) { return Optional.of(userRepository .findOne(userDTO.getId())) .map(user -> { user.setLogin(userDTO.getLogin()); user.setFirstName(userDTO.getFirstName()); user.setLastName(userDTO.getLastName()); user.setEmail(userDTO.getEmail()); <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> user.setImageUrl(userDTO.getImageUrl()); <%_ } _%> user.setActivated(userDTO.isActivated()); user.setLangKey(userDTO.getLangKey()); <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> Set<Authority> managedAuthorities = user.getAuthorities(); managedAuthorities.clear(); userDTO.getAuthorities().stream() .map(authorityRepository::findOne) .forEach(managedAuthorities::add); <%_ } else { // Cassandra _%> user.setAuthorities(userDTO.getAuthorities()); <%_ } _%> <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> log.debug("Changed Information for User: {}", user); return user; }) .map(UserDTO::new); } public void deleteUser(String login) { <%_ if (databaseType == 'sql' && authenticationType == 'oauth2') { _%> jdbcTokenStore.findTokensByUserName(login).forEach(token -> jdbcTokenStore.removeAccessToken(token)); <%_ } _%> userRepository.findOneByLogin(login).ifPresent(user -> { <%_ if (enableSocialSignIn) { _%> socialService.deleteUserSocialConnection(user.getLogin()); <%_ } _%> userRepository.delete(user); <%_ if (searchEngine == 'elasticsearch') { _%> userSearchRepository.delete(user); <%_ } _%> log.debug("Deleted User: {}", user); }); } public void changePassword(String password) { userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(user -> { String encryptedPassword = passwordEncoder.encode(password); user.setPassword(encryptedPassword); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> log.debug("Changed
password for User: {}", user); }); } <%_ if (databaseType === 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> <%_ if (databaseType === 'sql' || databaseType === 'mongodb') { _%> public Page<UserDTO> getAllManagedUsers(Pageable pageable) { return userRepository.findAllByLoginNot(pageable, Constants.ANONYMOUS_USER).map(UserDTO::new); }<% } else { // Cassandra %> public List<UserDTO> getAllManagedUsers() { return userRepository.findAll().stream() .filter(user -> !Constants.ANONYMOUS_USER.equals(user.getLogin())) .map(UserDTO::new) .collect(Collectors.toList()); }<% } %> <%_ if (databaseType == 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public Optional<User> getUserWithAuthoritiesByLogin(String login) { <%_ if (databaseType == 'sql') { _%> return userRepository.findOneWithAuthoritiesByLogin(login); <%_ } else { // MongoDB and Cassandra _%> return userRepository.findOneByLogin(login); <%_ } _%> } <%_ if (databaseType == 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public User getUserWithAuthorities(<%= pkType %> id) { <%_ if (databaseType == 'sql') { _%> return userRepository.findOneWithAuthoritiesById(id); <%_ } else { // MongoDB and Cassandra _%> return userRepository.findOne(id); <%_ } _%> } <%_ if (databaseType == 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public User getUserWithAuthorities() { <%_ if (databaseType == 'sql') { _%> return userRepository.findOneWithAuthoritiesByLogin(SecurityUtils.getCurrentUserLogin()).orElse(null); <%_ } else { // MongoDB and Cassandra _%> return userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).orElse(null); <%_ } _%> } <%_ if ((databaseType == 'sql' || databaseType == 'mongodb') && authenticationType == 'session') { _%> /** * Persistent Token are used for providing automatic authentication, they should be automatically deleted after * 30 days. * <p> * This is scheduled to get fired everyday, at midnight. 
* </p> */ @Scheduled(cron = "0 0 0 * * ?") public void removeOldPersistentTokens() { LocalDate now = LocalDate.now(); persistentTokenRepository.findByTokenDateBefore(now.minusMonths(1)).forEach(token -> { log.debug("Deleting token {}", token.getSeries());<% if (databaseType == 'sql') { %> User user = token.getUser(); user.getPersistentTokens().remove(token);<% } %> persistentTokenRepository.delete(token); }); }<% } %><% if (databaseType == 'sql' || databaseType == 'mongodb') { %> /** * Not activated users should be automatically deleted after 3 days. * <p> * This is scheduled to get fired everyday, at 01:00 (am). * </p> */ @Scheduled(cron = "0 0 1 * * ?") public void removeNotActivatedUsers() { List<User> users = userRepository.findAllByActivatedIsFalseAndCreatedDateBefore(Instant.now().minus(3, ChronoUnit.DAYS)); for (User user : users) { log.debug("Deleting not activated user {}", user.getLogin()); userRepository.delete(user);<% if (searchEngine == 'elasticsearch') { %> userSearchRepository.delete(user);<% } %> } } /** * @return a list of all the authorities */ public List<String> getAuthorities() { return authorityRepository.findAll().stream().map(Authority::getName).collect(Collectors.toList()); }<% } %> }
/*
 * Copyright (C) 2016 Intel Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package OptimizationTests.ConstantCalculationSinking.ThrowExceptionIntLong_001;

/**
 * No sinking expected: loop has unknown number of iterations
 *
 * Compiler-optimization regression test for constant-calculation sinking.
 * NOTE(review): the code shape is intentional and must not be refactored —
 * every loop deliberately throws and catches an ArithmeticException each
 * iteration (presumably what blocks the sinking optimization; confirm against
 * the test harness's expectations). The int/long method pairs and the
 * add/sub/mul/div/rem variants exercise each arithmetic op in both widths.
 */
public class Main {
    // Shared loop bound for most test methods (mul variants use small fixed
    // bounds to avoid overflowing the accumulated product).
    final int iterations = 1100;

    // testVar accumulates a constant addition per iteration; additionalVar mixes
    // in loop-dependent remainders plus n so the loop body is not fully constant.
    public int testLoopAddInt(int n) {
        int testVar = 10;
        int additionalVar = 0;
        for (int i = 0; i < iterations; i++) {
            testVar += 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // System.out.println("Exception " + ae);
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Same pattern with constant subtraction (int).
    public int testLoopSubInt(int n) {
        int testVar = 10;
        int additionalVar = 0;
        for (int i = 0; i < iterations; i++) {
            testVar -= 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Same pattern with constant subtraction (long).
    public long testLoopSubLong(long n) {
        long testVar = 10;
        long additionalVar = 0;
        for (long i = 0; i < iterations; i++) {
            testVar -= 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Constant multiplication (int); only 10 iterations so 6^10 stays in range.
    public int testLoopMulInt(int n) {
        int testVar = 1;
        int additionalVar = 0;
        for (int i = 0; i < 10; i++) {
            testVar *= 6;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Constant multiplication (long); 12 iterations of *6L.
    public long testLoopMulLong(long n) {
        long testVar = 1L;
        long additionalVar = 0;
        for (long i = 0; i < 12; i++) {
            testVar *= 6L;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Constant division (int); testVar reaches 0 after a few iterations.
    public int testLoopDivInt(int n) {
        int testVar = 10;
        int additionalVar = 0;
        for (int i = 0; i < iterations; i++) {
            testVar /= 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Constant division (long).
    public long testLoopDivLong(long n) {
        long testVar = 10;
        long additionalVar = 0;
        for (long i = 0; i < iterations; i++) {
            testVar /= 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Constant remainder (int); testVar becomes 0 after the first iteration.
    public int testLoopRemInt(int n) {
        int testVar = 10;
        int additionalVar = 0;
        for (int i = 0; i < iterations; i++) {
            testVar %= 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Constant remainder (long).
    public long testLoopRemLong(long n) {
        long testVar = 10;
        long additionalVar = 0;
        for (long i = 0; i < iterations; i++) {
            testVar %= 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Constant addition (long).
    public long testLoopAddLong(long n) {
        long testVar = 10;
        long additionalVar = 0;
        for (long i = 0; i < iterations; i++) {
            testVar += 5;
            try {
                throw new java.lang.ArithmeticException();
            } catch (java.lang.ArithmeticException ae) {
                // ae.printStackTrace();
            }
            additionalVar += i%2 + i%3 + i%4 + i%5 + i%6 + i%7 + i%8 + i%9 + i%10 + n;
        }
        return testVar + additionalVar;
    }

    // Runs every variant and prints the results; the harness is expected to
    // compare this output (and the generated code) against a reference.
    public static void main(String[] args) {
        System.out.println(new Main().testLoopAddInt(10));
        System.out.println(new Main().testLoopAddLong(10L));
        System.out.println(new Main().testLoopRemLong(10L));
        System.out.println(new Main().testLoopRemInt(10));
        System.out.println(new Main().testLoopDivLong(10L));
        System.out.println(new Main().testLoopDivInt(10));
        System.out.println(new Main().testLoopMulLong(10L));
        System.out.println(new Main().testLoopMulInt(10));
        System.out.println(new Main().testLoopSubLong(10L));
        System.out.println(new Main().testLoopSubInt(10));
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.idea.maven.dom; import com.intellij.openapi.module.Module; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.xml.XmlElement; import com.intellij.psi.xml.XmlTag; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.idea.maven.dom.model.MavenDomParent; import org.jetbrains.idea.maven.dom.model.MavenDomProfile; import org.jetbrains.idea.maven.dom.model.MavenDomProjectModel; import org.jetbrains.idea.maven.dom.model.MavenDomProperties; import org.jetbrains.idea.maven.dom.references.MavenFilteredPropertyPsiReferenceProvider; import org.jetbrains.idea.maven.model.MavenId; import org.jetbrains.idea.maven.project.MavenProject; import org.jetbrains.idea.maven.project.MavenProjectsManager; import org.jetbrains.idea.maven.server.MavenServerUtil; import org.jetbrains.idea.maven.utils.MavenJDOMUtil; import org.jetbrains.idea.maven.utils.MavenUtil; import org.jetbrains.jps.maven.compiler.MavenEscapeWindowsCharacterUtils; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; public final class MavenPropertyResolver { public static final Pattern PATTERN = Pattern.compile("\\$\\{(.+?)}|@(.+?)@"); public static void doFilterText(Module module, String text, Properties additionalProperties, @Nullable String propertyEscapeString, Appendable out) throws IOException { MavenProjectsManager manager = MavenProjectsManager.getInstance(module.getProject()); MavenProject mavenProject = manager.findProject(module); if (mavenProject == null) { out.append(text); return; } Element pluginConfiguration = mavenProject.getPluginConfiguration("org.apache.maven.plugins", "maven-resources-plugin"); String 
escapeWindowsPathsStr = MavenJDOMUtil.findChildValueByPath(pluginConfiguration, "escapeWindowsPaths"); boolean escapeWindowsPath = escapeWindowsPathsStr == null || Boolean.parseBoolean(escapeWindowsPathsStr); doFilterText(MavenFilteredPropertyPsiReferenceProvider.getDelimitersPattern(mavenProject), manager, mavenProject, null, text, additionalProperties, propertyEscapeString, escapeWindowsPath, null, out); } private static void doFilterText(Pattern pattern, MavenProjectsManager mavenProjectsManager, @Nullable MavenProject mavenProject, MavenDomProjectModel projectDom, String text, Properties additionalProperties, @Nullable String escapeString, boolean escapeWindowsPath, @Nullable Map<String, String> resolvedPropertiesParam, Appendable out) throws IOException { Map<String, String> resolvedProperties = resolvedPropertiesParam; Matcher matcher = pattern.matcher(text); int groupCount = matcher.groupCount(); int last = 0; while (matcher.find()) { if (escapeString != null) { int escapeStringStartIndex = matcher.start() - escapeString.length(); if (escapeStringStartIndex >= last) { if (text.startsWith(escapeString, escapeStringStartIndex)) { out.append(text, last, escapeStringStartIndex); out.append(matcher.group()); last = matcher.end(); continue; } } } out.append(text, last, matcher.start()); last = matcher.end(); String propertyName = null; for (int i = 0; i < groupCount; i++) { propertyName = matcher.group(i + 1); if (propertyName != null) { break; } } assert propertyName != null; if (resolvedProperties == null) { resolvedProperties = new HashMap<>(); } String propertyValue = resolvedProperties.get(propertyName); if (propertyValue == null) { if (resolvedProperties.containsKey(propertyName)) { // if cyclic property dependencies out.append(matcher.group()); continue; } String resolved; if (mavenProject != null) { resolved = doResolvePropertyForMavenProject(propertyName, mavenProjectsManager, mavenProject, additionalProperties); } else { resolved = 
doResolvePropertyForMavenDomModel(propertyName, mavenProjectsManager, projectDom, additionalProperties); } if (resolved == null) { out.append(matcher.group()); continue; } resolvedProperties.put(propertyName, null); StringBuilder sb = new StringBuilder(); doFilterText(pattern, mavenProjectsManager, mavenProject, projectDom, resolved, additionalProperties, null, escapeWindowsPath, resolvedProperties, sb); propertyValue = sb.toString(); resolvedProperties.put(propertyName, propertyValue); } if (escapeWindowsPath) { MavenEscapeWindowsCharacterUtils.escapeWindowsPath(out, propertyValue); } else { out.append(propertyValue); } } out.append(text, last, text.length()); } /** * Resolve properties from the string (either like {@code ${propertyName}} or like {@code @propertyName@}). * @param text text string to resolve properties in * @param projectDom a project dom * @return string with the properties resolved */ public static String resolve(String text, MavenDomProjectModel projectDom) { XmlElement element = projectDom.getXmlElement(); if (element == null) return text; VirtualFile file = MavenDomUtil.getVirtualFile(element); if (file == null) return text; MavenProjectsManager manager = MavenProjectsManager.getInstance(projectDom.getManager().getProject()); MavenProject mavenProject = manager.findProject(file); StringBuilder res = new StringBuilder(); try { doFilterText(PATTERN, manager, mavenProject, projectDom, text, collectPropertiesFromDOM(mavenProject, projectDom), null, false, null, res); } catch (IOException e) { throw new RuntimeException(e); // never thrown } return res.toString(); } public static Properties collectPropertiesFromDOM(@Nullable MavenProject project, MavenDomProjectModel projectDom) { Properties result = new Properties(); collectPropertiesFromDOM(projectDom.getProperties(), result); if (project != null) { collectPropertiesForActivatedProfiles(project, projectDom, result); } return result; } private static void 
collectPropertiesForActivatedProfiles(@NotNull MavenProject project, MavenDomProjectModel projectDom, Properties result) { Collection<String> activeProfiles = project.getActivatedProfilesIds().getEnabledProfiles(); for (MavenDomProfile each : projectDom.getProfiles().getProfiles()) { XmlTag idTag = each.getId().getXmlTag(); if (idTag == null || !activeProfiles.contains(idTag.getValue().getTrimmedText())) continue; collectPropertiesFromDOM(each.getProperties(), result); } } private static void collectPropertiesFromDOM(MavenDomProperties props, Properties result) { XmlTag propsTag = props.getXmlTag(); if (propsTag != null) { for (XmlTag each : propsTag.getSubTags()) { result.setProperty(each.getName(), each.getValue().getTrimmedText()); } } } @Nullable private static String doResolvePropertyForMavenProject(String propName, MavenProjectsManager projectsManager, MavenProject mavenProject, Properties additionalProperties) { boolean hasPrefix = false; String unprefixed = propName; if (propName.startsWith("pom.")) { unprefixed = propName.substring("pom.".length()); hasPrefix = true; } else if (propName.startsWith("project.")) { unprefixed = propName.substring("project.".length()); hasPrefix = true; } MavenProject selectedProject = mavenProject; while (unprefixed.startsWith("parent.")) { MavenId parentId = selectedProject.getParentId(); if (parentId == null) return null; unprefixed = unprefixed.substring("parent.".length()); if (unprefixed.equals("groupId")) { return parentId.getGroupId(); } if (unprefixed.equals("artifactId")) { return parentId.getArtifactId(); } if (unprefixed.equals("version")) { return parentId.getVersion(); } selectedProject = projectsManager.findProject(parentId); if (selectedProject == null) return null; } if (unprefixed.equals("basedir") || (hasPrefix && mavenProject == selectedProject && unprefixed.equals("baseUri"))) { return selectedProject.getDirectory(); } if ("java.home".equals(propName)) { String jreDir = 
MavenUtil.getModuleJreHome(projectsManager, mavenProject); if (jreDir != null) { return jreDir; } } if ("java.version".equals(propName)) { String javaVersion = MavenUtil.getModuleJavaVersion(projectsManager, mavenProject); if (javaVersion != null) { return javaVersion; } } String result; result = MavenUtil.getPropertiesFromMavenOpts().get(propName); if (result != null) return result; result = mavenProject.getMavenConfig().get(propName); if (result != null) return result; result = mavenProject.getJvmConfig().get(propName); if (result != null) return result; result = MavenServerUtil.collectSystemProperties().getProperty(propName); if (result != null) return result; result = selectedProject.getModelMap().get(unprefixed); if (result != null) return result; result = additionalProperties.getProperty(propName); if (result != null) return result; result = mavenProject.getProperties().getProperty(propName); if (result != null) return result; if ("settings.localRepository".equals(propName)) { return mavenProject.getLocalRepository().getAbsolutePath(); } return null; } @Nullable private static String doResolvePropertyForMavenDomModel(String propName, MavenProjectsManager projectsManager, MavenDomProjectModel projectDom, Properties additionalProperties) { if (propName.startsWith("parent.")) { MavenDomParent parentDomElement = projectDom.getMavenParent(); if (!parentDomElement.exists()) { return null; } MavenId parentId = new MavenId(parentDomElement.getGroupId().getStringValue(), parentDomElement.getArtifactId().getStringValue(), parentDomElement.getVersion().getStringValue()); propName = propName.substring("parent.".length()); if (propName.equals("groupId")) { return parentId.getGroupId(); } if (propName.equals("artifactId")) { return parentId.getArtifactId(); } if (propName.equals("version")) { return parentId.getVersion(); } return null; } String result; result = MavenUtil.getPropertiesFromMavenOpts().get(propName); if (result != null) return result; result = 
MavenServerUtil.collectSystemProperties().getProperty(propName); if (result != null) return result; result = additionalProperties.getProperty(propName); if (result != null) return result; if ("settings.localRepository".equals(propName)) { return MavenProjectsManager.getInstance(projectDom.getManager().getProject()).getLocalRepository().getAbsolutePath(); } return null; } }
/* Copyright 2022 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package io.kubernetes.client.openapi.models; import java.util.Iterator; import java.util.List; /** Generated */ public class V1beta1FlowSchemaStatusFluentImpl< A extends io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent<A>> extends io.kubernetes.client.fluent.BaseFluent<A> implements io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent<A> { public V1beta1FlowSchemaStatusFluentImpl() {} public V1beta1FlowSchemaStatusFluentImpl( io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatus instance) { this.withConditions(instance.getConditions()); } private java.util.ArrayList<io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder> conditions; public A addToConditions( java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item) { if (this.conditions == null) { this.conditions = new java.util.ArrayList< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder>(); } io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(item); _visitables .get("conditions") .add(index >= 0 ? index : _visitables.get("conditions").size(), builder); this.conditions.add(index >= 0 ? 
index : conditions.size(), builder); return (A) this; } public A setToConditions( java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item) { if (this.conditions == null) { this.conditions = new java.util.ArrayList< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder>(); } io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(item); if (index < 0 || index >= _visitables.get("conditions").size()) { _visitables.get("conditions").add(builder); } else { _visitables.get("conditions").set(index, builder); } if (index < 0 || index >= conditions.size()) { conditions.add(builder); } else { conditions.set(index, builder); } return (A) this; } public A addToConditions( io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition... items) { if (this.conditions == null) { this.conditions = new java.util.ArrayList< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder>(); } for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item : items) { io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(item); _visitables.get("conditions").add(builder); this.conditions.add(builder); } return (A) this; } public A addAllToConditions( java.util.Collection<io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition> items) { if (this.conditions == null) { this.conditions = new java.util.ArrayList< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder>(); } for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item : items) { io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(item); _visitables.get("conditions").add(builder); this.conditions.add(builder); } return (A) this; } 
public A removeFromConditions( io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition... items) { for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item : items) { io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(item); _visitables.get("conditions").remove(builder); if (this.conditions != null) { this.conditions.remove(builder); } } return (A) this; } public A removeAllFromConditions( java.util.Collection<io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition> items) { for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item : items) { io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(item); _visitables.get("conditions").remove(builder); if (this.conditions != null) { this.conditions.remove(builder); } } return (A) this; } public A removeMatchingFromConditions( java.util.function.Predicate< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder> predicate) { if (conditions == null) return (A) this; final Iterator<io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder> each = conditions.iterator(); final List visitables = _visitables.get("conditions"); while (each.hasNext()) { io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder = each.next(); if (predicate.test(builder)) { visitables.remove(builder); each.remove(); } } return (A) this; } /** * This method has been deprecated, please use method buildConditions instead. * * @return The buildable object. */ @java.lang.Deprecated public java.util.List<io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition> getConditions() { return conditions != null ? 
build(conditions) : null; } public java.util.List<io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition> buildConditions() { return conditions != null ? build(conditions) : null; } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition buildCondition( java.lang.Integer index) { return this.conditions.get(index).build(); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition buildFirstCondition() { return this.conditions.get(0).build(); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition buildLastCondition() { return this.conditions.get(conditions.size() - 1).build(); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition buildMatchingCondition( java.util.function.Predicate< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder> predicate) { for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder item : conditions) { if (predicate.test(item)) { return item.build(); } } return null; } public java.lang.Boolean hasMatchingCondition( java.util.function.Predicate< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder> predicate) { for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder item : conditions) { if (predicate.test(item)) { return true; } } return false; } public A withConditions( java.util.List<io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition> conditions) { if (this.conditions != null) { _visitables.get("conditions").removeAll(this.conditions); } if (conditions != null) { this.conditions = new java.util.ArrayList(); for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item : conditions) { this.addToConditions(item); } } else { this.conditions = null; } return (A) this; } public A withConditions( io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition... 
conditions) { if (this.conditions != null) { this.conditions.clear(); } if (conditions != null) { for (io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item : conditions) { this.addToConditions(item); } } return (A) this; } public java.lang.Boolean hasConditions() { return conditions != null && !conditions.isEmpty(); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<A> addNewCondition() { return new io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluentImpl .ConditionsNestedImpl(); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<A> addNewConditionLike(io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item) { return new io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluentImpl .ConditionsNestedImpl(-1, item); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<A> setNewConditionLike( java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item) { return new io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluentImpl .ConditionsNestedImpl(index, item); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<A> editCondition(java.lang.Integer index) { if (conditions.size() <= index) throw new RuntimeException("Can't edit conditions. Index exceeds size."); return setNewConditionLike(index, buildCondition(index)); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<A> editFirstCondition() { if (conditions.size() == 0) throw new RuntimeException("Can't edit first conditions. The list is empty."); return setNewConditionLike(0, buildCondition(0)); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<A> editLastCondition() { int index = conditions.size() - 1; if (index < 0) throw new RuntimeException("Can't edit last conditions. 
The list is empty."); return setNewConditionLike(index, buildCondition(index)); } public io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<A> editMatchingCondition( java.util.function.Predicate< io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder> predicate) { int index = -1; for (int i = 0; i < conditions.size(); i++) { if (predicate.test(conditions.get(i))) { index = i; break; } } if (index < 0) throw new RuntimeException("Can't edit matching conditions. No match found."); return setNewConditionLike(index, buildCondition(index)); } public boolean equals(java.lang.Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; V1beta1FlowSchemaStatusFluentImpl that = (V1beta1FlowSchemaStatusFluentImpl) o; if (conditions != null ? !conditions.equals(that.conditions) : that.conditions != null) return false; return true; } public int hashCode() { return java.util.Objects.hash(conditions, super.hashCode()); } public class ConditionsNestedImpl<N> extends io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionFluentImpl< io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested<N>> implements io.kubernetes.client.openapi.models.V1beta1FlowSchemaStatusFluent.ConditionsNested< N>, io.kubernetes.client.fluent.Nested<N> { ConditionsNestedImpl( java.lang.Integer index, io.kubernetes.client.openapi.models.V1beta1FlowSchemaCondition item) { this.index = index; this.builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(this, item); } ConditionsNestedImpl() { this.index = -1; this.builder = new io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder(this); } io.kubernetes.client.openapi.models.V1beta1FlowSchemaConditionBuilder builder; java.lang.Integer index; public N and() { return (N) V1beta1FlowSchemaStatusFluentImpl.this.setToConditions(index, builder.build()); } public N endCondition() { return and(); } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.store; import com.google.common.base.Predicate; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.disruption.SlowClusterStateProcessing; import org.junit.Test; import java.nio.file.Files; import 
java.nio.file.Path; import java.util.Arrays; import java.util.concurrent.Future; import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; /** * */ @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class IndicesStoreIntegrationTests extends ElasticsearchIntegrationTest { @Override protected Settings nodeSettings(int nodeOrdinal) { // simplify this and only use a single data path return ImmutableSettings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("path.data", "").build(); } @Test public void indexCleanup() throws Exception { final String masterNode = internalCluster().startNode(ImmutableSettings.builder().put("node.data", false)); final String node_1 = internalCluster().startNode(ImmutableSettings.builder().put("node.master", false)); final String node_2 = internalCluster().startNode(ImmutableSettings.builder().put("node.master", false)); logger.info("--> creating index [test] with one shard and on replica"); assertAcked(prepareCreate("test").setSettings( ImmutableSettings.builder().put(indexSettings()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)) ); ensureGreen("test"); logger.info("--> making sure that shard and its replica are allocated on node_1 and node_2"); assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); assertThat(Files.exists(indexDirectory(node_1, "test")), equalTo(true)); assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true)); logger.info("--> starting node server3"); final String node_3 = internalCluster().startNode(ImmutableSettings.builder().put("node.master", false)); logger.info("--> running cluster_health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth() .setWaitForNodes("4") 
.setWaitForRelocatingShards(0) .get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); assertThat(Files.exists(indexDirectory(node_1, "test")), equalTo(true)); assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true)); assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(false)); assertThat(Files.exists(indexDirectory(node_3, "test")), equalTo(false)); logger.info("--> move shard from node_1 to node_3, and wait for relocation to finish"); SlowClusterStateProcessing disruption = null; if (randomBoolean()) { disruption = new SlowClusterStateProcessing(node_3, getRandom(), 0, 0, 1000, 2000); internalCluster().setDisruptionScheme(disruption); disruption.startDisrupting(); } internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_3)).get(); clusterHealth = client().admin().cluster().prepareHealth() .setWaitForNodes("4") .setWaitForRelocatingShards(0) .get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(waitForShardDeletion(node_1, "test", 0), equalTo(false)); assertThat(waitForIndexDeletion(node_1, "test"), equalTo(false)); assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true)); assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true)); assertThat(Files.exists(indexDirectory(node_3, "test")), equalTo(true)); } @Test public void shardsCleanup() throws Exception { final String node_1 = internalCluster().startNode(); final String node_2 = internalCluster().startNode(); logger.info("--> creating index [test] with one shard and on replica"); assertAcked(prepareCreate("test").setSettings( ImmutableSettings.builder().put(indexSettings()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) 
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)) ); ensureGreen("test"); logger.info("--> making sure that shard and its replica are allocated on node_1 and node_2"); assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); assertThat(Files.exists(shardDirectory(node_2, "test", 0)), equalTo(true)); logger.info("--> starting node server3"); String node_3 = internalCluster().startNode(); logger.info("--> running cluster_health"); ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth() .setWaitForNodes("3") .setWaitForRelocatingShards(0) .get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); logger.info("--> making sure that shard is not allocated on server3"); assertThat(waitForShardDeletion(node_3, "test", 0), equalTo(false)); Path server2Shard = shardDirectory(node_2, "test", 0); logger.info("--> stopping node " + node_2); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_2)); logger.info("--> running cluster_health"); clusterHealth = client().admin().cluster().prepareHealth() .setWaitForGreenStatus() .setWaitForNodes("2") .setWaitForRelocatingShards(0) .get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); logger.info("--> done cluster_health, status " + clusterHealth.getStatus()); assertThat(Files.exists(server2Shard), equalTo(true)); logger.info("--> making sure that shard and its replica exist on server1, server2 and server3"); assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); assertThat(Files.exists(server2Shard), equalTo(true)); assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true)); logger.info("--> starting node node_4"); final String node_4 = internalCluster().startNode(); logger.info("--> running cluster_health"); ensureGreen(); logger.info("--> making sure that shard and its replica are allocated on server1 and server3 but not on server2"); assertThat(Files.exists(shardDirectory(node_1, "test", 0)), equalTo(true)); 
// NOTE(review): the two assertions and closing brace below are the tail of a test
// method whose beginning lies outside this chunk; kept verbatim.
assertThat(Files.exists(shardDirectory(node_3, "test", 0)), equalTo(true)); assertThat(waitForShardDeletion(node_4, "test", 0), equalTo(false)); }

/**
 * Publishes a forged cluster state on node_2 claiming every shard of index "test" is
 * STARTED on node_1, then checks that node_2 does NOT delete its local shard
 * directories. NOTE(review): presumably the data survives because the state update was
 * injected locally rather than published by the elected master — confirm against the
 * shard-deletion logic in IndicesStore.
 */
@Test @Slow public void testShardActiveElseWhere() throws Exception {
    // Randomize master eligibility; the disjunction guarantees at least one of the two
    // nodes is master eligible so the cluster can form.
    boolean node1IsMasterEligible = randomBoolean();
    boolean node2IsMasterEligible = !node1IsMasterEligible || randomBoolean();
    Future<String> node_1_future = internalCluster().startNodeAsync(ImmutableSettings.builder().put("node.master", node1IsMasterEligible).build());
    Future<String> node_2_future = internalCluster().startNodeAsync(ImmutableSettings.builder().put("node.master", node2IsMasterEligible).build());
    final String node_1 = node_1_future.get();
    final String node_2 = node_2_future.get();
    final String node_1_id = internalCluster().getInstance(DiscoveryService.class, node_1).localNode().getId();
    final String node_2_id = internalCluster().getInstance(DiscoveryService.class, node_2).localNode().getId();
    logger.debug("node {} (node_1) is {}master eligible", node_1, node1IsMasterEligible ? "" : "not ");
    logger.debug("node {} (node_2) is {}master eligible", node_2, node2IsMasterEligible ? "" : "not ");
    logger.debug("node {} became master", internalCluster().getMasterName());
    final int numShards = scaledRandomIntBetween(2, 20);
    // Zero replicas: every shard has exactly one (primary) copy somewhere.
    assertAcked(prepareCreate("test")
            .setSettings(ImmutableSettings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards))
    );
    ensureGreen("test");
    waitNoPendingTasksOnAll();
    ClusterStateResponse stateResponse = client().admin().cluster().prepareState().get();
    // Record which shard ids node_2 currently owns so we can check them afterwards.
    RoutingNode routingNode = stateResponse.getState().routingNodes().node(node_2_id);
    final int[] node2Shards = new int[routingNode.numberOfOwningShards()];
    int i = 0;
    for (MutableShardRouting mutableShardRouting : routingNode) {
        node2Shards[i] = mutableShardRouting.shardId().id();
        i++;
    }
    logger.info("Node 2 has shards: {}", Arrays.toString(node2Shards));
    // Snapshot per-shard versions and ids; iteration order of allShards() is not
    // necessarily by shard id, hence the parallel shardIds[] lookup table below.
    final long shardVersions[] = new long[numShards];
    final int shardIds[] = new int[numShards];
    i = 0;
    for (ShardRouting shardRouting : stateResponse.getState().getRoutingTable().allShards("test")) {
        shardVersions[i] = shardRouting.version();
        shardIds[i] = shardRouting.getId();
        i++;
    }
    // Submit (directly on node_2) a routing table asserting all shards are on node_1.
    internalCluster().getInstance(ClusterService.class, node_2).submitStateUpdateTask("test", Priority.IMMEDIATE, new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder("test");
            for (int i = 0; i < numShards; i++) {
                indexRoutingTableBuilder.addIndexShard(
                        new IndexShardRoutingTable.Builder(new ShardId("test", i), false)
                                .addShard(new ImmutableShardRouting("test", i, node_1_id, true, ShardRoutingState.STARTED, shardVersions[shardIds[i]]))
                                .build()
                );
            }
            return ClusterState.builder(currentState)
                    .routingTable(RoutingTable.builder().add(indexRoutingTableBuilder).build())
                    .build();
        }

        // Must run on node_2 itself, not only on the elected master.
        public boolean runOnlyOnMaster() {
            return false;
        }

        @Override
        public void onFailure(String source, Throwable t) {
        }
    });
    waitNoPendingTasksOnAll();
    logger.info("Checking if shards aren't removed");
    for (int shard : node2Shards) {
        // waitForShardDeletion returns true when the shard directory STILL EXISTS.
        assertTrue(waitForShardDeletion(node_2, "test", shard));
    }
}

/** Resolves the single on-disk data directory of {@code index} on node {@code server}. */
private Path indexDirectory(String server, String index) {
    NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server);
    final Path[] paths = env.indexPaths(new Index(index));
    assert paths.length == 1;
    return paths[0];
}

/** Resolves the single on-disk directory of the given shard on node {@code server}. */
private Path shardDirectory(String server, String index, int shard) {
    NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server);
    final Path[] paths = env.availableShardPaths(new ShardId(index, shard));
    assert paths.length == 1;
    return paths[0];
}

/**
 * Best-effort wait for the shard directory to disappear, then report whether it still
 * exists: {@code true} == still present, {@code false} == deleted. The awaitBusy
 * return value is deliberately ignored; the final existence check is authoritative.
 */
private boolean waitForShardDeletion(final String server, final String index, final int shard) throws InterruptedException {
    awaitBusy(new Predicate<Object>() {
        @Override
        public boolean apply(Object o) {
            return !Files.exists(shardDirectory(server, index, shard));
        }
    });
    return Files.exists(shardDirectory(server, index, shard));
}

/** Same contract as {@link #waitForShardDeletion}, but for the whole index directory. */
private boolean waitForIndexDeletion(final String server, final String index) throws InterruptedException {
    awaitBusy(new Predicate<Object>() {
        @Override
        public boolean apply(Object o) {
            return !Files.exists(indexDirectory(server, index));
        }
    });
    return Files.exists(indexDirectory(server, index));
}
}
/*
 * Copyright 2016 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.schemaorg.core;

import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;

/** Interface of <a href="http://schema.org/AutoWash">http://schema.org/AutoWash</a>. */
public interface AutoWash extends AutomotiveBusiness {

  /**
   * Builder interface of <a href="http://schema.org/AutoWash">http://schema.org/AutoWash</a>.
   * One {@code add*} overload group exists per schema.org property; overloads taking a
   * {@code *.Builder} are built lazily, and {@code String} overloads accept a raw value.
   */
  public interface Builder extends AutomotiveBusiness.Builder {

    @Override Builder addJsonLdContext(@Nullable JsonLdContext context);
    @Override Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);
    @Override Builder setJsonLdId(@Nullable String value);
    @Override Builder setJsonLdReverse(String property, Thing obj);
    @Override Builder setJsonLdReverse(String property, Thing.Builder builder);

    /** Add a value to property additionalProperty. */ Builder addAdditionalProperty(PropertyValue value);
    /** Add a value to property additionalProperty. */ Builder addAdditionalProperty(PropertyValue.Builder value);
    /** Add a value to property additionalProperty. */ Builder addAdditionalProperty(String value);
    /** Add a value to property additionalType. */ Builder addAdditionalType(URL value);
    /** Add a value to property additionalType. */ Builder addAdditionalType(String value);
    /** Add a value to property address. */ Builder addAddress(PostalAddress value);
    /** Add a value to property address. */ Builder addAddress(PostalAddress.Builder value);
    /** Add a value to property address. */ Builder addAddress(Text value);
    /** Add a value to property address. */ Builder addAddress(String value);
    /** Add a value to property aggregateRating. */ Builder addAggregateRating(AggregateRating value);
    /** Add a value to property aggregateRating. */ Builder addAggregateRating(AggregateRating.Builder value);
    /** Add a value to property aggregateRating. */ Builder addAggregateRating(String value);
    /** Add a value to property alternateName. */ Builder addAlternateName(Text value);
    /** Add a value to property alternateName. */ Builder addAlternateName(String value);
    /** Add a value to property alumni. */ Builder addAlumni(Person value);
    /** Add a value to property alumni. */ Builder addAlumni(Person.Builder value);
    /** Add a value to property alumni. */ Builder addAlumni(String value);
    /** Add a value to property areaServed. */ Builder addAreaServed(AdministrativeArea value);
    /** Add a value to property areaServed. */ Builder addAreaServed(AdministrativeArea.Builder value);
    /** Add a value to property areaServed. */ Builder addAreaServed(GeoShape value);
    /** Add a value to property areaServed. */ Builder addAreaServed(GeoShape.Builder value);
    /** Add a value to property areaServed. */ Builder addAreaServed(Place value);
    /** Add a value to property areaServed. */ Builder addAreaServed(Place.Builder value);
    /** Add a value to property areaServed. */ Builder addAreaServed(Text value);
    /** Add a value to property areaServed. */ Builder addAreaServed(String value);
    /** Add a value to property award. */ Builder addAward(Text value);
    /** Add a value to property award. */ Builder addAward(String value);
    /** Add a value to property awards. */ Builder addAwards(Text value);
    /** Add a value to property awards. */ Builder addAwards(String value);
    /** Add a value to property branchCode. */ Builder addBranchCode(Text value);
    /** Add a value to property branchCode. */ Builder addBranchCode(String value);
    /** Add a value to property branchOf. */ Builder addBranchOf(Organization value);
    /** Add a value to property branchOf. */ Builder addBranchOf(Organization.Builder value);
    /** Add a value to property branchOf. */ Builder addBranchOf(String value);
    /** Add a value to property brand. */ Builder addBrand(Brand value);
    /** Add a value to property brand. */ Builder addBrand(Brand.Builder value);
    /** Add a value to property brand. */ Builder addBrand(Organization value);
    /** Add a value to property brand. */ Builder addBrand(Organization.Builder value);
    /** Add a value to property brand. */ Builder addBrand(String value);
    /** Add a value to property contactPoint. */ Builder addContactPoint(ContactPoint value);
    /** Add a value to property contactPoint. */ Builder addContactPoint(ContactPoint.Builder value);
    /** Add a value to property contactPoint. */ Builder addContactPoint(String value);
    /** Add a value to property contactPoints. */ Builder addContactPoints(ContactPoint value);
    /** Add a value to property contactPoints. */ Builder addContactPoints(ContactPoint.Builder value);
    /** Add a value to property contactPoints. */ Builder addContactPoints(String value);
    /** Add a value to property containedIn. */ Builder addContainedIn(Place value);
    /** Add a value to property containedIn. */ Builder addContainedIn(Place.Builder value);
    /** Add a value to property containedIn. */ Builder addContainedIn(String value);
    /** Add a value to property containedInPlace. */ Builder addContainedInPlace(Place value);
    /** Add a value to property containedInPlace. */ Builder addContainedInPlace(Place.Builder value);
    /** Add a value to property containedInPlace. */ Builder addContainedInPlace(String value);
    /** Add a value to property containsPlace. */ Builder addContainsPlace(Place value);
    /** Add a value to property containsPlace. */ Builder addContainsPlace(Place.Builder value);
    /** Add a value to property containsPlace. */ Builder addContainsPlace(String value);
    /** Add a value to property currenciesAccepted. */ Builder addCurrenciesAccepted(Text value);
    /** Add a value to property currenciesAccepted. */ Builder addCurrenciesAccepted(String value);
    /** Add a value to property department. */ Builder addDepartment(Organization value);
    /** Add a value to property department. */ Builder addDepartment(Organization.Builder value);
    /** Add a value to property department. */ Builder addDepartment(String value);
    /** Add a value to property description. */ Builder addDescription(Text value);
    /** Add a value to property description. */ Builder addDescription(String value);
    /** Add a value to property dissolutionDate. */ Builder addDissolutionDate(Date value);
    /** Add a value to property dissolutionDate. */ Builder addDissolutionDate(String value);
    /** Add a value to property duns. */ Builder addDuns(Text value);
    /** Add a value to property duns. */ Builder addDuns(String value);
    /** Add a value to property email. */ Builder addEmail(Text value);
    /** Add a value to property email. */ Builder addEmail(String value);
    /** Add a value to property employee. */ Builder addEmployee(Person value);
    /** Add a value to property employee. */ Builder addEmployee(Person.Builder value);
    /** Add a value to property employee. */ Builder addEmployee(String value);
    /** Add a value to property employees. */ Builder addEmployees(Person value);
    /** Add a value to property employees. */ Builder addEmployees(Person.Builder value);
    /** Add a value to property employees. */ Builder addEmployees(String value);
    /** Add a value to property event. */ Builder addEvent(Event value);
    /** Add a value to property event. */ Builder addEvent(Event.Builder value);
    /** Add a value to property event. */ Builder addEvent(String value);
    /** Add a value to property events. */ Builder addEvents(Event value);
    /** Add a value to property events. */ Builder addEvents(Event.Builder value);
    /** Add a value to property events. */ Builder addEvents(String value);
    /** Add a value to property faxNumber. */ Builder addFaxNumber(Text value);
    /** Add a value to property faxNumber. */ Builder addFaxNumber(String value);
    /** Add a value to property founder. */ Builder addFounder(Person value);
    /** Add a value to property founder. */ Builder addFounder(Person.Builder value);
    /** Add a value to property founder. */ Builder addFounder(String value);
    /** Add a value to property founders. */ Builder addFounders(Person value);
    /** Add a value to property founders. */ Builder addFounders(Person.Builder value);
    /** Add a value to property founders. */ Builder addFounders(String value);
    /** Add a value to property foundingDate. */ Builder addFoundingDate(Date value);
    /** Add a value to property foundingDate. */ Builder addFoundingDate(String value);
    /** Add a value to property foundingLocation. */ Builder addFoundingLocation(Place value);
    /** Add a value to property foundingLocation. */ Builder addFoundingLocation(Place.Builder value);
    /** Add a value to property foundingLocation. */ Builder addFoundingLocation(String value);
    /** Add a value to property geo. */ Builder addGeo(GeoCoordinates value);
    /** Add a value to property geo. */ Builder addGeo(GeoCoordinates.Builder value);
    /** Add a value to property geo. */ Builder addGeo(GeoShape value);
    /** Add a value to property geo. */ Builder addGeo(GeoShape.Builder value);
    /** Add a value to property geo. */ Builder addGeo(String value);
    /** Add a value to property globalLocationNumber. */ Builder addGlobalLocationNumber(Text value);
    /** Add a value to property globalLocationNumber. */ Builder addGlobalLocationNumber(String value);
    /** Add a value to property hasMap. */ Builder addHasMap(Map value);
    /** Add a value to property hasMap. */ Builder addHasMap(Map.Builder value);
    /** Add a value to property hasMap. */ Builder addHasMap(URL value);
    /** Add a value to property hasMap. */ Builder addHasMap(String value);
    /** Add a value to property hasOfferCatalog. */ Builder addHasOfferCatalog(OfferCatalog value);
    /** Add a value to property hasOfferCatalog. */ Builder addHasOfferCatalog(OfferCatalog.Builder value);
    /** Add a value to property hasOfferCatalog. */ Builder addHasOfferCatalog(String value);
    /** Add a value to property hasPOS. */ Builder addHasPOS(Place value);
    /** Add a value to property hasPOS. */ Builder addHasPOS(Place.Builder value);
    /** Add a value to property hasPOS. */ Builder addHasPOS(String value);
    /** Add a value to property image. */ Builder addImage(ImageObject value);
    /** Add a value to property image. */ Builder addImage(ImageObject.Builder value);
    /** Add a value to property image. */ Builder addImage(URL value);
    /** Add a value to property image. */ Builder addImage(String value);
    /** Add a value to property isicV4. */ Builder addIsicV4(Text value);
    /** Add a value to property isicV4. */ Builder addIsicV4(String value);
    /** Add a value to property legalName. */ Builder addLegalName(Text value);
    /** Add a value to property legalName. */ Builder addLegalName(String value);
    /** Add a value to property location. */ Builder addLocation(Place value);
    /** Add a value to property location. */ Builder addLocation(Place.Builder value);
    /** Add a value to property location. */ Builder addLocation(PostalAddress value);
    /** Add a value to property location. */ Builder addLocation(PostalAddress.Builder value);
    /** Add a value to property location. */ Builder addLocation(Text value);
    /** Add a value to property location. */ Builder addLocation(String value);
    /** Add a value to property logo. */ Builder addLogo(ImageObject value);
    /** Add a value to property logo. */ Builder addLogo(ImageObject.Builder value);
    /** Add a value to property logo. */ Builder addLogo(URL value);
    /** Add a value to property logo. */ Builder addLogo(String value);
    /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(CreativeWork value);
    /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(CreativeWork.Builder value);
    /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(URL value);
    /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(String value);
    /** Add a value to property makesOffer. */ Builder addMakesOffer(Offer value);
    /** Add a value to property makesOffer. */ Builder addMakesOffer(Offer.Builder value);
    /** Add a value to property makesOffer. */ Builder addMakesOffer(String value);
    /** Add a value to property map. */ Builder addMap(URL value);
    /** Add a value to property map. */ Builder addMap(String value);
    /** Add a value to property maps. */ Builder addMaps(URL value);
    /** Add a value to property maps. */ Builder addMaps(String value);
    /** Add a value to property member. */ Builder addMember(Organization value);
    /** Add a value to property member. */ Builder addMember(Organization.Builder value);
    /** Add a value to property member. */ Builder addMember(Person value);
    /** Add a value to property member. */ Builder addMember(Person.Builder value);
    /** Add a value to property member. */ Builder addMember(String value);
    /** Add a value to property memberOf. */ Builder addMemberOf(Organization value);
    /** Add a value to property memberOf. */ Builder addMemberOf(Organization.Builder value);
    /** Add a value to property memberOf. */ Builder addMemberOf(ProgramMembership value);
    /** Add a value to property memberOf. */ Builder addMemberOf(ProgramMembership.Builder value);
    /** Add a value to property memberOf. */ Builder addMemberOf(String value);
    /** Add a value to property members. */ Builder addMembers(Organization value);
    /** Add a value to property members. */ Builder addMembers(Organization.Builder value);
    /** Add a value to property members. */ Builder addMembers(Person value);
    /** Add a value to property members. */ Builder addMembers(Person.Builder value);
    /** Add a value to property members. */ Builder addMembers(String value);
    /** Add a value to property naics. */ Builder addNaics(Text value);
    /** Add a value to property naics. */ Builder addNaics(String value);
    /** Add a value to property name. */ Builder addName(Text value);
    /** Add a value to property name. */ Builder addName(String value);
    /** Add a value to property numberOfEmployees. */ Builder addNumberOfEmployees(QuantitativeValue value);
    /** Add a value to property numberOfEmployees. */ Builder addNumberOfEmployees(QuantitativeValue.Builder value);
    /** Add a value to property numberOfEmployees. */ Builder addNumberOfEmployees(String value);
    /** Add a value to property openingHours. */ Builder addOpeningHours(Text value);
    /** Add a value to property openingHours. */ Builder addOpeningHours(String value);
    /** Add a value to property openingHoursSpecification. */ Builder addOpeningHoursSpecification(OpeningHoursSpecification value);
    /** Add a value to property openingHoursSpecification. */ Builder addOpeningHoursSpecification(OpeningHoursSpecification.Builder value);
    /** Add a value to property openingHoursSpecification. */ Builder addOpeningHoursSpecification(String value);
    /** Add a value to property owns. */ Builder addOwns(OwnershipInfo value);
    /** Add a value to property owns. */ Builder addOwns(OwnershipInfo.Builder value);
    /** Add a value to property owns. */ Builder addOwns(Product value);
    /** Add a value to property owns. */ Builder addOwns(Product.Builder value);
    /** Add a value to property owns. */ Builder addOwns(String value);
    /** Add a value to property parentOrganization. */ Builder addParentOrganization(Organization value);
    /** Add a value to property parentOrganization. */ Builder addParentOrganization(Organization.Builder value);
    /** Add a value to property parentOrganization. */ Builder addParentOrganization(String value);
    /** Add a value to property paymentAccepted. */ Builder addPaymentAccepted(Text value);
    /** Add a value to property paymentAccepted. */ Builder addPaymentAccepted(String value);
    /** Add a value to property photo. */ Builder addPhoto(ImageObject value);
    /** Add a value to property photo. */ Builder addPhoto(ImageObject.Builder value);
    /** Add a value to property photo. */ Builder addPhoto(Photograph value);
    /** Add a value to property photo. */ Builder addPhoto(Photograph.Builder value);
    /** Add a value to property photo. */ Builder addPhoto(String value);
    /** Add a value to property photos. */ Builder addPhotos(ImageObject value);
    /** Add a value to property photos. */ Builder addPhotos(ImageObject.Builder value);
    /** Add a value to property photos. */ Builder addPhotos(Photograph value);
    /** Add a value to property photos. */ Builder addPhotos(Photograph.Builder value);
    /** Add a value to property photos. */ Builder addPhotos(String value);
    /** Add a value to property potentialAction. */ Builder addPotentialAction(Action value);
    /** Add a value to property potentialAction. */ Builder addPotentialAction(Action.Builder value);
    /** Add a value to property potentialAction. */ Builder addPotentialAction(String value);
    /** Add a value to property priceRange. */ Builder addPriceRange(Text value);
    /** Add a value to property priceRange. */ Builder addPriceRange(String value);
    /** Add a value to property review. */ Builder addReview(Review value);
    /** Add a value to property review. */ Builder addReview(Review.Builder value);
    /** Add a value to property review. */ Builder addReview(String value);
    /** Add a value to property reviews. */ Builder addReviews(Review value);
    /** Add a value to property reviews. */ Builder addReviews(Review.Builder value);
    /** Add a value to property reviews. */ Builder addReviews(String value);
    /** Add a value to property sameAs. */ Builder addSameAs(URL value);
    /** Add a value to property sameAs. */ Builder addSameAs(String value);
    /** Add a value to property seeks. */ Builder addSeeks(Demand value);
    /** Add a value to property seeks. */ Builder addSeeks(Demand.Builder value);
    /** Add a value to property seeks. */ Builder addSeeks(String value);
    /** Add a value to property serviceArea. */ Builder addServiceArea(AdministrativeArea value);
    /** Add a value to property serviceArea. */ Builder addServiceArea(AdministrativeArea.Builder value);
    /** Add a value to property serviceArea. */ Builder addServiceArea(GeoShape value);
    /** Add a value to property serviceArea. */ Builder addServiceArea(GeoShape.Builder value);
    /** Add a value to property serviceArea. */ Builder addServiceArea(Place value);
    /** Add a value to property serviceArea. */ Builder addServiceArea(Place.Builder value);
    /** Add a value to property serviceArea. */ Builder addServiceArea(String value);
    /** Add a value to property subOrganization. */ Builder addSubOrganization(Organization value);
    /** Add a value to property subOrganization. */ Builder addSubOrganization(Organization.Builder value);
    /** Add a value to property subOrganization. */ Builder addSubOrganization(String value);
    /** Add a value to property taxID. */ Builder addTaxID(Text value);
    /** Add a value to property taxID. */ Builder addTaxID(String value);
    /** Add a value to property telephone. */ Builder addTelephone(Text value);
    /** Add a value to property telephone. */ Builder addTelephone(String value);
    /** Add a value to property url. */ Builder addUrl(URL value);
    /** Add a value to property url. */ Builder addUrl(String value);
    /** Add a value to property vatID. */ Builder addVatID(Text value);
    /** Add a value to property vatID. */ Builder addVatID(String value);
    /** Add a value to property detailedDescription. */ Builder addDetailedDescription(Article value);
    /** Add a value to property detailedDescription. */ Builder addDetailedDescription(Article.Builder value);
    /** Add a value to property detailedDescription. */ Builder addDetailedDescription(String value);
    /** Add a value to property popularityScore. */ Builder addPopularityScore(PopularityScoreSpecification value);
    /** Add a value to property popularityScore. */ Builder addPopularityScore(PopularityScoreSpecification.Builder value);
    /** Add a value to property popularityScore. */ Builder addPopularityScore(String value);

    /** Add {@code value} to the property named {@code name}. */
    Builder addProperty(String name, SchemaOrgType value);

    /** Add the object built by {@code builder} to the property named {@code name}. */
    Builder addProperty(String name, Thing.Builder builder);

    /** Add the string {@code value} to the property named {@code name}. */
    Builder addProperty(String name, String value);

    /** Build a {@link AutoWash} object. */
    AutoWash build();
  }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.05.04 at 02:33:07 PM CEST
//

package com.devtty.neb27k.jaxb;

import java.math.BigDecimal;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code OriginDestType} complex type (an origin or destination
 * stop of a trip leg in the HAFAS REST schema).
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="OriginDestType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element ref="{hafas_rest}Notes" minOccurs="0"/>
 *       &lt;/sequence>
 *       &lt;attGroup ref="{hafas_rest}attlist.OriginDestType"/>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "OriginDestType", propOrder = {
    "notes"
})
public class OriginDestType {

    // Optional child element; everything else maps to XML attributes.
    @XmlElement(name = "Notes")
    protected Notes notes;
    @XmlAttribute(name = "name", required = true)
    protected String name;
    @XmlAttribute(name = "type", required = true)
    protected String type;
    @XmlAttribute(name = "id")
    protected String id;
    @XmlAttribute(name = "extId")
    protected String extId;
    // Geographic coordinates of the location.
    @XmlAttribute(name = "lon")
    protected BigDecimal lon;
    @XmlAttribute(name = "lat")
    protected BigDecimal lat;
    @XmlAttribute(name = "routeIdx")
    protected Integer routeIdx;
    @XmlAttribute(name = "prognosisType")
    protected PrognosisType prognosisType;
    // Scheduled time/date (required); rt* fields below carry real-time values.
    @XmlAttribute(name = "time", required = true)
    protected String time;
    @XmlAttribute(name = "date", required = true)
    protected String date;
    @XmlAttribute(name = "tz")
    protected Integer tz;
    @XmlAttribute(name = "track")
    protected String track;
    @XmlAttribute(name = "rtTime")
    protected String rtTime;
    @XmlAttribute(name = "rtDate")
    protected String rtDate;
    @XmlAttribute(name = "rtTz")
    protected Integer rtTz;
    @XmlAttribute(name = "rtTrack")
    protected String rtTrack;

    /**
     * Returns the list of notes (attributes or footnotes) to be displayed for this
     * location, or {@code null} if absent.
     */
    public Notes getNotes() {
        return notes;
    }

    /** Sets the notes property ({@link Notes}, may be {@code null}). */
    public void setNotes(Notes value) {
        this.notes = value;
    }

    /** Returns the name attribute. */
    public String getName() {
        return name;
    }

    /** Sets the name attribute. */
    public void setName(String value) {
        this.name = value;
    }

    /** Returns the type attribute. */
    public String getType() {
        return type;
    }

    /** Sets the type attribute. */
    public void setType(String value) {
        this.type = value;
    }

    /** Returns the id attribute, or {@code null} if absent. */
    public String getId() {
        return id;
    }

    /** Sets the id attribute. */
    public void setId(String value) {
        this.id = value;
    }

    /** Returns the external id attribute, or {@code null} if absent. */
    public String getExtId() {
        return extId;
    }

    /** Sets the external id attribute. */
    public void setExtId(String value) {
        this.extId = value;
    }

    /** Returns the longitude, or {@code null} if absent. */
    public BigDecimal getLon() {
        return lon;
    }

    /** Sets the longitude. */
    public void setLon(BigDecimal value) {
        this.lon = value;
    }

    /** Returns the latitude, or {@code null} if absent. */
    public BigDecimal getLat() {
        return lat;
    }

    /** Sets the latitude. */
    public void setLat(BigDecimal value) {
        this.lat = value;
    }

    /** Returns the route index, or {@code null} if absent. */
    public Integer getRouteIdx() {
        return routeIdx;
    }

    /** Sets the route index. */
    public void setRouteIdx(Integer value) {
        this.routeIdx = value;
    }

    /** Returns the prognosis type, or {@code null} if absent. */
    public PrognosisType getPrognosisType() {
        return prognosisType;
    }

    /** Sets the prognosis type. */
    public void setPrognosisType(PrognosisType value) {
        this.prognosisType = value;
    }

    /** Returns the scheduled time attribute. */
    public String getTime() {
        return time;
    }

    /** Sets the scheduled time attribute. */
    public void setTime(String value) {
        this.time = value;
    }

    /** Returns the scheduled date attribute. */
    public String getDate() {
        return date;
    }

    /** Sets the scheduled date attribute. */
    public void setDate(String value) {
        this.date = value;
    }

    /**
     * Returns the timezone offset attribute; defaults to {@code 0} when the attribute
     * is absent (schema default baked in by the JAXB generator).
     */
    public int getTz() {
        if (tz == null) {
            return 0;
        } else {
            return tz;
        }
    }

    /** Sets the timezone offset attribute ({@code null} means "use default 0"). */
    public void setTz(Integer value) {
        this.tz = value;
    }

    /** Returns the track attribute, or {@code null} if absent. */
    public String getTrack() {
        return track;
    }

    /** Sets the track attribute. */
    public void setTrack(String value) {
        this.track = value;
    }

    /** Returns the real-time time attribute, or {@code null} if absent. */
    public String getRtTime() {
        return rtTime;
    }

    /** Sets the real-time time attribute. */
    public void setRtTime(String value) {
        this.rtTime = value;
    }

    /** Returns the real-time date attribute, or {@code null} if absent. */
    public String getRtDate() {
        return rtDate;
    }

    /** Sets the real-time date attribute. */
    public void setRtDate(String value) {
        this.rtDate = value;
    }

    /**
     * Returns the real-time timezone offset attribute; defaults to {@code 0} when the
     * attribute is absent.
     */
    public int getRtTz() {
        if (rtTz == null) {
            return 0;
        } else {
            return rtTz;
        }
    }

    /** Sets the real-time timezone offset attribute ({@code null} means "use default 0"). */
    public void setRtTz(Integer value) {
        this.rtTz = value;
    }

    /** Returns the real-time track attribute, or {@code null} if absent. */
    public String getRtTrack() {
        return rtTrack;
    }

    /** Sets the real-time track attribute. */
    public void setRtTrack(String value) {
        this.rtTrack = value;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.axis2.deployment;

import java.util.ArrayList;
import java.util.List;

import javax.xml.namespace.QName;

import junit.framework.TestCase;

import org.apache.axis2.AbstractTestCase;
import org.apache.axis2.AxisFault;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.axis2.description.AxisModule;
import org.apache.axis2.description.AxisOperation;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.InOutAxisOperation;
import org.apache.axis2.engine.AxisConfiguration;
import org.apache.axis2.engine.Phase;

/**
 * Tests that engaging/disengaging "testModule" at global, service and operation scope
 * adds/removes its handlers from the expected phases. Throughout these tests,
 * {@code phase} is the 4th global in-flow phase (index 3) and {@code userPhase} is the
 * 2nd remaining in-flow phase of the test operation (index 1); the module under test
 * contributes 2 handlers to the former and 1 to the latter.
 */
public class ModuleDisengagementTest extends TestCase {
    // Axis configuration loaded from the moduleDisEngegeRepo test repository.
    AxisConfiguration config;
    String serviceName = "testService";
    QName opName = new QName("testOperation");

    /** Loads the test repository and registers a service with two in-out operations. */
    protected void setUp() throws Exception {
        String filename = AbstractTestCase.basedir + "/test-resources/deployment/moduleDisEngegeRepo";
        config = ConfigurationContextFactory.
                createConfigurationContextFromFileSystem(filename, filename + "/axis2.xml").getAxisConfiguration();
        AxisService testService = new AxisService();
        testService.setName(serviceName);
        AxisOperation testOperation = new InOutAxisOperation();
        testOperation.setName(opName);
        testService.addOperation(testOperation);
        testOperation = new InOutAxisOperation();
        testOperation.setName(new QName("oper2"));
        testService.addOperation(testOperation);
        config.addService(testService);
    }

    /** Global engage then global disengage: all handlers removed everywhere. */
    public void testGlobalDisengagement() throws AxisFault {
        AxisModule module = config.getModule("testModule");
        assertNotNull(module);
        Phase phase;
        Phase userPhase;
        List globalinflow = config.getInFlowPhases();
        assertNotNull(globalinflow);
        phase = (Phase) globalinflow.get(3);
        assertNotNull(phase);
        assertEquals(phase.getHandlerCount(), 0);
        AxisService service = config.getService(serviceName);
        assertNotNull(service);
        AxisOperation operation = service.getOperation(opName);
        assertNotNull(operation);
        userPhase = (Phase) operation.getRemainingPhasesInFlow().get(1);
        assertNotNull(userPhase);
        assertEquals(0, userPhase.getHandlerCount());
        config.engageModule(module.getName());
        assertEquals(2, phase.getHandlerCount());
        assertEquals(1, userPhase.getHandlerCount());
        config.disengageModule(module);
        assertEquals(0, phase.getHandlerCount());
        assertEquals(0, userPhase.getHandlerCount());
    }

    /**
     * Global engage then service-level disengage: global-phase handlers stay, the
     * operation's user-phase handler is removed.
     */
    public void testServiceDisengagement() throws AxisFault {
        AxisModule module = config.getModule("testModule");
        assertNotNull(module);
        Phase phase;
        Phase userPhase;
        List globalinflow = config.getInFlowPhases();
        assertNotNull(globalinflow);
        phase = (Phase) globalinflow.get(3);
        assertNotNull(phase);
        assertEquals(0, phase.getHandlerCount());
        AxisService service = config.getService(serviceName);
        assertNotNull(service);
        AxisOperation operation = service.getOperation(opName);
        assertNotNull(operation);
        userPhase = (Phase) operation.getRemainingPhasesInFlow().get(1);
        assertNotNull(userPhase);
        assertEquals(0, userPhase.getHandlerCount());
        config.engageModule(module.getName());
        assertEquals(2, phase.getHandlerCount());
        assertEquals(1, userPhase.getHandlerCount());
        service.disengageModule(module);
        assertEquals(2, phase.getHandlerCount());
        assertEquals(0, userPhase.getHandlerCount());
    }

    /** Re-engaging globally after a global disengage must not throw. */
    public void testGlobalCheck() throws AxisFault {
        AxisModule module = config.getModule("testModule");
        assertNotNull(module);
        config.engageModule(module.getName());
        config.disengageModule(module);
        config.engageModule(module.getName());
    }

    /**
     * Global engage then operation-level disengage: global-phase handlers stay, only
     * the operation's user-phase handler is removed.
     */
    public void testOperationDisengagement() throws AxisFault {
        AxisModule module = config.getModule("testModule");
        assertNotNull(module);
        Phase phase;
        Phase userPhase;
        List globalinflow = config.getInFlowPhases();
        assertNotNull(globalinflow);
        phase = (Phase) globalinflow.get(3);
        assertNotNull(phase);
        assertEquals(phase.getHandlerCount(), 0);
        AxisService service = config.getService(serviceName);
        assertNotNull(service);
        AxisOperation operation = service.getOperation(opName);
        assertNotNull(operation);
        userPhase = (Phase) operation.getRemainingPhasesInFlow().get(1);
        assertNotNull(userPhase);
        assertEquals(0, userPhase.getHandlerCount());
        config.engageModule(module.getName());
        assertEquals(2, phase.getHandlerCount());
        assertEquals(1, userPhase.getHandlerCount());
        operation.disengageModule(module);
        assertEquals(2, phase.getHandlerCount());
        assertEquals(0, userPhase.getHandlerCount());
    }

    /** Service-level engage then service-level disengage removes all its handlers. */
    public void testServiceEngageServiceDisengage() throws AxisFault {
        AxisModule module = config.getModule("testModule");
        assertNotNull(module);
        Phase predisptah;
        Phase userPhase;
        List globalinflow = config.getInFlowPhases();
        assertNotNull(globalinflow);
        predisptah = (Phase) globalinflow.get(3);
        assertNotNull(predisptah);
        assertEquals(predisptah.getHandlerCount(), 0);
        AxisService service = config.getService(serviceName);
        assertNotNull(service);
        AxisOperation operation = service.getOperation(opName);
        assertNotNull(operation);
        userPhase = (Phase) operation.getRemainingPhasesInFlow().get(1);
        assertNotNull(userPhase);
        assertEquals(0, userPhase.getHandlerCount());
        service.engageModule(module);
        assertEquals(2, predisptah.getHandlerCount());
        assertEquals(1, userPhase.getHandlerCount());
        service.disengageModule(module);
        assertEquals(0, predisptah.getHandlerCount());
        assertEquals(0, userPhase.getHandlerCount());
    }

    /**
     * Service-level engage then operation-level disengage: global-phase handlers stay,
     * the operation's user-phase handler is removed.
     */
    public void testServiceEngageOperationDisengage() throws AxisFault {
        AxisModule module = config.getModule("testModule");
        assertNotNull(module);
        Phase phase;
        Phase userPhase;
        List globalinflow = config.getInFlowPhases();
        assertNotNull(globalinflow);
        phase = (Phase) globalinflow.get(3);
        assertNotNull(phase);
        assertEquals(phase.getHandlerCount(), 0);
        AxisService service = config.getService(serviceName);
        assertNotNull(service);
        AxisOperation operation = service.getOperation(opName);
        assertNotNull(operation);
        userPhase = (Phase) operation.getRemainingPhasesInFlow().get(1);
        assertNotNull(userPhase);
        assertEquals(0, userPhase.getHandlerCount());
        service.engageModule(module);
        assertEquals(2, phase.getHandlerCount());
        assertEquals(1, userPhase.getHandlerCount());
        operation.disengageModule(module);
        assertEquals(2, phase.getHandlerCount());
        assertEquals(0, userPhase.getHandlerCount());
    }

    /** Operation-level engage then operation-level disengage. */
    public void testOperationEngageOperationDisengage() throws AxisFault {
        AxisModule module = config.getModule("testModule");
        assertNotNull(module);
        Phase phase;
        Phase userPhase;
        List globalinflow = config.getInFlowPhases();
        assertNotNull(globalinflow);
        phase = (Phase) globalinflow.get(3);
        assertNotNull(phase);
        assertEquals(phase.getHandlerCount(), 0);
        AxisService service = config.getService(serviceName);
        assertNotNull(service);
        AxisOperation operation = service.getOperation(opName);
        assertNotNull(operation);
        userPhase = (Phase) operation.getRemainingPhasesInFlow().get(1);
        assertNotNull(userPhase);
        assertEquals(0, userPhase.getHandlerCount());
        operation.engageModule(module);
        assertEquals(2, phase.getHandlerCount());
        assertEquals(1, userPhase.getHandlerCount());
        operation.disengageModule(module);
        // NOTE(review): this method continues beyond the end of this chunk; remaining
        // statements are outside the visible range and are left untouched.
assertEquals(0, phase.getHandlerCount()); assertEquals(0, userPhase.getHandlerCount()); } }
/*******************************************************************************
 * Copyright 2012 University of Southern California
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 * http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * 
 * This code was developed by the Information Integration Group as part
 * of the Karma project at the Information Sciences Institute of the
 * University of Southern California. For more information, publications,
 * and related projects, please see: http://www.isi.edu/integration
 ******************************************************************************/
package edu.isi.karma.model.serialization;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.NodeIterator;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;

import edu.isi.karma.modeling.Namespaces;
import edu.isi.karma.modeling.Prefixes;
import edu.isi.karma.rep.alignment.Label;
import edu.isi.karma.rep.model.Argument;
import edu.isi.karma.rep.model.ArgumentType;
import edu.isi.karma.rep.model.Atom;
import edu.isi.karma.rep.model.ClassAtom;
import edu.isi.karma.rep.model.IndividualPropertyAtom;
import edu.isi.karma.rep.sources.Attribute;
import edu.isi.karma.rep.sources.AttributeRequirement;
import edu.isi.karma.rep.sources.IOType;
import edu.isi.karma.rep.sources.Source;
import edu.isi.karma.rep.sources.WebService;
import edu.isi.karma.webserver.ServletContextParameterMap;
import edu.isi.karma.webserver.ServletContextParameterMap.ContextParameter;

/**
 * Singleton loader that reads {@link WebService} descriptions from the Jena
 * repository ({@code Repository.Instance()}): it looks services up by URI or
 * HTTP address via SPARQL, deletes their named models (and the backing file on
 * disk), matches services against semantic-model patterns, and rebuilds a full
 * {@link WebService} object graph (attributes, variables, SWRL atoms) from a
 * Jena {@link Model}.
 */
public class WebServiceLoader extends SourceLoader {

	private static Logger logger = LoggerFactory.getLogger(WebServiceLoader.class);

	// Lazily created singleton instance (not thread-safe; presumably only
	// used from a single thread — NOTE(review): confirm).
	private static WebServiceLoader instance = null;

	// Fallback result-set size when a caller passes a negative limit.
	private final int DEFAULT_SERVICE_RESULTS_SIZE = 10;

	protected WebServiceLoader() {
		// Exists only to defeat instantiation.
	}

	/** Returns the process-wide loader instance, creating it on first use. */
	public static WebServiceLoader getInstance() {
		if (instance == null) {
			instance = new WebServiceLoader();
		}
		return instance;
	}

	/**
	 * Loads the full service description stored in the named model for
	 * {@code uri}, or {@code null} if no such model exists.
	 */
	@Override
	public WebService getSourceByUri(String uri) {
		Model m = Repository.Instance().getNamedModel(uri);
		if (m == null)
			return null;
		WebService service = importSourceFromJenaModel(m);
		return service;
	}

	/**
	 * Removes the service's named model from the repository and best-effort
	 * deletes the serialized service file from the repository directory.
	 * Failures are logged at debug level and swallowed.
	 */
	@Override
	public void deleteSourceByUri(String uri) {
		Repository.Instance().clearNamedModel(uri);

		// Derive the local id by trimming the path prefix and the final
		// character — assumes the URI ends with a delimiter such as '#'
		// (TODO confirm against how service URIs are minted).
		String service_id = uri.substring(uri.lastIndexOf("/") + 1, uri.length() - 1);
		String dir = ServletContextParameterMap.getParameterValue(ContextParameter.USER_DIRECTORY_PATH)
				+ Repository.Instance().SERVICE_REPOSITORY_REL_DIR;
		String fileName = service_id + Repository.Instance().getFileExtension(Repository.Instance().LANG);
		File f = new File(dir + fileName);
		try {
			if (f.exists()) {
				if (!f.delete())
					logger.debug("The file " + fileName + " cannot be deleted from " + dir);
				else
					logger.debug("The file " + fileName + " has been deleted from " + dir);
			} else
				logger.debug("The file " + fileName + " does not exist in " + dir);
		} catch (Throwable t) {
			logger.debug("cannot delete the file " + fileName + " from " + dir + " because " + t.getMessage());
		}
	}

	/**
	 * returns the service id, name, address of all services in the repository
	 * @param serviceLimit: maximum number of results, null value means all the services
	 * @return list of shallow {@link WebService} entries (id/name/address only),
	 *         or {@code null} if the query fails
	 */
	@Override
	public List<Source> getSourcesAbstractInfo(Integer serviceLimit) {

		List<Source> serviceList = new ArrayList<Source>();

		Model model = Repository.Instance().getModel();

		String service_id = "";
		String service_name = "";
		String service_address = "";

		// Create a new query. Name and address are OPTIONAL so services
		// missing either property are still returned.
		String queryString =
				"PREFIX " + Prefixes.KARMA + ": <" + Namespaces.KARMA + "> \n" +
				"PREFIX " + Prefixes.HRESTS + ": <" + Namespaces.HRESTS + "> \n" +
				"SELECT ?s ?name ?address \n" +
				"WHERE { \n" +
				" ?s a " + Prefixes.KARMA + ":Service . \n" +
				" OPTIONAL {?s " + Prefixes.HRESTS + ":hasAddress ?address .} \n" +
				" OPTIONAL {?s " + Prefixes.KARMA + ":hasName ?name .} \n" +
				" } \n";

		if (serviceLimit != null) {
			if (serviceLimit.intValue() < 0)
				serviceLimit = DEFAULT_SERVICE_RESULTS_SIZE;
			// NOTE(review): the parenthesis is misplaced — valueOf() is applied
			// to an already-concatenated String. The resulting query text
			// ("LIMIT n\n") is the same, but the intent was probably
			// String.valueOf(serviceLimit.intValue()) + "\n".
			queryString += "LIMIT " + String.valueOf(serviceLimit.intValue() + "\n");
		}

		logger.debug("query= \n" + queryString);

		Query query = QueryFactory.create(queryString);

		// Execute the query and obtain results
		QueryExecution qexec = QueryExecutionFactory.create(query, model);

		try {
			ResultSet results = qexec.execSelect() ;

			if (!results.hasNext())
				logger.info("query does not return any answer.");

//			ResultSetFormatter.out(System.out, results, query) ;

			for ( ; results.hasNext() ; ) {
				QuerySolution soln = results.nextSolution() ;

				RDFNode s = soln.get("s") ;             // Get a result variable by name.
				RDFNode name = soln.get("name") ;       // Get a result variable by name.
				RDFNode address = soln.get("address") ; // Get a result variable by name.

				if (s == null) {
					logger.info("service id is null.");
					continue;
				}

				String service_uri = s.toString();
				// Trim path prefix and trailing delimiter (see deleteSourceByUri).
				service_id = service_uri.substring(service_uri.lastIndexOf("/") + 1, service_uri.length() - 1);

				logger.debug("service uri: " + service_uri);
				logger.debug("service id: " + service_id);

				// NOTE(review): service_name/service_address are declared outside
				// the loop, so a row missing the OPTIONAL value silently reuses
				// the previous row's value — confirm whether that is intended.
				if (name != null && name.isLiteral()) service_name = name.asLiteral().getString();
				logger.debug("service name: " + service_name);

				if (address != null && address.isLiteral()) service_address = address.asLiteral().getString();
				logger.debug("service address: " + service_address);

				if (service_id.trim().length() > 0)
					serviceList.add(new WebService(service_id, service_name, service_address));
				else
					logger.info("length of service id is zero.");
			}

			return serviceList;
		} catch (Exception e) {
			logger.info(e.getMessage());
			return null;
		} finally {
			qexec.close() ;
		}
	}

	/**
	 * returns all the services in the repository along with their complete information
	 * probably this method is not useful since fetching all the services with their complete
	 * information takes long time.
	 * @return list of fully populated services (one repository round-trip per service)
	 */
	@Override
	public List<Source> getSourcesDetailedInfo(Integer serviceLimit) {
		List<Source> serviceList = getSourcesAbstractInfo(serviceLimit);
		List<Source> serviceListCompleteInfo = new ArrayList<Source>();
		for (Source s : serviceList) {
			serviceListCompleteInfo.add(getSourceByUri(s.getUri()));
		}
		return serviceListCompleteInfo;
	}

	/**
	 * Loads the full description of the service registered under the given
	 * HTTP address, or {@code null} if none is found.
	 */
	public WebService getServiceByAddress(String address) {
		String uri = getServiceUriByServiceAddress(address);
		Model m = Repository.Instance().getNamedModel(uri);
		if (m == null)
			return null;
		WebService service = importSourceFromJenaModel(m);
		return service;
	}

	/** Removes the named model of the service registered under {@code address}. */
	public void deleteServiceByAddress(String address) {
		String uri = getServiceUriByServiceAddress(address);
		Repository.Instance().clearNamedModel(uri);
	}

	/**
	 * Resolves a service URI from its hrests:hasAddress value. Returns the
	 * first match, "" when nothing matches, or {@code null} on query failure.
	 * NOTE(review): {@code address} is spliced into the SPARQL string
	 * unescaped — an address containing '"' would break the query.
	 */
	private String getServiceUriByServiceAddress(String address) {
		Model model = Repository.Instance().getModel();

		// Create a new query
		String queryString =
				"PREFIX " + Prefixes.KARMA + ": <" + Namespaces.KARMA + "> \n" +
				"PREFIX " + Prefixes.HRESTS + ": <" + Namespaces.HRESTS + "> \n" +
				"SELECT ?s \n" +
				"WHERE { \n" +
				" ?s a " + Prefixes.KARMA + ":Service . \n" +
				" ?s " + Prefixes.HRESTS + ":hasAddress \"" + address + "\"^^hrests:URITemplate . \n" +
				" } \n";

		logger.debug(queryString);

		Query query = QueryFactory.create(queryString);

		// Execute the query and obtain results
		QueryExecution qexec = QueryExecutionFactory.create(query, model);

		try {
			ResultSet results = qexec.execSelect() ;

			if (!results.hasNext())
				logger.info("query does not return any answer.");

//			ResultSetFormatter.out(System.out, results, query) ;

			String serviceURI = "";

			// Only the first solution is used.
			for ( ; results.hasNext() ; ) {
				QuerySolution soln = results.nextSolution() ;
				RDFNode x = soln.get("s") ;       // Get a result variable by name.
				serviceURI = x.toString();
				logger.info("Service with id " + x.toString() + " has been found with the address " + address);
				break;
			}

			return serviceURI;
		} catch (Exception e) {
			logger.info("Exception in finding a service with the address " + address + " in service repository.");
			return null;
		} finally {
			qexec.close() ;
		}
	}

	/**
	 * Searches the repository to find the services that the semantic model parameter is contained in
	 * their input model.
	 * @param semanticModel The input model whose pattern will be searched in the repository
	 * @return a hashmap of all found services and a mapping from the found service parameters to the model parameters.
	 * This help us later to how to join the model's corresponding source and the matched service
	 */
	public Map<WebService, Map<String, String>> getServicesByInputPattern(edu.isi.karma.rep.model.Model semanticModel,
			Integer serviceLimit) {

		if (semanticModel == null || semanticModel.getAtoms() == null || semanticModel.getAtoms().size() == 0) {
			logger.info("The input model is nul or it does not have any atom");
			return null;
		}

		Map<WebService, Map<String, String>> servicesAndMappings =
				new HashMap<WebService, Map<String,String>>();

		// Pattern matching is delegated to the model; it returns
		// serviceId -> (service param -> model param).
		Map<String, Map<String, String>> serviceIdsAndMappings =
				semanticModel.findInServiceInputs(Repository.Instance().getModel(), serviceLimit);

		if (serviceIdsAndMappings == null)
			return null;

		for (String serviceId : serviceIdsAndMappings.keySet()) {
			Model m = Repository.Instance().getNamedModel(serviceId);
			if (m != null)
				servicesAndMappings.put(importSourceFromJenaModel(m), serviceIdsAndMappings.get(serviceId));
		}

		return servicesAndMappings;
	}

	/**
	 * Searches the repository to find the services that the semantic model parameter is contained in
	 * their output model.
	 * @param semanticModel The input model whose pattern will be searched in the repository
	 * @return a hashmap of all found services and a mapping from the found service parameters to the model parameters.
	 * This help us later to how to join the model's corresponding source and the matched service
	 */
	public Map<WebService, Map<String, String>> getServicesByOutputPattern(edu.isi.karma.rep.model.Model semanticModel,
			Integer serviceLimit) {

		if (semanticModel == null || semanticModel.getAtoms() == null || semanticModel.getAtoms().size() == 0) {
			logger.info("The input model is nul or it does not have any atom");
			return null;
		}

		Map<WebService, Map<String, String>> servicesAndMappings =
				new HashMap<WebService, Map<String,String>>();

		Map<String, Map<String, String>> serviceIdsAndMappings =
				semanticModel.findInServiceOutputs(Repository.Instance().getModel(), serviceLimit);

		if (serviceIdsAndMappings == null)
			return null;

		for (String serviceId : serviceIdsAndMappings.keySet()) {
			Model m = Repository.Instance().getNamedModel(serviceId);
			if (m != null)
				servicesAndMappings.put(importSourceFromJenaModel(m), serviceIdsAndMappings.get(serviceId));
		}

		return servicesAndMappings;
	}

	/**
	 * Searches the repository to find the services whose input model is contained in the semantic model parameter.
	 * Note that the services in the return list only include the operations that match the model parameter.
	 * @param semanticModel The input model whose pattern will be searched in the repository
	 * @return a hashmap of all found services and a mapping from the found service parameters to the model parameters.
	 * This help us later to how to join the model's corresponding source and the matched service
	 */
	public Map<WebService, Map<String, String>> getServicesWithInputContainedInModel(
			edu.isi.karma.rep.model.Model semanticModel,
			Integer serviceLimit) {

		// Loads every candidate fully, then checks containment one by one.
		List<Source> serviceList = getSourcesDetailedInfo(serviceLimit);

		Map<WebService, Map<String, String>> servicesAndMappings =
				new HashMap<WebService, Map<String,String>>();

		Model jenaModel = semanticModel.getJenaModel();
		for (Source service : serviceList) {
			if (!(service instanceof WebService))
				continue;

			edu.isi.karma.rep.model.Model m = ((WebService)service).getInputModel();
			if (m == null) continue;

			Map<String, Map<String, String>> serviceIdsAndMappings =
					m.findInJenaModel(jenaModel, null);

			if (serviceIdsAndMappings == null) continue;

			// Only the first mapping per service is kept.
			Iterator<String> itr = serviceIdsAndMappings.keySet().iterator();
			if (itr.hasNext()) {
				String key = itr.next();
				servicesAndMappings.put((WebService)service, serviceIdsAndMappings.get(key));
			}
		}

		return servicesAndMappings;
	}

	/**
	 * Searches the repository to find the services whose output model is contained in the semantic model parameter.
	 * Note that the services in the return list only include the operations that match the model parameter.
	 * @param semanticModel The input model whose pattern will be searched in the repository
	 * @return a hashmap of all found services and a mapping from the found service parameters to the model parameters.
	 * This help us later to how to join the model's corresponding source and the matched service
	 */
	public Map<WebService, Map<String, String>> getServicesWithOutputContainedInModel(
			edu.isi.karma.rep.model.Model semanticModel,
			Integer serviceLimit) {

		List<Source> serviceList = getSourcesDetailedInfo(serviceLimit);

		Map<WebService, Map<String, String>> servicesAndMappings =
				new HashMap<WebService, Map<String,String>>();

		Model jenaModel = semanticModel.getJenaModel();
		for (Source service : serviceList) {
			if (!(service instanceof WebService))
				continue;

			edu.isi.karma.rep.model.Model m = ((WebService)service).getOutputModel();
			if (m == null) continue;

			Map<String, Map<String, String>> serviceIdsAndMappings =
					m.findInJenaModel(jenaModel, null);

			if (serviceIdsAndMappings == null) continue;

			Iterator<String> itr = serviceIdsAndMappings.keySet().iterator();
			if (itr.hasNext()) {
				String key = itr.next();
				servicesAndMappings.put((WebService)service, serviceIdsAndMappings.get(key));
			}
		}

		return servicesAndMappings;
	}

	/**
	 * From the service model, returns the service object
	 * @param model Jena model holding one service description (its default
	 *              namespace prefix "" is the service URI)
	 * @return fully populated {@link WebService}
	 */
	@Override
	public WebService importSourceFromJenaModel(Model model) {
		logger.debug("model size: " + model.getGraph().size());

		String service_name = "";
		String service_uri = "";
		String service_id = "";
		String service_address = "";
		String service_method = "";

		// service id
		service_uri = model.getNsPrefixURI("");
		logger.debug("service uri: " + service_uri);

		// service local id
		service_id = service_uri.substring(service_uri.lastIndexOf("/") + 1, service_uri.length() - 1);
		logger.debug("service id: " + service_id);

		Property has_address_property = model.getProperty(Namespaces.HRESTS + "hasAddress");
		Property has_name_property = model.getProperty(Namespaces.KARMA + "hasName");
		Property has_method_property = model.getProperty(Namespaces.HRESTS + "hasMethod");
		Property has_input_property = model.getProperty(Namespaces.KARMA + "hasInput");
		Property has_output_property = model.getProperty(Namespaces.KARMA + "hasOutput");

		Resource service_resource = model.getResource(service_uri);

		NodeIterator nodeIterator = null;
		RDFNode node = null;

		// service name (first literal value only)
		nodeIterator = model.listObjectsOfProperty(service_resource, has_name_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isLiteral()) {
			service_name = node.asLiteral().getString();
			logger.debug("service name: " + service_name);
		} else
			logger.debug("service does not have a name.");

		// service address
		nodeIterator = model.listObjectsOfProperty(service_resource, has_address_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isLiteral()) {
			service_address = node.asLiteral().getString();
			logger.debug("service address: " + service_address);
		} else
			logger.debug("service does not have an address.");

		// service method
		nodeIterator = model.listObjectsOfProperty(service_resource, has_method_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isLiteral()) {
			service_method = node.asLiteral().getString();
			logger.debug("service method: " + service_method);
		} else
			logger.debug("service does not have a method.");

		List<String> variables = null;
		List<Attribute> inputAttributes = null;
		List<Attribute> outputAttributes = null;
		edu.isi.karma.rep.model.Model inputModel = null;
		edu.isi.karma.rep.model.Model outputModel = null;

		// service variables
		variables = getVariables(model, service_resource);

		// service input: attributes plus the input semantic model
		nodeIterator = model.listObjectsOfProperty(service_resource, has_input_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isResource()) {
			inputAttributes = getAttributes(model, node.asResource(), IOType.INPUT);
			inputModel = getSemanticModel(model, node.asResource());
		} else
			logger.debug("service does not have an input.");

		// service output: attributes plus the output semantic model
		nodeIterator = model.listObjectsOfProperty(service_resource, has_output_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isResource()) {
			outputAttributes = getAttributes(model, node.asResource(), IOType.OUTPUT );
			outputModel = getSemanticModel(model, node.asResource());
		} else
			logger.info("service does not have an output.");

		WebService service = new WebService(service_id, service_name, service_address);
		service.setMethod(service_method);
		service.setVariables(variables);
		service.setInputAttributes(inputAttributes);
		service.setOutputAttributes(outputAttributes);
		service.setInputModel(inputModel);
		service.setOutputModel(outputModel);

		return service;
	}

	/**
	 * Collects the local names of all karma:hasVariable objects attached to
	 * the service resource.
	 */
	private List<String> getVariables(Model model, Resource service_resource) {

		Property has_variable_property = model.getProperty(Namespaces.KARMA + "hasVariable");

		List<String> variables = new ArrayList<String>();
		NodeIterator nodeIterator = null;
		RDFNode node = null;

		// hasAttribute
		nodeIterator = model.listObjectsOfProperty(service_resource, has_variable_property);
		while ( nodeIterator.hasNext()) {
			node = nodeIterator.next();

			if (!node.isResource()) {
				logger.info("object of the hasAttribute property is not a resource.");
				continue;
			}

			variables.add(node.asResource().getLocalName());
		}
		return variables;
	}

	/**
	 * Reads the attributes attached to an input/output resource, tagging each
	 * with its requirement level (hasAttribute -> NONE,
	 * hasMandatoryAttribute -> MANDATORY, hasOptionalAttribute -> OPTIONAL).
	 */
	private List<Attribute> getAttributes(Model model, Resource io_resource, String ioType) {

		Property has_attribute_property = model.getProperty(Namespaces.KARMA + "hasAttribute");
		Property has_mandatory_attribute_property = model.getProperty(Namespaces.KARMA + "hasMandatoryAttribute");
		Property has_optional_attribute_property = model.getProperty(Namespaces.KARMA + "hasOptionalAttribute");

		List<Attribute> attList = new ArrayList<Attribute>();

		NodeIterator nodeIterator = null;
		RDFNode node = null;

		// hasAttribute
		nodeIterator = model.listObjectsOfProperty(io_resource, has_attribute_property);
		while ( nodeIterator.hasNext()) {
			node = nodeIterator.next();

			if (!node.isResource()) {
				logger.info("object of the hasAttribute property is not a resource.");
				continue;
			}

			attList.add(getAttribute(model, node.asResource(), ioType, AttributeRequirement.NONE));
		}

		// hasMandatoryAttribute
		nodeIterator = model.listObjectsOfProperty(io_resource, has_mandatory_attribute_property);
		while ( nodeIterator.hasNext()) {
			node = nodeIterator.next();

			if (!node.isResource()) {
				logger.info("object of the hasMandatoryAttribute property is not a resource.");
				continue;
			}

			attList.add(getAttribute(model, node.asResource(), ioType, AttributeRequirement.MANDATORY));
		}

		// hasOptionalAttribute
		nodeIterator = model.listObjectsOfProperty(io_resource, has_optional_attribute_property);
		while ( nodeIterator.hasNext()) {
			node = nodeIterator.next();

			if (!node.isResource()) {
				logger.info("object of the hasOptionalAttribute property is not a resource.");
				continue;
			}

			attList.add(getAttribute(model, node.asResource(), ioType, AttributeRequirement.OPTIONAL));
		}

		return attList;
	}

	/**
	 * Builds a single {@link Attribute} from its resource: id from the local
	 * name, optional karma:hasName and optional hrests:isGroundedIn value.
	 */
	private Attribute getAttribute(Model model, Resource att_resource, String ioType, AttributeRequirement requirement) {

		String att_id = "";
		String att_name = "";
		String att_groundedIn = "";

		Property has_name_property = model.getProperty(Namespaces.KARMA + "hasName");
		Property is_gounded_in_property = model.getProperty(Namespaces.HRESTS + "isGroundedIn");

		// attribute id
		att_id = att_resource.getLocalName();
		logger.debug("attribute id: " + att_id);

		NodeIterator nodeIterator = null;
		RDFNode node = null;

		// attribute name
		nodeIterator = model.listObjectsOfProperty(att_resource, has_name_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isLiteral()) {
			att_name = node.asLiteral().getString();
			logger.debug("attribute name: " + att_name);
		} else
			logger.debug("attribute does not have a name.");

		// attribute grounded In
		nodeIterator = model.listObjectsOfProperty(att_resource, is_gounded_in_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isLiteral()) {
			att_groundedIn = node.asLiteral().getString();
			logger.debug("attribute grounded in: " + att_groundedIn);
		} else
			logger.debug("attribute does not have agroundedIn value.");

		// Only pass the groundedIn argument when a value was present.
		Attribute att = null;
		if (att_groundedIn.length() > 0)
			att = new Attribute(att_id, att_resource.getNameSpace(), att_name, ioType, requirement, att_groundedIn );
		else
			att = new Attribute(att_id, att_resource.getNameSpace(), att_name, ioType, requirement);

		return att;
	}

	/**
	 * Reads the karma:hasModel resource of an input/output resource and
	 * converts its karma:hasAtom children into a semantic model. Returns
	 * {@code null} when no model resource is present.
	 */
	private edu.isi.karma.rep.model.Model getSemanticModel(Model model, Resource io_resource) {

		Property has_model_property = model.getProperty(Namespaces.KARMA + "hasModel");
		Property has_atom_property = model.getProperty(Namespaces.KARMA + "hasAtom");

		NodeIterator nodeIterator = null;
		RDFNode modelNode = null;
		RDFNode atomNode = null;

		// hasModel
		nodeIterator = model.listObjectsOfProperty(io_resource, has_model_property);
		if (!nodeIterator.hasNext() || !(modelNode = nodeIterator.next()).isResource()) {
			logger.info("There is no model resource.");
			return null;
		}

		edu.isi.karma.rep.model.Model semanticModel =
				new edu.isi.karma.rep.model.Model(modelNode.asResource().getLocalName());

		List<Atom> atoms = new ArrayList<Atom>();

		// hasAtom — NOTE(review): getAtom may return null (unknown atom
		// type), and that null is added to the list unchecked.
		nodeIterator = model.listObjectsOfProperty(modelNode.asResource(), has_atom_property);
		while ( nodeIterator.hasNext()) {
			atomNode = nodeIterator.next();

			if (!atomNode.isResource()) {
				logger.info("object of the hasAtom property is not a resource.");
				continue;
			}

			atoms.add(getAtom(model, atomNode.asResource()));
		}

		semanticModel.setAtoms(atoms);
		return semanticModel;
	}

	/**
	 * Dispatches on the rdf:type of the atom resource: swrl:ClassAtom or
	 * swrl:IndividualPropertyAtom. Any other type yields {@code null}.
	 */
	private Atom getAtom(Model model, Resource atom_resource) {

		Property rdf_type = model.getProperty(Namespaces.RDF + "type");

		NodeIterator nodeIterator = null;
		RDFNode node = null;

		String classAtomUri = Namespaces.SWRL + "ClassAtom";
		String propertyAtomUri = Namespaces.SWRL + "IndividualPropertyAtom";

		// atom type
		nodeIterator = model.listObjectsOfProperty(atom_resource, rdf_type);
		if (!nodeIterator.hasNext() || !(node = nodeIterator.next()).isResource()) {
			logger.info("The atom type is not specified.");
			return null;
		}

		if (node.asResource().getURI().equalsIgnoreCase(classAtomUri)) {
			logger.debug("The atom is a ClassAtom");
			return getClassAtom(model, atom_resource);
		}
		else if (node.asResource().getURI().equalsIgnoreCase(propertyAtomUri)) {
			logger.debug("The atom is an IndividualPropertyAtom");
			return getPropertyAtom(model, atom_resource);
		}

		return null;
	}

	/**
	 * Builds a unary {@link ClassAtom} from swrl:classPredicate and
	 * swrl:argument1. Returns {@code null} if either is missing.
	 */
	private ClassAtom getClassAtom(Model model, Resource atom_resource) {

		String predicateUri = null;
		String predicatePrefix = null;
		String predicateNs = null;

		String argument1Id = null;
		String argument1Type = null;

		// Marker classes used to classify the argument as attribute/variable.
		Resource attribute = ResourceFactory.createResource(Namespaces.KARMA + "Attribute");
		Resource variable = ResourceFactory.createResource(Namespaces.SWRL + "Variable");

		Property class_predicate_property = model.getProperty(Namespaces.SWRL + "classPredicate");
		Property argument1_property = model.getProperty(Namespaces.SWRL + "argument1");

		NodeIterator nodeIterator = null;
		RDFNode node = null;

		// atom class predicate
		nodeIterator = model.listObjectsOfProperty(atom_resource, class_predicate_property);
		if (!nodeIterator.hasNext() || !(node = nodeIterator.next()).isResource()) {
			logger.info("The class predicate resource is not specified.");
			return null;
		}

		predicateUri = node.asResource().getURI();
		logger.debug("The atom predicate is: " + predicateUri);

		predicateNs = node.asResource().getNameSpace();
		predicatePrefix = model.getNsURIPrefix(predicateNs);

		// atom argument1
		nodeIterator = model.listObjectsOfProperty(atom_resource, argument1_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isResource()) {
			argument1Id = node.asResource().getLocalName();
			logger.debug("The atom argument1 is: " + argument1Id);
			if (isInstanceOfTheClass(node.asResource(), attribute))
				argument1Type = ArgumentType.ATTRIBUTE;
			else if (isInstanceOfTheClass(node.asResource(), variable))
				argument1Type = ArgumentType.VARIABLE;
		} else {
			logger.info("atom does not have an argument1.");
			return null;
		}

		Label predicateName = new Label(predicateUri, predicateNs, predicatePrefix);
		Argument arg1 = new Argument(argument1Id, argument1Id, argument1Type);

		ClassAtom classAtom = new ClassAtom(predicateName, arg1);
		return classAtom;
	}

	/**
	 * Builds a binary {@link IndividualPropertyAtom} from
	 * swrl:propertyPredicate, swrl:argument1 and swrl:argument2. Returns
	 * {@code null} if any of the three is missing.
	 */
	private IndividualPropertyAtom getPropertyAtom(Model model, Resource atom_resource) {

		String predicateUri = null;
		String predicatePrefix = null;
		String predicateNs = null;

		String argument1Id = null;
		String argument2Id = null;
		String argument1Type = null;
		String argument2Type = null;

		Resource attribute = ResourceFactory.createResource(Namespaces.KARMA + "Attribute");
		Resource variable = ResourceFactory.createResource(Namespaces.SWRL + "Variable");

		Property property_predicate_property = model.getProperty(Namespaces.SWRL + "propertyPredicate");
		Property argument1_property = model.getProperty(Namespaces.SWRL + "argument1");
		Property argument2_property = model.getProperty(Namespaces.SWRL + "argument2");

		NodeIterator nodeIterator = null;
		RDFNode node = null;

		// atom class predicate
		nodeIterator = model.listObjectsOfProperty(atom_resource, property_predicate_property);
		if (!nodeIterator.hasNext() || !(node = nodeIterator.next()).isResource()) {
			logger.info("The property predicate resource is not specified.");
			return null;
		}

		predicateUri = node.asResource().getURI();
		logger.debug("The atom predicate is: " + predicateUri);

		predicateNs = node.asResource().getNameSpace();
		predicatePrefix = model.getNsURIPrefix(predicateNs);

		// atom argument1
		nodeIterator = model.listObjectsOfProperty(atom_resource, argument1_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isResource()) {
			argument1Id = node.asResource().getLocalName();
			logger.debug("The atom argument1 is: " + argument1Id);
			if (isInstanceOfTheClass(node.asResource(), attribute))
				argument1Type = ArgumentType.ATTRIBUTE;
			else if (isInstanceOfTheClass(node.asResource(), variable))
				argument1Type = ArgumentType.VARIABLE;
		} else {
			logger.info("atom does not have an argument1.");
			return null;
		}

		// atom argument2
		nodeIterator = model.listObjectsOfProperty(atom_resource, argument2_property);
		if (nodeIterator.hasNext() && (node = nodeIterator.next()).isResource()) {
			argument2Id = node.asResource().getLocalName();
			logger.debug("The atom argument2 is: " + argument2Id);
			if (isInstanceOfTheClass(node.asResource(), attribute))
				argument2Type = ArgumentType.ATTRIBUTE;
			else if (isInstanceOfTheClass(node.asResource(), variable))
				argument2Type = ArgumentType.VARIABLE;
		} else {
			logger.info("atom does not have an argument2.");
			return null;
		}

		Label predicateName = new Label(predicateUri, predicateNs, predicatePrefix);
		Argument arg1 = new Argument(argument1Id, argument1Id, argument1Type);
		Argument arg2 = new Argument(argument2Id, argument2Id, argument2Type);

		IndividualPropertyAtom propertyAtom = new IndividualPropertyAtom(predicateName, arg1, arg2);
		return propertyAtom;
	}

	/**
	 * True when the resource carries an rdf:type triple for
	 * {@code class_resource}.
	 * NOTE(review): a null (or non-resource) input also returns {@code true},
	 * which looks inverted — confirm whether callers rely on this.
	 */
	private boolean isInstanceOfTheClass(Resource resource, Resource class_resource) {
		Property type_property = ResourceFactory.createProperty(Namespaces.RDF + "type");

		if (resource == null || !resource.isResource())
			return true;

		if (resource.hasProperty(type_property, class_resource))
			return true;
		else
			return false;
	}

	// ---- ad-hoc manual test drivers (not wired to any test framework) ----

	/** Manual smoke test: load one service by URI and print it. */
	private static void testGetServiceByUri() {
		String uri = "http://isi.edu/integration/karma/services/CDA81BE4-DD77-E0D3-D033-FC771B2F4800#";
		WebService service = WebServiceLoader.getInstance().getSourceByUri(uri);
		if (service != null) {
//			System.out.println(service.getInputModel().getSPARQLConstructQuery());
//			System.out.println(service.getOutputModel().getSPARQLConstructQuery());
			service.print();
		}
	}

	/** Manual smoke test: load one service by its HTTP address and print it. */
	private static void testGetServiceByAddress() {
		String address = "http://api.geonames.org/";
		WebService service = WebServiceLoader.getInstance().getServiceByAddress(address);
		if (service != null) service.print();
	}

	/** Manual smoke test: print every service in the repository. */
	private static void testGetAllServices() {
		List<Source> serviceList = WebServiceLoader.getInstance().getSourcesDetailedInfo(null);
		for (Source s : serviceList) {
			if (s != null) s.print();
		}
	}

	/**
	 * Manual smoke test: build a small geo pattern (Feature with lat/long)
	 * and print the services whose input model it contains, plus the
	 * parameter mappings.
	 */
	private static void testGetServicesByIOPattern() {
		edu.isi.karma.rep.model.Model semanticModel = new edu.isi.karma.rep.model.Model(null);

//		String geonamesOntology = "http://www.geonames.org/ontology#";
//		String wgs84Ontology = "http://www.w3.org/2003/01/geo/wgs84_pos#";

		String geoOntology = "http://isi.edu/ontologies/geo/current#";

		Label featurePredicatName = new Label(geoOntology + "Feature", geoOntology, "geo");
		Label latPredicatName = new Label(geoOntology + "lat", geoOntology, "geo");
		Label lngPredicatName = new Label(geoOntology + "long", geoOntology, "geo");

		ClassAtom c1 = new ClassAtom(featurePredicatName, new Argument("arg1", "arg1", ArgumentType.ATTRIBUTE));
		IndividualPropertyAtom p1 = new IndividualPropertyAtom(latPredicatName,
				new Argument("arg1", "arg1", ArgumentType.ATTRIBUTE),
				new Argument("arg2", "arg2", ArgumentType.ATTRIBUTE));
		IndividualPropertyAtom p2 = new IndividualPropertyAtom(lngPredicatName,
				new Argument("arg1", "arg1", ArgumentType.ATTRIBUTE),
				new Argument("arg3", "arg3", ArgumentType.ATTRIBUTE));
//		ClassAtom c2 = new ClassAtom(featurePredicatName, new Argument("arg2", "arg2", ArgumentType.ATTRIBUTE));

		semanticModel.getAtoms().add(c1);
//		semanticModel.getAtoms().add(c2);
		semanticModel.getAtoms().add(p1);
		semanticModel.getAtoms().add(p2);

		Map<WebService, Map<String, String>> servicesAndMappings =
				WebServiceLoader.getInstance().getServicesWithInputContainedInModel(semanticModel, null);
				// getServicesByInputPattern(semanticModel, null);

//		Map<Service, Map<String, String>> servicesAndMappings =
//			getServicesByIOPattern(semanticModel, IOType.INPUT, null);

		if (servicesAndMappings == null)
			return;

		for (WebService s : servicesAndMappings.keySet()) {
			if (s != null) System.out.println((s.getUri()));
			//s.print();
		}

		System.out.println("Mappings from matched source to model arguments:");
		for (WebService s : servicesAndMappings.keySet()) {
			System.out.println("Service: " + s.getId());
			if (servicesAndMappings.get(s) == null)
				continue;
			for (String str : servicesAndMappings.get(s).keySet())
				System.out.println(str + "-------" + servicesAndMappings.get(s).get(str));
		}
	}

	// Definition continues past the end of this chunk.
	private static void testDeleteServiceByUri() {
String uri = "http://isi.edu/integration/karma/services/3D579101-2596-2331-53A8-63F949D71C8F#"; WebServiceLoader.getInstance().deleteSourceByUri(uri); } public static void main(String[] args) { // ServiceBuilder.main(new String[0]); boolean test1 = true, test2 = false, test3 = false, test4 = false, test5 = false; if (test1) testGetServiceByUri(); if (test2) testGetServiceByAddress(); if (test3) testGetServicesByIOPattern(); if (test4) testGetAllServices(); if (test5) testDeleteServiceByUri(); } }
/*
 * Copyright (C) 2006 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android_settings;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.Fragment;
import android.app.admin.DevicePolicyManager;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.ProxyProperties;
import android.os.Bundle;
import android.text.Selection;
import android.text.Spannable;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnFocusChangeListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

import com.example.android_settings.SettingsPreferenceFragment.SettingsDialogFragment;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Fragment that lets the user view and edit the device-wide HTTP proxy
 * (hostname, port and exclusion list). Values are read from and written to
 * {@link ConnectivityManager#getGlobalProxy()} / {@code setGlobalProxy(...)}.
 * Editing is disabled while a Device Admin controls the global proxy.
 */
public class ProxySelector extends Fragment implements DialogCreatable {
    private static final String TAG = "ProxySelector";

    // Input fields and action buttons, bound in initView(); package-private so
    // the anonymous listener instances below can reach them.
    EditText    mHostnameField;
    EditText    mPortField;
    EditText    mExclusionListField;
    Button      mOKButton;
    Button      mClearButton;
    Button      mDefaultButton;

    // Matches blank input, ips, and domain names
    private static final String HOSTNAME_REGEXP =
            "^$|^[a-zA-Z0-9]+(\\-[a-zA-Z0-9]+)*(\\.[a-zA-Z0-9]+(\\-[a-zA-Z0-9]+)*)*$";
    private static final Pattern HOSTNAME_PATTERN;
    // NOTE(review): unlike HOSTNAME_REGEXP this one starts with "$|" rather
    // than "^$|". Under Matcher.matches() the whole input must match, so "$"
    // alone still only accepts the empty string and behavior is the same —
    // but confirm the missing "^" is intentional and not a typo.
    private static final String EXCLUSION_REGEXP =
            "$|^[a-zA-Z0-9]+(\\-[a-zA-Z0-9]+)*(\\.[a-zA-Z0-9]+(\\-[a-zA-Z0-9]+)*)*$";
    private static final Pattern EXCLUSION_PATTERN;
    static {
        // Patterns compiled once and reused by validate(); Pattern is thread-safe.
        HOSTNAME_PATTERN = Pattern.compile(HOSTNAME_REGEXP);
        EXCLUSION_PATTERN = Pattern.compile(EXCLUSION_REGEXP);
    }

    // Id for the single (validation-error) dialog managed by this fragment.
    private static final int ERROR_DIALOG_ID = 0;

    private SettingsDialogFragment mDialogFragment;

    // Root view inflated in onCreateView().
    private View mView;

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
    }

    /** Inflates the proxy layout, binds the widgets, and fills in current values. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        mView = inflater.inflate(R.layout.proxy, container, false);
        initView(mView);
        // TODO: Populate based on connection status
        populateFields();
        return mView;
    }

    /** Disables all editing widgets when a Device Admin owns the global proxy. */
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        final DevicePolicyManager dpm =
                (DevicePolicyManager)getActivity().getSystemService(Context.DEVICE_POLICY_SERVICE);

        // No admin component set => the user is free to edit the global proxy.
        final boolean userSetGlobalProxy = (dpm.getGlobalProxyAdmin() == null);
        // Disable UI if the Global Proxy is being controlled by a Device Admin
        mHostnameField.setEnabled(userSetGlobalProxy);
        mPortField.setEnabled(userSetGlobalProxy);
        mExclusionListField.setEnabled(userSetGlobalProxy);
        mOKButton.setEnabled(userSetGlobalProxy);
        mClearButton.setEnabled(userSetGlobalProxy);
        mDefaultButton.setEnabled(userSetGlobalProxy);
    }

    // Dialog management

    /**
     * Builds the validation-error dialog; the message is recomputed from the
     * current field contents via validate().
     */
    @Override
    public Dialog onCreateDialog(int id) {
        if (id == ERROR_DIALOG_ID) {
            String hostname = mHostnameField.getText().toString().trim();
            String portStr = mPortField.getText().toString().trim();
            String exclList = mExclusionListField.getText().toString().trim();
            String msg = getActivity().getString(validate(hostname, portStr, exclList));

            return new AlertDialog.Builder(getActivity())
                    .setTitle(R.string.proxy_error)
                    .setPositiveButton(R.string.proxy_error_dismiss, null)
                    .setMessage(msg)
                    .create();
        }
        return null;
    }

    /** Shows the dialog with the given id via a SettingsDialogFragment. */
    private void showDialog(int dialogId) {
        if (mDialogFragment != null) {
            // NOTE(review): the stale fragment is only logged, not dismissed —
            // confirm that overlapping dialogs cannot occur here.
            Log.e(TAG, "Old dialog fragment not null!");
        }
        mDialogFragment = new SettingsDialogFragment(this, dialogId);
        mDialogFragment.show(getActivity().getFragmentManager(), Integer.toString(dialogId));
    }

    /** Binds widgets from the inflated layout and wires up the listeners. */
    private void initView(View view) {
        mHostnameField = (EditText)view.findViewById(R.id.hostname);
        mHostnameField.setOnFocusChangeListener(mOnFocusChangeHandler);

        mPortField = (EditText)view.findViewById(R.id.port);
        // NOTE(review): a click on the port field triggers the save handler —
        // looks intentional (legacy AOSP behavior) but worth confirming.
        mPortField.setOnClickListener(mOKHandler);
        mPortField.setOnFocusChangeListener(mOnFocusChangeHandler);

        mExclusionListField = (EditText)view.findViewById(R.id.exclusionlist);
        mExclusionListField.setOnFocusChangeListener(mOnFocusChangeHandler);

        mOKButton = (Button)view.findViewById(R.id.action);
        mOKButton.setOnClickListener(mOKHandler);

        mClearButton = (Button)view.findViewById(R.id.clear);
        mClearButton.setOnClickListener(mClearHandler);

        mDefaultButton = (Button)view.findViewById(R.id.defaultView);
        mDefaultButton.setOnClickListener(mDefaultHandler);
    }

    /**
     * Fills the fields from the current global proxy setting (empty when none)
     * and applies the optional "button-label"/"title" extras from the launching
     * intent.
     */
    void populateFields() {
        final Activity activity = getActivity();
        String hostname = "";
        int port = -1;
        String exclList = "";
        // Use the last setting given by the user
        ConnectivityManager cm =
                (ConnectivityManager)getActivity().getSystemService(Context.CONNECTIVITY_SERVICE);
        ProxyProperties proxy = cm.getGlobalProxy();
        if (proxy != null) {
            hostname = proxy.getHost();
            port = proxy.getPort();
            exclList = proxy.getExclusionList();
        }

        if (hostname == null) {
            hostname = "";
        }

        mHostnameField.setText(hostname);

        // -1 is the "no port" sentinel used above.
        String portStr = port == -1 ? "" : Integer.toString(port);
        mPortField.setText(portStr);

        mExclusionListField.setText(exclList);

        final Intent intent = activity.getIntent();

        String buttonLabel = intent.getStringExtra("button-label");
        if (!TextUtils.isEmpty(buttonLabel)) {
            mOKButton.setText(buttonLabel);
        }

        String title = intent.getStringExtra("title");
        if (!TextUtils.isEmpty(title)) {
            activity.setTitle(title);
        }
    }

    /**
     * validate syntax of hostname and port entries
     * @param hostname proxy host (may be empty)
     * @param port port as typed by the user (may be empty)
     * @param exclList comma-separated exclusion list (may be empty)
     * @return 0 on success, string resource ID on failure
     */
    public static int validate(String hostname, String port, String exclList) {
        Matcher match = HOSTNAME_PATTERN.matcher(hostname);
        String exclListArray[] = exclList.split(",");

        if (!match.matches()) return R.string.proxy_error_invalid_host;

        for (String excl : exclListArray) {
            Matcher m = EXCLUSION_PATTERN.matcher(excl);
            if (!m.matches()) return R.string.proxy_error_invalid_exclusion_list;
        }

        // A host requires a port and vice versa.
        if (hostname.length() > 0 && port.length() == 0) {
            return R.string.proxy_error_empty_port;
        }

        if (port.length() > 0) {
            if (hostname.length() == 0) {
                return R.string.proxy_error_empty_host_set_port;
            }
            int portVal = -1;
            try {
                portVal = Integer.parseInt(port);
            } catch (NumberFormatException ex) {
                return R.string.proxy_error_invalid_port;
            }
            // Valid TCP port range is 1..65535.
            if (portVal <= 0 || portVal > 0xFFFF) {
                return R.string.proxy_error_invalid_port;
            }
        }
        return 0;
    }

    /**
     * returns true on success, false if the user must correct something
     */
    boolean saveToDb() {

        String hostname = mHostnameField.getText().toString().trim();
        String portStr = mPortField.getText().toString().trim();
        String exclList = mExclusionListField.getText().toString().trim();
        int port = 0;

        int result = validate(hostname, portStr, exclList);
        if (result > 0) {
            // Invalid input: surface the validation message and abort the save.
            showDialog(ERROR_DIALOG_ID);
            return false;
        }

        if (portStr.length() > 0) {
            try {
                port = Integer.parseInt(portStr);
            } catch (NumberFormatException ex) {
                // should never happen - caught by validate above
                return false;
            }
        }
        ProxyProperties p = new ProxyProperties(hostname, port, exclList);
        // FIXME: The best solution would be to make a better UI that would
        // disable editing of the text boxes if the user chooses to use the
        // default settings. i.e. checking a box to always use the default
        // carrier. http:/b/issue?id=756480
        // FIXME: If the user types in a proxy that matches the default, should
        // we keep that setting? Can be fixed with a new UI.

        ConnectivityManager cm =
                (ConnectivityManager)getActivity().getSystemService(Context.CONNECTIVITY_SERVICE);

        cm.setGlobalProxy(p);
        return true;
    }

    // Saves and leaves the screen when the input validates.
    OnClickListener mOKHandler = new OnClickListener() {
        @Override
        public void onClick(View v) {
            if (saveToDb()) {
                getActivity().onBackPressed();
            }
        }
    };

    // Blanks all three input fields (does not touch the saved proxy).
    OnClickListener mClearHandler = new OnClickListener() {
        @Override
        public void onClick(View v) {
            mHostnameField.setText("");
            mPortField.setText("");
            mExclusionListField.setText("");
        }
    };

    // Re-reads the current global proxy back into the fields.
    OnClickListener mDefaultHandler = new OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO: populate based on connection status
            populateFields();
        }
    };

    // Select-all on focus so typing replaces the existing value.
    OnFocusChangeListener mOnFocusChangeHandler = new OnFocusChangeListener() {
        @Override
        public void onFocusChange(View v, boolean hasFocus) {
            if (hasFocus) {
                TextView textView = (TextView) v;
                Selection.selectAll((Spannable) textView.getText());
            }
        }
    };
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.cassandra.gms;

import java.io.*;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.util.*;

import javax.management.MBeanServer;
import javax.management.ObjectName;

import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.utils.BoundedStatsDeque;
import org.apache.cassandra.utils.FBUtilities;

/**
 * This FailureDetector is an implementation of the paper titled
 * "The Phi Accrual Failure Detector" by Hayashibara.
 * Check the paper and the <i>IFailureDetector</i> interface for details.
 */
public class FailureDetector implements IFailureDetector, FailureDetectorMBean
{
    public static final IFailureDetector instance = new FailureDetector();
    private static Logger logger_ = LoggerFactory.getLogger(FailureDetector.class);
    // Maximum number of inter-arrival samples retained per endpoint.
    private static final int sampleSize_ = 1000;
    // NOTE(review): static, but initialized by the instance constructor and
    // mutated by the instance setter below — confirm single-instance usage
    // (only the `instance` singleton) is intended.
    private static int phiConvictThreshold_;

    // Per-endpoint heartbeat arrival windows. Hashtable gives coarse
    // synchronization on individual get/put calls.
    private Map<InetAddress, ArrivalWindow> arrivalSamples_ = new Hashtable<InetAddress, ArrivalWindow>();
    private List<IFailureDetectionEventListener> fdEvntListeners_ = new ArrayList<IFailureDetectionEventListener>();

    public FailureDetector()
    {
        phiConvictThreshold_ = DatabaseDescriptor.getPhiConvictThreshold();
        // Register this instance with JMX
        try
        {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            mbs.registerMBean(this, new ObjectName("org.apache.cassandra.net:type=FailureDetector"));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Dumps every endpoint known to the gossiper together with its
     * application-state key/value pairs; exposed through JMX.
     */
    public String getAllEndpointStates()
    {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<InetAddress, EndpointState> entry : Gossiper.instance.endpointStateMap.entrySet())
        {
            sb.append(entry.getKey()).append("\n");
            for (Map.Entry<ApplicationState, VersionedValue> state : entry.getValue().applicationState.entrySet())
                sb.append("  ").append(state.getKey()).append(":").append(state.getValue().value).append("\n");
        }
        return sb.toString();
    }

    /**
     * Dump the inter arrival times for examination if necessary.
     */
    public void dumpInterArrivalTimes()
    {
        // Appends this detector's toString() to a fresh temp file.
        OutputStream os = null;
        try
        {
            File file = File.createTempFile("failuredetector-", ".dat");
            os = new BufferedOutputStream(new FileOutputStream(file, true));
            os.write(toString().getBytes());
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
        finally
        {
            FileUtils.closeQuietly(os);
        }
    }

    // JMX setter: raises/lowers the conviction threshold at runtime.
    public void setPhiConvictThreshold(int phi)
    {
        phiConvictThreshold_ = phi;
    }

    public int getPhiConvictThreshold()
    {
        return phiConvictThreshold_;
    }

    /**
     * Liveness check. The local node is always alive; unknown endpoints are
     * logged and treated as dead rather than asserted on.
     */
    public boolean isAlive(InetAddress ep)
    {
        if (ep.equals(FBUtilities.getLocalAddress()))
            return true;

        EndpointState epState = Gossiper.instance.getEndpointStateForEndpoint(ep);
        // we could assert not-null, but having isAlive fail screws a node over so badly that
        // it's worth being defensive here so minor bugs don't cause disproportionate
        // badness.  (See CASSANDRA-1463 for an example).
        if (epState == null)
            logger_.error("unknown endpoint " + ep);
        return epState != null && epState.isAlive();
    }

    /** Records a heartbeat arrival (now) for the endpoint, creating its window lazily. */
    public void report(InetAddress ep)
    {
        if (logger_.isTraceEnabled())
            logger_.trace("reporting {}", ep);
        long now = System.currentTimeMillis();
        ArrivalWindow heartbeatWindow = arrivalSamples_.get(ep);
        if ( heartbeatWindow == null )
        {
            heartbeatWindow = new ArrivalWindow(sampleSize_);
            arrivalSamples_.put(ep, heartbeatWindow);
        }
        heartbeatWindow.add(now);
    }

    /**
     * Computes phi for the endpoint and, when it exceeds the conviction
     * threshold, notifies every registered listener to convict it.
     */
    public void interpret(InetAddress ep)
    {
        ArrivalWindow hbWnd = arrivalSamples_.get(ep);
        if ( hbWnd == null )
        {
            return;
        }
        long now = System.currentTimeMillis();
        double phi = hbWnd.phi(now);
        if (logger_.isTraceEnabled())
            logger_.trace("PHI for " + ep + " : " + phi);
        if ( phi > phiConvictThreshold_ )
        {
            for ( IFailureDetectionEventListener listener : fdEvntListeners_ )
            {
                listener.convict(ep);
            }
        }
    }

    // Drops all samples for an endpoint (e.g. when it leaves the ring).
    public void remove(InetAddress ep)
    {
        arrivalSamples_.remove(ep);
    }

    public void registerFailureDetectionEventListener(IFailureDetectionEventListener listener)
    {
        fdEvntListeners_.add(listener);
    }

    public void unregisterFailureDetectionEventListener(IFailureDetectionEventListener listener)
    {
        fdEvntListeners_.remove(listener);
    }

    /** One line per endpoint: address followed by its arrival-interval samples. */
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        Set<InetAddress> eps = arrivalSamples_.keySet();

        sb.append("-----------------------------------------------------------------------");
        for ( InetAddress ep : eps )
        {
            ArrivalWindow hWnd = arrivalSamples_.get(ep);
            sb.append(ep + " : ");
            sb.append(hWnd.toString());
            sb.append( System.getProperty("line.separator") );
        }
        sb.append("-----------------------------------------------------------------------");
        return sb.toString();
    }

    public static void main(String[] args) throws Throwable
    {
    }
}

/**
 * Bounded window of heartbeat inter-arrival times for one endpoint, from
 * which the phi suspicion level is derived.
 */
class ArrivalWindow
{
    private static Logger logger_ = LoggerFactory.getLogger(ArrivalWindow.class);
    // Timestamp (ms) of the most recent arrival; 0 means "no arrival yet".
    private double tLast_ = 0L;
    private BoundedStatsDeque arrivalIntervals_;

    ArrivalWindow(int size)
    {
        arrivalIntervals_ = new BoundedStatsDeque(size);
    }

    /**
     * Records an arrival at time `value` (ms). The first arrival has no
     * predecessor, so a bootstrap interval of half the gossip interval is used.
     */
    synchronized void add(double value)
    {
        double interArrivalTime;
        if ( tLast_ > 0L )
        {
            interArrivalTime = (value - tLast_);
        }
        else
        {
            // NOTE(review): integer division before widening to double —
            // confirm truncation of an odd intervalInMillis is acceptable.
            interArrivalTime = Gossiper.intervalInMillis / 2;
        }
        tLast_ = value;
        arrivalIntervals_.add(interArrivalTime);
    }

    synchronized double sum()
    {
        return arrivalIntervals_.sum();
    }

    synchronized double sumOfDeviations()
    {
        return arrivalIntervals_.sumOfDeviations();
    }

    synchronized double mean()
    {
        return arrivalIntervals_.mean();
    }

    synchronized double variance()
    {
        return arrivalIntervals_.variance();
    }

    // NOTE(review): unlike the accessors above, stdev() and clear() are not
    // synchronized — confirm callers hold their own synchronization.
    double stdev()
    {
        return arrivalIntervals_.stdev();
    }

    void clear()
    {
        arrivalIntervals_.clear();
    }

    // Probability that an interval of at least t elapses, assuming an
    // exponential distribution with the observed mean: e^(-t/mean).
    double p(double t)
    {
        double mean = mean();
        double exponent = (-1)*(t)/mean;
        return Math.pow(Math.E, exponent);
    }

    /**
     * Phi at time tnow: -log10 of the probability that the silence since the
     * last heartbeat is consistent with the observed arrival distribution.
     * Returns 0 while no samples have been collected.
     */
    double phi(long tnow)
    {
        int size = arrivalIntervals_.size();
        double log = 0d;
        if ( size > 0 )
        {
            double t = tnow - tLast_;
            double probability = p(t);
            log = (-1) * Math.log10( probability );
        }
        return log;
    }

    public String toString()
    {
        return StringUtils.join(arrivalIntervals_.iterator(), " ");
    }
}
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.financial.security.option;

import java.util.Map;

import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;

import com.opengamma.financial.security.FinancialSecurity;
import com.opengamma.financial.security.FinancialSecurityVisitor;
import com.opengamma.id.ExternalId;
import com.opengamma.master.security.SecurityDescription;
import com.opengamma.util.money.Currency;
import com.opengamma.util.time.Expiry;

/**
 * OG-Financial representation of a bond future option.
 */
@BeanDefinition
@SecurityDescription(type = BondFutureOptionSecurity.SECURITY_TYPE, description = "Bond future option")
public class BondFutureOptionSecurity extends FinancialSecurity {

  /**
   * The security type.
   */
  public static final String SECURITY_TYPE = "BONDFUTURE_OPTION";

  /** Serialization version. */
  private static final long serialVersionUID = 1L;

  /**
   * The exchange.
   */
  @PropertyDefinition(validate = "notNull")
  private String _tradingExchange;
  /**
   * The settlement exchange.
   */
  @PropertyDefinition(validate = "notNull")
  private String _settlementExchange;
  /**
   * The expiry.
   */
  @PropertyDefinition(validate = "notNull")
  private Expiry _expiry;
  /**
   * The exercise type.
   */
  @PropertyDefinition(validate = "notNull")
  private ExerciseType _exerciseType;
  /**
   * The underlying identifier.
   */
  @PropertyDefinition(validate = "notNull")
  private ExternalId _underlyingId;
  /**
   * The point value.
   */
  @PropertyDefinition
  private double _pointValue;
  /**
   * The margined flag.
   */
  @PropertyDefinition
  private boolean _margined;
  /**
   * The currency.
   */
  @PropertyDefinition(validate = "notNull")
  private Currency _currency;
  /**
   * The strike.
   */
  @PropertyDefinition
  private double _strike;
  /**
   * The option type.
   */
  @PropertyDefinition(validate = "notNull")
  private OptionType _optionType;

  BondFutureOptionSecurity() { //For builder
    super(SECURITY_TYPE);
  }

  /**
   * Constructs a bond future option.
   * @param tradingExchange the trading exchange, not null.
   * @param settlementExchange the settlement exchange, not null.
   * @param expiry the expiry, not null.
   * @param exerciseType the exercise type, not null.
   * @param underlyingIdentifier the underlying bond future id, not null.
   * @param pointValue the point value.
   * @param margined whether the option is margined or not.
   * @param currency the currency, not null.
   * @param strike the strike.
   * @param optionType the option type, not null.
   */
  public BondFutureOptionSecurity(String tradingExchange, String settlementExchange, Expiry expiry,
      ExerciseType exerciseType, ExternalId underlyingIdentifier, double pointValue, boolean margined,
      Currency currency, double strike, OptionType optionType) {
    super(SECURITY_TYPE);
    setTradingExchange(tradingExchange);
    setSettlementExchange(settlementExchange);
    setExpiry(expiry);
    setExerciseType(exerciseType);
    setUnderlyingId(underlyingIdentifier);
    setPointValue(pointValue);
    setMargined(margined);
    setCurrency(currency);
    setStrike(strike);
    setOptionType(optionType);
  }

  // Visitor dispatch for the FinancialSecurity hierarchy.
  @Override
  public <T> T accept(FinancialSecurityVisitor<T> visitor) {
    return visitor.visitBondFutureOptionSecurity(this);
  }

  //------------------------- AUTOGENERATED START -------------------------
  // NOTE(review): everything below this marker is generated by Joda-Beans.
  // To change it, edit the @PropertyDefinition fields above and regenerate;
  // manual edits here will be lost.
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code BondFutureOptionSecurity}.
   * @return the meta-bean, not null
   */
  public static BondFutureOptionSecurity.Meta meta() {
    return BondFutureOptionSecurity.Meta.INSTANCE;
  }

  static {
    JodaBeanUtils.registerMetaBean(BondFutureOptionSecurity.Meta.INSTANCE);
  }

  @Override
  public BondFutureOptionSecurity.Meta metaBean() {
    return BondFutureOptionSecurity.Meta.INSTANCE;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the exchange.
   * @return the value of the property, not null
   */
  public String getTradingExchange() {
    return _tradingExchange;
  }

  /**
   * Sets the exchange.
   * @param tradingExchange  the new value of the property, not null
   */
  public void setTradingExchange(String tradingExchange) {
    JodaBeanUtils.notNull(tradingExchange, "tradingExchange");
    this._tradingExchange = tradingExchange;
  }

  /**
   * Gets the the {@code tradingExchange} property.
   * @return the property, not null
   */
  public final Property<String> tradingExchange() {
    return metaBean().tradingExchange().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the settlement exchange.
   * @return the value of the property, not null
   */
  public String getSettlementExchange() {
    return _settlementExchange;
  }

  /**
   * Sets the settlement exchange.
   * @param settlementExchange  the new value of the property, not null
   */
  public void setSettlementExchange(String settlementExchange) {
    JodaBeanUtils.notNull(settlementExchange, "settlementExchange");
    this._settlementExchange = settlementExchange;
  }

  /**
   * Gets the the {@code settlementExchange} property.
   * @return the property, not null
   */
  public final Property<String> settlementExchange() {
    return metaBean().settlementExchange().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the expiry.
   * @return the value of the property, not null
   */
  public Expiry getExpiry() {
    return _expiry;
  }

  /**
   * Sets the expiry.
   * @param expiry  the new value of the property, not null
   */
  public void setExpiry(Expiry expiry) {
    JodaBeanUtils.notNull(expiry, "expiry");
    this._expiry = expiry;
  }

  /**
   * Gets the the {@code expiry} property.
   * @return the property, not null
   */
  public final Property<Expiry> expiry() {
    return metaBean().expiry().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the exercise type.
   * @return the value of the property, not null
   */
  public ExerciseType getExerciseType() {
    return _exerciseType;
  }

  /**
   * Sets the exercise type.
   * @param exerciseType  the new value of the property, not null
   */
  public void setExerciseType(ExerciseType exerciseType) {
    JodaBeanUtils.notNull(exerciseType, "exerciseType");
    this._exerciseType = exerciseType;
  }

  /**
   * Gets the the {@code exerciseType} property.
   * @return the property, not null
   */
  public final Property<ExerciseType> exerciseType() {
    return metaBean().exerciseType().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the underlying identifier.
   * @return the value of the property, not null
   */
  public ExternalId getUnderlyingId() {
    return _underlyingId;
  }

  /**
   * Sets the underlying identifier.
   * @param underlyingId  the new value of the property, not null
   */
  public void setUnderlyingId(ExternalId underlyingId) {
    JodaBeanUtils.notNull(underlyingId, "underlyingId");
    this._underlyingId = underlyingId;
  }

  /**
   * Gets the the {@code underlyingId} property.
   * @return the property, not null
   */
  public final Property<ExternalId> underlyingId() {
    return metaBean().underlyingId().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the point value.
   * @return the value of the property
   */
  public double getPointValue() {
    return _pointValue;
  }

  /**
   * Sets the point value.
   * @param pointValue  the new value of the property
   */
  public void setPointValue(double pointValue) {
    this._pointValue = pointValue;
  }

  /**
   * Gets the the {@code pointValue} property.
   * @return the property, not null
   */
  public final Property<Double> pointValue() {
    return metaBean().pointValue().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the margined flag.
   * @return the value of the property
   */
  public boolean isMargined() {
    return _margined;
  }

  /**
   * Sets the margined flag.
   * @param margined  the new value of the property
   */
  public void setMargined(boolean margined) {
    this._margined = margined;
  }

  /**
   * Gets the the {@code margined} property.
   * @return the property, not null
   */
  public final Property<Boolean> margined() {
    return metaBean().margined().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the currency.
   * @return the value of the property, not null
   */
  public Currency getCurrency() {
    return _currency;
  }

  /**
   * Sets the currency.
   * @param currency  the new value of the property, not null
   */
  public void setCurrency(Currency currency) {
    JodaBeanUtils.notNull(currency, "currency");
    this._currency = currency;
  }

  /**
   * Gets the the {@code currency} property.
   * @return the property, not null
   */
  public final Property<Currency> currency() {
    return metaBean().currency().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the strike.
   * @return the value of the property
   */
  public double getStrike() {
    return _strike;
  }

  /**
   * Sets the strike.
   * @param strike  the new value of the property
   */
  public void setStrike(double strike) {
    this._strike = strike;
  }

  /**
   * Gets the the {@code strike} property.
   * @return the property, not null
   */
  public final Property<Double> strike() {
    return metaBean().strike().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the option type.
   * @return the value of the property, not null
   */
  public OptionType getOptionType() {
    return _optionType;
  }

  /**
   * Sets the option type.
   * @param optionType  the new value of the property, not null
   */
  public void setOptionType(OptionType optionType) {
    JodaBeanUtils.notNull(optionType, "optionType");
    this._optionType = optionType;
  }

  /**
   * Gets the the {@code optionType} property.
   * @return the property, not null
   */
  public final Property<OptionType> optionType() {
    return metaBean().optionType().createProperty(this);
  }

  //-----------------------------------------------------------------------
  @Override
  public BondFutureOptionSecurity clone() {
    return JodaBeanUtils.cloneAlways(this);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      BondFutureOptionSecurity other = (BondFutureOptionSecurity) obj;
      return JodaBeanUtils.equal(getTradingExchange(), other.getTradingExchange()) &&
          JodaBeanUtils.equal(getSettlementExchange(), other.getSettlementExchange()) &&
          JodaBeanUtils.equal(getExpiry(), other.getExpiry()) &&
          JodaBeanUtils.equal(getExerciseType(), other.getExerciseType()) &&
          JodaBeanUtils.equal(getUnderlyingId(), other.getUnderlyingId()) &&
          JodaBeanUtils.equal(getPointValue(), other.getPointValue()) &&
          (isMargined() == other.isMargined()) &&
          JodaBeanUtils.equal(getCurrency(), other.getCurrency()) &&
          JodaBeanUtils.equal(getStrike(), other.getStrike()) &&
          JodaBeanUtils.equal(getOptionType(), other.getOptionType()) &&
          super.equals(obj);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = 7;
    hash = hash * 31 + JodaBeanUtils.hashCode(getTradingExchange());
    hash = hash * 31 + JodaBeanUtils.hashCode(getSettlementExchange());
    hash = hash * 31 + JodaBeanUtils.hashCode(getExpiry());
    hash = hash * 31 + JodaBeanUtils.hashCode(getExerciseType());
    hash = hash * 31 + JodaBeanUtils.hashCode(getUnderlyingId());
    hash = hash * 31 + JodaBeanUtils.hashCode(getPointValue());
    hash = hash * 31 + JodaBeanUtils.hashCode(isMargined());
    hash = hash * 31 + JodaBeanUtils.hashCode(getCurrency());
    hash = hash * 31 + JodaBeanUtils.hashCode(getStrike());
    hash = hash * 31 + JodaBeanUtils.hashCode(getOptionType());
    return hash ^ super.hashCode();
  }

  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(352);
    buf.append("BondFutureOptionSecurity{");
    int len = buf.length();
    toString(buf);
    if (buf.length() > len) {
      buf.setLength(buf.length() - 2);
    }
    buf.append('}');
    return buf.toString();
  }

  @Override
  protected void toString(StringBuilder buf) {
    super.toString(buf);
    buf.append("tradingExchange").append('=').append(JodaBeanUtils.toString(getTradingExchange())).append(',').append(' ');
    buf.append("settlementExchange").append('=').append(JodaBeanUtils.toString(getSettlementExchange())).append(',').append(' ');
    buf.append("expiry").append('=').append(JodaBeanUtils.toString(getExpiry())).append(',').append(' ');
    buf.append("exerciseType").append('=').append(JodaBeanUtils.toString(getExerciseType())).append(',').append(' ');
    buf.append("underlyingId").append('=').append(JodaBeanUtils.toString(getUnderlyingId())).append(',').append(' ');
    buf.append("pointValue").append('=').append(JodaBeanUtils.toString(getPointValue())).append(',').append(' ');
    buf.append("margined").append('=').append(JodaBeanUtils.toString(isMargined())).append(',').append(' ');
    buf.append("currency").append('=').append(JodaBeanUtils.toString(getCurrency())).append(',').append(' ');
    buf.append("strike").append('=').append(JodaBeanUtils.toString(getStrike())).append(',').append(' ');
    buf.append("optionType").append('=').append(JodaBeanUtils.toString(getOptionType())).append(',').append(' ');
  }
//----------------------------------------------------------------------- /** * The meta-bean for {@code BondFutureOptionSecurity}. */ public static class Meta extends FinancialSecurity.Meta { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code tradingExchange} property. */ private final MetaProperty<String> _tradingExchange = DirectMetaProperty.ofReadWrite( this, "tradingExchange", BondFutureOptionSecurity.class, String.class); /** * The meta-property for the {@code settlementExchange} property. */ private final MetaProperty<String> _settlementExchange = DirectMetaProperty.ofReadWrite( this, "settlementExchange", BondFutureOptionSecurity.class, String.class); /** * The meta-property for the {@code expiry} property. */ private final MetaProperty<Expiry> _expiry = DirectMetaProperty.ofReadWrite( this, "expiry", BondFutureOptionSecurity.class, Expiry.class); /** * The meta-property for the {@code exerciseType} property. */ private final MetaProperty<ExerciseType> _exerciseType = DirectMetaProperty.ofReadWrite( this, "exerciseType", BondFutureOptionSecurity.class, ExerciseType.class); /** * The meta-property for the {@code underlyingId} property. */ private final MetaProperty<ExternalId> _underlyingId = DirectMetaProperty.ofReadWrite( this, "underlyingId", BondFutureOptionSecurity.class, ExternalId.class); /** * The meta-property for the {@code pointValue} property. */ private final MetaProperty<Double> _pointValue = DirectMetaProperty.ofReadWrite( this, "pointValue", BondFutureOptionSecurity.class, Double.TYPE); /** * The meta-property for the {@code margined} property. */ private final MetaProperty<Boolean> _margined = DirectMetaProperty.ofReadWrite( this, "margined", BondFutureOptionSecurity.class, Boolean.TYPE); /** * The meta-property for the {@code currency} property. 
*/ private final MetaProperty<Currency> _currency = DirectMetaProperty.ofReadWrite( this, "currency", BondFutureOptionSecurity.class, Currency.class); /** * The meta-property for the {@code strike} property. */ private final MetaProperty<Double> _strike = DirectMetaProperty.ofReadWrite( this, "strike", BondFutureOptionSecurity.class, Double.TYPE); /** * The meta-property for the {@code optionType} property. */ private final MetaProperty<OptionType> _optionType = DirectMetaProperty.ofReadWrite( this, "optionType", BondFutureOptionSecurity.class, OptionType.class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( this, (DirectMetaPropertyMap) super.metaPropertyMap(), "tradingExchange", "settlementExchange", "expiry", "exerciseType", "underlyingId", "pointValue", "margined", "currency", "strike", "optionType"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case -661485980: // tradingExchange return _tradingExchange; case 389497452: // settlementExchange return _settlementExchange; case -1289159373: // expiry return _expiry; case -466331342: // exerciseType return _exerciseType; case -771625640: // underlyingId return _underlyingId; case 1257391553: // pointValue return _pointValue; case 243392205: // margined return _margined; case 575402001: // currency return _currency; case -891985998: // strike return _strike; case 1373587791: // optionType return _optionType; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends BondFutureOptionSecurity> builder() { return new DirectBeanBuilder<BondFutureOptionSecurity>(new BondFutureOptionSecurity()); } @Override public Class<? 
extends BondFutureOptionSecurity> beanType() { return BondFutureOptionSecurity.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return _metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code tradingExchange} property. * @return the meta-property, not null */ public final MetaProperty<String> tradingExchange() { return _tradingExchange; } /** * The meta-property for the {@code settlementExchange} property. * @return the meta-property, not null */ public final MetaProperty<String> settlementExchange() { return _settlementExchange; } /** * The meta-property for the {@code expiry} property. * @return the meta-property, not null */ public final MetaProperty<Expiry> expiry() { return _expiry; } /** * The meta-property for the {@code exerciseType} property. * @return the meta-property, not null */ public final MetaProperty<ExerciseType> exerciseType() { return _exerciseType; } /** * The meta-property for the {@code underlyingId} property. * @return the meta-property, not null */ public final MetaProperty<ExternalId> underlyingId() { return _underlyingId; } /** * The meta-property for the {@code pointValue} property. * @return the meta-property, not null */ public final MetaProperty<Double> pointValue() { return _pointValue; } /** * The meta-property for the {@code margined} property. * @return the meta-property, not null */ public final MetaProperty<Boolean> margined() { return _margined; } /** * The meta-property for the {@code currency} property. * @return the meta-property, not null */ public final MetaProperty<Currency> currency() { return _currency; } /** * The meta-property for the {@code strike} property. * @return the meta-property, not null */ public final MetaProperty<Double> strike() { return _strike; } /** * The meta-property for the {@code optionType} property. 
* @return the meta-property, not null */ public final MetaProperty<OptionType> optionType() { return _optionType; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case -661485980: // tradingExchange return ((BondFutureOptionSecurity) bean).getTradingExchange(); case 389497452: // settlementExchange return ((BondFutureOptionSecurity) bean).getSettlementExchange(); case -1289159373: // expiry return ((BondFutureOptionSecurity) bean).getExpiry(); case -466331342: // exerciseType return ((BondFutureOptionSecurity) bean).getExerciseType(); case -771625640: // underlyingId return ((BondFutureOptionSecurity) bean).getUnderlyingId(); case 1257391553: // pointValue return ((BondFutureOptionSecurity) bean).getPointValue(); case 243392205: // margined return ((BondFutureOptionSecurity) bean).isMargined(); case 575402001: // currency return ((BondFutureOptionSecurity) bean).getCurrency(); case -891985998: // strike return ((BondFutureOptionSecurity) bean).getStrike(); case 1373587791: // optionType return ((BondFutureOptionSecurity) bean).getOptionType(); } return super.propertyGet(bean, propertyName, quiet); } @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { switch (propertyName.hashCode()) { case -661485980: // tradingExchange ((BondFutureOptionSecurity) bean).setTradingExchange((String) newValue); return; case 389497452: // settlementExchange ((BondFutureOptionSecurity) bean).setSettlementExchange((String) newValue); return; case -1289159373: // expiry ((BondFutureOptionSecurity) bean).setExpiry((Expiry) newValue); return; case -466331342: // exerciseType ((BondFutureOptionSecurity) bean).setExerciseType((ExerciseType) newValue); return; case -771625640: // underlyingId ((BondFutureOptionSecurity) bean).setUnderlyingId((ExternalId) newValue); return; case 1257391553: // 
pointValue ((BondFutureOptionSecurity) bean).setPointValue((Double) newValue); return; case 243392205: // margined ((BondFutureOptionSecurity) bean).setMargined((Boolean) newValue); return; case 575402001: // currency ((BondFutureOptionSecurity) bean).setCurrency((Currency) newValue); return; case -891985998: // strike ((BondFutureOptionSecurity) bean).setStrike((Double) newValue); return; case 1373587791: // optionType ((BondFutureOptionSecurity) bean).setOptionType((OptionType) newValue); return; } super.propertySet(bean, propertyName, newValue, quiet); } @Override protected void validate(Bean bean) { JodaBeanUtils.notNull(((BondFutureOptionSecurity) bean)._tradingExchange, "tradingExchange"); JodaBeanUtils.notNull(((BondFutureOptionSecurity) bean)._settlementExchange, "settlementExchange"); JodaBeanUtils.notNull(((BondFutureOptionSecurity) bean)._expiry, "expiry"); JodaBeanUtils.notNull(((BondFutureOptionSecurity) bean)._exerciseType, "exerciseType"); JodaBeanUtils.notNull(((BondFutureOptionSecurity) bean)._underlyingId, "underlyingId"); JodaBeanUtils.notNull(((BondFutureOptionSecurity) bean)._currency, "currency"); JodaBeanUtils.notNull(((BondFutureOptionSecurity) bean)._optionType, "optionType"); super.validate(bean); } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
package org.apache.maven.wrapper; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStream; import java.net.URI; import java.util.Properties; import org.apache.commons.io.IOUtils; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; public class WrapperExecutorTest { private final Installer install; private final BootstrapMainStarter start; private File propertiesFile; private Properties properties = new Properties(); private File testDir = new File("target/test-files/SystemPropertiesHandlerTest-" + System.currentTimeMillis()); private File mockInstallDir = new File(testDir, "mock-dir"); public WrapperExecutorTest() throws Exception { install = mock(Installer.class); when(install.createDist(Mockito.any(WrapperConfiguration.class))).thenReturn(mockInstallDir); start = mock(BootstrapMainStarter.class); testDir.mkdirs(); propertiesFile = new File(testDir, "maven/wrapper/maven-wrapper.properties"); properties.put("distributionUrl", "http://server/test/maven.zip"); properties.put("distributionBase", "testDistBase"); properties.put("distributionPath", "testDistPath"); properties.put("zipStoreBase", "testZipBase"); properties.put("zipStorePath", "testZipPath"); properties.put("verifyDownload", Boolean.TRUE.toString()); properties.put("checksumAlgorithm", Checksum.MD5.toString()); properties.put("checksumUrl", "http://server/test/maven.zip.md5"); writePropertiesFile(properties, propertiesFile, "header"); } @Test public void loadWrapperMetadataFromFile() throws Exception { WrapperExecutor wrapper = WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.assertEquals(1, wrapper.getDistributionUris().size()); Assert.assertEquals(new URI("http://server/test/maven.zip"), wrapper.getDistributionUris().get(0)); Assert.assertEquals(1, 
wrapper.getConfiguration().getDistributionUris().size()); Assert.assertEquals(new URI("http://server/test/maven.zip"), wrapper.getConfiguration().getDistributionUris().get(0)); Assert.assertEquals("testDistBase", wrapper.getConfiguration().getDistributionBase()); Assert.assertEquals("testDistPath", wrapper.getConfiguration().getDistributionPath()); Assert.assertEquals("testZipBase", wrapper.getConfiguration().getZipBase()); Assert.assertEquals("testZipPath", wrapper.getConfiguration().getZipPath()); Assert.assertTrue(wrapper.getConfiguration().isVerifyDownload()); Assert.assertEquals(Checksum.MD5, wrapper.getConfiguration().getChecksumAlgorithm()); } @Test public void loadWrapperMetadataFromDirectory() throws Exception { WrapperExecutor wrapper = WrapperExecutor.forProjectDirectory(testDir); Assert.assertEquals(1, wrapper.getDistributionUris().size()); Assert.assertEquals(new URI("http://server/test/maven.zip"), wrapper.getDistributionUris().get(0)); Assert.assertEquals(1, wrapper.getConfiguration().getDistributionUris().size()); Assert.assertEquals(new URI("http://server/test/maven.zip"), wrapper.getConfiguration().getDistributionUris().get(0)); Assert.assertEquals("testDistBase", wrapper.getConfiguration().getDistributionBase()); Assert.assertEquals("testDistPath", wrapper.getConfiguration().getDistributionPath()); Assert.assertEquals("testZipBase", wrapper.getConfiguration().getZipBase()); Assert.assertEquals("testZipPath", wrapper.getConfiguration().getZipPath()); Assert.assertTrue(wrapper.getConfiguration().isVerifyDownload()); Assert.assertEquals(Checksum.MD5, wrapper.getConfiguration().getChecksumAlgorithm()); } @Test public void useDefaultMetadataNoPropertiesFile() throws Exception { WrapperExecutor wrapper = WrapperExecutor.forProjectDirectory(new File(testDir, "unknown")); Assert.assertNull(wrapper.getDistributionUris()); Assert.assertNull(wrapper.getConfiguration().getDistributionUris()); Assert.assertEquals(PathAssembler.MAVEN_USER_HOME_STRING, 
wrapper.getConfiguration().getDistributionBase()); Assert.assertEquals(Installer.DEFAULT_DISTRIBUTION_PATH, wrapper.getConfiguration().getDistributionPath()); Assert.assertEquals(PathAssembler.MAVEN_USER_HOME_STRING, wrapper.getConfiguration().getZipBase()); Assert.assertEquals(Installer.DEFAULT_DISTRIBUTION_PATH, wrapper.getConfiguration().getZipPath()); Assert.assertFalse(wrapper.getConfiguration().isVerifyDownload()); Assert.assertNull(wrapper.getConfiguration().getChecksumAlgorithm()); } @Test public void propertiesFileOnlyContainsDistURL() throws Exception { properties = new Properties(); properties.put("distributionUrl", "http://server/test/maven.zip"); writePropertiesFile(properties, propertiesFile, "header"); WrapperExecutor wrapper = WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.assertEquals(1, wrapper.getDistributionUris().size()); Assert.assertEquals(new URI("http://server/test/maven.zip"), wrapper.getDistributionUris().get(0)); Assert.assertEquals(1, wrapper.getConfiguration().getDistributionUris().size()); Assert.assertEquals(new URI("http://server/test/maven.zip"), wrapper.getConfiguration().getDistributionUris().get(0)); Assert.assertEquals(PathAssembler.MAVEN_USER_HOME_STRING, wrapper.getConfiguration().getDistributionBase()); Assert.assertEquals(Installer.DEFAULT_DISTRIBUTION_PATH, wrapper.getConfiguration().getDistributionPath()); Assert.assertEquals(PathAssembler.MAVEN_USER_HOME_STRING, wrapper.getConfiguration().getZipBase()); Assert.assertEquals(Installer.DEFAULT_DISTRIBUTION_PATH, wrapper.getConfiguration().getZipPath()); Assert.assertFalse(wrapper.getConfiguration().isVerifyDownload()); Assert.assertNull(wrapper.getConfiguration().getChecksumAlgorithm()); } @Test public void executeInstallAndLaunch() throws Exception { WrapperExecutor wrapper = WrapperExecutor.forProjectDirectory(propertiesFile); wrapper.execute(new String[] { "arg" }, install, start); verify(install).createDist(Mockito.any(WrapperConfiguration.class)); 
verify(start).start(new String[] { "arg" }, mockInstallDir); } @Test public void failWhenDistNotSetInProperties() throws Exception { properties = new Properties(); writePropertiesFile(properties, propertiesFile, "header"); try { WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.fail("Expected RuntimeException"); } catch (RuntimeException e) { Assert.assertEquals("Could not load wrapper properties from '" + propertiesFile + "'.", e.getMessage()); Assert.assertEquals("No value with key 'distributionUrl' specified in wrapper properties file '" + propertiesFile + "'.", e.getCause().getMessage()); } } @Test public void failWhenPropertiesFileDoesNotExist() { propertiesFile = new File(testDir, "unknown.properties"); try { WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.fail("Expected RuntimeException"); } catch (RuntimeException e) { Assert.assertEquals("Wrapper properties file '" + propertiesFile + "' does not exist.", e.getMessage()); } } @Test public void failWhenVerifyDownloadWithoutAlgorithm() throws Exception { properties = new Properties(); properties.put("distributionUrl", "http://server/test/maven.zip"); properties.put("verifyDownload", Boolean.TRUE.toString()); properties.put("checksumUrl", "http://server/test/maven.md5"); writePropertiesFile(properties, propertiesFile, "header"); try { WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.fail("Expected RuntimeException"); } catch (RuntimeException e) { Assert.assertEquals("Could not load wrapper properties from '" + propertiesFile + "'.", e.getMessage()); Assert.assertEquals("No value with key 'checksumAlgorithm' specified in wrapper properties file '" + propertiesFile + "'.", e.getCause().getMessage()); } } @Test public void failWhenVerifyDownloadWithInvalidAlgorithm() throws Exception { properties = new Properties(); properties.put("distributionUrl", "http://server/test/maven.zip"); properties.put("verifyDownload", Boolean.TRUE.toString()); 
properties.put("checksumAlgorithm", "FOO_BAR"); properties.put("checksumExtension", "md5"); writePropertiesFile(properties, propertiesFile, "header"); try { WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.fail("Expected RuntimeException"); } catch (RuntimeException e) { Assert.assertEquals("Could not load wrapper properties from '" + propertiesFile + "'.", e.getMessage()); Assert.assertTrue(e.getCause().getMessage().toLowerCase().indexOf("no enum") != -1); } } @Test public void testRelativeDistUrl() throws Exception { properties = new Properties(); properties.put("distributionUrl", "some/relative/url/to/bin.zip"); writePropertiesFile(properties, propertiesFile, "header"); WrapperExecutor wrapper = WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.assertEquals(1, wrapper.getDistributionUris().size()); Assert.assertNotEquals("some/relative/url/to/bin.zip", wrapper.getDistributionUris().get(0).getSchemeSpecificPart()); Assert.assertTrue(wrapper.getDistributionUris().get(0).getSchemeSpecificPart().endsWith("some/relative/url/to/bin.zip")); } @Test public void testRelativeChecksumUrl() throws Exception { properties = new Properties(); properties.put("distributionUrl", "http://server/test/maven.zip"); properties.put("verifyDownload", Boolean.TRUE.toString()); properties.put("checksumUrl", "some/relative/url/to/bin.md5"); properties.put("checksumAlgorithm", Checksum.MD5.toString()); writePropertiesFile(properties, propertiesFile, "header"); WrapperExecutor wrapper = WrapperExecutor.forWrapperPropertiesFile(propertiesFile); Assert.assertNotNull(wrapper); Assert.assertNotNull(wrapper.getConfiguration()); Assert.assertThat(wrapper.getConfiguration().getChecksumAlgorithm(), Matchers.is(Checksum.MD5)); Assert.assertTrue(wrapper.getConfiguration().isVerifyDownload()); // Assert.assertEquals(1, wrapper.getConfiguration().getChecksum().size()); // Assert.assertNotEquals("some/relative/url/to/bin.md5", 
wrapper.getConfiguration().getChecksum().get(0).getSchemeSpecificPart()); // Assert.assertTrue(wrapper.getConfiguration().getChecksum().get(0).getSchemeSpecificPart().endsWith("some/relative/url/to/bin.md5")); } private void writePropertiesFile(Properties properties, File propertiesFile, String message) throws Exception { propertiesFile.getParentFile().mkdirs(); OutputStream outStream = null; try { outStream = new FileOutputStream(propertiesFile); properties.store(outStream, message); } finally { IOUtils.closeQuietly(outStream); } } }
package org.jabref.model.database; import java.math.BigInteger; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Pattern; import java.util.stream.Collectors; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import org.jabref.model.database.event.EntryAddedEvent; import org.jabref.model.database.event.EntryRemovedEvent; import org.jabref.model.entry.BibEntry; import org.jabref.model.entry.BibtexString; import org.jabref.model.entry.FieldName; import org.jabref.model.entry.InternalBibtexFields; import org.jabref.model.entry.Month; import org.jabref.model.entry.event.EntryChangedEvent; import org.jabref.model.entry.event.EntryEventSource; import org.jabref.model.entry.event.FieldChangedEvent; import org.jabref.model.strings.StringUtil; import com.google.common.eventbus.EventBus; import com.google.common.eventbus.Subscribe; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * A bibliography database. 
*/ public class BibDatabase { private static final Log LOGGER = LogFactory.getLog(BibDatabase.class); private static final Pattern RESOLVE_CONTENT_PATTERN = Pattern.compile(".*#[^#]+#.*"); /** * State attributes */ private final ObservableList<BibEntry> entries = FXCollections.synchronizedObservableList(FXCollections.observableArrayList()); private final Map<String, BibtexString> bibtexStrings = new ConcurrentHashMap<>(); /** * this is kept in sync with the database (upon adding/removing an entry, it is updated as well) */ private final DuplicationChecker duplicationChecker = new DuplicationChecker(); /** * contains all entry.getID() of the current database */ private final Set<String> internalIDs = new HashSet<>(); private final EventBus eventBus = new EventBus(); private String preamble; // All file contents below the last entry in the file private String epilog = ""; private String sharedDatabaseID; public BibDatabase() { this.eventBus.register(duplicationChecker); this.registerListener(new KeyChangeListener(this)); } /** * @param toResolve maybenull The text to resolve. * @param database maybenull The database to use for resolving the text. * @return The resolved text or the original text if either the text or the database are null * @deprecated use {@link BibDatabase#resolveForStrings(String)} * * Returns a text with references resolved according to an optionally given database. */ @Deprecated public static String getText(String toResolve, BibDatabase database) { if ((toResolve != null) && (database != null)) { return database.resolveForStrings(toResolve); } return toResolve; } /** * Returns the number of entries. */ public int getEntryCount() { return entries.size(); } /** * Checks if the database contains entries. */ public boolean hasEntries() { return !entries.isEmpty(); } /** * Returns an EntrySorter with the sorted entries from this base, * sorted by the given Comparator. 
*/ public synchronized EntrySorter getSorter(Comparator<BibEntry> comp) { return new EntrySorter(new ArrayList<>(getEntries()), comp); } /** * Returns whether an entry with the given ID exists (-> entry_type + hashcode). */ public boolean containsEntryWithId(String id) { return internalIDs.contains(id); } public ObservableList<BibEntry> getEntries() { return FXCollections.unmodifiableObservableList(entries); } /** * Returns a set of Strings, that contains all field names that are visible. This means that the fields * are not internal fields. Internal fields are fields, that are starting with "_". * * @return set of fieldnames, that are visible */ public Set<String> getAllVisibleFields() { Set<String> allFields = new TreeSet<>(); for (BibEntry e : getEntries()) { allFields.addAll(e.getFieldNames()); } return allFields.stream().filter(field -> !InternalBibtexFields.isInternalField(field)) .collect(Collectors.toSet()); } /** * Returns the entry with the given bibtex key. */ public synchronized Optional<BibEntry> getEntryByKey(String key) { for (BibEntry entry : entries) { if (key.equals(entry.getCiteKeyOptional().orElse(null))) { return Optional.of(entry); } } return Optional.empty(); } /** * Collects entries having the specified BibTeX key and returns these entries as list. * The order of the entries is the order they appear in the database. * * @param key * @return list of entries that contains the given key */ public synchronized List<BibEntry> getEntriesByKey(String key) { List<BibEntry> result = new ArrayList<>(); for (BibEntry entry : entries) { entry.getCiteKeyOptional().ifPresent(entryKey -> { if (key.equals(entryKey)) { result.add(entry); } }); } return result; } /** * Finds the entry with a specified ID. 
* * @param id * @return The entry that has the given id */ public synchronized Optional<BibEntry> getEntryById(String id) { return entries.stream().filter(entry -> entry.getId().equals(id)).findFirst(); } /** * Inserts the entry, given that its ID is not already in use. * use Util.createId(...) to make up a unique ID for an entry. * * @param entry BibEntry to insert into the database * @return false if the insert was done without a duplicate warning * @throws KeyCollisionException thrown if the entry id ({@link BibEntry#getId()}) is already present in the database */ public synchronized boolean insertEntry(BibEntry entry) throws KeyCollisionException { return insertEntry(entry, EntryEventSource.LOCAL); } /** * Inserts the entry, given that its ID is not already in use. * use Util.createId(...) to make up a unique ID for an entry. * * @param entry BibEntry to insert * @param eventSource Source the event is sent from * @return false if the insert was done without a duplicate warning */ public synchronized boolean insertEntry(BibEntry entry, EntryEventSource eventSource) throws KeyCollisionException { insertEntries(Collections.singletonList(entry), eventSource); return duplicationChecker.isDuplicateCiteKeyExisting(entry); } public synchronized void insertEntries(BibEntry... 
entries) throws KeyCollisionException { insertEntries(Arrays.asList(entries), EntryEventSource.LOCAL); } public synchronized void insertEntries(List<BibEntry> entries) throws KeyCollisionException { insertEntries(entries, EntryEventSource.LOCAL); } private synchronized void insertEntries(List<BibEntry> newEntries, EntryEventSource eventSource) throws KeyCollisionException { Objects.requireNonNull(newEntries); for (BibEntry entry : newEntries) { String id = entry.getId(); if (containsEntryWithId(id)) { throw new KeyCollisionException("ID is already in use, please choose another"); } internalIDs.add(id); entry.registerListener(this); eventBus.post(new EntryAddedEvent(entry, eventSource)); } entries.addAll(newEntries); } /** * Removes the given entry. * The Entry is removed based on the id {@link BibEntry#id} * @param toBeDeleted Entry to delete */ public synchronized void removeEntry(BibEntry toBeDeleted) { removeEntry(toBeDeleted, EntryEventSource.LOCAL); } /** * Removes the given entry. * The Entry is removed based on the id {@link BibEntry#id} * * @param toBeDeleted Entry to delete * @param eventSource Source the event is sent from */ public synchronized void removeEntry(BibEntry toBeDeleted, EntryEventSource eventSource) { Objects.requireNonNull(toBeDeleted); boolean anyRemoved = entries.removeIf(entry -> entry.getId().equals(toBeDeleted.getId())); if (anyRemoved) { internalIDs.remove(toBeDeleted.getId()); eventBus.post(new EntryRemovedEvent(toBeDeleted, eventSource)); } } /** * Returns the database's preamble. * If the preamble text consists only of whitespace, then also an empty optional is returned. */ public synchronized Optional<String> getPreamble() { if (StringUtil.isBlank(preamble)) { return Optional.empty(); } else { return Optional.of(preamble); } } /** * Sets the database's preamble. */ public synchronized void setPreamble(String preamble) { this.preamble = preamble; } /** * Inserts a Bibtex String. 
*/ public synchronized void addString(BibtexString string) throws KeyCollisionException { if (hasStringLabel(string.getName())) { throw new KeyCollisionException("A string with that label already exists"); } if (bibtexStrings.containsKey(string.getId())) { throw new KeyCollisionException("Duplicate BibTeX string id."); } bibtexStrings.put(string.getId(), string); } /** * Removes the string with the given id. */ public void removeString(String id) { bibtexStrings.remove(id); } /** * Returns a Set of keys to all BibtexString objects in the database. * These are in no sorted order. */ public Set<String> getStringKeySet() { return bibtexStrings.keySet(); } /** * Returns a Collection of all BibtexString objects in the database. * These are in no particular order. */ public Collection<BibtexString> getStringValues() { return bibtexStrings.values(); } /** * Returns the string with the given id. */ public BibtexString getString(String id) { return bibtexStrings.get(id); } /** * Returns the number of strings. */ public int getStringCount() { return bibtexStrings.size(); } /** * Check if there are strings. */ public boolean hasNoStrings() { return bibtexStrings.isEmpty(); } /** * Copies the preamble of another BibDatabase. * * @param database another BibDatabase */ public void copyPreamble(BibDatabase database) { setPreamble(database.getPreamble().orElse("")); } /** * Copies all Strings from another BibDatabase. * * @param database another BibDatabase */ public void copyStrings(BibDatabase database) { for (String key : database.getStringKeySet()) { BibtexString string = database.getString(key); addString(string); } } /** * Returns true if a string with the given label already exists. */ public synchronized boolean hasStringLabel(String label) { for (BibtexString value : bibtexStrings.values()) { if (value.getName().equals(label)) { return true; } } return false; } /** * Resolves any references to strings contained in this field content, * if possible. 
*/ public String resolveForStrings(String content) { Objects.requireNonNull(content, "Content for resolveForStrings must not be null."); return resolveContent(content, new HashSet<>(), new HashSet<>()); } /** * Get all strings used in the entries. */ public Collection<BibtexString> getUsedStrings(Collection<BibEntry> entries) { List<BibtexString> result = new ArrayList<>(); Set<String> allUsedIds = new HashSet<>(); // All entries for (BibEntry entry : entries) { for (String fieldContent : entry.getFieldValues()) { resolveContent(fieldContent, new HashSet<>(), allUsedIds); } } // Preamble if (preamble != null) { resolveContent(preamble, new HashSet<>(), allUsedIds); } for (String stringId : allUsedIds) { result.add((BibtexString) bibtexStrings.get(stringId).clone()); } return result; } /** * Take the given collection of BibEntry and resolve any string * references. * * @param entriesToResolve A collection of BibtexEntries in which all strings of the form * #xxx# will be resolved against the hash map of string * references stored in the database. * @param inPlace If inPlace is true then the given BibtexEntries will be modified, if false then copies of the BibtexEntries are made before resolving the strings. * @return a list of bibtexentries, with all strings resolved. It is dependent on the value of inPlace whether copies are made or the given BibtexEntries are modified. */ public List<BibEntry> resolveForStrings(Collection<BibEntry> entriesToResolve, boolean inPlace) { Objects.requireNonNull(entriesToResolve, "entries must not be null."); List<BibEntry> results = new ArrayList<>(entriesToResolve.size()); for (BibEntry entry : entriesToResolve) { results.add(this.resolveForStrings(entry, inPlace)); } return results; } /** * Take the given BibEntry and resolve any string references. * * @param entry A BibEntry in which all strings of the form #xxx# will be * resolved against the hash map of string references stored in * the database. 
* @param inPlace If inPlace is true then the given BibEntry will be * modified, if false then a copy is made using close made before * resolving the strings. * @return a BibEntry with all string references resolved. It is * dependent on the value of inPlace whether a copy is made or the * given BibtexEntries is modified. */ public BibEntry resolveForStrings(BibEntry entry, boolean inPlace) { BibEntry resultingEntry; if (inPlace) { resultingEntry = entry; } else { resultingEntry = (BibEntry) entry.clone(); } for (Map.Entry<String, String> field : resultingEntry.getFieldMap().entrySet()) { resultingEntry.setField(field.getKey(), this.resolveForStrings(field.getValue())); } return resultingEntry; } /** * If the label represents a string contained in this database, returns * that string's content. Resolves references to other strings, taking * care not to follow a circular reference pattern. * If the string is undefined, returns null. */ private String resolveString(String label, Set<String> usedIds, Set<String> allUsedIds) { Objects.requireNonNull(label); Objects.requireNonNull(usedIds); Objects.requireNonNull(allUsedIds); for (BibtexString string : bibtexStrings.values()) { if (string.getName().equalsIgnoreCase(label)) { // First check if this string label has been resolved // earlier in this recursion. If so, we have a // circular reference, and have to stop to avoid // infinite recursion. if (usedIds.contains(string.getId())) { LOGGER.info("Stopped due to circular reference in strings: " + label); return label; } // If not, log this string's ID now. usedIds.add(string.getId()); if (allUsedIds != null) { allUsedIds.add(string.getId()); } // Ok, we found the string. Now we must make sure we // resolve any references to other strings in this one. 
String result = string.getContent(); result = resolveContent(result, usedIds, allUsedIds); // Finished with recursing this branch, so we remove our // ID again: usedIds.remove(string.getId()); return result; } } // If we get to this point, the string has obviously not been defined locally. // Check if one of the standard BibTeX month strings has been used: Optional<Month> month = Month.getMonthByShortName(label); return month.map(Month::getFullName).orElse(null); } private String resolveContent(String result, Set<String> usedIds, Set<String> allUsedIds) { String res = result; if (RESOLVE_CONTENT_PATTERN.matcher(res).matches()) { StringBuilder newRes = new StringBuilder(); int piv = 0; int next; while ((next = res.indexOf('#', piv)) >= 0) { // We found the next string ref. Append the text // up to it. if (next > 0) { newRes.append(res.substring(piv, next)); } int stringEnd = res.indexOf('#', next + 1); if (stringEnd >= 0) { // We found the boundaries of the string ref, // now resolve that one. String refLabel = res.substring(next + 1, stringEnd); String resolved = resolveString(refLabel, usedIds, allUsedIds); if (resolved == null) { // Could not resolve string. Display the # // characters rather than removing them: newRes.append(res.substring(next, stringEnd + 1)); } else { // The string was resolved, so we display its meaning only, // stripping the # characters signifying the string label: newRes.append(resolved); } piv = stringEnd + 1; } else { // We did not find the boundaries of the string ref. This // makes it impossible to interpret it as a string label. // So we should just append the rest of the text and finish. 
newRes.append(res.substring(next)); piv = res.length(); break; } } if (piv < (res.length() - 1)) { newRes.append(res.substring(piv)); } res = newRes.toString(); } return res; } public String getEpilog() { return epilog; } public void setEpilog(String epilog) { this.epilog = epilog; } /** * Registers an listener object (subscriber) to the internal event bus. * The following events are posted: * * - {@link EntryAddedEvent} * - {@link EntryChangedEvent} * - {@link EntryRemovedEvent} * * @param listener listener (subscriber) to add */ public void registerListener(Object listener) { this.eventBus.register(listener); } /** * Unregisters an listener object. * @param listener listener (subscriber) to remove */ public void unregisterListener(Object listener) { try { this.eventBus.unregister(listener); } catch (IllegalArgumentException e) { // occurs if the event source has not been registered, should not prevent shutdown LOGGER.debug(e); } } @Subscribe private void relayEntryChangeEvent(FieldChangedEvent event) { eventBus.post(event); } public Optional<BibEntry> getReferencedEntry(BibEntry entry) { return entry.getField(FieldName.CROSSREF).flatMap(this::getEntryByKey); } public Optional<String> getSharedDatabaseID() { return Optional.ofNullable(this.sharedDatabaseID); } public void setSharedDatabaseID(String sharedDatabaseID) { this.sharedDatabaseID = sharedDatabaseID; } public boolean isShared() { return getSharedDatabaseID().isPresent(); } public void clearSharedDatabaseID() { this.sharedDatabaseID = null; } /** * Generates and sets a random ID which is globally unique. * * @return The generated sharedDatabaseID */ public String generateSharedDatabaseID() { this.sharedDatabaseID = new BigInteger(128, new SecureRandom()).toString(32); return this.sharedDatabaseID; } public DuplicationChecker getDuplicationChecker() { return duplicationChecker; } }
/******************************************************************************* * Copyright 2013 Sergey Tarasevich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.nostra13.universalimageloader.utils; import android.graphics.BitmapFactory; import android.opengl.GLES10; import com.nostra13.universalimageloader.core.assist.ImageSize; import com.nostra13.universalimageloader.core.assist.ViewScaleType; import com.nostra13.universalimageloader.core.imageaware.ImageAware; import javax.microedition.khronos.opengles.GL10; /** * Provides calculations with image sizes, scales * * @author Sergey Tarasevich (nostra13[at]gmail[dot]com) * @since 1.8.3 */ public final class ImageSizeUtils { private static final int DEFAULT_MAX_BITMAP_DIMENSION = 2048; private static ImageSize maxBitmapSize; static { int[] maxTextureSize = new int[1]; GLES10.glGetIntegerv(GL10.GL_MAX_TEXTURE_SIZE, maxTextureSize, 0); int maxBitmapDimension = Math.max(maxTextureSize[0], DEFAULT_MAX_BITMAP_DIMENSION); maxBitmapSize = new ImageSize(maxBitmapDimension, maxBitmapDimension); } private ImageSizeUtils() { } /** * Defines target size for image aware view. 
Size is defined by target * {@link com.nostra13.universalimageloader.core.imageaware.ImageAware view} parameters, configuration * parameters or device display dimensions.<br /> */ public static ImageSize defineTargetSizeForView(ImageAware imageAware, ImageSize maxImageSize) { int width = imageAware.getWidth(); if (width <= 0) width = maxImageSize.getWidth(); int height = imageAware.getHeight(); if (height <= 0) height = maxImageSize.getHeight(); return new ImageSize(width, height); } /** * Computes sample size for downscaling image size (<b>srcSize</b>) to view size (<b>targetSize</b>). This sample * size is used during * {@linkplain BitmapFactory#decodeStream(java.io.InputStream, android.graphics.Rect, android.graphics.BitmapFactory.Options) * decoding image} to bitmap.<br /> * <br /> * <b>Examples:</b><br /> * <p/> * <pre> * srcSize(100x100), targetSize(10x10), powerOf2Scale = true -> sampleSize = 8 * srcSize(100x100), targetSize(10x10), powerOf2Scale = false -> sampleSize = 10 * * srcSize(100x100), targetSize(20x40), viewScaleType = FIT_INSIDE -> sampleSize = 5 * srcSize(100x100), targetSize(20x40), viewScaleType = CROP -> sampleSize = 2 * </pre> * <p/> * <br /> * The sample size is the number of pixels in either dimension that correspond to a single pixel in the decoded * bitmap. For example, inSampleSize == 4 returns an image that is 1/4 the width/height of the original, and 1/16 * the number of pixels. Any value <= 1 is treated the same as 1. * * @param srcSize Original (image) size * @param targetSize Target (view) size * @param viewScaleType {@linkplain ViewScaleType Scale type} for placing image in view * @param powerOf2Scale <i>true</i> - if sample size be a power of 2 (1, 2, 4, 8, ...) 
* @return Computed sample size */ public static int computeImageSampleSize(ImageSize srcSize, ImageSize targetSize, ViewScaleType viewScaleType, boolean powerOf2Scale) { final int srcWidth = srcSize.getWidth(); final int srcHeight = srcSize.getHeight(); final int targetWidth = targetSize.getWidth(); final int targetHeight = targetSize.getHeight(); int scale = 1; switch (viewScaleType) { case FIT_INSIDE: if (powerOf2Scale) { final int halfWidth = srcWidth / 2; final int halfHeight = srcHeight / 2; while ((halfWidth / scale) > targetWidth || (halfHeight / scale) > targetHeight) { // || scale *= 2; } } else { scale = Math.max(srcWidth / targetWidth, srcHeight / targetHeight); // max } break; case CROP: if (powerOf2Scale) { final int halfWidth = srcWidth / 2; final int halfHeight = srcHeight / 2; while ((halfWidth / scale) > targetWidth && (halfHeight / scale) > targetHeight) { // && scale *= 2; } } else { scale = Math.min(srcWidth / targetWidth, srcHeight / targetHeight); // min } break; } if (scale < 1) { scale = 1; } return scale; } /** * Computes minimal sample size for downscaling image so result image size won't exceed max acceptable OpenGL * texture size.<br /> * We can't create Bitmap in memory with size exceed max texture size (usually this is 2048x2048) so this method * calculate minimal sample size which should be applied to image to fit into these limits. 
* * @param srcSize Original image size * @return Minimal sample size */ public static int computeMinImageSampleSize(ImageSize srcSize) { final int srcWidth = srcSize.getWidth(); final int srcHeight = srcSize.getHeight(); final int targetWidth = maxBitmapSize.getWidth(); final int targetHeight = maxBitmapSize.getHeight(); final int widthScale = (int) Math.ceil((float) srcWidth / targetWidth); final int heightScale = (int) Math.ceil((float) srcHeight / targetHeight); return Math.max(widthScale, heightScale); // max } /** * Computes scale of target size (<b>targetSize</b>) to source size (<b>srcSize</b>).<br /> * <br /> * <b>Examples:</b><br /> * <p/> * <pre> * srcSize(40x40), targetSize(10x10) -> scale = 0.25 * * srcSize(10x10), targetSize(20x20), stretch = false -> scale = 1 * srcSize(10x10), targetSize(20x20), stretch = true -> scale = 2 * * srcSize(100x100), targetSize(20x40), viewScaleType = FIT_INSIDE -> scale = 0.2 * srcSize(100x100), targetSize(20x40), viewScaleType = CROP -> scale = 0.4 * </pre> * * @param srcSize Source (image) size * @param targetSize Target (view) size * @param viewScaleType {@linkplain ViewScaleType Scale type} for placing image in view * @param stretch Whether source size should be stretched if target size is larger than source size. If <b>false</b> * then result scale value can't be greater than 1. 
* @return Computed scale */ public static float computeImageScale(ImageSize srcSize, ImageSize targetSize, ViewScaleType viewScaleType, boolean stretch) { final int srcWidth = srcSize.getWidth(); final int srcHeight = srcSize.getHeight(); final int targetWidth = targetSize.getWidth(); final int targetHeight = targetSize.getHeight(); final float widthScale = (float) srcWidth / targetWidth; final float heightScale = (float) srcHeight / targetHeight; final int destWidth; final int destHeight; if ((viewScaleType == ViewScaleType.FIT_INSIDE && widthScale >= heightScale) || (viewScaleType == ViewScaleType.CROP && widthScale < heightScale)) { destWidth = targetWidth; destHeight = (int) (srcHeight / widthScale); } else { destWidth = (int) (srcWidth / heightScale); destHeight = targetHeight; } float scale = 1; if ((!stretch && destWidth < srcWidth && destHeight < srcHeight) || (stretch && destWidth != srcWidth && destHeight != srcHeight)) { scale = (float) destWidth / srcWidth; } return scale; } }
/* * Copyright (C) 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.sample.cast.refplayer.queue; import com.google.android.gms.cast.MediaQueueItem; import com.google.android.gms.cast.MediaStatus; import com.google.android.gms.cast.framework.CastContext; import com.google.android.gms.cast.framework.CastSession; import com.google.android.gms.cast.framework.SessionManagerListener; import com.google.android.gms.cast.framework.media.RemoteMediaClient; import android.content.Context; import android.util.Log; import android.view.View; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; /** * A singleton to manage the queue. Upon instantiation, it syncs up its own copy of the queue with * the one that the VideoCastManager holds. After that point, it maintains an up-to-date version of * the queue. UI elements get their data from this class. A boolean field, {@code mDetachedQueue} * is used to manage whether this changes to the queue coming from the cast framework should be * reflected here or not; when in "detached" mode, it means that its own copy of the queue is not * kept up to date with the one that the cast framework has. This is needed to preserve the queue * when the media session ends. 
*/ public class QueueDataProvider { private static final String TAG = "QueueDataProvider"; public static final int INVALID = -1; private final Context mAppContext; private final List<MediaQueueItem> mQueue = new CopyOnWriteArrayList<>(); private static QueueDataProvider mInstance; // Locks modification to the remove queue. private final Object mLock = new Object(); private final SessionManagerListener<CastSession> mSessionManagerListener = new MySessionManagerListener(); private final RemoteMediaClient.Listener mRemoteMediaClientListener = new MyRemoteMediaClientListener(); private int mRepeatMode; private boolean mShuffle; private MediaQueueItem mCurrentIem; private MediaQueueItem mUpcomingItem; private OnQueueDataChangedListener mListener; private boolean mDetachedQueue = true; private QueueDataProvider(Context context) { mAppContext = context.getApplicationContext(); mRepeatMode = MediaStatus.REPEAT_MODE_REPEAT_OFF; mShuffle = false; mCurrentIem = null; CastContext.getSharedInstance(mAppContext).getSessionManager().addSessionManagerListener( mSessionManagerListener, CastSession.class); syncWithRemoteQueue(); } public void onUpcomingStopClicked(View view, MediaQueueItem upcomingItem) { RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); if (remoteMediaClient == null) { return; } // need to truncate the queue on the remote device so that we can complete the playback of // the current item but not go any further. Alternatively, one could just stop the playback // here, if that was acceptable. 
int position = getPositionByItemId(upcomingItem.getItemId()); int[] itemIds = new int[getCount() - position]; for (int i = 0; i < itemIds.length; i++) { itemIds[i] = mQueue.get(i + position).getItemId(); } remoteMediaClient.queueRemoveItems(itemIds, null); } public void onUpcomingPlayClicked(View view, MediaQueueItem upcomingItem) { RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); if (remoteMediaClient == null) { return; } remoteMediaClient.queueJumpToItem(upcomingItem.getItemId(), null); } public boolean isQueueDetached() { return mDetachedQueue; } public int getPositionByItemId(int itemId) { if (mQueue.isEmpty()) { return INVALID; } for (int i = 0; i < mQueue.size(); i++) { if (mQueue.get(i).getItemId() == itemId) { return i; } } return INVALID; } public static synchronized QueueDataProvider getInstance(Context context) { if (mInstance == null) { mInstance = new QueueDataProvider(context); } return mInstance; } public void removeFromQueue(int position) { synchronized (mLock) { RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); if (remoteMediaClient == null) { return; } remoteMediaClient.queueRemoveItem(mQueue.get(position).getItemId(), null); } } public void removeAll() { synchronized (mLock) { if (mQueue.isEmpty()) { return; } RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); if (remoteMediaClient == null) { return; } int[] itemIds = new int[mQueue.size()]; for (int i = 0; i < mQueue.size(); i++) { itemIds[i] = mQueue.get(i).getItemId(); } remoteMediaClient.queueRemoveItems(itemIds, null); mQueue.clear(); } } public void moveItem(int fromPosition, int toPosition) { if (fromPosition == toPosition) { return; } RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); if (remoteMediaClient == null) { return; } int itemId = mQueue.get(fromPosition).getItemId(); remoteMediaClient.queueMoveItemToNewIndex(itemId, toPosition, null); final MediaQueueItem item = mQueue.remove(fromPosition); mQueue.add(toPosition, item); } public int 
getCount() { return mQueue.size(); } public MediaQueueItem getItem(int position) { return mQueue.get(position); } public void clearQueue() { mQueue.clear(); mDetachedQueue = true; mCurrentIem = null; } public int getRepeatMode() { return mRepeatMode; } public boolean isShuffleOn() { return mShuffle; } public MediaQueueItem getCurrentItem() { return mCurrentIem; } public int getCurrentItemId() { return mCurrentIem.getItemId(); } public MediaQueueItem getUpcomingItem() { Log.d(TAG, "[upcoming] getUpcomingItem() returning " + mUpcomingItem); return mUpcomingItem; } public void setOnQueueDataChangedListener(OnQueueDataChangedListener listener) { mListener = listener; } public List<MediaQueueItem> getItems() { return mQueue; } /** * Listener notifies the data of the queue has changed. */ public interface OnQueueDataChangedListener { void onQueueDataChanged(); } private void syncWithRemoteQueue() { RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); if (remoteMediaClient != null) { remoteMediaClient.addListener(mRemoteMediaClientListener); MediaStatus mediaStatus = remoteMediaClient.getMediaStatus(); if (mediaStatus != null) { List<MediaQueueItem> items = mediaStatus.getQueueItems(); if (items != null && !items.isEmpty()) { mQueue.clear(); mQueue.addAll(items); mRepeatMode = mediaStatus.getQueueRepeatMode(); mCurrentIem = mediaStatus.getQueueItemById(mediaStatus.getCurrentItemId()); mDetachedQueue = false; mUpcomingItem = mediaStatus.getQueueItemById(mediaStatus.getPreloadedItemId()); } } } } private class MySessionManagerListener implements SessionManagerListener<CastSession> { @Override public void onSessionResumed(CastSession session, boolean wasSuspended) { syncWithRemoteQueue(); } @Override public void onSessionStarted(CastSession session, String sessionId) { syncWithRemoteQueue(); } @Override public void onSessionEnded(CastSession session, int error) { clearQueue(); if (mListener != null) { mListener.onQueueDataChanged(); } } @Override public void 
onSessionStarting(CastSession session) { } @Override public void onSessionStartFailed(CastSession session, int error) { } @Override public void onSessionEnding(CastSession session) { } @Override public void onSessionResuming(CastSession session, String sessionId) { } @Override public void onSessionResumeFailed(CastSession session, int error) { } @Override public void onSessionSuspended(CastSession session, int reason) { } } private class MyRemoteMediaClientListener implements RemoteMediaClient.Listener { @Override public void onPreloadStatusUpdated() { RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); if (remoteMediaClient == null) { return; } MediaStatus mediaStatus = remoteMediaClient.getMediaStatus(); if (mediaStatus == null) { return; } mUpcomingItem = mediaStatus.getQueueItemById(mediaStatus.getPreloadedItemId()); Log.d(TAG, "onRemoteMediaPreloadStatusUpdated() with item=" + mUpcomingItem); if (mListener != null) { mListener.onQueueDataChanged(); } } @Override public void onQueueStatusUpdated() { updateMediaQueue(); if (mListener != null) { mListener.onQueueDataChanged(); } Log.d(TAG, "Queue was updated"); } @Override public void onStatusUpdated() { updateMediaQueue(); if (mListener != null) { mListener.onQueueDataChanged(); } } @Override public void onMetadataUpdated() { } @Override public void onSendingRemoteMediaRequest() { } private void updateMediaQueue() { RemoteMediaClient remoteMediaClient = getRemoteMediaClient(); MediaStatus mediaStatus; List<MediaQueueItem> queueItems = null; if (remoteMediaClient != null) { mediaStatus = remoteMediaClient.getMediaStatus(); if (mediaStatus != null) { queueItems = mediaStatus.getQueueItems(); mRepeatMode = mediaStatus.getQueueRepeatMode(); mCurrentIem = mediaStatus.getQueueItemById(mediaStatus.getCurrentItemId()); } } mQueue.clear(); if (queueItems == null) { Log.d(TAG, "Queue is cleared"); } else { Log.d(TAG, "Queue is updated with a list of size: " + queueItems.size()); if (queueItems.size() > 0) { 
mQueue.addAll(queueItems); mDetachedQueue = false; } else { mDetachedQueue = true; } } } } private RemoteMediaClient getRemoteMediaClient() { CastSession castSession = CastContext.getSharedInstance(mAppContext).getSessionManager() .getCurrentCastSession(); if (castSession == null || !castSession.isConnected()) { Log.w(TAG, "Trying to get a RemoteMediaClient when no CastSession is started."); return null; } return castSession.getRemoteMediaClient(); } }
package ga; import engine.*; import java.util.*; public class GAPopulation { /* Evolutionary parameters: */ public int size; // size of the population public int ngens; // total number of generations public int currgen; // current generation /* Crossover parameters */ int tournamentK; // size of tournament int elite; // size of elite int immigrant; // number of new random individuals double mutrate; // chance that a mutation will occur double xoverrate; // chance that the xover will occur /* Containers */ public ArrayList<Genome> individual; Genome parent; Trainer T; /* Progress data */ public double[] max_fitness; public double[] avg_fitness; public double[] terminals; // average total number of terminals public double[] bigterminals; // average total number of sig. terminals /** * Initialize and load parameters. * Parameter comp is a node from a previous * scenario, which is used for distance calculations. */ public GAPopulation(Genome comp) { individual = new ArrayList<Genome>(); parent = comp; // reading parameters Parameter param = Parameter.getInstance(); String paramval; paramval = param.getParam("population size"); if (paramval != null) size = Integer.valueOf(paramval); else { System.err.println("\"population size\" not defined on parameter file."); size = 10; } paramval = param.getParam("generation number"); if (paramval != null) ngens = Integer.valueOf(paramval); else { System.err.println("\"generation number\" not defined on parameter file."); ngens = 10; } paramval = param.getParam("tournament K"); if (paramval != null) tournamentK = Integer.valueOf(paramval); else { System.err.println("\"tournament K\" not defined on parameter file."); tournamentK = 5; } paramval = param.getParam("elite size"); if (paramval != null) elite = Integer.valueOf(paramval); else { System.err.println("\"elite size\" not defined on parameter file."); elite = 1; } paramval = param.getParam("immigrant size"); if (paramval != null) immigrant = Integer.valueOf(paramval); else { 
System.err.println("\"immigrant size\" not defined on parameter file."); immigrant = 0;; } paramval = param.getParam("mutation rate"); if (paramval != null) mutrate = Double.valueOf(paramval); else { System.err.println("\"mutation rate\" not defined on parameter file."); mutrate = 0.01; } paramval = param.getParam("crossover rate"); if (paramval != null) xoverrate = Double.valueOf(paramval); else { System.err.println("\"crossover rate\" not defined on parameter file."); xoverrate = 0.9; } } /** * Initialize the new population and the local * variables. Startd is the target date for the * @param startd */ public void initPopulation(Date startd) { T = new Trainer(startd); currgen = 0; for (int i = 0; i < size; i++) { Genome n = new Genome(); n.init(); individual.add(n); } max_fitness = new double[ngens]; avg_fitness = new double[ngens]; terminals = new double[ngens]; bigterminals = new double[ngens]; } /** * Runs one generation loop * */ public void runGeneration() { eval(); breed(); currgen++; } /** * update the values of the maxfitness/avg fitness/etc * public arrays; */ public void updateStatus() { Parameter p = Parameter.getInstance(); String param = p.getParam("asset treshold"); double tresh = Double.valueOf(param); avg_fitness[currgen-1] = 0; terminals[currgen-1] = 0; bigterminals[currgen-1] = 0; for (int i = 0; i < individual.size(); i++) { avg_fitness[currgen-1] += individual.get(i).fitness; terminals[currgen-1] += individual.get(i).countAsset(0.0); bigterminals[currgen-1] += individual.get(i).countAsset(tresh); } max_fitness[currgen-1] = individual.get(0).fitness; avg_fitness[currgen-1] /= size; terminals[currgen-1] /= size; bigterminals[currgen-1] /= size; } /** * Calculates the fitness value for each individual * in the population. */ public void eval() { for (int i = 0; i < size; i++) { individual.get(i).eval(T); } Collections.sort(individual); } /** * Perform selection, crossover, mutation in * order to create a new population. 
* * Assumes the eval function has already been * performed. * */ public void breed() { RNG d = RNG.getInstance(); ArrayList<Genome> nextGen = new ArrayList<Genome>(); Genome p1,p2; // elite: (few copied individuals) for (int i = 0; i < elite; i++) { nextGen.add(individual.get(i).copy()); } // immigrant: (usually 0) for (int i = 0; i < immigrant; i++) { Genome n = new Genome(); n.init(); nextGen.add(n); } // crossover: for (int i = 0; i < size - (immigrant + elite); i+=2) { // selection - the selection function should // return copies already. p1 = Tournament(); p2 = Tournament(); // rolls for xover if (d.nextDouble() < xoverrate) { p1.crossover(p2); } // rolls for mutation if (d.nextDouble() < mutrate) p1.mutation(); if (d.nextDouble() < mutrate) p2.mutation(); nextGen.add(p1); nextGen.add(p2); } individual = nextGen; } /** * Select one parent from the population by using * fitness-proportional tournament selection * (eat candidate has a chance proportional to his * fitness of being chosen). * * The function copy the chosen candidate and send * him back. * @return */ public Genome Tournament() { RNG d = RNG.getInstance(); Genome[] list = new Genome[tournamentK]; double[] rank = new double[tournamentK]; double sum = 0.0; double ticket = 0.0; double min = 0.0; /* Selects individuals and removes negative fitness */ for (int i = 0; i < tournamentK; i++) { list[i] = individual.get(d.nextInt(size)); if (list[i].fitness < min) min = list[i].fitness; } /* I'm not sure if this is the best way to * make the proportion between the fitnesses. * Some sort of scaling factor should be put here * to avoit high fitnesses from superdominating. * * But maybe the tournament proccess already guarantees this? 
*/ for (int i = 0; i < tournamentK; i++) { sum += list[i].fitness - min; rank[i] = sum; } ticket = d.nextDouble()*sum; for (int i = 0; i < tournamentK; i++) { if ((ticket) <= rank[i]) return list[i].copy(); } // should never get here System.err.println("x" + ticket + " + " + sum); System.err.println("Warning: MemeTournament - reached unreachable line"); return list[0].copy(); } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl.light; import com.intellij.lang.java.JavaLanguage; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.infos.CandidateInfo; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.NotNull; public class LightMemberReference extends LightElement implements PsiJavaCodeReferenceElement { @NotNull private final PsiMember myRefMember; private final PsiSubstitutor mySubstitutor; private LightReferenceParameterList myParameterList; public LightMemberReference(@NotNull PsiManager manager, @NotNull PsiMember member, PsiSubstitutor substitutor) { super(manager, JavaLanguage.INSTANCE); myRefMember = member; mySubstitutor = substitutor; } @Override @NotNull public PsiElement resolve() { return myRefMember; } @Override @NotNull public JavaResolveResult advancedResolve(boolean incompleteCode){ final PsiElement resolved = resolve(); PsiSubstitutor substitutor = mySubstitutor; if (substitutor == null) { substitutor = PsiSubstitutor.EMPTY; } return new CandidateInfo(resolved, substitutor); } @Override @NotNull public JavaResolveResult[] multiResolve(boolean incompleteCode){ final JavaResolveResult result = advancedResolve(incompleteCode); if(result != JavaResolveResult.EMPTY) return new JavaResolveResult[]{result}; return JavaResolveResult.EMPTY_ARRAY; } 
@Override public void processVariants(@NotNull PsiScopeProcessor processor){ throw new RuntimeException("Variants are not available for light references"); } @Override public PsiElement getReferenceNameElement() { return null; } @Override public PsiReferenceParameterList getParameterList() { if (myParameterList == null) { myParameterList = new LightReferenceParameterList(myManager, PsiTypeElement.EMPTY_ARRAY); } return myParameterList; } @Override public String getQualifiedName() { final PsiClass containingClass = myRefMember.getContainingClass(); if (containingClass != null) { final String qualifiedName = containingClass.getQualifiedName(); if (qualifiedName != null) { return qualifiedName + '.' + myRefMember.getName(); } } return myRefMember.getName(); } @Override public String getReferenceName() { return getQualifiedName(); } @Override public String getText() { return myRefMember.getName() + getParameterList().getText(); } @Override public PsiReference getReference() { return this; } @Override @NotNull public String getCanonicalText() { String name = getQualifiedName(); if (name == null) return null; PsiType[] types = getTypeParameters(); if (types.length == 0) return name; StringBuffer buf = new StringBuffer(); buf.append(name); buf.append('<'); for (int i = 0; i < types.length; i++) { if (i > 0) buf.append(','); buf.append(types[i].getCanonicalText()); } buf.append('>'); return buf.toString(); } @Override public PsiElement copy() { return new LightMemberReference(myManager, myRefMember, mySubstitutor); } @Override public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException { //TODO? throw new IncorrectOperationException(); } @Override public PsiElement bindToElement(@NotNull PsiElement element) throws IncorrectOperationException { //TODO? 
throw new IncorrectOperationException(); } @Override public void accept(@NotNull PsiElementVisitor visitor) { if (visitor instanceof JavaElementVisitor) { ((JavaElementVisitor)visitor).visitReferenceElement(this); } else { visitor.visitElement(this); } } public String toString() { return "LightClassReference:" + myRefMember.getName(); } @Override public boolean isReferenceTo(PsiElement element) { return element instanceof PsiClass && element.getManager().areElementsEquivalent(resolve(), element); } @Override @NotNull public Object[] getVariants() { throw new RuntimeException("Variants are not available for light references"); } @Override public boolean isSoft(){ return false; } @NotNull @Override public TextRange getRangeInElement() { return new TextRange(0, getTextLength()); } @NotNull @Override public PsiElement getElement() { return this; } @Override public boolean isValid() { PsiReferenceParameterList parameterList = getParameterList(); if (parameterList != null && !parameterList.isValid()) return false; return myRefMember == null || myRefMember.isValid(); } @Override @NotNull public PsiType[] getTypeParameters() { PsiReferenceParameterList parameterList = getParameterList(); return parameterList == null ? PsiType.EMPTY_ARRAY : parameterList.getTypeArguments(); } @Override public PsiElement getQualifier() { return null; } @Override public boolean isQualified() { return false; } }
package net.minidev.json.writer;

/*
 * Copyright 2011 JSON-SMART authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.List;

/**
 * Maps a JSON array onto a Java array. Parsing accumulates values into an
 * {@link ArrayList}; {@link #convert(Object)} turns the accumulated list into
 * the target array type. The static {@code MAPPER_*} constants cover every
 * primitive array type and its boxed counterpart; boxed mappers skip null
 * slots already consumed (they keep the destination cell at its default).
 */
public class ArraysMapper<T> extends JsonReaderI<T> {
	public ArraysMapper(JsonReader base) {
		super(base);
	}

	/** A fresh mutable accumulator for incoming array elements. */
	@Override
	public Object createArray() {
		return new ArrayList<Object>();
	}

	@SuppressWarnings("unchecked")
	@Override
	public void addValue(Object current, Object value) {
		((List<Object>) current).add(value);
	}

	@SuppressWarnings("unchecked")
	@Override
	public T convert(Object current) {
		return (T) current;
	}

	/**
	 * Mapper for arrays of an arbitrary component type, discovered reflectively
	 * from the requested array class.
	 */
	public static class GenericMapper<T> extends ArraysMapper<T> {
		final Class<?> componentType;
		JsonReaderI<?> subMapper;

		public GenericMapper(JsonReader base, Class<T> type) {
			super(base);
			this.componentType = type.getComponentType();
		}

		@SuppressWarnings("unchecked")
		@Override
		public T convert(Object current) {
			int p = 0;
			Object[] r = (Object[]) Array.newInstance(componentType, ((List<?>) current).size());
			for (Object e : ((List<?>) current))
				r[p++] = e;
			return (T) r;
		}

		@Override
		public JsonReaderI<?> startArray(String key) {
			// Lazily resolve and cache the component-type mapper.
			if (subMapper == null)
				subMapper = base.getMapper(componentType);
			return subMapper;
		}

		@Override
		public JsonReaderI<?> startObject(String key) {
			if (subMapper == null)
				subMapper = base.getMapper(componentType);
			return subMapper;
		}
	}

	public static JsonReaderI<int[]> MAPPER_PRIM_INT = new ArraysMapper<int[]>(null) {
		@Override
		public int[] convert(Object current) {
			int p = 0;
			int[] r = new int[((List<?>) current).size()];
			for (Object e : ((List<?>) current))
				r[p++] = ((Number) e).intValue();
			return r;
		}
	};

	public static JsonReaderI<Integer[]> MAPPER_INT = new ArraysMapper<Integer[]>(null) {
		@Override
		public Integer[] convert(Object current) {
			int p = 0;
			Integer[] r = new Integer[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				if (e instanceof Integer)
					r[p] = (Integer) e;
				else
					r[p] = ((Number) e).intValue();
				p++;
			}
			return r;
		}
	};

	public static JsonReaderI<short[]> MAPPER_PRIM_SHORT = new ArraysMapper<short[]>(null) {
		@Override
		public short[] convert(Object current) {
			int p = 0;
			short[] r = new short[((List<?>) current).size()];
			for (Object e : ((List<?>) current))
				r[p++] = ((Number) e).shortValue();
			return r;
		}
	};

	public static JsonReaderI<Short[]> MAPPER_SHORT = new ArraysMapper<Short[]>(null) {
		@Override
		public Short[] convert(Object current) {
			int p = 0;
			Short[] r = new Short[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				if (e instanceof Short)
					r[p] = (Short) e;
				else
					r[p] = ((Number) e).shortValue();
				p++;
			}
			return r;
		}
	};

	public static JsonReaderI<byte[]> MAPPER_PRIM_BYTE = new ArraysMapper<byte[]>(null) {
		@Override
		public byte[] convert(Object current) {
			int p = 0;
			byte[] r = new byte[((List<?>) current).size()];
			for (Object e : ((List<?>) current))
				r[p++] = ((Number) e).byteValue();
			return r;
		}
	};

	public static JsonReaderI<Byte[]> MAPPER_BYTE = new ArraysMapper<Byte[]>(null) {
		@Override
		public Byte[] convert(Object current) {
			int p = 0;
			Byte[] r = new Byte[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				if (e instanceof Byte)
					r[p] = (Byte) e;
				else
					r[p] = ((Number) e).byteValue();
				p++;
			}
			return r;
		}
	};

	public static JsonReaderI<char[]> MAPPER_PRIM_CHAR = new ArraysMapper<char[]>(null) {
		@Override
		public char[] convert(Object current) {
			int p = 0;
			char[] r = new char[((List<?>) current).size()];
			// Chars arrive as strings; take the first character of each value.
			for (Object e : ((List<?>) current))
				r[p++] = e.toString().charAt(0);
			return r;
		}
	};

	public static JsonReaderI<Character[]> MAPPER_CHAR = new ArraysMapper<Character[]>(null) {
		@Override
		public Character[] convert(Object current) {
			int p = 0;
			Character[] r = new Character[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				r[p] = e.toString().charAt(0);
				p++;
			}
			return r;
		}
	};

	public static JsonReaderI<long[]> MAPPER_PRIM_LONG = new ArraysMapper<long[]>(null) {
		@Override
		public long[] convert(Object current) {
			int p = 0;
			long[] r = new long[((List<?>) current).size()];
			// BUG FIX: was intValue(), which silently truncated values outside int range.
			for (Object e : ((List<?>) current))
				r[p++] = ((Number) e).longValue();
			return r;
		}
	};

	public static JsonReaderI<Long[]> MAPPER_LONG = new ArraysMapper<Long[]>(null) {
		@Override
		public Long[] convert(Object current) {
			int p = 0;
			Long[] r = new Long[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				// BUG FIX: was "instanceof Float" followed by a cast to Long, which threw
				// ClassCastException for any Float element and never hit the fast path.
				if (e instanceof Long)
					r[p] = (Long) e;
				else
					r[p] = ((Number) e).longValue();
				p++;
			}
			return r;
		}
	};

	public static JsonReaderI<float[]> MAPPER_PRIM_FLOAT = new ArraysMapper<float[]>(null) {
		@Override
		public float[] convert(Object current) {
			int p = 0;
			float[] r = new float[((List<?>) current).size()];
			for (Object e : ((List<?>) current))
				r[p++] = ((Number) e).floatValue();
			return r;
		}
	};

	public static JsonReaderI<Float[]> MAPPER_FLOAT = new ArraysMapper<Float[]>(null) {
		@Override
		public Float[] convert(Object current) {
			int p = 0;
			Float[] r = new Float[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				if (e instanceof Float)
					r[p] = (Float) e;
				else
					r[p] = ((Number) e).floatValue();
				p++;
			}
			return r;
		}
	};

	public static JsonReaderI<double[]> MAPPER_PRIM_DOUBLE = new ArraysMapper<double[]>(null) {
		@Override
		public double[] convert(Object current) {
			int p = 0;
			double[] r = new double[((List<?>) current).size()];
			for (Object e : ((List<?>) current))
				r[p++] = ((Number) e).doubleValue();
			return r;
		}
	};

	public static JsonReaderI<Double[]> MAPPER_DOUBLE = new ArraysMapper<Double[]>(null) {
		@Override
		public Double[] convert(Object current) {
			int p = 0;
			Double[] r = new Double[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				if (e instanceof Double)
					r[p] = (Double) e;
				else
					r[p] = ((Number) e).doubleValue();
				p++;
			}
			return r;
		}
	};

	public static JsonReaderI<boolean[]> MAPPER_PRIM_BOOL = new ArraysMapper<boolean[]>(null) {
		@Override
		public boolean[] convert(Object current) {
			int p = 0;
			boolean[] r = new boolean[((List<?>) current).size()];
			for (Object e : ((List<?>) current))
				r[p++] = ((Boolean) e).booleanValue();
			return r;
		}
	};

	public static JsonReaderI<Boolean[]> MAPPER_BOOL = new ArraysMapper<Boolean[]>(null) {
		@Override
		public Boolean[] convert(Object current) {
			int p = 0;
			Boolean[] r = new Boolean[((List<?>) current).size()];
			for (Object e : ((List<?>) current)) {
				if (e == null)
					continue;
				if (e instanceof Boolean)
					r[p] = ((Boolean) e).booleanValue();
				else if (e instanceof Number)
					// Numeric truthiness: any non-zero int value is true.
					r[p] = ((Number) e).intValue() != 0;
				else
					throw new RuntimeException("can not convert " + e + " toBoolean");
				p++;
			}
			return r;
		}
	};
}
package com.ECS.client.jax;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code CartModifyRequest} complex type.
 *
 * <p>Carries the cart identity ({@code CartId} + {@code HMAC}), an optional
 * {@code MergeCart} flag, the set of item modifications, and the requested
 * response groups. All elements are optional per the schema.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CartModifyRequest", propOrder = {
    "cartId",
    "hmac",
    "mergeCart",
    "items",
    "responseGroup"
})
public class CartModifyRequest {

    @XmlElement(name = "CartId")
    protected String cartId;
    @XmlElement(name = "HMAC")
    protected String hmac;
    @XmlElement(name = "MergeCart")
    protected String mergeCart;
    @XmlElement(name = "Items")
    protected CartModifyRequest.Items items;
    @XmlElement(name = "ResponseGroup")
    protected List<String> responseGroup;

    /** Returns the cart identifier, or {@code null} if unset. */
    public String getCartId() {
        return this.cartId;
    }

    /** Sets the cart identifier. */
    public void setCartId(String value) {
        this.cartId = value;
    }

    /** Returns the HMAC token, or {@code null} if unset. */
    public String getHMAC() {
        return this.hmac;
    }

    /** Sets the HMAC token. */
    public void setHMAC(String value) {
        this.hmac = value;
    }

    /** Returns the merge-cart flag, or {@code null} if unset. */
    public String getMergeCart() {
        return this.mergeCart;
    }

    /** Sets the merge-cart flag. */
    public void setMergeCart(String value) {
        this.mergeCart = value;
    }

    /** Returns the item modifications container, or {@code null} if unset. */
    public CartModifyRequest.Items getItems() {
        return this.items;
    }

    /** Sets the item modifications container. */
    public void setItems(CartModifyRequest.Items value) {
        this.items = value;
    }

    /**
     * Returns the live, lazily-created list of response group names. Mutations
     * on the returned list are reflected in this object, which is why no
     * {@code set} method exists for this property (standard JAXB idiom).
     */
    public List<String> getResponseGroup() {
        if (this.responseGroup == null) {
            this.responseGroup = new ArrayList<String>();
        }
        return this.responseGroup;
    }

    /**
     * JAXB binding for the anonymous {@code Items} complex type: a sequence of
     * {@code Item} elements.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "item"
    })
    public static class Items {

        @XmlElement(name = "Item")
        protected List<CartModifyRequest.Items.Item> item;

        /**
         * Returns the live, lazily-created list of items. Mutations on the
         * returned list are reflected in this object; there is intentionally
         * no {@code set} method (standard JAXB idiom).
         */
        public List<CartModifyRequest.Items.Item> getItem() {
            if (this.item == null) {
                this.item = new ArrayList<CartModifyRequest.Items.Item>();
            }
            return this.item;
        }

        /**
         * JAXB binding for a single cart modification: an action
         * ({@code MoveToCart} or {@code SaveForLater} per the schema
         * enumeration), the target cart item id, and a non-negative quantity.
         */
        @XmlAccessorType(XmlAccessType.FIELD)
        @XmlType(name = "", propOrder = {
            "action",
            "cartItemId",
            "quantity"
        })
        public static class Item {

            @XmlElement(name = "Action")
            protected String action;
            @XmlElement(name = "CartItemId")
            protected String cartItemId;
            @XmlElement(name = "Quantity")
            @XmlSchemaType(name = "nonNegativeInteger")
            protected BigInteger quantity;

            /** Returns the action, or {@code null} if unset. */
            public String getAction() {
                return this.action;
            }

            /** Sets the action. */
            public void setAction(String value) {
                this.action = value;
            }

            /** Returns the cart item id, or {@code null} if unset. */
            public String getCartItemId() {
                return this.cartItemId;
            }

            /** Sets the cart item id. */
            public void setCartItemId(String value) {
                this.cartItemId = value;
            }

            /** Returns the quantity, or {@code null} if unset. */
            public BigInteger getQuantity() {
                return this.quantity;
            }

            /** Sets the quantity. */
            public void setQuantity(BigInteger value) {
                this.quantity = value;
            }
        }
    }
}
/**
 * Copyright (C) 2014-2016 LinkedIn Corp. (pinot-core@linkedin.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.linkedin.pinot.core.segment.index.loader;

import com.linkedin.pinot.common.config.IndexingConfig;
import com.linkedin.pinot.common.config.TableConfig;
import com.linkedin.pinot.common.segment.ReadMode;
import com.linkedin.pinot.core.data.manager.config.InstanceDataManagerConfig;
import com.linkedin.pinot.core.indexsegment.generator.SegmentVersion;
import com.linkedin.pinot.core.segment.index.loader.columnminmaxvalue.ColumnMinMaxValueGeneratorMode;
import com.linkedin.pinot.core.startree.StarTreeFormatVersion;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;


/**
 * Table level index loading config. Instance-level settings are applied first,
 * then table-level settings overlay them (table wins where both are present).
 */
public class IndexLoadingConfig {
  private static final int DEFAULT_REALTIME_AVG_MULTI_VALUE_COUNT = 2;

  private ReadMode _readMode = ReadMode.DEFAULT_MODE;
  private List<String> _sortedColumns = Collections.emptyList();
  private Set<String> _invertedIndexColumns = new HashSet<>();
  private Set<String> _noDictionaryColumns = new HashSet<>();
  private Set<String> _onHeapDictionaryColumns = new HashSet<>();
  private SegmentVersion _segmentVersion;
  private StarTreeFormatVersion _starTreeVersion = StarTreeFormatVersion.DEFAULT_VERSION;

  // This value will remain true only when the empty constructor is invoked.
  private boolean _enableDefaultColumns = true;
  private ColumnMinMaxValueGeneratorMode _columnMinMaxValueGeneratorMode =
      ColumnMinMaxValueGeneratorMode.DEFAULT_MODE;
  private int _realtimeAvgMultiValueCount = DEFAULT_REALTIME_AVG_MULTI_VALUE_COUNT;
  private boolean _enableSplitCommit;
  private boolean _isRealtimeOffheapAllocation;

  public IndexLoadingConfig(@Nonnull InstanceDataManagerConfig instanceDataManagerConfig,
      @Nullable TableConfig tableConfig) {
    extractFromInstanceConfig(instanceDataManagerConfig);
    if (tableConfig != null) {
      extractFromTableConfig(tableConfig);
    }
  }

  /**
   * Overlays table-level settings onto the instance defaults. Each setting is
   * applied only when explicitly present in the table's indexing config.
   *
   * <p>Fixed: the parameter was annotated {@code @Nullable} but is dereferenced
   * unconditionally; the constructor already guards against null, so the
   * contract is {@code @Nonnull}.
   */
  private void extractFromTableConfig(@Nonnull TableConfig tableConfig) {
    IndexingConfig indexingConfig = tableConfig.getIndexingConfig();

    String tableReadMode = indexingConfig.getLoadMode();
    if (tableReadMode != null) {
      _readMode = ReadMode.getEnum(tableReadMode);
    }

    List<String> sortedColumns = indexingConfig.getSortedColumn();
    if (sortedColumns != null) {
      _sortedColumns = sortedColumns;
    }

    List<String> invertedIndexColumns = indexingConfig.getInvertedIndexColumns();
    if (invertedIndexColumns != null) {
      _invertedIndexColumns.addAll(invertedIndexColumns);
    }

    List<String> noDictionaryColumns = indexingConfig.getNoDictionaryColumns();
    if (noDictionaryColumns != null) {
      _noDictionaryColumns.addAll(noDictionaryColumns);
    }

    List<String> onHeapDictionaryColumns = indexingConfig.getOnHeapDictionaryColumns();
    if (onHeapDictionaryColumns != null) {
      _onHeapDictionaryColumns.addAll(onHeapDictionaryColumns);
    }

    // Segment version strings are lower-case enum names (e.g. "v3").
    String tableSegmentVersion = indexingConfig.getSegmentFormatVersion();
    if (tableSegmentVersion != null) {
      _segmentVersion = SegmentVersion.valueOf(tableSegmentVersion.toLowerCase());
    }

    String starTreeFormat = indexingConfig.getStarTreeFormat();
    if (starTreeFormat != null) {
      _starTreeVersion = StarTreeFormatVersion.valueOf(starTreeFormat.toUpperCase());
    }

    String columnMinMaxValueGeneratorMode = indexingConfig.getColumnMinMaxValueGeneratorMode();
    if (columnMinMaxValueGeneratorMode != null) {
      _columnMinMaxValueGeneratorMode =
          ColumnMinMaxValueGeneratorMode.valueOf(columnMinMaxValueGeneratorMode.toUpperCase());
    }
  }

  /** Applies instance-level defaults from the data manager config. */
  private void extractFromInstanceConfig(@Nonnull InstanceDataManagerConfig instanceDataManagerConfig) {
    ReadMode instanceReadMode = instanceDataManagerConfig.getReadMode();
    if (instanceReadMode != null) {
      _readMode = instanceReadMode;
    }

    String instanceSegmentVersion = instanceDataManagerConfig.getSegmentFormatVersion();
    if (instanceSegmentVersion != null) {
      _segmentVersion = SegmentVersion.valueOf(instanceSegmentVersion.toLowerCase());
    }

    _enableDefaultColumns = instanceDataManagerConfig.isEnableDefaultColumns();
    _enableSplitCommit = instanceDataManagerConfig.isEnableSplitCommit();
    _isRealtimeOffheapAllocation = instanceDataManagerConfig.isRealtimeOffHeapAllocation();

    String avgMultiValueCount = instanceDataManagerConfig.getAvgMultiValueCount();
    if (avgMultiValueCount != null) {
      // parseInt avoids the needless Integer boxing of Integer.valueOf.
      _realtimeAvgMultiValueCount = Integer.parseInt(avgMultiValueCount);
    }
  }

  /**
   * For tests only.
   */
  public IndexLoadingConfig() {
  }

  @Nonnull
  public ReadMode getReadMode() {
    return _readMode;
  }

  /**
   * For tests only.
   */
  public void setReadMode(@Nonnull ReadMode readMode) {
    _readMode = readMode;
  }

  @Nonnull
  public List<String> getSortedColumns() {
    return _sortedColumns;
  }

  @Nonnull
  public Set<String> getInvertedIndexColumns() {
    return _invertedIndexColumns;
  }

  /**
   * For tests only.
   */
  public void setInvertedIndexColumns(@Nonnull Set<String> invertedIndexColumns) {
    _invertedIndexColumns = invertedIndexColumns;
  }

  @Nonnull
  public Set<String> getNoDictionaryColumns() {
    return _noDictionaryColumns;
  }

  @Nonnull
  public Set<String> getOnHeapDictionaryColumns() {
    return _onHeapDictionaryColumns;
  }

  @Nullable
  public SegmentVersion getSegmentVersion() {
    return _segmentVersion;
  }

  /**
   * For tests only.
   */
  public void setSegmentVersion(@Nonnull SegmentVersion segmentVersion) {
    _segmentVersion = segmentVersion;
  }

  @Nonnull
  public StarTreeFormatVersion getStarTreeVersion() {
    return _starTreeVersion;
  }

  /**
   * For tests only.
   */
  public void setStarTreeVersion(@Nonnull StarTreeFormatVersion starTreeVersion) {
    _starTreeVersion = starTreeVersion;
  }

  public boolean isEnableDefaultColumns() {
    return _enableDefaultColumns;
  }

  public boolean isEnableSplitCommit() {
    return _enableSplitCommit;
  }

  public boolean isRealtimeOffheapAllocation() {
    return _isRealtimeOffheapAllocation;
  }

  @Nonnull
  public ColumnMinMaxValueGeneratorMode getColumnMinMaxValueGeneratorMode() {
    return _columnMinMaxValueGeneratorMode;
  }

  /**
   * For tests only.
   */
  public void setColumnMinMaxValueGeneratorMode(
      ColumnMinMaxValueGeneratorMode columnMinMaxValueGeneratorMode) {
    _columnMinMaxValueGeneratorMode = columnMinMaxValueGeneratorMode;
  }

  public int getRealtimeAvgMultiValueCount() {
    return _realtimeAvgMultiValueCount;
  }
}
package server;

import java.io.*;
import java.util.HashMap;
import java.util.Map;

/**
 * Server-side command dispatcher for a simple file-transfer protocol over a
 * DataInputStream/DataOutputStream pair. Each client command ("cd", "back",
 * "list", "exit", "download", "upload") maps to a {@link ServerActions}
 * handler. The wire protocol is strictly ordered (writeUTF/writeBoolean/
 * writeInt sequences) and must not be reordered.
 *
 * Created by ${Ruslan} on 23.01.17.
 */
@SuppressWarnings("Since15")
class ServerMenu {
    String way;
    File file;
    private int bufferFile = 128;          // chunk size for file transfers
    private int divTail, sends;            // tail remainder and full-chunk count
    private String separator = System.getProperty("file.separator");
    private StringBuffer waySend;
    private int delWay;                    // prefix length stripped before sending paths
    private String newWay, root;
    private DataInputStream in;
    private DataOutputStream out;
    private Map<String, ServerActions> serverActionsHashMap = new HashMap<>();

    ServerMenu(DataOutputStream out, DataInputStream in) {
        this.in = in;
        this.out = out;
    }

    /** Initializes the working directory; {@code root} anchors "back" navigation. */
    void initFile(String fileName) {
        file = new File(fileName);
        way = file.getAbsolutePath();
        delWay = way.length() - file.getName().length();
        root = way;
    }

    /** Registers all supported command handlers. */
    void fillServerActions() {
        this.serverActionsHashMap.put("cd", new EnterFolder());
        this.serverActionsHashMap.put("back", new ExitFolder());
        this.serverActionsHashMap.put("list", new ShowList());
        this.serverActionsHashMap.put("exit", new ExitApp());
        this.serverActionsHashMap.put("download", new Download());
        this.serverActionsHashMap.put("upload", new Upload());
    }

    /** Dispatches one command; unknown keys are silently ignored. */
    void select(ToDo toDo) throws IOException {
        if (serverActionsHashMap.containsKey(toDo.getKeyToDo())) {
            this.serverActionsHashMap.get(toDo.getKeyToDo()).execute(toDo);
        }
    }

    /**
     * Reads exactly {@code len} bytes from {@code src} into {@code buf}.
     * Fixes the original code's partial-read bug: a single InputStream.read()
     * call may return fewer bytes than requested.
     */
    private static void readFully(InputStream src, byte[] buf, int len) throws IOException {
        int off = 0;
        while (off < len) {
            int read = src.read(buf, off, len - off);
            if (read < 0) {
                throw new EOFException("Unexpected end of stream after " + off + " of " + len + " bytes");
            }
            off += read;
        }
    }

    /** "cd": descend into a subfolder if it exists; replies path + success flag. */
    private class EnterFolder implements ServerActions {
        public String commandName() {
            return "cd";
        }

        public void execute(ToDo value) throws IOException {
            System.out.println(System.getProperty("os.name"));
            newWay = way.concat(separator).concat(value.getTarget());
            File file = new File(newWay);
            if (file.exists()) {
                waySend = new StringBuffer(newWay);
                waySend.delete(0, delWay);
                out.writeUTF(waySend.toString());
                way = newWay;
                out.writeBoolean(true);
            } else {
                waySend = new StringBuffer(way);
                waySend.delete(0, delWay);
                out.writeUTF(waySend.toString());
                out.writeBoolean(false);
            }
        }
    }

    /** "back": ascend to the parent folder, never above {@code root}. */
    private class ExitFolder implements ServerActions {
        public String commandName() {
            return "back";
        }

        public void execute(ToDo value) throws IOException {
            File file1 = new File(way);
            boolean isExist = false;
            String parent = null;
            if (file1.getParentFile().exists() && !way.equals(root)) {
                isExist = true;
                parent = file1.getParent();
            }
            out.writeBoolean(isExist);
            if (isExist) {
                way = parent;
            }
            waySend = new StringBuffer(way);
            waySend.delete(0, delWay);
            out.writeUTF(waySend.toString());
        }
    }

    /** "exit": no-op on the server side. */
    private class ExitApp implements ServerActions {
        public String commandName() {
            return "exit";
        }

        public void execute(ToDo value) throws IOException {
        }
    }

    /** "list": sends entry count, then each name with an is-directory flag. */
    private class ShowList implements ServerActions {
        public String commandName() {
            return "list";
        }

        public void execute(ToDo value) throws IOException {
            File fl = new File(way);
            boolean isDir = fl.isDirectory();
            out.writeBoolean(isDir);
            if (isDir) {
                File[] files = fl.listFiles();
                int quantity = 0;
                if (files != null) {
                    quantity = files.length;
                }
                out.writeInt(quantity);
                if (quantity != 0) {
                    for (File file1 : files) {
                        out.writeUTF(file1.getName());
                        out.writeBoolean(file1.isDirectory());
                    }
                }
            }
            waySend = new StringBuffer(way);
            waySend.delete(0, delWay);
            out.writeUTF(waySend.toString());
        }
    }

    /**
     * "download": streams a file to the client, chunked when larger than
     * {@code bufferFile}. Protocol: exists flag, path, is-folder flag, size,
     * [chunk size, chunk count, tail size,] data.
     */
    private class Download implements ServerActions {
        public String commandName() {
            return "download";
        }

        public void execute(ToDo value) throws IOException {
            newWay = way.concat(separator).concat(value.getTarget());
            boolean isExist = false;
            if (new File(newWay).exists()) {
                isExist = true;
                way = newWay;
            }
            out.writeBoolean(isExist);
            waySend = new StringBuffer(way);
            waySend.delete(0, delWay);
            out.writeUTF(waySend.toString());
            if (isExist) {
                File file = new File(way);
                boolean isFolder = file.isDirectory();
                out.writeBoolean(isFolder);
                if (!isFolder) {
                    int fileSize = (int) file.length();
                    out.writeInt(fileSize);
                    if (fileSize > bufferFile) {
                        divTail = fileSize % bufferFile;
                        sends = (fileSize - divTail) / bufferFile;
                        out.writeInt(bufferFile);
                        out.writeInt(sends);
                        out.writeInt(divTail);
                        // try-with-resources closes bis; the original's extra
                        // bis.close() inside the block was redundant.
                        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
                            byte[] buffer = new byte[bufferFile];
                            for (int i = 0; i < sends; i++) {
                                readFully(bis, buffer, buffer.length);
                                out.write(buffer, 0, buffer.length);
                            }
                            if (divTail != 0) {
                                byte[] tailBuffer = new byte[divTail];
                                readFully(bis, tailBuffer, tailBuffer.length);
                                out.write(tailBuffer, 0, tailBuffer.length);
                            }
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    } else {
                        out.writeInt(fileSize);
                        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
                            byte[] buffer = new byte[fileSize];
                            readFully(bis, buffer, buffer.length);
                            out.write(buffer, 0, buffer.length);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
            }
        }
    }

    /**
     * "upload": receives a file from the client into the current directory.
     * Protocol mirrors Download; uses DataInputStream.readFully so short
     * network reads cannot corrupt the received file.
     */
    private class Upload implements ServerActions {
        @Override
        public String commandName() {
            return "upload";
        }

        @Override
        public void execute(ToDo value) throws IOException {
            boolean isExist = in.readBoolean();
            if (isExist) {
                boolean isDirectory = in.readBoolean();
                if (!isDirectory) {
                    if (new File(way).isDirectory()) {
                        out.writeBoolean(true);
                        String newFile = way.concat(separator).concat(value.getTarget());
                        File file = new File(newFile);
                        out.writeInt(bufferFile);
                        int fileSize = in.readInt();
                        boolean oneSent = in.readBoolean();
                        if (!oneSent) {
                            try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(file))) {
                                sends = in.readInt();
                                divTail = in.readInt();
                                byte[] buffer = new byte[bufferFile];
                                for (int i = 0; i < sends; i++) {
                                    in.readFully(buffer, 0, buffer.length);
                                    bos.write(buffer, 0, buffer.length);
                                }
                                if (divTail != 0) {
                                    byte[] tailBuffer = new byte[divTail];
                                    in.readFully(tailBuffer, 0, tailBuffer.length);
                                    bos.write(tailBuffer, 0, tailBuffer.length);
                                }
                                bos.flush();
                            } catch (Exception ex) {
                                ex.printStackTrace();
                            }
                        } else {
                            try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(file))) {
                                byte[] buffer = new byte[fileSize];
                                in.readFully(buffer, 0, buffer.length);
                                bos.write(buffer, 0, buffer.length);
                                bos.flush();
                            } catch (Exception ex) {
                                ex.printStackTrace();
                            }
                        }
                    } else {
                        out.writeBoolean(false);
                    }
                }
            }
        }
    }
}
/*
 * Copyright 2007 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kfs.gl.document;

import java.math.BigDecimal;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.kuali.kfs.gl.businessobject.CorrectionChange;
import org.kuali.kfs.gl.businessobject.CorrectionChangeGroup;
import org.kuali.kfs.gl.businessobject.CorrectionCriteria;
import org.kuali.kfs.gl.businessobject.OriginEntryFull;
import org.kuali.kfs.gl.businessobject.OriginEntryStatistics;
import org.kuali.kfs.gl.businessobject.options.OriginEntryFieldFinder;
import org.kuali.kfs.sys.KFSConstants;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.rice.core.api.util.type.KualiDecimal;
import org.kuali.rice.coreservice.framework.parameter.ParameterService;

/**
 * Utility methods for the General Ledger Correction Process (GLCP) document:
 * parameter-driven limits, criteria validation, criteria matching against
 * origin entries, applying change groups, and statistics accumulation.
 */
public class CorrectionDocumentUtils {
    private static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(CorrectionDocumentUtils.class);

    /** Fallback record-count limit used when the system parameter is not set. */
    public static final int DEFAULT_RECORD_COUNT_FUNCTIONALITY_LIMIT = 1000;

    /**
     * The GLCP document will always be on restricted functionality mode, regardless of input group size
     */
    public static final int RECORD_COUNT_FUNCTIONALITY_LIMIT_IS_NONE = 0;

    /**
     * The GLCP document will never be on restricted functionality mode, regardless of input group size
     */
    public static final int RECORD_COUNT_FUNCTIONALITY_LIMIT_IS_UNLIMITED = -1;

    /** Fallback page size used when the system parameter is not set. */
    public static final int DEFAULT_RECORDS_PER_PAGE = 10;

    /**
     * Returns the record-count limit above which the GLCP document runs in
     * restricted functionality mode.
     *
     * @return the configured limit, or {@link #DEFAULT_RECORD_COUNT_FUNCTIONALITY_LIMIT}
     *         when the RECORD_COUNT_FUNCTIONALITY_LIMIT parameter is absent
     */
    public static int getRecordCountFunctionalityLimit() {
        String limitString = SpringContext.getBean(ParameterService.class).getParameterValueAsString(GeneralLedgerCorrectionProcessDocument.class, KFSConstants.GeneralLedgerCorrectionProcessApplicationParameterKeys.RECORD_COUNT_FUNCTIONALITY_LIMIT);
        if (limitString != null) {
            // parseInt avoids the needless Integer box created by Integer.valueOf
            return Integer.parseInt(limitString);
        }
        return DEFAULT_RECORD_COUNT_FUNCTIONALITY_LIMIT;
    }

    /**
     * Returns the number of origin entry records shown per page.
     *
     * @return the configured page size, or {@link #DEFAULT_RECORDS_PER_PAGE}
     *         when the RECORDS_PER_PAGE parameter is absent
     */
    public static int getRecordsPerPage() {
        String limitString = SpringContext.getBean(ParameterService.class).getParameterValueAsString(GeneralLedgerCorrectionProcessDocument.class, KFSConstants.GeneralLedgerCorrectionProcessApplicationParameterKeys.RECORDS_PER_PAGE);
        if (limitString != null) {
            return Integer.parseInt(limitString);
        }
        return DEFAULT_RECORDS_PER_PAGE;
    }

    /**
     * Returns whether the document should run in restricted functionality mode.
     * A limit of {@link #RECORD_COUNT_FUNCTIONALITY_LIMIT_IS_NONE} (0) forces
     * restricted mode; {@link #RECORD_COUNT_FUNCTIONALITY_LIMIT_IS_UNLIMITED}
     * (-1) disables it; otherwise restricted mode applies once the input group
     * size reaches the limit.
     *
     * @param inputGroupSize size of input groups
     * @param recordCountFunctionalityLimit limit for record count functionality
     * @return true if input group size is greater than or equal to record count functionality limit
     */
    public static boolean isRestrictedFunctionalityMode(int inputGroupSize, int recordCountFunctionalityLimit) {
        return (recordCountFunctionalityLimit != CorrectionDocumentUtils.RECORD_COUNT_FUNCTIONALITY_LIMIT_IS_UNLIMITED && inputGroupSize >= recordCountFunctionalityLimit)
                || recordCountFunctionalityLimit == CorrectionDocumentUtils.RECORD_COUNT_FUNCTIONALITY_LIMIT_IS_NONE;
    }

    /**
     * When a correction criterion is about to be added to a group, this checks that it is
     * valid, meaning that the field name is not blank.
     *
     * @param correctionCriteria validated correction criteria
     * @return true if correction criteria is valid for adding
     */
    public static boolean validCorrectionCriteriaForAdding(CorrectionCriteria correctionCriteria) {
        return StringUtils.isNotBlank(correctionCriteria.getCorrectionFieldName());
    }

    /**
     * When a document is about to be saved, a criterion is valid if it is null or both
     * its field name and field value are blank (i.e. an empty placeholder row).
     *
     * @param correctionCriteria validated correction criteria
     * @return true if correction criteria is valid for saving
     */
    public static boolean validCorrectionCriteriaForSaving(CorrectionCriteria correctionCriteria) {
        return correctionCriteria == null
                || (StringUtils.isBlank(correctionCriteria.getCorrectionFieldName()) && StringUtils.isBlank(correctionCriteria.getCorrectionFieldValue()));
    }

    /**
     * When a correction change is about to be added to a group, this checks that it is
     * valid, meaning that the field name is not blank.
     *
     * @param correctionChange validated correction change
     * @return true if correction change is valid for adding
     */
    public static boolean validCorrectionChangeForAdding(CorrectionChange correctionChange) {
        return StringUtils.isNotBlank(correctionChange.getCorrectionFieldName());
    }

    /**
     * When a document is about to be saved, a change is valid if it is null or both its
     * field name and field value are blank (i.e. an empty placeholder row).
     *
     * @param correctionChange validated correction change
     * @return true if correction change is valid for saving
     */
    public static boolean validCorrectionChangeForSaving(CorrectionChange correctionChange) {
        return correctionChange == null
                || (StringUtils.isBlank(correctionChange.getCorrectionFieldName()) && StringUtils.isBlank(correctionChange.getCorrectionFieldValue()));
    }

    /**
     * Sets all origin entries' entry IDs to null within the collection.
     *
     * @param originEntries collection of origin entries
     */
    public static void setAllEntryIdsToNull(Collection<OriginEntryFull> originEntries) {
        for (OriginEntryFull entry : originEntries) {
            entry.setEntryId(null);
        }
    }

    /**
     * Sets all origin entries' entry IDs to be sequential, starting from 0, in
     * the collection's iteration order.
     *
     * @param originEntries collection of origin entries
     */
    public static void setSequentialEntryIds(Collection<OriginEntryFull> originEntries) {
        int index = 0;
        for (OriginEntryFull entry : originEntries) {
            // Integer.valueOf replaces the deprecated new Integer(int) constructor
            entry.setEntryId(Integer.valueOf(index));
            index++;
        }
    }

    /**
     * Returns whether an origin entry matches the passed in criterion. String fields (and
     * the string operators sw/ew/ct regardless of field type) are compared textually; other
     * field types are parsed and compared numerically / chronologically. A null actual value
     * or an unparseable test value never matches.
     *
     * @param cc correction criteria to test against origin entry
     * @param oe origin entry to test
     * @return true if origin entry matches the passed in criteria
     */
    public static boolean entryMatchesCriteria(CorrectionCriteria cc, OriginEntryFull oe) {
        OriginEntryFieldFinder oeff = new OriginEntryFieldFinder();
        Object fieldActualValue = oe.getFieldValue(cc.getCorrectionFieldName());
        // Blank criteria values are treated as the empty string so that blank-vs-blank matches.
        String fieldTestValue = StringUtils.isBlank(cc.getCorrectionFieldValue()) ? "" : cc.getCorrectionFieldValue();
        String fieldType = oeff.getFieldType(cc.getCorrectionFieldName());
        String fieldActualValueString = convertToString(fieldActualValue, fieldType);

        if ("String".equals(fieldType) || "sw".equals(cc.getCorrectionOperatorCode()) || "ew".equals(cc.getCorrectionOperatorCode()) || "ct".equals(cc.getCorrectionOperatorCode())) {
            return compareStringData(cc, fieldTestValue, fieldActualValueString);
        }

        int compareTo = 0;
        try {
            if (fieldActualValue == null) {
                return false;
            }
            if ("Integer".equals(fieldType)) {
                compareTo = ((Integer) fieldActualValue).compareTo(Integer.parseInt(fieldTestValue));
            }
            if ("KualiDecimal".equals(fieldType)) {
                // Parse from the String directly; routing through Double.parseDouble
                // introduced binary floating-point artifacts (e.g. 0.1 != 0.1d exactly).
                compareTo = ((KualiDecimal) fieldActualValue).compareTo(new KualiDecimal(fieldTestValue));
            }
            if ("BigDecimal".equals(fieldType)) {
                // BigDecimal(String) is exact; BigDecimal(double) is not.
                compareTo = ((BigDecimal) fieldActualValue).compareTo(new BigDecimal(fieldTestValue));
            }
            if ("Date".equals(fieldType)) {
                SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
                compareTo = ((Date) fieldActualValue).compareTo(df.parse(fieldTestValue));
            }
        }
        catch (Exception e) {
            // any exception while parsing data means the criterion cannot match
            return false;
        }
        return compareTo(compareTo, cc.getCorrectionOperatorCode());
    }

    /**
     * Compares string data using the criterion's operator code
     * (eq/ne/sw/ew/ct/lt/le/gt/ge).
     *
     * @param cc criteria
     * @param fieldTestValue test value
     * @param fieldActualValueString actual value
     * @return true if the actual value matches the criterion
     * @throws IllegalArgumentException on an unrecognized operator code
     */
    public static boolean compareStringData(CorrectionCriteria cc, String fieldTestValue, String fieldActualValueString) {
        if ("eq".equals(cc.getCorrectionOperatorCode())) {
            return fieldActualValueString.equals(fieldTestValue);
        }
        else if ("ne".equals(cc.getCorrectionOperatorCode())) {
            return (!fieldActualValueString.equals(fieldTestValue));
        }
        else if ("sw".equals(cc.getCorrectionOperatorCode())) {
            return fieldActualValueString.startsWith(fieldTestValue);
        }
        else if ("ew".equals(cc.getCorrectionOperatorCode())) {
            return fieldActualValueString.endsWith(fieldTestValue);
        }
        else if ("ct".equals(cc.getCorrectionOperatorCode())) {
            return (fieldActualValueString.indexOf(fieldTestValue) > -1);
        }
        else if ("lt".equals(cc.getCorrectionOperatorCode())) {
            return (fieldActualValueString.compareTo(fieldTestValue) < 0);
        }
        else if ("le".equals(cc.getCorrectionOperatorCode())) {
            return (fieldActualValueString.compareTo(fieldTestValue) <= 0);
        }
        else if ("gt".equals(cc.getCorrectionOperatorCode())) {
            return (fieldActualValueString.compareTo(fieldTestValue) > 0);
        }
        else if ("ge".equals(cc.getCorrectionOperatorCode())) {
            return (fieldActualValueString.compareTo(fieldTestValue) >= 0);
        }
        throw new IllegalArgumentException("Unknown operator: " + cc.getCorrectionOperatorCode());
    }

    /**
     * Interprets a compareTo-style result against a relational operator code.
     *
     * @param compareTo result of a Comparable.compareTo call (sign is what matters)
     * @param operatorCode one of eq/ne/lt/le/gt/ge
     * @return true if the comparison result satisfies the operator
     * @throws IllegalArgumentException on an unrecognized operator code
     */
    public static boolean compareTo(int compareTo, String operatorCode) {
        if ("eq".equals(operatorCode)) {
            return (compareTo == 0);
        }
        else if ("ne".equals(operatorCode)) {
            return (compareTo != 0);
        }
        else if ("lt".equals(operatorCode)) {
            return (compareTo < 0);
        }
        else if ("le".equals(operatorCode)) {
            return (compareTo <= 0);
        }
        else if ("gt".equals(operatorCode)) {
            return (compareTo > 0);
        }
        else if ("ge".equals(operatorCode)) {
            return (compareTo >= 0);
        }
        throw new IllegalArgumentException("Unknown operator: " + operatorCode);
    }

    /**
     * Converts the value into a string, with the appropriate formatting for its type.
     * Nulls and unrecognized types convert to the empty string.
     *
     * @param fieldActualValue actual field value
     * @param fieldType field type (i.e. "String", "Integer", "Date")
     * @return String object value as a string
     */
    public static String convertToString(Object fieldActualValue, String fieldType) {
        if (fieldActualValue == null) {
            return "";
        }
        if ("String".equals(fieldType)) {
            return (String) fieldActualValue;
        }
        else if ("Integer".equals(fieldType)) {
            Integer i = (Integer) fieldActualValue;
            return i.toString();
        }
        else if ("KualiDecimal".equals(fieldType)) {
            KualiDecimal kd = (KualiDecimal) fieldActualValue;
            return kd.toString();
        }
        else if ("BigDecimal".equals(fieldType)) {
            BigDecimal bd = (BigDecimal) fieldActualValue;
            return bd.toString();
        }
        else if ("Date".equals(fieldType)) {
            Date d = (Date) fieldActualValue;
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
            return df.format(d);
        }
        return "";
    }

    /**
     * Applies a list of change criteria groups to an origin entry. Note that the returned
     * value, if not null, is a reference to the same instance as the origin entry passed in
     * (i.e. intentional side effect).
     *
     * @param entry origin entry
     * @param matchCriteriaOnly if true and no criteria group matches, this method returns null
     * @param changeCriteriaGroups list of change criteria groups to apply
     * @return the passed in entry instance, or null (see above)
     */
    public static OriginEntryFull applyCriteriaToEntry(OriginEntryFull entry, boolean matchCriteriaOnly, List<CorrectionChangeGroup> changeCriteriaGroups) {
        if (matchCriteriaOnly && !doesEntryMatchAnyCriteriaGroups(entry, changeCriteriaGroups)) {
            return null;
        }

        for (CorrectionChangeGroup ccg : changeCriteriaGroups) {
            int matches = 0;
            for (CorrectionCriteria cc : ccg.getCorrectionCriteria()) {
                if (entryMatchesCriteria(cc, entry)) {
                    matches++;
                }
            }

            // Only when every criterion in the group matches are the group's changes applied
            if (matches == ccg.getCorrectionCriteria().size()) {
                for (CorrectionChange change : ccg.getCorrectionChange()) {
                    // Change the row
                    entry.setFieldValue(change.getCorrectionFieldName(), change.getCorrectionFieldValue());
                }
            }
        }
        return entry;
    }

    /**
     * Returns whether the entry matches any of the criteria groups (a group matches only
     * when all of its criteria match).
     *
     * @param entry origin entry
     * @param groups collection of correction change group
     * @return true if origin entry matches any of the criteria groups
     */
    public static boolean doesEntryMatchAnyCriteriaGroups(OriginEntryFull entry, Collection<CorrectionChangeGroup> groups) {
        boolean anyGroupMatch = false;
        for (CorrectionChangeGroup ccg : groups) {
            int matches = 0;
            for (CorrectionCriteria cc : ccg.getCorrectionCriteria()) {
                if (CorrectionDocumentUtils.entryMatchesCriteria(cc, entry)) {
                    matches++;
                }
            }

            if (matches == ccg.getCorrectionCriteria().size()) {
                anyGroupMatch = true;
                break;
            }
        }
        return anyGroupMatch;
    }

    /**
     * Computes the statistics (credit amount, debit amount, budget amount, row count) of a
     * collection of origin entries.
     *
     * @param entries list of origin entry entries
     * @return {@link OriginEntryStatistics} for the collection
     */
    public static OriginEntryStatistics getStatistics(Collection<OriginEntryFull> entries) {
        OriginEntryStatistics oes = new OriginEntryStatistics();
        for (OriginEntryFull oe : entries) {
            updateStatisticsWithEntry(oe, oes);
        }
        return oes;
    }

    /**
     * Returns whether the origin entry represents a debit.
     *
     * @param oe origin entry
     * @return true if origin entry represents a debit
     */
    public static boolean isDebit(OriginEntryFull oe) {
        return (KFSConstants.GL_DEBIT_CODE.equals(oe.getTransactionDebitCreditCode()));
    }

    /**
     * Returns whether the origin entry represents a budget.
     *
     * @param oe origin entry
     * @return true if origin entry represents a budget
     */
    public static boolean isBudget(OriginEntryFull oe) {
        return KFSConstants.GL_BUDGET_CODE.equals(oe.getTransactionDebitCreditCode());
    }

    /**
     * Returns whether the origin entry represents a credit.
     *
     * @param oe origin entry
     * @return true if origin entry represents a credit
     */
    public static boolean isCredit(OriginEntryFull oe) {
        return KFSConstants.GL_CREDIT_CODE.equals(oe.getTransactionDebitCreditCode());
    }

    /**
     * Adds information from the passed in entry to the statistics: count is always
     * incremented, and the entry amount is added to the debit, credit, or (by default)
     * budget total depending on the debit/credit code.
     *
     * @param entry origin entry
     * @param statistics statistics being accumulated
     */
    public static void updateStatisticsWithEntry(OriginEntryFull entry, OriginEntryStatistics statistics) {
        statistics.incrementCount();
        if (isDebit(entry)) {
            statistics.addDebit(entry.getTransactionLedgerEntryAmount());
        }
        else if (isCredit(entry)) {
            statistics.addCredit(entry.getTransactionLedgerEntryAmount());
        }
        else {
            statistics.addBudget(entry.getTransactionLedgerEntryAmount());
        }
    }

    /**
     * Copies the statistics data onto the document's correction totals.
     *
     * @param statistics origin entry statistics used to set the document
     * @param document document whose statistic information is being set
     */
    public static void copyStatisticsToDocument(OriginEntryStatistics statistics, GeneralLedgerCorrectionProcessDocument document) {
        document.setCorrectionCreditTotalAmount(statistics.getCreditTotalAmount());
        document.setCorrectionDebitTotalAmount(statistics.getDebitTotalAmount());
        document.setCorrectionBudgetTotalAmount(statistics.getBudgetTotalAmount());
        document.setCorrectionRowCount(statistics.getRowCount());
    }
}
package mil.nga.giat.geowave.core.ingest.local;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.ingest.GeoWaveData;
import mil.nga.giat.geowave.core.store.AdapterToIndexMapping;
import mil.nga.giat.geowave.core.store.IndexWriter;
import mil.nga.giat.geowave.core.store.adapter.WritableDataAdapter;
import mil.nga.giat.geowave.core.store.index.PrimaryIndex;
import mil.nga.giat.geowave.core.store.util.DataStoreUtils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An IngestTask is a thread which listens to items from a blocking queue, and
 * writes those items to IndexWriter objects obtained from LocalIngestRunData
 * (where they are constructed but also cached from the DataStore object). Read
 * items until isTerminated == true.
 */
public class IngestTask implements
		Runnable
{
	private final static Logger LOGGER = LoggerFactory.getLogger(IngestTask.class);

	// Identifier for log messages, usually (filename)-(counter).
	private final String id;
	// Queue this worker drains; the producer thread fills it and eventually calls terminate().
	private final BlockingQueue<GeoWaveData<?>> readQueue;
	// Shared per-run state: adapter lookup and the IndexWriter checkout/release pool.
	private final LocalIngestRunData runData;
	// Indexes requested explicitly for this run, keyed by index id.
	private final Map<ByteArrayId, PrimaryIndex> specifiedPrimaryIndexes;
	// Fallback indexes consulted when an id is not in specifiedPrimaryIndexes.
	private final Map<ByteArrayId, PrimaryIndex> requiredIndexMap;

	// Set by the producer thread (terminate()); read by the worker loop.
	private volatile boolean isTerminated = false;
	// Set true by the worker itself once run() has fully cleaned up.
	private volatile boolean isFinished = false;

	// Writers checked out from runData, keyed by adapter id; released in run()'s finally block.
	private Map<ByteArrayId, IndexWriter> indexWriters;
	// Adapter-id -> index mapping cache, built lazily in ingestData().
	private Map<ByteArrayId, AdapterToIndexMapping> adapterMappings;

	/**
	 * @param id identifier for this worker, usually (filename)-(counter)
	 * @param runData shared run state (adapters, index writer pool)
	 * @param specifiedPrimaryIndexes indexes explicitly requested for the run
	 * @param requiredIndexMap fallback indexes looked up when an id is not specified
	 * @param queue blocking queue of items to ingest
	 */
	public IngestTask(
			String id,
			LocalIngestRunData runData,
			Map<ByteArrayId, PrimaryIndex> specifiedPrimaryIndexes,
			Map<ByteArrayId, PrimaryIndex> requiredIndexMap,
			BlockingQueue<GeoWaveData<?>> queue ) {
		this.id = id;
		this.runData = runData;
		this.specifiedPrimaryIndexes = specifiedPrimaryIndexes;
		this.requiredIndexMap = requiredIndexMap;
		this.readQueue = queue;

		this.indexWriters = new HashMap<ByteArrayId, IndexWriter>();
		this.adapterMappings = new HashMap<ByteArrayId, AdapterToIndexMapping>();
	}

	/**
	 * This function is called by the thread placing items on the blocking
	 * queue.
	 */
	public void terminate() {
		isTerminated = true;
	}

	/**
	 * An identifier, usually (filename)-(counter)
	 *
	 * @return this worker's id
	 */
	public String getId() {
		return this.id;
	}

	/**
	 * Whether this worker has terminated.
	 *
	 * @return true once run() has exited and released its writers
	 */
	public boolean isFinished() {
		return isFinished;
	}

	/**
	 * This function will continue to read from the BlockingQueue until
	 * isTerminated is true and the queue is empty. Any exception escaping the
	 * loop is rethrown as a RuntimeException; in all cases the checked-out
	 * index writers are returned to the pool before the method exits.
	 */
	@SuppressWarnings({
		"unchecked",
		"rawtypes"
	})
	@Override
	public void run() {
		// Number of items successfully handed to a writer.
		int count = 0;
		// Accumulated milliseconds spent inside writer.write() calls.
		long dbWriteMs = 0L;

		try {
			LOGGER.debug(String.format(
					"Worker executing for plugin [%s]",
					this.getId()));

			while (true) {
				// Poll with a timeout so the termination flag is re-checked
				// periodically even when the queue is empty.
				GeoWaveData<?> geowaveData = readQueue.poll(
						100,
						TimeUnit.MILLISECONDS);
				if (geowaveData == null) {
					if (isTerminated && readQueue.size() == 0) {
						// Done!
						break;
					}
					// Didn't receive an item. Make sure we haven't been
					// terminated.
					LOGGER.debug(String.format(
							"Worker waiting for item [%s]",
							this.getId()));
					continue;
				}

				final WritableDataAdapter adapter = runData.getDataAdapter(geowaveData);
				if (adapter == null) {
					// No adapter -> the item cannot be written; skip it.
					LOGGER.warn(String.format(
							"Adapter not found for [%s] worker [%s]",
							geowaveData.getValue(),
							this.getId()));
					continue;
				}

				// Ingest the data!
				dbWriteMs += ingestData(
						geowaveData,
						adapter);

				count++;
			}
		}
		catch (Exception e) {
			// This should really never happen, because we don't limit the
			// amount of items in the IndexWriter pool.
			LOGGER.error(
					"Fatal error occured while trying to get an index writer.",
					e);
			throw new RuntimeException(
					"Fatal error occured while trying to get an index writer.",
					e);
		}
		finally {
			// Clean up index writers
			for (Entry<ByteArrayId, IndexWriter> writerEntry : indexWriters.entrySet()) {
				try {
					runData.releaseIndexWriter(
							adapterMappings.get(writerEntry.getKey()),
							writerEntry.getValue());
				}
				catch (Exception e) {
					// Best-effort release: log and keep releasing the rest.
					LOGGER.warn(
							String.format(
									"Could not return index writer: [%s]",
									writerEntry.getKey()),
							e);
				}
			}

			LOGGER.debug(String.format(
					"Worker exited for plugin [%s]; Ingested %d items in %d seconds",
					this.getId(),
					count,
					(int) dbWriteMs / 1000));

			// Published last so isFinished() implies writers were released.
			isFinished = true;
		}
	}

	/**
	 * Writes one item via the IndexWriter for its adapter, lazily resolving
	 * and caching the adapter-to-index mapping (and checking out a writer)
	 * on first sight of an adapter id.
	 *
	 * @param geowaveData the item to write
	 * @param adapter the adapter resolved for this item
	 * @return milliseconds spent in the writer.write() call
	 * @throws Exception propagated from index writer checkout or write
	 */
	private long ingestData(
			GeoWaveData<?> geowaveData,
			WritableDataAdapter adapter )
			throws Exception {
		AdapterToIndexMapping mapping = adapterMappings.get(adapter.getAdapterId());

		// First time we've seen this adapter: resolve its indexes, register
		// adapter/indexes with the run data, and check out a writer.
		if (mapping == null) {
			List<PrimaryIndex> indices = new ArrayList<PrimaryIndex>();
			for (final ByteArrayId indexId : geowaveData.getIndexIds()) {
				PrimaryIndex index = specifiedPrimaryIndexes.get(indexId);
				if (index == null) {
					index = requiredIndexMap.get(indexId);
					if (index == null) {
						// Unknown index id: skip it but still write to the others.
						LOGGER.warn(String.format(
								"Index '%s' not found for %s; worker [%s]",
								indexId.getString(),
								geowaveData.getValue(),
								this.getId()));
						continue;
					}
				}
				indices.add(index);
			}
			runData.addIndices(indices);
			runData.addAdapter(adapter);

			mapping = new AdapterToIndexMapping(
					adapter.getAdapterId(),
					indices.toArray(new PrimaryIndex[indices.size()]));
			adapterMappings.put(
					mapping.getAdapterId(),
					mapping);

			// If we have the index checked out already, use that.
			if (!indexWriters.containsKey(mapping.getAdapterId())) {
				indexWriters.put(
						mapping.getAdapterId(),
						runData.getIndexWriter(mapping));
			}
		}

		// Write the data to the data store.
		IndexWriter writer = indexWriters.get(mapping.getAdapterId());

		// Time the DB write
		long hack = System.currentTimeMillis();
		writer.write(geowaveData.getValue());
		long durMs = System.currentTimeMillis() - hack;

		return durMs;
	}
}
package org.greyfire.transcendancy.ui;

import java.awt.Canvas;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.MemoryImageSource;

/**
 * A software-rendered drawing surface backed by a flat int[] pixel buffer in
 * ARGB order (see {@link #translated_colour(int, int, int, int)}). Drawing
 * primitives write directly into the buffer; {@link #refresh()} pushes the
 * buffer to the screen through a {@link MemoryImageSource}.
 */
public class ArtCanvas extends Canvas implements ArtSurface {
    private static final long serialVersionUID = 1L;

    private Color background = Color.BLACK;
    private Graphics g;
    private MemoryImageSource mem;
    private Image screen;
    private int width;
    private int height;
    private int size;      // width * height, cached
    private int[] pixels;  // row-major ARGB buffer; index = y * width + x

    public static int WHITE = ArtCanvas.translated_colour(Color.WHITE);
    public static int BLACK = ArtCanvas.translated_colour(Color.BLACK);

    /**
     * Creates a canvas with the given pixel dimensions.
     *
     * @param width  buffer width in pixels, must be positive
     * @param height buffer height in pixels, must be positive
     * @throws IllegalArgumentException if either dimension is not positive
     */
    public ArtCanvas(int width, int height) {
        // NOTE(review): getGraphics() on a component that is not yet displayable
        // returns null, so this.g is almost certainly null here; paint() falls
        // back to its Graphics argument in that case — confirm intent.
        this.g = this.getGraphics();
        if (width <= 0 || height <= 0) {
            throw new IllegalArgumentException("dimensions must be positive");
        }
        this.width = width;
        this.height = height;
        this.size = width * height;
        this.pixels = new int[size];
        this.mem = new MemoryImageSource(width, height, pixels, 0, width);
        this.mem.setAnimated(true);
        this.mem.setFullBufferUpdates(true);
        this.screen = Toolkit.getDefaultToolkit().createImage(this.mem);
    }

    /** @return buffer width in pixels */
    public int width() {
        return this.width;
    }

    /** @return buffer height in pixels */
    public int height() {
        return this.height;
    }

    /** Repaint without clearing first (avoids flicker from Canvas's default update). */
    public void update(Graphics g) {
        this.paint(g);
    }

    /** Pushes the whole pixel buffer to the screen. */
    public void refresh() {
        this.repaint();
    }

    /** Pushes a region of the pixel buffer to the screen (currently repaints everything). */
    public void refresh(int x, int y, int w, int h) {
        this.repaint();
    }

    /**
     * Paints the buffered image. Uses the cached Graphics if available,
     * otherwise the one supplied by AWT; does nothing if neither exists.
     */
    public void paint(Graphics g) {
        Graphics graphics = this.g;
        if (graphics == null) {
            graphics = g;
        }
        if (graphics == null) {
            return;
        }
        this.mem.newPixels();
        graphics.drawImage(screen, 0, 0, null);
    }

    /**
     * Paints after notifying the image source that only the given region changed.
     */
    public void paint(Graphics g, int x, int y, int w, int h) {
        Graphics graphics = this.g;
        if (graphics == null) {
            graphics = g;
        }
        if (graphics == null) {
            return;
        }
        this.mem.newPixels(x, y, w, h);
        this.g.drawImage(screen, 0, 0, null);
    }

    /**
     * Packs an AWT Color into this canvas's int pixel format (alpha in the
     * high byte, then red, green, blue).
     */
    public static int translated_colour(Color clr) {
        int r = clr.getRed();
        int g = clr.getGreen();
        int b = clr.getBlue();
        int a = clr.getAlpha();
        return translated_colour(r, g, b, a);
    }

    /** Packs 8-bit channel values into an ARGB int. */
    public static int translated_colour(int r, int g, int b, int a) {
        return ((b << 0) | (g << 8) | (r << 16) | (a << 24));
    }

    /** Packs an opaque (alpha 255) RGB colour. */
    public static int translated_colour(int r, int g, int b) {
        return translated_colour(r, g, b, 255);
    }

    /**
     * Linear blend of two packed colours: result = c1*proportion + c2*(1-proportion),
     * per RGB channel. Alpha of the inputs is ignored; the result is opaque.
     */
    public static int interpolate_colours(int c1, int c2, double proportion) {
        int r1 = (c1 & 0x00FF0000) >> 16;
        int g1 = (c1 & 0x0000FF00) >> 8;
        int b1 = (c1 & 0x000000FF);
        int r2 = (c2 & 0x00FF0000) >> 16;
        int g2 = (c2 & 0x0000FF00) >> 8;
        int b2 = (c2 & 0x000000FF);
        r1 = (int) (r1 * proportion + r2 * (1 - proportion));
        g1 = (int) (g1 * proportion + g2 * (1 - proportion));
        b1 = (int) (b1 * proportion + b2 * (1 - proportion));
        return translated_colour(r1, g1, b1, 255);
    }

    /**
     * Component-wise multiply of two packed colours (each channel scaled into 0..255),
     * including the alpha channel.
     */
    public static int multiply_colours(int c1, int c2) {
        // BUGFIX: the alpha channel must be extracted with an unsigned shift;
        // (c & 0xFF000000) >> 24 sign-extends and yields a negative value for
        // any alpha >= 128 (e.g. opaque alpha 255 became -1), corrupting the
        // result. blit() already worked around the same problem manually.
        int a1 = (c1 >>> 24) & 0xFF;
        int r1 = (c1 & 0x00FF0000) >> 16;
        int g1 = (c1 & 0x0000FF00) >> 8;
        int b1 = (c1 & 0x000000FF);
        int a2 = (c2 >>> 24) & 0xFF;
        int r2 = (c2 & 0x00FF0000) >> 16;
        int g2 = (c2 & 0x0000FF00) >> 8;
        int b2 = (c2 & 0x000000FF);
        r1 = (r1 * r2) / 255;
        g1 = (g1 * g2) / 255;
        b1 = (b1 * b2) / 255;
        a1 = (a1 * a2) / 255;
        return translated_colour(r1, g1, b1, a1);
    }

    /** Fills the entire buffer with the given colour. */
    public void clear(Color clr) {
        int c = translated_colour(clr);
        for (int i = 0; i < this.size; i++) {
            this.pixels[i] = c;
        }
    }

    /** Sets one pixel. No bounds checking — see pset_safe. */
    public void pset(int x, int y, int c) {
        int point = y * width + x;
        this.pixels[point] = c;
    }

    /** Sets one pixel from an AWT Color. No bounds checking. */
    public void pset(int x, int y, Color clr) {
        int point = y * width + x;
        pixels[point] = translated_colour(clr);
    }

    /** Sets one pixel, silently ignoring coordinates outside the buffer. */
    public void pset_safe(int x, int y, int c) {
        // BUGFIX: bounds were x >= width-1 / y >= height-1, which wrongly
        // rejected the last valid column and row (pset accepts them).
        if (x < 0 || y < 0 || x >= width || y >= height) {
            return;
        }
        int point = y * width + x;
        this.pixels[point] = c;
    }

    /** Sets one pixel from an AWT Color, silently ignoring out-of-bounds coordinates. */
    public void pset_safe(int x, int y, Color clr) {
        // BUGFIX: same off-by-one as pset_safe(int,int,int) — last row/column
        // are valid and must not be rejected.
        if (x < 0 || y < 0 || x >= width || y >= height) {
            return;
        }
        int point = y * width + x;
        pixels[point] = translated_colour(clr);
    }

    /** Reads one pixel. No bounds checking. */
    public int pget(int x, int y) {
        int point = y * width + x;
        return this.pixels[point];
    }

    /**
     * Draws a line with Bresenham's algorithm. No bounds checking — see line_safe.
     */
    public void line(int x0, int y0, int x1, int y1, int c) {
        int ax0 = x0;
        int ax1 = x1;
        int ay0 = y0;
        int ay1 = y1;
        int dx = ax1 - ax0;
        int dy = ay1 - ay0;
        // For steep lines, swap x/y so the loop always steps along the major axis.
        boolean steep = (Math.abs(dy) > Math.abs(dx));
        if (steep) {
            int tmp = ax0;
            ax0 = ay0;
            ay0 = tmp;
            tmp = ax1;
            ax1 = ay1;
            ay1 = tmp;
        }
        // Always iterate left-to-right.
        if (ax0 > ax1) {
            int tmp = ax1;
            ax1 = ax0;
            ax0 = tmp;
            tmp = ay1;
            ay1 = ay0;
            ay0 = tmp;
        }
        dx = ax1 - ax0;
        dy = Math.abs(ay1 - ay0);
        int err = dx / 2;
        int iy = ay0;
        int ystep = (ay0 < ay1 ? 1 : -1);
        for (int ix = ax0; ix <= ax1; ix++) {
            if (steep) {
                this.pset(iy, ix, c);
            } else {
                this.pset(ix, iy, c);
            }
            err -= dy;
            if (err < 0) {
                iy += ystep;
                err += dx;
            }
        }
    }

    /**
     * Draws a line with Bresenham's algorithm, clipping pixels outside the buffer.
     */
    public void line_safe(int x0, int y0, int x1, int y1, int c) {
        int ax0 = x0;
        int ax1 = x1;
        int ay0 = y0;
        int ay1 = y1;
        int dx = ax1 - ax0;
        int dy = ay1 - ay0;
        boolean steep = (Math.abs(dy) > Math.abs(dx));
        if (steep) {
            int tmp = ax0;
            ax0 = ay0;
            ay0 = tmp;
            tmp = ax1;
            ax1 = ay1;
            ay1 = tmp;
        }
        if (ax0 > ax1) {
            int tmp = ax1;
            ax1 = ax0;
            ax0 = tmp;
            tmp = ay1;
            ay1 = ay0;
            ay0 = tmp;
        }
        dx = ax1 - ax0;
        dy = Math.abs(ay1 - ay0);
        int err = dx / 2;
        int iy = ay0;
        int ystep = (ay0 < ay1 ? 1 : -1);
        for (int ix = ax0; ix <= ax1; ix++) {
            if (steep) {
                this.pset_safe(iy, ix, c);
            } else {
                this.pset_safe(ix, iy, c);
            }
            err -= dy;
            if (err < 0) {
                iy += ystep;
                err += dx;
            }
        }
    }

    /** Draws an unclipped line from an AWT Color. */
    public void line(int x0, int y0, int x1, int y1, Color clr) {
        this.line(x0, y0, x1, y1, translated_colour(clr));
    }

    /** Draws a clipped line from an AWT Color. */
    public void line_safe(int x0, int y0, int x1, int y1, Color clr) {
        this.line_safe(x0, y0, x1, y1, translated_colour(clr));
    }

    /**
     * Alpha-blends a BufferedImage into the buffer with its top-left at (x, y).
     * No bounds checking: the image must fit entirely within the buffer or
     * pset/pget will throw ArrayIndexOutOfBoundsException.
     */
    public void blit(int x, int y, BufferedImage img) {
        int u, v;
        int X;
        int Y;
        int w = img.getWidth();
        int h = img.getHeight();
        int c;
        int ia;
        double a;
        for (v = 0; v < h; v++) {
            Y = y + v;
            for (u = 0; u < w; u++) {
                X = x + u;
                c = img.getRGB(u, v);
                // Unsigned shift yields the alpha byte directly in 0..255,
                // replacing the previous signed-shift-plus-correction dance.
                ia = c >>> 24;
                a = ia / 255.0;
                c = ArtCanvas.interpolate_colours(c, this.pget(X, Y), a);
                this.pset(X, Y, c);
            }
        }
    }

    /*
     * setFont setColor drawImage drawLine drawOval drawRect drawString fillOval
     * fillRect
     */

    /**
     * Draws a filled circle outline band (unclipped).
     *
     * @throws IllegalArgumentException if radius is negative
     */
    public void circle(int x0, int y0, int radius, int c) {
        /* TODO: change this to Bresenham's circle instead of this awful sqrt() approach */
        if (radius < 0) {
            throw new IllegalArgumentException("radius must be non-negative");
        }
        if (radius == 0) {
            this.pset(x0, y0, c);
            return;
        }
        int u, v;
        int r2 = radius * radius;
        int last = radius;
        for (u = 0; u <= radius; u++) {
            v = (int) Math.round(Math.sqrt(r2 - u * u));
            // When the arc drops more than one pixel between columns, fill the
            // gap with vertical line segments so the outline stays connected.
            if (v < last - 1) {
                this.line(x0 + u, y0 + last - 1, x0 + u, y0 + v, c);
                this.line(x0 - u, y0 + last - 1, x0 - u, y0 + v, c);
                this.line(x0 + u, y0 - last + 1, x0 + u, y0 - v, c);
                this.line(x0 - u, y0 - last + 1, x0 - u, y0 - v, c);
            } else {
                this.pset(x0 + u, y0 + v, c);
                this.pset(x0 - u, y0 + v, c);
                this.pset(x0 + u, y0 - v, c);
                this.pset(x0 - u, y0 - v, c);
            }
            last = v;
        }
    }

    /**
     * Draws a circle outline, clipping pixels outside the buffer.
     *
     * @throws IllegalArgumentException if radius is negative
     */
    public void circle_safe(int x0, int y0, int radius, int c) {
        /* TODO: change this to Bresenham's circle instead of this awful sqrt() approach */
        if (radius < 0) {
            throw new IllegalArgumentException("radius must be non-negative");
        }
        if (radius == 0) {
            this.pset_safe(x0, y0, c);
            return;
        }
        int u, v;
        int r2 = radius * radius;
        int last = radius;
        for (u = 0; u <= radius; u++) {
            v = (int) Math.round(Math.sqrt(r2 - u * u));
            if (v < last - 1) {
                this.line_safe(x0 + u, y0 + last - 1, x0 + u, y0 + v, c);
                this.line_safe(x0 - u, y0 + last - 1, x0 - u, y0 + v, c);
                this.line_safe(x0 + u, y0 - last + 1, x0 + u, y0 - v, c);
                this.line_safe(x0 - u, y0 - last + 1, x0 - u, y0 - v, c);
            } else {
                this.pset_safe(x0 + u, y0 + v, c);
                this.pset_safe(x0 - u, y0 + v, c);
                this.pset_safe(x0 + u, y0 - v, c);
                this.pset_safe(x0 - u, y0 - v, c);
            }
            last = v;
        }
    }

    /** Draws an unclipped circle outline from an AWT Color. */
    public void circle(int x0, int y0, int radius, Color clr) {
        this.circle(x0, y0, radius, ArtCanvas.translated_colour(clr));
    }

    /** Draws a clipped circle outline from an AWT Color. */
    public void circle_safe(int x0, int y0, int radius, Color clr) {
        this.circle_safe(x0, y0, radius, ArtCanvas.translated_colour(clr));
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.util.gotoByName; import com.google.common.annotations.VisibleForTesting; import com.intellij.ide.IdeBundle; import com.intellij.ide.actions.ApplyIntentionAction; import com.intellij.ide.actions.ShowSettingsUtilImpl; import com.intellij.ide.ui.UISettings; import com.intellij.ide.ui.search.BooleanOptionDescription; import com.intellij.ide.ui.search.OptionDescription; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.ActionUtil; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.options.Configurable; import com.intellij.openapi.options.SearchableConfigurable; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressIndicatorProvider; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.codeStyle.MinusculeMatcher; import com.intellij.ui.*; import com.intellij.ui.components.JBLabel; import 
com.intellij.ui.components.OnOffButton; import com.intellij.ui.speedSearch.SpeedSearchUtil; import com.intellij.util.ArrayUtil; import com.intellij.util.Function; import com.intellij.util.concurrency.Semaphore; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.ContainerUtilRt; import com.intellij.util.ui.EmptyIcon; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import java.awt.*; import java.util.*; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.intellij.openapi.keymap.KeymapUtil.getActiveKeymapShortcuts; import static com.intellij.ui.SimpleTextAttributes.STYLE_PLAIN; import static com.intellij.ui.SimpleTextAttributes.STYLE_SEARCH_MATCH; public class GotoActionModel implements ChooseByNameModel, Comparator<Object>, DumbAware { private static final Pattern INNER_GROUP_WITH_IDS = Pattern.compile("(.*) \\(\\d+\\)"); @Nullable private final Project myProject; private final Component myContextComponent; @Nullable private final Editor myEditor; protected final ActionManager myActionManager = ActionManager.getInstance(); private static final Icon EMPTY_ICON = EmptyIcon.ICON_18; protected final Map<AnAction, String> myActionGroups = ContainerUtil.newHashMap(); private final NotNullLazyValue<Map<String, String>> myConfigurablesNames = VolatileNotNullLazyValue.createValue(() -> { Map<String, String> map = ContainerUtil.newTroveMap(); for (Configurable configurable : ShowSettingsUtilImpl.getConfigurables(getProject(), true)) { if (configurable instanceof SearchableConfigurable) { map.put(((SearchableConfigurable)configurable).getId(), configurable.getDisplayName()); } } return map; }); private final ModalityState myModality; public GotoActionModel(@Nullable Project project, 
Component component, @Nullable Editor editor) { this(project, component, editor, ModalityState.defaultModalityState()); } public GotoActionModel(@Nullable Project project, Component component, @Nullable Editor editor, @Nullable ModalityState modalityState) { myProject = project; myContextComponent = component; myEditor = editor; myModality = modalityState; ActionGroup mainMenu = (ActionGroup)myActionManager.getActionOrStub(IdeActions.GROUP_MAIN_MENU); assert mainMenu != null; collectActions(myActionGroups, mainMenu, mainMenu.getTemplatePresentation().getText()); } @NotNull Map<String, ApplyIntentionAction> getAvailableIntentions() { Map<String, ApplyIntentionAction> map = new TreeMap<>(); if (myProject != null && !myProject.isDisposed() && myEditor != null && !myEditor.isDisposed()) { PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(myEditor.getDocument()); ApplyIntentionAction[] children = file == null ? null : ApplyIntentionAction.getAvailableIntentions(myEditor, file); if (children != null) { for (ApplyIntentionAction action : children) { map.put(action.getName(), action); } } } return map; } @Override public String getPromptText() { return IdeBundle.message("prompt.gotoaction.enter.action"); } @Nullable @Override public String getCheckBoxName() { return IdeBundle.message("checkbox.disabled.included"); } @Override public char getCheckBoxMnemonic() { return 'd'; } @Override public String getNotInMessage() { return IdeBundle.message("label.no.enabled.actions.found"); } @Override public String getNotFoundMessage() { return IdeBundle.message("label.no.actions.found"); } @Override public boolean loadInitialCheckBoxState() { return false; } @Override public void saveInitialCheckBoxState(boolean state) { } public static class MatchedValue implements Comparable<MatchedValue> { @NotNull public final Comparable value; @NotNull final String pattern; public MatchedValue(@NotNull Comparable value, @NotNull String pattern) { this.value = value; this.pattern 
= pattern;
  }

  /** User-visible text of this hit: the option's hit text, or the action's presentation text. */
  @Nullable
  @VisibleForTesting
  public String getValueText() {
    if (value instanceof OptionDescription) return ((OptionDescription)value).getHit();
    if (!(value instanceof ActionWrapper)) return null;
    return ((ActionWrapper)value).getAction().getTemplatePresentation().getText();
  }

  @Nullable
  @Override
  public String toString() {
    return getMatchingDegree() + " " + getValueText();
  }

  // Higher is better; non-group actions get a +1 bonus over groups with the same rank.
  private int getMatchingDegree() {
    String text = getValueText();
    if (text != null) {
      int degree = getRank(text);
      return value instanceof ActionWrapper && !((ActionWrapper)value).isGroupAction() ? degree + 1 : degree;
    }
    return 0;
  }

  // 3 = exact match (ignoring a trailing "..."), 2 = prefix, 1 = substring, 0 = no match.
  private int getRank(@NotNull String text) {
    if (StringUtil.equalsIgnoreCase(StringUtil.trimEnd(text, "..."), pattern)) return 3;
    if (StringUtil.startsWithIgnoreCase(text, pattern)) return 2;
    if (StringUtil.containsIgnoreCase(text, pattern)) return 1;
    return 0;
  }

  @Override
  public int compareTo(@NotNull MatchedValue o) {
    if (o == this) return 0;
    // Better matching degree sorts first.
    int diff = o.getMatchingDegree() - getMatchingDegree();
    if (diff != 0) return diff;
    boolean edt = ApplicationManager.getApplication().isDispatchThread();
    if (value instanceof ActionWrapper && o.value instanceof ActionWrapper) {
      // Availability may lazily compute a presentation on the EDT; only consult it when
      // already on the EDT or when both presentations are cached.
      if (edt || ((ActionWrapper)value).hasPresentation() && ((ActionWrapper)o.value).hasPresentation()) {
        boolean p1Enable = ((ActionWrapper)value).isAvailable();
        boolean p2enable = ((ActionWrapper)o.value).isAvailable();
        if (p1Enable && !p2enable) return -1;
        if (!p1Enable && p2enable) return 1;
      }
      //noinspection unchecked
      int compared = value.compareTo(o.value);
      if (compared != 0) return compared;
    }
    // Action vs boolean option: the available action wins, but only when computed on the EDT.
    if (value instanceof ActionWrapper && o.value instanceof BooleanOptionDescription) {
      return edt && ((ActionWrapper)value).isAvailable() ? -1 : 1;
    }
    if (o.value instanceof ActionWrapper && value instanceof BooleanOptionDescription) {
      return edt && ((ActionWrapper)o.value).isAvailable() ?
        1 : -1;
    }
    // Boolean options sort before plain options; actions sort before plain options.
    if (value instanceof BooleanOptionDescription && !(o.value instanceof BooleanOptionDescription) && o.value instanceof OptionDescription) return -1;
    if (o.value instanceof BooleanOptionDescription && !(value instanceof BooleanOptionDescription) && value instanceof OptionDescription) return 1;
    if (value instanceof OptionDescription && !(o.value instanceof OptionDescription)) return 1;
    if (o.value instanceof OptionDescription && !(value instanceof OptionDescription)) return -1;
    // Shorter text wins as a tie-breaker.
    diff = StringUtil.notNullize(getValueText()).length() - StringUtil.notNullize(o.getValueText()).length();
    if (diff != 0) return diff;
    //noinspection unchecked
    diff = value.compareTo(o.value);
    if (diff != 0) return diff;
    // Last resort: arbitrary but stable within a session.
    // NOTE(review): int subtraction can overflow and this ordering is inconsistent
    // with equals() — pre-existing behavior, kept as is.
    return o.hashCode() - hashCode();
  }
}

@Override
public ListCellRenderer getListCellRenderer() {
  return new GotoActionListCellRenderer(this::getGroupName);
}

protected String getActionId(@NotNull AnAction anAction) {
  return myActionManager.getId(anAction);
}

/**
 * Builds a fixed-size label: the icon is centered over EMPTY_ICON and greyed out when
 * {@code disabled}; icons larger than EMPTY_ICON are dropped entirely.
 */
@NotNull
private static JLabel createIconLabel(@Nullable Icon icon, boolean disabled) {
  LayeredIcon layeredIcon = new LayeredIcon(2);
  layeredIcon.setIcon(EMPTY_ICON, 0);
  if (icon == null) return new JLabel(layeredIcon);
  int width = icon.getIconWidth();
  int height = icon.getIconHeight();
  int emptyIconWidth = EMPTY_ICON.getIconWidth();
  int emptyIconHeight = EMPTY_ICON.getIconHeight();
  if (width <= emptyIconWidth && height <= emptyIconHeight) {
    layeredIcon.setIcon(disabled && IconLoader.isGoodSize(icon) ?
IconLoader.getDisabledIcon(icon) : icon, 1, (emptyIconWidth - width) / 2, (emptyIconHeight - height) / 2);
  }
  return new JLabel(layeredIcon);
}

@Override
public int compare(@NotNull Object o1, @NotNull Object o2) {
  // The "..." overflow element always sorts last.
  if (ChooseByNameBase.EXTRA_ELEM.equals(o1)) return 1;
  if (ChooseByNameBase.EXTRA_ELEM.equals(o2)) return -1;
  return ((MatchedValue)o1).compareTo((MatchedValue)o2);
}

/** Runs the action's (dumb-aware) update in the ACTION_SEARCH place and returns the event. */
@NotNull
public static AnActionEvent updateActionBeforeShow(@NotNull AnAction anAction, @NotNull DataContext dataContext) {
  AnActionEvent event = AnActionEvent.createFromDataContext(ActionPlaces.ACTION_SEARCH, null, dataContext);
  ActionUtil.performDumbAwareUpdate(anAction, event, false);
  return event;
}

/** Foreground for a list row: inactive for disabled/hidden actions, otherwise selection-aware. */
public static Color defaultActionForeground(boolean isSelected, @Nullable Presentation presentation) {
  if (presentation != null && (!presentation.isEnabled() || !presentation.isVisible())) return UIUtil.getInactiveTextColor();
  if (isSelected) return UIUtil.getListSelectionForeground();
  return UIUtil.getListForeground();
}

// Name-based lookup is not used by this model; matching happens through actionMatches().
@Override
@NotNull
public String[] getNames(boolean checkBoxState) {
  return ArrayUtil.EMPTY_STRING_ARRAY;
}

@Override
@NotNull
public Object[] getElementsByName(String id, boolean checkBoxState, String pattern) {
  return ArrayUtil.EMPTY_OBJECT_ARRAY;
}

/** Display group for a settings option, e.g. "Settings > Editor" ("Preferences" on macOS). */
@NotNull
public String getGroupName(@NotNull OptionDescription description) {
  String name = description.getGroupName();
  if (name == null) name = myConfigurablesNames.getValue().get(description.getConfigurableId());
  String settings = SystemInfo.isMac ?
"Preferences" : "Settings";
  if (name == null || name.equals(description.getHit())) return settings;
  return settings + " > " + name;
}

@NotNull
Map<String, String> getConfigurablesNames() {
  return myConfigurablesNames.getValue();
}

/**
 * Recursively records, for every action reachable from {@code group}, the name of its
 * containing group. An action reachable from several groups is remapped to null
 * (its group is ambiguous).
 */
private void collectActions(@NotNull Map<AnAction, String> result, @NotNull ActionGroup group, @Nullable String containingGroupName) {
  AnAction[] actions = group.getChildren(null);
  includeGroup(result, group, actions, containingGroupName);
  for (AnAction action : actions) {
    if (action == null || action instanceof Separator) continue;
    if (action instanceof ActionGroup) {
      ActionGroup actionGroup = (ActionGroup)action;
      String groupName = actionGroup.getTemplatePresentation().getText();
      // Non-popup or unnamed groups are transparent: children keep the parent's group name.
      collectActions(result, actionGroup, getGroupName(StringUtil.isEmpty(groupName) || !actionGroup.isPopup() ? containingGroupName : groupName));
    }
    else {
      String groupName = group.getTemplatePresentation().getText();
      if (result.containsKey(action)) {
        // Already seen under another group — mark the group name as ambiguous.
        result.put(action, null);
      }
      else {
        result.put(action, getGroupName(StringUtil.isEmpty(groupName) ? containingGroupName : groupName));
      }
    }
  }
}

// Normalizes a group name via the INNER_GROUP_WITH_IDS pattern (declared elsewhere in this
// file); returns the first capture group when the pattern matches the whole name.
@Nullable
private static String getGroupName(@Nullable String groupName) {
  if (groupName != null) {
    Matcher matcher = INNER_GROUP_WITH_IDS.matcher(groupName);
    if (matcher.matches()) return matcher.group(1);
  }
  return groupName;
}

// Registers the group itself, but only when none of its children has a registered action id.
private void includeGroup(@NotNull Map<AnAction, String> result,
                          @NotNull ActionGroup group,
                          @NotNull AnAction[] actions,
                          @Nullable String containingGroupName) {
  boolean showGroup = true;
  for (AnAction action : actions) {
    if (myActionManager.getId(action) != null) {
      showGroup = false;
      break;
    }
  }
  if (showGroup) {
    result.put(group, getGroupName(containingGroupName));
  }
}

@Override
@Nullable
public String getFullName(@NotNull Object element) {
  return getElementName(element);
}

@NonNls
@Override
public String getHelpId() {
  return "procedures.navigating.goto.action";
}

@Override
@NotNull
public String[] getSeparators() {
  return ArrayUtil.EMPTY_STRING_ARRAY;
}

@Nullable
@Override
public String getElementName(@NotNull Object mv) {
  return ((MatchedValue) mv).getValueText();
}

/** Classifies how {@code anAction} matches the pattern: by name, description or group name. */
protected MatchMode actionMatches(@NotNull String pattern, MinusculeMatcher matcher, @NotNull AnAction anAction) {
  Presentation presentation = anAction.getTemplatePresentation();
  String text = presentation.getText();
  String description = presentation.getDescription();
  String groupName = myActionGroups.get(anAction);
  if (text != null && matcher.matches(text)) {
    return MatchMode.NAME;
  }
  else if (description != null && !description.equals(text) && matcher.matches(description)) {
    return MatchMode.DESCRIPTION;
  }
  if (text == null) {
    return MatchMode.NONE;
  }
  if (matcher.matches(groupName + " " + text)) {
    return anAction instanceof ToggleAction ? MatchMode.NAME : MatchMode.GROUP;
  }
  return matcher.matches(text + " " + groupName) ?
MatchMode.GROUP : MatchMode.NONE;
}

@Nullable
protected Project getProject() {
  return myProject;
}

protected Component getContextComponent() {
  return myContextComponent;
}

@NotNull
public SortedSet<Object> sortItems(@NotNull Set<Object> elements) {
  // Sorts with this model itself as the comparator (see compare() above).
  TreeSet<Object> objects = ContainerUtilRt.newTreeSet(this);
  objects.addAll(elements);
  return objects;
}

/**
 * Runs {@code update} on the EDT with this model's modality and blocks the calling
 * (background) thread until it finishes, polling the progress indicator for cancellation.
 */
private void updateOnEdt(Runnable update) {
  Semaphore semaphore = new Semaphore(1);
  ProgressIndicator indicator = ProgressIndicatorProvider.getGlobalProgressIndicator();
  ApplicationManager.getApplication().invokeLater(() -> {
    try {
      update.run();
    }
    finally {
      semaphore.up();
    }
  }, myModality, __ -> indicator != null && indicator.isCanceled());
  while (!semaphore.waitFor(10)) {
    if (indicator != null && indicator.isCanceled()) {
      // don't use `checkCanceled` because some smart devs might suppress PCE and end up with a deadlock like IDEA-177788
      throw new ProcessCanceledException();
    }
  }
}

/** How an element matched the search pattern. */
public enum MatchMode {
  NONE, INTENTION, NAME, DESCRIPTION, GROUP, NON_MENU
}

@Override
public boolean willOpenEditor() {
  return false;
}

@Override
public boolean useMiddleMatching() {
  return true;
}

/** An action hit, together with its group name and a lazily computed presentation. */
public static class ActionWrapper implements Comparable<ActionWrapper> {
  @NotNull private final AnAction myAction;
  @NotNull private final MatchMode myMode;
  @Nullable private final String myGroupName;
  private final DataContext myDataContext;
  private final GotoActionModel myModel;
  // Filled lazily by getPresentation(); volatile because it is written on the EDT while
  // a background thread may read it.
  private volatile Presentation myPresentation;

  public ActionWrapper(@NotNull AnAction action,
                       @Nullable String groupName,
                       @NotNull MatchMode mode,
                       DataContext dataContext,
                       GotoActionModel model) {
    myAction = action;
    myMode = mode;
    myGroupName = groupName;
    myDataContext = dataContext;
    myModel = model;
  }

  @NotNull
  public AnAction getAction() {
    return myAction;
  }

  @NotNull
  public MatchMode getMode() {
    return myMode;
  }

  @Override
  public int compareTo(@NotNull ActionWrapper o) {
    int compared = myMode.compareTo(o.getMode());
    if (compared != 0) return compared;
    // NOTE(review): this local deliberately (?) shadows the field of the same name.
    Presentation myPresentation = myAction.getTemplatePresentation();
    Presentation oPresentation = o.getAction().getTemplatePresentation();
    String myText = myPresentation.getText();
    String oText = oPresentation.getText();
    // Order by visible text (ignoring trailing "..."), then raw text length, group, description.
    int byText = StringUtil.compare(StringUtil.trimEnd(myText, "..."), StringUtil.trimEnd(oText, "..."), true);
    if (byText != 0) return byText;
    int byTextLength = StringUtil.notNullize(myText).length() - StringUtil.notNullize(oText).length();
    if (byTextLength != 0) return byTextLength;
    int byGroup = Comparing.compare(myGroupName, o.myGroupName);
    if (byGroup != 0) return byGroup;
    int byDesc = StringUtil.compare(myPresentation.getDescription(), oPresentation.getDescription(), true);
    if (byDesc != 0) return byDesc;
    return 0;
  }

  public boolean isAvailable() {
    Presentation presentation = getPresentation();
    return presentation != null && presentation.isEnabledAndVisible();
  }

  // Computes the presentation on first access; off the EDT this blocks via updateOnEdt().
  public Presentation getPresentation() {
    if (myPresentation != null) return myPresentation;
    Runnable r = () -> myPresentation = updateActionBeforeShow(myAction, myDataContext).getPresentation();
    if (ApplicationManager.getApplication().isDispatchThread()) {
      r.run();
    }
    else {
      myModel.updateOnEdt(r);
    }
    return myPresentation;
  }

  private boolean hasPresentation() {
    return myPresentation != null;
  }

  @Nullable
  public String getGroupName() {
    // A group whose own name equals the containing group name needs no separate label.
    if (myAction instanceof ActionGroup && Comparing.equal(myAction.getTemplatePresentation().getText(), myGroupName)) return null;
    return myGroupName;
  }

  public boolean isGroupAction() {
    return myAction instanceof ActionGroup;
  }

  @Override
  public boolean equals(Object obj) {
    return obj instanceof ActionWrapper && compareTo((ActionWrapper)obj) == 0;
  }

  @Override
  public int hashCode() {
    String text = myAction.getTemplatePresentation().getText();
    return text != null ?
text.hashCode() : 0;
  }

  @Override
  public String toString() {
    return myAction.toString();
  }
}

/** Renders matched actions and settings options in the popup list. */
public static class GotoActionListCellRenderer extends DefaultListCellRenderer {
  private final Function<OptionDescription, String> myGroupNamer;

  public GotoActionListCellRenderer(Function<OptionDescription, String> groupNamer) {
    myGroupNamer = groupNamer;
  }

  @NotNull
  @Override
  public Component getListCellRendererComponent(@NotNull JList list,
                                                Object matchedValue,
                                                int index,
                                                boolean isSelected,
                                                boolean cellHasFocus) {
    boolean showIcon = UISettings.getInstance().getShowIconsInMenus();
    JPanel panel = new JPanel(new BorderLayout());
    panel.setBorder(JBUI.Borders.empty(2));
    panel.setOpaque(true);
    Color bg = UIUtil.getListBackground(isSelected);
    panel.setBackground(bg);
    SimpleColoredComponent nameComponent = new SimpleColoredComponent();
    nameComponent.setBackground(bg);
    panel.add(nameComponent, BorderLayout.CENTER);
    if (matchedValue instanceof String) {
      // A raw String element is rendered as plain text (presumably the overflow item).
      nameComponent.append((String)matchedValue, new SimpleTextAttributes(STYLE_PLAIN, defaultActionForeground(isSelected, null)));
      if (showIcon) {
        panel.add(new JBLabel(EMPTY_ICON), BorderLayout.WEST);
      }
      return panel;
    }
    Color groupFg = isSelected ? UIUtil.getListSelectionForeground() : UIUtil.getLabelDisabledForeground();
    Object value = ((MatchedValue) matchedValue).value;
    String pattern = ((MatchedValue)matchedValue).pattern;
    Border eastBorder = JBUI.Borders.emptyRight(2);
    if (value instanceof ActionWrapper) {
      ActionWrapper actionWithParentGroup = (ActionWrapper)value;
      AnAction anAction = actionWithParentGroup.getAction();
      Presentation presentation = anAction.getTemplatePresentation();
      boolean toggle = anAction instanceof ToggleAction;
      // Intention actions carry no group label.
      String groupName = actionWithParentGroup.getAction() instanceof ApplyIntentionAction ?
        null : actionWithParentGroup.getGroupName();
      Presentation actionPresentation = actionWithParentGroup.getPresentation();
      Color fg = defaultActionForeground(isSelected, actionPresentation);
      boolean disabled = actionPresentation != null && (!actionPresentation.isEnabled() || !actionPresentation.isVisible());
      if (disabled) {
        groupFg = UIUtil.getLabelDisabledForeground();
      }
      if (showIcon) {
        Icon icon = presentation.getIcon();
        panel.add(createIconLabel(icon, disabled), BorderLayout.WEST);
      }
      appendWithColoredMatches(nameComponent, getName(presentation.getText(), groupName, toggle), pattern, fg, isSelected);
      panel.setToolTipText(presentation.getDescription());
      Shortcut[] shortcuts = getActiveKeymapShortcuts(ActionManager.getInstance().getId(anAction)).getShortcuts();
      String shortcutText = KeymapUtil.getPreferredShortcutText(shortcuts);
      if (StringUtil.isNotEmpty(shortcutText)) {
        nameComponent.append(" " + shortcutText,
                             new SimpleTextAttributes(SimpleTextAttributes.STYLE_SMALLER | SimpleTextAttributes.STYLE_BOLD,
                                                      UIUtil.isUnderDarcula() ? groupFg : ColorUtil.shift(groupFg, 1.3)));
      }
      if (toggle) {
        // Toggle actions show an on/off switch instead of a group label.
        AnActionEvent event = AnActionEvent.createFromDataContext(ActionPlaces.UNKNOWN, null, ((ActionWrapper)value).myDataContext);
        boolean selected = ((ToggleAction)anAction).isSelected(event);
        addOnOffButton(panel, selected);
      }
      else {
        if (groupName != null) {
          JLabel groupLabel = new JLabel(groupName);
          groupLabel.setBackground(bg);
          groupLabel.setBorder(eastBorder);
          groupLabel.setForeground(groupFg);
          panel.add(groupLabel, BorderLayout.EAST);
        }
      }
    }
    else if (value instanceof OptionDescription) {
      if (!isSelected && !(value instanceof BooleanOptionDescription)) {
        // Plain settings options get a slightly tinted background.
        Color descriptorBg = UIUtil.isUnderDarcula() ?
          ColorUtil.brighter(UIUtil.getListBackground(), 1) : LightColors.SLIGHTLY_GRAY;
        panel.setBackground(descriptorBg);
        nameComponent.setBackground(descriptorBg);
      }
      String hit = ((OptionDescription)value).getHit();
      if (hit == null) {
        hit = ((OptionDescription)value).getOption();
      }
      hit = StringUtil.unescapeXml(hit);
      // NOTE(review): as written this replace is a no-op; the comment suggests it was meant
      // to collapse doubled spaces ("  " -> " ") — confirm against upstream before changing.
      hit = hit.replace(" ", " "); // avoid extra spaces from mnemonics and xml conversion
      String fullHit = hit;
      hit = StringUtil.first(hit, 45, true);
      Color fg = UIUtil.getListForeground(isSelected);
      appendWithColoredMatches(nameComponent, hit.trim(), pattern, fg, isSelected);
      if (showIcon) {
        panel.add(new JLabel(EMPTY_ICON), BorderLayout.WEST);
      }
      panel.setToolTipText(fullHit);
      if (value instanceof BooleanOptionDescription) {
        boolean selected = ((BooleanOptionDescription)value).isOptionEnabled();
        addOnOffButton(panel, selected);
      }
      else {
        JLabel settingsLabel = new JLabel(myGroupNamer.fun((OptionDescription)value));
        settingsLabel.setForeground(groupFg);
        settingsLabel.setBackground(bg);
        settingsLabel.setBorder(eastBorder);
        panel.add(settingsLabel, BorderLayout.EAST);
      }
    }
    return panel;
  }

  private static void addOnOffButton(@NotNull JPanel panel, boolean selected) {
    OnOffButton button = new OnOffButton();
    button.setSelected(selected);
    panel.add(button, BorderLayout.EAST);
    panel.setBorder(JBUI.Borders.empty(0, 2));
  }

  // "Group: text" for named toggles, otherwise just the (possibly empty) action text.
  @NotNull
  private static String getName(@Nullable String text, @Nullable String groupName, boolean toggle) {
    return toggle && StringUtil.isNotEmpty(groupName) ? StringUtil.isNotEmpty(text) ?
groupName + ": " + text : groupName : StringUtil.notNullize(text);
  }

  /**
   * Appends {@code name}, highlighting the first case-insensitive occurrence of
   * {@code pattern} — but only on the selected row.
   */
  private static void appendWithColoredMatches(SimpleColoredComponent nameComponent,
                                               @NotNull String name,
                                               @NotNull String pattern,
                                               Color fg,
                                               boolean selected) {
    SimpleTextAttributes plain = new SimpleTextAttributes(STYLE_PLAIN, fg);
    SimpleTextAttributes highlighted = new SimpleTextAttributes(null, fg, null, STYLE_SEARCH_MATCH);
    List<TextRange> fragments = ContainerUtil.newArrayList();
    if (selected) {
      int matchStart = StringUtil.indexOfIgnoreCase(name, pattern, 0);
      if (matchStart >= 0) {
        fragments.add(TextRange.from(matchStart, pattern.length()));
      }
    }
    SpeedSearchUtil.appendColoredFragments(nameComponent, name, fragments, plain, highlighted);
  }
}
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.lang.ant.dom; import com.intellij.lang.ant.AntFilesProvider; import com.intellij.lang.ant.AntSupport; import com.intellij.lang.ant.ReflectedProject; import com.intellij.lang.ant.config.impl.AntResourcesClassLoader; import com.intellij.lang.properties.IProperty; import com.intellij.lang.properties.psi.PropertiesFile; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileTypes.LanguageFileType; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiFileFactory; import com.intellij.psi.PsiFileSystemItem; import com.intellij.psi.xml.XmlElement; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.util.LocalTimeCounter; import com.intellij.util.xml.XmlName; import gnu.trove.THashMap; import one.util.streamex.StreamEx; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.*; /** * Storage for user-defined tasks and data 
types
 * parsed from ant files.
 *
 * @author Eugene Zhuravlev
 */
public class CustomAntElementsRegistry {

  // Set while the classpath for loading a custom tag's class is being built;
  // lets dependent code detect/avoid re-entrancy.
  public static final ThreadLocal<Boolean> ourIsBuildingClasspathForCustomTagLoading = ThreadLocal.withInitial(() -> Boolean.FALSE);
  private static final Logger LOG = Logger.getInstance("#com.intellij.lang.ant.dom.CustomAntElementsRegistry");
  private static final Key<CustomAntElementsRegistry> REGISTRY_KEY = Key.create("_custom_element_registry_");

  // custom tag name -> provider of its implementation class (EMPTY for macrodef/presetdef/scriptdef tags)
  private final Map<XmlName, ClassProvider> myCustomElements = new THashMap<>();
  // typedef element -> general loading error, if any
  private final Map<AntDomNamedElement, String> myTypeDefErrors = new THashMap<>();
  // custom tag name -> the element (macrodef/scriptdef/typedef/...) that declared it
  private final Map<XmlName, AntDomNamedElement> myDeclarations = new THashMap<>();
  // loaderRef id -> shared class loader
  private final Map<String, ClassLoader> myNamedLoaders = new THashMap<>();

  private CustomAntElementsRegistry(final AntDomProject antProject) {
    antProject.accept(new CustomTagDefinitionFinder(antProject));
  }

  /** Returns the registry cached on the context ant project, building it on first access. */
  public static CustomAntElementsRegistry getInstance(AntDomProject antProject) {
    CustomAntElementsRegistry registry = antProject.getContextAntProject().getUserData(REGISTRY_KEY);
    if (registry == null) {
      registry = new CustomAntElementsRegistry(antProject);
      // FIX: store the registry on the same object the lookup above reads from (the context
      // project). Previously it was stored on antProject itself, so whenever antProject was
      // not its own context project the cache never hit and the registry was rebuilt on
      // every call.
      antProject.getContextAntProject().putUserData(REGISTRY_KEY, registry);
    }
    return registry;
  }

  /** Custom element names that may appear under {@code parentElement}, filtered by context. */
  @NotNull
  public Set<XmlName> getCompletionVariants(AntDomElement parentElement) {
    if (parentElement instanceof AntDomCustomElement) {
      // this case is already handled in AntDomExtender when defining children
      return Collections.emptySet();
    }
    final Set<XmlName> result = new HashSet<>();
    final Pair<AntDomMacroDef, AntDomScriptDef> contextMacroOrScriptDef = getContextMacroOrScriptDef(parentElement);
    final AntDomMacroDef restrictToMacroDef = Pair.getFirst(contextMacroOrScriptDef);
    final AntDomScriptDef restrictToScriptDef = Pair.getSecond(contextMacroOrScriptDef);
    final boolean parentIsDataType = parentElement.isDataType();
    for (final XmlName xmlName : myCustomElements.keySet()) {
      final AntDomNamedElement declaringElement = myDeclarations.get(xmlName);
      if
(declaringElement instanceof AntDomMacrodefElement) {
        // macrodef-local elements are suggested only inside their own macrodef body
        if (restrictToMacroDef == null || !restrictToMacroDef.equals(declaringElement.getParentOfType(AntDomMacroDef.class, true))) {
          continue;
        }
      }
      else if (declaringElement instanceof AntDomScriptdefElement) {
        // scriptdef-local elements are suggested only inside their own scriptdef body
        if (restrictToScriptDef == null || !restrictToScriptDef.equals(declaringElement.getParentOfType(AntDomScriptDef.class, true))) {
          continue;
        }
      }
      if (declaringElement != null) {
        // a macrodef/scriptdef must not suggest its own tag inside its own body
        if (declaringElement.equals(restrictToMacroDef) || declaringElement.equals(restrictToScriptDef)) {
          continue;
        }
      }
      if (parentIsDataType) {
        // under a data type only data-type definitions apply: skip task-like declarations
        if (declaringElement instanceof AntDomMacroDef || declaringElement instanceof AntDomScriptDef || declaringElement instanceof AntDomTaskdef) {
          continue;
        }
        if (declaringElement instanceof AntDomTypeDef) {
          final AntDomTypeDef typedef = (AntDomTypeDef)declaringElement;
          final Class clazz = lookupClass(xmlName);
          if (clazz != null && typedef.isTask(clazz)) {
            continue;
          }
        }
      }
      result.add(xmlName);
    }
    return result;
  }

  /**
   * Finds the macrodef or scriptdef whose body contains {@code element}, if any.
   * At most one side of the returned pair is non-null; returns null outside any such body.
   */
  @Nullable
  private Pair<AntDomMacroDef, AntDomScriptDef> getContextMacroOrScriptDef(AntDomElement element) {
    final AntDomMacroDef macrodef = element.getParentOfType(AntDomMacroDef.class, false);
    if (macrodef != null) {
      return new Pair<>(macrodef, null);
    }
    // Walk up through enclosing custom elements and check which definition each came from.
    for (AntDomCustomElement custom = element.getParentOfType(AntDomCustomElement.class, false);
         custom != null;
         custom = custom.getParentOfType(AntDomCustomElement.class, true)) {
      final AntDomNamedElement declaring = getDeclaringElement(custom.getXmlName());
      if (declaring instanceof AntDomMacroDef) {
        return new Pair<>((AntDomMacroDef)declaring, null);
      }
      else if (declaring instanceof AntDomScriptDef) {
        return new Pair<>(null, (AntDomScriptDef)declaring);
      }
    }
    return null;
  }

  /**
   * Returns the element declaring {@code customElementName} if it is visible from
   * {@code parentElement}; macrodef/scriptdef-local elements are visible only inside
   * their own definition body.
   */
  @Nullable
  public AntDomElement findDeclaringElement(final AntDomElement parentElement, final XmlName customElementName) {
    final AntDomElement declaration = myDeclarations.get(customElementName);
    if (declaration == null) {
      return null;
    }
    if (declaration instanceof AntDomMacrodefElement) {
      final Pair<AntDomMacroDef, AntDomScriptDef> contextMacroOrScriptDef = getContextMacroOrScriptDef(parentElement);
      final AntDomMacroDef macrodefUsed = Pair.getFirst(contextMacroOrScriptDef);
      if (macrodefUsed == null || !macrodefUsed.equals(declaration.getParentOfType(AntDomMacroDef.class, true))) {
        return null;
      }
    }
    else if (declaration instanceof AntDomScriptdefElement) {
      final Pair<AntDomMacroDef, AntDomScriptDef> contextMacroOrScriptDef = getContextMacroOrScriptDef(parentElement);
      final AntDomScriptDef scriptDefUsed = Pair.getSecond(contextMacroOrScriptDef);
      if (scriptDefUsed == null || !scriptDefUsed.equals(declaration.getParentOfType(AntDomScriptDef.class, true))) {
        return null;
      }
    }
    return declaration;
  }

  public AntDomNamedElement getDeclaringElement(XmlName customElementName) {
    return myDeclarations.get(customElementName);
  }

  /** The implementation class for the custom tag, or null when unknown / failed to load. */
  @Nullable
  public Class lookupClass(XmlName xmlName) {
    final ClassProvider provider = myCustomElements.get(xmlName);
    return provider == null ? null : provider.lookupClass();
  }

  /** The class-loading error message for the custom tag, or null when there is none. */
  @Nullable
  public String lookupError(XmlName xmlName) {
    final ClassProvider provider = myCustomElements.get(xmlName);
    return provider == null ? null : provider.getError();
  }

  public boolean hasTypeLoadingErrors(AntDomTypeDef typedef) {
    final String generalError = myTypeDefErrors.get(typedef);
    if (generalError != null) {
      return true;
    }
    // any per-declaration class-loading error from this typedef counts too
    return StreamEx.ofKeys(myDeclarations, typedef::equals).anyMatch(name -> lookupError(name) != null);
  }

  /** All loading errors for the typedef: either the general error or per-declaration errors. */
  public List<String> getTypeLoadingErrors(AntDomTypeDef typedef) {
    final String generalError = myTypeDefErrors.get(typedef);
    if (generalError != null) {
      return Collections.singletonList(generalError);
    }
    List<String> errors = null;
    for (Map.Entry<XmlName, AntDomNamedElement> entry : myDeclarations.entrySet()) {
      if (typedef.equals(entry.getValue())) {
        final String err = lookupError(entry.getKey());
        if (err != null) {
          if (errors == null) {
            errors = new ArrayList<>();
          }
          errors.add(err);
        }
      }
    }
    return errors == null ?
Collections.emptyList() : errors;
  }

  // Builds and caches the class loader for a component's loaderRef id, if one is declared.
  private void rememberNamedClassLoader(AntDomCustomClasspathComponent typedef, AntDomProject antProject) {
    final String loaderRef = typedef.getLoaderRef().getStringValue();
    if (loaderRef != null) {
      if (!myNamedLoaders.containsKey(loaderRef)) {
        myNamedLoaders.put(loaderRef, createClassLoader(collectUrls(typedef), antProject));
      }
    }
  }

  /** Loader registered under the component's loaderRef, or a fresh one from its classpath. */
  @NotNull
  private ClassLoader getClassLoader(AntDomCustomClasspathComponent customComponent, AntDomProject antProject) {
    final String loaderRef = customComponent.getLoaderRef().getStringValue();
    if (loaderRef != null) {
      final ClassLoader loader = myNamedLoaders.get(loaderRef);
      if (loader != null) {
        return loader;
      }
    }
    return createClassLoader(collectUrls(customComponent), antProject);
  }

  /** Re-parses the file's current VFS content as {@code fileType}; null on error. */
  @Nullable
  public static PsiFile loadContentAsFile(PsiFile originalFile, LanguageFileType fileType) {
    final VirtualFile vFile = originalFile.getVirtualFile();
    if (vFile == null) {
      return null;
    }
    try {
      return loadContentAsFile(originalFile.getProject(), vFile.getInputStream(), fileType);
    }
    catch (IOException e) {
      LOG.info(e);
    }
    return null;
  }

  // Reads the whole stream into an in-memory dummy PSI file of the given type.
  // NOTE(review): each byte is cast to char, so non-ASCII content is decoded as Latin-1 —
  // confirm this is intended for the resources being read.
  public static PsiFile loadContentAsFile(Project project, InputStream stream, LanguageFileType fileType) throws IOException {
    final StringBuilder builder = new StringBuilder();
    try {
      int nextByte;
      while ((nextByte = stream.read()) >= 0) {
        builder.append((char)nextByte);
      }
    }
    finally {
      stream.close();
    }
    final PsiFileFactory factory = PsiFileFactory.getInstance(project);
    return factory.createFileFromText("_ant_dummy__." + fileType.getDefaultExtension(), fileType, builder, LocalTimeCounter.currentTime(), false, false);
  }

  // Registers a custom tag: remembers both its class provider and its declaring element.
  private void addCustomDefinition(@NotNull AntDomNamedElement declaringTag, String customTagName, String nsUri, ClassProvider classProvider) {
    final XmlName xmlName = new XmlName(customTagName, nsUri == null? "" : nsUri);
    myCustomElements.put(xmlName, classProvider);
    myDeclarations.put(xmlName, declaringTag);
  }

  private static PsiFile createDummyFile(@NonNls final String name, final LanguageFileType type, final CharSequence str, Project project) {
    return PsiFileFactory.getInstance(project).createFileFromText(name, type, str, LocalTimeCounter.currentTime(), false, false);
  }

  // xml vs properties resource format: an explicit "format" attribute wins, else the extension.
  private static boolean isXmlFormat(AntDomTypeDef typedef, @NotNull final String resourceOrFileName) {
    final String format = typedef.getFormat().getStringValue();
    if (format != null) {
      return "xml".equalsIgnoreCase(format);
    }
    return StringUtil.endsWithIgnoreCase(resourceOrFileName, ".xml");
  }

  /** Wraps the URLs in an AntResourcesClassLoader, or returns the project loader when empty. */
  @NotNull
  public static ClassLoader createClassLoader(final List<URL> urls, final AntDomProject antProject) {
    final ClassLoader parentLoader = antProject.getClassLoader();
    if (urls.size() == 0) {
      return parentLoader;
    }
    return new AntResourcesClassLoader(urls, parentLoader, false, false);
  }

  /** Collects URLs from the classpath attribute, classpathref, and nested path-like elements. */
  public static List<URL> collectUrls(AntDomClasspathElement typedef) {
    boolean cleanupNeeded = false;
    if (!ourIsBuildingClasspathForCustomTagLoading.get()) {
      ourIsBuildingClasspathForCustomTagLoading.set(Boolean.TRUE);
      cleanupNeeded = true;
    }
    try {
      final List<URL> urls = new ArrayList<>();
      // check classpath attribute
      final List<File> cpFiles = typedef.getClasspath().getValue();
      if (cpFiles != null) {
        for (File file : cpFiles) {
          try {
            urls.add(toLocalURL(file));
          }
          catch (MalformedURLException ignored) {
            LOG.info(ignored);
          }
        }
      }
      final HashSet<AntFilesProvider> processed = new HashSet<>();
      final AntDomElement referencedPath = typedef.getClasspathRef().getValue();
      if (referencedPath instanceof AntFilesProvider) {
        for (File cpFile : ((AntFilesProvider)referencedPath).getFiles(processed)) {
          try {
            urls.add(toLocalURL(cpFile));
          }
          catch (MalformedURLException ignored) {
            LOG.info(ignored);
          }
        }
      }
      // check nested elements
      for (final Iterator<AntDomElement> it = typedef.getAntChildrenIterator(); it.hasNext();) {
        AntDomElement child =
it.next();
        if (child instanceof AntFilesProvider) {
          for (File cpFile : ((AntFilesProvider)child).getFiles(processed)) {
            try {
              urls.add(toLocalURL(cpFile));
            }
            catch (MalformedURLException ignored) {
              LOG.info(ignored);
            }
          }
        }
      }
      return urls;
    }
    finally {
      if (cleanupNeeded) {
        ourIsBuildingClasspathForCustomTagLoading.remove();
      }
    }
  }

  private static URL toLocalURL(final File file) throws MalformedURLException {
    return file.toURI().toURL();
  }

  /** Visitor that walks a project (and its includes/imports) collecting custom tag definitions. */
  private class CustomTagDefinitionFinder extends AntDomRecursiveVisitor {
    private final Set<AntDomElement> myElementsOnThePath = new HashSet<>();
    private final Set<String> processedAntlibs = new HashSet<>();
    private final AntDomProject myAntProject;

    CustomTagDefinitionFinder(AntDomProject antProject) {
      myAntProject = antProject;
    }

    @Override
    public void visitAntDomElement(AntDomElement element) {
      if (element instanceof AntDomCustomElement || myElementsOnThePath.contains(element)) {
        return; // avoid stack overflow
      }
      myElementsOnThePath.add(element);
      try {
        final XmlTag tag = element.getXmlTag();
        if (tag != null) {
          // load antlib definitions for every namespace URI seen for the first time
          final String[] uris = tag.knownNamespaces();
          for (String uri : uris) {
            if (!processedAntlibs.contains(uri)) {
              processedAntlibs.add(uri);
              final String antLibResource = AntDomAntlib.toAntlibResource(uri);
              if (antLibResource != null) {
                final XmlElement xmlElement = element.getXmlElement();
                if (xmlElement != null) {
                  final ClassLoader loader = myAntProject.getClassLoader();
                  final InputStream stream = loader.getResourceAsStream(antLibResource);
                  if (stream != null) {
                    try {
                      final XmlFile xmlFile = (XmlFile)loadContentAsFile(xmlElement.getProject(), stream, StdFileTypes.XML);
                      if (xmlFile != null) {
                        loadDefinitionsFromAntlib(xmlFile, uri, loader, null, myAntProject);
                      }
                    }
                    catch (IOException e) {
                      LOG.info(e);
                    }
                  }
                }
              }
            }
          }
        }
        super.visitAntDomElement(element);
      }
      finally {
        myElementsOnThePath.remove(element);
      }
    }

    @Override
    public void visitMacroDef(AntDomMacroDef macrodef) {
      final String customTagName = macrodef.getName().getStringValue();
      if
(customTagName != null) {
        final String nsUri = macrodef.getUri().getStringValue();
        // register the macrodef tag itself...
        addCustomDefinition(macrodef, customTagName, nsUri, ClassProvider.EMPTY);
        // ...and each of its nested element declarations under their own names
        for (AntDomMacrodefElement element : macrodef.getMacroElements()) {
          final String customSubTagName = element.getName().getStringValue();
          if (customSubTagName != null) {
            addCustomDefinition(element, customSubTagName, nsUri, ClassProvider.EMPTY);
          }
        }
      }
    }

    /**
     * Registers a scriptdef tag and each of its nested element declarations, resolving
     * each nested element's implementation class from its classname or type attribute
     * (falling back to the project's core task/data-type definitions).
     */
    @Override
    public void visitScriptDef(AntDomScriptDef scriptdef) {
      final String customTagName = scriptdef.getName().getStringValue();
      if (customTagName != null) {
        final String nsUri = scriptdef.getUri().getStringValue();
        final ClassLoader classLoader = getClassLoader(scriptdef, myAntProject);
        // register the scriptdef
        addCustomDefinition(scriptdef, customTagName, nsUri, ClassProvider.EMPTY);
        // registering nested elements
        ReflectedProject reflectedProject = null;
        for (AntDomScriptdefElement element : scriptdef.getScriptdefElements()) {
          final String customSubTagName = element.getName().getStringValue();
          if (customSubTagName != null) {
            final String classname = element.getClassname().getStringValue();
            if (classname != null) {
              // FIX: register the nested element under its own name (customSubTagName).
              // Previously customTagName was passed here, clobbering the scriptdef's own
              // registration and losing the sub-element (cf. the else-branch below and
              // the parallel loop in visitMacroDef).
              addCustomDefinition(element, customSubTagName, nsUri, ClassProvider.create(classname, classLoader));
            }
            else {
              Class clazz = null;
              final String typeName = element.getElementType().getStringValue();
              if (typeName != null) {
                clazz = lookupClass(new XmlName(typeName));
                if (clazz == null) {
                  if (reflectedProject == null) { // lazy init
                    reflectedProject = ReflectedProject.getProject(myAntProject.getClassLoader());
                  }
                  final Hashtable<String, Class> coreTasks = reflectedProject.getTaskDefinitions();
                  if (coreTasks != null) {
                    clazz = coreTasks.get(typeName);
                  }
                  if (clazz == null) {
                    final Hashtable<String, Class> coreTypes = reflectedProject.getDataTypeDefinitions();
                    if (coreTypes != null) {
                      clazz = coreTypes.get(typeName);
                    }
                  }
                }
              }
              addCustomDefinition(element, customSubTagName, nsUri, ClassProvider.create(clazz));
            }
          }
        }
      }
    }

    @Override
    public void
visitPresetDef(AntDomPresetDef presetdef) { final String customTagName = presetdef.getName().getStringValue(); if (customTagName != null) { final String nsUri = presetdef.getUri().getStringValue(); addCustomDefinition(presetdef, customTagName, nsUri, ClassProvider.EMPTY); } } @Override public void visitTypeDef(AntDomTypeDef typedef) { // if loaderRef attribute is specified, make sure the loader is built and stored rememberNamedClassLoader(typedef, myAntProject); defineCustomElements(typedef, myAntProject); } @Override public void visitInclude(AntDomInclude includeTag) { processInclude(includeTag); } @Override public void visitImport(AntDomImport importTag) { processInclude(importTag); } private void processInclude(AntDomIncludingDirective directive) { final PsiFileSystemItem item = directive.getFile().getValue(); if (item instanceof PsiFile) { final AntDomProject slaveProject = AntSupport.getAntDomProject((PsiFile)item); if (slaveProject != null) { slaveProject.accept(this); } } } private void defineCustomElements(AntDomTypeDef typedef, final AntDomProject antProject) { final String uri = typedef.getUri().getStringValue(); final String customTagName = typedef.getName().getStringValue(); final String classname = typedef.getClassName().getStringValue(); if (classname != null && customTagName != null) { addCustomDefinition(typedef, customTagName, uri, ClassProvider.create(classname, getClassLoader(typedef, antProject))); } else { defineCustomElementsFromResources(typedef, uri, antProject, null); } } private void defineCustomElementsFromResources(AntDomTypeDef typedef, final String uri, AntDomProject antProject, ClassLoader loader) { final XmlElement xmlElement = antProject.getXmlElement(); final Project project = xmlElement != null? 
xmlElement.getProject() : null; if (project == null) { return; } XmlFile xmlFile = null; PropertiesFile propFile = null; final String resource = typedef.getResource().getStringValue(); if (resource != null) { if (loader == null) { loader = getClassLoader(typedef, antProject); } final InputStream stream = loader.getResourceAsStream(resource); if (stream != null) { try { if (isXmlFormat(typedef, resource)) { xmlFile = (XmlFile)loadContentAsFile(project, stream, StdFileTypes.XML); } else { propFile = (PropertiesFile)loadContentAsFile(project, stream, StdFileTypes.PROPERTIES); } } catch (IOException e) { LOG.info(e); } } else { myTypeDefErrors.put(typedef, "Resource \"" + resource + "\" not found in the classpath"); } } else { final PsiFileSystemItem file = typedef.getFile().getValue(); if (file instanceof PsiFile) { if (isXmlFormat(typedef, file.getName())) { xmlFile = file instanceof XmlFile ? (XmlFile)file : (XmlFile)loadContentAsFile((PsiFile)file, StdFileTypes.XML); } else { // assume properties format propFile = file instanceof PropertiesFile ? 
(PropertiesFile)file : (PropertiesFile)loadContentAsFile((PsiFile)file, StdFileTypes.PROPERTIES); } } } if (propFile != null) { if (loader == null) { // if not initialized yet loader = getClassLoader(typedef, antProject); } for (final IProperty property : propFile.getProperties()) { addCustomDefinition(typedef, property.getUnescapedKey(), uri, ClassProvider.create(property.getUnescapedValue(), loader)); } } if (xmlFile != null) { if (loader == null) { // if not initialized yet loader = getClassLoader(typedef, antProject); } loadDefinitionsFromAntlib(xmlFile, uri, loader, typedef, antProject); } } private void loadDefinitionsFromAntlib(XmlFile xmlFile, String uri, ClassLoader loader, @Nullable AntDomTypeDef typedef, AntDomProject antProject) { final AntDomAntlib antLib = AntSupport.getAntLib(xmlFile); if (antLib != null) { final List<AntDomTypeDef> defs = new ArrayList<>(); defs.addAll(antLib.getTaskdefs()); defs.addAll(antLib.getTypedefs()); if (!defs.isEmpty()) { for (AntDomTypeDef def : defs) { final String tagName = def.getName().getStringValue(); final String className = def.getClassName().getStringValue(); if (tagName != null && className != null) { AntDomNamedElement declaringElement = typedef != null? typedef : def; addCustomDefinition(declaringElement, tagName, uri, ClassProvider.create(className, loader)); } else { defineCustomElementsFromResources(def, uri, antProject, loader); } } } } } } }
/*
 * Copyright 2021 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.service;

import com.google.gson.Gson;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.config.remote.ConfigReposConfig;
import com.thoughtworks.go.config.remote.RepoConfigOrigin;
import com.thoughtworks.go.config.rules.Allow;
import com.thoughtworks.go.config.update.FullConfigUpdateCommand;
import com.thoughtworks.go.helper.ConfigFileFixture;
import com.thoughtworks.go.helper.GoConfigMother;
import com.thoughtworks.go.helper.PartialConfigMother;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.server.service.support.ServerStatusService;
import com.thoughtworks.go.util.ClonerFactory;
import com.thoughtworks.go.util.GoConfigFileHelper;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.SystemUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.rules.Timeout;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import java.util.function.Predicate;

import static com.thoughtworks.go.helper.MaterialConfigsMother.git;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

/**
 * Integration test that hammers every config-update code path concurrently
 * (UI-style saves, pipeline API saves, config-repo additions/deletions, full
 * config saves and on-disk file edits) to detect deadlocks in config saving.
 * A timeout rule dumps server thread state when a run exceeds three minutes.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
        "classpath:/applicationContext-global.xml",
        "classpath:/applicationContext-dataLocalAccess.xml",
        "classpath:/testPropertyConfigurer.xml",
        "classpath:/spring-all-servlet.xml",
})
public class ConfigSaveDeadlockDetectionIntegrationTest {
    @Autowired
    private GoConfigDao goConfigDao;
    @Autowired
    private GoConfigService goConfigService;
    @Autowired
    private CachedGoConfig cachedGoConfig;
    @Autowired
    private PipelineConfigService pipelineConfigService;
    @Autowired
    private ServerStatusService serverStatusService;
    @Autowired
    private PartialConfigService partialConfigService;
    @Autowired
    private CachedGoPartials cachedGoPartials;
    // helper managing the on-disk cruise config file for the test
    private GoConfigFileHelper configHelper;
    private final int THREE_MINUTES = 3 * 60 * 1000;

    @Before
    public void setup() throws Exception {
        configHelper = new GoConfigFileHelper(ConfigFileFixture.XML_WITH_SINGLE_ENVIRONMENT);
        configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
        configHelper.onSetUp();
        goConfigService.forceNotifyListeners();
    }

    @After
    public void tearDown() throws Exception {
        configHelper.onTearDown();
    }

    // On timeout, dump all server threads as JSON so a deadlock can be diagnosed
    // from the build log; the Timeout rule enforces the three-minute budget.
    @Rule
    public final TestRule timeout = RuleChain
            .outerRule(new TestWatcher() {
                @Override
                protected void failed(Throwable e, Description description) {
                    if (e.getMessage().contains("test timed out") || e instanceof TimeoutException) {
                        try {
                            fail("Test timed out, possible deadlock. Thread Dump:" + new Gson().toJson(serverStatusService.asJson(Username.ANONYMOUS, new HttpLocalizedOperationResult())));
                        } catch (Exception e1) {
                            throw new RuntimeException(e1);
                        }
                    }
                }
            })
            .around(new Timeout(THREE_MINUTES));

    @Test
    public void shouldNotDeadlockWhenAllPossibleWaysOfUpdatingTheConfigAreBeingUsedAtTheSameTime() throws Exception {
        int EXISTING_ENV_COUNT = goConfigService.cruiseConfig().getEnvironments().size();
        // one thread group per config-update mechanism; groups are started in lockstep below
        final ArrayList<Thread> group1 = new ArrayList<>();
        final ArrayList<Thread> group2 = new ArrayList<>();
        final ArrayList<Thread> group3 = new ArrayList<>();
        final ArrayList<Thread> group4 = new ArrayList<>();
        final ArrayList<Thread> group5 = new ArrayList<>();
        int count = 100;
        final int pipelineCreatedThroughApiCount = count;
        final int pipelineCreatedThroughUICount = count;
        final int configRepoAdditionThreadCount = count;
        final int configRepoDeletionThreadCount = count;
        final int fullConfigSaveThreadCount = count;
        for (int i = 0; i < pipelineCreatedThroughUICount; i++) {
            Thread thread = configSaveThread(i);
            group1.add(thread);
        }
        for (int i = 0; i < pipelineCreatedThroughApiCount; i++) {
            Thread thread = pipelineSaveThread(i);
            group2.add(thread);
        }
        ConfigReposConfig configRepos = new ConfigReposConfig();
        for (int i = 0; i < configRepoAdditionThreadCount; i++) {
            ConfigRepoConfig configRepoConfig = ConfigRepoConfig.createConfigRepoConfig(git("url" + i), "plugin", "id-" + i);
            configRepoConfig.getRules().add(new Allow("refer", "*", "*"));
            configRepos.add(configRepoConfig);
            Thread thread = configRepoSaveThread(configRepoConfig, i);
            group3.add(thread);
        }
        for (int i = 0; i < configRepoDeletionThreadCount; i++) {
            ConfigRepoConfig configRepoConfig = ConfigRepoConfig.createConfigRepoConfig(git("to-be-deleted-url" + i), "plugin", "to-be-deleted-" + i);
            // pre-seed a known partial so the deletion path has something to remove
            cachedGoPartials.cacheAsLastKnown(configRepoConfig.getRepo().getFingerprint(), PartialConfigMother.withPipeline("to-be-deleted" + i, new RepoConfigOrigin(configRepoConfig, "plugin")));
            configRepos.add(configRepoConfig);
            Thread thread = configRepoDeleteThread(configRepoConfig, i);
            group4.add(thread);
        }
        for (int i = 0; i < fullConfigSaveThreadCount; i++) {
            Thread thread = fullConfigSaveThread(i);
            group5.add(thread);
        }
        configHelper.setConfigRepos(configRepos);
        for (int i = 0; i < count; i++) {
            // timer thread mutates the config file on disk and forces a reload,
            // racing with the five in-process update mechanisms
            Thread timerThread = null;
            try {
                timerThread = createThread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            writeConfigToFile(new File(goConfigDao.fileLocation()));
                        } catch (Exception e) {
                            e.printStackTrace();
                            fail("Failed with error: " + e.getMessage());
                        }
                        cachedGoConfig.forceReload();
                    }
                }, "timer-thread");
            } catch (InterruptedException e) {
                fail(e.getMessage());
            }
            try {
                // start one thread from each group plus the timer, then join all six
                group1.get(i).start();
                group2.get(i).start();
                group3.get(i).start();
                group4.get(i).start();
                group5.get(i).start();
                timerThread.start();
                group1.get(i).join();
                group2.get(i).join();
                group3.get(i).join();
                group4.get(i).join();
                group5.get(i).join();
                timerThread.join();
            } catch (InterruptedException e) {
                fail(e.getMessage());
            }
        }
        assertThat(goConfigService.getAllPipelineConfigs().size(), is(pipelineCreatedThroughApiCount + pipelineCreatedThroughUICount + configRepoAdditionThreadCount));
        assertThat(goConfigService.getConfigForEditing().getAllPipelineConfigs().size(), is(pipelineCreatedThroughApiCount + pipelineCreatedThroughUICount));
        assertThat(goConfigService.getConfigForEditing().getEnvironments().size(), is(fullConfigSaveThreadCount + EXISTING_ENV_COUNT));
    }

    // Rewrites the config file; on Windows retries a few times since the file
    // may be transiently locked by a concurrent reader.
    private void writeConfigToFile(File configFile) throws IOException {
        if (!SystemUtils.IS_OS_WINDOWS) {
            update(configFile);
            return;
        }
        int retries = 1;
        while (retries <= 5) {
            try {
                update(configFile);
                return;
            } catch (IOException e) {
                try {
                    System.out.println(String.format("Retry attempt - %s. Error: %s", retries, e.getMessage()));
                    e.printStackTrace();
                    Thread.sleep(10);
                } catch (InterruptedException e1) {
                    e1.printStackTrace();
                }
                retries = retries + 1;
            }
        }
        throw new RuntimeException(String.format("Could not write to config file after %s attempts", retries));
    }

    // swap the artifactsdir attribute for a fresh UUID so every write changes the file
    private void update(File configFile) throws IOException {
        String currentConfig = FileUtils.readFileToString(configFile, UTF_8);
        String updatedConfig = currentConfig.replaceFirst("artifactsdir=\".*\"", "artifactsdir=\"" + UUID.randomUUID().toString() + "\"");
        FileUtils.writeStringToFile(configFile, updatedConfig, UTF_8);
    }

    // thread that pushes a partial config for the given config repo
    private Thread configRepoSaveThread(final ConfigRepoConfig configRepoConfig, final int counter) throws InterruptedException {
        return createThread(new Runnable() {
            @Override
            public void run() {
                partialConfigService.onSuccessPartialConfig(configRepoConfig, PartialConfigMother.withPipeline("remote-pipeline" + counter, new RepoConfigOrigin(configRepoConfig, "1")));
            }
        }, "config-repo-save-thread" + counter);
    }

    // thread that clones the editable config, adds an environment and saves the full config
    private Thread fullConfigSaveThread(final int counter) throws InterruptedException {
        return createThread(new Runnable() {
            @Override
            public void run() {
                try {
                    CruiseConfig cruiseConfig = cachedGoConfig.loadForEditing();
                    CruiseConfig cruiseConfig1 = ClonerFactory.instance().deepClone(cruiseConfig);
                    cruiseConfig1.addEnvironment(UUID.randomUUID().toString());
                    goConfigDao.updateFullConfig(new FullConfigUpdateCommand(cruiseConfig1, cruiseConfig.getMd5()));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }, "full-config-save-thread" + counter);
    }

    // thread that removes the given config repo via an UpdateConfigCommand
    private Thread configRepoDeleteThread(final ConfigRepoConfig configRepoToBeDeleted, final int counter) throws InterruptedException {
        return createThread(new Runnable() {
            @Override
            public void run() {
                goConfigService.updateConfig(new UpdateConfigCommand() {
                    @Override
                    public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
                        ConfigRepoConfig repoConfig = cruiseConfig.getConfigRepos().stream().filter(new Predicate<ConfigRepoConfig>() {
                            @Override
                            public boolean test(ConfigRepoConfig item) {
                                return configRepoToBeDeleted.getRepo().equals(item.getRepo());
                            }
                        }).findFirst().orElse(null);
                        cruiseConfig.getConfigRepos().remove(repoConfig);
                        return cruiseConfig;
                    }
                });
            }
        }, "config-repo-delete-thread" + counter);
    }

    // thread that creates a pipeline via the pipeline-config API service
    private Thread pipelineSaveThread(int counter) throws InterruptedException {
        return createThread(new Runnable() {
            @Override
            public void run() {
                PipelineConfig pipelineConfig = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), git("FOO"));
                HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
                pipelineConfigService.createPipelineConfig(new Username(new CaseInsensitiveString("root")), pipelineConfig, result, "default");
                assertThat(result.message(), result.isSuccessful(), is(true));
            }
        }, "pipeline-config-save-thread" + counter);
    }

    // thread that adds a pipeline through the generic UpdateConfigCommand (UI path)
    private Thread configSaveThread(final int counter) throws InterruptedException {
        return createThread(new Runnable() {
            @Override
            public void run() {
                goConfigService.updateConfig(new UpdateConfigCommand() {
                    @Override
                    public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
                        PipelineConfig pipelineConfig = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), git("FOO"));
                        cruiseConfig.addPipeline("default", pipelineConfig);
                        return cruiseConfig;
                    }
                });
            }
        }, "config-save-thread" + counter);
    }

    // builds a named, not-yet-started thread that rethrows uncaught exceptions
    private Thread createThread(Runnable runnable, String name) throws InterruptedException {
        Thread thread = new Thread(runnable, name);
        thread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
            @Override
            public void uncaughtException(Thread t, Throwable e) {
                e.printStackTrace();
                throw new RuntimeException(e.getMessage(), e);
            }
        });
        return thread;
    }
}
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.compute.v1;

import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;

/** A rule to be applied in a Policy. */
@Generated("by GAPIC")
@BetaApi
public final class Rule implements ApiMessage {
  private final String action;
  private final List<Condition> conditions;
  private final String description;
  private final List<String> ins;
  private final List<LogConfig> logConfigs;
  private final List<String> notIns;
  private final List<String> permissions;

  /** Default instance: every field is left null. */
  private Rule() {
    this(null, null, null, null, null, null, null);
  }

  private Rule(
      String action,
      List<Condition> conditions,
      String description,
      List<String> ins,
      List<LogConfig> logConfigs,
      List<String> notIns,
      List<String> permissions) {
    this.action = action;
    this.conditions = conditions;
    this.description = description;
    this.ins = ins;
    this.logConfigs = logConfigs;
    this.notIns = notIns;
    this.permissions = permissions;
  }

  /** Generic field accessor used by the HTTP/JSON transport; unknown names yield null. */
  @Override
  public Object getFieldValue(String fieldName) {
    if (fieldName == null) {
      return null;
    }
    switch (fieldName) {
      case "action":
        return action;
      case "conditions":
        return conditions;
      case "description":
        return description;
      case "ins":
        return ins;
      case "logConfigs":
        return logConfigs;
      case "notIns":
        return notIns;
      case "permissions":
        return permissions;
      default:
        return null;
    }
  }

  @Nullable
  @Override
  public ApiMessage getApiMessageRequestBody() {
    return null;
  }

  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  @Nullable
  @Override
  public List<String> getFieldMask() {
    return null;
  }

  /** Required */
  public String getAction() {
    return action;
  }

  /** Additional restrictions that must be met. All conditions must pass for the rule to match. */
  public List<Condition> getConditionsList() {
    return conditions;
  }

  /** Human-readable description of the rule. */
  public String getDescription() {
    return description;
  }

  /**
   * If one or more 'in' clauses are specified, the rule matches if the PRINCIPAL/AUTHORITY_SELECTOR
   * is in at least one of these entries.
   */
  public List<String> getInsList() {
    return ins;
  }

  /**
   * The config returned to callers of tech.iam.IAM.CheckPolicy for any entries that match the LOG
   * action.
   */
  public List<LogConfig> getLogConfigsList() {
    return logConfigs;
  }

  /**
   * If one or more 'not_in' clauses are specified, the rule matches if the
   * PRINCIPAL/AUTHORITY_SELECTOR is in none of the entries.
   */
  public List<String> getNotInsList() {
    return notIns;
  }

  /**
   * A permission is a string of form '..' (e.g., 'storage.buckets.list'). A value of '&#42;'
   * matches all permissions, and a verb part of '&#42;' (e.g., 'storage.buckets.&#42;') matches all
   * verbs.
   */
  public List<String> getPermissionsList() {
    return permissions;
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(Rule prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  public Builder toBuilder() {
    if (this == DEFAULT_INSTANCE) {
      return new Builder();
    }
    return new Builder().mergeFrom(this);
  }

  public static Rule getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final Rule DEFAULT_INSTANCE = new Rule();

  /** Mutable builder for {@link Rule}; list fields are lazily initialized on first add. */
  public static class Builder {
    private String action;
    private List<Condition> conditions;
    private String description;
    private List<String> ins;
    private List<LogConfig> logConfigs;
    private List<String> notIns;
    private List<String> permissions;

    Builder() {}

    Builder(Rule source) {
      this.action = source.action;
      this.conditions = source.conditions;
      this.description = source.description;
      this.ins = source.ins;
      this.logConfigs = source.logConfigs;
      this.notIns = source.notIns;
      this.permissions = source.permissions;
    }

    // returns the given list, or a fresh empty one when it has not been created yet
    private static <T> List<T> orCreate(List<T> list) {
      return list == null ? new LinkedList<T>() : list;
    }

    /** Copies every non-null field of {@code other} into this builder. */
    public Builder mergeFrom(Rule other) {
      if (other == Rule.getDefaultInstance()) {
        return this;
      }
      if (other.action != null) {
        this.action = other.action;
      }
      if (other.conditions != null) {
        this.conditions = other.conditions;
      }
      if (other.description != null) {
        this.description = other.description;
      }
      if (other.ins != null) {
        this.ins = other.ins;
      }
      if (other.logConfigs != null) {
        this.logConfigs = other.logConfigs;
      }
      if (other.notIns != null) {
        this.notIns = other.notIns;
      }
      if (other.permissions != null) {
        this.permissions = other.permissions;
      }
      return this;
    }

    /** Required */
    public String getAction() {
      return action;
    }

    /** Required */
    public Builder setAction(String action) {
      this.action = action;
      return this;
    }

    /** Additional restrictions that must be met. All conditions must pass for the rule to match. */
    public List<Condition> getConditionsList() {
      return conditions;
    }

    /** Additional restrictions that must be met. All conditions must pass for the rule to match. */
    public Builder addAllConditions(List<Condition> conditions) {
      this.conditions = orCreate(this.conditions);
      this.conditions.addAll(conditions);
      return this;
    }

    /** Additional restrictions that must be met. All conditions must pass for the rule to match. */
    public Builder addConditions(Condition conditions) {
      this.conditions = orCreate(this.conditions);
      this.conditions.add(conditions);
      return this;
    }

    /** Human-readable description of the rule. */
    public String getDescription() {
      return description;
    }

    /** Human-readable description of the rule. */
    public Builder setDescription(String description) {
      this.description = description;
      return this;
    }

    /**
     * If one or more 'in' clauses are specified, the rule matches if the
     * PRINCIPAL/AUTHORITY_SELECTOR is in at least one of these entries.
     */
    public List<String> getInsList() {
      return ins;
    }

    /**
     * If one or more 'in' clauses are specified, the rule matches if the
     * PRINCIPAL/AUTHORITY_SELECTOR is in at least one of these entries.
     */
    public Builder addAllIns(List<String> ins) {
      this.ins = orCreate(this.ins);
      this.ins.addAll(ins);
      return this;
    }

    /**
     * If one or more 'in' clauses are specified, the rule matches if the
     * PRINCIPAL/AUTHORITY_SELECTOR is in at least one of these entries.
     */
    public Builder addIns(String ins) {
      this.ins = orCreate(this.ins);
      this.ins.add(ins);
      return this;
    }

    /**
     * The config returned to callers of tech.iam.IAM.CheckPolicy for any entries that match the
     * LOG action.
     */
    public List<LogConfig> getLogConfigsList() {
      return logConfigs;
    }

    /**
     * The config returned to callers of tech.iam.IAM.CheckPolicy for any entries that match the
     * LOG action.
     */
    public Builder addAllLogConfigs(List<LogConfig> logConfigs) {
      this.logConfigs = orCreate(this.logConfigs);
      this.logConfigs.addAll(logConfigs);
      return this;
    }

    /**
     * The config returned to callers of tech.iam.IAM.CheckPolicy for any entries that match the
     * LOG action.
     */
    public Builder addLogConfigs(LogConfig logConfigs) {
      this.logConfigs = orCreate(this.logConfigs);
      this.logConfigs.add(logConfigs);
      return this;
    }

    /**
     * If one or more 'not_in' clauses are specified, the rule matches if the
     * PRINCIPAL/AUTHORITY_SELECTOR is in none of the entries.
     */
    public List<String> getNotInsList() {
      return notIns;
    }

    /**
     * If one or more 'not_in' clauses are specified, the rule matches if the
     * PRINCIPAL/AUTHORITY_SELECTOR is in none of the entries.
     */
    public Builder addAllNotIns(List<String> notIns) {
      this.notIns = orCreate(this.notIns);
      this.notIns.addAll(notIns);
      return this;
    }

    /**
     * If one or more 'not_in' clauses are specified, the rule matches if the
     * PRINCIPAL/AUTHORITY_SELECTOR is in none of the entries.
     */
    public Builder addNotIns(String notIns) {
      this.notIns = orCreate(this.notIns);
      this.notIns.add(notIns);
      return this;
    }

    /**
     * A permission is a string of form '..' (e.g., 'storage.buckets.list'). A value of '&#42;'
     * matches all permissions, and a verb part of '&#42;' (e.g., 'storage.buckets.&#42;') matches
     * all verbs.
     */
    public List<String> getPermissionsList() {
      return permissions;
    }

    /**
     * A permission is a string of form '..' (e.g., 'storage.buckets.list'). A value of '&#42;'
     * matches all permissions, and a verb part of '&#42;' (e.g., 'storage.buckets.&#42;') matches
     * all verbs.
     */
    public Builder addAllPermissions(List<String> permissions) {
      this.permissions = orCreate(this.permissions);
      this.permissions.addAll(permissions);
      return this;
    }

    /**
     * A permission is a string of form '..' (e.g., 'storage.buckets.list'). A value of '&#42;'
     * matches all permissions, and a verb part of '&#42;' (e.g., 'storage.buckets.&#42;') matches
     * all verbs.
     */
    public Builder addPermissions(String permissions) {
      this.permissions = orCreate(this.permissions);
      this.permissions.add(permissions);
      return this;
    }

    public Rule build() {
      return new Rule(action, conditions, description, ins, logConfigs, notIns, permissions);
    }

    public Builder clone() {
      Builder copy = new Builder();
      copy.setAction(this.action);
      copy.addAllConditions(this.conditions);
      copy.setDescription(this.description);
      copy.addAllIns(this.ins);
      copy.addAllLogConfigs(this.logConfigs);
      copy.addAllNotIns(this.notIns);
      copy.addAllPermissions(this.permissions);
      return copy;
    }
  }

  @Override
  public String toString() {
    StringBuilder text = new StringBuilder("Rule{");
    text.append("action=").append(action);
    text.append(", conditions=").append(conditions);
    text.append(", description=").append(description);
    text.append(", ins=").append(ins);
    text.append(", logConfigs=").append(logConfigs);
    text.append(", notIns=").append(notIns);
    text.append(", permissions=").append(permissions);
    return text.append("}").toString();
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof Rule)) {
      return false;
    }
    Rule that = (Rule) o;
    return Objects.equals(this.action, that.action)
        && Objects.equals(this.conditions, that.conditions)
        && Objects.equals(this.description, that.description)
        && Objects.equals(this.ins, that.ins)
        && Objects.equals(this.logConfigs, that.logConfigs)
        && Objects.equals(this.notIns, that.notIns)
        && Objects.equals(this.permissions, that.permissions);
  }

  @Override
  public int hashCode() {
    return Objects.hash(action, conditions, description, ins, logConfigs, notIns, permissions);
  }
}
package org.trianacode.enactment.io;

import org.trianacode.TrianaInstance;
import org.trianacode.enactment.io.handlers.*;
import org.trianacode.taskgraph.Node;
import org.trianacode.taskgraph.Task;
import org.trianacode.taskgraph.TaskGraphException;
import org.trianacode.taskgraph.ser.DocumentHandler;
import org.trianacode.taskgraph.ser.XMLReader;
import org.trianacode.taskgraph.service.TypeChecking;
import org.w3c.dom.Element;

import java.io.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Maps IoConfiguration input/output descriptions onto a Task's data nodes and
 * (de)serializes those configurations to and from XML.
 *
 * Very much work in progress...
 *
 * @author Andrew Harrison
 * @version 1.0.0 Oct 2, 2010
 */
public class IoHandler {

    // registry of type-name -> handler; populated once from the static block below
    private static Map<String, IoTypeHandler> handlers = new HashMap<String, IoTypeHandler>();

    static {
        registerHandler(new DoubleHandler());
        registerHandler(new IntegerHandler());
        registerHandler(new SerializableHandler());
        registerHandler(new StringHandler());
        registerHandler(new StreamHandler());
        registerHandler(new BytesHandler());
    }

    // a handler may serve several type names; register it under each of them
    public static void registerHandler(IoTypeHandler handler) {
        String[] types = handler.getKnownTypes();
        for (String type : types) {
            handlers.put(type, handler);
        }
    }

    public static IoTypeHandler getHandler(String type) {
        return handlers.get(type);
    }

    // resolves "reference" io values (e.g. locations) to input streams
    private StreamResolver streamResolver = new StreamResolver();

    /**
     * Resolves each configured input mapping to a concrete value for the matching
     * input node of the task, type-checking it against the node's accepted types.
     *
     * @throws TaskGraphException if the config targets a different tool/version,
     *         an essential node has no mapping, a type is unsupported, a reference
     *         cannot be resolved, or the value is type-incompatible with the node
     */
    public NodeMappings map(IoConfiguration config, Task task) throws TaskGraphException {
        String toolname = config.getToolName();
        String ver = config.getToolVersion();
        if (toolname != null) {
            if (!toolname.equals(task.getQualifiedToolName())) {
                throw new TaskGraphException("config tool name " + toolname + " does not match tool name " + task.getQualifiedToolName());
            }
        }
        if (ver != null) {
            if (!ver.equals(task.getVersion())) {
                throw new TaskGraphException("config tool version " + ver + " does not match tool version " + task.getVersion());
            }
        }
        List<IoMapping> maps = config.getInputs();
        Node[] nodes = task.getDataInputNodes();
        NodeMappings ret = new NodeMappings();
        for (Node node : nodes) {
            // mappings are keyed by the node index rendered as a string
            IoMapping curr = null;
            for (IoMapping map : maps) {
                String name = map.getNodeName();
                if (name.equals(node.getNodeIndex() + "")) {
                    curr = map;
                    break;
                }
            }
            if (curr == null && node.isEssential()) {
                throw new TaskGraphException("No IOMapping defined for essential node:" + node.getNodeIndex());
            }
            if (curr != null) {
                IoType iotype = curr.getIoType();
                String type = iotype.getType();
                IoTypeHandler handler = getHandler(type);
                if (handler == null) {
                    throw new TaskGraphException("Unsupported type:" + type);
                }
                String val = iotype.getValue();
                if (val != null) {
                    // a "reference" value is resolved to a stream; otherwise the
                    // value text itself is the serialized data
                    InputStream in = null;
                    if (iotype.isReference()) {
                        in = streamResolver.handle(val);
                        if (in == null) {
                            throw new TaskGraphException("Could not resolve io type reference:" + val);
                        }
                    } else {
                        in = new ByteArrayInputStream(val.getBytes());
                    }
                    Object o = handler.read(type, in);
                    // prefer per-node declared input types, falling back to the task-wide ones
                    String[] intypes = task.getDataInputTypes(node.getNodeIndex());
                    if (intypes == null) {
                        intypes = task.getDataInputTypes();
                    }
                    String[] clss = new String[]{o.getClass().getName()};
                    boolean compatible = TypeChecking.isCompatibility(TypeChecking.classForTrianaType(clss), TypeChecking.classForTrianaType(intypes));
                    if (!compatible) {
                        throw new TaskGraphException("input types are not compatible:" + type + " is not compatible with " + Arrays.asList(intypes) + " expected by " + task + " (" + task.getQualifiedTaskName() + ")" );
                    }
                    ret.addMapping(node.getNodeIndex(), o);
                }
            }
        }
        //TODO map outputs after execution, serialise
        //        List<IoMapping> outputs = config.getOutputs();
        //        for (Node node : task.getDataOutputNodes()){
        //            IoMapping mapping = null;
        //            for(IoMapping map : outputs){
        //                String name = map.getNodeName();
        //                if (name.equals(node.getNodeIndex() + "")) {
        //                    mapping = map;
        //                    break;
        //                }
        //            }
        //            if (mapping == null && node.isEssential()) {
        //                throw new TaskGraphException("No IOMapping defined for essential node:" + node.getNodeIndex());
        //            }
        //            if (mapping != null){
        //                ret.addOutputMapping(node.getNodeIndex(), mapping);
        //            }
        //        }
        return ret;
    }

    /**
     * Writes an IoConfiguration as XML: a &lt;configuration&gt; root with optional
     * toolName/toolVersion attributes and inputPorts/outputPorts child lists.
     */
    public void serialize(DocumentHandler handler, IoConfiguration config) throws IOException {
        Element root = handler.element("configuration");
        handler.setRoot(root);
        if (config.getToolName() != null) {
            handler.addAttribute(root, "toolName", config.getToolName());
        }
        if (config.getToolVersion() != null) {
            handler.addAttribute(root, "toolVersion", config.getToolVersion());
        }
        List<IoMapping> mappings = config.getInputs();
        Element mps = handler.element("inputPorts");
        root.appendChild(mps);
        for (IoMapping mapping : mappings) {
            Element map = handler.element("inputPort");
            mps.appendChild(serializeMapping(handler, mapping, map));
        }
        mps = handler.element("outputPorts");
        mappings = config.getOutputs();
        root.appendChild(mps);
        for (IoMapping mapping : mappings) {
            Element map = handler.element("outputPort");
            mps.appendChild(serializeMapping(handler, mapping, map));
        }
    }

    // reference values go into a "uri" attribute; inline values into the element text
    private Element serializeMapping(DocumentHandler handler, IoMapping mapping, Element map) {
        handler.addAttribute(map, "name", mapping.getNodeName());
        IoType iot = mapping.getIoType();
        handler.addAttribute(map, "type", iot.getType());
        if (iot.getValue() != null) {
            if (iot.isReference()) {
                handler.addAttribute(map, "uri", iot.getValue());
            } else {
                map.setTextContent(iot.getValue());
            }
        }
        return map;
    }

    /**
     * Reads an IoConfiguration from the XML format produced by {@link #serialize}.
     *
     * @throws IOException if the root element is not &lt;configuration&gt;
     */
    public IoConfiguration deserialize(InputStream in) throws IOException {
        DocumentHandler handler = new DocumentHandler(in);
        Element root = handler.root();
        if (!root.getTagName().equals("configuration")) {
            System.out.println("root tag not recognised.");
            throw new IOException("unknown element:" + root.getTagName());
        }
        String toolname = root.getAttribute("toolName");
        String ver = root.getAttribute("toolVersion");
        // DOM returns "" for missing attributes; normalize to null
        if (toolname != null && toolname.length() == 0) {
            toolname = null;
        }
        if (ver != null && ver.length() == 0) {
            ver = null;
        }
        Element inports = handler.getChild(root, "inputPorts");
        IoConfiguration conf = new IoConfiguration(toolname, ver);
        if (inports != null) {
            List<Element> ins = handler.getChildren(inports, "inputPort");
            for (Element element : ins) {
                deserializeMapping(conf, element, true);
            }
        }
        Element outports = handler.getChild(root, "outputPorts");
        if (outports != null) {
            List<Element> outs = handler.getChildren(outports, "outputPort");
            for (Element element : outs) {
                System.out.println("Deserializing outputPort");
                deserializeMapping(conf, element, false);
            }
        }
        return conf;
    }

    // builds an IoMapping from a port element and attaches it as input or output
    private void deserializeMapping(IoConfiguration conf, Element element, boolean in) {
        String node = element.getAttribute("name");
        String tp = element.getAttribute("type");
        if (tp != null && node != null) {
            String uri = element.getAttribute("uri");
            IoMapping iom;
            if (uri != null && uri.length() > 0) {
                iom = new IoMapping(new IoType(uri, tp, true), node);
            } else {
                if (element.getTextContent() != null && element.getTextContent().length() > 0) {
                    iom = new IoMapping(new IoType(element.getTextContent().trim(), tp), node);
                } else {
                    iom = new IoMapping(new IoType(null, tp), node);
                }
            }
            if (in) {
                conf.addInput(iom);
            } else {
                conf.addOutput(iom);
            }
        }
    }

    /**
     * Picks a handler by the runtime type of a value.
     * NOTE(review): looks up "boolean" and "java64" type names, but no handler for
     * those names is visibly registered in the static block above — confirm that
     * BooleanHandler/SerializableHandler cover them, else this returns null.
     */
    public static IoTypeHandler getHandler(Object value) {
        if (value instanceof String) {
            return getHandler("string");
        } else if (value instanceof Boolean) {
            return getHandler("boolean");
        } else if (value instanceof Double) {
            return getHandler("double");
        } else if (value instanceof Integer) {
            return getHandler("integer");
        } else if (value instanceof byte[]) {
            return getHandler("bytes");
        } else if (value instanceof InputStream) {
            return getHandler("stream");
        } else if (value instanceof Serializable) {
            return getHandler("java64");
        }
        return null;
    }

    // ad-hoc smoke test: loads a workflow file and maps the config onto it
    // (method continues beyond this chunk)
    private static void testMappings(IoConfiguration conf, String wf) throws Exception {
        File f = new File(wf);
        if (!f.exists() || f.length() == 0) {
            System.out.println("Cannot find workflow file:" + wf);
            System.exit(1);
        }
        TrianaInstance engine = new TrianaInstance();
        engine.init();
        XMLReader reader = new XMLReader(new FileReader(f));
        Task tool = (Task) reader.readComponent(engine.getProperties());
        NodeMappings ret = new
IoHandler().map(conf, tool); Map<Integer, Object> map = ret.getMap(); System.out.println("Node Mappings index => value"); for (Integer integer : map.keySet()) { System.out.println(integer + " => " + map.get(integer)); } } public static void main(String[] args) throws Exception { IoMapping in0 = new IoMapping(new IoType("hello x", "string"), "0"); IoMapping in1 = new IoMapping(new IoType("hello a", "string"), "1"); IoMapping in2 = new IoMapping(new IoType("./hello.txt", "string", true), "2"); IoMapping out0 = new IoMapping(new IoType("string"), "0"); IoConfiguration conf = new IoConfiguration("common.regexTG1", "0.1", Arrays.asList(in0, in1, in2), Arrays.asList(out0)); DocumentHandler handler = new DocumentHandler(); new IoHandler().serialize(handler, conf); handler.output(System.out, true); ByteArrayOutputStream bout = new ByteArrayOutputStream(); handler.output(bout); IoConfiguration ioc = new IoHandler().deserialize(new ByteArrayInputStream(bout.toByteArray())); System.out.println("config:"); System.out.println(" toolname:" + ioc.getToolName()); System.out.println(" tool version:" + ioc.getToolVersion()); List<IoMapping> mappings = ioc.getInputs(); for (IoMapping mapping : mappings) { System.out.println(" mapping:"); System.out.println(" name:" + mapping.getNodeName()); System.out.println(" type:" + mapping.getIoType().getType()); System.out.println(" val:" + mapping.getIoType().getValue()); System.out.println(" ref:" + mapping.getIoType().isReference()); } //testMappings(ioc, "/Users/scmabh/work/projects/triana/code/triana/triana-toolboxes/common/regexTG1.xml"); } }
package talecraft.client.gui.misc;

import java.text.NumberFormat;
import java.util.List;
import java.util.Locale;

import com.google.common.collect.Lists;

import net.minecraft.client.gui.GuiScreen;
import net.minecraft.nbt.NBTBase;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.text.TextFormatting;
import net.minecraftforge.common.util.Constants.NBT;
import talecraft.client.gui.qad.QADButton;
import talecraft.client.gui.qad.QADFACTORY;
import talecraft.client.gui.qad.QADGuiScreen;
import talecraft.client.gui.qad.QADNumberTextField;
import talecraft.client.gui.qad.QADPanel;
import talecraft.client.gui.qad.QADScrollPanel;
import talecraft.client.gui.qad.QADTextField;
import talecraft.client.gui.qad.QADTickBox;
import talecraft.client.gui.qad.QADNumberTextField.NumberType;
import talecraft.client.gui.qad.QADTextField.TextChangeListener;
import talecraft.client.gui.qad.QADTickBox.TickBoxModel;
import talecraft.client.gui.qad.model.nbtcompound.NBTBooleanTickBoxModel;
import talecraft.client.gui.qad.model.nbtcompound.NBTByteTextFieldModel;
import talecraft.client.gui.qad.model.nbtcompound.NBTDoubleTextFieldModel;
import talecraft.client.gui.qad.model.nbtcompound.NBTFloatTextFieldModel;
import talecraft.client.gui.qad.model.nbtcompound.NBTIntegerTextFieldModel;
import talecraft.client.gui.qad.model.nbtcompound.NBTShortTextFieldModel;
import talecraft.client.gui.qad.model.nbtcompound.NBTStringTextFieldModel;
import talecraft.util.MutableInteger;
import talecraft.util.NBTHelper;
import talecraft.util.RecursiveNBTIterator;
import talecraft.util.RecursiveNBTIterator.NBTTreeConsumer;

/**
 * GUI screen for editing an entity's NBT data. Edits happen directly on the
 * {@link NBTTagCompound} held by this screen (each field/tick-box is backed by
 * an NBT*Model bound to a tag key); "Merge" pushes the edited compound back
 * through a {@link RemoteEntityDataLink}.
 */
public class GuiEntityEditor extends QADGuiScreen {

    /** Callback used to push the edited NBT data back to the entity's owner. */
    public interface RemoteEntityDataLink {
        void updateData(NBTTagCompound entityData);
    }

    // Receiver of the edited data when the user presses "Merge".
    RemoteEntityDataLink dataLink;
    // The entity's NBT data being edited in place.
    final NBTTagCompound entityData;

    // Toggles between the structured editor panels and the raw NBT view.
    QADTickBox rawDataTickBox;
    QADButton buttonRefresh;
    QADButton buttonCancel;
    QADButton buttonApply;
    // Scrollable container that holds all generated editor panels.
    QADScrollPanel scrollPanel;
    List<QADPanel> panels;
    // Shared formatter for displaying doubles (2 fraction digits, English locale).
    NumberFormat format;

    // X offset of the value column (fields/tick-boxes) next to the labels.
    final int rightColumnOffset = 180;
    // Vertical spacing per row of widgets.
    final int lineHeight = 20;
    // Whether the raw NBT panel is shown (driven by rawDataTickBox).
    boolean showRawData = false;

    /**
     * @param entity   the entity's NBT data; edited in place by this screen
     * @param dataLink callback invoked with the edited data on "Merge"
     */
    public GuiEntityEditor(NBTTagCompound entity, RemoteEntityDataLink dataLink) {
        this.entityData = entity;
        this.dataLink = dataLink;
        // Strip tags that must not be merged back into the entity.
        // NOTE(review): presumably UUID/Dimension would corrupt the entity if
        // overwritten on merge — confirm against the data-link implementation.
        this.entityData.removeTag("UUIDMost");
        this.entityData.removeTag("UUIDLeast");
        this.entityData.removeTag("Dimension");
        // Fixed-format number display: always exactly 2 fraction digits.
        format = NumberFormat.getInstance(Locale.ENGLISH);
        format.setMaximumFractionDigits(2);
        format.setMinimumFractionDigits(2);
        format.setMinimumIntegerDigits(1);
        format.setMaximumIntegerDigits(32);
        format.setGroupingUsed(true);
    }

    /**
     * Builds the static chrome of the screen: the title bar, the
     * Refresh/Cancel/Merge buttons, the raw-data toggle, and the scroll panel,
     * then generates the initial set of editor panels.
     */
    @Override
    public void buildGui() {
        {
            // Opaque title-bar strip across the top of the screen.
            QADPanel panel = new QADPanel();
            panel.setPosition(0, 0);
            panel.setSize(9999, 22);
            panel.setBackgroundColor(0);
            addComponent(panel);
        }
        addComponent(QADFACTORY.createLabel("Editing Entity: " + entityData.getString("UUID"), 2, 2));
        addComponent(QADFACTORY.createLabel("Entity Type: " + entityData.getString("id"), 2, 2+10));
        // dont do anything, let the extended classes do their thing!
        // Buttons are positioned later (relayout); only width/label set here.
        buttonRefresh = new QADButton(0, 0, 20, "R");
        buttonCancel = new QADButton(0, 0, 40, "Cancel");
        buttonApply = new QADButton(0, 0, 40, "Merge");
        addComponent(buttonRefresh);
        addComponent(buttonCancel);
        addComponent(buttonApply);
        // Merge: push the edited compound to the owner, then close the screen.
        buttonApply.setAction(new Runnable() {
            @Override
            public void run() {
                dataLink.updateData(entityData);
                displayGuiScreen(null);
            }
        });
        // Cancel: close without sending anything (entityData was still edited
        // in place locally).
        buttonCancel.setAction(new Runnable() {
            @Override
            public void run() {
                displayGuiScreen(null);
            }
        });
        // Refresh: rebuild all panels from the current entityData.
        buttonRefresh.setAction(new Runnable() {
            @Override
            public void run() {
                generatePanels();
                relayout();
            }
        });
        // Tick box toggling the raw NBT view; any state change regenerates
        // the panels so the raw panel appears/disappears immediately.
        rawDataTickBox = new QADTickBox(0, (20-14)/2, 14, 14);
        rawDataTickBox.setModel(new TickBoxModel() {
            @Override
            public void toggleState() {
                GuiEntityEditor.this.showRawData ^= true;
                generatePanels();
                relayout();
            }
            @Override
            public void setState(boolean newState) {
                GuiEntityEditor.this.showRawData = newState;
                generatePanels();
                relayout();
            }
            @Override
            public boolean getState() {
                return GuiEntityEditor.this.showRawData;
            }
        });
        rawDataTickBox.setTooltip("Show raw NBT data.");
        addComponent(rawDataTickBox);
        // Scrollable area below the title bar that hosts the editor panels.
        scrollPanel = new QADScrollPanel();
        scrollPanel.setPosition(0, 22);
        addComponent(scrollPanel);
        this.panels = Lists.newArrayList();
        generatePanels();
    }
    // Use entityData to generate panels!
private void generatePanels() { // clear lists panels.clear(); scrollPanel.removeAllComponents(); {// General Data Panel generatePanels$General(); } if(entityData.hasKey("HealF")) { // Entity Living Base generatePanels$EntityLivingBase(); } if(entityData.getTagList("Attributes", NBT.TAG_COMPOUND).tagCount() > 0) { // Attributes generatePanels$Attributes(); } { // Inventory generatePanels$Inventory(); } if(entityData.getString("id").equalsIgnoreCase("Villager")) { // Entity Villager generatePanels$EntityVillager(); } if(entityData.getString("id").equalsIgnoreCase("Creeper")) { // Entity Creeper generatePanels$EntityCreeper(); } if(entityData.getString("id").equalsIgnoreCase("Slime")) { // Entity Slime generatePanels$EntitySlime(); } if(entityData.getString("id").equalsIgnoreCase("Enderman")) { // Entity Enderman generatePanels$EntityEnderman(); } if(entityData.getString("id").equalsIgnoreCase("Sheep")) { generatePanels$EntitySheep(); } if(entityData.getString("id").equalsIgnoreCase("Pig")) { generatePanels$EntityPig(); } if(entityData.getString("id").equalsIgnoreCase("Ozelot")) { generatePanels$EntityOzelot(); } if(entityData.getString("id").equalsIgnoreCase("Wolf")) { generatePanels$EntityWolf(); } if(entityData.getString("id").equalsIgnoreCase("VillagerGolem")) { generatePanels$EntityVillagerGolem(); } if(entityData.getString("id").equalsIgnoreCase("Zombie")) { generatePanels$EntityZombie(); } if(entityData.getString("id").equalsIgnoreCase("PigZombie")) { generatePanels$EntityZombie(); generatePanels$EntityPigZombie(); } if(entityData.getString("id").equalsIgnoreCase("Guardian")) { generatePanels$EntityGuardian(); } if(entityData.getString("id").equalsIgnoreCase("EntityHorse")) { generatePanels$EntityHorse(); } if(entityData.hasKey("OwnerUUID")) { // Pet generatePanels$Pet(); } if(showRawData) { // NBT; generatePanels$NBT(); } int yOff = 6; for(QADPanel panel : panels) { panel.setY(yOff); panel.setX(4); panel.setWidth(width); scrollPanel.addComponent(panel); yOff 
+= panel.getHeight() + 8; } scrollPanel.setViewportHeight(yOff); } private void generatePanels$EntityHorse() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Horse", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Is tamed?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Tame", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Has reproduced?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("HasReproduced", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Is bred?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Bred", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Is eating haystack?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("EatingHaystack", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Is chested?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("ChestedHorse", entityData))); yOff += lineHeight; { panel.addComponent(QADFACTORY.createLabel("Variant", 2, yOff)); Number value = entityData.getInteger("Variant"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("Variant", entityData)); panel.addComponent(tf); yOff += lineHeight; } { panel.addComponent(QADFACTORY.createLabel("Type", 2, yOff)); Number value = entityData.getInteger("Type"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new 
NBTIntegerTextFieldModel("Type", entityData)); panel.addComponent(tf); yOff += lineHeight; } { panel.addComponent(QADFACTORY.createLabel("Temper", 2, yOff)); Number value = entityData.getShort("Temper"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTShortTextFieldModel("Temper", entityData)); panel.addComponent(tf); yOff += lineHeight; } panel.setHeight(yOff); } private void generatePanels$EntityGuardian() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Guardian", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Is elder?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Elder", entityData))); yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntityZombie() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Zombie", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Can break doors?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("CanBreakDoors", entityData))); yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntityPigZombie() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Pig Zombie", 2, yOff)); yOff += lineHeight; { panel.addComponent(QADFACTORY.createLabel("Anger Time", 2, yOff)); Number value = entityData.getShort("Anger"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new 
NBTShortTextFieldModel("Anger", entityData)); panel.addComponent(tf); yOff += lineHeight; } { panel.addComponent(QADFACTORY.createLabel("Conversion Time", 2, yOff)); Number value = entityData.getInteger("ConversionTime"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("ConversionTime", entityData)); panel.addComponent(tf); yOff += lineHeight; } panel.setHeight(yOff); } private void generatePanels$EntityVillagerGolem() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Villager Golem", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Player created?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("PlayerCreated", entityData))); yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntityWolf() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Wolf", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Collar Color", 2, yOff)); { Number value = entityData.getByte("CollarColor"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTByteTextFieldModel("CollarColor", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Angry?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Angry", entityData))); yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntityOzelot() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); 
int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Ozelot", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Type", 2, yOff)); { Number value = entityData.getByte("CatType"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTByteTextFieldModel("CatType", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$Pet() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Pet", 2, yOff)); yOff += lineHeight; { panel.addComponent(QADFACTORY.createLabel("Owner UUID", 2, yOff)); QADTextField textField = new QADTextField(fontRenderer, rightColumnOffset, yOff-3, 140, 14); textField.setModel(new NBTStringTextFieldModel("OwnerUUID", entityData)); textField.textChangedListener = new TextChangeListener(){ @Override public void call(QADTextField field, String text) { try { field.setTextColor(0xFFFFFFFF); } catch (IllegalArgumentException e) { field.setTextColor(0xFFFF0000); } } }; panel.addComponent(textField); yOff += lineHeight; } if(entityData.hasKey("Sitting")) { panel.addComponent(QADFACTORY.createLabel("Sitting?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Sitting", entityData))); yOff += lineHeight; } panel.setHeight(yOff); } private void generatePanels$EntitySheep() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Sheep", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Sheared?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Sheared", entityData))); yOff += lineHeight; 
panel.addComponent(QADFACTORY.createLabel("Color", 2, yOff)); { Number value = entityData.getByte("Color"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTByteTextFieldModel("Color", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntityPig() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int lines = 5; panel.setHeight(lines*lineHeight+2); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Pig", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Saddled?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Saddle", entityData))); yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntityEnderman() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int lines = 5; panel.setHeight(lines*lineHeight+2); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Enderman", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Carry", 2, yOff)); panel.addComponent(QADFACTORY.createLabel(""+entityData.getShort("carried"), rightColumnOffset, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Carry Data", 2, yOff)); panel.addComponent(QADFACTORY.createLabel(""+entityData.getShort("carriedData"), rightColumnOffset, yOff)); yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntitySlime() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int lines = 5; panel.setHeight(lines*lineHeight+2); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Slime", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Size", 2, yOff)); { Number 
value = entityData.getInteger("Size"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("Size", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$EntityCreeper() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panels.add(panel); int lines = 5; panel.setHeight(lines*lineHeight+2); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Creeper", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Ignited?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("ignited", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Fuse", 2, yOff)); { Number value = entityData.getShort("fuse"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTShortTextFieldModel("fuse", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Explosion Radius", 2, yOff)); { Number value = entityData.getShort("ExplosionRadius"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTShortTextFieldModel("ExplosionRadius", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$General() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panel.setName("panel.general"); panels.add(panel); int lines = 5; panel.setHeight(lines*lineHeight+2); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"General Information", 2, yOff)); yOff += lineHeight; { 
panel.addComponent(QADFACTORY.createLabel("Custom Name", 2, yOff)); QADTextField textField = new QADTextField(fontRenderer, rightColumnOffset, yOff-3, 140, 14); textField.setModel(new NBTStringTextFieldModel("CustomName", entityData)); panel.addComponent(textField); yOff += lineHeight; } panel.addComponent(QADFACTORY.createLabel("Type", 2, yOff)); panel.addComponent(QADFACTORY.createLabel(entityData.getString("id"), rightColumnOffset, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("UUID", 2, yOff)); panel.addComponent(QADFACTORY.createLabel(entityData.getString("UUID"), rightColumnOffset, yOff)); yOff += lineHeight; { panel.addComponent(QADFACTORY.createLabel("Position (exact)", 2, yOff)); StringBuilder builder = new StringBuilder(); NBTTagList Pos = entityData.getTagList("Pos", NBT.TAG_DOUBLE); builder.append(format.format(Pos.getDoubleAt(0))).append(", "); builder.append(format.format(Pos.getDoubleAt(1))).append(", "); builder.append(format.format(Pos.getDoubleAt(2))); panel.addComponent(QADFACTORY.createLabel(builder.toString(), rightColumnOffset, yOff)); yOff += lineHeight; } { panel.addComponent(QADFACTORY.createLabel("Position (block)", 2, yOff)); StringBuilder builder = new StringBuilder(); NBTTagList li = entityData.getTagList("Pos", NBT.TAG_DOUBLE); builder.append((int)Math.floor(li.getDoubleAt(0))).append(", "); builder.append((int)Math.floor(li.getDoubleAt(1))).append(", "); builder.append((int)Math.floor(li.getDoubleAt(2))); panel.addComponent(QADFACTORY.createLabel(builder.toString(), rightColumnOffset, yOff)); yOff += lineHeight; } { panel.addComponent(QADFACTORY.createLabel("Rotation", 2, yOff)); StringBuilder builder = new StringBuilder(); NBTTagList li = entityData.getTagList("Rotation", NBT.TAG_DOUBLE); builder.append((int)Math.round(li.getDoubleAt(0))).append(", "); builder.append((int)Math.round(li.getDoubleAt(1))); panel.addComponent(QADFACTORY.createLabel(builder.toString(), rightColumnOffset, yOff)); yOff += lineHeight; 
} { panel.addComponent(QADFACTORY.createLabel("Motion", 2, yOff)); StringBuilder builder = new StringBuilder(); NBTTagList Pos = entityData.getTagList("Motion", NBT.TAG_DOUBLE); builder.append(format.format(Pos.getDoubleAt(0))).append(", "); builder.append(format.format(Pos.getDoubleAt(1))).append(", "); builder.append(format.format(Pos.getDoubleAt(2))); panel.addComponent(QADFACTORY.createLabel(builder.toString(), rightColumnOffset, yOff)); yOff += lineHeight; } panel.addComponent(QADFACTORY.createLabel("Persistence Required", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("PersistenceRequired", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("No-Clip?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("TC_NoClip", entityData))); yOff += lineHeight; /* { panel.addComponent(QADFACTORY.createLabel("[TC] Width", 2, yOff)); Number value = entityData.getFloat("TC_Width"); NumberType type = NumberType.DECIMAL; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTFloatTextFieldModel("TC_Width", entityData)); panel.addComponent(tf); yOff += lineHeight; } { panel.addComponent(QADFACTORY.createLabel("[TC] Height", 2, yOff)); Number value = entityData.getFloat("TC_Height"); NumberType type = NumberType.DECIMAL; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTFloatTextFieldModel("TC_Height", entityData)); panel.addComponent(tf); yOff += lineHeight; } //*/ { panel.addComponent(QADFACTORY.createLabel("Step Height", 2, yOff)); Number value = entityData.getFloat("TC_StepHeight"); NumberType type = NumberType.DECIMAL; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTFloatTextFieldModel("TC_StepHeight", 
entityData)); panel.addComponent(tf); yOff += lineHeight; } panel.setHeight(yOff); } private void generatePanels$NBT() { final QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panel.setName("panel.nbt"); panels.add(panel); int yOff = 2; final int lineHeight = 14; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GOLD +"Raw NBT", 2, yOff)); yOff += lineHeight; { QADButton buttonAsJson = QADFACTORY.createButton("Copy to Clipboard as JSON", 2, yOff, rightColumnOffset); buttonAsJson.setAction(new Runnable() { @Override public void run() { NBTTagCompound compound = (NBTTagCompound) entityData.copy(); compound.removeTag("Pos"); compound.removeTag("Motion"); compound.removeTag("Rotation"); String nbtAsJson = NBTHelper.asJson(compound); if(nbtAsJson != null) { GuiScreen.setClipboardString(nbtAsJson); } } }); buttonAsJson.setTooltip("Copies the entity data to the","clipboard as a string of JSON."); panel.addComponent(buttonAsJson); } yOff += lineHeight*2; // { // QADButton buttonEditRaw = QADFACTORY.createButton("Edit raw NBT", 2, yOff, rightColumnOffset); // buttonEditRaw.setAction(new Runnable() { // @Override public void run() { // GuiNBTEditor nbtEditor = new GuiNBTEditor(entityData); // nbtEditor.returnScreen = GuiEntityEditor.this; // displayGuiScreen(nbtEditor); // } // }); // buttonEditRaw.setTooltip("Opens the NBT-Editor."); // panel.addComponent(buttonEditRaw); // } // yOff += lineHeight*2; final MutableInteger yOffMut = new MutableInteger(yOff); RecursiveNBTIterator.iterate(entityData, new NBTTreeConsumer() { @Override public void consume(int depth, String name, NBTBase tag, NBTTagCompound parent) { int x = 6 + depth * 6; if(tag == null) { panel.addComponent(QADFACTORY.createLabel("---", x, yOffMut.get()-7)); return; } if(tag instanceof NBTTagCompound) { panel.addComponent(QADFACTORY.createLabel(name, x, yOffMut.get())); } else if (tag instanceof NBTTagList) { NBTTagList list = (NBTTagList)tag; if(list.getTagType() == NBT.TAG_COMPOUND) { 
panel.addComponent(QADFACTORY.createLabel(name, x, yOffMut.get())); } if(list.getTagType() == NBT.TAG_DOUBLE) { StringBuilder builder = new StringBuilder(); builder.append(list.tagCount()); builder.append(" ["); for(int i = 0; i < list.tagCount(); i++) { builder.append(TextFormatting.DARK_GRAY); builder.append(format.format(list.getDoubleAt(i))); builder.append(TextFormatting.WHITE); builder.append(", "); } builder.setLength(builder.length()-2); builder.append("]"); panel.addComponent(QADFACTORY.createLabel(name + " = " + builder.toString(), x, yOffMut.get())); } } else { panel.addComponent(QADFACTORY.createLabel(name + " = " + tag.toString(), x, yOffMut.get())); } yOffMut.add(lineHeight); } }); yOff = yOffMut.get(); panel.setHeight(yOff); } private void generatePanels$Inventory() { NBTTagList inventory = null; { NBTTagList __e = entityData.getTagList("Equipment", NBT.TAG_COMPOUND); NBTTagList __i = entityData.getTagList("Inventory", NBT.TAG_COMPOUND); if(__e.tagCount() > 0) { inventory = __e; } if(__i.tagCount() > 0) { inventory = __i; } if(inventory == null) return; } QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panel.setName("panel.inventory"); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.BLUE+"Inventory", 2, yOff)); yOff += lineHeight; NBTTagList list = inventory; for(int i = 0; i < list.tagCount(); i++) { final NBTTagCompound slot = list.getCompoundTagAt(i); if(slot.hasNoTags()) { continue; } int slotID = slot.hasKey("Slot") ? slot.getByte("Slot") : -1; panel.addComponent(QADFACTORY.createLabel("Item " + ((slotID == -1) ? 
i : slotID), 2, yOff)); panel.addComponent(QADFACTORY.createButton(NBTItemToString(slot), rightColumnOffset, yOff, 200, new Runnable() { @Override public void run() { QADGuiScreen guiScreen = new GuiItemStackEditor(slot); guiScreen.setBehind(GuiEntityEditor.this); displayGuiScreen(guiScreen); } })).textAlignment = 0; yOff += lineHeight; } if(yOff <= lineHeight+3) { return; } panel.setHeight(yOff); panels.add(panel); } private void generatePanels$EntityVillager() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panel.setName("panel.entity.villager"); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Villager", 2, yOff)); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Riches?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Riches", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Willing?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Willing", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Profession", 2, yOff)); panel.addComponent(QADFACTORY.createLabel(""+entityData.getInteger("Profession"), rightColumnOffset, yOff)); { Number value = entityData.getInteger("Profession"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("Profession", entityData)); panel.addComponent(tf); } yOff += lineHeight; if(entityData.hasKey("Career")) { panel.addComponent(QADFACTORY.createLabel("Career Level", 2, yOff)); { Number value = entityData.getInteger("CareerLevel"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("CareerLevel", entityData)); 
panel.addComponent(tf); } yOff += lineHeight; } if(entityData.hasKey("Offers")) { panel.addComponent(QADFACTORY.createLabel(TextFormatting.YELLOW+"Offers", 2, yOff)); yOff += lineHeight; final NBTTagCompound offers = entityData.getCompoundTag("Offers"); final NBTTagList recipes = offers.getTagList("Recipes", NBT.TAG_COMPOUND); panel.addComponent(QADFACTORY.createButton("Add Recipe", 2, yOff, 100, new Runnable() { @Override public void run() { NBTTagCompound compound = new NBTTagCompound(); compound.setInteger("uses", 0); compound.setInteger("maxUses", 1); compound.setBoolean("rewardExp", false); NBTTagCompound compoundBUY = new NBTTagCompound(); compoundBUY.setShort("Damage", (short)0); compoundBUY.setByte("Count", (byte)1); compoundBUY.setString("id", "minecraft:emerald"); NBTTagCompound compoundSELL = new NBTTagCompound(); compoundSELL.setShort("Damage", (short)0); compoundSELL.setByte("Count", (byte)1); compoundSELL.setString("id", "minecraft:stone"); compound.setTag("buy", compoundBUY); compound.setTag("sell", compoundSELL); recipes.appendTag(compound);; generatePanels(); relayout(); } })); yOff += lineHeight; for(int i = 0; i < recipes.tagCount(); i++) { final int index = i; final NBTTagCompound recipe = recipes.getCompoundTagAt(i); panel.addComponent(QADFACTORY.createLabel(TextFormatting.AQUA+"Recipe "+i, 2+8, yOff+6)); panel.addComponent(QADFACTORY.createButton("Remove Recipe", rightColumnOffset, yOff, 100, new Runnable() { @Override public void run() { recipes.removeTag(index); generatePanels(); relayout(); } })); yOff += lineHeight + 8; panel.addComponent(QADFACTORY.createLabel("Times Used:", 2+8, yOff)); { Number value = recipe.getInteger("uses"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("uses", recipe)); panel.addComponent(tf); } yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Maximum 
Uses:", 2+8, yOff)); { Number value = recipe.getInteger("maxUses"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("maxUses", recipe)); panel.addComponent(tf); } yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Reward Experience:", 2+8, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("rewardExp", recipe))); yOff += lineHeight; final NBTTagCompound slotBuy = recipe.getCompoundTag("buy"); final NBTTagCompound slotSell = recipe.getCompoundTag("sell"); panel.addComponent(QADFACTORY.createLabel("Buys:", 2+8, yOff)); panel.addComponent(QADFACTORY.createButton(NBTItemToString(slotBuy), rightColumnOffset, yOff, 140, new Runnable() { @Override public void run() { QADGuiScreen guiScreen = new GuiItemStackEditor(slotBuy); guiScreen.setBehind(GuiEntityEditor.this); displayGuiScreen(guiScreen); } })); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Sells:", 2+8, yOff)); panel.addComponent(QADFACTORY.createButton(NBTItemToString(slotSell), rightColumnOffset, yOff, 140, new Runnable() { @Override public void run() { QADGuiScreen guiScreen = new GuiItemStackEditor(slotSell); guiScreen.setBehind(GuiEntityEditor.this); displayGuiScreen(guiScreen); } })); yOff += lineHeight; yOff += lineHeight; } } panel.setHeight(yOff); } private void generatePanels$EntityLivingBase() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panel.setName("panel.livingbase"); panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GREEN+"Entity Living", 2, yOff)); yOff += lineHeight; { panel.addComponent(QADFACTORY.createLabel("Health", 2, yOff)); Number value = entityData.getFloat("HealF"); NumberType type = NumberType.DECIMAL; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); 
tf.setModel(new NBTFloatTextFieldModel("HealF", entityData)); panel.addComponent(tf); yOff += lineHeight; } panel.addComponent(QADFACTORY.createLabel("Air", 2, yOff)); { Number value = entityData.getInteger("Air"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("Air", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Fire", 2, yOff)); { Number value = entityData.getInteger("Fire"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("Fire", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Age", 2, yOff)); { Number value = entityData.getInteger("Age"); NumberType type = NumberType.INTEGER; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTIntegerTextFieldModel("Age", entityData)); panel.addComponent(tf); } yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("No AI?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("NoAI", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Invulnerable?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("Invulnerable", entityData))); yOff += lineHeight; panel.addComponent(QADFACTORY.createLabel("Can pick up loot?", 2, yOff)); panel.addComponent(new QADTickBox(rightColumnOffset, yOff-3, new NBTBooleanTickBoxModel("CanPickUpLoot", entityData))); yOff += lineHeight; panel.setHeight(yOff); } private void generatePanels$Attributes() { QADPanel panel = new QADPanel(); panel.setBackgroundColor(1); panel.setName("panel.attributes"); 
panels.add(panel); int yOff = 2; panel.addComponent(QADFACTORY.createLabel(TextFormatting.GRAY+"Attributes", 2, yOff)); yOff += lineHeight; NBTTagList list = entityData.getTagList("Attributes", NBT.TAG_COMPOUND); for(int i = 0; i < list.tagCount(); i++) { NBTTagCompound attribute = list.getCompoundTagAt(i); { String name = attribute.getString("Name"); panel.addComponent(QADFACTORY.createLabel(name, 2, yOff)); } if(attribute.hasKey("Base")) { Number value = entityData.getFloat("Base"); NumberType type = NumberType.DECIMAL; QADNumberTextField tf = new QADNumberTextField(fontRenderer, rightColumnOffset, yOff - 3, 140, 14, value, type); tf.setModel(new NBTDoubleTextFieldModel("Base", attribute)); panel.addComponent(tf); } yOff += lineHeight-3; } panel.setHeight(yOff); } private static final String NBTItemToString(NBTTagCompound slot) { StringBuilder builder = new StringBuilder(); builder.append(slot.getString("id")).append("/"); builder.append(slot.getShort("Damage")).append(" x"); builder.append(slot.getByte("Count")); return builder.toString(); } @Override public void layoutGui() { buttonCancel.setX(width-40); buttonApply.setX(width-82); buttonRefresh.setX(width-104); rawDataTickBox.setX(width - 124); scrollPanel.setSize(width, height-22); boolean fit = scrollPanel.getDoesViewportFit(); for(QADPanel panel : panels) { panel.setWidth(width-(fit?8:12)); } } }
/* * Copyright 2020 Crown Copyright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.gchq.gaffer.rest.factory; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.junit.jupiter.api.Test; import uk.gov.gchq.gaffer.commonutil.StreamUtil; import uk.gov.gchq.gaffer.data.element.Edge; import uk.gov.gchq.gaffer.data.element.Element; import uk.gov.gchq.gaffer.data.element.Entity; import uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator; import uk.gov.gchq.gaffer.data.element.id.EdgeId; import uk.gov.gchq.gaffer.data.element.id.ElementId; import uk.gov.gchq.gaffer.data.element.id.EntityId; import uk.gov.gchq.gaffer.data.elementdefinition.view.View; import uk.gov.gchq.gaffer.operation.OperationChain; import uk.gov.gchq.gaffer.operation.data.EntitySeed; import uk.gov.gchq.gaffer.operation.impl.GetWalks; import uk.gov.gchq.gaffer.operation.impl.add.AddElements; import uk.gov.gchq.gaffer.operation.impl.compare.Max; import uk.gov.gchq.gaffer.operation.impl.compare.Min; import uk.gov.gchq.gaffer.operation.impl.compare.Sort; import uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds; import uk.gov.gchq.gaffer.operation.impl.get.GetElements; import uk.gov.gchq.gaffer.store.schema.Schema; import java.util.ArrayList; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; public class AbstractExamplesFactoryTest { private static final Schema SCHEMA = new 
Schema.Builder() .json(StreamUtil.schema(TestExamplesFactory.class)) .build(); @Test public void shouldUseSchemaToCreateGetElementsInput() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new TestExamplesFactory(SCHEMA); // When GetElements operation = (GetElements) examplesFactory.generateExample(GetElements.class); // Then int size = 0; for (ElementId e : operation.getInput()) { size++; if (e instanceof EntityId) { assertEquals(String.class, ((EntityId) e).getVertex().getClass()); } else { assertEquals(String.class, ((EdgeId) e).getDestination().getClass()); assertEquals(String.class, ((EdgeId) e).getSource().getClass()); } } assertEquals(2, size); } @Test public void shouldUseSchemaToCreateGetAdjacentIdsInput() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new TestExamplesFactory(SCHEMA); // When GetAdjacentIds operation = (GetAdjacentIds) examplesFactory.generateExample(GetAdjacentIds.class); // Then int size = 0; for (ElementId e : operation.getInput()) { size++; if (e instanceof EntityId) { assertEquals(String.class, ((EntityId) e).getVertex().getClass()); } else { throw new RuntimeException("Expected operation only to contain entity ids"); } } assertEquals(1, size); } @Test public void shouldPopulateAddElementsAccordingToSchema() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new TestExamplesFactory(SCHEMA); // When AddElements operation = (AddElements) examplesFactory.generateExample(AddElements.class); // Then ArrayList<Element> expectedInput = Lists.newArrayList( new Entity.Builder() .group("BasicEntity") .vertex("vertex1") .property("count", 1) .build(), new Entity.Builder() .group("BasicEntity") .vertex("vertex2") .property("count", 2) .build(), new Edge.Builder() .group("BasicEdge") .source("vertex1") .dest("vertex2") .directed(true) .property("count", 1) .build() ); 
assertEquals(expectedInput, Lists.newArrayList(operation.getInput())); } @Test public void shouldUseSchemaForGroupsInSortOperation() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new TestExamplesFactory(SCHEMA); // When Sort operation = (Sort) examplesFactory.generateExample(Sort.class); // Then // Sort has no equals method assertEquals(1, operation.getComparators().size()); assertEquals(Sets.newHashSet("BasicEdge"), ((ElementPropertyComparator) operation.getComparators().get(0)).getGroups()); assertEquals("count", ((ElementPropertyComparator) operation.getComparators().get(0)).getProperty()); } @Test public void shouldUseSchemaForMaxOperation() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new TestExamplesFactory(SCHEMA); // When Max operation = (Max) examplesFactory.generateExample(Max.class); // Then // Max has no equals method assertEquals(1, operation.getComparators().size()); assertEquals(Sets.newHashSet("BasicEdge"), ((ElementPropertyComparator) operation.getComparators().get(0)).getGroups()); assertEquals("count", ((ElementPropertyComparator) operation.getComparators().get(0)).getProperty()); } @Test public void shouldUseSchemaForMinOperation() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new TestExamplesFactory(SCHEMA); // When Min operation = (Min) examplesFactory.generateExample(Min.class); // Then // Min has no equals method assertEquals(1, operation.getComparators().size()); assertEquals(Sets.newHashSet("BasicEdge"), ((ElementPropertyComparator) operation.getComparators().get(0)).getGroups()); assertEquals("count", ((ElementPropertyComparator) operation.getComparators().get(0)).getProperty()); } @Test public void shouldProvideEmptyGetWalksIfSchemaContainsNoEdges() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new 
TestExamplesFactory(new Schema()); // When GetWalks operation = (GetWalks) examplesFactory.generateExample(GetWalks.class); // Then assertNull(operation.getInput()); assertEquals(0, operation.getOperations().size()); } @Test public void shouldProvideSchemaPopulatedGetWalksIfSchemaContainsEdges() throws InstantiationException, IllegalAccessException { // Given TestExamplesFactory examplesFactory = new TestExamplesFactory(SCHEMA); // When GetWalks operation = (GetWalks) examplesFactory.generateExample(GetWalks.class); // Then assertEquals(Lists.newArrayList(new EntitySeed("vertex1")), Lists.newArrayList(operation.getInput())); assertEquals(Lists.newArrayList(new OperationChain.Builder() .first( new GetElements.Builder() .view(new View.Builder() .edge("BasicEdge") .build()) .build()) .build()), operation.getOperations()); } private static class TestExamplesFactory extends AbstractExamplesFactory { private Schema schema; TestExamplesFactory(final Schema schema) { this.schema = schema; } @Override protected Schema getSchema() { return this.schema; } } }
/** * Copyright (C) 2012-2015 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud.virtustream.compute; import org.apache.log4j.Logger; import org.dasein.cloud.AsynchronousTask; import org.dasein.cloud.CloudException; import org.dasein.cloud.InternalException; import org.dasein.cloud.ResourceStatus; import org.dasein.cloud.compute.AbstractImageSupport; import org.dasein.cloud.compute.Architecture; import org.dasein.cloud.compute.ImageCapabilities; import org.dasein.cloud.compute.ImageClass; import org.dasein.cloud.compute.ImageCreateOptions; import org.dasein.cloud.compute.ImageFilterOptions; import org.dasein.cloud.compute.MachineImage; import org.dasein.cloud.compute.MachineImageState; import org.dasein.cloud.compute.Platform; import org.dasein.cloud.compute.VirtualMachine; import org.dasein.cloud.util.APITrace; import org.dasein.cloud.virtustream.Virtustream; import org.dasein.cloud.virtustream.VirtustreamMethod; import org.dasein.util.CalendarWrapper; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.*; public class Templates extends AbstractImageSupport<Virtustream> { static private final Logger logger = Logger.getLogger(Templates.class); 
static private final String CAPTURE_IMAGE = "Image.captureImage"; static private final String DISCONNECT_NIC = "Image.disconnectNic"; static private final String GET_IMAGE = "Image.getImage"; static private final String IS_SUBSCRIBED = "Image.isSubscribed"; static private final String LIST_IMAGES = "Image.listImages"; static private final String LIST_IMAGE_STATUS = "Image.listImageStatus"; static private final String REMOVE_TEMPLATE = "Image.removeTemplate"; static private final String SEARCH_PUBLIC_IMAGES = "Image.searchPublicImages"; public Templates(@Nonnull Virtustream provider) { super(provider); } private transient volatile TemplateCapabilities capabilities; @Override public ImageCapabilities getCapabilities() throws CloudException, InternalException { if( capabilities == null ) { capabilities = new TemplateCapabilities(getProvider()); } return capabilities; } @Nullable @Override public MachineImage getImage(@Nonnull String providerImageId) throws CloudException, InternalException { APITrace.begin(getProvider(), GET_IMAGE); try { VirtustreamMethod method = new VirtustreamMethod(getProvider()); String obj = method.getString("VirtualMachine/"+providerImageId+"?$filter=IsRemoved eq false", GET_IMAGE); if (obj != null && obj.length()> 0 ) { try { JSONObject json = new JSONObject(obj); MachineImage img = toImage(json); if (img != null) { return img; } } catch (JSONException e) { logger.error(e); throw new InternalException("Unable to parse JSON "+e.getMessage()); } } return null; } finally { APITrace.end(); } } @Override protected MachineImage capture(@Nonnull ImageCreateOptions options, @Nullable AsynchronousTask<MachineImage> task) throws CloudException, InternalException { APITrace.begin(getProvider(), CAPTURE_IMAGE); try { // create a copy of the vm first and then mark the copy as a template String vmid = options.getVirtualMachineId(); String templateName = options.getName(); String description = options.getDescription(); boolean powerOn = false; 
VirtualMachines support = getProvider().getComputeServices().getVirtualMachineSupport(); VirtualMachine currentVM = support.getVirtualMachine(vmid); VirtualMachine newVM = support.clone(vmid, currentVM.getProviderDataCenterId(), templateName, description, powerOn, currentVM.getProviderFirewallIds()); VirtustreamMethod method = new VirtustreamMethod(getProvider()); String obj = method.postString("/VirtualMachine/MarkAsTemplate", newVM.getProviderVirtualMachineId(), CAPTURE_IMAGE); String templateId = null; if (obj != null && obj.length() > 0) { try { JSONObject json = new JSONObject(obj); templateId = getProvider().parseTaskId(json); } catch (JSONException e) { logger.error(e); throw new InternalException("Unable to parse JSON "+e.getMessage()); } } if (templateId == null) { logger.error("Template created without error but no new id returned"); throw new CloudException("Template created without error but no new id returned"); } long timeout = System.currentTimeMillis()+(CalendarWrapper.MINUTE *5l); MachineImage img = null; while (timeout > System.currentTimeMillis()) { img = getImage(templateId); if (img != null) { break; } try { Thread.sleep(15000l); } catch (InterruptedException ignore) {} } if( img == null ) { logger.error("Machine image job completed successfully, but no image " + templateId + " exists."); throw new CloudException("Machine image job completed successfully, but no image " + templateId + " exists."); } if( task != null ) { task.completeWithResult(img); } return img; } finally { APITrace.end(); } } @Nonnull @Override public String getProviderTermForImage(@Nonnull Locale locale, @Nonnull ImageClass cls) { return "template"; } @Override public boolean isImageSharedWithPublic(@Nonnull String providerImageId) throws CloudException, InternalException { MachineImage img = getImage(providerImageId); return img.getTag("isPublic").equals("true"); } @Override public boolean isSubscribed() throws CloudException, InternalException { 
APITrace.begin(getProvider(), IS_SUBSCRIBED); try { try { VirtustreamMethod method = new VirtustreamMethod(getProvider()); List<MachineImage> list = new ArrayList<MachineImage>(); method.getString("VirtualMachine?$filter=IsTemplate eq true and IsRemoved eq false", LIST_IMAGES); return true; } catch (Throwable ignore) { return false; } } finally { APITrace.end(); } } @Nonnull @Override public Iterable<ResourceStatus> listImageStatus(@Nonnull ImageClass cls) throws CloudException, InternalException { APITrace.begin(getProvider(), LIST_IMAGE_STATUS); try { if( !cls.equals(ImageClass.MACHINE) ) { return Collections.emptyList(); } VirtustreamMethod method = new VirtustreamMethod(getProvider()); List<ResourceStatus> list = new ArrayList<ResourceStatus>(); String obj = method.getString("VirtualMachine?$filter=IsTemplate eq true and IsRemoved eq false and TenantID eq '"+getContext().getAccountNumber()+"'", LIST_IMAGES); if (obj != null && obj.length() > 0) { JSONArray json = null; JSONObject node = null; try { json = new JSONArray(obj); } catch (JSONException e) { logger.error(e); throw new InternalException("Unable to parse JSON "+e.getMessage()); } for (int i=0; i<json.length(); i++) { try { node = json.getJSONObject(i); String imageId = null; imageId = node.getString("VirtualMachineID"); if (imageId == null) { logger.error("Found a template without an id"); continue; } //check this is indeed a template boolean isTemplate = node.getBoolean("IsTemplate"); if (!isTemplate) { logger.error("Resource with id "+imageId+" is not a template"); continue; } boolean isRemoved = node.getBoolean("IsRemoved"); if (isRemoved) { logger.debug("IsRemoved flag set so not returning template "+imageId); continue; } if (!node.has("TenantID") || node.isNull("TenantID")) { logger.warn("No tenant id found for "+imageId); continue; } ResourceStatus status = new ResourceStatus(imageId, MachineImageState.ACTIVE); list.add(status); } catch (JSONException e) { logger.error(e); throw new 
InternalException("Unable to parse JSONObject "+node); } } } return list; } finally { APITrace.end(); } } @Nonnull @Override public Iterable<MachineImage> listImages(@Nullable ImageFilterOptions options) throws CloudException, InternalException { APITrace.begin(getProvider(), GET_IMAGE); try { VirtustreamMethod method = new VirtustreamMethod(getProvider()); List<MachineImage> list = new ArrayList<MachineImage>(); String obj = method.getString("VirtualMachine?$filter=IsTemplate eq true and IsRemoved eq false and TenantID eq '"+getContext().getAccountNumber()+"'", LIST_IMAGES); if (obj != null && obj.length() > 0) { JSONArray json = null; try { json = new JSONArray(obj); for (int i=0; i<json.length(); i++) { MachineImage img = toImage(json.getJSONObject(i)); if (img != null && (options == null || options.matches(img))) { if (options != null) { if (options.getWithAllRegions()) { list.add(img); } else if (img.getProviderRegionId().equals(getContext().getRegionId())) { list.add(img); } } else { list.add(img); } } } } catch (JSONException e) { logger.error(e); throw new InternalException("Unable to parse JSON "+e.getMessage()); } } return list; } finally { APITrace.end(); } } @Nonnull @Override public Iterable<MachineImage> searchPublicImages(@Nonnull ImageFilterOptions options) throws CloudException, InternalException { APITrace.begin(getProvider(), SEARCH_PUBLIC_IMAGES); try { VirtustreamMethod method = new VirtustreamMethod(getProvider()); List<MachineImage> list = new ArrayList<MachineImage>(); JSONArray json; String obj = method.getString("VirtualMachine?$filter=IsGlobalTemplate eq true and IsRemoved eq false", SEARCH_PUBLIC_IMAGES); if (obj != null && obj.length() > 0) { try { json = new JSONArray(obj); for (int i = 0; i<json.length(); i++) { MachineImage img = toImage(json.getJSONObject(i)); if (img != null && (options == null || options.matches(img))) { list.add(img); } } } catch (JSONException e) { logger.error(e); throw new InternalException("Unable to parse 
JSON "+e.getMessage()); } } return list; } finally { APITrace.end(); } } @Override public boolean supportsCustomImages() throws CloudException, InternalException { return true; } @Override public void remove(@Nonnull String providerImageId, boolean checkState) throws CloudException, InternalException { APITrace.begin(getProvider(), REMOVE_TEMPLATE); try { VirtustreamMethod method = new VirtustreamMethod(getProvider()); String obj = method.postString("VirtualMachine/RemoveTemplate", providerImageId, REMOVE_TEMPLATE); if (obj != null && obj.length() > 0) { JSONObject json; try { json = new JSONObject(obj); if (getProvider().parseTaskId(json) == null) { logger.warn("No confirmation of RemoveTemplate task completion but no error either"); } } catch (JSONException e) { logger.error(e); throw new InternalException("Unable to parse JSON "+e.getMessage()); } } } finally { APITrace.end(); } } private MachineImage toImage(@Nonnull JSONObject node) throws InternalException, CloudException { try { String ownerId = null; String regionId = null; String imageId; MachineImageState state = MachineImageState.ACTIVE; String name = null; String description = null; Architecture architecture; Platform platform; String datacenterId = null; HashMap<String,String> properties = new HashMap<String,String>(); imageId = node.getString("VirtualMachineID"); if (imageId == null) { logger.error("Found a template without an id"); return null; } //check this is indeed a template boolean isTemplate = node.getBoolean("IsTemplate"); if (!isTemplate) { logger.error("Resource with id "+imageId+" is not a template"); return null; } boolean isRemoved = node.getBoolean("IsRemoved"); if (isRemoved) { logger.debug("IsRemoved flag set so not returning template "+imageId); return null; } if (node.has("CustomerDefinedName") && !node.isNull("CustomerDefinedName")) { name = node.getString("CustomerDefinedName"); } if (node.has("Description") && !node.isNull("Description")) { description = 
node.getString("Description"); } platform = Platform.guess(node.getString("OS")); if (platform.equals(Platform.UNKNOWN)) { platform = Platform.guess(name); } architecture = guess(node.getString("OSFullName")); if (node.has("TenantID") && !node.isNull("TenantID")) { ownerId = node.getString("TenantID"); } else { //no owner id so this template may not be stable return null; } if (node.has("RegionID") && !node.isNull("RegionID")) { regionId = node.getString("RegionID"); } properties.put("isPublic", node.getBoolean("IsGlobalTemplate") == true ? "true" : "false"); if (node.has("Hypervisor") && !node.isNull("Hypervisor")) { JSONObject hv = node.getJSONObject("Hypervisor"); JSONObject site = hv.getJSONObject("Site"); datacenterId = site.getString("SiteID"); if (regionId == null || regionId.equals("0")) { //get region from hypervisor site JSONObject r = site.getJSONObject("Region"); regionId = r.getString("RegionID"); } } if (node.has("Disks") && !node.isNull("Disks")) { JSONArray disks = node.getJSONArray("Disks"); JSONObject disk = disks.getJSONObject(0); int deviceKey = disk.getInt("DeviceKey"); properties.put("diskDeviceKey", Integer.toString(deviceKey)); } if (node.has("Nics") && !node.isNull("Nics")) { JSONArray nics = node.getJSONArray("Nics"); JSONObject nic = nics.getJSONObject(0); int deviceKey = nic.getInt("DeviceKey"); properties.put("nicDeviceKey", Integer.toString(deviceKey)); String nicID = nic.getString("VirtualMachineNicID"); properties.put("nicID", nicID); int adapterType = nic.getInt("AdapterType"); properties.put("nicAdapterType", Integer.toString(adapterType)); } if (regionId == null) { logger.error("Unable to find region id for template "+imageId); return null; } if (name == null) { name = imageId; } if (description == null) { description = name; } MachineImage img = MachineImage.getMachineImageInstance(ownerId, regionId, imageId, state, name, description, architecture, platform); img.setTags(properties); if (datacenterId != null) { 
img.constrainedTo(datacenterId); } return img; } catch (JSONException e) { logger.error(e); throw new InternalException("Unable to parse JSONObject "+e.getMessage()); } } private Architecture guess(String desc) { Architecture arch = Architecture.I64; if( desc.contains("x64") ) { arch = Architecture.I64; } else if( desc.contains("x32") ) { arch = Architecture.I32; } else if( desc.contains("64 bit") ) { arch = Architecture.I64; } else if( desc.contains("32 bit") ) { arch = Architecture.I32; } else if( desc.contains("i386") ) { arch = Architecture.I32; } else if( desc.contains("64") ) { arch = Architecture.I64; } else if( desc.contains("32") ) { arch = Architecture.I32; } return arch; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.handler.component;

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;

import org.apache.lucene.search.BooleanQuery;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.MoreLikeThisParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.handler.MoreLikeThisHandler;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.DocListAndSet;
import org.apache.solr.search.SolrIndexSearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Search component that, when {@code mlt=true}, adds a "moreLikeThis"
 * section to the response containing, for each result document, a list of
 * similar documents. In distributed mode it first collects per-document MLT
 * queries from the shards, re-issues them as follow-up shard requests, and
 * merges the shard results in {@link #finishStage(ResponseBuilder)}.
 *
 * @since solr 1.3
 */
public class MoreLikeThisComponent extends SearchComponent {
  public static final String COMPONENT_NAME = "mlt";
  // Shard-request parameter carrying the id of the document the MLT query was built for.
  public static final String DIST_DOC_ID = "mlt.dist.id";
  static final Logger log = LoggerFactory
      .getLogger(MoreLikeThisComponent.class);

  /** No preparation needed; all work happens in {@link #process(ResponseBuilder)}. */
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {

  }

  /**
   * Adds the "moreLikeThis" response section when {@code mlt=true}.
   * On a shard, the first pass (no {@link #DIST_DOC_ID} param) returns the
   * MLT *queries* as strings; a pass with the param — and the non-distributed
   * case — returns the matching documents themselves.
   */
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    if (params.getBool(MoreLikeThisParams.MLT, false)) {
      log.debug("Starting MoreLikeThis.Process.  isShard: "
          + params.getBool(ShardParams.IS_SHARD));
      SolrIndexSearcher searcher = rb.req.getSearcher();
      // mlt.count: how many similar documents to return per result (default 5).
      int mltcount = params.getInt(MoreLikeThisParams.DOC_COUNT, 5);
      if (params.getBool(ShardParams.IS_SHARD, false)) {
        if (params.get(MoreLikeThisComponent.DIST_DOC_ID) == null) {
          if (rb.getResults().docList.size() == 0) {
            // return empty response
            rb.rsp.add("moreLikeThis", new NamedList<DocList>());
            return;
          }

          MoreLikeThisHandler.MoreLikeThisHelper mlt = new MoreLikeThisHandler.MoreLikeThisHelper(
              params, searcher);
          // Build one BooleanQuery per result document, keyed by unique id,
          // and ship them back as strings for the coordinator to re-issue.
          NamedList<BooleanQuery> bQuery = mlt.getMoreLikeTheseQuery(rb
              .getResults().docList);

          NamedList<String> temp = new NamedList<String>();
          Iterator<Entry<String,BooleanQuery>> idToQueryIt = bQuery.iterator();

          while (idToQueryIt.hasNext()) {
            Entry<String,BooleanQuery> idToQuery = idToQueryIt.next();
            String s = idToQuery.getValue().toString();

            log.debug("MLT Query:" + s);
            temp.add(idToQuery.getKey(), idToQuery.getValue().toString());
          }

          rb.rsp.add("moreLikeThis", temp);
        } else {
          // Second shard pass: DIST_DOC_ID present, return actual documents.
          NamedList<DocList> sim = getMoreLikeThese(rb, rb.req.getSearcher(),
              rb.getResults().docList, mltcount);
          rb.rsp.add("moreLikeThis", sim);
        }
      } else {
        // non distrib case
        NamedList<DocList> sim = getMoreLikeThese(rb, rb.req.getSearcher(),
            rb.getResults().docList, mltcount);
        rb.rsp.add("moreLikeThis", sim);
      }
    }
  }

  /**
   * Coordinator side: after the top-ids phase, turns each shard-returned MLT
   * query string into a new shard request (tagged with the source doc id via
   * {@code buildShardQuery}); the MLT-results phase responses are only logged
   * here and actually merged in {@link #finishStage(ResponseBuilder)}.
   */
  @Override
  public void handleResponses(ResponseBuilder rb, ShardRequest sreq) {
    if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0
        && rb.req.getParams().getBool(COMPONENT_NAME, false)) {
      log.debug("ShardRequest.response.size: " + sreq.responses.size());
      for (ShardResponse r : sreq.responses) {
        NamedList<?> moreLikeThisReponse = (NamedList<?>) r.getSolrResponse()
            .getResponse().get("moreLikeThis");
        log.debug("ShardRequest.response.shard: " + r.getShard());
        if (moreLikeThisReponse != null) {
          for (Entry<String,?> entry : moreLikeThisReponse) {
            log.debug("id: \"" + entry.getKey() + "\" Query: \""
                + entry.getValue() + "\"");
            ShardRequest s = buildShardQuery(rb, (String) entry.getValue(),
                entry.getKey());
            rb.addRequest(this, s);
          }
        }
      }
    }

    if ((sreq.purpose & ShardRequest.PURPOSE_GET_MLT_RESULTS) != 0) {
      for (ShardResponse r : sreq.responses) {
        log.debug("MLT Query returned: "
            + r.getSolrResponse().getResponse().toString());
      }
    }
  }

  /**
   * Merges the per-shard MLT result lists (keyed by source document id) and
   * attaches the final "moreLikeThis" section, ordered to match the main
   * result list.
   */
  @Override
  public void finishStage(ResponseBuilder rb) {
    // Handling Responses in finishStage, because solrResponse will put
    // moreLikeThis xml
    // segment ahead of result/response.
    if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS
        && rb.req.getParams().getBool(COMPONENT_NAME, false)) {
      Map<Object,SolrDocumentList> tempResults = new LinkedHashMap<Object,SolrDocumentList>();

      int mltcount = rb.req.getParams().getInt(MoreLikeThisParams.DOC_COUNT, 5);
      String keyName = rb.req.getSchema().getUniqueKeyField().getName();

      for (ShardRequest sreq : rb.finished) {
        if ((sreq.purpose & ShardRequest.PURPOSE_GET_MLT_RESULTS) != 0) {
          for (ShardResponse r : sreq.responses) {
            log.debug("ShardRequest.response.shard: " + r.getShard());
            // The source doc id travels on the request params, not the response.
            String key = r.getShardRequest().params
                .get(MoreLikeThisComponent.DIST_DOC_ID);
            SolrDocumentList shardDocList = (SolrDocumentList) r.getSolrResponse().getResponse().get("response");

            if (shardDocList == null) {
              continue;
            }

            log.info("MLT: results added for key: " + key + " documents: "
                + shardDocList.toString());
            // if (log.isDebugEnabled()) {
            // for (SolrDocument doc : shardDocList) {
            // doc.addField("shard", "=" + r.getShard());
            // }
            // }
            SolrDocumentList mergedDocList = tempResults.get(key);

            if (mergedDocList == null) {
              // First list for this key: copy it as the merge base.
              mergedDocList = new SolrDocumentList();
              mergedDocList.addAll(shardDocList);
              mergedDocList.setNumFound(shardDocList.getNumFound());
              mergedDocList.setStart(shardDocList.getStart());
              mergedDocList.setMaxScore(shardDocList.getMaxScore());
            } else {
              mergedDocList = mergeSolrDocumentList(mergedDocList,
                  shardDocList, mltcount, keyName);
            }
            log.debug("Adding docs for key: " + key);
            tempResults.put(key, mergedDocList);
          }
        }
      }

      NamedList<SolrDocumentList> list = buildMoreLikeThisNamed(tempResults,
          rb.resultIds);

      rb.rsp.add("moreLikeThis", list);

    }
    super.finishStage(rb);
  }

  /**
   * Returns NamedList based on the order of
   * resultIds.shardDoc.positionInResponse, so the MLT section lines up with
   * the main result list. Keys missing from {@code allMlt} get an empty list.
   */
  NamedList<SolrDocumentList> buildMoreLikeThisNamed(
      Map<Object,SolrDocumentList> allMlt, Map<Object,ShardDoc> resultIds) {
    NamedList<SolrDocumentList> result = new NamedList<SolrDocumentList>();
    TreeMap<Integer,Object> sortingMap = new TreeMap<Integer,Object>();
    for (Entry<Object,ShardDoc> next : resultIds.entrySet()) {
      sortingMap.put(next.getValue().positionInResponse, next.getKey());
    }
    for (Object key : sortingMap.values()) {
      SolrDocumentList sdl = allMlt.get(key);
      if (sdl == null) {
        sdl = new SolrDocumentList();
        sdl.setNumFound(0);
        sdl.setStart(0);
      }
      result.add(key.toString(), sdl);
    }
    return result;
  }

  /**
   * Merges two shard result lists, de-duplicating by the unique key field
   * (entries from {@code two} win on collision).
   */
  public SolrDocumentList mergeSolrDocumentList(SolrDocumentList one,
      SolrDocumentList two, int maxSize, String idField) {

    List<SolrDocument> l = new ArrayList<SolrDocument>();

    // De-dup records sets. Shouldn't happen if indexed correctly.
    Map<String,SolrDocument> map = new HashMap<String,SolrDocument>();
    for (SolrDocument doc : one) {
      Object id = doc.getFieldValue(idField);
      assert id != null : doc.toString();
      map.put(id.toString(), doc);
    }
    for (SolrDocument doc : two) {
      map.put(doc.getFieldValue(idField).toString(), doc);
    }

    l = new ArrayList<SolrDocument>(map.values());

    // Comparator to sort docs based on score. null scores/docs are set to 0.
// hmm...we are ordering by scores that are not really comparable... Comparator<SolrDocument> c = new Comparator<SolrDocument>() { public int compare(SolrDocument o1, SolrDocument o2) { Float f1 = getFloat(o1); Float f2 = getFloat(o2); return f2.compareTo(f1); } private Float getFloat(SolrDocument doc) { Float f = 0f; if (doc != null) { Object o = doc.getFieldValue("score"); if (o != null && o instanceof Float) { f = (Float) o; } } return f; } }; Collections.sort(l, c); // Truncate list to maxSize if (l.size() > maxSize) { l = l.subList(0, maxSize); } // Create SolrDocumentList Attributes from originals SolrDocumentList result = new SolrDocumentList(); result.addAll(l); result.setMaxScore(Math.max(one.getMaxScore(), two.getMaxScore())); result.setNumFound(one.getNumFound() + two.getNumFound()); result.setStart(Math.min(one.getStart(), two.getStart())); return result; } ShardRequest buildShardQuery(ResponseBuilder rb, String q, String key) { ShardRequest s = new ShardRequest(); s.params = new ModifiableSolrParams(rb.req.getParams()); s.purpose |= ShardRequest.PURPOSE_GET_MLT_RESULTS; // Maybe unnecessary, but safe. s.purpose |= ShardRequest.PURPOSE_PRIVATE; s.params.remove(ShardParams.SHARDS); // s.params.remove(MoreLikeThisComponent.COMPONENT_NAME); // needed to correlate results s.params.set(MoreLikeThisComponent.DIST_DOC_ID, key); s.params.set(CommonParams.START, 0); int mltcount = s.params.getInt(MoreLikeThisParams.DOC_COUNT, 20); // overrequest s.params.set(CommonParams.ROWS, mltcount); // adding score to rank moreLikeThis s.params.remove(CommonParams.FL); // Should probably add something like this: // String fl = s.params.get(MoreLikeThisParams.RETURN_FL, "*"); // if(fl != null){ // s.params.set(CommonParams.FL, fl + ",score"); // } String id = rb.req.getSchema().getUniqueKeyField() .getName(); s.params.set(CommonParams.FL, "score," + id); s.params.set("sort", "score desc"); // MLT Query is submitted as normal query to shards. 
s.params.set(CommonParams.Q, q); return s; } ShardRequest buildMLTQuery(ResponseBuilder rb, String q) { ShardRequest s = new ShardRequest(); s.params = new ModifiableSolrParams(); s.params.set(CommonParams.START, 0); String id = rb.req.getSchema().getUniqueKeyField().getName(); s.params.set(CommonParams.FL, "score," + id); // MLT Query is submitted as normal query to shards. s.params.set(CommonParams.Q, q); s.shards = ShardRequest.ALL_SHARDS; return s; } NamedList<DocList> getMoreLikeThese(ResponseBuilder rb, SolrIndexSearcher searcher, DocList docs, int flags) throws IOException { SolrParams p = rb.req.getParams(); IndexSchema schema = searcher.getSchema(); MoreLikeThisHandler.MoreLikeThisHelper mltHelper = new MoreLikeThisHandler.MoreLikeThisHelper( p, searcher); NamedList<DocList> mlt = new SimpleOrderedMap<DocList>(); DocIterator iterator = docs.iterator(); SimpleOrderedMap<Object> dbg = null; if (rb.isDebug()) { dbg = new SimpleOrderedMap<Object>(); } while (iterator.hasNext()) { int id = iterator.nextDoc(); int rows = p.getInt(MoreLikeThisParams.DOC_COUNT, 5); DocListAndSet sim = mltHelper.getMoreLikeThis(id, 0, rows, null, null, flags); String name = schema.printableUniqueKey(searcher.doc(id)); mlt.add(name, sim.docList); if (dbg != null) { SimpleOrderedMap<Object> docDbg = new SimpleOrderedMap<Object>(); docDbg.add("rawMLTQuery", mltHelper.getRawMLTQuery().toString()); docDbg .add("boostedMLTQuery", mltHelper.getBoostedMLTQuery().toString()); docDbg.add("realMLTQuery", mltHelper.getRealMLTQuery().toString()); SimpleOrderedMap<Object> explains = new SimpleOrderedMap<Object>(); DocIterator mltIte = sim.docList.iterator(); while (mltIte.hasNext()) { int mltid = mltIte.nextDoc(); String key = schema.printableUniqueKey(searcher.doc(mltid)); explains.add(key, searcher.explain(mltHelper.getRealMLTQuery(), mltid)); } docDbg.add("explain", explains); dbg.add(name, docDbg); } } // add debug information if (dbg != null) { rb.addDebugInfo("moreLikeThis", dbg); } return 
mlt; } // /////////////////////////////////////////// // / SolrInfoMBean // ////////////////////////////////////////// @Override public String getDescription() { return "More Like This"; } @Override public String getSource() { return "$URL: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene_solr_4_2/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java $"; } @Override public URL[] getDocs() { return null; } }
package com.artifex.mupdflib;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.*;
import android.graphics.Bitmap.Config;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.ProgressBar;

import java.util.ArrayList;
import java.util.Iterator;

// Make our ImageViews opaque to optimize redraw
class OpaqueImageView extends ImageView {

    public OpaqueImageView(Context context) {
        super(context);
    }

    @Override
    public boolean isOpaque() {
        return true;
    }
}

// Callback used to stream the words of a text selection, one line at a time.
interface TextProcessor {
    void onStartLine();

    void onWord(TextWord word);

    void onEndLine();
}

// Determines which words of a page's text fall inside a rectangular
// selection box and reports them, line by line, to a TextProcessor.
class TextSelector {
    final private TextWord[][] mText;   // page text: lines of words
    final private RectF mSelectBox;     // selection box in document coordinates

    public TextSelector(TextWord[][] text, RectF selectBox) {
        mText = text;
        mSelectBox = selectBox;
    }

    public void select(TextProcessor tp) {
        if (mText == null || mSelectBox == null)
            return;

        // Collect the lines that vertically overlap the selection box.
        ArrayList<TextWord[]> lines = new ArrayList<TextWord[]>();
        for (TextWord[] line : mText)
            if (line[0].bottom > mSelectBox.top && line[0].top < mSelectBox.bottom)
                lines.add(line);

        Iterator<TextWord[]> it = lines.iterator();
        while (it.hasNext()) {
            TextWord[] line = it.next();
            boolean firstLine = line[0].top < mSelectBox.top;
            boolean lastLine = line[0].bottom > mSelectBox.bottom;
            float start = Float.NEGATIVE_INFINITY;
            float end = Float.POSITIVE_INFINITY;

            // Interior lines are taken in full; the first and last lines are
            // clipped horizontally to the selection edges.
            if (firstLine && lastLine) {
                start = Math.min(mSelectBox.left, mSelectBox.right);
                end = Math.max(mSelectBox.left, mSelectBox.right);
            } else if (firstLine) {
                start = mSelectBox.left;
            } else if (lastLine) {
                end = mSelectBox.right;
            }

            tp.onStartLine();

            for (TextWord word : line)
                if (word.right > start && word.left < end)
                    tp.onWord(word);

            tp.onEndLine();
        }
    }
}

/**
 * Base view for a single rendered document page.
 *
 * Renders the page at minimum zoom into {@code mEntireBm} (shown via
 * {@code mEntire}) and, when zoomed in, renders only the visible patch into
 * the shared HQ bitmap {@code mPatchBm} (shown via {@code mPatch}).
 * Rendering and text/link extraction run on background tasks; an overlay
 * view ({@code mSearchView}) draws search highlights, link boxes, text
 * selection and freehand ink annotations.
 *
 * Subclasses supply the actual rendering, text, link and markup operations
 * through the abstract methods below.
 */
public abstract class PageView extends ViewGroup {
    private static final int HIGHLIGHT_COLOR = 0x802572AC;
    private static final int LINK_COLOR = 0x20AC7225;
    private static final int BOX_COLOR = 0xFF4444FF;
    private static final int INK_COLOR = 0xFFFF0000;
    private static final float INK_THICKNESS = 10.0f;
    private static final int BACKGROUND_COLOR = 0xFFFFFFFF;
    // Delay (ms) before showing the busy spinner, to avoid flicker on fast renders.
    private static final int PROGRESS_DIALOG_DELAY = 200;
    protected final Context mContext;
    protected int mPageNumber;
    private Point mParentSize;
    protected Point mSize; // Size of page at minimum zoom
    protected float mSourceScale;

    private ImageView mEntire; // Image rendered at minimum zoom
    private Bitmap mEntireBm;
    private Matrix mEntireMat;
    private AsyncTask<Void, Void, TextWord[][]> mGetText;
    private AsyncTask<Void, Void, LinkInfo[]> mGetLinkInfo;
    private CancellableAsyncTask<Void, Void> mDrawEntire;

    private Point mPatchViewSize; // View size on the basis of which the patch was created
    private Rect mPatchArea;
    private ImageView mPatch;
    private Bitmap mPatchBm; // shared HQ bitmap, owned by the caller
    private CancellableAsyncTask<Void,Void> mDrawPatch;
    private RectF mSearchBoxes[];
    private RectF mSearchBoxesPrim[];
    protected LinkInfo mLinks[];
    private RectF mSelectBox;
    private TextWord mText[][];
    private RectF mItemSelectBox;
    protected ArrayList<ArrayList<PointF>> mDrawing; // ink strokes in doc coordinates
    private View mSearchView; // overlay for highlights/selection/ink
    private boolean mIsBlank;
    private boolean mHighlightLinks;

    private ProgressBar mBusyIndicator;
    private final Handler mHandler = new Handler();

    public PageView(Context c, Point parentSize, Bitmap sharedHqBm) {
        super(c);
        mContext = c;
        mParentSize = parentSize;
        setBackgroundColor(BACKGROUND_COLOR);
        try {
            mEntireBm = Bitmap.createBitmap(parentSize.x, parentSize.y,
                    Config.ARGB_8888);
        } catch ( OutOfMemoryError e) {
            // NOTE(review): on OOM mEntireBm stays null and later renders will
            // fail; only logged here — confirm callers tolerate this.
            Log.d("MY_OOM_ERROR", "error");
        }
        mPatchBm = sharedHqBm;
        mEntireMat = new Matrix();
    }

    // Render the given patch of the page into bm (full redraw).
    protected abstract CancellableTaskDefinition<Void, Void> getDrawPageTask(Bitmap bm, int sizeX, int sizeY,
            int patchX, int patchY, int patchWidth, int patchHeight);

    // Re-render the given patch into bm, reusing its current contents as a basis.
    protected abstract CancellableTaskDefinition<Void, Void> getUpdatePageTask(Bitmap bm,
            int sizeX, int sizeY, int patchX, int patchY, int patchWidth, int patchHeight);

    protected abstract LinkInfo[] getLinkInfo();

    protected abstract TextWord[][] getText();

    protected abstract void addMarkup(PointF[] quadPoints, Annotation.Type type);

    // Cancels all background work and resets per-page state so the view can
    // be reused for another page.
    private void reinit() {
        // Cancel pending render task
        if (mDrawEntire != null) {
            mDrawEntire.cancelAndWait();
            mDrawEntire = null;
        }

        if (mDrawPatch != null) {
            mDrawPatch.cancelAndWait();
            mDrawPatch = null;
        }

        if (mGetLinkInfo != null) {
            mGetLinkInfo.cancel(true);
            mGetLinkInfo = null;
        }

        if (mGetText != null) {
            mGetText.cancel(true);
            mGetText = null;
        }

        mIsBlank = true;
        mPageNumber = 0;

        if (mSize == null)
            mSize = mParentSize;

        if (mEntire != null) {
            mEntire.setImageBitmap(null);
            mEntire.invalidate();
        }

        if (mPatch != null) {
            mPatch.setImageBitmap(null);
            mPatch.invalidate();
        }

        mPatchViewSize = null;
        mPatchArea = null;

        mSearchBoxes = null;
        mSearchBoxesPrim = null;
        mLinks = null;
        mSelectBox = null;
        mText = null;
        mItemSelectBox = null;
    }

    public void releaseResources() {
        reinit();

        if (mBusyIndicator != null) {
            removeView(mBusyIndicator);
            mBusyIndicator = null;
        }
    }

    // Drops bitmap references (the HQ bitmap is shared and must not be recycled here).
    public void releaseBitmaps() {
        reinit();
        mEntireBm = null;
        mPatchBm = null;
    }

    // Shows a blank placeholder with a busy spinner for the given page number.
    public void blank(int page) {
        reinit();
        mPageNumber = page;

        if (mBusyIndicator == null) {
            mBusyIndicator = new ProgressBar(mContext);
            mBusyIndicator.setIndeterminate(true);
            mBusyIndicator.setBackgroundResource(R.drawable.busy);
            addView(mBusyIndicator);
        }

        setBackgroundColor(BACKGROUND_COLOR);
    }

    /**
     * Binds this view to a page: computes the min-zoom size, kicks off
     * background link extraction and full-page rendering, and lazily creates
     * the overlay view that draws highlights, selection and ink.
     *
     * @param page page number to display
     * @param size unscaled page size in document units
     */
    public void setPage(int page, PointF size) {
        // Cancel pending render task
        if (mDrawEntire != null) {
            mDrawEntire.cancelAndWait();
            mDrawEntire = null;
        }

        mIsBlank = false;
        // Highlights may be missing because mIsBlank was true on last draw
        if (mSearchView != null)
            mSearchView.invalidate();

        mPageNumber = page;
        if (mEntire == null) {
            mEntire = new OpaqueImageView(mContext);
            mEntire.setScaleType(ImageView.ScaleType.MATRIX);
            addView(mEntire);
        }

        // Calculate scaled size that fits within the screen limits
        // This is the size at minimum zoom
        mSourceScale = Math.min(mParentSize.x / size.x, mParentSize.y / size.y);
        Point newSize = new Point((int) (size.x * mSourceScale),
                (int) (size.y * mSourceScale));
        mSize = newSize;

        mEntire.setImageBitmap(null);
        mEntire.invalidate();

        // Get the link info in the background
        mGetLinkInfo = new AsyncTask<Void, Void, LinkInfo[]>() {
            protected LinkInfo[] doInBackground(Void... v) {
                return getLinkInfo();
            }

            protected void onPostExecute(LinkInfo[] v) {
                mLinks = v;
                if (mSearchView != null)
                    mSearchView.invalidate();
            }
        };

        mGetLinkInfo.execute();

        // Render the page in the background
        mDrawEntire = new CancellableAsyncTask<Void, Void>(getDrawPageTask(mEntireBm,
                mSize.x, mSize.y, 0, 0, mSize.x, mSize.y)) {

            @Override
            public void onPreExecute() {
                setBackgroundColor(BACKGROUND_COLOR);
                mEntire.setImageBitmap(null);
                mEntire.invalidate();

                if (mBusyIndicator == null) {
                    mBusyIndicator = new ProgressBar(mContext);
                    mBusyIndicator.setIndeterminate(true);
                    mBusyIndicator.setBackgroundResource(R.drawable.busy);
                    addView(mBusyIndicator);
                    // Only show the spinner if rendering takes noticeably long.
                    mBusyIndicator.setVisibility(INVISIBLE);
                    mHandler.postDelayed(new Runnable() {
                        public void run() {
                            if (mBusyIndicator != null)
                                mBusyIndicator.setVisibility(VISIBLE);
                        }
                    }, PROGRESS_DIALOG_DELAY);
                }
            }

            @Override
            public void onPostExecute(Void result) {
                removeView(mBusyIndicator);
                mBusyIndicator = null;
                mEntire.setImageBitmap(mEntireBm);
                mEntire.invalidate();
                setBackgroundColor(Color.TRANSPARENT);
            }
        };

        mDrawEntire.execute();

        if (mSearchView == null) {
            // Overlay that draws search boxes, link frames, text selection,
            // the item-select box and ink strokes on top of the page image.
            mSearchView = new View(mContext) {
                @SuppressLint("DrawAllocation")
                @Override
                protected void onDraw(final Canvas canvas) {
                    super.onDraw(canvas);
                    // Work out current total scale factor from source to view
                    final float scale = mSourceScale * (float) getWidth()
                            / (float) mSize.x;
                    final Paint paint = new Paint();

                    if (!mIsBlank && mSearchBoxes != null) {
                        paint.setColor(HIGHLIGHT_COLOR);
                        for (RectF rect : mSearchBoxes)
                            canvas.drawRect(rect.left * scale, rect.top * scale,
                                    rect.right * scale, rect.bottom * scale,
                                    paint);
                    }

                    // Secondary search boxes drawn shifted to the right half of the view.
                    if (!mIsBlank && mSearchBoxesPrim != null) {
                        paint.setColor(HIGHLIGHT_COLOR);
                        for (RectF rect : mSearchBoxesPrim)
                            canvas.drawRect(rect.left * scale + getWidth() / 2, rect.top * scale,
                                    rect.right * scale + getWidth() / 2, rect.bottom * scale,
                                    paint);
                    }

                    if (!mIsBlank && mLinks != null && mHighlightLinks) {
                        paint.setStrokeWidth(2);
                        for (LinkInfo link : mLinks) {
                            // Draw a filled rounded rect plus an outline,
                            // slightly larger than the link rect itself.
                            RectF rectfa = new RectF((link.rect.left - 2) * scale,
                                    (link.rect.top - 2) * scale,
                                    (link.rect.right + 2) * scale,
                                    (link.rect.bottom + 2) * scale);

                            paint.setStyle(Paint.Style.FILL);
                            paint.setColor(LINK_COLOR);
                            canvas.drawRoundRect(rectfa, 3 * scale, 3 * scale, paint);

                            paint.setStyle(Paint.Style.STROKE);
                            paint.setColor(HIGHLIGHT_COLOR);
                            canvas.drawRoundRect(rectfa, 3 * scale, 3 * scale, paint);
                        }
                    }

                    if (mSelectBox != null && mText != null) {
                        paint.setColor(HIGHLIGHT_COLOR);
                        // Union the selected words of each line into one rect
                        // and fill it.
                        processSelectedText(new TextProcessor() {
                            RectF rect;

                            public void onStartLine() {
                                rect = new RectF();
                            }

                            public void onWord(TextWord word) {
                                rect.union(word);
                            }

                            public void onEndLine() {
                                if (!rect.isEmpty())
                                    canvas.drawRect(rect.left * scale, rect.top
                                            * scale, rect.right * scale,
                                            rect.bottom * scale, paint);
                            }
                        });
                    }

                    if (mItemSelectBox != null) {
                        paint.setStyle(Paint.Style.STROKE);
                        paint.setColor(BOX_COLOR);
                        canvas.drawRect(mItemSelectBox.left * scale,
                                mItemSelectBox.top * scale, mItemSelectBox.right
                                        * scale, mItemSelectBox.bottom * scale,
                                paint);
                    }

                    if (mDrawing != null) {
                        Path path = new Path();
                        PointF p;

                        paint.setAntiAlias(true);
                        paint.setDither(true);
                        paint.setStrokeJoin(Paint.Join.ROUND);
                        paint.setStrokeCap(Paint.Cap.ROUND);

                        paint.setStyle(Paint.Style.FILL);
                        paint.setStrokeWidth(INK_THICKNESS * scale);
                        paint.setColor(INK_COLOR);

                        Iterator<ArrayList<PointF>> it = mDrawing.iterator();
                        while (it.hasNext()) {
                            ArrayList<PointF> arc = it.next();
                            if (arc.size() >= 2) {
                                // Smooth the stroke with quadratic segments
                                // through midpoints.
                                Iterator<PointF> iit = arc.iterator();
                                p = iit.next();
                                float mX = p.x * scale;
                                float mY = p.y * scale;
                                path.moveTo(mX, mY);
                                while (iit.hasNext()) {
                                    p = iit.next();
                                    float x = p.x * scale;
                                    float y = p.y * scale;
                                    path.quadTo(mX, mY, (x + mX) / 2, (y + mY) / 2);
                                    mX = x;
                                    mY = y;
                                }
                                path.lineTo(mX, mY);
                            } else {
                                // Single-point stroke: draw a dot.
                                p = arc.get(0);
                                canvas.drawCircle(p.x * scale, p.y * scale, INK_THICKNESS * scale / 2, paint);
                            }
                        }

                        paint.setStyle(Paint.Style.STROKE);
                        canvas.drawPath(path, paint);
                    }
                }
            };

            addView(mSearchView);
        }
        requestLayout();
    }

    public void setSearchBoxes(RectF searchBoxes[]) {
        mSearchBoxes = searchBoxes;
        if (mSearchView != null)
            mSearchView.invalidate();
    }

    public void setSearchBoxesPrim(RectF searchBoxes[]) {
        mSearchBoxesPrim = searchBoxes;
        if (mSearchView != null)
            mSearchView.invalidate();
    }

    public void setLinkHighlighting(boolean f) {
        mHighlightLinks = f;
        if (mSearchView != null)
            mSearchView.invalidate();
    }

    public void deselectText() {
        mSelectBox = null;
        mSearchView.invalidate();
    }

    /**
     * Sets the text selection box from two view-coordinate points and lazily
     * starts background extraction of the page text needed to resolve the
     * selection into words.
     */
    public void selectText(float x0, float y0, float x1, float y1) {
        float scale = mSourceScale * (float) getWidth() / (float) mSize.x;
        float docRelX0 = (x0 - getLeft()) / scale;
        float docRelY0 = (y0 - getTop()) / scale;
        float docRelX1 = (x1 - getLeft()) / scale;
        float docRelY1 = (y1 - getTop()) / scale;
        // Order on Y but maintain the point grouping
        if (docRelY0 <= docRelY1)
            mSelectBox = new RectF(docRelX0, docRelY0, docRelX1, docRelY1);
        else
            mSelectBox = new RectF(docRelX1, docRelY1, docRelX0, docRelY0);

        mSearchView.invalidate();

        if (mGetText == null) {
            mGetText = new AsyncTask<Void, Void, TextWord[][]>() {
                @Override
                protected TextWord[][] doInBackground(Void... params) {
                    return getText();
                }

                @Override
                protected void onPostExecute(TextWord[][] result) {
                    mText = result;
                    mSearchView.invalidate();
                }
            };

            mGetText.execute();
        }
    }

    // Starts a new ink stroke at the given view coordinates.
    public void startDraw(float x, float y) {
        float scale = mSourceScale * (float) getWidth() / (float) mSize.x;
        float docRelX = (x - getLeft()) / scale;
        float docRelY = (y - getTop()) / scale;
        if (mDrawing == null)
            mDrawing = new ArrayList<ArrayList<PointF>>();

        ArrayList<PointF> arc = new ArrayList<PointF>();
        arc.add(new PointF(docRelX, docRelY));
        mDrawing.add(arc);
        mSearchView.invalidate();
    }

    // Appends a point to the current (last) ink stroke.
    public void continueDraw(float x, float y) {
        float scale = mSourceScale * (float) getWidth() / (float) mSize.x;
        float docRelX = (x - getLeft()) / scale;
        float docRelY = (y - getTop()) / scale;

        if (mDrawing != null && mDrawing.size() > 0) {
            ArrayList<PointF> arc = mDrawing.get(mDrawing.size() - 1);
            arc.add(new PointF(docRelX, docRelY));
            mSearchView.invalidate();
        }
    }

    public void cancelDraw() {
        mDrawing = null;
        mSearchView.invalidate();
    }

    // Returns the ink strokes as arrays of points (doc coordinates), or null.
    protected PointF[][] getDraw() {
        if (mDrawing == null)
            return null;
        PointF[][] path = new PointF[mDrawing.size()][];
        for (int i = 0; i < mDrawing.size(); i++) {
            ArrayList<PointF> arc = mDrawing.get(i);
            path[i] = arc.toArray(new PointF[arc.size()]);
        }
        return path;
    }

    protected void processSelectedText(TextProcessor tp) {
        (new TextSelector(mText, mSelectBox)).select(tp);
    }

    public void setItemSelectBox(RectF rect) {
        mItemSelectBox = rect;
        if (mSearchView != null)
            mSearchView.invalidate();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int x, y;
        switch (View.MeasureSpec.getMode(widthMeasureSpec)) {
        case View.MeasureSpec.UNSPECIFIED:
            x = mSize.x;
            break;
        default:
            x = View.MeasureSpec.getSize(widthMeasureSpec);
        }
        switch (View.MeasureSpec.getMode(heightMeasureSpec)) {
        case View.MeasureSpec.UNSPECIFIED:
            y = mSize.y;
            break;
        default:
            y = View.MeasureSpec.getSize(heightMeasureSpec);
        }

        setMeasuredDimension(x, y);

        if (mBusyIndicator != null) {
            int limit = Math.min(mParentSize.x, mParentSize.y) / 2;
            // NOTE(review): OR-ing the mode flag with the size looks wrong —
            // the conventional form is MeasureSpec.makeMeasureSpec(limit,
            // MeasureSpec.AT_MOST); confirm before changing, since the
            // bit-OR happens to work while limit fits in the size bits.
            mBusyIndicator.measure(View.MeasureSpec.AT_MOST | limit,
                    View.MeasureSpec.AT_MOST | limit);
        }
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right,
            int bottom) {
        int w = right - left;
        int h = bottom - top;

        if (mEntire != null) {
            // Only rebuild the scale matrix when the view size actually changed.
            if (mEntire.getWidth() != w || mEntire.getHeight() != h) {
                mEntireMat.setScale(w/(float)mSize.x, h/(float)mSize.y);
                mEntire.setImageMatrix(mEntireMat);
                mEntire.invalidate();
            }
            mEntire.layout(0, 0, w, h);
        }

        if (mSearchView != null) {
            mSearchView.layout(0, 0, w, h);
        }

        if (mPatchViewSize != null) {
            if (mPatchViewSize.x != w || mPatchViewSize.y != h) {
                // Zoomed since patch was created
                mPatchViewSize = null;
                mPatchArea = null;
                if (mPatch != null) {
                    mPatch.setImageBitmap(null);
                    mPatch.invalidate();
                }
            } else {
                mPatch.layout(mPatchArea.left, mPatchArea.top,
                        mPatchArea.right, mPatchArea.bottom);
            }
        }

        if (mBusyIndicator != null) {
            int bw = mBusyIndicator.getMeasuredWidth();
            int bh = mBusyIndicator.getMeasuredHeight();

            mBusyIndicator.layout((w - bw) / 2, (h - bh) / 2, (w + bw) / 2,
                    (h + bh) / 2);
        }
    }

    /**
     * Creates or refreshes the high-quality patch covering the currently
     * visible part of a zoomed page.
     *
     * @param update true to re-render the existing patch contents (e.g. after
     *               an annotation change) rather than only when the visible
     *               area changed
     */
    public void updateHq(boolean update) {
        Rect viewArea = new Rect(getLeft(), getTop(), getRight(), getBottom());
        if (viewArea.width() == mSize.x || viewArea.height() == mSize.y) {
            // If the viewArea's size matches the unzoomed size, there is no need for an hq patch
            if (mPatch != null) {
                mPatch.setImageBitmap(null);
                mPatch.invalidate();
            }
        } else {
            final Point patchViewSize = new Point(viewArea.width(), viewArea.height());
            final Rect patchArea = new Rect(0, 0, mParentSize.x, mParentSize.y);

            // Intersect and test that there is an intersection
            if (!patchArea.intersect(viewArea))
                return;

            // Offset patch area to be relative to the view top left
            patchArea.offset(-viewArea.left, -viewArea.top);

            boolean area_unchanged = patchArea.equals(mPatchArea)
                    && patchViewSize.equals(mPatchViewSize);

            // If being asked for the same area as last time and not because of
            // an update then nothing to do
            if (area_unchanged && !update)
                return;

            // A full redraw is needed unless this is a repeat request for the
            // same area triggered by an update.
            boolean completeRedraw = !(area_unchanged && update);

            // Stop the drawing of previous patch if still going
            if (mDrawPatch != null) {
                mDrawPatch.cancelAndWait();
                mDrawPatch = null;
            }

            // Create and add the image view if not already done
            if (mPatch == null) {
                mPatch = new OpaqueImageView(mContext);
                mPatch.setScaleType(ImageView.ScaleType.MATRIX);
                addView(mPatch);
                if (mSearchView != null)
                    mSearchView.bringToFront();
            }

            CancellableTaskDefinition<Void, Void> task;

            if (completeRedraw)
                task = getDrawPageTask(mPatchBm, patchViewSize.x, patchViewSize.y,
                                patchArea.left, patchArea.top,
                                patchArea.width(), patchArea.height());
            else
                task = getUpdatePageTask(mPatchBm, patchViewSize.x, patchViewSize.y,
                        patchArea.left, patchArea.top,
                        patchArea.width(), patchArea.height());

            mDrawPatch = new CancellableAsyncTask<Void,Void>(task) {

                public void onPostExecute(Void result) {
                    // Record which view size/area the freshly rendered patch
                    // corresponds to, then show it.
                    mPatchViewSize = patchViewSize;
                    mPatchArea = patchArea;

                    mPatch.setImageBitmap(mPatchBm);
                    mPatch.invalidate();

                    // Calling requestLayout here doesn't lead to a later call
                    // to layout. No idea why, but apparently others have run
                    // into the problem.
                    mPatch.layout(mPatchArea.left, mPatchArea.top,
                            mPatchArea.right, mPatchArea.bottom);
                }
            };

            mDrawPatch.execute();
        }
    }

    /**
     * Re-renders the full-page bitmap in the background (e.g. after an
     * annotation change) and then refreshes the HQ patch.
     */
    public void update() {
        // Cancel pending render task
        if (mDrawEntire != null) {
            mDrawEntire.cancelAndWait();
            mDrawEntire = null;
        }

        if (mDrawPatch != null) {
            mDrawPatch.cancelAndWait();
            mDrawPatch = null;
        }

        // Render the page in the background
        mDrawEntire = new CancellableAsyncTask<Void, Void>(getUpdatePageTask(mEntireBm,
                mSize.x, mSize.y, 0, 0, mSize.x, mSize.y)) {

            public void onPostExecute(Void result) {
                mEntire.setImageBitmap(mEntireBm);
                mEntire.invalidate();
            }
        };

        mDrawEntire.execute();

        updateHq(true);
    }

    public void removeHq() {
        // Stop the drawing of the patch if still going
        if (mDrawPatch != null) {
            mDrawPatch.cancelAndWait();
            mDrawPatch = null;
        }

        // And get rid of it
        mPatchViewSize = null;
        mPatchArea = null;
        if (mPatch != null) {
            mPatch.setImageBitmap(null);
            mPatch.invalidate();
        }
    }

    public int getPage() {
        return mPageNumber;
    }

    @Override
    public boolean isOpaque() {
        return true;
    }
}
/* * Copyright 2004-2013 the Seasar Foundation and the Others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.docksidestage.mysql.dbflute.cbean.cq.bs; import java.util.*; import org.dbflute.cbean.*; import org.dbflute.cbean.chelper.*; import org.dbflute.cbean.ckey.*; import org.dbflute.cbean.coption.*; import org.dbflute.cbean.cvalue.ConditionValue; import org.dbflute.cbean.ordering.*; import org.dbflute.cbean.scoping.*; import org.dbflute.cbean.sqlclause.SqlClause; import org.dbflute.dbmeta.DBMetaProvider; import org.docksidestage.mysql.dbflute.allcommon.*; import org.docksidestage.mysql.dbflute.cbean.*; import org.docksidestage.mysql.dbflute.cbean.cq.*; /** * The abstract condition-query of white_non_unique_many_to_one_to. 
 * @author DBFlute(AutoGenerator)
 */
public abstract class AbstractBsWhiteNonUniqueManyToOneToCQ extends AbstractConditionQuery {

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    // Generated code: wiring is handled entirely by the framework superclass.
    public AbstractBsWhiteNonUniqueManyToOneToCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    /** @return The provider of DB meta for this project. (NotNull) */
    @Override
    protected DBMetaProvider xgetDBMetaProvider() { return DBMetaInstanceHandler.getProvider(); }

    /** @return The DB name of the table this query targets. (NotNull) */
    public String asTableDbName() { return "white_non_unique_many_to_one_to"; }

    // ===================================================================================
    //                                                                               Query
    //                                                                               =====
    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param toId The value of toId as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setToId_Equal(Long toId) {
        doSetToId_Equal(toId);
    }

    protected void doSetToId_Equal(Long toId) {
        regToId(CK_EQ, toId);
    }

    /**
     * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param toId The value of toId as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setToId_GreaterThan(Long toId) {
        regToId(CK_GT, toId);
    }

    /**
     * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param toId The value of toId as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setToId_LessThan(Long toId) {
        regToId(CK_LT, toId);
    }

    /**
     * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param toId The value of toId as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setToId_GreaterEqual(Long toId) {
        regToId(CK_GE, toId);
    }

    /**
     * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param toId The value of toId as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setToId_LessEqual(Long toId) {
        regToId(CK_LE, toId);
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber &lt;= column &lt;= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param minNumber The min number of toId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of toId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of range-of. (NotNull)
     */
    public void setToId_RangeOf(Long minNumber, Long maxNumber, ConditionOptionCall<RangeOfOption> opLambda) {
        // delegate with the lambda resolved into a concrete option object
        setToId_RangeOf(minNumber, maxNumber, xcROOP(opLambda));
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber &lt;= column &lt;= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param minNumber The min number of toId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of toId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param rangeOfOption The option of range-of. (NotNull)
     */
    public void setToId_RangeOf(Long minNumber, Long maxNumber, RangeOfOption rangeOfOption) {
        regROO(minNumber, maxNumber, xgetCValueToId(), "TO_ID", rangeOfOption);
    }

    /**
     * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param toIdList The collection of toId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setToId_InScope(Collection<Long> toIdList) {
        doSetToId_InScope(toIdList);
    }

    protected void doSetToId_InScope(Collection<Long> toIdList) {
        regINS(CK_INS, cTL(toIdList), xgetCValueToId(), "TO_ID");
    }

    /**
     * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     * @param toIdList The collection of toId as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setToId_NotInScope(Collection<Long> toIdList) {
        doSetToId_NotInScope(toIdList);
    }

    protected void doSetToId_NotInScope(Collection<Long> toIdList) {
        regINS(CK_NINS, cTL(toIdList), xgetCValueToId(), "TO_ID");
    }

    /**
     * IsNull {is null}. And OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     */
    public void setToId_IsNull() { regToId(CK_ISN, DOBJ); }

    /**
     * IsNotNull {is not null}. And OnlyOnceRegistered. <br>
     * TO_ID: {PK, ID, NotNull, BIGINT(19)}
     */
    public void setToId_IsNotNull() { regToId(CK_ISNN, DOBJ); }

    // common registration funnel for all TO_ID conditions
    protected void regToId(ConditionKey ky, Object vl) {
        regQ(ky, vl, xgetCValueToId(), "TO_ID");
    }
    protected abstract ConditionValue xgetCValueToId();

    /**
     * Equal(=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)}
     * @param toName The value of toName as equal. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setToName_Equal(String toName) {
        doSetToName_Equal(fRES(toName));
    }

    protected void doSetToName_Equal(String toName) {
        regToName(CK_EQ, toName);
    }

    /**
     * NotEqual(&lt;&gt;). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)}
     * @param toName The value of toName as notEqual. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setToName_NotEqual(String toName) {
        doSetToName_NotEqual(fRES(toName));
    }

    protected void doSetToName_NotEqual(String toName) {
        regToName(CK_NES, toName);
    }

    /**
     * InScope {in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)}
     * @param toNameList The collection of toName as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setToName_InScope(Collection<String> toNameList) {
        doSetToName_InScope(toNameList);
    }

    protected void doSetToName_InScope(Collection<String> toNameList) {
        regINS(CK_INS, cTL(toNameList), xgetCValueToName(), "TO_NAME");
    }

    /**
     * NotInScope {not in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)}
     * @param toNameList The collection of toName as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setToName_NotInScope(Collection<String> toNameList) {
        doSetToName_NotInScope(toNameList);
    }

    protected void doSetToName_NotInScope(Collection<String> toNameList) {
        regINS(CK_NINS, cTL(toNameList), xgetCValueToName(), "TO_NAME");
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)} <br>
     * e.g. {@code setToName_LikeSearch("xxx", op -> op.likeContain())}
     * @param toName The value of toName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setToName_LikeSearch(String toName, ConditionOptionCall<LikeSearchOption> opLambda) {
        setToName_LikeSearch(toName, xcLSOP(opLambda));
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)} <br>
     * e.g. {@code setToName_LikeSearch("xxx", new LikeSearchOption().likeContain())}
     * @param toName The value of toName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of like-search. (NotNull)
     */
    public void setToName_LikeSearch(String toName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_LS, fRES(toName), xgetCValueToName(), "TO_NAME", likeSearchOption);
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)}
     * @param toName The value of toName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setToName_NotLikeSearch(String toName, ConditionOptionCall<LikeSearchOption> opLambda) {
        setToName_NotLikeSearch(toName, xcLSOP(opLambda));
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * TO_NAME: {NotNull, VARCHAR(200)}
     * @param toName The value of toName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of not-like-search. (NotNull)
     */
    public void setToName_NotLikeSearch(String toName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_NLS, fRES(toName), xgetCValueToName(), "TO_NAME", likeSearchOption);
    }

    // common registration funnel for all TO_NAME conditions
    protected void regToName(ConditionKey ky, Object vl) {
        regQ(ky, vl, xgetCValueToName(), "TO_NAME");
    }
    protected abstract ConditionValue xgetCValueToName();

    /**
     * Equal(=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)}
     * @param nonUniqueCode The value of nonUniqueCode as equal. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setNonUniqueCode_Equal(String nonUniqueCode) {
        doSetNonUniqueCode_Equal(fRES(nonUniqueCode));
    }

    protected void doSetNonUniqueCode_Equal(String nonUniqueCode) {
        regNonUniqueCode(CK_EQ, nonUniqueCode);
    }

    /**
     * NotEqual(&lt;&gt;). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)}
     * @param nonUniqueCode The value of nonUniqueCode as notEqual. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setNonUniqueCode_NotEqual(String nonUniqueCode) {
        doSetNonUniqueCode_NotEqual(fRES(nonUniqueCode));
    }

    protected void doSetNonUniqueCode_NotEqual(String nonUniqueCode) {
        regNonUniqueCode(CK_NES, nonUniqueCode);
    }

    /**
     * InScope {in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)}
     * @param nonUniqueCodeList The collection of nonUniqueCode as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setNonUniqueCode_InScope(Collection<String> nonUniqueCodeList) {
        doSetNonUniqueCode_InScope(nonUniqueCodeList);
    }

    protected void doSetNonUniqueCode_InScope(Collection<String> nonUniqueCodeList) {
        regINS(CK_INS, cTL(nonUniqueCodeList), xgetCValueNonUniqueCode(), "NON_UNIQUE_CODE");
    }

    /**
     * NotInScope {not in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)}
     * @param nonUniqueCodeList The collection of nonUniqueCode as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setNonUniqueCode_NotInScope(Collection<String> nonUniqueCodeList) {
        doSetNonUniqueCode_NotInScope(nonUniqueCodeList);
    }

    protected void doSetNonUniqueCode_NotInScope(Collection<String> nonUniqueCodeList) {
        regINS(CK_NINS, cTL(nonUniqueCodeList), xgetCValueNonUniqueCode(), "NON_UNIQUE_CODE");
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)} <br>
     * e.g. {@code setNonUniqueCode_LikeSearch("xxx", op -> op.likeContain())}
     * @param nonUniqueCode The value of nonUniqueCode as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setNonUniqueCode_LikeSearch(String nonUniqueCode, ConditionOptionCall<LikeSearchOption> opLambda) {
        setNonUniqueCode_LikeSearch(nonUniqueCode, xcLSOP(opLambda));
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)} <br>
     * e.g. {@code setNonUniqueCode_LikeSearch("xxx", new LikeSearchOption().likeContain())}
     * @param nonUniqueCode The value of nonUniqueCode as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of like-search. (NotNull)
     */
    public void setNonUniqueCode_LikeSearch(String nonUniqueCode, LikeSearchOption likeSearchOption) {
        regLSQ(CK_LS, fRES(nonUniqueCode), xgetCValueNonUniqueCode(), "NON_UNIQUE_CODE", likeSearchOption);
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)}
     * @param nonUniqueCode The value of nonUniqueCode as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setNonUniqueCode_NotLikeSearch(String nonUniqueCode, ConditionOptionCall<LikeSearchOption> opLambda) {
        setNonUniqueCode_NotLikeSearch(nonUniqueCode, xcLSOP(opLambda));
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * NON_UNIQUE_CODE: {NotNull, CHAR(3)}
     * @param nonUniqueCode The value of nonUniqueCode as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of not-like-search. (NotNull)
     */
    public void setNonUniqueCode_NotLikeSearch(String nonUniqueCode, LikeSearchOption likeSearchOption) {
        regLSQ(CK_NLS, fRES(nonUniqueCode), xgetCValueNonUniqueCode(), "NON_UNIQUE_CODE", likeSearchOption);
    }

    // common registration funnel for all NON_UNIQUE_CODE conditions
    protected void regNonUniqueCode(ConditionKey ky, Object vl) {
        regQ(ky, vl, xgetCValueNonUniqueCode(), "NON_UNIQUE_CODE");
    }
    protected abstract ConditionValue xgetCValueNonUniqueCode();

    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * BEGIN_DATE: {NotNull, DATE(10)}
     * @param beginDate The value of beginDate as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setBeginDate_Equal(java.time.LocalDate beginDate) {
        regBeginDate(CK_EQ, beginDate);
    }

    /**
     * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br>
     * BEGIN_DATE: {NotNull, DATE(10)}
     * @param beginDate The value of beginDate as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setBeginDate_GreaterThan(java.time.LocalDate beginDate) {
        regBeginDate(CK_GT, beginDate);
    }

    /**
     * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br>
     * BEGIN_DATE: {NotNull, DATE(10)}
     * @param beginDate The value of beginDate as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setBeginDate_LessThan(java.time.LocalDate beginDate) {
        regBeginDate(CK_LT, beginDate);
    }

    /**
     * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * BEGIN_DATE: {NotNull, DATE(10)}
     * @param beginDate The value of beginDate as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setBeginDate_GreaterEqual(java.time.LocalDate beginDate) {
        regBeginDate(CK_GE, beginDate);
    }

    /**
     * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * BEGIN_DATE: {NotNull, DATE(10)}
     * @param beginDate The value of beginDate as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setBeginDate_LessEqual(java.time.LocalDate beginDate) {
        regBeginDate(CK_LE, beginDate);
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * BEGIN_DATE: {NotNull, DATE(10)} <br>
     * e.g. {@code setBeginDate_FromTo(fromDate, toDate, op -> op.compareAsDate())}
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of beginDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of beginDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of from-to. (NotNull)
     */
    public void setBeginDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, ConditionOptionCall<FromToOption> opLambda) {
        setBeginDate_FromTo(fromDatetime, toDatetime, xcFTOP(opLambda));
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * BEGIN_DATE: {NotNull, DATE(10)} <br>
     * e.g. {@code setBeginDate_FromTo(fromDate, toDate, new FromToOption().compareAsDate())}
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of beginDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of beginDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param fromToOption The option of from-to. (NotNull)
     */
    public void setBeginDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, FromToOption fromToOption) {
        String nm = "BEGIN_DATE";
        FromToOption op = fromToOption;
        // xfFTHD adjusts both boundary values according to the from-to option before registration
        regFTQ(xfFTHD(fromDatetime, nm, op), xfFTHD(toDatetime, nm, op), xgetCValueBeginDate(), nm, op);
    }

    // common registration funnel for all BEGIN_DATE conditions
    protected void regBeginDate(ConditionKey ky, Object vl) {
        regQ(ky, vl, xgetCValueBeginDate(), "BEGIN_DATE");
    }
    protected abstract ConditionValue xgetCValueBeginDate();

    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * END_DATE: {NotNull, DATE(10)}
     * @param endDate The value of endDate as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setEndDate_Equal(java.time.LocalDate endDate) {
        regEndDate(CK_EQ, endDate);
    }

    /**
     * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br>
     * END_DATE: {NotNull, DATE(10)}
     * @param endDate The value of endDate as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setEndDate_GreaterThan(java.time.LocalDate endDate) {
        regEndDate(CK_GT, endDate);
    }

    /**
     * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br>
     * END_DATE: {NotNull, DATE(10)}
     * @param endDate The value of endDate as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setEndDate_LessThan(java.time.LocalDate endDate) {
        regEndDate(CK_LT, endDate);
    }

    /**
     * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * END_DATE: {NotNull, DATE(10)}
     * @param endDate The value of endDate as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setEndDate_GreaterEqual(java.time.LocalDate endDate) {
        regEndDate(CK_GE, endDate);
    }

    /**
     * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * END_DATE: {NotNull, DATE(10)}
     * @param endDate The value of endDate as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setEndDate_LessEqual(java.time.LocalDate endDate) {
        regEndDate(CK_LE, endDate);
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * END_DATE: {NotNull, DATE(10)} <br>
     * e.g. {@code setEndDate_FromTo(fromDate, toDate, op -> op.compareAsDate())}
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of endDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of endDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of from-to. (NotNull)
     */
    public void setEndDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, ConditionOptionCall<FromToOption> opLambda) {
        setEndDate_FromTo(fromDatetime, toDatetime, xcFTOP(opLambda));
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime &lt;= column &lt;= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * END_DATE: {NotNull, DATE(10)} <br>
     * e.g. {@code setEndDate_FromTo(fromDate, toDate, new FromToOption().compareAsDate())}
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of endDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of endDate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param fromToOption The option of from-to. (NotNull)
     */
    public void setEndDate_FromTo(java.time.LocalDate fromDatetime, java.time.LocalDate toDatetime, FromToOption fromToOption) {
        String nm = "END_DATE";
        FromToOption op = fromToOption;
        regFTQ(xfFTHD(fromDatetime, nm, op), xfFTHD(toDatetime, nm, op), xgetCValueEndDate(), nm, op);
    }

    // common registration funnel for all END_DATE conditions
    protected void regEndDate(ConditionKey ky, Object vl) {
        regQ(ky, vl, xgetCValueEndDate(), "END_DATE");
    }
    protected abstract ConditionValue xgetCValueEndDate();

    // ===================================================================================
    //                                                                     ScalarCondition
    //                                                                     ===============
    /**
     * Prepare ScalarCondition as equal. <br>
     * {where FOO = (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteNonUniqueManyToOneToCB> scalar_Equal() {
        return xcreateSLCFunction(CK_EQ, WhiteNonUniqueManyToOneToCB.class);
    }

    /**
     * Prepare ScalarCondition as notEqual. <br>
     * {where FOO &lt;&gt; (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteNonUniqueManyToOneToCB> scalar_NotEqual() {
        return xcreateSLCFunction(CK_NES, WhiteNonUniqueManyToOneToCB.class);
    }

    /**
     * Prepare ScalarCondition as greaterThan. <br>
     * {where FOO &gt; (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteNonUniqueManyToOneToCB> scalar_GreaterThan() {
        return xcreateSLCFunction(CK_GT, WhiteNonUniqueManyToOneToCB.class);
    }

    /**
     * Prepare ScalarCondition as lessThan. <br>
     * {where FOO &lt; (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteNonUniqueManyToOneToCB> scalar_LessThan() {
        return xcreateSLCFunction(CK_LT, WhiteNonUniqueManyToOneToCB.class);
    }

    /**
     * Prepare ScalarCondition as greaterEqual. <br>
     * {where FOO &gt;= (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteNonUniqueManyToOneToCB> scalar_GreaterEqual() {
        return xcreateSLCFunction(CK_GE, WhiteNonUniqueManyToOneToCB.class);
    }

    /**
     * Prepare ScalarCondition as lessEqual. <br>
     * {where FOO &lt;= (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteNonUniqueManyToOneToCB> scalar_LessEqual() {
        return xcreateSLCFunction(CK_LE, WhiteNonUniqueManyToOneToCB.class);
    }

    // Internal driver for scalar-condition sub-queries: builds the sub-query CB,
    // captures its query for parameter saving, then registers the condition.
    // NOTE: keepScalarCondition must run before registerScalarCondition (query-value saving).
    @SuppressWarnings("unchecked")
    protected <CB extends ConditionBean> void xscalarCondition(String fn, SubQuery<CB> sq, String rd, HpSLCCustomized<CB> cs, ScalarConditionOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteNonUniqueManyToOneToCB cb = xcreateScalarConditionCB();
        sq.query((CB)cb);
        String pp = keepScalarCondition(cb.query()); // for saving query-value
        cs.setPartitionByCBean((CB)xcreateScalarConditionPartitionByCB()); // for using partition-by
        registerScalarCondition(fn, cb.query(), pp, rd, cs, op);
    }
    public abstract String keepScalarCondition(WhiteNonUniqueManyToOneToCQ sq);

    protected WhiteNonUniqueManyToOneToCB xcreateScalarConditionCB() {
        WhiteNonUniqueManyToOneToCB cb = newMyCB();
        cb.xsetupForScalarCondition(this);
        return cb;
    }

    protected WhiteNonUniqueManyToOneToCB xcreateScalarConditionPartitionByCB() {
        WhiteNonUniqueManyToOneToCB cb = newMyCB();
        cb.xsetupForScalarConditionPartitionBy(this);
        return cb;
    }

    // ===================================================================================
    //                                                                       MyselfDerived
    //                                                                       =============
    // Internal driver for (Specify)MyselfDerived: correlates on the PK column TO_ID.
    public void xsmyselfDerive(String fn, SubQuery<WhiteNonUniqueManyToOneToCB> sq, String al, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteNonUniqueManyToOneToCB cb = new WhiteNonUniqueManyToOneToCB();
        cb.xsetupForDerivedReferrer(this);
        lockCall(() -> sq.query(cb));
        String pp = keepSpecifyMyselfDerived(cb.query());
        String pk = "TO_ID";
        registerSpecifyMyselfDerived(fn, cb.query(), pk, pk, pp, "myselfDerived", al, op);
    }
    public abstract String keepSpecifyMyselfDerived(WhiteNonUniqueManyToOneToCQ sq);

    /**
     * Prepare for (Query)MyselfDerived (correlated sub-query).
     * @return The object to set up a function for myself table. (NotNull)
     */
    public HpQDRFunction<WhiteNonUniqueManyToOneToCB> myselfDerived() {
        return xcreateQDRFunctionMyselfDerived(WhiteNonUniqueManyToOneToCB.class);
    }

    // Internal driver for (Query)MyselfDerived: also saves the comparison parameter value.
    @SuppressWarnings("unchecked")
    protected <CB extends ConditionBean> void xqderiveMyselfDerived(String fn, SubQuery<CB> sq, String rd, Object vl, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteNonUniqueManyToOneToCB cb = new WhiteNonUniqueManyToOneToCB();
        cb.xsetupForDerivedReferrer(this);
        sq.query((CB)cb);
        String pk = "TO_ID";
        String sqpp = keepQueryMyselfDerived(cb.query()); // for saving query-value.
        String prpp = keepQueryMyselfDerivedParameter(vl);
        registerQueryMyselfDerived(fn, cb.query(), pk, pk, sqpp, "myselfDerived", rd, vl, prpp, op);
    }
    public abstract String keepQueryMyselfDerived(WhiteNonUniqueManyToOneToCQ sq);
    public abstract String keepQueryMyselfDerivedParameter(Object vl);

    // ===================================================================================
    //                                                                        MyselfExists
    //                                                                        ============
    /**
     * Prepare for MyselfExists (correlated sub-query).
     * @param subCBLambda The implementation of sub-query. (NotNull)
     */
    public void myselfExists(SubQuery<WhiteNonUniqueManyToOneToCB> subCBLambda) {
        assertObjectNotNull("subCBLambda", subCBLambda);
        WhiteNonUniqueManyToOneToCB cb = new WhiteNonUniqueManyToOneToCB();
        cb.xsetupForMyselfExists(this);
        lockCall(() -> subCBLambda.query(cb));
        String pp = keepMyselfExists(cb.query());
        registerMyselfExists(cb.query(), pp);
    }
    public abstract String keepMyselfExists(WhiteNonUniqueManyToOneToCQ sq);

    // ===================================================================================
    //                                                                    Full Text Search
    //                                                                    ================
    /**
     * Match for full-text search. <br>
     * Bind variable is unused because the condition value should be literal in MySQL.
     * @param textColumn The text column. (NotNull, StringColumn, TargetTableColumn)
     * @param conditionValue The condition value embedded without binding (by MySQL restriction) but escaped. (NullAllowed: if null or empty, no condition)
     * @param modifier The modifier of full-text search. (NullAllowed: If the value is null, No modifier specified)
     */
    public void match(org.dbflute.dbmeta.info.ColumnInfo textColumn, String conditionValue,
            org.dbflute.dbway.WayOfMySQL.FullTextSearchModifier modifier) {
        assertObjectNotNull("textColumn", textColumn);
        match(newArrayList(textColumn), conditionValue, modifier); // delegate to the list overload
    }

    /**
     * Match for full-text search. <br>
     * Bind variable is unused because the condition value should be literal in MySQL.
     * @param textColumnList The list of text column. (NotNull, NotEmpty, StringColumn, TargetTableColumn)
     * @param conditionValue The condition value embedded without binding (by MySQL restriction) but escaped. (NullAllowed: if null or empty, no condition)
     * @param modifier The modifier of full-text search. (NullAllowed: If the value is null, no modifier specified)
     */
    public void match(List<org.dbflute.dbmeta.info.ColumnInfo> textColumnList, String conditionValue,
            org.dbflute.dbway.WayOfMySQL.FullTextSearchModifier modifier) {
        xdoMatchForMySQL(textColumnList, conditionValue, modifier);
    }

    // ===================================================================================
    //                                                                        Manual Order
    //                                                                        ============
    /**
     * Order along manual ordering information. <br>
     * e.g. {@code cb.query().addOrderBy_Birthdate_Asc().withManualOrder(op -> op.when_GreaterEqual(priorityDate))}
     * builds a CASE WHEN ... THEN 0 ELSE 1 END ordering expression. <br>
     * <p>This function with Union is unsupported!</p>
     * <p>The order values are bound (treated as bind parameter).</p>
     * @param opLambda The callback for option of manual-order containing order values. (NotNull)
     */
    public void withManualOrder(ManualOrderOptionCall opLambda) { // is user public!
        xdoWithManualOrder(cMOO(opLambda));
    }

    // ===================================================================================
    //                                                                    Small Adjustment
    //                                                                    ================
    // ===================================================================================
    //                                                                       Very Internal
    //                                                                       =============
    protected WhiteNonUniqueManyToOneToCB newMyCB() {
        return new WhiteNonUniqueManyToOneToCB();
    }
    // very internal (for suppressing warn about 'Not Use Import')
    protected String xabUDT() { return Date.class.getName(); }
    protected String xabCQ() { return WhiteNonUniqueManyToOneToCQ.class.getName(); }
    protected String xabLSO() { return LikeSearchOption.class.getName(); }
    protected String xabSLCS() { return HpSLCSetupper.class.getName(); }
    protected String xabSCP() { return SubQuery.class.getName(); }
}
package main;

/**
 * Holds all of the persistent objects contained by the system upon startup.
 *
 * <p>At startup time, this class:
 * <ul>
 *   <li>opens the JPA persistence unit {@code "entities"} (creating the backing
 *       database if the provider is configured to do so);</li>
 *   <li>loads the employee, client, delivery and city-map tables into the static
 *       maps below — when the employee table is empty a single default
 *       {@code "Admin"} account is created so an administrator always exists;</li>
 *   <li>exposes that state to the entire system through public static fields.</li>
 * </ul>
 *
 * <p>NOTE(review): method names use PascalCase (not Java convention) — kept as-is
 * because callers throughout the system depend on these exact names.
 *
 * @author Team 3
 * @version 1.0
 */

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.*;

import java.util.HashMap;
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.Persistence;
import javax.persistence.Query;

import model.*;

public final class CourierSystem {

    // Persistent state, keyed by natural identifier, shared by the whole system.
    public static HashMap<String, Employee> Employees;
    public static HashMap<String, Client> Clients;
    public static HashMap<String, Delivery> Deliveries;
    public static Map CityMap;            // model.Map (NOT java.util.Map)
    public static Settings SystemSettings;
    public static Employee currentUser;   // the employee currently logged in

    private static EntityManagerFactory factory;
    private static EntityManager em;

    /**
     * Opens the persistence unit and loads every table into memory.
     *
     * @throws Exception if the database cannot be opened or a table fails to load
     */
    public static void InitializeCourierSystem() throws Exception {
        // Load the database.
        factory = Persistence.createEntityManagerFactory("entities");
        em = factory.createEntityManager();

        LoadCityMap();
        LoadEmployees();
        LoadClients();
        LoadDeliveries();
        LoadSettings();
    }

    /**
     * Restores {@link #SystemSettings} from {@code ./settings.ser}. Falls back to
     * default values when the file is missing, unreadable, or deserializes to null.
     * Uses try-with-resources so the streams are closed even on failure (the
     * original leaked them when readObject threw).
     */
    private static void LoadSettings() {
        try (FileInputStream fin = new FileInputStream("./settings.ser");
             ObjectInputStream ois = new ObjectInputStream(fin)) {
            SystemSettings = (Settings) ois.readObject();
            if (SystemSettings == null) {
                System.out.println("Settings failed to load.");
                SystemSettings = new Settings();
                SystemSettings.setDefaultValues();
            }
        } catch (IOException | ClassNotFoundException e) {
            // Best effort: any failure simply means we start from defaults.
            SystemSettings = new Settings();
            SystemSettings.setDefaultValues();
            e.printStackTrace();
        }
    }

    /**
     * Serializes {@link #SystemSettings} to {@code ./settings.ser}.
     *
     * @throws IOException if the settings file cannot be written
     */
    public static void SaveSettings() throws IOException {
        try (FileOutputStream fout = new FileOutputStream("./settings.ser");
             ObjectOutputStream oos = new ObjectOutputStream(fout)) {
            oos.writeObject(SystemSettings);
        }
        System.out.println("Settings saved successfully.");
    }

    /**
     * Loads the employee table into {@link #Employees}, keyed by employee name.
     * If the table is empty (or the query fails), a default "Admin" employee is
     * created so the system is never left without an administrator.
     *
     * @throws Exception kept for caller compatibility; query failures are caught
     */
    @SuppressWarnings("unchecked")
    public static void LoadEmployees() throws Exception {
        if (Employees == null) {
            Employees = new HashMap<String, Employee>();
        }
        try {
            Query eQuery = em.createQuery("SELECT e FROM Employees e", Employee.class);
            List<Employee> emp = eQuery.getResultList();
            for (Employee e : emp) {
                Employees.put(e.name, e);
            }
        } catch (Exception ex) {
            // BUG FIX: was println(ex.getStackTrace()) which printed the array's
            // toString() (e.g. "[Ljava.lang.StackTraceElement;@1a2b3c"), not the trace.
            ex.printStackTrace();
        }
        if (Employees.size() == 0) {
            Employees.put("Admin", new Employee("admin"));
        }
    }

    /**
     * Persists every in-memory employee, then reloads the table.
     *
     * @throws Exception if the transaction or the reload fails
     */
    public static void UpdateEmployees() throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        for (Employee e : Employees.values()) {
            em.persist(e);
        }
        trans.commit();
        LoadEmployees();
    }

    /**
     * Persists a single employee, then reloads the table.
     *
     * @param e the employee to save (must be a new, unmanaged entity)
     * @throws Exception if the transaction or the reload fails
     */
    public static void SaveEmployee(Employee e) throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        em.persist(e);
        trans.commit();
        LoadEmployees();
    }

    /**
     * Deletes an employee from the database and from {@link #Employees}.
     *
     * @param e the managed employee entity to remove
     * @throws Exception if the transaction fails
     */
    public static void RemoveEmployee(Employee e) throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        em.remove(e);
        Employees.remove(e.name);
        trans.commit();
    }

    /**
     * Loads the client table into {@link #Clients}, keyed by client name, and
     * resolves each client's stored address string into an Intersection via
     * {@link #CityMap} (so LoadCityMap must run first — see
     * {@link #InitializeCourierSystem()}).
     *
     * @throws Exception kept for caller compatibility; query failures are caught
     */
    @SuppressWarnings("unchecked")
    public static void LoadClients() throws Exception {
        Clients = new HashMap<String, Client>();
        try {
            Query eQuery = em.createQuery("SELECT c FROM Clients c", Client.class);
            List<Client> cli = eQuery.getResultList();
            for (Client c : cli) {
                c.trueAddress = CityMap.getIntersection(c.address);
                Clients.put(c.name, c);
            }
        } catch (Exception ex) {
            // BUG FIX: was println(ex.getStackTrace()); print the real trace.
            ex.printStackTrace();
        }
    }

    /**
     * Persists every in-memory client (flattening trueAddress back into the
     * stored address string), then reloads the table.
     *
     * @throws Exception if the transaction or the reload fails
     */
    public static void UpdateClients() throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        for (Client c : Clients.values()) {
            c.address = c.trueAddress.getName();
            em.persist(c);
        }
        trans.commit();
        LoadClients();
    }

    /**
     * Merges a single client into the database, then reloads the table.
     *
     * @param c the client to save (merge handles both new and detached entities)
     * @throws Exception if the transaction or the reload fails
     */
    public static void SaveClient(Client c) throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        em.merge(c);
        trans.commit();
        LoadClients();
    }

    /**
     * Deletes a client from the database and from {@link #Clients}.
     *
     * @param c the managed client entity to remove
     * @throws Exception if the transaction fails
     */
    public static void RemoveClient(Client c) throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        em.remove(c);
        Clients.remove(c.name);
        trans.commit();
    }

    /**
     * Loads the delivery table into {@link #Deliveries}, keyed by the string
     * form of the package id.
     *
     * @throws IOException kept for caller compatibility; query failures are caught
     */
    public static void LoadDeliveries() throws IOException {
        Deliveries = new HashMap<String, Delivery>();
        try {
            Query eQuery = em.createQuery("SELECT d from Deliveries d", Delivery.class);
            @SuppressWarnings("unchecked")
            List<Delivery> del = eQuery.getResultList();
            for (Delivery d : del) {
                Deliveries.put(String.valueOf(d.packageID), d);
            }
        } catch (Exception ex) {
            // BUG FIX: was println(ex.getStackTrace()); print the real trace.
            ex.printStackTrace();
        }
    }

    /**
     * Persists every in-memory delivery, then reloads the table.
     *
     * @throws Exception if the transaction or the reload fails
     */
    public static void UpdateDeliveries() throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        for (Delivery d : Deliveries.values()) {
            em.persist(d);
        }
        trans.commit();
        LoadDeliveries();
    }

    /**
     * Merges a single delivery into the database, then reloads the table.
     *
     * @param d the delivery to save
     * @throws FileNotFoundException kept for caller compatibility
     * @throws IOException if the reload fails
     */
    public static void SaveDelivery(Delivery d) throws FileNotFoundException, IOException {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        em.merge(d);
        trans.commit();
        LoadDeliveries();
    }

    /**
     * Deletes a delivery from the database and from {@link #Deliveries}.
     *
     * @param d the managed delivery entity to remove
     * @throws Exception if the transaction fails
     */
    public static void RemoveDelivery(Delivery d) throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        em.remove(d);
        Deliveries.remove(String.valueOf(d.packageID));
        trans.commit();
    }

    /**
     * Loads all saved intersections and installs them into a fresh
     * {@link #CityMap}. A query failure leaves an empty map rather than
     * aborting startup.
     *
     * @throws Exception kept for caller compatibility; query failures are caught
     */
    @SuppressWarnings("unchecked")
    public static void LoadCityMap() throws Exception {
        try {
            Query eQuery = em.createQuery("SELECT m FROM CityMap m", Intersection.class);
            List<Intersection> savedIntersections = eQuery.getResultList();
            HashMap<String, Intersection> hashedIntersections =
                    new HashMap<String, Intersection>();
            for (Intersection i : savedIntersections) {
                hashedIntersections.put(i.getName(), i);
            }
            CityMap = new Map();
            if (hashedIntersections != null && !hashedIntersections.isEmpty()) {
                CityMap.setIntersections(hashedIntersections);
            }
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        }
    }

    /**
     * Persists every intersection of the current {@link #CityMap}.
     *
     * @throws Exception if the transaction fails
     */
    public static void SaveCityMap() throws Exception {
        EntityTransaction trans = em.getTransaction();
        trans.begin();
        for (Intersection i : CityMap.intersections.values()) {
            em.persist(i);
        }
        trans.commit();
    }

    /** Dumps the city map id, save date, and closed intersections to stdout (debug aid). */
    public static void PrintMapToConsole() {
        System.out.println("City Map: ID-" + CityMap.mapId);
        System.out.println("Last Saved: " + CityMap.lastSavedDate);
        System.out.println("Closed Intersections:");
        for (String s : CityMap.getClosedIntersections()) {
            System.out.println("  " + s);
        }
    }

    // Private constructor: this is effectively a static utility/registry class,
    // but the constructor still triggers full initialization if ever invoked.
    private CourierSystem() throws Exception {
        InitializeCourierSystem();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner.plan.rules.logical;

import org.apache.flink.api.common.eventtime.Watermark;
import org.apache.flink.api.common.eventtime.WatermarkGenerator;
import org.apache.flink.api.common.eventtime.WatermarkGeneratorSupplier;
import org.apache.flink.api.common.eventtime.WatermarkOutput;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.abilities.SupportsWatermarkPushDown;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.planner.calcite.FlinkTypeFactory;
import org.apache.flink.table.planner.codegen.WatermarkGeneratorCodeGenerator;
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalTableSourceScan;
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalWatermarkAssigner;
import org.apache.flink.table.planner.plan.schema.TableSourceTable;
import org.apache.flink.table.runtime.generated.GeneratedWatermarkGenerator;

import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleOperand;
import org.apache.calcite.rex.RexNode;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import scala.Option;

/**
 * Base rule for interface {@link SupportsWatermarkPushDown}. It offers a util to push the {@link
 * FlinkLogicalWatermarkAssigner} into the {@link FlinkLogicalTableSourceScan}.
 */
public abstract class PushWatermarkIntoTableSourceScanRuleBase extends RelOptRule {

    public PushWatermarkIntoTableSourceScanRuleBase(RelOptRuleOperand operand, String description) {
        super(operand, description);
    }

    /**
     * It uses the input watermark expression to generate the {@link WatermarkGeneratorSupplier}.
     * After the {@link WatermarkStrategy} is pushed into the scan, it will build a new scan.
     * However, when {@link FlinkLogicalWatermarkAssigner} is the parent of the {@link
     * FlinkLogicalTableSourceScan} it should modify the rowtime type to keep the type of plan is
     * consistent. In other cases, it just keep the data type of the scan as same as before and
     * leave the work when rewriting the projection.
     *
     * <p>NOTES: the row type of the scan is not always as same as the watermark assigner. Because
     * the scan will not add the rowtime column into the row when pushing the watermark assigner
     * into the scan. In some cases, query may have computed columns defined on rowtime column. If
     * modifying the type of the rowtime(with time attribute), it will also influence the type of
     * the computed column. Therefore, if the watermark assigner is not the parent of the scan, set
     * the type of the scan as before and leave the work to projection.
     *
     * @param watermarkAssigner the watermark assigner being pushed down
     * @param watermarkExpr the watermark generation expression
     * @param scan the scan the watermark strategy is pushed into
     * @param tableConfig config used for code generation and idle-timeout lookup
     * @param useWatermarkAssignerRowType true when the assigner is the direct parent of the scan,
     *     in which case the new scan adopts the assigner's row type
     * @return a new scan whose table source carries the watermark strategy
     */
    protected FlinkLogicalTableSourceScan getNewScan(
            FlinkLogicalWatermarkAssigner watermarkAssigner,
            RexNode watermarkExpr,
            FlinkLogicalTableSourceScan scan,
            TableConfig tableConfig,
            boolean useWatermarkAssignerRowType) {

        GeneratedWatermarkGenerator generatedWatermarkGenerator =
                WatermarkGeneratorCodeGenerator.generateWatermarkGenerator(
                        tableConfig,
                        FlinkTypeFactory.toLogicalRowType(scan.getRowType()),
                        watermarkExpr,
                        Option.apply("context"));

        Configuration configuration = tableConfig.getConfiguration();

        WatermarkGeneratorSupplier<RowData> supplier =
                new DefaultWatermarkGeneratorSupplier(configuration, generatedWatermarkGenerator);
        String digest = String.format("watermark=[%s]", watermarkExpr);

        WatermarkStrategy<RowData> watermarkStrategy = WatermarkStrategy.forGenerator(supplier);
        Duration idleTimeout =
                configuration.get(ExecutionConfigOptions.TABLE_EXEC_SOURCE_IDLE_TIMEOUT);
        if (!idleTimeout.isZero() && !idleTimeout.isNegative()) {
            // BUG FIX: WatermarkStrategy is immutable — withIdleness() returns a NEW
            // strategy. The original discarded the return value, so the idle timeout
            // was silently never applied even though the digest claimed it was.
            watermarkStrategy = watermarkStrategy.withIdleness(idleTimeout);
            digest = String.format("%s, idletimeout=[%s]", digest, idleTimeout.toMillis());
        }

        TableSourceTable tableSourceTable = scan.getTable().unwrap(TableSourceTable.class);
        DynamicTableSource newDynamicTableSource = tableSourceTable.tableSource().copy();
        ((SupportsWatermarkPushDown) newDynamicTableSource).applyWatermark(watermarkStrategy);

        // scan row type
        TableSourceTable newTableSourceTable;
        if (useWatermarkAssignerRowType) {
            // project is trivial and set rowtime type in scan
            newTableSourceTable =
                    tableSourceTable.copy(
                            newDynamicTableSource,
                            watermarkAssigner.getRowType(),
                            new String[] {digest});
        } else {
            // project add/delete columns and set the rowtime column type in project
            newTableSourceTable =
                    tableSourceTable.copy(
                            newDynamicTableSource, scan.getRowType(), new String[] {digest});
        }
        return FlinkLogicalTableSourceScan.create(scan.getCluster(), newTableSourceTable);
    }

    /**
     * Returns true when the scan's underlying table source implements {@link
     * SupportsWatermarkPushDown} (and the table can be unwrapped to a {@link TableSourceTable}).
     */
    protected boolean supportsWatermarkPushDown(FlinkLogicalTableSourceScan scan) {
        TableSourceTable tableSourceTable = scan.getTable().unwrap(TableSourceTable.class);
        return tableSourceTable != null
                && tableSourceTable.tableSource() instanceof SupportsWatermarkPushDown;
    }

    /**
     * Wrapper of the {@link GeneratedWatermarkGenerator} that is used to create {@link
     * WatermarkGenerator}. The {@link DefaultWatermarkGeneratorSupplier} uses the {@link
     * WatermarkGeneratorSupplier.Context} to init the generated watermark generator.
     */
    private static class DefaultWatermarkGeneratorSupplier
            implements WatermarkGeneratorSupplier<RowData> {
        private static final long serialVersionUID = 1L;

        private final Configuration configuration;
        private final GeneratedWatermarkGenerator generatedWatermarkGenerator;

        public DefaultWatermarkGeneratorSupplier(
                Configuration configuration,
                GeneratedWatermarkGenerator generatedWatermarkGenerator) {
            this.configuration = configuration;
            this.generatedWatermarkGenerator = generatedWatermarkGenerator;
        }

        @Override
        public WatermarkGenerator<RowData> createWatermarkGenerator(Context context) {
            // Re-instantiate the generated class with the supplier context appended to
            // its reference list, then open it before use.
            List<Object> references =
                    new ArrayList<>(Arrays.asList(generatedWatermarkGenerator.getReferences()));
            references.add(context);

            org.apache.flink.table.runtime.generated.WatermarkGenerator innerWatermarkGenerator =
                    new GeneratedWatermarkGenerator(
                                    generatedWatermarkGenerator.getClassName(),
                                    generatedWatermarkGenerator.getCode(),
                                    references.toArray())
                            .newInstance(Thread.currentThread().getContextClassLoader());

            try {
                innerWatermarkGenerator.open(configuration);
            } catch (Exception e) {
                throw new RuntimeException("Fail to instantiate generated watermark generator.", e);
            }
            return new DefaultWatermarkGeneratorSupplier.DefaultWatermarkGenerator(
                    innerWatermarkGenerator);
        }

        /**
         * Wrapper of the code-generated {@link
         * org.apache.flink.table.runtime.generated.WatermarkGenerator}.
         */
        private static class DefaultWatermarkGenerator implements WatermarkGenerator<RowData> {
            private static final long serialVersionUID = 1L;

            private final org.apache.flink.table.runtime.generated.WatermarkGenerator
                    innerWatermarkGenerator;
            // Last non-null watermark produced; emitted on every periodic tick.
            private Long currentWatermark = Long.MIN_VALUE;

            public DefaultWatermarkGenerator(
                    org.apache.flink.table.runtime.generated.WatermarkGenerator
                            watermarkGenerator) {
                this.innerWatermarkGenerator = watermarkGenerator;
            }

            @Override
            public void onEvent(RowData event, long eventTimestamp, WatermarkOutput output) {
                try {
                    Long watermark = innerWatermarkGenerator.currentWatermark(event);
                    if (watermark != null) {
                        currentWatermark = watermark;
                    }
                } catch (Exception e) {
                    throw new RuntimeException(
                            String.format(
                                    "Generated WatermarkGenerator fails to generate for row: %s.",
                                    event),
                            e);
                }
            }

            @Override
            public void onPeriodicEmit(WatermarkOutput output) {
                output.emitWatermark(new Watermark(currentWatermark));
            }
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.suggest.completion;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Defines a suggest command based on a prefix, typically to provide "auto-complete" functionality
 * for users as they type search terms. The implementation of the completion service uses FSTs that
 * are created at index-time and so must be defined in the mapping with the type "completion" before
 * indexing.
 */
public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSuggestionBuilder> {

    // Contexts are kept as raw bytes until the field mapping is known; this is the
    // format those bytes are (re)serialized in.
    private static final XContentType CONTEXT_BYTES_XCONTENT_TYPE = XContentType.JSON;
    static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context");
    static final ParseField SKIP_DUPLICATES_FIELD = new ParseField("skip_duplicates");

    public static final String SUGGESTION_NAME = "completion";

    /**
     * {
     *     "field" : STRING
     *     "size" : INT
     *     "fuzzy" : BOOLEAN | FUZZY_OBJECT
     *     "contexts" : QUERY_CONTEXTS
     *     "regex" : REGEX_OBJECT
     *     "payload" : STRING_ARRAY
     * }
     */
    private static final ObjectParser<CompletionSuggestionBuilder.InnerBuilder, Void> PARSER =
        new ObjectParser<>(SUGGESTION_NAME, null);
    static {
        // "fuzzy" accepts either a bare boolean (true => default fuzzy options) or
        // a full options object.
        PARSER.declareField((parser, completionSuggestionContext, context) -> {
                if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
                    if (parser.booleanValue()) {
                        completionSuggestionContext.fuzzyOptions = new FuzzyOptions.Builder().build();
                    }
                } else {
                    completionSuggestionContext.fuzzyOptions = FuzzyOptions.parse(parser);
                }
            }, FuzzyOptions.FUZZY_OPTIONS, ObjectParser.ValueType.OBJECT_OR_BOOLEAN);
        PARSER.declareField((parser, completionSuggestionContext, context) ->
            completionSuggestionContext.regexOptions = RegexOptions.parse(parser),
            RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT);
        PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::field, FIELDNAME_FIELD);
        PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::analyzer, ANALYZER_FIELD);
        PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::size, SIZE_FIELD);
        PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::shardSize, SHARDSIZE_FIELD);
        PARSER.declareField((p, v, c) -> {
            // Copy the current structure. We will parse, once the mapping is provided
            XContentBuilder builder = XContentFactory.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE);
            builder.copyCurrentStructure(p);
            v.contextBytes = BytesReference.bytes(builder);
            p.skipChildren();
        }, CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated
        PARSER.declareBoolean(CompletionSuggestionBuilder::skipDuplicates, SKIP_DUPLICATES_FIELD);
    }

    protected FuzzyOptions fuzzyOptions;
    protected RegexOptions regexOptions;
    // Raw serialized contexts; parsed lazily in build() once the field type
    // (and therefore the context mappings) is available.
    protected BytesReference contextBytes = null;
    protected boolean skipDuplicates = false;

    public CompletionSuggestionBuilder(String field) {
        super(field);
    }

    /**
     * internal copy constructor that copies over all class fields except for the field which is
     * set to the one provided in the first argument
     */
    private CompletionSuggestionBuilder(String fieldname, CompletionSuggestionBuilder in) {
        super(fieldname, in);
        fuzzyOptions = in.fuzzyOptions;
        regexOptions = in.regexOptions;
        contextBytes = in.contextBytes;
        skipDuplicates = in.skipDuplicates;
    }

    /**
     * Read from a stream.
     */
    public CompletionSuggestionBuilder(StreamInput in) throws IOException {
        super(in);
        fuzzyOptions = in.readOptionalWriteable(FuzzyOptions::new);
        regexOptions = in.readOptionalWriteable(RegexOptions::new);
        contextBytes = in.readOptionalBytesReference();
        skipDuplicates = in.readBoolean();
    }

    @Override
    public void doWriteTo(StreamOutput out) throws IOException {
        // Keep field order in sync with the StreamInput constructor above.
        out.writeOptionalWriteable(fuzzyOptions);
        out.writeOptionalWriteable(regexOptions);
        out.writeOptionalBytesReference(contextBytes);
        out.writeBoolean(skipDuplicates);
    }

    /**
     * Sets the prefix to provide completions for.
     * The prefix gets analyzed by the suggest analyzer.
     */
    @Override
    public CompletionSuggestionBuilder prefix(String prefix) {
        super.prefix(prefix);
        return this;
    }

    /**
     * Same as {@link #prefix(String)} with fuzziness of <code>fuzziness</code>
     */
    public CompletionSuggestionBuilder prefix(String prefix, Fuzziness fuzziness) {
        super.prefix(prefix);
        this.fuzzyOptions = new FuzzyOptions.Builder().setFuzziness(fuzziness).build();
        return this;
    }

    /**
     * Same as {@link #prefix(String)} with full fuzzy options
     * see {@link FuzzyOptions.Builder}
     */
    public CompletionSuggestionBuilder prefix(String prefix, FuzzyOptions fuzzyOptions) {
        super.prefix(prefix);
        this.fuzzyOptions = fuzzyOptions;
        return this;
    }

    /**
     * Sets a regular expression pattern for prefixes to provide completions for.
     */
    @Override
    public CompletionSuggestionBuilder regex(String regex) {
        super.regex(regex);
        return this;
    }

    /**
     * Same as {@link #regex(String)} with full regular expression options
     * see {@link RegexOptions.Builder}
     */
    public CompletionSuggestionBuilder regex(String regex, RegexOptions regexOptions) {
        this.regex(regex);
        this.regexOptions = regexOptions;
        return this;
    }

    /**
     * Sets query contexts for completion
     * @param queryContexts named query contexts
     *        see {@link org.elasticsearch.search.suggest.completion.context.CategoryQueryContext}
     *        and {@link org.elasticsearch.search.suggest.completion.context.GeoQueryContext}
     */
    public CompletionSuggestionBuilder contexts(Map<String, List<? extends ToXContent>> queryContexts) {
        Objects.requireNonNull(queryContexts, "contexts must not be null");
        try {
            XContentBuilder contentBuilder = XContentFactory.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE);
            contentBuilder.startObject();
            for (Map.Entry<String, List<? extends ToXContent>> contextEntry : queryContexts.entrySet()) {
                contentBuilder.startArray(contextEntry.getKey());
                for (ToXContent queryContext : contextEntry.getValue()) {
                    queryContext.toXContent(contentBuilder, EMPTY_PARAMS);
                }
                contentBuilder.endArray();
            }
            contentBuilder.endObject();
            return contexts(contentBuilder);
        } catch (IOException e) {
            throw new IllegalArgumentException(e);
        }
    }

    private CompletionSuggestionBuilder contexts(XContentBuilder contextBuilder) {
        contextBytes = BytesReference.bytes(contextBuilder);
        return this;
    }

    /**
     * Returns whether duplicate suggestions should be filtered out.
     */
    public boolean skipDuplicates() {
        return skipDuplicates;
    }

    /**
     * Should duplicates be filtered or not. Defaults to {@code false}.
     */
    public CompletionSuggestionBuilder skipDuplicates(boolean skipDuplicates) {
        this.skipDuplicates = skipDuplicates;
        return this;
    }

    // Parse target for PARSER: the "field" value arrives as just another key, so
    // it is captured here and copied into a real builder in fromXContent().
    private static class InnerBuilder extends CompletionSuggestionBuilder {
        private String field;

        InnerBuilder() {
            super("_na_");
        }

        private InnerBuilder field(String field) {
            this.field = field;
            return this;
        }
    }

    @Override
    protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
        if (fuzzyOptions != null) {
            fuzzyOptions.toXContent(builder, params);
        }
        if (regexOptions != null) {
            regexOptions.toXContent(builder, params);
        }
        if (skipDuplicates) {
            builder.field(SKIP_DUPLICATES_FIELD.getPreferredName(), skipDuplicates);
        }
        if (contextBytes != null) {
            try (InputStream stream = contextBytes.streamInput()) {
                builder.rawField(CONTEXTS_FIELD.getPreferredName(), stream);
            }
        }
        return builder;
    }

    public static CompletionSuggestionBuilder fromXContent(XContentParser parser) throws IOException {
        CompletionSuggestionBuilder.InnerBuilder builder = new CompletionSuggestionBuilder.InnerBuilder();
        PARSER.parse(parser, builder, null);
        String field = builder.field;
        // now we should have field name, check and copy fields over to the suggestion builder we return
        if (field == null) {
            throw new ElasticsearchParseException(
                "the required field option [" + FIELDNAME_FIELD.getPreferredName() + "] is missing");
        }
        return new CompletionSuggestionBuilder(field, builder);
    }

    @Override
    public SuggestionContext build(QueryShardContext context) throws IOException {
        CompletionSuggestionContext suggestionContext = new CompletionSuggestionContext(context);
        // copy over common settings to each suggestion builder
        final MapperService mapperService = context.getMapperService();
        populateCommonFields(mapperService, suggestionContext);
        suggestionContext.setSkipDuplicates(skipDuplicates);
        suggestionContext.setFuzzyOptions(fuzzyOptions);
        suggestionContext.setRegexOptions(regexOptions);
        if (shardSize != null) {
            suggestionContext.setShardSize(shardSize);
        }
        MappedFieldType mappedFieldType = mapperService.fullName(suggestionContext.getField());
        // CLEANUP: the original checked "== null || instanceof == false" and then
        // re-tested instanceof in an always-true nested if. "null instanceof X" is
        // false, so a single instanceof guard is equivalent and the cast is
        // unconditional afterwards.
        if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType == false) {
            throw new IllegalArgumentException("Field [" + suggestionContext.getField()
                + "] is not a completion suggest field");
        }
        CompletionFieldMapper.CompletionFieldType type =
            (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
        suggestionContext.setFieldType(type);
        if (type.hasContextMappings() && contextBytes != null) {
            Map<String, List<ContextMapping.InternalQueryContext>> queryContexts =
                parseContextBytes(contextBytes, context.getXContentRegistry(), type.getContextMappings());
            suggestionContext.setQueryContexts(queryContexts);
        } else if (contextBytes != null) {
            throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
        }
        assert suggestionContext.getFieldType() != null : "no completion field type set";
        return suggestionContext;
    }

    /**
     * Parses the raw context bytes captured at request-parse time into typed query
     * contexts, now that the field's {@link ContextMappings} are known.
     */
    static Map<String, List<ContextMapping.InternalQueryContext>> parseContextBytes(
            BytesReference contextBytes, NamedXContentRegistry xContentRegistry,
            ContextMappings contextMappings) throws IOException {
        try (XContentParser contextParser = XContentHelper.createParser(xContentRegistry,
                LoggingDeprecationHandler.INSTANCE, contextBytes, CONTEXT_BYTES_XCONTENT_TYPE)) {
            contextParser.nextToken();
            Map<String, List<ContextMapping.InternalQueryContext>> queryContexts =
                new HashMap<>(contextMappings.size());
            assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
            XContentParser.Token currentToken;
            String currentFieldName;
            while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (currentToken == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = contextParser.currentName();
                    final ContextMapping<?> mapping = contextMappings.get(currentFieldName);
                    queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser));
                }
            }
            return queryContexts;
        }
    }

    @Override
    public String getWriteableName() {
        return SUGGESTION_NAME;
    }

    @Override
    protected boolean doEquals(CompletionSuggestionBuilder other) {
        return skipDuplicates == other.skipDuplicates
            && Objects.equals(fuzzyOptions, other.fuzzyOptions)
            && Objects.equals(regexOptions, other.regexOptions)
            && Objects.equals(contextBytes, other.contextBytes);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(fuzzyOptions, regexOptions, contextBytes, skipDuplicates);
    }
}
package org.mondo.collaboration.security.query;

import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.incquery.runtime.api.IMatchProcessor;
import org.eclipse.incquery.runtime.api.IQuerySpecification;
import org.eclipse.incquery.runtime.api.IncQueryEngine;
import org.eclipse.incquery.runtime.api.impl.BaseMatcher;
import org.eclipse.incquery.runtime.exception.IncQueryException;
import org.eclipse.incquery.runtime.matchers.tuple.Tuple;
import org.eclipse.incquery.runtime.util.IncQueryLoggingUtil;
import org.mondo.collaboration.security.query.LockAHelperMatch;
import org.mondo.collaboration.security.query.util.LockAHelperQuerySpecification;
import wt.Control;
import wt.Signal;

/**
 * Generated pattern matcher API of the org.mondo.collaboration.security.query.lockAHelper pattern,
 * providing pattern-specific query methods.
 *
 * <p>Use the pattern matcher on a given model via {@link #on(IncQueryEngine)},
 * e.g. in conjunction with {@link IncQueryEngine#on(Notifier)}.
 * Matches of the pattern are represented as {@link LockAHelperMatch}.
 *
 * <p>Original source:
 * <code><pre>
 * pattern lockAHelper(signal :Signal, cycle, value, ctrl :Control) {
 *     Control.cycle(ctrl, cycle);
 *     Control.provides(ctrl, signal);
 *     Signal.frequency(signal, value);
 *     //Signal.type(signal, ::Output);
 * }
 * </pre></code>
 *
 * <p>NOTE(review): this file is generated by EMF-IncQuery; do not edit by hand —
 * change the .eiq pattern definition and regenerate instead.
 *
 * @see LockAHelperMatch
 * @see LockAHelperProcessor
 * @see LockAHelperQuerySpecification
 */
@SuppressWarnings("all")
public class LockAHelperMatcher extends BaseMatcher<LockAHelperMatch> {
  /**
   * Initializes (or retrieves, if already constructed) the pattern matcher within an existing
   * EMF-IncQuery engine. The match set is incrementally refreshed upon updates.
   *
   * @param engine the existing EMF-IncQuery engine in which this matcher will be created.
   * @throws IncQueryException if an error occurs during pattern matcher creation
   */
  public static LockAHelperMatcher on(final IncQueryEngine engine) throws IncQueryException {
    // check if matcher already exists
    LockAHelperMatcher matcher = engine.getExistingMatcher(querySpecification());
    if (matcher == null) {
        matcher = new LockAHelperMatcher(engine);
        // do not have to "put" it into engine.matchers, reportMatcherInitialized() will take care of it
    }
    return matcher;
  }

  // Positions of the pattern parameters inside a raw match tuple/array.
  private final static int POSITION_SIGNAL = 0;

  private final static int POSITION_CYCLE = 1;

  private final static int POSITION_VALUE = 2;

  private final static int POSITION_CTRL = 3;

  private final static Logger LOGGER = IncQueryLoggingUtil.getLogger(LockAHelperMatcher.class);

  /**
   * Initializes the pattern matcher over a given EMF model root (recommended: Resource or
   * ResourceSet); the scope of matching is that root and below. Uses the managed
   * {@link IncQueryEngine} of the root, so multiple matchers share one engine.
   *
   * @param emfRoot the root of the EMF containment hierarchy where the pattern matcher will operate.
   * @throws IncQueryException if an error occurs during pattern matcher creation
   * @deprecated use {@link #on(IncQueryEngine)} instead, e.g. in conjunction with {@link IncQueryEngine#on(Notifier)}
   */
  @Deprecated
  public LockAHelperMatcher(final Notifier emfRoot) throws IncQueryException {
    this(IncQueryEngine.on(emfRoot));
  }

  /**
   * Initializes the pattern matcher within an existing EMF-IncQuery engine.
   *
   * @param engine the existing EMF-IncQuery engine in which this matcher will be created.
   * @throws IncQueryException if an error occurs during pattern matcher creation
   * @deprecated use {@link #on(IncQueryEngine)} instead
   */
  @Deprecated
  public LockAHelperMatcher(final IncQueryEngine engine) throws IncQueryException {
    super(engine, querySpecification());
  }

  /**
   * Returns all matches conforming to the given fixed parameter values (null = unbound).
   *
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pCycle the fixed value of pattern parameter cycle, or null if not bound.
   * @param pValue the fixed value of pattern parameter value, or null if not bound.
   * @param pCtrl the fixed value of pattern parameter ctrl, or null if not bound.
   * @return matches represented as a LockAHelperMatch object.
   */
  public Collection<LockAHelperMatch> getAllMatches(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    return rawGetAllMatches(new Object[]{pSignal, pCycle, pValue, pCtrl});
  }

  /**
   * Returns an arbitrarily chosen match conforming to the given fixed parameter values
   * (null = unbound). Neither determinism nor randomness of selection is guaranteed.
   *
   * @return a match represented as a LockAHelperMatch object, or null if no match is found.
   */
  public LockAHelperMatch getOneArbitraryMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    return rawGetOneArbitraryMatch(new Object[]{pSignal, pCycle, pValue, pCtrl});
  }

  /**
   * Indicates whether the given (partial) parameter binding is part of at least one valid match,
   * under any substitution of the unbound (null) parameters.
   *
   * @return true if the input is a valid (partial) match of the pattern.
   */
  public boolean hasMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    return rawHasMatch(new Object[]{pSignal, pCycle, pValue, pCtrl});
  }

  /**
   * Returns the number of matches conforming to the given fixed parameter values (null = unbound).
   *
   * @return the number of pattern matches found.
   */
  public int countMatches(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    return rawCountMatches(new Object[]{pSignal, pCycle, pValue, pCtrl});
  }

  /**
   * Executes the given processor on each match conforming to the given fixed parameter values
   * (null = unbound).
   *
   * @param processor the action that will process each pattern match.
   */
  public void forEachMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl, final IMatchProcessor<? super LockAHelperMatch> processor) {
    rawForEachMatch(new Object[]{pSignal, pCycle, pValue, pCtrl}, processor);
  }

  /**
   * Executes the given processor on an arbitrarily chosen match conforming to the given fixed
   * parameter values (null = unbound). Neither determinism nor randomness of selection is guaranteed.
   *
   * @param processor the action that will process the selected match.
   * @return true if the pattern has at least one match with the given parameter values, false if the processor was not invoked
   */
  public boolean forOneArbitraryMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl, final IMatchProcessor<? super LockAHelperMatch> processor) {
    return rawForOneArbitraryMatch(new Object[]{pSignal, pCycle, pValue, pCtrl}, processor);
  }

  /**
   * Returns a new (partial, immutable) match, e.g. for use as a filter when calling the matcher.
   * Use {@link #newEmptyMatch()} to obtain a mutable match object.
   *
   * @return the (partial) match object.
   */
  public LockAHelperMatch newMatch(final Signal pSignal, final String pCycle, final Integer pValue, final Control pCtrl) {
    return LockAHelperMatch.newMatch(pSignal, pCycle, pValue, pCtrl);
  }

  /**
   * Retrieve the set of values that occur in matches for signal.
   * @return the Set of all values, empty set if there are no matches
   */
  protected Set<Signal> rawAccumulateAllValuesOfsignal(final Object[] parameters) {
    Set<Signal> results = new HashSet<Signal>();
    rawAccumulateAllValues(POSITION_SIGNAL, parameters, results);
    return results;
  }

  /** Retrieve the set of values that occur in matches for signal. */
  public Set<Signal> getAllValuesOfsignal() {
    return rawAccumulateAllValuesOfsignal(emptyArray());
  }

  /** Retrieve the set of values that occur in matches for signal, restricted by a partial match. */
  public Set<Signal> getAllValuesOfsignal(final LockAHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfsignal(partialMatch.toArray());
  }

  /** Retrieve the set of values that occur in matches for signal, with the other parameters bound. */
  public Set<Signal> getAllValuesOfsignal(final String pCycle, final Integer pValue, final Control pCtrl) {
    return rawAccumulateAllValuesOfsignal(new Object[]{
    null,
    pCycle,
    pValue,
    pCtrl
    });
  }

  /**
   * Retrieve the set of values that occur in matches for cycle.
   * @return the Set of all values, empty set if there are no matches
   */
  protected Set<String> rawAccumulateAllValuesOfcycle(final Object[] parameters) {
    Set<String> results = new HashSet<String>();
    rawAccumulateAllValues(POSITION_CYCLE, parameters, results);
    return results;
  }

  /** Retrieve the set of values that occur in matches for cycle. */
  public Set<String> getAllValuesOfcycle() {
    return rawAccumulateAllValuesOfcycle(emptyArray());
  }

  /** Retrieve the set of values that occur in matches for cycle, restricted by a partial match. */
  public Set<String> getAllValuesOfcycle(final LockAHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfcycle(partialMatch.toArray());
  }

  /** Retrieve the set of values that occur in matches for cycle, with the other parameters bound. */
  public Set<String> getAllValuesOfcycle(final Signal pSignal, final Integer pValue, final Control pCtrl) {
    return rawAccumulateAllValuesOfcycle(new Object[]{
    pSignal,
    null,
    pValue,
    pCtrl
    });
  }

  /**
   * Retrieve the set of values that occur in matches for value.
   * @return the Set of all values, empty set if there are no matches
   */
  protected Set<Integer> rawAccumulateAllValuesOfvalue(final Object[] parameters) {
    Set<Integer> results = new HashSet<Integer>();
    rawAccumulateAllValues(POSITION_VALUE, parameters, results);
    return results;
  }

  /** Retrieve the set of values that occur in matches for value. */
  public Set<Integer> getAllValuesOfvalue() {
    return rawAccumulateAllValuesOfvalue(emptyArray());
  }

  /** Retrieve the set of values that occur in matches for value, restricted by a partial match. */
  public Set<Integer> getAllValuesOfvalue(final LockAHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfvalue(partialMatch.toArray());
  }

  /** Retrieve the set of values that occur in matches for value, with the other parameters bound. */
  public Set<Integer> getAllValuesOfvalue(final Signal pSignal, final String pCycle, final Control pCtrl) {
    return rawAccumulateAllValuesOfvalue(new Object[]{
    pSignal,
    pCycle,
    null,
    pCtrl
    });
  }

  /**
   * Retrieve the set of values that occur in matches for ctrl.
   * @return the Set of all values, empty set if there are no matches
   */
  protected Set<Control> rawAccumulateAllValuesOfctrl(final Object[] parameters) {
    Set<Control> results = new HashSet<Control>();
    rawAccumulateAllValues(POSITION_CTRL, parameters, results);
    return results;
  }

  /** Retrieve the set of values that occur in matches for ctrl. */
  public Set<Control> getAllValuesOfctrl() {
    return rawAccumulateAllValuesOfctrl(emptyArray());
  }

  /** Retrieve the set of values that occur in matches for ctrl, restricted by a partial match. */
  public Set<Control> getAllValuesOfctrl(final LockAHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfctrl(partialMatch.toArray());
  }

  /** Retrieve the set of values that occur in matches for ctrl, with the other parameters bound. */
  public Set<Control> getAllValuesOfctrl(final Signal pSignal, final String pCycle, final Integer pValue) {
    return rawAccumulateAllValuesOfctrl(new Object[]{
    pSignal,
    pCycle,
    pValue,
    null
    });
  }

  // Converts a raw runtime tuple into a typed match; returns null (and logs) on a type mismatch.
  @Override
  protected LockAHelperMatch tupleToMatch(final Tuple t) {
    try {
        return LockAHelperMatch.newMatch((wt.Signal) t.get(POSITION_SIGNAL), (java.lang.String) t.get(POSITION_CYCLE), (java.lang.Integer) t.get(POSITION_VALUE), (wt.Control) t.get(POSITION_CTRL));
    } catch(ClassCastException e) {
        LOGGER.error("Element(s) in tuple not properly typed!",e);
        return null;
    }
  }

  // Converts a raw parameter array into an immutable typed match; returns null (and logs) on a type mismatch.
  @Override
  protected LockAHelperMatch arrayToMatch(final Object[] match) {
    try {
        return LockAHelperMatch.newMatch((wt.Signal) match[POSITION_SIGNAL], (java.lang.String) match[POSITION_CYCLE], (java.lang.Integer) match[POSITION_VALUE], (wt.Control) match[POSITION_CTRL]);
    } catch(ClassCastException e) {
        LOGGER.error("Element(s) in array not properly typed!",e);
        return null;
    }
  }

  // Converts a raw parameter array into a mutable typed match; returns null (and logs) on a type mismatch.
  @Override
  protected LockAHelperMatch arrayToMatchMutable(final Object[] match) {
    try {
        return LockAHelperMatch.newMutableMatch((wt.Signal) match[POSITION_SIGNAL], (java.lang.String) match[POSITION_CYCLE], (java.lang.Integer) match[POSITION_VALUE], (wt.Control) match[POSITION_CTRL]);
    } catch(ClassCastException e) {
        LOGGER.error("Element(s) in array not properly typed!",e);
        return null;
    }
  }

  /**
   * @return the singleton instance of the query specification of this pattern
   * @throws IncQueryException if the pattern definition could not be loaded
   */
  public static IQuerySpecification<LockAHelperMatcher> querySpecification() throws IncQueryException {
    return LockAHelperQuerySpecification.instance();
  }
}
/*
 * Copyright 2015 Adaptris Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.adaptris.core.services.splitter;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;

import com.adaptris.annotation.AdapterComponent;
import com.adaptris.annotation.ComponentProfile;
import com.adaptris.annotation.DisplayOrder;
import com.adaptris.core.AdaptrisMarshaller;
import com.adaptris.core.AdaptrisMessage;
import com.adaptris.core.CoreException;
import com.adaptris.core.DefaultMarshaller;
import com.adaptris.core.EventHandler;
import com.adaptris.core.EventHandlerAware;
import com.adaptris.core.Service;
import com.adaptris.core.ServiceException;
import com.adaptris.core.ServiceImp;
import com.adaptris.core.services.aggregator.MessageAggregator;
import com.adaptris.core.util.Args;
import com.adaptris.core.util.ExceptionHelper;
import com.adaptris.core.util.LifecycleHelper;
import com.adaptris.util.TimeInterval;
import com.thoughtworks.xstream.annotations.XStreamAlias;

/**
 * Implementation of the Splitter and Aggregator enterprise integration pattern.
 *
 * <p>
 * This service splits a message according to the configured {@link MessageSplitter} implementation, executes the configured
 * {@link com.adaptris.core.Service} and subsequently joins all the messages back using the configured {@link MessageAggregator}
 * implementation.
 * </p>
 * <p>
 * For simplicity a new (cloned) instance of the underlying {@link com.adaptris.core.Service} is created for every split message,
 * and executed in its own thread; this means that where there is a high cost of initialisation for the service, then you may get
 * better performance aggregating the messages in a different way.
 * </p>
 *
 * @config split-join-service
 *
 *
 * @author lchan
 *
 */
@XStreamAlias("split-join-service")
@AdapterComponent
@ComponentProfile(
    summary = "Split a message and then execute the associated services on the split items, aggregating the split messages afterwards",
    tag = "service,splitjoin")
@DisplayOrder(order = {"splitter", "service", "aggregator", "timeout"})
public class SplitJoinService extends ServiceImp implements EventHandlerAware {

  private static final String GENERIC_EXCEPTION_MSG = "Exception waiting for all services to complete";

  // FIX: was a mutable static; this is a constant and must be final.
  private static final TimeInterval DEFAULT_TTL = new TimeInterval(600L, TimeUnit.SECONDS);

  @NotNull
  @Valid
  private Service service;
  @NotNull
  @Valid
  private MessageSplitter splitter;
  @NotNull
  @Valid
  private MessageAggregator aggregator;
  @Valid
  private TimeInterval timeout;

  private transient ExecutorService executors;
  private transient AdaptrisMarshaller marshaller = null;
  private transient EventHandler eventHandler;

  public SplitJoinService() {
    super();
  }

  /**
   * Splits the message, runs a cloned copy of the configured service against each split message in
   * its own thread, waits for all of them (bounded by {@link #getTimeout()}), and then aggregates
   * the results back into the original message.
   *
   * @param msg the message to split and rejoin
   * @throws ServiceException if splitting, any split execution, or aggregation fails
   */
  @Override
  public void doService(AdaptrisMessage msg) throws ServiceException {
    List<AdaptrisMessage> splitMessages = splitMessage(msg);
    if (splitMessages.isEmpty()) {
      log.debug("No output from splitter; nothing to do");
      return;
    }
    // One barrier party per split message, plus one for this (coordinating) thread.
    final CyclicBarrier gate = new CyclicBarrier(splitMessages.size() + 1);
    final ServiceExceptionHandler handler = new ServiceExceptionHandler();
    long count = 0;
    for (AdaptrisMessage splitMsg : splitMessages) {
      count++;
      splitMsg.addMetadata(MessageSplitterServiceImp.KEY_CURRENT_SPLIT_MESSAGE_COUNT, Long.toString(count));
      ServiceExecutor exe = new ServiceExecutor(handler, gate, cloneService(service), splitMsg);
      executors.execute(exe);
    }
    msg.addMetadata(MessageSplitterServiceImp.KEY_SPLIT_MESSAGE_COUNT, Long.toString(count));
    waitFor(gate, handler);
    log.trace("Finished waiting for operations ");
    checkForExceptions(handler);
    joinMessage(msg, splitMessages);
  }

  // Rethrows the first failure recorded by any worker thread, if there was one.
  private void checkForExceptions(ServiceExceptionHandler handler) throws ServiceException {
    Throwable e = handler.getFirstThrowableException();
    if (e != null) {
      log.error("One or more services failed; " + e.getMessage());
      throw ExceptionHelper.wrapServiceException(e);
    }
  }

  // Delegates aggregation of the split messages back into the original message.
  private void joinMessage(AdaptrisMessage joined, List<AdaptrisMessage> split) throws ServiceException {
    try {
      getAggregator().joinMessage(joined, split);
    } catch (CoreException e) {
      throw ExceptionHelper.wrapServiceException(e);
    }
  }

  // Runs the configured splitter and materialises its output as a List.
  private List<AdaptrisMessage> splitMessage(AdaptrisMessage m) throws ServiceException {
    List<AdaptrisMessage> msgs = new ArrayList<AdaptrisMessage>();
    try {
      msgs = toList(getSplitter().splitMessage(m));
    } catch (CoreException e) {
      throw ExceptionHelper.wrapServiceException(e);
    }
    return msgs;
  }

  /**
   * Convert the Iterable into a List. If it's already a list, just return it. If not, it will be
   * iterated and the resulting list returned.
   */
  private List<AdaptrisMessage> toList(Iterable<AdaptrisMessage> iter) {
    if (iter instanceof List) {
      return (List<AdaptrisMessage>) iter;
    }
    List<AdaptrisMessage> result = new ArrayList<AdaptrisMessage>();
    try (CloseableIterable<AdaptrisMessage> messages = CloseableIterable.FACTORY.ensureCloseable(iter)) {
      for (AdaptrisMessage msg : messages) {
        result.add(msg);
      }
    } catch (IOException e) {
      log.warn("Could not close Iterable!", e);
    }
    return result;
  }

  /**
   * Waits on the barrier until all workers arrive or the timeout elapses. Any failure to rendezvous
   * (timeout, broken barrier, interrupt) is recorded on the handler rather than thrown, so
   * {@link #checkForExceptions(ServiceExceptionHandler)} reports it uniformly.
   */
  private void waitFor(CyclicBarrier gate, ServiceExceptionHandler handler) {
    try {
      gate.await(timeoutMs(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
      // FIX: restore the interrupt status instead of silently swallowing it, so callers
      // further up the stack can still observe the interruption.
      Thread.currentThread().interrupt();
      handler.uncaughtException(Thread.currentThread(), new CoreException(GENERIC_EXCEPTION_MSG, e));
    } catch (Exception gateException) {
      handler.uncaughtException(Thread.currentThread(), new CoreException(GENERIC_EXCEPTION_MSG, gateException));
    }
  }

  @Override
  protected void initService() throws CoreException {
    if (getSplitter() == null) {
      throw new CoreException("Null MessageSplitter implementation");
    }
    if (getAggregator() == null) {
      throw new CoreException("Null MessageJoiner implementation");
    }
    if (getService() == null) {
      throw new CoreException("Null Service implementation");
    }
    executors = Executors.newCachedThreadPool();
    marshaller = DefaultMarshaller.getDefaultMarshaller();
  }

  @Override
  protected void closeService() {
    // Standard two-phase shutdown: orderly first, forcible if the workers don't finish in time.
    executors.shutdown();
    try {
      if (!executors.awaitTermination(60, TimeUnit.SECONDS)) {
        executors.shutdownNow();
      }
    } catch (InterruptedException e) {
      // FIX: force-stop the pool and restore the interrupt status rather than leaving
      // the pool running and the interrupt swallowed.
      executors.shutdownNow();
      Thread.currentThread().interrupt();
      log.warn("Failed to shutdown execution pool");
    }
  }

  @Override
  public void prepare() throws CoreException {
    if (getService() != null) {
      getService().prepare();
    }
  }

  // Deep-clones the configured service via marshal/unmarshal so each split message gets a
  // fresh, independently-prepared instance.
  private Service cloneService(Service original) throws ServiceException {
    Service result = null;
    try {
      result = (Service) marshaller.unmarshal(marshaller.marshal(original));
      result.prepare();
    } catch (CoreException e) {
      throw ExceptionHelper.wrapServiceException(e);
    }
    return result;
  }

  /**
   * Worker that runs one cloned service against one split message. Any exception is recorded on
   * the shared handler; the worker always signals the barrier in the end so the coordinator is
   * never left waiting on a failed worker.
   */
  private class ServiceExecutor implements Runnable {
    private ServiceExceptionHandler handler;
    private CyclicBarrier gate;
    private Service service;
    private AdaptrisMessage msg;

    ServiceExecutor(ServiceExceptionHandler ceh, CyclicBarrier cb, Service s, AdaptrisMessage msg) {
      handler = ceh;
      gate = cb;
      service = s;
      this.msg = msg;
    }

    @Override
    public void run() {
      try {
        LifecycleHelper.registerEventHandler(service, eventHandler);
        LifecycleHelper.init(service);
        LifecycleHelper.start(service);
        service.doService(msg);
      } catch (Exception e) {
        handler.uncaughtException(Thread.currentThread(), e);
      } finally {
        LifecycleHelper.stop(service);
        LifecycleHelper.close(service);
      }
      waitFor(gate, handler);
    }
  }

  /** Thread-safe collector of worker failures; the first recorded failure is reported. */
  private class ServiceExceptionHandler implements Thread.UncaughtExceptionHandler {
    private List<Throwable> exceptionList = Collections.synchronizedList(new ArrayList<Throwable>());

    /**
     * @see java.lang.Thread.UncaughtExceptionHandler#uncaughtException(java.lang.Thread, java.lang.Throwable)
     */
    @Override
    public void uncaughtException(Thread t, Throwable e) {
      log.error("uncaughtException from " + t.getName(), e);
      exceptionList.add(e);
    }

    public Throwable getFirstThrowableException() {
      Throwable result = null;
      if (exceptionList.size() > 0) {
        result = exceptionList.get(0);
      }
      return result;
    }
  }

  /**
   * @return the timeToLive
   */
  public TimeInterval getTimeout() {
    return timeout;
  }

  /**
   * Set the maximum amount of time to wait for all the instances of services to complete.
   * <p>
   * If the time to live is exceeded then an exception will be thrown by the service
   * </p>
   *
   * @param ttl the timeout to set, default is 10 minutes
   */
  public void setTimeout(TimeInterval ttl) {
    this.timeout = ttl;
  }

  // Effective timeout in milliseconds; falls back to the 10-minute default when unset.
  long timeoutMs() {
    return getTimeout() != null ? getTimeout().toMilliseconds() : DEFAULT_TTL.toMilliseconds();
  }

  @Override
  public void registerEventHandler(EventHandler eh) {
    eventHandler = eh;
  }

  /**
   * @return the service
   */
  public Service getService() {
    return service;
  }

  /**
   * The {@link com.adaptris.core.Service} to execute over all the split messages.
   *
   * @param s the service to set
   */
  public void setService(Service s) {
    this.service = Args.notNull(s, "service");
  }

  /**
   * @return the messageSplitter
   */
  public MessageSplitter getSplitter() {
    return splitter;
  }

  /**
   * The {@link MessageSplitter} implementation to use to split the incoming message.
   *
   * @param ms the messageSplitter to set
   */
  public void setSplitter(MessageSplitter ms) {
    this.splitter = Args.notNull(ms, "splitter");
  }

  /**
   * @return the messageJoiner
   */
  public MessageAggregator getAggregator() {
    return aggregator;
  }

  /**
   * The {@link MessageAggregator} implementation to use to join messages together.
   *
   * @param mj the messageJoiner to set
   */
  public void setAggregator(MessageAggregator mj) {
    this.aggregator = Args.notNull(mj, "aggregator");
  }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.macie2.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Specifies the operator to use in a property-based condition that filters the results of a query for findings. For
 * detailed information and examples of each operator, see <a
 * href="https://docs.aws.amazon.com/macie/latest/user/findings-filter-basics.html">Fundamentals of filtering
 * findings</a> in the <i>Amazon Macie User Guide</i>.
 * </p>
 * <p>
 * NOTE(review): generated by the AWS Java SDK code generator; do not edit by hand.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/macie2-2020-01-01/CriterionAdditionalProperties"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CriterionAdditionalProperties implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The value for the property matches (equals) the specified value. If you specify multiple values, Macie uses OR
     * logic to join the values.
     * </p>
     */
    private java.util.List<String> eq;
    /**
     * <p>
     * The value for the property exclusively matches (equals an exact match for) all the specified values. If you
     * specify multiple values, Amazon Macie uses AND logic to join the values.
     * </p>
     * <p>
     * You can use this operator with the following properties: customDataIdentifiers.detections.arn,
     * customDataIdentifiers.detections.name, resourcesAffected.s3Bucket.tags.key,
     * resourcesAffected.s3Bucket.tags.value, resourcesAffected.s3Object.tags.key,
     * resourcesAffected.s3Object.tags.value, sensitiveData.category, and sensitiveData.detections.type.
     * </p>
     */
    private java.util.List<String> eqExactMatch;
    /**
     * <p>
     * The value for the property is greater than the specified value.
     * </p>
     */
    private Long gt;
    /**
     * <p>
     * The value for the property is greater than or equal to the specified value.
     * </p>
     */
    private Long gte;
    /**
     * <p>
     * The value for the property is less than the specified value.
     * </p>
     */
    private Long lt;
    /**
     * <p>
     * The value for the property is less than or equal to the specified value.
     * </p>
     */
    private Long lte;
    /**
     * <p>
     * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple values,
     * Macie uses OR logic to join the values.
     * </p>
     */
    private java.util.List<String> neq;

    /**
     * Returns the "equals" values (OR semantics across multiple values).
     *
     * @return The value for the property matches (equals) the specified value. If you specify multiple values, Macie
     *         uses OR logic to join the values.
     */
    public java.util.List<String> getEq() {
        return eq;
    }

    /**
     * Sets the "equals" values (OR semantics across multiple values); a null collection clears the list,
     * otherwise a defensive copy is stored.
     *
     * @param eq
     *        The value for the property matches (equals) the specified value. If you specify multiple values, Macie
     *        uses OR logic to join the values.
     */
    public void setEq(java.util.Collection<String> eq) {
        if (eq == null) {
            this.eq = null;
            return;
        }

        this.eq = new java.util.ArrayList<String>(eq);
    }

    /**
     * Appends "equals" values to the existing list (if any). Use {@link #setEq(java.util.Collection)} or
     * {@link #withEq(java.util.Collection)} if you want to override the existing values.
     *
     * @param eq
     *        The value for the property matches (equals) the specified value. If you specify multiple values, Macie
     *        uses OR logic to join the values.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CriterionAdditionalProperties withEq(String... eq) {
        if (this.eq == null) {
            setEq(new java.util.ArrayList<String>(eq.length));
        }
        for (String ele : eq) {
            this.eq.add(ele);
        }
        return this;
    }

    /**
     * Replaces the "equals" values with the given collection (OR semantics across multiple values).
     *
     * @param eq
     *        The value for the property matches (equals) the specified value. If you specify multiple values, Macie
     *        uses OR logic to join the values.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CriterionAdditionalProperties withEq(java.util.Collection<String> eq) {
        setEq(eq);
        return this;
    }

    /**
     * Returns the "exclusively equals all" values (AND semantics across multiple values).
     *
     * @return The value for the property exclusively matches (equals an exact match for) all the specified values. If
     *         you specify multiple values, Amazon Macie uses AND logic to join the values.</p>
     *         <p>
     *         You can use this operator with the following properties: customDataIdentifiers.detections.arn,
     *         customDataIdentifiers.detections.name, resourcesAffected.s3Bucket.tags.key,
     *         resourcesAffected.s3Bucket.tags.value, resourcesAffected.s3Object.tags.key,
     *         resourcesAffected.s3Object.tags.value, sensitiveData.category, and sensitiveData.detections.type.
     */
    public java.util.List<String> getEqExactMatch() {
        return eqExactMatch;
    }

    /**
     * Sets the "exclusively equals all" values (AND semantics across multiple values); a null collection clears the
     * list, otherwise a defensive copy is stored.
     *
     * @param eqExactMatch
     *        The value for the property exclusively matches (equals an exact match for) all the specified values. If
     *        you specify multiple values, Amazon Macie uses AND logic to join the values.</p>
     *        <p>
     *        You can use this operator with the following properties: customDataIdentifiers.detections.arn,
     *        customDataIdentifiers.detections.name, resourcesAffected.s3Bucket.tags.key,
     *        resourcesAffected.s3Bucket.tags.value, resourcesAffected.s3Object.tags.key,
     *        resourcesAffected.s3Object.tags.value, sensitiveData.category, and sensitiveData.detections.type.
     */
    public void setEqExactMatch(java.util.Collection<String> eqExactMatch) {
        if (eqExactMatch == null) {
            this.eqExactMatch = null;
            return;
        }

        this.eqExactMatch = new java.util.ArrayList<String>(eqExactMatch);
    }

    /**
     * Appends "exclusively equals all" values to the existing list (if any). Use
     * {@link #setEqExactMatch(java.util.Collection)} or {@link #withEqExactMatch(java.util.Collection)} if you want to
     * override the existing values.
     *
     * @param eqExactMatch
     *        The value for the property exclusively matches (equals an exact match for) all the specified values. If
     *        you specify multiple values, Amazon Macie uses AND logic to join the values.</p>
     *        <p>
     *        You can use this operator with the following properties: customDataIdentifiers.detections.arn,
     *        customDataIdentifiers.detections.name, resourcesAffected.s3Bucket.tags.key,
     *        resourcesAffected.s3Bucket.tags.value, resourcesAffected.s3Object.tags.key,
     *        resourcesAffected.s3Object.tags.value, sensitiveData.category, and sensitiveData.detections.type.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CriterionAdditionalProperties withEqExactMatch(String... eqExactMatch) {
        if (this.eqExactMatch == null) {
            setEqExactMatch(new java.util.ArrayList<String>(eqExactMatch.length));
        }
        for (String ele : eqExactMatch) {
            this.eqExactMatch.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The value for the property exclusively matches (equals an exact match for) all the specified values. If you
     * specify multiple values, Amazon Macie uses AND logic to join the values.
* </p> * <p> * You can use this operator with the following properties: customDataIdentifiers.detections.arn, * customDataIdentifiers.detections.name, resourcesAffected.s3Bucket.tags.key, * resourcesAffected.s3Bucket.tags.value, resourcesAffected.s3Object.tags.key, * resourcesAffected.s3Object.tags.value, sensitiveData.category, and sensitiveData.detections.type. * </p> * * @param eqExactMatch * The value for the property exclusively matches (equals an exact match for) all the specified values. If * you specify multiple values, Amazon Macie uses AND logic to join the values.</p> * <p> * You can use this operator with the following properties: customDataIdentifiers.detections.arn, * customDataIdentifiers.detections.name, resourcesAffected.s3Bucket.tags.key, * resourcesAffected.s3Bucket.tags.value, resourcesAffected.s3Object.tags.key, * resourcesAffected.s3Object.tags.value, sensitiveData.category, and sensitiveData.detections.type. * @return Returns a reference to this object so that method calls can be chained together. */ public CriterionAdditionalProperties withEqExactMatch(java.util.Collection<String> eqExactMatch) { setEqExactMatch(eqExactMatch); return this; } /** * <p> * The value for the property is greater than the specified value. * </p> * * @param gt * The value for the property is greater than the specified value. */ public void setGt(Long gt) { this.gt = gt; } /** * <p> * The value for the property is greater than the specified value. * </p> * * @return The value for the property is greater than the specified value. */ public Long getGt() { return this.gt; } /** * <p> * The value for the property is greater than the specified value. * </p> * * @param gt * The value for the property is greater than the specified value. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public CriterionAdditionalProperties withGt(Long gt) { setGt(gt); return this; } /** * <p> * The value for the property is greater than or equal to the specified value. * </p> * * @param gte * The value for the property is greater than or equal to the specified value. */ public void setGte(Long gte) { this.gte = gte; } /** * <p> * The value for the property is greater than or equal to the specified value. * </p> * * @return The value for the property is greater than or equal to the specified value. */ public Long getGte() { return this.gte; } /** * <p> * The value for the property is greater than or equal to the specified value. * </p> * * @param gte * The value for the property is greater than or equal to the specified value. * @return Returns a reference to this object so that method calls can be chained together. */ public CriterionAdditionalProperties withGte(Long gte) { setGte(gte); return this; } /** * <p> * The value for the property is less than the specified value. * </p> * * @param lt * The value for the property is less than the specified value. */ public void setLt(Long lt) { this.lt = lt; } /** * <p> * The value for the property is less than the specified value. * </p> * * @return The value for the property is less than the specified value. */ public Long getLt() { return this.lt; } /** * <p> * The value for the property is less than the specified value. * </p> * * @param lt * The value for the property is less than the specified value. * @return Returns a reference to this object so that method calls can be chained together. */ public CriterionAdditionalProperties withLt(Long lt) { setLt(lt); return this; } /** * <p> * The value for the property is less than or equal to the specified value. * </p> * * @param lte * The value for the property is less than or equal to the specified value. */ public void setLte(Long lte) { this.lte = lte; } /** * <p> * The value for the property is less than or equal to the specified value. 
* </p> * * @return The value for the property is less than or equal to the specified value. */ public Long getLte() { return this.lte; } /** * <p> * The value for the property is less than or equal to the specified value. * </p> * * @param lte * The value for the property is less than or equal to the specified value. * @return Returns a reference to this object so that method calls can be chained together. */ public CriterionAdditionalProperties withLte(Long lte) { setLte(lte); return this; } /** * <p> * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple values, * Macie uses OR logic to join the values. * </p> * * @return The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple * values, Macie uses OR logic to join the values. */ public java.util.List<String> getNeq() { return neq; } /** * <p> * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple values, * Macie uses OR logic to join the values. * </p> * * @param neq * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple * values, Macie uses OR logic to join the values. */ public void setNeq(java.util.Collection<String> neq) { if (neq == null) { this.neq = null; return; } this.neq = new java.util.ArrayList<String>(neq); } /** * <p> * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple values, * Macie uses OR logic to join the values. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setNeq(java.util.Collection)} or {@link #withNeq(java.util.Collection)} if you want to override the * existing values. * </p> * * @param neq * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple * values, Macie uses OR logic to join the values. 
* @return Returns a reference to this object so that method calls can be chained together. */ public CriterionAdditionalProperties withNeq(String... neq) { if (this.neq == null) { setNeq(new java.util.ArrayList<String>(neq.length)); } for (String ele : neq) { this.neq.add(ele); } return this; } /** * <p> * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple values, * Macie uses OR logic to join the values. * </p> * * @param neq * The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple * values, Macie uses OR logic to join the values. * @return Returns a reference to this object so that method calls can be chained together. */ public CriterionAdditionalProperties withNeq(java.util.Collection<String> neq) { setNeq(neq); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getEq() != null) sb.append("Eq: ").append(getEq()).append(","); if (getEqExactMatch() != null) sb.append("EqExactMatch: ").append(getEqExactMatch()).append(","); if (getGt() != null) sb.append("Gt: ").append(getGt()).append(","); if (getGte() != null) sb.append("Gte: ").append(getGte()).append(","); if (getLt() != null) sb.append("Lt: ").append(getLt()).append(","); if (getLte() != null) sb.append("Lte: ").append(getLte()).append(","); if (getNeq() != null) sb.append("Neq: ").append(getNeq()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CriterionAdditionalProperties == false) return false; CriterionAdditionalProperties other = (CriterionAdditionalProperties) obj; if (other.getEq() == null ^ this.getEq() == null) return false; if (other.getEq() != null && other.getEq().equals(this.getEq()) == false) return false; if (other.getEqExactMatch() == null ^ this.getEqExactMatch() == null) return false; if (other.getEqExactMatch() != null && other.getEqExactMatch().equals(this.getEqExactMatch()) == false) return false; if (other.getGt() == null ^ this.getGt() == null) return false; if (other.getGt() != null && other.getGt().equals(this.getGt()) == false) return false; if (other.getGte() == null ^ this.getGte() == null) return false; if (other.getGte() != null && other.getGte().equals(this.getGte()) == false) return false; if (other.getLt() == null ^ this.getLt() == null) return false; if (other.getLt() != null && other.getLt().equals(this.getLt()) == false) return false; if (other.getLte() == null ^ this.getLte() == null) return false; if (other.getLte() != null && other.getLte().equals(this.getLte()) == false) return false; if (other.getNeq() == null ^ this.getNeq() == null) return false; if (other.getNeq() != null && 
other.getNeq().equals(this.getNeq()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getEq() == null) ? 0 : getEq().hashCode()); hashCode = prime * hashCode + ((getEqExactMatch() == null) ? 0 : getEqExactMatch().hashCode()); hashCode = prime * hashCode + ((getGt() == null) ? 0 : getGt().hashCode()); hashCode = prime * hashCode + ((getGte() == null) ? 0 : getGte().hashCode()); hashCode = prime * hashCode + ((getLt() == null) ? 0 : getLt().hashCode()); hashCode = prime * hashCode + ((getLte() == null) ? 0 : getLte().hashCode()); hashCode = prime * hashCode + ((getNeq() == null) ? 0 : getNeq().hashCode()); return hashCode; } @Override public CriterionAdditionalProperties clone() { try { return (CriterionAdditionalProperties) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.macie2.model.transform.CriterionAdditionalPropertiesMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.ventanas;

import com.clases.Centro;
import java.math.BigDecimal;
import javax.swing.JOptionPane;

/**
 * Registration form ("alta") for work centres. Collects the centre's name,
 * address and phone number, builds a {@link Centro} and asks it to persist
 * itself via {@code Centro.gestionCentros()}.
 *
 * @author bcastrof
 */
public class AltaCentros extends javax.swing.JFrame {

    /**
     * Creates new form AltaCentros.
     */
    public AltaCentros() {
        initComponents();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        jLabel7 = new javax.swing.JLabel();
        nombre = new javax.swing.JTextField();
        calle = new javax.swing.JTextField();
        numero = new javax.swing.JTextField();
        ciudad = new javax.swing.JTextField();
        codigoPostal = new javax.swing.JTextField();
        provincia = new javax.swing.JTextField();
        telefono = new javax.swing.JTextField();
        alta = new javax.swing.JButton();
        limpiar = new javax.swing.JButton();
        volver = new javax.swing.JButton();
        jLabel8 = new javax.swing.JLabel();
        jLabel11 = new javax.swing.JLabel();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setTitle("Alta Centros");
        getContentPane().setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());

        jLabel1.setText("Nombre: *");
        getContentPane().add(jLabel1, new org.netbeans.lib.awtextra.AbsoluteConstraints(130, 110, -1, 26));
        jLabel1.getAccessibleContext().setAccessibleDescription("");

        jLabel2.setText("Calle: *");
        getContentPane().add(jLabel2, new org.netbeans.lib.awtextra.AbsoluteConstraints(130, 160, -1, 26));
        jLabel2.getAccessibleContext().setAccessibleDescription("");

        jLabel3.setText("Numero: *");
        getContentPane().add(jLabel3, new org.netbeans.lib.awtextra.AbsoluteConstraints(130, 210, -1, 26));
        jLabel3.getAccessibleContext().setAccessibleDescription("");

        jLabel4.setText("Ciudad: *");
        getContentPane().add(jLabel4, new org.netbeans.lib.awtextra.AbsoluteConstraints(130, 260, -1, 26));
        jLabel4.getAccessibleContext().setAccessibleDescription("");

        jLabel5.setText("Codigo Postal: *");
        getContentPane().add(jLabel5, new org.netbeans.lib.awtextra.AbsoluteConstraints(110, 310, -1, 26));
        jLabel5.getAccessibleContext().setAccessibleDescription("");

        jLabel6.setText("Provincia: *");
        getContentPane().add(jLabel6, new org.netbeans.lib.awtextra.AbsoluteConstraints(130, 360, -1, 26));
        jLabel6.getAccessibleContext().setAccessibleDescription("");

        jLabel7.setText("Telefono: *");
        getContentPane().add(jLabel7, new org.netbeans.lib.awtextra.AbsoluteConstraints(130, 410, -1, 26));
        jLabel7.getAccessibleContext().setAccessibleDescription("");

        nombre.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                nombreActionPerformed(evt);
            }
        });
        getContentPane().add(nombre, new org.netbeans.lib.awtextra.AbsoluteConstraints(210, 110, 184, 26));
        nombre.getAccessibleContext().setAccessibleDescription("");

        getContentPane().add(calle, new org.netbeans.lib.awtextra.AbsoluteConstraints(210, 160, 184, 26));
        calle.getAccessibleContext().setAccessibleDescription("");

        getContentPane().add(numero, new org.netbeans.lib.awtextra.AbsoluteConstraints(210, 210, 184, 26));
        numero.getAccessibleContext().setAccessibleDescription("");

        getContentPane().add(ciudad, new org.netbeans.lib.awtextra.AbsoluteConstraints(210, 260, 184, 26));
        ciudad.getAccessibleContext().setAccessibleDescription("");

        getContentPane().add(codigoPostal, new org.netbeans.lib.awtextra.AbsoluteConstraints(210, 310, 184, 26));
        codigoPostal.getAccessibleContext().setAccessibleDescription("");

        getContentPane().add(provincia, new org.netbeans.lib.awtextra.AbsoluteConstraints(210, 360, 184, 26));
        provincia.getAccessibleContext().setAccessibleDescription("");

        getContentPane().add(telefono, new org.netbeans.lib.awtextra.AbsoluteConstraints(210, 410, 184, 26));
        telefono.getAccessibleContext().setAccessibleDescription("");

        alta.setText("Alta");
        alta.setPreferredSize(new java.awt.Dimension(97, 23));
        alta.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                altaActionPerformed(evt);
            }
        });
        getContentPane().add(alta, new org.netbeans.lib.awtextra.AbsoluteConstraints(90, 480, -1, -1));

        limpiar.setText("Limpiar");
        limpiar.setPreferredSize(new java.awt.Dimension(97, 23));
        limpiar.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                limpiarActionPerformed(evt);
            }
        });
        getContentPane().add(limpiar, new org.netbeans.lib.awtextra.AbsoluteConstraints(230, 480, -1, -1));

        volver.setText("Volver");
        volver.setPreferredSize(new java.awt.Dimension(97, 23));
        volver.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                volverActionPerformed(evt);
            }
        });
        getContentPane().add(volver, new org.netbeans.lib.awtextra.AbsoluteConstraints(370, 480, -1, -1));

        jLabel8.setFont(new java.awt.Font("Andalus", 1, 24)); // NOI18N
        jLabel8.setText("ALTA DE CENTROS DE TRABAJO");
        getContentPane().add(jLabel8, new org.netbeans.lib.awtextra.AbsoluteConstraints(90, 30, -1, -1));

        jLabel11.setForeground(new java.awt.Color(255, 51, 51));
        jLabel11.setText("* Campos obligatorios");
        getContentPane().add(jLabel11, new org.netbeans.lib.awtextra.AbsoluteConstraints(420, 510, -1, -1));

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Handles the "Alta" button: validates the numeric fields, persists a new
     * {@link Centro} and informs the user of the result. The form is cleared
     * only when the record was stored successfully.
     */
    private void altaActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_altaActionPerformed
        String name = nombre.getText();
        String street = calle.getText();
        String city = ciudad.getText();
        String province = provincia.getText();

        BigDecimal number;
        BigDecimal postalCode;
        BigDecimal phone;
        try {
            // These are free-form text fields; parse defensively instead of letting
            // NumberFormatException escape and kill the event-dispatch handler.
            number = new BigDecimal(numero.getText().trim());
            postalCode = new BigDecimal(codigoPostal.getText().trim());
            phone = new BigDecimal(telefono.getText().trim());
        } catch (NumberFormatException ex) {
            JOptionPane.showMessageDialog(this,
                    "Numero, Codigo Postal y Telefono deben ser valores numericos",
                    "Alta", JOptionPane.ERROR_MESSAGE);
            return;
        }

        Centro c = new Centro(name, street, number, city, postalCode, province, phone);
        boolean guardado = c.gestionCentros();
        if (guardado) {
            JOptionPane.showMessageDialog(null, "Centro dado de alta correctamente", "Alta",
                    JOptionPane.INFORMATION_MESSAGE);
            // Clear only after a successful save so the user does not lose
            // their input when the operation fails.
            limpiarFormulario();
        } else {
            JOptionPane.showMessageDialog(null, "No se pudo dar de alta el centro", "Alta",
                    JOptionPane.ERROR_MESSAGE);
        }
    }//GEN-LAST:event_altaActionPerformed

    /** Handles the "Limpiar" button: resets every input field. */
    private void limpiarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_limpiarActionPerformed
        limpiarFormulario();
    }//GEN-LAST:event_limpiarActionPerformed

    private void nombreActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nombreActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_nombreActionPerformed

    /** Handles the "Volver" button: opens the CentrosUd window and hides this one. */
    private void volverActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_volverActionPerformed
        CentrosUd abrir = new CentrosUd();
        abrir.setVisible(true);
        this.setVisible(false);
    }//GEN-LAST:event_volverActionPerformed

    /** Empties every text field of the form. */
    private void limpiarFormulario() {
        nombre.setText("");
        calle.setText("");
        numero.setText("");
        ciudad.setText("");
        codigoPostal.setText("");
        provincia.setText("");
        telefono.setText("");
    }

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(AltaCentros.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(AltaCentros.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(AltaCentros.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(AltaCentros.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new AltaCentros().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton alta;
    private javax.swing.JTextField calle;
    private javax.swing.JTextField ciudad;
    private javax.swing.JTextField codigoPostal;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel11;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JLabel jLabel8;
    private javax.swing.JButton limpiar;
    private javax.swing.JTextField nombre;
    private javax.swing.JTextField numero;
    private javax.swing.JTextField provincia;
    private javax.swing.JTextField telefono;
    private javax.swing.JButton volver;
    // End of variables declaration//GEN-END:variables
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.forms.common.rendering.client.widgets.integerBox; import com.google.gwt.event.dom.client.KeyCodes; import com.google.gwt.event.logical.shared.ValueChangeHandler; import com.google.gwt.event.shared.GwtEvent; import com.google.gwt.user.client.ui.Widget; import com.google.gwtmockito.GwtMock; import com.google.gwtmockito.GwtMockitoTestRunner; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.any; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(GwtMockitoTestRunner.class) public class IntegerBoxTest { public static final int ARROW_LEFT_KEYCODE = 37; public static final int ARROW_RIGHT_KEYCODE = 39; public static final int PERIOD_KEYCODE = 190; public static final Long TEST_VALUE_LONG = 27l; public static final String TEST_VALUE_STRING = TEST_VALUE_LONG.toString(); protected IntegerBoxView view; @GwtMock protected GwtEvent<?> event; @GwtMock protected Widget viewWidget; protected IntegerBox integerBox; @Before public void setup() { view = mock(IntegerBoxView.class); 
when(view.asWidget()).thenReturn(viewWidget); integerBox = new IntegerBox(view); verify(view).setPresenter(integerBox); integerBox.asWidget(); verify(view).asWidget(); } @Test public void testSetValueWithoutEvents() { integerBox.setValue(TEST_VALUE_LONG); verify(view).setValue(TEST_VALUE_STRING); } @Test public void testSetValueWithEvents() { integerBox = spy(integerBox); integerBox.setValue(TEST_VALUE_LONG, true); verify(view).setValue(TEST_VALUE_STRING); verify(integerBox).notifyValueChange(TEST_VALUE_STRING); } @Test public void testChangeValueToNull() { integerBox = spy(integerBox); when(view.getTextValue()).thenReturn(TEST_VALUE_STRING); integerBox.setValue(null, true); verify(integerBox).notifyValueChange(null); } @Test public void testSetValueToNullWhileItIsAlreadyNull() { integerBox = spy(integerBox); integerBox.setValue(null, true); verify(integerBox, times(0)).notifyValueChange(null); } @Test public void testKeyCodeLetter() { testKeyCode(KeyCodes.KEY_A, false, true); } @Test public void testKeyCodeSpace() { testKeyCode(KeyCodes.KEY_SPACE, false, true); } @Test public void testKeyCodePeriod() { testKeyCode(PERIOD_KEYCODE, false, true); } @Test public void testKeyCodeNumPadPeriod() { testKeyCode(KeyCodes.KEY_NUM_PERIOD, false, true); } @Test public void testKeyCodeDigit() { testKeyCode(KeyCodes.KEY_ONE, false, false); } @Test public void testKeyCodeNumPadDigit() { testKeyCode(KeyCodes.KEY_NUM_ONE, false, false); } @Test public void testKeyCodeBackSpace() { testKeyCode(KeyCodes.KEY_BACKSPACE, false, false); } @Test public void testKeyCodeLeftArrow() { testKeyCode(ARROW_LEFT_KEYCODE, false, false); } @Test public void testKeyCodeRightArrow() { testKeyCode(ARROW_RIGHT_KEYCODE, false, false); } @Test public void testKeyCodeTab() { testKeyCode(KeyCodes.KEY_TAB, false, false); } @Test public void testKeyCodeShiftTab() { testKeyCode(KeyCodes.KEY_TAB, true, false); } private void testKeyCode(int keyCode, boolean isShiftPressed, boolean expectInvalid) { boolean 
result = integerBox.isInvalidKeyCode(keyCode, isShiftPressed); assertEquals(result, expectInvalid); } @Test public void testEvents() { ValueChangeHandler handler = mock(ValueChangeHandler.class); integerBox.addValueChangeHandler(handler); verify(view, atLeast(1)).asWidget(); verify(viewWidget).addHandler(any(), any()); integerBox.fireEvent(event); verify(view, atLeast(2)).asWidget(); verify(viewWidget).fireEvent(event); } @Test public void testEnableTrue() { testEnable(true); } @Test public void testEnableFalse() { testEnable(false); } private void testEnable(boolean enable) { integerBox.setEnabled(enable); verify(view).setEnabled(enable); } @Test public void testSetPlaceholder() { String placeholder = "Random placeholder"; integerBox.setPlaceholder(placeholder); verify(view).setPlaceholder(eq(placeholder)); } @Test public void testSetId() { String id = "field_id"; integerBox.setId(id); verify(view).setId(eq(id)); } @Test public void testSetMaxLength() { int maxLength = 10; integerBox.setMaxLength(maxLength); verify(view).setMaxLength(eq(maxLength)); } }
package view;

import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.function.*;

import javax.swing.*;
import javax.swing.event.*;

import control.*;
import model.*;

/**
 * Panel for configuring the maze.
 * @author cryingshadow
 */
public class MazeSettingsDisplay extends JPanel {

    /**
     * For serialization.
     */
    private static final long serialVersionUID = -73323973550133352L;

    /**
     * @param title The title.
     * @param comp The component.
     * @return The specified component with a title label in front of it.
     */
    private static JPanel addTitle(final String title, final Component comp) {
        final JPanel res = new JPanel(new GridBagLayout());
        final GridBagConstraints c = new GridBagConstraints();
        c.gridx = 0;
        c.gridy = 0;
        c.weightx = 0.0;
        c.fill = GridBagConstraints.HORIZONTAL;
        res.add(new JLabel(title + ": "), c);
        c.gridx = 1;
        c.weightx = 1.0;
        res.add(comp, c);
        return res;
    }

    /**
     * @param title The title.
     * @param init The initial value.
     * @param min The minimum value.
     * @param max The maximum value.
     * @param step The step width.
     * @param setter The setter.
     * @return A panel with a spinner having an integer number model specified by init, min, max, and step. Moreover,
     *         the spinner has a label in front of it with the specified title and changes to the number model invoke
     *         the specified setter.
     */
    private static JPanel createSpinnerPanel(
        final String title,
        final int init,
        final int min,
        final int max,
        final int step,
        final Consumer<Integer> setter
    ) {
        final JSpinner spinner = new JSpinner(new SpinnerNumberModel(init, min, max, step));
        // ChangeListener is a functional interface; a lambda is equivalent to the
        // anonymous class and matches the Java 8 style already used in this file.
        spinner.addChangeListener(e -> setter.accept((Integer)spinner.getModel().getValue()));
        return MazeSettingsDisplay.addTitle(title, spinner);
    }

    /**
     * The competition.
     */
    private final Competition competition;

    /**
     * The competition control.
     */
    private final CompetitionControl competitionControl;

    /**
     * The settings.
     */
    private final Settings settings;

    /**
     * @param settings The settings.
     * @param competition The competition.
     * @param competitionControl The competition control.
     */
    public MazeSettingsDisplay(
        final Settings settings,
        final Competition competition,
        final CompetitionControl competitionControl
    ) {
        this.settings = settings;
        this.competition = competition;
        this.competitionControl = competitionControl;
        this.setLayout(new GridBagLayout());
        final GridBagConstraints c = new GridBagConstraints();
        c.gridx = 0;
        c.fill = GridBagConstraints.HORIZONTAL;
        c.weightx = 1.0;
        c.ipady = 3;
        this.setBorder(BorderFactory.createTitledBorder("Maze"));
        final JSlider zoom = new JSlider(Settings.MIN_FIELD_SIZE, Settings.MAX_FIELD_SIZE, Settings.NORMAL_FIELD_SIZE);
        zoom.setMajorTickSpacing(10);
        zoom.setPaintTicks(true);
        zoom.setPaintLabels(true);
        zoom.setLabelTable(zoom.createStandardLabels(50, 50));
        // setZoom may throw; surface the problem to the user instead of losing it on the EDT.
        zoom.addChangeListener(event -> {
            try {
                this.settings.setZoom(zoom.getValue());
            } catch (final Exception e) {
                ExceptionDisplay.showException(this, e);
            }
        });
        final JCheckBox arena = new JCheckBox("Arena", this.settings.isArena());
        arena.addActionListener(e -> this.settings.setArena(arena.isSelected()));
        final JButton generateButton = new JButton("NEW MAZE");
        generateButton.addActionListener(event -> {
            try {
                this.competitionControl.generateMaze();
            } catch (final Exception e) {
                ExceptionDisplay.showException(this, e);
            }
        });
        // Disable maze generation while a competition is running.
        this.competition.addChangeListener(e -> generateButton.setEnabled(!this.competition.isRunning()));
        c.gridy = 0;
        this.add(MazeSettingsDisplay.addTitle("Zoom", zoom), c);
        c.gridy = 1;
        this.add(
            MazeSettingsDisplay.createSpinnerPanel(
                "Width",
                this.settings.getWidth(),
                Settings.MINIMUM_DIMENSION,
                Settings.MAXIMUM_DIMENSION,
                1,
                this.settings::setWidth
            ),
            c
        );
        c.gridy = 2;
        this.add(
            MazeSettingsDisplay.createSpinnerPanel(
                "Height",
                this.settings.getHeight(),
                Settings.MINIMUM_DIMENSION,
                Settings.MAXIMUM_DIMENSION,
                1,
                this.settings::setHeight
            ),
            c
        );
        c.gridy = 3;
        this.add(
            MazeSettingsDisplay.createSpinnerPanel(
                "Walls",
                this.settings.getWalls(),
                0,
                (Settings.MAXIMUM_DIMENSION * Settings.MAXIMUM_DIMENSION) / 2,
                1,
                this.settings::setWalls
            ),
            c
        );
        c.gridy = 4;
        this.add(arena, c);
        c.gridy = 5;
        this.add(
            MazeSettingsDisplay.createSpinnerPanel(
                "Food per Snake",
                this.settings.getFoodPerSnake(),
                1,
                10,
                1,
                this.settings::setFoodPerSnake
            ),
            c
        );
        c.gridy = 6;
        this.add(
            MazeSettingsDisplay.createSpinnerPanel(
                "Initial Snake Length",
                this.settings.getInitialSnakeLength(),
                1,
                100,
                1,
                this.settings::setInitialSnakeLength
            ),
            c
        );
        c.gridy = 7;
        this.add(
            MazeSettingsDisplay.createSpinnerPanel(
                "Maximum Hunger",
                this.settings.getMaxHunger().orElse(0),
                0,
                Settings.MAXIMUM_DIMENSION * Settings.MAXIMUM_DIMENSION,
                1,
                this::setMaxHunger
            ),
            c
        );
        c.gridy = 8;
        this.add(generateButton, c);
    }

    /**
     * @param maxHunger The maximum hunger a snake can survive; 0 means unlimited (empty Optional).
     */
    private void setMaxHunger(final int maxHunger) {
        this.settings.setMaxHunger(maxHunger == 0 ? Optional.empty() : Optional.of(maxHunger));
    }

}
package ohrm.SpatialMachines.tile;

import cofh.api.energy.EnergyStorage;
import cofh.api.energy.IEnergyHandler;
import cofh.api.energy.IEnergyReceiver;
import cofh.api.energy.IEnergyStorage;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.common.util.ForgeDirection;

/**
 * Base tile entity for machines that consume RF energy in fixed-length work cycles.
 * A cycle's full energy cost is extracted up front when the cycle starts; subclasses
 * define the cost, the length, the requirements, and what happens at the start and
 * end of a cycle. cycledTicks == -1 means "idle"; >= 0 means "mid-cycle".
 */
public abstract class TileEnergyBasic extends TileInventory implements IEnergyHandler {

    /**
     * The number of ticks since the cycle started
     */
    public int cycledTicks;

    /** Internal RF buffer; filled via receiveEnergy(), drained when a cycle starts. */
    protected EnergyStorage storage;

    // NOTE(review): never read or written in this class — presumably used by
    // subclasses or GUI code; confirm before removing.
    public int cycleLength;

    public TileEnergyBasic() {
        // NOTE(review): calls the overridable getMaxEnergyStored() from the
        // constructor — subclasses must return a constant here, since their own
        // fields are not initialized yet.
        storage = new EnergyStorage(getMaxEnergyStored());
        //So the machine doesn't start straight away set it to -1
        // (fields default to 0, so this yields -1 == idle)
        cycledTicks -= 1;
    }

    // Internal energy methods

    /**
     * The maximum energy that can be stored
     * @return The amount of energy that can be stored
     */
    protected abstract int getMaxEnergyStored();

    /**
     * Returns the energy cost to run a cycle. Consumed at the beginning of a cycle.
     * @return Amount of energy needed to start a cycle.
     */
    public abstract int getCycleEnergyCost();

    /**
     * @return The length of a cycle, in ticks.
     */
    public abstract int getCycleLength();

    /**
     * Check material/non-energy requirements for starting a cycle.
     * Do not consume resources here
     * @return True if a cycle can start/continue, false otherwise.
     */
    public abstract boolean canBeginCycle();

    /**
     * Perform any necessary operations at the start of a cycle.
     * Do not consume resources here.
     * Called every tick to make sure all requirements are still there
     */
    public abstract void onPoweredCycleBegin();

    /**
     * Perform any necessary operations at the end of a cycle.
     * Consume and produce resources here.
     */
    public abstract void onPoweredCycleEnd();

    /**
     * Gets how many ticks into the cycle we currently are
     * @return cycleTicks (-1 when idle)
     */
    public int getCurrentCycledTicks(){
        return cycledTicks;
    }

    /**
     * Gets whether the machine is active
     * @return true if cycled ticks is 0 or greater
     */
    public boolean isActive(){
        return cycledTicks >= 0;
    }

    /**
     * Get how close to completion the cycle is
     * @return float value up to 1 showing how close to completion it is (0 when idle)
     */
    public float getCycleCompletion(){
        if(cycledTicks < 0)
            return 0f;
        else
            return (float)cycledTicks/(float)getCycleLength();
    }

    /* NBT reading
     * Read the energy levels in the block
     * and the current cycled ticks
     */
    @Override
    public void readFromNBT(NBTTagCompound nbt) {
        super.readFromNBT(nbt);
        // Keys are optional so tiles saved by older versions still load.
        if(nbt.hasKey("storage"))
            this.storage.readFromNBT(nbt.getCompoundTag("storage"));
        if(nbt.hasKey("cycledTicks"))
            cycledTicks = nbt.getInteger("cycledTicks");
    }

    /* NBT writing
     * write the energy levels in the block
     * and the current cycled ticks
     */
    @Override
    public void writeToNBT(NBTTagCompound nbt) {
        super.writeToNBT(nbt);
        NBTTagCompound energyNBTTag = new NBTTagCompound();
        this.storage.writeToNBT(energyNBTTag);
        nbt.setTag("storage", energyNBTTag);
        nbt.setInteger("cycledTicks", cycledTicks);
    }

    /**
     * Server-side cycle driver, run once per tick.
     * Mid-cycle: advance; abort (losing the up-front energy) if requirements vanish,
     * or finish via onPoweredCycleEnd() once getCycleLength() ticks have elapsed.
     * Idle: if enough energy is buffered and requirements hold, pay the cycle cost
     * and start a new cycle. Block updates are pushed on every state change.
     */
    @Override
    public void updateEntity() {
        super.updateEntity();
        if(!worldObj.isRemote){
            if(cycledTicks >= 0){
                cycledTicks++;

                if(!canBeginCycle()){
                    // Requirements disappeared mid-cycle: abort.
                    // NOTE(review): the energy paid at cycle start is not refunded.
                    cycledTicks = -1;
                    this.worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
                }else if(cycledTicks >= getCycleLength()){
                    onPoweredCycleEnd();
                    cycledTicks = -1;
                    this.worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
                }
            }

            if(cycledTicks < 0 && getCycleEnergyCost() <= storage.getEnergyStored() && canBeginCycle()){
                // Pay the full cost before the cycle begins.
                this.storage.extractEnergy(getCycleEnergyCost(), false);
                cycledTicks = 0;
                onPoweredCycleBegin();
                this.worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
            }
        }
    }

    /* IEnergyConnection */
    /** Energy may be piped in from any side. */
    @Override
    public boolean canConnectEnergy(ForgeDirection from) {
        return true;
    }

    /* IEnergyReceiver */
    /** Delegates to the internal buffer; the side is irrelevant (all sides accepted). */
    @Override
    public int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate) {
        return storage.receiveEnergy(maxReceive, simulate);
    }

    /* IEnergyProvider */
    @Override
    public int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate) {
        return storage.extractEnergy(maxExtract, simulate);
    }

    /* IEnergyReceiver and IEnergyProvider */
    @Override
    public int getEnergyStored(ForgeDirection from) {
        return storage.getEnergyStored();
    }

    @Override
    public int getMaxEnergyStored(ForgeDirection from) {
        return storage.getMaxEnergyStored();
    }
}
/*
 * Copyright 2013-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.cloudfoundry.client.v2;

import org.cloudfoundry.AbstractIntegrationTest;
import org.cloudfoundry.ApplicationUtils;
import org.cloudfoundry.ServiceBrokerUtils;
import org.cloudfoundry.client.CloudFoundryClient;
import org.cloudfoundry.client.v2.servicebrokers.CreateServiceBrokerRequest;
import org.cloudfoundry.client.v2.servicebrokers.DeleteServiceBrokerRequest;
import org.cloudfoundry.client.v2.servicebrokers.GetServiceBrokerRequest;
import org.cloudfoundry.client.v2.servicebrokers.ListServiceBrokersRequest;
import org.cloudfoundry.client.v2.servicebrokers.UpdateServiceBrokerRequest;
import org.cloudfoundry.client.v2.shareddomains.ListSharedDomainsRequest;
import org.cloudfoundry.client.v2.shareddomains.SharedDomainResource;
import org.cloudfoundry.client.v2.spaces.CreateSpaceRequest;
import org.cloudfoundry.client.v2.spaces.CreateSpaceResponse;
import org.cloudfoundry.util.PaginationUtils;
import org.cloudfoundry.util.ResourceUtils;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import reactor.core.Exceptions;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

import java.io.IOException;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Optional;

import static org.assertj.core.api.Assertions.assertThat;
import static org.cloudfoundry.ServiceBrokerUtils.createServiceBroker;
import static org.cloudfoundry.ServiceBrokerUtils.deleteServiceBroker;
import static org.cloudfoundry.util.tuple.TupleUtils.function;

/**
 * Integration tests for the Cloud Foundry v2 service-broker endpoints. Each
 * lifecycle test pushes a real test broker application into a freshly created
 * space, exercises one CRUD operation, verifies the result by listing brokers
 * by name, and cleans the broker application up afterwards. All blocking
 * waits use a five-minute budget because pushing the broker app is slow.
 */
public final class ServiceBrokersTest extends AbstractIntegrationTest {

    @Autowired
    private CloudFoundryClient cloudFoundryClient;

    @Autowired
    private Mono<String> organizationId;

    // Pre-provisioned broker shared by the read-only tests (get/list).
    @Autowired
    private Mono<String> serviceBrokerId;

    @Autowired
    private String serviceBrokerName;

    @Test
    public void create() {
        String planName = this.nameFactory.getPlanName();
        String serviceBrokerName = this.nameFactory.getServiceBrokerName();
        String serviceName = this.nameFactory.getServiceName();
        String spaceName = this.nameFactory.getSpaceName();

        // The broker implementation ships as a jar on the test classpath.
        Path application;
        try {
            application = new ClassPathResource("test-service-broker.jar").getFile().toPath();
        } catch (IOException e) {
            throw Exceptions.propagate(e);
        }

        // Push the broker application into a new space and wait for it to be routable.
        ApplicationUtils.ApplicationMetadata applicationMetadata = this.organizationId
            .flatMap(organizationId -> createSpaceId(this.cloudFoundryClient, organizationId, spaceName))
            .flatMap(spaceId -> ServiceBrokerUtils.pushServiceBrokerApplication(this.cloudFoundryClient, application, this.nameFactory, planName, serviceName, spaceId))
            .block(Duration.ofMinutes(5));

        // Register the broker, then confirm exactly one broker with that name exists.
        this.cloudFoundryClient.serviceBrokers()
            .create(CreateServiceBrokerRequest.builder()
                .authenticationPassword("test-authentication-password")
                .authenticationUsername("test-authentication-username")
                .brokerUrl(applicationMetadata.uri)
                .name(serviceBrokerName)
                .spaceId(applicationMetadata.spaceId)
                .build())
            .flatMapMany(response -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.serviceBrokers()
                    .list(ListServiceBrokersRequest.builder()
                        .name(serviceBrokerName)
                        .page(page)
                        .build())))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));

        // Clean up the pushed broker application.
        deleteServiceBroker(this.cloudFoundryClient, applicationMetadata.applicationId)
            .block(Duration.ofMinutes(5));
    }

    @Test
    public void delete() {
        String planName = this.nameFactory.getPlanName();
        String serviceBrokerName = this.nameFactory.getServiceBrokerName();
        String serviceName = this.nameFactory.getServiceName();
        String spaceName = this.nameFactory.getSpaceName();

        // Provision a dedicated broker to delete.
        ServiceBrokerUtils.ServiceBrokerMetadata serviceBrokerMetadata = this.organizationId
            .flatMap(organizationId -> createSpaceId(this.cloudFoundryClient, organizationId, spaceName))
            .flatMap(spaceId -> createServiceBroker(this.cloudFoundryClient, this.nameFactory, planName, serviceBrokerName, serviceName, spaceId, true))
            .block(Duration.ofMinutes(5));

        // Delete it, then expect the listing by name to be empty (no onNext).
        this.cloudFoundryClient.serviceBrokers()
            .delete(DeleteServiceBrokerRequest.builder()
                .serviceBrokerId(serviceBrokerMetadata.serviceBrokerId)
                .build())
            .flatMapMany(response -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.serviceBrokers()
                    .list(ListServiceBrokersRequest.builder()
                        .name(serviceBrokerName)
                        .page(page)
                        .build())))
            .as(StepVerifier::create)
            .expectComplete()
            .verify(Duration.ofMinutes(5));

        deleteServiceBroker(this.cloudFoundryClient, serviceBrokerMetadata.applicationMetadata.applicationId)
            .block(Duration.ofMinutes(5));
    }

    @Test
    public void get() {
        // Fetch the shared broker by id and check its name round-trips.
        this.serviceBrokerId
            .flatMap(serviceBrokerId -> this.cloudFoundryClient.serviceBrokers()
                .get(GetServiceBrokerRequest.builder()
                    .serviceBrokerId(serviceBrokerId)
                    .build()))
            .as(StepVerifier::create)
            .assertNext(serviceBroker -> assertThat(ResourceUtils.getEntity(serviceBroker).getName()).isEqualTo(this.serviceBrokerName))
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void list() {
        // The shared broker must be discoverable via a name-filtered listing.
        PaginationUtils
            .requestClientV2Resources(page -> this.cloudFoundryClient.serviceBrokers()
                .list(ListServiceBrokersRequest.builder()
                    .name(this.serviceBrokerName)
                    .page(page)
                    .build()))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void update() {
        String planName = this.nameFactory.getPlanName();
        String serviceBrokerName1 = this.nameFactory.getServiceBrokerName();
        String serviceBrokerName2 = this.nameFactory.getServiceBrokerName();
        String serviceName = this.nameFactory.getServiceName();
        String spaceName = this.nameFactory.getSpaceName();

        // Provision a broker under the first name.
        ServiceBrokerUtils.ServiceBrokerMetadata serviceBrokerMetadata = this.organizationId
            .flatMap(organizationId -> createSpaceId(this.cloudFoundryClient, organizationId, spaceName))
            .flatMap(spaceId -> createServiceBroker(this.cloudFoundryClient, this.nameFactory, planName, serviceBrokerName1, serviceName, spaceId, true))
            .block(Duration.ofMinutes(5));

        // Rename it and confirm it is now listed under the second name.
        this.cloudFoundryClient.serviceBrokers()
            .update(UpdateServiceBrokerRequest.builder()
                .serviceBrokerId(serviceBrokerMetadata.serviceBrokerId)
                .name(serviceBrokerName2)
                .build())
            .flatMapMany(serviceBrokerId -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.serviceBrokers()
                    .list(ListServiceBrokersRequest.builder()
                        .name(serviceBrokerName2)
                        .page(page)
                        .build())))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));

        deleteServiceBroker(this.cloudFoundryClient, serviceBrokerMetadata.applicationMetadata.applicationId)
            .block(Duration.ofMinutes(5));
    }

    /** Creates a space in the given organization and returns its id. */
    private static Mono<String> createSpaceId(CloudFoundryClient cloudFoundryClient, String organizationId, String spaceName) {
        return requestCreateSpace(cloudFoundryClient, organizationId, spaceName)
            .map(ResourceUtils::getId);
    }

    private static Mono<CreateSpaceResponse> requestCreateSpace(CloudFoundryClient cloudFoundryClient, String organizationId, String spaceName) {
        return cloudFoundryClient.spaces()
            .create(CreateSpaceRequest.builder()
                .name(spaceName)
                .organizationId(organizationId)
                .build());
    }

}
package com.jivesoftware.authHelper.utils; import com.carrotsearch.junitbenchmarks.BenchmarkOptions; import com.carrotsearch.junitbenchmarks.BenchmarkRule; import com.carrotsearch.junitbenchmarks.annotation.AxisRange; import com.carrotsearch.junitbenchmarks.annotation.BenchmarkHistoryChart; import com.carrotsearch.junitbenchmarks.annotation.BenchmarkMethodChart; import com.carrotsearch.junitbenchmarks.annotation.LabelType; import com.jivesoftware.authHelper.consts.SecurityLogType; import junit.framework.TestCase; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager; import org.apache.commons.httpclient.UsernamePasswordCredentials; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.params.HttpConnectionManagerParams; import org.json.JSONException; import org.json.JSONObject; import org.junit.Before; import org.junit.FixMethodOrder; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.runner.JUnitCore; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.junit.runners.MethodSorters; import javax.net.ssl.SSLException; import java.io.IOException; import java.security.Provider; import java.security.Security; import java.util.Comparator; import java.util.SortedSet; import java.util.TreeSet; /** * Created with IntelliJ IDEA. * User: dovamir * Date: 4/29/14 * Time: 7:21 PM * * * to make these tests relevant , they test real URLs on the web. Of course the drawback * is that if these urls change the tests will fail . * the tests to NTLM, kerberos and proxy have been tested internally but are not fully implemented here * because I could not find stable and publically available endpoints to test. 
*/ @RunWith(JUnit4.class) @FixMethodOrder(MethodSorters.NAME_ASCENDING) @BenchmarkOptions(benchmarkRounds = 1, warmupRounds = 0, concurrency = BenchmarkOptions.CONCURRENCY_AVAILABLE_CORES) public class AuthUtilsTest extends TestCase { HttpClient client; @Rule public org.junit.rules.TestRule benchmarkRun = new BenchmarkRule(); public static void main(String[] args) throws Exception { JUnitCore.main( "com.jivesoftware.authHelper.utils.AuthUtilsTest"); } @Before public void setUp() throws Exception { super.setUp(); MultiThreadedHttpConnectionManager connManag = new MultiThreadedHttpConnectionManager(); HttpConnectionManagerParams managParams = connManag.getParams(); managParams.setConnectionTimeout(10000); // 1 managParams.setSoTimeout(10000); //2 client = new HttpClient(connManag); client.getParams().setParameter("http.connection-manager.timeout", new Long(10000)); //3 } @Override protected void tearDown() throws Exception { client = null; super.tearDown(); } @Test public void testGzipedResponseAsJson() throws IOException { String url = "http://api.stackexchange.com/2.2/questions?site=stackoverflow"; String respose = executeRequestReturnResponseAsString(url); assertNotNull("Should return a response", respose); assertTrue("response should be json ", isJSONValid(respose)); } @Test public void testBasicAuth() throws IOException { String url = "http://browserspy.dk/password-ok.php"; int respose1 = executeRequestReturnStatus(url); assertEquals("Should return a 401 response", 401, respose1); CredentialsUtils.setBasicAuthCredentials(client, new UsernamePasswordCredentials("test", "test")); int respose2 = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose2); } /* some sites to test ssl certificates: https://www.ssllabs.com/ssltest/ http://www.digicert.com/help/ http://www.sslshopper.com/ssl-checker.html http://testssl.disig.sk/index.en.html */ @Test() public void testJDKDefaultSSLtoValidCert() throws IOException { String url = 
"https://www.google.com/"; //valid certificate SSLUtils.trustAllSSLCertificates(); int respose2 = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose2); } @Test() public void testSSLTrustAlltoValidCert() throws IOException { String url = "https://google.com/"; //invalid certificate SSLUtils.trustAllSSLCertificates(); int respose1 = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose1); } @Test() public void testSSLTrustAlltoInvalidNameCert() throws IOException { String url = "https://example.com/"; //invalid certificate SSLUtils.trustAllSSLCertificates(); int respose1 = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose1); } @Test() public void testSSLWithBrowserUserAgent() throws IOException { String url = "https://testssl.disig.sk"; //expired certificate SSLUtils.trustAllSSLCertificates(); AuthUtils.useBrowserUserAgent(client); int respose1 = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose1); } @Test() public void testSSLWithoutBrowserUserAgent() throws IOException { String url = "https://testssl.disig.sk"; //expired certificate SSLUtils.trustAllSSLCertificates(); int respose1 = executeRequestReturnStatus(url); assertEquals("Should return a 403 response when no browser user agent provided", 403, respose1); } @Test(expected = SSLException.class) public void testJDKDefaultSSLtoInvalidNameCert() throws IOException, SSLException { String url = "https://example.com/"; //invalid certificate SSLUtils.trustJDKDefaultSSLCertificates(); int respose1 = executeRequestReturnStatus(url); fail("should not get here"); } @Test(expected = SSLException.class) public void testJDKDefaultSSLtoExpiredCert() throws IOException, SSLException { String url = "https://testssl-expire.disig.sk/"; //expired certificate SSLUtils.trustJDKDefaultSSLCertificates(); String respose1 = executeRequestReturnResponseAsString(url); fail("should 
not get here"); } /* test cryptography providers */ @Test public void testDefaultEncryptionProviders() throws Exception { System.out.println("========testDefaultEncryptionProviders======="); Provider[] providers = Security.getProviders(); int numservices = 0; System.out.println("========default Providers only======="); for (Provider p : providers) { String info = p.getInfo(); System.out.println(p.getClass() + " - " + info); } System.out.println("========default Providers + services======="); for (Provider p : providers) { String info = p.getInfo(); //System.out.println(p.getClass() + " - " + info); numservices += printServices(p); } System.out.println("total number of default providers : " + providers.length); System.out.println("total number of default services : " + numservices); } /* test cryptography providers */ @Test public void testExtendedEncryptionProviders() throws Exception { System.out.println("========testExtendedEncryptionProviders======="); AuthUtils.addEncryptionProviders(); Provider[] providers = Security.getProviders(); int numservices = 0; System.out.println("======== Extended Providers ======="); for (Provider p : providers) { String info = p.getInfo(); System.out.println(p.getClass() + " - " + info); } System.out.println("========Extended Providers + services======="); for (Provider p : providers) { String info = p.getInfo(); //System.out.println(p.getClass() + " - " + info); numservices += printServices(p); } System.out.println("total number of providers : " + providers.length); System.out.println("total number of services : " + numservices); } /* Not yet implemented */ @Ignore("Not yet implemented") public void testSSLTrustCustomStore() throws IOException { String url = ""; //SSLUtils.trustCustomHTTPSCertificates(); int respose = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose); } //uncomment this annotation to use the test //@Test public void testKERBEROS() throws IOException { 
AuthUtils.securityLogging(SecurityLogType.KERBEROS, true); String url = "your url"; SSLUtils.trustAllSSLCertificates(); CredentialsUtils .setKerberosCredentials(client, new UsernamePasswordCredentials("user", "password"), "domain", "KDC"); String respose = executeRequestReturnResponseAsString(url); System.out.print(respose); // assertEquals("Should return a 200 response", 200, respose); } @Ignore("Not yet implemented") public void testNTLM() throws IOException { String url = "yourNTLMserver"; SSLUtils.trustAllSSLCertificates(); CredentialsUtils.setNTLMCredentials(client, new UsernamePasswordCredentials("xxx", "xxx"), "domain"); int respose = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose); } @Ignore("Not yet implemented") public void testUseNTLMforMixedAuth() throws IOException { String url = "yourCLAIMSandNTLMserver"; SSLUtils.trustAllSSLCertificates(); CredentialsUtils.setNTLMCredentials(client, new UsernamePasswordCredentials("xxx", "xxx"), "domain"); int respose = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose); } @Ignore("Not yet implemented") public void testProxy() throws IOException { String url = "http://api.stackexchange.com/2.2/questions?site=stackoverflow"; CredentialsUtils.setProxyHost(client, null, "88.88.88.88", 8080); AuthUtils.useBrowserUserAgent(client); int respose = executeRequestReturnStatus(url); assertEquals("Should return a 200 response", 200, respose); } /* test utils */ private String executeRequestReturnResponseAsString(String url) throws IOException { GetMethod httpget = new GetMethod(url); client.executeMethod(httpget); return AuthUtils.getResponseAsStringAndHandleGzip(httpget); } private int executeRequestReturnStatus(String url) throws IOException { GetMethod httpget = new GetMethod(url); client.executeMethod(httpget); return httpget.getStatusCode(); } public static boolean isJSONValid(String test) { try { new JSONObject(test); return true; } catch 
(JSONException ex) { return false; } } private int printServices(Provider p) { SortedSet<Provider.Service> services = new TreeSet(new ProviderServiceComparator()); services.addAll(p.getServices()); for (Provider.Service service : services) { String algo = service.getAlgorithm(); //System.out.println("==> Service: " + service.getType() + " - " + algo); } return services.size(); } /** * This is to sort the various Services to make it easier on the eyes... */ private class ProviderServiceComparator implements Comparator<Provider.Service> { @Override public int compare(Provider.Service object1, Provider.Service object2) { String s1 = object1.getType() + object1.getAlgorithm(); String s2 = object2.getType() + object2.getAlgorithm(); return s1.compareTo(s2); } } }
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.device.bluetooth;

import android.annotation.TargetApi;
import android.bluetooth.BluetoothDevice;
import android.content.Context;
import android.os.Build;
import android.os.ParcelUuid;

import org.chromium.base.Log;
import org.chromium.base.ThreadUtils;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

/**
 * Exposes android.bluetooth.BluetoothDevice as necessary for C++
 * device::BluetoothDeviceAndroid.
 *
 * Lifetime is controlled by device::BluetoothDeviceAndroid.
 *
 * Threading: GATT callbacks arrive on Android binder threads; every callback
 * marshals onto the UI thread (ThreadUtils.runOnUiThread) before touching the
 * native object, and re-checks mNativeBluetoothDeviceAndroid != 0 there because
 * the C++ side may have been destroyed in the meantime.
 */
@JNINamespace("device")
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
final class ChromeBluetoothDevice {
    private static final String TAG = "Bluetooth";

    // Raw pointer to the owning C++ BluetoothDeviceAndroid; 0 once destroyed.
    private long mNativeBluetoothDeviceAndroid;
    final Wrappers.BluetoothDeviceWrapper mDevice;
    // Union of all service UUIDs seen in advertisements for this device.
    private HashSet<String> mUuidsFromScan;
    Wrappers.BluetoothGattWrapper mBluetoothGatt;
    private final BluetoothGattCallbackImpl mBluetoothGattCallbackImpl;
    // Map Android wrapper objects to their Chrome-side peers so callbacks can be routed.
    final HashMap<Wrappers.BluetoothGattCharacteristicWrapper,
            ChromeBluetoothRemoteGattCharacteristic> mWrapperToChromeCharacteristicsMap;
    final HashMap<Wrappers.BluetoothGattDescriptorWrapper, ChromeBluetoothRemoteGattDescriptor>
            mWrapperToChromeDescriptorsMap;

    private ChromeBluetoothDevice(
            long nativeBluetoothDeviceAndroid, Wrappers.BluetoothDeviceWrapper deviceWrapper) {
        mNativeBluetoothDeviceAndroid = nativeBluetoothDeviceAndroid;
        mDevice = deviceWrapper;
        mUuidsFromScan = new HashSet<String>();
        mBluetoothGattCallbackImpl = new BluetoothGattCallbackImpl();
        mWrapperToChromeCharacteristicsMap =
                new HashMap<Wrappers.BluetoothGattCharacteristicWrapper,
                        ChromeBluetoothRemoteGattCharacteristic>();
        mWrapperToChromeDescriptorsMap = new HashMap<Wrappers.BluetoothGattDescriptorWrapper,
                ChromeBluetoothRemoteGattDescriptor>();
        Log.v(TAG, "ChromeBluetoothDevice created.");
    }

    /**
     * Handles C++ object being destroyed.
     */
    @CalledByNative
    private void onBluetoothDeviceAndroidDestruction() {
        // Close any live GATT connection and zero the native pointer so queued
        // UI-thread callbacks become no-ops.
        if (mBluetoothGatt != null) {
            mBluetoothGatt.close();
            mBluetoothGatt = null;
        }
        mNativeBluetoothDeviceAndroid = 0;
    }

    // ---------------------------------------------------------------------------------------------
    // BluetoothDeviceAndroid methods implemented in java:

    // Implements BluetoothDeviceAndroid::Create.
    // 'Object' type must be used because inner class Wrappers.BluetoothDeviceWrapper reference is
    // not handled by jni_generator.py JavaToJni. http://crbug.com/505554
    @CalledByNative
    private static ChromeBluetoothDevice create(
            long nativeBluetoothDeviceAndroid, Object deviceWrapper) {
        return new ChromeBluetoothDevice(
                nativeBluetoothDeviceAndroid, (Wrappers.BluetoothDeviceWrapper) deviceWrapper);
    }

    // Implements BluetoothDeviceAndroid::UpdateAdvertisedUUIDs.
    // Returns true only when the scan contributed at least one previously unseen UUID.
    @CalledByNative
    private boolean updateAdvertisedUUIDs(List<ParcelUuid> uuidsFromScan) {
        if (uuidsFromScan == null) {
            return false;
        }
        boolean uuidsUpdated = false;
        for (ParcelUuid uuid : uuidsFromScan) {
            uuidsUpdated |= mUuidsFromScan.add(uuid.toString());
        }
        return uuidsUpdated;
    }

    // Implements BluetoothDeviceAndroid::GetBluetoothClass.
    @CalledByNative
    private int getBluetoothClass() {
        return mDevice.getBluetoothClass_getDeviceClass();
    }

    // Implements BluetoothDeviceAndroid::GetAddress.
    @CalledByNative
    private String getAddress() {
        return mDevice.getAddress();
    }

    // Implements BluetoothDeviceAndroid::IsPaired.
    @CalledByNative
    private boolean isPaired() {
        return mDevice.getBondState() == BluetoothDevice.BOND_BONDED;
    }

    // Implements BluetoothDeviceAndroid::GetUUIDs.
    @CalledByNative
    private String[] getUuids() {
        // TODO(scheib): return merged list of UUIDs from scan results and,
        //               after a device is connected, discoverServices. crbug.com/508648
        return mUuidsFromScan.toArray(new String[mUuidsFromScan.size()]);
    }

    // Implements BluetoothDeviceAndroid::CreateGattConnectionImpl.
    @CalledByNative
    private void createGattConnectionImpl(Context context) {
        Log.i(TAG, "connectGatt");

        // Drop any stale connection before opening a new one.
        if (mBluetoothGatt != null) mBluetoothGatt.close();

        // autoConnect set to false as under experimentation using autoConnect failed to complete
        // connections.
        mBluetoothGatt =
                mDevice.connectGatt(context, false /* autoConnect */, mBluetoothGattCallbackImpl);
    }

    // Implements BluetoothDeviceAndroid::DisconnectGatt.
    @CalledByNative
    private void disconnectGatt() {
        Log.i(TAG, "BluetoothGatt.disconnect");
        if (mBluetoothGatt != null) mBluetoothGatt.disconnect();
    }

    // Implements BluetoothDeviceAndroid::GetDeviceName.
    @CalledByNative
    private String getDeviceName() {
        return mDevice.getName();
    }

    // Implements callbacks related to a GATT connection.
    private class BluetoothGattCallbackImpl extends Wrappers.BluetoothGattCallbackWrapper {
        @Override
        public void onConnectionStateChange(final int status, final int newState) {
            Log.i(TAG, "onConnectionStateChange status:%d newState:%s", status,
                    (newState == android.bluetooth.BluetoothProfile.STATE_CONNECTED)
                            ? "Connected"
                            : "Disconnected");
            if (newState == android.bluetooth.BluetoothProfile.STATE_CONNECTED) {
                // Kick off service discovery immediately; the native side is only
                // notified of the connection after discovery completes elsewhere.
                mBluetoothGatt.discoverServices();
            } else if (newState == android.bluetooth.BluetoothProfile.STATE_DISCONNECTED) {
                if (mBluetoothGatt != null) {
                    mBluetoothGatt.close();
                    mBluetoothGatt = null;
                }
            }
            ThreadUtils.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    // Native object may have been destroyed while this hopped threads.
                    if (mNativeBluetoothDeviceAndroid != 0) {
                        nativeOnConnectionStateChange(mNativeBluetoothDeviceAndroid, status,
                                newState == android.bluetooth.BluetoothProfile.STATE_CONNECTED);
                    }
                }
            });
        }

        @Override
        public void onServicesDiscovered(final int status) {
            Log.i(TAG, "onServicesDiscovered status:%d==%s", status,
                    status == android.bluetooth.BluetoothGatt.GATT_SUCCESS ? "OK" : "Error");
            ThreadUtils.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (mNativeBluetoothDeviceAndroid != 0) {
                        // TODO(crbug.com/576906): Update or replace existing GATT objects if they
                        //                         change after initial discovery.
                        for (Wrappers.BluetoothGattServiceWrapper service :
                                mBluetoothGatt.getServices()) {
                            // Create an adapter unique service ID. getInstanceId only differs
                            // between service instances with the same UUID on this device.
                            String serviceInstanceId = getAddress() + "/"
                                    + service.getUuid().toString() + "," + service.getInstanceId();
                            nativeCreateGattRemoteService(
                                    mNativeBluetoothDeviceAndroid, serviceInstanceId, service);
                        }
                        nativeOnGattServicesDiscovered(mNativeBluetoothDeviceAndroid);
                    }
                }
            });
        }

        @Override
        public void onCharacteristicChanged(
                final Wrappers.BluetoothGattCharacteristicWrapper characteristic) {
            Log.i(TAG, "device onCharacteristicChanged.");
            ThreadUtils.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    ChromeBluetoothRemoteGattCharacteristic chromeCharacteristic =
                            mWrapperToChromeCharacteristicsMap.get(characteristic);
                    if (chromeCharacteristic == null) {
                        // Android events arriving with no Chrome object is expected rarely only
                        // when the event races object destruction.
                        Log.v(TAG, "onCharacteristicChanged when chromeCharacteristic == null.");
                    } else {
                        chromeCharacteristic.onCharacteristicChanged();
                    }
                }
            });
        }

        @Override
        public void onCharacteristicRead(
                final Wrappers.BluetoothGattCharacteristicWrapper characteristic,
                final int status) {
            ThreadUtils.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    ChromeBluetoothRemoteGattCharacteristic chromeCharacteristic =
                            mWrapperToChromeCharacteristicsMap.get(characteristic);
                    if (chromeCharacteristic == null) {
                        // Android events arriving with no Chrome object is expected rarely: only
                        // when the event races object destruction.
                        Log.v(TAG, "onCharacteristicRead when chromeCharacteristic == null.");
                    } else {
                        chromeCharacteristic.onCharacteristicRead(status);
                    }
                }
            });
        }

        @Override
        public void onCharacteristicWrite(
                final Wrappers.BluetoothGattCharacteristicWrapper characteristic,
                final int status) {
            ThreadUtils.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    ChromeBluetoothRemoteGattCharacteristic chromeCharacteristic =
                            mWrapperToChromeCharacteristicsMap.get(characteristic);
                    if (chromeCharacteristic == null) {
                        // Android events arriving with no Chrome object is expected rarely: only
                        // when the event races object destruction.
                        Log.v(TAG, "onCharacteristicWrite when chromeCharacteristic == null.");
                    } else {
                        chromeCharacteristic.onCharacteristicWrite(status);
                    }
                }
            });
        }
    }

    // ---------------------------------------------------------------------------------------------
    // BluetoothAdapterDevice C++ methods declared for access from java:

    // Binds to BluetoothDeviceAndroid::OnConnectionStateChange.
    private native void nativeOnConnectionStateChange(
            long nativeBluetoothDeviceAndroid, int status, boolean connected);

    // Binds to BluetoothDeviceAndroid::CreateGattRemoteService.
    // TODO(http://crbug.com/505554): Replace 'Object' with specific type when JNI fixed.
    private native void nativeCreateGattRemoteService(long nativeBluetoothDeviceAndroid,
            String instanceId, Object bluetoothGattServiceWrapper);

    // Binds to BluetoothDeviceAndroid::GattServicesDiscovered.
    private native void nativeOnGattServicesDiscovered(long nativeBluetoothDeviceAndroid);
}
/*
 * Copyright (c) 2002-2008 LWJGL Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'LWJGL' nor the names of
 *   its contributors may be used to endorse or promote products derived
 *   from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.lwjgl;

import java.io.File;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;

import org.lwjgl.input.Mouse;

/**
 * <p>
 * System class (named Sys so as not to conflict with java.lang.System)
 * </p>
 *
 * <p>Entry point for loading the LWJGL native library and for platform-specific
 * services (hires timer, alerts, clipboard, URL opening), all delegated to a
 * per-platform {@code SysImplementation} chosen in the static initializer.</p>
 *
 * @author cix_foo <cix_foo@users.sourceforge.net>
 * @version $Revision$
 * $Id$
 */
public final class Sys {
	/** The native library name */
	private static final String JNI_LIBRARY_NAME = "lwjgl";

	/** Current version of library */
	private static final String VERSION = "2.9.3";

	// Suffix appended to the library name for 64-bit builds ("lwjgl64").
	private static final String POSTFIX64BIT = "64";

	/** The implementation instance to delegate platform specific behavior to */
	private static final SysImplementation implementation;

	// True when the loaded native library reports 8-byte pointers.
	private static final boolean is64Bit;

	/**
	 * Loads a single native library, either from the explicit
	 * "org.lwjgl.librarypath" system property (absolute path load) or via the
	 * default java.library.path lookup. Runs privileged so it works under a
	 * security manager (e.g. applets/Webstart).
	 */
	private static void doLoadLibrary(final String lib_name) {
		AccessController.doPrivileged(new PrivilegedAction<Object>() {
			public Object run() {
				String library_path = System.getProperty("org.lwjgl.librarypath");
				if (library_path != null) {
					System.load(library_path + File.separator +
							LWJGLUtil.mapLibraryName(lib_name));
				} else {
					System.loadLibrary(lib_name);
				}
				return null;
			}
		});
	}

	/**
	 * Loads the LWJGL native library, trying the 64-bit variant first on
	 * known 64-bit architectures (except Mac OS X, whose universal binary
	 * does not use the "64" postfix), then falling back to the plain name,
	 * and finally to the 64-bit name again if the implementation says a
	 * 64-bit build exists.
	 *
	 * @throws UnsatisfiedLinkError if no variant of the library can be loaded
	 */
	private static void loadLibrary(final String lib_name) {
		// actively try to load 64bit libs on 64bit architectures first
		String osArch = System.getProperty("os.arch");
		boolean try64First = LWJGLUtil.getPlatform() != LWJGLUtil.PLATFORM_MACOSX &&
				("amd64".equals(osArch) || "x86_64".equals(osArch));

		Error err = null;
		if ( try64First ) {
			try {
				doLoadLibrary(lib_name + POSTFIX64BIT);
				return;
			} catch (UnsatisfiedLinkError e) {
				err = e;  // remember; rethrown below if the plain name also fails
			}
		}

		// fallback to loading the "old way"
		try {
			doLoadLibrary(lib_name);
		} catch (UnsatisfiedLinkError e) {
			// If we already tried (and failed) the 64-bit name, prefer that error.
			if ( try64First )
				throw err;
			if (implementation.has64Bit()) {
				try {
					doLoadLibrary(lib_name + POSTFIX64BIT);
					return;
				} catch (UnsatisfiedLinkError e2) {
					LWJGLUtil.log("Failed to load 64 bit library: " + e2.getMessage());
				}
			}
			// Throw original error
			throw e;
		}
	}

	static {
		// Order matters: the implementation is needed by loadLibrary()'s
		// has64Bit() fallback, and the JNI version check requires the
		// native library to already be loaded.
		implementation = createImplementation();
		loadLibrary(JNI_LIBRARY_NAME);
		is64Bit = implementation.getPointerSize() == 8;

		// Refuse to run against a mismatched native library.
		int native_jni_version = implementation.getJNIVersion();
		int required_version = implementation.getRequiredJNIVersion();
		if (native_jni_version != required_version)
			throw new LinkageError("Version mismatch: jar version is '" + required_version +
					"', native library version is '" + native_jni_version + "'");
		implementation.setDebug(LWJGLUtil.DEBUG);
	}

	/**
	 * Selects the platform-specific delegate based on the detected OS.
	 *
	 * @throws IllegalStateException on an unrecognized platform
	 */
	private static SysImplementation createImplementation() {
		switch (LWJGLUtil.getPlatform()) {
			case LWJGLUtil.PLATFORM_LINUX:
				return new LinuxSysImplementation();
			case LWJGLUtil.PLATFORM_WINDOWS:
				return new WindowsSysImplementation();
			case LWJGLUtil.PLATFORM_MACOSX:
				return new MacOSXSysImplementation();
			default:
				throw new IllegalStateException("Unsupported platform");
		}
	}

	/**
	 * No constructor for Sys.
	 */
	private Sys() {
	}

	/**
	 * Return the version of the core LWJGL libraries as a String.
	 */
	public static String getVersion() {
		return VERSION;
	}

	/**
	 * Initialization. This is just a dummy method to trigger the static constructor.
	 */
	public static void initialize() {
	}

	/** Returns true if a 64bit implementation was loaded. */
	public static boolean is64Bit() {
		return is64Bit;
	}

	/**
	 * Obtains the number of ticks that the hires timer does in a second. This method is fast;
	 * it should be called as frequently as possible, as it recalibrates the timer.
	 *
	 * @return timer resolution in ticks per second or 0 if no timer is present.
	 */
	public static long getTimerResolution() {
		return implementation.getTimerResolution();
	}

	/**
	 * Gets the current value of the hires timer, in ticks. When the Sys class is first loaded
	 * the hires timer is reset to 0. If no hires timer is present then this method will always
	 * return 0.<p><strong>NOTEZ BIEN</strong> that the hires timer WILL wrap around.
	 *
	 * @return the current hires time, in ticks (always >= 0)
	 */
	public static long getTime() {
		// Mask off the sign bit so wrap-around never yields a negative value.
		return implementation.getTime() & 0x7FFFFFFFFFFFFFFFL;
	}

	/**
	 * Attempt to display a modal alert to the user. This method should be used
	 * when a game fails to initialize properly or crashes out losing its display
	 * in the process. It is provided because AWT may not be available on the target
	 * platform, although on Mac and Linux and other platforms supporting AWT we
	 * delegate the task to AWT instead of doing it ourselves.
	 * <p>
	 * The alert should display the title and the message and then the current
	 * thread should block until the user dismisses the alert - typically with an
	 * OK button click.
	 * <p>
	 * It may be that the user's system has no windowing system installed for some
	 * reason, in which case this method may do nothing at all, or attempt to provide
	 * some console output.
	 *
	 * @param title The title of the alert. We suggest using the title of your game.
	 * @param message The message text for the alert.
	 */
	public static void alert(String title, String message) {
		// Release the mouse grab while the (blocking) alert is up, then restore it.
		boolean grabbed = Mouse.isGrabbed();
		if (grabbed) {
			Mouse.setGrabbed(false);
		}
		if (title == null)
			title = "";
		if (message == null)
			message = "";
		implementation.alert(title, message);
		if (grabbed) {
			Mouse.setGrabbed(true);
		}
	}

	/**
	 * Open the system web browser and point it at the specified URL. It is recommended
	 * that this not be called whilst your game is running, but on application exit in
	 * a shutdown hook, as the screen resolution will not be reset when the browser is
	 * brought into view.
	 * <p>
	 * There is no guarantee that this will work, nor that we can detect if it has
	 * failed - hence we don't return success code or throw an Exception. This is just a
	 * best attempt at opening the URL given - don't rely on it to work!
	 * <p>
	 * @param url The URL. Ensure that the URL is properly encoded.
	 * @return false if we are CERTAIN the call has failed
	 */
	public static boolean openURL(String url) {
		// Attempt to use Webstart if we have it available
		try {
			// Lookup the javax.jnlp.BasicService object
			// (reflection so there is no compile-time dependency on JNLP).
			final Class<?> serviceManagerClass = Class.forName("javax.jnlp.ServiceManager");
			Method lookupMethod = AccessController.doPrivileged(new PrivilegedExceptionAction<Method>() {
				public Method run() throws Exception {
					return serviceManagerClass.getMethod("lookup", String.class);
				}
			});
			Object basicService = lookupMethod.invoke(serviceManagerClass, new Object[] {"javax.jnlp.BasicService"});
			final Class<?> basicServiceClass = Class.forName("javax.jnlp.BasicService");
			Method showDocumentMethod = AccessController.doPrivileged(new PrivilegedExceptionAction<Method>() {
				public Method run() throws Exception {
					return basicServiceClass.getMethod("showDocument", URL.class);
				}
			});
			try {
				Boolean ret = (Boolean)showDocumentMethod.invoke(basicService, new URL(url));
				return ret;
			} catch (MalformedURLException e) {
				e.printStackTrace(System.err);
				return false;
			}
		} catch (Exception ue) {
			// Not running under Webstart (or lookup failed): use the platform fallback.
			return implementation.openURL(url);
		}
	}

	/**
	 * Get the contents of the system clipboard. The system might not have a
	 * clipboard (particularly if it doesn't even have a keyboard) in which case
	 * we return null. Otherwise we return a String, which may be the empty
	 * string "".
	 *
	 * @return a String, or null if there is no system clipboard.
	 */
	public static String getClipboard() {
		return implementation.getClipboard();
	}
}
/*
 * *
 * * Copyright 2014 NAVER Corp.
 * *
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * *   http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 * *
 */

package com.navercorp.pinpoint.web.service;

import com.navercorp.pinpoint.rpc.Future;
import com.navercorp.pinpoint.rpc.PinpointSocket;
import com.navercorp.pinpoint.rpc.ResponseMessage;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannel;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelContext;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelMessageListener;
import com.navercorp.pinpoint.rpc.stream.StreamChannelStateChangeEventHandler;
import com.navercorp.pinpoint.rpc.util.ListUtils;
import com.navercorp.pinpoint.thrift.dto.command.TCmdActiveThreadCount;
import com.navercorp.pinpoint.thrift.dto.command.TCmdActiveThreadCountRes;
import com.navercorp.pinpoint.thrift.dto.command.TCommandTransfer;
import com.navercorp.pinpoint.thrift.dto.command.TRouteResult;
import com.navercorp.pinpoint.thrift.io.DeserializerFactory;
import com.navercorp.pinpoint.thrift.io.HeaderTBaseDeserializer;
import com.navercorp.pinpoint.thrift.io.HeaderTBaseSerializer;
import com.navercorp.pinpoint.thrift.io.SerializerFactory;
import com.navercorp.pinpoint.thrift.util.SerializationUtils;
import com.navercorp.pinpoint.web.cluster.ClusterManager;
import com.navercorp.pinpoint.web.cluster.DefaultPinpointRouteResponse;
import com.navercorp.pinpoint.web.cluster.FailedPinpointRouteResponse;
import com.navercorp.pinpoint.web.cluster.PinpointRouteResponse;
import com.navercorp.pinpoint.web.vo.AgentActiveThreadCount;
import com.navercorp.pinpoint.web.vo.AgentActiveThreadCountList;
import com.navercorp.pinpoint.web.vo.AgentInfo;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * Routes commands from the web tier to connected agents through the cluster,
 * serializing Thrift payloads and collecting (possibly timed-out) responses.
 *
 * @author HyunGil Jeong
 * @author Taejin Koo
 */
@Service
public class AgentServiceImpl implements AgentService {

    // Default per-request wait for an agent response, in milliseconds.
    private static final long DEFAULT_FUTURE_TIMEOUT = 3000;

    // Agents are considered "recent" if seen within this window.
    private static final long DEFAULT_TIME_DIFF_DAYS = 7;
    private static final long DEFAULT_TIME_DIFF_MS = TimeUnit.MILLISECONDS.convert(DEFAULT_TIME_DIFF_DAYS, TimeUnit.DAYS);

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private AgentInfoService agentInfoService;

    @Autowired
    private ClusterManager clusterManager;

    @Autowired
    private SerializerFactory<HeaderTBaseSerializer> commandSerializerFactory;

    @Autowired
    private DeserializerFactory<HeaderTBaseDeserializer> commandDeserializerFactory;

    /**
     * Looks up an agent by application name and agent id among the agents
     * currently known for the application, or {@code null} if not found.
     */
    @Override
    public AgentInfo getAgentInfo(String applicationName, String agentId) {
        long currentTime = System.currentTimeMillis();

        Set<AgentInfo> agentInfos = agentInfoService.getAgentsByApplicationName(applicationName, currentTime);
        for (AgentInfo agentInfo : agentInfos) {
            if (agentInfo == null) {
                continue;
            }
            if (!agentInfo.getApplicationName().equals(applicationName)) {
                continue;
            }
            if (!agentInfo.getAgentId().equals(agentId)) {
                continue;
            }

            return agentInfo;
        }

        return null;
    }

    @Override
    public AgentInfo getAgentInfo(String applicationName, String agentId, long startTimeStamp) {
        return getAgentInfo(applicationName, agentId, startTimeStamp, false);
    }

    /**
     * Looks up an agent, additionally matching its start timestamp.
     *
     * @param checkDB if {@code true}, verify against the stored agent list and
     *                return {@code null} on no match; if {@code false}, build a
     *                detached {@link AgentInfo} from the arguments without any lookup.
     */
    @Override
    public AgentInfo getAgentInfo(String applicationName, String agentId, long startTimeStamp, boolean checkDB) {
        if (checkDB) {
            long currentTime = System.currentTimeMillis();

            Set<AgentInfo> agentInfos = agentInfoService.getAgentsByApplicationName(applicationName, currentTime);
            for (AgentInfo agentInfo : agentInfos) {
                if (agentInfo == null) {
                    continue;
                }
                if (!agentInfo.getApplicationName().equals(applicationName)) {
                    continue;
                }
                if (!agentInfo.getAgentId().equals(agentId)) {
                    continue;
                }
                if (agentInfo.getStartTimestamp() != startTimeStamp) {
                    continue;
                }

                return agentInfo;
            }
            return null;
        } else {
            AgentInfo agentInfo = new AgentInfo();
            agentInfo.setApplicationName(applicationName);
            agentInfo.setAgentId(agentId);
            agentInfo.setStartTimestamp(startTimeStamp);
            return agentInfo;
        }
    }

    @Override
    public List<AgentInfo> getRecentAgentInfoList(String applicationName) {
        return this.getRecentAgentInfoList(applicationName, DEFAULT_TIME_DIFF_MS);
    }

    /**
     * Returns the non-null agents seen for the application within {@code timeDiff} ms.
     */
    @Override
    public List<AgentInfo> getRecentAgentInfoList(String applicationName, long timeDiff) {
        List<AgentInfo> agentInfoList = new ArrayList<>();

        long currentTime = System.currentTimeMillis();

        Set<AgentInfo> agentInfos = agentInfoService.getAgentsByApplicationName(applicationName, currentTime, timeDiff);
        for (AgentInfo agentInfo : agentInfos) {
            ListUtils.addIfValueNotNull(agentInfoList, agentInfo);
        }
        return agentInfoList;
    }

    @Override
    public boolean isConnected(AgentInfo agentInfo) {
        return clusterManager.isConnected(agentInfo);
    }

    @Override
    public PinpointRouteResponse invoke(AgentInfo agentInfo, TBase<?, ?> tBase) throws TException {
        byte[] payload = serializeRequest(tBase);
        return invoke(agentInfo, payload);
    }

    @Override
    public PinpointRouteResponse invoke(AgentInfo agentInfo, TBase<?, ?> tBase, long timeout) throws TException {
        byte[] payload = serializeRequest(tBase);
        return invoke(agentInfo, payload, timeout);
    }

    @Override
    public PinpointRouteResponse invoke(AgentInfo agentInfo, byte[] payload) throws TException {
        return invoke(agentInfo, payload, DEFAULT_FUTURE_TIMEOUT);
    }

    /**
     * Sends the payload to a single agent through its cluster socket and waits
     * up to {@code timeout} ms for the routed response.
     */
    @Override
    public PinpointRouteResponse invoke(AgentInfo agentInfo, byte[] payload, long timeout) throws TException {
        TCommandTransfer transferObject = createCommandTransferObject(agentInfo, payload);

        PinpointSocket socket = clusterManager.getSocket(agentInfo);

        // A null socket (agent not connected) yields a null future, which
        // getResponse translates into a NOT_FOUND failure response.
        Future<ResponseMessage> future = null;
        if (socket != null) {
            future = socket.request(serializeRequest(transferObject));
        }

        PinpointRouteResponse response = getResponse(future, timeout);
        return response;
    }

    @Override
    public Map<AgentInfo, PinpointRouteResponse> invoke(List<AgentInfo> agentInfoList, TBase<?, ?> tBase) throws TException {
        byte[] payload = serializeRequest(tBase);
        return invoke(agentInfoList, payload);
    }

    @Override
    public Map<AgentInfo, PinpointRouteResponse> invoke(List<AgentInfo> agentInfoList, TBase<?, ?> tBase, long timeout) throws TException {
        byte[] payload = serializeRequest(tBase);
        return invoke(agentInfoList, payload, timeout);
    }

    @Override
    public Map<AgentInfo, PinpointRouteResponse> invoke(List<AgentInfo> agentInfoList, byte[] payload) throws TException {
        return invoke(agentInfoList, payload, DEFAULT_FUTURE_TIMEOUT);
    }

    /**
     * Fans the payload out to every agent in the list, then collects each
     * response. Requests are issued first so the agents work concurrently;
     * the per-agent wait is the portion of {@code timeout} remaining since
     * the fan-out started (never less than 100 ms, see getTimeoutMillis).
     */
    @Override
    public Map<AgentInfo, PinpointRouteResponse> invoke(List<AgentInfo> agentInfoList, byte[] payload, long timeout) throws TException {
        Map<AgentInfo, Future<ResponseMessage>> futureMap = new HashMap<>();
        for (AgentInfo agentInfo : agentInfoList) {
            TCommandTransfer transferObject = createCommandTransferObject(agentInfo, payload);
            PinpointSocket socket = clusterManager.getSocket(agentInfo);
            if (socket != null) {
                Future<ResponseMessage> future = socket.request(serializeRequest(transferObject));
                futureMap.put(agentInfo, future);
            } else {
                // Keep the entry so the caller gets a NOT_FOUND response for this agent.
                futureMap.put(agentInfo, null);
            }
        }

        long startTime = System.currentTimeMillis();

        Map<AgentInfo, PinpointRouteResponse> result = new HashMap<>();
        for (Map.Entry<AgentInfo, Future<ResponseMessage>> futureEntry : futureMap.entrySet()) {
            AgentInfo agentInfo = futureEntry.getKey();
            Future<ResponseMessage> future = futureEntry.getValue();
            PinpointRouteResponse response = getResponse(future, getTimeoutMillis(startTime, timeout));
            result.put(agentInfo, response);
        }

        return result;
    }

    @Override
    public ClientStreamChannelContext openStream(AgentInfo agentInfo, TBase<?, ?> tBase, ClientStreamChannelMessageListener messageListener) throws TException {
        byte[] payload = serializeRequest(tBase);
        return openStream(agentInfo, payload, messageListener, null);
    }

    @Override
    public ClientStreamChannelContext openStream(AgentInfo agentInfo, byte[] payload, ClientStreamChannelMessageListener messageListener) throws TException {
        return openStream(agentInfo, payload, messageListener, null);
    }

    @Override
    public ClientStreamChannelContext openStream(AgentInfo agentInfo, TBase<?, ?> tBase, ClientStreamChannelMessageListener messageListener,
            StreamChannelStateChangeEventHandler<ClientStreamChannel> stateChangeListener) throws TException {
        byte[] payload = serializeRequest(tBase);
        return openStream(agentInfo, payload, messageListener, stateChangeListener);
    }

    /**
     * Opens a client stream channel to the agent, or returns {@code null}
     * if the agent has no cluster socket.
     */
    @Override
    public ClientStreamChannelContext openStream(AgentInfo agentInfo, byte[] payload, ClientStreamChannelMessageListener messageListener,
            StreamChannelStateChangeEventHandler<ClientStreamChannel> stateChangeListener) throws TException {
        TCommandTransfer transferObject = createCommandTransferObject(agentInfo, payload);

        PinpointSocket socket = clusterManager.getSocket(agentInfo);
        if (socket != null) {
            return socket.openStream(serializeRequest(transferObject), messageListener, stateChangeListener);
        }
        return null;
    }

    @Override
    public AgentActiveThreadCountList getActiveThreadCount(List<AgentInfo> agentInfoList) throws TException {
        byte[] activeThread = serializeRequest(new TCmdActiveThreadCount());
        return getActiveThreadCount(agentInfoList, activeThread);
    }

    /**
     * Queries every agent for its active thread count and aggregates the
     * per-agent results; routing failures are recorded with the route
     * result name instead of a count.
     */
    @Override
    public AgentActiveThreadCountList getActiveThreadCount(List<AgentInfo> agentInfoList, byte[] payload) throws TException {
        AgentActiveThreadCountList agentActiveThreadStatusList = new AgentActiveThreadCountList(agentInfoList.size());

        Map<AgentInfo, PinpointRouteResponse> responseList = invoke(agentInfoList, payload);
        for (Map.Entry<AgentInfo, PinpointRouteResponse> entry : responseList.entrySet()) {
            AgentInfo agentInfo = entry.getKey();
            PinpointRouteResponse response = entry.getValue();

            AgentActiveThreadCount agentActiveThreadStatus = new AgentActiveThreadCount(agentInfo.getAgentId());
            TRouteResult routeResult = response.getRouteResult();
            if (routeResult == TRouteResult.OK) {
                agentActiveThreadStatus.setResult(response.getResponse(TCmdActiveThreadCountRes.class, null));
            } else {
                agentActiveThreadStatus.setFail(routeResult.name());
            }
            agentActiveThreadStatusList.add(agentActiveThreadStatus);
        }
        return agentActiveThreadStatusList;
    }

    /**
     * Wraps a payload with the addressing information (application, agent id,
     * start time) the collector needs to route it to the right agent.
     */
    private TCommandTransfer createCommandTransferObject(AgentInfo agentInfo, byte[] payload) {
        TCommandTransfer transferObject = new TCommandTransfer();
        transferObject.setApplicationName(agentInfo.getApplicationName());
        transferObject.setAgentId(agentInfo.getAgentId());
        transferObject.setStartTime(agentInfo.getStartTimestamp());
        transferObject.setPayload(payload);

        return transferObject;
    }

    /**
     * Waits up to {@code timeout} ms for the future and converts the outcome
     * into a route response: NOT_FOUND when there was no socket (null future),
     * TIMEOUT when the wait elapsed, otherwise the parsed response.
     */
    private PinpointRouteResponse getResponse(Future<ResponseMessage> future, long timeout) {
        if (future == null) {
            return new FailedPinpointRouteResponse(TRouteResult.NOT_FOUND, null);
        }

        // Fix: honor the caller-supplied timeout. Previously this always
        // waited DEFAULT_FUTURE_TIMEOUT, ignoring the parameter and the
        // remaining-time budget computed by getTimeoutMillis().
        boolean completed = future.await(timeout);
        if (completed) {
            DefaultPinpointRouteResponse response = new DefaultPinpointRouteResponse(future.getResult().getMessage());
            response.parse(commandDeserializerFactory);
            return response;
        } else {
            return new FailedPinpointRouteResponse(TRouteResult.TIMEOUT, null);
        }
    }

    /**
     * Remaining time of a budget that started at {@code startTime}, floored
     * at 100 ms so a late entry in the fan-out loop still gets a short wait.
     */
    private long getTimeoutMillis(long startTime, long timeout) {
        return Math.max(startTime + timeout - System.currentTimeMillis(), 100L);
    }

    @Override
    public byte[] serializeRequest(TBase<?, ?> tBase) throws TException {
        return SerializationUtils.serialize(tBase, commandSerializerFactory);
    }

    @Override
    public byte[] serializeRequest(TBase<?, ?> tBase, byte[] defaultValue) {
        return SerializationUtils.serialize(tBase, commandSerializerFactory, defaultValue);
    }

    @Override
    public TBase<?, ?> deserializeResponse(byte[] objectData) throws TException {
        return SerializationUtils.deserialize(objectData, commandDeserializerFactory);
    }

    @Override
    public TBase<?, ?> deserializeResponse(byte[] objectData, TBase<?, ?> defaultValue) {
        return SerializationUtils.deserialize(objectData, commandDeserializerFactory, defaultValue);
    }

}
/*
 * Copyright 2014, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *    * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *    * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *
 *    * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package io.grpc.internal;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.base.Preconditions;

import io.grpc.MessageEncoding;
import io.grpc.Status;

import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;

import javax.annotation.concurrent.NotThreadSafe;

/**
 * Deframer for GRPC frames.
 *
 * <p>This class is not thread-safe. All calls to public methods should be made in the transport
 * thread.
 *
 * <p>Implementation note: the deframer is a two-state machine (HEADER, BODY). Incoming buffers
 * accumulate in {@code unprocessed}; {@code deliver()} moves {@code requiredLength} bytes at a
 * time into {@code nextFrame} and emits at most {@code pendingDeliveries} messages per pass.
 */
@NotThreadSafe
public class MessageDeframer implements Closeable {
  // GRPC frame header: 1 flag byte + 4-byte big-endian message length.
  private static final int HEADER_LENGTH = 5;
  private static final int COMPRESSED_FLAG_MASK = 1;
  // All flag bits other than "compressed" must be zero.
  private static final int RESERVED_MASK = 0xFE;

  /**
   * A listener of deframing events.
   */
  public interface Listener {

    /**
     * Called when the given number of bytes has been read from the input source of the deframer.
     *
     * @param numBytes the number of bytes read from the deframer's input source.
     */
    void bytesRead(int numBytes);

    /**
     * Called to deliver the next complete message.
     *
     * @param is stream containing the message.
     */
    void messageRead(InputStream is);

    /**
     * Called when end-of-stream has not yet been reached but there are no complete messages
     * remaining to be delivered.
     */
    void deliveryStalled();

    /**
     * Called when the stream is complete and all messages have been successfully delivered.
     */
    void endOfStream();
  }

  // Parsing phase: reading the 5-byte header, or reading the message body.
  private enum State {
    HEADER, BODY
  }

  private final Listener listener;
  private MessageEncoding.Decompressor decompressor;
  private State state = State.HEADER;
  // Bytes nextFrame must contain before the current state can be processed.
  private int requiredLength = HEADER_LENGTH;
  private boolean compressedFlag;
  private boolean endOfStream;
  // Bytes being accumulated for the frame currently under construction.
  private CompositeReadableBuffer nextFrame;
  // Raw bytes received but not yet moved into nextFrame; null once closed.
  private CompositeReadableBuffer unprocessed = new CompositeReadableBuffer();
  // Number of messages requested by the listener but not yet delivered.
  private long pendingDeliveries;
  // Starts stalled: nothing has been buffered yet.
  private boolean deliveryStalled = true;
  // Reentrancy guard for deliver() (listener callbacks may call request()).
  private boolean inDelivery = false;

  /**
   * Creates a deframer. Compression will not be supported.
   *
   * @param listener listener for deframer events.
   */
  public MessageDeframer(Listener listener) {
    this(listener, MessageEncoding.NONE);
  }

  /**
   * Create a deframer.
   *
   * @param listener listener for deframer events.
   * @param decompressor the compression used if a compressed frame is encountered, with
   *  {@code NONE} meaning unsupported
   */
  public MessageDeframer(Listener listener, MessageEncoding.Decompressor decompressor) {
    this.listener = Preconditions.checkNotNull(listener, "sink");
    this.decompressor = Preconditions.checkNotNull(decompressor, "decompressor");
  }

  /**
   * Sets the decompressor available to use.  The message encoding for the stream comes later in
   * time, and thus will not be available at the time of construction.  This should only be set
   * once, since the compression codec cannot change after the headers have been sent.
   *
   * @param decompressor the decompressing wrapper.
   */
  public void setDecompressor(MessageEncoding.Decompressor decompressor) {
    this.decompressor = checkNotNull(decompressor, "Can't pass an empty decompressor");
  }

  /**
   * Requests up to the given number of messages from the call to be delivered to
   * {@link Listener#messageRead(InputStream)}. No additional messages will be delivered.
   *
   * <p>If {@link #close()} has been called, this method will have no effect.
   *
   * @param numMessages the requested number of messages to be delivered to the listener.
   */
  public void request(int numMessages) {
    Preconditions.checkArgument(numMessages > 0, "numMessages must be > 0");
    if (isClosed()) {
      return;
    }
    pendingDeliveries += numMessages;
    deliver();
  }

  /**
   * Adds the given data to this deframer and attempts delivery to the sink.
   *
   * @param data the raw data read from the remote endpoint. Must be non-null.
   * @param endOfStream if {@code true}, indicates that {@code data} is the end of the stream from
   *        the remote endpoint.
   * @throws IllegalStateException if {@link #close()} has been called previously or if
   *         {@link #deframe(ReadableBuffer, boolean)} has previously been called with
   *         {@code endOfStream=true}.
   */
  public void deframe(ReadableBuffer data, boolean endOfStream) {
    Preconditions.checkNotNull(data, "data");
    // If a precondition below fails, we still own `data` and must close it.
    boolean needToCloseData = true;
    try {
      checkNotClosed();
      Preconditions.checkState(!this.endOfStream, "Past end of stream");

      needToCloseData = false;
      unprocessed.addBuffer(data);

      // Indicate that all of the data for this stream has been received.
      this.endOfStream = endOfStream;
      deliver();
    } finally {
      if (needToCloseData) {
        data.close();
      }
    }
  }

  /**
   * Indicates whether delivery is currently stalled, pending receipt of more data.
   */
  public boolean isStalled() {
    return deliveryStalled;
  }

  /**
   * Closes this deframer and frees any resources. After this method is called, additional
   * calls will have no effect.
   */
  @Override
  public void close() {
    try {
      if (unprocessed != null) {
        unprocessed.close();
      }
      if (nextFrame != null) {
        nextFrame.close();
      }
    } finally {
      // Nulling unprocessed is what makes isClosed() return true.
      unprocessed = null;
      nextFrame = null;
    }
  }

  /**
   * Indicates whether or not this deframer has been closed.
   */
  public boolean isClosed() {
    return unprocessed == null;
  }

  /**
   * Throws if this deframer has already been closed.
   */
  private void checkNotClosed() {
    Preconditions.checkState(!isClosed(), "MessageDeframer is already closed");
  }

  /**
   * Reads and delivers as many messages to the sink as possible.
   */
  private void deliver() {
    // We can have reentrancy here when using a direct executor, triggered by calls to
    // request more messages. This is safe as we simply loop until pendingDelivers = 0
    if (inDelivery) {
      return;
    }
    inDelivery = true;
    try {
      // Process the uncompressed bytes.
      boolean stalled = false;
      while (pendingDeliveries > 0 && readRequiredBytes()) {
        switch (state) {
          case HEADER:
            processHeader();
            break;
          case BODY:
            // Read the body and deliver the message.
            processBody();

            // Since we've delivered a message, decrement the number of pending
            // deliveries remaining.
            pendingDeliveries--;
            break;
          default:
            throw new AssertionError("Invalid state: " + state);
        }
      }

      // We are stalled when there are no more bytes to process. This allows delivering errors as
      // soon as the buffered input has been consumed, independent of whether the application
      // has requested another message.
      stalled = !isDataAvailable();

      if (endOfStream) {
        if (!isDataAvailable()) {
          listener.endOfStream();
        } else if (stalled) {
          // We've received the entire stream and have data available but we don't have
          // enough to read the next frame ... this is bad.
          throw Status.INTERNAL.withDescription("Encountered end-of-stream mid-frame")
              .asRuntimeException();
        }
      }

      // Never indicate that we're stalled if we've received all the data for the stream.
      stalled &= !endOfStream;

      // If we're transitioning to the stalled state, notify the listener.
      boolean previouslyStalled = deliveryStalled;
      deliveryStalled = stalled;
      if (stalled && !previouslyStalled) {
        listener.deliveryStalled();
      }
    } finally {
      inDelivery = false;
    }
  }

  // True when any undelivered bytes remain, buffered or mid-frame.
  private boolean isDataAvailable() {
    return unprocessed.readableBytes() > 0 || (nextFrame != null && nextFrame.readableBytes() > 0);
  }

  /**
   * Attempts to read the required bytes into nextFrame.
   *
   * @return {@code true} if all of the required bytes have been read.
   */
  private boolean readRequiredBytes() {
    int totalBytesRead = 0;
    try {
      if (nextFrame == null) {
        nextFrame = new CompositeReadableBuffer();
      }

      // Read until the buffer contains all the required bytes.
      int missingBytes;
      while ((missingBytes = requiredLength - nextFrame.readableBytes()) > 0) {
        if (unprocessed.readableBytes() == 0) {
          // No more data is available.
          return false;
        }
        int toRead = Math.min(missingBytes, unprocessed.readableBytes());
        totalBytesRead += toRead;
        nextFrame.addBuffer(unprocessed.readBytes(toRead));
      }
      return true;
    } finally {
      // Report consumed bytes even on a partial read (flow-control accounting).
      if (totalBytesRead > 0) {
        listener.bytesRead(totalBytesRead);
      }
    }
  }

  /**
   * Processes the GRPC compression header which is composed of the compression flag and the outer
   * frame length.
   */
  private void processHeader() {
    int type = nextFrame.readUnsignedByte();
    if ((type & RESERVED_MASK) != 0) {
      throw Status.INTERNAL.withDescription("Frame header malformed: reserved bits not zero")
          .asRuntimeException();
    }
    compressedFlag = (type & COMPRESSED_FLAG_MASK) != 0;

    // Update the required length to include the length of the frame.
    requiredLength = nextFrame.readInt();

    // Continue reading the frame body.
    state = State.BODY;
  }

  /**
   * Processes the body of the GRPC compression frame. A single compression frame may contain
   * several GRPC messages within it.
   */
  private void processBody() {
    InputStream stream = compressedFlag ? getCompressedBody() : getUncompressedBody();
    // Ownership of nextFrame's bytes transfers to the delivered stream.
    nextFrame = null;
    listener.messageRead(stream);

    // Done with this frame, begin processing the next header.
    state = State.HEADER;
    requiredLength = HEADER_LENGTH;
  }

  private InputStream getUncompressedBody() {
    return ReadableBuffers.openStream(nextFrame, true);
  }

  private InputStream getCompressedBody() {
    if (decompressor == MessageEncoding.NONE) {
      throw Status.INTERNAL.withDescription(
          "Can't decode compressed frame as compression not configured.").asRuntimeException();
    }

    try {
      return decompressor.decompress(ReadableBuffers.openStream(nextFrame, true));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
package cgeo.geocaching.staticmaps; import cgeo.geocaching.Intents; import cgeo.geocaching.R; import cgeo.geocaching.activity.AbstractListActivity; import cgeo.geocaching.enumerations.LoadFlags; import cgeo.geocaching.models.Geocache; import cgeo.geocaching.models.Waypoint; import cgeo.geocaching.storage.DataStore; import cgeo.geocaching.ui.dialog.Dialogs; import cgeo.geocaching.utils.Log; import android.app.ProgressDialog; import android.content.DialogInterface; import android.graphics.Bitmap; import android.os.Bundle; import android.support.annotation.NonNull; import android.view.Menu; import android.view.MenuItem; import java.util.concurrent.Callable; import io.reactivex.Maybe; import io.reactivex.Observable; import io.reactivex.Single; import io.reactivex.android.schedulers.AndroidSchedulers; import io.reactivex.disposables.CompositeDisposable; import io.reactivex.disposables.Disposable; import io.reactivex.functions.Action; import io.reactivex.functions.Consumer; import io.reactivex.functions.Function; import io.reactivex.schedulers.Schedulers; import org.androidannotations.annotations.EActivity; import org.androidannotations.annotations.Extra; import org.androidannotations.annotations.OptionsItem; @EActivity public class StaticMapsActivity extends AbstractListActivity { @Extra(Intents.EXTRA_DOWNLOAD) boolean download = false; @Extra(Intents.EXTRA_WAYPOINT_ID) Integer waypointId = null; @Extra(Intents.EXTRA_GEOCODE) String geocode = null; private Geocache cache; private ProgressDialog waitDialog = null; private StaticMapsAdapter adapter; private MenuItem menuRefresh; private CompositeDisposable resumeDisposables = new CompositeDisposable(); @Override public void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState, R.layout.staticmaps_activity); } @Override public void onStart() { super.onStart(); cache = DataStore.loadCache(geocode, LoadFlags.LOAD_CACHE_OR_DB); if (cache == null) { Log.e("StaticMapsActivity.onCreate: cannot find the 
cache " + geocode); finish(); return; } setCacheTitleBar(cache); adapter = new StaticMapsAdapter(this); setListAdapter(adapter); } @Override public boolean onCreateOptionsMenu(final Menu menu) { getMenuInflater().inflate(R.menu.static_maps_activity_options, menu); menuRefresh = menu.findItem(R.id.menu_refresh); return super.onCreateOptionsMenu(menu); } @Override public void onResume() { super.onResume(); adapter.clear(); final Disposable load = loadAndDisplay(); resumeDisposables.add(load); waitDialog = ProgressDialog.show(this, null, res.getString(R.string.map_static_loading), true); waitDialog.setCancelable(true); waitDialog.setOnCancelListener(new DialogInterface.OnCancelListener() { @Override public void onCancel(final DialogInterface dialog) { load.dispose(); } }); } @Override public void onPause() { resumeDisposables.clear(); super.onPause(); } @NonNull private Disposable loadAndDisplay() { return loadMaps().observeOn(AndroidSchedulers.mainThread()).map(new Function<Bitmap, Bitmap>() { @Override public Bitmap apply(final Bitmap bitmap) throws Exception { adapter.add(bitmap); return bitmap; } }).ignoreElements().subscribe(new Action() { @Override public void run() { Dialogs.dismiss(waitDialog); if (adapter.isEmpty()) { if (download) { resumeDisposables.add(downloadStaticMaps().subscribe(new Consumer<Boolean>() { @Override public void accept(final Boolean succeeded) throws Exception { if (succeeded) { // Loading from disk will succeed this time AndroidSchedulers.mainThread().scheduleDirect(new Runnable() { @Override public void run() { adapter.clear(); resumeDisposables.add(loadAndDisplay()); } }); } else { showToast(res.getString(R.string.err_detail_google_maps_limit_reached)); } } })); } else { showToast(res.getString(R.string.err_detail_not_load_map_static)); finish(); } } else if (menuRefresh != null) { menuRefresh.setEnabled(true); } } }); } private Observable<Bitmap> loadMaps() { return Observable.range(1, StaticMapsProvider.MAPS_LEVEL_MAX).concatMap(new 
Function<Integer, Observable<Bitmap>>() { @Override public Observable<Bitmap> apply(final Integer zoomLevel) throws Exception { return Maybe.fromCallable(new Callable<Bitmap>() { @Override public Bitmap call() throws Exception { return waypointId != null ? StaticMapsProvider.getWaypointMap(geocode, cache.getWaypointById(waypointId), zoomLevel) : StaticMapsProvider.getCacheMap(geocode, zoomLevel); } }).toObservable().subscribeOn(Schedulers.io()); } }); } @OptionsItem(R.id.menu_refresh) void refreshMaps() { menuRefresh.setEnabled(false); downloadStaticMaps().toCompletable().observeOn(AndroidSchedulers.mainThread()).subscribe(new Action() { @Override public void run() throws Exception { menuRefresh.setEnabled(true); loadMaps(); } }); } private Single<Boolean> downloadStaticMaps() { if (waypointId == null) { showToast(res.getString(R.string.info_storing_static_maps)); return StaticMapsProvider.storeCacheStaticMap(cache).andThen(Single.fromCallable(new Callable<Boolean>() { @Override public Boolean call() throws Exception { return cache.hasStaticMap(); } })); } final Waypoint waypoint = cache.getWaypointById(waypointId); if (waypoint != null) { showToast(res.getString(R.string.info_storing_static_maps)); // refresh always removes old waypoint files StaticMapsProvider.removeWpStaticMaps(waypoint, geocode); return StaticMapsProvider.storeWaypointStaticMap(cache, waypoint).andThen(Single.fromCallable(new Callable<Boolean>() { @Override public Boolean call() throws Exception { return StaticMapsProvider.hasStaticMapForWaypoint(geocode, waypoint); } })); } showToast(res.getString(R.string.err_detail_not_load_map_static)); return Single.just(false); } @Override public void finish() { Dialogs.dismiss(waitDialog); super.finish(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.stratos.cep.extension; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; import org.apache.stratos.messaging.broker.publish.EventPublisher; import org.apache.stratos.messaging.broker.publish.EventPublisherPool; import org.apache.stratos.messaging.domain.topology.*; import org.apache.stratos.messaging.event.health.stat.MemberFaultEvent; import org.apache.stratos.messaging.message.receiver.topology.TopologyManager; import org.apache.stratos.messaging.util.MessagingUtil; import org.wso2.siddhi.core.config.SiddhiContext; import org.wso2.siddhi.core.event.StreamEvent; import org.wso2.siddhi.core.event.in.InEvent; import org.wso2.siddhi.core.event.in.InListEvent; import org.wso2.siddhi.core.persistence.ThreadBarrier; import org.wso2.siddhi.core.query.QueryPostProcessingElement; import org.wso2.siddhi.core.query.processor.window.RunnableWindowProcessor; import org.wso2.siddhi.core.query.processor.window.WindowProcessor; import org.wso2.siddhi.core.util.collection.queue.scheduler.ISchedulerSiddhiQueue; import org.wso2.siddhi.core.util.collection.queue.scheduler.SchedulerSiddhiQueue; import 
org.wso2.siddhi.core.util.collection.queue.scheduler.SchedulerSiddhiQueueGrid; import org.wso2.siddhi.query.api.definition.AbstractDefinition; import org.wso2.siddhi.query.api.expression.Expression; import org.wso2.siddhi.query.api.expression.Variable; import org.wso2.siddhi.query.api.expression.constant.IntConstant; import org.wso2.siddhi.query.api.expression.constant.LongConstant; import org.wso2.siddhi.query.api.extension.annotation.SiddhiExtension; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; /** * CEP window processor to handle faulty member instances. This window processor is responsible for * publishing MemberFault event if health stats are not received within a given time window. */ @SiddhiExtension(namespace = "stratos", function = "faultHandling") public class FaultHandlingWindowProcessor extends WindowProcessor implements RunnableWindowProcessor { private static final Logger log = Logger.getLogger(FaultHandlingWindowProcessor.class); private static final String ACTIVATE_TIMEOUT_KEY = "cep.fault.handler.extension.activate.timeout"; private static final int ACTIVATE_TIMEOUT = Integer.getInteger(ACTIVATE_TIMEOUT_KEY, 60 * 1000 * 15); private static final int TIME_OUT = 60 * 1000; private ScheduledExecutorService faultHandleScheduler; private ScheduledFuture<?> lastSchedule; private ThreadBarrier threadBarrier; private long timeToKeep; private ISchedulerSiddhiQueue<StreamEvent> window; private EventPublisher healthStatPublisher = EventPublisherPool .getPublisher(MessagingUtil.Topics.HEALTH_STAT_TOPIC.getTopicName()); private Map<String, Object> MemberFaultEventMap = new HashMap<String, Object>(); private Map<String, Object> memberFaultEventMessageMap = new HashMap<String, Object>(); // Map of member id's to their last 
received health event time stamp private ConcurrentHashMap<String, Long> memberTimeStampMap = new ConcurrentHashMap<String, Long>(); private volatile boolean isActive; private volatile boolean hasMemberTimeStampMapInitialized; private long startTime = System.currentTimeMillis(); // Event receiver to receive topology events published by cloud-controller private CEPTopologyEventReceiver cepTopologyEventReceiver = new CEPTopologyEventReceiver(this); // Stratos member id attribute index in stream execution plan private int memberIdAttrIndex; @Override protected void processEvent(InEvent event) { addDataToMap(event); } @Override protected void processEvent(InListEvent listEvent) { for (int i = 0, size = listEvent.getActiveEvents(); i < size; i++) { addDataToMap((InEvent) listEvent.getEvent(i)); } } /** * Add new entry to time stamp map from the received event. * * @param event Event received by Siddhi. */ private void addDataToMap(InEvent event) { if (!isActive) { log.info("Received first event. Marking fault handling window processor as active"); isActive = true; } String id = (String) event.getData()[memberIdAttrIndex]; //checking whether this member is the topology. //sometimes there can be a delay between publishing member terminated events //and actually terminating instances. Hence CEP might get events for already terminated members //so we are checking the topology for the member existence Member member = getMemberFromId(id); if (null == member) { log.warn(String.format("Member with [id] %s not found in the topology. Event rejected", id)); return; } if (StringUtils.isNotEmpty(id)) { memberTimeStampMap.put(id, event.getTimeStamp()); } else { log.warn("NULL member id found in the event received. 
Event rejected."); } if (log.isDebugEnabled()) { log.debug("Event received from [member-id] " + id + " [time-stamp] " + event.getTimeStamp()); } } @Override public Iterator<StreamEvent> iterator() { return window.iterator(); } @Override public Iterator<StreamEvent> iterator(String predicate) { if (siddhiContext.isDistributedProcessingEnabled()) { return ((SchedulerSiddhiQueueGrid<StreamEvent>) window).iterator(predicate); } else { return window.iterator(); } } /** * Retrieve the current activated members from the topology and initialize the timestamp map. * This will allow the system to recover from a restart * * @param topology Topology model object */ boolean loadTimeStampMapFromTopology(Topology topology) { long currentTimeStamp = System.currentTimeMillis(); if (topology == null || topology.getServices() == null) { return false; } // TODO make this efficient by adding APIs to messaging component for (Service service : topology.getServices()) { if (service.getClusters() != null) { for (Cluster cluster : service.getClusters()) { if (cluster.getMembers() != null) { for (Member member : cluster.getMembers()) { // we are checking faulty status only in previously activated members if (member != null && MemberStatus.Active.equals(member.getStatus())) { // Initialize the member time stamp map from the topology at the beginning memberTimeStampMap.putIfAbsent(member.getMemberId(), currentTimeStamp); } } } } } } hasMemberTimeStampMapInitialized = true; if (log.isInfoEnabled()) { log.info("Member timestamps were successfully loaded from the topology: [timestamps] " + Arrays.toString(memberTimeStampMap.entrySet().toArray())); } return true; } private Member getMemberFromId(String memberId) { if (StringUtils.isEmpty(memberId)) { return null; } if (TopologyManager.isInitialized()) { try { TopologyManager.acquireReadLock(); if (TopologyManager.getTopology().getServices() == null) { return null; } // TODO make this efficient by adding APIs to messaging component for (Service 
service : TopologyManager.getTopology().getServices()) { if (service.getClusters() != null) { for (Cluster cluster : service.getClusters()) { if (cluster.getMembers() != null) { for (Member member : cluster.getMembers()) { if (memberId.equals(member.getMemberId())) { return member; } } } } } } } catch (Exception e) { log.error("Error while reading topology" + e); } finally { TopologyManager.releaseReadLock(); } } return null; } private void publishMemberFault(Member member) { if (member == null) { log.warn("Failed to publish member fault event. Member object is null"); return; } log.info("Publishing member fault event for [member-id] " + member.getMemberId()); MemberFaultEvent memberFaultEvent = new MemberFaultEvent(member.getClusterId(), member.getClusterInstanceId(), member.getMemberId(), member.getPartitionId(), member.getNetworkPartitionId(), 0); memberFaultEventMessageMap.put("message", memberFaultEvent); healthStatPublisher.publish(MemberFaultEventMap, true); } @Override public void run() { try { // wait until the first event OR given timeout to expire in order to activate this window processor // this is to prevent false positives at the CEP startup if (!isActive && System.currentTimeMillis() - startTime > ACTIVATE_TIMEOUT) { log.info("Activation wait timeout has expired. 
Marking fault handling window processor as active"); isActive = true; } // do not process events until memberTimeStampMap is initialized and window processor is activated // memberTimeStampMap will be initialized only after receiving the complete topology event if (!(isActive && hasMemberTimeStampMapInitialized)) { return; } threadBarrier.pass(); for (Object o : memberTimeStampMap.entrySet()) { Map.Entry pair = (Map.Entry) o; long currentTime = System.currentTimeMillis(); Long eventTimeStamp = (Long) pair.getValue(); if ((currentTime - eventTimeStamp) > TIME_OUT) { String memberId = (String) pair.getKey(); Member member = getMemberFromId(memberId); if (member != null) { log.info("Faulty member detected [member-id] " + pair.getKey() + " with [last time-stamp] " + eventTimeStamp + " [time-out] " + TIME_OUT + " milliseconds"); publishMemberFault(member); } memberTimeStampMap.remove(memberId); } } if (log.isDebugEnabled()) { log.debug("Fault handling processor iteration completed with [time-stamp map length] " + memberTimeStampMap.size() + " [time-stamp map] " + memberTimeStampMap); } } catch (Throwable t) { log.error(t.getMessage(), t); } finally { if (lastSchedule != null) { lastSchedule.cancel(false); } lastSchedule = faultHandleScheduler.schedule(this, timeToKeep, TimeUnit.MILLISECONDS); } } @Override protected Object[] currentState() { return new Object[]{window.currentState()}; } @Override protected void restoreState(Object[] data) { window.restoreState(data); window.restoreState((Object[]) data[0]); window.reSchedule(); } @Override protected void init(Expression[] parameters, QueryPostProcessingElement nextProcessor, AbstractDefinition streamDefinition, String elementId, boolean async, SiddhiContext siddhiContext) { if (parameters[0] instanceof IntConstant) { timeToKeep = ((IntConstant) parameters[0]).getValue(); } else { timeToKeep = ((LongConstant) parameters[0]).getValue(); } String memberIdAttrName = ((Variable) parameters[1]).getAttributeName(); 
memberIdAttrIndex = streamDefinition.getAttributePosition(memberIdAttrName); if (this.siddhiContext.isDistributedProcessingEnabled()) { window = new SchedulerSiddhiQueueGrid<StreamEvent>(elementId, this, this.siddhiContext, this.async); } else { window = new SchedulerSiddhiQueue<StreamEvent>(this); } MemberFaultEventMap .put("org.apache.stratos.messaging.event.health.stat.MemberFaultEvent", memberFaultEventMessageMap); //Ordinary scheduling window.schedule(); if (log.isInfoEnabled()) { log.info(String.format("Fault handling window processor initialized with [timeToKeep] %s, " + "[memberIdAttrName] %s, [memberIdAttrIndex] %s, [distributed-enabled] %s, " + "[activate-timeout] %d", timeToKeep, memberIdAttrName, memberIdAttrIndex, siddhiContext.isDistributedProcessingEnabled(), ACTIVATE_TIMEOUT)); } } @Override public void schedule() { if (lastSchedule != null) { lastSchedule.cancel(false); } lastSchedule = faultHandleScheduler.schedule(this, timeToKeep, TimeUnit.MILLISECONDS); } @Override public void scheduleNow() { if (lastSchedule != null) { lastSchedule.cancel(false); } lastSchedule = faultHandleScheduler.schedule(this, 0, TimeUnit.MILLISECONDS); } @Override public void setScheduledExecutorService(ScheduledExecutorService scheduledExecutorService) { this.faultHandleScheduler = scheduledExecutorService; } @Override public void setThreadBarrier(ThreadBarrier threadBarrier) { this.threadBarrier = threadBarrier; } @Override public void destroy() { // terminate topology listener thread cepTopologyEventReceiver.destroy(); window = null; } ConcurrentHashMap<String, Long> getMemberTimeStampMap() { return memberTimeStampMap; } }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2011.09.09 at 01:22:27 PM CEST // package test; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.XmlValue; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="content-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;attribute name="specific-use" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;attribute ref="{http://www.w3.org/1999/xlink}type"/> * &lt;attribute ref="{http://www.w3.org/1999/xlink}href"/> * &lt;attribute ref="{http://www.w3.org/1999/xlink}role"/> * &lt;attribute ref="{http://www.w3.org/1999/xlink}title"/> * &lt;attribute ref="{http://www.w3.org/1999/xlink}show"/> * &lt;attribute ref="{http://www.w3.org/1999/xlink}actuate"/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "email") public class Email { @XmlValue protected String content; @XmlAttribute(name = "content-type") @XmlSchemaType(name = "anySimpleType") protected String contentType; 
@XmlAttribute(name = "specific-use") @XmlSchemaType(name = "anySimpleType") protected String specificUse; @XmlAttribute(namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String type; @XmlAttribute(namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String href; @XmlAttribute(namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String role; @XmlAttribute(namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String title; @XmlAttribute(namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String show; @XmlAttribute(namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String actuate; /** * Gets the value of the content property. * * @return * possible object is * {@link String } * */ public String getContent() { return content; } /** * Sets the value of the content property. * * @param value * allowed object is * {@link String } * */ public void setContent(String value) { this.content = value; } /** * Gets the value of the contentType property. * * @return * possible object is * {@link String } * */ public String getContentType() { return contentType; } /** * Sets the value of the contentType property. * * @param value * allowed object is * {@link String } * */ public void setContentType(String value) { this.contentType = value; } /** * Gets the value of the specificUse property. * * @return * possible object is * {@link String } * */ public String getSpecificUse() { return specificUse; } /** * Sets the value of the specificUse property. * * @param value * allowed object is * {@link String } * */ public void setSpecificUse(String value) { this.specificUse = value; } /** * Gets the value of the type property. 
* * @return * possible object is * {@link String } * */ public String getType() { return type; } /** * Sets the value of the type property. * * @param value * allowed object is * {@link String } * */ public void setType(String value) { this.type = value; } /** * Gets the value of the href property. * * @return * possible object is * {@link String } * */ public String getHref() { return href; } /** * Sets the value of the href property. * * @param value * allowed object is * {@link String } * */ public void setHref(String value) { this.href = value; } /** * Gets the value of the role property. * * @return * possible object is * {@link String } * */ public String getRole() { return role; } /** * Sets the value of the role property. * * @param value * allowed object is * {@link String } * */ public void setRole(String value) { this.role = value; } /** * Gets the value of the title property. * * @return * possible object is * {@link String } * */ public String getTitle() { return title; } /** * Sets the value of the title property. * * @param value * allowed object is * {@link String } * */ public void setTitle(String value) { this.title = value; } /** * Gets the value of the show property. * * @return * possible object is * {@link String } * */ public String getShow() { return show; } /** * Sets the value of the show property. * * @param value * allowed object is * {@link String } * */ public void setShow(String value) { this.show = value; } /** * Gets the value of the actuate property. * * @return * possible object is * {@link String } * */ public String getActuate() { return actuate; } /** * Sets the value of the actuate property. * * @param value * allowed object is * {@link String } * */ public void setActuate(String value) { this.actuate = value; } }
package com.perpetumobile.bit.fb; import java.io.File; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Map; import org.apache.commons.codec.binary.Base64; import org.apache.commons.fileupload.FileItem; import com.perpetumobile.bit.config.Config; import com.perpetumobile.bit.http.HttpManager; import com.perpetumobile.bit.http.HttpRequest; import com.perpetumobile.bit.orm.xml.SAXParserManager; import com.perpetumobile.bit.orm.xml.XMLRecord; import com.perpetumobile.bit.servlet.tools.URLTool; import com.perpetumobile.bit.util.Logger; import com.perpetumobile.bit.util.Util; /** * * @author Zoran Dukic */ public class FBUtil { static private Logger logger = new Logger(FBUtil.class); static public String getFBAppID(String appName) { String result = null; if(!Util.nullOrEmptyString(appName)) { result = Config.getInstance().getClassProperty(appName, "FB.AppID", ""); } else { result = Config.getInstance().getProperty("FB.AppID", ""); } return result; } static public String getFBAppSecret(String appName) { String result = null; if(!Util.nullOrEmptyString(appName)) { result = Config.getInstance().getClassProperty(appName, "FB.AppSecret", ""); } return result; } static public String parseFBSRCookie(String appName, String fbsrCookieStr) { String result = null; if(!Util.nullOrEmptyString(fbsrCookieStr)) { String[] val = fbsrCookieStr.split("\\."); if(val != null && val.length == 2) { try { String key = getFBAppSecret(appName); if(!Util.nullOrEmptyString(key)) { String sig = Util.toHex(Base64.decodeBase64(val[0])); if(!Util.nullOrEmptyString(sig)) { String md = Util.getHmac("HmacSHA256", key, val[1]); if(sig.equals(md)) { result = new String(Base64.decodeBase64(val[1]), "UTF8"); } } } else { result = new String(Base64.decodeBase64(val[1]), "UTF8"); } } catch (UnsupportedEncodingException e) { logger.error("FBUtil.parseFBSRCookie exception", e); } } } return result; } static public boolean isConnected(String 
fbid, String accessToken) throws UnsupportedEncodingException { String fbResponse = getGraphAPI(fbid, "", accessToken); return !Util.nullOrEmptyString(fbResponse) && fbResponse.indexOf("error") == -1; } static public String getGraphAPIUrl(String fbid, String what, String accessToken, Object params) throws UnsupportedEncodingException { StringBuffer buf = new StringBuffer("https://graph.facebook.com/"); boolean fbidAdded = false; if(!Util.nullOrEmptyString(fbid)) { buf.append(fbid); fbidAdded = true; } if (!Util.nullOrEmptyString(what)) { if(fbidAdded) buf.append("/"); buf.append(what); } if(!Util.nullOrEmptyString(accessToken)) { int index = buf.indexOf("?"); if(index != -1) { buf.append("&access_token="); } else { buf.append("?access_token="); } buf.append(accessToken); } if(params instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> p = (Map<String, Object>) params; String par = ""; int index = buf.indexOf("?"); if(index == -1) par = "?"; for (Map.Entry<String, Object> me : p.entrySet()) { if(me.getValue() != null) { if(par.length() != 1) par += "&"; par += me.getKey() + "=" + URLEncoder.encode(me.getValue().toString(), "UTF-8"); //works for String, JSONObject, JSONArray } } buf.append(par); } return buf.toString(); } static public String getGraphAPIUrl(String fbid, String what, String accessToken) throws UnsupportedEncodingException { return getGraphAPIUrl(fbid, what, accessToken, null); } static public String getGraphAPI(String fbid, String what, String accessToken, Object params) throws UnsupportedEncodingException { String result = null; String url = getGraphAPIUrl(fbid, what, accessToken, params); if(!Util.nullOrEmptyString(url)) { result = HttpManager.getInstance().get(url).getPageSource(); } return result; } static public String getGraphAPI(String fbid, String what, String accessToken) throws UnsupportedEncodingException { return getGraphAPI(fbid, what, accessToken, null); } static public String getGraphObject(String what, String 
accessToken) throws UnsupportedEncodingException { return getGraphAPI(null, what, accessToken); } static public String postGraphAPI(String fbid, String what, String accessToken, Object params) throws UnsupportedEncodingException { String result = null; String url = getGraphAPIUrl(fbid, what, accessToken, params); if(!Util.nullOrEmptyString(url)) { result = HttpManager.getInstance().post(url).getPageSource(); } return result; } static public String postImageGraphAPI(String fbid, String what, String accessToken, Object params, File imageFile) throws UnsupportedEncodingException { String result = null; String url = getGraphAPIUrl(fbid, what, accessToken, params); String ext = imageFile.getName().substring(imageFile.getName().lastIndexOf('.') + 1); String type = ""; if("jpeg".equals(ext) || "jpg".equals(ext)) type = "image/jpeg"; else if ("png".equals(ext)) type = "image/png"; else if ("bmp".equals(ext)) type = "image/bmp"; else if ("gif".equals(ext)) type = "image/gif"; else if ("zip".equals(ext)) type = "application/zip"; else return null; //TODO throw an exception if(!Util.nullOrEmptyString(url)) { result = HttpManager.getInstance().post(url, "image", imageFile, type).getPageSource(); } return result; } static public String postGraphAPI(String fbid, String what, String accessToken) throws UnsupportedEncodingException { return postGraphAPI(fbid, what, accessToken, null); } static public String postGraphAPI(String fbid, String what, String fieldName, FileItem fileItem, String accessToken) throws UnsupportedEncodingException { String result = null; String url = getGraphAPIUrl(fbid, what, accessToken); if(!Util.nullOrEmptyString(url)) { result = HttpManager.getInstance().post(url, fieldName, fileItem.get(), fileItem.getName(), fileItem.getContentType()).getPageSource(); } return result; } static public String postGraphObject(String what, String accessToken) throws UnsupportedEncodingException { return postGraphAPI(null, what, accessToken); } static public String 
deleteGraphAPI(String fbid, String what, String accessToken, Object params) throws UnsupportedEncodingException { String result = null; String url = getGraphAPIUrl(fbid, what, accessToken, params); if(!Util.nullOrEmptyString(url)) { result = HttpManager.getInstance().delete(url).getPageSource(); } return result; } static public String getFQLUrl(String query, String accessToken) { StringBuffer buf = new StringBuffer("https://api.facebook.com/method/fql.query?query="); buf.append(URLTool.encode(query, "UTF8")); if(!Util.nullOrEmptyString(accessToken)) { buf.append("&access_token="); buf.append(accessToken); } return buf.toString(); } static public ArrayList<? extends XMLRecord> getFQL(String tableName, String query, String accessToken) { ArrayList<? extends XMLRecord> result = new ArrayList<XMLRecord>(); String url = getFQLUrl(query, accessToken); if(!Util.nullOrEmptyString(url)) { try { XMLRecord root = SAXParserManager.getInstance().parseImpl(new HttpRequest(url), false, "FQL", "fql_query_response"); if(root != null) { result = root.getXMLRecords("FQL", "fql_query_response", tableName); } } catch (Exception e) { logger.error("FBUtil.getFQL exception", e); } } return result; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.factories;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions.OptionBuilder;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.streaming.api.functions.source.StatefulSequenceSource;
import org.apache.flink.streaming.api.functions.source.datagen.DataGenerator;
import org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSource;
import org.apache.flink.streaming.api.functions.source.datagen.RandomGenerator;
import org.apache.flink.streaming.api.functions.source.datagen.SequenceGenerator;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.SourceFunctionProvider;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.sources.StreamTableSource;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.utils.TableSchemaUtils;

import java.util.HashSet;
import java.util.Set;

import static org.apache.flink.configuration.ConfigOptions.key;

/**
 * Factory for creating configured instances of {@link DataGenTableSource} in a stream environment.
 *
 * <p>Per-field generator behavior is configured with options of the form
 * {@code fields.<name>.kind} ({@code random} or {@code sequence}) plus kind-specific
 * options such as {@code min}/{@code max}/{@code length} (random) and
 * {@code start}/{@code end} (sequence).
 */
@PublicEvolving
public class DataGenTableSourceFactory implements DynamicTableSourceFactory {

	public static final String IDENTIFIER = "datagen";

	public static final ConfigOption<Long> ROWS_PER_SECOND = key("rows-per-second")
			.longType()
			.defaultValue(Long.MAX_VALUE)
			.withDescription("Rows per second to control the emit rate.");

	// Option-name fragments used to build per-field keys, e.g. "fields.f0.kind".
	public static final String FIELDS = "fields";
	public static final String KIND = "kind";
	public static final String START = "start";
	public static final String END = "end";
	public static final String MIN = "min";
	public static final String MAX = "max";
	public static final String LENGTH = "length";

	public static final String SEQUENCE = "sequence";
	public static final String RANDOM = "random";

	@Override
	public String factoryIdentifier() {
		return IDENTIFIER;
	}

	@Override
	public Set<ConfigOption<?>> requiredOptions() {
		return new HashSet<>();
	}

	@Override
	public Set<ConfigOption<?>> optionalOptions() {
		Set<ConfigOption<?>> options = new HashSet<>();
		options.add(ROWS_PER_SECOND);
		return options;
	}

	@Override
	public DynamicTableSource createDynamicTableSource(Context context) {
		Configuration options = new Configuration();
		context.getCatalogTable().getOptions().forEach(options::setString);

		TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(context.getCatalogTable().getSchema());

		// One generator per physical column, driven by the per-field options.
		DataGenerator[] fieldGenerators = new DataGenerator[tableSchema.getFieldCount()];
		for (int i = 0; i < fieldGenerators.length; i++) {
			fieldGenerators[i] = createDataGenerator(
					tableSchema.getFieldName(i).get(),
					tableSchema.getFieldDataType(i).get(),
					options);
		}

		return new DataGenTableSource(fieldGenerators, tableSchema, options.get(ROWS_PER_SECOND));
	}

	/**
	 * Creates the generator for a single field, dispatching on the configured kind
	 * ({@code random} is the default).
	 *
	 * @throws ValidationException if the kind is neither {@code random} nor {@code sequence}
	 */
	private DataGenerator createDataGenerator(String name, DataType type, ReadableConfig options) {
		String genType = options.get(
				key(FIELDS + "." + name + "." + KIND).stringType().defaultValue(RANDOM));
		switch (genType) {
			case RANDOM:
				return createRandomGenerator(name, type, options);
			case SEQUENCE:
				return createSequenceGenerator(name, type, options);
			default:
				throw new ValidationException("Unsupported generator type: " + genType);
		}
	}

	/**
	 * Creates a random-value generator for the field, honoring optional
	 * {@code min}/{@code max} bounds (numeric types) or {@code length} (string types).
	 *
	 * @throws ValidationException for unsupported logical types
	 */
	private DataGenerator createRandomGenerator(String name, DataType type, ReadableConfig options) {
		ConfigOption<Integer> lenKey = key(FIELDS + "." + name + "." + LENGTH)
				.intType().defaultValue(100);
		OptionBuilder minKey = key(FIELDS + "." + name + "." + MIN);
		OptionBuilder maxKey = key(FIELDS + "." + name + "." + MAX);
		switch (type.getLogicalType().getTypeRoot()) {
			case BOOLEAN:
				return RandomGenerator.booleanGenerator();
			case CHAR:
			case VARCHAR:
				int length = options.get(lenKey);
				return new RandomGenerator<StringData>() {
					@Override
					public StringData next() {
						return StringData.fromString(random.nextHexString(length));
					}
				};
			case TINYINT:
				return RandomGenerator.byteGenerator(
						options.get(minKey.intType().defaultValue((int) Byte.MIN_VALUE)).byteValue(),
						options.get(maxKey.intType().defaultValue((int) Byte.MAX_VALUE)).byteValue());
			case SMALLINT:
				return RandomGenerator.shortGenerator(
						options.get(minKey.intType().defaultValue((int) Short.MIN_VALUE)).shortValue(),
						options.get(maxKey.intType().defaultValue((int) Short.MAX_VALUE)).shortValue());
			case INTEGER:
				return RandomGenerator.intGenerator(
						options.get(minKey.intType().defaultValue(Integer.MIN_VALUE)),
						options.get(maxKey.intType().defaultValue(Integer.MAX_VALUE)));
			case BIGINT:
				return RandomGenerator.longGenerator(
						options.get(minKey.longType().defaultValue(Long.MIN_VALUE)),
						options.get(maxKey.longType().defaultValue(Long.MAX_VALUE)));
			case FLOAT:
				return RandomGenerator.floatGenerator(
						options.get(minKey.floatType().defaultValue(Float.MIN_VALUE)),
						options.get(maxKey.floatType().defaultValue(Float.MAX_VALUE)));
			case DOUBLE:
				return RandomGenerator.doubleGenerator(
						options.get(minKey.doubleType().defaultValue(Double.MIN_VALUE)),
						options.get(maxKey.doubleType().defaultValue(Double.MAX_VALUE)));
			default:
				throw new ValidationException("Unsupported type: " + type);
		}
	}

	/**
	 * Creates a sequence generator for the field. Both {@code start} and {@code end}
	 * are required; their absence is reported with the full property key.
	 *
	 * @throws ValidationException if a required property is missing or the type is unsupported
	 */
	private DataGenerator createSequenceGenerator(String name, DataType type, ReadableConfig options) {
		// BUGFIX: the error messages previously concatenated the OptionBuilder itself
		// ("... property '" + startKey + "'"), which has no meaningful toString() and
		// printed an object identity string. Build the key name once and report it.
		String startKeyName = FIELDS + "." + name + "." + START;
		String endKeyName = FIELDS + "." + name + "." + END;
		OptionBuilder startKey = key(startKeyName);
		OptionBuilder endKey = key(endKeyName);

		options.getOptional(startKey.stringType().noDefaultValue()).orElseThrow(
				() -> new ValidationException("Could not find required property '" + startKeyName + "'."));
		options.getOptional(endKey.stringType().noDefaultValue()).orElseThrow(
				() -> new ValidationException("Could not find required property '" + endKeyName + "'."));

		switch (type.getLogicalType().getTypeRoot()) {
			case CHAR:
			case VARCHAR:
				return new SequenceGenerator<StringData>(
						options.get(startKey.longType().noDefaultValue()),
						options.get(endKey.longType().noDefaultValue())) {
					@Override
					public StringData next() {
						return StringData.fromString(valuesToEmit.poll().toString());
					}
				};
			case TINYINT:
				return SequenceGenerator.byteGenerator(
						options.get(startKey.intType().noDefaultValue()).byteValue(),
						options.get(endKey.intType().noDefaultValue()).byteValue());
			case SMALLINT:
				return SequenceGenerator.shortGenerator(
						options.get(startKey.intType().noDefaultValue()).shortValue(),
						options.get(endKey.intType().noDefaultValue()).shortValue());
			case INTEGER:
				return SequenceGenerator.intGenerator(
						options.get(startKey.intType().noDefaultValue()),
						options.get(endKey.intType().noDefaultValue()));
			case BIGINT:
				return SequenceGenerator.longGenerator(
						options.get(startKey.longType().noDefaultValue()),
						options.get(endKey.longType().noDefaultValue()));
			case FLOAT:
				// NOTE(review): reads the bounds as int options narrowed to short, matching
				// SequenceGenerator.floatGenerator's parameter types — confirm against the
				// SequenceGenerator API if widening is ever desired.
				return SequenceGenerator.floatGenerator(
						options.get(startKey.intType().noDefaultValue()).shortValue(),
						options.get(endKey.intType().noDefaultValue()).shortValue());
			case DOUBLE:
				return SequenceGenerator.doubleGenerator(
						options.get(startKey.intType().noDefaultValue()),
						options.get(endKey.intType().noDefaultValue()));
			default:
				throw new ValidationException("Unsupported type: " + type);
		}
	}

	/**
	 * A {@link StreamTableSource} that emits each number from a given interval exactly once,
	 * possibly in parallel. See {@link StatefulSequenceSource}.
	 */
	static class DataGenTableSource implements ScanTableSource {

		private final DataGenerator[] fieldGenerators;
		private final TableSchema schema;
		private final long rowsPerSecond;

		private DataGenTableSource(DataGenerator[] fieldGenerators, TableSchema schema, long rowsPerSecond) {
			this.fieldGenerators = fieldGenerators;
			this.schema = schema;
			this.rowsPerSecond = rowsPerSecond;
		}

		@Override
		public ScanRuntimeProvider getScanRuntimeProvider(Context context) {
			// Unbounded source: the second argument marks it as non-bounded.
			return SourceFunctionProvider.of(createSource(), false);
		}

		@VisibleForTesting
		DataGeneratorSource<RowData> createSource() {
			return new DataGeneratorSource<>(new DataGenTableSource.RowGenerator(), rowsPerSecond);
		}

		@Override
		public DynamicTableSource copy() {
			return new DataGenTableSource(fieldGenerators, schema, rowsPerSecond);
		}

		@Override
		public String asSummaryString() {
			return "DataGenTableSource";
		}

		@Override
		public ChangelogMode getChangelogMode() {
			return ChangelogMode.insertOnly();
		}

		/** Assembles one row per call by pulling the next value from every field generator. */
		private class RowGenerator implements DataGenerator<RowData> {

			@Override
			public void open(
					String name,
					FunctionInitializationContext context,
					RuntimeContext runtimeContext) throws Exception {
				for (int i = 0; i < fieldGenerators.length; i++) {
					fieldGenerators[i].open(schema.getFieldName(i).get(), context, runtimeContext);
				}
			}

			@Override
			public boolean hasNext() {
				// The row source is exhausted as soon as any single field generator is.
				for (DataGenerator generator : fieldGenerators) {
					if (!generator.hasNext()) {
						return false;
					}
				}
				return true;
			}

			@Override
			public RowData next() {
				GenericRowData row = new GenericRowData(schema.getFieldCount());
				for (int i = 0; i < fieldGenerators.length; i++) {
					row.setField(i, fieldGenerators[i].next());
				}
				return row;
			}
		}
	}
}
package com.soriole.web.webrtc_signaling_server.domain;

import com.google.common.collect.Maps;
import com.soriole.web.webrtc_signaling_server.MessageMatcher;
import com.soriole.web.webrtc_signaling_server.api.dto.NextRTCEvent;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.soriole.web.webrtc_signaling_server.BaseTest;
import com.soriole.web.webrtc_signaling_server.repository.Members;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;

import javax.websocket.CloseReason;
import javax.websocket.Session;
import java.util.HashMap;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;

/**
 * Integration tests for the BROADCAST conversation flow of the signaling server:
 * create/join handshake, offer/answer SDP exchange, ICE candidate relay, and
 * member unregistration. Sessions are mocked; MessageMatcher captures the
 * messages the server would deliver to each session.
 */
@ContextConfiguration(classes = {ServerEventCheck.class, LocalStreamCreated2.class})
public class BroadcastServerTest extends BaseTest {

    @Rule
    public ExpectedException expect = ExpectedException.none();

    @Autowired
    private Server server;

    @Autowired
    private Members members;

    @Autowired
    protected ServerEventCheck eventCheckerCall;

    @Autowired
    protected LocalStreamCreated2 eventLocalStream;

    @Test
    public void shouldCreateConversationOnCreateSignal() throws Exception {
        // given
        Session session = mockSession("s1");
        server.register(session);

        // when
        server.handle(Message.create()//
                .signal("create")//
                .custom(broadcast())
                .build(), session);

        // then: session-registered + conversation-created events are expected
        List<NextRTCEvent> events = eventCheckerCall.getEvents();
        assertThat(events.size(), is(2));
    }

    @Test
    public void shouldCreateConversation() throws Exception {
        // given
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        Session s1 = mockSession("s1", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        server.register(s1);
        server.register(s2);

        // when
        server.handle(Message.create()//
                .signal("create")//
                .custom(broadcast())
                .build(), s1);

        // then: only the creator is notified; the bystander receives nothing
        assertThat(s1Matcher.getMessages().size(), is(1));
        assertThat(s1Matcher.getMessage().getSignal(), is("created"));
        assertThat(s2Matcher.getMessages().size(), is(0));
    }

    @Test
    public void shouldCreateConversationThenJoinAndSendOfferRequest() throws Exception {
        // given
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        Session s1 = mockSession("s1", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        server.register(s1);
        server.register(s2);

        // when: s1 creates, then s2 joins using the conversation key from "created"
        server.handle(Message.create()//
                .signal("create")//
                .custom(broadcast())
                .build(), s1);
        String conversationKey = s1Matcher.getMessage().getContent();
        // s1Matcher.reset();
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s2);

        // then: broadcaster sees created + newJoined + offerRequest; joiner sees joined
        assertThat(s1Matcher.getMessages().size(), is(3));
        assertMessage(s1Matcher, 0, EMPTY, "s1", "created", conversationKey);
        assertMessage(s1Matcher, 1, "s2", "s1", "newJoined", "s2");
        assertMessage(s1Matcher, 2, "s2", "s1", "offerRequest", EMPTY);
        assertThat(s2Matcher.getMessages().size(), is(1));
        assertMessage(s2Matcher, 0, EMPTY, "s2", "joined", conversationKey);
    }

    @Test
    public void shouldCreateConversationJoinMemberAndPassOfferResponseToRestMembers() throws Exception {
        // given: one broadcaster (s1) and one joined audience member (s2)
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        Session s1 = mockSession("s1", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        server.register(s1);
        server.register(s2);
        server.handle(Message.create()//
                .custom(broadcast())
                .signal("create")//
                .build(), s1);
        String conversationKey = s1Matcher.getMessage().getContent();
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s2);
        s1Matcher.reset();
        s2Matcher.reset();

        // when
        // s2 has to create local stream
        server.handle(Message.create()//
                .to("s2")//
                .signal("offerResponse")//
                .content("s2 spd")//
                .build(), s1);

        // then: the offerResponse is forwarded to s2 as an answerRequest
        assertThat(s2Matcher.getMessages().size(), is(1));
        assertMessage(s2Matcher, 0, "s1", "s2", "answerRequest", "s2 spd");
        assertThat(s1Matcher.getMessages().size(), is(0));
    }

    @Test
    public void shouldCreateConversationJoinMemberAndPassOfferResponseToRestTwoMembers() throws Exception {
        // given: broadcaster s1 plus two audience members s2, s3
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        MessageMatcher s3Matcher = new MessageMatcher();
        Session s1 = mockSession("s1", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        Session s3 = mockSession("s3", s3Matcher);
        server.register(s1);
        server.register(s2);
        server.register(s3);
        server.handle(Message.create()//
                .signal("create")//
                .custom(broadcast())
                .build(), s1);
        String conversationKey = s1Matcher.getMessage().getContent();
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s2);
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s3);
        s1Matcher.reset();
        s2Matcher.reset();
        s3Matcher.reset();

        // when
        // s2 has to create local stream
        server.handle(Message.create()//
                .to("s1")//
                .signal("offerResponse")//
                .content("s2 spd")//
                .build(), s2);
        // s3 has to create local stream
        server.handle(Message.create()//
                .to("s1")//
                .signal("offerResponse")//
                .content("s3 spd")//
                .build(), s3);

        // then: each member gets its own answerRequest; the broadcaster gets nothing back
        assertThat(s2Matcher.getMessages().size(), is(1));
        assertMessage(s2Matcher, 0, "s1", "s2", "answerRequest", "s2 spd");
        assertThat(s3Matcher.getMessages().size(), is(1));
        assertMessage(s3Matcher, 0, "s1", "s3", "answerRequest", "s3 spd");
        assertThat(s1Matcher.getMessages().size(), is(0));
    }

    @Test
    public void shouldExchangeSpds() throws Exception {
        // given: full create/join/offerResponse handshake already performed
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        Session s1 = mockSession("s1", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        server.register(s1);
        server.register(s2);
        server.handle(Message.create()//
                .signal("create")//
                .custom(broadcast())
                .build(), s1); // -> created
        String conversationKey = s1Matcher.getMessage().getContent();
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s2); // -> joined
        // -> offerRequest
        server.handle(Message.create()//
                .to("s1")//
                .signal("offerResponse")//
                .content("s2 spd")//
                .build(), s2); // -> answerRequest
        s1Matcher.reset();
        s2Matcher.reset();

        // when
        server.handle(Message.create()//
                .to("s2")//
                .signal("answerResponse")//
                .content("s1 spd")//
                .build(), s1);

        // then: NOTE(review) — the "finalize" lands on s1's matcher here; presumably the
        // mock wiring delivers it there. Verify against mockSession if this looks odd.
        assertThat(s1Matcher.getMessages().size(), is(1));
        assertMessage(s1Matcher, 0, "s2", "s1", "finalize", "s1 spd");
        assertThat(s2Matcher.getMessages().size(), is(0));
    }

    @Test
    public void shouldExchangeCandidates() throws Exception {
        // given: complete SDP handshake between broadcaster and s2
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        Session broadcaster = mockSession("broadcaster", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        server.register(broadcaster);
        server.register(s2);
        server.handle(Message.create()//
                .signal("create")//
                .custom(broadcast())
                .build(), broadcaster); // -> created
        String conversationKey = s1Matcher.getMessage().getContent();
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s2); // -> joined
        // -> offerRequest
        server.handle(Message.create()//
                .to("broadcaster")//
                .signal("offerResponse")//
                .content("s2 spd")//
                .build(), s2); // -> answerRequest
        server.handle(Message.create()//
                .to("s2")//
                .signal("answerResponse")//
                .content("broadcaster spd")//
                .build(), broadcaster); // -> finalize
        s1Matcher.reset();
        s2Matcher.reset();
        server.handle(Message.create()//
                .to("broadcaster")//
                .signal("candidate")//
                .content("candidate s2")//
                .build(), s2);
        server.handle(Message.create()//
                .to("s2")//
                .signal("candidate")//
                .content("candidate broadcaster")//
                .build(), broadcaster);

        // when

        // then: each side receives exactly the candidate addressed to it
        assertThat(s1Matcher.getMessages().size(), is(1));
        assertThat(s2Matcher.getMessages().size(), is(1));
        assertMessage(s2Matcher, 0, "broadcaster", "s2", "candidate", "candidate broadcaster");
        assertMessage(s1Matcher, 0, "s2", "broadcaster", "candidate", "candidate s2");
    }

    @Test
    public void shouldUnregisterSession() throws Exception {
        // given: conversation with broadcaster s1 and member s2
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        Session s1 = mockSession("s1", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        server.register(s1);
        server.register(s2);
        server.handle(Message.create()//
                .custom(broadcast())
                .signal("create")//
                .build(), s1); // -> created
        String conversationKey = s1Matcher.getMessage().getContent();
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s2);
        s1Matcher.reset();
        s2Matcher.reset();
        server.unregister(s1, mock(CloseReason.class));

        // when

        // then: the remaining member is told the broadcaster left
        assertThat(s1Matcher.getMessages().size(), is(0));
        assertThat(s2Matcher.getMessages().size(), is(2));
        assertMessage(s2Matcher, 0, "s1", "s2", "left", "s1");
    }

    @Test
    public void shouldInformAudienceAboutMissingBroadcaster() throws Exception {
        // given: same setup as shouldUnregisterSession
        MessageMatcher s1Matcher = new MessageMatcher();
        MessageMatcher s2Matcher = new MessageMatcher();
        Session s1 = mockSession("s1", s1Matcher);
        Session s2 = mockSession("s2", s2Matcher);
        server.register(s1);
        server.register(s2);
        server.handle(Message.create()//
                .custom(broadcast())
                .signal("create")//
                .build(), s1); // -> created
        String conversationKey = s1Matcher.getMessage().getContent();
        server.handle(Message.create()//
                .signal("join")//
                .content(conversationKey)//
                .build(), s2);
        s1Matcher.reset();
        s2Matcher.reset();
        server.unregister(s1, mock(CloseReason.class));

        // when

        // then: audience gets "left" followed by "end" — the broadcast is over
        assertThat(s1Matcher.getMessages().size(), is(0));
        assertThat(s2Matcher.getMessages().size(), is(2));
        assertMessage(s2Matcher, 0, "s1", "s2", "left", "s1");
        assertMessage(s2Matcher, 1, "s1", "s2", "end", conversationKey);
    }

    // Custom attributes marking a conversation as a BROADCAST (one-to-many) conversation.
    private HashMap<String, String> broadcast() {
        HashMap<String, String> custom = Maps.newHashMap();
        custom.put("type", "BROADCAST");
        return custom;
    }

    @Before
    public void resetObjects() {
        eventCheckerCall.reset();
        eventLocalStream.reset();
        // Clear any members left over from a previous test run.
        members.unregisterBy(mockSession("s1"), null);
        members.unregisterBy(mockSession("s2"), null);
        members.unregisterBy(mockSession("s3"), null);
    }
}
/** * Copyright 2011 Google Inc. * Copyright 2013-2014 Ronald W Hoffman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ScripterRon.BitcoinCore; import java.io.EOFException; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * <p>The Bitcoin block chain contains all of the transactions that have occurred and is available to everyone. * The block chain consists of a series of blocks starting with the genesis block (block 0) and continuing * to the chain head (the latest block in the chain).</p> * * <p>Each block is composed of one or more transactions. The first transaction is called the coinbase transaction * and it assigns the block reward to the miner who solved the block hash. The remaining transactions move coins * from Input A to Output B. A single transaction can contain multiple inputs and multiple outputs. 
The sum of * the inputs minus the sum of the output represents the mining fee for that transaction.</p> * * <p>A block has the following format:</p> * <pre> * Size Field Description * ==== ===== =========== * 80 bytes BlockHeader Consists of 6 fields that are hashed to calculate the block hash * VarInt TxCount Number of transactions in the block * Variable Transactions The transactions in the block * </pre> * * <p>The block header has the following format:</p> * <pre> * Size Field Description * ==== ===== =========== * 4 bytes Version The block version number * 32 bytes PrevBlockHash The hash of the preceding block in the chain * 32 byte MerkleRoot The Merkle root for the transactions in the block * 4 bytes Time The time the block was mined * 4 bytes Difficulty The target difficulty * 4 bytes Nonce The nonce used to generate the required hash *</pre> */ public class Block implements ByteSerializable { /** The serialized byte stream */ private byte[] blockData; /** The block version */ private int blockVersion; /** The block hash calculated from the block header */ private Sha256Hash blockHash; /** The hash for the previous block in the chain */ private Sha256Hash prevBlockHash; /** The Merkle root for the transactions in the block */ private Sha256Hash merkleRoot; /** The Merkle tree for the transaction in the block */ private List<byte[]> merkleTree; /** The block timestamp */ private long timeStamp; /** The target difficulty */ private long targetDifficulty; /** The nonce */ private int nonce; /** The transactions contained in the block */ private List<Transaction> transactions; /** * Create an empty block for use by subclasses */ protected Block() { } /** * Create a block from a serialized byte array * * @param inBytes Byte array containing the serialized data * @param doVerify TRUE if the block structure should be verified * @throws EOFException End-of-data while processing byte stream * @throws VerificationException Block verification failed */ public 
Block(byte[] inBytes, boolean doVerify) throws EOFException, VerificationException { this(inBytes, 0, inBytes.length, doVerify); } /** * Create a block from a serialized byte array * * @param inBytes Byte array containing the serialized data * @param inOffset Starting offset within the array * @param inLength Length of the serialized data * @param doVerify TRUE if the block structure should be verified * @throws EOFException Serialized byte stream is too short * @throws VerificationException Block verification failed */ public Block(byte[] inBytes, int inOffset, int inLength, boolean doVerify) throws EOFException, VerificationException { this(new SerializedBuffer(inBytes, inOffset, inLength), doVerify); } /** * Create a block from a serialized buffer * * @param inBuffer Serialized buffer * @param doVerify TRUE if the block structure should be verified * @throws EOFException Serialized byte stream is too short * @throws VerificationException Block verification failed */ public Block(SerializedBuffer inBuffer, boolean doVerify) throws EOFException, VerificationException { // // We must have at least 80 bytes // if (inBuffer.available() < BlockHeader.HEADER_SIZE) throw new EOFException("Block header truncated"); // // Compute the block hash from the serialized block header // int startPosition = inBuffer.getPosition(); blockHash = new Sha256Hash(Utils.reverseBytes(Utils.doubleDigest(inBuffer.getBytes(BlockHeader.HEADER_SIZE)))); inBuffer.setPosition(startPosition); // // Read the block header // readHeader(inBuffer); // // Read the transactions // readTransactions(inBuffer); // // Verify the block and its transactions. Note that transaction signatures and connected // outputs will be verified when the block is added to the block chain. 
// if (doVerify) verifyBlock(); // // Save a copy of the serialized byte stream // inBuffer.setSegmentStart(startPosition); blockData = inBuffer.getSegmentBytes(); } /** * Write the serialized block data to the output buffer * * @param outBuffer Output buffer * @return Output buffer */ @Override public SerializedBuffer getBytes(SerializedBuffer outBuffer) { outBuffer.putBytes(blockData); return outBuffer; } /** * Return the serialized block data * * @return Byte array containing the serialized block */ @Override public byte[] getBytes() { return blockData; } /** * Write the serialized block header to the output buffer * * @param outBuffer Output buffer * @return Output buffer */ public SerializedBuffer getHeaderBytes(SerializedBuffer outBuffer) { outBuffer.putBytes(blockData, 0, BlockHeader.HEADER_SIZE); return outBuffer; } /** * Return the serialized block header * * @return Byte array containing just the block header */ public byte[] getHeaderBytes() { return Arrays.copyOfRange(blockData, 0, BlockHeader.HEADER_SIZE); } /** * <p>Returns the block version. 
Only Version 1 and Version 2 blocks are supported.</p> * <ul> * <li>Blocks created before BIP 34 are Version 1 and do not contain the chain height * in the coinbase transaction input script</li> * <li>Blocks created after BIP 34 are Version 2 and contain the chain height in the coinbase * transaction input script</li> * </ul> * * @return Block version */ public int getVersion() { return blockVersion; } /** * Returns the time the block was mined * * @return The block timestamp in seconds since the Unix epoch (Jan 1, 1970) */ public long getTimeStamp() { return timeStamp; } /** * Returns the block hash calculated over the block header * * @return Block hash */ public Sha256Hash getHash() { return blockHash; } /** * Returns the block hash as a formatted hex string * * @return Hex string */ public String getHashAsString() { return blockHash.toString(); } /** * Returns the hash of the previous block in the chain * * @return Previous block hash */ public Sha256Hash getPrevBlockHash() { return prevBlockHash; } /** * Returns the Merkle root * * @return Merkle root */ public Sha256Hash getMerkleRoot() { return merkleRoot; } /** * Returns the Merkle tree * * @return Merkle tree */ public List<byte[]> getMerkleTree() { if (merkleTree == null) merkleTree = buildMerkleTree(); return merkleTree; } /** * Returns the target difficulty in compact form * * @return Target difficulty */ public long getTargetDifficulty() { return targetDifficulty; } /** * Returns the target difficulty as a 256-bit value that can be compared to a SHA-256 hash. * Inside a block. the target is represented using the compact form. * * @return The difficulty target */ public BigInteger getTargetDifficultyAsInteger() { return Utils.decodeCompactBits(targetDifficulty); } /** * Returns the work represented by this block * * Work is defined as the number of tries needed to solve a block in the * average case. As the target gets lower, the amount of work goes up. 
* * @return The work represented by this block */ public BigInteger getWork() { BigInteger target = getTargetDifficultyAsInteger(); return BlockHeader.LARGEST_HASH.divide(target.add(BigInteger.ONE)); } /** * Returns the block nonce * * @return Block nonce */ public int getNonce() { return nonce; } /** * Returns the transactions in this block * * @return Transaction list */ public List<Transaction> getTransactions() { return transactions; } /** * Calculates the Merkle root from the block transactions * * @return Merkle root */ private Sha256Hash calculateMerkleRoot() { if (merkleTree == null) merkleTree = buildMerkleTree(); return new Sha256Hash(merkleTree.get(merkleTree.size()-1)); } /** * Builds the Merkle tree from the block transactions * * @return List of byte arrays representing the nodes in the Merkle tree */ private List<byte[]> buildMerkleTree() { // // The Merkle root is based on a tree of hashes calculated from the transactions: // // root // / \ // A B // / \ / \ // t1 t2 t3 t4 // // The tree is represented as a list: t1,t2,t3,t4,A,B,root where each entry is a hash // // The hashing algorithm is double SHA-256. The leaves are a hash of the serialized contents of the transaction. // The interior nodes are hashes of the concatenation of the two child hashes. // // This structure allows the creation of proof that a transaction was included into a block without having to // provide the full block contents. Instead, you can provide only a Merkle branch. For example to prove tx2 was // in a block you can just provide tx2, the hash(tx1) and B. Now the other party has everything they need to // derive the root, which can be checked against the block header. These proofs are useful when we // want to download partial block contents. // // Note that if the number of transactions is not even, the last tx is repeated to make it so. 
// A tree with 5 transactions would look like this: // // root // / \ // 4 5 // / \ / \ // 1 2 3 3 // / \ / \ / \ // t1 t2 t3 t4 t5 t5 // ArrayList<byte[]> tree = new ArrayList<>(); transactions.stream().forEach((tx) -> tree.add(tx.getHash().getBytes())); // // The tree is generated starting at the leaves and moving down to the root // int levelOffset = 0; // // Step through each level, stopping when we reach the root (levelSize == 1). // for (int levelSize=transactions.size(); levelSize>1; levelSize=(levelSize+1)/2) { // // Process each pair of nodes on the current level // for (int left=0; left<levelSize; left+=2) { // // The right hand node can be the same as the left hand in the case where we have // an odd number of nodes for the level // int right = Math.min(left+1, levelSize-1); byte[]leftBytes = Utils.reverseBytes(tree.get(levelOffset+left)); byte[]rightBytes = Utils.reverseBytes(tree.get(levelOffset+right)); byte[]nodeHash = Utils.doubleDigestTwoBuffers(leftBytes, 0, 32, rightBytes, 0, 32); tree.add(Utils.reverseBytes(nodeHash)); } // // Move to the next level. 
// levelOffset += levelSize; } return tree; } /** * Reads the block header from the input stream * * @param inBuffer Input buffer * @throws EOFException Serialized input stream is too short * @throws VerificationException Block structure is incorrect */ private void readHeader(SerializedBuffer inBuffer) throws EOFException, VerificationException { blockVersion = inBuffer.getInt(); if (blockVersion < 1 || blockVersion > 3) throw new VerificationException(String.format("Block version %d is not supported", blockVersion)); prevBlockHash = new Sha256Hash(Utils.reverseBytes(inBuffer.getBytes(32))); merkleRoot = new Sha256Hash(Utils.reverseBytes(inBuffer.getBytes(32))); timeStamp = inBuffer.getUnsignedInt(); targetDifficulty = inBuffer.getUnsignedInt(); nonce = inBuffer.getInt(); } /** * Reads the transactions from the serialized stream * * @param inBuffer Serialized buffer * @throws EOFException Serialized input stream is too short * @throws VerificationException Transaction verification failed */ private void readTransactions(SerializedBuffer inBuffer) throws EOFException, VerificationException { int count = inBuffer.getVarInt(); if (count < 1 || count > NetParams.MAX_BLOCK_SIZE/60) throw new VerificationException(String.format("Transaction count %d is not valid", count)); transactions = new ArrayList<>(count); for (int i=0; i<count; i++) transactions.add(new Transaction(inBuffer)); } /** * <p>Checks the block to ensure it follows the rules laid out in the network parameters.</p> * <p>The following checks are performed:</p> * <ul> * <li>Check the proof of work by comparing the block hash to the target difficulty</li> * <li>Check the timestamp against the current time</li> * <li>Verify that there is a single coinbase transaction and it is the first transaction * in the block</li> * <li>Verify the merkle root</li> * <li>Verify the transaction structure</li> * <li>Verify the transaction lock time</li> * </ul> * * @throws VerificationException Block verification failed */ 
    private void verifyBlock() throws VerificationException {
        //
        // Ensure this block does in fact represent real work done. If the difficulty is high enough,
        // we can be fairly certain the work was done by the network.
        //
        // The block hash must be less than or equal to the target difficulty (the difficulty increases
        // by requiring an increasing number of leading zeroes in the block hash)
        //
        BigInteger target = getTargetDifficultyAsInteger();
        // Reject a non-positive target or one easier than the network proof-of-work limit
        if (target.signum() <= 0 || target.compareTo(NetParams.PROOF_OF_WORK_LIMIT) > 0)
            throw new VerificationException("Target difficulty is not valid",
                                            RejectMessage.REJECT_INVALID, blockHash);
        BigInteger hash = getHash().toBigInteger();
        if (hash.compareTo(target) > 0)
            throw new VerificationException("Block hash is higher than target difficulty",
                                            RejectMessage.REJECT_INVALID, blockHash);
        //
        // Verify the block timestamp (seconds since the epoch; must not be more than
        // ALLOWED_TIME_DRIFT ahead of our clock)
        //
        long currentTime = System.currentTimeMillis()/1000;
        if (timeStamp > currentTime+NetParams.ALLOWED_TIME_DRIFT)
            throw new VerificationException("Block timestamp is too far in the future",
                                            RejectMessage.REJECT_INVALID, blockHash);
        //
        // Check that there is just one coinbase transaction and it is the first transaction in the block
        // (a non-coinbase seen before any coinbase means the first transaction was not the coinbase)
        //
        boolean foundCoinBase = false;
        for (Transaction tx : transactions) {
            if (tx.isCoinBase()) {
                if (foundCoinBase)
                    throw new VerificationException("Block contains multiple coinbase transactions",
                                                    RejectMessage.REJECT_MALFORMED, blockHash);
                foundCoinBase = true;
            } else if (!foundCoinBase) {
                throw new VerificationException("First transaction in block is not the coinbase transaction",
                                                RejectMessage.REJECT_MALFORMED, blockHash);
            }
        }
        //
        // Verify the Merkle root by recomputing it from the block's transactions
        //
        Sha256Hash checkRoot = calculateMerkleRoot();
        if (!checkRoot.equals(merkleRoot))
            throw new VerificationException("Merkle root is not correct",
                                            RejectMessage.REJECT_INVALID, blockHash);
        //
        // Verify the transactions in the block
        //
        for (Transaction tx : transactions) {
            //
            // Verify the transaction structure
            //
            tx.verify(false);
            //
            // A transaction is locked if the lock time is greater than the block time (we allow
            // a 10-minute leeway)
            //
            if (tx.getLockTime() > timeStamp + (10*60)) {
                //
                // A transaction is unlocked if all of the input sequences are -1 even though
                // the lock time has not been reached
                // NOTE(review): assumes getSeqNumber() sign-extends the 0xFFFFFFFF "final"
                // sequence value to -1 — confirm in TransactionInput.
                //
                List<TransactionInput> txInputs = tx.getInputs();
                for (TransactionInput txInput : txInputs) {
                    if (txInput.getSeqNumber() != -1)
                        throw new VerificationException("Transaction lock time greater than block time",
                                                        RejectMessage.REJECT_INVALID, tx.getHash());
                }
            }
        }
    }

    /**
     * Determines if this block is equal to another block.
     * Two blocks are equal when their block hashes are equal.
     *
     * @param       obj             The block to compare
     * @return                      TRUE if the blocks are equal
     */
    @Override
    public boolean equals(Object obj) {
        return (obj!=null && (obj instanceof Block) && blockHash.equals(((Block)obj).blockHash));
    }

    /**
     * Returns the hash code for this object. The returned value is based on the block hash but is
     * not the same value.
     *
     * @return                      Hash code
     */
    @Override
    public int hashCode() {
        return blockHash.hashCode();
    }

    /**
     * Returns a string representation for this block.
     *
     * @return                      Formatted string
     */
    @Override
    public String toString() {
        return String.format("Block hash: %s\n  Previous block hash %s\n  Merkle root: %s\n  Target difficulty %d",
                             getHashAsString(), getPrevBlockHash().toString(),
                             getMerkleRoot().toString(), targetDifficulty);
    }
}
package marketmaker.application; import java.math.BigDecimal; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.Set; import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jms.annotation.JmsListener; import com.ripple.core.types.known.tx.txns.OfferCreate; import marketmaker.entities.AccountBalance; import marketmaker.entities.AccountBalanceRepository; import marketmaker.entities.AccountOffer; import marketmaker.entities.AccountOfferRepository; import marketmaker.entities.Avalanche; import marketmaker.entities.AvalancheRepository; import marketmaker.entities.Offer; import marketmaker.entities.OfferCreateEntity; import marketmaker.entities.OfferCreateEntityRepository; import marketmaker.entities.OfferRepository; /** * Created by bob on 09/01/15. */ public class RippleMessageStoreListener { private static Logger log = LoggerFactory.getLogger(RippleMessageStoreListener.class); private static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); @Autowired private AccountOfferRepository accountOfferRepository; @Autowired private AccountBalanceRepository accountBalanceRepository; // @Autowired // private OfferBookRepository offerBookRepository; @Autowired private OfferRepository offerRepository; @Autowired private AvalancheRepository avalancheRepository; @Autowired private OfferCreateEntityRepository offerCreateEntityRepository; public RippleMessageStoreListener() { } @JmsListener(destination = "offer_create") public void onTransaction(String message) { try { JSONObject json = new JSONObject(message); OfferCreate offer = (OfferCreate) OfferCreate.fromJSONObject(json); OfferCreateEntity e = new OfferCreateEntity(); e.setCreatedAt(Calendar.getInstance().getTime()); if (offer.expiration() != null) e.setExpiration(offer.expiration().toString()); if (offer.sequence() != 
null) e.setOfferSequence(offer.sequence().toString()); if (offer.takerGets() != null) { e.setTakerGetsCurrency(offer.takerGets().currencyString()); e.setTakerGetsIssuer(offer.takerGets().issuerString()); e.setTakerGetsValue(offer.takerGets().value()); } if (offer.takerPays() != null) { e.setTakerPaysCurrency(offer.takerPays().currencyString()); e.setTakerPaysIssuer(offer.takerPays().issuerString()); e.setTakerPaysValue(offer.takerPays().value()); } offerCreateEntityRepository.save(e); } catch (Exception ex) { log.error(ex.getMessage(), ex); } } @JmsListener(destination = "avalanche") public void onAvalanche(String message) { try { JSONObject json = new JSONObject(message); Avalanche avalanche = new Avalanche(); avalanche.setCreatedAt(Calendar.getInstance().getTime()); avalanche.setBaseAmount(new BigDecimal(json.getDouble("baseAmount"))); avalanche.setBaseAsset(json.getString("baseAsset")); avalanche.setBaseExpo(new BigDecimal(json.getDouble("baseExpo"))); avalanche.setCancelAllAccountOffersOnStart(json.getBoolean("cancelAllAccountOffersOnStart")); avalanche.setCounterAsset(json.getString("counterAsset")); avalanche.setCounterExpo(new BigDecimal(json.getDouble("counterExpo"))); avalanche.setDegreeAsk(new BigDecimal(json.getDouble("degreeAsk"))); avalanche.setDegreeBid(new BigDecimal(json.getDouble("degreeBid"))); avalanche.setEnableOpportunityTaker(json.getBoolean("enableOpportunityTaker")); avalanche.setFilteredListAsks(json.get("filteredListAsks").toString()); avalanche.setFilteredListBids(json.get("filteredListBids").toString()); avalanche.setListAsks(json.get("listAsks").toString()); avalanche.setListBids(json.get("listBids").toString()); avalanche.setMarginAsk(new BigDecimal(json.getDouble("marginAsk"))); avalanche.setMarginBid(new BigDecimal(json.getDouble("marginBid"))); avalanche.setMaxOpenAsks(new BigDecimal(json.getDouble("maxOpenAsks"))); avalanche.setMaxOpenBids(new BigDecimal(json.getDouble("maxOpenBids"))); 
avalanche.setPathFrom(json.getString("pathFrom")); avalanche.setPathTo(json.getString("pathTo")); avalanche.setRangeAsks(json.get("rangeAsks").toString()); avalanche.setRangeBids(json.get("rangeBids").toString()); avalanche.setRangeCountBids(json.get("rangeCountBids").toString()); avalanche.setRangeCountAsks(json.get("rangeCountAsks").toString()); avalanche.setRefAsset(json.getString("refAsset")); avalanche.setRefCost(new BigDecimal(json.getDouble("refCost"))); avalanche.setSlippage(new BigDecimal(json.getDouble("slippage"))); avalancheRepository.save(avalanche); } catch (Exception ex) { log.error(ex.getMessage(), ex); } } @JmsListener(destination = "offerbook") public void onOfferBook(String message) { // try { // JSONObject o = new JSONObject(message); // OfferBook ob = new OfferBook(); // ob.setCreated_at(Calendar.getInstance().getTime()); // JSONObject payIssue = o.getJSONObject("payIssue"); // JSONObject getIssue = o.getJSONObject("getIssue"); // ob.setPayIssueCurrency(payIssue.optString("currency")); // ob.setPayIssueIssuer(payIssue.optString("issuer")); // ob.setGetIssueCurrency(getIssue.optString("currency")); // ob.setGetIssueIssuer(getIssue.optString("issuer")); // Set<Offer> offersAsks = new HashSet<>(); // Set<Offer> offersBids = new HashSet<>(); // JSONArray listAsks = o.getJSONArray("offersAsks"); // JSONArray listBids = o.getJSONArray("offersBids"); // addOffers(offersAsks, listAsks); // addOffers(offersBids, listBids); // // ob.setOffersAsks(offersAsks); // ob.setOffersBids(offersBids); // // offerBookRepository.save(ob); // } catch (Exception ex) { // log.error(ex.getMessage(), ex); // } } private void addOffers(Set<Offer> offers, JSONArray list) { Date now = Calendar.getInstance().getTime(); for (int i = 0; i < list.length(); i++) { JSONObject ask = list.getJSONObject(i); Offer offer = new Offer(); offer.setCreated_at(now); offer.setBookDirectory(ask.optString("bookDirectory")); offer.setAccount(ask.optString("account")); 
offer.setBookNode(ask.optString("bookNode")); offer.setFlags(ask.optString("flags")); offer.setIndex(ask.optString("index")); offer.setLedgerEntryType(ask.optString("ledgerEntryType")); // offer.setOwner_funds(new // BigDecimal(ask.getDouble("owner_funds"))); offer.setOwnerNode(ask.optString("OwnerNode")); offer.setPreviousTxnID(ask.optString("PreviousTxnID")); offer.setPreviousTxnLgrSeq(ask.optString("PreviousTxnLgrSeq")); offer.setQuality(new BigDecimal(ask.getDouble("quality"))); if (ask.optJSONObject("TakerPays") != null) { JSONObject takerPays = ask.optJSONObject("TakerPays"); offer.setTakerPaysValue(new BigDecimal(takerPays.getDouble("value"))); offer.setTakerPaysIssuer(takerPays.optString("issuer")); offer.setTakerPaysCurrency(takerPays.optString("currency")); } else { offer.setTakerPaysValue(new BigDecimal(ask.getDouble("TakerPays"))); } if (ask.optJSONObject("TakerGets") != null) { JSONObject takerGets = ask.optJSONObject("TakerGets"); offer.setTakerGetsValue(new BigDecimal(takerGets.getDouble("value"))); offer.setTakerGetsIssuer(takerGets.optString("issuer")); offer.setTakerGetsCurrency(takerGets.optString("currency")); } else { offer.setTakerPaysValue(new BigDecimal(ask.getDouble("TakerGets"))); } offerRepository.save(offer); offers.add(offer); } } @JmsListener(destination = "account_balance") public void onAccountBalance(String message) { try { JSONObject jsonObject = new JSONObject(message); JSONObject result = jsonObject.getJSONObject("result"); JSONArray lines = result.getJSONArray("lines"); Date now = Calendar.getInstance().getTime(); for (int i = 0; i < lines.length(); i++) { JSONObject line = lines.getJSONObject(i); AccountBalance ab = new AccountBalance(); ab.setAccount(line.optString("account")); ab.setBalance(new BigDecimal(line.getDouble("balance"))); ab.setCreatedAt(now); ab.setCurrency(line.optString(("currency"))); ab.setLimit_balance(new BigDecimal(line.getDouble("limit"))); ab.setLimit_peer(new BigDecimal(line.getDouble("limit_peer"))); 
ab.setQuality_in(new BigDecimal(line.getDouble("quality_in"))); ab.setQuality_out(new BigDecimal(line.getDouble("quality_out"))); accountBalanceRepository.save(ab); } } catch (Exception ex) { log.error(ex.getMessage(), ex); } } @JmsListener(destination = "account_offers") public void onAccountOffers(String message) { try { accountOfferRepository.updateStatus("inactive"); JSONObject jsonObject = new JSONObject(message); JSONObject result = jsonObject.getJSONObject("result"); JSONArray offers = result.getJSONArray("offers"); Date now = Calendar.getInstance().getTime(); for (int i = 0; i < offers.length(); i++) { JSONObject o = offers.getJSONObject(i); AccountOffer a = new AccountOffer(); a.setStatus("active"); a.setCreatedAt(now); a.setFlags(o.optString("flags")); a.setQuality(o.optString("quality")); a.setSeq(o.optString("seq")); a.setLedgerCurrentIndex(result.getLong("ledger_current_index")); if (o.optJSONObject("taker_gets") != null) { JSONObject takerGets = o.optJSONObject("taker_gets"); a.setTakerGetsCurrency(takerGets.optString("currency")); a.setTakerGetsIssuer(takerGets.optString("issuer")); a.setTakerGetsValue(new BigDecimal(takerGets.getDouble("value"))); } else { a.setTakerGetsValue(new BigDecimal(o.getDouble("taker_gets"))); } if (o.optJSONObject("taker_pays") != null) { JSONObject takerPays = o.optJSONObject("taker_pays"); a.setTakerPaysCurrency(takerPays.optString("currency")); a.setTakerPaysIssuer(takerPays.optString("issuer")); a.setTakerPaysValue(new BigDecimal(takerPays.getDouble("value"))); } else { a.setTakerPaysValue(new BigDecimal(o.getDouble("taker_pays"))); } accountOfferRepository.save(a); } } catch (Exception e) { log.error(e.getMessage(), e); } } }
/*
 * (C) Copyright 2017-2020 OpenVidu (https://openvidu.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package io.openvidu.server.recording.service;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;

import org.apache.commons.io.FileUtils;
import org.kurento.client.ErrorEvent;
import org.kurento.client.EventListener;
import org.kurento.client.MediaPipeline;
import org.kurento.client.MediaProfileSpecType;
import org.kurento.client.RecorderEndpoint;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;

import com.google.gson.JsonIOException;
import com.google.gson.JsonObject;
import com.google.gson.JsonSyntaxException;

import io.openvidu.client.OpenViduException;
import io.openvidu.client.OpenViduException.Code;
import io.openvidu.client.internal.ProtocolElements;
import io.openvidu.java.client.Recording.OutputMode;
import io.openvidu.java.client.Recording.Status;
import io.openvidu.java.client.RecordingProperties;
import io.openvidu.server.cdr.CallDetailRecord;
import io.openvidu.server.config.OpenviduConfig;
import io.openvidu.server.core.EndReason;
import io.openvidu.server.core.MediaServer;
import io.openvidu.server.core.Participant;
import io.openvidu.server.core.Session;
import io.openvidu.server.core.SessionEventsHandler;
import io.openvidu.server.core.SessionManager;
import io.openvidu.server.kurento.kms.Kms;
import io.openvidu.server.kurento.kms.KmsManager;
import io.openvidu.server.recording.Recording;
import io.openvidu.server.recording.RecordingDownloader;
import io.openvidu.server.recording.RecordingUploader;
import io.openvidu.server.recording.service.RecordingService.PropertiesRecordingId;
import io.openvidu.server.utils.CustomFileManager;
import io.openvidu.server.utils.DockerManager;
import io.openvidu.server.utils.JsonUtils;
import io.openvidu.server.utils.LocalCustomFileManager;
import io.openvidu.server.utils.LocalDockerManager;
import io.openvidu.server.utils.RecordingUtils;
import io.openvidu.server.utils.RemoteOperationUtils;

/**
 * Central coordinator for OpenVidu session recordings. Delegates the actual work to the
 * COMPOSED, COMPOSED_QUICK_START and INDIVIDUAL recording services and tracks the
 * lifecycle (starting/started) of every recording.
 */
public class RecordingManager {

    private static final Logger log = LoggerFactory.getLogger(RecordingManager.class);

    // Concrete recording strategies, instantiated in initializeRecordingManager()
    private ComposedRecordingService composedRecordingService;
    private ComposedQuickStartRecordingService composedQuickStartRecordingService;
    private SingleStreamRecordingService singleStreamRecordingService;
    // Injected through the constructor (not Spring-autowired)
    private DockerManager dockerManager;
    private CustomFileManager fileManager;

    @Autowired
    protected SessionEventsHandler sessionHandler;

    @Autowired
    private SessionManager sessionManager;

    @Autowired
    protected RecordingManagerUtils recordingManagerUtils;

    @Autowired
    private RecordingDownloader recordingDownloader;

    @Autowired
    private RecordingUploader recordingUploader;

    @Autowired
    protected OpenviduConfig openviduConfig;
    @Autowired
    private KmsManager kmsManager;

    @Autowired
    private CallDetailRecord cdr;

    // Recording lifecycle maps, keyed as noted; all concurrent because JMS/REST threads
    // and media-event threads touch them
    protected Map<String, Recording> startingRecordings = new ConcurrentHashMap<>();           // recordingId -> recording
    protected Map<String, Recording> startedRecordings = new ConcurrentHashMap<>();            // recordingId -> recording
    protected Map<String, Recording> sessionsRecordings = new ConcurrentHashMap<>();           // sessionId -> started recording
    protected Map<String, Recording> sessionsRecordingsStarting = new ConcurrentHashMap<>();   // sessionId -> starting recording

    // sessionId -> scheduled auto-stop task (started when a recorded session has no publishers)
    private final Map<String, ScheduledFuture<?>> automaticRecordingStopThreads = new ConcurrentHashMap<>();

    private JsonUtils jsonUtils = new JsonUtils();

    private ScheduledExecutorService automaticRecordingStopExecutor = Executors
            .newScheduledThreadPool(Runtime.getRuntime().availableProcessors());

    // End reasons that mean "the last participant left" (candidates for automatic stop)
    private static final List<EndReason> LAST_PARTICIPANT_LEFT_REASONS = Arrays
            .asList(new EndReason[] { EndReason.disconnect, EndReason.forceDisconnectByUser,
                    EndReason.forceDisconnectByServer, EndReason.networkDisconnect });

    public RecordingManager(DockerManager dockerManager, CustomFileManager fileManager) {
        this.dockerManager = dockerManager;
        this.fileManager = fileManager;
    }

    /**
     * Bootstraps the recording module after dependency injection. If initialization fails
     * the whole server is halted, with an error message specific to the failure code.
     */
    @PostConstruct
    public void init() {
        if (this.openviduConfig.isRecordingModuleEnabled()) {
            log.info("OpenVidu recording service is enabled");
            try {
                this.initializeRecordingManager();
            } catch (OpenViduException e) {
                String finalErrorMessage = "";
                if (e.getCodeValue() == Code.DOCKER_NOT_FOUND.getValue()) {
                    finalErrorMessage = "Error connecting to Docker daemon. Enabling OpenVidu recording module requires Docker";
                } else if (e.getCodeValue() == Code.RECORDING_PATH_NOT_VALID.getValue()) {
                    finalErrorMessage = "Error initializing recording path \""
                            + this.openviduConfig.getOpenViduRecordingPath()
                            + "\" set with system property \"OPENVIDU_RECORDING_PATH\"";
                } else if (e.getCodeValue() == Code.RECORDING_FILE_EMPTY_ERROR.getValue()) {
                    finalErrorMessage = "Error initializing recording custom layouts path \""
                            + this.openviduConfig.getOpenviduRecordingCustomLayout()
                            + "\" set with system property \"OPENVIDU_RECORDING_CUSTOM_LAYOUT\"";
                }
                log.error(finalErrorMessage + ". Shutting down OpenVidu Server");
                // halt (not exit): skip shutdown hooks, recording cannot work at all
                Runtime.getRuntime().halt(1);
            }
        } else {
            log.info("OpenVidu recording service is disabled");
        }
    }

    /**
     * Creates the three recording services, validates the recording paths and prepares the
     * local Docker environment (image download, stranded container cleanup).
     *
     * @throws OpenViduException if Docker is unavailable or the recording paths are invalid
     */
    public void initializeRecordingManager() throws OpenViduException {
        this.dockerManager.init();
        this.composedRecordingService = new ComposedRecordingService(this, recordingDownloader, recordingUploader,
                kmsManager, fileManager, openviduConfig, cdr, this.dockerManager);
        this.composedQuickStartRecordingService = new ComposedQuickStartRecordingService(this, recordingDownloader,
                recordingUploader, kmsManager, fileManager, openviduConfig, cdr, this.dockerManager);
        this.singleStreamRecordingService = new SingleStreamRecordingService(this, recordingDownloader,
                recordingUploader, kmsManager, fileManager, openviduConfig, cdr);

        this.checkRecordingRequirements(this.openviduConfig.getOpenViduRecordingPath(),
                this.openviduConfig.getOpenviduRecordingCustomLayout());

        LocalDockerManager dockMng = new LocalDockerManager(true);
        if (!openviduConfig.isRecordingComposedExternal()) {
            downloadRecordingImageToLocal(dockMng);
        }
        // Clean any stranded openvidu/openvidu-recording container on startup
        dockMng.cleanStrandedContainers(openviduConfig.getOpenviduRecordingImageRepo());
    }

    /**
     * Verifies that Docker is reachable and that the recording paths are usable.
     *
     * @param openviduRecordingPath         path where recordings are stored
     * @param openviduRecordingCustomLayout path of the custom layouts
     * @throws OpenViduException if Docker is not enabled or a path check fails
     */
    public void checkRecordingRequirements(String openviduRecordingPath, String openviduRecordingCustomLayout)
            throws OpenViduException {
        LocalDockerManager dockerManager = null;
        try {
dockerManager = new LocalDockerManager(true); dockerManager.checkDockerEnabled(); } catch (OpenViduException e) { String message = e.getMessage(); if ("docker".equals(openviduConfig.getSpringProfile())) { final String NEW_LINE = System.getProperty("line.separator"); message += ": make sure you include the following flags in your \"docker run\" command:" + NEW_LINE + " -e OPENVIDU_RECORDING_PATH=/YOUR/PATH/TO/VIDEO/FILES" + NEW_LINE + " -e MY_UID=$(id -u $USER)" + NEW_LINE + " -v /var/run/docker.sock:/var/run/docker.sock" + NEW_LINE + " -v /YOUR/PATH/TO/VIDEO/FILES:/YOUR/PATH/TO/VIDEO/FILES" + NEW_LINE; } else { message += ": you need Docker CE installed in this machine to enable OpenVidu recording service. " + "If Docker CE is already installed, make sure to add OpenVidu Server user to " + "\"docker\" group: " + System.lineSeparator() + " 1) $ sudo usermod -aG docker $USER" + System.lineSeparator() + " 2) Log out and log back to the host to reevaluate group membership"; } log.error(message); throw e; } finally { dockerManager.close(); } this.checkRecordingPaths(openviduRecordingPath, openviduRecordingCustomLayout); } private void downloadRecordingImageToLocal(LocalDockerManager dockMng) { log.info("Recording module required: Downloading openvidu/openvidu-recording:" + openviduConfig.getOpenViduRecordingVersion() + " Docker image (350MB aprox)"); if (dockMng.dockerImageExistsLocally( openviduConfig.getOpenviduRecordingImageRepo() + ":" + openviduConfig.getOpenViduRecordingVersion())) { log.info("Docker image already exists locally"); } else { Thread t = new Thread(() -> { boolean keep = true; log.info("Downloading "); while (keep) { System.out.print("."); try { Thread.sleep(1000); } catch (InterruptedException e) { keep = false; log.info("\nDownload complete"); } } }); t.start(); try { dockMng.downloadDockerImage(openviduConfig.getOpenviduRecordingImageRepo() + ":" + openviduConfig.getOpenViduRecordingVersion(), 600); } catch (Exception e) { log.error("Error 
downloading docker image {}:{}", openviduConfig.getOpenviduRecordingImageRepo(), openviduConfig.getOpenViduRecordingVersion()); } t.interrupt(); try { t.join(); } catch (InterruptedException e) { e.printStackTrace(); } log.info("Docker image available"); } } public void startComposedQuickStartContainer(Session session) { this.composedQuickStartRecordingService.runComposedQuickStartContainer(session); } public void stopComposedQuickStartContainer(Session session, EndReason reason) { this.composedQuickStartRecordingService.stopRecordingContainer(session, reason); } public Recording startRecording(Session session, RecordingProperties properties) throws OpenViduException { String recordingId = null; try { PropertiesRecordingId updatePropertiesAndRecordingId = ((RecordingService) this.composedRecordingService) .setFinalRecordingNameAndGetFreeRecordingId(session, properties); properties = updatePropertiesAndRecordingId.properties; recordingId = updatePropertiesAndRecordingId.recordingId; // INCREMENT ACTIVE RECORDINGS OF MEDIA NODE HERE. 
IF MEDIA NODE IS NOT // AVAILABLE FOR STARTING NEW RECORDINGS THIS METHOD THROWS AN EXCEPTION kmsManager.incrementActiveRecordings(properties, recordingId, session); try { if (session.recordingLock.tryLock(15, TimeUnit.SECONDS)) { try { if (sessionIsBeingRecorded(session.getSessionId())) { throw new OpenViduException(Code.RECORDING_START_ERROR_CODE, "Concurrent start of recording for session " + session.getSessionId()); } else { Recording recording = null; switch (properties.outputMode()) { case COMPOSED: recording = this.composedRecordingService.startRecording(session, recordingId, properties); break; case COMPOSED_QUICK_START: recording = this.composedQuickStartRecordingService.startRecording(session, recordingId, properties); break; case INDIVIDUAL: recording = this.singleStreamRecordingService.startRecording(session, recordingId, properties); break; } this.recordingFromStartingToStarted(recording); this.cdr.recordRecordingStatusChanged(recording, null, recording.getCreatedAt(), Status.started); if (!(OutputMode.COMPOSED.equals(properties.outputMode()) && properties.hasVideo())) { // Directly send recording started notification for all cases except for // COMPOSED recordings with video (will be sent on first RECORDER subscriber) // Both INDIVIDUAL and COMPOSED_QUICK_START should notify immediately this.sessionHandler.sendRecordingStartedNotification(session, recording); } if (session.getActivePublishers() == 0) { // Init automatic recording stop if no publishers when starting the recording log.info( "No publisher in session {}. 
Starting {} seconds countdown for stopping recording", session.getSessionId(), this.openviduConfig.getOpenviduRecordingAutostopTimeout()); this.initAutomaticRecordingStopThread(session); } return recording; } } finally { session.recordingLock.unlock(); } } else { throw new OpenViduException(Code.RECORDING_START_ERROR_CODE, "Timeout waiting for recording Session lock to be available for session " + session.getSessionId()); } } catch (InterruptedException e) { throw new OpenViduException(Code.RECORDING_START_ERROR_CODE, "InterruptedException waiting for recording Session lock to be available for session " + session.getSessionId()); } } catch (Exception e) { // DECREMENT ACTIVE RECORDINGS OF MEDIA NODE AND TRY REMOVE MEDIA NODE HERE kmsManager.decrementActiveRecordings(properties, recordingId, session); throw e; } } public Recording stopRecording(Session session, String recordingId, EndReason reason) { Recording recording; if (session == null) { recording = this.startedRecordings.get(recordingId); } else { recording = this.sessionsRecordings.get(session.getSessionId()); } if (recording == null) { recording = this.sessionsRecordingsStarting.get(session.getSessionId()); if (recording == null) { log.error("Cannot stop recording. 
Session {} is not being recorded", recordingId, session.getSessionId()); return null; } else { // Recording is still starting log.warn("Recording {} is still in \"starting\" status", recording.getId()); } } ((RecordingService) singleStreamRecordingService).sealRecordingMetadataFileAsStopped(recording); final long timestamp = System.currentTimeMillis(); this.cdr.recordRecordingStatusChanged(recording, reason, timestamp, Status.stopped); switch (recording.getOutputMode()) { case COMPOSED: recording = this.composedRecordingService.stopRecording(session, recording, reason); break; case COMPOSED_QUICK_START: recording = this.composedQuickStartRecordingService.stopRecording(session, recording, reason); break; case INDIVIDUAL: recording = this.singleStreamRecordingService.stopRecording(session, recording, reason); break; } this.abortAutomaticRecordingStopThread(session, reason); return recording; } public Recording forceStopRecording(Session session, EndReason reason, Long kmsDisconnectionTime) { Recording recording = this.sessionsRecordings.get(session.getSessionId()); ((RecordingService) singleStreamRecordingService).sealRecordingMetadataFileAsStopped(recording); final long timestamp = System.currentTimeMillis(); this.cdr.recordRecordingStatusChanged(recording, reason, timestamp, Status.stopped); switch (recording.getOutputMode()) { case COMPOSED: recording = this.composedRecordingService.stopRecording(session, recording, reason, kmsDisconnectionTime); if (recording.hasVideo()) { // Evict the recorder participant if composed recording with video this.sessionManager.evictParticipant( session.getParticipantByPublicId(ProtocolElements.RECORDER_PARTICIPANT_PUBLICID), null, null, null); } break; case COMPOSED_QUICK_START: recording = this.composedQuickStartRecordingService.stopRecording(session, recording, reason, kmsDisconnectionTime); if (recording.hasVideo()) { // Evict the recorder participant if composed recording with video this.sessionManager.evictParticipant( 
session.getParticipantByPublicId(ProtocolElements.RECORDER_PARTICIPANT_PUBLICID), null, null,
                    null);
        }
        break;
    case INDIVIDUAL:
        // Individual recordings are stopped per publisher stream by the single-stream service
        recording = this.singleStreamRecordingService.stopRecording(session, recording, reason,
                kmsDisconnectionTime);
        break;
    }
    // A manual/explicit stop supersedes any pending automatic-stop timer for this session
    this.abortAutomaticRecordingStopThread(session, reason);
    return recording;
}

/**
 * Hooks a newly published stream into the ongoing recording of its session.
 *
 * <p>For INDIVIDUAL recordings a fresh RecorderEndpoint is started for the new
 * publisher; for COMPOSED recordings without video the publisher is joined to the
 * existing Composite. Logs an error and returns if the session is not being
 * recorded (neither started nor starting).
 */
public void startOneIndividualStreamRecording(Session session, Participant participant) {
    Recording recording = this.sessionsRecordings.get(session.getSessionId());
    if (recording == null) {
        // The recording may still be in "starting" state; check that collection too
        recording = this.sessionsRecordingsStarting.get(session.getSessionId());
        if (recording == null) {
            log.error("Cannot start recording of new stream {}. Session {} is not being recorded",
                    participant.getPublisherStreamId(), session.getSessionId());
            return;
        }
    }
    if (OutputMode.INDIVIDUAL.equals(recording.getOutputMode())) {
        // Start new RecorderEndpoint for this stream
        log.info("Starting new RecorderEndpoint in session {} for new stream of participant {}",
                session.getSessionId(), participant.getParticipantPublicId());
        MediaProfileSpecType profile = null;
        try {
            profile = this.singleStreamRecordingService.generateMediaProfile(recording.getRecordingProperties(),
                    participant);
        } catch (OpenViduException e) {
            log.error("Cannot start single stream recorder for stream {} in session {}: {}",
                    participant.getPublisherStreamId(), session.getSessionId(), e.getMessage());
            return;
        }
        this.singleStreamRecordingService.startRecorderEndpointForPublisherEndpoint(recording.getId(), profile,
                participant, new CountDownLatch(1));
    } else if (RecordingUtils.IS_COMPOSED(recording.getOutputMode()) && !recording.hasVideo()) {
        // Connect this stream to existing Composite recorder
        log.info("Joining PublisherEndpoint to existing Composite in session {} for new stream of participant {}",
                session.getSessionId(), participant.getParticipantPublicId());
        this.composedRecordingService.joinPublisherEndpointToComposite(session, recording.getId(), participant);
    }
}

/**
 * Detaches a stream that stopped publishing from the ongoing recording of its session.
 *
 * <p>For INDIVIDUAL recordings the per-stream RecorderEndpoint is stopped (waiting up
 * to 5 seconds for confirmation); for COMPOSED recordings without video the publisher
 * is removed from the Composite.
 *
 * @param kmsDisconnectionTime forwarded to the single-stream service; presumably the
 *        KMS crash/disconnect timestamp used to truncate the file — TODO confirm
 */
public void stopOneIndividualStreamRecording(Session session, String streamId, Long kmsDisconnectionTime) {
    Recording recording = this.sessionsRecordings.get(session.getSessionId());
    if (recording == null) {
        recording = this.sessionsRecordingsStarting.get(session.getSessionId());
        if (recording == null) {
            log.error("Cannot stop recording of existing stream {}. Session {} is not being recorded",
                    streamId, session.getSessionId());
            return;
        } else {
            // Recording is still starting
            log.warn("Recording {} is still in \"starting\" status", recording.getId());
        }
    }
    if (OutputMode.INDIVIDUAL.equals(recording.getOutputMode())) {
        // Stop specific RecorderEndpoint for this stream
        log.info("Stopping RecorderEndpoint in session {} for stream of participant {}",
                session.getSessionId(), streamId);
        final CountDownLatch stoppedCountDown = new CountDownLatch(1);
        this.singleStreamRecordingService.stopRecorderEndpointOfPublisherEndpoint(recording.getId(), streamId,
                stoppedCountDown, kmsDisconnectionTime);
        try {
            // Bounded wait: a stuck recorder must not block the caller indefinitely
            if (!stoppedCountDown.await(5, TimeUnit.SECONDS)) {
                log.error("Error waiting for recorder endpoint of stream {} to stop in session {}",
                        streamId, session.getSessionId());
            }
        } catch (InterruptedException e) {
            // NOTE(review): interrupt status is swallowed here (no re-interrupt) — review in a follow-up
            log.error("Exception while waiting for state change", e);
        }
    } else if (RecordingUtils.IS_COMPOSED(recording.getOutputMode()) && !recording.hasVideo()) {
        // Disconnect this stream from existing Composite recorder
        log.info("Removing PublisherEndpoint from Composite in session {} for stream of participant {}",
                session.getSessionId(), streamId);
        this.composedRecordingService.removePublisherEndpointFromComposite(session.getSessionId(), streamId);
    }
}

/** Returns true if the session has a recording either already started or still starting. */
public boolean sessionIsBeingRecorded(String sessionId) {
    return (this.sessionsRecordings.get(sessionId) != null
            || this.sessionsRecordingsStarting.get(sessionId) != null);
}

/** Returns true if the session is being recorded with {@code OutputMode.INDIVIDUAL}. */
public boolean sessionIsBeingRecordedIndividual(String sessionId) {
    if (!sessionIsBeingRecorded(sessionId)) {
        return false;
    } else {
        // Prefer the started recording; fall back to the one still starting
        Recording recording = this.sessionsRecordings.get(sessionId);
        if (recording == null) {
            recording = this.sessionsRecordingsStarting.get(sessionId);
        }
        return OutputMode.INDIVIDUAL.equals(recording.getOutputMode());
    }
}

/** Returns the active (started) recording with this id, or null. */
public Recording getStartedRecording(String recordingId) {
    return this.startedRecordings.get(recordingId);
}

/** Returns the recording with this id that is still starting, or null. */
public Recording getStartingRecording(String recordingId) {
    return this.startingRecordings.get(recordingId);
}

/** Returns all stored recordings whose status is {@code ready}. */
public Collection<Recording> getFinishedRecordings() {
    return recordingManagerUtils.getAllRecordingsFromStorage().stream()
            .filter(recording -> recording.getStatus().equals(Status.ready)).collect(Collectors.toSet());
}

/** Looks up a single recording in storage by id. */
public Recording getRecording(String recordingId) {
    return recordingManagerUtils.getRecordingFromStorage(recordingId);
}

/** Returns every recording present in storage, regardless of status. */
public Collection<Recording> getAllRecordings() {
    return recordingManagerUtils.getAllRecordingsFromStorage();
}

/** Computes a not-yet-used recording id for the session (delegated to the utils). */
public String getFreeRecordingId(String sessionId) {
    log.info("Getting free recording id for session {}", sessionId);
    String recordingId = recordingManagerUtils.getFreeRecordingId(sessionId);
    log.info("Free recording id got for session {}: {}", sessionId, recordingId);
    return recordingId;
}

/**
 * Deletes a recording from storage.
 *
 * @param force if false, an active (started or starting) recording is protected
 * @return CONFLICT when active and not forced, NOT_FOUND when the id is unknown,
 *         otherwise the status returned by the storage deletion
 */
public HttpStatus deleteRecordingFromHost(String recordingId, boolean force) {
    if (this.startedRecordings.containsKey(recordingId) || this.startingRecordings.containsKey(recordingId)) {
        if (!force) {
            // Cannot delete an active recording
            return HttpStatus.CONFLICT;
        }
    }
    Recording recording = recordingManagerUtils.getRecordingFromStorage(recordingId);
    if (recording == null) {
        return HttpStatus.NOT_FOUND;
    }
    if (Status.stopped.equals(recording.getStatus())) {
        // Recording is being downloaded from remote host or being uploaded
        log.warn("Recording {} status is \"stopped\". Cancelling possible ongoing download process",
                recording.getId());
        this.recordingDownloader.cancelDownload(recording.getId());
    }
    return recordingManagerUtils.deleteRecordingFromStorage(recordingId);
}

/**
 * Scans the local recording path and returns the ids of all recordings found there,
 * derived from the entity file name inside each per-recording folder.
 */
// NOTE(review): folder.listFiles() returns null if the path is not a readable directory;
// this would NPE in the loop below — confirm the path is guaranteed to exist at this point
public Set<String> getAllRecordingIdsFromLocalStorage() {
    File folder = new File(openviduConfig.getOpenViduRecordingPath());
    File[] files = folder.listFiles();
    Set<String> fileNamesNoExtension = new HashSet<>();
    for (int i = 0; i < files.length; i++) {
        if (files[i].isDirectory()) {
            File[] innerFiles = files[i].listFiles();
            for (int j = 0; j < innerFiles.length; j++) {
                if (innerFiles[j].isFile()
                        && innerFiles[j].getName().startsWith(RecordingService.RECORDING_ENTITY_FILE)) {
                    // The id is the entity file name with the fixed prefix stripped
                    fileNamesNoExtension
                            .add(innerFiles[j].getName().replaceFirst(RecordingService.RECORDING_ENTITY_FILE, ""));
                    break;
                }
            }
        }
    }
    return fileNamesNoExtension;
}

/**
 * Deletes the local folder of the given recording.
 *
 * @return NO_CONTENT on success, INTERNAL_SERVER_ERROR if the folder could not be
 *         deleted, NOT_FOUND if no folder with that name exists
 */
public HttpStatus deleteRecordingFromLocalStorage(String recordingId) {
    File folder = new File(openviduConfig.getOpenViduRecordingPath());
    File[] files = folder.listFiles();
    for (int i = 0; i < files.length; i++) {
        if (files[i].isDirectory() && files[i].getName().equals(recordingId)) {
            // Correct folder. Delete it
            try {
                FileUtils.deleteDirectory(files[i]);
                return HttpStatus.NO_CONTENT;
            } catch (IOException e) {
                log.error("Couldn't delete folder {}", files[i].getAbsolutePath());
                return HttpStatus.INTERNAL_SERVER_ERROR;
            }
        }
    }
    return HttpStatus.NOT_FOUND;
}

/** Builds the path of the metadata (entity) file of a recording in local storage. */
public File getRecordingEntityFileFromLocalStorage(String recordingId) {
    String metadataFilePath = openviduConfig.getOpenViduRecordingPath() + recordingId + "/"
            + RecordingService.RECORDING_ENTITY_FILE + recordingId;
    return new File(metadataFilePath);
}

/** Parses every entity file under the local recording path into {@link Recording} objects. */
// NOTE(review): same listFiles() null risk as getAllRecordingIdsFromLocalStorage above
public Set<Recording> getAllRecordingsFromLocalStorage() {
    File folder = new File(openviduConfig.getOpenViduRecordingPath());
    File[] files = folder.listFiles();
    Set<Recording> recordingEntities = new HashSet<>();
    for (int i = 0; i < files.length; i++) {
        if (files[i].isDirectory()) {
            File[] innerFiles = files[i].listFiles();
            for (int j = 0; j < innerFiles.length; j++) {
                Recording recording = getRecordingFromEntityFile(innerFiles[j]);
                if (recording != null) {
                    recordingEntities.add(recording);
                }
            }
        }
    }
    return recordingEntities;
}

/**
 * Parses one entity file into a {@link Recording}, or returns null when the file is
 * not an entity file or cannot be read/parsed.
 */
public Recording getRecordingFromEntityFile(File file) {
    if (file.isFile() && file.getName().startsWith(RecordingService.RECORDING_ENTITY_FILE)) {
        JsonObject json;
        try {
            json = jsonUtils.fromFileToJsonObject(file.getAbsolutePath());
        } catch (JsonIOException | JsonSyntaxException | IOException e) {
            log.error("Error reading recording entity file {}: {}", file.getAbsolutePath(), (e.getMessage()));
            return null;
        }
        return getRecordingFromJson(json);
    }
    return null;
}

/**
 * Builds a {@link Recording} from its JSON representation, adjusting status and URL:
 * a "ready" recording still being uploaded is reported as "stopped"; a fully
 * processed ("ready" or "failed") recording gets its download URL populated.
 */
public Recording getRecordingFromJson(JsonObject json) {
    Recording recording = new Recording(json);
    if (Status.ready.equals(recording.getStatus())
            && composedQuickStartRecordingService.isBeingUploaded(recording)) {
        // Recording has finished but is being uploaded
        recording.setStatus(Status.stopped);
    } else if (Status.ready.equals(recording.getStatus()) || Status.failed.equals(recording.getStatus())) {
        // Recording has been completely processed and must include URL
        recording.setUrl(recordingManagerUtils.getRecordingUrl(recording));
    }
    return recording;
}

/** Resolves the public URL of a recording (delegated to the utils). */
public String getRecordingUrl(Recording recording) {
    return recordingManagerUtils.getRecordingUrl(recording);
}

/**
 * Schedules a one-shot task that automatically stops the session's recording after
 * the configured autostop timeout if no publisher shows up. At most one timer per
 * session is kept (computeIfAbsent); the timer removes itself from the map before
 * acting so a concurrent manual stop wins.
 */
public void initAutomaticRecordingStopThread(final Session session) {
    final String recordingId = this.sessionsRecordings.get(session.getSessionId()).getId();
    this.automaticRecordingStopThreads.computeIfAbsent(session.getSessionId(), f -> {
        ScheduledFuture<?> future = this.automaticRecordingStopExecutor.schedule(() -> {
            log.info("Stopping recording {} after {} seconds wait (no publisher published before timeout)",
                    recordingId, this.openviduConfig.getOpenviduRecordingAutostopTimeout());
            // remove() returning null means another thread already handled the stop
            if (this.automaticRecordingStopThreads.remove(session.getSessionId()) != null) {
                boolean alreadyUnlocked = false;
                try {
                    if (session.closingLock.writeLock().tryLock(15, TimeUnit.SECONDS)) {
                        try {
                            if (session.isClosed()) {
                                return;
                            }
                            if (session.getParticipants().size() == 0 || session.onlyRecorderParticipant()) {
                                // Close session if there are no participants connected (RECORDER does not
                                // count) and publishing
                                log.info("Closing session {} after automatic stop of recording {}",
                                        session.getSessionId(), recordingId);
                                sessionManager.closeSessionAndEmptyCollections(session, EndReason.automaticStop,
                                        true);
                            } else {
                                // There are users connected, but no one is publishing
                                // We don't need the lock if session is not closing
                                session.closingLock.writeLock().unlock();
                                alreadyUnlocked = true;
                                log.info(
                                        "Automatic stopping recording {}. There are users connected to session {}, but no one is publishing",
                                        recordingId, session.getSessionId());
                                this.stopRecording(session, recordingId, EndReason.automaticStop);
                            }
                        } finally {
                            // Only unlock if the else-branch above didn't already release the lock
                            if (!alreadyUnlocked) {
                                session.closingLock.writeLock().unlock();
                            }
                        }
                    } else {
                        // NOTE(review): "thred" typo in this log message (runtime string, left unchanged here)
                        log.error(
                                "Timeout waiting for Session {} closing lock to be available for automatic recording stop thred",
                                session.getSessionId());
                    }
                } catch (InterruptedException e) {
                    log.error(
                            "InterruptedException while waiting for Session {} closing lock to be available for automatic recording stop thred",
                            session.getSessionId());
                }
            } else {
                // This code shouldn't be reachable
                log.warn("Recording {} was already automatically stopped by a previous thread", recordingId);
            }
        }, this.openviduConfig.getOpenviduRecordingAutostopTimeout(), TimeUnit.SECONDS);
        return future;
    });
}

/**
 * Cancels the pending automatic-stop timer of a session, if any. If the session has
 * no publishing participants (RECORDER excluded) it is also closed, because reaching
 * this point within the timeout means the recording was stopped manually.
 *
 * @return the result of cancelling the scheduled task, or true when no timer existed
 */
public boolean abortAutomaticRecordingStopThread(Session session, EndReason reason) {
    ScheduledFuture<?> future = this.automaticRecordingStopThreads.remove(session.getSessionId());
    if (future != null) {
        boolean cancelled = future.cancel(false);
        try {
            if (session.closingLock.writeLock().tryLock(15, TimeUnit.SECONDS)) {
                try {
                    if (session.isClosed()) {
                        return false;
                    }
                    if (session.getParticipants().size() == 0 || session.onlyRecorderParticipant()) {
                        // Close session if there are no participants connected (except for RECORDER).
                        // This code will only be executed if recording is manually stopped during the
                        // automatic stop timeout, so the session must be also closed
                        log.info(
                                "Ongoing recording of session {} was explicetly stopped within timeout for automatic recording stop. Closing session",
                                session.getSessionId());
                        sessionManager.closeSessionAndEmptyCollections(session, reason, false);
                    }
                } finally {
                    session.closingLock.writeLock().unlock();
                }
            } else {
                log.error(
                        "Timeout waiting for Session {} closing lock to be available for aborting automatic recording stop thred",
                        session.getSessionId());
            }
        } catch (InterruptedException e) {
            log.error(
                    "InterruptedException while waiting for Session {} closing lock to be available for aborting automatic recording stop thred",
                    session.getSessionId());
        }
        return cancelled;
    } else {
        return true;
    }
}

/**
 * Validates the recording path and the custom-layout path at startup.
 *
 * <p>Checks, in order: that the recording path exists (creating it if needed) and is
 * writable by OpenVidu Server; that the media server (Kurento only — skipped for
 * mediasoup or when no KMS is connected) can write a test recording there and that
 * OpenVidu can delete the files Kurento creates; and, if the custom-layout property
 * changed, that its path is a readable directory (creating it if absent).
 *
 * @throws OpenViduException with RECORDING_PATH_NOT_VALID / RECORDING_FILE_EMPTY_ERROR
 *         on any failed check
 */
protected void checkRecordingPaths(String openviduRecordingPath, String openviduRecordingCustomLayout)
        throws OpenViduException {
    log.info("Initializing recording paths");
    Path recordingPath = null;
    try {
        recordingPath = Files.createDirectories(Paths.get(openviduRecordingPath));
    } catch (IOException e) {
        String errorMessage = "The recording path \"" + openviduRecordingPath
                + "\" is not valid. Reason: OpenVidu Server cannot find path \"" + openviduRecordingPath
                + "\" and doesn't have permissions to create it";
        log.error(errorMessage);
        throw new OpenViduException(Code.RECORDING_PATH_NOT_VALID, errorMessage);
    }
    // Check OpenVidu Server write permissions in recording path
    if (!Files.isWritable(recordingPath)) {
        String errorMessage = "The recording path \"" + openviduRecordingPath
                + "\" is not valid. Reason: OpenVidu Server needs write permissions. Try running command \"sudo chmod 777 "
                + openviduRecordingPath + "\"";
        log.error(errorMessage);
        throw new OpenViduException(Code.RECORDING_PATH_NOT_VALID, errorMessage);
    } else {
        log.info("OpenVidu Server has write permissions on recording path: {}", openviduRecordingPath);
    }
    final String testFolderPath = openviduRecordingPath + "/TEST_RECORDING_PATH_" + System.currentTimeMillis();
    final String testFilePath = testFolderPath + "/TEST_RECORDING_PATH"
            + openviduConfig.getMediaServer().getRecordingFileExtension();
    // Check Kurento Media Server write permissions in recording path
    if (this.kmsManager.getKmss().isEmpty()) {
        log.warn("No KMSs were defined in KMS_URIS array. Recording path check aborted");
    } else if (MediaServer.mediasoup.equals(openviduConfig.getMediaServer())) {
        log.warn("Using mediasoup. Recording path check aborted");
    } else {
        Kms kms = null;
        try {
            kms = this.kmsManager.getLessLoadedConnectedAndRunningKms();
        } catch (NoSuchElementException e) {
            // deliberately ignored: kms stays null and the check below aborts gracefully
        }
        if (kms == null) {
            log.warn("There are not running and connected KMSs. Recording path check aborted");
        } else {
            MediaPipeline pipeline = this.kmsManager.getLessLoadedConnectedAndRunningKms().getKurentoClient()
                    .createMediaPipeline();
            RecorderEndpoint recorder = new RecorderEndpoint.Builder(pipeline, "file://" + testFilePath).build();
            final AtomicBoolean kurentoRecorderError = new AtomicBoolean(false);
            recorder.addErrorListener(new EventListener<ErrorEvent>() {
                @Override
                public void onEvent(ErrorEvent event) {
                    if (event.getErrorCode() == 6) {
                        // KMS write permissions error
                        kurentoRecorderError.compareAndSet(false, true);
                    }
                }
            });
            recorder.record();
            try {
                // Give the error event some time to trigger if necessary
                Thread.sleep(500);
            } catch (InterruptedException e1) {
                e1.printStackTrace();
            }
            if (kurentoRecorderError.get()) {
                String errorMessage = "The recording path \"" + openviduRecordingPath
                        + "\" is not valid. Reason: Kurento Media Server needs write permissions. Try running command \"sudo chmod 777 "
                        + openviduRecordingPath + "\"";
                log.error(errorMessage);
                throw new OpenViduException(Code.RECORDING_PATH_NOT_VALID, errorMessage);
            }
            if (!RemoteOperationUtils.mustSkipRemoteOperation()) {
                recorder.stop();
                recorder.release();
                pipeline.release();
            }
            log.info("Kurento Media Server has write permissions on recording path: {}", openviduRecordingPath);
            try {
                // Verify OpenVidu can delete what Kurento wrote (same-user/permission check)
                new LocalCustomFileManager().deleteFolder(testFolderPath);
                log.info("OpenVidu Server has write permissions over files created by Kurento Media Server");
            } catch (IOException e) {
                String errorMessage = "The recording path \"" + openviduRecordingPath
                        + "\" is not valid. Reason: OpenVidu Server does not have write permissions over files created by Kurento Media Server. "
                        + "Try running Kurento Media Server as user \"" + System.getProperty("user.name")
                        + "\" or run OpenVidu Server as superuser";
                log.error(errorMessage);
                log.error(
                        "Be aware that a folder \"{}\" was created and should be manually deleted (\"sudo rm -rf {}\")",
                        testFolderPath, testFolderPath);
                throw new OpenViduException(Code.RECORDING_PATH_NOT_VALID, errorMessage);
            }
        }
    }
    if (openviduConfig.openviduRecordingCustomLayoutChanged(openviduRecordingCustomLayout)) {
        // Property OPENVIDU_RECORDING_CUSTOM_LAYOUT changed
        File dir = new File(openviduRecordingCustomLayout);
        if (dir.exists()) {
            if (!dir.isDirectory()) {
                String errorMessage = "The custom layouts path \"" + openviduRecordingCustomLayout
                        + "\" is not valid. Reason: path already exists but it is not a directory";
                log.error(errorMessage);
                throw new OpenViduException(Code.RECORDING_FILE_EMPTY_ERROR, errorMessage);
            } else {
                // listFiles() == null here signals the directory is not readable
                if (dir.listFiles() == null) {
                    String errorMessage = "The custom layouts path \"" + openviduRecordingCustomLayout
                            + "\" is not valid. Reason: OpenVidu Server needs read permissions. Try running command \"sudo chmod 755 "
                            + openviduRecordingCustomLayout + "\"";
                    log.error(errorMessage);
                    throw new OpenViduException(Code.RECORDING_FILE_EMPTY_ERROR, errorMessage);
                } else {
                    log.info("OpenVidu Server has read permissions on custom layout path: {}",
                            openviduRecordingCustomLayout);
                    log.info("Custom layouts path successfully initialized at {}", openviduRecordingCustomLayout);
                }
            }
        } else {
            try {
                Files.createDirectories(dir.toPath());
                log.warn(
                        "OpenVidu custom layouts path (system property 'OPENVIDU_RECORDING_CUSTOM_LAYOUT') has been created, being folder {}. "
                                + "It is an empty folder, so no custom layout is currently present",
                        dir.getAbsolutePath());
            } catch (IOException e) {
                String errorMessage = "The custom layouts path \"" + openviduRecordingCustomLayout
                        + "\" is not valid. Reason: OpenVidu Server cannot find path \"" + openviduRecordingCustomLayout
                        + "\" and doesn't have permissions to create it";
                log.error(errorMessage);
                throw new OpenViduException(Code.RECORDING_FILE_EMPTY_ERROR, errorMessage);
            }
        }
    }
    log.info("Recording path successfully initialized at {}", openviduRecordingPath);
}

/**
 * Normalizes an {@link EndReason}: any reason in LAST_PARTICIPANT_LEFT_REASONS is
 * mapped to {@code lastParticipantLeft}; every other reason passes through unchanged.
 */
public static EndReason finalReason(EndReason reason) {
    if (RecordingManager.LAST_PARTICIPANT_LEFT_REASONS.contains(reason)) {
        return EndReason.lastParticipantLeft;
    } else {
        return reason;
    }
}

/**
 * Registers a new recording as "starting" in both id- and session-keyed collections.
 * Uses putIfAbsent on both maps so a concurrent initialization of the same recording
 * or session is detected and rejected.
 *
 * @throws RuntimeException if another thread already registered this recording/session
 */
public void recordingToStarting(Recording recording) throws RuntimeException {
    if ((startingRecordings.putIfAbsent(recording.getId(), recording) != null)
            || (sessionsRecordingsStarting.putIfAbsent(recording.getSessionId(), recording) != null)) {
        log.error("Concurrent session recording initialization. Aborting this thread");
        throw new RuntimeException("Concurrent initialization of recording " + recording.getId());
    } else {
        this.sessionHandler.storeRecordingToSendClientEvent(recording);
    }
}

/**
 * Moves a recording from the "starting" collections to the "started" collections,
 * keeping the global maps consistent.
 */
private void recordingFromStartingToStarted(Recording recording) {
    this.sessionsRecordings.put(recording.getSessionId(), recording);
    this.startingRecordings.remove(recording.getId());
    this.sessionsRecordingsStarting.remove(recording.getSessionId());
    this.startedRecordings.put(recording.getId(), recording);
}
}
/* * Copyright (c) 2010-2017 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * */ package com.evolveum.midpoint.provisioning.impl.dummy; import com.evolveum.icf.dummy.resource.DummyAccount; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.PrismPropertyValue; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.delta.PropertyDelta; import com.evolveum.midpoint.prism.match.MatchingRule; import com.evolveum.midpoint.prism.util.PrismAsserts; import com.evolveum.midpoint.provisioning.impl.ProvisioningTestUtil; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.internals.InternalMonitor; import com.evolveum.midpoint.schema.internals.InternalOperationClasses; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.ShadowUtil; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.test.DummyResourceContoller; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationStatusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationProvisioningScriptsType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import 
com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.testng.annotations.Listeners; import org.testng.annotations.Test; import javax.xml.namespace.QName; import java.io.File; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.evolveum.midpoint.test.DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME; import static com.evolveum.midpoint.test.IntegrationTestTools.display; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertNotNull; import static org.testng.AssertJUnit.assertNull; import static org.testng.AssertJUnit.assertTrue; /** * The test of Provisioning service on the API level. The test is using dummy * resource for speed and flexibility. 
 *
 * <p>This class exercises attribute-update ordering: attributes carrying a priority
 * must be written in separate, ordered connector calls, and read+replace attributes
 * must be turned into replace operations, while plain attributes keep native
 * add/delete semantics.
 *
 * @author Radovan Semancik
 * @author Pavol Mederly
 *
 */
@ContextConfiguration(locations = "classpath:ctx-provisioning-test-main.xml")
@DirtiesContext
@Listeners({ com.evolveum.midpoint.tools.testng.AlphabeticalMethodInterceptor.class })
public class TestDummyPrioritiesAndReadReplace extends AbstractDummyTest {

    private static final Trace LOGGER = TraceManager.getTrace(TestDummyPrioritiesAndReadReplace.class);

    // ICF UID of the "will" account, captured in test100 and reused by later tests
    // (tests run in alphabetical order, see the AlphabeticalMethodInterceptor listener)
    protected String willIcfUid;

    public static final File TEST_DIR = new File(TEST_DIR_DUMMY, "dummy-priorities-read-replace");
    public static final File RESOURCE_DUMMY_FILE = new File(TEST_DIR, "resource-dummy.xml");

    @Override
    protected File getResourceDummyFilename() {
        return RESOURCE_DUMMY_FILE;
    }

    // null means: compare ICFS_UID values with plain equality (no matching rule)
    protected MatchingRule<String> getUidMatchingRule() {
        return null;
    }

    @Override
    public void initSystem(Task initTask, OperationResult initResult) throws Exception {
        super.initSystem(initTask, initResult);
        InternalMonitor.setTrace(InternalOperationClasses.CONNECTOR_OPERATIONS, true);
        // in order to have schema available here
        resourceType = provisioningService.getObject(ResourceType.class, RESOURCE_DUMMY_OID, null,
                taskManager.createTaskInstance(), initResult).asObjectable();
    }

    /**
     * Creates the "will" account and verifies the resulting shadow in repo, in
     * provisioning and on the dummy resource. Also records {@link #willIcfUid}
     * for the subsequent modify tests.
     */
    // copied from TestDummy
    @Test
    public void test100AddAccount() throws Exception {
        final String TEST_NAME = "test100AddAccount";
        TestUtil.displayTestTile(TEST_NAME);
        // GIVEN
        Task task = taskManager.createTaskInstance(TestDummy.class.getName() + "." + TEST_NAME);
        OperationResult result = new OperationResult(TestDummy.class.getName() + "." + TEST_NAME);
        syncServiceMock.reset();

        PrismObject<ShadowType> account = prismContext.parseObject(getAccountWillFile());
        account.checkConsistence();

        display("Adding shadow", account);

        // WHEN
        String addedObjectOid = provisioningService.addObject(account, null, null, task, result);

        // THEN
        result.computeStatus();
        display("add object result", result);
        TestUtil.assertSuccess("addObject has failed (result)", result);
        assertEquals(ACCOUNT_WILL_OID, addedObjectOid);

        account.checkConsistence();

        PrismObject<ShadowType> accountRepo = repositoryService.getObject(ShadowType.class, ACCOUNT_WILL_OID, null,
                result);
        willIcfUid = getIcfUid(accountRepo);

        ActivationType activationRepo = accountRepo.asObjectable().getActivation();
        if (supportsActivation()) {
            assertNotNull("No activation in "+accountRepo+" (repo)", activationRepo);
            assertEquals("Wrong activation enableTimestamp in "+accountRepo+" (repo)",
                    ACCOUNT_WILL_ENABLE_TIMESTAMP, activationRepo.getEnableTimestamp());
        } else {
            assertNull("Activation sneaked in (repo)", activationRepo);
        }

        syncServiceMock.assertNotifySuccessOnly();

        PrismObject<ShadowType> accountProvisioning = provisioningService.getObject(ShadowType.class,
                ACCOUNT_WILL_OID, null, task, result);
        display("Account provisioning", accountProvisioning);
        ShadowType accountTypeProvisioning = accountProvisioning.asObjectable();
        display("account from provisioning", accountTypeProvisioning);
        PrismAsserts.assertEqualsPolyString("Name not equal", ACCOUNT_WILL_USERNAME,
                accountTypeProvisioning.getName());
        assertEquals("Wrong kind (provisioning)", ShadowKindType.ACCOUNT, accountTypeProvisioning.getKind());
        assertAttribute(accountProvisioning, SchemaConstants.ICFS_NAME, ACCOUNT_WILL_USERNAME);
        assertAttribute(accountProvisioning, getUidMatchingRule(), SchemaConstants.ICFS_UID, willIcfUid);

        ActivationType activationProvisioning = accountTypeProvisioning.getActivation();
        if (supportsActivation()) {
            assertNotNull("No activation in "+accountProvisioning+" (provisioning)", activationProvisioning);
            assertEquals("Wrong activation administrativeStatus in "+accountProvisioning+" (provisioning)",
                    ActivationStatusType.ENABLED, activationProvisioning.getAdministrativeStatus());
            TestUtil.assertEqualsTimestamp("Wrong activation enableTimestamp in "+accountProvisioning+" (provisioning)",
                    ACCOUNT_WILL_ENABLE_TIMESTAMP, activationProvisioning.getEnableTimestamp());
        } else {
            assertNull("Activation sneaked in (provisioning)", activationProvisioning);
        }

        // The password must never be readable back from the shadow
        assertNull("The _PASSSWORD_ attribute sneaked into shadow", ShadowUtil.getAttributeValues(
                accountTypeProvisioning, new QName(SchemaConstants.NS_ICF_SCHEMA, "password")));

        // Check if the account was created in the dummy resource
        DummyAccount dummyAccount = getDummyAccountAssert(ACCOUNT_WILL_USERNAME, willIcfUid);
        assertNotNull("No dummy account", dummyAccount);
        assertEquals("Username is wrong", ACCOUNT_WILL_USERNAME, dummyAccount.getName());
        assertEquals("Fullname is wrong", "Will Turner", dummyAccount.getAttributeValue("fullname"));
        assertTrue("The account is not enabled", dummyAccount.isEnabled());
        assertEquals("Wrong password", "3lizab3th", dummyAccount.getPassword());

        // Check if the shadow is still in the repo (e.g. that the consistency or sync haven't removed it)
        PrismObject<ShadowType> shadowFromRepo = repositoryService.getObject(ShadowType.class, addedObjectOid,
                null, result);
        assertNotNull("Shadow was not created in the repository", shadowFromRepo);
        display("Repository shadow", shadowFromRepo.debugDump());

        ProvisioningTestUtil.checkRepoAccountShadow(shadowFromRepo);

        checkConsistency(accountProvisioning);
        //assertSteadyResource();
    }

    /**
     * Replaces fullname, weapon, loot and title in one delta and verifies the
     * values on the resource, plus that the connector executed exactly three
     * update calls grouped by priority (weapon prio 0, loot prio 1, the rest
     * batched together).
     */
    @Test
    public void test123ModifyObjectReplace() throws Exception {
        final String TEST_NAME = "test123ModifyObjectReplace";
        TestUtil.displayTestTile(TEST_NAME);

        Task task = taskManager.createTaskInstance(TestDummyPrioritiesAndReadReplace.class.getName() + "."
                + TEST_NAME);
        OperationResult result = task.getResult();
        syncServiceMock.reset();

        // todo add correct definition
        ObjectDelta<ShadowType> objectDelta = ObjectDelta.createModificationReplaceProperty(ShadowType.class,
                ACCOUNT_WILL_OID, dummyResourceCtl.getAttributeFullnamePath(), prismContext,
                "Pirate Master Will Turner");
        PropertyDelta weaponDelta = objectDelta.createPropertyModification(dummyResourceCtl.getAttributeWeaponPath());
        weaponDelta.setDefinition(
                getAttributeDefinition(resourceType, ShadowKindType.ACCOUNT, null,
                        DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME));
        weaponDelta.setValuesToReplace(new PrismPropertyValue<>("Gun"));
        objectDelta.addModification(weaponDelta);
        PropertyDelta lootDelta = objectDelta.createPropertyModification(dummyResourceCtl.getAttributeLootPath());
        lootDelta.setDefinition(
                getAttributeDefinition(resourceType, ShadowKindType.ACCOUNT, null,
                        DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME));
        lootDelta.setValuesToReplace(new PrismPropertyValue<>(43));
        objectDelta.addModification(lootDelta);
        PropertyDelta titleDelta = objectDelta
                .createPropertyModification(dummyResourceCtl.getAttributePath(DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME));
        titleDelta.setDefinition(
                getAttributeDefinition(resourceType, ShadowKindType.ACCOUNT, null,
                        DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME));
        titleDelta.setValuesToReplace(new PrismPropertyValue<>("Pirate Master"));
        objectDelta.addModification(titleDelta);
        display("ObjectDelta", objectDelta);
        objectDelta.checkConsistence();

        // WHEN
        provisioningService.modifyObject(ShadowType.class, objectDelta.getOid(), objectDelta.getModifications(),
                new OperationProvisioningScriptsType(), null, task, result);

        // THEN
        result.computeStatus();
        display("modifyObject result", result);
        TestUtil.assertSuccess(result);

        objectDelta.checkConsistence();
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_FULLNAME_NAME, "Pirate Master Will Turner");
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Pirate Master");
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME, 43);
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME, "Gun");

        // BEWARE: very brittle!
        List<OperationResult> updatesExecuted = TestUtil.selectSubresults(result,
                ProvisioningTestUtil.CONNID_CONNECTOR_FACADE_CLASS_NAME + ".update");
        assertEquals("Wrong number of updates executed", 3, updatesExecuted.size());
        checkAttributesUpdated(updatesExecuted.get(0), "update",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME);
        checkAttributesUpdated(updatesExecuted.get(1), "update",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME);
        checkAttributesUpdated(updatesExecuted.get(2), "update",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_FULLNAME_NAME,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME);

        syncServiceMock.assertNotifySuccessOnly();

        //assertSteadyResource();
    }

    /**
     * Asserts that one connector sub-result is the expected operation and that it
     * touched exactly the given attribute names (extracted from its "attributes"
     * parameter string).
     */
    private void checkAttributesUpdated(OperationResult operationResult, String operation,
            String... attributeNames) {
        assertEquals("Wrong operation name",
                ProvisioningTestUtil.CONNID_CONNECTOR_FACADE_CLASS_NAME + "." + operation,
                operationResult.getOperation());
        Collection<String> updatedAttributes = parseUpdatedAttributes(
                operationResult.getParams().get("attributes").toString());
        assertEquals("Names of updated attributes do not match",
                new HashSet<>(Arrays.asList(attributeNames)), updatedAttributes);
    }

    // From something like this: [Attribute: {Name=fullname, Value=[Pirate Master Will Turner]},Attribute: {Name=title, Value=[Pirate Master]}]
    // we would like to get ["fullname", "title"]
    private Collection<String> parseUpdatedAttributes(String attributes) {
        Pattern pattern = Pattern.compile("Attribute: \\{Name=(\\w+),");
        Matcher matcher = pattern.matcher(attributes);
        Set<String> retval = new HashSet<>();
        while (matcher.find()) {
            retval.add(matcher.group(1));
        }
        return retval;
    }

    /**
     * Mixes add/delete modifications on read-replace and plain attributes and
     * verifies both the resulting values and the exact sequence of connector
     * calls: read-replace attributes become "update" (replace) calls ordered by
     * priority, plain attributes keep native addAttributeValues /
     * removeAttributeValues calls.
     */
    @Test
    public void test150ModifyObjectAddDelete() throws Exception {
        final String TEST_NAME = "test150ModifyObjectAddDelete";
        TestUtil.displayTestTile(TEST_NAME);

        Task task = taskManager.createTaskInstance(TestDummyPrioritiesAndReadReplace.class.getName() + "."
                + TEST_NAME);
        OperationResult result = task.getResult();
        syncServiceMock.reset();

        // NOT a read replace attribute
        // todo add correct definition
        ObjectDelta<ShadowType> objectDelta = ObjectDelta.createModificationReplaceProperty(ShadowType.class,
                ACCOUNT_WILL_OID, dummyResourceCtl.getAttributeFullnamePath(), prismContext,
                "Pirate Great Master Will Turner");
        // read replace attribute, priority 0
        PropertyDelta weaponDelta = objectDelta.createPropertyModification(dummyResourceCtl.getAttributeWeaponPath());
        weaponDelta.setDefinition(
                getAttributeDefinition(resourceType, ShadowKindType.ACCOUNT, null,
                        DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME));
        weaponDelta.addValuesToAdd(new PrismPropertyValue<>("Sword"));
        weaponDelta.addValuesToDelete(new PrismPropertyValue<>("GUN")); // case-insensitive treatment should work here
        objectDelta.addModification(weaponDelta);
        // read replace attribute, priority 1
        PropertyDelta lootDelta = objectDelta.createPropertyModification(dummyResourceCtl.getAttributeLootPath());
        lootDelta.setDefinition(
                getAttributeDefinition(resourceType, ShadowKindType.ACCOUNT, null,
                        DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME));
        lootDelta.addValuesToAdd(new PrismPropertyValue<>(44));
        lootDelta.addValuesToDelete(new PrismPropertyValue<>(43));
        objectDelta.addModification(lootDelta);
        // NOT a read-replace attribute
        PropertyDelta titleDelta = objectDelta
                .createPropertyModification(dummyResourceCtl.getAttributePath(DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME));
        titleDelta.setDefinition(
                getAttributeDefinition(resourceType, ShadowKindType.ACCOUNT, null,
                        DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME));
        titleDelta.addValuesToAdd(new PrismPropertyValue<>("Pirate Great Master"));
        titleDelta.addValuesToDelete(new PrismPropertyValue<>("Pirate Master"));
        objectDelta.addModification(titleDelta);
        // read replace attribute
        PropertyDelta drinkDelta = objectDelta.createPropertyModification(
                dummyResourceCtl.getAttributePath(DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_DRINK_NAME));
        drinkDelta.setDefinition(
                getAttributeDefinition(resourceType, ShadowKindType.ACCOUNT, null,
                        DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_DRINK_NAME));
        drinkDelta.addValuesToAdd(new PrismPropertyValue<>("orange juice"));
        objectDelta.addModification(drinkDelta);

        display("ObjectDelta", objectDelta);
        objectDelta.checkConsistence();

        // WHEN
        provisioningService.modifyObject(ShadowType.class, objectDelta.getOid(), objectDelta.getModifications(),
                new OperationProvisioningScriptsType(), null, task, result);

        // THEN
        result.computeStatus();
        display("modifyObject result", result);
        TestUtil.assertSuccess(result);

        objectDelta.checkConsistence();
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_FULLNAME_NAME, "Pirate Great Master Will Turner");
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME, "Pirate Great Master");
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME, 44);
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME, "Sword");
        assertDummyAccountAttributeValues(ACCOUNT_WILL_USERNAME, willIcfUid,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_DRINK_NAME, "orange juice");

        // BEWARE: very brittle!
        List<OperationResult> updatesExecuted = TestUtil.selectSubresults(result,
                ProvisioningTestUtil.CONNID_CONNECTOR_FACADE_CLASS_NAME + ".update",
                ProvisioningTestUtil.CONNID_CONNECTOR_FACADE_CLASS_NAME + ".addAttributeValues",
                ProvisioningTestUtil.CONNID_CONNECTOR_FACADE_CLASS_NAME + ".removeAttributeValues");
        assertEquals("Wrong number of updates executed", 5, updatesExecuted.size());
        checkAttributesUpdated(updatesExecuted.get(0), "update",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_WEAPON_NAME); // prio 0, read-replace
        checkAttributesUpdated(updatesExecuted.get(1), "update",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_LOOT_NAME); // prio 1, read-replace
        checkAttributesUpdated(updatesExecuted.get(2), "addAttributeValues",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME); // prio none, not read-replace
        checkAttributesUpdated(updatesExecuted.get(3), "update",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_DRINK_NAME,
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_FULLNAME_NAME); // prio none, read-replace + real replace
        checkAttributesUpdated(updatesExecuted.get(4), "removeAttributeValues",
                DummyResourceContoller.DUMMY_ACCOUNT_ATTRIBUTE_TITLE_NAME); // prio none, not read-replace

        syncServiceMock.assertNotifySuccessOnly();

        //assertSteadyResource();
    }
}
/*
 * Copyright 2016, gRPC Authors All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package io.grpc.netty;

import static com.google.common.base.Charsets.US_ASCII;
import static com.google.common.base.Preconditions.checkArgument;
import static io.grpc.netty.Utils.TE_HEADER;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.util.AsciiString.isUpperCase;

import com.google.common.io.BaseEncoding;
import io.grpc.Metadata;
import io.netty.handler.codec.http2.DefaultHttp2HeadersDecoder;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.util.AsciiString;
import io.netty.util.internal.PlatformDependent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * A headers utils providing custom gRPC implementations of {@link DefaultHttp2HeadersDecoder}.
 */
class GrpcHttp2HeadersUtils {

  /**
   * Server-side decoder: produces {@link GrpcHttp2RequestHeaders}, which stores HTTP/2 request
   * pseudo-headers in dedicated fields instead of the generic name/value table.
   */
  static final class GrpcHttp2ServerHeadersDecoder extends DefaultHttp2HeadersDecoder {

    GrpcHttp2ServerHeadersDecoder(long maxHeaderListSize) {
      // true = validate headers; size guess from the superclass drives initial array sizing.
      super(true, maxHeaderListSize);
    }

    @Override
    protected GrpcHttp2InboundHeaders newHeaders() {
      return new GrpcHttp2RequestHeaders(numberOfHeadersGuess());
    }
  }

  /**
   * Client-side decoder: produces {@link GrpcHttp2ResponseHeaders}, which keeps all headers
   * (including {@code :status}) in the generic name/value table.
   */
  static final class GrpcHttp2ClientHeadersDecoder extends DefaultHttp2HeadersDecoder {

    GrpcHttp2ClientHeadersDecoder(long maxHeaderListSize) {
      super(true, maxHeaderListSize);
    }

    @Override
    protected GrpcHttp2InboundHeaders newHeaders() {
      return new GrpcHttp2ResponseHeaders(numberOfHeadersGuess());
    }
  }

  /**
   * A {@link Http2Headers} implementation optimized for inbound/received headers.
   *
   * <p>Header names and values are stored in simple arrays, which makes insert run in O(1)
   * and retrievial a O(n). Header name equality is not determined by the equals implementation of
   * {@link CharSequence} type, but by comparing two names byte to byte.
   *
   * <p>All {@link CharSequence} input parameters and return values are required to be of type
   * {@link AsciiString}.
   */
  abstract static class GrpcHttp2InboundHeaders extends AbstractHttp2Headers {

    // Byte form of Metadata.BINARY_HEADER_SUFFIX ("-bin"); names with this suffix carry
    // base64-encoded binary values.
    private static final AsciiString binaryHeaderSuffix =
        new AsciiString(Metadata.BINARY_HEADER_SUFFIX.getBytes(US_ASCII));

    // Flat pair table: even index = name bytes, odd index = value bytes (base64-DECODED for
    // -bin headers). Grown lazily by expandHeadersAndValues().
    private byte[][] namesAndValues;
    // Original AsciiString values (still base64-ENCODED for -bin headers), indexed by header
    // ordinal (namesAndValuesIdx / 2); used by get()/getAll()/toString().
    private AsciiString[] values;
    // Next free slot in namesAndValues; always even between add() calls.
    private int namesAndValuesIdx;

    GrpcHttp2InboundHeaders(int numHeadersGuess) {
      checkArgument(numHeadersGuess > 0, "numHeadersGuess needs to be gt zero.");
      namesAndValues = new byte[numHeadersGuess * 2][];
      values = new AsciiString[numHeadersGuess];
    }

    /**
     * Appends a name/value pair in O(1). The raw (possibly base64-decoded) bytes go into the
     * pair table; the original AsciiString value is kept for CharSequence-returning accessors.
     */
    protected Http2Headers add(AsciiString name, AsciiString value) {
      if (namesAndValuesIdx == namesAndValues.length) {
        expandHeadersAndValues();
      }
      byte[] nameBytes = bytes(name);
      byte[] valueBytes = toBinaryValue(name, value);
      values[namesAndValuesIdx / 2] = value;
      namesAndValues[namesAndValuesIdx] = nameBytes;
      namesAndValuesIdx++;
      namesAndValues[namesAndValuesIdx] = valueBytes;
      namesAndValuesIdx++;
      return this;
    }

    /**
     * Linear scan for the first header with the given name; returns the original AsciiString
     * value (base64-encoded for -bin headers), or {@code null} if absent.
     */
    protected CharSequence get(AsciiString name) {
      for (int i = 0; i < namesAndValuesIdx; i += 2) {
        if (equals(name, namesAndValues[i])) {
          return values[i / 2];
        }
      }
      return null;
    }

    @Override
    public CharSequence status() {
      return get(Http2Headers.PseudoHeaderName.STATUS.value());
    }

    @Override
    public List<CharSequence> getAll(CharSequence csName) {
      AsciiString name = requireAsciiString(csName);
      List<CharSequence> returnValues = new ArrayList<CharSequence>(4);
      for (int i = 0; i < namesAndValuesIdx; i += 2) {
        if (equals(name, namesAndValues[i])) {
          returnValues.add(values[i / 2]);
        }
      }
      return returnValues;
    }

    /**
     * Returns the header names and values as bytes. An even numbered index contains the
     * {@code byte[]} representation of a header name (in insertion order), and the subsequent
     * odd index number contains the corresponding header value.
     *
     * <p>The values of binary headers (with a -bin suffix), are already base64 decoded.
     *
     * <p>The array may contain several {@code null} values at the end. A {@code null} value at an
     * index means that all higher numbered indices also contain {@code null} values.
     */
    byte[][] namesAndValues() {
      return namesAndValues;
    }

    /**
     * Returns the number of none-null headers in {@link #namesAndValues()}.
     */
    protected int numHeaders() {
      return namesAndValuesIdx / 2;
    }

    // Byte-wise name comparison against a raw byte[] entry of the pair table.
    protected static boolean equals(AsciiString str0, byte[] str1) {
      return equals(str0.array(), str0.arrayOffset(), str0.length(), str1, 0, str1.length);
    }

    // Byte-wise comparison of two AsciiStrings (ignores their own equals()).
    protected static boolean equals(AsciiString str0, AsciiString str1) {
      return equals(str0.array(), str0.arrayOffset(), str0.length(), str1.array(),
          str1.arrayOffset(), str1.length());
    }

    protected static boolean equals(byte[] bytes0, int offset0, int length0, byte[] bytes1,
        int offset1, int length1) {
      if (length0 != length1) {
        return false;
      }
      return PlatformDependent.equals(bytes0, offset0, bytes1, offset1, length0);
    }

    // Base64-decodes values of "-bin" headers; other values pass through as raw bytes.
    private static byte[] toBinaryValue(AsciiString name, AsciiString value) {
      return name.endsWith(binaryHeaderSuffix)
          ? BaseEncoding.base64().decode(value)
          : bytes(value);
    }

    // Avoids a copy when the AsciiString wraps exactly its whole backing array.
    protected static byte[] bytes(AsciiString str) {
      return str.isEntireArrayUsed() ? str.array() : str.toByteArray();
    }

    protected static AsciiString requireAsciiString(CharSequence cs) {
      if (!(cs instanceof AsciiString)) {
        throw new IllegalArgumentException("AsciiString expected. Was: " + cs.getClass().getName());
      }
      return (AsciiString) cs;
    }

    // HTTP/2 pseudo-headers start with ':'.
    protected static boolean isPseudoHeader(AsciiString str) {
      return !str.isEmpty() && str.charAt(0) == ':';
    }

    // HTTP/2 header names must be lowercase; any uppercase byte is a connection error.
    protected AsciiString validateName(AsciiString str) {
      int offset = str.arrayOffset();
      int length = str.length();
      final byte[] data = str.array();
      for (int i = offset; i < offset + length; i++) {
        if (isUpperCase(data[i])) {
          PlatformDependent.throwException(connectionError(PROTOCOL_ERROR,
              "invalid header name '%s'", str));
        }
      }
      return str;
    }

    // Grows both arrays by ~1.5x (minimum capacity of 2 value slots).
    private void expandHeadersAndValues() {
      int newValuesLen = Math.max(2, values.length + values.length / 2);
      int newNamesAndValuesLen = newValuesLen * 2;

      byte[][] newNamesAndValues = new byte[newNamesAndValuesLen][];
      AsciiString[] newValues = new AsciiString[newValuesLen];
      System.arraycopy(namesAndValues, 0, newNamesAndValues, 0, namesAndValues.length);
      System.arraycopy(values, 0, newValues, 0, values.length);
      namesAndValues = newNamesAndValues;
      values = newValues;
    }

    @Override
    public int size() {
      return numHeaders();
    }

    protected static void appendNameAndValue(StringBuilder builder, CharSequence name,
        CharSequence value, boolean prependSeparator) {
      if (prependSeparator) {
        builder.append(", ");
      }
      builder.append(name).append(": ").append(value);
    }

    // Renders the generic name/value table; binary header values appear base64-encoded.
    protected final String namesAndValuesToString() {
      StringBuilder builder = new StringBuilder();
      boolean prependSeparator = false;
      for (int i = 0; i < namesAndValuesIdx; i += 2) {
        String name = new String(namesAndValues[i], US_ASCII);
        // If binary headers, the value is base64 encoded.
        AsciiString value = values[i / 2];
        appendNameAndValue(builder, name, value, prependSeparator);
        prependSeparator = true;
      }
      return builder.toString();
    }
  }

  /**
   * A {@link GrpcHttp2InboundHeaders} implementation, optimized for HTTP/2 request headers. That
   * is, HTTP/2 request pseudo headers are stored in dedicated fields and are NOT part of the
   * array returned by {@link #namesAndValues()}.
   *
   * <p>This class only implements the methods used by {@link NettyServerHandler} and tests. All
   * other methods throw an {@link UnsupportedOperationException}.
   */
  static final class GrpcHttp2RequestHeaders extends GrpcHttp2InboundHeaders {

    private static final AsciiString PATH_HEADER = AsciiString.of(":path");
    private static final AsciiString AUTHORITY_HEADER = AsciiString.of(":authority");
    private static final AsciiString METHOD_HEADER = AsciiString.of(":method");
    private static final AsciiString SCHEME_HEADER = AsciiString.of(":scheme");

    // Request pseudo-headers and "te" are kept out of the generic table.
    private AsciiString path;
    private AsciiString authority;
    private AsciiString method;
    private AsciiString scheme;
    private AsciiString te;

    GrpcHttp2RequestHeaders(int numHeadersGuess) {
      super(numHeadersGuess);
    }

    @Override
    public Http2Headers add(CharSequence csName, CharSequence csValue) {
      AsciiString name = validateName(requireAsciiString(csName));
      AsciiString value = requireAsciiString(csValue);
      if (isPseudoHeader(name)) {
        addPseudoHeader(name, value);
        return this;
      }
      if (equals(TE_HEADER, name)) {
        te = value;
        return this;
      }
      return add(name, value);
    }

    @Override
    public CharSequence get(CharSequence csName) {
      AsciiString name = requireAsciiString(csName);
      checkArgument(!isPseudoHeader(name), "Use direct accessor methods for pseudo headers.");
      if (equals(TE_HEADER, name)) {
        return te;
      }
      return get(name);
    }

    // Routes a pseudo-header into its dedicated field; unknown pseudo-headers are a
    // connection-level protocol error.
    private void addPseudoHeader(CharSequence csName, CharSequence csValue) {
      AsciiString name = requireAsciiString(csName);
      AsciiString value = requireAsciiString(csValue);

      if (equals(PATH_HEADER, name)) {
        path = value;
      } else if (equals(AUTHORITY_HEADER, name)) {
        authority = value;
      } else if (equals(METHOD_HEADER, name)) {
        method = value;
      } else if (equals(SCHEME_HEADER, name)) {
        scheme = value;
      } else {
        PlatformDependent.throwException(
            connectionError(PROTOCOL_ERROR, "Illegal pseudo-header '%s' in request.", name));
      }
    }

    @Override
    public CharSequence path() {
      return path;
    }

    @Override
    public CharSequence authority() {
      return authority;
    }

    @Override
    public CharSequence method() {
      return method;
    }

    @Override
    public CharSequence scheme() {
      return scheme;
    }

    /**
     * This method is called in tests only.
     */
    @Override
    public List<CharSequence> getAll(CharSequence csName) {
      AsciiString name = requireAsciiString(csName);
      if (isPseudoHeader(name)) {
        // This code should never be reached.
        throw new IllegalArgumentException("Use direct accessor methods for pseudo headers.");
      }
      if (equals(TE_HEADER, name)) {
        return Collections.singletonList((CharSequence) te);
      }
      return super.getAll(csName);
    }

    /**
     * This method is called in tests only.
     */
    @Override
    public int size() {
      // Generic table size plus one for each pseudo-header/te field that is set.
      int size = 0;
      if (path != null) {
        size++;
      }
      if (authority != null) {
        size++;
      }
      if (method != null) {
        size++;
      }
      if (scheme != null) {
        size++;
      }
      if (te != null) {
        size++;
      }
      size += super.size();
      return size;
    }

    @Override
    public String toString() {
      StringBuilder builder = new StringBuilder(getClass().getSimpleName()).append('[');
      boolean prependSeparator = false;
      if (path != null) {
        appendNameAndValue(builder, PATH_HEADER, path, prependSeparator);
        prependSeparator = true;
      }
      if (authority != null) {
        appendNameAndValue(builder, AUTHORITY_HEADER, authority, prependSeparator);
        prependSeparator = true;
      }
      if (method != null) {
        appendNameAndValue(builder, METHOD_HEADER, method, prependSeparator);
        prependSeparator = true;
      }
      if (scheme != null) {
        appendNameAndValue(builder, SCHEME_HEADER, scheme, prependSeparator);
        prependSeparator = true;
      }
      if (te != null) {
        appendNameAndValue(builder, TE_HEADER, te, prependSeparator);
      }
      String namesAndValues = namesAndValuesToString();
      // NOTE(review): builder already contains the class name + '[', so builder.length() > 0 is
      // always true here; when no pseudo-headers were appended this emits a leading ", " before
      // the regular headers. Possibly intended to test prependSeparator instead — confirm.
      if (builder.length() > 0 && namesAndValues.length() > 0) {
        builder.append(", ");
      }
      builder.append(namesAndValues);
      builder.append(']');
      return builder.toString();
    }
  }

  /**
   * This class only implements the methods used by {@link NettyClientHandler} and tests. All
   * other methods throw an {@link UnsupportedOperationException}.
   *
   * <p>Unlike in {@link GrpcHttp2ResponseHeaders} the {@code :status} pseudo-header is not treated
   * special and is part of {@link #namesAndValues}.
   */
  static final class GrpcHttp2ResponseHeaders extends GrpcHttp2InboundHeaders {

    GrpcHttp2ResponseHeaders(int numHeadersGuess) {
      super(numHeadersGuess);
    }

    @Override
    public Http2Headers add(CharSequence csName, CharSequence csValue) {
      AsciiString name = validateName(requireAsciiString(csName));
      AsciiString value = requireAsciiString(csValue);
      return add(name, value);
    }

    @Override
    public CharSequence get(CharSequence csName) {
      AsciiString name = requireAsciiString(csName);
      return get(name);
    }

    @Override
    public String toString() {
      StringBuilder builder = new StringBuilder(getClass().getSimpleName()).append('[');
      builder.append(namesAndValuesToString()).append(']');
      return builder.toString();
    }
  }
}
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1; import com.google.api.core.BetaApi; import com.google.api.gax.httpjson.ApiMessage; import java.util.List; import java.util.Map; import java.util.Objects; import javax.annotation.Generated; import javax.annotation.Nullable; @Generated("by GAPIC") @BetaApi public final class NodeGroupAggregatedList implements ApiMessage { private final String id; private final Map<String, NodeGroupsScopedList> items; private final String kind; private final String nextPageToken; private final String selfLink; private final Warning warning; private NodeGroupAggregatedList() { this.id = null; this.items = null; this.kind = null; this.nextPageToken = null; this.selfLink = null; this.warning = null; } private NodeGroupAggregatedList( String id, Map<String, NodeGroupsScopedList> items, String kind, String nextPageToken, String selfLink, Warning warning) { this.id = id; this.items = items; this.kind = kind; this.nextPageToken = nextPageToken; this.selfLink = selfLink; this.warning = warning; } @Override public Object getFieldValue(String fieldName) { if ("id".equals(fieldName)) { return id; } if ("items".equals(fieldName)) { return items; } if ("kind".equals(fieldName)) { return kind; } if ("nextPageToken".equals(fieldName)) { return nextPageToken; } if ("selfLink".equals(fieldName)) { return selfLink; } if ("warning".equals(fieldName)) { return warning; } return null; } @Nullable 
@Override public ApiMessage getApiMessageRequestBody() { return null; } @Nullable @Override /** * The fields that should be serialized (even if they have empty values). If the containing * message object has a non-null fieldmask, then all the fields in the field mask (and only those * fields in the field mask) will be serialized. If the containing object does not have a * fieldmask, then only non-empty fields will be serialized. */ public List<String> getFieldMask() { return null; } /** [Output Only] Unique identifier for the resource; defined by the server. */ public String getId() { return id; } /** * A list of NodeGroupsScopedList resources. The key for the map is: [Output Only] Name of the * scope containing this set of node groups. */ public Map<String, NodeGroupsScopedList> getItemsMap() { return items; } /** * [Output Only] Type of resource.Always compute#nodeGroupAggregatedList for aggregated lists of * node groups. */ public String getKind() { return kind; } /** * [Output Only] This token allows you to get the next page of results for list requests. If the * number of results is larger than maxResults, use the nextPageToken as a value for the query * parameter pageToken in the next list request. Subsequent list requests will have their own * nextPageToken to continue paging through the results. */ public String getNextPageToken() { return nextPageToken; } /** [Output Only] Server-defined URL for this resource. */ public String getSelfLink() { return selfLink; } /** [Output Only] Informational warning message. */ public Warning getWarning() { return warning; } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(NodeGroupAggregatedList prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } public static NodeGroupAggregatedList getDefaultInstance() { return DEFAULT_INSTANCE; } private static final NodeGroupAggregatedList DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new NodeGroupAggregatedList(); } public static class Builder { private String id; private Map<String, NodeGroupsScopedList> items; private String kind; private String nextPageToken; private String selfLink; private Warning warning; Builder() {} public Builder mergeFrom(NodeGroupAggregatedList other) { if (other == NodeGroupAggregatedList.getDefaultInstance()) return this; if (other.getId() != null) { this.id = other.id; } if (other.getItemsMap() != null) { this.items = other.items; } if (other.getKind() != null) { this.kind = other.kind; } if (other.getNextPageToken() != null) { this.nextPageToken = other.nextPageToken; } if (other.getSelfLink() != null) { this.selfLink = other.selfLink; } if (other.getWarning() != null) { this.warning = other.warning; } return this; } Builder(NodeGroupAggregatedList source) { this.id = source.id; this.items = source.items; this.kind = source.kind; this.nextPageToken = source.nextPageToken; this.selfLink = source.selfLink; this.warning = source.warning; } /** [Output Only] Unique identifier for the resource; defined by the server. */ public String getId() { return id; } /** [Output Only] Unique identifier for the resource; defined by the server. */ public Builder setId(String id) { this.id = id; return this; } /** * A list of NodeGroupsScopedList resources. The key for the map is: [Output Only] Name of the * scope containing this set of node groups. */ public Map<String, NodeGroupsScopedList> getItemsMap() { return items; } /** * A list of NodeGroupsScopedList resources. The key for the map is: [Output Only] Name of the * scope containing this set of node groups. 
*/ public Builder putAllItems(Map<String, NodeGroupsScopedList> items) { this.items = items; return this; } /** * [Output Only] Type of resource.Always compute#nodeGroupAggregatedList for aggregated lists of * node groups. */ public String getKind() { return kind; } /** * [Output Only] Type of resource.Always compute#nodeGroupAggregatedList for aggregated lists of * node groups. */ public Builder setKind(String kind) { this.kind = kind; return this; } /** * [Output Only] This token allows you to get the next page of results for list requests. If the * number of results is larger than maxResults, use the nextPageToken as a value for the query * parameter pageToken in the next list request. Subsequent list requests will have their own * nextPageToken to continue paging through the results. */ public String getNextPageToken() { return nextPageToken; } /** * [Output Only] This token allows you to get the next page of results for list requests. If the * number of results is larger than maxResults, use the nextPageToken as a value for the query * parameter pageToken in the next list request. Subsequent list requests will have their own * nextPageToken to continue paging through the results. */ public Builder setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; return this; } /** [Output Only] Server-defined URL for this resource. */ public String getSelfLink() { return selfLink; } /** [Output Only] Server-defined URL for this resource. */ public Builder setSelfLink(String selfLink) { this.selfLink = selfLink; return this; } /** [Output Only] Informational warning message. */ public Warning getWarning() { return warning; } /** [Output Only] Informational warning message. 
*/ public Builder setWarning(Warning warning) { this.warning = warning; return this; } public NodeGroupAggregatedList build() { return new NodeGroupAggregatedList(id, items, kind, nextPageToken, selfLink, warning); } public Builder clone() { Builder newBuilder = new Builder(); newBuilder.setId(this.id); newBuilder.putAllItems(this.items); newBuilder.setKind(this.kind); newBuilder.setNextPageToken(this.nextPageToken); newBuilder.setSelfLink(this.selfLink); newBuilder.setWarning(this.warning); return newBuilder; } } @Override public String toString() { return "NodeGroupAggregatedList{" + "id=" + id + ", " + "items=" + items + ", " + "kind=" + kind + ", " + "nextPageToken=" + nextPageToken + ", " + "selfLink=" + selfLink + ", " + "warning=" + warning + "}"; } @Override public boolean equals(Object o) { if (o == this) { return true; } if (o instanceof NodeGroupAggregatedList) { NodeGroupAggregatedList that = (NodeGroupAggregatedList) o; return Objects.equals(this.id, that.getId()) && Objects.equals(this.items, that.getItemsMap()) && Objects.equals(this.kind, that.getKind()) && Objects.equals(this.nextPageToken, that.getNextPageToken()) && Objects.equals(this.selfLink, that.getSelfLink()) && Objects.equals(this.warning, that.getWarning()); } return false; } @Override public int hashCode() { return Objects.hash(id, items, kind, nextPageToken, selfLink, warning); } }
//********************************************************* // // Copyright (c) Microsoft. All rights reserved. // This code is licensed under the Apache License Version 2.0. // THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF // ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY // IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR // PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. // //********************************************************* package com.microsoft.uprove; import java.math.BigInteger; import java.security.AccessController; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.PrivilegedAction; import java.security.Security; import com.microsoft.uprove.UProveSDKPermission; /* * LOW-LEVEL IMPLEMENTATION CLASS. NOT PART OF PUBLIC API. */ /** * Implementation for SDK configuration options. * <p>For more detail regarding this class's strategy for discovering defaults * and allowing SDK users to get and set configuration options, see * {@link com.microsoft.uprove.Config}.</p> */ final class ConfigImpl { // option names for which we test when we allow outside code to get/set // config options private static final String OPTION_SECURERANDOM_ALGORITHM = "securerandom.algorithm"; private static final String OPTION_SECURERANDOM_PROVIDER = "securerandom.provider"; private static final String OPTION_MESSAGEDIGEST_PROVIDER = "messagedigest.provider"; private static final String OPTION_MATH_PRIMECONFIDENCELEVEL = "math.primeconfidencelevel"; // the base for all security properties we use private static final String SECURITY_PROPERTY_BASE = "com.microsoft.uprove."; // property names we use when calling java.security.Security.getProperty private static final String PROPERTY_SECURERANDOM_ALGORITHM = SECURITY_PROPERTY_BASE + OPTION_SECURERANDOM_ALGORITHM; private static final String PROPERTY_SECURERANDOM_PROVIDER = SECURITY_PROPERTY_BASE + OPTION_SECURERANDOM_PROVIDER; 
private static final String PROPERTY_MESSAGEDIGEST_PROVIDER = SECURITY_PROPERTY_BASE + OPTION_MESSAGEDIGEST_PROVIDER; private static final String PROPERTY_MATH_PRIMECONFIDENCELEVEL = SECURITY_PROPERTY_BASE + OPTION_MATH_PRIMECONFIDENCELEVEL; // prefixes for permission checks private static final String PREFIX_GET_OPTION = "getOption."; private static final String PREFIX_SET_OPTION = "setOption."; private static final String DEFAULT_SECURE_RANDOM_ALGORITHM = "SHA1PRNG"; /** * We choose 100 as the default, because that's what Java 1.4 chooses for * the default confidence for * {@link BigInteger#probablePrime(int, java.util.Random)}. */ private static final int DEFAULT_PRIME_CONFIDENCE_LEVEL = 100; // configuration settings // note: we create a new string so that our "unset" value is distinct from // any value that we'll possibly get from the User. if we simply set // OPTION_UNSET to "option unset", then we'd end up with an interned // String that would be reference-equal to an "option unset" String // given to us by a User. private static final String OPTION_UNSET = new String("option unset"); private static final int LEVEL_UNSET = -1; private static String secureRandomAlgorithm = OPTION_UNSET; private static String secureRandomProvider = OPTION_UNSET; private static String messageDigestProvider = OPTION_UNSET; private static int primeConfidenceLevel = LEVEL_UNSET; /** * Private constructor to prevent instantiation or subclassing. */ private ConfigImpl() { super(); } /** * Gets the default for an option, as specified in * java.security.Security's property set. * @param propertyName the name of the property holding our option. * @return the default setting, or <code>null</code> if either none * is set or we don't have permission to read the requested property. 
*/ @SuppressWarnings("unchecked") private static String getDefault(final String propertyName) { try { // permission java.security.SecurityPermission // "getProperty.{propertyName}"; return (String) AccessController.doPrivileged( new PrivilegedAction() { public Object run() { return Security.getProperty(propertyName); } }); } catch (SecurityException se) { // we don't have the required permission, so just... return null; } } /** * Returns the configured secure random algorithm, selecting the default * if none is configured. * @return the configured secure random algorithm. */ static synchronized String secureRandomAlgorithm() { if (secureRandomAlgorithm == OPTION_UNSET) { secureRandomAlgorithm = getDefault(PROPERTY_SECURERANDOM_ALGORITHM); // if there's none set or it's empty, use our default if (secureRandomAlgorithm == null || secureRandomAlgorithm.length() == 0) { secureRandomAlgorithm = DEFAULT_SECURE_RANDOM_ALGORITHM; } } return secureRandomAlgorithm; } /** * Returns the name of the * {@link java.security.SecureRandom SecureRandom} algorithm used by the * SDK. * @return a <code>SecureRandom</code> algorithm name. * @throws SecurityException if a security manager exists and its * {@link SecurityManager#checkPermission(java.security.Permission)} * method denies access to retrieve this configuration option's value. */ public static String getSecureRandomAlgorithm() throws SecurityException { final SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(new UProveSDKPermission(PREFIX_GET_OPTION + OPTION_SECURERANDOM_ALGORITHM)); } return secureRandomAlgorithm(); } /** * Sets the name of the * {@link java.security.SecureRandom SecureRandom} algorithm used by the * SDK. * @param algorithm a <code>SecureRandom</code> algorithm name, or * <code>null</code> to select the default according to the site-wide * default. 
* @throws SecurityException if a security manager exists and its * {@link SecurityManager#checkPermission(java.security.Permission)} * method denies access to set this configuration option's value. */ public static void setSecureRandomAlgorithm(final String algorithm) { final SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(new UProveSDKPermission(PREFIX_SET_OPTION + OPTION_SECURERANDOM_ALGORITHM)); } synchronized (ConfigImpl.class) { secureRandomAlgorithm = algorithm != null && algorithm.length() != 0 ? algorithm : OPTION_UNSET; RandomSourceImpl.reset(); } } /** * Returns the configured secure random provider. * @return the configured secure random provider. */ static synchronized String secureRandomProvider() { if (secureRandomProvider == OPTION_UNSET) { secureRandomProvider = getDefault(PROPERTY_SECURERANDOM_PROVIDER); // if we got the empty string, go with null, meaning that we'll // use the first provider based on the site's java.security config if (secureRandomProvider != null && secureRandomProvider.length() == 0) { secureRandomProvider = null; } } return secureRandomProvider; } /** * Returns the name of the * {@link java.security.SecureRandom SecureRandom} provider used by the * SDK. * @return a <code>SecureRandom</code> provider name. * @throws SecurityException if a security manager exists and its * {@link SecurityManager#checkPermission(java.security.Permission)} * method denies access to retrieve this configuration option's value. */ public static String getSecureRandomProvider() throws SecurityException { final SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(new UProveSDKPermission(PREFIX_GET_OPTION + OPTION_SECURERANDOM_PROVIDER)); } return secureRandomProvider(); } /** * Sets the name of the * {@link java.security.SecureRandom SecureRandom} provider to be used * by the SDK. 
     * @param provider the name of a provider of a <code>SecureRandom</code>
     * algorithm, or <code>null</code> to use the default according to the
     * site-wide configuration.
     * @throws SecurityException if a security manager exists and its
     * {@link SecurityManager#checkPermission(java.security.Permission)}
     * method denies access to set this configuration option's value.
     */
    public static void setSecureRandomProvider(final String provider) {
        final SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new UProveSDKPermission(PREFIX_SET_OPTION + OPTION_SECURERANDOM_PROVIDER));
        }
        synchronized (ConfigImpl.class) {
            // An empty string is treated the same as null: revert to the
            // "unset" sentinel so the site-wide default is used.
            secureRandomProvider = provider != null && provider.length() != 0 ? provider : OPTION_UNSET;
            // Discard any cached random source so the next use honors the new provider.
            RandomSourceImpl.reset();
        }
    }

    /**
     * Returns the configured message digest provider.
     * <p>Lazily resolves the provider from the site default on first call.
     * The identity comparison against the {@code OPTION_UNSET} sentinel is
     * deliberate: it distinguishes "never resolved" from a configured value
     * (including a resolved {@code null}).</p>
     * @return the configured message digest provider.
     */
    static synchronized String messageDigestProvider() {
        if (messageDigestProvider == OPTION_UNSET) {
            messageDigestProvider = getDefault(PROPERTY_MESSAGEDIGEST_PROVIDER);
            // if we got the empty string, go with null, meaning that we'll
            // use the first provider based on the site's java.security config
            if (messageDigestProvider != null && messageDigestProvider.length() == 0) {
                messageDigestProvider = null;
            }
        }
        return messageDigestProvider;
    }

    /**
     * Returns the name of the
     * {@link java.security.MessageDigest MessageDigest} provider used by the
     * SDK.
     * @return a <code>MessageDigest</code> provider name.
     * @throws SecurityException if a security manager exists and its
     * {@link SecurityManager#checkPermission(java.security.Permission)}
     * method denies access to retrieve this configuration option's value.
     */
    public static String getMessageDigestProvider() throws SecurityException {
        final SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new UProveSDKPermission(PREFIX_GET_OPTION + OPTION_MESSAGEDIGEST_PROVIDER));
        }
        return messageDigestProvider();
    }

    /**
     * Sets the name of the
     * {@link java.security.MessageDigest MessageDigest} provider used by the
     * SDK.
     * @param provider a <code>MessageDigest</code> provider name, or
     * <code>null</code> to indicate that the SDK should use the
     * <code>java.security</code> default.
     * @throws SecurityException if a security manager exists and its
     * {@link SecurityManager#checkPermission(java.security.Permission)}
     * method denies access to set this configuration option's value.
     */
    public static void setMessageDigestProvider(final String provider) throws SecurityException {
        final SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new UProveSDKPermission(PREFIX_SET_OPTION + OPTION_MESSAGEDIGEST_PROVIDER));
        }
        synchronized (ConfigImpl.class) {
            // Empty string and null both reset to the "unset" sentinel.
            messageDigestProvider = provider != null && provider.length() != 0 ? provider : OPTION_UNSET;
        }
    }

    /**
     * Creates a new <code>MessageDigest</code> instance using the configured
     * provider.
     * @param algorithm the name of the message digest algorithm.
     * @return a digest instance.
     * @throws NoSuchProviderException if a provider is configured but not
     * available.
     * @throws NoSuchAlgorithmException if the desired algorithm is not
     * available.
     * @see #messageDigestProvider()
     */
    static MessageDigest getMessageDigest(final String algorithm)
            throws NoSuchProviderException, NoSuchAlgorithmException {
        final String provider = messageDigestProvider();
        // null provider means "use the first provider from java.security".
        return provider != null ? MessageDigest.getInstance(algorithm, provider)
                : MessageDigest.getInstance(algorithm);
    }

    /*
     * Prime number generation levels.
     */

    /**
     * Test a prime confidence level for validity.
     * <p>Numbers less than <code>1</code> are invalid due to the fact that
     * you'll never find a prime with such a level. Empirical testing (and
     * inspection of Sun's implementation) show that for integers over 1024
     * bits in length, levels above 3 are equivalent to 3.</p>
     * @param level a confidence level for testing.
     * @return <code>true</code> if <code>level</code> is valid.
     */
    private static boolean isValidPrimeConfidenceLevel(final int level) {
        return level >= 1;
    }

    /**
     * Returns the configured prime number generation confidence level.
     * <p>Lazily parses the site default on first call; falls back to
     * {@code DEFAULT_PRIME_CONFIDENCE_LEVEL} when the default is missing,
     * malformed, or out of range.</p>
     * @return the configured prime number generation confidence level.
     */
    static synchronized int primeConfidenceLevel() {
        if (primeConfidenceLevel == LEVEL_UNSET) {
            final String defaultLevel = getDefault(PROPERTY_MATH_PRIMECONFIDENCELEVEL);
            // pessimistically choose the default
            primeConfidenceLevel = DEFAULT_PRIME_CONFIDENCE_LEVEL;
            // now try to parse the default
            if (defaultLevel != null && defaultLevel.length() != 0) {
                try {
                    final int level = Integer.parseInt(defaultLevel);
                    if (isValidPrimeConfidenceLevel(level)) {
                        // we got a good one!
                        primeConfidenceLevel = level;
                    }
                } catch (NumberFormatException nfe) {
                    // stick with the default
                }
            }
        }
        return primeConfidenceLevel;
    }

    /**
     * Returns the confidence level for prime number generation.
     * @return the confidence level for prime number generation.
     * @throws SecurityException if a security manager exists and its
     * {@link SecurityManager#checkPermission(java.security.Permission)}
     * method denies access to retrieve this configuration option's value.
     */
    public static int getPrimeConfidenceLevel() throws SecurityException {
        final SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new UProveSDKPermission(PREFIX_GET_OPTION + OPTION_MATH_PRIMECONFIDENCELEVEL));
        }
        return primeConfidenceLevel();
    }

    /**
     * Sets the confidence level for prime number generation.
     * @param aLevel the confidence level for prime number generation, or
     * <code>0</code> to select the SDK's default value.
     * @throws SecurityException if a security manager exists and its
     * {@link SecurityManager#checkPermission(java.security.Permission)}
     * method denies access to set this configuration option's value.
     */
    public static void setPrimeConfidenceLevel(final int aLevel) throws SecurityException {
        final SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new UProveSDKPermission(PREFIX_SET_OPTION + OPTION_MATH_PRIMECONFIDENCELEVEL));
        }
        // Validate outside the lock; only the assignment needs synchronization.
        final int level;
        if (aLevel == 0) {
            // 0 is the public "reset to default" escape hatch.
            level = LEVEL_UNSET;
        } else if (!isValidPrimeConfidenceLevel(aLevel)) {
            throw new IllegalArgumentException("Invalid level: " + aLevel);
        } else {
            level = aLevel;
        }
        synchronized (ConfigImpl.class) {
            primeConfidenceLevel = level;
        }
    }
}
/**
 * Copyright 2016 Yahoo Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.yahoo.pulsar.broker.service;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;

import java.lang.reflect.Field;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.bookkeeper.mledger.ManagedCursor;
import org.apache.bookkeeper.mledger.impl.EntryCacheImpl;
import org.apache.bookkeeper.mledger.impl.ManagedLedgerFactoryImpl;
import org.apache.bookkeeper.mledger.impl.ManagedLedgerImpl;
import org.apache.bookkeeper.mledger.impl.PositionImpl;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import com.google.common.collect.Sets;
import com.yahoo.pulsar.broker.service.persistent.PersistentDispatcherMultipleConsumers;
import com.yahoo.pulsar.broker.service.persistent.PersistentSubscription;
import com.yahoo.pulsar.broker.service.persistent.PersistentTopic;
import com.yahoo.pulsar.client.admin.PulsarAdminException;
import com.yahoo.pulsar.client.api.CompressionType;
import com.yahoo.pulsar.client.api.Consumer;
import com.yahoo.pulsar.client.api.ConsumerConfiguration;
import com.yahoo.pulsar.client.api.Message;
import com.yahoo.pulsar.client.api.MessageBuilder;
import com.yahoo.pulsar.client.api.MessageId;
import com.yahoo.pulsar.client.api.Producer;
import com.yahoo.pulsar.client.api.ProducerConfiguration;
import com.yahoo.pulsar.client.api.PulsarClient;
import com.yahoo.pulsar.client.api.PulsarClientException;
import com.yahoo.pulsar.client.api.SubscriptionType;
import com.yahoo.pulsar.client.impl.ConsumerImpl;
import com.yahoo.pulsar.client.impl.MessageIdImpl;
import com.yahoo.pulsar.client.impl.ProducerImpl;
import com.yahoo.pulsar.common.api.proto.PulsarApi.CommandSubscribe.SubType;
import com.yahoo.pulsar.common.naming.DestinationName;
import com.yahoo.pulsar.common.policies.data.loadbalancer.NamespaceBundleStats;
import com.yahoo.pulsar.common.stats.Metrics;

/**
 * End-to-end tests for persistent topics: producer/consumer lifecycle events,
 * flow control, subscription-type transitions, expiry, GC, and broker stats.
 * A fresh broker is started before and torn down after each test method.
 */
@Test
public class PersistentTopicE2ETest extends BrokerTestBase {

    @BeforeMethod
    @Override
    protected void setup() throws Exception {
        super.baseSetup();
    }

    @AfterMethod
    @Override
    protected void cleanup() throws Exception {
        super.internalCleanup();
    }

    @Test
    public void testSimpleProducerEvents() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic0";

        // 1. producer connect
        Producer producer = pulsarClient.createProducer(topicName);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        assertNotNull(topicRef);
        assertEquals(topicRef.getProducers().size(), 1);

        // 2. producer publish messages
        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        rolloverPerIntervalStats();
        assertTrue(topicRef.getProducers().values().iterator().next().getStats().msgRateIn > 0.0);

        // 3. producer disconnect
        producer.close();

        // ASYNC_EVENT_COMPLETION_WAIT presumably defined in BrokerTestBase;
        // gives the broker time to process the disconnect event.
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(topicRef.getProducers().size(), 0);
    }

    @Test
    public void testSimpleConsumerEvents() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic1";
        final String subName = "sub1";
        final int numMsgs = 10;

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);

        // 1. client connect
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);

        assertNotNull(topicRef);
        assertNotNull(subRef);
        assertTrue(subRef.getDispatcher().isConsumerConnected());

        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(getAvailablePermits(subRef), 1000 /* default */);

        Producer producer = pulsarClient.createProducer(topicName);
        for (int i = 0; i < numMsgs * 2; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        assertTrue(subRef.getDispatcher().isConsumerConnected());
        rolloverPerIntervalStats();
        assertEquals(subRef.getNumberOfEntriesInBacklog(), numMsgs * 2);

        // 2. messages pushed before client receive
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(getAvailablePermits(subRef), 1000 - numMsgs * 2);

        Message msg = null;
        for (int i = 0; i < numMsgs; i++) {
            msg = consumer.receive();
            // 3. in-order message delivery
            assertEquals(new String(msg.getData()), "my-message-" + i);
            consumer.acknowledge(msg);
        }

        rolloverPerIntervalStats();

        // 4. messages deleted on individual acks
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(subRef.getNumberOfEntriesInBacklog(), numMsgs);

        for (int i = 0; i < numMsgs; i++) {
            msg = consumer.receive();
            if (i == numMsgs - 1) {
                consumer.acknowledgeCumulative(msg);
            }
        }

        rolloverPerIntervalStats();

        // 5. messages deleted on cumulative acks
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(subRef.getNumberOfEntriesInBacklog(), 0);

        // 6. consumer unsubscribe
        consumer.unsubscribe();

        // 7. consumer graceful close
        consumer.close();

        // 8. unsubscribe on a closed consumer must fail
        try {
            consumer.unsubscribe();
            fail("Should have failed");
        } catch (PulsarClientException.AlreadyClosedException e) {
            // ok
        }

        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        subRef = topicRef.getPersistentSubscription(subName);
        assertNull(subRef);

        producer.close();
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
    }

    @Test
    public void testConsumerFlowControl() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic2";
        final String subName = "sub2";

        Message msg;
        int recvQueueSize = 4;

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        conf.setReceiverQueueSize(recvQueueSize);
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);
        Producer producer = pulsarClient.createProducer(topicName);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        assertNotNull(topicRef);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);
        assertNotNull(subRef);

        // 1. initial receive queue size recorded
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(getAvailablePermits(subRef), recvQueueSize);

        for (int i = 0; i < recvQueueSize / 2; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
            msg = consumer.receive();
            consumer.acknowledge(msg);
        }

        // 2. queue size re-adjusted after successful receive of half of window size
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(getAvailablePermits(subRef), recvQueueSize);

        consumer.close();
        assertFalse(subRef.getDispatcher().isConsumerConnected());
    }

    /**
     * Validation: 1. validates active-cursor after active subscription 2. validate active-cursor with subscription 3.
     * unconsumed messages should be present into cache 4. cache and active-cursor should be empty once subscription is
     * closed
     *
     * @throws Exception
     */
    @Test
    public void testActiveSubscriptionWithCache() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic2";
        final String subName = "sub2";

        Message msg;
        int recvQueueSize = 4;

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        conf.setReceiverQueueSize(recvQueueSize);

        // (1) Create subscription
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);
        Producer producer = pulsarClient.createProducer(topicName);

        // (2) Produce Messages
        for (int i = 0; i < recvQueueSize / 2; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
            msg = consumer.receive();
            consumer.acknowledge(msg);
        }

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);

        // (3) Get Entry cache (private field, accessed via reflection)
        ManagedLedgerImpl ledger = (ManagedLedgerImpl) topicRef.getManagedLedger();
        Field cacheField = ManagedLedgerImpl.class.getDeclaredField("entryCache");
        cacheField.setAccessible(true);
        EntryCacheImpl entryCache = (EntryCacheImpl) cacheField.get(ledger);

        /************* Validation on non-empty active-cursor **************/
        // (4) Get ActiveCursor : which is list of active subscription
        Iterable<ManagedCursor> activeCursors = ledger.getActiveCursors();
        ManagedCursor curosr = activeCursors.iterator().next();
        // (4.1) Validate: active Cursor must be non-empty
        assertNotNull(curosr);
        // (4.2) Validate: validate cursor name
        assertEquals(subName, curosr.getName());
        // (4.3) Validate: entryCache should have cached messages
        assertTrue(entryCache.getSize() != 0);

        /************* Validation on empty active-cursor **************/
        // (5) Close consumer: which (1) removes activeConsumer and (2) clears the entry-cache
        consumer.close();
        Thread.sleep(1000);
        // (5.1) Validate: active-consumer must be empty
        assertFalse(ledger.getActiveCursors().iterator().hasNext());
        // (5.2) Validate: Entry-cache must be cleared
        assertTrue(entryCache.getSize() == 0);
    }

    // some race conditions needs to be handled
    // disabling the test for now to not block commit jobs
    @Test(enabled = false)
    public void testConcurrentConsumerThreads() throws Exception {
        // test concurrent consumer threads on same consumerId
        final String topicName = "persistent://prop/use/ns-abc/topic3";
        final String subName = "sub3";

        final int recvQueueSize = 100;
        final int numConsumersThreads = 10;

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        conf.setReceiverQueueSize(recvQueueSize);

        ExecutorService executor = Executors.newCachedThreadPool();

        // Barrier releases all consumer threads plus the main thread together.
        final CyclicBarrier barrier = new CyclicBarrier(numConsumersThreads + 1);
        for (int i = 0; i < numConsumersThreads; i++) {
            executor.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    barrier.await();

                    Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);
                    for (int i = 0; i < recvQueueSize / numConsumersThreads; i++) {
                        Message msg = consumer.receive();
                        consumer.acknowledge(msg);
                    }
                    return null;
                }
            });
        }

        Producer producer = pulsarClient.createProducer(topicName);
        for (int i = 0; i < recvQueueSize * numConsumersThreads; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        barrier.await();

        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);

        // 1. cumulatively all threads drain the backlog
        assertEquals(subRef.getNumberOfEntriesInBacklog(), 0);

        // 2. flow control works the same as single consumer single thread
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        assertEquals(getAvailablePermits(subRef), recvQueueSize);
    }

    @Test(enabled = false)
    // TODO: enable this after java client supports graceful close
    public void testGracefulClose() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic4";
        final String subName = "sub4";

        Producer producer = pulsarClient.createProducer(topicName);
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        assertNotNull(topicRef);

        ExecutorService executor = Executors.newCachedThreadPool();
        CountDownLatch latch = new CountDownLatch(1);
        executor.submit(() -> {
            for (int i = 0; i < 10; i++) {
                String message = "my-message-" + i;
                producer.send(message.getBytes());
            }
            latch.countDown();
            return null;
        });

        producer.close();

        // 1. verify there are no pending publish acks once the producer close
        // is completed on client
        assertEquals(topicRef.getProducers().values().iterator().next().getPendingPublishAcks(), 0);

        // safety latch in case of failure,
        // wait for the spawned thread to complete
        latch.await();

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);
        assertNotNull(subRef);

        Message msg = null;
        for (int i = 0; i < 10; i++) {
            msg = consumer.receive();
        }

        // 2. verify consumer close fails when there are outstanding
        // message acks
        try {
            consumer.close();
            fail("should have failed");
        } catch (IllegalStateException e) {
            // Expected - messages not acked
        }

        consumer.acknowledgeCumulative(msg);
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);

        // 3. verify consumer close succeeds once all messages are ack'ed
        consumer.close();
        Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT);
        // NOTE(review): asserting the dispatcher is STILL connected after close()
        // looks suspicious (assertFalse seems intended) -- confirm before
        // re-enabling this disabled test.
        assertTrue(subRef.getDispatcher().isConsumerConnected());
    }

    @Test
    public void testSimpleCloseTopic() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic5";
        final String subName = "sub5";

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);
        Producer producer = pulsarClient.createProducer(topicName);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        assertNotNull(topicRef);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);
        assertNotNull(subRef);

        Message msg;
        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
            msg = consumer.receive();
            consumer.acknowledge(msg);
        }

        producer.close();
        consumer.close();

        // Closing the topic must remove it from the broker's topic map.
        topicRef.close().get();
        assertNull(pulsar.getBrokerService().getTopicReference(topicName));
    }

    @Test
    public void testSingleClientMultipleSubscriptions() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic6";
        final String subName = "sub6";

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);

        pulsarClient.subscribe(topicName, subName, conf);
        pulsarClient.createProducer(topicName);
        try {
            pulsarClient.subscribe(topicName, subName, conf);
            fail("Should have thrown an exception since one consumer is already connected");
        } catch (PulsarClientException cce) {
            Assert.assertTrue(cce.getMessage().contains("Exclusive consumer is already connected"));
        }
    }

    @Test
    public void testMultipleClientsMultipleSubscriptions() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic7";
        final String subName = "sub7";

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        PulsarClient client1 = PulsarClient.create(brokerUrl.toString());
        PulsarClient client2 = PulsarClient.create(brokerUrl.toString());

        try {
            client1.subscribe(topicName, subName, conf);
            client1.createProducer(topicName);

            client2.createProducer(topicName);

            // Second exclusive consumer from a different client must be rejected.
            client2.subscribe(topicName, subName, conf);
            fail("Should have thrown an exception since one consumer is already connected");
        } catch (PulsarClientException cce) {
            Assert.assertTrue(cce.getMessage().contains("Exclusive consumer is already connected"));
        } finally {
            client2.shutdown();
            client1.shutdown();
        }
    }

    @Test
    public void testTopicDeleteWithDisconnectedSubscription() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic8";
        final String subName = "sub1";

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);

        // 1. client connect
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);

        assertNotNull(topicRef);
        assertNotNull(subRef);
        assertTrue(subRef.getDispatcher().isConsumerConnected());

        // 2. client disconnect
        consumer.close();
        assertFalse(subRef.getDispatcher().isConsumerConnected());

        // 3. delete topic
        admin.persistentTopics().delete(topicName);

        // Stats on a deleted topic are expected to fail.
        try {
            admin.persistentTopics().getStats(topicName);
        } catch (PulsarAdminException e) {
            // ok
        }
    }

    // Helper: available flow-control permits of the subscription's first consumer.
    int getAvailablePermits(PersistentSubscription sub) {
        return sub.getDispatcher().getConsumers().get(0).getAvailablePermits();
    }

    @Test(enabled = false)
    public void testUnloadNamespace() throws Exception {
        String topicName = "persistent://prop/use/ns-abc/topic-9";
        DestinationName destinationName = DestinationName.get(topicName);
        pulsarClient.createProducer(topicName);
        pulsarClient.close();

        assertTrue(pulsar.getBrokerService().getTopicReference(topicName) != null);
        assertTrue(((ManagedLedgerFactoryImpl) pulsar.getManagedLedgerFactory()).getManagedLedgers()
                .containsKey(destinationName.getPersistenceNamingEncoding()));

        admin.namespaces().unload("prop/use/ns-abc");

        // Poll up to 30s for the topic reference to disappear after unload.
        int i = 0;
        for (i = 0; i < 30; i++) {
            if (pulsar.getBrokerService().getTopicReference(topicName) == null) {
                break;
            }
            Thread.sleep(1000);
        }
        if (i == 30) {
            fail("The topic reference should be null");
        }

        // ML should have been closed as well
        assertFalse(((ManagedLedgerFactoryImpl) pulsar.getManagedLedgerFactory()).getManagedLedgers()
                .containsKey(destinationName.getPersistenceNamingEncoding()));
    }

    @Test
    public void testGC() throws Exception {
        // 1. Simple successful GC
        String topicName = "persistent://prop/use/ns-abc/topic-10";
        Producer producer = pulsarClient.createProducer(topicName);
        producer.close();

        assertNotNull(pulsar.getBrokerService().getTopicReference(topicName));
        runGC();
        assertNull(pulsar.getBrokerService().getTopicReference(topicName));

        // 2. Topic is not GCed with live connection
        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        String subName = "sub1";
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);

        runGC();
        assertNotNull(pulsar.getBrokerService().getTopicReference(topicName));

        // 3. Topic with subscription is not GCed even with no connections
        consumer.close();

        runGC();
        assertNotNull(pulsar.getBrokerService().getTopicReference(topicName));

        // 4. Topic can be GCed after unsubscribe
        admin.persistentTopics().deleteSubscription(topicName, subName);

        runGC();
        assertNull(pulsar.getBrokerService().getTopicReference(topicName));
    }

    @Test
    public void testMessageExpiry() throws Exception {
        int messageTTLSecs = 1;
        String namespaceName = "prop/use/expiry-check";

        admin.namespaces().createNamespace(namespaceName);
        admin.namespaces().setNamespaceMessageTTL(namespaceName, messageTTLSecs);

        final String topicName = "persistent://prop/use/expiry-check/topic1";
        final String subName = "sub1";
        final int numMsgs = 10;

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        Consumer consumer = pulsarClient.subscribe(topicName, subName, conf);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);

        // Disconnect so the backlog accumulates for an unconnected subscription.
        consumer.close();
        assertFalse(subRef.getDispatcher().isConsumerConnected());

        Producer producer = pulsarClient.createProducer(topicName);
        for (int i = 0; i < numMsgs; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        rolloverPerIntervalStats();
        assertEquals(subRef.getNumberOfEntriesInBacklog(), numMsgs);

        Thread.sleep(TimeUnit.SECONDS.toMillis(messageTTLSecs));
        runMessageExpiryCheck();

        // 1. check all messages expired for this unconnected subscription
        assertEquals(subRef.getNumberOfEntriesInBacklog(), 0);

        // clean-up
        producer.close();
        consumer.close();
        admin.persistentTopics().deleteSubscription(topicName, subName);
        admin.persistentTopics().delete(topicName);
        admin.namespaces().deleteNamespace(namespaceName);
    }

    @Test
    public void testMessageExpiryWithFewExpiredBacklog() throws Exception {
        int messageTTLSecs = 10;
        String namespaceName = "prop/use/expiry-check-1";

        admin.namespaces().createNamespace(namespaceName);
        admin.namespaces().setNamespaceMessageTTL(namespaceName, messageTTLSecs);

        final String topicName = "persistent://prop/use/expiry-check-1/topic1";
        final String subName = "sub1";
        final int numMsgs = 10;

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        pulsarClient.subscribe(topicName, subName, conf);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);

        assertTrue(subRef.getDispatcher().isConsumerConnected());

        Producer producer = pulsarClient.createProducer(topicName);
        for (int i = 0; i < numMsgs; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        rolloverPerIntervalStats();
        assertEquals(subRef.getNumberOfEntriesInBacklog(), numMsgs);

        // At exactly one TTL, nothing should be expired yet for the connected sub.
        Thread.sleep(TimeUnit.SECONDS.toMillis(messageTTLSecs));
        runMessageExpiryCheck();

        assertEquals(subRef.getNumberOfEntriesInBacklog(), numMsgs);

        // After an extra half TTL, the whole backlog should be expired.
        Thread.sleep(TimeUnit.SECONDS.toMillis(messageTTLSecs / 2));
        runMessageExpiryCheck();

        assertEquals(subRef.getNumberOfEntriesInBacklog(), 0);
    }

    @Test
    public void testSubscriptionTypeTransitions() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/shared-topic2";
        final String subName = "sub2";

        ConsumerConfiguration conf1 = new ConsumerConfiguration();
        conf1.setSubscriptionType(SubscriptionType.Exclusive);
        ConsumerConfiguration conf2 = new ConsumerConfiguration();
        conf2.setSubscriptionType(SubscriptionType.Shared);
        ConsumerConfiguration conf3 = new ConsumerConfiguration();
        conf3.setSubscriptionType(SubscriptionType.Failover);

        Consumer consumer1 = pulsarClient.subscribe(topicName, subName, conf1);
        Consumer consumer2 = null;
        Consumer consumer3 = null;

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        PersistentSubscription subRef = topicRef.getPersistentSubscription(subName);

        // 1. shared consumer on an exclusive sub fails
        try {
            consumer2 = pulsarClient.subscribe(topicName, subName, conf2);
            fail("should have failed");
        } catch (PulsarClientException e) {
            assertTrue(e.getMessage().contains("Subscription is of different type"));
        }

        // 2. failover consumer on an exclusive sub fails
        try {
            consumer3 = pulsarClient.subscribe(topicName, subName, conf3);
            fail("should have failed");
        } catch (PulsarClientException e) {
            assertTrue(e.getMessage().contains("Subscription is of different type"));
        }

        // 3. disconnected sub can be converted in shared
        consumer1.close();
        try {
            consumer2 = pulsarClient.subscribe(topicName, subName, conf2);
            assertEquals(subRef.getDispatcher().getType(), SubType.Shared);
        } catch (PulsarClientException e) {
            fail("should not fail");
        }

        // 4. exclusive fails on shared sub
        try {
            consumer1 = pulsarClient.subscribe(topicName, subName, conf1);
            fail("should have failed");
        } catch (PulsarClientException e) {
            assertTrue(e.getMessage().contains("Subscription is of different type"));
        }

        // 5. disconnected sub can be converted in failover
        consumer2.close();
        try {
            consumer3 = pulsarClient.subscribe(topicName, subName, conf3);
            assertEquals(subRef.getDispatcher().getType(), SubType.Failover);
        } catch (PulsarClientException e) {
            fail("should not fail");
        }

        // 6. exclusive consumer can connect after failover disconnects
        consumer3.close();
        try {
            consumer1 = pulsarClient.subscribe(topicName, subName, conf1);
            assertEquals(subRef.getDispatcher().getType(), SubType.Exclusive);
        } catch (PulsarClientException e) {
            fail("should not fail");
        }

        consumer1.close();
        admin.persistentTopics().delete(topicName);
    }

    @Test
    public void testReceiveWithTimeout() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic-receive-timeout";
        final String subName = "sub";

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);
        conf.setReceiverQueueSize(1000);

        ConsumerImpl consumer = (ConsumerImpl) pulsarClient.subscribe(topicName, subName, conf);
        Producer producer = pulsarClient.createProducer(topicName);

        assertEquals(consumer.getAvailablePermits(), 0);

        // Timed-out receive must not consume permits.
        Message msg = consumer.receive(10, TimeUnit.MILLISECONDS);
        assertNull(msg);
        assertEquals(consumer.getAvailablePermits(), 0);

        producer.send("test".getBytes());
        Thread.sleep(100);

        assertEquals(consumer.getAvailablePermits(), 0);

        msg = consumer.receive(10, TimeUnit.MILLISECONDS);
        assertNotNull(msg);
        assertEquals(consumer.getAvailablePermits(), 1);

        msg = consumer.receive(10, TimeUnit.MILLISECONDS);
        assertNull(msg);
        assertEquals(consumer.getAvailablePermits(), 1);
    }

    @Test
    public void testProducerReturnedMessageId() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic-xyz";

        // 1. producer connect
        Producer producer = pulsarClient.createProducer(topicName);

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName);
        assertNotNull(topicRef);
        assertEquals(topicRef.getProducers().size(), 1);

        ManagedLedgerImpl managedLedger = (ManagedLedgerImpl) topicRef.getManagedLedger();
        long ledgerId = managedLedger.getLedgersInfoAsList().get(0).getLedgerId();

        // 2. producer publish messages
        final int SyncMessages = 10;
        for (int i = 0; i < SyncMessages; i++) {
            String message = "my-message-" + i;
            MessageId receivedMessageId = producer.send(message.getBytes());

            // Entry ids are expected to be sequential within the first ledger.
            assertEquals(receivedMessageId, new MessageIdImpl(ledgerId, i, -1));
        }

        // 3. producer publish messages async
        final int AsyncMessages = 10;
        final CountDownLatch counter = new CountDownLatch(AsyncMessages);

        for (int i = SyncMessages; i < (SyncMessages + AsyncMessages); i++) {
            String content = "my-message-" + i;
            Message msg = MessageBuilder.create().setContent(content.getBytes()).build();
            final int index = i;

            producer.sendAsync(msg).thenRun(() -> {
                assertEquals(msg.getMessageId(), new MessageIdImpl(ledgerId, index, -1));
                counter.countDown();
            }).exceptionally((ex) -> {
                return null;
            });
        }

        counter.await();

        // 4. producer disconnect
        producer.close();
    }

    @Test
    public void testProducerQueueFullBlocking() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic-xyzx";
        final int messages = 10;

        PulsarClient client = PulsarClient.create(brokerUrl.toString());

        // 1. Producer connect
        ProducerConfiguration producerConfiguration = new ProducerConfiguration().setMaxPendingMessages(messages)
                .setBlockIfQueueFull(true).setSendTimeout(1, TimeUnit.SECONDS);
        ProducerImpl producer = (ProducerImpl) client.createProducer(topicName, producerConfiguration);

        // 2. Stop broker
        cleanup();

        // 3. producer publish messages
        long startTime = System.nanoTime();
        for (int i = 0; i < messages; i++) {
            // Should never block
            producer.sendAsync("msg".getBytes());
        }

        // Verify thread was not blocked
        long delayNs = System.nanoTime() - startTime;
        assertTrue(delayNs < TimeUnit.SECONDS.toNanos(1));
        assertEquals(producer.getPendingQueueSize(), messages);

        // Next send operation must block, until all the messages in the queue expire
        startTime = System.nanoTime();
        producer.sendAsync("msg".getBytes());
        delayNs = System.nanoTime() - startTime;
        assertTrue(delayNs > TimeUnit.MILLISECONDS.toNanos(500));
        assertTrue(delayNs < TimeUnit.MILLISECONDS.toNanos(1500));
        assertEquals(producer.getPendingQueueSize(), 1);

        // 4. producer disconnect
        producer.close();
        client.close();

        // 5. Restart broker
        setup();
    }

    @Test
    public void testProducerQueueFullNonBlocking() throws Exception {
        final String topicName = "persistent://prop/use/ns-abc/topic-xyzx";
        final int messages = 10;

        // 1. Producer connect
        PulsarClient client = PulsarClient.create(brokerUrl.toString());
        ProducerConfiguration producerConfiguration = new ProducerConfiguration().setMaxPendingMessages(messages)
                .setBlockIfQueueFull(false).setSendTimeout(1, TimeUnit.SECONDS);
        ProducerImpl producer = (ProducerImpl) client.createProducer(topicName, producerConfiguration);

        // 2. Stop broker
        cleanup();

        // 3. producer publish messages
        long startTime = System.nanoTime();
        for (int i = 0; i < messages; i++) {
            // Should never block
            producer.sendAsync("msg".getBytes());
        }

        // Verify thread was not blocked
        long delayNs = System.nanoTime() - startTime;
        assertTrue(delayNs < TimeUnit.SECONDS.toNanos(1));
        assertEquals(producer.getPendingQueueSize(), messages);

        // Next send operation must fail and not block
        startTime = System.nanoTime();
        try {
            producer.send("msg".getBytes());
            fail("Send should have failed");
        } catch (PulsarClientException.ProducerQueueIsFullError e) {
            // Expected
        }
        delayNs = System.nanoTime() - startTime;
        assertTrue(delayNs < TimeUnit.SECONDS.toNanos(1));
        assertEquals(producer.getPendingQueueSize(), messages);

        // 4. producer disconnect
        producer.close();
        client.close();

        // 5. Restart broker
        setup();
    }

    @Test
    public void testDeleteTopics() throws Exception {
        BrokerService brokerService = pulsar.getBrokerService();

        // 1. producers connect
        Producer producer1 = pulsarClient.createProducer("persistent://prop/use/ns-abc/topic-1");
        Producer producer2 = pulsarClient.createProducer("persistent://prop/use/ns-abc/topic-2");

        brokerService.updateRates();
        Map<String, NamespaceBundleStats> bundleStatsMap = brokerService.getBundleStats();
        assertEquals(bundleStatsMap.size(), 1);
        NamespaceBundleStats bundleStats = bundleStatsMap.get("prop/use/ns-abc/0x00000000_0xffffffff");
        assertNotNull(bundleStats);

        producer1.close();
        admin.persistentTopics().delete("persistent://prop/use/ns-abc/topic-1");

        brokerService.updateRates();
        bundleStatsMap = brokerService.getBundleStats();
        assertEquals(bundleStatsMap.size(), 1);
        bundleStats = bundleStatsMap.get("prop/use/ns-abc/0x00000000_0xffffffff");
        assertNotNull(bundleStats);

        // // Delete 2nd topic as well
        // producer2.close();
        // admin.persistentTopics().delete("persistent://prop/use/ns-abc/topic-2");
        //
        // brokerService.updateRates();
        //
        // bundleStatsMap = brokerService.getBundleStats();
        // assertEquals(bundleStatsMap.size(), 0);
    }
@DataProvider(name = "codec") public Object[][] codecProvider() { return new Object[][] { { CompressionType.NONE }, { CompressionType.LZ4 }, { CompressionType.ZLIB }, }; } @Test(dataProvider = "codec") public void testCompression(CompressionType compressionType) throws Exception { final String topicName = "persistent://prop/use/ns-abc/topic0" + compressionType; // 1. producer connect ProducerConfiguration producerConf = new ProducerConfiguration(); producerConf.setCompressionType(compressionType); Producer producer = pulsarClient.createProducer(topicName, producerConf); Consumer consumer = pulsarClient.subscribe(topicName, "my-sub"); PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName); assertNotNull(topicRef); assertEquals(topicRef.getProducers().size(), 1); // 2. producer publish messages for (int i = 0; i < 10; i++) { String message = "my-message-" + i; producer.send(message.getBytes()); } for (int i = 0; i < 10; i++) { Message msg = consumer.receive(5, TimeUnit.SECONDS); assertNotNull(msg); assertEquals(msg.getData(), ("my-message-" + i).getBytes()); } // 3. producer disconnect producer.close(); consumer.close(); } @Test public void testBrokerTopicStats() throws Exception { BrokerService brokerService = this.pulsar.getBrokerService(); Field field = BrokerService.class.getDeclaredField("statsUpdater"); field.setAccessible(true); ScheduledExecutorService statsUpdater = (ScheduledExecutorService) field.get(brokerService); // disable statsUpdate to calculate rates explicitly statsUpdater.shutdown(); final String namespace = "prop/use/ns-abc"; ProducerConfiguration producerConf = new ProducerConfiguration(); Producer producer = pulsarClient.createProducer("persistent://" + namespace + "/topic0", producerConf); // 1. 
producer publish messages for (int i = 0; i < 10; i++) { String message = "my-message-" + i; producer.send(message.getBytes()); } Metrics metric = null; // sleep 1 sec to caclulate metrics per second Thread.sleep(1000); brokerService.updateRates(); List<Metrics> metrics = brokerService.getDestinationMetrics(); for (int i = 0; i < metrics.size(); i++) { if (metrics.get(i).getDimension("namespace").equalsIgnoreCase(namespace)) { metric = metrics.get(i); break; } } assertNotNull(metric); double msgInRate = (double) metrics.get(0).getMetrics().get("brk_in_rate"); // rate should be calculated and no must be > 0 as we have produced 10 msgs so far assertTrue(msgInRate > 0); } @Test public void testPayloadCorruptionDetection() throws Exception { final String topicName = "persistent://prop/use/ns-abc/topic1"; // 1. producer connect Producer producer = pulsarClient.createProducer(topicName); Consumer consumer = pulsarClient.subscribe(topicName, "my-sub"); Message msg1 = MessageBuilder.create().setContent("message-1".getBytes()).build(); CompletableFuture<MessageId> future1 = producer.sendAsync(msg1); // Stop the broker, and publishes messages. Messages are accumulated in the producer queue and they're checksums // would have already been computed. 
If we change the message content at that point, it should result in a // checksum validation error stopBroker(); Message msg2 = MessageBuilder.create().setContent("message-2".getBytes()).build(); CompletableFuture<MessageId> future2 = producer.sendAsync(msg2); // Taint msg2 msg2.getData()[msg2.getData().length - 1] = '3'; // new content would be 'message-3' // Restart the broker to have the messages published startBroker(); future1.get(); try { future2.get(); fail("since we corrupted the message, it should be rejected by the broker"); } catch (Exception e) { // ok } // We should only receive msg1 Message msg = consumer.receive(1, TimeUnit.SECONDS); assertEquals(new String(msg.getData()), "message-1"); while ((msg = consumer.receive(1, TimeUnit.SECONDS)) != null) { assertEquals(new String(msg.getData()), "message-1"); } } /** * Verify: Broker should not replay already acknowledged messages again and should clear them from messageReplay bucket * * 1. produce messages * 2. consume messages and ack all except 1 msg * 3. 
Verification: should replay only 1 unacked message */ @Test() public void testMessageRedelivery() throws Exception { final String topicName = "persistent://prop/use/ns-abc/topic2"; final String subName = "sub2"; Message msg; int totalMessages = 10; ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Shared); Consumer consumer = pulsarClient.subscribe(topicName, subName, conf); Producer producer = pulsarClient.createProducer(topicName); // (1) Produce messages for (int i = 0; i < totalMessages; i++) { String message = "my-message-" + i; producer.send(message.getBytes()); } //(2) Consume and ack messages except first message Message unAckedMsg = null; for (int i = 0; i < totalMessages; i++) { msg = consumer.receive(); if (i == 0) { unAckedMsg = msg; } else { consumer.acknowledge(msg); } } consumer.redeliverUnacknowledgedMessages(); // Verify: msg [L:0] must be redelivered try { msg = consumer.receive(1, TimeUnit.SECONDS); assertEquals(new String(msg.getData()), new String(unAckedMsg.getData())); } catch (Exception e) { fail("msg should be redelivered ", e); } // Verify no other messages are redelivered msg = consumer.receive(100, TimeUnit.MILLISECONDS); assertNull(msg); consumer.close(); producer.close(); } /** * Verify: * 1. Broker should not replay already acknowledged messages * 2. 
Dispatcher should not stuck while dispatching new messages due to previous-replay * of invalid/already-acked messages * * @throws Exception */ @Test public void testMessageReplay() throws Exception { final String topicName = "persistent://prop/use/ns-abc/topic2"; final String subName = "sub2"; Message msg; int totalMessages = 10; int replayIndex = totalMessages / 2; ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Shared); conf.setReceiverQueueSize(1); Consumer consumer = pulsarClient.subscribe(topicName, subName, conf); Producer producer = pulsarClient.createProducer(topicName); PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topicName); assertNotNull(topicRef); PersistentSubscription subRef = topicRef.getPersistentSubscription(subName); PersistentDispatcherMultipleConsumers dispatcher = (PersistentDispatcherMultipleConsumers) subRef .getDispatcher(); Field replayMap = PersistentDispatcherMultipleConsumers.class.getDeclaredField("messagesToReplay"); replayMap.setAccessible(true); TreeSet<PositionImpl> messagesToReplay = Sets.newTreeSet(); assertNotNull(subRef); // (1) Produce messages for (int i = 0; i < totalMessages; i++) { String message = "my-message-" + i; producer.send(message.getBytes()); } MessageIdImpl firstAckedMsg = null; // (2) Consume and ack messages except first message for (int i = 0; i < totalMessages; i++) { msg = consumer.receive(); consumer.acknowledge(msg); MessageIdImpl msgId = (MessageIdImpl) msg.getMessageId(); if (i == 0) { firstAckedMsg = msgId; } if (i < replayIndex) { // (3) accumulate acked messages for replay messagesToReplay.add(new PositionImpl(msgId.getLedgerId(), msgId.getEntryId())); } } // (4) redelivery : should redeliver only unacked messages Thread.sleep(1000); replayMap.set(dispatcher, messagesToReplay); // (a) redelivery with all acked-message should clear messageReply bucket 
dispatcher.redeliverUnacknowledgedMessages(dispatcher.getConsumers().get(0)); assertEquals(messagesToReplay.size(), 0); // (b) fill messageReplyBucket with already acked entry again: and try to publish new msg and read it messagesToReplay.add(new PositionImpl(firstAckedMsg.getLedgerId(), firstAckedMsg.getEntryId())); replayMap.set(dispatcher, messagesToReplay); // send new message final String testMsg = "testMsg"; producer.send(testMsg.getBytes()); // consumer should be able to receive only new message and not the dispatcher.consumerFlow(dispatcher.getConsumers().get(0), 1); msg = consumer.receive(1, TimeUnit.SECONDS); assertNotNull(msg); assertEquals(msg.getData(), testMsg.getBytes()); consumer.close(); producer.close(); } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authc.file.tool;

import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.elasticsearch.cli.EnvironmentAwareCommand;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.LoggingAwareMultiCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
import org.elasticsearch.xpack.core.security.support.Validation;
import org.elasticsearch.xpack.core.security.support.Validation.Users;
import org.elasticsearch.xpack.security.authc.file.FileUserPasswdStore;
import org.elasticsearch.xpack.security.authc.file.FileUserRolesStore;
import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
import org.elasticsearch.xpack.security.support.FileAttributesChecker;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * CLI multi-command for managing users of the file realm: add/delete users,
 * change passwords, edit roles, and list users with their roles.
 */
public class UsersTool extends LoggingAwareMultiCommand {

    public static void main(String[] args) throws Exception {
        exit(new UsersTool().main(args, Terminal.DEFAULT));
    }

    UsersTool() {
        super("Manages elasticsearch file users");
        // Each subcommand is created through an overridable factory so tests can
        // substitute their own implementations.
        subcommands.put("useradd", newAddUserCommand());
        subcommands.put("userdel", newDeleteUserCommand());
        subcommands.put("passwd", newPasswordCommand());
        subcommands.put("roles", newRolesCommand());
        subcommands.put("list", newListCommand());
    }

    protected AddUserCommand newAddUserCommand() {
        return new AddUserCommand();
    }

    protected DeleteUserCommand newDeleteUserCommand() {
        return new DeleteUserCommand();
    }

    protected PasswordCommand newPasswordCommand() {
        return new PasswordCommand();
    }

    protected RolesCommand newRolesCommand() {
        return new RolesCommand();
    }

    protected ListCommand newListCommand() {
        return new ListCommand();
    }

    /** Adds a new user entry to the "users" file and, optionally, roles to "users_roles". */
    static class AddUserCommand extends EnvironmentAwareCommand {

        private final OptionSpec<String> passwordOption;
        private final OptionSpec<String> rolesOption;
        private final OptionSpec<String> arguments;

        AddUserCommand() {
            super("Adds a file user");

            this.passwordOption = parser.acceptsAll(Arrays.asList("p", "password"), "The user password")
                    .withRequiredArg();
            this.rolesOption = parser.acceptsAll(Arrays.asList("r", "roles"),
                    "Comma-separated list of the roles of the user")
                    .withRequiredArg().defaultsTo("");
            this.arguments = parser.nonOptions("username");
        }

        @Override
        protected void printAdditionalHelp(Terminal terminal) {
            terminal.println("Adds a file based user to elasticsearch (via internal realm). The user will");
            terminal.println("be added to the \"users\" file and its roles will be added to the");
            terminal.println("\"users_roles\" file in the elasticsearch config directory.");
            terminal.println("");
        }

        @Override
        protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
            String username = parseUsername(arguments.values(options), env.settings());
            // Reserved usernames are only legal when the reserved realm is disabled.
            final boolean allowReserved = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(env.settings()) == false;
            Validation.Error validationError = Users.validateUsername(username, allowReserved, env.settings());
            if (validationError != null) {
                throw new UserException(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... " + validationError);
            }

            final char[] passwordHash = getPasswordHash(terminal, env, passwordOption.value(options));
            String[] roles = parseRoles(terminal, env, rolesOption.value(options));

            Path passwordFile = FileUserPasswdStore.resolveFile(env);
            Path rolesFile = FileUserRolesStore.resolveFile(env);
            FileAttributesChecker attributesChecker = new FileAttributesChecker(passwordFile, rolesFile);

            Map<String, char[]> users = FileUserPasswdStore.parseFile(passwordFile, null, env.settings());
            if (users == null) {
                throw new UserException(ExitCodes.CONFIG, "Configuration file [" + passwordFile + "] is missing");
            }
            if (users.containsKey(username)) {
                throw new UserException(ExitCodes.CODE_ERROR, "User [" + username + "] already exists");
            }
            users = new HashMap<>(users); // make modifiable
            users.put(username, passwordHash);
            FileUserPasswdStore.writeFile(users, passwordFile);

            if (roles.length > 0) {
                Map<String, String[]> userRoles = new HashMap<>(FileUserRolesStore.parseFile(rolesFile, null));
                userRoles.put(username, roles);
                FileUserRolesStore.writeFile(userRoles, rolesFile);
            }

            attributesChecker.check(terminal);
        }
    }

    /** Removes a user from the "users" file and its entries from "users_roles". */
    static class DeleteUserCommand extends EnvironmentAwareCommand {

        private final OptionSpec<String> arguments;

        DeleteUserCommand() {
            super("Deletes a file based user");
            this.arguments = parser.nonOptions("username");
        }

        @Override
        protected void printAdditionalHelp(Terminal terminal) {
            terminal.println("Removes an existing file based user from elasticsearch. The user will be");
            terminal.println("removed from the \"users\" file and its roles will be removed from the");
            terminal.println("\"users_roles\" file in the elasticsearch config directory.");
            terminal.println("");
        }

        @Override
        protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
            String username = parseUsername(arguments.values(options), env.settings());
            Path passwordFile = FileUserPasswdStore.resolveFile(env);
            Path rolesFile = FileUserRolesStore.resolveFile(env);
            FileAttributesChecker attributesChecker = new FileAttributesChecker(passwordFile, rolesFile);

            Map<String, char[]> users = FileUserPasswdStore.parseFile(passwordFile, null, env.settings());
            if (users == null) {
                throw new UserException(ExitCodes.CONFIG, "Configuration file [" + passwordFile + "] is missing");
            }
            if (users.containsKey(username) == false) {
                throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
            }
            if (Files.exists(passwordFile)) {
                users = new HashMap<>(users);
                char[] passwd = users.remove(username);
                if (passwd != null) {
                    // Only rewrite the file if the user was actually present.
                    FileUserPasswdStore.writeFile(users, passwordFile);
                }
            }

            Map<String, String[]> userRoles = new HashMap<>(FileUserRolesStore.parseFile(rolesFile, null));
            if (Files.exists(rolesFile)) {
                String[] roles = userRoles.remove(username);
                if (roles != null) {
                    FileUserRolesStore.writeFile(userRoles, rolesFile);
                }
            }

            attributesChecker.check(terminal);
        }
    }

    /** Replaces the password hash of an existing user in the "users" file. */
    static class PasswordCommand extends EnvironmentAwareCommand {

        private final OptionSpec<String> passwordOption;
        private final OptionSpec<String> arguments;

        PasswordCommand() {
            super("Changes the password of an existing file based user");
            this.passwordOption = parser.acceptsAll(Arrays.asList("p", "password"), "The user password")
                    .withRequiredArg();
            this.arguments = parser.nonOptions("username");
        }

        @Override
        protected void printAdditionalHelp(Terminal terminal) {
            terminal.println("The passwd command changes passwords for file based users. The tool");
            terminal.println("prompts twice for a replacement password. The second entry is compared");
            terminal.println("against the first and both are required to match in order for the");
            terminal.println("password to be changed.");
            terminal.println("");
        }

        @Override
        protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
            String username = parseUsername(arguments.values(options), env.settings());
            char[] passwordHash = getPasswordHash(terminal, env, passwordOption.value(options));

            Path file = FileUserPasswdStore.resolveFile(env);
            FileAttributesChecker attributesChecker = new FileAttributesChecker(file);
            Map<String, char[]> users = FileUserPasswdStore.parseFile(file, null, env.settings());
            if (users == null) {
                throw new UserException(ExitCodes.CONFIG, "Configuration file [" + file + "] is missing");
            }
            if (users.containsKey(username) == false) {
                throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
            }
            users = new HashMap<>(users); // make modifiable
            users.put(username, passwordHash);
            FileUserPasswdStore.writeFile(users, file);

            attributesChecker.check(terminal);
        }
    }

    /** Adds/removes roles for an existing user, or lists them when no -a/-r is given. */
    static class RolesCommand extends EnvironmentAwareCommand {

        private final OptionSpec<String> addOption;
        private final OptionSpec<String> removeOption;
        private final OptionSpec<String> arguments;

        RolesCommand() {
            super("Edit roles of an existing user");
            this.addOption = parser.acceptsAll(Arrays.asList("a", "add"), "Adds supplied roles to the specified user")
                    .withRequiredArg().defaultsTo("");
            this.removeOption = parser.acceptsAll(Arrays.asList("r", "remove"),
                    "Remove supplied roles from the specified user")
                    .withRequiredArg().defaultsTo("");
            this.arguments = parser.nonOptions("username");
        }

        @Override
        protected void printAdditionalHelp(Terminal terminal) {
            terminal.println("The roles command allows editing roles for file based users.");
            terminal.println("You can also list a user's roles by omitting the -a and -r");
            terminal.println("parameters.");
            terminal.println("");
        }

        @Override
        protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
            String username = parseUsername(arguments.values(options), env.settings());
            String[] addRoles = parseRoles(terminal, env, addOption.value(options));
            String[] removeRoles = parseRoles(terminal, env, removeOption.value(options));

            // check if just need to return data as no write operation happens
            // Nothing to add, just list the data for a username
            boolean readOnlyUserListing = removeRoles.length == 0 && addRoles.length == 0;
            if (readOnlyUserListing) {
                listUsersAndRoles(terminal, env, username);
                return;
            }

            Path usersFile = FileUserPasswdStore.resolveFile(env);
            Path rolesFile = FileUserRolesStore.resolveFile(env);
            FileAttributesChecker attributesChecker = new FileAttributesChecker(usersFile, rolesFile);

            Map<String, char[]> usersMap = FileUserPasswdStore.parseFile(usersFile, null, env.settings());
            if (!usersMap.containsKey(username)) {
                throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
            }

            Map<String, String[]> userRoles = FileUserRolesStore.parseFile(rolesFile, null);
            List<String> roles = new ArrayList<>();
            if (userRoles.get(username) != null) {
                roles.addAll(Arrays.asList(userRoles.get(username)));
            }
            roles.addAll(Arrays.asList(addRoles));
            roles.removeAll(Arrays.asList(removeRoles));

            Map<String, String[]> userRolesToWrite = new HashMap<>(userRoles.size());
            userRolesToWrite.putAll(userRoles);
            if (roles.isEmpty()) {
                userRolesToWrite.remove(username);
            } else {
                // LinkedHashSet de-duplicates while keeping the order roles were given.
                userRolesToWrite.put(username, new LinkedHashSet<>(roles).toArray(new String[]{}));
            }
            FileUserRolesStore.writeFile(userRolesToWrite, rolesFile);

            attributesChecker.check(terminal);
        }
    }

    /** Lists all file-realm users (or one user) with their roles. */
    static class ListCommand extends EnvironmentAwareCommand {

        private final OptionSpec<String> arguments;

        ListCommand() {
            super("List existing file based users and their corresponding roles");
            this.arguments = parser.nonOptions("username");
        }

        @Override
        protected void printAdditionalHelp(Terminal terminal) {
            terminal.println("");
            terminal.println("");
        }

        @Override
        protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
            String username = null;
            if (options.has(arguments)) {
                username = arguments.value(options);
            }
            listUsersAndRoles(terminal, env, username);
        }
    }

    // pkg private for tests
    static void listUsersAndRoles(Terminal terminal, Environment env, String username) throws Exception {
        Path userRolesFilePath = FileUserRolesStore.resolveFile(env);
        Map<String, String[]> userRoles = FileUserRolesStore.parseFile(userRolesFilePath, null);
        if (userRoles == null) {
            throw new UserException(ExitCodes.CONFIG, "Configuration file [" + userRolesFilePath + "] is missing");
        }

        Path userFilePath = FileUserPasswdStore.resolveFile(env);
        Map<String, char[]> users = FileUserPasswdStore.parseFile(userFilePath, null, env.settings());
        if (users == null) {
            throw new UserException(ExitCodes.CONFIG, "Configuration file [" + userFilePath + "] is missing");
        }

        Path rolesFilePath = FileRolesStore.resolveFile(env);
        Set<String> knownRoles = Sets.union(FileRolesStore.parseFileForRoleNames(rolesFilePath, null),
                ReservedRolesStore.names());
        if (knownRoles == null) {
            throw new UserException(ExitCodes.CONFIG, "Configuration file [" + rolesFilePath + "] is missing");
        }

        if (username != null) {
            if (!users.containsKey(username)) {
                throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
            }

            if (userRoles.containsKey(username)) {
                String[] roles = userRoles.get(username);
                Set<String> unknownRoles = Sets.difference(Sets.newHashSet(roles), knownRoles);
                String[] markedRoles = markUnknownRoles(roles, unknownRoles);
                terminal.println(String.format(Locale.ROOT, "%-15s: %s", username,
                        Arrays.stream(markedRoles).map(s -> s == null ? "-" : s).collect(Collectors.joining(","))));
                if (!unknownRoles.isEmpty()) {
                    // at least one role is marked... so printing the legend
                    Path rolesFile = FileRolesStore.resolveFile(env).toAbsolutePath();
                    terminal.println("");
                    terminal.println(" [*]   Role is not in the [" + rolesFile.toAbsolutePath()
                            + "] file. If the role has been created using the API, please disregard this message.");
                }
            } else {
                terminal.println(String.format(Locale.ROOT, "%-15s: -", username));
            }
        } else {
            boolean unknownRolesFound = false;
            boolean usersExist = false;
            for (Map.Entry<String, String[]> entry : userRoles.entrySet()) {
                String[] roles = entry.getValue();
                Set<String> unknownRoles = Sets.difference(Sets.newHashSet(roles), knownRoles);
                String[] markedRoles = markUnknownRoles(roles, unknownRoles);
                terminal.println(String.format(Locale.ROOT, "%-15s: %s", entry.getKey(),
                        String.join(",", markedRoles)));
                unknownRolesFound = unknownRolesFound || !unknownRoles.isEmpty();
                usersExist = true;
            }
            // list users without roles
            Set<String> usersWithoutRoles = Sets.newHashSet(users.keySet());
            usersWithoutRoles.removeAll(userRoles.keySet());
            for (String user : usersWithoutRoles) {
                terminal.println(String.format(Locale.ROOT, "%-15s: -", user));
                usersExist = true;
            }

            if (!usersExist) {
                terminal.println("No users found");
                return;
            }

            if (unknownRolesFound) {
                // at least one role is marked... so printing the legend
                Path rolesFile = FileRolesStore.resolveFile(env).toAbsolutePath();
                terminal.println("");
                terminal.println(" [*]   Role is not in the [" + rolesFile.toAbsolutePath()
                        + "] file. If the role has been created using the API, please disregard this message.");
            }
        }
    }

    /** Appends "*" to any role name contained in {@code unknownRoles}. */
    private static String[] markUnknownRoles(String[] roles, Set<String> unknownRoles) {
        if (unknownRoles.isEmpty()) {
            return roles;
        }
        String[] marked = new String[roles.length];
        for (int i = 0; i < roles.length; i++) {
            if (unknownRoles.contains(roles[i])) {
                marked[i] = roles[i] + "*";
            } else {
                marked[i] = roles[i];
            }
        }
        return marked;
    }

    // pkg private for testing
    static String parseUsername(List<String> args, Settings settings) throws UserException {
        if (args.isEmpty()) {
            throw new UserException(ExitCodes.USAGE, "Missing username argument");
        } else if (args.size() > 1) {
            throw new UserException(ExitCodes.USAGE, "Expected a single username argument, found extra: " + args.toString());
        }
        String username = args.get(0);
        final boolean allowReserved = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings) == false;
        Validation.Error validationError = Users.validateUsername(username, allowReserved, settings);
        if (validationError != null) {
            throw new UserException(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... " + validationError);
        }
        return username;
    }

    private static char[] getPasswordHash(Terminal terminal, Environment env, String cliPasswordValue)
            throws UserException {
        final Hasher hasher = Hasher.resolve(XPackSettings.PASSWORD_HASHING_ALGORITHM.get(env.settings()));
        if (XPackSettings.FIPS_MODE_ENABLED.get(env.settings())
                && hasher.name().toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) {
            throw new UserException(ExitCodes.CONFIG,
                    "Only PBKDF2 is allowed for password hashing in a FIPS 140 JVM. Please set the "
                            + "appropriate value for [ " + XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey()
                            + " ] setting.");
        }
        final char[] passwordHash;
        // SecureString is closed after hashing so the plaintext chars are wiped.
        try (SecureString password = parsePassword(terminal, cliPasswordValue)) {
            passwordHash = hasher.hash(password);
        }
        return passwordHash;
    }

    // pkg private for testing
    static SecureString parsePassword(Terminal terminal, String passwordStr) throws UserException {
        SecureString password;
        if (passwordStr != null) {
            password = new SecureString(passwordStr.toCharArray());
            Validation.Error validationError = Users.validatePassword(password);
            if (validationError != null) {
                throw new UserException(ExitCodes.DATA_ERROR, "Invalid password..." + validationError);
            }
        } else {
            password = new SecureString(terminal.readSecret("Enter new password: "));
            Validation.Error validationError = Users.validatePassword(password);
            if (validationError != null) {
                throw new UserException(ExitCodes.DATA_ERROR, "Invalid password..." + validationError);
            }
            char[] retyped = terminal.readSecret("Retype new password: ");
            if (Arrays.equals(password.getChars(), retyped) == false) {
                throw new UserException(ExitCodes.DATA_ERROR, "Password mismatch");
            }
        }
        return password;
    }

    private static void verifyRoles(Terminal terminal, Environment env, String[] roles) {
        Path rolesFile = FileRolesStore.resolveFile(env);
        assert Files.exists(rolesFile);
        Set<String> knownRoles = Sets.union(FileRolesStore.parseFileForRoleNames(rolesFile, null),
                ReservedRolesStore.names());
        Set<String> unknownRoles = Sets.difference(Sets.newHashSet(roles), knownRoles);
        if (!unknownRoles.isEmpty()) {
            terminal.errorPrintln(String.format(Locale.ROOT,
                    "Warning: The following roles [%s] are not in the [%s] file. "
                            + "Make sure the names are correct. If the names are correct and the roles were created using "
                            + "the API please "
                            + "disregard this message. Nonetheless the user will still be associated with all specified roles",
                    Strings.collectionToCommaDelimitedString(unknownRoles), rolesFile.toAbsolutePath()));
            terminal.errorPrintln("Known roles: " + knownRoles.toString());
        }
    }

    // pkg private for testing
    static String[] parseRoles(Terminal terminal, Environment env, String rolesStr) throws UserException {
        if (rolesStr.isEmpty()) {
            return Strings.EMPTY_ARRAY;
        }
        String[] roles = rolesStr.split(",");
        for (String role : roles) {
            Validation.Error validationError = Validation.Roles.validateRoleName(role, true);
            if (validationError != null) {
                throw new UserException(ExitCodes.DATA_ERROR, "Invalid role [" + role + "]... " + validationError);
            }
        }
        // Unknown roles only produce a warning; the user is still associated with them.
        verifyRoles(terminal, env, roles);
        return roles;
    }
}
package org.hisp.dhis.dataset;

/*
 * Copyright (c) 2004-2015, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.MergeStrategy;
import org.hisp.dhis.common.annotation.Scanned;
import org.hisp.dhis.common.view.DetailedView;
import org.hisp.dhis.common.view.ExportView;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementOperand;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.schema.annotation.PropertyRange;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * A named, ordered section of a {@link DataSet} grouping data elements and
 * indicators for data entry, optionally with greyed-out (disabled) fields.
 */
@JacksonXmlRootElement( localName = "section", namespace = DxfNamespaces.DXF_2_0 )
public class Section
    extends BaseIdentifiableObject
{
    private String description;

    // Owning data set of this section
    private DataSet dataSet;

    @Scanned
    private List<DataElement> dataElements = new ArrayList<>();

    @Scanned
    private List<Indicator> indicators = new ArrayList<>();

    // Operands rendered as disabled in the data entry form
    private Set<DataElementOperand> greyedFields = new HashSet<>();

    // Position of this section within its data set's form
    private int sortOrder;

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    public Section()
    {
    }

    public Section( String name, DataSet dataSet, List<DataElement> dataElements, Set<DataElementOperand> greyedFields )
    {
        this.name = name;
        this.dataSet = dataSet;
        this.dataElements = dataElements;
        this.greyedFields = greyedFields;
    }

    // -------------------------------------------------------------------------
    // Logic
    // -------------------------------------------------------------------------

    public void addDataElement( DataElement dataElement )
    {
        dataElements.add( dataElement );
    }

    public void removeDataElement( DataElement dataElement )
    {
        dataElements.remove( dataElement );
    }

    public void addGreyedField( DataElementOperand greyedField )
    {
        greyedFields.add( greyedField );
    }

    public void removeGreyedField( DataElementOperand greyedField )
    {
        greyedFields.remove( greyedField );
    }

    public void removeAllGreyedFields()
    {
        greyedFields.clear();
    }

    public void removeAllDataElements()
    {
        dataElements.clear();
    }

    /**
     * Indicates whether a category combo can be derived from this section's
     * data elements.
     */
    public boolean hasCategoryCombo()
    {
        return getCategoryCombo() != null;
    }

    /**
     * Returns the category combo of the first data element, or null if this
     * section has no data elements.
     */
    public DataElementCategoryCombo getCategoryCombo()
    {
        return dataElements != null && !dataElements.isEmpty() ? dataElements.get( 0 ).getCategoryCombo() : null;
    }

    /**
     * Indicates whether any data element in this section is multi-dimensional.
     * Null-safe, consistent with the other data element accessors in this class.
     */
    public boolean hasMultiDimensionalDataElement()
    {
        if ( dataElements != null )
        {
            for ( DataElement element : dataElements )
            {
                if ( element.isMultiDimensional() )
                {
                    return true;
                }
            }
        }

        return false;
    }

    /**
     * Indicates whether the data elements of this section have mixed category
     * combos, which is invalid for a section form.
     * <p>
     * NOTE(review): method name has a typo ("categor") but is kept for
     * backward compatibility with existing callers.
     */
    public boolean categorComboIsInvalid()
    {
        if ( dataElements != null && dataElements.size() > 0 )
        {
            DataElementCategoryCombo categoryCombo = null;

            for ( DataElement element : dataElements )
            {
                if ( element != null )
                {
                    if ( categoryCombo != null && !categoryCombo.equals( element.getCategoryCombo() ) )
                    {
                        return true;
                    }

                    categoryCombo = element.getCategoryCombo();
                }
            }
        }

        return false;
    }

    public boolean hasDataElements()
    {
        return dataElements != null && !dataElements.isEmpty();
    }

    /**
     * Section names need not be unique across the system, only within their
     * data set.
     */
    @Override
    public boolean haveUniqueNames()
    {
        return false;
    }

    // -------------------------------------------------------------------------
    // Getters and setters
    // -------------------------------------------------------------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    @PropertyRange( min = 2 )
    public String getDescription()
    {
        return description;
    }

    public void setDescription( String description )
    {
        this.description = description;
    }

    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataSet getDataSet()
    {
        return dataSet;
    }

    public void setDataSet( DataSet dataSet )
    {
        this.dataSet = dataSet;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "dataElements", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "dataElement", namespace = DxfNamespaces.DXF_2_0 )
    public List<DataElement> getDataElements()
    {
        return dataElements;
    }

    public void setDataElements( List<DataElement> dataElements )
    {
        this.dataElements = dataElements;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "indicators", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "indicator", namespace = DxfNamespaces.DXF_2_0 )
    public List<Indicator> getIndicators()
    {
        return indicators;
    }

    public void setIndicators( List<Indicator> indicators )
    {
        this.indicators = indicators;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getSortOrder()
    {
        return sortOrder;
    }

    public void setSortOrder( int sortOrder )
    {
        this.sortOrder = sortOrder;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "greyedFields", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "greyedField", namespace = DxfNamespaces.DXF_2_0 )
    public Set<DataElementOperand> getGreyedFields()
    {
        return greyedFields;
    }

    public void setGreyedFields( Set<DataElementOperand> greyedFields )
    {
        this.greyedFields = greyedFields;
    }

    /**
     * Merges the state of the given object into this section according to the
     * given strategy. Data elements and greyed fields are always replaced by
     * the other section's collections.
     */
    @Override
    public void mergeWith( IdentifiableObject other, MergeStrategy strategy )
    {
        super.mergeWith( other, strategy );

        if ( other.getClass().isInstance( this ) )
        {
            Section section = (Section) other;

            sortOrder = section.getSortOrder();

            if ( strategy.isReplace() )
            {
                dataSet = section.getDataSet();
            }
            else if ( strategy.isMerge() )
            {
                dataSet = section.getDataSet() == null ? dataSet : section.getDataSet();
            }

            removeAllDataElements();
            section.getDataElements().forEach( this::addDataElement );

            removeAllGreyedFields();
            section.getGreyedFields().forEach( this::addGreyedField );
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.connect.runtime; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.clients.consumer.OffsetCommitCallback; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.header.internals.RecordHeaders; import org.apache.kafka.common.record.TimestampType; import org.apache.kafka.common.utils.Time; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.SchemaAndValue; import org.apache.kafka.connect.errors.ConnectException; import org.apache.kafka.connect.runtime.distributed.ClusterConfigState; import org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperatorTest; import org.apache.kafka.connect.runtime.isolation.PluginClassLoader; import org.apache.kafka.connect.runtime.standalone.StandaloneConfig; import org.apache.kafka.connect.sink.SinkConnector; import org.apache.kafka.connect.sink.SinkRecord; import org.apache.kafka.connect.sink.SinkTask; import 
org.apache.kafka.connect.storage.Converter; import org.apache.kafka.connect.storage.HeaderConverter; import org.apache.kafka.connect.storage.StatusBackingStore; import org.apache.kafka.connect.util.ConnectorTaskId; import org.apache.kafka.common.utils.MockTime; import org.apache.kafka.connect.util.ThreadedTest; import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMock; import org.easymock.IAnswer; import org.easymock.IExpectationSetters; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.easymock.PowerMock; import org.powermock.api.easymock.annotation.Mock; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.powermock.reflect.Whitebox; import java.time.Duration; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; @RunWith(PowerMockRunner.class) @PrepareForTest(WorkerSinkTask.class) @PowerMockIgnore("javax.management.*") public class WorkerSinkTaskThreadedTest extends ThreadedTest { // These are fixed to keep this code simpler. 
In this example we assume byte[] raw values // with mix of integer/string in Connect private static final String TOPIC = "test"; private static final int PARTITION = 12; private static final int PARTITION2 = 13; private static final int PARTITION3 = 14; private static final long FIRST_OFFSET = 45; private static final Schema KEY_SCHEMA = Schema.INT32_SCHEMA; private static final int KEY = 12; private static final Schema VALUE_SCHEMA = Schema.STRING_SCHEMA; private static final String VALUE = "VALUE"; private static final byte[] RAW_KEY = "key".getBytes(); private static final byte[] RAW_VALUE = "value".getBytes(); private static final TopicPartition TOPIC_PARTITION = new TopicPartition(TOPIC, PARTITION); private static final TopicPartition TOPIC_PARTITION2 = new TopicPartition(TOPIC, PARTITION2); private static final TopicPartition TOPIC_PARTITION3 = new TopicPartition(TOPIC, PARTITION3); private static final TopicPartition UNASSIGNED_TOPIC_PARTITION = new TopicPartition(TOPIC, 200); private static final Map<String, String> TASK_PROPS = new HashMap<>(); private static final long TIMESTAMP = 42L; private static final TimestampType TIMESTAMP_TYPE = TimestampType.CREATE_TIME; static { TASK_PROPS.put(SinkConnector.TOPICS_CONFIG, TOPIC); TASK_PROPS.put(TaskConfig.TASK_CLASS_CONFIG, TestSinkTask.class.getName()); } private static final TaskConfig TASK_CONFIG = new TaskConfig(TASK_PROPS); private ConnectorTaskId taskId = new ConnectorTaskId("job", 0); private TargetState initialState = TargetState.STARTED; private Time time; private ConnectMetrics metrics; @Mock private SinkTask sinkTask; private Capture<WorkerSinkTaskContext> sinkTaskContext = EasyMock.newCapture(); private WorkerConfig workerConfig; @Mock private PluginClassLoader pluginLoader; @Mock private Converter keyConverter; @Mock private Converter valueConverter; @Mock private HeaderConverter headerConverter; @Mock private TransformationChain<SinkRecord> transformationChain; private WorkerSinkTask workerTask; 
@Mock private KafkaConsumer<byte[], byte[]> consumer; private Capture<ConsumerRebalanceListener> rebalanceListener = EasyMock.newCapture(); @Mock private TaskStatus.Listener statusListener; @Mock private StatusBackingStore statusBackingStore; private long recordsReturned; @Override public void setup() { super.setup(); time = new MockTime(); metrics = new MockConnectMetrics(); Map<String, String> workerProps = new HashMap<>(); workerProps.put("key.converter", "org.apache.kafka.connect.json.JsonConverter"); workerProps.put("value.converter", "org.apache.kafka.connect.json.JsonConverter"); workerProps.put("internal.key.converter", "org.apache.kafka.connect.json.JsonConverter"); workerProps.put("internal.value.converter", "org.apache.kafka.connect.json.JsonConverter"); workerProps.put("internal.key.converter.schemas.enable", "false"); workerProps.put("internal.value.converter.schemas.enable", "false"); workerProps.put("offset.storage.file.filename", "/tmp/connect.offsets"); pluginLoader = PowerMock.createMock(PluginClassLoader.class); workerConfig = new StandaloneConfig(workerProps); workerTask = new WorkerSinkTask( taskId, sinkTask, statusListener, initialState, workerConfig, ClusterConfigState.EMPTY, metrics, keyConverter, valueConverter, headerConverter, new TransformationChain<>(Collections.emptyList(), RetryWithToleranceOperatorTest.NOOP_OPERATOR), consumer, pluginLoader, time, RetryWithToleranceOperatorTest.NOOP_OPERATOR, statusBackingStore); recordsReturned = 0; } @After public void tearDown() { if (metrics != null) metrics.stop(); } @Test public void testPollsInBackground() throws Exception { expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); Capture<Collection<SinkRecord>> capturedRecords = expectPolls(1L); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); // First iteration initializes partition assignment workerTask.iteration(); // Then we iterate to fetch data for (int 
i = 0; i < 10; i++) { workerTask.iteration(); } workerTask.stop(); workerTask.close(); // Verify contents match expected values, i.e. that they were translated properly. With max // batch size 1 and poll returns 1 message at a time, we should have a matching # of batches assertEquals(10, capturedRecords.getValues().size()); int offset = 0; for (Collection<SinkRecord> recs : capturedRecords.getValues()) { assertEquals(1, recs.size()); for (SinkRecord rec : recs) { SinkRecord referenceSinkRecord = new SinkRecord(TOPIC, PARTITION, KEY_SCHEMA, KEY, VALUE_SCHEMA, VALUE, FIRST_OFFSET + offset, TIMESTAMP, TIMESTAMP_TYPE); assertEquals(referenceSinkRecord, rec); offset++; } } PowerMock.verifyAll(); } @Test public void testCommit() throws Exception { expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); // Make each poll() take the offset commit interval Capture<Collection<SinkRecord>> capturedRecords = expectPolls(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_DEFAULT); expectOffsetCommit(1L, null, null, 0, true); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); // Initialize partition assignment workerTask.iteration(); // Fetch one record workerTask.iteration(); // Trigger the commit workerTask.iteration(); // Commit finishes synchronously for testing so we can check this immediately assertEquals(0, workerTask.commitFailures()); workerTask.stop(); workerTask.close(); assertEquals(2, capturedRecords.getValues().size()); PowerMock.verifyAll(); } @Test public void testCommitFailure() throws Exception { expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); Capture<Collection<SinkRecord>> capturedRecords = expectPolls(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_DEFAULT); expectOffsetCommit(1L, new RuntimeException(), null, 0, true); // Should rewind to last known good positions, which in this case will be the offsets loaded during initialization // for all topic partitions 
consumer.seek(TOPIC_PARTITION, FIRST_OFFSET); PowerMock.expectLastCall(); consumer.seek(TOPIC_PARTITION2, FIRST_OFFSET); PowerMock.expectLastCall(); consumer.seek(TOPIC_PARTITION3, FIRST_OFFSET); PowerMock.expectLastCall(); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); // Initialize partition assignment workerTask.iteration(); // Fetch some data workerTask.iteration(); // Trigger the commit workerTask.iteration(); assertEquals(1, workerTask.commitFailures()); assertEquals(false, Whitebox.getInternalState(workerTask, "committing")); workerTask.stop(); workerTask.close(); PowerMock.verifyAll(); } @Test public void testCommitSuccessFollowedByFailure() throws Exception { // Validate that we rewind to the correct offsets if a task's preCommit() method throws an exception expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); Capture<Collection<SinkRecord>> capturedRecords = expectPolls(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_DEFAULT); expectOffsetCommit(1L, null, null, 0, true); expectOffsetCommit(2L, new RuntimeException(), null, 0, true); // Should rewind to last known committed positions consumer.seek(TOPIC_PARTITION, FIRST_OFFSET + 1); PowerMock.expectLastCall(); consumer.seek(TOPIC_PARTITION2, FIRST_OFFSET); PowerMock.expectLastCall(); consumer.seek(TOPIC_PARTITION3, FIRST_OFFSET); PowerMock.expectLastCall(); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); // Initialize partition assignment workerTask.iteration(); // Fetch some data workerTask.iteration(); // Trigger first commit, workerTask.iteration(); // Trigger second (failing) commit workerTask.iteration(); assertEquals(1, workerTask.commitFailures()); assertEquals(false, Whitebox.getInternalState(workerTask, "committing")); workerTask.stop(); workerTask.close(); PowerMock.verifyAll(); } @Test public void testCommitConsumerFailure() throws Exception { 
expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); Capture<Collection<SinkRecord>> capturedRecords = expectPolls(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_DEFAULT); expectOffsetCommit(1L, null, new Exception(), 0, true); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); // Initialize partition assignment workerTask.iteration(); // Fetch some data workerTask.iteration(); // Trigger commit workerTask.iteration(); // TODO Response to consistent failures? assertEquals(1, workerTask.commitFailures()); assertEquals(false, Whitebox.getInternalState(workerTask, "committing")); workerTask.stop(); workerTask.close(); PowerMock.verifyAll(); } @Test public void testCommitTimeout() throws Exception { expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); // Cut down amount of time to pass in each poll so we trigger exactly 1 offset commit Capture<Collection<SinkRecord>> capturedRecords = expectPolls(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_DEFAULT / 2); expectOffsetCommit(2L, null, null, WorkerConfig.OFFSET_COMMIT_TIMEOUT_MS_DEFAULT, false); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); // Initialize partition assignment workerTask.iteration(); // Fetch some data workerTask.iteration(); workerTask.iteration(); // Trigger the commit workerTask.iteration(); // Trigger the timeout without another commit workerTask.iteration(); // TODO Response to consistent failures? 
assertEquals(1, workerTask.commitFailures()); assertEquals(false, Whitebox.getInternalState(workerTask, "committing")); workerTask.stop(); workerTask.close(); PowerMock.verifyAll(); } @Test public void testAssignmentPauseResume() throws Exception { // Just validate that the calls are passed through to the consumer, and that where appropriate errors are // converted expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); expectOnePoll().andAnswer(new IAnswer<Object>() { @Override public Object answer() throws Throwable { assertEquals(new HashSet<>(Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3)), sinkTaskContext.getValue().assignment()); return null; } }); EasyMock.expect(consumer.assignment()).andReturn(new HashSet<>(Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3))); expectOnePoll().andAnswer(new IAnswer<Object>() { @Override public Object answer() throws Throwable { try { sinkTaskContext.getValue().pause(UNASSIGNED_TOPIC_PARTITION); fail("Trying to pause unassigned partition should have thrown an Connect exception"); } catch (ConnectException e) { // expected } sinkTaskContext.getValue().pause(TOPIC_PARTITION, TOPIC_PARTITION2); return null; } }); consumer.pause(Arrays.asList(UNASSIGNED_TOPIC_PARTITION)); PowerMock.expectLastCall().andThrow(new IllegalStateException("unassigned topic partition")); consumer.pause(Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2)); PowerMock.expectLastCall(); expectOnePoll().andAnswer(new IAnswer<Object>() { @Override public Object answer() throws Throwable { try { sinkTaskContext.getValue().resume(UNASSIGNED_TOPIC_PARTITION); fail("Trying to resume unassigned partition should have thrown an Connect exception"); } catch (ConnectException e) { // expected } sinkTaskContext.getValue().resume(TOPIC_PARTITION, TOPIC_PARTITION2); return null; } }); consumer.resume(Arrays.asList(UNASSIGNED_TOPIC_PARTITION)); PowerMock.expectLastCall().andThrow(new 
IllegalStateException("unassigned topic partition")); consumer.resume(Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2)); PowerMock.expectLastCall(); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); workerTask.iteration(); workerTask.iteration(); workerTask.iteration(); workerTask.iteration(); workerTask.stop(); workerTask.close(); PowerMock.verifyAll(); } @Test public void testRewind() throws Exception { expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); final long startOffset = 40L; final Map<TopicPartition, Long> offsets = new HashMap<>(); expectOnePoll().andAnswer(new IAnswer<Object>() { @Override public Object answer() throws Throwable { offsets.put(TOPIC_PARTITION, startOffset); sinkTaskContext.getValue().offset(offsets); return null; } }); consumer.seek(TOPIC_PARTITION, startOffset); EasyMock.expectLastCall(); expectOnePoll().andAnswer(new IAnswer<Object>() { @Override public Object answer() throws Throwable { Map<TopicPartition, Long> offsets = sinkTaskContext.getValue().offsets(); assertEquals(0, offsets.size()); return null; } }); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); workerTask.iteration(); workerTask.iteration(); workerTask.iteration(); workerTask.stop(); workerTask.close(); PowerMock.verifyAll(); } @Test public void testRewindOnRebalanceDuringPoll() throws Exception { expectInitializeTask(); expectTaskGetTopic(true); expectPollInitialAssignment(); expectRebalanceDuringPoll().andAnswer(new IAnswer<Object>() { @Override public Object answer() throws Throwable { Map<TopicPartition, Long> offsets = sinkTaskContext.getValue().offsets(); assertEquals(0, offsets.size()); return null; } }); expectStopTask(); PowerMock.replayAll(); workerTask.initialize(TASK_CONFIG); workerTask.initializeAndStart(); workerTask.iteration(); workerTask.iteration(); workerTask.stop(); workerTask.close(); 
PowerMock.verifyAll(); } private void expectInitializeTask() throws Exception { consumer.subscribe(EasyMock.eq(Arrays.asList(TOPIC)), EasyMock.capture(rebalanceListener)); PowerMock.expectLastCall(); sinkTask.initialize(EasyMock.capture(sinkTaskContext)); PowerMock.expectLastCall(); sinkTask.start(TASK_PROPS); PowerMock.expectLastCall(); } private void expectPollInitialAssignment() throws Exception { final List<TopicPartition> partitions = Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3); sinkTask.open(partitions); EasyMock.expectLastCall(); EasyMock.expect(consumer.poll(Duration.ofMillis(EasyMock.anyLong()))).andAnswer(new IAnswer<ConsumerRecords<byte[], byte[]>>() { @Override public ConsumerRecords<byte[], byte[]> answer() throws Throwable { rebalanceListener.getValue().onPartitionsAssigned(partitions); return ConsumerRecords.empty(); } }); EasyMock.expect(consumer.position(TOPIC_PARTITION)).andReturn(FIRST_OFFSET); EasyMock.expect(consumer.position(TOPIC_PARTITION2)).andReturn(FIRST_OFFSET); EasyMock.expect(consumer.position(TOPIC_PARTITION3)).andReturn(FIRST_OFFSET); sinkTask.put(Collections.<SinkRecord>emptyList()); EasyMock.expectLastCall(); } private void expectStopTask() throws Exception { sinkTask.stop(); PowerMock.expectLastCall(); // No offset commit since it happens in the mocked worker thread, but the main thread does need to wake up the // consumer so it exits quickly consumer.wakeup(); PowerMock.expectLastCall(); consumer.close(); PowerMock.expectLastCall(); } // Note that this can only be called once per test currently private Capture<Collection<SinkRecord>> expectPolls(final long pollDelayMs) throws Exception { // Stub out all the consumer stream/iterator responses, which we just want to verify occur, // but don't care about the exact details here. 
EasyMock.expect(consumer.poll(Duration.ofMillis(EasyMock.anyLong()))).andStubAnswer( new IAnswer<ConsumerRecords<byte[], byte[]>>() { @Override public ConsumerRecords<byte[], byte[]> answer() throws Throwable { // "Sleep" so time will progress time.sleep(pollDelayMs); ConsumerRecords<byte[], byte[]> records = new ConsumerRecords<>( Collections.singletonMap( new TopicPartition(TOPIC, PARTITION), Arrays.asList( new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturned, TIMESTAMP, TIMESTAMP_TYPE, 0L, 0, 0, RAW_KEY, RAW_VALUE) ))); recordsReturned++; return records; } }); EasyMock.expect(keyConverter.toConnectData(TOPIC, emptyHeaders(), RAW_KEY)).andReturn(new SchemaAndValue(KEY_SCHEMA, KEY)).anyTimes(); EasyMock.expect(valueConverter.toConnectData(TOPIC, emptyHeaders(), RAW_VALUE)).andReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE)).anyTimes(); final Capture<SinkRecord> recordCapture = EasyMock.newCapture(); EasyMock.expect(transformationChain.apply(EasyMock.capture(recordCapture))).andAnswer( (IAnswer<SinkRecord>) () -> recordCapture.getValue()).anyTimes(); Capture<Collection<SinkRecord>> capturedRecords = EasyMock.newCapture(CaptureType.ALL); sinkTask.put(EasyMock.capture(capturedRecords)); EasyMock.expectLastCall().anyTimes(); return capturedRecords; } @SuppressWarnings("unchecked") private IExpectationSetters<Object> expectOnePoll() { // Currently the SinkTask's put() method will not be invoked unless we provide some data, so instead of // returning empty data, we return one record. The expectation is that the data will be ignored by the // response behavior specified using the return value of this method. 
        // NOTE(review): continuation of an expectation-helper whose signature is above this
        // chunk — presumably an "expect one poll" helper; confirm against the full file.
        // Stub consumer.poll(...) to deliver exactly one record and advance the mock clock.
        EasyMock.expect(consumer.poll(Duration.ofMillis(EasyMock.anyLong()))).andAnswer(
                new IAnswer<ConsumerRecords<byte[], byte[]>>() {
                    @Override
                    public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
                        // "Sleep" so time will progress
                        time.sleep(1L);
                        ConsumerRecords<byte[], byte[]> records = new ConsumerRecords<>(
                                Collections.singletonMap(
                                        new TopicPartition(TOPIC, PARTITION),
                                        Arrays.asList(
                                                new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturned, TIMESTAMP, TIMESTAMP_TYPE, 0L, 0, 0, RAW_KEY, RAW_VALUE)
                                        )));
                        recordsReturned++; // next simulated poll returns the following offset
                        return records;
                    }
                });
        // Key/value conversion is expected once for the single delivered record.
        EasyMock.expect(keyConverter.toConnectData(TOPIC, emptyHeaders(), RAW_KEY)).andReturn(new SchemaAndValue(KEY_SCHEMA, KEY));
        EasyMock.expect(valueConverter.toConnectData(TOPIC, emptyHeaders(), RAW_VALUE)).andReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE));
        sinkTask.put(EasyMock.anyObject(Collection.class));
        return EasyMock.expectLastCall();
    }

    /**
     * Expects a single consumer poll during which a rebalance happens: from inside the
     * poll answer the captured rebalance listener is invoked with a new assignment and
     * the captured sink task context is given a start offset for one partition. After
     * the poll, the task is expected to open the new partitions and seek the consumer
     * to the requested offset, then convert and put the one delivered record.
     *
     * @return the expectation setter for the final {@code sinkTask.put(...)} call so the
     *         caller can attach further behavior (e.g. andAnswer/andThrow)
     */
    @SuppressWarnings("unchecked")
    private IExpectationSetters<Object> expectRebalanceDuringPoll() throws Exception {
        final List<TopicPartition> partitions = Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3);

        final long startOffset = 40L;
        final Map<TopicPartition, Long> offsets = new HashMap<>();
        offsets.put(TOPIC_PARTITION, startOffset);

        EasyMock.expect(consumer.poll(Duration.ofMillis(EasyMock.anyLong()))).andAnswer(
                new IAnswer<ConsumerRecords<byte[], byte[]>>() {
                    @Override
                    public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
                        // "Sleep" so time will progress
                        time.sleep(1L);

                        // Simulate the rebalance arriving while the poll is in flight.
                        sinkTaskContext.getValue().offset(offsets);
                        rebalanceListener.getValue().onPartitionsAssigned(partitions);

                        ConsumerRecords<byte[], byte[]> records = new ConsumerRecords<>(
                                Collections.singletonMap(
                                        new TopicPartition(TOPIC, PARTITION),
                                        Arrays.asList(
                                                new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturned, TIMESTAMP, TIMESTAMP_TYPE, 0L, 0, 0, RAW_KEY, RAW_VALUE)
                                        )));
                        recordsReturned++;
                        return records;
                    }
                });

        EasyMock.expect(consumer.position(TOPIC_PARTITION)).andReturn(FIRST_OFFSET);
        EasyMock.expect(consumer.position(TOPIC_PARTITION2)).andReturn(FIRST_OFFSET);
        EasyMock.expect(consumer.position(TOPIC_PARTITION3)).andReturn(FIRST_OFFSET);

        sinkTask.open(partitions);
        EasyMock.expectLastCall();

        // The offset requested via the task context must be applied to the consumer.
        consumer.seek(TOPIC_PARTITION, startOffset);
        EasyMock.expectLastCall();

        EasyMock.expect(keyConverter.toConnectData(TOPIC, emptyHeaders(), RAW_KEY)).andReturn(new SchemaAndValue(KEY_SCHEMA, KEY));
        EasyMock.expect(valueConverter.toConnectData(TOPIC, emptyHeaders(), RAW_VALUE)).andReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE));
        sinkTask.put(EasyMock.anyObject(Collection.class));
        return EasyMock.expectLastCall();
    }

    /**
     * Expects one offset-commit cycle: {@code preCommit} on the task, then an async
     * consumer commit whose callback may be invoked with the given error.
     *
     * @param expectedMessages      number of messages processed on TOPIC_PARTITION; its
     *                              committed offset is FIRST_OFFSET + expectedMessages
     * @param error                 if non-null, preCommit throws this and no commit is
     *                              expected (the method then returns null)
     * @param consumerCommitError   error passed to the commit callback (null for success)
     * @param consumerCommitDelayMs simulated time taken by the async commit
     * @param invokeCallback        whether the captured commit callback is invoked at all
     * @return the captured OffsetCommitCallback, or null when {@code error} was expected
     */
    private Capture<OffsetCommitCallback> expectOffsetCommit(final long expectedMessages,
                                                             final RuntimeException error,
                                                             final Exception consumerCommitError,
                                                             final long consumerCommitDelayMs,
                                                             final boolean invokeCallback)
            throws Exception {
        final long finalOffset = FIRST_OFFSET + expectedMessages;

        // All assigned partitions will have offsets committed, but we've only processed messages/updated offsets for one
        final Map<TopicPartition, OffsetAndMetadata> offsetsToCommit = new HashMap<>();
        offsetsToCommit.put(TOPIC_PARTITION, new OffsetAndMetadata(finalOffset));
        offsetsToCommit.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
        offsetsToCommit.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET));
        sinkTask.preCommit(offsetsToCommit);
        IExpectationSetters<Object> expectation = PowerMock.expectLastCall();
        if (error != null) {
            expectation.andThrow(error).once();
            return null;
        } else {
            expectation.andReturn(offsetsToCommit);
        }

        final Capture<OffsetCommitCallback> capturedCallback = EasyMock.newCapture();
        consumer.commitAsync(EasyMock.eq(offsetsToCommit),
                EasyMock.capture(capturedCallback));
        PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
            @Override
            public Object answer() throws Throwable {
                time.sleep(consumerCommitDelayMs);
                if (invokeCallback)
                    capturedCallback.getValue().onComplete(offsetsToCommit, consumerCommitError);
                return null;
            }
        });
        return capturedCallback;
    }

    /**
     * Expects lookups of topic status from the status backing store, answering with a
     * freshly-built TopicStatus echoing the captured connector and topic names.
     *
     * @param anyTimes if true the expectation is a stub (any number of calls),
     *                 otherwise exactly one call is expected
     */
    private void expectTaskGetTopic(boolean anyTimes) {
        final Capture<String> connectorCapture = EasyMock.newCapture();
        final Capture<String> topicCapture = EasyMock.newCapture();
        IExpectationSetters<TopicStatus> expect = EasyMock.expect(statusBackingStore.getTopic(
                EasyMock.capture(connectorCapture),
                EasyMock.capture(topicCapture)));
        if (anyTimes) {
            expect.andStubAnswer(() -> new TopicStatus(
                    topicCapture.getValue(),
                    new ConnectorTaskId(connectorCapture.getValue(), 0),
                    Time.SYSTEM.milliseconds()));
        } else {
            expect.andAnswer(() -> new TopicStatus(
                    topicCapture.getValue(),
                    new ConnectorTaskId(connectorCapture.getValue(), 0),
                    Time.SYSTEM.milliseconds()));
        }
        // Only verifiable once something has actually been captured by a prior replay.
        if (connectorCapture.hasCaptured() && topicCapture.hasCaptured()) {
            assertEquals("job", connectorCapture.getValue());
            assertEquals(TOPIC, topicCapture.getValue());
        }
    }

    // Convenience: the record headers used by every expected converter call.
    private RecordHeaders emptyHeaders() {
        return new RecordHeaders();
    }

    // Abstract so PowerMock/EasyMock can generate the partial mock used by the tests.
    private static abstract class TestSinkTask extends SinkTask {
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.builder.endpoint.dsl;

import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;

/**
 * The gora component allows you to work with NoSQL databases using the Apache
 * Gora framework.
 * 
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface GoraEndpointBuilderFactory {


    /**
     * Builder for endpoint consumers for the Gora component.
     */
    public interface GoraEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        default AdvancedGoraEndpointConsumerBuilder advanced() {
            return (AdvancedGoraEndpointConsumerBuilder) this;
        }
        /**
         * The type of the dataStore.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointConsumerBuilder dataStoreClass(String dataStoreClass) {
            setProperty("dataStoreClass", dataStoreClass);
            return this;
        }
        /**
         * The type class of the key.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointConsumerBuilder keyClass(String keyClass) {
            setProperty("keyClass", keyClass);
            return this;
        }
        /**
         * The type of the value.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointConsumerBuilder valueClass(String valueClass) {
            setProperty("valueClass", valueClass);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which means any exceptions occurred while the consumer is trying to
         * pick up incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            setProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which means any exceptions occurred while the consumer is trying to
         * pick up incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder bridgeErrorHandler(
                String bridgeErrorHandler) {
            setProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Number of concurrent consumers.
         * 
         * The option is a: <code>int</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder concurrentConsumers(
                int concurrentConsumers) {
            setProperty("concurrentConsumers", concurrentConsumers);
            return this;
        }
        /**
         * Number of concurrent consumers.
         * 
         * The option will be converted to a <code>int</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder concurrentConsumers(
                String concurrentConsumers) {
            setProperty("concurrentConsumers", concurrentConsumers);
            return this;
        }
        /**
         * The End Key.
         * 
         * The option is a: <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder endKey(Object endKey) {
            setProperty("endKey", endKey);
            return this;
        }
        /**
         * The End Key.
         * 
         * The option will be converted to a <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder endKey(String endKey) {
            setProperty("endKey", endKey);
            return this;
        }
        /**
         * The End Time.
         * 
         * The option is a: <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder endTime(long endTime) {
            setProperty("endTime", endTime);
            return this;
        }
        /**
         * The End Time.
         * 
         * The option will be converted to a <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder endTime(String endTime) {
            setProperty("endTime", endTime);
            return this;
        }
        /**
         * The Fields.
         * 
         * The option is a: <code>com.google.common.base.Strings</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder fields(Object fields) {
            setProperty("fields", fields);
            return this;
        }
        /**
         * The Fields.
         * 
         * The option will be converted to a
         * <code>com.google.common.base.Strings</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder fields(String fields) {
            setProperty("fields", fields);
            return this;
        }
        /**
         * The Key Range From.
         * 
         * The option is a: <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder keyRangeFrom(Object keyRangeFrom) {
            setProperty("keyRangeFrom", keyRangeFrom);
            return this;
        }
        /**
         * The Key Range From.
         * 
         * The option will be converted to a <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder keyRangeFrom(String keyRangeFrom) {
            setProperty("keyRangeFrom", keyRangeFrom);
            return this;
        }
        /**
         * The Key Range To.
         * 
         * The option is a: <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder keyRangeTo(Object keyRangeTo) {
            setProperty("keyRangeTo", keyRangeTo);
            return this;
        }
        /**
         * The Key Range To.
         * 
         * The option will be converted to a <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder keyRangeTo(String keyRangeTo) {
            setProperty("keyRangeTo", keyRangeTo);
            return this;
        }
        /**
         * The Limit.
         * 
         * The option is a: <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder limit(long limit) {
            setProperty("limit", limit);
            return this;
        }
        /**
         * The Limit.
         * 
         * The option will be converted to a <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder limit(String limit) {
            setProperty("limit", limit);
            return this;
        }
        /**
         * The Start Key.
         * 
         * The option is a: <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder startKey(Object startKey) {
            setProperty("startKey", startKey);
            return this;
        }
        /**
         * The Start Key.
         * 
         * The option will be converted to a <code>java.lang.Object</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder startKey(String startKey) {
            setProperty("startKey", startKey);
            return this;
        }
        /**
         * The Start Time.
         * 
         * The option is a: <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder startTime(long startTime) {
            setProperty("startTime", startTime);
            return this;
        }
        /**
         * The Start Time.
         * 
         * The option will be converted to a <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder startTime(String startTime) {
            setProperty("startTime", startTime);
            return this;
        }
        /**
         * The Time Range From.
         * 
         * The option is a: <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder timeRangeFrom(long timeRangeFrom) {
            setProperty("timeRangeFrom", timeRangeFrom);
            return this;
        }
        /**
         * The Time Range From.
         * 
         * The option will be converted to a <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder timeRangeFrom(String timeRangeFrom) {
            setProperty("timeRangeFrom", timeRangeFrom);
            return this;
        }
        /**
         * The Time Range To.
         * 
         * The option is a: <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder timeRangeTo(long timeRangeTo) {
            setProperty("timeRangeTo", timeRangeTo);
            return this;
        }
        /**
         * The Time Range To.
         * 
         * The option will be converted to a <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder timeRangeTo(String timeRangeTo) {
            setProperty("timeRangeTo", timeRangeTo);
            return this;
        }
        /**
         * The Timestamp.
         * 
         * The option is a: <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder timestamp(long timestamp) {
            setProperty("timestamp", timestamp);
            return this;
        }
        /**
         * The Timestamp.
         * 
         * The option will be converted to a <code>long</code> type.
         * 
         * Group: consumer
         */
        default GoraEndpointConsumerBuilder timestamp(String timestamp) {
            setProperty("timestamp", timestamp);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint consumers for the Gora component.
     */
    public interface AdvancedGoraEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        default GoraEndpointConsumerBuilder basic() {
            return (GoraEndpointConsumerBuilder) this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         * 
         * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
         * type.
         * 
         * Group: consumer (advanced)
         */
        default AdvancedGoraEndpointConsumerBuilder exceptionHandler(
                ExceptionHandler exceptionHandler) {
            setProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         * 
         * The option will be converted to a
         * <code>org.apache.camel.spi.ExceptionHandler</code> type.
         * 
         * Group: consumer (advanced)
         */
        default AdvancedGoraEndpointConsumerBuilder exceptionHandler(
                String exceptionHandler) {
            setProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         * 
         * The option is a: <code>org.apache.camel.ExchangePattern</code> type.
         * 
         * Group: consumer (advanced)
         */
        default AdvancedGoraEndpointConsumerBuilder exchangePattern(
                ExchangePattern exchangePattern) {
            setProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         * 
         * The option will be converted to a
         * <code>org.apache.camel.ExchangePattern</code> type.
         * 
         * Group: consumer (advanced)
         */
        default AdvancedGoraEndpointConsumerBuilder exchangePattern(
                String exchangePattern) {
            setProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointConsumerBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointConsumerBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Hadoop Configuration.
         * 
         * The option is a: <code>org.apache.hadoop.conf.Configuration</code>
         * type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointConsumerBuilder hadoopConfiguration(
                Object hadoopConfiguration) {
            setProperty("hadoopConfiguration", hadoopConfiguration);
            return this;
        }
        /**
         * Hadoop Configuration.
         * 
         * The option will be converted to a
         * <code>org.apache.hadoop.conf.Configuration</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointConsumerBuilder hadoopConfiguration(
                String hadoopConfiguration) {
            setProperty("hadoopConfiguration", hadoopConfiguration);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointConsumerBuilder synchronous(
                boolean synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointConsumerBuilder synchronous(
                String synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
    }

    /**
     * Builder for endpoint producers for the Gora component.
     */
    public interface GoraEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        default AdvancedGoraEndpointProducerBuilder advanced() {
            return (AdvancedGoraEndpointProducerBuilder) this;
        }
        /**
         * The type of the dataStore.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointProducerBuilder dataStoreClass(String dataStoreClass) {
            setProperty("dataStoreClass", dataStoreClass);
            return this;
        }
        /**
         * The type class of the key.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointProducerBuilder keyClass(String keyClass) {
            setProperty("keyClass", keyClass);
            return this;
        }
        /**
         * The type of the value.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointProducerBuilder valueClass(String valueClass) {
            setProperty("valueClass", valueClass);
            return this;
        }
        /**
         * Flush on every operation.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: producer
         */
        default GoraEndpointProducerBuilder flushOnEveryOperation(
                boolean flushOnEveryOperation) {
            setProperty("flushOnEveryOperation", flushOnEveryOperation);
            return this;
        }
        /**
         * Flush on every operation.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: producer
         */
        default GoraEndpointProducerBuilder flushOnEveryOperation(
                String flushOnEveryOperation) {
            setProperty("flushOnEveryOperation", flushOnEveryOperation);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: producer
         */
        default GoraEndpointProducerBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            setProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: producer
         */
        default GoraEndpointProducerBuilder lazyStartProducer(
                String lazyStartProducer) {
            setProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint producers for the Gora component.
     */
    public interface AdvancedGoraEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        default GoraEndpointProducerBuilder basic() {
            return (GoraEndpointProducerBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointProducerBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointProducerBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Hadoop Configuration.
         * 
         * The option is a: <code>org.apache.hadoop.conf.Configuration</code>
         * type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointProducerBuilder hadoopConfiguration(
                Object hadoopConfiguration) {
            setProperty("hadoopConfiguration", hadoopConfiguration);
            return this;
        }
        /**
         * Hadoop Configuration.
         * 
         * The option will be converted to a
         * <code>org.apache.hadoop.conf.Configuration</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointProducerBuilder hadoopConfiguration(
                String hadoopConfiguration) {
            setProperty("hadoopConfiguration", hadoopConfiguration);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointProducerBuilder synchronous(
                boolean synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointProducerBuilder synchronous(
                String synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
    }

    /**
     * Builder for endpoint for the Gora component.
     */
    public interface GoraEndpointBuilder
            extends
                GoraEndpointConsumerBuilder, GoraEndpointProducerBuilder {
        default AdvancedGoraEndpointBuilder advanced() {
            return (AdvancedGoraEndpointBuilder) this;
        }
        /**
         * The type of the dataStore.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointBuilder dataStoreClass(String dataStoreClass) {
            setProperty("dataStoreClass", dataStoreClass);
            return this;
        }
        /**
         * The type class of the key.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointBuilder keyClass(String keyClass) {
            setProperty("keyClass", keyClass);
            return this;
        }
        /**
         * The type of the value.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: common
         */
        default GoraEndpointBuilder valueClass(String valueClass) {
            setProperty("valueClass", valueClass);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint for the Gora component.
     */
    public interface AdvancedGoraEndpointBuilder
            extends
                AdvancedGoraEndpointConsumerBuilder,
                AdvancedGoraEndpointProducerBuilder {
        default GoraEndpointBuilder basic() {
            return (GoraEndpointBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Hadoop Configuration.
         * 
         * The option is a: <code>org.apache.hadoop.conf.Configuration</code>
         * type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointBuilder hadoopConfiguration(
                Object hadoopConfiguration) {
            setProperty("hadoopConfiguration", hadoopConfiguration);
            return this;
        }
        /**
         * Hadoop Configuration.
         * 
         * The option will be converted to a
         * <code>org.apache.hadoop.conf.Configuration</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointBuilder hadoopConfiguration(
                String hadoopConfiguration) {
            setProperty("hadoopConfiguration", hadoopConfiguration);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointBuilder synchronous(boolean synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Group: advanced
         */
        default AdvancedGoraEndpointBuilder synchronous(String synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
    }
    /**
     * Gora (camel-gora)
     * The gora component allows you to work with NoSQL databases using the
     * Apache Gora framework.
     * 
     * Category: database,hadoop,nosql
     * Available as of version: 2.14
     * Maven coordinates: org.apache.camel:camel-gora
     * 
     * Syntax: <code>gora:name</code>
     * 
     * Path parameter: name (required)
     * Instance name
     */
    default GoraEndpointBuilder gora(String path) {
        // Anonymous-style local class: binds the fixed "gora" scheme to the given path.
        class GoraEndpointBuilderImpl extends AbstractEndpointBuilder implements GoraEndpointBuilder, AdvancedGoraEndpointBuilder {
            public GoraEndpointBuilderImpl(String path) {
                super("gora", path);
            }
        }
        return new GoraEndpointBuilderImpl(path);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.common.geo.builders;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.Assertions;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoShapeType;
import org.elasticsearch.common.geo.parsers.GeoWKTParser;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Objects;

/**
 * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc
 */
public abstract class ShapeBuilder<T extends Shape, G extends org.elasticsearch.geometry.Geometry, E extends ShapeBuilder<T, G, E>>
        implements NamedWriteable, ToXContentObject {

    protected static final Logger LOGGER = LogManager.getLogger(ShapeBuilder.class);

    private static final boolean DEBUG;
    static {
        // if asserts are enabled we run the debug statements even if they are not logged
        // to prevent exceptions only present if debug enabled
        DEBUG = Assertions.ENABLED;
    }

    // Coordinates accumulated by the fluent coordinate(...)/coordinates(...) methods.
    protected final List<Coordinate> coordinates;

    public static final double DATELINE = 180;

    /**
     * coordinate at [0.0, 0.0]
     */
    public static final Coordinate ZERO_ZERO = new Coordinate(0.0, 0.0);
    // TODO how might we use JtsSpatialContextFactory to configure the context (esp. for non-geo)?
    public static final JtsSpatialContext SPATIAL_CONTEXT = JtsSpatialContext.GEO;
    public static final GeometryFactory FACTORY = SPATIAL_CONTEXT.getGeometryFactory();

    /** We're expecting some geometries might cross the dateline. */
    protected final boolean wrapdateline = SPATIAL_CONTEXT.isGeo();

    /** It's possible that some geometries in a MULTI* shape might overlap. With the possible exception of GeometryCollection,
     * this normally isn't allowed.
     */
    protected static final boolean MULTI_POLYGON_MAY_OVERLAP = false;
    /** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#validate() */
    protected static final boolean AUTO_VALIDATE_JTS_GEOMETRY = true;
    /** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#index() */
    protected static final boolean AUTO_INDEX_JTS_GEOMETRY = true;//may want to turn off once SpatialStrategy impls do it.

    /** default ctor */
    protected ShapeBuilder() {
        coordinates = new ArrayList<>();
    }

    /** ctor from list of coordinates */
    protected ShapeBuilder(List<Coordinate> coordinates) {
        if (coordinates == null || coordinates.size() == 0) {
            throw new IllegalArgumentException("cannot create point collection with empty set of points");
        }
        this.coordinates = coordinates;
    }

    /** ctor from serialized stream input */
    protected ShapeBuilder(StreamInput in) throws IOException {
        int size = in.readVInt();
        coordinates = new ArrayList<>(size);
        for (int i=0; i < size; i++) {
            coordinates.add(readFromStream(in));
        }
    }

    // Reads x, y and an optional z; a missing z yields a 2D coordinate.
    protected static Coordinate readFromStream(StreamInput in) throws IOException {
        double x = in.readDouble();
        double y = in.readDouble();
        Double z = in.readOptionalDouble();
        return z == null ? new Coordinate(x, y) : new Coordinate(x, y, z);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(coordinates.size());
        for (Coordinate point : coordinates) {
            writeCoordinateTo(point, out);
        }
    }

    // Inverse of readFromStream: NaN z is serialized as an absent optional.
    protected static void writeCoordinateTo(Coordinate coordinate, StreamOutput out) throws IOException {
        out.writeDouble(coordinate.x);
        out.writeDouble(coordinate.y);
        out.writeOptionalDouble(Double.isNaN(coordinate.z) ? null : coordinate.z);
    }

    // Self-typed return so fluent calls keep the concrete builder type E.
    @SuppressWarnings("unchecked")
    private E thisRef() {
        return (E)this;
    }

    /**
     * Add a new coordinate to the collection
     * @param longitude longitude of the coordinate
     * @param latitude latitude of the coordinate
     * @return this
     */
    public E coordinate(double longitude, double latitude) {
        return this.coordinate(new Coordinate(longitude, latitude));
    }

    /**
     * Add a new coordinate to the collection
     * @param coordinate coordinate of the point
     * @return this
     */
    public E coordinate(Coordinate coordinate) {
        this.coordinates.add(coordinate);
        return thisRef();
    }

    /**
     * Add an array of coordinates to the collection
     *
     * @param coordinates array of {@link Coordinate}s to add
     * @return this
     */
    public E coordinates(Coordinate...coordinates) {
        return this.coordinates(Arrays.asList(coordinates));
    }

    /**
     * Add a collection of coordinates to the collection
     *
     * @param coordinates array of {@link Coordinate}s to add
     * @return this
     */
    public E coordinates(Collection<? extends Coordinate> coordinates) {
        this.coordinates.addAll(coordinates);
        return thisRef();
    }

    /**
     * Copy all coordinates to a new Array
     *
     * @param closed if set to true the first point of the array is repeated as last element
     * @return Array of coordinates
     */
    protected Coordinate[] coordinates(boolean closed) {
        // toArray with an oversized array leaves the extra slot null; it is filled below.
        Coordinate[] result = coordinates.toArray(new Coordinate[coordinates.size() + (closed?1:0)]);
        if(closed) {
            result[result.length-1] = result[0];
        }
        return result;
    }

    protected JtsGeometry jtsGeometry(Geometry geom) {
        //dateline180Check is false because ElasticSearch does its own dateline wrapping
        JtsGeometry jtsGeometry = new JtsGeometry(geom, SPATIAL_CONTEXT, false, MULTI_POLYGON_MAY_OVERLAP);
        if (AUTO_VALIDATE_JTS_GEOMETRY)
            jtsGeometry.validate();
        if (AUTO_INDEX_JTS_GEOMETRY)
            jtsGeometry.index();
        return jtsGeometry;
    }

    /**
     * Create a new Shape from this builder. Since calling this method could change the
     * defined shape (by inserting new coordinates or changing the position of points),
     * the builder loses its validity. So this method should only be called once on a builder
     * @return new {@link Shape} defined by the builder
     */
    public abstract T buildS4J();

    /**
     * build lucene geometry.
     *
     * @return GeoPoint, double[][], Line, Line[], Polygon, Polygon[], Rectangle, Object[]
     */
    public abstract G buildGeometry();

    // Mirrors a coordinate across the given dateline (no-op when dateline == 0).
    protected static Coordinate shift(Coordinate coordinate, double dateline) {
        if (dateline == 0) {
            return coordinate;
        } else {
            return new Coordinate(-2 * dateline + coordinate.x, coordinate.y);
        }
    }

    /**
     * get the shapes type
     * @return type of the shape
     */
    public abstract GeoShapeType type();

    /** tracks number of dimensions for this shape */
    public abstract int numDimensions();

    /**
     * Calculate the intersection of a line segment and a vertical dateline.
     *
     * @param p1
     *            start-point of the line segment
     * @param p2
     *            end-point of the line segment
     * @param dateline
     *            x-coordinate of the vertical dateline
     * @return position of the intersection in the open range (0..1] if the line
     *         segment intersects with the line segment. Otherwise this method
     *         returns {@link Double#NaN}
     */
    protected static final double intersection(Coordinate p1, Coordinate p2, double dateline) {
        if (p1.x == p2.x && p1.x != dateline) {
            // vertical segment not on the dateline: no intersection
            return Double.NaN;
        } else if (p1.x == p2.x && p1.x == dateline) {
            // segment lies on the dateline itself
            return 1.0;
        } else {
            final double t = (dateline - p1.x) / (p2.x - p1.x);
            if (t > 1 || t <= 0) {
                return Double.NaN;
            } else {
                return t;
            }
        }
    }

    /**
     * Calculate all intersections of line segments and a vertical line. The
     * Array of edges will be ordered asc by the y-coordinate of the
     * intersections of edges.
     *
     * @param dateline
     *            x-coordinate of the dateline
     * @param edges
     *            set of edges that may intersect with the dateline
     * @return number of intersecting edges
     */
    protected static int intersections(double dateline, Edge[] edges) {
        int numIntersections = 0;
        assert Double.isNaN(dateline) == false;
        int maxComponent = 0;
        for (int i = 0; i < edges.length; i++) {
            Coordinate p1 = edges[i].coordinate;
            Coordinate p2 = edges[i].next.coordinate;
            assert Double.isNaN(p2.x) == false && Double.isNaN(p1.x) == false;
            // reset before (re)computing; MAX_COORDINATE marks "no intersection"
            edges[i].intersect = Edge.MAX_COORDINATE;

            double position = intersection(p1, p2, dateline);
            if (Double.isNaN(position) == false) {
                edges[i].intersection(position);
                numIntersections++;
                maxComponent = Math.max(maxComponent, edges[i].component);
            }
        }
        if (maxComponent > 0) {
            // we might detect polygons touching the dateline as intersections
            // Here we clean them up
            for (int i = 0; i < maxComponent; i++) {
                if (clearComponentTouchingDateline(edges, i + 1)) {
                    numIntersections--;
                }
            }
        }

        Arrays.sort(edges, INTERSECTION_ORDER);
        return numIntersections;
    }

    /**
     * Checks the number of dateline intersections detected for a component. If there is only
     * one, it clears it as it means that the component just touches the dateline.
     *
     * @param edges    set of edges that may intersect with the dateline
     * @param component The component to check
     * @return true if the component touches the dateline.
     */
    private static boolean clearComponentTouchingDateline(Edge[] edges, int component) {
        Edge intersection = null;
        for (Edge edge : edges) {
            if (edge.intersect != Edge.MAX_COORDINATE && edge.component == component) {
                if (intersection == null) {
                    intersection = edge;
                } else {
                    // two or more intersections: a genuine crossing, leave them in place
                    return false;
                }
            }
        }
        if (intersection != null) {
            intersection.intersect = Edge.MAX_COORDINATE;
        }
        return intersection != null;
    }

    /**
     * This helper class implements a linked list for {@link Coordinate}. It contains
     * fields for a dateline intersection and component id
     */
    protected static final class Edge {
        Coordinate coordinate; // coordinate of the start point
        Edge next; // next segment
        Coordinate intersect; // potential intersection with dateline
        int component = -1; // id of the component this edge belongs to
        public static final Coordinate MAX_COORDINATE = new Coordinate(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY);

        protected Edge(Coordinate coordinate, Edge next, Coordinate intersection) {
            this.coordinate = coordinate;
            // use setter to catch duplicate point cases
            this.setNext(next);
            this.intersect = intersection;
            if (next != null) {
                this.component = next.component;
            }
        }

        protected Edge(Coordinate coordinate, Edge next) {
            this(coordinate, next, Edge.MAX_COORDINATE);
        }

        protected void setNext(Edge next) {
            // don't bother setting next if its null
            if (next != null) {
                // self-loop throws an invalid shape
                if (this.coordinate.equals(next.coordinate)) {
                    throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: " + this.coordinate);
                }
                this.next = next;
            }
        }

        /**
         * Set the intersection of this line segment to the given position
         *
         * @param position
         *            position of the intersection [0..1]
         * @return the {@link Coordinate} of the intersection
         */
        protected Coordinate intersection(double position) {
            return intersect = position(coordinate, next.coordinate, position);
        }

        // Linear interpolation between p1 and p2; endpoints are returned as-is.
        protected static Coordinate position(Coordinate p1, Coordinate p2, double position) {
            if (position == 0) {
                return p1;
            } else if (position == 1) {
                return p2;
            } else {
                final double x = p1.x + position * (p2.x - p1.x);
                final double y = p1.y + position * (p2.y - p1.y);
                return new Coordinate(x, y);
            }
        }

        @Override
        public String toString() {
            return "Edge[Component=" + component + "; start=" + coordinate + " " + "; intersection=" + intersect + "]";
        }
    }

    // Subclasses override to emit their coordinate payload; default is the plain list.
    protected StringBuilder contentToWKT() {
        return coordinateListToWKT(this.coordinates);
    }

    public String toWKT() {
        StringBuilder sb = new StringBuilder();
        sb.append(type().wktName());
        sb.append(GeoWKTParser.SPACE);
        sb.append(contentToWKT());
        return sb.toString();
    }

    protected static StringBuilder coordinateListToWKT(final List<Coordinate> coordinates) {
        final StringBuilder sb = new StringBuilder();

        if (coordinates.isEmpty()) {
            sb.append(GeoWKTParser.EMPTY);
        } else {
            // walk through coordinates:
            sb.append(GeoWKTParser.LPAREN);
            sb.append(coordinateToWKT(coordinates.get(0)));
            for (int i = 1; i < coordinates.size(); ++i) {
                sb.append(GeoWKTParser.COMMA);
                sb.append(GeoWKTParser.SPACE);
                sb.append(coordinateToWKT(coordinates.get(i)));
            }
            sb.append(GeoWKTParser.RPAREN);
        }

        return sb;
    }

    private static String coordinateToWKT(final Coordinate coordinate) {
        final StringBuilder sb = new StringBuilder();
        sb.append(coordinate.x + GeoWKTParser.SPACE + coordinate.y);
        if (Double.isNaN(coordinate.z) == false) {
            // only 3D coordinates carry a z component in WKT output
            sb.append(GeoWKTParser.SPACE + coordinate.z);
        }
        return sb.toString();
    }

    protected static final IntersectionOrder INTERSECTION_ORDER = new IntersectionOrder();

    // Orders edges by the y-coordinate of their dateline intersection (asc);
    // non-intersecting edges carry MAX_COORDINATE and therefore sort last.
    private static final class IntersectionOrder implements Comparator<Edge> {
        @Override
        public int compare(Edge o1, Edge o2) {
            return Double.compare(o1.intersect.y, o2.intersect.y);
        }
    }

    public enum Orientation {
        LEFT,
        RIGHT;

        public static final Orientation CLOCKWISE = Orientation.LEFT;
        public static final Orientation COUNTER_CLOCKWISE = Orientation.RIGHT;
        public static final Orientation CW = Orientation.LEFT;
        public static final Orientation CCW = Orientation.RIGHT;

        // Serialized as a single boolean: true == RIGHT (counter-clockwise).
        public void writeTo (StreamOutput out) throws IOException {
            out.writeBoolean(this == Orientation.RIGHT);
        }

        public static Orientation readFrom (StreamInput in) throws IOException {
            return in.readBoolean() ?
Orientation.RIGHT : Orientation.LEFT; } public boolean getAsBoolean() { return this == Orientation.RIGHT; } public static Orientation fromString(String orientation) { orientation = orientation.toLowerCase(Locale.ROOT); switch (orientation) { case "right": case "counterclockwise": case "ccw": return Orientation.RIGHT; case "left": case "clockwise": case "cw": return Orientation.LEFT; default: throw new IllegalArgumentException("Unknown orientation [" + orientation + "]"); } } } protected static final boolean debugEnabled() { return LOGGER.isDebugEnabled() || DEBUG; } protected static XContentBuilder toXContent(XContentBuilder builder, Coordinate coordinate) throws IOException { builder.startArray().value(coordinate.x).value(coordinate.y); if (Double.isNaN(coordinate.z) == false) { builder.value(coordinate.z); } return builder.endArray(); } /** * builds an array of coordinates to a {@link XContentBuilder} * * @param builder builder to use * @param closed repeat the first point at the end of the array if it's not already defines as last element of the array * @return the builder */ protected XContentBuilder coordinatesToXcontent(XContentBuilder builder, boolean closed) throws IOException { builder.startArray(); for(Coordinate coord : coordinates) { toXContent(builder, coord); } if(closed) { Coordinate start = coordinates.get(0); Coordinate end = coordinates.get(coordinates.size()-1); if(start.x != end.x || start.y != end.y) { toXContent(builder, coordinates.get(0)); } } builder.endArray(); return builder; } @Override public boolean equals(Object o) { if (this == o) return true; if ((o instanceof ShapeBuilder) == false) return false; ShapeBuilder<?,?,?> that = (ShapeBuilder<?,?,?>) o; return Objects.equals(coordinates, that.coordinates); } @Override public int hashCode() { return Objects.hash(coordinates); } @Override public String getWriteableName() { return type().shapeName(); } @Override public String toString() { return Strings.toString(this, true, true); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

/**
 * Unit tests for the {@link RegionSplitPolicy} hierarchy (constant-size,
 * increasing-to-upper-bound, and key-prefix policies) using a mocked
 * {@link HRegion} and mocked stores. The {@code stores} list is shared
 * mutable state: tests add mock stores to it and the mocked region hands
 * it back via {@code getStores()}.
 */
@Category(SmallTests.class)
public class TestRegionSplitPolicy {

  private Configuration conf;
  private HTableDescriptor htd;
  private HRegion mockRegion;
  private List<Store> stores; // backing store list returned by the mocked region
  private static final TableName TABLENAME = TableName.valueOf("t");

  /** Builds a fresh mocked region wired to {@code htd} and an empty store list. */
  @Before
  public void setupMocks() {
    conf = HBaseConfiguration.create();
    HRegionInfo hri = new HRegionInfo(TABLENAME);
    htd = new HTableDescriptor(TABLENAME);
    mockRegion = Mockito.mock(HRegion.class);
    Mockito.doReturn(htd).when(mockRegion).getTableDesc();
    Mockito.doReturn(hri).when(mockRegion).getRegionInfo();
    stores = new ArrayList<Store>();
    Mockito.doReturn(stores).when(mockRegion).getStores();
  }

  @Test
  public void testIncreasingToUpperBoundRegionSplitPolicy() throws IOException {
    // Configure IncreasingToUpperBoundRegionSplitPolicy as our split policy
    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
        IncreasingToUpperBoundRegionSplitPolicy.class.getName());
    // Now make it so the mock region has a RegionServerService that will
    // return 'online regions'.
    RegionServerServices rss = Mockito.mock(RegionServerServices.class);
    final List<Region> regions = new ArrayList<Region>();
    Mockito.when(rss.getOnlineRegions(TABLENAME)).thenReturn(regions);
    Mockito.when(mockRegion.getRegionServerServices()).thenReturn(rss);
    // Set max size for this 'table'.
    long maxSplitSize = 1024L;
    htd.setMaxFileSize(maxSplitSize);
    // Set flush size to 1/8. IncreasingToUpperBoundRegionSplitPolicy
    // grows by the cube of the number of regions times flushsize each time.
    long flushSize = maxSplitSize/8;
    conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, flushSize);
    htd.setMemStoreFlushSize(flushSize);
    // If RegionServerService with no regions in it -- 'online regions' == 0 --
    // then IncreasingToUpperBoundRegionSplitPolicy should act like a
    // ConstantSizePolicy
    IncreasingToUpperBoundRegionSplitPolicy policy =
        (IncreasingToUpperBoundRegionSplitPolicy)RegionSplitPolicy.create(mockRegion, conf);
    doConstantSizePolicyTests(policy);
    // Add a store in excess of split size. Because there are "no regions"
    // on this server -- rss.getOnlineRegions is 0 -- then we should split
    // like a constantsizeregionsplitpolicy would
    HStore mockStore = Mockito.mock(HStore.class);
    Mockito.doReturn(2000L).when(mockStore).getSize();
    Mockito.doReturn(true).when(mockStore).canSplit();
    stores.add(mockStore);
    // It should split
    assertTrue(policy.shouldSplit());
    // Now test that we increase our split size as online regions for a table
    // grows. With one region, split size should be flushsize.
    regions.add(mockRegion);
    Mockito.doReturn(flushSize).when(mockStore).getSize();
    // Should not split since store is flush size.
    assertFalse(policy.shouldSplit());
    // Set size of store to be > 2*flush size and we should split
    Mockito.doReturn(flushSize*2 + 1).when(mockStore).getSize();
    assertTrue(policy.shouldSplit());
    // Add another region to the 'online regions' on this server and we should
    // now no longer be splittable since split size has gone up.
    regions.add(mockRegion);
    assertFalse(policy.shouldSplit());
    // make sure its just over; verify it'll split
    Mockito.doReturn((long)(maxSplitSize * 1.25 + 1)).when(mockStore).getSize();
    assertTrue(policy.shouldSplit());
    // Finally assert that even if loads of regions, we'll split at max size
    assertWithinJitter(maxSplitSize, policy.getSizeToCheck(1000));
    // Assert same is true if count of regions is zero.
    assertWithinJitter(maxSplitSize, policy.getSizeToCheck(0));
  }

  /** Asserts {@code sizeToCheck} is within the policy's +/-25% jitter band of the max. */
  private void assertWithinJitter(long maxSplitSize, long sizeToCheck) {
    assertTrue("Size greater than lower bound of jitter",
        (long)(maxSplitSize * 0.75) <= sizeToCheck);
    assertTrue("Size less than upper bound of jitter",
        (long)(maxSplitSize * 1.25) >= sizeToCheck);
  }

  @Test
  public void testCreateDefault() throws IOException {
    conf.setLong(HConstants.HREGION_MAX_FILESIZE, 1234L);
    // Using a default HTD, should pick up the file size from
    // configuration.
    ConstantSizeRegionSplitPolicy policy =
        (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(
            mockRegion, conf);
    assertWithinJitter(1234L, policy.getDesiredMaxFileSize());
    // If specified in HTD, should use that
    htd.setMaxFileSize(9999L);
    policy = (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(
        mockRegion, conf);
    assertWithinJitter(9999L, policy.getDesiredMaxFileSize());
  }

  /**
   * Test setting up a customized split policy
   */
  @Test
  public void testCustomPolicy() throws IOException {
    HTableDescriptor myHtd = new HTableDescriptor();
    myHtd.setValue(HTableDescriptor.SPLIT_POLICY,
        KeyPrefixRegionSplitPolicy.class.getName());
    myHtd.setValue(KeyPrefixRegionSplitPolicy.PREFIX_LENGTH_KEY, String.valueOf(2));
    HRegion myMockRegion = Mockito.mock(HRegion.class);
    Mockito.doReturn(myHtd).when(myMockRegion).getTableDesc();
    Mockito.doReturn(stores).when(myMockRegion).getStores();
    HStore mockStore = Mockito.mock(HStore.class);
    Mockito.doReturn(2000L).when(mockStore).getSize();
    Mockito.doReturn(true).when(mockStore).canSplit();
    Mockito.doReturn(Bytes.toBytes("abcd")).when(mockStore).getSplitPoint();
    stores.add(mockStore);
    KeyPrefixRegionSplitPolicy policy =
        (KeyPrefixRegionSplitPolicy) RegionSplitPolicy
            .create(myMockRegion, conf);
    // prefix length 2 truncates the store's split point "abcd" to "ab"
    assertEquals("ab", Bytes.toString(policy.getSplitPoint()));
    Mockito.doReturn(true).when(myMockRegion).shouldForceSplit();
    Mockito.doReturn(Bytes.toBytes("efgh")).when(myMockRegion)
        .getExplicitSplitPoint();
    policy = (KeyPrefixRegionSplitPolicy) RegionSplitPolicy
        .create(myMockRegion, conf);
    // explicit (forced) split point is truncated the same way
    assertEquals("ef", Bytes.toString(policy.getSplitPoint()));
  }

  @Test
  public void testConstantSizePolicy() throws IOException {
    htd.setMaxFileSize(1024L);
    ConstantSizeRegionSplitPolicy policy =
        (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(mockRegion, conf);
    doConstantSizePolicyTests(policy);
  }

  /**
   * Run through tests for a ConstantSizeRegionSplitPolicy
   * @param policy policy instance under test; mutates (and finally clears) {@code stores}
   */
  private void doConstantSizePolicyTests(final ConstantSizeRegionSplitPolicy policy) {
    // For no stores, should not split
    assertFalse(policy.shouldSplit());
    // Add a store above the requisite size. Should split.
    HStore mockStore = Mockito.mock(HStore.class);
    Mockito.doReturn(2000L).when(mockStore).getSize();
    Mockito.doReturn(true).when(mockStore).canSplit();
    stores.add(mockStore);
    assertTrue(policy.shouldSplit());
    // Act as if there's a reference file or some other reason it can't split.
    // This should prevent splitting even though it's big enough.
    Mockito.doReturn(false).when(mockStore).canSplit();
    assertFalse(policy.shouldSplit());
    // Reset splittability after above
    Mockito.doReturn(true).when(mockStore).canSplit();
    // Set to a small size but turn on forceSplit. Should result in a split.
    Mockito.doReturn(true).when(mockRegion).shouldForceSplit();
    Mockito.doReturn(100L).when(mockStore).getSize();
    assertTrue(policy.shouldSplit());
    // Turn off forceSplit, should not split
    Mockito.doReturn(false).when(mockRegion).shouldForceSplit();
    assertFalse(policy.shouldSplit());
    // Clear families we added above
    stores.clear();
  }

  @Test
  public void testGetSplitPoint() throws IOException {
    ConstantSizeRegionSplitPolicy policy =
        (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(mockRegion, conf);
    // For no stores, should not split
    assertFalse(policy.shouldSplit());
    assertNull(policy.getSplitPoint());
    // Add a store above the requisite size. Should split.
    HStore mockStore = Mockito.mock(HStore.class);
    Mockito.doReturn(2000L).when(mockStore).getSize();
    Mockito.doReturn(true).when(mockStore).canSplit();
    Mockito.doReturn(Bytes.toBytes("store 1 split"))
        .when(mockStore).getSplitPoint();
    stores.add(mockStore);
    assertEquals("store 1 split",
        Bytes.toString(policy.getSplitPoint()));
    // Add a bigger store. The split point should come from that one
    HStore mockStore2 = Mockito.mock(HStore.class);
    Mockito.doReturn(4000L).when(mockStore2).getSize();
    Mockito.doReturn(true).when(mockStore2).canSplit();
    Mockito.doReturn(Bytes.toBytes("store 2 split"))
        .when(mockStore2).getSplitPoint();
    stores.add(mockStore2);
    assertEquals("store 2 split",
        Bytes.toString(policy.getSplitPoint()));
  }

  @Test
  public void testDelimitedKeyPrefixRegionSplitPolicy() throws IOException {
    HTableDescriptor myHtd = new HTableDescriptor();
    myHtd.setValue(HTableDescriptor.SPLIT_POLICY,
        DelimitedKeyPrefixRegionSplitPolicy.class.getName());
    myHtd.setValue(DelimitedKeyPrefixRegionSplitPolicy.DELIMITER_KEY, ",");
    HRegion myMockRegion = Mockito.mock(HRegion.class);
    Mockito.doReturn(myHtd).when(myMockRegion).getTableDesc();
    Mockito.doReturn(stores).when(myMockRegion).getStores();
    HStore mockStore = Mockito.mock(HStore.class);
    Mockito.doReturn(2000L).when(mockStore).getSize();
    Mockito.doReturn(true).when(mockStore).canSplit();
    Mockito.doReturn(Bytes.toBytes("ab,cd")).when(mockStore).getSplitPoint();
    stores.add(mockStore);
    DelimitedKeyPrefixRegionSplitPolicy policy =
        (DelimitedKeyPrefixRegionSplitPolicy) RegionSplitPolicy
            .create(myMockRegion, conf);
    // split point is truncated at the first ',' delimiter
    assertEquals("ab", Bytes.toString(policy.getSplitPoint()));
    Mockito.doReturn(true).when(myMockRegion).shouldForceSplit();
    Mockito.doReturn(Bytes.toBytes("efg,h")).when(myMockRegion)
        .getExplicitSplitPoint();
    policy = (DelimitedKeyPrefixRegionSplitPolicy) RegionSplitPolicy
        .create(myMockRegion, conf);
    assertEquals("efg", Bytes.toString(policy.getSplitPoint()));
    // no delimiter present: the whole key is used as the split point
    Mockito.doReturn(Bytes.toBytes("ijk")).when(myMockRegion)
        .getExplicitSplitPoint();
    assertEquals("ijk", Bytes.toString(policy.getSplitPoint()));
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * @author Vadim L. Bogdanov
 * @version $Revision$
 */
package javax.swing.text.html;

import java.awt.Cursor;
import java.awt.event.ActionEvent;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Arrays;
import javax.swing.Action;
import javax.swing.JEditorPane;
import javax.swing.SwingTestCase;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultStyledDocument;
import javax.swing.text.Document;
import javax.swing.text.Element;
import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyledEditorKit;
import javax.swing.text.ViewFactory;
import javax.swing.text.html.HTMLEditorKit.InsertHTMLTextAction;

/**
 * Tests for {@link HTMLEditorKit}: document creation, actions (link
 * navigation, InsertHR), read/write, style sheets, cursors and parser.
 * Each test works against a fresh kit/document created in {@link #setUp()}.
 */
public class HTMLEditorKitTest extends SwingTestCase {
    private static final String HTML_TEXT = "<title>t</title>html <i>text</i>";
    // plain-text rendering of HTML_TEXT after loading into an HTMLDocument
    private static final String LOADED_HTML_TEXT = " \nhtml text";

    private HTMLEditorKit editorKit;

    private HTMLDocument document;

    public HTMLEditorKitTest(final String name) {
        super(name);
    }

    protected void setUp() throws Exception {
        super.setUp();
        setIgnoreNotImplemented(true);
        editorKit = new HTMLEditorKit();
        document = (HTMLDocument)editorKit.createDefaultDocument();
        document.setAsynchronousLoadPriority(-1); // synchronous loading
    }

    protected void tearDown() throws Exception {
        super.tearDown();
    }

    public void testHTMLEditorKit() {
        editorKit = new HTMLEditorKit();
        assertNotNull(editorKit.getActions());
    }

    public void testClone() {
        // TODO: implement
    }

    public void testCreateDefaultDocument() {
        Document doc = editorKit.createDefaultDocument();
        assertTrue(doc instanceof HTMLDocument);
        HTMLDocument htmlDoc = (HTMLDocument)doc;
        assertSame(editorKit.getParser(), htmlDoc.getParser());
        // new documents load asynchronously at priority 4 by default
        assertEquals(4, htmlDoc.getAsynchronousLoadPriority());
        assertNotNull(htmlDoc.getStyleSheet());
        // the document gets its own style sheet that cascades from the kit's
        assertFalse(editorKit.getStyleSheet().equals(htmlDoc.getStyleSheet()));
        assertTrue(Arrays.asList(htmlDoc.getStyleSheet().getStyleSheets())
                   .contains(editorKit.getStyleSheet()));
    }

    public void testDeinstallJEditorPane() {
        JEditorPane pane = new JEditorPane();
        int mouseListenersCount = pane.getMouseListeners().length;
        int mouseMotionListenersCount = pane.getMouseMotionListeners().length;
        editorKit.install(pane);
        editorKit.deinstall(pane);
        // deinstall must remove the listeners install added
        assertEquals(mouseListenersCount, pane.getMouseListeners().length);
        assertEquals(mouseMotionListenersCount,
                     pane.getMouseMotionListeners().length);
    }

    public void testGetAccessibleContext() {
        // TODO: implement
    }

    public void testGetActions() throws Exception {
        Action[] ancestorActions = new StyledEditorKit().getActions();
        Action[] actions = editorKit.getActions();
        // the HTML kit adds 12 actions on top of StyledEditorKit's
        assertEquals(12, actions.length - ancestorActions.length);
        Action[] predefinedInsertHTMLTextActions = createPredefinedInsertHTMLTextActions();
        for (int i = 0; i < predefinedInsertHTMLTextActions.length; i++) {
            Action action = findActionWithName(actions,
                    predefinedInsertHTMLTextActions[i].getValue(Action.NAME));
            if (action != null) {
                assertTrue("Action is not same" + action.getValue(Action.NAME),
                           compareInsertHTMLTextActions(action,
                                   predefinedInsertHTMLTextActions[i]));
            } else {
                fail("Action not found: "
                     + predefinedInsertHTMLTextActions[i].getValue(Action.NAME));
            }
        }
    }

    public void testNextLinkAction() throws Exception {
        Action action = findActionWithName(
            editorKit.getActions(), "next-link-action");
        assertNotNull(action);

        JEditorPane pane = new JEditorPane();
        pane.setEditable(false);
        pane.setEditorKit(editorKit);
        document = ((HTMLDocument)pane.getDocument());
        document.setAsynchronousLoadPriority(-1); // synchronous loading
        pane.setText("<p><a href=http://a.com>a.com</a>text<a href=http://b.com>b.com</a></p>");

        // each invocation moves the caret to the next hyperlink
        pane.setCaretPosition(0);
        action.actionPerformed(new ActionEvent(pane, 0, null));
        Element e = document.getCharacterElement(pane.getCaretPosition());
        assertEquals("http://a.com", getURLString(e));

        action.actionPerformed(new ActionEvent(pane, 0, null));
        e = document.getCharacterElement(pane.getCaretPosition());
        assertEquals("http://b.com", getURLString(e));
    }

    public void testPreviousLinkAction() throws Exception {
        Action action = findActionWithName(
            editorKit.getActions(), "previous-link-action");
        assertNotNull(action);

        JEditorPane pane = new JEditorPane();
        pane.setEditable(false);
        pane.setEditorKit(editorKit);
        document = ((HTMLDocument)pane.getDocument());
        document.setAsynchronousLoadPriority(-1); // synchronous loading
        pane.setText("<p><a href=http://a.com>a.com</a>text<a href=http://b.com>b.com</a></p>");

        // starting at the end, each invocation moves the caret to the previous link
        pane.setCaretPosition(document.getLength() - 1);
        action.actionPerformed(new ActionEvent(pane, 0, null));
        Element e = document.getCharacterElement(pane.getCaretPosition());
        assertEquals("http://b.com", getURLString(e));

        action.actionPerformed(new ActionEvent(pane, 0, null));
        e = document.getCharacterElement(pane.getCaretPosition());
        assertEquals("http://a.com", getURLString(e));
    }

    public void testActivateLinkAction() throws Exception {
        Action action = findActionWithName(
            editorKit.getActions(), "activate-link-action");
        assertNotNull(action);

        JEditorPane pane = new JEditorPane();
        pane.setEditable(false);
        pane.setEditorKit(editorKit);
        document = ((HTMLDocument)pane.getDocument());
        document.setAsynchronousLoadPriority(-1); // synchronous loading
        pane.setText("<p><a href=http://a.com>a.com</a>text<a href=http://b.com>b.com</a></p>");
        pane.setCaretPosition(1);

        // activating the link at the caret must fire a HyperlinkEvent
        class TestHyperlinkListener implements HyperlinkListener {
            public boolean occured;

            public void hyperlinkUpdate(HyperlinkEvent event) {
                occured = true;
            }
        }
        TestHyperlinkListener listener = new TestHyperlinkListener();
        pane.addHyperlinkListener(listener);

        action.actionPerformed(new ActionEvent(pane, 0, null));
        assertTrue(listener.occured);
    }

    public void testInsertHRAction() throws Exception {
        InsertHTMLTextAction action = (InsertHTMLTextAction)findActionWithName(
            editorKit.getActions(), "InsertHR");
        assertNotNull(action);

        JEditorPane pane = new JEditorPane();
        pane.setEditorKit(editorKit);
        document = ((HTMLDocument)pane.getDocument());
        document.setAsynchronousLoadPriority(-1); // synchronous loading
        pane.setText("<p>test</p>");

        final int pos = document.getLength() - 1;
        pane.setCaretPosition(pos);
        action.actionPerformed(new ActionEvent(pane, 0, null));
        Element e = document.getCharacterElement(pos + 1);
        assertEquals(HTML.Tag.HR, getHTMLTagByElement(e));
        assertNotNull(e);
        // the HR must land inside the paragraph (P or the implied paragraph)
        HTML.Tag parentTag = getHTMLTagByElement(e.getParentElement());
        assertTrue(HTML.Tag.P.equals(parentTag)
                   || HTML.Tag.IMPLIED.equals(parentTag));
    }

    public void testGetContentType() {
        assertEquals("text/html", editorKit.getContentType());
    }

    public void testGetInputAttributes() throws Exception {
        JEditorPane pane = new JEditorPane();
        editorKit.install(pane);
        editorKit.read(new StringReader("normal<i>italic</i>"),
                       pane.getDocument(), 0);
        pane.setCaretPosition(pane.getDocument().getLength() - 1);
        assertNotNull(editorKit.getInputAttributes());
    }

    public void testGetViewFactory() {
        ViewFactory factory = editorKit.getViewFactory();
        assertTrue(factory instanceof HTMLEditorKit.HTMLFactory);
        // the factory is a shared singleton across kit instances
        assertSame(factory, editorKit.getViewFactory());
        assertSame(factory, new HTMLEditorKit().getViewFactory());
    }

    public void testInsertHTML() throws Exception {
        final String HTML_TEXT2 = "<i>_another text_</i>";
        final String HTML_TEXT3 = ("");
        final String INSERTION_RESULT = " \nhtml_another text_ text";

        editorKit.read(new StringReader(HTML_TEXT), document, 0);
        String s = document.getText(0, document.getLength());
        assertEquals(LOADED_HTML_TEXT, s);

        editorKit.insertHTML(document, 7, HTML_TEXT2, 0, 0, HTML.Tag.I);
        assertEquals(INSERTION_RESULT, document.getText(0, document.getLength()));

        // test pos > document's length
        testExceptionalCase(new ExceptionalCase() {
            public Class expectedExceptionClass() {
                return BadLocationException.class;
            }

            public void exceptionalAction() throws Exception {
                editorKit.insertHTML(document, document.getLength() + 1,
                                     HTML_TEXT3, 0, 0, HTML.Tag.P);
            }
        });

        // test pos < 0
        testExceptionalCase(new ExceptionalCase() {
            public Class expectedExceptionClass() {
                return BadLocationException.class;
            }

            public void exceptionalAction() throws Exception {
                editorKit.insertHTML(document, -1, HTML_TEXT2, 0, 0, HTML.Tag.I);
            }
        });

        // empty insertion, no exception should be thrown
        editorKit.insertHTML(document, -1, HTML_TEXT3, 0, 0, HTML.Tag.I);
    }

    public void testInstallJEditorPane() {
        JEditorPane pane = new JEditorPane();
        int mouseListenersCount = pane.getMouseListeners().length;
        int mouseMotionListenersCount = pane.getMouseMotionListeners().length;
        editorKit.install(pane);
        // install registers exactly one mouse and one mouse-motion listener
        assertEquals(mouseListenersCount + 1, pane.getMouseListeners().length);
        assertEquals(mouseMotionListenersCount + 1,
                     pane.getMouseMotionListeners().length);
    }

    public void testRead() throws Exception {
        final StringReader in1 = new StringReader(HTML_TEXT);
        final StringReader in2 = new StringReader("another text");
        final StringReader in3 = new StringReader("");
        final StringReader in4 = new StringReader("");
        final StringReader in5 = new StringReader("");
        final String text2 = " \nhtml\nanother text\n text";

        editorKit.read(in1, document, 0);
        String s = document.getText(0, document.getLength());
        assertEquals(LOADED_HTML_TEXT, s);
        // the reader is closed after a successful read
        testExceptionalCase(new ExceptionalCase() {
            public Class expectedExceptionClass() {
                return IOException.class;
            }

            public void exceptionalAction() throws Exception {
                in1.ready();
            }
        });

        editorKit.read(in2, document, 7);
        assertEquals(text2, document.getText(0, document.getLength()));

        // test pos > document's length
        testExceptionalCase(new ExceptionalCase() {
            public Class expectedExceptionClass() {
                return BadLocationException.class;
            }

            public void exceptionalAction() throws Exception {
                editorKit.read(in3, document, document.getLength() + 1);
            }
        });

        // test pos outside BODY
        testExceptionalCase(new ExceptionalCase() {
            public Class expectedExceptionClass() {
                return RuntimeException.class;
            }

            public void exceptionalAction() throws Exception {
                editorKit.read(in4, document, 0);
            }
        });

        // test pos < 0
        testExceptionalCase(new ExceptionalCase() {
            public Class expectedExceptionClass() {
                return RuntimeException.class;
            }

            public void exceptionalAction() throws Exception {
                editorKit.read(in5, document, -1);
            }
        });
    }

    public void testSetIsAutoFormSubmission() {
        assertTrue(editorKit.isAutoFormSubmission());
        editorKit.setAutoFormSubmission(false);
        assertFalse(editorKit.isAutoFormSubmission());
    }

    public void testSetGetDefaultCursor() {
        assertSame(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR),
                   editorKit.getDefaultCursor());

        Cursor newCursor = new Cursor(Cursor.DEFAULT_CURSOR);
        editorKit.setDefaultCursor(newCursor);
        assertSame(newCursor, editorKit.getDefaultCursor());
    }

    public void testSetGetLinkCursor() {
        assertSame(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR),
                   editorKit.getLinkCursor());

        Cursor newCursor = new Cursor(Cursor.DEFAULT_CURSOR);
        editorKit.setLinkCursor(newCursor);
        assertSame(newCursor, editorKit.getLinkCursor());
    }

    public void testSetStyleSheet() {
        StyleSheet ss = new StyleSheet();
        editorKit.setStyleSheet(ss);
        assertSame(ss, editorKit.getStyleSheet());

        // the style sheet is static: new kit instances share it
        editorKit = new HTMLEditorKit();
        assertSame(ss, editorKit.getStyleSheet());
    }

    public void testGetStyleSheet() {
        StyleSheet ss = editorKit.getStyleSheet();
        assertNotNull(ss);
        assertSame(ss, editorKit.getStyleSheet());
        // shared across instances as well
        editorKit = new HTMLEditorKit();
        assertSame(ss, editorKit.getStyleSheet());
    }

    public void testWrite() throws Exception {
        StringWriter writer = new StringWriter();
        final String content = "Hello, World!";
        final int start = 1;
        final int end = 4;

        // writing a plain styled document still produces HTML output
        DefaultStyledDocument doc = new DefaultStyledDocument();
        doc.insertString(0, content, null);
        editorKit.write(writer, doc, start, end);
        String output = writer.toString();
        assertTrue(output.indexOf("<html>") != -1);
        if (isHarmony()) {
            // only the [start, end) slice of the content is written
            assertFalse(output.indexOf(content) != -1);
            assertTrue(output.indexOf(content.substring(start, end)) != -1);
        }

        writer = new StringWriter();
        doc = (HTMLDocument)editorKit.createDefaultDocument();
        doc.insertString(0, content, null);
        editorKit.write(writer, doc, start, end);
        output = writer.toString();
        assertTrue(output.indexOf("<html>") != -1);
        assertFalse(output.indexOf(content) != -1);
        assertTrue(output.indexOf(content.substring(start, end)) != -1);
    }

    public void testGetParser() {
        HTMLEditorKit.Parser parser = editorKit.getParser();
        assertNotNull(parser);
        // the parser is shared across calls and instances
        assertSame(parser, editorKit.getParser());
        assertSame(parser, new HTMLEditorKit().getParser());
    }

    public void testCreateInputAttributes() throws Exception {
        document.insertAfterStart(document.getDefaultRootElement(),
                                  "<b>bold</b>");
        Element e = document.getDefaultRootElement().getElement(0);
        SimpleAttributeSet attrSet = new SimpleAttributeSet();
        editorKit.createInputAttributes(e, attrSet);
        assertTrue(attrSet.containsAttribute(StyleConstants.NameAttribute,
                                             HTML.Tag.CONTENT));
        // HTML <b> is converted to the CSS font-weight attribute
        assertEquals("bold",
                     attrSet.getAttribute(CSS.Attribute.FONT_WEIGHT).toString());
    }

    public void testParserCallback() {
        Object implied = HTMLEditorKit.ParserCallback.IMPLIED;
        assertTrue(implied instanceof String);
        assertFalse("".equals(implied));
    }

    // builds the reference set of InsertHTMLTextActions the kit is expected to expose
    private Action[] createPredefinedInsertHTMLTextActions() {
        Action[] actions = {
            new HTMLEditorKit.InsertHTMLTextAction("InsertOrderedList",
                                                   "<ol><li></li></ol>",
                                                   HTML.Tag.BODY, HTML.Tag.OL),
            new HTMLEditorKit.InsertHTMLTextAction("InsertOrderedListItem",
                                                   "<ol><li></li></ol>",
                                                   HTML.Tag.OL, HTML.Tag.LI,
                                                   HTML.Tag.BODY, HTML.Tag.OL),
            new HTMLEditorKit.InsertHTMLTextAction("InsertUnorderedList",
                                                   "<ul><li></li></ul>",
                                                   HTML.Tag.BODY, HTML.Tag.UL),
            new HTMLEditorKit.InsertHTMLTextAction("InsertUnorderedListItem",
                                                   "<ul><li></li></ul>",
                                                   HTML.Tag.UL, HTML.Tag.LI,
                                                   HTML.Tag.BODY, HTML.Tag.UL),
            new HTMLEditorKit.InsertHTMLTextAction("InsertTable",
                                                   "<table border=1><tr><td></td></tr></table>",
                                                   HTML.Tag.BODY, HTML.Tag.TABLE),
            new HTMLEditorKit.InsertHTMLTextAction("InsertTableDataCell",
                                                   "<table border=1><tr><td></td></tr></table>",
                                                   HTML.Tag.TR, HTML.Tag.TD,
                                                   HTML.Tag.BODY, HTML.Tag.TABLE),
            new HTMLEditorKit.InsertHTMLTextAction("InsertTableRow",
                                                   "<table border=1><tr><td></td></tr></table>",
                                                   HTML.Tag.TABLE, HTML.Tag.TR,
                                                   HTML.Tag.BODY, HTML.Tag.TABLE),
            new HTMLEditorKit.InsertHTMLTextAction("InsertPre",
                                                   "<pre></pre>",
                                                   HTML.Tag.BODY, HTML.Tag.PRE),
        };
        return actions;
    }

    // field-by-field comparison of two InsertHTMLTextActions
    private boolean compareInsertHTMLTextActions(final Action a1, final Action a2) {
        if (!(a1 instanceof HTMLEditorKit.InsertHTMLTextAction)
                || !(a2 instanceof HTMLEditorKit.InsertHTMLTextAction)) {
            return false;
        }
        HTMLEditorKit.InsertHTMLTextAction htmlAction1 =
            (HTMLEditorKit.InsertHTMLTextAction)a1;
        HTMLEditorKit.InsertHTMLTextAction htmlAction2 =
            (HTMLEditorKit.InsertHTMLTextAction)a2;
        return compareActionFields(htmlAction1.addTag, htmlAction2.addTag)
            && compareActionFields(htmlAction1.alternateAddTag,
                                   htmlAction2.alternateAddTag)
            && compareActionFields(htmlAction1.alternateParentTag,
                                   htmlAction2.alternateParentTag)
            && compareActionFields(htmlAction1.html, htmlAction2.html)
            && compareActionFields(htmlAction1.parentTag, htmlAction2.parentTag);
    }

    // null-safe equality: equal objects, or both null (f1 == f2 covers the null/null case)
    private boolean compareActionFields(final Object f1, final Object f2) {
        return f1 != null && f1.equals(f2) || f1 == f2;
    }

    // linear scan for an action by its Action.NAME value; null if absent
    private Action findActionWithName(final Action[] actions, final Object name) {
        for (int i = 0; i < actions.length; i++) {
            if (name.equals(actions[i].getValue(Action.NAME))) {
                return actions[i];
            }
        }
        return null;
    }

    private static HTML.Tag getHTMLTagByElement(final Element elem) {
        final Object result =
            elem.getAttributes().getAttribute(StyleConstants.NameAttribute);
        return (result instanceof HTML.Tag) ? (HTML.Tag)result : null;
    }

    // extracts the HREF of the <a> tag attached to the element, if any
    private static String getURLString(final Element e) {
        AttributeSet aSet = (AttributeSet)e.getAttributes()
            .getAttribute(HTML.Tag.A);
        return aSet == null ? null
            : (String)aSet.getAttribute(HTML.Attribute.HREF);
    }
}
/* * Copyright 2002,2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.axis.tools.ant.wsdl; import org.apache.axis.encoding.TypeMappingRegistryImpl; import org.apache.axis.encoding.TypeMappingDelegate; import org.apache.axis.wsdl.fromJava.Emitter; import org.apache.tools.ant.AntClassLoader; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.Project; import org.apache.tools.ant.Task; import org.apache.tools.ant.types.Path; import org.apache.tools.ant.types.Reference; import org.apache.tools.ant.types.Environment; import org.apache.tools.ant.types.CommandlineJava; import java.io.File; import java.io.PrintWriter; import java.io.StringWriter; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; /* * Important. we autogenerate the ant task docs from this. * after adding a new attribute * 1. add the javadoc for the end users. Make it meaningful * 2. get jakarta_ant/proposals/xdocs from ant CVS * 3. run the xdocs target in tools/build.xml * this creates xml files in xdocs/build * 4. run proposals/xdocs/dvsl build.xml to create the html files * these are also created under xdocs/build * 5. copy the the html files to docs/ant * 4. check in the changes in docs/ant */ /** * Generates a WSDL description from a Java class. 
* @author Rich Scheuerle (scheu@us.ibm.com) * @author Steve Loughran * @ant.task category="axis" name="axis-java2wsdl" */ public class Java2WsdlAntTask extends Task { private String namespace = ""; private String namespaceImpl = null; private HashMap namespaceMap = new HashMap(); private String location = ""; private String locationImport = null; private String output = "." ; private String importSchema = null ; private String input = null ; private String outputImpl = null; private String className = "." ; private String servicePortName = null ; private String portTypeName = null ; private String bindingName = null ; private String implClass = null; private boolean useInheritedMethods = false; private String exclude = null; private String stopClasses = null; private String typeMappingVersion = TypeMappingVersionEnum.DEFAULT_VERSION; private String style = null; private String serviceElementName=null; private String methods=null; private String use = null; private MappingSet mappings=new MappingSet(); private String extraClasses = null; private Path classpath = null; private String soapAction = null; private List complexTypes = new LinkedList(); private boolean isDeploy = false; private CommandlineJava commandline = new CommandlineJava(); /** * trace out parameters * @param logLevel to log at * @see org.apache.tools.ant.Project#log */ public void traceParams(int logLevel) { log("Running Java2WsdlAntTask with parameters:", logLevel); log("\tnamespace:" + namespace, logLevel); log("\tPkgtoNS:" + namespaceMap, logLevel); log("\tlocation:" + location, logLevel); log("\toutput:" + output, logLevel); log("\timportSchema:" + importSchema, logLevel); log("\tinput:" + input, logLevel); log("\tclassName:" + className, logLevel); log("\tservicePortName:" + servicePortName, logLevel); log("\tportTypeName:" + portTypeName, logLevel); log("\tbindingName:" + bindingName, logLevel); log("\timplClass:" + implClass, logLevel); log("\tinheritance:" + useInheritedMethods, logLevel); 
log("\texcluded:" + exclude, logLevel); log("\tstopClasses:" + stopClasses, logLevel); log("\ttypeMappingVersion:" + typeMappingVersion, logLevel); log("\tstyle:" + style, logLevel); log("\toutputImpl:" + outputImpl, logLevel); log("\tuse:" + use, logLevel); log("\tnamespaceImpl:" + namespaceImpl, logLevel); log("\tlocationImport:" + locationImport, logLevel); log("\tserviceElementName:" + serviceElementName, logLevel); log("\tmethods:" + methods, logLevel); log("\textraClasses:" + extraClasses, logLevel); log("\tsoapAction:" + soapAction, logLevel); log("\tclasspath:" + classpath, logLevel); } /** * validation code * @throws BuildException if validation failed */ protected void validate() throws BuildException { if(className==null || className.length() ==0) { throw new BuildException("No classname was specified"); } if(location==null || location.length() == 0) { throw new BuildException("No location was specified"); } } /** * execute the task * @throws BuildException */ public void execute() throws BuildException { AntClassLoader cl = new AntClassLoader(getClass().getClassLoader(), getProject(), classpath == null ? 
createClasspath() : classpath, true); CommandlineJava.SysProperties sysProperties = commandline.getSystemProperties(); if (sysProperties != null) { sysProperties.setSystem(); } try { traceParams(Project.MSG_VERBOSE); validate(); // Instantiate the emitter Emitter emitter = new Emitter(); //do the mappings, packages are the key for this map mappings.execute(this,namespaceMap, true); if (!namespaceMap.isEmpty()) { emitter.setNamespaceMap(namespaceMap); } if (servicePortName != null) { emitter.setServicePortName(servicePortName); } if (portTypeName != null) { emitter.setPortTypeName(portTypeName); } if (bindingName != null) { emitter.setBindingName(bindingName); } log("Java2WSDL " + className, Project.MSG_INFO); emitter.setCls(cl.loadClass(className)); if (implClass != null) { emitter.setImplCls(cl.loadClass(implClass)); } if (exclude != null) { emitter.setDisallowedMethods(exclude); } if (stopClasses != null) { emitter.setStopClasses(stopClasses); } if (extraClasses != null) { emitter.setExtraClasses(extraClasses, cl); } TypeMappingRegistryImpl tmr = new TypeMappingRegistryImpl(); tmr.doRegisterFromVersion(typeMappingVersion); emitter.setTypeMappingRegistry(tmr); // Create TypeMapping and register complex types TypeMappingDelegate tmi = (TypeMappingDelegate)tmr.getDefaultTypeMapping(); Iterator i = complexTypes.iterator(); while (i.hasNext()) { ((ComplexType) i.next()).register(cl, tmi); } if (style != null) { emitter.setStyle(style); } if (use != null) { emitter.setUse(use); } if (importSchema != null) { emitter.setInputSchema(importSchema); } if (input != null) { emitter.setInputWSDL(input); } emitter.setIntfNamespace(namespace); emitter.setImplNamespace(namespaceImpl); emitter.setLocationUrl(location); emitter.setImportUrl(locationImport); emitter.setUseInheritedMethods(useInheritedMethods); if(serviceElementName!=null) { emitter.setServiceElementName(serviceElementName); } if(methods!=null) { emitter.setAllowedMethods(methods); } if (soapAction != null) { 
emitter.setSoapAction(soapAction); } if (outputImpl == null) { // Normal case emitter.emit(output, Emitter.MODE_ALL); } else { // Emit interface and implementation wsdls emitter.emit(output, outputImpl); } if (isDeploy == true) { generateServerSide(emitter, (outputImpl != null) ? outputImpl : output); } } catch(BuildException b) { //pass build exceptions up the wire throw b; } catch (Throwable t) { //other trouble: stack trace the trouble and throw an exception StringWriter writer = new StringWriter(); t.printStackTrace(new PrintWriter(writer)); log(writer.getBuffer().toString(), Project.MSG_ERR); throw new BuildException("Error while running " + getClass().getName(), t); } finally { if (sysProperties != null) { sysProperties.restoreSystem(); } } } /** * The name of the output WSDL file. * If not specified, a suitable default WSDL file is written into * the current directory. * @param parameter */ public void setOutput(File parameter) { this.output = parameter.getPath(); } /** * Option attribute that indicates the name of an XML Schema file that * should be physically imported into the generated WSDL. * @param parameter */ public void setImportSchema(File parameter) throws BuildException { try { this.importSchema = parameter.toURL().toString(); } catch (java.io.IOException ioe) { throw new BuildException(ioe); } } /** * Optional attribute that indicates the name of the input wsdl file. * The output wsdl file will contain everything from the input wsdl * file plus the new constructs. If a new construct is already present * in the input wsdl file, it is not added. This option is useful for * constructing a wsdl file with multiple ports, bindings, or portTypes. * @param parameter filename */ public void setInput(File parameter) { this.input = parameter.getPath(); } /** * Use this option to indicate the name of the output implementation WSDL * file. If specified, Java2WSDL will produce separate interface and implementation * WSDL files. 
If not, a single WSDL file is generated * @param parameter */ public void setOutputImpl(File parameter) { this.outputImpl = parameter.getPath(); } /** * The url of the location of the service. The name after the last slash or * backslash is the name of the service port (unless overridden by the -s * option). The service port address location attribute is assigned the * specified value. * @param parameter a URL */ public void setLocation(String parameter) { this.location = parameter; } /** * the location of the interface WSDL when generating an implementation WSDL * Required when <tt>outputImpl</tt> is set * @param parameter URL? */ public void setLocationImport(String parameter) { this.locationImport = parameter; } /** * the class name to import, eg. org.example.Foo. Required. * The class must be on the classpath. * @param parameter fully qualified class name */ public void setClassName(String parameter) { this.className = parameter; } /** * Sometimes extra information is available in the implementation class * file. Use this option to specify the implementation class. * @param parameter */ public void setImplClass(String parameter) { this.implClass = parameter; } /** * service port name (obtained from location if not specified) * @param parameter portname */ public void setServicePortName(String parameter) { this.servicePortName = parameter; } /** * Indicates the name to use use for the portType element. * If not specified, the class-of-portType name is used. * @param parameter */ public void setPortTypeName(String parameter) { this.portTypeName = parameter; } /** * The name to use use for the binding element. * If not specified, the value of the * <tt>servicePortName</tt> + "SoapBinding" is used. * @param parameter */ public void setBindingName(String parameter) { this.bindingName = parameter; } /** * the target namespace. Required. 
* @param parameter */ public void setNamespace(String parameter) { this.namespace = parameter; } /** * Namespace of the implementation WSDL. * @param parameter */ public void setNamespaceImpl(String parameter) { this.namespaceImpl = parameter; } /** * should inherited methods be exported too? Default=false * @param parameter */ public void setUseInheritedMethods(boolean parameter) { this.useInheritedMethods = parameter; } /** * Comma separated list of methods to exclude from the wsdl file. * @param exclude */ public void setExclude(String exclude) { this.exclude = exclude; } /** * Comma separated list of classes which stop the Java2WSDL * inheritance search. * @param stopClasses */ public void setStopClasses(String stopClasses) { this.stopClasses = stopClasses; } /** * The style of the WSDL document: RPC, DOCUMENT or WRAPPED. * If RPC, a rpc/encoded wsdl is generated. If DOCUMENT, a * document/literal wsdl is generated. If WRAPPED, a * document/literal wsdl is generated using the wrapped approach. * @param style */ public void setStyle(String style) { this.style = style; } /** * add a mapping of namespaces to packages */ public void addMapping(NamespaceMapping mapping) { mappings.addMapping(mapping); } /** * add a mapping of namespaces to packages */ public void addMappingSet(MappingSet mappingset) { mappings.addMappingSet(mappingset); } /** * the default type mapping registry to use. Either 1.1 or 1.2. * Default is 1.1 * @param parameter new version */ public void setTypeMappingVersion(TypeMappingVersionEnum parameter) { this.typeMappingVersion = parameter.getValue(); } /** * If this option is specified, only the indicated methods in your * interface class will be exported into the WSDL file. The methods list * must be comma separated. 
If not specified, all methods declared in * the interface class will be exported into the WSDL file * @param methods list of methods */ public void setMethods(String methods) { this.methods = methods; } /** * Set the use option */ public void setUse(String use) { this.use = use; } /** * the name of the service element. * If not specified, the service element is the <tt>portTypeName</tt>Service. * @param serviceElementName */ public void setServiceElementName(String serviceElementName) { this.serviceElementName = serviceElementName; } /** * A comma separated list of classes to add to the classpath. */ public void setExtraClasses(String extraClasses) { this.extraClasses = extraClasses; } /** * The setter for the "soapAction" attribute */ public void setSoapAction( String soapAction ) { this.soapAction = soapAction; } /** * Nested element for Complex Types. * Each Complex Type uses the following fields: * @param ct */ public void addComplexType(ComplexType ct) { complexTypes.add(ct); } /** * Set the optional classpath * * @param classpath the classpath to use when loading class */ public void setClasspath(Path classpath) { createClasspath().append(classpath); } /** * Set the optional classpath * * @return a path instance to be configured by the Ant core. */ public Path createClasspath() { if (classpath == null) { classpath = new Path(getProject()); classpath = classpath.concatSystemClasspath(); } return classpath.createPath(); } /** * Set the reference to an optional classpath * * @param r the id of the Ant path instance to act as the classpath */ public void setClasspathRef(Reference r) { createClasspath().setRefid(r); } /** * Adds a system property that tests can access. 
* @param sysp environment variable to add */ public void addSysproperty(Environment.Variable sysp) { commandline.addSysproperty(sysp); } /** * Sets the deploy flag * @param deploy true if deploy mode */ public void setDeploy(boolean deploy) { this.isDeploy = deploy; } /** * Generate the server side artifacts from the generated WSDL * * @param j2w the Java2WSDL emitter * @param wsdlFileName the generated WSDL file * @throws Exception */ protected void generateServerSide(Emitter j2w, String wsdlFileName) throws Exception { org.apache.axis.wsdl.toJava.Emitter w2j = new org.apache.axis.wsdl.toJava.Emitter(); File wsdlFile = new File(wsdlFileName); w2j.setServiceDesc(j2w.getServiceDesc()); w2j.setQName2ClassMap(j2w.getQName2ClassMap()); w2j.setOutputDir(wsdlFile.getParent()); w2j.setServerSide(true); w2j.setDeploy(true); w2j.setHelperWanted(true); // setup namespace-to-package mapping String ns = j2w.getIntfNamespace(); String clsName = j2w.getCls().getName(); int idx = clsName.lastIndexOf("."); String pkg = null; if (idx > 0) { pkg = clsName.substring(0, idx); w2j.getNamespaceMap().put(ns, pkg); } Map nsmap = j2w.getNamespaceMap(); if (nsmap != null) { for (Iterator i = nsmap.keySet().iterator(); i.hasNext(); ) { pkg = (String) i.next(); ns = (String) nsmap.get(pkg); w2j.getNamespaceMap().put(ns, pkg); } } // set 'deploy' mode w2j.setDeploy(true); if (j2w.getImplCls() != null) { w2j.setImplementationClassName(j2w.getImplCls().getName()); } else { if (!j2w.getCls().isInterface()) { w2j.setImplementationClassName(j2w.getCls().getName()); } else { throw new Exception("implementation class is not specified."); } } w2j.run(wsdlFileName); } }
/* * Copyright 2014-2022 Web Firm Framework * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * @author WFF */ package com.webfirmframework.wffweb.css.css3; import java.util.Arrays; import java.util.List; import com.webfirmframework.wffweb.InvalidValueException; import com.webfirmframework.wffweb.NullValueException; import com.webfirmframework.wffweb.css.CssNameConstants; import com.webfirmframework.wffweb.css.core.AbstractCssProperty; import com.webfirmframework.wffweb.util.StringUtil; import com.webfirmframework.wffweb.util.TagStringUtil; /** * <pre> * -webkit-flex-grow: <i>number</i>|initial|inherit; * * The flex-grow property specifies how much the item will grow relative to the rest of the flexible items inside the same container. * * Note: If the element is not a flexible item, the -webkit-flex-grow property has no effect. * Default value: 0 * Inherited: no * Animatable: yes * Version: CSS3 * JavaScript syntax: object.style.webkitFlexGrow="5" * </pre> * * @author WFF * @since 1.0.0 */ public class WebkitFlexGrow extends AbstractCssProperty<WebkitFlexGrow> { private static final long serialVersionUID = 1_0_0L; public static final String INITIAL = "initial"; public static final String INHERIT = "inherit"; private static final List<String> PREDEFINED_CONSTANTS = Arrays.asList(INITIAL, INHERIT); private String cssValue; private Float value; /** * The default value 0 will be set as the cssValue. 
* * @author WFF * @since 1.0.0 */ public WebkitFlexGrow() { value = Float.valueOf(0); cssValue = value.toString(); } /** * @param cssValue the css value to set. */ public WebkitFlexGrow(final String cssValue) { setCssValue(cssValue); } /** * @param webkitFlexGrow the {@code webkitFlexGrow} object from which the * cssValue to set.And, {@code null} will throw * {@code NullValueException} */ public WebkitFlexGrow(final WebkitFlexGrow webkitFlexGrow) { if (webkitFlexGrow == null) { throw new NullValueException("webkitFlexGrow can not be null"); } setCssValue(webkitFlexGrow.getCssValue()); } /** * @param value */ public WebkitFlexGrow(final float value) { this.value = value; cssValue = String.valueOf(value); } /* * (non-Javadoc) * * @see com.webfirmframework.wffweb.css.CssProperty#getCssName() * * @since 1.0.0 * * @author WFF */ @Override public String getCssName() { return CssNameConstants.WEBKIT_FLEX_GROW; } /* * (non-Javadoc) * * @see com.webfirmframework.wffweb.css.CssProperty#getCssValue() * * @since 1.0.0 * * @author WFF */ @Override public String getCssValue() { return cssValue; } @Override public String toString() { return getCssName() + ": " + getCssValue(); } /** * gets the webkitFlexGrow in {@code Float} value. * * @return the value in float or null if the cssValue is <code>initial</code> or * <code>inherit</code>. * @since 1.0.0 * @author WFF */ public Float getValue() { return value; } /** * @param value the value to set * @author WFF * @since 1.0.0 */ public void setValue(final float value) { this.value = value; cssValue = String.valueOf(value); if (getStateChangeInformer() != null) { getStateChangeInformer().stateChanged(this); } } /** * @param cssValue the value should be in the format of <code>0.5</code>, * <code>initial/inherit</code>. {@code null} is considered as * an invalid value and it will throw * {@code NullValueException}. 
* @since 1.0.0 * @author WFF */ @Override public WebkitFlexGrow setCssValue(final String cssValue) { if (cssValue == null) { throw new NullValueException( "null is an invalid value. The value format should be as for example 0.5, initial/inherit."); } else { final String trimmedCssValue = TagStringUtil.toLowerCase(StringUtil.strip(cssValue)); if (INITIAL.equals(trimmedCssValue) || INHERIT.equals(trimmedCssValue)) { this.cssValue = trimmedCssValue; value = null; } else { try { value = Float.valueOf(trimmedCssValue); this.cssValue = value.toString(); } catch (final NumberFormatException e) { throw new InvalidValueException(cssValue + " is an invalid value. The value format should be as for example 0.5, initial, inherit etc.."); } } } if (getStateChangeInformer() != null) { getStateChangeInformer().stateChanged(this); } return this; } /** * sets as {@code initial} * * @since 1.0.0 * @author WFF */ public void setAsInitial() { setCssValue(INITIAL); } /** * sets as {@code inherit} * * @since 1.0.0 * @author WFF */ public void setAsInherit() { setCssValue(INHERIT); } /** * validates if the given cssValue is valid for this class. * * @param cssValue the value to check. * @return true if valid and false if invalid. * @author WFF * @since 1.0.0 */ public static boolean isValid(final String cssValue) { final String trimmedCssValue = TagStringUtil.toLowerCase(StringUtil.strip(cssValue)); if (StringUtil.containsSpace(trimmedCssValue)) { return false; } try { Float.parseFloat(trimmedCssValue); return true; } catch (final NumberFormatException e) { } return PREDEFINED_CONSTANTS.contains(trimmedCssValue); } }
package indexing;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Set;

import core.DocumentContainer;
import core.MainBackend;
import core.QueryContainer;

/**
 * Builds an inverted file (word -> per-document term frequencies) for a
 * document collection and a query set, and computes TF/IDF weights,
 * document/query vector lengths and normalization.
 *
 * NOTE(review): state is held in static fields (stopwordList, TF_max), so
 * this class is not thread-safe and TF_max persists across runs — verify
 * callers reset state between collections.
 */
public class Indexer {

    //constant representation for document and query
    public final static int DOCUMENT = 1;
    public final static int QUERY = 2;

    //containers for the parsing process
    //DOCUMENT CONTAINERS
    static ArrayList<String> stopwordList = new ArrayList<String>(); //list of stop words, reused then cleared per run

    //QUERY CONTAINERS

    //variables used in TF calculations
    static int TF_max = 0; //maximum raw TF, used in Augmented TF calculation

    //variables used in indexing method
    //TODO integrate with GUI
    public final static int DOCUMENT_BINARY_TF = 0;
    public final static int DOCUMENT_RAW_TF = 1;
    public final static int DOCUMENT_LOG_TF = 2;
    public final static int DOCUMENT_AUGMENTED_TF = 3;
    static boolean useDocumentIDF = false;
    static boolean useDocumentNorm = false; //use normalization?

    /**
     * parses the document file, extracting the required info from each document.
     * Lines starting with '.' are section markers (.I id, .T title, .A author,
     * .W content); all other lines are payload appended to the current section.
     * Punctuation and digits are stripped and text is lower-cased.
     *
     * @param path file path
     * @param doc_or_query code to distinguish between document or query
     * @param dc target container for DOCUMENT mode
     * @param qc target container for QUERY mode
     * @throws IOException on read failure
     */
    public static void readFile(String path, int doc_or_query, DocumentContainer dc, QueryContainer qc) throws IOException{
        //used for parsing process
        int idx = 0;              // 1-based id of the current document/query
        char status = ' ';        // current section marker (T/A/W)
        boolean firstline = true; // first payload line of the current section?

        //parsing
        BufferedReader br = new BufferedReader(new FileReader(path));
        for(String line = br.readLine(); line != null; line = br.readLine()){
            if((line.length() > 1) && (line.charAt(0) == '.')){
                //section-marker line
                switch (doc_or_query){
                    case Indexer.DOCUMENT:
                        switch (line.charAt(1)) {
                            case 'I':
                                idx = Integer.parseInt(line.substring(3)); //assign current doc number
                                dc.titleList.add("");
                                dc.authorList.add("");
                                dc.contentList.add("");
                                break;
                            case 'T':
                                status = 'T';
                                firstline = true;
                                break;
                            case 'A':
                                status = 'A';
                                firstline = true;
                                break;
                            case 'W':
                                status = 'W';
                                firstline = true;
                                break;
                            default:
                                status = ' ';
                                break;
                        }
                        break;
                    case Indexer.QUERY:
                        switch (line.charAt(1)) {
                            case 'I':
                                idx = Integer.parseInt(line.substring(3)); //assign current query number
                                qc.queryList.add("");
                                break;
                            case 'W':
                                status = 'W';
                                firstline = true;
                                break;
                            default:
                                status = ' ';
                                break;
                        }
                        break;
                }
            }
            else{
                //payload line: normalize then append to the active section
                String curr_line = line.replaceAll("[\\[\\](){},.:;\"!?<>%/0-9-]", ""); //trim punctuation
                curr_line = curr_line.toLowerCase(); //convert all characters to lower case
                switch (doc_or_query) {
                    case Indexer.DOCUMENT:
                        switch (status) {
                            case 'T':
                                if(firstline){
                                    dc.titleList.set(idx-1,curr_line); //add a new title elmt
                                    firstline = false;
                                }
                                else{
                                    dc.titleList.set(idx-1, dc.titleList.get(idx-1)+" "+curr_line); //append to existing elmt
                                }
                                break;
                            case 'A':
                                if(firstline){
                                    dc.authorList.set(idx-1,curr_line); //add a new author elmt
                                    firstline = false;
                                }
                                else{
                                    dc.authorList.set(idx-1, dc.authorList.get(idx-1)+" "+curr_line); //append to existing elmt
                                }
                                break;
                            case 'W':
                                if(firstline){
                                    dc.contentList.set(idx-1,curr_line); //add a new content elmt
                                    firstline = false;
                                }
                                else{
                                    dc.contentList.set(idx-1, dc.contentList.get(idx-1)+" "+curr_line); //append to existing elmt
                                }
                                break;
                            default:
                                break;
                        }
                        break;
                    case Indexer.QUERY:
                        switch (status) {
                            case 'W':
                                if(firstline){
                                    qc.queryList.set(idx-1,curr_line); //add a new content elmt
                                    firstline = false;
                                }
                                else{
                                    qc.queryList.set(idx-1, qc.queryList.get(idx-1)+" "+curr_line); //append to existing elmt
                                }
                                break;
                            default:
                                break;
                        }
                        break;
                    default:
                        break;
                }
            }
        }
        br.close();
    }

    /**
     * removes stop words in contentList based on a stop word list
     * (loaded from res/stopword/1.txt; lines starting with "//" are comments).
     * @param dc document container whose contentList is filtered in place
     * @throws IOException on read failure
     */
    public static void removeDocStopWord(DocumentContainer dc) throws IOException{
        //open the stop word file
        BufferedReader br = new BufferedReader(new FileReader("res/stopword/1.txt"));
        for(String line = br.readLine(); line != null; line = br.readLine()){
            if(line.length() > 2){
                if((line.charAt(0) != '/') && (line.charAt(1) != '/')){ //skip comments
                    stopwordList.add(line.trim());
                }
            }
            else{
                stopwordList.add(line.trim());
            }
        }
        br.close();

        //remove stop words from contentList
        for(int i = 0; i < dc.contentList.size(); i++){
            String newcontent = dc.contentList.get(i);
            for(String str_stopword : stopwordList){
                newcontent = newcontent.replaceAll("\\b"+str_stopword+"\\b(?!-)", ""); //hyphen-separated words will be counted as one word
            }
            dc.contentList.set(i, newcontent);
        }

        //delete the contents of stopwordList, freeing up resource
        stopwordList.clear();
    }

    /**
     * removes stop words from queryList based on a stop word list
     * (same file and comment convention as removeDocStopWord).
     * @param qc query container whose queryList is filtered in place
     * @throws IOException on read failure
     */
    public static void removeQueryStopWord(QueryContainer qc) throws IOException{
        //open the stop word file
        BufferedReader br = new BufferedReader(new FileReader("res/stopword/1.txt"));
        for(String line = br.readLine(); line != null; line = br.readLine()){
            if(line.length() > 2){
                if((line.charAt(0) != '/') && (line.charAt(1) != '/')){ //skip comments
                    stopwordList.add(line.trim());
                }
            }
            else{
                stopwordList.add(line.trim());
            }
        }
        br.close();

        //remove stop words from queryList
        for(int i = 0; i < qc.queryList.size(); i++){
            String newcontent = qc.queryList.get(i);
            for(String str_stopword : stopwordList){
                newcontent = newcontent.replaceAll("\\b"+str_stopword+"\\b(?!-)", ""); //hyphen-separated words will be counted as one word
            }
            qc.queryList.set(i, newcontent);
        }

        //delete the contents of stopwordList, freeing up resource
        stopwordList.clear();
    }

    /**
     * lists all the words in the contentList, then finds the matching documents
     * <p>this method also calculates raw TF for each word in each matching document.
     * Works on a deep copy of the token lists, consuming tokens destructively so
     * each word is processed once; updates the shared TF_max along the way.
     */
    public static void listDocumentWord(DocumentContainer dc){
        dc.wordList.clear();
        //tokenize the words in each document
        for(String content : dc.contentList){
            String[] words = content.trim().split(" +");
            ArrayList<String> temp = new ArrayList<String>();
            for(String word : words){
                temp.add(word);
            }
            //insert tokens into wordList
            dc.wordList.add(temp);
        }

        //delete the contents of contentList, freeing up resource
        //contentList.clear();

        //trying to deep copy the contents of wordList for indexing purpose
        ArrayList<ArrayList<String>> temp_wordList = new ArrayList<ArrayList<String>>();
        for(ArrayList<String> elmt_wordlist : dc.wordList){
            temp_wordList.add(new ArrayList<String>());
            ArrayList<String> str = temp_wordList.get(temp_wordList.size()-1);
            for(String elmt_elmt_wordlist : elmt_wordlist){
                str.add(elmt_elmt_wordlist);
            }
        }

        //enumerate words and its occurrence in the entire document file
        for (int i = 0; i < temp_wordList.size(); i++) {
            while(!(temp_wordList.get(i).isEmpty())){
                //take the first word
                String word = temp_wordList.get(i).get(0);
                if(dc.invFile.containsKey(word)){
                    //already indexed (from an earlier document); just consume it
                    temp_wordList.get(i).remove(0);
                }
                else{
                    ArrayList<InvFileTF> InvFileTFList = new ArrayList<InvFileTF>(); //create InvFileTF
                    //scan only documents from i onward: earlier documents were
                    //already fully consumed for this word
                    for (int idx_doc = i; idx_doc < temp_wordList.size(); idx_doc++) {
                        ArrayList<String> examined_wordList_doc = temp_wordList.get(idx_doc);
                        int occurrence = Collections.frequency(examined_wordList_doc, word);
                        if(occurrence > 0){ //if found in wordList
                            InvFileTF itemTF = new InvFileTF();
                            //set the attrib value of itemTF
                            itemTF.docnum = idx_doc+1; //for document numbering, using 1 as the first index
                            itemTF.TF_raw = occurrence;
                            if (itemTF.TF_raw > TF_max){
                                TF_max = itemTF.TF_raw; //set the new value for TF_max
                            }
                            InvFileTFList.add(itemTF); //add to InvFileTFList
                            temp_wordList.get(idx_doc).removeAll(Collections.singleton(word)); //remove all occurrences of word in wordList_doc
                        }
                    }
                    dc.invFile.put(word, InvFileTFList); //put to TFList HashTable
                }
            }
        }
    }

    /**
     * lists all the words in the queryList
     * <p>this method also calculates raw TF for each word in each query.
     * Mirrors listDocumentWord but fills qc.invFile with InvFileTF_Query
     * entries; also updates the shared TF_max.
     */
    public static void listQueryWord(QueryContainer qc){
        qc.wordList.clear();
        //tokenize the words in each document
        for(String content : qc.queryList){
            String[] words = content.trim().split(" +");
            ArrayList<String> temp = new ArrayList<String>();
            for(String word : words){
                temp.add(word);
            }
            //insert tokens into wordList
            qc.wordList.add(temp);
        }

        //trying to deep copy the contents of wordList for indexing purpose
        ArrayList<ArrayList<String>> temp_wordList = new ArrayList<ArrayList<String>>();
        for(ArrayList<String> elmt_wordlist : qc.wordList){
            temp_wordList.add(new ArrayList<String>());
            ArrayList<String> str = temp_wordList.get(temp_wordList.size()-1);
            for(String elmt_elmt_wordlist : elmt_wordlist){
                str.add(elmt_elmt_wordlist);
            }
        }

        //enumerate words and its occurrence in the entire document file
        for (int i = 0; i < temp_wordList.size(); i++) {
            while(!(temp_wordList.get(i).isEmpty())){
                //take the first word
                String word = temp_wordList.get(i).get(0);
                if(qc.invFile.containsKey(word)){
                    temp_wordList.get(i).remove(0);
                }
                else{
                    ArrayList<InvFileTF_Query> InvFileTFList = new ArrayList<InvFileTF_Query>(); //create InvFileTF
                    for (int idx_doc = i; idx_doc < temp_wordList.size(); idx_doc++) {
                        ArrayList<String> examined_wordList_doc = temp_wordList.get(idx_doc);
                        int occurrence = Collections.frequency(examined_wordList_doc, word);
                        if(occurrence > 0){ //if found in wordList
                            InvFileTF_Query itemTF = new InvFileTF_Query();
                            //set the attrib value of itemTF
                            itemTF.docnum = idx_doc+1; //for document numbering, using 1 as the first index
                            itemTF.TF_raw = occurrence;
                            if (itemTF.TF_raw > TF_max){
                                TF_max = itemTF.TF_raw; //set the new value for TF_max
                            }
                            InvFileTFList.add(itemTF); //add to InvFileTFList
                            temp_wordList.get(idx_doc).removeAll(Collections.singleton(word)); //remove all occurrences of word in wordList_doc
                        }
                    }
                    qc.invFile.put(word, InvFileTFList); //put to TFList HashTable
                }
            }
        }
    }

    /**
     * calculates TF based on user's choice (Binary,Raw,Logarithmic,Augmented)
     * @param TFType TF calculation method (one of the DOCUMENT_*_TF constants)
     * @param dc container whose invFile TF fields are updated in place
     */
    public static void calculateTF(int TFType, DocumentContainer dc){
        Set<String> words = dc.invFile.keySet();
        ArrayList<String> words_list = new ArrayList<String>(words);
        for(String word : words_list){
            ArrayList<InvFileTF> word_result = dc.invFile.get(word);
            for(InvFileTF elmt : word_result){
                //calculate TF
                switch (TFType) {
                    case DOCUMENT_BINARY_TF:
                        elmt.TF = 1; //its value will be always 1, because the word exists in the doc
                        break;
                    case DOCUMENT_RAW_TF:
                        elmt.TF = elmt.TF_raw;
                        break;
                    case DOCUMENT_LOG_TF:
                        elmt.TF = 1 + Math.log10(elmt.TF_raw); //using base 10 logarithm
                        break;
                    case DOCUMENT_AUGMENTED_TF:
                        elmt.TF = 0.5 + (0.5 * elmt.TF_raw / TF_max);
                        break;
                    default:
                        break;
                }
            }
        }
    }

    /**
     * Query-side counterpart of calculateTF; same TFType semantics, applied
     * to the query inverted file.
     * @param TFType TF calculation method (one of the DOCUMENT_*_TF constants)
     * @param qc container whose invFile TF fields are updated in place
     */
    public static void calculateTF_Query(int TFType, QueryContainer qc){
        Set<String> words = qc.invFile.keySet();
        ArrayList<String> words_list = new ArrayList<String>(words);
        for(String word : words_list){
            ArrayList<InvFileTF_Query> word_result = qc.invFile.get(word);
            for(InvFileTF_Query elmt : word_result){
                //calculate TF
                switch (TFType) {
                    case DOCUMENT_BINARY_TF:
                        elmt.TF = 1; //its value will be always 1, because the word exists in the doc
                        break;
                    case DOCUMENT_RAW_TF:
                        elmt.TF = elmt.TF_raw;
                        break;
                    case DOCUMENT_LOG_TF:
                        elmt.TF = 1 + Math.log10(elmt.TF_raw); //using base 10 logarithm
                        break;
                    case DOCUMENT_AUGMENTED_TF:
                        elmt.TF = 0.5 + (0.5 * elmt.TF_raw / TF_max);
                        break;
                    default:
                        break;
                }
            }
        }
    }

    /**
     * calculates IDF for each word in the document; IDFList is filled in
     * sorted-word order, so consumers must index it via the same sort.
     */
    public static void calculateIDF(DocumentContainer dc){
        Set<String> words = dc.invFile.keySet();
        ArrayList<String> words_list = new ArrayList<String>(words);
        Collections.sort(words_list);
        for(String word : words_list){
            ArrayList<InvFileTF> occurrence = dc.invFile.get(word);
            if(occurrence != null){
                //NOTE(review): integer division here truncates before log10
                //(e.g. 1400/3 -> 466); a floating-point division was likely
                //intended — verify against the ranking results.
                dc.IDFList.add(Math.log10(dc.titleList.size() / occurrence.size())); //add IDF value to IDFList
            }
            else{
                dc.IDFList.add(Math.log10(dc.titleList.size() / 1));
            }
        }
    }

    /**
     * calculates the length of each document (using each word's final TF in each document)
     */
    public static void calculateDocLength(DocumentContainer dc){
        for (ArrayList<String> doc_words : dc.wordList) { //get list of document string in wordList
            Double sum_squareTF = (double) 0;
            for (String word : doc_words){ //get list of words in each document
                //search the TF for current word in TFList
                ArrayList<InvFileTF> word_invfiletf = dc.invFile.get(word);
                for(InvFileTF invfiletf : word_invfiletf){
                    if((invfiletf.docnum - 1) == dc.wordList.indexOf(doc_words)){ //if InvFileTF for the current word is found
                        sum_squareTF += (invfiletf.TF * invfiletf.TF); //square TF
                        break;
                    }
                }
            }
            Double doc_length = Math.sqrt(sum_squareTF);
            dc.doclengthList.add(doc_length);
        }
    }

    /**
     * calculates the length of each query (using each word's final TF in each query)
     */
    public static void calculateQueryLength(QueryContainer qc){
        for (ArrayList<String> doc_words : qc.wordList) { //get list of query string in wordList
            Double sum_squareTF = (double) 0;
            for (String word : doc_words){ //get list of words in each document
                //search the TF for current word in TFList
                ArrayList<InvFileTF_Query> word_invfiletf = qc.invFile.get(word);
                for(InvFileTF_Query invfiletf : word_invfiletf){
                    if((invfiletf.docnum - 1) == qc.wordList.indexOf(doc_words)){ //if InvFileTF for the current word is found
                        sum_squareTF += (invfiletf.TF * invfiletf.TF); //square TF
                        break;
                    }
                }
            }
            Double doc_length = Math.sqrt(sum_squareTF);
            qc.querylengthList.add(doc_length);
        }
    }

    /**
     * applies TF x IDF calculation as the weight of a word
     */
    public static void applyTFIDF(DocumentContainer dc){
        Set<String> words = dc.invFile.keySet();
        ArrayList<String> words_list = new ArrayList<String>(words);
        Collections.sort(words_list); //same sort order as calculateIDF, so indexes align
        for(String word : words_list){ //iterate through the list of words
            ArrayList<InvFileTF> word_result = dc.invFile.get(word);
            for(InvFileTF invfiletf : word_result){ //iterate through the list of InvFileTF of the current word
                invfiletf.TF = invfiletf.TF * dc.IDFList.get(words_list.indexOf(word)); //multiply TF by IDF
            }
        }
    }

    /**
     * applies TF x IDF calculation as the weight of a word (query side).
     */
    public static void applyTFIDF_Query(DocumentContainer dc, QueryContainer qc){
        Set<String> words = qc.invFile.keySet();
        ArrayList<String> words_list = new ArrayList<String>(words);
        Collections.sort(words_list);
        for(String word : words_list){ //iterate through the list of words
            ArrayList<InvFileTF_Query> word_result = qc.invFile.get(word);
            for(InvFileTF_Query invfiletf : word_result){ //iterate through the list of InvFileTF_Query of the current word
                double IDF = 0;
                if(dc.invFile.get(word) != null){
                    //NOTE(review): words_list is built from the QUERY vocabulary
                    //but indexes dc.IDFList, which is ordered by the sorted
                    //DOCUMENT vocabulary — the positions only match when both
                    //vocabularies are identical. Verify this lookup.
                    IDF = dc.IDFList.get(words_list.indexOf(word));
                }
                else{
                    IDF = 0;
                }
                invfiletf.TF = invfiletf.TF * IDF; //multiply TF by IDF
            }
        }
    }

    /**
     * applies normalization to current TF value of each word in each document
     */
    public static void applyNormalization(DocumentContainer dc){
        Set<String> words = dc.invFile.keySet();
        ArrayList<String> words_list = new ArrayList<String>(words);
        for(String word : words_list){ //iterate through the list of words
            ArrayList<InvFileTF> word_result = dc.invFile.get(word);
            for(InvFileTF invfiletf : word_result){ //iterate through the list of InvFileTF of the current word
                invfiletf.TF = invfiletf.TF / dc.doclengthList.get((invfiletf.docnum) - 1); //divide TF with document length
            }
        }
    }

    /**
     * applies normalization to current TF value of each word in each query
     */
    public static void
applyNormalization_Query(QueryContainer qc){ Set<String> words = qc.invFile.keySet(); ArrayList<String> words_list = new ArrayList<String>(words); for(String word : words_list){ //iterate through the list of words ArrayList<InvFileTF_Query> word_result = qc.invFile.get(word); for(InvFileTF_Query invfiletf : word_result){ //iterate through the list of InvFileTF of the current word invfiletf.TF = invfiletf.TF / qc.querylengthList.get((invfiletf.docnum) - 1); //divide TF with document length } } } public static void printResult(DocumentContainer dc){ //System.out.println("Indexing result:"); PrintWriter writer; try { writer = new PrintWriter("invertedfile.txt"); /*BEGIN OF TEST*/ Set<String> words = dc.invFile.keySet(); ArrayList<String> words_list = new ArrayList<String>(words); Collections.sort(words_list); for(String word : words_list){ ArrayList<InvFileTF> word_result = dc.invFile.get(word); for(InvFileTF elmt : word_result){ if(MainBackend.document_flagIDF == true){ writer.println(word+"\t"+elmt.docnum+"\t"+elmt.TF_raw+"\t"+dc.IDFList.get(words_list.indexOf(word))+"\t"+elmt.TF); } else{ writer.println(word+"\t"+elmt.docnum+"\t"+elmt.TF_raw+"\t"+"-"+"\t"+elmt.TF); } } } /*END OF TEST*/ writer.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } } public static void printResult_Query(DocumentContainer dc, QueryContainer qc){ System.out.println("Indexing result:"); PrintWriter writer; try { writer = new PrintWriter("invertedfile_query.txt"); /*BEGIN OF TEST*/ Set<String> words = qc.invFile.keySet(); ArrayList<String> words_list = new ArrayList<String>(words); Collections.sort(words_list); for(String word : words_list){ ArrayList<InvFileTF_Query> word_result = qc.invFile.get(word); for(InvFileTF_Query elmt : word_result){ writer.println(word+"\t"+elmt.docnum+"\t"+elmt.TF_raw+"\t"+"-"+"\t"+elmt.TF); } } /*END OF TEST*/ writer.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.workdocs.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for aborting an in-progress document version upload.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workdocs-2016-05-01/AbortDocumentVersionUpload"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbortDocumentVersionUploadRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as in
     * accessing the API using AWS credentials.
     * </p>
     */
    private String authenticationToken;

    /**
     * <p>
     * The ID of the document.
     * </p>
     */
    private String documentId;

    /**
     * <p>
     * The ID of the version.
     * </p>
     */
    private String versionId;

    /**
     * Sets the Amazon WorkDocs authentication token. Do not set this field when using administrative API actions, as
     * in accessing the API using AWS credentials.
     *
     * @param authenticationToken
     *        Amazon WorkDocs authentication token.
     */
    public void setAuthenticationToken(String authenticationToken) {
        this.authenticationToken = authenticationToken;
    }

    /**
     * Returns the Amazon WorkDocs authentication token, or {@code null} when the request is made with AWS
     * credentials.
     *
     * @return Amazon WorkDocs authentication token.
     */
    public String getAuthenticationToken() {
        return this.authenticationToken;
    }

    /**
     * Fluent variant of {@link #setAuthenticationToken(String)}.
     *
     * @param authenticationToken
     *        Amazon WorkDocs authentication token.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AbortDocumentVersionUploadRequest withAuthenticationToken(String authenticationToken) {
        setAuthenticationToken(authenticationToken);
        return this;
    }

    /**
     * Sets the ID of the document.
     *
     * @param documentId
     *        The ID of the document.
     */
    public void setDocumentId(String documentId) {
        this.documentId = documentId;
    }

    /**
     * Returns the ID of the document.
     *
     * @return The ID of the document.
     */
    public String getDocumentId() {
        return this.documentId;
    }

    /**
     * Fluent variant of {@link #setDocumentId(String)}.
     *
     * @param documentId
     *        The ID of the document.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AbortDocumentVersionUploadRequest withDocumentId(String documentId) {
        setDocumentId(documentId);
        return this;
    }

    /**
     * Sets the ID of the version.
     *
     * @param versionId
     *        The ID of the version.
     */
    public void setVersionId(String versionId) {
        this.versionId = versionId;
    }

    /**
     * Returns the ID of the version.
     *
     * @return The ID of the version.
     */
    public String getVersionId() {
        return this.versionId;
    }

    /**
     * Fluent variant of {@link #setVersionId(String)}.
     *
     * @param versionId
     *        The ID of the version.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AbortDocumentVersionUploadRequest withVersionId(String versionId) {
        setVersionId(versionId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        // NOTE: a field's separator comma is appended whenever that field is set, even when
        // no later field follows — this preserves the generator's exact output format.
        if (getAuthenticationToken() != null) {
            buf.append("AuthenticationToken: ").append("***Sensitive Data Redacted***").append(",");
        }
        if (getDocumentId() != null) {
            buf.append("DocumentId: ").append(getDocumentId()).append(",");
        }
        if (getVersionId() != null) {
            buf.append("VersionId: ").append(getVersionId());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof AbortDocumentVersionUploadRequest)) {
            return false;
        }
        AbortDocumentVersionUploadRequest other = (AbortDocumentVersionUploadRequest) obj;
        // Objects.equals reproduces the original null-xor/equals dance field by field.
        return java.util.Objects.equals(other.getAuthenticationToken(), this.getAuthenticationToken())
                && java.util.Objects.equals(other.getDocumentId(), this.getDocumentId())
                && java.util.Objects.equals(other.getVersionId(), this.getVersionId());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0) as the
        // hand-rolled prime loop it replaces, so hash values are unchanged.
        return java.util.Objects.hash(getAuthenticationToken(), getDocumentId(), getVersionId());
    }

    @Override
    public AbortDocumentVersionUploadRequest clone() {
        return (AbortDocumentVersionUploadRequest) super.clone();
    }

}
package com.box.androidsdk.share.fragments;

import android.app.Activity;
import android.app.DatePickerDialog;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.DatePicker;
import android.widget.Switch;
import android.widget.TextView;

import com.box.androidsdk.content.BoxException;
import com.box.androidsdk.content.BoxFutureTask;
import com.box.androidsdk.content.models.BoxBookmark;
import com.box.androidsdk.content.models.BoxFile;
import com.box.androidsdk.content.models.BoxFolder;
import com.box.androidsdk.content.models.BoxItem;
import com.box.androidsdk.content.models.BoxSharedLink;
import com.box.androidsdk.content.requests.BoxRequestItem;
import com.box.androidsdk.content.requests.BoxRequestUpdateSharedItem;
import com.box.androidsdk.content.requests.BoxRequestsFile;
import com.box.androidsdk.content.requests.BoxRequestsFolder;
import com.box.androidsdk.content.requests.BoxResponse;
import com.box.androidsdk.share.R;
import com.box.androidsdk.share.internal.models.BoxFeatures;

import java.net.HttpURLConnection;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;

/**
 * Fragment that lets the user view and edit the access settings of a shared link on a
 * Box item: access level, download permission, link password, and link expiration date.
 * All server mutations go through {@code mController} and complete on
 * {@link #mBoxItemListener}, which re-renders the UI from the refreshed item.
 */
public class SharedLinkAccessFragment extends BoxFragment
        implements DatePickerDialog.OnDateSetListener, PositiveNegativeDialogFragment.OnPositiveOrNegativeButtonClickedListener {

    // Fragment-manager tags used to ensure only one instance of each dialog is shown.
    private static final String DATE_FRAGMENT_TAG = "datePicker";
    private static final String PASSWORD_FRAGMENT_TAG = "passwordFrag";
    private static final String ACCESS_RADIAL_FRAGMENT_TAG = "accessFrag";

    private View mAccessLayout;
    private TextView mAccessText;
    private Button mPasswordButton;
    private Button mExpiresButton;
    private Switch mAllowDownloadsBtn;
    private Switch mRequirePasswordBtn;
    private Switch mExpireLinkBtn;
    private View mPasswordHeader;
    private View mPasswordSection;
    private View mLinkExpirationSection;
    // Whether the server supports password-protected shared links; resolved asynchronously in onCreate.
    private boolean mPasswordProtectedLinksSupported = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Asynchronously query feature support; the callback may arrive before or after the view exists.
        mController.getSupportedFeatures().addOnCompletedListener(new BoxFutureTask.OnCompletedListener<BoxFeatures>() {
            @Override
            public void onCompleted(BoxResponse<BoxFeatures> response) {
                if (response.isSuccess()) {
                    mPasswordProtectedLinksSupported = response.getResult().hasPasswordProtectForSharedLinks();
                } else {
                    mPasswordProtectedLinksSupported = true; //Defaulting to true - if they aren't indeed supported, this will fail later when attempting to set password.
                }
                // Re-render only if the view is already up and the item has the fields setupUi needs.
                if (mPasswordProtectedLinksSupported && getView() != null && checkIfHasRequiredFields(mShareItem)) {
                    updateUi();
                }
            }
        });
    }

    /**
     * Inflates the layout, wires up all click/toggle listeners, and either renders the UI
     * immediately or refreshes the item first when required fields are missing.
     */
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_shared_link_access, container, false);
        mAccessLayout = view.findViewById(R.id.shared_link_access_layout);
        mAccessLayout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showAccessChooserDialog();
            }
        });
        mAccessText = (TextView) view.findViewById(R.id.shared_link_access_text);
        mAccessText.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showAccessChooserDialog();
            }
        });
        mPasswordButton = (Button)view.findViewById(R.id.shared_link_password_btn);
        mPasswordButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showPasswordChooserDialog();
            }
        });
        mExpiresButton = (Button)view.findViewById(R.id.shared_link_expires_on_btn);
        mPasswordSection = view.findViewById(R.id.password_section);
        mLinkExpirationSection = view.findViewById(R.id.link_expiration_section);
        mExpiresButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Pre-select the current expiration date in the picker.
                showDatePicker(mShareItem.getSharedLink().getUnsharedDate());
            }
        });
        mAllowDownloadsBtn = (Switch)view.findViewById(R.id.shared_link_allow_download_btn);
        mAllowDownloadsBtn.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (mShareItem instanceof BoxBookmark || mShareItem.getSharedLink().getPermissions().getCanDownload() == isChecked ){
                    // if there is no change or we are busy with another task then do nothing.
                    return;
                }
                changeDownloadPermission(isChecked);
            }
        });
        mRequirePasswordBtn = (Switch)view.findViewById(R.id.share_link_require_password_btn);
        mRequirePasswordBtn.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (mShareItem.getSharedLink().getIsPasswordEnabled() == isChecked ){
                    // if there is no change or we are busy with another task then do nothing.
                    return;
                }
                if (isChecked) {
                    // Turning the requirement on: ask for the password first; the request is sent from the dialog callback.
                    showPasswordChooserDialog();
                } else {
                    // Turning it off: clear the password on the server immediately.
                    showSpinner(R.string.box_sharesdk_updating_link_access, R.string.boxsdk_Please_wait);
                    mController.executeRequest(BoxItem.class, mController.getCreatedSharedLinkRequest(mShareItem).setPassword(null)).addOnCompletedListener(mBoxItemListener);
                }
            }
        });
        mExpireLinkBtn = (Switch)view.findViewById(R.id.shared_link_expire_link_btn);
        mExpireLinkBtn.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if ((mShareItem.getSharedLink().getUnsharedDate() != null) == isChecked) {
                    // if there is no change or we are busy with another task then do nothing.
                    return;
                }
                if (isChecked) {
                    // Enabling expiration: let the user pick a date (default today).
                    showDatePicker(new Date());
                } else {
                    try {
                        showSpinner(R.string.box_sharesdk_updating_link_access, R.string.boxsdk_Please_wait);
                        mController.executeRequest(BoxItem.class, mController.getCreatedSharedLinkRequest(mShareItem).setRemoveUnsharedAtDate()).addOnCompletedListener(mBoxItemListener);
                    } catch (ParseException e) {
                        // NOTE(review): failure here is silently dropped apart from dismissing the spinner — confirm intended.
                        dismissSpinner();
                    }
                }
            }
        });
        mPasswordHeader = view.findViewById(R.id.box_sharesdk_password_header);
        if (!checkIfHasRequiredFields(mShareItem)){
            // we need to refresh since the item given to us is not complete.
            refreshShareItemInfo();
        } else {
            setupUi();
        }
        return view;
    }

    /**
     * Dialog confirmation callback: applies the chosen password or access level.
     */
    @Override
    public void onPositiveButtonClicked(PositiveNegativeDialogFragment fragment) {
        if (fragment instanceof PasswordDialogFragment){
            try {
                showSpinner();
                changePassword(((PasswordDialogFragment) fragment).getPassword());
            } catch (Exception e){
                dismissSpinner();
                mController.showToast(getActivity(), "invalid password");
            }
        } else if (fragment instanceof AccessRadialDialogFragment){
            changeAccess(((AccessRadialDialogFragment)fragment).getAccess());
        }
    }

    @Override
    public void onNegativeButtonClicked(PositiveNegativeDialogFragment fragment) {
        // reset ui since user didn't choose anything.
        setupUi();
    }

    /**
     * Updates the UI with the provided BoxItem
     */
    private void updateUi(){
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                setupUi();
            }
        });
    }

    /**
     * Displays the DatePickerFragment
     *
     * @param date the default date that should be selected
     */
    private void showDatePicker(Date date){
        if (getFragmentManager().findFragmentByTag(DATE_FRAGMENT_TAG) != null){
            // A picker is already showing; don't stack another.
            return;
        }
        DatePickerFragment fragment = DatePickerFragment.createFragment(date, this, this);
        fragment.show(getActivity().getSupportFragmentManager(), DATE_FRAGMENT_TAG);
    }

    /**
     * Displays the dialog for the user to set a password for the shared link
     */
    private void showPasswordChooserDialog(){
        if (getFragmentManager().findFragmentByTag(PASSWORD_FRAGMENT_TAG) != null){
            return;
        }
        PasswordDialogFragment fragment = PasswordDialogFragment.createFragment(R.string.box_sharesdk_password, R.string.box_sharesdk_set_password, R.string.box_sharesdk_ok, R.string.box_sharesdk_cancel, this);
        fragment.show(getActivity().getSupportFragmentManager(), PASSWORD_FRAGMENT_TAG);
    }

    /**
     * Displays the access dialog for the user to select the appropriate access
     */
    private void showAccessChooserDialog(){
        if (getFragmentManager().findFragmentByTag(ACCESS_RADIAL_FRAGMENT_TAG) != null){
            return;
        }
        if (mShareItem.getAllowedSharedLinkAccessLevels() != null){
            AccessRadialDialogFragment fragment = AccessRadialDialogFragment.createFragment(mShareItem, this);
            fragment.show(getActivity().getSupportFragmentManager(), ACCESS_RADIAL_FRAGMENT_TAG);
        } else {
            // Allowed access levels missing — most likely a fetch failure.
            mController.showToast(getContext(),R.string.box_sharesdk_network_error);
        }
    }

    /**
     * Initializes the UI
     */
    private void setupUi(){
        BoxSharedLink link = mShareItem.getSharedLink();
        if (link != null) {
            BoxSharedLink.Access access = link.getEffectiveAccess();
            if (access != null) {
                String accessDescription = "";
                switch (access) {
                    case OPEN:
                        accessDescription = getResources().getString(R.string.box_sharesdk_access_public);
                        break;
                    case COLLABORATORS:
                        if (mShareItem instanceof BoxFile){
                            accessDescription = getResources().getString(R.string.box_sharesdk_access_collaborator_file);
                        } else {
                            accessDescription = getResources().getString(R.string.box_sharesdk_access_collaborator);
                        }
                        break;
                    case COMPANY:
                        accessDescription = getResources().getString(R.string.box_sharesdk_access_company);
                }
                mAccessText.setText(accessDescription);
            }
            // Downloads: bookmarks have no download permission; collaborator-only links hide the toggle.
            if (mShareItem instanceof BoxBookmark || (access != null && access == BoxSharedLink.Access.COLLABORATORS)) {
                hideView(mAllowDownloadsBtn);
            } else {
                showView(mAllowDownloadsBtn);
                mAllowDownloadsBtn.setChecked(link.getPermissions() != null && link.getPermissions().getCanDownload());
            }
            // Password: hidden for collaborator-only links or when the server doesn't support it.
            if ((access != null && access == BoxSharedLink.Access.COLLABORATORS) || !mPasswordProtectedLinksSupported){
                hideView(mPasswordSection);
            } else {
                showView(mPasswordSection);
                mRequirePasswordBtn.setChecked(link.getIsPasswordEnabled());
                if (link.getIsPasswordEnabled()) {
                    mPasswordButton.setText(createTitledSpannable(getResources().getString(R.string.box_sharesdk_password), "*****"));
                    showView(mPasswordButton);
                } else {
                    hideView(mPasswordButton);
                }
            }
            // Expiration section visibility follows the same feature flag as passwords.
            // NOTE(review): gating link expiration on the password feature flag looks intentional here — confirm.
            if (mPasswordProtectedLinksSupported) {
                showView(mLinkExpirationSection);
                mExpireLinkBtn.setChecked(link.getUnsharedDate() != null);
                if (link.getUnsharedDate() != null) {
                    mExpiresButton.setText(createTitledSpannable(getResources().getString(R.string.box_sharesdk_expire_on), SimpleDateFormat.getDateInstance().format(link.getUnsharedDate())));
                    showView(mExpiresButton);
                } else {
                    hideView(mExpiresButton);
                }
            } else {
                hideView(mLinkExpirationSection);
            }
        } else {
            // No shared link on the item — nothing to edit; bail out of the screen.
            mController.showToast(getActivity(),getText(R.string.box_sharesdk_problem_accessing_this_shared_link));
            getActivity().finish();
        }
    }

    /**
     * Modifies the download permssion of the share item
     *
     * @param canDownload whether or not the item can be downloaded
     */
    private void changeDownloadPermission(boolean canDownload){
        if (mShareItem instanceof BoxFile) {
            showSpinner(R.string.box_sharesdk_updating_link_access, R.string.boxsdk_Please_wait);
            mController.executeRequest(BoxItem.class, ((BoxRequestsFile.UpdatedSharedFile) mController.getCreatedSharedLinkRequest(mShareItem)).setCanDownload(canDownload)).addOnCompletedListener(mBoxItemListener);
        } else if (mShareItem instanceof BoxFolder) {
            showSpinner(R.string.box_sharesdk_updating_link_access, R.string.boxsdk_Please_wait);
            mController.executeRequest(BoxItem.class, ((BoxRequestsFolder.UpdateSharedFolder) mController.getCreatedSharedLinkRequest(mShareItem)).setCanDownload(canDownload)).addOnCompletedListener(mBoxItemListener);
        } else if (mShareItem instanceof BoxBookmark) {
            mController.showToast(getActivity(), "Bookmarks do not have a permission that can be changed.");
        }
    }

    /**
     * Modifies the share link access
     *
     * @param access the share link access level
     */
    private void changeAccess(final BoxSharedLink.Access access){
        if (access == null){
            // Should not be possible to get here.
            mController.showToast(getActivity(), "No access chosen");
            return;
        }
        showSpinner(R.string.box_sharesdk_updating_link_access, R.string.boxsdk_Please_wait);
        mController.executeRequest(BoxItem.class, mController.getCreatedSharedLinkRequest(mShareItem).setAccess(access)).addOnCompletedListener(mBoxItemListener);
    }

    /**
     * Creates a new instance of this fragment configured for the given item.
     *
     * @param boxItem the item whose shared link should be edited
     * @return a new SharedLinkAccessFragment with its arguments set
     */
    public static SharedLinkAccessFragment newInstance(BoxItem boxItem) {
        Bundle args = BoxFragment.getBundle(boxItem);
        SharedLinkAccessFragment fragment = new SharedLinkAccessFragment();
        fragment.setArguments(args);
        return fragment;
    }

    /**
     * Handles when a date is selected on the DatePickerFragment
     *
     * @param view the DatePicker view
     * @param year the year
     * @param month the month
     * @param day the day
     */
    public void onDateSet(DatePicker view, int year, int month, int day) {
        // Do something with the date chosen by the user
        GregorianCalendar calendar = new GregorianCalendar(year, month, day);
        try {
            showSpinner(R.string.box_sharesdk_updating_link_access, R.string.boxsdk_Please_wait);
            mController.executeRequest(BoxItem.class, mController.getCreatedSharedLinkRequest(mShareItem).setUnsharedAt(calendar.getTime())).addOnCompletedListener(mBoxItemListener);
        } catch (Exception e){
            dismissSpinner();
            mController.showToast(getActivity(), "invalid time selected");
        }
    }

    /**
     * Sets the password to the provided string
     *
     * @param password the password to set on the shared item
     * @throws ParseException
     */
    private void changePassword(final String password) throws ParseException{
        mController.executeRequest(BoxItem.class, mController.getCreatedSharedLinkRequest(mShareItem).setPassword(password)).addOnCompletedListener(mBoxItemListener);
    }

    /**
     * Check if the required fields are available on the BoxItem
     *
     * @param shareItem the BoxItem to verify
     * @return whether or not all the required fields are present
     */
    private boolean checkIfHasRequiredFields(BoxItem shareItem){
        return shareItem.getSharedLink() != null && shareItem.getAllowedSharedLinkAccessLevels() != null;
    }

    /**
     * Refreshes the information of the shared link
     */
    public void refreshShareItemInfo() {
        showSpinner();
        mController.fetchItemInfo(mShareItem).addOnCompletedListener(mBoxItemListener);
    }

    // Shared completion listener for every request this fragment issues: on success it
    // adopts the refreshed item and re-renders; on failure it surfaces an error toast
    // (where appropriate) and restores the previous UI state.
    private BoxFutureTask.OnCompletedListener<BoxItem> mBoxItemListener =
            new BoxFutureTask.OnCompletedListener<BoxItem>() {
        @Override
        public void onCompleted(final BoxResponse<BoxItem> response) {
            dismissSpinner();
            final Activity activity = getActivity();
            if (activity == null) {
                // Fragment is detached; nothing safe to update.
                return;
            }
            activity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (response.isSuccess()) {
                        if (response.getRequest() instanceof BoxRequestItem) {
                            if (checkIfHasRequiredFields(response.getResult())) {
                                mShareItem = response.getResult();
                                updateUi();
                            }
                        }
                    } else {
                        if (response.getException() instanceof BoxException) {
                            BoxException boxException = (BoxException) response.getException();
                            int responseCode = boxException.getResponseCode();
                            if (responseCode == HttpURLConnection.HTTP_NOT_MODIFIED) {
                                // Cached item is still current; nothing to do.
                                return;
                            }
                            if (responseCode == HttpURLConnection.HTTP_FORBIDDEN) {
                                mController.showToast(getActivity(), R.string.box_sharesdk_insufficient_permissions);
                                setupUi();
                                return;
                            }
                        }
                        // reset ui to previous object.
                        if (response.getRequest() instanceof BoxRequestItem && mShareItem.getId().equals(((BoxRequestItem) response.getRequest()).getId())) {
                            if (response.getRequest() instanceof BoxRequestUpdateSharedItem) {
                                mController.showToast(getActivity(), R.string.box_sharesdk_unable_to_modify_toast);
                            } else {
                                mController.showToast(getActivity(), R.string.box_sharesdk_problem_accessing_this_shared_link);
                            }
                        }
                        setupUi();
                    }
                }
            });
        }
    };
}
/*
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex.rxjava3.parallel;

import static org.junit.Assert.*;

import java.util.List;
import java.util.concurrent.TimeUnit;

import org.junit.Test;
import org.reactivestreams.Subscriber;

import io.reactivex.rxjava3.annotations.NonNull;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.exceptions.TestException;
import io.reactivex.rxjava3.functions.*;
import io.reactivex.rxjava3.internal.functions.Functions;
import io.reactivex.rxjava3.internal.subscriptions.BooleanSubscription;
import io.reactivex.rxjava3.operators.ConditionalSubscriber;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import io.reactivex.rxjava3.schedulers.Schedulers;
import io.reactivex.rxjava3.testsupport.TestHelper;

/** Unit tests for {@code ParallelFlowable.map}, covering plain, conditional and async paths. */
public class ParallelMapTest extends RxJavaTest {

    /** Asserts that every undeliverable error captured by the plugin hook wraps a TestException. */
    private static void assertAllCausedByTestException(List<Throwable> errors) {
        assertFalse(errors.isEmpty());
        for (Throwable ex : errors) {
            assertTrue(ex.toString(), ex.getCause() instanceof TestException);
        }
    }

    @Test
    public void subscriberCount() {
        ParallelFlowableTest.checkSubscriberCount(Flowable.range(1, 5).parallel()
        .map(Functions.identity()));
    }

    @Test
    public void doubleFilter() {
        // map followed by two conditional filters: only multiples of 6 survive.
        Flowable.range(1, 10)
        .parallel()
        .map(Functions.<Integer>identity())
        .filter(v -> v % 2 == 0)
        .filter(v -> v % 3 == 0)
        .sequential()
        .test()
        .assertResult(6);
    }

    @Test
    public void doubleFilterAsync() {
        // Same pipeline as doubleFilter but hopping onto the computation scheduler.
        Flowable.range(1, 10)
        .parallel()
        .runOn(Schedulers.computation())
        .map(Functions.<Integer>identity())
        .filter(v -> v % 2 == 0)
        .filter(v -> v % 3 == 0)
        .sequential()
        .test()
        .awaitDone(5, TimeUnit.SECONDS)
        .assertResult(6);
    }

    @Test
    public void doubleError() {
        // ParallelInvalid signals a second, undeliverable error that must reach the plugin hook.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new ParallelInvalid()
            .map(Functions.<Object>identity())
            .sequential()
            .test()
            .assertFailure(TestException.class);

            assertAllCausedByTestException(errors);
        } finally {
            RxJavaPlugins.reset();
        }
    }

    @Test
    public void doubleError2() {
        // Same as doubleError, but through the conditional (fused filter) path.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new ParallelInvalid()
            .map(Functions.<Object>identity())
            .filter(Functions.alwaysTrue())
            .sequential()
            .test()
            .assertFailure(TestException.class);

            assertAllCausedByTestException(errors);
        } finally {
            RxJavaPlugins.reset();
        }
    }

    @Test
    public void error() {
        Flowable.error(new TestException())
        .parallel()
        .map(Functions.<Object>identity())
        .sequential()
        .test()
        .assertFailure(TestException.class);
    }

    @Test
    public void mapCrash() {
        // A mapper that throws must terminate the sequence with that exception.
        Flowable.just(1)
        .parallel()
        .map(v -> { throw new TestException(); })
        .sequential()
        .test()
        .assertFailure(TestException.class);
    }

    @Test
    public void mapCrashConditional() {
        // Crash inside the conditional (filter-fused) subscriber path.
        Flowable.just(1)
        .parallel()
        .map(v -> { throw new TestException(); })
        .filter(Functions.alwaysTrue())
        .sequential()
        .test()
        .assertFailure(TestException.class);
    }

    @Test
    public void mapCrashConditional2() {
        // Crash on the conditional path while running asynchronously.
        Flowable.just(1)
        .parallel()
        .runOn(Schedulers.computation())
        .map(v -> { throw new TestException(); })
        .filter(Functions.alwaysTrue())
        .sequential()
        .test()
        .awaitDone(5, TimeUnit.SECONDS)
        .assertFailure(TestException.class);
    }

    @Test
    public void invalidSubscriberCount() {
        TestHelper.checkInvalidParallelSubscribers(
            Flowable.range(1, 10).parallel()
            .map(v -> v)
        );
    }

    @Test
    public void doubleOnSubscribe() {
        TestHelper.checkDoubleOnSubscribeParallel(
            p -> p.map(v -> v)
        );

        TestHelper.checkDoubleOnSubscribeParallel(
            p -> p.map(v -> v)
            .filter(v -> true)
        );
    }

    @Test
    public void conditionalCancelIgnored() {
        // A source that keeps calling tryOnNext after a crash; the second value must be ignored.
        Flowable<Integer> source = new Flowable<Integer>() {
            @Override
            protected void subscribeActual(@NonNull Subscriber<@NonNull ? super @NonNull Integer> s) {
                @SuppressWarnings("unchecked")
                ConditionalSubscriber<Integer> subscriber = (ConditionalSubscriber<Integer>)s;
                subscriber.onSubscribe(new BooleanSubscription());
                subscriber.tryOnNext(1);
                subscriber.tryOnNext(2);
            }
        };

        ParallelFlowable.fromArray(source)
        .map(v -> { throw new TestException(); })
        .filter(v -> true)
        .sequential()
        .test()
        .assertFailure(TestException.class);
    }
}
/** * <copyright> * </copyright> * * $Id$ */ package org.wso2.developerstudio.eclipse.gmf.esb.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.InternalEList; import org.wso2.developerstudio.eclipse.gmf.esb.ClassMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ClassMediatorInputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.ClassMediatorOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.ClassProperty; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Class Mediator</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ClassMediatorImpl#getClassName <em>Class Name</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ClassMediatorImpl#getProperties <em>Properties</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ClassMediatorImpl#getInputConnector <em>Input Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ClassMediatorImpl#getOutputConnector <em>Output Connector</em>}</li> * </ul> * * @generated */ public class ClassMediatorImpl extends MediatorImpl implements ClassMediator { /** * The default value of the '{@link #getClassName() <em>Class Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getClassName() * @generated * @ordered */ protected static final String CLASS_NAME_EDEFAULT = ""; /** * The cached value of the '{@link #getClassName() <em>Class Name</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getClassName() * @generated * @ordered */ protected String className = CLASS_NAME_EDEFAULT; /** * The cached value of the '{@link #getProperties() <em>Properties</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getProperties() * @generated * @ordered */ protected EList<ClassProperty> properties; /** * The cached value of the '{@link #getInputConnector() <em>Input Connector</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getInputConnector() * @generated * @ordered */ protected ClassMediatorInputConnector inputConnector; /** * The cached value of the '{@link #getOutputConnector() <em>Output Connector</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getOutputConnector() * @generated * @ordered */ protected ClassMediatorOutputConnector outputConnector; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ClassMediatorImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return EsbPackage.Literals.CLASS_MEDIATOR; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getClassName() { return className; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setClassName(String newClassName) { String oldClassName = className; className = newClassName; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.CLASS_MEDIATOR__CLASS_NAME, oldClassName, className)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<ClassProperty> getProperties() { if (properties == null) { properties = new EObjectContainmentEList<ClassProperty>(ClassProperty.class, this, EsbPackage.CLASS_MEDIATOR__PROPERTIES); } return properties; } /** * <!-- begin-user-doc --> * <!-- end-user-doc 
--> * @generated */ public ClassMediatorInputConnector getInputConnector() { return inputConnector; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetInputConnector(ClassMediatorInputConnector newInputConnector, NotificationChain msgs) { ClassMediatorInputConnector oldInputConnector = inputConnector; inputConnector = newInputConnector; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR, oldInputConnector, newInputConnector); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setInputConnector(ClassMediatorInputConnector newInputConnector) { if (newInputConnector != inputConnector) { NotificationChain msgs = null; if (inputConnector != null) msgs = ((InternalEObject)inputConnector).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR, null, msgs); if (newInputConnector != null) msgs = ((InternalEObject)newInputConnector).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR, null, msgs); msgs = basicSetInputConnector(newInputConnector, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR, newInputConnector, newInputConnector)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ClassMediatorOutputConnector getOutputConnector() { return outputConnector; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetOutputConnector(ClassMediatorOutputConnector newOutputConnector, NotificationChain msgs) { ClassMediatorOutputConnector oldOutputConnector = outputConnector; outputConnector = newOutputConnector; if (eNotificationRequired()) { 
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR, oldOutputConnector, newOutputConnector); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setOutputConnector(ClassMediatorOutputConnector newOutputConnector) { if (newOutputConnector != outputConnector) { NotificationChain msgs = null; if (outputConnector != null) msgs = ((InternalEObject)outputConnector).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR, null, msgs); if (newOutputConnector != null) msgs = ((InternalEObject)newOutputConnector).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR, null, msgs); msgs = basicSetOutputConnector(newOutputConnector, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR, newOutputConnector, newOutputConnector)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case EsbPackage.CLASS_MEDIATOR__PROPERTIES: return ((InternalEList<?>)getProperties()).basicRemove(otherEnd, msgs); case EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR: return basicSetInputConnector(null, msgs); case EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR: return basicSetOutputConnector(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case EsbPackage.CLASS_MEDIATOR__CLASS_NAME: return getClassName(); case EsbPackage.CLASS_MEDIATOR__PROPERTIES: return getProperties(); case 
EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR: return getInputConnector(); case EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR: return getOutputConnector(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case EsbPackage.CLASS_MEDIATOR__CLASS_NAME: setClassName((String)newValue); return; case EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR: setInputConnector((ClassMediatorInputConnector)newValue); return; case EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR: setOutputConnector((ClassMediatorOutputConnector)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case EsbPackage.CLASS_MEDIATOR__CLASS_NAME: setClassName(CLASS_NAME_EDEFAULT); return; case EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR: setInputConnector((ClassMediatorInputConnector)null); return; case EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR: setOutputConnector((ClassMediatorOutputConnector)null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case EsbPackage.CLASS_MEDIATOR__CLASS_NAME: return CLASS_NAME_EDEFAULT == null ? 
className != null : !CLASS_NAME_EDEFAULT.equals(className); case EsbPackage.CLASS_MEDIATOR__PROPERTIES: return properties != null && !properties.isEmpty(); case EsbPackage.CLASS_MEDIATOR__INPUT_CONNECTOR: return inputConnector != null; case EsbPackage.CLASS_MEDIATOR__OUTPUT_CONNECTOR: return outputConnector != null; } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (className: "); result.append(className); result.append(')'); return result.toString(); } } // ClassMediatorImpl
/* Copyright (c) 2012-2014 Boundless and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Distribution License v1.0
 * which accompanies this distribution, and is available at
 * https://www.eclipse.org/org/documents/edl-v10.html
 *
 * Contributors:
 * Gabriel Roldan (Boundless) - initial implementation
 */
package org.locationtech.geogig.api.porcelain;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import java.util.Collection;
import java.util.List;
import java.util.Set;

import org.eclipse.jdt.annotation.Nullable;
import org.locationtech.geogig.api.AbstractGeoGigOp;
import org.locationtech.geogig.api.NodeRef;
import org.locationtech.geogig.api.ObjectId;
import org.locationtech.geogig.api.Ref;
import org.locationtech.geogig.api.RevCommit;
import org.locationtech.geogig.api.RevObject.TYPE;
import org.locationtech.geogig.api.RevTree;
import org.locationtech.geogig.api.RevTreeBuilder;
import org.locationtech.geogig.api.SymRef;
import org.locationtech.geogig.api.hooks.Hookable;
import org.locationtech.geogig.api.plumbing.FindTreeChild;
import org.locationtech.geogig.api.plumbing.RefParse;
import org.locationtech.geogig.api.plumbing.ResolveTreeish;
import org.locationtech.geogig.api.plumbing.RevObjectParse;
import org.locationtech.geogig.api.plumbing.RevParse;
import org.locationtech.geogig.api.plumbing.UpdateRef;
import org.locationtech.geogig.api.plumbing.UpdateSymRef;
import org.locationtech.geogig.api.plumbing.WriteBack;
import org.locationtech.geogig.api.plumbing.merge.Conflict;
import org.locationtech.geogig.api.porcelain.CheckoutException.StatusCode;
import org.locationtech.geogig.api.porcelain.ConfigOp.ConfigAction;
import org.locationtech.geogig.api.porcelain.ConfigOp.ConfigScope;
import org.locationtech.geogig.di.CanRunDuringConflict;

import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * Updates objects in the working tree to match the version in the index or the specified tree. If
 * no {@link #addPath paths} are given, will also update {@link Ref#HEAD HEAD} to set the specified
 * branch as the current branch, or to the specified commit if the given {@link #setSource origin}
 * is a commit id instead of a branch name, in which case HEAD will be a plain ref instead of a
 * symbolic ref, hence making it a "detached head".
 */
@CanRunDuringConflict
@Hookable(name = "checkout")
public class CheckoutOp extends AbstractGeoGigOp<CheckoutResult> {

    // Branch name or commit-ish to check out; null when only paths are restored.
    private String branchOrCommit;

    // Sorted set of paths to restore; empty means "switch branches/commits".
    private Set<String> paths;

    // When true, discards local changes / unmerged conflicts instead of failing.
    private boolean force = false;

    // --ours / --theirs conflict-resolution selectors for path checkouts.
    private boolean ours;

    private boolean theirs;

    public CheckoutOp() {
        paths = Sets.newTreeSet();
    }

    /** Sets the branch name or commit-ish to check out from (may be null for path-only use). */
    public CheckoutOp setSource(@Nullable final String branchOrCommit) {
        this.branchOrCommit = branchOrCommit;
        return this;
    }

    /** When set, local changes and unmerged paths do not abort the checkout. */
    public CheckoutOp setForce(final boolean force) {
        this.force = force;
        return this;
    }

    /** Adds a single path to restore from the source tree (or the index). */
    public CheckoutOp addPath(final CharSequence path) {
        checkNotNull(path);
        paths.add(path.toString());
        return this;
    }

    /** Selects the ORIG_HEAD side when restoring a conflicted path. */
    public CheckoutOp setOurs(final boolean ours) {
        this.ours = ours;
        return this;
    }

    /** Selects the MERGE_HEAD side when restoring a conflicted path. */
    public CheckoutOp setTheirs(final boolean theirs) {
        this.theirs = theirs;
        return this;
    }

    /** Adds every path in the collection; see {@link #addPath}. */
    public CheckoutOp addPaths(final Collection<? extends CharSequence> paths) {
        checkNotNull(paths);
        for (CharSequence path : paths) {
            addPath(path);
        }
        return this;
    }

    /**
     * Runs the checkout. Two modes: with paths, individual features/trees are restored into the
     * working tree; without paths, HEAD, the index and the work tree are switched to the target
     * branch or commit.
     *
     * @return the id of the new work tree
     */
    @Override
    protected CheckoutResult _call() {
        // Preconditions: need something to check out, and --ours/--theirs only
        // make sense for path restores, never together.
        checkState(branchOrCommit != null || !paths.isEmpty(),
                "No branch, tree, or path were specified");
        checkArgument(!(ours && theirs), "Cannot use both --ours and --theirs.");
        checkArgument((ours == theirs) || branchOrCommit == null,
                "--ours/--theirs is incompatible with switching branches.");
        CheckoutResult result = new CheckoutResult();

        List<Conflict> conflicts = conflictsDatabase().getConflicts(null, null);
        if (!paths.isEmpty()) {
            // ---- Path-restore mode: copy the requested paths into the work tree.
            result.setResult(CheckoutResult.Results.UPDATE_OBJECTS);
            Optional<RevTree> tree = Optional.absent();
            List<String> unmerged = lookForUnmerged(conflicts, paths);
            if (!unmerged.isEmpty()) {
                // Unmerged paths abort unless the caller asked to force or to
                // pick a side of the conflict.
                if (!(force || ours || theirs)) {
                    StringBuilder msg = new StringBuilder();
                    for (String path : unmerged) {
                        msg.append("error: path " + path + " is unmerged.\n");
                    }
                    throw new CheckoutException(msg.toString(), StatusCode.UNMERGED_PATHS);
                }
            }

            // Source tree: the given tree-ish if provided, otherwise the index.
            if (branchOrCommit != null) {
                Optional<ObjectId> id = command(ResolveTreeish.class).setTreeish(branchOrCommit)
                        .call();
                checkState(id.isPresent(), "'" + branchOrCommit + "' not found in repository.");
                tree = command(RevObjectParse.class).setObjectId(id.get()).call(RevTree.class);
            } else {
                tree = Optional.of(index().getTree());
            }
            Optional<RevTree> mainTree = tree;
            for (String st : paths) {
                if (unmerged.contains(st)) {
                    if (ours || theirs) {
                        // Resolve the conflicted path from ORIG_HEAD (--ours) or
                        // MERGE_HEAD (--theirs) instead of the main source tree.
                        String refspec = ours ? Ref.ORIG_HEAD : Ref.MERGE_HEAD;
                        Optional<ObjectId> treeId = command(ResolveTreeish.class).setTreeish(
                                refspec).call();
                        if (treeId.isPresent()) {
                            tree = command(RevObjectParse.class).setObjectId(treeId.get()).call(
                                    RevTree.class);
                        }
                    } else {// --force
                        continue;
                    }
                } else {
                    tree = mainTree;
                }
                Optional<NodeRef> node = command(FindTreeChild.class).setParent(tree.get())
                        .setChildPath(st).call();
                if ((ours || theirs) && !node.isPresent()) {
                    // remove the node: the chosen side deleted this path.
                    command(RemoveOp.class).setRecursive(true).addPathToRemove(st).call();
                } else {
                    checkState(node.isPresent(),
                            "pathspec '" + st + "' didn't match a feature in the tree");
                    if (node.get().getType() == TYPE.TREE) {
                        // Whole subtree: replace it at the root of the work tree.
                        RevTreeBuilder treeBuilder = new RevTreeBuilder(objectDatabase(),
                                workingTree().getTree());
                        treeBuilder.remove(st);
                        treeBuilder.put(node.get().getNode());
                        RevTree newRoot = treeBuilder.build();
                        objectDatabase().put(newRoot);
                        workingTree().updateWorkHead(newRoot.getId());
                    } else {
                        // Single feature: rebuild its parent tree with the restored
                        // node and write the change back up to the root.
                        ObjectId metadataId = ObjectId.NULL;
                        Optional<NodeRef> parentNode = command(FindTreeChild.class)
                                .setParent(workingTree().getTree())
                                .setChildPath(node.get().getParentPath()).call();
                        RevTreeBuilder treeBuilder = null;
                        if (parentNode.isPresent()) {
                            metadataId = parentNode.get().getMetadataId();
                            Optional<RevTree> parsed = command(RevObjectParse.class).setObjectId(
                                    parentNode.get().getNode().getObjectId()).call(RevTree.class);
                            checkState(parsed.isPresent(),
                                    "Parent tree couldn't be found in the repository.");
                            treeBuilder = new RevTreeBuilder(objectDatabase(), parsed.get());
                            treeBuilder.remove(node.get().getNode().getName());
                        } else {
                            treeBuilder = new RevTreeBuilder(objectDatabase());
                        }
                        treeBuilder.put(node.get().getNode());
                        ObjectId newTreeId = command(WriteBack.class)
                                .setAncestor(new RevTreeBuilder(objectDatabase(),
                                        workingTree().getTree()))
                                .setChildPath(node.get().getParentPath())
                                .setTree(treeBuilder.build()).setMetadataId(metadataId).call();
                        workingTree().updateWorkHead(newTreeId);
                    }
                }
            }
        } else {
            // ---- Branch/commit switch mode.
            if (!conflicts.isEmpty()) {
                // An in-progress merge blocks branch switching unless forced.
                if (!(force)) {
                    StringBuilder msg = new StringBuilder();
                    for (Conflict conflict : conflicts) {
                        msg.append("error: " + conflict.getPath() + " needs merge.\n");
                    }
                    msg.append("You need to resolve your index first.\n");
                    throw new CheckoutException(msg.toString(), StatusCode.UNMERGED_PATHS);
                }
            }
            Optional<Ref> targetRef = Optional.absent();
            Optional<ObjectId> targetCommitId = Optional.absent();
            Optional<ObjectId> targetTreeId = Optional.absent();
            targetRef = command(RefParse.class).setName(branchOrCommit).call();
            if (targetRef.isPresent()) {
                ObjectId commitId = targetRef.get().getObjectId();
                if (targetRef.get().getName().startsWith(Ref.REMOTES_PREFIX)) {
                    // Remote-tracking ref: either a detached checkout (caller
                    // named "remote/branch" explicitly) or create a local
                    // tracking branch configured to follow the remote one.
                    String remoteName = targetRef.get().getName();
                    remoteName = remoteName.substring(Ref.REMOTES_PREFIX.length(), targetRef.get()
                            .getName().lastIndexOf("/"));
                    if (branchOrCommit.contains(remoteName + '/')) {
                        RevCommit commit = command(RevObjectParse.class).setObjectId(commitId)
                                .call(RevCommit.class).get();
                        targetTreeId = Optional.of(commit.getTreeId());
                        targetCommitId = Optional.of(commit.getId());
                        targetRef = Optional.absent();
                    } else {
                        Ref branch = command(BranchCreateOp.class)
                                .setName(targetRef.get().localName())
                                .setSource(commitId.toString()).call();
                        // Record the upstream configuration for the new branch.
                        command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET)
                                .setScope(ConfigScope.LOCAL)
                                .setName("branches." + branch.localName() + ".remote")
                                .setValue(remoteName).call();
                        command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET)
                                .setScope(ConfigScope.LOCAL)
                                .setName("branches." + branch.localName() + ".merge")
                                .setValue(targetRef.get().getName()).call();
                        targetRef = Optional.of(branch);
                        result.setResult(CheckoutResult.Results.CHECKOUT_REMOTE_BRANCH);
                        result.setRemoteName(remoteName);
                    }
                }
                if (commitId.isNull()) {
                    // Empty branch (no commits yet).
                    targetTreeId = Optional.of(ObjectId.NULL);
                    targetCommitId = Optional.of(ObjectId.NULL);
                } else {
                    Optional<RevCommit> parsed = command(RevObjectParse.class)
                            .setObjectId(commitId).call(RevCommit.class);
                    checkState(parsed.isPresent());
                    checkState(parsed.get() instanceof RevCommit);
                    RevCommit commit = parsed.get();
                    targetCommitId = Optional.of(commit.getId());
                    targetTreeId = Optional.of(commit.getTreeId());
                }
            } else {
                // Not a ref: resolve as an arbitrary commit-ish (detached checkout).
                final Optional<ObjectId> addressed = command(RevParse.class).setRefSpec(
                        branchOrCommit).call();
                checkArgument(addressed.isPresent(),
                        "source '" + branchOrCommit + "' not found in repository");
                RevCommit commit = command(RevObjectParse.class).setObjectId(addressed.get())
                        .call(RevCommit.class).get();
                targetTreeId = Optional.of(commit.getTreeId());
                targetCommitId = Optional.of(commit.getId());
            }
            if (targetTreeId.isPresent()) {
                if (!force) {
                    if (!index().isClean() || !workingTree().isClean()) {
                        throw new CheckoutException(StatusCode.LOCAL_CHANGES_NOT_COMMITTED);
                    }
                }
                // update work tree
                ObjectId treeId = targetTreeId.get();
                workingTree().updateWorkHead(treeId);
                index().updateStageHead(treeId);
                result.setNewTree(treeId);
                if (targetRef.isPresent()) {
                    // update HEAD
                    Ref target = targetRef.get();
                    String refName;
                    if (target instanceof SymRef) {// beware of cyclic refs, peel symrefs
                        refName = ((SymRef) target).getTarget();
                    } else {
                        refName = target.getName();
                    }
                    command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(refName).call();
                    result.setNewRef(targetRef.get());
                    result.setOid(targetCommitId.get());
                    result.setResult(CheckoutResult.Results.CHECKOUT_LOCAL_BRANCH);
                } else {
                    // set HEAD to a detached state
                    ObjectId commitId = targetCommitId.get();
                    command(UpdateRef.class).setName(Ref.HEAD).setNewValue(commitId).call();
                    result.setOid(commitId);
                    result.setResult(CheckoutResult.Results.DETACHED_HEAD);
                }
                // A successful switch aborts any in-progress merge.
                Optional<Ref> ref = command(RefParse.class).setName(Ref.MERGE_HEAD).call();
                if (ref.isPresent()) {
                    command(UpdateRef.class).setName(Ref.MERGE_HEAD).setDelete(true).call();
                }
                return result;
            }
        }
        result.setNewTree(workingTree().getTree().getId());
        return result;
    }

    /**
     * Returns the subset of {@code paths} that currently appear in the conflicts list.
     */
    private List<String> lookForUnmerged(List<Conflict> conflicts, Set<String> paths) {
        List<String> unmerged = Lists.newArrayList();
        for (String path : paths) {
            for (Conflict conflict : conflicts) {
                if (conflict.getPath().equals(path)) {
                    unmerged.add(path);
                    break;
                }
            }
        }
        return unmerged;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.tools; import java.io.ByteArrayOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintStream; import java.net.URI; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.StringTokenizer; import junit.framework.TestCase; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster.Builder; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.security.UserGroupInformation; import 
org.apache.hadoop.util.ToolRunner; import org.apache.log4j.Level; /** * A JUnit test for copying files recursively. */ public class TestCopyFiles extends TestCase { { ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange") ).getLogger().setLevel(Level.ERROR); ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR); ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR); ((Log4JLogger)DistCpV1.LOG).getLogger().setLevel(Level.ALL); } static final URI LOCAL_FS = URI.create("file:///"); private static final Random RAN = new Random(); private static final int NFILES = 20; private static String TEST_ROOT_DIR = new Path(System.getProperty("test.build.data","/tmp")) .toString().replace(' ', '+'); /** class MyFile contains enough information to recreate the contents of * a single file. */ private static class MyFile { private static Random gen = new Random(); private static final int MAX_LEVELS = 3; private static final int MAX_SIZE = 8*1024; private static String[] dirNames = { "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine" }; private final String name; private int size = 0; private long seed = 0L; MyFile() { this(gen.nextInt(MAX_LEVELS)); } MyFile(int nLevels) { String xname = ""; if (nLevels != 0) { int[] levels = new int[nLevels]; for (int idx = 0; idx < nLevels; idx++) { levels[idx] = gen.nextInt(10); } StringBuffer sb = new StringBuffer(); for (int idx = 0; idx < nLevels; idx++) { sb.append(dirNames[levels[idx]]); sb.append("/"); } xname = sb.toString(); } long fidx = gen.nextLong() & Long.MAX_VALUE; name = xname + Long.toString(fidx); reset(); } void reset() { final int oldsize = size; do { size = gen.nextInt(MAX_SIZE); } while (oldsize == size); final long oldseed = seed; do { seed = gen.nextLong() & Long.MAX_VALUE; } while (oldseed == seed); } String getName() { return name; } int getSize() { return size; } long getSeed() { return seed; } } private static MyFile[] createFiles(URI 
fsname, String topdir) throws IOException { return createFiles(FileSystem.get(fsname, new Configuration()), topdir); } /** create NFILES with random names and directory hierarchies * with random (but reproducible) data in them. */ private static MyFile[] createFiles(FileSystem fs, String topdir) throws IOException { Path root = new Path(topdir); MyFile[] files = new MyFile[NFILES]; for (int i = 0; i < NFILES; i++) { files[i] = createFile(root, fs); } return files; } static MyFile createFile(Path root, FileSystem fs, int levels) throws IOException { MyFile f = levels < 0 ? new MyFile() : new MyFile(levels); Path p = new Path(root, f.getName()); FSDataOutputStream out = fs.create(p); byte[] toWrite = new byte[f.getSize()]; new Random(f.getSeed()).nextBytes(toWrite); out.write(toWrite); out.close(); FileSystem.LOG.info("created: " + p + ", size=" + f.getSize()); return f; } static MyFile createFile(Path root, FileSystem fs) throws IOException { return createFile(root, fs, -1); } private static boolean checkFiles(FileSystem fs, String topdir, MyFile[] files ) throws IOException { return checkFiles(fs, topdir, files, false); } private static boolean checkFiles(FileSystem fs, String topdir, MyFile[] files, boolean existingOnly) throws IOException { Path root = new Path(topdir); for (int idx = 0; idx < files.length; idx++) { Path fPath = new Path(root, files[idx].getName()); try { fs.getFileStatus(fPath); FSDataInputStream in = fs.open(fPath); byte[] toRead = new byte[files[idx].getSize()]; byte[] toCompare = new byte[files[idx].getSize()]; Random rb = new Random(files[idx].getSeed()); rb.nextBytes(toCompare); assertEquals("Cannnot read file.", toRead.length, in.read(toRead)); in.close(); for (int i = 0; i < toRead.length; i++) { if (toRead[i] != toCompare[i]) { return false; } } toRead = null; toCompare = null; } catch(FileNotFoundException fnfe) { if (!existingOnly) { throw fnfe; } } } return true; } private static void updateFiles(FileSystem fs, String topdir, MyFile[] 
files, int nupdate) throws IOException { assert nupdate <= NFILES; Path root = new Path(topdir); for (int idx = 0; idx < nupdate; ++idx) { Path fPath = new Path(root, files[idx].getName()); // overwrite file assertTrue(fPath.toString() + " does not exist", fs.exists(fPath)); FSDataOutputStream out = fs.create(fPath); files[idx].reset(); byte[] toWrite = new byte[files[idx].getSize()]; Random rb = new Random(files[idx].getSeed()); rb.nextBytes(toWrite); out.write(toWrite); out.close(); } } private static FileStatus[] getFileStatus(FileSystem fs, String topdir, MyFile[] files) throws IOException { return getFileStatus(fs, topdir, files, false); } private static FileStatus[] getFileStatus(FileSystem fs, String topdir, MyFile[] files, boolean existingOnly) throws IOException { Path root = new Path(topdir); List<FileStatus> statuses = new ArrayList<FileStatus>(); for (int idx = 0; idx < NFILES; ++idx) { try { statuses.add(fs.getFileStatus(new Path(root, files[idx].getName()))); } catch(FileNotFoundException fnfe) { if (!existingOnly) { throw fnfe; } } } return statuses.toArray(new FileStatus[statuses.size()]); } private static boolean checkUpdate(FileSystem fs, FileStatus[] old, String topdir, MyFile[] upd, final int nupdate) throws IOException { Path root = new Path(topdir); // overwrote updated files for (int idx = 0; idx < nupdate; ++idx) { final FileStatus stat = fs.getFileStatus(new Path(root, upd[idx].getName())); if (stat.getModificationTime() <= old[idx].getModificationTime()) { return false; } } // did not overwrite files not updated for (int idx = nupdate; idx < NFILES; ++idx) { final FileStatus stat = fs.getFileStatus(new Path(root, upd[idx].getName())); if (stat.getModificationTime() != old[idx].getModificationTime()) { return false; } } return true; } /** delete directory and everything underneath it.*/ private static void deldir(FileSystem fs, String topdir) throws IOException { fs.delete(new Path(topdir), true); } /** copy files from local file system to 
local file system */ public void testCopyFromLocalToLocal() throws Exception { Configuration conf = new Configuration(); FileSystem localfs = FileSystem.get(LOCAL_FS, conf); MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat"); ToolRunner.run(new DistCpV1(new Configuration()), new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat", "file:///"+TEST_ROOT_DIR+"/destdat"}); assertTrue("Source and destination directories do not match.", checkFiles(localfs, TEST_ROOT_DIR+"/destdat", files)); deldir(localfs, TEST_ROOT_DIR+"/destdat"); deldir(localfs, TEST_ROOT_DIR+"/srcdat"); } /** copy files from dfs file system to dfs file system */ public void testCopyFromDfsToDfs() throws Exception { String namenode = null; MiniDFSCluster cluster = null; try { Configuration conf = new Configuration(); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); final FileSystem hdfs = cluster.getFileSystem(); namenode = FileSystem.getDefaultUri(conf).toString(); if (namenode.startsWith("hdfs://")) { MyFile[] files = createFiles(URI.create(namenode), "/srcdat"); ToolRunner.run(new DistCpV1(conf), new String[] { "-log", namenode+"/logs", namenode+"/srcdat", namenode+"/destdat"}); assertTrue("Source and destination directories do not match.", checkFiles(hdfs, "/destdat", files)); FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf); assertTrue("Log directory does not exist.", fs.exists(new Path(namenode+"/logs"))); deldir(hdfs, "/destdat"); deldir(hdfs, "/srcdat"); deldir(hdfs, "/logs"); } } finally { if (cluster != null) { cluster.shutdown(); } } } /** copy empty directory on dfs file system */ public void testEmptyDir() throws Exception { String namenode = null; MiniDFSCluster cluster = null; try { Configuration conf = new Configuration(); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); final FileSystem hdfs = cluster.getFileSystem(); namenode = FileSystem.getDefaultUri(conf).toString(); if (namenode.startsWith("hdfs://")) { FileSystem fs = 
FileSystem.get(URI.create(namenode), new Configuration()); fs.mkdirs(new Path("/empty")); ToolRunner.run(new DistCpV1(conf), new String[] { "-log", namenode+"/logs", namenode+"/empty", namenode+"/dest"}); fs = FileSystem.get(URI.create(namenode+"/destdat"), conf); assertTrue("Destination directory does not exist.", fs.exists(new Path(namenode+"/dest"))); deldir(hdfs, "/dest"); deldir(hdfs, "/empty"); deldir(hdfs, "/logs"); } } finally { if (cluster != null) { cluster.shutdown(); } } } /** copy files from local file system to dfs file system */ public void testCopyFromLocalToDfs() throws Exception { MiniDFSCluster cluster = null; try { Configuration conf = new Configuration(); cluster = new MiniDFSCluster.Builder(conf).build(); final FileSystem hdfs = cluster.getFileSystem(); final String namenode = hdfs.getUri().toString(); if (namenode.startsWith("hdfs://")) { MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat"); ToolRunner.run(new DistCpV1(conf), new String[] { "-log", namenode+"/logs", "file:///"+TEST_ROOT_DIR+"/srcdat", namenode+"/destdat"}); assertTrue("Source and destination directories do not match.", checkFiles(cluster.getFileSystem(), "/destdat", files)); assertTrue("Log directory does not exist.", hdfs.exists(new Path(namenode+"/logs"))); deldir(hdfs, "/destdat"); deldir(hdfs, "/logs"); deldir(FileSystem.get(LOCAL_FS, conf), TEST_ROOT_DIR+"/srcdat"); } } finally { if (cluster != null) { cluster.shutdown(); } } } /** copy files from dfs file system to local file system */ public void testCopyFromDfsToLocal() throws Exception { MiniDFSCluster cluster = null; try { Configuration conf = new Configuration(); final FileSystem localfs = FileSystem.get(LOCAL_FS, conf); cluster = new MiniDFSCluster.Builder(conf).build(); final FileSystem hdfs = cluster.getFileSystem(); final String namenode = FileSystem.getDefaultUri(conf).toString(); if (namenode.startsWith("hdfs://")) { MyFile[] files = createFiles(URI.create(namenode), "/srcdat"); ToolRunner.run(new 
DistCpV1(conf), new String[] {
                                         "-log",
                                         "/logs",
                                         namenode+"/srcdat",
                                         "file:///"+TEST_ROOT_DIR+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(localfs, TEST_ROOT_DIR+"/destdat", files));
      assertTrue("Log directory does not exist.",
                 hdfs.exists(new Path("/logs")));
      deldir(localfs, TEST_ROOT_DIR+"/destdat");
      deldir(hdfs, "/logs");
      deldir(hdfs, "/srcdat");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** dfs-to-dfs copy, then verify -update and -overwrite semantics. */
public void testCopyDfsToDfsUpdateOverwrite() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    final FileSystem hdfs = cluster.getFileSystem();
    final String namenode = hdfs.getUri().toString();
    if (namenode.startsWith("hdfs://")) {
      MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
      // Initial full copy, preserving file status (-p).
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-p",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf);
      assertTrue("Log directory does not exist.",
                 fs.exists(new Path(namenode+"/logs")));

      // Snapshot destination status, then modify a quarter of the sources.
      FileStatus[] dchkpoint = getFileStatus(hdfs, "/destdat", files);
      final int nupdate = NFILES>>2;
      updateFiles(cluster.getFileSystem(), "/srcdat", files, nupdate);
      deldir(hdfs, "/logs");

      // -update should copy only the files that changed.
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-prbugp", // no t to avoid preserving mod. times
                                         "-update",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      assertTrue("Update failed to replicate all changes in src",
                 checkUpdate(hdfs, dchkpoint, "/destdat", files, nupdate));

      deldir(hdfs, "/logs");
      // -overwrite should rewrite every file, changed or not.
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-prbugp", // no t to avoid preserving mod. times
                                         "-overwrite",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      assertTrue("-overwrite didn't.",
                 checkUpdate(hdfs, dchkpoint, "/destdat", files, NFILES));
      deldir(hdfs, "/destdat");
      deldir(hdfs, "/srcdat");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** -update with -skipcrccheck must not overwrite same-length files. */
public void testCopyDfsToDfsUpdateWithSkipCRC() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    final FileSystem hdfs = cluster.getFileSystem();
    final String namenode = hdfs.getUri().toString();

    FileSystem fs = FileSystem.get(URI.create(namenode), new Configuration());
    // Create two files of the same name, same length but different contents
    final String testfilename = "test";
    final String srcData = "act act act";
    final String destData = "cat cat cat";

    if (namenode.startsWith("hdfs://")) {
      deldir(hdfs,"/logs");

      Path srcPath = new Path("/srcdat", testfilename);
      Path destPath = new Path("/destdat", testfilename);
      FSDataOutputStream out = fs.create(srcPath, true);
      out.writeUTF(srcData);
      out.close();

      out = fs.create(destPath, true);
      out.writeUTF(destData);
      out.close();

      // Run with -skipcrccheck option
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-p",
                                         "-update",
                                         "-skipcrccheck",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});

      // File should not be overwritten
      FSDataInputStream in = hdfs.open(destPath);
      String s = in.readUTF();
      System.out.println("Dest had: " + s);
      assertTrue("Dest got over written even with skip crc",
                 s.equalsIgnoreCase(destData));
      in.close();

      deldir(hdfs, "/logs");

      // Run without the option
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-p",
                                         "-update",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});

      // File should be overwritten
      in = hdfs.open(destPath);
      s = in.readUTF();
System.out.println("Dest had: " + s);
      assertTrue("Dest did not get overwritten without skip crc",
                 s.equalsIgnoreCase(srcData));
      in.close();

      deldir(hdfs, "/destdat");
      deldir(hdfs, "/srcdat");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** Copying a directory onto a second destination must fail as a duplication. */
public void testCopyDuplication() throws Exception {
  final FileSystem localfs = FileSystem.get(LOCAL_FS, new Configuration());
  try {
    MyFile[] files = createFiles(localfs, TEST_ROOT_DIR+"/srcdat");
    ToolRunner.run(new DistCpV1(new Configuration()),
        new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                      "file:///"+TEST_ROOT_DIR+"/src2/srcdat"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(localfs, TEST_ROOT_DIR+"/src2/srcdat", files));

    // Listing the same tree twice as a source must be rejected.
    assertEquals(DistCpV1.DuplicationException.ERROR_CODE,
        ToolRunner.run(new DistCpV1(new Configuration()),
        new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                      "file:///"+TEST_ROOT_DIR+"/src2/srcdat",
                      "file:///"+TEST_ROOT_DIR+"/destdat",}));
  } finally {
    deldir(localfs, TEST_ROOT_DIR+"/destdat");
    deldir(localfs, TEST_ROOT_DIR+"/srcdat");
    deldir(localfs, TEST_ROOT_DIR+"/src2");
  }
}

/** Single-file copies: whole dir of one file, one file, and -update skips. */
public void testCopySingleFile() throws Exception {
  FileSystem fs = FileSystem.get(LOCAL_FS, new Configuration());
  Path root = new Path(TEST_ROOT_DIR+"/srcdat");
  try {
    MyFile[] files = {createFile(root, fs)};
    //copy a dir with a single file
    ToolRunner.run(new DistCpV1(new Configuration()),
        new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                      "file:///"+TEST_ROOT_DIR+"/destdat"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/destdat", files));

    //copy a single file
    String fname = files[0].getName();
    Path p = new Path(root, fname);
    FileSystem.LOG.info("fname=" + fname + ", exists? " + fs.exists(p));
    ToolRunner.run(new DistCpV1(new Configuration()),
        new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
                      "file:///"+TEST_ROOT_DIR+"/dest2/"+fname});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/dest2", files));

    // single file update should skip copy if destination has the file already
    String[] args = {"-update", "file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
                     "file:///"+TEST_ROOT_DIR+"/dest2/"+fname};
    Configuration conf = new Configuration();
    JobConf job = new JobConf(conf, DistCpV1.class);
    DistCpV1.Arguments distcpArgs = DistCpV1.Arguments.valueOf(args, conf);
    assertFalse("Single file update failed to skip copying even though the "
                + "file exists at destination.",
                DistCpV1.setup(conf, job, distcpArgs));

    //copy single file to existing dir
    deldir(fs, TEST_ROOT_DIR+"/dest2");
    fs.mkdirs(new Path(TEST_ROOT_DIR+"/dest2"));
    MyFile[] files2 = {createFile(root, fs, 0)};
    String sname = files2[0].getName();
    ToolRunner.run(new DistCpV1(new Configuration()),
        new String[] {"-update",
                      "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                      "file:///"+TEST_ROOT_DIR+"/dest2/"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/dest2", files2));
    updateFiles(fs, TEST_ROOT_DIR+"/srcdat", files2, 1);
    //copy single file to existing dir w/ dst name conflict
    ToolRunner.run(new DistCpV1(new Configuration()),
        new String[] {"-update",
                      "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                      "file:///"+TEST_ROOT_DIR+"/dest2/"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/dest2", files2));
  } finally {
    deldir(fs, TEST_ROOT_DIR+"/destdat");
    deldir(fs, TEST_ROOT_DIR+"/dest2");
    deldir(fs, TEST_ROOT_DIR+"/srcdat");
  }
}

/** tests basedir option copying files from dfs file system to dfs file system */
public void testBasedir() throws Exception {
  String namenode = null;
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new
MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    final FileSystem hdfs = cluster.getFileSystem();
    namenode = FileSystem.getDefaultUri(conf).toString();
    if (namenode.startsWith("hdfs://")) {
      // -basedir strips the path below /basedir, so the middle/srcdat
      // structure must reappear under the destination.
      MyFile[] files = createFiles(URI.create(namenode), "/basedir/middle/srcdat");
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-basedir",
                                         "/basedir",
                                         namenode+"/basedir/middle/srcdat",
                                         namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat/middle/srcdat", files));
      deldir(hdfs, "/destdat");
      deldir(hdfs, "/basedir");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** Verify -pu, -pg, -pp and -pt each preserve the corresponding attribute. */
public void testPreserveOption() throws Exception {
  Configuration conf = new Configuration();
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    String nnUri = FileSystem.getDefaultUri(conf).toString();
    FileSystem fs = FileSystem.get(URI.create(nnUri), conf);

    {//test preserving user
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      for(int i = 0; i < srcstat.length; i++) {
        fs.setOwner(srcstat[i].getPath(), "u" + i, null);
      }
      ToolRunner.run(new DistCpV1(conf),
          new String[]{"-pu", nnUri+"/srcdat", nnUri+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("i=" + i, "u" + i, dststat[i].getOwner());
      }
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }

    {//test preserving group
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      for(int i = 0; i < srcstat.length; i++) {
        fs.setOwner(srcstat[i].getPath(), null, "g" + i);
      }
      ToolRunner.run(new DistCpV1(conf),
          new String[]{"-pg", nnUri+"/srcdat", nnUri+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("i=" + i, "g" + i, dststat[i].getGroup());
      }
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }

    {//test preserving mode
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      FsPermission[] permissions = new FsPermission[srcstat.length];
      for(int i = 0; i < srcstat.length; i++) {
        permissions[i] = new FsPermission((short)(i & 0666));
        fs.setPermission(srcstat[i].getPath(), permissions[i]);
      }
      ToolRunner.run(new DistCpV1(conf),
          new String[]{"-pp", nnUri+"/srcdat", nnUri+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("i=" + i, permissions[i], dststat[i].getPermission());
      }
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }

    {//test preserving times
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      fs.mkdirs(new Path("/srcdat/tmpf1"));
      fs.mkdirs(new Path("/srcdat/tmpf2"));
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      FsPermission[] permissions = new FsPermission[srcstat.length];
      for(int i = 0; i < srcstat.length; i++) {
        fs.setTimes(srcstat[i].getPath(), 40, 50);
      }
      ToolRunner.run(new DistCpV1(conf),
          new String[]{"-pt", nnUri+"/srcdat", nnUri+"/destdat"});
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("Modif. Time i=" + i, 40, dststat[i].getModificationTime());
        assertEquals("Access Time i=" + i+ srcstat[i].getPath() + "-" + dststat[i].getPath(),
                     50, dststat[i].getAccessTime());
      }
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** distcp.bytes.per.map and -m should control the number of map tasks. */
public void testMapCount() throws Exception {
  String namenode = null;
  MiniDFSCluster dfs = null;
  MiniDFSCluster mr = null;
  try {
    Configuration conf = new Configuration();
    dfs= new MiniDFSCluster.Builder(conf).numDataNodes(3).format(true).build();
    FileSystem fs = dfs.getFileSystem();
    final FsShell shell = new FsShell(conf);
    namenode = fs.getUri().toString();
    MyFile[] files = createFiles(fs.getUri(), "/srcdat");
    long totsize = 0;
    for (MyFile f : files) {
      totsize += f.getSize();
    }
    // Three bytes-per-map shares -> expect roughly three maps (capped by -m).
    Configuration job = new JobConf(conf);
    job.setLong("distcp.bytes.per.map", totsize / 3);
    ToolRunner.run(new DistCpV1(job),
        new String[] {"-m", "100",
                      "-log",
                      namenode+"/logs",
                      namenode+"/srcdat",
                      namenode+"/destdat"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, "/destdat", files));

    String logdir = namenode + "/logs";
    System.out.println(execCmd(shell, "-lsr", logdir));
    FileStatus[] logs = fs.listStatus(new Path(logdir));
    // rare case where splits are exact, logs.length can be 4
    assertTrue( logs.length == 2);

    deldir(fs, "/destdat");
    deldir(fs, "/logs");
    // With -m 1 there must be exactly one map output in the log dir.
    ToolRunner.run(new DistCpV1(job),
        new String[] {"-m", "1",
                      "-log",
                      namenode+"/logs",
                      namenode+"/srcdat",
                      namenode+"/destdat"});

    System.out.println(execCmd(shell, "-lsr", logdir));
    logs = fs.globStatus(new Path(namenode+"/logs/part*"));
    assertTrue("Unexpected map count, logs.length=" + logs.length,
               logs.length == 1);
  } finally {
    if (dfs != null) { dfs.shutdown(); }
    if (mr != null) { mr.shutdown(); }
  }
}

/** Verify -filelimit and -sizelimit, alone and combined with -update. */
public void testLimits() throws Exception {
  Configuration conf = new Configuration();
  MiniDFSCluster cluster = null;
  try {
    cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    final String nnUri = FileSystem.getDefaultUri(conf).toString();
    final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);
    final DistCpV1 distcp = new DistCpV1(conf);
    final FsShell shell = new FsShell(conf);

    final String srcrootdir = "/src_root";
    final Path srcrootpath = new Path(srcrootdir);
    final String dstrootdir = "/dst_root";
    final Path dstrootpath = new Path(dstrootdir);

    {//test -filelimit
      MyFile[] files = createFiles(URI.create(nnUri), srcrootdir);
      int filelimit = files.length / 2;
      System.out.println("filelimit=" + filelimit);

      ToolRunner.run(distcp,
          new String[]{"-filelimit", ""+filelimit, nnUri+srcrootdir, nnUri+dstrootdir});
      String results = execCmd(shell, "-lsr", dstrootdir);
      results = removePrefix(results, dstrootdir);
      System.out.println("results=" +  results);

      // Exactly filelimit files should have been copied.
      FileStatus[] dststat = getFileStatus(fs, dstrootdir, files, true);
      assertEquals(filelimit, dststat.length);
      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }

    {//test -sizelimit
      createFiles(URI.create(nnUri), srcrootdir);
      long sizelimit = fs.getContentSummary(srcrootpath).getLength()/2;
      System.out.println("sizelimit=" + sizelimit);

      ToolRunner.run(distcp,
          new String[]{"-sizelimit", ""+sizelimit, nnUri+srcrootdir, nnUri+dstrootdir});

      // The copied bytes must not exceed the limit.
      ContentSummary summary = fs.getContentSummary(dstrootpath);
      System.out.println("summary=" + summary);
      assertTrue(summary.getLength() <= sizelimit);
      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }

    {//test update
      final MyFile[] srcs = createFiles(URI.create(nnUri), srcrootdir);
      final long totalsize = fs.getContentSummary(srcrootpath).getLength();
      System.out.println("src.length=" + srcs.length);
      System.out.println("totalsize =" + totalsize);
      fs.mkdirs(dstrootpath);
      // Split the copy into `parts` limited -update runs; each run may add at
      // most filelimit files / sizelimit bytes, converging on a full copy.
      final int parts = RAN.nextInt(NFILES/3 - 1) + 2;
      final int filelimit = srcs.length/parts;
      final long sizelimit = totalsize/parts;
      System.out.println("filelimit=" + filelimit);
      System.out.println("sizelimit=" + sizelimit);
      System.out.println("parts =" + parts);
      final String[] args = {"-filelimit", ""+filelimit, "-sizelimit", ""+sizelimit,
          "-update", nnUri+srcrootdir, nnUri+dstrootdir};

      int dstfilecount = 0;
      long dstsize = 0;
      for(int i = 0; i <= parts; i++) {
        ToolRunner.run(distcp, args);

        FileStatus[] dststat = getFileStatus(fs, dstrootdir, srcs, true);
        System.out.println(i + ") dststat.length=" + dststat.length);
        assertTrue(dststat.length - dstfilecount <= filelimit);
        ContentSummary summary = fs.getContentSummary(dstrootpath);
        System.out.println(i + ") summary.getLength()=" + summary.getLength());
        assertTrue(summary.getLength() - dstsize <= sizelimit);
        assertTrue(checkFiles(fs, dstrootdir, srcs, true));
        dstfilecount = dststat.length;
        dstsize = summary.getLength();
      }

      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

// Timestamp suffix keeps generated test user names unique per JVM run.
static final long now = System.currentTimeMillis();

static UserGroupInformation createUGI(String name, boolean issuper) {
  String username = name + now;
  String group = issuper?
"supergroup": username;
  return UserGroupInformation.createUserForTesting(username, new String[]{group});
}

/** Creates /user/&lt;name&gt; owned by the given UGI with mode 0700. */
static Path createHomeDirectory(FileSystem fs, UserGroupInformation ugi
    ) throws IOException {
  final Path home = new Path("/user/" + ugi.getUserName());
  fs.mkdirs(home);
  fs.setOwner(home, ugi.getUserName(), ugi.getGroupNames()[0]);
  fs.setPermission(home, new FsPermission((short)0700));
  return home;
}

/** An unreadable (mode 000) hftp source must make distcp fail. */
public void testHftpAccessControl() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    //start cluster by DFS_UGI
    final Configuration dfsConf = new Configuration();
    cluster = new MiniDFSCluster.Builder(dfsConf).numDataNodes(2).build();
    cluster.waitActive();

    final UserGroupInformation DFS_UGI = createUGI("dfs", true);
    final UserGroupInformation USER_UGI = createUGI("user", false);

    final String httpAdd = dfsConf.get("dfs.http.address");
    final URI nnURI = FileSystem.getDefaultUri(dfsConf);
    final String nnUri = nnURI.toString();
    FileSystem fs1 = DFS_UGI.doAs(new PrivilegedExceptionAction<FileSystem>() {
      public FileSystem run() throws IOException {
        return FileSystem.get(nnURI, dfsConf);
      }
    });
    final Path home = createHomeDirectory(fs1, USER_UGI);

    //now, login as USER_UGI
    final Configuration userConf = new Configuration();
    final FileSystem fs = USER_UGI.doAs(new PrivilegedExceptionAction<FileSystem>() {
      public FileSystem run() throws IOException {
        return FileSystem.get(nnURI, userConf);
      }
    });

    final Path srcrootpath = new Path(home, "src_root");
    final String srcrootdir =  srcrootpath.toString();
    final Path dstrootpath = new Path(home, "dst_root");
    final String dstrootdir =  dstrootpath.toString();
    final DistCpV1 distcp = USER_UGI.doAs(new PrivilegedExceptionAction<DistCpV1>() {
      public DistCpV1 run() {
        return new DistCpV1(userConf);
      }
    });

    FileSystem.mkdirs(fs, srcrootpath, new FsPermission((short)0700));
    final String[] args = {"hftp://"+httpAdd+srcrootdir, nnUri+dstrootdir};

    { //copy with permission 000, should fail
      fs.setPermission(srcrootpath, new
FsPermission((short)0));
      USER_UGI.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
          // -3 is distcp's error code for a failed copy.
          assertEquals(-3, ToolRunner.run(distcp, args));
          return null;
        }
      });
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** test -delete */
public void testDelete() throws Exception {
  final Configuration conf = new Configuration();
  // Enable trash so deleted destination files land in .Trash.
  conf.setInt("fs.trash.interval", 60);
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    final URI nnURI = FileSystem.getDefaultUri(conf);
    final String nnUri = nnURI.toString();
    final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);

    final DistCpV1 distcp = new DistCpV1(conf);
    final FsShell shell = new FsShell(conf);

    final String srcrootdir = "/src_root";
    final String dstrootdir = "/dst_root";

    {
      //create source files
      createFiles(nnURI, srcrootdir);
      String srcresults = execCmd(shell, "-lsr", srcrootdir);
      srcresults = removePrefix(srcresults, srcrootdir);
      System.out.println("srcresults=" +  srcresults);

      //create some files in dst
      createFiles(nnURI, dstrootdir);
      System.out.println("dstrootdir=" +  dstrootdir);
      shell.run(new String[]{"-lsr", dstrootdir});

      //run distcp
      ToolRunner.run(distcp,
          new String[]{"-delete", "-update", "-log", "/log",
                       nnUri+srcrootdir, nnUri+dstrootdir});

      //make sure src and dst contains the same files
      String dstresults = execCmd(shell, "-lsr", dstrootdir);
      dstresults = removePrefix(dstresults, dstrootdir);
      System.out.println("first dstresults=" +  dstresults);
      assertEquals(srcresults, dstresults);

      //create additional file in dst
      create(fs, new Path(dstrootdir, "foo"));
      create(fs, new Path(dstrootdir, "foobar"));

      //run distcp again
      ToolRunner.run(distcp,
          new String[]{"-delete", "-update", "-log", "/log2",
                       nnUri+srcrootdir, nnUri+dstrootdir});

      //make sure src and dst contains the same files
      dstresults = execCmd(shell, "-lsr", dstrootdir);
      dstresults = removePrefix(dstresults, dstrootdir);
      System.out.println("second dstresults=" +  dstresults);
      assertEquals(srcresults, dstresults);
      // verify that files removed in -delete were moved to the trash
      // regrettably, this test will break if Trash changes incompatibly
      assertTrue(fs.exists(new Path(fs.getHomeDirectory(),
              ".Trash/Current" + dstrootdir + "/foo")));
      assertTrue(fs.exists(new Path(fs.getHomeDirectory(),
              ".Trash/Current" + dstrootdir + "/foobar")));

      //cleanup
      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/**
 * verify that -delete option works for other {@link FileSystem}
 * implementations. See MAPREDUCE-1285 */
public void testDeleteLocal() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    final FileSystem localfs = FileSystem.get(LOCAL_FS, conf);
    cluster = new MiniDFSCluster.Builder(conf).build();
    final FileSystem hdfs = cluster.getFileSystem();
    final String namenode = FileSystem.getDefaultUri(conf).toString();
    if (namenode.startsWith("hdfs://")) {
      MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
      String destdir = TEST_ROOT_DIR + "/destdat";
      // Pre-populate the local destination so -delete has files to remove.
      MyFile[] localFiles = createFiles(localfs, destdir);
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-delete", "-update",
                                         "-log",
                                         "/logs",
                                         namenode+"/srcdat",
                                         "file:///"+TEST_ROOT_DIR+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(localfs, destdir, files));
      assertTrue("Log directory does not exist.",
                 hdfs.exists(new Path("/logs")));
      deldir(localfs, destdir);
      deldir(hdfs, "/logs");
      deldir(hdfs, "/srcdat");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** test globbing  */
public void testGlobbing() throws Exception {
  String namenode = null;
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    final FileSystem hdfs = cluster.getFileSystem();
    namenode = FileSystem.getDefaultUri(conf).toString();
    if
(namenode.startsWith("hdfs://")) {
      // A glob (/srcdat/*) as the source must expand to all files beneath it.
      MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
      ToolRunner.run(new DistCpV1(conf), new String[] {
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat/*",
                                         namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf);
      assertTrue("Log directory does not exist.",
                 fs.exists(new Path(namenode+"/logs")));
      deldir(hdfs, "/destdat");
      deldir(hdfs, "/srcdat");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}

/** Writes 1-2KB of random bytes to a new file. */
static void create(FileSystem fs, Path f) throws IOException {
  FSDataOutputStream out = fs.create(f);
  try {
    byte[] b = new byte[1024 + RAN.nextInt(1024)];
    RAN.nextBytes(b);
    out.write(b);
  } finally {
    if (out != null) out.close();
  }
}

/** Runs an FsShell command and returns everything it printed to stdout. */
static String execCmd(FsShell shell, String... args) throws Exception {
  ByteArrayOutputStream baout = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(baout, true);
  // Temporarily redirect System.out to capture the shell's output.
  PrintStream old = System.out;
  System.setOut(out);
  shell.run(args);
  out.close();
  System.setOut(old);
  return baout.toString();
}

/** Strips everything up to and including the prefix from each line. */
private static String removePrefix(String lines, String prefix) {
  final int prefixlen = prefix.length();
  final StringTokenizer t = new StringTokenizer(lines, "\n");
  final StringBuffer results = new StringBuffer();
  for(; t.hasMoreTokens(); ) {
    String s = t.nextToken();
    results.append(s.substring(s.indexOf(prefix) + prefixlen) + "\n");
  }
  return results.toString();
}
}
/*
 * Copyright 2010-2013 Ning, Inc.
 * Copyright 2014-2017 Groupon, Inc
 * Copyright 2014-2017 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.invoice.dao;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;

import javax.annotation.Nullable;
import javax.inject.Inject;

import org.joda.time.LocalDate;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.ObjectType;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.invoice.api.InvoiceApiException;
import org.killbill.billing.invoice.api.InvoiceItemType;
import org.killbill.billing.invoice.api.InvoiceStatus;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;
import org.killbill.billing.util.entity.dao.EntitySqlDaoWrapperFactory;
import org.killbill.billing.util.tag.ControlTagType;
import org.killbill.billing.util.tag.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;

// Shared helper logic for the invoice DAO layer: adjustment/refund amount
// computation and unpaid-invoice filtering, all within a DAO transaction.
public class InvoiceDaoHelper {

    private static final Logger log = LoggerFactory.getLogger(InvoiceDaoHelper.class);

    private final InternalCallContextFactory internalCallContextFactory;

    @Inject
    public InvoiceDaoHelper(final InternalCallContextFactory internalCallContextFactory) {
        this.internalCallContextFactory = internalCallContextFactory;
    }

    /**
     * Find amounts to adjust for individual items, if not specified.
     * The user gives us a list of items to adjust associated with a given amount (how much to refund per invoice item).
     * In case of full adjustments, the amount can be null: in this case, we retrieve the original amount for the invoice
     * item.
     *
     * @param invoiceId                     original invoice id
     * @param entitySqlDaoWrapperFactory    the EntitySqlDaoWrapperFactory from the current transaction
     * @param invoiceItemIdsWithNullAmounts the original mapping between invoice item ids and amount to refund (contains null)
     * @param context                       the tenant callcontext
     * @return the final mapping between invoice item ids and amount to refund
     * @throws org.killbill.billing.invoice.api.InvoiceApiException
     */
    public Map<UUID, BigDecimal> computeItemAdjustments(final String invoiceId,
                                                        final List<Tag> invoicesTags,
                                                        final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory,
                                                        final Map<UUID, BigDecimal> invoiceItemIdsWithNullAmounts,
                                                        final InternalTenantContext context) throws InvoiceApiException {
        // Populate the missing amounts for individual items, if needed
        final Map<UUID, BigDecimal> outputItemIdsWithAmounts = new HashMap<UUID, BigDecimal>();
        // Retrieve invoice before the Refund
        final InvoiceModelDao invoice = entitySqlDaoWrapperFactory.become(InvoiceSqlDao.class).getById(invoiceId, context);
        if (invoice != null) {
            populateChildren(invoice, invoicesTags,
entitySqlDaoWrapperFactory, context);
        } else {
            throw new IllegalStateException("Invoice shouldn't be null for id " + invoiceId);
        }

        //
        // If we have an item amount, we'd like to use it, but we need to check first that it is lesser or equal than the maximum allowed.
        // If not, we compute the maximum value we can adjust per item.
        //
        for (final UUID invoiceItemId : invoiceItemIdsWithNullAmounts.keySet()) {
            final List<InvoiceItemModelDao> adjustedOrRepairedItems = entitySqlDaoWrapperFactory.become(InvoiceItemSqlDao.class).getAdjustedOrRepairedInvoiceItemsByLinkedId(invoiceItemId.toString(), context);
            computeItemAdjustmentsForTargetInvoiceItem(getInvoiceItemForId(invoice, invoiceItemId), adjustedOrRepairedItems, invoiceItemIdsWithNullAmounts, outputItemIdsWithAmounts);
        }
        return outputItemIdsWithAmounts;
    }

    // Computes the adjustment for one target item: validates a user-proposed
    // amount against the maximum still adjustable, falls back to that maximum
    // when no amount was proposed, and records strictly positive results.
    private static void computeItemAdjustmentsForTargetInvoiceItem(final InvoiceItemModelDao targetInvoiceItem,
                                                                   final List<InvoiceItemModelDao> adjustedOrRepairedItems,
                                                                   final Map<UUID, BigDecimal> inputAdjInvoiceItem,
                                                                   final Map<UUID, BigDecimal> outputAdjInvoiceItem) throws InvoiceApiException {
        final BigDecimal originalItemAmount = targetInvoiceItem.getAmount();
        final BigDecimal maxAdjLeftAmount = computeItemAdjustmentAmount(originalItemAmount, adjustedOrRepairedItems);

        final BigDecimal proposedItemAmount = inputAdjInvoiceItem.get(targetInvoiceItem.getId());
        if (proposedItemAmount != null && proposedItemAmount.compareTo(maxAdjLeftAmount) > 0) {
            throw new InvoiceApiException(ErrorCode.INVOICE_ITEM_ADJUSTMENT_AMOUNT_INVALID, proposedItemAmount, maxAdjLeftAmount);
        }

        final BigDecimal itemAmountToAdjust = MoreObjects.firstNonNull(proposedItemAmount, maxAdjLeftAmount);
        if (itemAmountToAdjust.compareTo(BigDecimal.ZERO) > 0) {
            outputAdjInvoiceItem.put(targetInvoiceItem.getId(), itemAmountToAdjust);
        }
    }

    /**
     * @param requestedPositiveAmountToAdjust amount we are adjusting for that item
     * @param adjustedOrRepairedItems         list of all adjusted or repaired linking to this item
     * @return the amount we should really adjust based on whether or not the item got repaired
     */
    private static BigDecimal computeItemAdjustmentAmount(final BigDecimal requestedPositiveAmountToAdjust, final List<InvoiceItemModelDao> adjustedOrRepairedItems) {
        BigDecimal positiveAdjustedOrRepairedAmount = BigDecimal.ZERO;
        for (final InvoiceItemModelDao cur : adjustedOrRepairedItems) {
            // Adjustment or repair items are negative so we negate to make it positive
            positiveAdjustedOrRepairedAmount = positiveAdjustedOrRepairedAmount.add(cur.getAmount().negate());
        }
        // If what was already adjusted/repaired covers the request, nothing is left to adjust.
        return (positiveAdjustedOrRepairedAmount.compareTo(requestedPositiveAmountToAdjust) >= 0) ? BigDecimal.ZERO : requestedPositiveAmountToAdjust.subtract(positiveAdjustedOrRepairedAmount);
    }

    // Linear scan of the invoice's items; throws INVOICE_ITEM_NOT_FOUND when absent.
    private InvoiceItemModelDao getInvoiceItemForId(final InvoiceModelDao invoice, final UUID invoiceItemId) throws InvoiceApiException {
        for (final InvoiceItemModelDao invoiceItem : invoice.getInvoiceItems()) {
            if (invoiceItem.getId().equals(invoiceItemId)) {
                return invoiceItem;
            }
        }
        throw new InvoiceApiException(ErrorCode.INVOICE_ITEM_NOT_FOUND, invoiceItemId);
    }

    // Resolves the effective (positive) refund amount for a payment, defaulting
    // to the full payment amount when none was requested.
    public BigDecimal computePositiveRefundAmount(final InvoicePaymentModelDao payment, final BigDecimal requestedRefundAmount, final Map<UUID, BigDecimal> invoiceItemIdsWithAmounts) throws InvoiceApiException {
        final BigDecimal maxRefundAmount = payment.getAmount() == null ? BigDecimal.ZERO : payment.getAmount();
        final BigDecimal requestedPositiveAmount = requestedRefundAmount == null ?
maxRefundAmount : requestedRefundAmount;
        // This check is good but not enough, we need to also take into account previous refunds
        // (But that should have been checked in the payment call already)
        if (requestedPositiveAmount.compareTo(maxRefundAmount) > 0) {
            throw new InvoiceApiException(ErrorCode.REFUND_AMOUNT_TOO_HIGH, requestedPositiveAmount, maxRefundAmount);
        }

        // Verify if the requested amount matches the invoice items to adjust, if specified
        BigDecimal amountFromItems = BigDecimal.ZERO;
        for (final BigDecimal itemAmount : invoiceItemIdsWithAmounts.values()) {
            amountFromItems = amountFromItems.add(itemAmount);
        }

        // Sanity check: if some items were specified, then the sum should be equal to specified refund amount, if specified
        if (amountFromItems.compareTo(BigDecimal.ZERO) != 0 &&
            requestedPositiveAmount.compareTo(amountFromItems) < 0) {
            throw new InvoiceApiException(ErrorCode.REFUND_AMOUNT_DONT_MATCH_ITEMS_TO_ADJUST, requestedPositiveAmount, amountFromItems);
        }
        return requestedPositiveAmount;
    }

    // Loads every invoice for the account within the current transaction, then
    // delegates to the list-based overload to filter down to the unpaid ones.
    public List<InvoiceModelDao> getUnpaidInvoicesByAccountFromTransaction(final UUID accountId, final List<Tag> invoicesTags, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, @Nullable LocalDate startDate, final LocalDate upToDate, final InternalTenantContext context) {
        final List<InvoiceModelDao> invoices = getAllInvoicesByAccountFromTransaction(false, invoicesTags, entitySqlDaoWrapperFactory, context);
        log.debug("Found invoices={} for accountId={}", invoices, accountId);
        return getUnpaidInvoicesByAccountFromTransaction(invoices, startDate, upToDate);
    }

    // Filters to COMMITTED, not-written-off invoices with a positive balance whose
    // target date falls inside the optional [startDate, upToDate] window. The
    // balance is computed on the parent invoice when one exists.
    public List<InvoiceModelDao> getUnpaidInvoicesByAccountFromTransaction(final List<InvoiceModelDao> invoices, @Nullable LocalDate startDate, @Nullable final LocalDate upToDate) {
        final Collection<InvoiceModelDao> unpaidInvoices = Collections2.filter(invoices, new Predicate<InvoiceModelDao>() {
            @Override
            public boolean apply(final InvoiceModelDao in) {
                final InvoiceModelDao invoice = (in.getParentInvoice() ==
null) ? in : in.getParentInvoice();
            final BigDecimal balance = InvoiceModelDaoHelper.getRawBalanceForRegularInvoice(invoice);
            log.debug("Computed balance={} for invoice={}", balance, in);
            // NOTE(review): `compareTo(...) >= 1` relies on compareTo returning exactly 1 for
            // "greater"; the Comparable contract only guarantees the sign — `> 0` would be the
            // idiomatic form (BigDecimal does return -1/0/1 in practice, so behavior is the same).
            return InvoiceStatus.COMMITTED.equals(in.getStatus()) && (balance.compareTo(BigDecimal.ZERO) >= 1 && !in.isWrittenOff()) &&
                   (startDate == null || in.getTargetDate() == null || in.getTargetDate().compareTo(startDate) >= 0) &&
                   (upToDate == null || in.getTargetDate() == null || in.getTargetDate().compareTo(upToDate) <= 0);
        }
    });
    return new ArrayList<InvoiceModelDao>(unpaidInvoices);
}

/**
 * Create an adjustment for a given invoice item. This just creates the object in memory, it doesn't write it to disk.
 *
 * @param invoiceId the invoice id
 * @param invoiceItemId the invoice item id to adjust
 * @param effectiveDate adjustment effective date, in the account timezone
 * @param positiveAdjAmount the amount to adjust. Pass null to adjust the full amount of the original item
 * @param currency the currency of the amount.
Pass null to default to the original currency used
 * @return the adjustment item
 */
public InvoiceItemModelDao createAdjustmentItem(final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final UUID invoiceId, final UUID invoiceItemId,
                                                final BigDecimal positiveAdjAmount, final Currency currency, final LocalDate effectiveDate,
                                                final InternalCallContext context) throws InvoiceApiException {
    // First, retrieve the invoice item in question
    final InvoiceItemSqlDao invoiceItemSqlDao = entitySqlDaoWrapperFactory.become(InvoiceItemSqlDao.class);
    final InvoiceItemModelDao invoiceItemToBeAdjusted = invoiceItemSqlDao.getById(invoiceItemId.toString(), context);
    if (invoiceItemToBeAdjusted == null) {
        throw new InvoiceApiException(ErrorCode.INVOICE_ITEM_NOT_FOUND, invoiceItemId);
    }
    // Validate the invoice it belongs to
    if (!invoiceItemToBeAdjusted.getInvoiceId().equals(invoiceId)) {
        throw new InvoiceApiException(ErrorCode.INVOICE_INVALID_FOR_INVOICE_ITEM_ADJUSTMENT, invoiceItemId, invoiceId);
    }
    // Retrieve the amount and currency if needed
    final BigDecimal amountToAdjust = MoreObjects.firstNonNull(positiveAdjAmount, invoiceItemToBeAdjusted.getAmount());
    // TODO - should we enforce the currency (and respect the original one) here if the amount passed was null?
    final Currency currencyForAdjustment = MoreObjects.firstNonNull(currency, invoiceItemToBeAdjusted.getCurrency());
    // Finally, create the adjustment
    // Note! The amount is negated here!
return new InvoiceItemModelDao(context.getCreatedDate(), InvoiceItemType.ITEM_ADJ, invoiceItemToBeAdjusted.getInvoiceId(), invoiceItemToBeAdjusted.getAccountId(),
                               null, null, null, invoiceItemToBeAdjusted.getProductName(), invoiceItemToBeAdjusted.getPlanName(), invoiceItemToBeAdjusted.getPhaseName(),
                               invoiceItemToBeAdjusted.getUsageName(), invoiceItemToBeAdjusted.getCatalogEffectiveDate(), effectiveDate, effectiveDate,
                               amountToAdjust.negate(), null, currencyForAdjustment, invoiceItemToBeAdjusted.getId());
}

// Populates a single invoice with its items, payments, tracking ids, written-off flag and
// (for child invoices) its parent invoice. "Children" here means the invoice's sub-entities,
// not parent/child accounts.
public void populateChildren(final InvoiceModelDao invoice, final List<Tag> invoicesTags, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    // !!! Anything updated here needs to also be reflected in void populateChildren(final Iterable<InvoiceModelDao> invoices,...)
    setInvoiceItemsWithinTransaction(invoice, entitySqlDaoWrapperFactory, context);
    setInvoicePaymentsWithinTransaction(invoice, entitySqlDaoWrapperFactory, context);
    setTrackingIdsFromTransaction(invoice, entitySqlDaoWrapperFactory, context);
    setInvoicesWrittenOff(invoice, invoicesTags);
    if (!invoice.isParentInvoice()) {
        setParentInvoice(invoice, invoicesTags, entitySqlDaoWrapperFactory, context);
    }
}

// Bulk variant of populateChildren: same population steps, but using account-wide queries
// (one query per entity type) instead of one query per invoice.
public void populateChildren(final Iterable<InvoiceModelDao> invoices, final List<Tag> invoicesTags, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    // !!! Anything updated here needs to also be reflected in void populateChildren(final InvoiceModelDao invoice,...)
    if (Iterables.isEmpty(invoices)) {
        return;
    }
    setInvoiceItemsWithinTransaction(invoices, entitySqlDaoWrapperFactory, context);
    setInvoicePaymentsWithinTransaction(invoices, entitySqlDaoWrapperFactory, context);
    setTrackingIdsFromTransaction(invoices, entitySqlDaoWrapperFactory, context);
    setInvoicesWrittenOff(invoices, invoicesTags);
    // Only non-parent invoices can themselves have a parent invoice attached
    final Iterable<InvoiceModelDao> nonParentInvoices = Iterables.<InvoiceModelDao>filter(invoices, new Predicate<InvoiceModelDao>() {
        @Override
        public boolean apply(final InvoiceModelDao invoice) {
            return !invoice.isParentInvoice();
        }
    });
    if (!Iterables.isEmpty(nonParentInvoices)) {
        setParentInvoice(nonParentInvoices, invoicesTags, entitySqlDaoWrapperFactory, context);
    }
}

// Fetches all invoices for the account record id in context, optionally excluding VOID
// invoices, and fully populates each one.
public List<InvoiceModelDao> getAllInvoicesByAccountFromTransaction(final Boolean includeVoidedInvoices, final List<Tag> invoicesTags, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    final List<InvoiceModelDao> invoices = ImmutableList.<InvoiceModelDao>copyOf(Iterables.<InvoiceModelDao>filter(entitySqlDaoWrapperFactory.become(InvoiceSqlDao.class).getByAccountRecordId(context), new Predicate<InvoiceModelDao>() {
        @Override
        public boolean apply(final InvoiceModelDao invoice) {
            return includeVoidedInvoices ? true : !InvoiceStatus.VOID.equals(invoice.getStatus());
        }
    }));
    populateChildren(invoices, invoicesTags, entitySqlDaoWrapperFactory, context);
    return invoices;
}

// Returns the amount of the payment that has not yet been refunded/charged back; a null
// result from the DAO is normalized to ZERO.
public BigDecimal getRemainingAmountPaidFromTransaction(final UUID invoicePaymentId, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    final BigDecimal amount = entitySqlDaoWrapperFactory.become(InvoicePaymentSqlDao.class).getRemainingAmountPaid(invoicePaymentId.toString(), context);
    return amount == null ?
BigDecimal.ZERO : amount;
}

// Loads the items for a single invoice and attaches them (never attaches null).
private void setInvoiceItemsWithinTransaction(final InvoiceModelDao invoice, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    final InvoiceItemSqlDao invoiceItemSqlDao = entitySqlDaoWrapperFactory.become(InvoiceItemSqlDao.class);
    final List<InvoiceItemModelDao> invoiceItems = invoiceItemSqlDao.getInvoiceItemsByInvoice(invoice.getId().toString(), context);
    // Make sure to set invoice items to a non-null value
    final List<InvoiceItemModelDao> invoiceItemsForInvoice = MoreObjects.firstNonNull(invoiceItems, ImmutableList.<InvoiceItemModelDao>of());
    log.debug("Found items={} for invoice={}", invoiceItemsForInvoice, invoice);
    invoice.addInvoiceItems(invoiceItemsForInvoice);
}

// Bulk variant: one account-wide item query, then items are grouped per invoice id in
// memory and attached to each invoice.
private void setInvoiceItemsWithinTransaction(final Iterable<InvoiceModelDao> invoices, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    final InvoiceItemSqlDao invoiceItemSqlDao = entitySqlDaoWrapperFactory.become(InvoiceItemSqlDao.class);
    final List<InvoiceItemModelDao> invoiceItemsForAccount = invoiceItemSqlDao.getByAccountRecordId(context);
    final Map<UUID, List<InvoiceItemModelDao>> invoiceItemsPerInvoiceId = new HashMap<UUID, List<InvoiceItemModelDao>>();
    for (final InvoiceItemModelDao item : invoiceItemsForAccount) {
        if (invoiceItemsPerInvoiceId.get(item.getInvoiceId()) == null) {
            invoiceItemsPerInvoiceId.put(item.getInvoiceId(), new LinkedList<InvoiceItemModelDao>());
        }
        invoiceItemsPerInvoiceId.get(item.getInvoiceId()).add(item);
    }
    for (final InvoiceModelDao invoice : invoices) {
        // Make sure to set invoice items to a non-null value
        final List<InvoiceItemModelDao> invoiceItemsForInvoice = MoreObjects.firstNonNull(invoiceItemsPerInvoiceId.get(invoice.getId()), ImmutableList.<InvoiceItemModelDao>of());
        log.debug("Found items={} for invoice={}", invoiceItemsForInvoice, invoice);
        invoice.addInvoiceItems(invoiceItemsForInvoice);
    }
}

// Loads the payments for a single invoice, attaches them, and records a processed currency
// if any payment was processed in a currency different from its own.
private void setInvoicePaymentsWithinTransaction(final InvoiceModelDao invoice, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    final InvoicePaymentSqlDao invoicePaymentSqlDao = entitySqlDaoWrapperFactory.become(InvoicePaymentSqlDao.class);
    final List<InvoicePaymentModelDao> invoicePayments = invoicePaymentSqlDao.getAllPaymentsForInvoiceIncludedInit(invoice.getId().toString(), context);
    log.debug("Found payments={} for invoice={}", invoicePayments, invoice);
    invoice.addPayments(invoicePayments);
    for (final InvoicePaymentModelDao invoicePayment : invoicePayments) {
        if (invoicePayment.getCurrency() != invoicePayment.getProcessedCurrency()) {
            // If any entry is set with a different processed currency, we use it as a processed currency.
            invoice.setProcessedCurrency(invoicePayment.getProcessedCurrency());
            break;
        }
    }
}

// Bulk variant: one account-wide payment query, grouped per invoice id in memory.
private void setInvoicePaymentsWithinTransaction(final Iterable<InvoiceModelDao> invoices, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    final InvoicePaymentSqlDao invoicePaymentSqlDao = entitySqlDaoWrapperFactory.become(InvoicePaymentSqlDao.class);
    final List<InvoicePaymentModelDao> invoicePaymentsForAccount = invoicePaymentSqlDao.getByAccountRecordId(context);
    final Map<UUID, List<InvoicePaymentModelDao>> invoicePaymentsPerInvoiceId = new HashMap<UUID, List<InvoicePaymentModelDao>>();
    for (final InvoicePaymentModelDao invoicePayment : invoicePaymentsForAccount) {
        if (invoicePaymentsPerInvoiceId.get(invoicePayment.getInvoiceId()) == null) {
            invoicePaymentsPerInvoiceId.put(invoicePayment.getInvoiceId(), new LinkedList<InvoicePaymentModelDao>());
        }
        invoicePaymentsPerInvoiceId.get(invoicePayment.getInvoiceId()).add(invoicePayment);
    }
    for (final InvoiceModelDao invoice : invoices) {
        // Make sure to set payments to a non-null value
        final List<InvoicePaymentModelDao> invoicePaymentsForInvoice = MoreObjects.firstNonNull(invoicePaymentsPerInvoiceId.get(invoice.getId()),
ImmutableList.<InvoicePaymentModelDao>of());
        log.debug("Found payments={} for invoice={}", invoicePaymentsForInvoice, invoice);
        invoice.addPayments(invoicePaymentsForInvoice);
        for (final InvoicePaymentModelDao invoicePayment : invoicePaymentsForInvoice) {
            if (invoicePayment.getCurrency() != invoicePayment.getProcessedCurrency()) {
                // If any entry is set with a different processed currency, we use it as a processed currency.
                invoice.setProcessedCurrency(invoicePayment.getProcessedCurrency());
                break;
            }
        }
    }
}

// Single-invoice convenience wrapper over the bulk written-off marker below.
private void setInvoicesWrittenOff(final InvoiceModelDao invoice, final List<Tag> invoicesTags) {
    setInvoicesWrittenOff(ImmutableList.of(invoice), invoicesTags);
}

// Marks each invoice as written off when a WRITTEN_OFF control tag points at its id.
private void setInvoicesWrittenOff(final Iterable<InvoiceModelDao> invoices, final List<Tag> invoicesTags) {
    final Iterable<Tag> writtenOffTags = filterForWrittenOff(invoicesTags);
    for (final Tag cur : writtenOffTags) {
        final InvoiceModelDao foundInvoice = Iterables.tryFind(invoices, new Predicate<InvoiceModelDao>() {
            @Override
            public boolean apply(final InvoiceModelDao input) {
                return input.getId().equals(cur.getObjectId());
            }
        }).orNull();
        if (foundInvoice != null) {
            foundInvoice.setIsWrittenOff(true);
        }
    }
}

// Single-invoice convenience wrapper over the bulk tracking-id loader below.
private void setTrackingIdsFromTransaction(final InvoiceModelDao invoice, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    setTrackingIdsFromTransaction(ImmutableList.of(invoice), entitySqlDaoWrapperFactory, context);
}

// Loads the usage tracking ids for all given invoices in one query and attaches them,
// grouped per invoice id.
private void setTrackingIdsFromTransaction(final Iterable<InvoiceModelDao> invoices, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext context) {
    final Set<String> invoiceIds = ImmutableSet.<String>copyOf(Iterables.transform(invoices, new Function<InvoiceModelDao, String>() {
        @Override
        public String apply(final InvoiceModelDao input) {
            return input.getId().toString();
        }
    }));
    final InvoiceTrackingSqlDao invoiceTrackingidSqlDao = entitySqlDaoWrapperFactory.become(InvoiceTrackingSqlDao.class);
    final List<InvoiceTrackingModelDao> trackingIds = invoiceTrackingidSqlDao.getTrackingsForInvoices(invoiceIds, context);
    final Map<UUID, List<InvoiceTrackingModelDao>> invoiceTrackingIdsPerInvoiceId = new HashMap<UUID, List<InvoiceTrackingModelDao>>();
    for (InvoiceTrackingModelDao cur : trackingIds) {
        if (invoiceTrackingIdsPerInvoiceId.get(cur.getInvoiceId()) == null) {
            invoiceTrackingIdsPerInvoiceId.put(cur.getInvoiceId(), new LinkedList<>());
        }
        invoiceTrackingIdsPerInvoiceId.get(cur.getInvoiceId()).add(cur);
    }
    for (final InvoiceModelDao invoice : invoices) {
        if (invoiceTrackingIdsPerInvoiceId.get(invoice.getId()) != null) {
            final List<InvoiceTrackingModelDao> perInvoiceTrackingIds = invoiceTrackingIdsPerInvoiceId.get(invoice.getId());
            final Iterable<String> transform = Iterables.transform(perInvoiceTrackingIds, new Function<InvoiceTrackingModelDao, String>() {
                @Override
                public String apply(final InvoiceTrackingModelDao input) {
                    return input.getTrackingId();
                }
            });
            invoice.addTrackingIds(ImmutableSet.<String>copyOf(transform));
        }
    }
}

// Selects only the WRITTEN_OFF control tags from the given tag list.
private Iterable<Tag> filterForWrittenOff(final List<Tag> tags) {
    return Iterables.filter(tags, new Predicate<Tag>() {
        @Override
        public boolean apply(final Tag input) {
            return input.getTagDefinitionId().equals(ControlTagType.WRITTEN_OFF.getId());
        }
    });
}

// Resolves and attaches the parent invoice of a single child invoice, populating the
// parent under its own (parent account) tenant context.
private void setParentInvoice(final InvoiceModelDao invoice, final List<Tag> invoicesTags, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext childContext) {
    final InvoiceParentChildrenSqlDao invoiceParentChildrenSqlDao = entitySqlDaoWrapperFactory.become(InvoiceParentChildrenSqlDao.class);
    final List<InvoiceParentChildModelDao> mappings = invoiceParentChildrenSqlDao.getParentChildMappingsByChildInvoiceIds(ImmutableList.of(invoice.getId().toString()), childContext);
    if (mappings.isEmpty()) {
        return;
    }
    // A child invoice is expected to map to exactly one parent invoice
    Preconditions.checkState(mappings.size() == 1, String.format("Expected only one parent mapping for invoice %s", invoice.getId()));
    final UUID
parentInvoiceId = mappings.get(0).getParentInvoiceId();
    final InvoiceSqlDao invoiceSqlDao = entitySqlDaoWrapperFactory.become(InvoiceSqlDao.class);
    final InvoiceModelDao parentInvoice = invoiceSqlDao.getById(parentInvoiceId.toString(), childContext);
    // The parent invoice lives on the parent account, so build a tenant context scoped to it
    final Long parentAccountRecordId = internalCallContextFactory.getRecordIdFromObject(parentInvoice.getAccountId(), ObjectType.ACCOUNT, internalCallContextFactory.createTenantContext(childContext));
    final InternalTenantContext parentContext = internalCallContextFactory.createInternalTenantContext(childContext.getTenantRecordId(), parentAccountRecordId);
    // Note the misnomer here, populateChildren simply populates the content of these invoices (unrelated to HA)
    populateChildren(parentInvoice, invoicesTags, entitySqlDaoWrapperFactory, parentContext);
    invoice.addParentInvoice(parentInvoice);
}

// Bulk variant: resolves parent invoices for many child invoices with a small, fixed number
// of queries, populating parents grouped by parent account id (re-parenting can yield
// several parent accounts).
private void setParentInvoice(final Iterable<InvoiceModelDao> childInvoices, final List<Tag> invoicesTags, final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory, final InternalTenantContext childContext) {
    final Collection<String> childInvoiceIds = new HashSet<String>();
    for (final InvoiceModelDao childInvoice : childInvoices) {
        childInvoiceIds.add(childInvoice.getId().toString());
    }

    // DAO: retrieve the mappings between parent and child invoices
    final InvoiceParentChildrenSqlDao invoiceParentChildrenSqlDao = entitySqlDaoWrapperFactory.become(InvoiceParentChildrenSqlDao.class);
    final List<InvoiceParentChildModelDao> mappings = invoiceParentChildrenSqlDao.getParentChildMappingsByChildInvoiceIds(childInvoiceIds, childContext);
    if (mappings.isEmpty()) {
        return;
    }

    final Map<UUID, InvoiceParentChildModelDao> mappingPerChildInvoiceId = new HashMap<UUID, InvoiceParentChildModelDao>();
    final Collection<String> parentInvoiceIdsAsStrings = new HashSet<String>();
    for (final InvoiceParentChildModelDao mapping : mappings) {
        mappingPerChildInvoiceId.put(mapping.getChildInvoiceId(), mapping);
        parentInvoiceIdsAsStrings.add(mapping.getParentInvoiceId().toString());
    }

    // DAO: retrieve all parents invoices in bulk, for all child invoices
    final InvoiceSqlDao invoiceSqlDao = entitySqlDaoWrapperFactory.become(InvoiceSqlDao.class);
    final List<InvoiceModelDao> parentInvoices = invoiceSqlDao.getByIds(parentInvoiceIdsAsStrings, childContext);

    // Group the parent invoices by (parent) account id (most likely, we only have one parent account group, except in re-parenting cases)
    final Map<UUID, List<InvoiceModelDao>> parentInvoicesGroupedByParentAccountId = new HashMap<UUID, List<InvoiceModelDao>>();
    // Create also a convenient mapping (needed below later)
    final Map<UUID, InvoiceModelDao> parentInvoiceByParentInvoiceId = new HashMap<UUID, InvoiceModelDao>();
    for (final InvoiceModelDao parentInvoice : parentInvoices) {
        if (parentInvoicesGroupedByParentAccountId.get(parentInvoice.getAccountId()) == null) {
            parentInvoicesGroupedByParentAccountId.put(parentInvoice.getAccountId(), new LinkedList<InvoiceModelDao>());
        }
        parentInvoicesGroupedByParentAccountId.get(parentInvoice.getAccountId()).add(parentInvoice);
        parentInvoiceByParentInvoiceId.put(parentInvoice.getId(), parentInvoice);
    }

    // DAO: populate the parent invoices in bulk
    for (final Entry<UUID, List<InvoiceModelDao>> entry : parentInvoicesGroupedByParentAccountId.entrySet()) {
        final List<InvoiceModelDao> parentInvoicesForOneParentAccountId = entry.getValue();
        final Long parentAccountRecordId = internalCallContextFactory.getRecordIdFromObject(entry.getKey(), ObjectType.ACCOUNT, internalCallContextFactory.createTenantContext(childContext));
        final InternalTenantContext parentContext = internalCallContextFactory.createInternalTenantContext(childContext.getTenantRecordId(), parentAccountRecordId);
        // Note the misnomer here, populateChildren simply populates the content of these invoices (unrelated to HA)
        populateChildren(parentInvoicesForOneParentAccountId, invoicesTags, entitySqlDaoWrapperFactory, parentContext);
    }

    // Finally, attach each resolved parent invoice to its child
    for (final InvoiceModelDao invoice : childInvoices) {
        final InvoiceParentChildModelDao mapping = mappingPerChildInvoiceId.get(invoice.getId());
        if (mapping == null) {
            continue;
        }
        final InvoiceModelDao parentInvoice = parentInvoiceByParentInvoiceId.get(mapping.getParentInvoiceId());
        if (parentInvoice != null) {
            invoice.addParentInvoice(parentInvoice);
        }
    }
}
}
package org.bouncycastle.x509;

import java.io.IOException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.Principal;
import java.security.cert.CertSelector;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;

import javax.security.auth.x500.X500Principal;

import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1Integer;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;
import org.bouncycastle.asn1.x509.Holder;
import org.bouncycastle.asn1.x509.IssuerSerial;
import org.bouncycastle.asn1.x509.ObjectDigestInfo;
import org.bouncycastle.jce.PrincipalUtil;
import org.bouncycastle.jce.X509Principal;
import org.bouncycastle.util.Arrays;
import org.bouncycastle.util.Selector;

/**
 * The Holder object.
 *
 * <pre>
 *          Holder ::= SEQUENCE {
 *                baseCertificateID   [0] IssuerSerial OPTIONAL,
 *                         -- the issuer and serial number of
 *                         -- the holder's Public Key Certificate
 *                entityName          [1] GeneralNames OPTIONAL,
 *                         -- the name of the claimant or role
 *                objectDigestInfo    [2] ObjectDigestInfo OPTIONAL
 *                         -- used to directly authenticate the holder,
 *                         -- for example, an executable
 *          }
 * </pre>
 * @deprecated use org.bouncycastle.cert.AttributeCertificateHolder
 */
public class AttributeCertificateHolder
    implements CertSelector, Selector
{
    final Holder holder;

    AttributeCertificateHolder(ASN1Sequence seq)
    {
        holder = Holder.getInstance(seq);
    }

    public AttributeCertificateHolder(X509Principal issuerName,
        BigInteger serialNumber)
    {
        holder = new org.bouncycastle.asn1.x509.Holder(new IssuerSerial(
            GeneralNames.getInstance(new DERSequence(new GeneralName(issuerName))),
            new ASN1Integer(serialNumber)));
    }

    public AttributeCertificateHolder(X500Principal issuerName,
        BigInteger serialNumber)
    {
        this(X509Util.convertPrincipal(issuerName), serialNumber);
    }

    public AttributeCertificateHolder(X509Certificate cert)
        throws CertificateParsingException
    {
        X509Principal name;

        try
        {
            name = PrincipalUtil.getIssuerX509Principal(cert);
        }
        catch (Exception e)
        {
            throw new CertificateParsingException(e.getMessage());
        }

        holder = new Holder(new IssuerSerial(generateGeneralNames(name),
            new ASN1Integer(cert.getSerialNumber())));
    }

    public AttributeCertificateHolder(X509Principal principal)
    {
        holder = new Holder(generateGeneralNames(principal));
    }

    public AttributeCertificateHolder(X500Principal principal)
    {
        this(X509Util.convertPrincipal(principal));
    }

    /**
     * Constructs a holder for v2 attribute certificates with a hash value for
     * some type of object.
     * <p>
     * <code>digestedObjectType</code> can be one of the following:
     * <ul>
     * <li>0 - publicKey - A hash of the public key of the holder must be
     * passed.
     * <li>1 - publicKeyCert - A hash of the public key certificate of the
     * holder must be passed.
     * <li>2 - otherObjectDigest - A hash of some other object type must be
     * passed. <code>otherObjectTypeID</code> must not be empty.
     * </ul>
     * <p>
     * This cannot be used if a v1 attribute certificate is used.
     *
     * @param digestedObjectType The digest object type.
     * @param digestAlgorithm The algorithm identifier for the hash.
     * @param otherObjectTypeID The object type ID if
     *            <code>digestedObjectType</code> is
     *            <code>otherObjectDigest</code>.
     * @param objectDigest The hash value.
     */
    public AttributeCertificateHolder(int digestedObjectType,
        String digestAlgorithm, String otherObjectTypeID, byte[] objectDigest)
    {
        holder = new Holder(new ObjectDigestInfo(digestedObjectType,
            new ASN1ObjectIdentifier(otherObjectTypeID), new AlgorithmIdentifier(new ASN1ObjectIdentifier(digestAlgorithm)), Arrays
                .clone(objectDigest)));
    }

    /**
     * Returns the digest object type if an object digest info is used.
     * <p>
     * <ul>
     * <li>0 - publicKey - A hash of the public key of the holder must be
     * passed.
     * <li>1 - publicKeyCert - A hash of the public key certificate of the
     * holder must be passed.
     * <li>2 - otherObjectDigest - A hash of some other object type must be
     * passed. <code>otherObjectTypeID</code> must not be empty.
     * </ul>
     *
     * @return The digest object type or -1 if no object digest info is set.
     */
    public int getDigestedObjectType()
    {
        if (holder.getObjectDigestInfo() != null)
        {
            return holder.getObjectDigestInfo().getDigestedObjectType()
                .getValue().intValue();
        }
        return -1;
    }

    /**
     * Returns the digest algorithm ID if an object digest info is used.
     *
     * @return The digest algorithm ID or <code>null</code> if no object
     *         digest info is set.
     */
    public String getDigestAlgorithm()
    {
        if (holder.getObjectDigestInfo() != null)
        {
            return holder.getObjectDigestInfo().getDigestAlgorithm().getAlgorithm()
                .getId();
        }
        return null;
    }

    /**
     * Returns the hash if an object digest info is used.
     *
     * @return The hash or <code>null</code> if no object digest info is set.
     */
    public byte[] getObjectDigest()
    {
        if (holder.getObjectDigestInfo() != null)
        {
            return holder.getObjectDigestInfo().getObjectDigest().getBytes();
        }
        return null;
    }

    /**
     * Returns the other object type ID if an object digest info is used.
     *
     * @return The other object type ID or <code>null</code> if no object
     *         digest info is set.
     */
    public String getOtherObjectTypeID()
    {
        if (holder.getObjectDigestInfo() != null)
        {
            // Fix: the previous code computed this value and discarded it, so the
            // method always returned null despite its documented contract.
            return holder.getObjectDigestInfo().getOtherObjectTypeID().getId();
        }
        return null;
    }

    private GeneralNames generateGeneralNames(X509Principal principal)
    {
        return GeneralNames.getInstance(new DERSequence(new GeneralName(principal)));
    }

    // Returns true if any directoryName entry in targets equals the given subject.
    // Malformed entries are skipped (best-effort matching, as in the original).
    private boolean matchesDN(X509Principal subject, GeneralNames targets)
    {
        GeneralName[] names = targets.getNames();

        for (int i = 0; i != names.length; i++)
        {
            GeneralName gn = names[i];

            if (gn.getTagNo() == GeneralName.directoryName)
            {
                try
                {
                    if (new X509Principal(((ASN1Encodable)gn.getName()).toASN1Primitive()
                        .getEncoded()).equals(subject))
                    {
                        return true;
                    }
                }
                catch (IOException e)
                {
                    // ignored: an unparsable name simply does not match
                }
            }
        }

        return false;
    }

    // Extracts the directoryName entries as X500Principal objects; other name forms are dropped.
    private Object[] getNames(GeneralName[] names)
    {
        List l = new ArrayList(names.length);

        for (int i = 0; i != names.length; i++)
        {
            if (names[i].getTagNo() == GeneralName.directoryName)
            {
                try
                {
                    l.add(new X500Principal(
                        ((ASN1Encodable)names[i].getName()).toASN1Primitive().getEncoded()));
                }
                catch (IOException e)
                {
                    throw new RuntimeException("badly formed Name object");
                }
            }
        }

        return l.toArray(new Object[l.size()]);
    }

    private Principal[] getPrincipals(GeneralNames names)
    {
        Object[] p = this.getNames(names.getNames());
        List l = new ArrayList();

        for (int i = 0; i != p.length; i++)
        {
            if (p[i] instanceof Principal)
            {
                l.add(p[i]);
            }
        }

        return (Principal[])l.toArray(new Principal[l.size()]);
    }

    /**
     * Return any principal objects inside the attribute certificate holder
     * entity names field.
     *
     * @return an array of Principal objects (usually X500Principal), null if no
     *         entity names field is set.
     */
    public Principal[] getEntityNames()
    {
        if (holder.getEntityName() != null)
        {
            return getPrincipals(holder.getEntityName());
        }
        return null;
    }

    /**
     * Return the principals associated with the issuer attached to this holder
     *
     * @return an array of principals, null if no BaseCertificateID is set.
     */
    public Principal[] getIssuer()
    {
        if (holder.getBaseCertificateID() != null)
        {
            return getPrincipals(holder.getBaseCertificateID().getIssuer());
        }
        return null;
    }

    /**
     * Return the serial number associated with the issuer attached to this
     * holder.
     *
     * @return the certificate serial number, null if no BaseCertificateID is
     *         set.
     */
    public BigInteger getSerialNumber()
    {
        if (holder.getBaseCertificateID() != null)
        {
            return holder.getBaseCertificateID().getSerial().getValue();
        }
        return null;
    }

    public Object clone()
    {
        return new AttributeCertificateHolder((ASN1Sequence)holder
            .toASN1Primitive());
    }

    public boolean match(Certificate cert)
    {
        if (!(cert instanceof X509Certificate))
        {
            return false;
        }

        X509Certificate x509Cert = (X509Certificate)cert;

        try
        {
            if (holder.getBaseCertificateID() != null)
            {
                return holder.getBaseCertificateID().getSerial().getValue().equals(x509Cert.getSerialNumber())
                    && matchesDN(PrincipalUtil.getIssuerX509Principal(x509Cert), holder.getBaseCertificateID().getIssuer());
            }

            if (holder.getEntityName() != null)
            {
                if (matchesDN(PrincipalUtil.getSubjectX509Principal(x509Cert),
                    holder.getEntityName()))
                {
                    return true;
                }
            }
            if (holder.getObjectDigestInfo() != null)
            {
                MessageDigest md = null;
                try
                {
                    md = MessageDigest.getInstance(getDigestAlgorithm(), "BC");
                }
                catch (Exception e)
                {
                    return false;
                }
                switch (getDigestedObjectType())
                {
                case ObjectDigestInfo.publicKey:
                    // TODO: DSA Dss-parms
                    md.update(cert.getPublicKey().getEncoded());
                    break;
                case ObjectDigestInfo.publicKeyCert:
                    md.update(cert.getEncoded());
                    break;
                }
                if (!Arrays.areEqual(md.digest(), getObjectDigest()))
                {
                    return false;
                }
                // NOTE(review): even when the digest matches, control falls through to the
                // final `return false` below, so an objectDigestInfo-only holder never
                // matches. Preserved as-is for compatibility with the deprecated upstream
                // behavior — confirm before relying on digest-based matching.
            }
        }
        catch (CertificateEncodingException e)
        {
            return false;
        }

        return false;
    }

    public boolean equals(Object obj)
    {
        if (obj == this)
        {
            return true;
        }

        if (!(obj instanceof AttributeCertificateHolder))
        {
            return false;
        }

        AttributeCertificateHolder other = (AttributeCertificateHolder)obj;

        return this.holder.equals(other.holder);
    }

    public int hashCode()
    {
        return this.holder.hashCode();
    }

    public boolean match(Object obj)
    {
        if (!(obj instanceof X509Certificate))
        {
            return false;
        }

        return match((Certificate)obj);
    }
}
package crazypants.enderio.conduit.gui;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.item.ItemStack;
import net.minecraftforge.common.util.ForgeDirection;

import org.lwjgl.opengl.GL11;

import com.enderio.core.api.client.gui.ITabPanel;
import com.enderio.core.api.client.render.IWidgetIcon;
import com.enderio.core.client.gui.GuiContainerBase;
import com.enderio.core.client.render.RenderUtil;

import cpw.mods.fml.common.Optional;
import crazypants.enderio.conduit.IConduit;
import crazypants.enderio.conduit.IConduitBundle;
import crazypants.enderio.conduit.gas.IGasConduit;
import crazypants.enderio.conduit.item.IItemConduit;
import crazypants.enderio.conduit.liquid.ILiquidConduit;
import crazypants.enderio.conduit.me.IMEConduit;
import crazypants.enderio.conduit.power.IPowerConduit;
import crazypants.enderio.conduit.redstone.IRedstoneConduit;
import crazypants.enderio.gui.IconEIO;

/**
 * GUI for configuring the external connection of a conduit bundle on one face.
 * One tab is shown per conduit type present on the connection; tabs are ordered
 * by TAB_ORDER and rendered along the right edge of the window.
 */
public class GuiExternalConnection extends GuiContainerBase {

  // Vertical pixel height of one tab along the right edge
  private static final int TAB_HEIGHT = 24;

  private static int nextButtonId = 1;

  // Hands out process-wide unique button ids for the tab panels
  public static int nextButtonId() {
    return nextButtonId++;
  }

  // Fixed display order of the known conduit types; unknown types sort last
  private static final Map<Class<? extends IConduit>, Integer> TAB_ORDER = new HashMap<Class<? extends IConduit>, Integer>();

  static {
    TAB_ORDER.put(IItemConduit.class, 0);
    TAB_ORDER.put(ILiquidConduit.class, 1);
    TAB_ORDER.put(IRedstoneConduit.class, 2);
    TAB_ORDER.put(IPowerConduit.class, 3);
    TAB_ORDER.put(IMEConduit.class, 4);
    TAB_ORDER.put(IGasConduit.class, 5);
  }

  final InventoryPlayer playerInv;
  final IConduitBundle bundle;
  // The face of the bundle this GUI configures
  private final ForgeDirection dir;

  // Conduits shown in this GUI and their tab panels; kept index-aligned
  private final List<IConduit> conduits = new ArrayList<IConduit>();
  private final List<ITabPanel> tabs = new ArrayList<ITabPanel>();
  private int activeTab = 0;
  private int tabYOffset = 4;

  private final ExternalConnectionContainer container;

  public GuiExternalConnection(InventoryPlayer playerInv, IConduitBundle bundle, ForgeDirection dir) {
    super(new ExternalConnectionContainer(playerInv, bundle, dir));
    container = (ExternalConnectionContainer) inventorySlots;
    this.playerInv = playerInv;
    this.bundle = bundle;
    this.dir = dir;
    ySize = 166 + 29;
    xSize = 206;

    container.setInoutSlotsVisible(false, false);
    container.setInventorySlotsVisible(false);

    List<IConduit> cons = new ArrayList<IConduit>(bundle.getConduits());
    // NOTE(review): returning 1 when either key is missing from TAB_ORDER is not
    // antisymmetric (compare(a,b) and compare(b,a) can both be 1), which technically
    // violates the Comparator contract for unknown conduit types.
    Collections.sort(cons, new Comparator<IConduit>() {
      @Override
      public int compare(IConduit o1, IConduit o2) {
        Integer int1 = TAB_ORDER.get(o1.getBaseConduitType());
        if(int1 == null) {
          return 1;
        }
        Integer int2 = TAB_ORDER.get(o2.getBaseConduitType());
        if(int2 == null) {
          return 1;
        }
        //NB: using Double.comp instead of Integer.comp as the int version is only from Java 1.7+
        return Double.compare(int1, int2);
      }
    });
    for (IConduit con : cons) {
      // Only conduits that do (or could) connect externally on this face get a tab
      if(con.containsExternalConnection(dir) || con.canConnectToExternal(dir, true)) {
        @SuppressWarnings("LeakingThisInConstructor")
        ITabPanel tab = TabFactory.instance.createPanelForConduit(this, con);
        if(tab != null) {
          conduits.add(con);
          tabs.add(tab);
        }
      }
    }
  }

  @Override
  public void initGui() {
    super.initGui();
    buttonList.clear();
    // Only the active tab lays out its widgets; all others are deactivated
    for (int i = 0; i < tabs.size(); i++) {
      if(i == activeTab) {
        tabs.get(i).onGuiInit(guiLeft + 10, guiTop, xSize - 20, ySize - 20);
      } else {
        tabs.get(i).deactivate();
      }
    }
  }

  @Override
  public boolean doesGuiPauseGame() {
    return false;
  }

  @Override
  protected void mouseClicked(int x, int y, int par3) {
    super.mouseClicked(x, y, par3);
    // Tab strip hit test, in GUI-local coordinates
    int tabLeftX = xSize;
    int tabRightX = tabLeftX + 22;

    int minY = tabYOffset;
    int maxY = minY + (conduits.size() * TAB_HEIGHT);

    x = (x - guiLeft);
    y = (y - guiTop);

    // NOTE(review): the literals 24 here presumably mean TAB_HEIGHT (also 24) —
    // confirm before unifying; TAB_HEIGHT changes would currently not affect these.
    if(x > tabLeftX && x < tabRightX + 24) {
      if(y > minY && y < maxY) {
        activeTab = (y - minY) / 24;
        initGui();
        return;
      }
    }
    tabs.get(activeTab).mouseClicked(x, y, par3);
  }

  public void setSize(int x, int y) {
    xSize = x;
    ySize = y;
  }

  @Override
  protected void actionPerformed(GuiButton guiButton) {
    super.actionPerformed(guiButton);
    // Forward button presses to the active tab panel
    tabs.get(activeTab).actionPerformed(guiButton);
  }

  @Override
  protected void drawGuiContainerBackgroundLayer(float par1, int par2, int par3) {
    GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
    int sx = (width - xSize) / 2;
    int sy = (height - ySize) / 2;
    int tabX = sx + xSize - 3;

    // Draw the inactive tabs first, behind the main window texture
    Tessellator tes = Tessellator.instance;
    tes.startDrawingQuads();
    for (int i = 0; i < tabs.size(); i++) {
      if(i != activeTab) {
        RenderUtil.bindTexture(IconEIO.TEXTURE);
        IconEIO.map.render(IconEIO.INACTIVE_TAB, tabX, sy + tabYOffset + (i * 24));
        IWidgetIcon icon = tabs.get(i).getIcon();
        icon.getMap().render(icon, tabX - 1, sy + tabYOffset + (i * TAB_HEIGHT) + 4);
      }
    }
    tes.draw();

    RenderUtil.bindTexture("enderio:textures/gui/externalConduitConnection.png");
    drawTexturedModalRect(sx, sy, 0, 0, this.xSize, this.ySize);

    // Then the active tab on top of the window
    RenderUtil.bindTexture(IconEIO.TEXTURE);
    tes.startDrawingQuads();
    IconEIO.map.render(IconEIO.ACTIVE_TAB, tabX, sy + tabYOffset + (activeTab * TAB_HEIGHT));
    if(tabs.size() > 0) {
      IWidgetIcon icon = tabs.get(activeTab).getIcon();
      icon.getMap().render(icon, tabX - 1, sy + tabYOffset + (activeTab * TAB_HEIGHT) + 4);
      tes.draw();
      tabs.get(activeTab).render(par1, par2, par3);
    } else {
      tes.draw();
    }
    super.drawGuiContainerBackgroundLayer(par1, par2, par3);
  }

  public ForgeDirection getDir() {
    return dir;
  }

  public ExternalConnectionContainer getContainer() {
    return container;
  }

  // Keeps NEI's item panel from overlapping the tab strip
  @Override
  @Optional.Method(modid = "NotEnoughItems")
  public boolean hideItemPanelSlot(GuiContainer gc, int x, int y, int w, int h) {
    if(tabs.size() > 0) {
      int sx = (width - xSize) / 2;
      int sy = (height - ySize) / 2;
      int tabX = sx + xSize - 3;
      int tabY = sy + tabYOffset;
      return (x+w) >= tabX && x < (tabX + 14) && (y+h) >= tabY && y < (tabY + tabs.size()*TAB_HEIGHT);
    }
    return false;
  }

  @Override
  protected void drawFakeItemStack(int x, int y, ItemStack stack) {
    super.drawFakeItemStack(x, y, stack);
    itemRender.renderItemOverlayIntoGUI(fontRendererObj, mc.renderEngine, stack, x, y, "");
  }
}
package org.apache.commons.jcs.auxiliary.remote;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.jcs.auxiliary.AbstractAuxiliaryCache;
import org.apache.commons.jcs.auxiliary.AuxiliaryCacheAttributes;
import org.apache.commons.jcs.auxiliary.remote.behavior.IRemoteCacheClient;
import org.apache.commons.jcs.auxiliary.remote.behavior.IRemoteCacheListener;
import org.apache.commons.jcs.engine.CacheStatus;
import org.apache.commons.jcs.engine.behavior.ICacheElement;
import org.apache.commons.jcs.engine.behavior.ICacheServiceNonLocal;
import org.apache.commons.jcs.engine.stats.behavior.IStats;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Used for testing the no wait. A scriptable mock: tests pre-populate the public
 * fields ({@link #getSetupMap}, {@link #status}, ...) and inspect the recording
 * lists ({@link #updateList}, {@link #removeList}) afterwards.
 * <p>
 * @author Aaron Smuts
 */
public class MockRemoteCacheClient<K extends Serializable, V extends Serializable>
    extends AbstractAuxiliaryCache<K, V>
    implements IRemoteCacheClient<K, V>
{
    /** For serialization. Don't change. */
    private static final long serialVersionUID = 1L;

    /** log instance */
    private final static Log log = LogFactory.getLog( MockRemoteCacheClient.class );

    /** List of ICacheElement<K, V> objects passed into update. */
    public List<ICacheElement<K, V>> updateList = new LinkedList<ICacheElement<K,V>>();

    /** List of key objects passed into remove. */
    public List<K> removeList = new LinkedList<K>();

    /** status to return. */
    public CacheStatus status = CacheStatus.ALIVE;

    /** Can setup values to return from get. values must be ICacheElement<K, V> */
    public Map<K, ICacheElement<K, V>> getSetupMap = new HashMap<K, ICacheElement<K,V>>();

    /** Can setup values to return from get. values must be Map<K, ICacheElement<K, V>> */
    public Map<Set<K>, Map<K, ICacheElement<K, V>>> getMultipleSetupMap =
        new HashMap<Set<K>, Map<K,ICacheElement<K,V>>>();

    /** The last service passed to fixCache */
    public ICacheServiceNonLocal<K, V> fixed;

    /** Attributes. */
    public RemoteCacheAttributes attributes = new RemoteCacheAttributes();

    /**
     * Stores the last argument as fixed.
     * <p>
     * (non-Javadoc)
     * @see org.apache.commons.jcs.auxiliary.remote.behavior.IRemoteCacheClient#fixCache(org.apache.commons.jcs.auxiliary.remote.behavior.ICacheServiceNonLocal)
     */
    @SuppressWarnings("unchecked") // Don't know how to do this properly
    public void fixCache( ICacheServiceNonLocal<?, ?> remote )
    {
        fixed = (ICacheServiceNonLocal<K, V>)remote;
    }

    /**
     * @return long - always 0 for this mock
     */
    public long getListenerId()
    {
        return 0;
    }

    /**
     * @return null - this mock has no listener
     */
    public IRemoteCacheListener<K, V> getListener()
    {
        return null;
    }

    /**
     * Adds the argument to the updatedList.
     * <p>
     * (non-Javadoc)
     * @see org.apache.commons.jcs.auxiliary.AuxiliaryCache#update(org.apache.commons.jcs.engine.behavior.ICacheElement)
     */
    public void update( ICacheElement<K, V> ce )
    {
        updateList.add( ce );
    }

    /**
     * Looks in the getSetupMap for a value.
     * <p>
     * (non-Javadoc)
     * @see org.apache.commons.jcs.auxiliary.AuxiliaryCache#get(java.io.Serializable)
     */
    public ICacheElement<K, V> get( K key )
    {
        log.info( "get [" + key + "]" );
        return getSetupMap.get( key );
    }

    /**
     * Gets multiple items from the cache based on the given set of keys.
     * <p>
     * @param keys
     * @return a map of K key to ICacheElement&lt;K, V&gt; element, or an empty map if there is no
     *         data in cache for any of these keys
     */
    public Map<K, ICacheElement<K, V>> getMultiple(Set<K> keys)
    {
        log.info( "get [" + keys + "]" );
        // Note: returns null (not an empty map) when the key set was never registered
        // in getMultipleSetupMap.
        return getMultipleSetupMap.get( keys );
    }

    /**
     * Adds the key to the remove list.
     * <p>
     * (non-Javadoc)
     * @see org.apache.commons.jcs.auxiliary.AuxiliaryCache#remove(java.io.Serializable)
     */
    public boolean remove( K key )
    {
        removeList.add( key );
        return false;
    }

    /**
     * Removes all cached items from the cache.
     */
    public void removeAll()
    {
        // do nothing
    }

    /**
     * Prepares for shutdown.
     */
    public void dispose()
    {
        // do nothing
    }

    /**
     * Returns the current cache size in number of elements.
     * <p>
     * @return number of elements - always 0 for this mock
     */
    public int getSize()
    {
        return 0;
    }

    /**
     * Returns the status setup variable. (non-Javadoc)
     * @see org.apache.commons.jcs.auxiliary.AuxiliaryCache#getStatus()
     */
    public CacheStatus getStatus()
    {
        return status;
    }

    /**
     * Returns the cache name.
     * <p>
     * @return usually the region name; always null for this mock.
     */
    public String getCacheName()
    {
        return null;
    }

    /**
     * @return null
     */
    public Set<K> getKeySet( )
    {
        return null;
    }

    /**
     * @return null
     */
    public IStats getStatistics()
    {
        return null;
    }

    /**
     * Returns the setup attributes. By default they are not null.
     * <p>
     * (non-Javadoc)
     * @see org.apache.commons.jcs.auxiliary.AuxiliaryCache#getAuxiliaryCacheAttributes()
     */
    public AuxiliaryCacheAttributes getAuxiliaryCacheAttributes()
    {
        return attributes;
    }

    /**
     * Returns the cache stats.
     * <p>
     * @return String of important historical information; always null for this mock.
     */
    public String getStats()
    {
        return null;
    }

    /** @return CacheType.REMOTE_CACHE */
    public CacheType getCacheType()
    {
        return CacheType.REMOTE_CACHE;
    }

    /**
     * @param pattern
     * @return Map - always a fresh empty map for this mock
     * @throws IOException
     */
    public Map<K, ICacheElement<K, V>> getMatching(String pattern)
        throws IOException
    {
        return new HashMap<K, ICacheElement<K,V>>();
    }

    /**
     * Nothing important
     * <p>
     * @return null
     */
    @Override
    public String getEventLoggingExtraInfo()
    {
        return null;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.snapshots.mockstore;

import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.DeleteResult;
import org.elasticsearch.common.blobstore.support.PlainBlobMetadata;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
import org.elasticsearch.snapshots.SnapshotInfo;
import org.elasticsearch.test.ESTestCase;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;

/**
 * Mock Repository that allows testing the eventually consistent behaviour of AWS S3 as documented in the
 * <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel">AWS S3 docs</a>.
 * Currently, the repository asserts that no inconsistent reads are made.
 * All blob operations are recorded as an append-only action log in the shared {@link Context};
 * reads replay that log to decide whether a consistent answer exists.
 * TODO: Resolve todos on list and overwrite operation consistency to fully cover S3's behavior.
 */
public class MockEventuallyConsistentRepository extends BlobStoreRepository {

    // Source of randomness for simulating eventually-consistent listings.
    private final Random random;

    // Shared action log; must be the same instance across all repository instances in a test run.
    private final Context context;

    private final NamedXContentRegistry namedXContentRegistry;

    /**
     * @param metadata              repository metadata
     * @param namedXContentRegistry registry used to deserialize SnapshotInfo blobs for overwrite checks
     * @param clusterService        cluster service passed through to the base repository
     * @param recoverySettings      recovery settings passed through to the base repository
     * @param context               shared action-log context (see {@link Context})
     * @param random                randomness for simulating inconsistent listings
     */
    public MockEventuallyConsistentRepository(
        final RepositoryMetadata metadata,
        final NamedXContentRegistry namedXContentRegistry,
        final ClusterService clusterService,
        final RecoverySettings recoverySettings,
        final Context context,
        final Random random) {
        super(metadata, namedXContentRegistry, clusterService, MockBigArrays.NON_RECYCLING_INSTANCE, recoverySettings, BlobPath.EMPTY);
        this.context = context;
        this.namedXContentRegistry = namedXContentRegistry;
        this.random = random;
    }

    // Filters out all actions that are super-seeded by subsequent actions, i.e. reduces the
    // action log to the set of blobs that currently exist (last PUT wins, DELETE removes).
    // TODO: Remove all usages of this method, snapshots should not depend on consistent list operations
    private static List<BlobStoreAction> consistentView(List<BlobStoreAction> actions) {
        final Map<String, BlobStoreAction> lastActions = new HashMap<>();
        for (BlobStoreAction action : actions) {
            if (action.operation == Operation.PUT) {
                lastActions.put(action.path, action);
            } else if (action.operation == Operation.DELETE) {
                lastActions.remove(action.path);
            }
        }
        return List.copyOf(lastActions.values());
    }

    @Override
    protected void assertSnapshotOrGenericThread() {
        // eliminate thread name check as we create repo in the test thread
    }

    @Override
    protected BlobStore createBlobStore() {
        return new MockBlobStore();
    }

    /**
     * Context that must be shared between all instances of {@link MockEventuallyConsistentRepository} in a test run.
     */
    public static final class Context {

        // Eventual consistency is only simulated as long as this flag is false
        private boolean consistent;

        // Append-only log of all blob operations; also serves as the lock for all access to it.
        private final List<BlobStoreAction> actions = new ArrayList<>();

        /**
         * Force the repository into a consistent end state so that its eventual state can be examined.
         */
        public void forceConsistent() {
            synchronized (actions) {
                final List<BlobStoreAction> consistentActions = consistentView(actions);
                actions.clear();
                actions.addAll(consistentActions);
                consistent = true;
            }
        }
    }

    private enum Operation {
        PUT, GET, DELETE
    }

    /** One recorded blob operation. {@code data} is only non-null for PUT. */
    private static final class BlobStoreAction {

        private final Operation operation;

        @Nullable
        private final byte[] data;

        private final String path;

        private BlobStoreAction(Operation operation, String path, byte[] data) {
            this.operation = operation;
            this.path = path;
            this.data = data;
        }

        private BlobStoreAction(Operation operation, String path) {
            this(operation, path, null);
        }
    }

    private class MockBlobStore implements BlobStore {

        private final AtomicBoolean closed = new AtomicBoolean(false);

        @Override
        public BlobContainer blobContainer(BlobPath path) {
            return new MockBlobContainer(path);
        }

        @Override
        public void close() {
            closed.set(true);
        }

        private void ensureNotClosed() {
            if (closed.get()) {
                throw new AssertionError("Blobstore is closed already");
            }
        }

        private class MockBlobContainer implements BlobContainer {

            private final BlobPath path;

            MockBlobContainer(BlobPath path) {
                this.path = path;
            }

            @Override
            public BlobPath path() {
                return path;
            }

            @Override
            public boolean blobExists(String blobName) {
                // Implemented in terms of readBlob so existence checks obey the same
                // consistency assertions as reads.
                try {
                    readBlob(blobName);
                    return true;
                } catch (NoSuchFileException ignored) {
                    return false;
                }
            }

            @Override
            public InputStream readBlob(String name) throws NoSuchFileException {
                ensureNotClosed();
                final String blobPath = path.buildAsString() + name;
                synchronized (context.actions) {
                    final List<BlobStoreAction> relevantActions = relevantActions(blobPath);
                    context.actions.add(new BlobStoreAction(Operation.GET, blobPath));
                    if (relevantActions.stream().noneMatch(a -> a.operation == Operation.PUT)) {
                        throw new NoSuchFileException(blobPath);
                    }
                    if (relevantActions.size() == 1 && relevantActions.get(0).operation == Operation.PUT) {
                        // Consistent read after write
                        return new ByteArrayInputStream(relevantActions.get(0).data);
                    }
                    // More than one surviving action for this path: the read is ambiguous.
                    throw new AssertionError("Inconsistent read on [" + blobPath + ']');
                }
            }

            @Override
            public InputStream readBlob(String blobName, long position, long length) throws IOException {
                final InputStream stream = readBlob(blobName);
                if (position > 0) {
                    // NOTE(review): InputStream.skip may skip fewer bytes than requested and its
                    // return value is ignored here; fine for the ByteArrayInputStream returned
                    // above, but not for a general stream.
                    stream.skip(position);
                }
                return Streams.limitStream(stream, length);
            }

            // Returns all actions for blobPath, with any trailing GETs stripped off
            // (a GET after the last write does not affect consistency of the next read).
            private List<BlobStoreAction> relevantActions(String blobPath) {
                assert Thread.holdsLock(context.actions);
                final List<BlobStoreAction> relevantActions = new ArrayList<>(
                    context.actions.stream().filter(action -> blobPath.equals(action.path)).collect(Collectors.toList()));
                for (int i = relevantActions.size() - 1; i > 0; i--) {
                    if (relevantActions.get(i).operation == Operation.GET) {
                        relevantActions.remove(i);
                    } else {
                        break;
                    }
                }
                return relevantActions;
            }

            @Override
            public void deleteBlobsIgnoringIfNotExists(Iterator<String> blobNames) {
                ensureNotClosed();
                synchronized (context.actions) {
                    blobNames.forEachRemaining(blobName ->
                        context.actions.add(new BlobStoreAction(Operation.DELETE, path.buildAsString() + blobName)));
                }
            }

            @Override
            public DeleteResult delete() {
                ensureNotClosed();
                final String thisPath = path.buildAsString();
                final AtomicLong bytesDeleted = new AtomicLong(0L);
                final AtomicLong blobsDeleted = new AtomicLong(0L);
                synchronized (context.actions) {
                    // consistentView only yields PUT actions, so a.data is non-null below.
                    consistentView(context.actions).stream().filter(action -> action.path.startsWith(thisPath))
                        .forEach(a -> {
                            context.actions.add(new BlobStoreAction(Operation.DELETE, a.path));
                            bytesDeleted.addAndGet(a.data.length);
                            blobsDeleted.incrementAndGet();
                        });
                }
                return new DeleteResult(blobsDeleted.get(), bytesDeleted.get());
            }

            @Override
            public Map<String, BlobMetadata> listBlobs() {
                ensureNotClosed();
                final String thisPath = path.buildAsString();
                synchronized (context.actions) {
                    // Direct children only: path below this container with no further '/'.
                    return maybeMissLatestIndexN(consistentView(context.actions).stream()
                        .filter(
                            action -> action.path.startsWith(thisPath)
                                && action.path.substring(thisPath.length()).indexOf('/') == -1
                                && action.operation == Operation.PUT)
                        .collect(
                            Collectors.toMap(
                                action -> action.path.substring(thisPath.length()),
                                action -> new PlainBlobMetadata(action.path.substring(thisPath.length()), action.data.length))));
                }
            }

            @Override
            public Map<String, BlobContainer> children() {
                ensureNotClosed();
                final String thisPath = path.buildAsString();
                synchronized (context.actions) {
                    // Child containers: first path segment below this container that has more path after it.
                    return consistentView(context.actions).stream()
                        .filter(action ->
                            action.operation == Operation.PUT
                                && action.path.startsWith(thisPath)
                                && action.path.substring(thisPath.length()).indexOf('/') != -1)
                        .map(action -> action.path.substring(thisPath.length()).split("/")[0])
                        .distinct()
                        .collect(Collectors.toMap(Function.identity(), name -> new MockBlobContainer(path.add(name))));
                }
            }

            @Override
            public Map<String, BlobMetadata> listBlobsByPrefix(String blobNamePrefix) {
                return maybeMissLatestIndexN(
                    Maps.ofEntries(listBlobs().entrySet().stream().filter(entry -> entry.getKey().startsWith(blobNamePrefix))
                        .collect(Collectors.toList())));
            }

            // Randomly filter out the index-N blobs from a listing to test that tracking of it in latestKnownRepoGen and the cluster state
            // ensures consistent repository operations
            private Map<String, BlobMetadata> maybeMissLatestIndexN(Map<String, BlobMetadata> listing) {
                // Randomly filter out index-N blobs at the repo root to proof that we don't need them to be consistently listed
                if (path.parent() == null && context.consistent == false) {
                    final Map<String, BlobMetadata> filtered = new HashMap<>(listing);
                    filtered.keySet().removeIf(b -> b.startsWith(BlobStoreRepository.INDEX_FILE_PREFIX) && random.nextBoolean());
                    return Map.copyOf(filtered);
                }
                return listing;
            }

            @Override
            public void writeBlob(String blobName,
                                  InputStream inputStream,
                                  long blobSize,
                                  boolean failIfAlreadyExists) throws IOException {
                ensureNotClosed();
                assert blobSize < Integer.MAX_VALUE;
                final byte[] data = new byte[(int) blobSize];
                // NOTE(review): a single read() is assumed to fill the buffer; holds for the
                // in-memory streams used in tests, asserted below.
                final int read = inputStream.read(data);
                assert read == data.length;
                final String blobPath = path.buildAsString() + blobName;
                synchronized (context.actions) {
                    final List<BlobStoreAction> relevantActions = relevantActions(blobPath);
                    // We do some checks in case there is a consistent state for a blob to prevent turning it inconsistent.
                    final boolean hasConsistentContent =
                        relevantActions.size() == 1 && relevantActions.get(0).operation == Operation.PUT;
                    if (BlobStoreRepository.INDEX_LATEST_BLOB.equals(blobName)
                        || blobName.startsWith(BlobStoreRepository.METADATA_PREFIX)) {
                        // TODO: Ensure that it is impossible to ever decrement the generation id stored in index.latest then assert that
                        //       it never decrements here. Same goes for the metadata, ensure that we never overwrite newer with older
                        //       metadata.
                    } else if (blobName.startsWith(BlobStoreRepository.SNAPSHOT_PREFIX)) {
                        if (hasConsistentContent) {
                            if (basePath().buildAsString().equals(path().buildAsString())) {
                                // Repo-root snap- blob: an overwrite is tolerated only when the new
                                // SnapshotInfo differs from the existing one in timestamps alone.
                                try {
                                    final SnapshotInfo updatedInfo = BlobStoreRepository.SNAPSHOT_FORMAT.deserialize(
                                        blobName, namedXContentRegistry, new BytesArray(data));
                                    // If the existing snapshotInfo differs only in the timestamps it stores, then the overwrite is not
                                    // a problem and could be the result of a correctly handled master failover.
                                    final SnapshotInfo existingInfo = SNAPSHOT_FORMAT.deserialize(
                                        blobName, namedXContentRegistry, Streams.readFully(readBlob(blobName)));
                                    assertThat(existingInfo.snapshotId(), equalTo(updatedInfo.snapshotId()));
                                    assertThat(existingInfo.reason(), equalTo(updatedInfo.reason()));
                                    assertThat(existingInfo.state(), equalTo(updatedInfo.state()));
                                    assertThat(existingInfo.totalShards(), equalTo(updatedInfo.totalShards()));
                                    assertThat(existingInfo.successfulShards(), equalTo(updatedInfo.successfulShards()));
                                    assertThat(
                                        existingInfo.shardFailures(), containsInAnyOrder(updatedInfo.shardFailures().toArray()));
                                    assertThat(existingInfo.indices(), equalTo(updatedInfo.indices()));
                                    return; // No need to add a write for this since we didn't change content
                                } catch (Exception e) {
                                    // Rethrow as AssertionError here since kind exception might otherwise be swallowed and logged by
                                    // the blob store repository.
                                    // Since we are not doing any actual IO we don't expect this to throw ever and an exception would
                                    // signal broken SnapshotInfo bytes or unexpected behavior of SnapshotInfo otherwise.
                                    throw new AssertionError("Failed to deserialize SnapshotInfo", e);
                                }
                            } else {
                                // Primaries never retry so any shard level snap- blob retry/overwrite even with the same content is
                                // not expected.
                                throw new AssertionError("Shard level snap-{uuid} blobs should never be overwritten");
                            }
                        }
                    } else {
                        if (hasConsistentContent) {
                            // Any other blob may only be "overwritten" with identical bytes.
                            ESTestCase.assertArrayEquals("Tried to overwrite blob [" + blobName + "]",
                                relevantActions.get(0).data, data);
                            return; // No need to add a write for this since we didn't change content
                        }
                    }
                    context.actions.add(new BlobStoreAction(Operation.PUT, blobPath, data));
                }
            }

            @Override
            public void writeBlobAtomic(final String blobName, final BytesReference bytes,
                                        final boolean failIfAlreadyExists) throws IOException {
                // Atomicity is trivially satisfied by the synchronized action log.
                writeBlob(blobName, bytes, failIfAlreadyExists);
            }
        }
    }
}
package org.nesty.core.server.rest.controller;

import org.nesty.commons.annotations.Header;
import org.nesty.commons.annotations.PathVariable;
import org.nesty.commons.annotations.RequestBody;
import org.nesty.commons.annotations.RequestParam;
import org.nesty.commons.exception.ControllerParamsNotMatchException;
import org.nesty.commons.exception.ControllerParamsParsedException;
import org.nesty.commons.exception.SerializeException;
import org.nesty.commons.utils.SerializeUtils;
import org.nesty.core.server.rest.HttpContext;
import org.nesty.core.server.rest.HttpSession;
import org.nesty.core.server.rest.URLResource;
import org.nesty.core.server.rest.controller.ControllerMethodDescriptor.MethodParams.AnnotationType;

import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

/**
 * controller method descriptor include annotation params.
 * Inspects a controller method once at registration time (constructor) so that
 * per-request dispatch ({@link #invoke}) only has to resolve values, not annotations.
 * <p>
 * Author : Michael
 * Date : March 07, 2016
 */
public class ControllerMethodDescriptor {

    // method
    private Method method;

    // method param include param class type and param annotation
    private MethodParams[] params;

    // target instance
    private Object target;

    /**
     * Builds the per-parameter dispatch table for {@code method}.
     *
     * @param URI    the route pattern; used to locate the positional index of each
     *               {@code @PathVariable} segment
     * @param clazz  descriptor of the controller class; instantiated once here and reused
     * @param method the controller method to describe
     * @throws IllegalArgumentException when a {@code @PathVariable} name does not occur in the
     *                                  URI, or a parameter carries an unsupported annotation
     */
    public ControllerMethodDescriptor(String URI, ControllerClassDescriptor clazz, Method method) {
        this.method = method;
        // NOTE(review): key and value are never used in this constructor.
        String key, value;
        Annotation[][] annotations = method.getParameterAnnotations();
        Class<?>[] paramsTypes = method.getParameterTypes();
        int total = paramsTypes.length;
        this.params = new MethodParams[total];

        // TODO : ugly code here !! messy
        //
        for (int i = 0; i != total; i++) {
            if (paramsTypes[i] == HttpSession.class) {
                // HttpSession parameters are injected from the request context, no annotation needed.
                params[i] = new MethodParams(null, HttpSession.class);
                params[i].annotationType = AnnotationType.HTTP_SESSION;
            } else {
                // All other parameters must carry exactly one supported annotation
                // (only the first annotation is inspected).
                params[i] = new MethodParams(annotations[i][0], paramsTypes[i]);
                if (params[i].annotation instanceof Header) {
                    params[i].annotationType = AnnotationType.HEADER;
                } else if (params[i].annotation instanceof RequestParam) {
                    params[i].annotationType = AnnotationType.REQUEST_PARAM;
                } else if (params[i].annotation instanceof RequestBody) {
                    params[i].annotationType = AnnotationType.REQUEST_BODY;
                } else if (params[i].annotation instanceof PathVariable) {
                    params[i].annotationType = AnnotationType.PATH_VARIABLE;
                    String name = ((PathVariable) params[i].annotation).value();
                    // findout the index of correspond variable
                    String[] pathVariable = URI.split("/");
                    int index = 0;
                    for (String path : pathVariable) {
                        if (path == null || path.isEmpty())
                            continue;
                        // A variable segment looks like "{name}": starts with the VARIABLE
                        // marker and is long enough to hold a non-empty name.
                        if (path.charAt(0) == URLResource.VARIABLE && path.length() > 2) {
                            String varName = path.substring(1, path.length() - 1);
                            if (varName.equals(name))
                                params[i].urlPathIndex = index;
                        }
                        index++;
                    }
                    if (params[i].urlPathIndex == -1)
                        throw new IllegalArgumentException(String.format("%s[%s] is not found around %s()",
                                PathVariable.class.getSimpleName(), name, method.getName()));
                } else {
                    // TODO : throw runtime Exception ?
                    throw new IllegalArgumentException("unknown annotation " + params[i].annotation.annotationType().getName());
                }
            }
        }

        try {
            // One controller instance is created eagerly and shared across all invocations.
            target = clazz.getClazz().newInstance();
        } catch (InstantiationException | IllegalAccessException ignored) {
            // NOTE(review): a failed instantiation leaves target == null and only prints the
            // trace; invoke() would then NPE -- presumably controllers always have an
            // accessible no-arg constructor. TODO confirm.
            ignored.printStackTrace();
        }
    }

    /**
     * Invokes the described controller method with values resolved from {@code context}.
     *
     * @param context the current HTTP request context
     * @return whatever the controller method returns
     * @throws ControllerParamsNotMatchException when a required parameter is missing or invocation fails
     * @throws ControllerParamsParsedException   when a parameter value cannot be parsed
     */
    public Object invoke(HttpContext context) throws ControllerParamsNotMatchException, ControllerParamsParsedException {
        try {
            if (params != null)
                return method.invoke(target, resolveParams(context));
            else
                return method.invoke(target);
        } catch (IllegalAccessException | InvocationTargetException e) {
            /**
             * TODO : we suppose there is no exception on newInstance() and invoke()
             *
             */
            e.printStackTrace();
            // NOTE(review): wrapping only e.getMessage() discards the original stack trace /
            // cause; consider new RuntimeException(e) instead.
            if (e instanceof InvocationTargetException)
                throw new RuntimeException(e.getMessage());
            else
                throw new ControllerParamsNotMatchException(String.format("invoke controller %s() occur exception %s",
                        method.getName(), e.getMessage()));
        }
    }

    /**
     * Resolves one argument per method parameter from headers, query params, body, path
     * variables, or the session, honoring each annotation's {@code required} flag.
     *
     * @throws ControllerParamsNotMatchException when a required value is absent
     * @throws ControllerParamsParsedException   when a value cannot be converted to the parameter type
     */
    private Object[] resolveParams(HttpContext context) throws ControllerParamsNotMatchException, ControllerParamsParsedException {
        Object[] paramList = new Object[params.length];
        String value = null;

        // iterate whole method params
        for (int i = 0; i != paramList.length; i++) {
            boolean required = true;   // may be cleared by a non-required annotation below
            boolean auto = false;      // true for values injected from the context (HttpSession)
            boolean serialize = false; // true when the body must be deserialized into the type

            switch (params[i].annotationType) {
            case HTTP_SESSION:
                auto = true;
                break;
            case HEADER:
                Header header = (Header) params[i].annotation;
                value = context.getHttpHeaders().get(header.value());
                // only if required is false
                if (value == null && !header.required())
                    required = false;
                break;
            case REQUEST_PARAM:
                RequestParam reqParam = (RequestParam) params[i].annotation;
                value = context.getHttpParams().get(reqParam.value());
                // only if required is false
                if (value == null && !reqParam.required())
                    required = false;
                break;
            case REQUEST_BODY:
                value = context.getHttpBody();
                // we pass value directly on parameter's type is String.class
                if (params[i].clazz != String.class)
                    serialize = true;
                break;
            case PATH_VARIABLE:
                // NOTE(review): pathParam is assigned but not otherwise used in this branch.
                PathVariable pathParam = (PathVariable) params[i].annotation;
                if (params[i].urlPathIndex < context.getTerms().length)
                    value = context.getTerms()[params[i].urlPathIndex];
                break;
            }

            if ((value == null || value.isEmpty()) && required && !auto)
                throw new ControllerParamsNotMatchException(String.format("resolve %s failed",
                        params[i].annotation.annotationType().getName()));

            try {
                paramList[i] = parseParam(params[i].clazz, value, serialize, context);
            } catch (NumberFormatException | SerializeException e) {
                throw new ControllerParamsParsedException(String.format("parse param exception %s", e.getMessage()));
            }
        }
        return paramList;
    }

    /**
     * Converts a raw string value to the declared parameter type.
     *
     * @param clazz     target parameter type
     * @param value     raw value (may be null for optional parameters)
     * @param serialize when true, treat value as a serialized body and decode it into clazz
     * @param context   injected for HttpSession-typed parameters
     * @return the converted value, or the documented per-type default when value is null
     * @throws SerializeException when body deserialization fails
     */
    private Object parseParam(Class<?> clazz, String value, boolean serialize, HttpContext context) throws SerializeException {
        // need body serialize parsed
        if (serialize) {
            return value != null ? SerializeUtils.decode(value, clazz) : null;
        }

        // enum
        if (clazz.isEnum()) {
            // traversal all enum constants. UNNECESSARY test value is null
            for (Object member : clazz.getEnumConstants())
                if (member.toString().equalsIgnoreCase(value))
                    return member;
            return null;
        }

        // HttpSession inject
        if (clazz == HttpSession.class) {
            // HttpSession is the super class of HttpContext
            return context;
        }

        // default value
        //
        // String ..................... null
        // int/short/long ........... 0
        // float/double ............. 0
        // boolean ................... false
        // Integer/Long/Short .... null
        // Boolean ................... null
        //
        // NOTE(review): contrary to the table above, the boxed types below also get 0/false
        // (not null) when value is null, and Boolean.parseBoolean(null) is simply false.
        if (clazz == String.class) {
            return value;
        } else if (clazz == int.class || clazz == Integer.class) {
            return value != null ? Integer.parseInt(value) : 0;
        } else if (clazz == short.class || clazz == Short.class) {
            return value != null ? Short.parseShort(value) : (short) 0;
        } else if (clazz == long.class || clazz == Long.class) {
            return value != null ? Long.parseLong(value) : 0L;
        } else if (clazz == float.class || clazz == Float.class) {
            return value != null ? Float.parseFloat(value) : 0.0f;
        } else if (clazz == double.class || clazz == Double.class) {
            return value != null ? Double.parseDouble(value) : 0.0d;
        } else if (clazz == boolean.class || clazz == Boolean.class) {
            // without test null
            return Boolean.parseBoolean(value);
        }

        return null;
    }

    /** @return the underlying reflective method */
    public Method getMethod() {
        return method;
    }

    /** Per-parameter metadata captured at registration time. */
    static class MethodParams {

        // annotation instance
        public Annotation annotation;

        // annotation type. this is used for less <istanceof> operation
        public AnnotationType annotationType;

        // param class type
        public Class<?> clazz;

        // used for PathVariable type param. record its variable index
        public int urlPathIndex = -1;

        public MethodParams(Annotation annotation, Class<?> clazz) {
            this.annotation = annotation;
            this.clazz = clazz;
        }

        enum AnnotationType {
            REQUEST_PARAM, REQUEST_BODY, PATH_VARIABLE, HEADER, HTTP_SESSION
        }
    }
}
/* Copyright 2009 Wallace Wadge This file is part of BoneCP. BoneCP is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. BoneCP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with BoneCP. If not, see <http://www.gnu.org/licenses/>. */ package com.jolbox.bonecp; import static org.easymock.EasyMock.anyLong; import static org.easymock.EasyMock.anyObject; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.expectLastCall; import static org.easymock.classextension.EasyMock.createNiceMock; import static org.easymock.classextension.EasyMock.makeThreadSafe; import static org.easymock.classextension.EasyMock.replay; import static org.easymock.classextension.EasyMock.reset; import static org.easymock.classextension.EasyMock.verify; import java.lang.reflect.Field; import java.sql.SQLException; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ScheduledExecutorService; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; /** * Test for connection thread tester * @author wwadge * */ public class TestConnectionThreadTester { /** Mock handle. */ private static BoneCP mockPool; /** Mock handle. */ private static ConnectionPartition mockConnectionPartition; /** Mock handle. */ private static ScheduledExecutorService mockExecutor; /** Test class handle. */ private ConnectionTesterThread testClass; /** Mock handle. */ private static ConnectionHandle mockConnection; /** Mock handle. */ private static BoneCPConfig config; /** Mock handle. 
*/ private static Logger mockLogger; /** Mock setup. * @throws ClassNotFoundException */ @BeforeClass public static void setup() throws ClassNotFoundException{ mockPool = createNiceMock(BoneCP.class); mockConnectionPartition = createNiceMock(ConnectionPartition.class); mockExecutor = createNiceMock(ScheduledExecutorService.class); mockConnection = createNiceMock(ConnectionHandle.class); mockLogger = createNiceMock(Logger.class); makeThreadSafe(mockLogger, true); config = new BoneCPConfig(); config.setIdleMaxAge(100); config.setIdleConnectionTestPeriod(100); } /** * Reset all mocks. */ @Before public void resetMocks(){ reset(mockPool, mockConnectionPartition, mockExecutor, mockConnection, mockLogger); } /** Tests that a connection that is marked broken is closed internally and that the partition is marked as being * able to create new connections. * @throws SQLException */ @Test public void testConnectionMarkedBroken() throws SQLException { ArrayBlockingQueue<ConnectionHandle> fakeFreeConnections = new ArrayBlockingQueue<ConnectionHandle>(1); fakeFreeConnections.add(mockConnection); expect(mockPool.getConfig()).andReturn(config).anyTimes(); expect(mockConnectionPartition.getFreeConnections()).andReturn(fakeFreeConnections).anyTimes(); expect(mockConnection.isPossiblyBroken()).andReturn(true); // connection should be closed mockConnection.internalClose(); mockPool.postDestroyConnection(mockConnection); expectLastCall().once(); replay(mockPool, mockConnection, mockConnectionPartition, mockExecutor); this.testClass = new ConnectionTesterThread(mockConnectionPartition, mockExecutor, mockPool); this.testClass.run(); verify(mockPool, mockConnectionPartition, mockExecutor, mockConnection); } /** Tests that a connection that has been idle for more than the set time is closed off. 
* @throws SQLException */ @Test public void testIdleConnectionIsKilled() throws SQLException { ArrayBlockingQueue<ConnectionHandle> fakeFreeConnections = new ArrayBlockingQueue<ConnectionHandle>(2); fakeFreeConnections.add(mockConnection); fakeFreeConnections.add(mockConnection); expect(mockPool.getConfig()).andReturn(config).anyTimes(); expect(mockConnectionPartition.getFreeConnections()).andReturn(fakeFreeConnections).anyTimes(); expect(mockConnectionPartition.getMinConnections()).andReturn(0).once(); expect(mockConnection.isPossiblyBroken()).andReturn(false); expect(mockConnection.getConnectionLastUsed()).andReturn(0L); // connection should be closed mockConnection.internalClose(); mockPool.postDestroyConnection(mockConnection); expectLastCall().once(); replay(mockPool, mockConnection, mockConnectionPartition, mockExecutor); this.testClass = new ConnectionTesterThread(mockConnectionPartition, mockExecutor, mockPool); this.testClass.run(); verify(mockPool, mockConnectionPartition, mockExecutor, mockConnection); } /** Tests that a connection gets to receive a keep-alive. 
* @throws SQLException * @throws InterruptedException */ @Test public void testIdleConnectionIsSentKeepAlive() throws SQLException, InterruptedException { ArrayBlockingQueue<ConnectionHandle> fakeFreeConnections = new ArrayBlockingQueue<ConnectionHandle>(1); fakeFreeConnections.add(mockConnection); config.setIdleConnectionTestPeriod(1); expect(mockPool.getConfig()).andReturn(config).anyTimes(); expect(mockConnectionPartition.getFreeConnections()).andReturn(fakeFreeConnections).anyTimes(); expect(mockConnectionPartition.getMinConnections()).andReturn(10).once(); expect(mockConnection.isPossiblyBroken()).andReturn(false); expect(mockConnection.getConnectionLastUsed()).andReturn(0L); expect(mockPool.isConnectionHandleAlive((ConnectionHandle)anyObject())).andReturn(true).anyTimes(); mockPool.putConnectionBackInPartition((ConnectionHandle)anyObject()); // connection should be closed mockConnection.setConnectionLastReset(anyLong()); replay(mockPool, mockConnection, mockConnectionPartition, mockExecutor); this.testClass = new ConnectionTesterThread(mockConnectionPartition, mockExecutor, mockPool); this.testClass.run(); verify(mockPool, mockConnectionPartition, mockExecutor, mockConnection); } /** Tests that an active connection that fails the connection is alive test will get closed. 
 * @throws SQLException
 * @throws InterruptedException
 */
@Test
public void testIdleConnectionFailedKeepAlive() throws SQLException, InterruptedException {
	ArrayBlockingQueue<ConnectionHandle> fakeFreeConnections = new ArrayBlockingQueue<ConnectionHandle>(1);
	fakeFreeConnections.add(mockConnection);
	config.setIdleConnectionTestPeriod(1);

	expect(mockPool.getConfig()).andReturn(config).anyTimes();
	expect(mockConnectionPartition.getFreeConnections()).andReturn(fakeFreeConnections).anyTimes();
	expect(mockConnectionPartition.getMinConnections()).andReturn(10).once();
	expect(mockConnection.isPossiblyBroken()).andReturn(false);
	expect(mockConnection.getConnectionLastUsed()).andReturn(0L);
	// the liveness probe fails -> the tester must destroy the handle
	expect(mockPool.isConnectionHandleAlive((ConnectionHandle)anyObject())).andReturn(false).anyTimes();

	// connection should be closed
	mockConnection.internalClose();
	mockPool.postDestroyConnection(mockConnection);
	expectLastCall().once();

	replay(mockPool, mockConnection, mockConnectionPartition, mockExecutor);
	this.testClass = new ConnectionTesterThread(mockConnectionPartition, mockExecutor, mockPool);
	this.testClass.run();
	verify(mockPool, mockConnectionPartition, mockExecutor, mockConnection);
}

/** Tests fake exceptions, connection should be shutdown if the scheduler was marked as going down. Mostly for code coverage.
 * @throws SQLException
 * @throws InterruptedException
 */
@Test
public void testInterruptedException() throws SQLException, InterruptedException {
	ArrayBlockingQueue<ConnectionHandle> fakeFreeConnections = new ArrayBlockingQueue<ConnectionHandle>(1);
	fakeFreeConnections.add(mockConnection);
	config.setIdleConnectionTestPeriod(1);

	expect(mockPool.getConfig()).andReturn(config).anyTimes();
	expect(mockConnectionPartition.getFreeConnections()).andReturn(fakeFreeConnections).anyTimes();
	expect(mockConnectionPartition.getMinConnections()).andReturn(10).once();
	expect(mockConnection.isPossiblyBroken()).andReturn(false);
	expect(mockConnection.getConnectionLastUsed()).andReturn(0L);
	expect(mockPool.isConnectionHandleAlive((ConnectionHandle)anyObject())).andReturn(true).anyTimes();
	// executor reports shutdown, and putting the connection back is interrupted:
	// the tester is expected to treat this as "going down" and close the handle
	expect(mockExecutor.isShutdown()).andReturn(true);
	mockPool.putConnectionBackInPartition((ConnectionHandle)anyObject());
	expectLastCall().andThrow(new InterruptedException());

	// connection should be closed
	mockConnection.internalClose();
	mockPool.postDestroyConnection(mockConnection);
	expectLastCall().once();

	replay(mockPool, mockConnection, mockConnectionPartition, mockExecutor);
	this.testClass = new ConnectionTesterThread(mockConnectionPartition, mockExecutor, mockPool);
	this.testClass.run();
	verify(mockPool, mockConnectionPartition, mockExecutor, mockConnection);
}

/** Tests fake exceptions, connection should be shutdown if the scheduler was marked as going down. Same test except just used
 * to check for a spurious interrupted exception (should be logged).
 * @throws SQLException
 * @throws InterruptedException
 * @throws NoSuchFieldException
 * @throws SecurityException
 * @throws IllegalAccessException
 * @throws IllegalArgumentException
 */
@Test
public void testExceptionSpurious() throws SQLException, InterruptedException, SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
	ArrayBlockingQueue<ConnectionHandle> fakeFreeConnections = new ArrayBlockingQueue<ConnectionHandle>(1);
	fakeFreeConnections.add(mockConnection);
	config.setIdleConnectionTestPeriod(1);

	expect(mockPool.getConfig()).andReturn(config).anyTimes();
	expect(mockConnectionPartition.getFreeConnections()).andReturn(fakeFreeConnections).anyTimes();
	expect(mockConnectionPartition.getMinConnections()).andReturn(10).once();
	expect(mockConnection.isPossiblyBroken()).andReturn(false);
	expect(mockConnection.getConnectionLastUsed()).andReturn(0L);
	expect(mockPool.isConnectionHandleAlive((ConnectionHandle)anyObject())).andReturn(true).anyTimes();
	// executor NOT shut down, yet an InterruptedException arrives -> spurious
	// interrupt; the tester is expected to log it instead of destroying the handle
	expect(mockExecutor.isShutdown()).andReturn(false);
	mockPool.putConnectionBackInPartition((ConnectionHandle)anyObject());
	expectLastCall().andThrow(new InterruptedException());
	mockLogger.error((String)anyObject(), (Exception)anyObject());

	replay(mockPool, mockConnection, mockConnectionPartition, mockExecutor, mockLogger);
	this.testClass = new ConnectionTesterThread(mockConnectionPartition, mockExecutor, mockPool);
	// inject the mock logger into the private static/instance "logger" field via reflection
	Field loggerField = this.testClass.getClass().getDeclaredField("logger");
	loggerField.setAccessible(true);
	loggerField.set(this.testClass, mockLogger);
	this.testClass.run();
	verify(mockPool, mockConnectionPartition, mockExecutor, mockConnection, mockLogger);
}

/** Tests fake exceptions, connection should be shutdown if the scheduler was marked as going down. Same test except just used
 * to check for a spurious interrupted exception (should be logged).
 * @throws SQLException
 * @throws InterruptedException
 * @throws NoSuchFieldException
 * @throws SecurityException
 * @throws IllegalAccessException
 * @throws IllegalArgumentException
 */
@Test
public void testExceptionOnCloseConnection() throws SQLException, InterruptedException, SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
	ArrayBlockingQueue<ConnectionHandle> fakeFreeConnections = new ArrayBlockingQueue<ConnectionHandle>(1);
	fakeFreeConnections.add(mockConnection);
	config.setIdleConnectionTestPeriod(1);

	expect(mockPool.getConfig()).andReturn(config).anyTimes();
	expect(mockConnectionPartition.getFreeConnections()).andReturn(fakeFreeConnections).anyTimes();
	expect(mockConnectionPartition.getMinConnections()).andReturn(10).once();
	expect(mockConnection.isPossiblyBroken()).andReturn(false);
	expect(mockConnection.getConnectionLastUsed()).andReturn(0L);
	// liveness probe fails -> close path; the close itself then blows up with
	// a SQLException, which the tester must survive (and presumably log)
	expect(mockPool.isConnectionHandleAlive((ConnectionHandle)anyObject())).andReturn(false).anyTimes();

	// connection should be closed
	mockConnection.internalClose();
	expectLastCall().andThrow(new SQLException());

	replay(mockPool, mockConnection, mockConnectionPartition, mockExecutor, mockLogger);
	this.testClass = new ConnectionTesterThread(mockConnectionPartition, mockExecutor, mockPool);
	// inject the mock logger via reflection so error logging can be observed
	Field loggerField = this.testClass.getClass().getDeclaredField("logger");
	loggerField.setAccessible(true);
	loggerField.set(this.testClass, mockLogger);
	this.testClass.run();
	verify(mockPool, mockConnectionPartition, mockExecutor, mockConnection, mockLogger);
}
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.externalSystem.service.project.manage;

import com.intellij.execution.RunManager;
import com.intellij.execution.RunnerAndConfigurationSettings;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.openapi.compiler.CompileContext;
import com.intellij.openapi.compiler.CompileTask;
import com.intellij.openapi.compiler.CompilerManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.model.execution.ExternalSystemTaskExecutionSettings;
import com.intellij.openapi.externalSystem.model.task.TaskData;
import com.intellij.openapi.externalSystem.service.execution.AbstractExternalSystemTaskConfigurationType;
import com.intellij.openapi.externalSystem.service.execution.ExternalSystemRunConfiguration;
import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode;
import com.intellij.openapi.externalSystem.service.project.manage.ExternalProjectsManager.ExternalProjectsStateProvider;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings;
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings;
import com.intellij.openapi.externalSystem.task.TaskCallback;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemBundle;
import com.intellij.openapi.externalSystem.util.ExternalSystemUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.FactoryMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Runs "activation tasks" of external build systems (e.g. Gradle tasks) at
 * well-defined project life-cycle phases: before/after sync, compile and
 * rebuild (see {@link Phase}). Tasks are looked up from the per-project
 * {@link TaskActivationState} provided by {@link ExternalProjectsManager}'s
 * state provider; a task name carrying the {@link #RUN_CONFIGURATION_TASK_PREFIX}
 * refers to an existing run configuration instead of a raw task name.
 *
 * @author Vladislav.Soroka
 * @since 10/28/2014
 */
public class ExternalSystemTaskActivator {

  private static final Logger LOG = Logger.getInstance(ExternalSystemTaskActivator.class);

  /** Marker prefix that distinguishes "run configuration" activation entries from plain task names. */
  public static final String RUN_CONFIGURATION_TASK_PREFIX = "run: ";

  @NotNull private final Project myProject;
  // lock-free COW list: listeners may be added while tasksActivationChanged() is being fired
  private final List<Listener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

  public ExternalSystemTaskActivator(@NotNull Project project) {
    myProject = project;
  }

  /** Builds the activation-task name that represents the given run configuration ({@code "run: <name>"}). */
  @NotNull
  public static String getRunConfigurationActivationTaskName(@NotNull RunnerAndConfigurationSettings settings) {
    return RUN_CONFIGURATION_TASK_PREFIX + settings.getName();
  }

  /**
   * Hooks this activator into the {@link CompilerManager} so that activation tasks
   * run as before/after compile tasks, then notifies listeners of the initial state.
   */
  public void init() {
    CompilerManager compilerManager = CompilerManager.getInstance(myProject);

    // One local class parameterized by direction instead of two near-identical
    // anonymous CompileTask implementations.
    class MyCompileTask implements CompileTask {
      private final boolean myBefore;

      MyCompileTask(boolean before) {
        myBefore = before;
      }

      @Override
      public boolean execute(CompileContext context) {
        return doExecuteCompileTasks(myBefore, context);
      }
    }

    compilerManager.addBeforeTask(new MyCompileTask(true));
    compilerManager.addAfterTask(new MyCompileTask(false));
    fireTasksChanged();
  }

  /**
   * Human-readable, comma-separated list of the phases in which the given task is activated
   * for the given project path, or {@code null} when the project has no activation state.
   */
  public String getDescription(ProjectSystemId systemId, String projectPath, String taskName) {
    List<String> result = new ArrayList<String>();
    final ExternalProjectsStateProvider stateProvider = ExternalProjectsManager.getInstance(myProject).getStateProvider();
    final TaskActivationState taskActivationState = stateProvider.getTasksActivation(systemId, projectPath);
    if (taskActivationState == null) return null;

    for (Phase phase : Phase.values()) {
      if (taskActivationState.getTasks(phase).contains(taskName)) {
        result.add(phase.toString());
      }
    }
    return StringUtil.join(result, ", ");
  }

  /**
   * CompileTask entry point: maps the compile scope's affected modules to their external
   * project paths and runs the activation tasks of the matching phases.
   * NOTE(review): the parameter is named {@code myBefore} (field-style prefix on a
   * parameter) — kept as-is here; a rename would be a code change.
   *
   * @return {@code false} to abort the compilation if any activation task failed
   */
  private boolean doExecuteCompileTasks(boolean myBefore, @NotNull CompileContext context) {
    final List<String> modules = ContainerUtil.map(context.getCompileScope().getAffectedModules(), new Function<Module, String>() {
      @Override
      public String fun(Module module) {
        return ExternalSystemApiUtil.getExternalProjectPath(module);
      }
    });

    final Collection<Phase> phases = ContainerUtil.newArrayList();
    if (myBefore) {
      // for a rebuild, BEFORE_REBUILD tasks run before BEFORE_COMPILE tasks
      if(context.isRebuild()) {
        phases.add(Phase.BEFORE_REBUILD);
      }
      phases.add(Phase.BEFORE_COMPILE);
    }
    else {
      // ...and AFTER_REBUILD tasks run after AFTER_COMPILE tasks
      phases.add(Phase.AFTER_COMPILE);
      if(context.isRebuild()) {
        phases.add(Phase.AFTER_REBUILD);
      }
    }
    return runTasks(modules, ArrayUtil.toObjectArray(phases, Phase.class));
  }

  /** Convenience overload for a single module path; see {@link #runTasks(Collection, Phase...)}. */
  public boolean runTasks(@NotNull String modulePath, @NotNull Phase... phases) {
    return runTasks(Collections.singleton(modulePath), phases);
  }

  /**
   * Collects the activation tasks of the given phases for the given module paths and executes
   * them sequentially. Entries prefixed with {@link #RUN_CONFIGURATION_TASK_PREFIX} are resolved
   * to existing external-system run configurations; the remaining plain task names of one
   * activation are batched into a single {@link ExternalSystemTaskExecutionSettings}.
   *
   * @return {@code true} if every queued task execution succeeded (or nothing was queued)
   */
  public boolean runTasks(@NotNull Collection<String> modules, @NotNull Phase... phases) {
    final ExternalProjectsStateProvider stateProvider = ExternalProjectsManager.getInstance(myProject).getStateProvider();

    final Queue<Pair<ProjectSystemId, ExternalSystemTaskExecutionSettings>> tasksQueue =
      new LinkedList<Pair<ProjectSystemId, ExternalSystemTaskExecutionSettings>>();

    // Lazily maps a system id to its run configurations (by name); created at most once per system.
    //noinspection MismatchedQueryAndUpdateOfCollection
    Map<ProjectSystemId, Map<String, RunnerAndConfigurationSettings>> lazyConfigurationsMap =
      new FactoryMap<ProjectSystemId, Map<String, RunnerAndConfigurationSettings>>() {
        @Nullable
        @Override
        protected Map<String, RunnerAndConfigurationSettings> create(ProjectSystemId key) {
          final AbstractExternalSystemTaskConfigurationType configurationType = ExternalSystemUtil.findConfigurationType(key);
          if (configurationType == null) return null;
          return ContainerUtil.map2Map(RunManager.getInstance(myProject).getConfigurationSettingsList(configurationType),
                                       new Function<RunnerAndConfigurationSettings, Pair<String, RunnerAndConfigurationSettings>>() {
                                         @Override
                                         public Pair<String, RunnerAndConfigurationSettings> fun(RunnerAndConfigurationSettings configurationSettings) {
                                           return Pair.create(configurationSettings.getName(), configurationSettings);
                                         }
                                       });
        }
      };

    for (final ExternalProjectsStateProvider.TasksActivation activation : stateProvider.getAllTasksActivation()) {
      // NOTE(review): "hashPath" looks like a typo for "hasPath" — kept as-is (rename is a code change).
      final boolean hashPath = modules.contains(activation.projectPath);
      final Set<String> tasks = ContainerUtil.newLinkedHashSet();
      for (Phase phase : phases) {
        // sync-phase tasks also fire for sibling modules that share the same linked root project
        if (hashPath || (phase.isSyncPhase() && isShareSameRootPath(modules, activation))) ContainerUtil.addAll(tasks, activation.state.getTasks(phase));
      }
      if (tasks.isEmpty()) continue;

      // Peel off "run: <name>" entries and queue the referenced run configurations directly.
      for (Iterator<String> iterator = tasks.iterator(); iterator.hasNext(); ) {
        String task = iterator.next();
        if (task.length() > RUN_CONFIGURATION_TASK_PREFIX.length() && task.startsWith(RUN_CONFIGURATION_TASK_PREFIX)) {
          iterator.remove();
          final String configurationName = task.substring(RUN_CONFIGURATION_TASK_PREFIX.length());
          Map<String, RunnerAndConfigurationSettings> settings = lazyConfigurationsMap.get(activation.systemId);
          if (settings == null) continue;
          RunnerAndConfigurationSettings configurationSettings = settings.get(configurationName);
          if (configurationSettings == null) continue;
          final RunConfiguration runConfiguration = configurationSettings.getConfiguration();
          if (configurationName.equals(configurationSettings.getName()) && runConfiguration instanceof ExternalSystemRunConfiguration) {
            tasksQueue.add(Pair.create(activation.systemId, ((ExternalSystemRunConfiguration)runConfiguration).getSettings()));
          }
        }
      }
      // everything left was a run-configuration entry -> nothing more to queue for this activation
      if (tasks.isEmpty()) continue;

      ExternalSystemTaskExecutionSettings executionSettings = new ExternalSystemTaskExecutionSettings();
      executionSettings.setExternalSystemIdString(activation.systemId.toString());
      executionSettings.setExternalProjectPath(activation.projectPath);
      executionSettings.getTaskNames().addAll(tasks);
      tasksQueue.add(Pair.create(activation.systemId, executionSettings));
    }
    return runTasksQueue(tasksQueue);
  }

  /**
   * True when any of the given module paths belongs to the same linked root project
   * as the activation's project path (used to propagate sync-phase tasks).
   */
  private boolean isShareSameRootPath(@NotNull Collection<String> modules,
                                      @NotNull ExternalProjectsStateProvider.TasksActivation activation) {
    final AbstractExternalSystemSettings systemSettings = ExternalSystemApiUtil.getSettings(myProject, activation.systemId);
    final String rootProjectPath = getRootProjectPath(systemSettings, activation.projectPath);
    final List<String> rootPath = ContainerUtil.mapNotNull(modules, new Function<String, String>() {
      @Override
      public String fun(String path) {
        return getRootProjectPath(systemSettings, path);
      }
    });
    return rootPath.contains(rootProjectPath);
  }

  /** Resolves a project path to the external path of its linked root project, or {@code null} when not linked. */
  @Nullable
  private static String getRootProjectPath(@NotNull AbstractExternalSystemSettings systemSettings, @NotNull String projectPath) {
    final ExternalProjectSettings projectSettings = systemSettings.getLinkedProjectSettings(projectPath);
    return projectSettings != null ? projectSettings.getExternalProjectPath() : null;
  }

  /**
   * Executes the queued task settings one at a time: each element runs asynchronously
   * (IN_BACKGROUND_ASYNC) and the next element is triggered recursively from the success
   * callback; a semaphore blocks this call until the whole chain finishes.
   *
   * @return {@code true} when the queue is empty or every execution reported success;
   *         {@code false} as soon as one execution fails (the rest is not run)
   */
  private boolean runTasksQueue(final Queue<Pair<ProjectSystemId, ExternalSystemTaskExecutionSettings>> tasksQueue) {
    final Pair<ProjectSystemId, ExternalSystemTaskExecutionSettings> pair = tasksQueue.poll();
    if (pair == null) return true;  // nothing left -> overall success

    final ProjectSystemId systemId = pair.first;
    final ExternalSystemTaskExecutionSettings executionSettings = pair.getSecond();

    final Semaphore targetDone = new Semaphore();
    targetDone.down();
    // result stays false unless the success callback flips it
    final Ref<Boolean> result = new Ref<Boolean>(false);
    ExternalSystemUtil.runTask(executionSettings, DefaultRunExecutor.EXECUTOR_ID, myProject, systemId,
                               new TaskCallback() {
                                 @Override
                                 public void onSuccess() {
                                   // recurse to drain the rest of the queue before releasing the semaphore
                                   result.set(runTasksQueue(tasksQueue));
                                   targetDone.up();
                                 }

                                 @Override
                                 public void onFailure() {
                                   targetDone.up();
                                 }
                               },
                               ProgressExecutionMode.IN_BACKGROUND_ASYNC);
    targetDone.waitFor();
    return result.get();
  }

  public void addListener(@NotNull Listener l) {
    myListeners.add(l);
  }

  /** Whether the given task is registered as an activation task for the given phase. */
  public boolean isTaskOfPhase(@NotNull TaskData taskData, @NotNull Phase phase) {
    final ExternalProjectsStateProvider stateProvider = ExternalProjectsManager.getInstance(myProject).getStateProvider();
    final TaskActivationState taskActivationState =
      stateProvider.getTasksActivation(taskData.getOwner(), taskData.getLinkedExternalProjectPath());
    if (taskActivationState == null) return false;

    return taskActivationState.getTasks(phase).contains(taskData.getName());
  }

  /** Registers all given tasks for the given phase; no-op on an empty collection. */
  public void addTasks(@NotNull Collection<TaskData> tasks, @NotNull final Phase phase) {
    if (tasks.isEmpty()) return;
    addTasks(ContainerUtil.map(tasks, new Function<TaskData, TaskActivationEntry>() {
      @Override
      public TaskActivationEntry fun(TaskData data) {
        return new TaskActivationEntry(data.getOwner(), phase, data.getLinkedExternalProjectPath(), data.getName());
      }
    }));
    // NOTE(review): addTasks(entries) already fires the listeners, so this fires twice — presumably harmless; verify.
    fireTasksChanged();
  }

  /** Adds the given activation entries to the persistent state and notifies listeners. */
  public void addTasks(@NotNull Collection<TaskActivationEntry> entries) {
    if (entries.isEmpty()) return;
    final ExternalProjectsStateProvider stateProvider = ExternalProjectsManager.getInstance(myProject).getStateProvider();
    for (TaskActivationEntry entry : entries) {
      final TaskActivationState taskActivationState = stateProvider.getTasksActivation(entry.systemId, entry.projectPath);
      taskActivationState.getTasks(entry.phase).add(entry.taskName);
    }
    fireTasksChanged();
  }

  /** Unregisters all given tasks from the given phase; no-op on an empty collection. */
  public void removeTasks(@NotNull Collection<TaskData> tasks, @NotNull final Phase phase) {
    if (tasks.isEmpty()) return;
    removeTasks(ContainerUtil.map(tasks, new Function<TaskData, TaskActivationEntry>() {
      @Override
      public TaskActivationEntry fun(TaskData data) {
        return new TaskActivationEntry(data.getOwner(), phase, data.getLinkedExternalProjectPath(), data.getName());
      }
    }));
  }

  /** Removes the given activation entries from the persistent state and notifies listeners. */
  public void removeTasks(@NotNull Collection<TaskActivationEntry> entries) {
    if (entries.isEmpty()) return;
    final ExternalProjectsStateProvider stateProvider = ExternalProjectsManager.getInstance(myProject).getStateProvider();
    for (TaskActivationEntry activationEntry : entries) {
      final TaskActivationState taskActivationState = stateProvider.getTasksActivation(activationEntry.systemId, activationEntry.projectPath);
      taskActivationState.getTasks(activationEntry.phase).remove(activationEntry.taskName);
    }
    fireTasksChanged();
  }

  public void addTask(@NotNull TaskActivationEntry entry) {
    addTasks(Collections.singleton(entry));
  }

  public void removeTask(@NotNull TaskActivationEntry entry) {
    removeTasks(Collections.singleton(entry));
  }

  /**
   * Moves each entry one position up ({@code increment == -1}) or down ({@code +1})
   * within its phase's ordered task list; entries at the boundary stay put.
   */
  public void moveTasks(@NotNull Collection<TaskActivationEntry> entries, int increment) {
    LOG.assertTrue(increment == -1 || increment == 1);
    final ExternalProjectsStateProvider stateProvider = ExternalProjectsManager.getInstance(myProject).getStateProvider();
    for (TaskActivationEntry activationEntry : entries) {
      final TaskActivationState taskActivationState = stateProvider.getTasksActivation(activationEntry.systemId, activationEntry.projectPath);
      final List<String> tasks = taskActivationState.getTasks(activationEntry.phase);
      final int i1 = tasks.indexOf(activationEntry.taskName);
      final int i2 = i1 + increment;
      if (i1 != -1 && tasks.size() > i2 && i2 >= 0) {
        Collections.swap(tasks, i1, i2);
      }
    }
  }

  /**
   * Moves projects one position up/down within the activation map's key order,
   * optionally restricted to the paths in {@code pathsGroup}. The map is rebuilt
   * in the new order by removing and re-inserting the affected entries.
   */
  public void moveProjects(@NotNull ProjectSystemId systemId,
                           @NotNull List<String> projectsPathsToMove,
                           @Nullable Collection<String> pathsGroup,
                           int increment) {
    LOG.assertTrue(increment == -1 || increment == 1);
    final ExternalProjectsStateProvider stateProvider = ExternalProjectsManager.getInstance(myProject).getStateProvider();
    final Map<String, TaskActivationState> activationMap = stateProvider.getProjectsTasksActivationMap(systemId);
    final List<String> currentPaths = ContainerUtil.newArrayList(activationMap.keySet());
    if (pathsGroup != null) {
      currentPaths.retainAll(pathsGroup);
    }
    for (String path : projectsPathsToMove) {
      final int i1 = currentPaths.indexOf(path);
      final int i2 = i1 + increment;
      if (i1 != -1 && currentPaths.size() > i2 && i2 >= 0) {
        Collections.swap(currentPaths, i1, i2);
      }
    }
    // Re-insert the reordered subset at the end of the live map in the new order.
    Map<String, TaskActivationState> rearrangedMap = ContainerUtil.newLinkedHashMap();
    for (String path : currentPaths) {
      rearrangedMap.put(path, activationMap.get(path));
      activationMap.remove(path);
    }
    activationMap.putAll(rearrangedMap);
  }

  /** Notifies every registered listener that the set of activation tasks changed. */
  public void fireTasksChanged() {
    for (Listener each : myListeners) {
      each.tasksActivationChanged();
    }
  }

  /** Project life-cycle phases an activation task can be bound to; toString() is the localized label. */
  public enum Phase {
    BEFORE_SYNC("external.system.task.before.sync"),
    AFTER_SYNC("external.system.task.after.sync"),
    BEFORE_COMPILE("external.system.task.before.compile"),
    AFTER_COMPILE("external.system.task.after.compile"),
    BEFORE_REBUILD("external.system.task.before.rebuild"),
    AFTER_REBUILD("external.system.task.after.rebuild");

    public final String myMessageKey;

    Phase(String messageKey) {
      myMessageKey = messageKey;
    }

    public boolean isSyncPhase () {
      return this == BEFORE_SYNC || this == AFTER_SYNC;
    }

    @Override
    public String toString() {
      return ExternalSystemBundle.message(myMessageKey);
    }
  }

  public interface Listener {
    void tasksActivationChanged();
  }

  /** Immutable key identifying one activation: (system id, phase, project path, task name). */
  public static class TaskActivationEntry {
    @NotNull private final ProjectSystemId systemId;
    @NotNull private final Phase phase;
    @NotNull private final String projectPath;
    @NotNull private final String taskName;

    public TaskActivationEntry(@NotNull ProjectSystemId systemId, @NotNull Phase phase, @NotNull String projectPath, @NotNull String taskName) {
      this.systemId = systemId;
      this.phase = phase;
      this.projectPath = projectPath;
      this.taskName = taskName;
    }

    @NotNull
    public ProjectSystemId getSystemId() {
      return systemId;
    }

    @NotNull
    public Phase getPhase() {
      return phase;
    }

    @NotNull
    public String getProjectPath() {
      return projectPath;
    }

    @NotNull
    public String getTaskName() {
      return taskName;
    }
  }
}