repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
vladmunthiu/dasein-cloud-core-GR-fork | src/main/java/org/dasein/cloud/platform/DatabaseLicenseModel.java | 1114 | /**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.platform;
/**
* Description
* <p>Created by stas: 18/07/2014 10:47</p>
*
* @author Stas Maksimov
* @version 2014.08 initial version
* @since 2014.08
*/
/**
 * Licensing models under which a cloud-managed database engine may be
 * provisioned. The model governs who is responsible for the engine's
 * software license when the database instance is created.
 */
public enum DatabaseLicenseModel {
    /** The engine is distributed under the GNU General Public License. */
    GENERAL_PUBLIC_LICENSE,
    /** The cloud provider bundles the engine license with the instance. */
    LICENSE_INCLUDED,
    /** The customer supplies their own valid license for the engine. */
    BRING_YOUR_OWN_LICENSE,
    /** The engine is distributed under the PostgreSQL License. */
    POSTGRESQL_LICENSE
}
| apache-2.0 |
zstackorg/zstack | plugin/portForwarding/src/main/java/org/zstack/network/service/portforwarding/PortForwardingCascadeExtension.java | 5313 | package org.zstack.network.service.portforwarding;
import org.springframework.beans.factory.annotation.Autowired;
import org.zstack.core.cascade.AbstractAsyncCascadeExtension;
import org.zstack.core.cascade.CascadeAction;
import org.zstack.core.cascade.CascadeConstant;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusListCallBack;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.Q;
import org.zstack.header.core.Completion;
import org.zstack.header.message.MessageReply;
import org.zstack.network.service.vip.VipInventory;
import org.zstack.network.service.vip.VipVO;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import static org.codehaus.groovy.runtime.InvokerHelper.asList;
public class PortForwardingCascadeExtension extends AbstractAsyncCascadeExtension {
    private static final CLogger logger = Utils.getLogger(PortForwardingCascadeExtension.class);

    // Cascade resource name handled by this extension: the PF rule VO simple name.
    private static final String NAME = PortForwardingRuleVO.class.getSimpleName();

    @Autowired
    private DatabaseFacade dbf;
    @Autowired
    private CloudBus bus;

    /**
     * Dispatches a cascade action to the matching handler. Action codes that
     * are not deletion-related complete immediately with success.
     */
    @Override
    public void asyncCascade(CascadeAction action, Completion completion) {
        if (action.isActionCode(CascadeConstant.DELETION_CHECK_CODE)) {
            handleDeletionCheck(action, completion);
        } else if (action.isActionCode(CascadeConstant.DELETION_DELETE_CODE, CascadeConstant.DELETION_FORCE_DELETE_CODE)) {
            handleDeletion(action, completion);
        } else if (action.isActionCode(CascadeConstant.DELETION_CLEANUP_CODE)) {
            handleDeletionCleanup(action, completion);
        } else {
            completion.success();
        }
    }

    /** Deletion cleanup needs no extra work for port forwarding rules. */
    private void handleDeletionCleanup(CascadeAction action, Completion completion) {
        completion.success();
    }

    /**
     * Deletes every port forwarding rule derived from the cascade action by
     * sending one {@link PortForwardingRuleDeletionMsg} per rule. A failed
     * deletion is logged but does not fail the cascade, so the parent
     * resource's deletion can still proceed.
     */
    private void handleDeletion(CascadeAction action, final Completion completion) {
        final List<PortForwardingRuleInventory> pfinvs = pfFromAction(action);
        if (pfinvs == null || pfinvs.isEmpty()) {
            completion.success();
            return;
        }

        List<PortForwardingRuleDeletionMsg> msgs = pfinvs.stream().map(pf -> {
            PortForwardingRuleDeletionMsg msg = new PortForwardingRuleDeletionMsg();
            // Arrays.asList replaces the previous static import of groovy's
            // InvokerHelper.asList; it avoids a runtime dependency on Groovy.
            msg.setRuleUuids(Arrays.asList(pf.getUuid()));
            bus.makeTargetServiceIdByResourceUuid(msg, PortForwardingConstant.SERVICE_ID, pf.getUuid());
            return msg;
        }).collect(Collectors.toList());

        bus.send(msgs, 10, new CloudBusListCallBack(completion) {
            @Override
            public void run(List<MessageReply> replies) {
                // Replies arrive in the same order as the messages were sent, so
                // index directly instead of the previous replies.indexOf(r),
                // which was O(n^2) and ambiguous for duplicate reply objects.
                for (int i = 0; i < replies.size(); i++) {
                    MessageReply r = replies.get(i);
                    if (!r.isSuccess()) {
                        PortForwardingRuleInventory pf = pfinvs.get(i);
                        logger.warn(String.format("failed to delete portForwardingRule[uuid:%s, ip: %s, name:%s], %s",
                                pf.getUuid(), pf.getVipIp(), pf.getName(), r.getError()));
                    }
                }
                completion.success();
            }
        });
    }

    /** No extra validation is required before deleting port forwarding rules. */
    private void handleDeletionCheck(CascadeAction action, Completion completion) {
        completion.success();
    }

    /** Cascade edges: PF rules are children of VIPs. */
    @Override
    public List<String> getEdgeNames() {
        return Arrays.asList(VipVO.class.getSimpleName());
    }

    @Override
    public String getCascadeResourceName() {
        return NAME;
    }

    /**
     * Translates a deletion action on a parent resource into an action on the
     * PF rules owned by that parent, or returns null if no rules are affected.
     */
    @Override
    public CascadeAction createActionForChildResource(CascadeAction action) {
        if (CascadeConstant.DELETION_CODES.contains(action.getActionCode())) {
            List<PortForwardingRuleInventory> pfs = pfFromAction(action);
            if (pfs != null) {
                return action.copy().setParentIssuer(NAME).setParentIssuerContext(pfs);
            }
        }
        return null;
    }

    /**
     * Extracts the PF rule inventories affected by the action: when the parent
     * issuer is a VIP, the rules bound to those VIPs are loaded from the
     * database; when the issuer is already a PF rule, its context is reused.
     * Returns null when the action is unrelated or resolves to no rules.
     */
    private List<PortForwardingRuleInventory> pfFromAction(CascadeAction action) {
        if (VipVO.class.getSimpleName().equals(action.getParentIssuer())) {
            List<VipInventory> vips = action.getParentIssuerContext();
            List<String> vipUuids = vips.stream().map(VipInventory::getUuid).collect(Collectors.toList());
            if (vipUuids.isEmpty()) {
                return null;
            }
            List<PortForwardingRuleVO> pfVos = Q.New(PortForwardingRuleVO.class).in(PortForwardingRuleVO_.vipUuid, vipUuids).list();
            return PortForwardingRuleInventory.valueOf(pfVos);
        } else if (PortForwardingRuleVO.class.getSimpleName().equals(action.getParentIssuer())) {
            return action.getParentIssuerContext();
        }
        return null;
    }
} | apache-2.0 |
xmpace/jetty-read | jetty-util/src/main/java/org/eclipse/jetty/util/security/Credential.java | 8289 | //
// ========================================================================
// Copyright (c) 1995-2015 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.util.security;
import java.io.Serializable;
import java.security.MessageDigest;
import org.eclipse.jetty.util.StringUtil;
import org.eclipse.jetty.util.TypeUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
/* ------------------------------------------------------------ */
/**
* Credentials. The Credential class represents an abstract mechanism for
* checking authentication credentials. A credential instance either represents
* a secret, or some data that could only be derived from knowing the secret.
* <p>
* Often a Credential is related to a Password via a one way algorithm, so while
* a Password itself is a Credential, a UnixCrypt or MD5 digest of a a password
* is only a credential that can be checked against the password.
* <p>
* This class includes an implementation for unix Crypt an MD5 digest.
*
* @see Password
*
*/
/**
 * Credentials. The Credential class represents an abstract mechanism for
 * checking authentication credentials. A credential instance either represents
 * a secret, or some data that could only be derived from knowing the secret.
 * <p>
 * Often a Credential is related to a Password via a one way algorithm, so while
 * a Password itself is a Credential, a UnixCrypt or MD5 digest of a password
 * is only a credential that can be checked against the password.
 * <p>
 * This class includes an implementation for unix Crypt and MD5 digest.
 *
 * @see Password
 */
public abstract class Credential implements Serializable
{
    private static final Logger LOG = Log.getLogger(Credential.class);

    private static final long serialVersionUID = -7760551052768181572L;

    /**
     * Check a credential
     *
     * @param credentials The credential to check against. This may either be
     *            another Credential object, a Password object or a String
     *            which is interpreted by this credential.
     * @return True if the credentials indicated that the shared secret is known
     *         to both this Credential and the passed credential.
     */
    public abstract boolean check(Object credentials);

    /**
     * Get a credential from a String. If the credential String starts with a
     * known Credential type (eg "CRYPT:" or "MD5:" ) then a Credential of that
     * type is returned. Else the credential is assumed to be a Password.
     *
     * @param credential String representation of the credential
     * @return A Credential or Password instance.
     */
    public static Credential getCredential(String credential)
    {
        if (credential.startsWith(Crypt.__TYPE))
            return new Crypt(credential);
        if (credential.startsWith(MD5.__TYPE))
            return new MD5(credential);
        return new Password(credential);
    }

    /**
     * String comparison that runs in time dependent only on the length of the
     * unknown string, to avoid leaking the position of the first mismatch
     * through a timing side channel.
     *
     * @param known the expected (known) value
     * @param unknown the attacker-supplied (unknown) value
     * @return true if the two strings are equal
     */
    private static boolean stringEquals(String known, String unknown)
    {
        if (known == unknown)
            return true;
        if (known == null || unknown == null)
            return false;
        boolean result = true;
        int l1 = known.length();
        int l2 = unknown.length();
        // Always iterate over the full unknown string; fold every character
        // comparison into 'result' rather than returning early.
        for (int i = 0; i < l2; ++i)
            result &= ((l1 == 0) ? unknown.charAt(l2 - i - 1) : known.charAt(i % l1)) == unknown.charAt(i);
        return result && l1 == l2;
    }

    /**
     * Unix Crypt Credentials
     */
    public static class Crypt extends Credential
    {
        private static final long serialVersionUID = -2027792997664744210L;

        public static final String __TYPE = "CRYPT:";

        // The crypt(3)-style hash, with the "CRYPT:" prefix stripped.
        private final String _cooked;

        Crypt(String cooked)
        {
            _cooked = cooked.startsWith(Crypt.__TYPE) ? cooked.substring(__TYPE.length()) : cooked;
        }

        @Override
        public boolean check(Object credentials)
        {
            if (credentials instanceof char[])
                credentials = new String((char[])credentials);
            if (!(credentials instanceof String) && !(credentials instanceof Password))
            {
                // Previously this only warned and then compared the object's
                // toString() output, which can never represent the plaintext
                // password; fail the check explicitly instead.
                LOG.warn("Can't check " + credentials.getClass() + " against CRYPT");
                return false;
            }
            String passwd = credentials.toString();
            // Constant-time comparison of the stored hash against the
            // recomputed hash of the supplied password.
            return stringEquals(_cooked, UnixCrypt.crypt(passwd, _cooked));
        }

        public static String crypt(String user, String pw)
        {
            return __TYPE + UnixCrypt.crypt(pw, user);
        }
    }

    /**
     * MD5 Credentials
     */
    public static class MD5 extends Credential
    {
        private static final long serialVersionUID = 5533846540822684240L;

        public static final String __TYPE = "MD5:";

        // Guards the shared, lazily-created MessageDigest instance below.
        public static final Object __md5Lock = new Object();

        private static MessageDigest __md;

        private final byte[] _digest;

        MD5(String digest)
        {
            digest = digest.startsWith(__TYPE) ? digest.substring(__TYPE.length()) : digest;
            _digest = TypeUtil.parseBytes(digest, 16);
        }

        public byte[] getDigest()
        {
            return _digest;
        }

        @Override
        public boolean check(Object credentials)
        {
            try
            {
                if (credentials instanceof char[])
                    credentials = new String((char[])credentials);
                if (credentials instanceof Password || credentials instanceof String)
                {
                    byte[] digest;
                    synchronized (__md5Lock)
                    {
                        if (__md == null)
                            __md = MessageDigest.getInstance("MD5");
                        __md.reset();
                        __md.update(credentials.toString().getBytes(StringUtil.__ISO_8859_1));
                        digest = __md.digest();
                    }
                    // MessageDigest.isEqual performs a constant-time comparison,
                    // replacing the previous early-exit byte loop.
                    return MessageDigest.isEqual(digest, _digest);
                }
                else if (credentials instanceof MD5)
                {
                    return MessageDigest.isEqual(_digest, ((MD5)credentials)._digest);
                }
                else if (credentials instanceof Credential)
                {
                    // Allow credential to attempt check - i.e. this'll work
                    // for DigestAuthModule$Digest credentials
                    return ((Credential)credentials).check(this);
                }
                else
                {
                    LOG.warn("Can't check " + credentials.getClass() + " against MD5");
                    return false;
                }
            }
            catch (Exception e)
            {
                LOG.warn(e);
                return false;
            }
        }

        /**
         * Computes the "MD5:"-prefixed hex digest of a password, or returns
         * null (after logging) if the digest cannot be computed.
         *
         * @param password the plaintext password
         * @return the digest credential string, or null on failure
         */
        public static String digest(String password)
        {
            try
            {
                byte[] digest;
                synchronized (__md5Lock)
                {
                    if (__md == null)
                        __md = MessageDigest.getInstance("MD5");
                    __md.reset();
                    __md.update(password.getBytes(StringUtil.__ISO_8859_1));
                    digest = __md.digest();
                }
                return __TYPE + TypeUtil.toString(digest, 16);
            }
            catch (Exception e)
            {
                // Covers both digest creation and encoding failures, matching
                // the original's warn-and-return-null behaviour.
                LOG.warn(e);
                return null;
            }
        }
    }
}
| apache-2.0 |
zstackio/zstack | storage/src/main/java/org/zstack/storage/primary/PrimaryStorageCapacityChecker.java | 4029 | package org.zstack.storage.primary;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.zstack.core.db.Q;
import org.zstack.header.storage.primary.PrimaryStorageCapacityVO;
import org.zstack.header.storage.primary.PrimaryStorageCapacityVO_;
import org.zstack.header.storage.primary.PrimaryStorageOverProvisioningManager;
import org.zstack.utils.SizeUtils;
/**
* Created by MaJin on 2021/7/21.
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class PrimaryStorageCapacityChecker {
    @Autowired
    protected PrimaryStorageOverProvisioningManager psRatioMgr;
    @Autowired
    protected PrimaryStoragePhysicalCapacityManager physicalCapacityMgr;

    private String primaryStorageUuid;
    // Bytes always kept free on the primary storage, from global config.
    private long reservedCapacity;
    private long availableCapacity;
    private long totalPhysicalCapacity;
    private long availablePhysicalCapacity;

    /**
     * Creates a checker by loading the capacity record of the given primary
     * storage from the database.
     *
     * @throws NullPointerException if no capacity record exists for the uuid
     */
    public static PrimaryStorageCapacityChecker New(String primaryStorageUuid) {
        PrimaryStorageCapacityVO capacity = Q.New(PrimaryStorageCapacityVO.class)
                .eq(PrimaryStorageCapacityVO_.uuid, primaryStorageUuid).find();
        // Fail with a descriptive message instead of an anonymous NPE on the
        // first getter call.
        java.util.Objects.requireNonNull(capacity,
                String.format("no PrimaryStorageCapacityVO found for primary storage[uuid:%s]", primaryStorageUuid));
        return New(capacity);
    }

    /** Creates a checker from an already-loaded capacity record. */
    public static PrimaryStorageCapacityChecker New(PrimaryStorageCapacityVO capacity) {
        // Delegate so the field-population logic lives in exactly one place.
        return New(capacity.getUuid(), capacity.getAvailableCapacity(),
                capacity.getTotalPhysicalCapacity(), capacity.getAvailablePhysicalCapacity());
    }

    /** Creates a checker from explicit capacity numbers (all in bytes). */
    public static PrimaryStorageCapacityChecker New(String primaryStorageUuid, long availableCapacity, long totalPhysicalCapacity, long availablePhysicalCapacity) {
        PrimaryStorageCapacityChecker checker = new PrimaryStorageCapacityChecker();
        checker.primaryStorageUuid = primaryStorageUuid;
        checker.availableCapacity = availableCapacity;
        checker.availablePhysicalCapacity = availablePhysicalCapacity;
        checker.totalPhysicalCapacity = totalPhysicalCapacity;
        checker.reservedCapacity = SizeUtils.sizeStringToBytes(PrimaryStorageGlobalConfig.RESERVED_CAPACITY.value());
        return checker;
    }

    /** Convenience overload where the allocation grows by exactly its own size. */
    public boolean checkRequiredSize(long requiredSize) {
        return checkIncreasedAndTotalRequiredSize(requiredSize, requiredSize);
    }

    /**
     * Checks whether the primary storage can take an allocation.
     *
     * @param increasedRequiredSize the additional bytes this operation adds
     * @param totalRequiredSize the total bytes the resource will occupy
     * @return true only if the over-provisioned available capacity keeps the
     *         reserve intact AND the physical capacity checks pass
     */
    public boolean checkIncreasedAndTotalRequiredSize(long increasedRequiredSize, long totalRequiredSize) {
        boolean availableCapacityMeetIncreaseSizeByRatio = availableCapacity
                - psRatioMgr.calculateByRatio(primaryStorageUuid, increasedRequiredSize) >= reservedCapacity;
        boolean physicalCapacityHasFreeSpaceByRatio = physicalCapacityMgr
                .checkCapacityByRatio(primaryStorageUuid, totalPhysicalCapacity, availablePhysicalCapacity);
        boolean physicalCapacityMeetTotalRequiredSizeByRatio = physicalCapacityMgr
                .checkRequiredCapacityByRatio(primaryStorageUuid, totalPhysicalCapacity, totalRequiredSize);
        return availableCapacityMeetIncreaseSizeByRatio &&
                physicalCapacityHasFreeSpaceByRatio &&
                physicalCapacityMeetTotalRequiredSizeByRatio;
    }
}
| apache-2.0 |
freeVM/freeVM | enhanced/java/drlvm/vm/tests/kernel/java/lang/reflect/AuxiliaryClass.java | 1187 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Serguei S.Zapreyev
*/
package java.lang.reflect;
import junit.framework.TestCase;
/*
* Created on 10.12.2006
*
* This AuxiliaryClass class is used to support
* MethodTest.test_invoke_Obj_Obj_2() test case
*/
public class AuxiliaryClass {
    /**
     * Performs an integer division by zero, so every invocation throws an
     * {@link ArithmeticException}. This deliberate failure is used by
     * MethodTest.test_invoke_Obj_Obj_2() to exercise exception propagation
     * through reflective invocation.
     *
     * @return never returns normally
     * @throws Throwable always (an ArithmeticException from the division)
     */
    public int get() throws Throwable {
        int numerator = 1;
        int divisor = 0;
        return numerator / divisor;
    }
}
| apache-2.0 |
api-ai/apiai-java-sdk | libai/src/test/java/ai/api/model/FulfillmentTest.java | 4225 | /**
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.api.model;
import static org.junit.Assert.*;
import org.junit.Test;
import com.google.gson.Gson;
import ai.api.GsonFactory;
import ai.api.model.ResponseMessage.ResponseImage;
import ai.api.model.ResponseMessage.ResponseQuickReply;
import ai.api.model.ResponseMessage.ResponseSpeech;
public class FulfillmentTest {

    static final Gson gson = GsonFactory.getDefaultFactory().getGson();

    private static final String TEST_FULFILLMENT =
        "{\"speech\":\"text\", " + "\"messages\":[{\"type\":0, \"speech\":[\"one\"]}]}";
    private static final String TEST_FULFILLMENT_NO_MESSAGES = "{\"speech\":\"text\"}";
    private static final String TEST_FULFILLMENT_WEBHOOK_RESPONSE =
        "{\"speech\":\"text\"," + "\"displayText\":\"DisplayText\", \"source\":\"webhook\", "
            + "\"contextOut\": [{\"name\":\"weather\", \"lifespan\":2, \"parameters\":{\"city\":\"Rome\"}}],"
            + "\"data\":{\"param\":\"value\"},"
            + "\"followupEvent\":{\"data\":{\"event-param\":\"event-value\"},\"name\":\"test-event\"}}";
    private static final String TEST_FULFILLMENT_WITH_MESSAGES = "{\"speech\":\"test speech\","+
        "\"messages\":[{\"imageUrl\":\"url image\",\"type\":3},{\"title\":\"Quick title\","+
        "\"type\":2},{\"speech\":[\"speech 1\",\"speech 2\"],\"type\":0}]}";

    // A fulfillment carrying one speech message deserializes with both the
    // top-level speech text and the typed message list populated.
    @Test
    public void testDeserialization() {
        Fulfillment parsed = gson.fromJson(TEST_FULFILLMENT, Fulfillment.class);
        assertEquals("text", parsed.getSpeech());
        assertEquals(1, parsed.getMessages().size());
        ResponseSpeech speechMessage = (ResponseSpeech) parsed.getMessages().get(0);
        assertEquals(1, speechMessage.getSpeech().size());
        assertEquals("one", speechMessage.getSpeech().get(0));
    }

    // An absent "messages" field stays null rather than becoming an empty list.
    @Test
    public void testDeserializationNoMessages() {
        Fulfillment parsed = gson.fromJson(TEST_FULFILLMENT_NO_MESSAGES, Fulfillment.class);
        assertNull(parsed.getMessages());
    }

    // A full webhook response round-trips display text, source, output
    // context, free-form data and the follow-up event.
    @Test
    public void testDeserializationWebhookResponse() {
        Fulfillment parsed = gson.fromJson(TEST_FULFILLMENT_WEBHOOK_RESPONSE, Fulfillment.class);
        assertEquals("DisplayText", parsed.getDisplayText());
        assertEquals("webhook", parsed.getSource());

        assertEquals(1, parsed.getContextOut().size());
        AIOutputContext weatherContext = parsed.getContext("weather");
        assertEquals("weather", weatherContext.getName());
        assertEquals(2, weatherContext.getLifespan());
        assertEquals(1, weatherContext.getParameters().size());
        assertEquals("Rome", weatherContext.getParameters().get("city").getAsString());

        assertEquals(1, parsed.getData().size());
        assertEquals("value", parsed.getData().get("param").getAsString());

        assertNotNull(parsed.getFollowupEvent());
        assertEquals("test-event", parsed.getFollowupEvent().getName());
        assertEquals(1, parsed.getFollowupEvent().getData().size());
        assertEquals("event-value", parsed.getFollowupEvent().getData().get("event-param"));
    }

    // Serializing image, quick-reply and speech messages (in that order)
    // produces the expected JSON layout.
    @Test
    public void testSerializationWithMessages() {
        Fulfillment fulfillment = new Fulfillment();
        fulfillment.setSpeech("test speech");

        ResponseImage imageMessage = new ResponseMessage.ResponseImage();
        imageMessage.setImageUrl("url image");

        ResponseQuickReply quickReplyMessage = new ResponseMessage.ResponseQuickReply();
        quickReplyMessage.setTitle("Quick title");

        ResponseSpeech speechMessage = new ResponseMessage.ResponseSpeech();
        speechMessage.setSpeech("speech 1", "speech 2");

        fulfillment.setMessages(imageMessage, quickReplyMessage, speechMessage);
        assertEquals(TEST_FULFILLMENT_WITH_MESSAGES, gson.toJson(fulfillment));
    }
}
| apache-2.0 |
rzagabe/bazel | src/test/java/com/google/devtools/build/lib/testutil/MoreAsserts.java | 10707 | // Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.testutil;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.common.truth.Truth.assert_;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.lang.ref.Reference;
import java.lang.reflect.Field;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.regex.Pattern;
/**
* A helper class for tests providing a simple interface for asserts.
*/
public class MoreAsserts {
public static void assertContainsRegex(String regex, String actual) {
assertThat(actual).containsMatch(regex);
}
public static void assertContainsRegex(String msg, String regex, String actual) {
assertWithMessage(msg).that(actual).containsMatch(regex);
}
public static void assertNotContainsRegex(String regex, String actual) {
assertThat(actual).doesNotContainMatch(regex);
}
public static void assertNotContainsRegex(String msg, String regex, String actual) {
assertWithMessage(msg).that(actual).doesNotContainMatch(regex);
}
public static void assertMatchesRegex(String regex, String actual) {
assertThat(actual).matches(regex);
}
public static void assertMatchesRegex(String msg, String regex, String actual) {
assertWithMessage(msg).that(actual).matches(regex);
}
public static void assertNotMatchesRegex(String regex, String actual) {
assertThat(actual).doesNotMatch(regex);
}
public static <T> void assertEquals(T expected, T actual, Comparator<T> comp) {
assertThat(comp.compare(expected, actual)).isEqualTo(0);
}
public static <T> void assertContentsAnyOrder(
Iterable<? extends T> expected, Iterable<? extends T> actual,
Comparator<? super T> comp) {
assertThat(actual).hasSize(Iterables.size(expected));
int i = 0;
for (T e : expected) {
for (T a : actual) {
if (comp.compare(e, a) == 0) {
i++;
}
}
}
assertThat(actual).hasSize(i);
}
public static void assertGreaterThanOrEqual(long target, long actual) {
assertThat(actual).isAtLeast(target);
}
public static void assertGreaterThanOrEqual(String msg, long target, long actual) {
assertWithMessage(msg).that(actual).isAtLeast(target);
}
public static void assertGreaterThan(long target, long actual) {
assertThat(actual).isGreaterThan(target);
}
public static void assertGreaterThan(String msg, long target, long actual) {
assertWithMessage(msg).that(actual).isGreaterThan(target);
}
public static void assertLessThanOrEqual(long target, long actual) {
assertThat(actual).isAtMost(target);
}
public static void assertLessThanOrEqual(String msg, long target, long actual) {
assertWithMessage(msg).that(actual).isAtMost(target);
}
public static void assertLessThan(long target, long actual) {
assertThat(actual).isLessThan(target);
}
public static void assertLessThan(String msg, long target, long actual) {
assertWithMessage(msg).that(actual).isLessThan(target);
}
public static void assertEndsWith(String ending, String actual) {
assertThat(actual).endsWith(ending);
}
public static void assertStartsWith(String prefix, String actual) {
assertThat(actual).startsWith(prefix);
}
/**
* Scans if an instance of given class is strongly reachable from a given
* object.
* <p>Runs breadth-first search in object reachability graph to check if
* an instance of <code>clz</code> can be reached.
* <strong>Note:</strong> This method can take a long time if analyzed
* data structure spans across large part of heap and may need a lot of
* memory.
*
* @param start object to start the search from
* @param clazz class to look for
*/
public static void assertInstanceOfNotReachable(
Object start, final Class<?> clazz) {
Predicate<Object> p = new Predicate<Object>() {
@Override
public boolean apply(Object obj) {
return clazz.isAssignableFrom(obj.getClass());
}
};
if (isRetained(p, start)) {
assert_().fail(
"Found an instance of " + clazz.getCanonicalName() + " reachable from " + start);
}
}
private static final Field NON_STRONG_REF;
static {
try {
NON_STRONG_REF = Reference.class.getDeclaredField("referent");
} catch (SecurityException | NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
static final Predicate<Field> ALL_STRONG_REFS = Predicates.equalTo(NON_STRONG_REF);
private static boolean isRetained(Predicate<Object> predicate, Object start) {
Map<Object, Object> visited = Maps.newIdentityHashMap();
visited.put(start, start);
Queue<Object> toScan = new ArrayDeque<>();
toScan.add(start);
while (!toScan.isEmpty()) {
Object current = toScan.poll();
if (current.getClass().isArray()) {
if (current.getClass().getComponentType().isPrimitive()) {
continue;
}
for (Object ref : (Object[]) current) {
if (ref != null) {
if (predicate.apply(ref)) {
return true;
}
if (visited.put(ref, ref) == null) {
toScan.add(ref);
}
}
}
} else {
// iterate *all* fields (getFields() returns only accessible ones)
for (Class<?> clazz = current.getClass(); clazz != null;
clazz = clazz.getSuperclass()) {
for (Field f : clazz.getDeclaredFields()) {
if (f.getType().isPrimitive() || ALL_STRONG_REFS.apply(f)) {
continue;
}
f.setAccessible(true);
try {
Object ref = f.get(current);
if (ref != null) {
if (predicate.apply(ref)) {
return true;
}
if (visited.put(ref, ref) == null) {
toScan.add(ref);
}
}
} catch (IllegalArgumentException | IllegalAccessException e) {
throw new IllegalStateException("Error when scanning the heap", e);
}
}
}
}
}
return false;
}
private static String getClassDescription(Object object) {
return object == null
? "null"
: ("instance of " + object.getClass().getName());
}
public static String chattyFormat(String message, Object expected, Object actual) {
String expectedClass = getClassDescription(expected);
String actualClass = getClassDescription(actual);
return Joiner.on('\n').join((message != null) ? ("\n" + message) : "",
" expected " + expectedClass + ": <" + expected + ">",
" but was " + actualClass + ": <" + actual + ">");
}
public static void assertEqualsUnifyingLineEnds(String expected, String actual) {
if (actual != null) {
actual = actual.replaceAll(System.getProperty("line.separator"), "\n");
}
assertThat(actual).isEqualTo(expected);
}
public static void assertContainsWordsWithQuotes(String message,
String... strings) {
for (String string : strings) {
assertTrue(message + " should contain '" + string + "' (with quotes)",
message.contains("'" + string + "'"));
}
}
public static void assertNonZeroExitCode(int exitCode, String stdout, String stderr) {
if (exitCode == 0) {
fail("expected non-zero exit code but exit code was 0 and stdout was <"
+ stdout + "> and stderr was <" + stderr + ">");
}
}
public static void assertZeroExitCode(int exitCode, String stdout, String stderr) {
assertExitCode(0, exitCode, stdout, stderr);
}
public static void assertExitCode(int expectedExitCode,
int exitCode, String stdout, String stderr) {
if (exitCode != expectedExitCode) {
fail(String.format("expected exit code <%d> but exit code was <%d> and stdout was <%s> "
+ "and stderr was <%s>", expectedExitCode, exitCode, stdout, stderr));
}
}
public static void assertStdoutContainsString(String expected, String stdout, String stderr) {
if (!stdout.contains(expected)) {
fail("expected stdout to contain string <" + expected + "> but stdout was <"
+ stdout + "> and stderr was <" + stderr + ">");
}
}
public static void assertStderrContainsString(String expected, String stdout, String stderr) {
if (!stderr.contains(expected)) {
fail("expected stderr to contain string <" + expected + "> but stdout was <"
+ stdout + "> and stderr was <" + stderr + ">");
}
}
public static void assertStdoutContainsRegex(String expectedRegex,
String stdout, String stderr) {
if (!Pattern.compile(expectedRegex).matcher(stdout).find()) {
fail("expected stdout to contain regex <" + expectedRegex + "> but stdout was <"
+ stdout + "> and stderr was <" + stderr + ">");
}
}
public static void assertStderrContainsRegex(String expectedRegex,
String stdout, String stderr) {
if (!Pattern.compile(expectedRegex).matcher(stderr).find()) {
fail("expected stderr to contain regex <" + expectedRegex + "> but stdout was <"
+ stdout + "> and stderr was <" + stderr + ">");
}
}
public static Set<String> asStringSet(Iterable<?> collection) {
Set<String> set = Sets.newTreeSet();
for (Object o : collection) {
set.add("\"" + o + "\"");
}
return set;
}
public static <T> void
assertSameContents(Iterable<? extends T> expected, Iterable<? extends T> actual) {
if (!Sets.newHashSet(expected).equals(Sets.newHashSet(actual))) {
fail("got string set: " + asStringSet(actual) + "\nwant: " + asStringSet(expected));
}
}
}
| apache-2.0 |
floodlight/loxigen-artifacts | openflowj/gen-src/main/java/org/projectfloodlight/openflow/protocol/ver15/OFOxmUdpSrcVer15.java | 9158 | // Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver15;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.5 wire implementation of the {@code udp_src} OXM (exact match on
 * the UDP source port). Auto-generated by the LOXI compiler -- do not edit the
 * logic by hand; regenerate from the LOXI templates instead.
 */
class OFOxmUdpSrcVer15 implements OFOxmUdpSrc {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmUdpSrcVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    final static int LENGTH = 6;

    private final static TransportPort DEFAULT_VALUE = TransportPort.NONE;

    // OF message fields
    private final TransportPort value;
//
    // Immutable default instance
    final static OFOxmUdpSrcVer15 DEFAULT = new OFOxmUdpSrcVer15(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmUdpSrcVer15(TransportPort value) {
        if(value == null) {
            throw new NullPointerException("OFOxmUdpSrcVer15: property value cannot be null");
        }
        this.value = value;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        return 0x80001e02L;
    }

    @Override
    public TransportPort getValue() {
        return value;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.UDP_SRC;
    }

    @Override
    public boolean isMasked() {
        return false;
    }

    public OFOxm<TransportPort> getCanonical() {
        // exact match OXM is always canonical
        return this;
    }

    @Override
    public TransportPort getMask()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property mask not supported in version 1.5");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    public OFOxmUdpSrc.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder seeded from an existing immutable instance; unset properties
    // fall back to the parent's values in build().
    static class BuilderWithParent implements OFOxmUdpSrc.Builder {
        final OFOxmUdpSrcVer15 parentMessage;

        // OF message fields
        private boolean valueSet;
        private TransportPort value;

        BuilderWithParent(OFOxmUdpSrcVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public long getTypeLen() {
            return 0x80001e02L;
        }

        @Override
        public TransportPort getValue() {
            return value;
        }

        @Override
        public OFOxmUdpSrc.Builder setValue(TransportPort value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public MatchField<TransportPort> getMatchField() {
            return MatchField.UDP_SRC;
        }

        @Override
        public boolean isMasked() {
            return false;
        }

        @Override
        public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.5");
        }

        @Override
        public TransportPort getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.5");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFOxmUdpSrc build() {
            TransportPort value = this.valueSet ? this.value : parentMessage.value;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            //
            return new OFOxmUdpSrcVer15(
                value
            );
        }
    }

    // Stand-alone builder; unset properties fall back to the defaults.
    static class Builder implements OFOxmUdpSrc.Builder {
        // OF message fields
        private boolean valueSet;
        private TransportPort value;

        @Override
        public long getTypeLen() {
            return 0x80001e02L;
        }

        @Override
        public TransportPort getValue() {
            return value;
        }

        @Override
        public OFOxmUdpSrc.Builder setValue(TransportPort value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public MatchField<TransportPort> getMatchField() {
            return MatchField.UDP_SRC;
        }

        @Override
        public boolean isMasked() {
            return false;
        }

        @Override
        public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.5");
        }

        @Override
        public TransportPort getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.5");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        //
        @Override
        public OFOxmUdpSrc build() {
            TransportPort value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            return new OFOxmUdpSrcVer15(
                value
            );
        }
    }

    // Deserializes the fixed 4-byte type/length header plus the 2-byte port.
    final static Reader READER = new Reader();
    static class Reader implements OFMessageReader<OFOxmUdpSrc> {
        @Override
        public OFOxmUdpSrc readFrom(ByteBuf bb) throws OFParseError {
            // fixed value property typeLen == 0x80001e02L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80001e02)
                throw new OFParseError("Wrong typeLen: Expected=0x80001e02L(0x80001e02L), got="+typeLen);
            TransportPort value = TransportPort.read2Bytes(bb);

            OFOxmUdpSrcVer15 oxmUdpSrcVer15 = new OFOxmUdpSrcVer15(
                value
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmUdpSrcVer15);
            return oxmUdpSrcVer15;
        }
    }

    // Feeds this message's wire-relevant fields into a Guava hasher.
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmUdpSrcVer15Funnel FUNNEL = new OFOxmUdpSrcVer15Funnel();
    static class OFOxmUdpSrcVer15Funnel implements Funnel<OFOxmUdpSrcVer15> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmUdpSrcVer15 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80001e02L
            sink.putInt((int) 0x80001e02);
            message.value.putTo(sink);
        }
    }

    // Serializes the fixed header followed by the 2-byte port value.
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFOxmUdpSrcVer15> {
        @Override
        public void write(ByteBuf bb, OFOxmUdpSrcVer15 message) {
            // fixed value property typeLen = 0x80001e02L
            bb.writeInt((int) 0x80001e02);
            message.value.write2Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmUdpSrcVer15(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmUdpSrcVer15 other = (OFOxmUdpSrcVer15) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }

}
| apache-2.0 |
XiaoqingWang/kafka-spark-consumer | src/main/java/consumer/kafka/GlobalPartitionInformation.java | 2950 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This file is based on the source code of the Kafka spout of the Apache Storm project.
* (https://github.com/apache/storm/tree/master/external/storm-kafka)
* This file has been modified to work with Spark Streaming.
*/
package consumer.kafka;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import com.google.common.base.Objects;
/**
 * Mapping from Kafka partition id to the {@link Broker} currently hosting
 * that partition. The map is kept sorted so iteration and
 * {@link #getOrderedPartitions()} always follow ascending partition id.
 */
public class GlobalPartitionInformation implements Iterable<Partition>,
        Serializable {

    // TreeMap keeps partitions sorted by id for deterministic ordering.
    private final Map<Integer, Broker> partitionMap;

    public GlobalPartitionInformation() {
        partitionMap = new TreeMap<Integer, Broker>();
    }

    /** Registers (or replaces) the broker hosting the given partition. */
    public void addPartition(int partitionId, Broker broker) {
        partitionMap.put(partitionId, broker);
    }

    @Override
    public String toString() {
        return "GlobalPartitionInformation{" + "partitionMap=" + partitionMap
                + '}';
    }

    /**
     * Returns the broker hosting the given partition, or {@code null} if the
     * partition is unknown.
     */
    public Broker getBrokerFor(Integer partitionId) {
        return partitionMap.get(partitionId);
    }

    /** Returns all known partitions ordered by ascending partition id. */
    public List<Partition> getOrderedPartitions() {
        // ArrayList: the final size is known up front and we only append.
        List<Partition> partitions = new ArrayList<Partition>(partitionMap.size());
        for (Map.Entry<Integer, Broker> partition : partitionMap.entrySet()) {
            partitions.add(new Partition(partition.getValue(), partition
                    .getKey()));
        }
        return partitions;
    }

    /**
     * Iterates the partitions in ascending id order. The returned iterator is
     * a live view: {@link Iterator#remove()} removes the entry from this
     * object's underlying map.
     */
    @Override
    public Iterator<Partition> iterator() {
        final Iterator<Map.Entry<Integer, Broker>> iterator = partitionMap
                .entrySet().iterator();

        return new Iterator<Partition>() {

            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }

            @Override
            public Partition next() {
                Map.Entry<Integer, Broker> next = iterator.next();
                return new Partition(next.getValue(), next.getKey());
            }

            @Override
            public void remove() {
                iterator.remove();
            }
        };
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(partitionMap);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final GlobalPartitionInformation other = (GlobalPartitionInformation) obj;
        return Objects.equal(this.partitionMap, other.partitionMap);
    }
}
| apache-2.0 |
CodingFabian/byte-buddy | byte-buddy-dep/src/test/java/net/bytebuddy/asm/AdviceNoRegularReturnWithinAdviceTest.java | 8557 | package net.bytebuddy.asm;
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.dynamic.loading.ClassLoadingStrategy;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Collection;
import static junit.framework.TestCase.fail;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
@RunWith(Parameterized.class)
public class AdviceNoRegularReturnWithinAdviceTest {
private static final String FOO = "foo";
private final Class<?> type;
public AdviceNoRegularReturnWithinAdviceTest(Class<?> type) {
this.type = type;
}
@Parameterized.Parameters
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][]{
{VoidEnterAdvice.class},
{BooleanEnterAdvice.class},
{ByteEnterAdvice.class},
{ShortEnterAdvice.class},
{CharacterEnterAdvice.class},
{IntegerEnterAdvice.class},
{LongEnterAdvice.class},
{FloatEnterAdvice.class},
{DoubleEnterAdvice.class},
{ReferenceEnterAdvice.class},
{VoidExitAdvice.class},
{BooleanExitAdvice.class},
{ByteExitAdvice.class},
{ShortExitAdvice.class},
{CharacterExitAdvice.class},
{IntegerExitAdvice.class},
{LongExitAdvice.class},
{FloatExitAdvice.class},
{DoubleExitAdvice.class},
{ReferenceExitAdvice.class},
{VoidExitHandlerAdvice.class},
{BooleanExitHandlerAdvice.class},
{ByteExitHandlerAdvice.class},
{ShortExitHandlerAdvice.class},
{CharacterExitHandlerAdvice.class},
{IntegerExitHandlerAdvice.class},
{LongExitHandlerAdvice.class},
{FloatExitHandlerAdvice.class},
{DoubleExitHandlerAdvice.class},
{ReferenceExitHandlerAdvice.class}
});
}
@Test
public void testNoRegularReturn() throws Exception {
Class<?> type = new ByteBuddy()
.redefine(Sample.class)
.visit(Advice.to(this.type).on(named(FOO)))
.make()
.load(ClassLoadingStrategy.BOOTSTRAP_LOADER, ClassLoadingStrategy.Default.WRAPPER)
.getLoaded();
try {
type.getDeclaredMethod(FOO).invoke(type.getDeclaredConstructor().newInstance());
fail();
} catch (InvocationTargetException exception) {
assertThat(exception.getCause(), instanceOf(RuntimeException.class));
}
}
public static class Sample {
public void foo() {
/* empty */
}
}
private static class VoidEnterAdvice {
@Advice.OnMethodEnter
public static void foo() {
throw new RuntimeException();
}
}
private static class BooleanEnterAdvice {
@Advice.OnMethodEnter
public static boolean foo() {
throw new RuntimeException();
}
}
private static class ByteEnterAdvice {
@Advice.OnMethodEnter
public static byte foo() {
throw new RuntimeException();
}
}
private static class ShortEnterAdvice {
@Advice.OnMethodEnter
public static short foo() {
throw new RuntimeException();
}
}
private static class CharacterEnterAdvice {
@Advice.OnMethodEnter
public static char foo() {
throw new RuntimeException();
}
}
private static class IntegerEnterAdvice {
@Advice.OnMethodEnter
public static int foo() {
throw new RuntimeException();
}
}
private static class LongEnterAdvice {
@Advice.OnMethodEnter
public static long foo() {
throw new RuntimeException();
}
}
private static class FloatEnterAdvice {
@Advice.OnMethodEnter
public static float foo() {
throw new RuntimeException();
}
}
private static class DoubleEnterAdvice {
@Advice.OnMethodEnter
public static double foo() {
throw new RuntimeException();
}
}
private static class ReferenceEnterAdvice {
@Advice.OnMethodEnter
public static Object foo() {
throw new RuntimeException();
}
}
private static class VoidExitAdvice {
@Advice.OnMethodExit
public static void foo() {
throw new RuntimeException();
}
}
private static class BooleanExitAdvice {
@Advice.OnMethodExit
public static boolean foo() {
throw new RuntimeException();
}
}
private static class ByteExitAdvice {
@Advice.OnMethodExit
public static byte foo() {
throw new RuntimeException();
}
}
private static class ShortExitAdvice {
@Advice.OnMethodExit
public static short foo() {
throw new RuntimeException();
}
}
private static class CharacterExitAdvice {
@Advice.OnMethodExit
public static char foo() {
throw new RuntimeException();
}
}
private static class IntegerExitAdvice {
@Advice.OnMethodExit
public static int foo() {
throw new RuntimeException();
}
}
private static class LongExitAdvice {
@Advice.OnMethodExit
public static long foo() {
throw new RuntimeException();
}
}
private static class FloatExitAdvice {
@Advice.OnMethodExit
public static float foo() {
throw new RuntimeException();
}
}
private static class DoubleExitAdvice {
@Advice.OnMethodExit
public static double foo() {
throw new RuntimeException();
}
}
private static class ReferenceExitAdvice {
@Advice.OnMethodExit
public static Object foo() {
throw new RuntimeException();
}
}
private static class VoidExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static void foo() {
throw new RuntimeException();
}
}
private static class BooleanExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static boolean foo() {
throw new RuntimeException();
}
}
private static class ByteExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static byte foo() {
throw new RuntimeException();
}
}
private static class ShortExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static short foo() {
throw new RuntimeException();
}
}
private static class CharacterExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static char foo() {
throw new RuntimeException();
}
}
private static class IntegerExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static int foo() {
throw new RuntimeException();
}
}
private static class LongExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static long foo() {
throw new RuntimeException();
}
}
private static class FloatExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static float foo() {
throw new RuntimeException();
}
}
private static class DoubleExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static double foo() {
throw new RuntimeException();
}
}
private static class ReferenceExitHandlerAdvice {
@Advice.OnMethodExit(onThrowable = RuntimeException.class)
public static Object foo() {
throw new RuntimeException();
}
}
}
| apache-2.0 |
zhiqinghuang/dropwizard | dropwizard-auth/src/test/java/io/dropwizard/auth/principal/PolymorphicPrincipalEntityTest.java | 5817 | package io.dropwizard.auth.principal;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.dropwizard.auth.*;
import io.dropwizard.auth.basic.BasicCredentialAuthFilter;
import io.dropwizard.auth.basic.BasicCredentials;
import io.dropwizard.logging.BootstrapLogging;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.servlet.ServletProperties;
import org.glassfish.jersey.test.DeploymentContext;
import org.glassfish.jersey.test.JerseyTest;
import org.glassfish.jersey.test.ServletDeploymentContext;
import org.glassfish.jersey.test.TestProperties;
import org.junit.Test;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.Entity;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.DynamicFeature;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import java.io.IOException;
import java.security.Principal;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
/**
* Testing that polymorphic principal entity injection works.
*/
public class PolymorphicPrincipalEntityTest extends JerseyTest {

    // Username accepted by the JsonPrincipal authenticator.
    private static final String JSON_USERNAME = "good-guy";
    // Username accepted by the NullPrincipal authenticator.
    private static final String NULL_USERNAME = "bad-guy";
    // Base64 of "good-guy:secret" for the Basic Authorization header.
    private static final String JSON_USERNAME_ENCODED_TOKEN = "Z29vZC1ndXk6c2VjcmV0";
    // Base64 of "bad-guy:secret" for the Basic Authorization header.
    private static final String NULL_USERNAME_ENCODED_TOKEN = "YmFkLWd1eTpzZWNyZXQ=";

    static {
        BootstrapLogging.bootstrap();
    }

    @Override
    protected DeploymentContext configureDeployment() {
        forceSet(TestProperties.CONTAINER_PORT, "0");
        return ServletDeploymentContext
            .builder(new PolymorphicPrincipalInjectedResourceConfig())
            .initParam(
                ServletProperties.JAXRS_APPLICATION_CLASS,
                PolymorphicPrincipalInjectedResourceConfig.class.getName())
            .build();
    }

    /**
     * Resource config wiring two principal types (JsonPrincipal and
     * NullPrincipal), each with its own Basic-auth filter, through the
     * polymorphic auth feature.
     */
    public static class PolymorphicPrincipalInjectedResourceConfig extends AbstractAuthResourceConfig {

        public PolymorphicPrincipalInjectedResourceConfig() {
            register(PolymorphicPrincipalEntityResource.class);
            packages("io.dropwizard.jersey.jackson");
        }

        @Override protected Class<? extends Principal> getPrincipalClass() {
            // The polymorphic setup never consults a single principal class.
            throw new AssertionError("getPrincipalClass must not be invoked");
        }

        @Override protected ContainerRequestFilter getAuthFilter() {
            // The polymorphic feature installs its own per-type filters.
            return requestContext -> {
                throw new AssertionError("getAuthFilter result must not be invoked");
            };
        }

        @Override protected AbstractBinder getAuthBinder() {
            return new PolymorphicAuthValueFactoryProvider.Binder<>(
                ImmutableSet.of(JsonPrincipal.class, NullPrincipal.class));
        }

        @Override protected DynamicFeature getAuthDynamicFeature(ContainerRequestFilter authFilter) {
            final Authenticator<BasicCredentials, JsonPrincipal> jsonAuthenticator = credentials -> {
                if (credentials.getUsername().equals(JSON_USERNAME)) {
                    return Optional.of(new JsonPrincipal(credentials.getUsername()));
                } else {
                    return Optional.empty();
                }
            };

            final Authenticator<BasicCredentials, NullPrincipal> nullAuthenticator = credentials -> {
                if (credentials.getUsername().equals(NULL_USERNAME)) {
                    return Optional.of(new NullPrincipal());
                } else {
                    return Optional.empty();
                }
            };

            // Parameterized filter types (the originals were raw types).
            final BasicCredentialAuthFilter<JsonPrincipal> jsonAuthFilter =
                new BasicCredentialAuthFilter.Builder<JsonPrincipal>()
                    .setAuthenticator(jsonAuthenticator)
                    .buildAuthFilter();

            final BasicCredentialAuthFilter<NullPrincipal> nullAuthFilter =
                new BasicCredentialAuthFilter.Builder<NullPrincipal>()
                    .setAuthenticator(nullAuthenticator)
                    .buildAuthFilter();

            return new PolymorphicAuthDynamicFeature<Principal>(ImmutableMap.of(
                JsonPrincipal.class, jsonAuthFilter,
                NullPrincipal.class, nullAuthFilter
            ));
        }
    }

    /** Valid credentials for the JsonPrincipal endpoint yield the username. */
    @Test
    public void jsonPrincipalEntityResourceAuth200() {
        assertThat(target("/auth-test/json-principal-entity").request()
            .header(HttpHeaders.AUTHORIZATION, "Basic " + JSON_USERNAME_ENCODED_TOKEN)
            .get(String.class))
            .isEqualTo(JSON_USERNAME);
    }

    /** Missing credentials on the JsonPrincipal endpoint yield 401. */
    @Test
    public void jsonPrincipalEntityResourceNoAuth401() {
        try {
            target("/auth-test/json-principal-entity").request().get(String.class);
            failBecauseExceptionWasNotThrown(WebApplicationException.class);
        } catch (WebApplicationException e) {
            assertThat(e.getResponse().getStatus()).isEqualTo(401);
        }
    }

    /** Valid credentials for the NullPrincipal endpoint yield "null". */
    @Test
    public void nullPrincipalEntityResourceAuth200() {
        assertThat(target("/auth-test/null-principal-entity").request()
            .header(HttpHeaders.AUTHORIZATION, "Basic " + NULL_USERNAME_ENCODED_TOKEN)
            .get(String.class))
            .isEqualTo("null");
    }

    /** Missing credentials on the NullPrincipal endpoint yield 401. */
    @Test
    public void nullPrincipalEntityResourceNoAuth401() {
        try {
            target("/auth-test/null-principal-entity").request().get(String.class);
            failBecauseExceptionWasNotThrown(WebApplicationException.class);
        } catch (WebApplicationException e) {
            assertThat(e.getResponse().getStatus()).isEqualTo(401);
        }
    }
}
| apache-2.0 |
bradtm/pulsar | pulsar-common/src/main/java/org/apache/pulsar/common/util/SecurityUtility.java | 7700 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.common.util;
import java.io.*;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.security.cert.CertificateFactory;
import java.security.spec.KeySpec;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Base64;
import java.util.Collection;
import javax.net.ssl.*;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.SslProvider;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
/**
 * Helpers for building JDK {@link SSLContext} and Netty {@link SslContext}
 * instances from PEM-encoded trust certificates, client certificates, and
 * private keys.
 */
public class SecurityUtility {

    /** Builds a JDK SSLContext with the given trust anchors and no client key. */
    public static SSLContext createSslContext(boolean allowInsecureConnection, Certificate[] trustCertificates)
            throws GeneralSecurityException {
        return createSslContext(allowInsecureConnection, trustCertificates, (Certificate[]) null, (PrivateKey) null);
    }

    /** Builds a Netty client SslContext trusting the given PEM file, without client auth. */
    public static SslContext createNettySslContext(boolean allowInsecureConnection, String trustCertsFilePath)
            throws GeneralSecurityException, SSLException, FileNotFoundException {
        return createNettySslContext(allowInsecureConnection, trustCertsFilePath, (Certificate[]) null, (PrivateKey) null);
    }

    /** Builds a JDK SSLContext from PEM file paths (trust certs, client cert, client key). */
    public static SSLContext createSslContext(boolean allowInsecureConnection, String trustCertsFilePath,
            String certFilePath, String keyFilePath) throws GeneralSecurityException {
        X509Certificate[] trustCertificates = loadCertificatesFromPemFile(trustCertsFilePath);
        X509Certificate[] certificates = loadCertificatesFromPemFile(certFilePath);
        PrivateKey privateKey = loadPrivateKeyFromPemFile(keyFilePath);
        return createSslContext(allowInsecureConnection, trustCertificates, certificates, privateKey);
    }

    /** Builds a Netty client SslContext from PEM file paths (trust certs, client cert, client key). */
    public static SslContext createNettySslContext(boolean allowInsecureConnection, String trustCertsFilePath,
            String certFilePath, String keyFilePath)
            throws GeneralSecurityException, SSLException, FileNotFoundException {
        X509Certificate[] certificates = loadCertificatesFromPemFile(certFilePath);
        PrivateKey privateKey = loadPrivateKeyFromPemFile(keyFilePath);
        return createNettySslContext(allowInsecureConnection, trustCertsFilePath, certificates, privateKey);
    }

    /**
     * Builds a Netty client SslContext. When {@code allowInsecureConnection}
     * is true, all server certificates are accepted; otherwise the PEM file at
     * {@code trustCertsFilePath} (if given) supplies the trust anchors.
     */
    public static SslContext createNettySslContext(boolean allowInsecureConnection, String trustCertsFilePath,
            Certificate[] certificates, PrivateKey privateKey)
            throws GeneralSecurityException, SSLException, FileNotFoundException {
        SslContextBuilder builder = SslContextBuilder.forClient();
        if (allowInsecureConnection) {
            builder.trustManager(InsecureTrustManagerFactory.INSTANCE);
        } else {
            if (trustCertsFilePath != null && trustCertsFilePath.length() != 0) {
                // trustManager(InputStream) consumes the PEM data eagerly, so the
                // stream can be closed immediately (the original code leaked it).
                try (FileInputStream input = new FileInputStream(trustCertsFilePath)) {
                    builder.trustManager(input);
                } catch (FileNotFoundException e) {
                    throw e;
                } catch (IOException e) {
                    // Close/read failure: surface as a declared security exception.
                    throw new KeyManagementException("Trust certificates loading error", e);
                }
            }
        }
        builder.keyManager(privateKey, (X509Certificate[]) certificates);
        return builder.build();
    }

    /**
     * Builds a JDK SSLContext from in-memory material. Any of the trust
     * certificates, client certificates, or private key may be null.
     */
    public static SSLContext createSslContext(boolean allowInsecureConnection, Certificate[] trustCertficates,
            Certificate[] certificates, PrivateKey privateKey) throws GeneralSecurityException {
        KeyStoreHolder ksh = new KeyStoreHolder();
        TrustManager[] trustManagers = null;
        KeyManager[] keyManagers = null;

        // Set trusted certificate
        if (allowInsecureConnection) {
            trustManagers = InsecureTrustManagerFactory.INSTANCE.getTrustManagers();
        } else {
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());

            if (trustCertficates == null || trustCertficates.length == 0) {
                // null KeyStore makes the factory fall back to the JDK defaults.
                tmf.init((KeyStore) null);
            } else {
                for (int i = 0; i < trustCertficates.length; i++) {
                    ksh.setCertificate("trust" + i, trustCertficates[i]);
                }
                tmf.init(ksh.getKeyStore());
            }

            trustManagers = tmf.getTrustManagers();
        }

        // Set private key and certificate
        if (certificates != null && privateKey != null) {
            ksh.setPrivateKey("private", privateKey, certificates);
            KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
            kmf.init(ksh.getKeyStore(), "".toCharArray());
            keyManagers = kmf.getKeyManagers();
        }

        SSLContext sslCtx = SSLContext.getInstance("TLS");
        sslCtx.init(keyManagers, trustManagers, new SecureRandom());
        return sslCtx;
    }

    /**
     * Loads all X.509 certificates from a PEM file. Returns null when the
     * path is null or empty (callers rely on this sentinel).
     */
    public static X509Certificate[] loadCertificatesFromPemFile(String certFilePath) throws KeyManagementException {
        X509Certificate[] certificates = null;

        if (certFilePath == null || certFilePath.isEmpty()) {
            return certificates;
        }

        try (FileInputStream input = new FileInputStream(certFilePath)) {
            CertificateFactory cf = CertificateFactory.getInstance("X.509");
            // The X.509 factory only ever produces X509Certificate instances.
            Collection<X509Certificate> collection = (Collection<X509Certificate>) cf.generateCertificates(input);
            certificates = collection.toArray(new X509Certificate[collection.size()]);
        } catch (GeneralSecurityException | IOException e) {
            throw new KeyManagementException("Certificate loading error", e);
        }

        return certificates;
    }

    /**
     * Loads a PKCS#8 RSA private key from a PEM file, stripping the BEGIN/END
     * delimiter lines. Returns null when the path is null or empty.
     */
    public static PrivateKey loadPrivateKeyFromPemFile(String keyFilePath) throws KeyManagementException {
        PrivateKey privateKey = null;

        if (keyFilePath == null || keyFilePath.isEmpty()) {
            return privateKey;
        }

        try (BufferedReader reader = new BufferedReader(new FileReader(keyFilePath))) {
            StringBuilder sb = new StringBuilder();
            String previousLine = "";
            String currentLine = null;

            // Skip the first line (-----BEGIN RSA PRIVATE KEY-----)
            reader.readLine();
            while ((currentLine = reader.readLine()) != null) {
                // Lag one line behind so the final (END) delimiter is never appended.
                sb.append(previousLine);
                previousLine = currentLine;
            }
            // Skip the last line (-----END RSA PRIVATE KEY-----)

            KeyFactory kf = KeyFactory.getInstance("RSA");
            KeySpec keySpec = new PKCS8EncodedKeySpec(Base64.getDecoder().decode(sb.toString()));
            privateKey = kf.generatePrivate(keySpec);
        } catch (GeneralSecurityException | IOException e) {
            throw new KeyManagementException("Private key loading error", e);
        }

        return privateKey;
    }

}
| apache-2.0 |
jadekler/spring-cloud-dataflow | spring-cloud-dataflow-module-deployers/spring-cloud-dataflow-module-deployer-local/src/test/java/org/springframework/cloud/dataflow/module/deployer/local/LocalModuleDeployerTests.java | 3101 | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.module.deployer.local;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.cloud.dataflow.core.ArtifactCoordinates;
import org.springframework.cloud.dataflow.core.ModuleDefinition;
import org.springframework.cloud.dataflow.core.ModuleDeploymentRequest;
import org.springframework.cloud.stream.module.launcher.ModuleLauncher;
import org.springframework.cloud.stream.module.launcher.ModuleLauncherConfiguration;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
* Tests deployment of the time-source and log-sink modules.
*
* @author Mark Fisher
* @author Marius Bogoevici
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = ModuleLauncherConfiguration.class)
@DirtiesContext
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = ModuleLauncherConfiguration.class)
@DirtiesContext
public class LocalModuleDeployerTests {

    // Maven coordinates shared by the stream modules deployed below.
    private static final String GROUP_ID = "org.springframework.cloud.stream.module";

    private static final String VERSION = "1.0.0.BUILD-SNAPSHOT";

    @Autowired
    private ModuleLauncher moduleLauncher;

    // Deploys a time-source piped to a log-sink ("ticktock" stream) via the
    // local deployer. Currently @Ignore'd: the test deploys but never
    // verifies status or undeploys (see TODO at the end).
    @Test @Ignore // see TODO below
    public void timeToLogStream() {
        LocalModuleDeployer deployer = new LocalModuleDeployer(moduleLauncher);
        ModuleDefinition timeDefinition = new ModuleDefinition.Builder()
                .setGroup("ticktock")
                .setName("time")
                .setParameter("spring.cloud.stream.bindings.output", "ticktock.0")
                .build();
        ModuleDefinition logDefinition = new ModuleDefinition.Builder()
                .setGroup("ticktock")
                .setName("log")
                .setParameter("spring.cloud.stream.bindings.input", "ticktock.0")
                .build();
        ArtifactCoordinates timeCoordinates = new ArtifactCoordinates.Builder()
                .setGroupId(GROUP_ID)
                .setArtifactId("time-source")
                .setVersion(VERSION)
                .setClassifier("exec")
                .build();
        ArtifactCoordinates logCoordinates = new ArtifactCoordinates.Builder()
                .setGroupId(GROUP_ID)
                .setArtifactId("log-sink")
                .setVersion(VERSION)
                .setClassifier("exec")
                .build();
        ModuleDeploymentRequest time = new ModuleDeploymentRequest(timeDefinition, timeCoordinates);
        ModuleDeploymentRequest log = new ModuleDeploymentRequest(logDefinition, logCoordinates);
        deployer.deploy(time);
        deployer.deploy(log);
        // TODO: check status, then undeploy
    }

}
| apache-2.0 |
pyros2097/GdxStudio | src_libs/org/fife/rsta/ac/java/rjc/ast/EnumDeclaration.java | 748 | /*
* 03/21/2010
*
* Copyright (C) 2010 Robert Futrell
* robert_futrell at users.sourceforge.net
* http://fifesoft.com/rsyntaxtextarea
*
* This library is distributed under a modified BSD license. See the included
* RSTALanguageSupport.License.txt file for details.
*/
package org.fife.rsta.ac.java.rjc.ast;
import org.fife.rsta.ac.java.rjc.lexer.Scanner;
/**
 * AST node representing an {@code enum} type declaration. Offsets span the
 * enum's name token within the parsed source.
 */
public class EnumDeclaration extends AbstractTypeDeclarationNode {

//	private EnumBody enumBody;

    /**
     * Creates the node for an enum declaration.
     *
     * @param s The scanner used to create tracked offsets.
     * @param offs The offset of the enum's name token.
     * @param name The name of the enum.
     */
    public EnumDeclaration(Scanner s, int offs, String name) {
        super(name, s.createOffset(offs), s.createOffset(offs+name.length()));
    }

    /** Returns the type-declaration keyword for this node. */
    public String getTypeString() {
        return "enum";
    }

//	public void setEnumBody(EnumBody enumBody) {
//		this.enumBody = enumBody;
//	}

} | apache-2.0 |
JulienMrgrd/generator-jhipster | generators/server/templates/src/main/java/package/config/_UaaConfiguration.java | 7879 | <%#
Copyright 2013-2017 the original author or authors from the JHipster project.
This file is part of the JHipster project, see https://jhipster.github.io/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
package <%=packageName%>.config;
import <%=packageName%>.security.AuthoritiesConstants;
import io.github.jhipster.config.JHipsterProperties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.oauth2.config.annotation.configurers.ClientDetailsServiceConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configuration.AuthorizationServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableAuthorizationServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerEndpointsConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerSecurityConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configurers.ResourceServerSecurityConfigurer;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.security.oauth2.provider.token.store.JwtAccessTokenConverter;
import org.springframework.security.oauth2.provider.token.store.JwtTokenStore;
import org.springframework.security.oauth2.provider.token.store.KeyStoreKeyFactory;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.web.filter.CorsFilter;
import javax.servlet.http.HttpServletResponse;
import java.security.KeyPair;
/**
 * Configures this application as an OAuth2 authorization server (UAA): client
 * registration, JWT token issuing/verification, and the resource-server rules
 * that protect the UAA's own endpoints.
 *
 * NOTE: this file is a JHipster generator template; the <% %> markers are
 * expanded at generation time and must be left intact.
 */
@Configuration
@EnableAuthorizationServer
public class UaaConfiguration extends AuthorizationServerConfigurerAdapter {

    /**
     * Resource-server configuration protecting the UAA's own API endpoints
     * with the JWT tokens it issues.
     */
    @EnableResourceServer
    public static class ResourceServerConfiguration extends ResourceServerConfigurerAdapter {

        private final TokenStore tokenStore;

        // NOTE(review): injected but not referenced in this class — presumably
        // kept for template variants; confirm before removing.
        private final JHipsterProperties jHipsterProperties;

        private final CorsFilter corsFilter;

        public ResourceServerConfiguration(TokenStore tokenStore, JHipsterProperties jHipsterProperties, CorsFilter corsFilter) {
            this.tokenStore = tokenStore;
            this.jHipsterProperties = jHipsterProperties;
            this.corsFilter = corsFilter;
        }

        /**
         * Stateless security for the API: 401 instead of a login redirect,
         * CSRF disabled (token-based auth), CORS filter applied before
         * authentication, and per-path access rules.
         */
        @Override
        public void configure(HttpSecurity http) throws Exception {
            http
                .exceptionHandling()
                // No session/login page: unauthenticated requests get a plain 401.
                .authenticationEntryPoint((request, response, authException) -> response.sendError(HttpServletResponse.SC_UNAUTHORIZED))
            .and()
                .csrf()
                .disable()
                .addFilterBefore(corsFilter, UsernamePasswordAuthenticationFilter.class)
                .headers()
                .frameOptions()
                .disable()
            .and()
                .sessionManagement()
                .sessionCreationPolicy(SessionCreationPolicy.STATELESS)
            .and()
                .authorizeRequests()
                // Account self-service endpoints are reachable without a token.
                .antMatchers("/api/register").permitAll()
                .antMatchers("/api/activate").permitAll()
                .antMatchers("/api/authenticate").permitAll()
                .antMatchers("/api/account/reset_password/init").permitAll()
                .antMatchers("/api/account/reset_password/finish").permitAll()
                .antMatchers("/api/profile-info").permitAll()
                .antMatchers("/api/**").authenticated()<% if (websocket === 'spring-websocket') { %>
                .antMatchers("/websocket/tracker").hasAuthority(AuthoritiesConstants.ADMIN)
                .antMatchers("/websocket/**").permitAll()<% } %>
                .antMatchers("/management/health").permitAll()
                .antMatchers("/management/**").hasAuthority(AuthoritiesConstants.ADMIN)
                .antMatchers("/v2/api-docs/**").permitAll()
                .antMatchers("/swagger-resources/configuration/ui").permitAll()
                .antMatchers("/swagger-ui/index.html").hasAuthority(AuthoritiesConstants.ADMIN);
        }

        @Override
        public void configure(ResourceServerSecurityConfigurer resources) throws Exception {
            resources.resourceId("jhipster-uaa").tokenStore(tokenStore);
        }
    }

    private final JHipsterProperties jHipsterProperties;

    public UaaConfiguration(JHipsterProperties jHipsterProperties) {
        this.jHipsterProperties = jHipsterProperties;
    }

    /**
     * Registers the OAuth2 clients: the browser "web_app" client (user-facing
     * grants) and the internal client used for service-to-service calls via
     * the client_credentials grant.
     */
    @Override
    public void configure(ClientDetailsServiceConfigurer clients) throws Exception {
        /*
        For a better client design, this should be done by a ClientDetailsService (similar to UserDetailsService).
         */
        clients.inMemory()
            .withClient("web_app")
            .scopes("openid")
            .autoApprove(true)
            .authorizedGrantTypes("implicit","refresh_token", "password", "authorization_code")
            .and()
            .withClient(jHipsterProperties.getSecurity().getClientAuthorization().getClientId())
            .secret(jHipsterProperties.getSecurity().getClientAuthorization().getClientSecret())
            .scopes("web-app")
            .autoApprove(true)
            .authorizedGrantTypes("client_credentials");
    }

    /**
     * Wires the token endpoints to the authentication manager and the JWT
     * access-token converter so issued tokens are signed JWTs.
     */
    @Override
    public void configure(AuthorizationServerEndpointsConfigurer endpoints) throws Exception {
        endpoints.authenticationManager(authenticationManager).accessTokenConverter(
            jwtAccessTokenConverter());
    }

    @Autowired
    @Qualifier("authenticationManagerBean")
    private AuthenticationManager authenticationManager;

    /**
     * Apply the token converter (and enhancer) for the token store.
     */
    @Bean
    public JwtTokenStore tokenStore() {
        return new JwtTokenStore(jwtAccessTokenConverter());
    }

    /**
     * This bean generates a token enhancer, which manages the exchange between JWT access tokens and Authentication
     * in both directions.
     *
     * @return an access token converter configured with the authorization server's public/private keys
     */
    @Bean
    public JwtAccessTokenConverter jwtAccessTokenConverter() {
        JwtAccessTokenConverter converter = new JwtAccessTokenConverter();
        // Signing key pair loaded from the bundled keystore; resource servers
        // verify tokens with the public half.
        KeyPair keyPair = new KeyStoreKeyFactory(
            new ClassPathResource("keystore.jks"), "password".toCharArray())
            .getKeyPair("selfsigned");
        converter.setKeyPair(keyPair);
        return converter;
    }

    /**
     * Exposes the token key endpoint publicly (needed by resource servers to
     * fetch the verifier key) while requiring authentication for check_token.
     */
    @Override
    public void configure(AuthorizationServerSecurityConfigurer oauthServer) throws Exception {
        oauthServer.tokenKeyAccess("permitAll()").checkTokenAccess(
            "isAuthenticated()");
    }
}
| apache-2.0 |
KeyNexus/netty | src/main/java/org/jboss/netty/channel/local/LocalServerChannelFactory.java | 999 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.channel.local;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ServerChannelFactory;
/**
 * A {@link ServerChannelFactory} that creates a {@link LocalServerChannel}.
 */
public interface LocalServerChannelFactory extends ServerChannelFactory {

    /**
     * Creates a new {@link LocalServerChannel} which uses the specified
     * {@link ChannelPipeline}.
     *
     * @param pipeline the pipeline which will handle the channel's events
     * @return the newly created server channel
     */
    LocalServerChannel newChannel(ChannelPipeline pipeline);
}
| apache-2.0 |
s13372/SORCER | core/sorcer-platform/src/main/java/sorcer/data/DataService.java | 12999 | /*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sorcer.data;
import org.rioproject.config.Constants;
import org.rioproject.net.HostUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sorcer.tools.webster.Webster;
import sorcer.util.FileURLHandler;
import sorcer.util.GenericUtil;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
/**
* The DataService provides the support to serve up data from a directory (or a set of directories).
*
* @author Dennis Reedy
*/
public class DataService implements FileURLHandler {
    // Port the embedded Webster HTTP server listens on; 0 means "anonymous"
    // (an ephemeral port chosen at start()).
    private int port;
    // Directory roots served by this service, with '\' normalized to '/'.
    private final String[] roots;
    // Holds the running Webster instance; null until start() succeeds.
    private final AtomicReference<Webster> websterRef = new AtomicReference<>();
    // Host address the service is bound to; null until started/joined.
    private String address;
    private static final Logger logger = LoggerFactory.getLogger(DataService.class.getName());
    public static final String DATA_DIR = "sorcer.data.dir";
    public static final String DATA_URL = "sorcer.data.url";

    /**
     * Get the DataService that is bound to the platform code server.
     *
     * @return The DataService that is bound to the platform code server,
     * or if platform server properties are not found, return a DataService
     * using the default data dir and an anonymous port.
     */
    public static DataService getPlatformDataService() {
        DataService dataService;
        String webster = System.getProperty(Constants.WEBSTER);
        if(webster!=null) {
            // The property is expected to end in ":<port>"; reuse that port.
            int ndx = webster.lastIndexOf(":");
            int port = Integer.parseInt(webster.substring(ndx+1));
            String roots = getDataDir();
            dataService = new DataService(port, roots.split(";")).start();
        } else {
            logger.warn("Platform DataService property not found, " +
                    "create DataService using the data dir (" + getDataDir() + "), " +
                    "and an anonymous port");
            dataService = new DataService(0, getDataDir().split(";")).start();
        }
        return dataService;
    }

    /**
     * Create a DataService with roots. The resulting service will use an anonymous port.
     *
     * @param roots The roots to provide access to.
     */
    public DataService(final String... roots) {
        this(0, roots);
    }

    /**
     * Create a DataService with roots and a port.
     *
     * @param port The port to use.
     * @param roots The roots to provide access to.
     *
     * @throws IllegalArgumentException if the roots argument is empty or null, or if any of
     * the roots do not exist or are not directories.
     */
    public DataService(final int port, final String... roots) {
        this.port = port;
        if(roots==null || roots.length==0)
            throw new IllegalArgumentException("You must provide roots");
        List<String> adjusted = new ArrayList<>();
        for(String root : roots) {
            File f = new File(root);
            if(!f.exists())
                throw new IllegalArgumentException("The root ["+root+"] does not exist");
            if(!f.isDirectory())
                throw new IllegalArgumentException("The root ["+root+"] is not a directory");
            // Normalize Windows separators so URL-relative path math works.
            adjusted.add(root.replace('\\', '/'));
        }
        this.roots = adjusted.toArray(new String[adjusted.size()]);
    }

    /**
     * Start the data service if it has not been started yet.
     *
     * @return An updated instance of the DataService.
     */
    public DataService start() {
        if(websterRef.get()==null) {
            // Webster takes its roots as a single ';'-separated string.
            StringBuilder websterRoots = new StringBuilder();
            for(String root : roots) {
                if (websterRoots.length() > 0)
                    websterRoots.append(";");
                websterRoots.append(root);
            }
            try {
                websterRef.set(new Webster(port, websterRoots.toString(), getDataDir()));
                // Capture the actual bound port/address (port may have been 0).
                port = websterRef.get().getPort();
                address = websterRef.get().getAddress();
                logger.info(String.format("Started data service on: %s:%d\n%s",
                        address, port, formatRoots()));
                System.setProperty(DATA_URL, String.format("http://%s:%d", address, port));
                System.setProperty(Constants.WEBSTER, String.format("http://%s:%d", address, port));
            } catch (IOException e) {
                // IOException here is treated as "port already in use": assume
                // another data service instance is running and join it.
                try {
                    address = HostUtil.getInetAddress().getHostAddress();
                } catch (UnknownHostException e1) {
                    logger.error("Can not get host address", e1);
                    throw new RuntimeException("Can not get host address", e1);
                }
                logger.warn(String.format("Data service already running, join %s:%d\n%s",
                        address, port, formatRoots()));
            }
        }
        return this;
    }

    /**
     * Get a {@link URL} for a file path.
     *
     * @param path The file path to obtain a URL for.
     *
     * @return A URL that can be used to access the file.
     *
     * @throws IOException if the file does not exist, or the URL cannot be created.
     * @throws IllegalArgumentException if the file cannot be accessed from one of the roots provided.
     * @throws IllegalStateException if the data service is not running.
     */
    public URL getDataURL(final String path) throws IOException {
        return getDataURL(new File(path));
    }

    /**
     * Get a {@link URL} for a file.
     *
     * @param file The file to obtain a URL for.
     *
     * @return A URL that can be used to access the file.
     *
     * @throws IOException if the file does not exist, or the URL cannot be created.
     * @throws IllegalArgumentException if the file cannot be accessed from one of the roots provided.
     * @throws IllegalStateException if the data service is not available.
     */
    public URL getDataURL(final File file) throws IOException {
        return getDataURL(file, true);
    }

    /**
     * Get a {@link URL} for a file.
     *
     * @param file The file to obtain a URL for.
     * @param verify Whether to verify the file can be served up by the DataService and the
     * DataService is running.
     *
     * @return A URL that can be used to access the file.
     *
     * @throws IOException if the file does not exist, or the URL cannot be created.
     * @throws IllegalArgumentException if the file cannot be accessed from one of the roots provided.
     * @throws IllegalStateException if the data service is not available.
     */
    public URL getDataURL(final File file, final boolean verify) throws IOException {
        if(file==null)
            throw new IllegalArgumentException("The file argument cannot be null");
        if(!file.exists())
            throw new FileNotFoundException("The "+file.getPath()+" does not exist");
        if(address==null)
            throw new IllegalStateException("The data service is not available");
        // Normalize separators to match the normalized roots, then derive the
        // path relative to the first root that contains the file.
        String path = file.getPath().replace('\\', '/');
        String relativePath = null;
        for(String root : roots) {
            if(path.startsWith(root)) {
                relativePath = path.substring(root.length());
                break;
            }
        }
        if(relativePath==null)
            throw new IllegalArgumentException("The provided path ["+path+"], is not navigable " +
                    "from existing roots "+ Arrays.toString(roots));
        URL url = new URL(String.format("http://%s:%d%s", address, port, relativePath));
        if(verify) {
            // Probe the URL; if unreachable, try (re)starting the service once
            // and probe again before giving up.
            IOException notAvailable = verify(url);
            if (notAvailable != null) {
                logger.warn(String.format("Unable to verify %s, try and start DataService on %s:%d",
                        url.toExternalForm(), address, port));
                start();
                notAvailable = verify(url);
                if (notAvailable != null)
                    throw notAvailable;
            }
        }
        return url;
    }

    /**
     * Download the contents of a URL to a local file
     *
     * @param url The URL to download
     * @param to The file to download to
     *
     * @throws IOException if the URL cannot be read or the file cannot be written.
     */
    public void download(final URL url, final File to) throws IOException {
        GenericUtil.download(url, to);
    }

    /**
     * Get a File from a URL.
     *
     * @param url The URL to use
     *
     * @return a File derived from the DataService data directory root(s).
     *
     * @throws FileNotFoundException If the URL cannot be accessed from one of the roots provided.
     */
    public File getDataFile(final URL url) throws IOException {
        File file = null;
        if(url.getProtocol().startsWith("file")) {
            // file: URL — accept it only if it lies under one of our roots.
            try {
                File f = new File(url.toURI());
                for(String root : roots) {
                    // for matching windows paths (or any OS)
                    root = new File(root).getAbsolutePath();
                    if(f.getPath().startsWith(root)) {
                        file = f;
                        break;
                    }
                }
            } catch (URISyntaxException e) {
                throw new FileNotFoundException("Could not create file from "+url);
            }
        } else {
            // http (or other) URL — resolve its path against each root until a
            // file is found on the local filesystem.
            String filePath = url.getPath();
            // URL paths start with '/'; on platforms where that is not the
            // separator, replace it throughout with the platform separator.
            char sep = filePath.charAt(0);
            if (sep != File.separatorChar) {
                filePath = filePath.replace(sep, File.separatorChar);
            }
            for(String root : roots) {
                File f = new File(root, filePath);
                if(f.exists()) {
                    file = f;
                    break;
                }
            }
        }
        if(file==null || !file.exists())
            throw new FileNotFoundException("The "+url.toExternalForm()+" " +
                    "is not accessible from existing roots "+ Arrays.toString(roots));
        return file;
    }

    /**
     * Stop the data service.
     */
    public void stop() {
        if(websterRef.get()!=null) {
            websterRef.get().terminate();
            websterRef.set(null);
        }
        address = null;
    }

    // Returns null if the URL responds to an HTTP request, otherwise the
    // IOException describing why it is unreachable.
    IOException verify(URL url) {
        try {
            HttpURLConnection connection = (HttpURLConnection)url.openConnection();
            connection.getResponseCode();
            connection.disconnect();
        } catch(IOException e) {
            return e;
        }
        return null;
    }

    // Human-readable, numbered listing of the configured roots (for logs).
    private String formatRoots() {
        StringBuilder sb = new StringBuilder();
        int i=0;
        for(String root : roots) {
            if(sb.length()>0)
                sb.append("\n");
            sb.append("Root ").append(i++).append(" ").append(root);
        }
        return sb.toString();
    }

    /**
     * Get the DataService data directory. The {@link DataService#DATA_DIR} system property is first
     * consulted, if that property is not set, the default of
     * System.getProperty("java.io.tmpdir")/sorcer/user/data is used and the {@link DataService#DATA_DIR}
     * system property is set.
     *
     * @return The DataService data directory.
     */
    public static String getDataDir() {
        String dataDir = System.getProperty(DATA_DIR);
        if(dataDir==null) {
            dataDir = new File(String.format("%s%ssorcer-%s%sdata",
                    System.getProperty("java.io.tmpdir"),
                    File.separator,
                    System.getProperty("user.name"),
                    File.separator)).getAbsolutePath();
            System.setProperty(DATA_DIR, dataDir);
        }
        return dataDir;
    }

    /**
     * Get the value of the DATA_URL system property
     *
     * @return The value of the DATA_URL system property
     */
    public String getDataUrl() {
        return String.format("http://%s:%d", address, port);
    }

    /**
     * Get the DataService data directory.
     *
     * @return The DataService data directory, as returned by {@link #getDataDir()}.
     */
    public String getDir() {
        return DataService.getDataDir();
    }
}
| apache-2.0 |
jdamick/denominator | ultradns/src/test/java/denominator/ultradns/UltraDNSGeoReadOnlyLiveTest.java | 231 | package denominator.ultradns;
import denominator.Live.UseTestGraph;
import denominator.profile.GeoReadOnlyLiveTest;
/**
 * Live (integration) test that runs the read-only geo test cases inherited
 * from {@link GeoReadOnlyLiveTest} against the UltraDNS provider, wired via
 * {@link UltraDNSTestGraph}.
 */
@UseTestGraph(UltraDNSTestGraph.class)
public class UltraDNSGeoReadOnlyLiveTest extends GeoReadOnlyLiveTest {
}
| apache-2.0 |
zliu41/gobblin | gobblin-compaction/src/main/java/gobblin/compaction/listeners/ReflectionCompactorListenerFactory.java | 2295 | /*
* Copyright (C) 2014-2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.compaction.listeners;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import org.apache.commons.lang3.reflect.ConstructorUtils;
import gobblin.configuration.State;
/**
 * A {@link CompactorListenerFactory} that builds {@link CompactorListener}s via
 * reflection. The {@link #COMPACTOR_LISTENERS} config key holds a comma separated
 * list of listener class names; each class must expose a constructor taking a
 * {@link Properties}. The resulting listeners are invoked serially, in the order
 * they are configured.
 */
public class ReflectionCompactorListenerFactory implements CompactorListenerFactory {

  @VisibleForTesting
  static final String COMPACTOR_LISTENERS = "compactor.listeners";

  @Override
  public Optional<CompactorListener> createCompactorListener(Properties properties)
      throws CompactorListenerCreationException {
    State jobState = new State(properties);
    // No listeners configured means no listener at all.
    if (Strings.isNullOrEmpty(jobState.getProp(COMPACTOR_LISTENERS))) {
      return Optional.absent();
    }
    List<CompactorListener> created = new ArrayList<>();
    for (String className : jobState.getPropAsList(COMPACTOR_LISTENERS)) {
      try {
        // Each listener class is instantiated reflectively with the job Properties.
        CompactorListener listener =
            (CompactorListener) ConstructorUtils.invokeConstructor(Class.forName(className), properties);
        created.add(listener);
      } catch (ReflectiveOperationException e) {
        throw new CompactorListenerCreationException(String
            .format("Unable to create CompactorListeners from key \"%s\" with value \"%s\"", COMPACTOR_LISTENERS,
                properties.getProperty(COMPACTOR_LISTENERS)), e);
      }
    }
    // Wrap all listeners in a serial dispatcher, preserving configuration order.
    return Optional.<CompactorListener>of(new SerialCompactorListener(created));
  }
}
| apache-2.0 |
jayarampradhan/spring-boot | spring-boot-actuator/src/test/java/org/springframework/boot/actuate/autoconfigure/HealthMvcEndpointAutoConfigurationTests.java | 6100 | /*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.autoconfigure;
import java.security.Principal;
import java.util.Arrays;
import javax.servlet.http.HttpServletRequest;
import org.junit.After;
import org.junit.Test;
import org.springframework.boot.actuate.endpoint.HealthEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.HealthMvcEndpoint;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.Health.Builder;
import org.springframework.boot.actuate.health.Status;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.http.HttpMessageConvertersAutoConfiguration;
import org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration;
import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration;
import org.springframework.boot.autoconfigure.web.servlet.WebMvcAutoConfiguration;
import org.springframework.boot.test.util.EnvironmentTestUtils;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for {@link EndpointWebMvcAutoConfiguration} of the {@link HealthMvcEndpoint}.
 *
 * @author Dave Syer
 * @author Andy Wilkinson
 */
public class HealthMvcEndpointAutoConfigurationTests {

    // Fresh application context built per test; closed in close().
    private AnnotationConfigWebApplicationContext context;

    @After
    public void close() {
        if (this.context != null) {
            this.context.close();
        }
    }

    // With default (secure) settings, an unauthenticated request sees the
    // aggregate status but not the indicator details.
    @Test
    public void testSecureByDefault() throws Exception {
        this.context = new AnnotationConfigWebApplicationContext();
        this.context.setServletContext(new MockServletContext());
        this.context.register(TestConfiguration.class);
        this.context.refresh();
        MockHttpServletRequest request = new MockHttpServletRequest();
        Health health = (Health) this.context.getBean(HealthMvcEndpoint.class)
                .invoke(request, null);
        assertThat(health.getStatus()).isEqualTo(Status.UP);
        assertThat(health.getDetails().get("foo")).isNull();
    }

    // With management security disabled, full details (including the nested
    // TestHealthIndicator's "foo" entry) are exposed.
    @Test
    public void testNotSecured() throws Exception {
        this.context = new AnnotationConfigWebApplicationContext();
        this.context.setServletContext(new MockServletContext());
        this.context.register(TestConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.security.enabled=false");
        this.context.refresh();
        Health health = (Health) this.context.getBean(HealthMvcEndpoint.class)
                .invoke(null, null);
        assertThat(health.getStatus()).isEqualTo(Status.UP);
        Health map = (Health) health.getDetails().get("test");
        assertThat(map.getDetails().get("foo")).isEqualTo("bar");
    }

    // Verifies the configured management.security.roles list is propagated to
    // the endpoint's private "roles" field (inspected via reflection).
    @Test
    public void testSetRoles() throws Exception {
        // gh-8314
        this.context = new AnnotationConfigWebApplicationContext();
        this.context.setServletContext(new MockServletContext());
        this.context.register(TestConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.security.roles[0]=super");
        this.context.refresh();
        HealthMvcEndpoint health = this.context.getBean(HealthMvcEndpoint.class);
        assertThat(ReflectionTestUtils.getField(health, "roles"))
                .isEqualTo(Arrays.asList("super"));
    }

    // A user-defined HealthMvcEndpoint bean must back off the auto-configured
    // one; the custom endpoint here always exposes details.
    @Test
    public void endpointConditionalOnMissingBean() throws Exception {
        this.context = new AnnotationConfigWebApplicationContext();
        this.context.setServletContext(new MockServletContext());
        this.context.register(TestConfiguration.class,
                TestHealthMvcEndpointConfiguration.class);
        this.context.refresh();
        MockHttpServletRequest request = new MockHttpServletRequest();
        Health health = (Health) this.context.getBean(HealthMvcEndpoint.class)
                .invoke(request, null);
        assertThat(health.getDetails()).isNotEmpty();
    }

    // Baseline configuration: actuator auto-configurations plus a test
    // indicator contributing a "foo" -> "bar" detail.
    @Configuration
    @ImportAutoConfiguration({ SecurityAutoConfiguration.class,
            JacksonAutoConfiguration.class, WebMvcAutoConfiguration.class,
            HttpMessageConvertersAutoConfiguration.class, AuditAutoConfiguration.class,
            EndpointAutoConfiguration.class, EndpointWebMvcAutoConfiguration.class })
    static class TestConfiguration {

        @Bean
        public TestHealthIndicator testHealthIndicator() {
            return new TestHealthIndicator();
        }

    }

    // Same auto-configurations, but also defines a custom HealthMvcEndpoint
    // bean to exercise the @ConditionalOnMissingBean behavior.
    @Configuration
    @ImportAutoConfiguration({ SecurityAutoConfiguration.class,
            JacksonAutoConfiguration.class, WebMvcAutoConfiguration.class,
            HttpMessageConvertersAutoConfiguration.class, AuditAutoConfiguration.class,
            EndpointAutoConfiguration.class, EndpointWebMvcAutoConfiguration.class })
    static class TestHealthMvcEndpointConfiguration {

        @Bean
        public HealthMvcEndpoint endpoint(HealthEndpoint endpoint) {
            return new TestHealthMvcEndpoint(endpoint);
        }

    }

    // HealthMvcEndpoint variant that unconditionally exposes health details.
    static class TestHealthMvcEndpoint extends HealthMvcEndpoint {

        TestHealthMvcEndpoint(HealthEndpoint delegate) {
            super(delegate);
        }

        @Override
        protected boolean exposeHealthDetails(HttpServletRequest request,
                Principal principal) {
            return true;
        }

    }

    // Indicator reporting UP with a single detail entry used by assertions.
    static class TestHealthIndicator extends AbstractHealthIndicator {

        @Override
        protected void doHealthCheck(Builder builder) throws Exception {
            builder.up().withDetail("foo", "bar");
        }

    }

}
| apache-2.0 |
greg-dove/flex-falcon | debugger/src/main/java/flash/tools/debugger/NotSupportedException.java | 1440 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flash.tools.debugger;
/**
 * Indicates that a debugger feature is not supported by the Flash
 * player that is being targeted. For example, newer players
 * support the ability to have the debugger call arbitrary
 * functions, but older ones do not.
 *
 * @author Mike Morearty
 */
public class NotSupportedException extends PlayerDebugException
{
    // Serialization identity; keep stable so serialized instances remain
    // compatible across releases.
    private static final long serialVersionUID = -8873935118857320824L;

    /**
     * @param s an error message, e.g. "Target player does not support
     * function calls," or "Target player does not support watchpoints".
     */
    public NotSupportedException(String s)
    {
        super(s);
    }
}
| apache-2.0 |
longerian/RC4A | src/org/rubychina/android/fragment/ContentDialogFragment.java | 2810 | /*Copyright (C) 2012 Longerian (http://www.longerian.me)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package org.rubychina.android.fragment;
import org.rubychina.android.R;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockDialogFragment;
/**
 * A dialog fragment that shows a title and a block of text content. Create
 * instances through one of the {@code newInstance} factory methods; the values
 * are carried in the fragment's arguments bundle so they survive re-creation.
 */
public class ContentDialogFragment extends SherlockDialogFragment {

    // Argument-bundle keys.
    private final static String TITLE = "title";
    private final static String CONTENT = "content";
    private final static String STYLE = "style";
    private final static String THEME = "theme";

    private String title;
    private String content;
    private int style;
    private int theme;

    /** Creates a dialog with the default (STYLE_NORMAL) style. */
    public static ContentDialogFragment newInstance(String title, String content) {
        return newInstance(title, content, SherlockDialogFragment.STYLE_NORMAL);
    }

    /** Creates a dialog with the given style and the default theme. */
    public static ContentDialogFragment newInstance(String title, String content, int style) {
        return newInstance(title, content, style, 0);
    }

    /**
     * Creates a dialog with the given style and theme.
     *
     * @param title dialog title (shown only for STYLE_NORMAL dialogs)
     * @param content text displayed in the dialog body
     * @param style one of the DialogFragment STYLE_* constants
     * @param theme theme resource id, or 0 for the default
     */
    public static ContentDialogFragment newInstance(String title, String content, int style, int theme) {
        Bundle args = new Bundle();
        args.putString(TITLE, title);
        args.putString(CONTENT, content);
        args.putInt(STYLE, style);
        args.putInt(THEME, theme);
        ContentDialogFragment fragment = new ContentDialogFragment();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Bundle args = getArguments();
        if (args != null) {
            title = args.getString(TITLE);
            content = args.getString(CONTENT);
            style = args.getInt(STYLE);
            theme = args.getInt(THEME);
        }
        setStyle(style, theme);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Only STYLE_NORMAL dialogs have a system-provided title area.
        if (style == SherlockDialogFragment.STYLE_NORMAL) {
            getDialog().setTitle(title);
        }
        View root = inflater.inflate(R.layout.content_dialog, container, false);
        ((TextView) root.findViewById(R.id.content)).setText(content);
        return root;
    }

}
| apache-2.0 |
lihongqiang/kettle-4.4.0-stable | src/org/pentaho/di/trans/step/StepMeta.java | 28117 | /*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.step;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.pentaho.di.cluster.ClusterSchema;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.CheckResultSourceInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepLoaderException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.gui.GUIPositionInterface;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceExportInterface;
import org.pentaho.di.resource.ResourceHolderInterface;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.shared.SharedObjectBase;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.errorhandling.StreamInterface;
import org.w3c.dom.Node;
/**
* This class contains everything that is needed to define a step.
*
* @since 27-mei-2003
* @author Matt
*
*/
public class StepMeta extends SharedObjectBase implements Cloneable, Comparable<StepMeta>, GUIPositionInterface, SharedObjectInterface,
CheckResultSourceInterface, ResourceExportInterface, ResourceHolderInterface
{
private static Class<?> PKG = StepMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
public static final String XML_TAG = "step";
public static final String STRING_ID_MAPPING = "Mapping";
public static final String STRING_ID_SINGLE_THREADER = "SingleThreader";
public static final String STRING_ID_ETL_META_INJECT = "MetaInject";
public static final String STRING_ID_MAPPING_INPUT = "MappingInput";
public static final String STRING_ID_MAPPING_OUTPUT = "MappingOutput";
private String stepid; // --> StepPlugin.id
private String stepname;
private StepMetaInterface stepMetaInterface;
private boolean selected;
private boolean distributes;
private int copies;
private Point location;
private boolean drawstep;
private String description;
private boolean terminator;
private StepPartitioningMeta stepPartitioningMeta;
private StepPartitioningMeta targetStepPartitioningMeta;
private ClusterSchema clusterSchema;
private String clusterSchemaName; // temporary to resolve later.
private StepErrorMeta stepErrorMeta;
// OK, we need to explain to this running step that we expect input from remote steps.
// This only happens when the previous step "repartitions". (previous step has different
// partitioning method than this one)
//
// So here we go, let's create List members for the remote input and output step
//
/** These are the remote input steps to read from, one per host:port combination */
private List<RemoteStep> remoteInputSteps;
/** These are the remote output steps to write to, one per host:port combination */
private List<RemoteStep> remoteOutputSteps;
private ObjectId id;
private TransMeta parentTransMeta;
/**
* @param stepid The ID of the step: this is derived information, you can also use the constructor without stepid.
* This constructor will be deprecated soon.
* @param stepname The name of the new step
* @param stepMetaInterface The step metadata interface to use (TextFileInputMeta, etc)
*/
public StepMeta(String stepid, String stepname, StepMetaInterface stepMetaInterface)
{
this(stepname, stepMetaInterface);
if (this.stepid==null) this.stepid = stepid;
}
/**
* @param stepname The name of the new step
* @param stepMetaInterface The step metadata interface to use (TextFileInputMeta, etc)
*/
public StepMeta(String stepname, StepMetaInterface stepMetaInterface)
{
if (stepMetaInterface!=null)
{
this.stepid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, stepMetaInterface);
}
this.stepname = stepname;
setStepMetaInterface( stepMetaInterface );
selected = false;
distributes = true;
copies = 1;
location = new Point(0,0);
drawstep = false;
description = null;
stepPartitioningMeta = new StepPartitioningMeta();
// targetStepPartitioningMeta = new StepPartitioningMeta();
clusterSchema = null; // non selected by default.
remoteInputSteps = new ArrayList<RemoteStep>();
remoteOutputSteps = new ArrayList<RemoteStep>();
}
public StepMeta()
{
this((String)null, (String)null, (StepMetaInterface)null);
}
public String getXML() throws KettleException
{
return getXML(true);
}
public String getXML(boolean includeInterface) throws KettleException
{
StringBuffer retval=new StringBuffer(200); //$NON-NLS-1$
retval.append(" <").append(XML_TAG).append('>').append(Const.CR); //$NON-NLS-1$
retval.append(" ").append(XMLHandler.addTagValue("name", getName()) ); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("type", getStepID()) ); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("description", description) ); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("distribute", distributes) ); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("copies", copies) ); //$NON-NLS-1$ //$NON-NLS-2$
retval.append( stepPartitioningMeta.getXML() );
if (targetStepPartitioningMeta!=null) {
retval.append( XMLHandler.openTag("target_step_partitioning")).append(targetStepPartitioningMeta.getXML()).append( XMLHandler.closeTag("target_step_partitioning"));
}
if (includeInterface) {
retval.append( stepMetaInterface.getXML() );
}
retval.append(" ").append(XMLHandler.addTagValue("cluster_schema", clusterSchema==null?"":clusterSchema.getName()));
retval.append(" <remotesteps>");
// Output the remote input steps
List<RemoteStep> inputSteps = new ArrayList<RemoteStep>(remoteInputSteps);
Collections.sort(inputSteps); // sort alphabetically, making it easier to compare XML files
retval.append(" <input>");
for (RemoteStep remoteStep : inputSteps) {
retval.append(" ").append(remoteStep.getXML()).append(Const.CR);
}
retval.append(" </input>");
// Output the remote output steps
List<RemoteStep> outputSteps = new ArrayList<RemoteStep>(remoteOutputSteps);
Collections.sort(outputSteps); // sort alphabetically, making it easier to compare XML files
retval.append(" <output>");
for (RemoteStep remoteStep : outputSteps) {
retval.append(" ").append(remoteStep.getXML()).append(Const.CR);
}
retval.append(" </output>");
retval.append(" </remotesteps>");
retval.append(" <GUI>").append(Const.CR); //$NON-NLS-1$
retval.append(" <xloc>").append(location.x).append("</xloc>").append(Const.CR); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" <yloc>").append(location.y).append("</yloc>").append(Const.CR); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" <draw>").append((drawstep?"Y":"N")).append("</draw>").append(Const.CR); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
retval.append(" </GUI>").append(Const.CR); //$NON-NLS-1$
retval.append(" </"+XML_TAG+">").append(Const.CR).append(Const.CR); //$NON-NLS-1$
return retval.toString();
}
/**
* Read the step data from XML
*
* @param stepnode The XML step node.
* @param databases A list of databases
* @param counters A map with all defined counters.
*
*/
public StepMeta(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException
{
this();
PluginRegistry registry = PluginRegistry.getInstance();
try
{
stepname = XMLHandler.getTagValue(stepnode, "name"); //$NON-NLS-1$
stepid = XMLHandler.getTagValue(stepnode, "type"); //$NON-NLS-1$
// Create a new StepMetaInterface object...
PluginInterface sp = registry.findPluginWithId(StepPluginType.class, stepid);
if (sp!=null)
{
setStepMetaInterface( (StepMetaInterface) registry.loadClass(sp) );
stepid=sp.getIds()[0]; // revert to the default in case we loaded an alternate version
}
else
{
throw new KettleStepLoaderException(BaseMessages.getString(PKG, "StepMeta.Exception.UnableToLoadClass",stepid)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
// Load the specifics from XML...
if (stepMetaInterface!=null)
{
stepMetaInterface.loadXML(stepnode, databases, counters);
}
/* Handle info general to all step types...*/
description = XMLHandler.getTagValue(stepnode, "description"); //$NON-NLS-1$
copies = Const.toInt(XMLHandler.getTagValue(stepnode, "copies"), 1); //$NON-NLS-1$
String sdistri = XMLHandler.getTagValue(stepnode, "distribute"); //$NON-NLS-1$
distributes = "Y".equalsIgnoreCase(sdistri); //$NON-NLS-1$
if (sdistri==null) distributes=true; // default=distribute
// Handle GUI information: location & drawstep?
String xloc, yloc;
int x,y;
xloc=XMLHandler.getTagValue(stepnode, "GUI", "xloc"); //$NON-NLS-1$ //$NON-NLS-2$
yloc=XMLHandler.getTagValue(stepnode, "GUI", "yloc"); //$NON-NLS-1$ //$NON-NLS-2$
try{ x=Integer.parseInt(xloc); } catch(Exception e) { x=0; }
try{ y=Integer.parseInt(yloc); } catch(Exception e) { y=0; }
location=new Point(x,y);
drawstep = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "GUI", "draw")); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// The partitioning information?
//
Node partNode = XMLHandler.getSubNode(stepnode, "partitioning");
stepPartitioningMeta = new StepPartitioningMeta(partNode);
// Target partitioning information?
//
Node targetPartNode = XMLHandler.getSubNode(stepnode, "target_step_partitioning");
partNode = XMLHandler.getSubNode(targetPartNode, "partitioning");
if (partNode!=null) {
targetStepPartitioningMeta = new StepPartitioningMeta(partNode);
}
clusterSchemaName = XMLHandler.getTagValue(stepnode, "cluster_schema"); // resolve to clusterSchema later
// The remote input and output steps...
Node remotestepsNode = XMLHandler.getSubNode(stepnode, "remotesteps");
Node inputNode = XMLHandler.getSubNode(remotestepsNode, "input");
int nrInput = XMLHandler.countNodes(inputNode, RemoteStep.XML_TAG);
for (int i=0;i<nrInput;i++) {
remoteInputSteps.add( new RemoteStep( XMLHandler.getSubNodeByNr(inputNode, RemoteStep.XML_TAG, i) ) );
}
Node outputNode = XMLHandler.getSubNode(remotestepsNode, "output");
int nrOutput = XMLHandler.countNodes(outputNode, RemoteStep.XML_TAG);
for (int i=0;i<nrOutput;i++) {
remoteOutputSteps.add( new RemoteStep( XMLHandler.getSubNodeByNr(outputNode, RemoteStep.XML_TAG, i) ) );
}
}
catch(Exception e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "StepMeta.Exception.UnableToLoadStepInfo")+e.toString(), e); //$NON-NLS-1$
}
}
/**
* Resolves the name of the cluster loaded from XML/Repository to the correct clusterSchema object
* @param clusterSchemas The list of clusterSchemas to reference.
*/
public void setClusterSchemaAfterLoading(List<ClusterSchema> clusterSchemas)
{
if (clusterSchemaName==null) return;
for (ClusterSchema look : clusterSchemas)
{
if (look.getName().equals(clusterSchemaName)) clusterSchema=look;
}
}
public ObjectId getObjectId()
{
return id;
}
public void setObjectId(ObjectId id)
{
this.id = id;
}
/**
* See wether or not the step is drawn on the canvas.
*
* @return True if the step is drawn on the canvas.
*/
public boolean isDrawn()
{
return drawstep;
}
/**
* See wether or not the step is drawn on the canvas.
* Same as isDrawn(), but needed for findMethod(StepMeta, drawstep)
* called by StringSearcher.findMetaData(). Otherwise findMethod() returns
* org.pentaho.di.trans.step.StepMeta.drawStep() instead of isDrawn().
* @return True if the step is drawn on the canvas.
*/
public boolean isDrawStep()
{
return drawstep;
}
/**
* Sets the draw attribute of the step so that it will be drawn on the canvas.
*
* @param draw True if you want the step to show itself on the canvas, False if you don't.
*/
public void setDraw(boolean draw)
{
drawstep=draw;
setChanged();
}
/**
* Sets the number of parallel copies that this step will be launched with.
*
* @param c The number of copies.
*/
public void setCopies(int c)
{
if (copies!=c) setChanged();
copies=c;
}
/**
* Get the number of copies to start of a step.
* This takes into account the partitioning logic.
* @return the number of step copies to start.
*/
public int getCopies()
{
// If the step is partitioned, that's going to determine the number of copies, nothing else...
//
if (isPartitioned() && getStepPartitioningMeta().getPartitionSchema()!=null)
{
List<String> partitionIDs = getStepPartitioningMeta().getPartitionSchema().getPartitionIDs();
if (partitionIDs!=null && partitionIDs.size()>0) // these are the partitions the step can "reach"
{
return partitionIDs.size();
}
}
return copies;
}
public void drawStep()
{
setDraw(true);
setChanged();
}
public void hideStep()
{
setDraw(false);
setChanged();
}
/**
* Two steps are equal if their names are equal.
* @return true if the two steps are equal.
*/
public boolean equals(Object obj)
{
if (obj==null) return false;
StepMeta stepMeta = (StepMeta)obj;
return getName().equalsIgnoreCase(stepMeta.getName());
}
public int hashCode()
{
return stepname.hashCode();
}
public int compareTo(StepMeta o)
{
return toString().compareTo(o.toString());
}
public boolean hasChanged()
{
BaseStepMeta bsi = (BaseStepMeta)this.getStepMetaInterface();
return bsi!=null?bsi.hasChanged():false;
}
public void setChanged(boolean ch)
{
BaseStepMeta bsi = (BaseStepMeta)this.getStepMetaInterface();
if (bsi!=null) bsi.setChanged(ch);
}
public void setChanged()
{
BaseStepMeta bsi = (BaseStepMeta)this.getStepMetaInterface();
if (bsi!=null) bsi.setChanged();
}
public boolean chosesTargetSteps()
{
if (getStepMetaInterface()!=null)
{
List<StreamInterface> targetStreams = getStepMetaInterface().getStepIOMeta().getTargetStreams();
return targetStreams.isEmpty();
}
return false;
}
public Object clone()
{
StepMeta stepMeta = new StepMeta();
stepMeta.replaceMeta(this);
stepMeta.setObjectId(null);
return stepMeta;
}
public void replaceMeta(StepMeta stepMeta)
{
this.stepid = stepMeta.stepid; // --> StepPlugin.id
this.stepname = stepMeta.stepname;
if (stepMeta.stepMetaInterface!=null)
{
setStepMetaInterface( (StepMetaInterface) stepMeta.stepMetaInterface.clone() );
}
else
{
this.stepMetaInterface = null;
}
this.selected = stepMeta.selected;
this.distributes = stepMeta.distributes;
this.copies = stepMeta.copies;
if (stepMeta.location!=null)
{
this.location = new Point(stepMeta.location.x, stepMeta.location.y);
}
else
{
this.location = null;
}
this.drawstep = stepMeta.drawstep;
this.description = stepMeta.description;
this.terminator = stepMeta.terminator;
if (stepMeta.stepPartitioningMeta!=null)
{
this.stepPartitioningMeta = (StepPartitioningMeta) stepMeta.stepPartitioningMeta.clone();
}
else
{
this.stepPartitioningMeta = null;
}
if (stepMeta.clusterSchema!=null)
{
this.clusterSchema = (ClusterSchema) stepMeta.clusterSchema.clone();
}
else
{
this.clusterSchema = null;
}
this.clusterSchemaName = stepMeta.clusterSchemaName; // temporary to resolve later.
// Also replace the remote steps with cloned versions...
//
this.remoteInputSteps = new ArrayList<RemoteStep>();
for (RemoteStep remoteStep : stepMeta.remoteInputSteps) this.remoteInputSteps.add((RemoteStep)remoteStep.clone());
this.remoteOutputSteps = new ArrayList<RemoteStep>();
for (RemoteStep remoteStep : stepMeta.remoteOutputSteps) this.remoteOutputSteps.add((RemoteStep)remoteStep.clone());
// The error handling needs to be done too...
//
if (stepMeta.stepErrorMeta!=null) {
this.stepErrorMeta = stepMeta.stepErrorMeta.clone();
}
// this.setShared(stepMeta.isShared());
this.id = stepMeta.getObjectId();
this.setChanged(true);
}
public StepMetaInterface getStepMetaInterface()
{
return stepMetaInterface;
}
public void setStepMetaInterface(StepMetaInterface stepMetaInterface) {
this.stepMetaInterface = stepMetaInterface;
if (stepMetaInterface!=null) {
this.stepMetaInterface.setParentStepMeta(this);
}
}
public String getStepID()
{
return stepid;
}
public String getName()
{
return stepname;
}
public void setName(String sname)
{
stepname=sname;
}
public String getDescription()
{
return description;
}
public void setDescription(String description)
{
this.description=description;
}
public void setSelected(boolean sel)
{
selected=sel;
}
public void flipSelected()
{
selected=!selected;
}
public boolean isSelected()
{
return selected;
}
public void setTerminator()
{
setTerminator(true);
}
public void setTerminator(boolean t)
{
terminator = t;
}
public boolean hasTerminator()
{
return terminator;
}
public StepMeta(ObjectId id_step)
{
this((String)null, (String)null, (StepMetaInterface)null);
setObjectId(id_step);
}
public void setLocation(int x, int y)
{
int nx = (x>=0?x:0);
int ny = (y>=0?y:0);
Point loc = new Point(nx,ny);
if (!loc.equals(location)) setChanged();
location=loc;
}
public void setLocation(Point loc)
{
if (loc!=null && !loc.equals(location)) setChanged();
location = loc;
}
public Point getLocation()
{
return location;
}
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info)
{
stepMetaInterface.check(remarks, transMeta, this, prev, input, output, info);
}
public String toString()
{
if (getName()==null) return getClass().getName();
return getName();
}
/**
* @return true is the step is partitioned
*/
public boolean isPartitioned()
{
return stepPartitioningMeta.isPartitioned();
}
/**
* @return true is the step is partitioned
*/
public boolean isTargetPartitioned()
{
return targetStepPartitioningMeta.isPartitioned();
}
/**
* @return the stepPartitioningMeta
*/
public StepPartitioningMeta getStepPartitioningMeta()
{
return stepPartitioningMeta;
}
/**
* @param stepPartitioningMeta the stepPartitioningMeta to set
*/
public void setStepPartitioningMeta(StepPartitioningMeta stepPartitioningMeta)
{
this.stepPartitioningMeta = stepPartitioningMeta;
}
/**
* @return the clusterSchema
*/
public ClusterSchema getClusterSchema()
{
return clusterSchema;
}
/**
* @param clusterSchema the clusterSchema to set
*/
public void setClusterSchema(ClusterSchema clusterSchema)
{
this.clusterSchema = clusterSchema;
}
/**
* @return the distributes
*/
public boolean isDistributes()
{
return distributes;
}
/**
* @param distributes the distributes to set
*/
public void setDistributes(boolean distributes)
{
if (this.distributes != distributes){
this.distributes = distributes;
setChanged();
}
}
/**
* @return the StepErrorMeta error handling metadata for this step
*/
public StepErrorMeta getStepErrorMeta()
{
return stepErrorMeta;
}
/**
* @param stepErrorMeta the error handling metadata for this step
*/
public void setStepErrorMeta(StepErrorMeta stepErrorMeta)
{
this.stepErrorMeta = stepErrorMeta;
}
/**
* Find a step with the ID in a given ArrayList of steps
*
* @param steps The List of steps to search
* @param id The ID of the step
* @return The step if it was found, null if nothing was found
*/
public static final StepMeta findStep(List<StepMeta> steps, ObjectId id)
{
if (steps == null) return null;
for (StepMeta stepMeta : steps)
{
if (stepMeta.getObjectId()!=null && stepMeta.getObjectId().equals(id)) {
return stepMeta;
}
}
return null;
}
/**
* Find a step with its name in a given ArrayList of steps
*
* @param steps The List of steps to search
* @param stepname The name of the step
* @return The step if it was found, null if nothing was found
*/
public static final StepMeta findStep(List<StepMeta> steps, String stepname)
{
if (steps == null) return null;
for (StepMeta stepMeta : steps)
{
if (stepMeta.getName().equalsIgnoreCase(stepname)) return stepMeta;
}
return null;
}
public boolean supportsErrorHandling()
{
return stepMetaInterface.supportsErrorHandling();
}
/**
* @return if error handling is supported for this step, if error handling is defined and a target step is set
*/
public boolean isDoingErrorHandling()
{
return stepMetaInterface.supportsErrorHandling() &&
stepErrorMeta!=null &&
stepErrorMeta.getTargetStep()!=null &&
stepErrorMeta.isEnabled()
;
}
public boolean isSendingErrorRowsToStep(StepMeta targetStep)
{
return (isDoingErrorHandling() && stepErrorMeta.getTargetStep().equals(targetStep));
}
/**
* Support for CheckResultSourceInterface
*/
public String getTypeId() {
return this.getStepID();
}
public boolean isMapping() {
return STRING_ID_MAPPING.equals(stepid);
}
public boolean isSingleThreader() {
return STRING_ID_SINGLE_THREADER.equals(stepid);
}
public boolean isEtlMetaInject() {
return STRING_ID_ETL_META_INJECT.equals(stepid);
}
public boolean isMappingInput() {
return STRING_ID_MAPPING_INPUT.equals(stepid);
}
public boolean isMappingOutput() {
return STRING_ID_MAPPING_OUTPUT.equals(stepid);
}
/**
* Get a list of all the resource dependencies that the step is depending on.
*
* @return a list of all the resource dependencies that the step is depending on
*/
public List<ResourceReference> getResourceDependencies(TransMeta transMeta) {
return stepMetaInterface.getResourceDependencies(transMeta, this);
}
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository) throws KettleException {
// The step calls out to the StepMetaInterface...
// These can in turn add anything to the map in terms of resources, etc.
// Even reference files, etc. For now it's just XML probably...
//
return stepMetaInterface.exportResources(space, definitions, resourceNamingInterface, repository);
}
/**
* @return the remoteInputSteps
*/
public List<RemoteStep> getRemoteInputSteps() {
return remoteInputSteps;
}
/**
* @param remoteInputSteps the remoteInputSteps to set
*/
public void setRemoteInputSteps(List<RemoteStep> remoteInputSteps) {
this.remoteInputSteps = remoteInputSteps;
}
/**
* @return the remoteOutputSteps
*/
public List<RemoteStep> getRemoteOutputSteps() {
return remoteOutputSteps;
}
/**
* @param remoteOutputSteps the remoteOutputSteps to set
*/
public void setRemoteOutputSteps(List<RemoteStep> remoteOutputSteps) {
this.remoteOutputSteps = remoteOutputSteps;
}
/**
* @return the targetStepPartitioningMeta
*/
public StepPartitioningMeta getTargetStepPartitioningMeta() {
return targetStepPartitioningMeta;
}
/**
* @param targetStepPartitioningMeta the targetStepPartitioningMeta to set
*/
public void setTargetStepPartitioningMeta(StepPartitioningMeta targetStepPartitioningMeta) {
this.targetStepPartitioningMeta = targetStepPartitioningMeta;
}
public boolean isRepartitioning() {
if (!isPartitioned() && isTargetPartitioned()) return true;
if (isPartitioned() && isTargetPartitioned() && !stepPartitioningMeta.equals(targetStepPartitioningMeta)) return true;
return false;
}
public String getHolderType() {
return "STEP"; //$NON-NLS-1$
}
public boolean isClustered() {
return clusterSchema!=null;
}
/**
* Set the plugin step id (code)
* @param stepid
*/
public void setStepID(String stepid) {
this.stepid = stepid;
}
public void setClusterSchemaName(String clusterSchemaName) {
this.clusterSchemaName = clusterSchemaName;
}
public void setParentTransMeta(TransMeta parentTransMeta) {
this.parentTransMeta = parentTransMeta;
}
public TransMeta getParentTransMeta() {
return parentTransMeta;
}
}
| apache-2.0 |
RLDevOps/Scholastic | src/main/java/org/olat/core/util/xml/XMLPrettyPrinter.java | 3703 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) 1999-2006 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.core.util.xml;
/**
* Description:<br>
* TODO: Felix Class Description for XMLPrettyPrinter
* <P>
* Initial Date: 09.09.2005 <br>
*
* @author Felix
*/
public class XMLPrettyPrinter {
/*
* public static String prettyPrint(String in) { StringBuilder sb = new StringBuilder(); DefaultHandler handler = new SAXIndent(sb); // Parse the input with the
* default (non-validating) parser SAXParser saxParser = SAXParserFactory.newInstance().newSAXParser(); saxParser.parse(new InputSource(new StringReader(in)),
* handler); try { StringWriter sw = new StringWriter(); Transformer t = TransformerFactory.newInstance().newTransformer(); t.setOutputProperty(OutputKeys.INDENT,
* "yes"); t.setOutputProperty(OutputKeys.METHOD, "xml"); t.setOutputProperty(OutputKeys.STANDALONE, "yes"); t.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION,
* "no"); t.transform(new StreamSource(new StringReader(componentListenerInfo)), new StreamResult(sw)); String res = sw.getBuffer().toString(); componentListenerInfo
* = res; } catch (Exception e) { // ignore } String r = componentListenerInfo; }
*/
}
/*
* class SAXIndent extends DefaultHandler { private static String sNEWLINE = "\n"; private StringBuilder sb; SAXIndent(StringBuilder sb) { this.sb = sb; } public void
* startDocument() throws SAXException { echoString(sNEWLINE + "<?xml ...?>" + sNEWLINE + sNEWLINE); } public void endDocument() throws SAXException {
* echoString(sNEWLINE); } public void startElement(String namespaceURI, String localName, String qName, Attributes attrs) throws SAXException { echoTextBuffer(); String
* eName = ("".equals(localName)) ? qName : localName; echoString("<" + eName); // element name if (attrs != null) { for (int i = 0; i < attrs.getLength(); i++) { String
* aName = attrs.getLocalName(i); // Attr name if ("".equals(aName)) aName = attrs.getQName(i); echoString(" " + aName + "=\"" + attrs.getValue(i) + "\""); } }
* echoString(">"); } public void endElement(String namespaceURI, String localName, // local name String qName) // qualified name throws SAXException { echoTextBuffer();
* String eName = ("".equals(localName)) ? qName : localName; echoString("</" + eName + ">"); // element name } public void characters(char[] buf, int offset, int len)
* throws SAXException { String s = new String(buf, offset, len); sb.append(s); } // ---- Helper methods ---- // Display text accumulated in the character buffer private
* void echoTextBuffer() throws SAXException { if (textBuffer == null) return; echoString(textBuffer.toString()); textBuffer = null; } // Wrap I/O exceptions in SAX
* exceptions, to // suit handler signature requirements private void echoString(String s) throws SAXException { try { if (null == out) out = new
* OutputStreamWriter(System.out, "UTF8"); out.write(s); out.flush(); } catch (IOException ex) { throw new SAXException("I/O error", ex); } } }
*/
| apache-2.0 |
msebire/intellij-community | plugins/tasks/tasks-core/jira/src/com/intellij/tasks/jira/JiraRepositoryEditor.java | 2925 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tasks.jira;
import com.intellij.openapi.project.Project;
import com.intellij.tasks.config.BaseRepositoryEditor;
import com.intellij.tasks.jira.jql.JqlLanguage;
import com.intellij.ui.EditorTextField;
import com.intellij.ui.LanguageTextField;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.Consumer;
import com.intellij.util.ui.FormBuilder;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
* @author Mikhail Golubev
*/
public class JiraRepositoryEditor extends BaseRepositoryEditor<JiraRepository> {
private EditorTextField mySearchQueryField;
private JBLabel mySearchLabel;
private JBLabel myNoteLabel;
public JiraRepositoryEditor(Project project, JiraRepository repository, Consumer<? super JiraRepository> changeListener) {
super(project, repository, changeListener);
}
@Override
public void apply() {
myRepository.setSearchQuery(mySearchQueryField.getText());
super.apply();
enableJqlSearchIfSupported();
}
@Override
protected void afterTestConnection(boolean connectionSuccessful) {
super.afterTestConnection(connectionSuccessful);
if (connectionSuccessful) {
enableJqlSearchIfSupported();
}
updateNote();
}
@Nullable
@Override
protected JComponent createCustomPanel() {
mySearchQueryField = new LanguageTextField(JqlLanguage.INSTANCE, myProject, myRepository.getSearchQuery());
enableJqlSearchIfSupported();
installListener(mySearchQueryField);
mySearchLabel = new JBLabel("Search:", SwingConstants.RIGHT);
myNoteLabel = new JBLabel();
myNoteLabel.setComponentStyle(UIUtil.ComponentStyle.SMALL);
updateNote();
return FormBuilder.createFormBuilder()
.addLabeledComponent(mySearchLabel, mySearchQueryField)
.addComponentToRightColumn(myNoteLabel)
.getPanel();
}
private void updateNote() {
myNoteLabel.setText("JQL search cannot be used in JIRA versions prior 4.2. Your version: " + myRepository.getPresentableVersion());
}
@Override
public void setAnchor(@Nullable final JComponent anchor) {
super.setAnchor(anchor);
mySearchLabel.setAnchor(anchor);
}
private void enableJqlSearchIfSupported() {
mySearchQueryField.setEnabled(myRepository.isJqlSupported());
}
}
| apache-2.0 |
glubtech/secureftp | src/com/glub/secureftp/client/gui/GetCommand.java | 7898 |
//*****************************************************************************
//*
//* (c) Copyright 2002. Glub Tech, Incorporated. All Rights Reserved.
//*
//* $Id: GetCommand.java 37 2009-05-11 22:46:15Z gary $
//*
//*****************************************************************************
package com.glub.secureftp.client.gui;
import com.glub.secureftp.bean.*;
import com.glub.secureftp.client.framework.*;
import com.glub.util.*;
import java.io.*;
import java.util.*;
/**
 * Downloads a single remote file into the session's current local directory.
 * If the local file already exists, the user is asked whether to skip, replace
 * or (when the server supports REST) resume the transfer; "apply to all"
 * answers are remembered for the lifetime of this command instance so
 * multi-file downloads only prompt once.
 */
public class GetCommand extends NetworkCommand {
  // Sticky "apply to all" answers from a previous FileExistsDialog prompt;
  // they persist across doIt() calls on the same command object.
  private boolean resumeAll = false;
  private boolean replaceAll = false;
  private boolean skipAll = false;

  /** Creates the standard "get" command. */
  public GetCommand() {
    this( "get", CommandID.GET_COMMAND_ID );
  }

  /**
   * @param commandName name this command is registered under
   * @param id          command id constant (see {@code CommandID})
   */
  public GetCommand( String commandName, short id ) {
    super(commandName, id, 3, 3, "remote-file session progress",
          "receive file");
  }

  /**
   * Performs the download. Expected args (in order): the {@code RemoteFile}
   * to fetch, the {@code FTPSession} to use, and the {@code DataTransferDialog}
   * that shows progress and allows aborting.
   *
   * @return a {@code SecureFTPError} whose code reflects the outcome
   *         (e.g. PERMISSION_DENIED, NO_SUCH_FILE, TRANSFER_ABORTED,
   *         DOWNLOAD_FAILED, IO_EXCEPTION, NOT_CONNECTED)
   * @throws CommandException propagated from {@code super.doIt()}
   */
  public SecureFTPError doIt() throws CommandException {
    SecureFTPError result = super.doIt();

    // Client-side policy check: downloads may be disabled entirely.
    if ( !Client.getAllowDownload() ) {
      result.setCode( SecureFTPError.PERMISSION_DENIED );
      return result;
    }

    // args: 0 = remote file, 1 = session, 2 = progress dialog
    RemoteFile remoteFile = (RemoteFile)getArgs().get(0);
    String remoteFileStr = remoteFile.toString();
    FTPSession session = (FTPSession)getArgs().get(1);
    DataTransferDialog progress = (DataTransferDialog)getArgs().get(2);

    boolean weCreatedTheFile = false;
    File newFile = null;
    Date modTime = null;

    // No local directory to download into; silently succeed with no work.
    if ( null == session.getLocalDir() ) {
      return result;
    }

    try {
      // Keep only the last path component of the remote name as the local
      // file name.
      String newFileStr = remoteFileStr;
      StringTokenizer tok = new StringTokenizer( newFileStr, "/" );
      while ( tok.hasMoreTokens() ) {
        newFileStr = tok.nextToken();
      }

      // sanitize filename if needed
      newFileStr = Util.searchAndReplace( newFileStr, "\\", "_", true );

      newFile = new File( newFileStr );
      if ( !newFile.isAbsolute() ) {
        newFile = new File( session.getLocalDir(), newFileStr );
      }

      weCreatedTheFile = !newFile.exists();

      // Detach the session's command I/O streams while issuing the metadata
      // queries below; they are reattached before the actual transfer.
      session.getFTPBean().setSendCmdStream(null);
      session.getFTPBean().setRecvCmdStream(null);

      // Determine the remote file's modification time, preferring MDTM when
      // the server supports it, falling back to the listing date.
      try {
        // there seems to be a bug with at least one server where
        // the time command doesn't properly handle a time query with
        // a utf8 file name
        if ( session.supportsMDTM() &&
             !session.getFTPBean().stringDataAsUTF8() ) {
          modTime = session.getFTPBean().time( remoteFile );
        }
        else {
          Calendar remoteFileCal = remoteFile.getDate();
          if ( remoteFileCal != null ) {
            modTime = remoteFileCal.getTime();
          }
        }
      }
      catch ( FTPNoSuchFileException remoteFileNotFound ) {
        ErrorDialog.showDialog( new LString("DataTransfer.file_not_found",
                                            "File not found.") );
        result.setCode( SecureFTPError.NO_SUCH_FILE );
        return result;
      }
      catch ( FTPException noTime ) {
        // Server rejected the time query; remember that MDTM is unsupported.
        session.setSupportsMDTM( false );
      }

      // Determine the remote file size (via SIZE if the listing had none);
      // a still-unknown size is normalized to 0.
      long fileSize = remoteFile.getFileSize();

      try {
        if ( fileSize < 0 && session.supportsSIZE() ) {
          fileSize = session.getFTPBean().size( remoteFile );
        }

        if ( fileSize < 0 ) {
          fileSize = 0;
        }
      }
      catch ( FTPNoSuchFileException remoteFileNotFound ) {
        ErrorDialog.showDialog( new LString("DataTransfer.file_not_found",
                                            "File not found.") );
        result.setCode( SecureFTPError.NO_SUCH_FILE );
        return result;
      }
      catch ( FTPException noSize ) {
        // Server rejected the size query; remember that SIZE is unsupported.
        session.setSupportsSIZE( false );
      }

      // One-time probe for REST (resume) support on this session.
      if ( session.testForREST() ) {
        session.setTestForREST( false );
        session.setSupportsREST( session.getFTPBean().isTransferRestartable() );
      }

      // Reattach the command I/O streams that were detached above.
      session.getFTPBean().setSendCmdStream(session.getOutputStream());
      session.getFTPBean().setRecvCmdStream(session.getOutputStream());

      boolean resumeDownload = false;

      // The destination already exists: decide skip/replace/resume, asking
      // the user unless an "all" answer was given earlier.
      if ( !weCreatedTheFile ) {
        boolean resumable = false;

        // we can only resume a transfer during a binary mode
        if ( session.supportsREST() && fileSize > 0 &&
             fileSize > newFile.length() &&
             (session.getFTPBean().getTransferMode() ==
                                                  FTP.BINARY_TRANSFER_MODE ||
             (session.getFTPBean().getTransferMode() ==
                                                  FTP.AUTO_TRANSFER_MODE &&
              !FileTypeDecider.isAscii(newFile.getName()))) ) {
          resumable = true;
        }

        // Global kill switch for resume support.
        if ( GTOverride.getBoolean("glub.resume_xfer.disabled") )
          resumable = false;

        int r = FileExistsDialog.SKIP;

        if ( !resumeAll && !replaceAll && !skipAll ) {
          r =
            FileExistsDialog.showDialog( FileExistsDialog.DIRECTION_GET,
                                         newFile.getName(),
                                         modTime, newFile.lastModified(),
                                         fileSize, newFile.length(),
                                         resumable );
        }
        else if ( resumeAll ) {
          r = FileExistsDialog.RESUME_ALL;
        }
        else if ( replaceAll ) {
          r = FileExistsDialog.REPLACE_ALL;
        }
        else if ( skipAll ) {
          return result;
        }

        if ( FileExistsDialog.CANCEL == r ) {
          result.setCode( SecureFTPError.TRANSFER_ABORTED );
          return result;
        }
        else if ( FileExistsDialog.SKIP == r ) {
          return result;
        }
        else if ( FileExistsDialog.SKIP_ALL == r ) {
          skipAll = true;
          return result;
        }

        // Record the choice (and any sticky "all" variants) for later files.
        resumeDownload = ( FileExistsDialog.RESUME == r ||
                           FileExistsDialog.RESUME_ALL == r );
        resumeAll = FileExistsDialog.RESUME_ALL == r;
        replaceAll = FileExistsDialog.REPLACE_ALL == r;
      }

      // Perform the actual transfer; the abort handle lets the progress
      // dialog cancel it.
      FTPAbortableTransfer abort = new FTPAbortableTransfer();
      progress.setFileName( remoteFileStr );
      progress.setAbortableTransfer( abort );
      session.getFTPBean().retrieve( remoteFile, newFile,
                                     resumeDownload, progress, abort );
    }
    catch ( IOException ioe ) {
      if ( weCreatedTheFile ) {
        //newFile.delete();
      }
      LString msg = new LString("DataTransfer.transfer_failed",
                                "The data transfer failed: [^0]");
      msg.replace( 0, ioe.getMessage() );
      ErrorDialog.showDialog( msg );
      result.setCode( SecureFTPError.IO_EXCEPTION );
    }
    catch ( FTPAbortException fe ) {
      if ( weCreatedTheFile ) {
        //newFile.delete();
      }
      result.setCode( SecureFTPError.TRANSFER_ABORTED );
    }
    catch ( FTPConnectionLostException fcle ) {
      // Connection dropped mid-transfer: close the session and tear down the
      // progress dialog.
      SecureFTP.getCommandDispatcher().fireCommand( this, new CloseCommand() );
      ErrorDialog.showDialog( new LString("Common.connection_lost",
                                          "Connection lost.") );
      if ( progress != null ) {
        progress.dispose();
      }
      result.setCode( SecureFTPError.NOT_CONNECTED );
      return result;
    }
    catch ( FTPException fe ) {
      // An abort attempt can surface as a generic FTPException; report it as
      // an abort rather than a failure.
      if ( progress == null || progress.abortAttempted() ) {
        result.setCode( SecureFTPError.TRANSFER_ABORTED );
        return result;
      }
      if ( weCreatedTheFile ) {
        //newFile.delete();
      }
      LString msg = new LString("DataTransfer.transfer_failed",
                                "The data transfer failed: [^0]");
      msg.replace( 0, fe.getMessage() );
      ErrorDialog.showDialog( msg );
      result.setCode( SecureFTPError.DOWNLOAD_FAILED );
    }
    finally {
      // Stamp the local file with the remote modification time when known.
      // NOTE(review): this runs even when the transfer failed or was aborted.
      if ( modTime != null ) {
        newFile.setLastModified( modTime.getTime() );
      }
    }

    return result;
  }
}
| apache-2.0 |
goodwinnk/intellij-community | plugins/tasks/tasks-tests/test/com/intellij/tasks/vcs/TaskVcsTest.java | 23459 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tasks.vcs;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.CheckinProjectPanel;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsDirectoryMapping;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.committed.MockAbstractVcs;
import com.intellij.openapi.vcs.changes.shelf.ShelveChangesManager;
import com.intellij.openapi.vcs.changes.shelf.ShelvedChangeList;
import com.intellij.openapi.vcs.changes.ui.CommitHelper;
import com.intellij.openapi.vcs.checkin.CheckinHandler;
import com.intellij.openapi.vcs.impl.ProjectLevelVcsManagerImpl;
import com.intellij.openapi.vcs.impl.projectlevelman.AllVcses;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.tasks.*;
import com.intellij.tasks.actions.OpenTaskDialog;
import com.intellij.tasks.impl.LocalTaskImpl;
import com.intellij.tasks.impl.TaskChangelistSupport;
import com.intellij.tasks.impl.TaskCheckinHandlerFactory;
import com.intellij.tasks.impl.TaskManagerImpl;
import com.intellij.testFramework.fixtures.CodeInsightFixtureTestCase;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.vcsUtil.VcsUtil;
import icons.TasksIcons;
import org.easymock.EasyMock;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
 * Tests the bridge between {@link TaskManagerImpl} and the VCS changelist
 * machinery: changelist creation/removal on task activation, context tracking,
 * commit message generation, saving context on commit, shelving changes on
 * task switch, and branch-name suggestions.
 *
 * <p>{@link #setUp()} registers a {@link MockAbstractVcs} whose change provider
 * ({@link MyMockChangeProvider}) reports a scripted list of changes, plus a
 * {@link TestRepository} serving a single issue TEST-001 ({@link MyTask}), so
 * no real VCS or tracker is required.</p>
 */
public class TaskVcsTest extends CodeInsightFixtureTestCase {
  private TestRepository myRepository;
  private MockAbstractVcs myVcs;
  private MyMockChangeProvider myChangeProvider;
  private ChangeListManagerImpl myChangeListManager;
  private TaskManagerImpl myTaskManager;

  // Fresh project: exactly one (default) task associated with the single
  // default changelist.
  public void testInitialState() {
    assertEquals(1, myTaskManager.getLocalTasks().size());
    final LocalTask defaultTask = myTaskManager.getLocalTasks().get(0);
    assertEquals(defaultTask, myTaskManager.getActiveTask());
    assertTrue(defaultTask.isDefault());

    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(1, defaultTask.getChangeLists().size());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));
    assertEquals(defaultTask.getChangeLists().get(0).id, myChangeListManager.getChangeListsCopy().get(0).getId());
    assertEquals(defaultTask.getChangeLists().get(0), new ChangeListInfo(myChangeListManager.getChangeListsCopy().get(0)));
  }

  // Activating tasks back and forth: a changelist is only created on demand,
  // and the default changelist follows the active task.
  public void testSwitchingTasks() {
    final LocalTask defaultTask = myTaskManager.getLocalTasks().get(0);

    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    myTaskManager.activateTask(task, false);

    assertEquals(2, myTaskManager.getLocalTasks().size());
    LocalTask localTask = myTaskManager.getActiveTask();
    assertEquals(task, localTask);

    assertEquals(0, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));

    myTaskManager.activateTask(defaultTask, false);

    assertEquals(0, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));

    activateAndCreateChangelist(localTask);

    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList activeChangeList = myChangeListManager.getDefaultChangeList();
    LocalChangeList anotherChangeList = myChangeListManager.getChangeListsCopy().get(1 - myChangeListManager.getChangeListsCopy().indexOf(activeChangeList));

    assertNotNull(activeChangeList);
    assertEquals(localTask, myTaskManager.getAssociatedTask(activeChangeList));
    assertEquals("TEST-001 Summary", activeChangeList.getName());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, anotherChangeList.getName());

    myTaskManager.activateTask(defaultTask, false);
    myChangeListManager.waitUntilRefreshed();

    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    activeChangeList = myChangeListManager.getDefaultChangeList();
    anotherChangeList = myChangeListManager.getChangeListsCopy().get(1 - myChangeListManager.getChangeListsCopy().indexOf(activeChangeList));

    assertNotNull(activeChangeList);
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(activeChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, activeChangeList.getName());

    assertEquals(localTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  // Creating an extra changelist for the default task keeps all task <->
  // changelist associations intact.
  public void testAddChangeListViaCreateChangeListAction() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());

    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    myTaskManager.createChangeList(defaultTask, "Default (1)");
    myChangeListManager.waitUntilRefreshed();

    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(2, defaultTask.getChangeLists().size());
    assertEquals(3, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList defaultChangeListActive = myChangeListManager.findChangeList("Default (1)");
    assertNotNull(defaultChangeListActive);
    assertTrue(defaultChangeListActive.isDefault());
    LocalChangeList defaultChangeListInactive = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeListInactive);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListActive));
    assertEquals("Default (1)", defaultChangeListActive.getName());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListInactive));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeListInactive.getName());

    assertEquals(anotherTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  // Removing a task's changelist through the VCS API also detaches it from the
  // task manager's bookkeeping.
  public void testRemoveChangelistViaVcsAction() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());

    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    LocalChangeList defaultChangeList = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeList);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);
    removeChangeList(anotherChangeList);

    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeList.getName());
  }

  /**
   * Activates {@code task} and lazily creates its changelist if it does not
   * have one yet (mirrors what the UI does on task activation).
   */
  private void activateAndCreateChangelist(Task task) {
    LocalTask localTask = myTaskManager.activateTask(task, false);
    if (localTask.getChangeLists().isEmpty()) {
      myTaskManager.createChangeList(localTask, myTaskManager.getChangelistName(localTask));
    }
  }

  // Same as testAddChangeListViaCreateChangeListAction but the extra
  // changelist is added through the VCS "new changelist" path.
  public void testAddChangeListViaVcsAction() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());

    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    addChangeList("Default (1)");

    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(2, defaultTask.getChangeLists().size());
    assertEquals(3, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList defaultChangeListActive = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeListActive);
    assertTrue(myChangeListManager.getDefaultListName(), defaultChangeListActive.isDefault());

    LocalChangeList defaultChangeListInactive = myChangeListManager.findChangeList("Default (1)");
    assertNotNull(defaultChangeListInactive);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListActive));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeListActive.getName());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListInactive));
    assertEquals("Default (1)", defaultChangeListInactive.getName());

    assertEquals(anotherTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  // trackContextForNewChangelist: a new changelist spawns a matching task.
  public void testTrackContext() {
    myTaskManager.getState().trackContextForNewChangelist = true;
    addChangeList("New Changelist");
    assertEquals(2, myTaskManager.getLocalTasks().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    LocalChangeList newChangeList = myChangeListManager.findChangeList("New Changelist");
    assertNotNull(newChangeList);
    LocalTask newTask = myTaskManager.getAssociatedTask(newChangeList);
    assertNotNull(newTask);
    assertEquals("New Changelist", newTask.getSummary());
    myTaskManager.getState().trackContextForNewChangelist = false;
  }

  // The changelist comment is produced from the repository's commit message
  // format ({id} {summary} {number} {project}).
  public void testCreateComment() {
    myRepository.setShouldFormatCommitMessage(true);
    myRepository.setCommitMessageFormat("{id} {summary} {number} {project}");
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();
    LocalTask localTask = myTaskManager.getActiveTask();
    assertNotNull(localTask);
    assertEquals("TEST-001", localTask.getId());
    List<ChangeListInfo> lists = localTask.getChangeLists();
    assertEquals(1, lists.size());
    assertEquals("TEST-001 Summary 001 TEST", lists.get(0).comment);
  }

  // saveContextOnCommit with an existing task: committing must not spawn an
  // extra task and must keep the task/changelist association.
  public void testSaveContextOnCommitForExistingTask() {
    myTaskManager.getState().saveContextOnCommit = true;

    assertEquals(1, myTaskManager.getLocalTasks().size());

    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    assertEquals(1, myChangeListManager.getChangeListsCopy().size()); // default change list should be here
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    assertEquals(2, myTaskManager.getLocalTasks().size());
    List<LocalChangeList> copy = myChangeListManager.getChangeListsCopy();
    assertEquals(copy.toString(), 2, copy.size());
    LocalTask localTask = myTaskManager.getActiveTask();
    List<ChangeListInfo> changelists = localTask.getChangeLists();
    ChangeListInfo info = changelists.get(0);
    LocalChangeList changeList = myChangeListManager.getChangeList(info.id);
    assertNotNull(changeList);

    List<Change> changes = addChanges(changeList);

    commitChanges(changeList, changes);

    assertEquals(2, myTaskManager.getLocalTasks().size()); // no extra task created
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    assertEquals(localTask, myTaskManager.getAssociatedTask(changeList)); // association should survive
  }

  // saveContextOnCommit with a plain changelist: committing creates a task
  // named after the changelist.
  public void testSaveContextOnCommit() {
    myTaskManager.getState().saveContextOnCommit = true;

    assertEquals(1, myTaskManager.getLocalTasks().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList changeList = addChangeList("New Changelist");

    assertEquals(1, myTaskManager.getLocalTasks().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    List<Change> changes = addChanges(changeList);
    commitChanges(changeList, changes);

    assertEquals(2, myTaskManager.getLocalTasks().size()); // extra task created
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    assertTrue(ContainerUtil.exists(myTaskManager.getLocalTasks(), task -> task.getSummary().equals("New Changelist")));
  }

  /**
   * Commits {@code changes} from {@code changeList}, running the task
   * checkin handler ({@link TaskCheckinHandlerFactory}) as the real commit
   * flow would. Uses an EasyMock {@code CheckinProjectPanel}.
   */
  private void commitChanges(LocalChangeList changeList, List<Change> changes) {
    String commitMessage = changeList.getName();
    CheckinProjectPanel panel = EasyMock.createMock(CheckinProjectPanel.class);
    EasyMock.expect(panel.getProject()).andReturn(getProject());
    EasyMock.expect(panel.getCommitMessage()).andReturn(commitMessage);
    EasyMock.replay(panel);
    CheckinHandler checkinHandler = new TaskCheckinHandlerFactory().createHandler(panel, new CommitContext());

    List<CheckinHandler> handlers = ContainerUtil.list(checkinHandler);
    CommitHelper helper = new CommitHelper(getProject(), changeList, changes, "Commit", commitMessage, handlers, false, true,
                                           new PseudoMap<>(), null);
    helper.doCommit();
  }

  /**
   * Adds a changelist through the VCS API and runs the task-changelist UI
   * support over it, as the "new changelist" dialog would.
   */
  private LocalChangeList addChangeList(String title) {
    final LocalChangeList list = myChangeListManager.addChangeList(title, "");
    new TaskChangelistSupport(getProject(), myTaskManager).addControls(new JPanel(), null).consume(list);
    return list;
  }

  /** Removes a changelist and notifies the task manager's listener. */
  private void removeChangeList(LocalChangeList changeList) {
    myChangeListManager.removeChangeList(changeList);
    myTaskManager.getChangeListListener().changeListRemoved(changeList);
  }

  /**
   * Creates a file in the temp dir, publishes it through the mock change
   * provider, refreshes the change list manager and moves the resulting
   * change into {@code list}.
   */
  @NotNull
  private List<Change> addChanges(@NotNull LocalChangeList list) {
    VirtualFile file = myFixture.getTempDirFixture().createFile("Test.txt");
    FilePath path = VcsUtil.getFilePath(file);
    Change change = new Change(null,
                               new CurrentContentRevision(path));
    List<Change> changes = Collections.singletonList(change);
    myChangeProvider.setChanges(changes);
    VcsDirtyScopeManager.getInstance(getProject()).markEverythingDirty();
    myChangeListManager.scheduleUpdate();
    myChangeListManager.waitUntilRefreshed();
    myChangeListManager.moveChangesTo(list, change);
    myChangeListManager.waitUntilRefreshed();
    LOG.debug(dumpChangeListManager());
    return changes;
  }

  // Ids like "foo-bar-001" split into project "foo-bar" and number "001".
  public void testProjectWithDash() {
    LocalTaskImpl task = new LocalTaskImpl("foo-bar-001", "summary") {
      @Override
      public TaskRepository getRepository() {
        return myRepository;
      }

      @Override
      public boolean isIssue() {
        return true;
      }
    };
    assertEquals("foo-bar", task.getProject());
    assertEquals("001", task.getNumber());
    String name = myTaskManager.getChangelistName(task);
    assertEquals("foo-bar-001 summary", name);
  }

  // Degenerate ids: empty, bare dash, plain word, plain number.
  public void testIds() {
    LocalTaskImpl task = new LocalTaskImpl("", "");
    assertEquals("", task.getNumber());
    assertEquals(null, task.getProject());

    task = new LocalTaskImpl("-", "");
    assertEquals("-", task.getNumber());
    assertEquals(null, task.getProject());

    task = new LocalTaskImpl("foo", "");
    assertEquals("foo", task.getNumber());
    assertEquals(null, task.getProject());

    task = new LocalTaskImpl("112", "");
    assertEquals("112", task.getNumber());
    assertEquals(null, task.getProject());
  }

  // Re-activating a locally closed task recreates its deleted changelist.
  public void testRestoreChangelist() {
    final LocalTaskImpl task = new LocalTaskImpl("foo", "bar");
    activateAndCreateChangelist(task);
    activateAndCreateChangelist(new LocalTaskImpl("next", ""));

    final String changelistName = myTaskManager.getChangelistName(task);
    myChangeListManager.removeChangeList(changelistName);

    myChangeListManager.invokeAfterUpdate(() -> {
      assertTrue(myTaskManager.isLocallyClosed(task));
      activateAndCreateChangelist(task);
      assertNotNull(myChangeListManager.findChangeList(changelistName));
    }, InvokeAfterUpdateMode.SYNCHRONOUS_NOT_CANCELLABLE, "foo", ModalityState.NON_MODAL);
  }

  // Branch names: issue id for issues, (truncated) summary otherwise, and the
  // configurable branchNameFormat with illegal characters replaced.
  public void testSuggestBranchName() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    assertTrue(task.isIssue());
    assertEquals("TEST-001", myTaskManager.suggestBranchName(task));
    LocalTaskImpl simple = new LocalTaskImpl("1", "simple");
    assertEquals("simple", myTaskManager.suggestBranchName(simple));
    LocalTaskImpl strange = new LocalTaskImpl("1", "very long and strange summary");
    assertEquals("very-long", myTaskManager.suggestBranchName(strange));
    myTaskManager.getState().branchNameFormat = "{id} {summary}";
    LocalTaskImpl withIllegalSymbolsInIssue = new LocalTaskImpl("1", "contains Illegal$Symbols");
    withIllegalSymbolsInIssue.setIssue(true);
    assertEquals("1-contains-Illegal$Symbols", myTaskManager.suggestBranchName(withIllegalSymbolsInIssue));
  }

  // shelveChanges: opening a new task shelves the current changes under the
  // old task's name and restores them when that task is re-activated.
  public void testShelveChanges() {
    LocalTask activeTask = myTaskManager.getActiveTask();
    addChanges(myChangeListManager.getDefaultChangeList());
    myTaskManager.getState().shelveChanges = true;
    LocalTaskImpl task = new LocalTaskImpl("id", "summary");
    OpenTaskDialog dialog = new OpenTaskDialog(getProject(), task);
    try {
      dialog.createTask();
      assertEquals(dumpChangeListManager(), activeTask.getSummary(), activeTask.getShelfName());

      List<ShelvedChangeList> lists = ShelveChangesManager.getInstance(getProject()).getShelvedChangeLists();
      assertTrue(lists.stream().anyMatch(list -> list.DESCRIPTION.equals(activeTask.getShelfName())));
      assertEmpty(myChangeListManager.getDefaultChangeList().getChanges());
      myTaskManager.activateTask(activeTask, true);

      Collection<Change> changes = myChangeListManager.getDefaultChangeList().getChanges();
      assertNotEmpty(changes);
    }
    finally {
      dialog.close(DialogWrapper.OK_EXIT_CODE);
    }
    UIUtil.dispatchAllInvocationEvents();
  }

  // Registers the mock VCS over the whole project and a test repository with
  // the single issue TEST-001.
  @Override
  public void setUp() throws Exception {
    super.setUp();
    myVcs = new MockAbstractVcs(getProject());
    myChangeProvider = new MyMockChangeProvider();
    myVcs.setChangeProvider(myChangeProvider);
    myChangeListManager = (ChangeListManagerImpl)ChangeListManager.getInstance(getProject());

    ProjectLevelVcsManagerImpl vcsManager = (ProjectLevelVcsManagerImpl)ProjectLevelVcsManager.getInstance(getProject());
    vcsManager.registerVcs(myVcs);
    vcsManager.setDirectoryMappings(Collections.singletonList(new VcsDirectoryMapping("", myVcs.getName())));
    vcsManager.waitForInitialized();
    assertTrue(vcsManager.hasActiveVcss());
    myTaskManager = (TaskManagerImpl)TaskManager.getManager(getProject());
    myRepository = new TestRepository();
    myRepository.setTasks(new MyTask());
    myTaskManager.setRepositories(Collections.singletonList(myRepository));
  }

  // Unregisters the mock VCS and repository; fields are nulled so a failed
  // teardown cannot leak project references between tests.
  @Override
  protected void tearDown() throws Exception {
    try {
      myTaskManager.setRepositories(Collections.emptyList());
      AllVcses.getInstance(getProject()).unregisterManually(myVcs);
    }
    finally {
      myTaskManager = null;
      myVcs = null;
      myChangeListManager = null;
      super.tearDown();
    }
  }

  /** One line per changelist (name, id, changes) — used in failure messages. */
  @NotNull
  private String dumpChangeListManager() {
    return StringUtil.join(myChangeListManager.getChangeLists(), list -> {
      return String.format("list: %s (%s) changes: %s", list.getName(), list.getId(), StringUtil.join(list.getChanges(), ", "));
    }, "\n");
  }

  /** {@link ChangeProvider} stub that reports a fixed, test-supplied change list. */
  private static class MyMockChangeProvider implements ChangeProvider {
    private List<Change> myChanges = Collections.emptyList();

    public void setChanges(List<Change> changes) {
      myChanges = changes;
    }

    @Override
    public void getChanges(@NotNull VcsDirtyScope dirtyScope,
                           @NotNull final ChangelistBuilder builder,
                           @NotNull ProgressIndicator progress,
                           @NotNull ChangeListManagerGate addGate) {
      for (Change change : myChanges) {
        builder.processChange(change, MockAbstractVcs.getKey());
      }
    }

    @Override
    public boolean isModifiedDocumentTrackingRequired() {
      return false;
    }

    @Override
    public void doCleanup(List<VirtualFile> files) {
    }
  }

  /** The single fixed issue "TEST-001 Summary" served by the test repository. */
  private class MyTask extends Task {
    @NotNull
    @Override
    public String getId() {
      return "TEST-001";
    }

    @NotNull
    @Override
    public String getSummary() {
      return "Summary";
    }

    @Override
    public String getDescription() {
      return null;
    }

    @NotNull
    @Override
    public Comment[] getComments() {
      return Comment.EMPTY_ARRAY;
    }

    @NotNull
    @Override
    public Icon getIcon() {
      return TasksIcons.Unknown;
    }

    @NotNull
    @Override
    public TaskType getType() {
      return TaskType.BUG;
    }

    @Override
    public Date getUpdated() {
      return null;
    }

    @Override
    public Date getCreated() {
      return null;
    }

    @Override
    public boolean isClosed() {
      return false;
    }

    @Override
    public boolean isIssue() {
      return true;
    }

    @Override
    public String getIssueUrl() {
      return null;
    }

    @Override
    public TaskRepository getRepository() {
      return myRepository;
    }
  }
}
| apache-2.0 |
bboyfeiyu/AndroidJUnit4 | android-junit4-robolectric/src/main/java/com/uphyca/testing/shadows/CustomShadowTimePicker.java | 948 | package com.uphyca.testing.shadows;
import android.widget.TimePicker;
import com.xtremelabs.robolectric.internal.Implementation;
import com.xtremelabs.robolectric.internal.Implements;
import com.xtremelabs.robolectric.internal.RealObject;
import com.xtremelabs.robolectric.shadows.ShadowView;
@Implements(TimePicker.class)
public class CustomShadowTimePicker extends ShadowView {
@RealObject
protected TimePicker realTimePicker;
private Integer mCurrentHour;
private Integer mCurrentMinute;
@Implementation
public Integer getCurrentHour() {
return mCurrentHour;
}
@Implementation
public Integer getCurrentMinute() {
return mCurrentMinute;
}
@Implementation
public void setCurrentHour(Integer currentHour) {
mCurrentHour = currentHour;
}
@Implementation
public void setCurrentMinute(Integer currentMinute) {
mCurrentMinute = currentMinute;
}
}
| apache-2.0 |
arvindsv/gocd | common/src/test/java/com/thoughtworks/go/helper/SvnTestRepo.java | 6799 | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.helper;
import com.thoughtworks.go.config.materials.svn.SvnMaterial;
import com.thoughtworks.go.config.materials.svn.SvnMaterialConfig;
import static com.thoughtworks.go.helper.MaterialConfigsMother.svn;
import com.thoughtworks.go.domain.materials.*;
import com.thoughtworks.go.util.FileUtil;
import com.thoughtworks.go.util.command.InMemoryStreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import org.apache.commons.io.FileUtils;
import org.junit.Assert;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.util.List;
import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.commons.io.FileUtils.copyDirectory;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class SvnTestRepo extends TestRepo {
protected File tempRepo;
private static final String REPO_TEST_DATA_FOLDER = "../common/src/test/resources/data/svnrepo";
public SvnTestRepo(TemporaryFolder temporaryFolder, String folderName) throws IOException {
super(temporaryFolder);
if (isBlank(folderName)) {
tempRepo = temporaryFolder.newFolder();
} else {
tempRepo = temporaryFolder.newFolder(folderName);
}
tmpFolders.add(tempRepo);
try {
copyDirectory(new File(REPO_TEST_DATA_FOLDER), tempRepo);
} catch (IOException e) {
Assert.fail("Could not copy test repo [" + REPO_TEST_DATA_FOLDER + "] into [" + tempRepo + "] beacuse of " + e.getMessage());
}
new File(tempRepo, "/project1/db/transactions").mkdir();
}
public SvnTestRepo(TemporaryFolder temporaryFolder) throws IOException {
this(temporaryFolder, null);
}
public String urlFor(String project) {
return repositoryUrl(project);
}
@Override
public String projectRepositoryUrl() {
return repositoryUrl("project1/trunk");
}
public File projectRepositoryRoot() {
return repositoryUrlAsFile("project1");
}
public File projectRepositoryUrlAsFile() {
return repositoryUrlAsFile("project1/trunk");
}
public String end2endRepositoryUrl() {
return repositoryUrl("end2end");
}
public String repositoryUrl(String project) {
return FileUtil.toFileURI(new File(tempRepo, project));
}
public File repositoryUrlAsFile(String project) {
return new File(tempRepo, project);
}
@Override
public SvnMaterial material() {
return new SvnMaterial(projectRepositoryUrl(), null, null, false);
}
public SvnMaterialConfig materialConfig() {
return svn(projectRepositoryUrl(), null, null, false);
}
public SvnMaterial createMaterial(String repo, String folder) {
SvnMaterial material = MaterialsMother.svnMaterial(urlFor(repo), folder);
material.setAutoUpdate(true);
return material;
}
private Revision getLatestRevision(SvnMaterial svnMaterial) throws IOException {
final File workingCopy = temporaryFolder.newFolder();
tmpFolders.add(workingCopy);
return latestRevision(svnMaterial, workingCopy, new TestSubprocessExecutionContext());
}
public Modification checkInOneFile(String path) throws Exception {
return checkInOneFile(path, "adding file [" + path + "]").get(0);
}
@Override
public List<Modification> checkInOneFile(String filename, String message) throws Exception {
SvnMaterial svnMaterial = material();
return checkInOneFile(svnMaterial, filename, message);
}
protected List<Modification> checkInOneFile(SvnMaterial svnMaterial, String filename, String message) throws IOException {
final File workingCopy = temporaryFolder.newFolder();
tmpFolders.add(workingCopy);
InMemoryStreamConsumer consumer = inMemoryConsumer();
Revision latestRevision = getLatestRevision(svnMaterial);
svnMaterial.updateTo(consumer, workingCopy, new RevisionContext(latestRevision), new TestSubprocessExecutionContext());
File newFileToAdd = new File(workingCopy, filename);
File directoryToAddTo = newFileToAdd.getParentFile();
boolean addedToExistingDir = directoryToAddTo.exists();
directoryToAddTo.mkdirs();
FileUtils.writeStringToFile(newFileToAdd, "", UTF_8);
svnMaterial.add(consumer, addedToExistingDir ? newFileToAdd : directoryToAddTo);
svnMaterial.commit(consumer, workingCopy, message);
return svnMaterial.latestModification(workingCopy, new TestSubprocessExecutionContext());
}
@Override
public List<Modification> latestModification() throws IOException {
final File workingCopy = temporaryFolder.newFolder();
return material().latestModification(workingCopy, new TestSubprocessExecutionContext());
}
/**
 * Checks in a single empty file into the given material's configured folder.
 * Unlike the other overloads this one returns nothing and resolves the
 * working directory through {@code svnMaterial.getFolder()}.
 *
 * <p>NOTE(review): assumes {@code getFolder()} is non-null here — confirm
 * callers always pass a material created with an explicit folder.
 */
public void checkInOneFile(String fileName, SvnMaterial svnMaterial) throws Exception {
    final File baseDir = temporaryFolder.newFolder();
    tmpFolders.add(baseDir);
    ProcessOutputStreamConsumer consumer = inMemoryConsumer();
    Revision revision = latestRevision(svnMaterial, baseDir, new TestSubprocessExecutionContext());
    svnMaterial.updateTo(consumer, baseDir, new RevisionContext(revision), new TestSubprocessExecutionContext());
    File workingDir = new File(baseDir, svnMaterial.getFolder());
    File newFileToAdd = new File(workingDir, fileName);
    newFileToAdd.getParentFile().mkdirs();
    FileUtils.writeStringToFile(newFileToAdd, "", UTF_8);
    svnMaterial.add(consumer, newFileToAdd);
    svnMaterial.commit(consumer, workingDir, "adding file [" + svnMaterial.getFolder() + "/" + fileName + "]");
}
/**
 * Fetches the material's latest modifications in {@code workingDir} and maps
 * them to their latest {@link Revision}.
 */
private Revision latestRevision(SvnMaterial material, File workingDir, TestSubprocessExecutionContext execCtx) {
    return new Modifications(material.latestModification(workingDir, execCtx)).latestRevision(material);
}
}
| apache-2.0 |
YasithLokuge/core-util | components/dbutil/src/main/java/com/wso2telco/utils/exception/ThrowableError.java | 151 | package com.wso2telco.utils.exception;
/**
 * Carrier for an error message / error code pair.
 *
 * @deprecated retained for backward compatibility only.
 */
@Deprecated
public interface ThrowableError {

    /** Returns the human-readable error message. */
    String getMessage();

    /** Returns the machine-readable error code. */
    String getCode();
}
| apache-2.0 |
aleph-zero/presto | presto-main/src/test/java/com/facebook/presto/execution/scheduler/TestPhasedExecutionSchedule.java | 12126 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.scheduler;
import com.facebook.presto.connector.ConnectorId;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.TestingColumnHandle;
import com.facebook.presto.sql.planner.TestingTableHandle;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.plan.JoinNode.DistributionType.PARTITIONED;
import static com.facebook.presto.sql.planner.plan.JoinNode.DistributionType.REPLICATED;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.INNER;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.RIGHT;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableList;
import static org.testng.Assert.assertEquals;
/**
 * Unit tests for {@code PhasedExecutionSchedule#extractPhases}: builds small
 * plan-fragment graphs (exchange, union, partitioned/broadcast joins) and
 * asserts how the fragments are partitioned into ordered scheduling phases.
 */
public class TestPhasedExecutionSchedule
{
    // An exchange consumer is scheduled first; each source fragment gets its own phase.
    @Test
    public void testExchange()
            throws Exception
    {
        PlanFragment aFragment = createTableScanPlanFragment("a");
        PlanFragment bFragment = createTableScanPlanFragment("b");
        PlanFragment cFragment = createTableScanPlanFragment("c");
        PlanFragment exchangeFragment = createExchangePlanFragment("exchange", aFragment, bFragment, cFragment);

        List<Set<PlanFragmentId>> phases = PhasedExecutionSchedule.extractPhases(ImmutableList.of(aFragment, bFragment, cFragment, exchangeFragment));

        assertEquals(phases, ImmutableList.of(
                ImmutableSet.of(exchangeFragment.getId()),
                ImmutableSet.of(aFragment.getId()),
                ImmutableSet.of(bFragment.getId()),
                ImmutableSet.of(cFragment.getId())));
    }

    // A union behaves like an exchange: consumer first, then one phase per input.
    @Test
    public void testUnion()
            throws Exception
    {
        PlanFragment aFragment = createTableScanPlanFragment("a");
        PlanFragment bFragment = createTableScanPlanFragment("b");
        PlanFragment cFragment = createTableScanPlanFragment("c");
        PlanFragment unionFragment = createUnionPlanFragment("union", aFragment, bFragment, cFragment);

        List<Set<PlanFragmentId>> phases = PhasedExecutionSchedule.extractPhases(ImmutableList.of(aFragment, bFragment, cFragment, unionFragment));

        assertEquals(phases, ImmutableList.of(
                ImmutableSet.of(unionFragment.getId()),
                ImmutableSet.of(aFragment.getId()),
                ImmutableSet.of(bFragment.getId()),
                ImmutableSet.of(cFragment.getId())));
    }

    // Partitioned inner join: join fragment, then build side, then probe side.
    @Test
    public void testJoin()
            throws Exception
    {
        PlanFragment buildFragment = createTableScanPlanFragment("build");
        PlanFragment probeFragment = createTableScanPlanFragment("probe");
        PlanFragment joinFragment = createJoinPlanFragment(INNER, "join", buildFragment, probeFragment);

        List<Set<PlanFragmentId>> phases = PhasedExecutionSchedule.extractPhases(ImmutableList.of(joinFragment, buildFragment, probeFragment));

        assertEquals(phases, ImmutableList.of(ImmutableSet.of(joinFragment.getId()), ImmutableSet.of(buildFragment.getId()), ImmutableSet.of(probeFragment.getId())));
    }

    // Right join phases the same way as the inner join above.
    @Test
    public void testRightJoin()
            throws Exception
    {
        PlanFragment buildFragment = createTableScanPlanFragment("build");
        PlanFragment probeFragment = createTableScanPlanFragment("probe");
        PlanFragment joinFragment = createJoinPlanFragment(RIGHT, "join", buildFragment, probeFragment);

        List<Set<PlanFragmentId>> phases = PhasedExecutionSchedule.extractPhases(ImmutableList.of(joinFragment, buildFragment, probeFragment));

        assertEquals(phases, ImmutableList.of(ImmutableSet.of(joinFragment.getId()), ImmutableSet.of(buildFragment.getId()), ImmutableSet.of(probeFragment.getId())));
    }

    // Broadcast join: the join and its build source are scheduled together in one phase.
    @Test
    public void testBroadcastJoin()
            throws Exception
    {
        PlanFragment buildFragment = createTableScanPlanFragment("build");
        PlanFragment joinFragment = createBroadcastJoinPlanFragment("join", buildFragment);

        List<Set<PlanFragmentId>> phases = PhasedExecutionSchedule.extractPhases(ImmutableList.of(joinFragment, buildFragment));

        assertEquals(phases, ImmutableList.of(ImmutableSet.of(joinFragment.getId(), buildFragment.getId())));
    }

    // The whole build-side chain is scheduled before any probe-side fragment.
    @Test
    public void testJoinWithDeepSources()
            throws Exception
    {
        PlanFragment buildSourceFragment = createTableScanPlanFragment("buildSource");
        PlanFragment buildMiddleFragment = createExchangePlanFragment("buildMiddle", buildSourceFragment);
        PlanFragment buildTopFragment = createExchangePlanFragment("buildTop", buildMiddleFragment);
        PlanFragment probeSourceFragment = createTableScanPlanFragment("probeSource");
        PlanFragment probeMiddleFragment = createExchangePlanFragment("probeMiddle", probeSourceFragment);
        PlanFragment probeTopFragment = createExchangePlanFragment("probeTop", probeMiddleFragment);
        PlanFragment joinFragment = createJoinPlanFragment(INNER, "join", buildTopFragment, probeTopFragment);

        List<Set<PlanFragmentId>> phases = PhasedExecutionSchedule.extractPhases(ImmutableList.of(
                joinFragment,
                buildTopFragment,
                buildMiddleFragment,
                buildSourceFragment,
                probeTopFragment,
                probeMiddleFragment,
                probeSourceFragment));

        assertEquals(phases, ImmutableList.of(
                ImmutableSet.of(joinFragment.getId()),
                ImmutableSet.of(buildTopFragment.getId()),
                ImmutableSet.of(buildMiddleFragment.getId()),
                ImmutableSet.of(buildSourceFragment.getId()),
                ImmutableSet.of(probeTopFragment.getId()),
                ImmutableSet.of(probeMiddleFragment.getId()),
                ImmutableSet.of(probeSourceFragment.getId())));
    }

    // Fragment reading from all given fragments through a single remote source.
    private static PlanFragment createExchangePlanFragment(String name, PlanFragment... fragments)
    {
        PlanNode planNode = new RemoteSourceNode(
                new PlanNodeId(name + "_id"),
                Stream.of(fragments)
                        .map(PlanFragment::getId)
                        .collect(toImmutableList()),
                fragments[0].getPartitioningScheme().getOutputLayout());

        return createFragment(planNode);
    }

    // Fragment that unions one remote source per input fragment.
    private static PlanFragment createUnionPlanFragment(String name, PlanFragment... fragments)
    {
        PlanNode planNode = new UnionNode(
                new PlanNodeId(name + "_id"),
                Stream.of(fragments)
                        .map(fragment -> new RemoteSourceNode(new PlanNodeId(fragment.getId().toString()), fragment.getId(), fragment.getPartitioningScheme().getOutputLayout()))
                        .collect(toImmutableList()),
                ImmutableListMultimap.of(),
                ImmutableList.of());

        return createFragment(planNode);
    }

    // REPLICATED join: local table scan probes, remote source supplies the build side.
    private static PlanFragment createBroadcastJoinPlanFragment(String name, PlanFragment buildFragment)
    {
        Symbol symbol = new Symbol("column");
        PlanNode tableScan = new TableScanNode(
                new PlanNodeId(name),
                new TableHandle(new ConnectorId("test"), new TestingTableHandle()),
                ImmutableList.of(symbol),
                ImmutableMap.of(symbol, new TestingColumnHandle("column")),
                Optional.empty(),
                TupleDomain.all(),
                null);

        RemoteSourceNode remote = new RemoteSourceNode(new PlanNodeId("build_id"), buildFragment.getId(), ImmutableList.of());
        PlanNode join = new JoinNode(
                new PlanNodeId(name + "_id"),
                INNER,
                tableScan,
                remote,
                ImmutableList.of(),
                ImmutableList.<Symbol>builder()
                        .addAll(tableScan.getOutputSymbols())
                        .addAll(remote.getOutputSymbols())
                        .build(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty(),
                Optional.of(REPLICATED));

        return createFragment(join);
    }

    // PARTITIONED join of two remote sources (probe and build fragments).
    private static PlanFragment createJoinPlanFragment(JoinNode.Type joinType, String name, PlanFragment buildFragment, PlanFragment probeFragment)
    {
        RemoteSourceNode probe = new RemoteSourceNode(new PlanNodeId("probe_id"), probeFragment.getId(), ImmutableList.of());
        RemoteSourceNode build = new RemoteSourceNode(new PlanNodeId("build_id"), buildFragment.getId(), ImmutableList.of());
        PlanNode planNode = new JoinNode(
                new PlanNodeId(name + "_id"),
                joinType,
                probe,
                build,
                ImmutableList.of(),
                ImmutableList.<Symbol>builder()
                        .addAll(probe.getOutputSymbols())
                        .addAll(build.getOutputSymbols())
                        .build(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty(),
                Optional.of(PARTITIONED));
        return createFragment(planNode);
    }

    // Leaf fragment: a single-column table scan over a testing table handle.
    private static PlanFragment createTableScanPlanFragment(String name)
    {
        Symbol symbol = new Symbol("column");
        PlanNode planNode = new TableScanNode(
                new PlanNodeId(name),
                new TableHandle(new ConnectorId("test"), new TestingTableHandle()),
                ImmutableList.of(symbol),
                ImmutableMap.of(symbol, new TestingColumnHandle("column")),
                Optional.empty(),
                TupleDomain.all(),
                null);

        return createFragment(planNode);
    }

    // Wraps a plan node into a SOURCE_DISTRIBUTION fragment; all outputs typed VARCHAR.
    private static PlanFragment createFragment(PlanNode planNode)
    {
        ImmutableMap.Builder<Symbol, Type> types = ImmutableMap.builder();
        for (Symbol symbol : planNode.getOutputSymbols()) {
            types.put(symbol, VARCHAR);
        }
        return new PlanFragment(
                new PlanFragmentId(planNode.getId() + "_fragment_id"),
                planNode,
                types.build(),
                SOURCE_DISTRIBUTION,
                ImmutableList.of(planNode.getId()),
                new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), planNode.getOutputSymbols()));
    }
}
| apache-2.0 |
catalinmoraru/habbit-quest | src/main/java/com/redhat/developers/msa/ola/HolaService.java | 994 | /**
* JBoss, Home of Professional Open Source
* Copyright 2016, Red Hat, Inc. and/or its affiliates, and individual
* contributors by the @authors tag. See the copyright.txt in the
* distribution for a full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.redhat.developers.msa.ola;
import feign.RequestLine;
import java.util.List;
/** Feign client interface for the "hola" microservice. */
public interface HolaService {

    /**
     * Invokes {@code GET /api/hola-chaining} and returns the list of greetings
     * produced by the chained services.
     */
    @RequestLine("GET /api/hola-chaining")
    public List<String> hola();
}
| apache-2.0 |
DANS-KNAW/dccd-lib | src/main/java/nl/knaw/dans/dccd/authn/OrganisationRegistrationSpecification.java | 2809 | /*******************************************************************************
* Copyright 2015 DANS - Data Archiving and Networked Services
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package nl.knaw.dans.dccd.authn;
import nl.knaw.dans.common.lang.RepositoryException;
import nl.knaw.dans.dccd.application.services.DccdUserService;
import nl.knaw.dans.dccd.model.DccdOrganisation;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Specification deciding whether an {@link OrganisationRegistration} may proceed:
 * the organisation id must be non-blank and not already present in the repository.
 * On any failed check the registration's state is updated to describe the reason.
 *
 * <p>Refactoring note: commonalities with UserRegistrationSpecification.
 *
 * @author paulboon
 */
public final class OrganisationRegistrationSpecification {
    private static final Logger logger = LoggerFactory.getLogger(OrganisationRegistrationSpecification.class);

    /** Static utility class; not instantiable. */
    private OrganisationRegistrationSpecification() {
    }

    /**
     * @return true when the registration has sufficient data and a unique organisation id.
     *         The uniqueness check is only performed when the data check passes (short-circuit).
     */
    public static boolean isSatisfiedBy(OrganisationRegistration registration)
    {
        return hasSufficientData(registration) && hasUniqueID(registration);
    }

    /**
     * Checks the registration carries enough data; currently only that the
     * organisation id is non-blank. Sets the failure state on the registration.
     */
    private static boolean hasSufficientData(OrganisationRegistration registration)
    {
        // TODO more sophisticated tests
        boolean sufficientData = true;
        DccdOrganisation organisation = registration.getOrganisation();
        if (StringUtils.isBlank(organisation.getId()))
        {
            sufficientData = false;
            registration.setState(OrganisationRegistration.State.OrganisationIdCannotBeBlank);
        }
        return sufficientData;
    }

    /**
     * Checks the organisation id is not already registered. Repository failures
     * are reported as a SystemError state and treated as "not unique".
     */
    private static boolean hasUniqueID(OrganisationRegistration registration)
    {
        boolean hasUniqueId = false;
        String organisationId = registration.getOrganisation().getId();
        try
        {
            if (!DccdUserService.getService().getOrganisationRepo().exists(organisationId))
            {
                hasUniqueId = true;
            }
            else
            {
                registration.setState(OrganisationRegistration.State.OrganisationIdNotUnique);
            }
        }
        catch (RepositoryException e)
        {
            logger.error("Could not verify if Id is unique: ", e);
            registration.setState(OrganisationRegistration.State.SystemError, e);
        }
        return hasUniqueId;
    }
}
| apache-2.0 |
pumpadump/sweble-wikitext | swc-article-cruncher/src/main/java/org/sweble/wikitext/articlecruncher/StorerFactory.java | 1026 | /**
* Copyright 2011 The Open Source Research Group,
* University of Erlangen-Nürnberg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sweble.wikitext.articlecruncher;
import java.util.concurrent.BlockingQueue;
import org.sweble.wikitext.articlecruncher.utils.AbortHandler;
import org.sweble.wikitext.articlecruncher.utils.WorkerBase;
/** Factory for storer workers that consume completed jobs from the out tray. */
public interface StorerFactory
{
    /**
     * Creates a storer worker.
     *
     * @param abortHandler handler notified on worker abort
     * @param jobTraces job traces available to the worker — presumably updated
     *        as jobs are stored; confirm against implementations
     * @param outTray queue of finished jobs the worker drains
     */
    WorkerBase create(
            AbortHandler abortHandler,
            JobTraceSet jobTraces,
            BlockingQueue<Job> outTray);
}
| apache-2.0 |
selkhateeb/closure-compiler | src/com/google/javascript/jscomp/newtypes/JSTypeCreatorFromJSDoc.java | 41130 | /*
* Copyright 2013 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.newtypes;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.CodingConvention;
import com.google.javascript.jscomp.DiagnosticGroup;
import com.google.javascript.jscomp.DiagnosticType;
import com.google.javascript.jscomp.JSError;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*
* @author blickly@google.com (Ben Lickly)
* @author dimvar@google.com (Dimitris Vardoulakis)
*/
public final class JSTypeCreatorFromJSDoc {
public static final DiagnosticType INVALID_GENERICS_INSTANTIATION =
DiagnosticType.warning(
"JSC_NTI_INVALID_GENERICS_INSTANTIATION",
"Invalid generics instantiation{0}.\n"
+ "Expected {1} type argument(s), but found {2}");
public static final DiagnosticType EXTENDS_NON_OBJECT =
DiagnosticType.warning(
"JSC_NTI_EXTENDS_NON_OBJECT",
"{0} extends non-object type {1}.\n");
public static final DiagnosticType EXTENDS_NOT_ON_CTOR_OR_INTERF =
DiagnosticType.warning(
"JSC_NTI_EXTENDS_NOT_ON_CTOR_OR_INTERF",
"@extends used without @constructor or @interface for {0}.\n");
public static final DiagnosticType INHERITANCE_CYCLE =
DiagnosticType.warning(
"JSC_NTI_INHERITANCE_CYCLE",
"Cycle detected in inheritance chain of type {0}");
public static final DiagnosticType DICT_IMPLEMENTS_INTERF =
DiagnosticType.warning(
"JSC_NTI_DICT_IMPLEMENTS_INTERF",
"Class {0} is a dict. Dicts can't implement interfaces");
public static final DiagnosticType IMPLEMENTS_WITHOUT_CONSTRUCTOR =
DiagnosticType.warning(
"JSC_NTI_IMPLEMENTS_WITHOUT_CONSTRUCTOR",
"@implements used without @constructor or @interface for {0}");
// Not part of ALL_DIAGNOSTICS because it should not be enabled with
// --jscomp_error=newCheckTypes. It should only be enabled explicitly.
public static final DiagnosticType CONFLICTING_SHAPE_TYPE =
DiagnosticType.disabled(
"JSC_NTI_CONFLICTING_SHAPE_TYPE",
"{1} cannot extend this type; {0}s can only extend {0}s");
public static final DiagnosticType CONFLICTING_EXTENDED_TYPE =
DiagnosticType.warning(
"JSC_NTI_CONFLICTING_EXTENDED_TYPE",
"{1} cannot extend this type; {0}s can only extend {0}s");
public static final DiagnosticType CONFLICTING_IMPLEMENTED_TYPE =
DiagnosticType.warning(
"JSC_NTI_CONFLICTING_IMPLEMENTED_TYPE",
"{0} cannot implement this type; "
+ "an interface can only extend, but not implement interfaces");
public static final DiagnosticType UNION_IS_UNINHABITABLE =
DiagnosticType.warning(
"JSC_NTI_UNION_IS_UNINHABITABLE",
"Union of {0} with {1} would create an impossible type");
public static final DiagnosticType NEW_EXPECTS_OBJECT_OR_TYPEVAR =
DiagnosticType.warning(
"JSC_NTI_NEW_EXPECTS_OBJECT_OR_TYPEVAR",
"The \"new:\" annotation only accepts object types and type variables; "
+ "found {0}");
public static final DiagnosticType BAD_ARRAY_TYPE_SYNTAX =
DiagnosticType.warning(
"JSC_NTI_BAD_ARRAY_TYPE_SYNTAX",
"The [] type syntax is not supported. Please use Array.<T> instead");
public static final DiagnosticType CANNOT_MAKE_TYPEVAR_NON_NULL =
DiagnosticType.warning(
"JSC_NTI_CANNOT_MAKE_TYPEVAR_NON_NULL",
"Cannot use ! to restrict type variable type.\n"
+ "Prefer to make type argument non-nullable and add "
+ "null explicitly where needed (e.g. through ?T or T|null)");
public static final DiagnosticType CIRCULAR_TYPEDEF_ENUM =
DiagnosticType.warning(
"JSC_NTI_CIRCULAR_TYPEDEF_ENUM",
"Circular typedefs/enums are not allowed");
public static final DiagnosticType ENUM_WITH_TYPEVARS =
DiagnosticType.warning(
"JSC_NTI_ENUM_WITH_TYPEVARS",
"An enum type cannot include type variables");
public static final DiagnosticType ENUM_IS_TOP =
DiagnosticType.warning(
"JSC_NTI_ENUM_IS_TOP",
"An enum type cannot be *. "
+ "Use ? if you do not want the elements checked");
// TODO(dimvar): This may prove to be too strict, may revisit.
public static final DiagnosticType ENUM_IS_UNION =
DiagnosticType.warning(
"JSC_NTI_ENUM_IS_UNION",
"An enum type cannot be a union type");
public static final DiagnosticType WRONG_PARAMETER_ORDER =
DiagnosticType.warning(
"JSC_NTI_WRONG_PARAMETER_ORDER",
"Wrong parameter order: required parameters are first, "
+ "then optional, then varargs");
public static final DiagnosticType IMPLEMENTS_NON_INTERFACE =
DiagnosticType.warning(
"JSC_NTI_IMPLEMENTS_NON_INTERFACE",
"Cannot implement non-interface {0}");
public static final DiagnosticType EXTENDS_NON_INTERFACE =
DiagnosticType.warning(
"JSC_NTI_EXTENDS_NON_INTERFACE",
"Cannot extend non-interface {0}");
public static final DiagnosticType FUNCTION_WITH_NONFUNC_JSDOC =
DiagnosticType.warning(
"JSC_NTI_FUNCTION_WITH_NONFUNC_JSDOC",
"The function is annotated with a non-function jsdoc. "
+ "Ignoring jsdoc");
public static final DiagnosticType TEMPLATED_GETTER_SETTER =
DiagnosticType.warning(
"JSC_NTI_TEMPLATED_GETTER_SETTER",
"@template can't be used with getters/setters");
public static final DiagnosticType TWO_JSDOCS =
DiagnosticType.warning(
"JSC_NTI_TWO_JSDOCS",
"Found two JsDoc comments for {0}");
public static final DiagnosticGroup ALL_DIAGNOSTICS = new DiagnosticGroup(
BAD_ARRAY_TYPE_SYNTAX,
CANNOT_MAKE_TYPEVAR_NON_NULL,
CIRCULAR_TYPEDEF_ENUM,
CONFLICTING_EXTENDED_TYPE,
CONFLICTING_IMPLEMENTED_TYPE,
DICT_IMPLEMENTS_INTERF,
ENUM_IS_TOP,
ENUM_IS_UNION,
ENUM_WITH_TYPEVARS,
EXTENDS_NON_INTERFACE,
EXTENDS_NON_OBJECT,
EXTENDS_NOT_ON_CTOR_OR_INTERF,
FUNCTION_WITH_NONFUNC_JSDOC,
IMPLEMENTS_NON_INTERFACE,
IMPLEMENTS_WITHOUT_CONSTRUCTOR,
INHERITANCE_CYCLE,
INVALID_GENERICS_INSTANTIATION,
NEW_EXPECTS_OBJECT_OR_TYPEVAR,
TEMPLATED_GETTER_SETTER,
TWO_JSDOCS,
UNION_IS_UNINHABITABLE,
WRONG_PARAMETER_ORDER);
private final CodingConvention convention;
private final UniqueNameGenerator nameGen;
// Used to communicate state between methods when resolving enum types
private int howmanyTypeVars = 0;
/**
 * Thrown internally when a jsdoc type expression names a type that cannot be
 * resolved; callers convert this into {@code JSType.UNKNOWN} or {@code null}.
 */
public static class UnknownTypeException extends Exception {
    UnknownTypeException(String cause) {
        super(cause);
    }
}
private Set<JSError> warnings = new LinkedHashSet<>();
// Unknown type names indexed by JSDoc AST node at which they were found.
private Map<Node, String> unknownTypeNames = new LinkedHashMap<>();
/**
 * Creates a jsdoc-to-JSType translator.
 *
 * @param convention the coding convention in effect
 * @param nameGen generator used for unique type-variable names
 */
public JSTypeCreatorFromJSDoc(
    CodingConvention convention, UniqueNameGenerator nameGen) {
  // Pre-built declaration for the loose "Function" type (? function),
  // reused whenever jsdoc refers to Function.
  this.qmarkFunctionDeclared = new FunctionAndSlotType(
      null, FunctionTypeBuilder.qmarkFunctionBuilder().buildDeclaration());
  this.convention = convention;
  this.nameGen = nameGen;
}
private FunctionAndSlotType qmarkFunctionDeclared;
private static final boolean NULLABLE_TYPES_BY_DEFAULT = true;
/**
 * Joins {@code t} with null when nullable-by-default is enabled;
 * otherwise returns {@code t} unchanged.
 */
public JSType maybeMakeNullable(JSType t) {
  return NULLABLE_TYPES_BY_DEFAULT ? JSType.join(JSType.NULL, t) : t;
}
/**
 * Returns the declared type from {@code jsdoc}, resolved against the owner
 * type's type parameters (none when {@code ownerType} is null).
 */
public JSType getDeclaredTypeOfNode(JSDocInfo jsdoc, RawNominalType ownerType,
    DeclaredTypeRegistry registry) {
  return getDeclaredTypeOfNode(jsdoc, registry, ownerType == null
      ? ImmutableList.<String>of() : ownerType.getTypeParameters());
}
/**
 * Returns the type declared by {@code jsdoc}'s type expression, or null when
 * there is no jsdoc at all.
 */
private JSType getDeclaredTypeOfNode(JSDocInfo jsdoc,
    DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
  if (jsdoc == null) {
    return null;
  }
  return getTypeFromJSTypeExpression(
      jsdoc.getType(), registry, typeParameters);
}
/** Returns the warnings accumulated while translating jsdoc so far (live set, not a copy). */
public Set<JSError> getWarnings() {
  return warnings;
}
/** Returns unresolved type names keyed by the jsdoc AST node where each was found (live map). */
public Map<Node, String> getUnknownTypesMap() {
  return unknownTypeNames;
}
/** Evaluates a {@link JSTypeExpression}'s AST root; null expression yields null. */
private JSType getTypeFromJSTypeExpression(JSTypeExpression expr,
    DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
  if (expr == null) {
    return null;
  }
  return getTypeFromComment(expr.getRoot(), registry, typeParameters);
}
// Very similar to JSTypeRegistry#createFromTypeNodesInternal
// n is a jsdoc node, not an AST node; the same class (Node) is used for both
/** Evaluates a jsdoc type AST; unresolvable type names degrade to {@code JSType.UNKNOWN}. */
private JSType getTypeFromComment(Node n, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters) {
  try {
    return getTypeFromCommentHelper(n, registry, typeParameters);
  } catch (UnknownTypeException e) {
    return JSType.UNKNOWN;
  }
}
/**
 * Like {@code getTypeFromComment}, but returns null (instead of UNKNOWN)
 * when the type expression contains an unresolvable name.
 */
private JSType getMaybeTypeFromComment(Node n, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters) {
  try {
    return getTypeFromCommentHelper(n, registry, typeParameters);
  } catch (UnknownTypeException e) {
    return null;
  }
}
/**
 * Core recursive evaluation of a jsdoc type AST node into a {@link JSType}.
 * Handles record types (LC), unions (PIPE), non-null (BANG), nullable (QMARK),
 * all-type (STAR), function types, and named types; emits warnings for
 * malformed constructs (bad array syntax, uninhabitable unions, !T on a
 * type variable).
 *
 * @throws UnknownTypeException when a named type cannot be resolved
 */
private JSType getTypeFromCommentHelper(Node n, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters) throws UnknownTypeException {
  Preconditions.checkNotNull(n);
  if (typeParameters == null) {
    typeParameters = ImmutableList.of();
  }
  switch (n.getType()) {
    case Token.LC:
      return getRecordTypeHelper(n, registry, typeParameters);
    case Token.EMPTY: // for function types that don't declare a return type
      return JSType.UNKNOWN;
    case Token.VOID:
      // TODO(dimvar): void can be represented in 2 ways: Token.VOID and a
      // Token.STRING whose getString() is "void".
      // Change jsdoc parsing to only have one representation.
      return JSType.UNDEFINED;
    case Token.LB:
      // The deprecated [] array syntax is rejected with a warning.
      warnings.add(JSError.make(n, BAD_ARRAY_TYPE_SYNTAX));
      return JSType.UNKNOWN;
    case Token.STRING:
      return getNamedTypeHelper(n, registry, typeParameters);
    case Token.PIPE: {
      // The way JSType.join works, Subtype|Supertype is equal to Supertype,
      // so when programmers write un-normalized unions, we normalize them
      // silently. We may also want to warn.
      JSType union = JSType.BOTTOM;
      for (Node child = n.getFirstChild(); child != null;
           child = child.getNext()) {
        // TODO(dimvar): When the union has many things, we join and throw
        // away types, except the result of the last join. Very inefficient.
        // Consider optimizing.
        JSType nextType = getTypeFromCommentHelper(child, registry, typeParameters);
        if (nextType.isUnknown()) {
          return JSType.UNKNOWN;
        }
        JSType nextUnion = JSType.join(union, nextType);
        if (nextUnion.isBottom()) {
          warnings.add(JSError.make(n, UNION_IS_UNINHABITABLE,
              nextType.toString(), union.toString()));
          return JSType.UNKNOWN;
        }
        union = nextUnion;
      }
      return union;
    }
    case Token.BANG: {
      // !T strips null; on a type variable this is not expressible, so warn.
      JSType nullableType = getTypeFromCommentHelper(
          n.getFirstChild(), registry, typeParameters);
      if (nullableType.isTypeVariable()) {
        warnings.add(JSError.make(n, CANNOT_MAKE_TYPEVAR_NON_NULL));
      }
      return nullableType.removeType(JSType.NULL);
    }
    case Token.QMARK: {
      // Bare ? is the unknown type; ?T adds null to T.
      Node child = n.getFirstChild();
      if (child == null) {
        return JSType.UNKNOWN;
      } else {
        return JSType.join(JSType.NULL,
            getTypeFromCommentHelper(child, registry, typeParameters));
      }
    }
    case Token.STAR:
      return JSType.TOP;
    case Token.FUNCTION:
      return getFunTypeHelper(n, registry, typeParameters);
    default:
      throw new IllegalArgumentException("Unsupported type exp: " +
          Token.name(n.getType()) + " " + n.toStringTree());
  }
}
// Looks at the type AST without evaluating it
/**
 * Returns whether {@code n} is a union (PIPE) node that mentions
 * void/undefined among its alternatives.
 */
private boolean isUnionWithUndefined(Node n) {
  if (n == null || n.getType() != Token.PIPE) {
    return false;
  }
  for (Node child : n.children()) {
    int childType = child.getType();
    if (childType == Token.VOID) {
      return true;
    }
    if (childType == Token.STRING) {
      String typeName = child.getString();
      if (typeName.equals("void") || typeName.equals("undefined")) {
        return true;
      }
    }
  }
  return false;
}
/**
 * Evaluates a record-type literal ({prop: T, ...}) into an object type.
 * Properties without a declared type become UNKNOWN; properties whose type
 * is undefined (or a union containing undefined) become optional.
 */
private JSType getRecordTypeHelper(Node n, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters) throws UnknownTypeException {
  Map<String, Property> props = new LinkedHashMap<>();
  for (Node propNode = n.getFirstFirstChild();
       propNode != null;
       propNode = propNode.getNext()) {
    boolean isPropDeclared = propNode.getType() == Token.COLON;
    Node propNameNode = isPropDeclared ? propNode.getFirstChild() : propNode;
    String propName = propNameNode.getString();
    // Strip surrounding quotes from quoted property names.
    if (propName.startsWith("'") || propName.startsWith("\"")) {
      propName = propName.substring(1, propName.length() - 1);
    }
    JSType propType = !isPropDeclared
        ? JSType.UNKNOWN
        : getTypeFromCommentHelper(propNode.getLastChild(), registry, typeParameters);
    Property prop;
    if (propType.equals(JSType.UNDEFINED)
        || isUnionWithUndefined(propNode.getLastChild())) {
      prop = Property.makeOptional(null, propType, propType);
    } else {
      prop = Property.make(propType, propType);
    }
    props.put(propName, prop);
  }
  return JSType.fromObjectType(ObjectType.fromProperties(props));
}
/**
 * Resolves a named type (Token.STRING node). Primitive names map directly to
 * the corresponding JSType; anything else is looked up in the registry.
 * Generic instantiation on non-generic primitives triggers a warning.
 */
private JSType getNamedTypeHelper(Node n, DeclaredTypeRegistry registry,
    ImmutableList<String> outerTypeParameters)
    throws UnknownTypeException {
  String typeName = n.getString();
  switch (typeName) {
    case "boolean":
      checkInvalidGenericsInstantiation(n);
      return JSType.BOOLEAN;
    case "null":
      checkInvalidGenericsInstantiation(n);
      return JSType.NULL;
    case "number":
      checkInvalidGenericsInstantiation(n);
      return JSType.NUMBER;
    case "string":
      checkInvalidGenericsInstantiation(n);
      return JSType.STRING;
    case "undefined":
    case "void":
      checkInvalidGenericsInstantiation(n);
      return JSType.UNDEFINED;
    case "Function":
      checkInvalidGenericsInstantiation(n);
      return maybeMakeNullable(registry.getCommonTypes().qmarkFunction());
    case "Object":
      // We don't generally handle parameterized Object<...>, but we want to
      // at least not warn about inexistent properties on it, so we type it
      // as @dict.
      return maybeMakeNullable(n.hasChildren() ? JSType.TOP_DICT : JSType.TOP_OBJECT);
    default:
      return lookupTypeByName(typeName, n, registry, outerTypeParameters);
  }
}
/**
 * Resolves a non-primitive type name: a type variable in scope, a typedef,
 * an enum, a nominal type, or a forward declaration (which yields UNKNOWN).
 * Records the name in {@code unknownTypeNames} and throws when nothing matches.
 */
private JSType lookupTypeByName(String name, Node n,
    DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters)
    throws UnknownTypeException {
  // Type variables are stored under generated unique names; map back first.
  String tvar = UniqueNameGenerator.findGeneratedName(name, outerTypeParameters);
  if (tvar != null) {
    checkInvalidGenericsInstantiation(n);
    return JSType.fromTypeVar(tvar);
  }
  Declaration decl = registry.getDeclaration(QualifiedName.fromQualifiedString(name), true);
  if (decl == null) {
    unknownTypeNames.put(n, name);
    throw new UnknownTypeException("Unhandled type: " + name);
  }
  // It's either a typedef, an enum, a type variable, a nominal type, or a
  // forward-declared type.
  if (decl.getTypedef() != null) {
    checkInvalidGenericsInstantiation(n);
    return getTypedefType(decl.getTypedef(), registry);
  }
  if (decl.getEnum() != null) {
    checkInvalidGenericsInstantiation(n);
    return getEnumPropType(decl.getEnum(), registry);
  }
  if (decl.isTypeVar()) {
    checkInvalidGenericsInstantiation(n);
    // Track type-variable use; resolveEnum relies on this counter.
    howmanyTypeVars++;
    return decl.getTypeOfSimpleDecl();
  }
  if (decl.getNominal() != null) {
    return getNominalTypeHelper(decl.getNominal(), n, registry, outerTypeParameters);
  }
  // Forward-declared type
  return JSType.UNKNOWN;
}
/** Resolves the typedef if needed, then returns its underlying type. */
private JSType getTypedefType(Typedef td, DeclaredTypeRegistry registry) {
  resolveTypedef(td, registry);
  return td.getType();
}
/**
 * Resolves a typedef's type expression, memoizing the result on the typedef.
 * A null type expression indicates a cycle (typedef referring to itself),
 * which is warned about and resolved to UNKNOWN.
 *
 * <p>NOTE(review): the precondition message says "getTypedef" — looks stale
 * for this method's name; also checkState-for-null reads like checkNotNull.
 */
public void resolveTypedef(Typedef td, DeclaredTypeRegistry registry) {
  Preconditions.checkState(td != null, "getTypedef should only be " +
      "called when we know that the typedef is defined");
  if (td.isResolved()) {
    return;
  }
  JSTypeExpression texp = td.getTypeExpr();
  JSType tdType;
  if (texp == null) {
    warnings.add(JSError.make(
        td.getTypeExprForErrorReporting().getRoot(), CIRCULAR_TYPEDEF_ENUM));
    tdType = JSType.UNKNOWN;
  } else {
    tdType = getTypeFromJSTypeExpression(texp, registry, null);
  }
  td.resolveTypedef(tdType);
}
/** Returns the property type of the given enum, resolving it first if needed. */
private JSType getEnumPropType(EnumType e, DeclaredTypeRegistry registry) {
  resolveEnum(e, registry);
  return e.getPropType();
}
/**
 * Resolves the given enum against its declared type expression, unless it is
 * already resolved. On any malformed declaration (circular definition,
 * type variables, top type, union type) a warning is recorded and the
 * enumerated type falls back to UNKNOWN.
 */
public void resolveEnum(EnumType e, DeclaredTypeRegistry registry) {
  Preconditions.checkState(e != null, "getEnum should only be " +
      "called when we know that the enum is defined");
  // Resolution is idempotent; do the work at most once.
  if (e.isResolved()) {
    return;
  }
  JSTypeExpression texp = e.getTypeExpr();
  JSType enumeratedType;
  if (texp == null) {
    // A missing type expression signals a circular enum definition.
    warnings.add(JSError.make(
        e.getTypeExprForErrorReporting().getRoot(), CIRCULAR_TYPEDEF_ENUM));
    enumeratedType = JSType.UNKNOWN;
  } else {
    // Snapshot the type-variable counter so we can detect — and roll back
    // the count of — any type variables introduced by this expression.
    int numTypeVars = howmanyTypeVars;
    enumeratedType = getTypeFromJSTypeExpression(texp, registry, null);
    if (howmanyTypeVars > numTypeVars) {
      // Enums may not range over type variables.
      warnings.add(JSError.make(texp.getRoot(), ENUM_WITH_TYPEVARS));
      enumeratedType = JSType.UNKNOWN;
      howmanyTypeVars = numTypeVars;
    } else if (enumeratedType.isTop()) {
      // Enums over the top type are disallowed.
      warnings.add(JSError.make(texp.getRoot(), ENUM_IS_TOP));
      enumeratedType = JSType.UNKNOWN;
    } else if (enumeratedType.isUnion()) {
      // Enums over union types are disallowed.
      warnings.add(JSError.make(texp.getRoot(), ENUM_IS_UNION));
      enumeratedType = JSType.UNKNOWN;
    }
  }
  e.resolveEnum(enumeratedType);
}
/**
 * Warns when a non-generic type reference carries type arguments (children
 * under the name node); expected arity is reported as 0.
 */
private void checkInvalidGenericsInstantiation(Node n) {
  if (!n.hasChildren()) {
    return;
  }
  Node typeArgBlock = n.getFirstChild();
  Preconditions.checkState(typeArgBlock.isBlock(), n);
  warnings.add(JSError.make(n, INVALID_GENERICS_INSTANTIATION,
      "", "0", String.valueOf(typeArgBlock.getChildCount())));
}
/**
 * Builds the JSType for a reference to the nominal type {@code rawType},
 * applying any type arguments present as children of {@code n}. When the
 * argument count does not match the declared arity, the list is padded or
 * truncated; too many arguments additionally produce a warning.
 */
private JSType getNominalTypeHelper(RawNominalType rawType, Node n,
    DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters)
    throws UnknownTypeException {
  NominalType uninstantiated = rawType.getAsNominalType();
  // Fast path: non-generic type referenced without type arguments.
  if (!rawType.isGeneric() && !n.hasChildren()) {
    return rawType.getInstanceWithNullability(NULLABLE_TYPES_BY_DEFAULT);
  }
  ImmutableList.Builder<JSType> typeList = ImmutableList.builder();
  if (n.hasChildren()) {
    // Compute instantiation of polymorphic class/interface.
    Preconditions.checkState(n.getFirstChild().isBlock(), n);
    for (Node child : n.getFirstChild().children()) {
      typeList.add(
          getTypeFromCommentHelper(child, registry, outerTypeParameters));
    }
  }
  ImmutableList<JSType> typeArguments = typeList.build();
  ImmutableList<String> typeParameters = rawType.getTypeParameters();
  int typeArgsSize = typeArguments.size();
  int typeParamsSize = typeParameters.size();
  if (typeArgsSize != typeParamsSize) {
    // We used to also warn when (typeArgsSize < typeParamsSize), but it
    // happens so often that we stopped. Array, Object and goog.Promise are
    // common culprits, but many other types as well.
    if (typeArgsSize > typeParamsSize) {
      warnings.add(JSError.make(
          n, INVALID_GENERICS_INSTANTIATION,
          " for type " + uninstantiated.getName(),
          String.valueOf(typeParamsSize),
          String.valueOf(typeArgsSize)));
    }
    // Coerce the argument list to the declared arity before instantiating.
    return maybeMakeNullable(JSType.fromObjectType(ObjectType.fromNominalType(
        uninstantiated.instantiateGenerics(
            fixLengthOfTypeList(typeParameters.size(), typeArguments)))));
  }
  return maybeMakeNullable(JSType.fromObjectType(ObjectType.fromNominalType(
      uninstantiated.instantiateGenerics(typeArguments))));
}
/**
 * Returns a list of exactly {@code desiredLength} types: the input list,
 * truncated or padded with UNKNOWN as needed. Returns the input unchanged
 * (same instance) when it already has the desired length.
 */
private static List<JSType> fixLengthOfTypeList(
    int desiredLength, List<JSType> typeList) {
  int actualLength = typeList.size();
  if (actualLength == desiredLength) {
    return typeList;
  }
  ImmutableList.Builder<JSType> result = ImmutableList.builder();
  // Keep up to desiredLength of the supplied types...
  result.addAll(typeList.subList(0, Math.min(actualLength, desiredLength)));
  // ...then pad with UNKNOWN if the input was too short.
  for (int i = actualLength; i < desiredLength; i++) {
    result.add(JSType.UNKNOWN);
  }
  return result.build();
}
// Computes a type from a jsdoc that includes a function type, rather than
// one that includes @param, @return, etc.
private JSType getFunTypeHelper(Node jsdocNode, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters) throws UnknownTypeException {
  // There is no owner type here; the jsdoc stands alone.
  FunctionTypeBuilder funBuilder = new FunctionTypeBuilder();
  fillInFunTypeBuilder(jsdocNode, null, registry, typeParameters, funBuilder);
  FunctionType funType = funBuilder.buildFunction();
  return registry.getCommonTypes().fromFunctionType(funType);
}
/**
 * Walks a jsdoc function-type node and records on {@code builder} the
 * receiver/new type (if present), each formal parameter (required,
 * optional, or rest), and the return type.
 */
private void fillInFunTypeBuilder(
    Node jsdocNode, RawNominalType ownerType, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters, FunctionTypeBuilder builder)
    throws UnknownTypeException {
  Node child = jsdocNode.getFirstChild();
  if (child.getType() == Token.THIS) {
    // Explicit this-type annotation; only honored when the function does
    // not already belong to a nominal type.
    if (ownerType == null) {
      builder.addReceiverType(
          getThisOrNewType(child.getFirstChild(), registry, typeParameters));
    }
    child = child.getNext();
  } else if (child.getType() == Token.NEW) {
    Node newTypeNode = child.getFirstChild();
    JSType t = getThisOrNewType(newTypeNode, registry, typeParameters);
    // `new:` expects an object type, or a type variable without scalars.
    if (!t.isSubtypeOf(JSType.TOP_OBJECT)
        && (!t.hasTypeVariable() || t.hasScalar())) {
      warnings.add(JSError.make(
          newTypeNode, NEW_EXPECTS_OBJECT_OR_TYPEVAR, t.toString()));
    }
    builder.addNominalType(t);
    child = child.getNext();
  }
  if (child.getType() == Token.PARAM_LIST) {
    for (Node arg = child.getFirstChild(); arg != null; arg = arg.getNext()) {
      try {
        switch (arg.getType()) {
          case Token.EQUALS:
            // type= : an optional formal.
            builder.addOptFormal(getTypeFromCommentHelper(
                arg.getFirstChild(), registry, typeParameters));
            break;
          case Token.ELLIPSIS:
            // ...type : a rest formal; an omitted type means unknown.
            Node restNode = arg.getFirstChild();
            builder.addRestFormals(restNode == null ? JSType.UNKNOWN :
                getTypeFromCommentHelper(restNode, registry, typeParameters));
            break;
          default:
            builder.addReqFormal(
                getTypeFromCommentHelper(arg, registry, typeParameters));
            break;
        }
      } catch (FunctionTypeBuilder.WrongParameterOrderException e) {
        // Keep the arity correct even when the declared order is invalid.
        warnings.add(JSError.make(jsdocNode, WRONG_PARAMETER_ORDER));
        builder.addPlaceholderFormal();
      }
    }
    child = child.getNext();
  }
  // The remaining child is the return type.
  builder.addRetType(
      getTypeFromCommentHelper(child, registry, typeParameters));
}
/**
 * Parses the type under a this:/new: annotation. A singleton object type
 * unioned with null has the null stripped; anything else is returned as-is.
 */
private JSType getThisOrNewType(Node n,
    DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
  JSType annotatedType = getTypeFromComment(n, registry, typeParameters);
  if (annotatedType.isSingletonObjWithNull()) {
    return annotatedType.removeType(JSType.NULL);
  }
  return annotatedType;
}
/** Returns the interface types listed in the jsdoc's @implements annotations. */
private ImmutableSet<NominalType> getImplementedInterfaces(
    JSDocInfo jsdoc, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters) {
  return getInterfacesHelper(jsdoc, registry, typeParameters, true);
}
/** Returns the interface types listed in the jsdoc's @extends annotations. */
private ImmutableSet<NominalType> getExtendedInterfaces(
    JSDocInfo jsdoc, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters) {
  return getInterfacesHelper(jsdoc, registry, typeParameters, false);
}
/**
 * Collects the well-formed interface types named by either the @implements
 * or the @extends annotations of the given jsdoc. Entries that do not
 * resolve to an interface are reported and skipped.
 */
private ImmutableSet<NominalType> getInterfacesHelper(
    JSDocInfo jsdoc, DeclaredTypeRegistry registry,
    ImmutableList<String> typeParameters, boolean implementedIntfs) {
  ImmutableSet.Builder<NominalType> interfaces = ImmutableSet.builder();
  for (JSTypeExpression texp : (implementedIntfs
      ? jsdoc.getImplementedInterfaces()
      : jsdoc.getExtendedInterfaces())) {
    Node expRoot = texp.getRoot();
    JSType interfaceType =
        getMaybeTypeFromComment(expRoot, registry, typeParameters);
    if (interfaceType == null) {
      // Unresolvable annotation; nothing to record here.
      continue;
    }
    NominalType nominal = interfaceType.getNominalTypeIfSingletonObj();
    if (nominal != null && nominal.isInterface()) {
      interfaces.add(nominal);
    } else {
      // Warn with the diagnostic matching the annotation that was used.
      warnings.add(JSError.make(expRoot,
          implementedIntfs ? IMPLEMENTS_NON_INTERFACE : EXTENDS_NON_INTERFACE,
          interfaceType.toString()));
    }
  }
  return interfaces.build();
}
/**
 * Result of {@link #getFunctionType}: the declared function signature plus
 * the declared type of the slot holding it (null when the jsdoc type is
 * exactly a function type).
 */
public static class FunctionAndSlotType {
  // Declared type of the assigned slot; may be null.
  public JSType slotType;
  // The function signature extracted from jsdoc.
  public DeclaredFunctionType functionType;
  public FunctionAndSlotType(JSType slotType, DeclaredFunctionType functionType) {
    this.slotType = slotType;
    this.functionType = functionType;
  }
}
/**
 * Consumes either a "classic" function jsdoc with @param, @return, etc,
 * or a jsdoc with @type {function ...} and finds the types of the formal
 * parameters and the return value. The result pairs the function type with
 * the declared slot type; callers must separately handle @constructor,
 * @interface, etc.
 *
 * constructorType is non-null iff this function is a constructor or
 * interface declaration.
 */
public FunctionAndSlotType getFunctionType(
    JSDocInfo jsdoc, String functionName, Node declNode,
    RawNominalType constructorType, RawNominalType ownerType,
    DeclaredTypeRegistry registry) {
  FunctionTypeBuilder builder = new FunctionTypeBuilder();
  if (ownerType != null) {
    // Methods implicitly receive their owner as the this-type.
    builder.addReceiverType(ownerType.getInstanceAsJSType());
  }
  try {
    if (jsdoc != null && jsdoc.getType() != null) {
      // The jsdoc is of the @type {...} form.
      JSType simpleType = getDeclaredTypeOfNode(jsdoc, ownerType, registry);
      if (simpleType.isUnknown() || simpleType.isTop()) {
        return qmarkFunctionDeclared;
      }
      FunctionType funType = simpleType.getFunType();
      if (funType != null) {
        // When the declared type is broader than a function type, remember
        // it as the slot type alongside the function signature.
        JSType slotType = simpleType.isFunctionType() ? null : simpleType;
        DeclaredFunctionType declType = funType.toDeclaredFunctionType();
        if (ownerType != null && funType.getThisType() == null) {
          declType = declType.withReceiverType(ownerType.getInstanceAsJSType());
        }
        return new FunctionAndSlotType(slotType, declType);
      } else {
        // @type on a function declaration that is not a function type:
        // warn and fall back to the classic-jsdoc path without a jsdoc.
        warnings.add(JSError.make(declNode, FUNCTION_WITH_NONFUNC_JSDOC));
        jsdoc = null;
      }
    }
    DeclaredFunctionType declType = getFunTypeFromTypicalFunctionJsdoc(
        jsdoc, functionName, declNode,
        constructorType, ownerType, registry, builder);
    return new FunctionAndSlotType(null, declType);
  } catch (FunctionTypeBuilder.WrongParameterOrderException e) {
    warnings.add(JSError.make(declNode, WRONG_PARAMETER_ORDER));
    return qmarkFunctionDeclared;
  }
}
/**
 * Iterates over a function's formal parameters, reading either from the
 * PARAM_LIST node of the function AST or, when no AST is available, from
 * the parameter names recorded in the JSDocInfo.
 */
private static class ParamIterator {
  /** The parameter names from the JSDocInfo. Only set if 'params' is null. */
  Iterator<String> paramNames;
  /**
   * The PARAM_LIST node containing the function parameters. Only set if
   * 'paramNames' is null.
   */
  Node params;
  /** Index of the current child of 'params'; unused in jsdoc mode. */
  int index = -1;

  ParamIterator(Node params, JSDocInfo jsdoc) {
    Preconditions.checkArgument(params != null || jsdoc != null);
    if (params == null) {
      this.params = null;
      this.paramNames = jsdoc.getParameterNames().iterator();
    } else {
      this.params = params;
      this.paramNames = null;
    }
  }

  boolean hasNext() {
    return paramNames != null
        ? paramNames.hasNext()
        : index + 1 < params.getChildCount();
  }

  String nextString() {
    if (paramNames == null) {
      index++;
      return params.getChildAtIndex(index).getString();
    }
    return paramNames.next();
  }

  Node getNode() {
    // No AST node is available in jsdoc mode.
    return paramNames == null ? params.getChildAtIndex(index) : null;
  }
}
/**
 * Builds a DeclaredFunctionType from a "classic" jsdoc (@param, @return,
 * @template, @this, @constructor, @interface, ...). Collects template type
 * variables, fills in formals and return type, then applies the
 * constructor/interface annotations.
 */
private DeclaredFunctionType getFunTypeFromTypicalFunctionJsdoc(
    JSDocInfo jsdoc, String functionName, Node funNode,
    RawNominalType constructorType, RawNominalType ownerType,
    DeclaredTypeRegistry registry, FunctionTypeBuilder builder) {
  ImmutableList.Builder<String> typeParamsBuilder = ImmutableList.builder();
  ImmutableList<String> typeParameters = ImmutableList.of();
  Node parent = funNode.getParent();
  // TODO(dimvar): need more @template warnings
  // - warn for multiple @template annotations
  // - warn for @template annotation w/out usage
  boolean ignoreJsdoc = false;
  if (jsdoc != null) {
    if (constructorType != null) {
      // We have created new names for these type variables in GTI, don't
      // create new ones here.
      typeParamsBuilder.addAll(constructorType.getTypeParameters());
    } else {
      for (String typeParam : jsdoc.getTemplateTypeNames()) {
        typeParamsBuilder.add(this.nameGen.getNextName(typeParam));
      }
    }
    // We don't properly support the type transformation language; we treat
    // its type variables as ordinary type variables.
    for (String typeParam : jsdoc.getTypeTransformations().keySet()) {
      typeParamsBuilder.add(this.nameGen.getNextName(typeParam));
    }
    typeParameters = typeParamsBuilder.build();
    if (!typeParameters.isEmpty()) {
      if (parent.isSetterDef() || parent.isGetterDef()) {
        // @template on a getter/setter: drop the jsdoc entirely and warn.
        ignoreJsdoc = true;
        jsdoc = null;
        warnings.add(JSError.make(funNode, TEMPLATED_GETTER_SETTER));
      } else {
        builder.addTypeParameters(typeParameters);
      }
    }
  }
  if (ownerType != null) {
    // The owner's type parameters are also in scope for this function.
    typeParamsBuilder.addAll(ownerType.getTypeParameters());
    typeParameters = typeParamsBuilder.build();
  }
  fillInFormalParameterTypes(
      jsdoc, funNode, typeParameters, registry, builder, ignoreJsdoc);
  fillInReturnType(
      jsdoc, funNode, parent, typeParameters, registry, builder, ignoreJsdoc);
  if (jsdoc == null) {
    return builder.buildDeclaration();
  }
  // Look at other annotations, eg, @constructor
  NominalType parentClass = getMaybeParentClass(
      jsdoc, functionName, funNode, typeParameters, registry);
  ImmutableSet<NominalType> implementedIntfs = getImplementedInterfaces(
      jsdoc, registry, typeParameters);
  if (constructorType == null && jsdoc.isConstructorOrInterface()) {
    // Anonymous type, don't register it.
    return builder.buildDeclaration();
  } else if (jsdoc.isConstructor()) {
    handleConstructorAnnotation(functionName, funNode, constructorType,
        parentClass, implementedIntfs, registry, builder);
  } else if (jsdoc.isInterface()) {
    handleInterfaceAnnotation(jsdoc, functionName, funNode, constructorType,
        implementedIntfs, typeParameters, registry, builder);
  } else if (!implementedIntfs.isEmpty()) {
    // @implements without @constructor is meaningless.
    warnings.add(JSError.make(
        funNode, IMPLEMENTS_WITHOUT_CONSTRUCTOR, functionName));
  }
  if (jsdoc.hasThisType()) {
    Node thisRoot = jsdoc.getThisType().getRoot();
    Preconditions.checkState(thisRoot.getType() == Token.BANG);
    builder.addReceiverType(
        getThisOrNewType(thisRoot.getFirstChild(), registry, typeParameters));
  }
  return builder.buildDeclaration();
}
/**
 * Records each formal parameter's type and kind (required/optional/rest) on
 * {@code builder}, combining inline parameter jsdoc with @param annotations
 * and warning when both are present for the same formal.
 */
private void fillInFormalParameterTypes(
    JSDocInfo jsdoc, Node funNode,
    ImmutableList<String> typeParameters,
    DeclaredTypeRegistry registry, FunctionTypeBuilder builder,
    boolean ignoreJsdoc /* for when the jsdoc is malformed */) {
  boolean ignoreFunNode = !funNode.isFunction();
  Node params = ignoreFunNode ? null : funNode.getSecondChild();
  ParamIterator iterator = new ParamIterator(params, jsdoc);
  while (iterator.hasNext()) {
    String pname = iterator.nextString();
    Node param = iterator.getNode();
    // The parameter kind starts from the naming convention...
    ParameterKind p = ParameterKind.REQUIRED;
    if (param != null && convention.isOptionalParameter(param)) {
      p = ParameterKind.OPTIONAL;
    } else if (param != null && convention.isVarArgsParameter(param)) {
      p = ParameterKind.REST;
    }
    // NOTE: when ignoreFunNode is true, param is null; the short-circuit
    // on ignoreFunNode below is what prevents an NPE here.
    ParameterType inlineParamType = (ignoreJsdoc || ignoreFunNode || param.getJSDocInfo() == null)
        ? null : parseParameter(param.getJSDocInfo().getType(), p, registry, typeParameters);
    ParameterType fnParamType = inlineParamType;
    JSTypeExpression jsdocExp = jsdoc == null ? null : jsdoc.getParameterType(pname);
    if (jsdocExp != null) {
      if (inlineParamType == null) {
        fnParamType = parseParameter(jsdocExp, p, registry, typeParameters);
      } else {
        // Both inline jsdoc and an @param annotation: keep the inline one.
        warnings.add(JSError.make(
            param, TWO_JSDOCS, "formal parameter " + pname));
      }
    }
    // ...but an explicit jsdoc kind/type overrides the convention.
    JSType t = null;
    if (fnParamType != null) {
      p = fnParamType.kind;
      t = fnParamType.type;
    }
    switch (p) {
      case REQUIRED:
        builder.addReqFormal(t);
        break;
      case OPTIONAL:
        builder.addOptFormal(t);
        break;
      case REST:
        builder.addRestFormals(t != null ? t : JSType.UNKNOWN);
        break;
    }
  }
}
/**
 * Records the function's return type on {@code builder}, preferring inline
 * return jsdoc over an @return annotation (warning when both exist), and
 * forcing UNDEFINED for setters with no annotation.
 */
private void fillInReturnType(
    JSDocInfo jsdoc, Node funNode, Node parent,
    ImmutableList<String> typeParameters,
    DeclaredTypeRegistry registry, FunctionTypeBuilder builder,
    boolean ignoreJsdoc /* for when the jsdoc is malformed */) {
  JSDocInfo inlineRetJsdoc =
      ignoreJsdoc ? null : funNode.getFirstChild().getJSDocInfo();
  JSTypeExpression retTypeExp = jsdoc == null ? null : jsdoc.getReturnType();
  if (parent.isSetterDef() && retTypeExp == null) {
    // inline returns for getters/setters are not parsed
    builder.addRetType(JSType.UNDEFINED);
  } else if (inlineRetJsdoc != null) {
    builder.addRetType(
        getDeclaredTypeOfNode(inlineRetJsdoc, registry, typeParameters));
    if (retTypeExp != null) {
      // Both inline jsdoc and an @return annotation: keep the inline one.
      warnings.add(JSError.make(funNode, TWO_JSDOCS, "the return type"));
    }
  } else {
    builder.addRetType(
        getTypeFromJSTypeExpression(retTypeExp, registry, typeParameters));
  }
}
/**
 * Returns the class named by the jsdoc's @extends annotation, or null when
 * there is no usable base class (no annotation, non-constructor jsdoc,
 * unresolvable type, or extending a non-class), warning as appropriate.
 */
private NominalType getMaybeParentClass(
    JSDocInfo jsdoc, String functionName, Node funNode,
    ImmutableList<String> typeParameters, DeclaredTypeRegistry registry) {
  if (!jsdoc.hasBaseType()) {
    return null;
  }
  if (!jsdoc.isConstructor()) {
    // @extends on something that is not @constructor/@interface.
    warnings.add(JSError.make(
        funNode, EXTENDS_NOT_ON_CTOR_OR_INTERF, functionName));
    return null;
  }
  Node docNode = jsdoc.getBaseType().getRoot();
  JSType extendedType =
      getMaybeTypeFromComment(docNode, registry, typeParameters);
  if (extendedType == null) {
    return null;
  }
  NominalType baseType = extendedType.getNominalTypeIfSingletonObj();
  if (baseType == null) {
    warnings.add(JSError.make(funNode, EXTENDS_NON_OBJECT,
        functionName, extendedType.toString()));
    return null;
  }
  if (baseType.isClass()) {
    return baseType;
  }
  // The only remaining nominal kind is an interface, which a class
  // cannot extend.
  Preconditions.checkState(baseType.isInterface());
  warnings.add(JSError.make(funNode, CONFLICTING_EXTENDED_TYPE,
      "constructor", functionName));
  return null;
}
/**
 * Applies @constructor semantics: wires up the superclass (defaulting to
 * Object for everything except Object itself), checks struct/dict shape
 * compatibility with the parent, registers implemented interfaces, and
 * records the nominal type on {@code builder}.
 */
private void handleConstructorAnnotation(
    String functionName, Node funNode, RawNominalType constructorType,
    NominalType parentClass, ImmutableSet<NominalType> implementedIntfs,
    DeclaredTypeRegistry registry, FunctionTypeBuilder builder) {
  String className = constructorType.toString();
  NominalType builtinObject = registry.getCommonTypes().getObjectType();
  if (parentClass == null && !functionName.equals("Object")) {
    // Classes without an explicit @extends implicitly extend Object.
    parentClass = builtinObject;
  }
  if (parentClass != null) {
    if (!constructorType.addSuperClass(parentClass)) {
      warnings.add(JSError.make(funNode, INHERITANCE_CYCLE, className));
    } else if (parentClass != builtinObject) {
      // Struct/dict-ness must be consistent along the class hierarchy.
      if (constructorType.isStruct() && !parentClass.isStruct()) {
        warnings.add(JSError.make(
            funNode, CONFLICTING_SHAPE_TYPE, "struct", className));
      } else if (constructorType.isDict() && !parentClass.isDict()) {
        warnings.add(JSError.make(
            funNode, CONFLICTING_SHAPE_TYPE, "dict", className));
      }
    }
  }
  if (constructorType.isDict() && !implementedIntfs.isEmpty()) {
    warnings.add(JSError.make(funNode, DICT_IMPLEMENTS_INTERF, className));
  }
  // Classes implementing interfaces cannot introduce cycles here.
  boolean noCycles = constructorType.addInterfaces(implementedIntfs);
  Preconditions.checkState(noCycles);
  builder.addNominalType(constructorType.getInstanceAsJSType());
}
/**
 * Applies @interface semantics: registers the extended interfaces on the
 * RawNominalType (defaulting to Object when none are declared), warns on
 * @implements misuse and on inheritance cycles, and records the nominal
 * type on {@code builder}.
 */
private void handleInterfaceAnnotation(
    JSDocInfo jsdoc, String functionName, Node funNode,
    RawNominalType constructorType,
    ImmutableSet<NominalType> implementedIntfs,
    ImmutableList<String> typeParameters,
    DeclaredTypeRegistry registry, FunctionTypeBuilder builder) {
  if (!implementedIntfs.isEmpty()) {
    // Interfaces extend other interfaces; they cannot @implements.
    warnings.add(JSError.make(
        funNode, CONFLICTING_IMPLEMENTED_TYPE, functionName));
  }
  ImmutableSet<NominalType> extendedInterfaces =
      getExtendedInterfaces(jsdoc, registry, typeParameters);
  // An interface with no @extends implicitly extends Object.
  ImmutableSet<NominalType> superInterfaces = extendedInterfaces.isEmpty()
      ? ImmutableSet.of(registry.getCommonTypes().getObjectType())
      : extendedInterfaces;
  if (!constructorType.addInterfaces(superInterfaces)) {
    warnings.add(JSError.make(
        funNode, INHERITANCE_CYCLE, constructorType.toString()));
  }
  builder.addNominalType(constructorType.getInstanceAsJSType());
}
// /** @param {...?} var_args */ function f(var_args) { ... }
// var_args shouldn't be used in the body of f
public static boolean isRestArg(JSDocInfo funJsdoc, String formalParamName) {
  if (funJsdoc == null) {
    return false;
  }
  JSTypeExpression texp = funJsdoc.getParameterType(formalParamName);
  if (texp == null) {
    return false;
  }
  Node root = texp.getRoot();
  // A rest parameter is annotated with an ELLIPSIS node at the root.
  return root != null && root.getType() == Token.ELLIPSIS;
}
/**
 * Parses a formal parameter's jsdoc type expression into a ParameterType,
 * or returns null when there is no expression.
 */
private ParameterType parseParameter(
    JSTypeExpression jsdoc, ParameterKind p,
    DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
  return jsdoc == null
      ? null
      : parseParameter(jsdoc.getRoot(), p, registry, typeParameters);
}
/**
 * Parses a formal parameter's jsdoc type node into a ParameterType. An
 * EQUALS (=) or ELLIPSIS (...) wrapper overrides the incoming kind with
 * OPTIONAL or REST respectively.
 */
private ParameterType parseParameter(
    Node jsdoc, ParameterKind p,
    DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
  if (jsdoc == null) {
    return null;
  }
  int marker = jsdoc.getType();
  if (marker == Token.EQUALS) {
    p = ParameterKind.OPTIONAL;
    jsdoc = jsdoc.getFirstChild();
  } else if (marker == Token.ELLIPSIS) {
    p = ParameterKind.REST;
    jsdoc = jsdoc.getFirstChild();
  }
  JSType t = getMaybeTypeFromComment(jsdoc, registry, typeParameters);
  return new ParameterType(t, p);
}
/**
 * A formal parameter's declared type together with its kind
 * (required/optional/rest).
 */
private static class ParameterType {
  // May be null when the jsdoc did not yield a type
  // (see getMaybeTypeFromComment).
  private JSType type;
  private ParameterKind kind;
  ParameterType(JSType type, ParameterKind kind) {
    this.type = type;
    this.kind = kind;
  }
}
/** How a formal parameter was declared: required, optional (=), or rest (...). */
private static enum ParameterKind {
  REQUIRED,
  OPTIONAL,
  REST,
}
}
| apache-2.0 |
thomasdarimont/spring-security | core/src/test/java/org/springframework/security/authorization/AuthorityReactiveAuthorizationManagerTests.java | 4060 | /*
* Copyright 2002-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.authorization;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.security.authentication.TestingAuthenticationToken;
import org.springframework.security.core.Authentication;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.util.Collections;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
/**
* @author Rob Winch
* @since 5.0
*/
@RunWith(MockitoJUnitRunner.class)
public class AuthorityReactiveAuthorizationManagerTests {
@Mock
Authentication authentication;
AuthorityReactiveAuthorizationManager<Object> manager = AuthorityReactiveAuthorizationManager
.hasAuthority("ADMIN");
@Test
public void checkWhenHasAuthorityAndNotAuthenticatedThenReturnFalse() {
boolean granted = manager.check(Mono.just(authentication), null).block().isGranted();
assertThat(granted).isFalse();
}
@Test
public void checkWhenHasAuthorityAndEmptyThenReturnFalse() {
boolean granted = manager.check(Mono.empty(), null).block().isGranted();
assertThat(granted).isFalse();
}
@Test
public void checkWhenHasAuthorityAndErrorThenError() {
Mono<AuthorizationDecision> result = manager.check(Mono.error(new RuntimeException("ooops")), null);
StepVerifier
.create(result)
.expectError()
.verify();
}
@Test
public void checkWhenHasAuthorityAndAuthenticatedAndNoAuthoritiesThenReturnFalse() {
when(authentication.isAuthenticated()).thenReturn(true);
when(authentication.getAuthorities()).thenReturn(Collections.emptyList());
boolean granted = manager.check(Mono.just(authentication), null).block().isGranted();
assertThat(granted).isFalse();
}
@Test
public void checkWhenHasAuthorityAndAuthenticatedAndWrongAuthoritiesThenReturnFalse() {
authentication = new TestingAuthenticationToken("rob", "secret", "ROLE_ADMIN");
boolean granted = manager.check(Mono.just(authentication), null).block().isGranted();
assertThat(granted).isFalse();
}
@Test
public void checkWhenHasAuthorityAndAuthorizedThenReturnTrue() {
authentication = new TestingAuthenticationToken("rob", "secret", "ADMIN");
boolean granted = manager.check(Mono.just(authentication), null).block().isGranted();
assertThat(granted).isTrue();
}
@Test
public void checkWhenHasRoleAndAuthorizedThenReturnTrue() {
manager = AuthorityReactiveAuthorizationManager.hasRole("ADMIN");
authentication = new TestingAuthenticationToken("rob", "secret", "ROLE_ADMIN");
boolean granted = manager.check(Mono.just(authentication), null).block().isGranted();
assertThat(granted).isTrue();
}
@Test
public void checkWhenHasRoleAndNotAuthorizedThenReturnTrue() {
manager = AuthorityReactiveAuthorizationManager.hasRole("ADMIN");
authentication = new TestingAuthenticationToken("rob", "secret", "ADMIN");
boolean granted = manager.check(Mono.just(authentication), null).block().isGranted();
assertThat(granted).isFalse();
}
@Test(expected = IllegalArgumentException.class)
public void hasRoleWhenNullThenException() {
String role = null;
AuthorityReactiveAuthorizationManager.hasRole(role);
}
@Test(expected = IllegalArgumentException.class)
public void hasAuthorityWhenNullThenException() {
String authority = null;
AuthorityReactiveAuthorizationManager.hasAuthority(authority);
}
}
| apache-2.0 |
renchunxiao/logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/util/LambdaUtil.java | 2632 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.util;
import org.apache.logging.log4j.message.Message;
/**
 * Utility class for lambda support.
 */
public class LambdaUtil {

    /**
     * Private constructor: this class is not intended to be instantiated.
     */
    private LambdaUtil() {
    }

    /**
     * Converts an array of lambda expressions into an array of their evaluation results.
     *
     * @param suppliers an array of lambda expressions or {@code null}
     * @return an array containing the results of evaluating the lambda expressions (or {@code null} if the suppliers
     *         array was {@code null})
     */
    public static Object[] getAll(final Supplier<?>... suppliers) {
        if (suppliers == null) {
            return null;
        }
        final Object[] results = new Object[suppliers.length];
        for (int i = 0; i < results.length; i++) {
            results[i] = get(suppliers[i]);
        }
        return results;
    }

    /**
     * Returns the result of evaluating the specified function.
     *
     * @param supplier a lambda expression or {@code null}
     * @return the result of evaluating the lambda expression, or {@code null} if the supplier was {@code null}
     */
    public static Object get(final Supplier<?> supplier) {
        return supplier == null ? null : supplier.get();
    }

    /**
     * Returns the Message supplied by the specified function.
     *
     * @param supplier a lambda expression or {@code null}
     * @return the Message resulting from evaluating the lambda expression, or {@code null} if the supplier was
     *         {@code null}
     */
    public static Message get(final MessageSupplier supplier) {
        return supplier == null ? null : supplier.get();
    }
}
| apache-2.0 |
ajordens/orca | orca-clouddriver/src/main/java/com/netflix/spinnaker/orca/clouddriver/pipeline/job/KubernetesRunJobStageDecorator.java | 1796 | /*
* Copyright 2020 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.clouddriver.pipeline.job;
import com.netflix.spinnaker.kork.annotations.NonnullByDefault;
import com.netflix.spinnaker.orca.api.pipeline.graph.TaskNode;
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
import com.netflix.spinnaker.orca.clouddriver.tasks.manifest.PromoteManifestKatoOutputsTask;
import java.util.Map;
import org.springframework.stereotype.Component;
@Component
@NonnullByDefault
public class KubernetesRunJobStageDecorator implements RunJobStageDecorator {

  private static final String CLOUD_PROVIDER = "kubernetes";

  /** Supports only the Kubernetes cloud provider (case-insensitive). */
  @Override
  public boolean supports(String cloudProvider) {
    return CLOUD_PROVIDER.equalsIgnoreCase(cloudProvider);
  }

  @Override
  public void afterRunJobTaskGraph(StageExecution stageExecution, TaskNode.Builder builder) {
    Map<String, Object> context = stageExecution.getContext();
    // A "source" or "manifest" key marks a manifest-based run job; surface
    // the kato outputs for downstream stages in that case.
    if (context.containsKey("source") || context.containsKey("manifest")) {
      builder.withTask("promoteOutputs", PromoteManifestKatoOutputsTask.class);
    }
  }

  @Override
  public void modifyDestroyJobContext(
      RunJobStageContext context, Map<String, Object> destroyContext) {
    // Intentionally a no-op for Kubernetes.
  }
}
| apache-2.0 |
OpenSkywalking/skywalking | oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/dynamic/MeterClassPackageHolder.java | 1094 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.analysis.meter.dynamic;
/**
 * MeterClassPackageHolder holds the package for generated meter classes.
 * The class body is intentionally empty: it exists only so that its
 * package can be referenced when defining generated classes.
 *
 * @since 8.9.0 for adopting JDK16+ to avoid `--add-opens java.base/java.lang=ALL-UNNAMED`
 */
public class MeterClassPackageHolder {
}
| apache-2.0 |
deezer/weslang | third_party/java/language-detection-v2/src/com/cybozu/labs/langdetect/util/LangProfileTest.java | 3765 | /**
*
*/
package com.cybozu.labs.langdetect.util;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* @author Nakatani Shuyo
*
*/
public class LangProfileTest {
/**
* @throws java.lang.Exception
*/
@BeforeClass
public static void setUpBeforeClass() throws Exception {
}
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDownAfterClass() throws Exception {
}
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception {
}
/**
* @throws java.lang.Exception
*/
@After
public void tearDown() throws Exception {
}
/**
* Test method for {@link com.cybozu.labs.langdetect.util.LangProfile#LangProfile()}.
*/
@Test
public final void testLangProfile() {
LangProfile profile = new LangProfile();
assertEquals(profile.name, null);
}
/**
* Test method for {@link com.cybozu.labs.langdetect.util.LangProfile#LangProfile(java.lang.String)}.
*/
@Test
public final void testLangProfileStringInt() {
LangProfile profile = new LangProfile("en");
assertEquals(profile.name, "en");
}
/**
* Test method for {@link com.cybozu.labs.langdetect.util.LangProfile#add(java.lang.String)}.
*/
@Test
public final void testAdd() {
LangProfile profile = new LangProfile("en");
profile.add("a");
assertEquals((int)profile.freq.get("a"), 1);
profile.add("a");
assertEquals((int)profile.freq.get("a"), 2);
profile.omitLessFreq();
}
/**
* Illegal call test for {@link LangProfile#add(String)}
*/
@Test
public final void testAddIllegally1() {
LangProfile profile = new LangProfile(); // Illegal ( available for only JSONIC ) but ignore
profile.add("a"); // ignore
assertEquals(profile.freq.get("a"), null); // ignored
}
/**
* Illegal call test for {@link LangProfile#add(String)}
*/
@Test
public final void testAddIllegally2() {
LangProfile profile = new LangProfile("en");
profile.add("a");
profile.add(""); // Illegal (string's length of parameter must be between 1 and 3) but ignore
profile.add("abcd"); // as well
assertEquals((int)profile.freq.get("a"), 1);
assertEquals(profile.freq.get(""), null); // ignored
assertEquals(profile.freq.get("abcd"), null); // ignored
}
/**
* Test method for {@link com.cybozu.labs.langdetect.util.LangProfile#omitLessFreq()}.
*/
@Test
public final void testOmitLessFreq() {
LangProfile profile = new LangProfile("en");
String[] grams = "a b c \u3042 \u3044 \u3046 \u3048 \u304a \u304b \u304c \u304d \u304e \u304f".split(" ");
for (int i=0;i<5;++i) for (String g : grams) {
profile.add(g);
}
profile.add("\u3050");
assertEquals((int)profile.freq.get("a"), 5);
assertEquals((int)profile.freq.get("\u3042"), 5);
assertEquals((int)profile.freq.get("\u3050"), 1);
profile.omitLessFreq();
assertEquals(profile.freq.get("a"), null); // omitted
assertEquals((int)profile.freq.get("\u3042"), 5);
assertEquals(profile.freq.get("\u3050"), null); // omitted
}
    /**
     * Illegal call test for {@link com.cybozu.labs.langdetect.util.LangProfile#omitLessFreq()}.
     */
    @Test
    public final void testOmitLessFreqIllegally() {
        LangProfile profile = new LangProfile();
        // Pruning a profile with no name (no-arg constructor) must be a no-op, not a failure.
        profile.omitLessFreq(); // ignore
    }
}
| apache-2.0 |
gaohoward/activemq-artemis | artemis-server/src/main/java/org/apache/activemq/artemis/core/server/ServiceRegistry.java | 5155 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.server;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.activemq.artemis.api.core.BaseInterceptor;
import org.apache.activemq.artemis.api.core.Pair;
import org.apache.activemq.artemis.core.config.ConnectorServiceConfiguration;
import org.apache.activemq.artemis.core.config.TransformerConfiguration;
import org.apache.activemq.artemis.core.server.transformer.Transformer;
import org.apache.activemq.artemis.spi.core.remoting.AcceptorFactory;
/**
* A holder for common services leveraged by the broker.
*/
public interface ServiceRegistry {

   /**
    * @return the general-purpose executor service shared by broker components
    */
   ExecutorService getExecutorService();

   void setExecutorService(ExecutorService executorService);

   /**
    * @return the executor service dedicated to blocking I/O work
    */
   ExecutorService getIOExecutorService();

   void setIOExecutorService(ExecutorService ioExecutorService);

   /**
    * @return the scheduled executor service used for periodic and delayed tasks
    */
   ScheduledExecutorService getScheduledExecutorService();

   void setScheduledExecutorService(ScheduledExecutorService scheduledExecutorService);

   /**
    * Register a connector service factory together with the configuration it serves.
    */
   void addConnectorService(ConnectorServiceFactory connectorServiceFactory,
                            ConnectorServiceConfiguration configuration);

   /**
    * Remove the connector service previously registered for the given configuration.
    */
   void removeConnectorService(ConnectorServiceConfiguration configuration);

   /**
    * Get a collection of paired org.apache.activemq.artemis.core.server.ConnectorServiceFactory and
    * org.apache.activemq.artemis.core.config.ConnectorServiceConfiguration instances.
    *
    * @param configs the connector service configurations to resolve factories for
    * @return the factory/configuration pairs for the given configurations
    */
   Collection<Pair<ConnectorServiceFactory, ConnectorServiceConfiguration>> getConnectorServices(List<ConnectorServiceConfiguration> configs);

   /**
    * Get connector service for a given configuration.
    *
    * @param configuration The connector service configuration.
    * @return an instance of the connector service factory.
    */
   ConnectorServiceFactory getConnectorService(ConnectorServiceConfiguration configuration);

   /**
    * Register an interceptor applied to incoming wire traffic.
    */
   void addIncomingInterceptor(BaseInterceptor interceptor);

   /**
    * Get a list of org.apache.activemq.artemis.api.core.BaseInterceptor instances
    *
    * @param classNames fully qualified class names of additional interceptors to instantiate
    * @return the registered incoming interceptors plus those named by {@code classNames}
    */
   List<BaseInterceptor> getIncomingInterceptors(List<String> classNames);

   /**
    * Register an interceptor applied to outgoing wire traffic.
    */
   void addOutgoingInterceptor(BaseInterceptor interceptor);

   /**
    * Get a list of org.apache.activemq.artemis.api.core.BaseInterceptor instances
    *
    * @param classNames fully qualified class names of additional interceptors to instantiate
    * @return the registered outgoing interceptors plus those named by {@code classNames}
    */
   List<BaseInterceptor> getOutgoingInterceptors(List<String> classNames);

   /**
    * Get an instance of org.apache.activemq.artemis.core.server.transformer.Transformer for a divert
    *
    * @param name the name of divert for which the transformer will be used
    * @param transformerConfiguration the transformer configuration
    * @return the transformer registered under {@code name}, or one built from the configuration
    */
   Transformer getDivertTransformer(String name, TransformerConfiguration transformerConfiguration);

   void addDivertTransformer(String name, Transformer transformer);

   /**
    * Get an instance of org.apache.activemq.artemis.core.server.transformer.Transformer for a bridge
    *
    * @param name the name of bridge for which the transformer will be used
    * @param transformerConfiguration the transformer configuration
    * @return the transformer registered under {@code name}, or one built from the configuration
    */
   Transformer getBridgeTransformer(String name, TransformerConfiguration transformerConfiguration);

   void addBridgeTransformer(String name, Transformer transformer);

   /**
    * Get an instance of org.apache.activemq.artemis.core.server.transformer.Transformer for federation
    *
    * @param name the name of bridge for which the transformer will be used
    * @param transformerConfiguration the transformer configuration
    * @return the transformer registered under {@code name}, or one built from the configuration
    */
   Transformer getFederationTransformer(String name, TransformerConfiguration transformerConfiguration);

   void addFederationTransformer(String name, Transformer transformer);

   /**
    * Get an instance of org.apache.activemq.artemis.spi.core.remoting.AcceptorFactory
    *
    * @param name the name of acceptor for which the factory will be used
    * @param className the fully qualified name of the factory implementation (can be null)
    * @return the factory registered under {@code name}, or one instantiated from {@code className}
    */
   AcceptorFactory getAcceptorFactory(String name, String className);

   void addAcceptorFactory(String name, AcceptorFactory acceptorFactory);
}
| apache-2.0 |
howepeng/isis | core/metamodel/src/test/java/org/apache/isis/core/metamodel/facets/object/disabled/ObjectDisabledMethodFacetFactoryTest.java | 3611 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.facets.object.disabled;
import java.lang.reflect.Method;
import org.jmock.Expectations;
import org.apache.isis.applib.Identifier;
import org.apache.isis.applib.Identifier.Type;
import org.apache.isis.applib.services.i18n.TranslationService;
import org.apache.isis.core.metamodel.facetapi.Facet;
import org.apache.isis.core.metamodel.facets.FacetFactory.ProcessClassContext;
import org.apache.isis.core.metamodel.facets.AbstractFacetFactoryTest;
import org.apache.isis.core.metamodel.facets.object.disabled.method.DisabledObjectFacetViaMethod;
import org.apache.isis.core.metamodel.facets.object.disabled.method.DisabledObjectFacetViaMethodFactory;
import org.apache.isis.core.metamodel.runtimecontext.ServicesInjector;
import org.apache.isis.core.unittestsupport.jmocking.JUnitRuleMockery2;
public class ObjectDisabledMethodFacetFactoryTest extends AbstractFacetFactoryTest {

    // JMock context used programmatically. NOTE(review): JUnitRuleMockery2 is normally
    // applied as a JUnit 4 @Rule; this test follows the JUnit 3-style setUp/tearDown
    // lifecycle inherited from AbstractFacetFactoryTest instead — confirm mock
    // verification still happens as intended.
    private JUnitRuleMockery2 context = JUnitRuleMockery2.createFor(JUnitRuleMockery2.Mode.INTERFACES_AND_CLASSES);

    private ServicesInjector mockServicesInjector;
    private TranslationService mockTranslationService;

    private DisabledObjectFacetViaMethodFactory facetFactory;

    public void setUp() throws Exception {
        super.setUp();

        // Stub the services injector so the factory under test can look up a
        // TranslationService without a full runtime context.
        mockServicesInjector = context.mock(ServicesInjector.class);
        mockTranslationService = context.mock(TranslationService.class);
        context.checking(new Expectations() {{
            allowing(mockServicesInjector).lookupService(TranslationService.class);
            will(returnValue(mockTranslationService));
        }});

        facetFactory = new DisabledObjectFacetViaMethodFactory();
        facetFactory.setServicesInjector(mockServicesInjector);
    }

    @Override
    protected void tearDown() throws Exception {
        facetFactory = null;
        super.tearDown();
    }

    /**
     * A class declaring a {@code disabled(Identifier.Type)} support method should have
     * that method turned into a DisabledObjectFacet, and the method itself removed
     * from further facet processing.
     */
    public void testDisabledMethodPickedUpAndMethodRemoved() {
        final Class<?>[] params = new Class<?>[1];
        params[0] = Identifier.Type.class;

        class Customer {
            @SuppressWarnings("unused")
            public String disabled(final Type type) {
                return null;
            }
        }
        final Method disabledMethod = findMethod(Customer.class, "disabled", params);
        assertNotNull(disabledMethod);

        // Run the factory over the class and verify the facet was installed...
        final ProcessClassContext processClassContext = new ProcessClassContext(Customer.class, methodRemover, facetHolder);
        facetFactory.process(processClassContext);

        final Facet facet = facetHolder.getFacet(DisabledObjectFacet.class);
        assertNotNull(facet);
        assertTrue(facet instanceof DisabledObjectFacetViaMethod);

        // ...and that the support method was consumed (removed) by the factory.
        assertTrue(methodRemover.getRemovedMethodMethodCalls().contains(disabledMethod));
    }
}
| apache-2.0 |
salyh/elasticsearch | src/test/java/org/elasticsearch/tribe/TribeTests.java | 15737 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.tribe;
import com.google.common.base.Predicate;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.TestCluster;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
/**
* Note, when talking to tribe client, no need to set the local flag on master read operations, it
* does it by default.
*/
public class TribeTests extends ElasticsearchIntegrationTest {

    // Second cluster ("t2"); the inherited cluster() is "t1".
    private static TestCluster cluster2;

    private Node tribeNode;
    private Client tribeClient;

    @BeforeClass
    public static void setupSecondCluster() throws Exception {
        ElasticsearchIntegrationTest.beforeClass();
        // create another cluster
        cluster2 = new TestCluster(randomLong(), 2, 2, Strings.randomBase64UUID(getRandom()), 0, false);
        cluster2.beforeTest(getRandom(), 0.1);
        cluster2.ensureAtLeastNumDataNodes(2);
    }

    @AfterClass
    public static void tearDownSecondCluster() {
        if (cluster2 != null) {
            try {
                cluster2.close();
            } finally {
                cluster2 = null;
            }
        }
    }

    @After
    public void tearDownTribeNode() {
        if (cluster2 != null) {
            try {
                cluster2.wipe();
            } finally {
                cluster2.afterTest();
            }
        }
        if (tribeNode != null) {
            tribeNode.close();
            tribeNode = null;
        }
    }

    /**
     * Starts a tribe node spanning both clusters, with read/write blocks disabled
     * unless the supplied settings override them.
     */
    private void setupTribeNode(Settings settings) {
        Settings merged = ImmutableSettings.builder()
                .put("tribe.t1.cluster.name", cluster().getClusterName())
                .put("tribe.t2.cluster.name", cluster2.getClusterName())
                .put("tribe.blocks.write", false)
                .put("tribe.blocks.read", false)
                .put(settings)
                .build();

        tribeNode = NodeBuilder.nodeBuilder()
                .settings(merged)
                .node();
        tribeClient = tribeNode.client();
    }

    @Test
    public void testGlobalReadWriteBlocks() throws Exception {
        logger.info("create 2 indices, test1 on t1, and test2 on t2");
        cluster().client().admin().indices().prepareCreate("test1").get();
        cluster2.client().admin().indices().prepareCreate("test2").get();

        setupTribeNode(ImmutableSettings.builder()
                .put("tribe.blocks.write", true)
                .put("tribe.blocks.metadata", true)
                .build());

        logger.info("wait till tribe has the same nodes as the 2 clusters");
        awaitSameNodeCounts();
        // wait till the tribe node connected to the cluster, by checking if the index exists in the cluster state
        logger.info("wait till test1 and test2 exists in the tribe node state");
        awaitIndicesInClusterState("test1", "test2");

        // Writes and metadata operations must be rejected by the global tribe blocks.
        try {
            tribeClient.prepareIndex("test1", "type1", "1").setSource("field1", "value1").execute().actionGet();
            fail("cluster block should be thrown");
        } catch (ClusterBlockException e) {
            // all is well!
        }
        try {
            tribeClient.admin().indices().prepareOptimize("test1").execute().actionGet();
            fail("cluster block should be thrown");
        } catch (ClusterBlockException e) {
            // all is well!
        }
        try {
            tribeClient.admin().indices().prepareOptimize("test2").execute().actionGet();
            fail("cluster block should be thrown");
        } catch (ClusterBlockException e) {
            // all is well!
        }
    }

    @Test
    public void testIndexWriteBlocks() throws Exception {
        logger.info("create 2 indices, test1 on t1, and test2 on t2");
        cluster().client().admin().indices().prepareCreate("test1").get();
        cluster().client().admin().indices().prepareCreate("block_test1").get();
        cluster2.client().admin().indices().prepareCreate("test2").get();
        cluster2.client().admin().indices().prepareCreate("block_test2").get();

        // Only indices matching "block_*" are write-blocked.
        setupTribeNode(ImmutableSettings.builder()
                .put("tribe.blocks.write.indices", "block_*")
                .build());
        logger.info("wait till tribe has the same nodes as the 2 clusters");
        awaitSameNodeCounts();
        // wait till the tribe node connected to the cluster, by checking if the index exists in the cluster state
        logger.info("wait till test1 and test2 exists in the tribe node state");
        awaitIndicesInClusterState("test1", "test2", "block_test1", "block_test2");

        tribeClient.prepareIndex("test1", "type1", "1").setSource("field1", "value1").get();
        try {
            tribeClient.prepareIndex("block_test1", "type1", "1").setSource("field1", "value1").get();
            fail("cluster block should be thrown");
        } catch (ClusterBlockException e) {
            // all is well!
        }
        tribeClient.prepareIndex("test2", "type1", "1").setSource("field1", "value1").get();
        try {
            tribeClient.prepareIndex("block_test2", "type1", "1").setSource("field1", "value1").get();
            fail("cluster block should be thrown");
        } catch (ClusterBlockException e) {
            // all is well!
        }
    }

    @Test
    public void testOnConflictDrop() throws Exception {
        logger.info("create 2 indices, test1 on t1, and test2 on t2");
        assertAcked(cluster().client().admin().indices().prepareCreate("conflict"));
        assertAcked(cluster2.client().admin().indices().prepareCreate("conflict"));
        assertAcked(cluster().client().admin().indices().prepareCreate("test1"));
        assertAcked(cluster2.client().admin().indices().prepareCreate("test2"));

        // "drop": an index name present in both clusters is excluded from the tribe state.
        setupTribeNode(ImmutableSettings.builder()
                .put("tribe.on_conflict", "drop")
                .build());

        logger.info("wait till tribe has the same nodes as the 2 clusters");
        awaitSameNodeCounts();

        // wait till the tribe node connected to the cluster, by checking if the index exists in the cluster state
        logger.info("wait till test1 and test2 exists in the tribe node state");
        awaitIndicesInClusterState("test1", "test2");

        assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test1").getSettings().get(TribeService.TRIBE_NAME), equalTo("t1"));
        assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test2").getSettings().get(TribeService.TRIBE_NAME), equalTo("t2"));
        assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().hasIndex("conflict"), equalTo(false));
    }

    @Test
    public void testOnConflictPrefer() throws Exception {
        testOnConflictPrefer(randomBoolean() ? "t1" : "t2");
    }

    /**
     * "prefer_tX": on an index-name conflict, the copy from the preferred tribe wins.
     */
    private void testOnConflictPrefer(String tribe) throws Exception {
        logger.info("testing preference for tribe {}", tribe);

        logger.info("create 2 indices, test1 on t1, and test2 on t2");
        cluster().client().admin().indices().prepareCreate("conflict").get();
        cluster2.client().admin().indices().prepareCreate("conflict").get();
        cluster().client().admin().indices().prepareCreate("test1").get();
        cluster2.client().admin().indices().prepareCreate("test2").get();

        setupTribeNode(ImmutableSettings.builder()
                .put("tribe.on_conflict", "prefer_" + tribe)
                .build());
        logger.info("wait till tribe has the same nodes as the 2 clusters");
        awaitSameNodeCounts();
        // wait till the tribe node connected to the cluster, by checking if the index exists in the cluster state
        logger.info("wait till test1 and test2 exists in the tribe node state");
        awaitIndicesInClusterState("test1", "test2", "conflict");

        assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test1").getSettings().get(TribeService.TRIBE_NAME), equalTo("t1"));
        assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test2").getSettings().get(TribeService.TRIBE_NAME), equalTo("t2"));
        assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("conflict").getSettings().get(TribeService.TRIBE_NAME), equalTo(tribe));
    }

    @Test
    public void testTribeOnOneCluster() throws Exception {
        setupTribeNode(ImmutableSettings.EMPTY);
        logger.info("create 2 indices, test1 on t1, and test2 on t2");
        cluster().client().admin().indices().prepareCreate("test1").get();
        cluster2.client().admin().indices().prepareCreate("test2").get();


        // wait till the tribe node connected to the cluster, by checking if the index exists in the cluster state
        logger.info("wait till test1 and test2 exists in the tribe node state");
        awaitIndicesInClusterState("test1", "test2");

        logger.info("wait till tribe has the same nodes as the 2 clusters");
        awaitSameNodeCounts();

        assertThat(tribeClient.admin().cluster().prepareHealth().setWaitForGreenStatus().get().getStatus(), equalTo(ClusterHealthStatus.GREEN));

        logger.info("create 2 docs through the tribe node");
        tribeClient.prepareIndex("test1", "type1", "1").setSource("field1", "value1").get();
        tribeClient.prepareIndex("test2", "type1", "1").setSource("field1", "value1").get();
        tribeClient.admin().indices().prepareRefresh().get();

        logger.info("verify they are there");
        assertHitCount(tribeClient.prepareCount().get(), 2l);
        assertHitCount(tribeClient.prepareSearch().get(), 2l);
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                ClusterState tribeState = tribeNode.client().admin().cluster().prepareState().get().getState();
                // Only "type1" documents exist at this point, so wait for the "type1"
                // mapping on BOTH indices. (Bug fix: this previously waited for a
                // "type2" mapping on test2, which could not exist yet, so the wait
                // always timed out silently.)
                return tribeState.getMetaData().index("test1").mapping("type1") != null &&
                        tribeState.getMetaData().index("test2").mapping("type1") != null;
            }
        });

        logger.info("write to another type");
        tribeClient.prepareIndex("test1", "type2", "1").setSource("field1", "value1").get();
        tribeClient.prepareIndex("test2", "type2", "1").setSource("field1", "value1").get();
        tribeClient.admin().indices().prepareRefresh().get();


        logger.info("verify they are there");
        assertHitCount(tribeClient.prepareCount().get(), 4l);
        assertHitCount(tribeClient.prepareSearch().get(), 4l);
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                ClusterState tribeState = tribeNode.client().admin().cluster().prepareState().get().getState();
                return tribeState.getMetaData().index("test1").mapping("type1") != null && tribeState.getMetaData().index("test1").mapping("type2") != null &&
                        tribeState.getMetaData().index("test2").mapping("type1") != null && tribeState.getMetaData().index("test2").mapping("type2") != null;
            }
        });

        logger.info("make sure master level write operations fail... (we don't really have a master)");
        try {
            tribeClient.admin().indices().prepareCreate("tribe_index").setMasterNodeTimeout("10ms").get();
            fail();
        } catch (MasterNotDiscoveredException e) {
            // all is well!
        }

        logger.info("delete an index, and make sure its reflected");
        cluster2.client().admin().indices().prepareDelete("test2").get();
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                ClusterState tribeState = tribeNode.client().admin().cluster().prepareState().get().getState();
                return tribeState.getMetaData().hasIndex("test1") && !tribeState.getMetaData().hasIndex("test2") &&
                        tribeState.getRoutingTable().hasIndex("test1") && !tribeState.getRoutingTable().hasIndex("test2");
            }
        });

        logger.info("stop a node, make sure its reflected");
        cluster2.stopRandomDataNode();
        awaitSameNodeCounts();
    }

    /**
     * Waits until every named index appears in both the metadata and the routing
     * table of the tribe node's cluster state.
     */
    private void awaitIndicesInClusterState(final String... indices) throws Exception {
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                ClusterState tribeState = tribeNode.client().admin().cluster().prepareState().get().getState();
                for (String index : indices) {
                    if (!tribeState.getMetaData().hasIndex(index)) {
                        return false;
                    }
                    if (!tribeState.getRoutingTable().hasIndex(index)) {
                        return false;
                    }
                }
                return true;
            }
        });
    }

    /**
     * Waits until the tribe node sees exactly as many data nodes per tribe as the
     * corresponding cluster reports.
     */
    private void awaitSameNodeCounts() throws Exception {
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                DiscoveryNodes tribeNodes = tribeNode.client().admin().cluster().prepareState().get().getState().getNodes();
                return countDataNodesForTribe("t1", tribeNodes) == cluster().client().admin().cluster().prepareState().get().getState().getNodes().dataNodes().size()
                        && countDataNodesForTribe("t2", tribeNodes) == cluster2.client().admin().cluster().prepareState().get().getState().getNodes().dataNodes().size();
            }
        });
    }

    /**
     * Counts the data nodes carrying the given tribe-name attribute.
     */
    private int countDataNodesForTribe(String tribeName, DiscoveryNodes nodes) {
        int count = 0;
        for (DiscoveryNode node : nodes) {
            if (!node.dataNode()) {
                continue;
            }
            if (tribeName.equals(node.getAttributes().get(TribeService.TRIBE_NAME))) {
                count++;
            }
        }
        return count;
    }
}
| apache-2.0 |
kchitalia/android-travis | libs/SmartSync/src/com/salesforce/androidsdk/smartsync/util/SyncDownTarget.java | 6202 | /*
* Copyright (c) 2014, salesforce.com, inc.
* All rights reserved.
* Redistribution and use of this software in source and binary forms, with or
* without modification, are permitted provided that the following conditions
* are met:
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of salesforce.com, inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission of salesforce.com, inc.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.androidsdk.smartsync.util;
import android.util.Log;
import com.salesforce.androidsdk.smartsync.manager.SyncManager;
import com.salesforce.androidsdk.util.JSONObjectHelper;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.lang.reflect.Constructor;
/**
* Target for sync down:
* - what records to download from server
* - how to download those records
*/
public abstract class SyncDownTarget extends SyncTarget {

    // Constants
    public static final String QUERY_TYPE = "type";

    // Fields
    protected QueryType queryType;
    protected int totalSize; // set during a fetch

    /**
     * Build SyncDownTarget from json.
     * Dispatches on the QUERY_TYPE field; custom targets are instantiated
     * reflectively from the class named by ANDROID_IMPL, which must expose a
     * constructor taking a JSONObject. Reflection failures are rethrown as
     * RuntimeException.
     * @param target as json
     * @return the concrete target, or null if {@code target} is null
     * @throws JSONException
     */
    @SuppressWarnings("unchecked")
    public static SyncDownTarget fromJSON(JSONObject target) throws JSONException {
        if (target == null)
            return null;

        QueryType queryType = QueryType.valueOf(target.getString(QUERY_TYPE));
        switch (queryType) {
            case mru:  return new MruSyncDownTarget(target);
            case sosl: return new SoslSyncDownTarget(target);
            case soql: return new SoqlSyncDownTarget(target);
            case custom:
            default:
                try {
                    Class<? extends SyncDownTarget> implClass = (Class<? extends SyncDownTarget>) Class.forName(target.getString(ANDROID_IMPL));
                    Constructor<? extends SyncDownTarget> constructor = implClass.getConstructor(JSONObject.class);
                    return constructor.newInstance(target);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
        }
    }

    /**
     * Construct SyncDownTarget
     */
    public SyncDownTarget() {
        super();
    }

    /**
     * Construct SyncDownTarget from json
     * @param target
     * @throws JSONException
     */
    public SyncDownTarget(JSONObject target) throws JSONException {
        super(target);
        queryType = QueryType.valueOf(target.getString(QUERY_TYPE));
    }

    /**
     * @return json representation of target
     * @throws JSONException
     */
    public JSONObject asJSON() throws JSONException {
        JSONObject target = super.asJSON();
        target.put(QUERY_TYPE, queryType.name());
        return target;
    }

    /**
     * Start fetching records conforming to target
     * If a value for maxTimeStamp greater than 0 is passed in, only records created/modified after maxTimeStamp should be returned
     * @param syncManager
     * @param maxTimeStamp
     * @throws IOException, JSONException
     */
    public abstract JSONArray startFetch(SyncManager syncManager, long maxTimeStamp) throws IOException, JSONException;

    /**
     * Continue fetching records conforming to target if any
     * @param syncManager
     * @return null if there are no more records to fetch
     * @throws IOException, JSONException
     */
    public abstract JSONArray continueFetch(SyncManager syncManager) throws IOException, JSONException;

    /**
     * @return number of records expected to be fetched - is set when startFetch() is called
     */
    public int getTotalSize() {
        return totalSize;
    }

    /**
     * @return QueryType of this target
     */
    public QueryType getQueryType() {
        return queryType;
    }

    /**
     * Gets the latest modification timestamp from the array of records.
     * Returns -1 if any record is missing the modification-date field or its
     * value cannot be parsed (a parse failure is logged, not rethrown).
     * NOTE(review): Constants.TIMESTAMP_FORMAT appears to be a shared date
     * format instance; if it is a SimpleDateFormat it is not thread-safe —
     * confirm callers never invoke this concurrently.
     * @param records
     * @return latest modification time stamp, or -1 when it cannot be determined
     * @throws JSONException
     */
    public long getLatestModificationTimeStamp(JSONArray records) throws JSONException {
        long maxTimeStamp = -1;
        for (int i = 0; i < records.length(); i++) {
            String timeStampStr = JSONObjectHelper.optString(records.getJSONObject(i), getModificationDateFieldName());
            if (timeStampStr == null) {
                maxTimeStamp = -1;
                break; // field not present
            }
            try {
                long timeStamp = Constants.TIMESTAMP_FORMAT.parse(timeStampStr).getTime();
                maxTimeStamp = Math.max(timeStamp, maxTimeStamp);
            } catch (Exception e) {
                Log.w("SyncDownTarget.getLatestModificationTimeStamp", "Could not parse modification date field " + getModificationDateFieldName(), e);
                maxTimeStamp = -1;
                break;
            }
        }
        return maxTimeStamp;
    }

    /**
     * Enum for query type
     */
    public enum QueryType {
        mru,
        sosl,
        soql,
        custom
    }
}
| apache-2.0 |
camunda/camunda-bpm-platform | engine/src/test/java/org/camunda/bpm/engine/test/bpmn/tasklistener/util/RecorderTaskListener.java | 3683 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.bpmn.tasklistener.util;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.DelegateTask;
import org.camunda.bpm.engine.delegate.TaskListener;
/**
* @author Thorben Lindhauer
*/
public class RecorderTaskListener implements TaskListener, Serializable {

  private static final long serialVersionUID = 1L;

  // Static recorders so tests can inspect events after process execution;
  // reset via clear() between tests.
  private static List<RecorderTaskListener.RecordedTaskEvent> recordedEvents = new ArrayList<>();
  private static LinkedList<String> orderedEvents = new LinkedList<>();
  private static Map<String, Integer> eventCounters = new HashMap<>();

  /**
   * Snapshot of a single observed task event.
   */
  public static class RecordedTaskEvent {

    protected String taskId;
    protected String executionId;
    protected String event;
    protected String activityInstanceId;

    public RecordedTaskEvent(String taskId, String executionId, String event, String activityInstanceId) {
      this.taskId = taskId;
      this.executionId = executionId;
      this.event = event;
      this.activityInstanceId = activityInstanceId;
    }

    public String getExecutionId() {
      return executionId;
    }

    public String getTaskId() {
      return taskId;
    }

    public String getEvent() {
      return event;
    }

    public String getActivityInstanceId() {
      return activityInstanceId;
    }
  }

  /**
   * Records the event in all three static structures: the detailed event list,
   * the ordered event-name sequence, and the per-event-name counters.
   */
  public void notify(DelegateTask task) {
    String eventName = task.getEventName();
    DelegateExecution execution = task.getExecution();

    recordedEvents.add(new RecordedTaskEvent(task.getId(), task.getExecutionId(),
        eventName, execution.getActivityInstanceId()));
    orderedEvents.addLast(eventName);
    eventCounters.merge(eventName, 1, Integer::sum);
  }

  /** Resets all recorded state; call between tests. */
  public static void clear() {
    recordedEvents.clear();
    orderedEvents.clear();
    eventCounters.clear();
  }

  public static List<RecordedTaskEvent> getRecordedEvents() {
    return recordedEvents;
  }

  public static LinkedList<String> getOrderedEvents() {
    return orderedEvents;
  }

  public static Map<String, Integer> getEventCounters() {
    return eventCounters;
  }

  /** @return the sum of all per-event counters */
  public static int getTotalEventCount() {
    int sum = 0;
    for (Integer perEventCount : eventCounters.values()) {
      if (perEventCount != null) {
        sum += perEventCount;
      }
    }
    return sum;
  }

  /** @return how often the given event was observed (0 if never) */
  public static int getEventCount(String eventName) {
    Integer counter = eventCounters.get(eventName);
    if (counter == null) {
      return 0;
    }
    return counter;
  }
}
| apache-2.0 |
nmldiegues/stibt | infinispan/client/hotrod-client/src/test/java/org/infinispan/client/hotrod/DefaultExpirationTest.java | 4561 | /*
* JBoss, Home of Professional Open Source
* Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.infinispan.client.hotrod;
import org.testng.annotations.Test;
import org.testng.AssertJUnit;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.infinispan.Cache;
import org.infinispan.client.hotrod.test.HotRodClientTestingUtil;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.container.entries.InternalCacheEntry;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.marshall.Marshaller;
import org.infinispan.marshall.jboss.JBossMarshaller;
import org.infinispan.server.core.CacheValue;
import org.infinispan.server.hotrod.HotRodServer;
import org.infinispan.test.SingleCacheManagerTest;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.util.ByteArrayKey;
import org.testng.annotations.AfterClass;
import static org.testng.AssertJUnit.*;
/**
* @author Tristan Tarrant
* @since 5.2
*/
// Verifies that entries written through the Hot Rod client pick up the
// cache's default expiration settings (lifespan / max-idle) and disappear
// once those elapse.
@Test (testName = "client.hotrod.DefaultExpirationTest", groups = "functional" )
public class DefaultExpirationTest extends SingleCacheManagerTest {
   // Marshaller used to convert String keys/values to the binary form used
   // when inspecting the embedded cache's data container.
   private Marshaller marshaller = new JBossMarshaller();
   private RemoteCache<String, String> remoteCache;
   private RemoteCacheManager remoteCacheManager;
   // Embedded view of the cache that the Hot Rod server exposes remotely.
   private Cache<ByteArrayKey, CacheValue> cache;
   protected HotRodServer hotrodServer;
   @Override
   protected EmbeddedCacheManager createCacheManager() throws Exception {
      ConfigurationBuilder builder = getDefaultStandaloneCacheConfig(false);
      // Default expiration under test: 3s lifespan, 2s max-idle.
      builder.expiration().lifespan(3, TimeUnit.SECONDS).maxIdle(2, TimeUnit.SECONDS);
      cacheManager = TestCacheManagerFactory.createCacheManager(builder);
      cache = cacheManager.getCache();
      //pass the config file to the cache
      // Order matters: the Hot Rod server must be running before the remote
      // cache manager below tries to connect to it.
      hotrodServer = TestHelper.startHotRodServer(cacheManager);
      log.info("Started server on port: " + hotrodServer.getPort());
      remoteCacheManager = getRemoteCacheManager();
      remoteCache = remoteCacheManager.getCache();
      return cacheManager;
   }
   // Builds a client pointed at 127.0.0.1 and the running server's port.
   protected RemoteCacheManager getRemoteCacheManager() {
      Properties config = new Properties();
      config.put("infinispan.client.hotrod.server_list", "127.0.0.1:" + hotrodServer.getPort());
      return new RemoteCacheManager(config);
   }
   // Tears down client then server once all tests in the class have run.
   @AfterClass(alwaysRun = true)
   public void testDestroyRemoteCacheFactory() {
      HotRodClientTestingUtil.killRemoteCacheManager(remoteCacheManager);
      HotRodClientTestingUtil.killServers(hotrodServer);
   }
   @Test
   public void testDefaultExpiration() throws Exception {
      remoteCache.put("Key", "Value");
      // Inspect the entry server-side: it must carry the configured defaults
      // (3000ms lifespan, 2000ms max-idle).
      InternalCacheEntry entry = getInternalCacheEntry(cache, "Key", "Value");
      assertTrue(entry.canExpire());
      assertEquals(3000, entry.getLifespan());
      assertEquals(2000, entry.getMaxIdle());
      // 5s > 3s lifespan, so the entry must have expired by now.
      // NOTE(review): fixed sleep makes this test timing-sensitive.
      Thread.sleep(5000);
      assertFalse(remoteCache.containsKey("Key"));
   }
   // Fetches the raw entry from the embedded data container; when a value is
   // supplied, also checks the stored bytes match its marshalled form.
   private InternalCacheEntry getInternalCacheEntry(Cache<ByteArrayKey, CacheValue> cache, String key, String value) throws Exception {
      InternalCacheEntry entry = cache.getAdvancedCache().getDataContainer().get(toBinaryKey(key), null);
      if (value != null) {
         CacheValue v = (CacheValue) entry.getValue();
         AssertJUnit.assertEquals(toBinaryValue(value), v.data());
      }
      return entry;
   }
   // Marshals a String key to the binary key used in the data container.
   // Assumes this matches how the Hot Rod server stores keys -- TODO confirm.
   private ByteArrayKey toBinaryKey(String key) throws Exception {
      byte[] keyBytes = marshaller.objectToByteBuffer(key, 64);
      return new ByteArrayKey(keyBytes);
   }
   // Marshals a String value the same way as toBinaryKey marshals keys.
   private byte[] toBinaryValue(String value) throws Exception {
      return marshaller.objectToByteBuffer(value, 64);
   }
}
| apache-2.0 |
sdw2330976/apache-tomcat-7.0.57 | target/classes/org/apache/jasper/compiler/TagLibraryInfoImpl.java | 30355 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
import java.util.Vector;
import javax.servlet.ServletContext;
import javax.servlet.jsp.tagext.FunctionInfo;
import javax.servlet.jsp.tagext.PageData;
import javax.servlet.jsp.tagext.TagAttributeInfo;
import javax.servlet.jsp.tagext.TagExtraInfo;
import javax.servlet.jsp.tagext.TagFileInfo;
import javax.servlet.jsp.tagext.TagInfo;
import javax.servlet.jsp.tagext.TagLibraryInfo;
import javax.servlet.jsp.tagext.TagLibraryValidator;
import javax.servlet.jsp.tagext.TagVariableInfo;
import javax.servlet.jsp.tagext.ValidationMessage;
import javax.servlet.jsp.tagext.VariableInfo;
import org.apache.jasper.Constants;
import org.apache.jasper.JasperException;
import org.apache.jasper.JspCompilationContext;
import org.apache.jasper.util.ExceptionUtils;
import org.apache.jasper.xmlparser.ParserUtils;
import org.apache.jasper.xmlparser.TreeNode;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
/**
* Implementation of the TagLibraryInfo class from the JSP spec.
*
* @author Anil K. Vijendran
* @author Mandar Raje
* @author Pierre Delisle
* @author Kin-man Chung
* @author Jan Luehe
*/
class TagLibraryInfoImpl extends TagLibraryInfo implements TagConstants {
// Logger
private final Log log = LogFactory.getLog(TagLibraryInfoImpl.class);
private JspCompilationContext ctxt;
private PageInfo pi;
private ErrorDispatcher err;
private ParserController parserController;
private final void print(String name, String value, PrintWriter w) {
if (value != null) {
w.print(name + " = {\n\t");
w.print(value);
w.print("\n}\n");
}
}
    /**
     * Debug representation: dumps the scalar TLD attributes via print(), then
     * one line per tag, tag file and function of this library.
     */
    @Override
    public String toString() {
        StringWriter sw = new StringWriter();
        PrintWriter out = new PrintWriter(sw);
        print("tlibversion", tlibversion, out);
        print("jspversion", jspversion, out);
        print("shortname", shortname, out);
        print("urn", urn, out);
        print("info", info, out);
        print("uri", uri, out);
        // String concatenation keeps this null-safe ("null" is printed).
        print("tagLibraryValidator", "" + tagLibraryValidator, out);
        for (int i = 0; i < tags.length; i++)
            out.println(tags[i].toString());
        for (int i = 0; i < tagFiles.length; i++)
            out.println(tagFiles[i].toString());
        for (int i = 0; i < functions.length; i++)
            out.println(functions[i].toString());
        return sw.toString();
    }
// XXX FIXME
// resolveRelativeUri and/or getResourceAsStream don't seem to properly
// handle relative paths when dealing when home and getDocBase are set
// the following is a workaround until these problems are resolved.
    /**
     * Opens a stream for the given URI, working around relative-path issues
     * (see the XXX FIXME note above): "file:" URIs are read directly from the
     * filesystem; otherwise the URI is resolved against the real path of the
     * compilation context, falling back to the context resource lookup.
     *
     * @param uri either a "file:" URI or a context-relative resource path
     * @return an open stream; callers are responsible for closing it
     * @throws FileNotFoundException if a "file:" URI does not exist
     */
    private InputStream getResourceAsStream(String uri)
            throws FileNotFoundException {
        // Is uri absolute?
        if (uri.startsWith("file:")) {
            return new FileInputStream(new File(uri.substring(5)));
        } else {
            try {
                // see if file exists on the filesystem
                String real = ctxt.getRealPath(uri);
                if (real == null) {
                    // No filesystem mapping (e.g. unexpanded WAR): fall back
                    // to the servlet-context resource lookup.
                    return ctxt.getResourceAsStream(uri);
                } else {
                    return new FileInputStream(real);
                }
            } catch (FileNotFoundException ex) {
                // if file not found on filesystem, get the resource through
                // the context
                return ctxt.getResourceAsStream(uri);
            }
        }
    }
/**
* Constructor.
*/
public TagLibraryInfoImpl(JspCompilationContext ctxt, ParserController pc,
PageInfo pi, String prefix, String uriIn, TldLocation location,
ErrorDispatcher err, Mark mark)
throws JasperException {
super(prefix, uriIn);
this.ctxt = ctxt;
this.parserController = pc;
this.pi = pi;
this.err = err;
InputStream in = null;
if (location == null) {
// The URI points to the TLD itself or to a JAR file in which the
// TLD is stored
location = generateTLDLocation(uri, ctxt);
}
String tldName = location.getName();
JarResource jarResource = location.getJarResource();
try {
if (jarResource == null) {
// Location points directly to TLD file
try {
in = getResourceAsStream(tldName);
if (in == null) {
throw new FileNotFoundException(tldName);
}
} catch (FileNotFoundException ex) {
err.jspError(mark, "jsp.error.file.not.found", tldName);
}
parseTLD(tldName, in, null);
// Add TLD to dependency list
PageInfo pageInfo = ctxt.createCompiler().getPageInfo();
if (pageInfo != null) {
pageInfo.addDependant(tldName,
ctxt.getLastModified(tldName));
}
} else {
// Tag library is packaged in JAR file
try {
in = jarResource.getEntry(tldName).openStream();
parseTLD(jarResource.getUrl(), in, jarResource);
} catch (Exception ex) {
err.jspError(mark, "jsp.error.tld.unable_to_read", jarResource.getUrl(),
tldName, ex.toString());
}
}
} finally {
if (in != null) {
try {
in.close();
} catch (Throwable t) {
ExceptionUtils.handleThrowable(t);
}
}
}
}
@Override
public TagLibraryInfo[] getTagLibraryInfos() {
Collection<TagLibraryInfo> coll = pi.getTaglibs();
return coll.toArray(new TagLibraryInfo[0]);
}
/*
* @param ctxt The JSP compilation context @param uri The TLD's uri @param
* in The TLD's input stream @param jarFileUrl The JAR file containing the
* TLD, or null if the tag library is not packaged in a JAR
*/
private void parseTLD(String uri, InputStream in, JarResource jarResource)
throws JasperException {
Vector<TagInfo> tagVector = new Vector<TagInfo>();
Vector<TagFileInfo> tagFileVector = new Vector<TagFileInfo>();
Hashtable<String, FunctionInfo> functionTable = new Hashtable<String, FunctionInfo>();
ServletContext servletContext = ctxt.getServletContext();
boolean validate = Boolean.parseBoolean(servletContext.getInitParameter(
Constants.XML_VALIDATION_TLD_INIT_PARAM));
String blockExternalString = servletContext.getInitParameter(
Constants.XML_BLOCK_EXTERNAL_INIT_PARAM);
boolean blockExternal;
if (blockExternalString == null) {
blockExternal = true;
} else {
blockExternal = Boolean.parseBoolean(blockExternalString);
}
// Create an iterator over the child elements of our <taglib> element
ParserUtils pu = new ParserUtils(validate, blockExternal);
TreeNode tld = pu.parseXMLDocument(uri, in);
// Check to see if the <taglib> root element contains a 'version'
// attribute, which was added in JSP 2.0 to replace the <jsp-version>
// subelement
this.jspversion = tld.findAttribute("version");
// Process each child element of our <taglib> element
Iterator<TreeNode> list = tld.findChildren();
while (list.hasNext()) {
TreeNode element = list.next();
String tname = element.getName();
if ("tlibversion".equals(tname) // JSP 1.1
|| "tlib-version".equals(tname)) { // JSP 1.2
this.tlibversion = element.getBody();
} else if ("jspversion".equals(tname)
|| "jsp-version".equals(tname)) {
this.jspversion = element.getBody();
} else if ("shortname".equals(tname) || "short-name".equals(tname))
this.shortname = element.getBody();
else if ("uri".equals(tname))
this.urn = element.getBody();
else if ("info".equals(tname) || "description".equals(tname))
this.info = element.getBody();
else if ("validator".equals(tname))
this.tagLibraryValidator = createValidator(element);
else if ("tag".equals(tname))
tagVector.addElement(createTagInfo(element, jspversion));
else if ("tag-file".equals(tname)) {
TagFileInfo tagFileInfo = createTagFileInfo(element,
jarResource);
tagFileVector.addElement(tagFileInfo);
} else if ("function".equals(tname)) { // JSP2.0
FunctionInfo funcInfo = createFunctionInfo(element);
String funcName = funcInfo.getName();
if (functionTable.containsKey(funcName)) {
err.jspError("jsp.error.tld.fn.duplicate.name", funcName,
uri);
}
functionTable.put(funcName, funcInfo);
} else if ("display-name".equals(tname) ||
"small-icon".equals(tname) || "large-icon".equals(tname)
|| "listener".equals(tname)) {
// Ignored elements
} else if ("taglib-extension".equals(tname)) {
// Recognized but ignored
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.taglib", tname));
}
}
}
if (tlibversion == null) {
err.jspError("jsp.error.tld.mandatory.element.missing",
"tlib-version", uri);
}
if (jspversion == null) {
err.jspError("jsp.error.tld.mandatory.element.missing",
"jsp-version", uri);
}
this.tags = new TagInfo[tagVector.size()];
tagVector.copyInto(this.tags);
this.tagFiles = new TagFileInfo[tagFileVector.size()];
tagFileVector.copyInto(this.tagFiles);
this.functions = new FunctionInfo[functionTable.size()];
int i = 0;
Enumeration<FunctionInfo> enumeration = functionTable.elements();
while (enumeration.hasMoreElements()) {
this.functions[i++] = enumeration.nextElement();
}
}
/*
* @param uri The uri of the TLD @param ctxt The compilation context
*
* @return String array whose first element denotes the path to the TLD. If
* the path to the TLD points to a jar file, then the second element denotes
* the name of the TLD entry in the jar file, which is hardcoded to
* META-INF/taglib.tld.
*/
    private TldLocation generateTLDLocation(String uri, JspCompilationContext ctxt)
            throws JasperException {
        int uriType = TldLocationsCache.uriType(uri);
        if (uriType == TldLocationsCache.ABS_URI) {
            // Absolute URIs must have been resolved via the taglib map
            // already; reaching here is a translation error.
            err.jspError("jsp.error.taglibDirective.absUriCannotBeResolved",
                    uri);
        } else if (uriType == TldLocationsCache.NOROOT_REL_URI) {
            uri = ctxt.resolveRelativeUri(uri);
        }
        if (uri.endsWith(".jar")) {
            // TLD packaged in a JAR: by convention the descriptor is the
            // META-INF/taglib.tld entry.
            URL url = null;
            try {
                url = ctxt.getResource(uri);
            } catch (Exception ex) {
                err.jspError("jsp.error.tld.unable_to_get_jar", uri, ex
                        .toString());
            }
            if (url == null) {
                err.jspError("jsp.error.tld.missing_jar", uri);
            }
            // NOTE(review): assumes err.jspError always throws, so url is
            // non-null here -- confirm against ErrorDispatcher.
            return new TldLocation("META-INF/taglib.tld", url.toString());
        } else {
            // Plain TLD file addressed directly by its context path.
            return new TldLocation(uri);
        }
    }
private TagInfo createTagInfo(TreeNode elem, String jspVersion)
throws JasperException {
String tagName = null;
String tagClassName = null;
String teiClassName = null;
/*
* Default body content for JSP 1.2 tag handlers (<body-content> has
* become mandatory in JSP 2.0, because the default would be invalid for
* simple tag handlers)
*/
String bodycontent = "JSP";
String info = null;
String displayName = null;
String smallIcon = null;
String largeIcon = null;
boolean dynamicAttributes = false;
Vector<TagAttributeInfo> attributeVector = new Vector<TagAttributeInfo>();
Vector<TagVariableInfo> variableVector = new Vector<TagVariableInfo>();
Iterator<TreeNode> list = elem.findChildren();
while (list.hasNext()) {
TreeNode element = list.next();
String tname = element.getName();
if ("name".equals(tname)) {
tagName = element.getBody();
} else if ("tagclass".equals(tname) || "tag-class".equals(tname)) {
tagClassName = element.getBody();
} else if ("teiclass".equals(tname) || "tei-class".equals(tname)) {
teiClassName = element.getBody();
} else if ("bodycontent".equals(tname)
|| "body-content".equals(tname)) {
bodycontent = element.getBody();
} else if ("display-name".equals(tname)) {
displayName = element.getBody();
} else if ("small-icon".equals(tname)) {
smallIcon = element.getBody();
} else if ("large-icon".equals(tname)) {
largeIcon = element.getBody();
} else if ("icon".equals(tname)) {
TreeNode icon = element.findChild("small-icon");
if (icon != null) {
smallIcon = icon.getBody();
}
icon = element.findChild("large-icon");
if (icon != null) {
largeIcon = icon.getBody();
}
} else if ("info".equals(tname) || "description".equals(tname)) {
info = element.getBody();
} else if ("variable".equals(tname)) {
variableVector.addElement(createVariable(element));
} else if ("attribute".equals(tname)) {
attributeVector
.addElement(createAttribute(element, jspVersion));
} else if ("dynamic-attributes".equals(tname)) {
dynamicAttributes = JspUtil.booleanValue(element.getBody());
} else if ("example".equals(tname)) {
// Ignored elements
} else if ("tag-extension".equals(tname)) {
// Ignored
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.tag", tname));
}
}
}
TagExtraInfo tei = null;
if (teiClassName != null && !teiClassName.equals("")) {
try {
Class<?> teiClass =
ctxt.getClassLoader().loadClass(teiClassName);
tei = (TagExtraInfo) teiClass.newInstance();
} catch (Exception e) {
err.jspError(e, "jsp.error.teiclass.instantiation",
teiClassName);
}
}
TagAttributeInfo[] tagAttributeInfo = new TagAttributeInfo[attributeVector
.size()];
attributeVector.copyInto(tagAttributeInfo);
TagVariableInfo[] tagVariableInfos = new TagVariableInfo[variableVector
.size()];
variableVector.copyInto(tagVariableInfos);
TagInfo taginfo = new TagInfo(tagName, tagClassName, bodycontent, info,
this, tei, tagAttributeInfo, displayName, smallIcon, largeIcon,
tagVariableInfos, dynamicAttributes);
return taginfo;
}
/*
* Parses the tag file directives of the given TagFile and turns them into a
* TagInfo.
*
* @param elem The <tag-file> element in the TLD @param uri The location of
* the TLD, in case the tag file is specified relative to it @param jarFile
* The JAR file, in case the tag file is packaged in a JAR
*
* @return TagInfo corresponding to tag file directives
*/
private TagFileInfo createTagFileInfo(TreeNode elem, JarResource jarResource)
throws JasperException {
String name = null;
String path = null;
Iterator<TreeNode> list = elem.findChildren();
while (list.hasNext()) {
TreeNode child = list.next();
String tname = child.getName();
if ("name".equals(tname)) {
name = child.getBody();
} else if ("path".equals(tname)) {
path = child.getBody();
} else if ("example".equals(tname)) {
// Ignore <example> element: Bugzilla 33538
} else if ("tag-extension".equals(tname)) {
// Ignore <tag-extension> element: Bugzilla 33538
} else if ("icon".equals(tname)
|| "display-name".equals(tname)
|| "description".equals(tname)) {
// Ignore these elements: Bugzilla 38015
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.tagfile", tname));
}
}
}
if (path.startsWith("/META-INF/tags")) {
// Tag file packaged in JAR
// See https://issues.apache.org/bugzilla/show_bug.cgi?id=46471
// This needs to be removed once all the broken code that depends on
// it has been removed
ctxt.setTagFileJarResource(path, jarResource);
} else if (!path.startsWith("/WEB-INF/tags")) {
err.jspError("jsp.error.tagfile.illegalPath", path);
}
TagInfo tagInfo = TagFileProcessor.parseTagFileDirectives(
parserController, name, path, jarResource, this);
return new TagFileInfo(name, path, tagInfo);
}
TagAttributeInfo createAttribute(TreeNode elem, String jspVersion) {
String name = null;
String type = null;
String expectedType = null;
String methodSignature = null;
boolean required = false, rtexprvalue = false, isFragment = false, deferredValue = false, deferredMethod = false;
Iterator<TreeNode> list = elem.findChildren();
while (list.hasNext()) {
TreeNode element = list.next();
String tname = element.getName();
if ("name".equals(tname)) {
name = element.getBody();
} else if ("required".equals(tname)) {
String s = element.getBody();
if (s != null)
required = JspUtil.booleanValue(s);
} else if ("rtexprvalue".equals(tname)) {
String s = element.getBody();
if (s != null)
rtexprvalue = JspUtil.booleanValue(s);
} else if ("type".equals(tname)) {
type = element.getBody();
if ("1.2".equals(jspVersion)
&& (type.equals("Boolean") || type.equals("Byte")
|| type.equals("Character")
|| type.equals("Double")
|| type.equals("Float")
|| type.equals("Integer")
|| type.equals("Long") || type.equals("Object")
|| type.equals("Short") || type
.equals("String"))) {
type = "java.lang." + type;
}
} else if ("fragment".equals(tname)) {
String s = element.getBody();
if (s != null) {
isFragment = JspUtil.booleanValue(s);
}
} else if ("deferred-value".equals(tname)) {
deferredValue = true;
type = "javax.el.ValueExpression";
TreeNode child = element.findChild("type");
if (child != null) {
expectedType = child.getBody();
if (expectedType != null) {
expectedType = expectedType.trim();
}
} else {
expectedType = "java.lang.Object";
}
} else if ("deferred-method".equals(tname)) {
deferredMethod = true;
type = "javax.el.MethodExpression";
TreeNode child = element.findChild("method-signature");
if (child != null) {
methodSignature = child.getBody();
if (methodSignature != null) {
methodSignature = methodSignature.trim();
}
} else {
methodSignature = "java.lang.Object method()";
}
} else if ("description".equals(tname) || false) {
// Ignored elements
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.attribute", tname));
}
}
}
if (isFragment) {
/*
* According to JSP.C-3 ("TLD Schema Element Structure - tag"),
* 'type' and 'rtexprvalue' must not be specified if 'fragment' has
* been specified (this will be enforced by validating parser).
* Also, if 'fragment' is TRUE, 'type' is fixed at
* javax.servlet.jsp.tagext.JspFragment, and 'rtexprvalue' is fixed
* at true. See also JSP.8.5.2.
*/
type = "javax.servlet.jsp.tagext.JspFragment";
rtexprvalue = true;
}
if (!rtexprvalue && type == null) {
// According to JSP spec, for static values (those determined at
// translation time) the type is fixed at java.lang.String.
type = "java.lang.String";
}
return new TagAttributeInfo(name, required, type, rtexprvalue,
isFragment, null, deferredValue, deferredMethod, expectedType,
methodSignature);
}
TagVariableInfo createVariable(TreeNode elem) {
String nameGiven = null;
String nameFromAttribute = null;
String className = "java.lang.String";
boolean declare = true;
int scope = VariableInfo.NESTED;
Iterator<TreeNode> list = elem.findChildren();
while (list.hasNext()) {
TreeNode element = list.next();
String tname = element.getName();
if ("name-given".equals(tname))
nameGiven = element.getBody();
else if ("name-from-attribute".equals(tname))
nameFromAttribute = element.getBody();
else if ("variable-class".equals(tname))
className = element.getBody();
else if ("declare".equals(tname)) {
String s = element.getBody();
if (s != null)
declare = JspUtil.booleanValue(s);
} else if ("scope".equals(tname)) {
String s = element.getBody();
if (s != null) {
if ("NESTED".equals(s)) {
scope = VariableInfo.NESTED;
} else if ("AT_BEGIN".equals(s)) {
scope = VariableInfo.AT_BEGIN;
} else if ("AT_END".equals(s)) {
scope = VariableInfo.AT_END;
}
}
} else if ("description".equals(tname) || // Ignored elements
false) {
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.variable", tname));
}
}
}
return new TagVariableInfo(nameGiven, nameFromAttribute, className,
declare, scope);
}
private TagLibraryValidator createValidator(TreeNode elem)
throws JasperException {
String validatorClass = null;
Map<String,Object> initParams = new Hashtable<String,Object>();
Iterator<TreeNode> list = elem.findChildren();
while (list.hasNext()) {
TreeNode element = list.next();
String tname = element.getName();
if ("validator-class".equals(tname))
validatorClass = element.getBody();
else if ("init-param".equals(tname)) {
String[] initParam = createInitParam(element);
initParams.put(initParam[0], initParam[1]);
} else if ("description".equals(tname) || // Ignored elements
false) {
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.validator", tname));
}
}
}
TagLibraryValidator tlv = null;
if (validatorClass != null && !validatorClass.equals("")) {
try {
Class<?> tlvClass = ctxt.getClassLoader()
.loadClass(validatorClass);
tlv = (TagLibraryValidator) tlvClass.newInstance();
} catch (Exception e) {
err.jspError(e, "jsp.error.tlvclass.instantiation",
validatorClass);
}
}
if (tlv != null) {
tlv.setInitParameters(initParams);
}
return tlv;
}
String[] createInitParam(TreeNode elem) {
String[] initParam = new String[2];
Iterator<TreeNode> list = elem.findChildren();
while (list.hasNext()) {
TreeNode element = list.next();
String tname = element.getName();
if ("param-name".equals(tname)) {
initParam[0] = element.getBody();
} else if ("param-value".equals(tname)) {
initParam[1] = element.getBody();
} else if ("description".equals(tname)) {
// Do nothing
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.initParam", tname));
}
}
}
return initParam;
}
FunctionInfo createFunctionInfo(TreeNode elem) {
String name = null;
String klass = null;
String signature = null;
Iterator<TreeNode> list = elem.findChildren();
while (list.hasNext()) {
TreeNode element = list.next();
String tname = element.getName();
if ("name".equals(tname)) {
name = element.getBody();
} else if ("function-class".equals(tname)) {
klass = element.getBody();
} else if ("function-signature".equals(tname)) {
signature = element.getBody();
} else if ("display-name".equals(tname) || // Ignored elements
"small-icon".equals(tname) || "large-icon".equals(tname)
|| "description".equals(tname) || "example".equals(tname)) {
} else {
if (log.isWarnEnabled()) {
log.warn(Localizer.getMessage(
"jsp.warning.unknown.element.in.function", tname));
}
}
}
return new FunctionInfo(name, klass, signature);
}
// *********************************************************************
// Until javax.servlet.jsp.tagext.TagLibraryInfo is fixed
/**
* The instance (if any) for the TagLibraryValidator class.
*
* @return The TagLibraryValidator instance, if any.
*/
public TagLibraryValidator getTagLibraryValidator() {
return tagLibraryValidator;
}
/**
* Translation-time validation of the XML document associated with the JSP
* page. This is a convenience method on the associated TagLibraryValidator
* class.
*
* @param thePage
* The JSP page object
* @return A string indicating whether the page is valid or not.
*/
    public ValidationMessage[] validate(PageData thePage) {
        TagLibraryValidator tlv = getTagLibraryValidator();
        // No validator declared in the TLD: nothing to check.
        if (tlv == null)
            return null;
        String uri = getURI();
        // Context-relative URIs are namespaced with the URN_JSPTLD prefix
        // (from TagConstants) before being handed to the validator.
        if (uri.startsWith("/")) {
            uri = URN_JSPTLD + uri;
        }
        return tlv.validate(getPrefixString(), uri, thePage);
    }
protected TagLibraryValidator tagLibraryValidator;
}
| apache-2.0 |
marktriggs/nyu-sakai-10.4 | samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/evaluation/QuestionScoresBean.java | 22504 | /**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.ui.bean.evaluation;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.faces.event.ActionEvent;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.jsf.model.PhaseAware;
import org.sakaiproject.tool.assessment.business.entity.RecordingData;
import org.sakaiproject.tool.assessment.data.dao.assessment.AssessmentAccessControl;
import org.sakaiproject.tool.assessment.data.ifc.assessment.PublishedAssessmentIfc;
import org.sakaiproject.tool.assessment.ui.bean.util.Validator;
import org.sakaiproject.tool.assessment.ui.listener.evaluation.QuestionScoreListener;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
import org.sakaiproject.tool.assessment.util.AttachmentUtil;
import org.sakaiproject.util.ResourceLoader;
/**
* <p>Description: class form for evaluating question scores</p>
*
*/
public class QuestionScoresBean
implements Serializable, PhaseAware
{
private String assessmentId;
private String publishedId;
/** Use serialVersionUID for interoperability. */
private final static long serialVersionUID = 5517587781720762296L;
public static final String SHOW_SA_RATIONALE_RESPONSES_INLINE = "2";
public static final String SHOW_SA_RATIONALE_RESPONSES_POPUP = "1";
private String assessmentName;
private String itemName;
private String partName;
private String itemId;
private String anonymous;
private String groupName;
private String maxScore;
private Collection agents;
//private Collection sortedAgents;
private Collection sections;
private Collection deliveryItem;
private String score;
private String discount;
private String answer;
private String questionScoreComments;
//private String sortProperty;
private String lateHandling; // read-only property set for UI late handling
private String dueDate;
private String sortType;
private boolean sortAscending = true;
private String roleSelection;
private String allSubmissions;
private RecordingData recordingData;
private String totalPeople;
private String typeId;
private HashMap scoresByItem;
private static Log log = LogFactory.getLog(QuestionScoresBean.class);
//private String selectedSectionFilterValue = TotalScoresBean.ALL_SECTIONS_SELECT_VALUE;
private String selectedSectionFilterValue = null;
private String selectedSARationaleView =SHOW_SA_RATIONALE_RESPONSES_POPUP;
private ArrayList allAgents;
private boolean haveModelShortAnswer;
//Paging.
private int firstScoreRow;
private int maxDisplayedScoreRows;
private int scoreDataRows;
private int audioMaxDisplayedScoreRows;
private int othersMaxDisplayedScoreRows;
private boolean hasAudioMaxDisplayedScoreRowsChanged;
//Searching
private String searchString;
private String defaultSearchString;
private Map userIdMap;
private HashMap agentResultsByItemGradingId;
private boolean isAnyItemGradingAttachmentListModified;
private Boolean releasedToGroups = null;
/**
* Creates a new QuestionScoresBean object.
*/
  public QuestionScoresBean()
  {
    log.debug("Creating a new QuestionScoresBean");
    // resetFields() is defined elsewhere in this class; presumably it sets
    // the bean's fields to their defaults -- confirm against its definition.
    resetFields();
  }
  /**
   * Prepares the data shown for the current render phase: resolves the
   * search string, caches the full agent list on first use, applies any
   * name filter, and slices out the page of rows selected by the paging
   * controls into {@code agents}.
   */
  protected void init() {
    defaultSearchString = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.EvaluationMessages", "search_default_student_search_string");
    if (searchString == null) {
      searchString = defaultSearchString;
    }
    // Get allAgents only at the first time
    if (allAgents == null) {
      allAgents = getAllAgents();
    }
    ArrayList matchingAgents;
    // isFilteredSearch()/findMatchingAgents() are defined elsewhere in this
    // class; presumably they filter agents by the search string -- confirm.
    if (isFilteredSearch()) {
      matchingAgents = findMatchingAgents(searchString);
    }
    else {
      matchingAgents = allAgents;
    }
    scoreDataRows = matchingAgents.size();
    ArrayList newAgents = null;
    // maxDisplayedScoreRows == 0 disables paging: show every matching row.
    if (maxDisplayedScoreRows == 0) {
      newAgents = matchingAgents;
    } else {
      // Clamp the page end to the last row so subList never overruns.
      int nextPageRow = Math.min(firstScoreRow + maxDisplayedScoreRows, scoreDataRows);
      newAgents = new ArrayList(matchingAgents.subList(firstScoreRow, nextPageRow));
      log.debug("init(): subList " + firstScoreRow + ", " + nextPageRow);
    }
    agents = newAgents;
  }
// Following three methods are for interface PhaseAware
public void endProcessValidators() {
log.debug("endProcessValidators");
}
public void endProcessUpdates() {
log.debug("endProcessUpdates");
}
public void startRenderResponse() {
log.debug("startRenderResponse");
init();
}
/**
* get assessment name
*
* @return the name
*/
public String getAssessmentName()
{
return Validator.check(assessmentName, "N/A");
}
/**
* set assessment name
*
* @param passessmentName the name
*/
public void setAssessmentName(String passessmentName)
{
assessmentName = passessmentName;
}
/**
* get part name
*
* @return the name
*/
public String getPartName()
{
return Validator.check(partName, "N/A");
}
/**
* set part name
*
* @param ppartName the name
*/
public void setPartName(String ppartName)
{
partName = ppartName;
}
/**
* get item name
*
* @return the name
*/
public String getItemName()
{
return Validator.check(itemName, "N/A");
}
/**
* set item name
*
* @param pitemName the name
*/
public void setItemName(String pitemName)
{
itemName = pitemName;
}
/**
* get item id
*
* @return the id
*/
public String getItemId()
{
return Validator.check(itemId, "1");
}
/**
* set item id
*
* @param pitemId the id
*/
public void setItemId(String pitemId)
{
itemId = pitemId;
}
/**
* get assessment id
*
* @return the assessment id
*/
public String getAssessmentId()
{
return Validator.check(assessmentId, "0");
}
/**
* set assessment id
*
* @param passessmentId the id
*/
public void setAssessmentId(String passessmentId)
{
assessmentId = passessmentId;
}
/**
* get published id
*
* @return the published id
*/
public String getPublishedId()
{
return Validator.check(publishedId, "0");
}
/**
* set published id
*
* @param passessmentId the id
*/
public void setPublishedId(String ppublishedId)
{
publishedId = ppublishedId;
}
/**
* Is this anonymous grading?
*
* @return anonymous grading? true or false
*/
public String getAnonymous()
{
return Validator.check(anonymous, "false");
}
/**
* Set switch if this is anonymous grading.
*
* @param panonymous anonymous grading? true or false
*/
public void setAnonymous(String panonymous)
{
anonymous = panonymous;
}
/**
* Get the group name
* @return group name
*/
public String getGroupName()
{
return Validator.check(groupName, "N/A");
}
/**
* set the group name
*
* @param pgroupName the name
*/
public void setGroupName(String pgroupName)
{
groupName = pgroupName;
}
/**
* get the max score
*
* @return the max score
*/
public String getMaxScore()
{
return Validator.check(maxScore, "N/A");
}
/**
* set max score
*
* @param pmaxScore set the max score
*/
public void setMaxScore(String pmaxScore)
{
maxScore = pmaxScore;
}
/**
 * Returns the max score followed by the localized, pluralized point label,
 * e.g. "1.0 point" or "2.5 points".
 *
 * @return the formatted max-point string; non-numeric scores ("N/A") use the
 *         singular label
 */
public String getMaxPoint()
{
  ResourceLoader rb = new ResourceLoader("org.sakaiproject.tool.assessment.bundle.EvaluationMessages");
  String key;
  try {
    key = (Double.parseDouble(this.getMaxScore()) == 1.0) ? "point" : "points";
  }
  catch (NumberFormatException e) {
    // Non-numeric max score: fall back to the singular label.
    key = "point";
  }
  return this.getMaxScore() + " " + rb.getString(key);
}
/**
* get an agent result collection
*
* @return the collection
*/
public Collection getAgents()
{
if (agents == null)
return new ArrayList();
return agents;
}
/**
* set the agent collection
*
* @param pagents the collection
*/
public void setAgents(Collection pagents)
{
agents = pagents;
}
/**
* get a list of sections
*
* @return the collection
*/
public Collection getSections()
{
if (sections == null)
return new ArrayList();
return sections;
}
/**
* set the section list
*
* @param psections the collection
*/
public void setSections(Collection psections)
{
sections = psections;
}
/**
* get the item to display
*
* @return the collection
*/
public Collection getDeliveryItem()
{
if (deliveryItem == null)
return new ArrayList();
return deliveryItem;
}
/**
* set the delivery item
*
* @param pitem the collection
*/
public void setDeliveryItem(Collection pitem)
{
deliveryItem = pitem;
}
/** This is a read-only calculated property.
 * Concatenates each student's last-name initial, skipping any agent whose
 * initial cannot be determined.
 * @return list of uppercase student initials, "" when there are no agents
 */
public String getAgentInitials()
{
  Collection agentList = getAgents();
  if (agentList.isEmpty())
  {
    return "";
  }
  StringBuilder initials = new StringBuilder();
  for (Iterator it = agentList.iterator(); it.hasNext();)
  {
    try
    {
      initials.append(((AgentResults) it.next()).getLastInitial());
    }
    catch (Exception ex)
    {
      log.warn(ex.getMessage());
      // if there is any problem, we skip, and go on
    }
  }
  return initials.toString().toUpperCase();
}
/**
* get agent resutls as an array
*
* @return the array
*/
public Object[] getAgentArray()
{
if (agents == null)
return new Object[0];
return agents.toArray();
}
/**
* get the total number of students for this assessment
*
* @return the number
*/
public String getTotalPeople()
{
return Validator.check(totalPeople, "N/A");
}
/**
* set the total number of people
*
* @param ptotalPeople the total
*/
public void setTotalPeople(String ptotalPeople)
{
totalPeople = ptotalPeople;
}
/**
*
* @return the score
*/
public String getScore()
{
return Validator.check(score, "N/A");
}
/**
* set the score
*
* @param pScore the score
*/
public void setScore(String pScore)
{
score = pScore;
}
/**
*
* @return the discount
*/
public String getDiscount()
{
return Validator.check(discount, "N/A");
}
/**
* set the discount
*
* @param pDiscount the discount
*/
public void setDiscount(String pDiscount)
{
discount = pDiscount;
}
/**
* get the answer text
*
* @return the answer text
*/
public String getAnswer()
{
return Validator.check(answer, "N/A");
}
/**
* set the answer text
*
* @param pAnswertext the answer text
*/
public void setAnswer(String pAnswertext)
{
answer = pAnswertext;
}
/**
* get comments
*
* @return the comments
*/
public String getQuestionScoreComments()
{
return Validator.check(questionScoreComments, "");
}
/**
* set comments for question score
*
* @param pQuestionScoreComments the comments
*/
public void setQuestionScoreComments(String pQuestionScoreComments)
{
log.debug("setting question score comments to "+pQuestionScoreComments);
questionScoreComments = pQuestionScoreComments;
}
/**
* get late handling
*
* @return late handlign
*/
public String getLateHandling()
{
return Validator.check(lateHandling, "1");
}
/**
* set late handling
*
* @param plateHandling the late handling
*/
public void setLateHandling(String plateHandling)
{
lateHandling = plateHandling;
}
/**
* get the due date
*
* @return the due date as a String
*/
public String getDueDate()
{
return Validator.check(dueDate, "N/A");
}
/**
* set due date string
*
* @param dateString the date string
*/
public void setDueDate(String dateString)
{
dueDate = dateString;
}
/**
* get sort type
* @return sort type
*/
public String getSortType()
{
if (!Boolean.parseBoolean(getAnonymous())) {
return Validator.check(sortType, "lastName");
}
else {
return Validator.check(sortType, "assessmentGradingId");
}
}
/**
* set sort type, trigger property sorts
* @param psortType the type
*/
public void setSortType(String psortType)
{
sortType = psortType;
}
/**
* is scores table sorted in ascending order
* @return true if it is
*/
public boolean isSortAscending()
{
return sortAscending;
}
/**
*
* @param sortAscending is scores table sorted in ascending order
*/
public void setSortAscending(boolean sortAscending)
{
this.sortAscending = sortAscending;
}
/**
* Is this an all submissions or, just the largest
* @return true if is is, else false
*/
public String getAllSubmissions()
{
return allSubmissions;
}
/**
 * Sets whether all submissions (rather than only the best/latest one) are
 * exposed. Resets the paging to the first row whenever the value actually
 * changes, mirroring setSelectedSectionFilterValue / setSearchString.
 *
 * @param pallSubmissions "true" to expose all submissions, may be null
 */
public void setAllSubmissions(String pallSubmissions)
{
  // Null-safe change detection: the previous pallSubmissions.equals(...)
  // call threw a NullPointerException when a null value was passed in.
  if (pallSubmissions == null ? this.allSubmissions != null
      : !pallSubmissions.equals(this.allSubmissions)) {
    this.allSubmissions = pallSubmissions;
    setFirstRow(0); // clear the paging when we update the search
  }
}
/**
* DOCUMENTATION PENDING
*
* @return DOCUMENTATION PENDING
*/
public String getRoleSelection()
{
return Validator.check(roleSelection, "N/A");
}
/**
* DOCUMENTATION PENDING
*
* @param proleSelection DOCUMENTATION PENDING
*/
public void setRoleSelection(String proleSelection)
{
roleSelection = proleSelection;
}
/**
* DOCUMENTATION PENDING
*
* @return DOCUMENTATION PENDING
*/
public String getTypeId()
{
return Validator.check(typeId, "1");
}
/**
* DOCUMENTATION PENDING
*
* @param ptypeId DOCUMENTATION PENDING
*/
public void setTypeId(String ptypeId)
{
typeId = ptypeId;
}
/**
* reset the fields
*/
public void resetFields()
{
//agents = new ArrayList();
//setAgents(agents);
}
/**
* encapsulates audio recording info
* @return recording data
*/
public RecordingData getRecordingData()
{
return this.recordingData;
}
/**
* encapsulates audio recording info
* @param rd
*/
public void setRecordingData(RecordingData rd)
{
this.recordingData = rd;
}
public HashMap getScoresByItem()
{
return scoresByItem;
}
public void setScoresByItem(HashMap newScores)
{
scoresByItem = newScores;
}
public String getSelectedSectionFilterValue() {
// lazy initialization
if (selectedSectionFilterValue == null) {
if (isReleasedToGroups()) {
setSelectedSectionFilterValue(TotalScoresBean.RELEASED_SECTIONS_GROUPS_SELECT_VALUE);
}
else {
setSelectedSectionFilterValue(TotalScoresBean.ALL_SECTIONS_SELECT_VALUE);
}
}
return selectedSectionFilterValue;
}
public void setSelectedSectionFilterValue(String param ) {
if (!param.equals(this.selectedSectionFilterValue)) {
this.selectedSectionFilterValue = param;
setFirstRow(0); // clear the paging when we update the search
}
}
// itemScoresMap = (publishedItemId, HashMap)
// = (Long publishedItemId, (Long publishedItemId, Array itemGradings))
private HashMap itemScoresMap;
public void setItemScoresMap(HashMap itemScoresMap){
this.itemScoresMap = itemScoresMap;
}
public HashMap getItemScoresMap(){
return itemScoresMap;
}
private PublishedAssessmentIfc publishedAssessment;
public void setPublishedAssessment(PublishedAssessmentIfc publishedAssessment){
this.publishedAssessment = publishedAssessment;
}
public PublishedAssessmentIfc getPublishedAssessment(){
return publishedAssessment;
}
public String getSelectedSARationaleView() {
return selectedSARationaleView;
}
public void setSelectedSARationaleView(String selectedSARationaleView) {
this.selectedSARationaleView = selectedSARationaleView;
}
public int getFirstRow() {
return firstScoreRow;
}
public void setFirstRow(int firstRow) {
firstScoreRow = firstRow;
}
public int getMaxDisplayedRows() {
return maxDisplayedScoreRows;
}
public void setMaxDisplayedRows(int maxDisplayedRows) {
maxDisplayedScoreRows = maxDisplayedRows;
}
public int getAudioMaxDisplayedScoreRows() {
return audioMaxDisplayedScoreRows;
}
public void setAudioMaxDisplayedScoreRows(int audioMaxDisplayedRows) {
audioMaxDisplayedScoreRows = audioMaxDisplayedRows;
}
public int getOtherMaxDisplayedScoreRows() {
return othersMaxDisplayedScoreRows;
}
public void setOtherMaxDisplayedScoreRows(int otherMaxDisplayedRows) {
othersMaxDisplayedScoreRows = otherMaxDisplayedRows;
}
public boolean getHasAudioMaxDisplayedScoreRowsChanged() {
return hasAudioMaxDisplayedScoreRowsChanged;
}
public void setHasAudioMaxDisplayedScoreRowsChanged(boolean hasAudioMaxDisplayedRowsChanged) {
hasAudioMaxDisplayedScoreRowsChanged = hasAudioMaxDisplayedRowsChanged;
}
public int getDataRows() {
return scoreDataRows;
}
public void setAllAgents(ArrayList allAgents) {
this.allAgents = allAgents;
}
/**
 * Loads the per-student results for the current question via
 * QuestionScoreListener and returns the populated list. Despite the getter
 * name this has side effects: the listener fills in this bean's fields
 * (including allAgents) from the published assessment identified by the
 * "publishedId" request parameter.
 *
 * @return the agent list populated by the listener
 * @throws RuntimeException if the listener fails to process the scores
 */
public ArrayList getAllAgents()
{
  String publishedId = ContextUtil.lookupParam("publishedId");
  QuestionScoreListener questionScoreListener = new QuestionScoreListener();
  if (!questionScoreListener.questionScores(publishedId, this, false))
  {
    throw new RuntimeException("failed to call questionScores.");
  }
  return allAgents;
}
public String getSearchString() {
return searchString;
}
/**
 * Updates the student search string. Blank or null input falls back to the
 * localized default prompt; paging is reset only when the value changes.
 *
 * @param searchString the raw search text entered by the user
 */
public void setSearchString(String searchString) {
  String candidate = (StringUtils.trimToNull(searchString) == null)
      ? defaultSearchString
      : searchString;
  if (!StringUtils.equals(candidate, this.searchString)) {
    log.debug("setSearchString " + candidate);
    this.searchString = candidate;
    setFirstRow(0); // clear the paging when we update the search
  }
}
public void search(ActionEvent event) {
// We don't need to do anything special here, since init will handle the search
log.debug("search");
}
public void clear(ActionEvent event) {
log.debug("clear");
setSearchString(null);
}
private boolean isFilteredSearch() {
return !StringUtils.equals(searchString, defaultSearchString);
}
/**
 * Filters allAgents down to those whose first name, last name, EID, or
 * full-name forms ("First Last" / "Last, First") start with the given
 * pattern, case-insensitively.
 *
 * @param pattern the search prefix typed by the user
 * @return the matching AgentResults, in their original order
 */
public ArrayList findMatchingAgents(final String pattern) {
  ArrayList filteredList = new ArrayList();
  // Lower-case the pattern and each name once; the previous code recomputed
  // pattern.toLowerCase() for every one of the five comparisons per agent.
  final String lowerPattern = pattern.toLowerCase();
  for (Iterator iter = allAgents.iterator(); iter.hasNext();) {
    AgentResults result = (AgentResults) iter.next();
    String firstName = result.getFirstName().toLowerCase();
    String lastName = result.getLastName().toLowerCase();
    // name1 example: John Doe
    String name1 = firstName + " " + lastName;
    // name2 example: Doe, John
    String name2 = lastName + ", " + firstName;
    if (firstName.startsWith(lowerPattern)
        || lastName.startsWith(lowerPattern)
        || result.getAgentEid().toLowerCase().startsWith(lowerPattern)
        || name1.startsWith(lowerPattern)
        || name2.startsWith(lowerPattern)) {
      filteredList.add(result);
    }
  }
  return filteredList;
}
public boolean getHaveModelShortAnswer()
{
return haveModelShortAnswer;
}
public void setHaveModelShortAnswer(boolean haveModelShortAnswer)
{
this.haveModelShortAnswer = haveModelShortAnswer;
}
/**
 * Whether the published assessment was released to selected groups only.
 *
 * @return true if the access control's releaseTo equals
 *         RELEASE_TO_SELECTED_GROUPS
 */
public boolean isReleasedToGroups() {
  // NOTE(review): the Boolean field releasedToGroups declared above is never
  // consulted here, so each call re-walks the accessor chain; this will NPE
  // if publishedAssessment has not been set yet — confirm callers always
  // initialize it first (getSelectedSectionFilterValue relies on this).
  return this.getPublishedAssessment().getAssessmentAccessControl().getReleaseTo().equals(AssessmentAccessControl.RELEASE_TO_SELECTED_GROUPS);
}
public Map getUserIdMap()
{
return userIdMap;
}
public void setUserIdMap(Map userIdMap)
{
this.userIdMap = userIdMap;
}
/**
 * Rebuilds and stores the attachment list for the item grading identified by
 * itemGradingId, replacing the corresponding AgentResults' current list.
 * Unknown ids are silently ignored.
 *
 * @param itemGradingId key into agentResultsByItemGradingId
 */
public void setAttachment(Long itemGradingId){
  List itemGradingAttachmentList = new ArrayList();
  AgentResults agentResults = (AgentResults) agentResultsByItemGradingId.get(itemGradingId);
  if (agentResults != null) {
    AttachmentUtil attachmentUtil = new AttachmentUtil();
    // Seed the preparation step with the currently attached files (if any)
    // so existing attachments are carried over.
    Set attachmentSet = new HashSet();
    if (agentResults.getItemGradingAttachmentList() != null) {
      attachmentSet = new HashSet(agentResults.getItemGradingAttachmentList());
    }
    itemGradingAttachmentList = attachmentUtil.prepareAssessmentAttachment(agentResults.getItemGrading(), attachmentSet);
    agentResults.setItemGradingAttachmentList(itemGradingAttachmentList);
  }
}
public HashMap getAgentResultsByItemGradingId()
{
return agentResultsByItemGradingId;
}
public void setAgentResultsByItemGradingId(HashMap agentResultsByItemGradingId)
{
this.agentResultsByItemGradingId = agentResultsByItemGradingId;
}
public boolean getIsAnyItemGradingAttachmentListModified() {
return isAnyItemGradingAttachmentListModified;
}
public void setIsAnyItemGradingAttachmentListModified(boolean isAnyItemGradingAttachmentListModified)
{
this.isAnyItemGradingAttachmentListModified = isAnyItemGradingAttachmentListModified;
}
}
| apache-2.0 |
ekirkilevics/iBatis | src/test/java/com/ibatis/jpetstore/domain/ClassIntrospector.java | 13090 | /*
* Copyright 2009-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibatis.jpetstore.domain;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ReflectPermission;
import java.lang.reflect.UndeclaredThrowableException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
/*
* This class represents a cached set of class definition information that
* allows for easy mapping between property names and getter/setter methods.
*/
public class ClassIntrospector {
private static boolean cacheEnabled = true;
private static final String[] EMPTY_STRING_ARRAY = new String[0];
private static final Set SIMPLE_TYPE_SET = new HashSet();
private static final Map CLASS_INFO_MAP = Collections.synchronizedMap(new HashMap());
private String className;
private String[] readablePropertyNames = EMPTY_STRING_ARRAY;
private String[] writeablePropertyNames = EMPTY_STRING_ARRAY;
private HashMap setMethods = new HashMap();
private HashMap getMethods = new HashMap();
private HashMap setTypes = new HashMap();
private HashMap getTypes = new HashMap();
static {
SIMPLE_TYPE_SET.add(String.class);
SIMPLE_TYPE_SET.add(Byte.class);
SIMPLE_TYPE_SET.add(Short.class);
SIMPLE_TYPE_SET.add(Character.class);
SIMPLE_TYPE_SET.add(Integer.class);
SIMPLE_TYPE_SET.add(Long.class);
SIMPLE_TYPE_SET.add(Float.class);
SIMPLE_TYPE_SET.add(Double.class);
SIMPLE_TYPE_SET.add(Boolean.class);
SIMPLE_TYPE_SET.add(Date.class);
SIMPLE_TYPE_SET.add(Class.class);
SIMPLE_TYPE_SET.add(BigInteger.class);
SIMPLE_TYPE_SET.add(BigDecimal.class);
SIMPLE_TYPE_SET.add(Collection.class);
SIMPLE_TYPE_SET.add(Set.class);
SIMPLE_TYPE_SET.add(Map.class);
SIMPLE_TYPE_SET.add(List.class);
SIMPLE_TYPE_SET.add(HashMap.class);
SIMPLE_TYPE_SET.add(TreeMap.class);
SIMPLE_TYPE_SET.add(ArrayList.class);
SIMPLE_TYPE_SET.add(LinkedList.class);
SIMPLE_TYPE_SET.add(HashSet.class);
SIMPLE_TYPE_SET.add(TreeSet.class);
SIMPLE_TYPE_SET.add(Vector.class);
SIMPLE_TYPE_SET.add(Hashtable.class);
SIMPLE_TYPE_SET.add(Enumeration.class);
}
private ClassIntrospector(Class clazz) {
className = clazz.getName();
addMethods(clazz);
readablePropertyNames = (String[]) getMethods.keySet().toArray(new String[getMethods.keySet().size()]);
writeablePropertyNames = (String[]) setMethods.keySet().toArray(new String[setMethods.keySet().size()]);
}
/*
 * Scans all methods of the given class and records JavaBeans-style
 * accessors: set<X>(arg) into setMethods/setTypes, get<X>()/is<X>() into
 * getMethods/getTypes, keyed by the decapitalized property name.
 *
 * @param cls the class whose accessors should be indexed
 * @throws RuntimeException if a property has overloaded setters
 */
private void addMethods(Class cls) {
  Method[] methods = getAllMethodsForClass(cls);
  for (int i = 0; i < methods.length; i++) {
    String name = methods[i].getName();
    if (name.startsWith("set") && name.length() > 3) {
      if (methods[i].getParameterTypes().length == 1) {
        name = dropCase(name);
        if (setMethods.containsKey(name)) {
          throw new RuntimeException("Illegal overloaded setter method for property " + name + " in class " + cls.getName() +
              ". This breaks the JavaBeans specification and can cause unpredicatble results.");
        }
        setMethods.put(name, methods[i]);
        setTypes.put(name, methods[i].getParameterTypes()[0]);
      }
    } else if ((name.startsWith("get") && name.length() > 3)
        || (name.startsWith("is") && name.length() > 2)) {
      // "get" and "is" getters were previously handled by two identical
      // branches; they are merged here. Both require a no-arg method.
      if (methods[i].getParameterTypes().length == 0) {
        name = dropCase(name);
        getMethods.put(name, methods[i]);
        getTypes.put(name, methods[i].getReturnType());
      }
    }
    // Removed the dead "name = null;" store at the end of the loop body:
    // the variable is reassigned at the top of every iteration.
  }
}
private Method[] getAllMethodsForClass(Class cls) {
if (cls.isInterface()) {
// interfaces only have public methods - so the
// simple call is all we need (this will also get superinterface methods)
return cls.getMethods();
} else {
// need to get all the declared methods in this class
// and any super-class - then need to set access appropriatly
// for private methods
return getClassMethods(cls);
}
}
/*
 * Returns every method declared by this class, its superclasses, and the
 * interfaces it implements. We use this instead of the simpler
 * Class.getMethods() because we also want to find private methods.
 *
 * @param cls the class to scan
 * @return all unique methods, deduplicated by name + parameter signature
 */
private Method[] getClassMethods(Class cls) {
  HashMap uniqueMethods = new HashMap();
  for (Class current = cls; current != null; current = current.getSuperclass()) {
    addUniqueMethods(uniqueMethods, current.getDeclaredMethods());
    // Interface methods must be scanned too, because the class may be
    // abstract and thus not declare them itself.
    Class[] interfaces = current.getInterfaces();
    for (int i = 0; i < interfaces.length; i++) {
      addUniqueMethods(uniqueMethods, interfaces[i].getMethods());
    }
  }
  Collection methods = uniqueMethods.values();
  return (Method[]) methods.toArray(new Method[methods.size()]);
}
private void addUniqueMethods(HashMap uniqueMethods, Method[] methods) {
for (int i = 0; i < methods.length; i++) {
Method currentMethod = methods[i];
String signature = getSignature(currentMethod);
// check to see if the method is already known
// if it is known, then an extended class must have
// overridden a method
if (!uniqueMethods.containsKey(signature)) {
if (canAccessPrivateMethods()) {
try {
currentMethod.setAccessible(true);
} catch (Exception e) {
// Ignored. This is only a final precaution, nothing we can do.
}
}
uniqueMethods.put(signature, currentMethod);
}
}
}
private String getSignature(Method method) {
StringBuffer sb = new StringBuffer();
sb.append(method.getName());
Class[] parameters = method.getParameterTypes();
for (int i = 0; i < parameters.length; i++) {
if (i == 0) {
sb.append(':');
} else {
sb.append(',');
}
sb.append(parameters[i].getName());
}
return sb.toString();
}
/*
 * Checks whether the runtime allows reflective access to private members
 * (i.e. whether setAccessible(true) is permitted).
 *
 * @return true if private access is allowed
 */
private boolean canAccessPrivateMethods() {
  try {
    System.getSecurityManager().checkPermission(new ReflectPermission("suppressAccessChecks"));
    return true;
  } catch (SecurityException e) {
    // A security manager is installed and denies suppressAccessChecks.
    return false;
  } catch (NullPointerException e) {
    // getSecurityManager() returned null: no security manager is installed,
    // so access is unrestricted. The NPE is used deliberately as that signal.
    return true;
  }
}
/*
 * Converts an accessor name into its JavaBeans property name: strips the
 * "is"/"get"/"set" prefix and decapitalizes the first letter unless the
 * second letter is uppercase (e.g. "getURL" -> "URL", "getName" -> "name").
 *
 * @param name the accessor method name
 * @return the derived property name
 */
private static String dropCase(String name) {
  String property;
  if (name.startsWith("is")) {
    property = name.substring(2);
  } else if (name.startsWith("get") || name.startsWith("set")) {
    property = name.substring(3);
  } else {
    throw new RuntimeException("Error parsing property name '" + name + "'. Didn't start with 'is', 'get' or 'set'.");
  }
  if (property.length() == 1 || (property.length() > 1 && !Character.isUpperCase(property.charAt(1)))) {
    property = property.substring(0, 1).toLowerCase(Locale.US) + property.substring(1);
  }
  return property;
}
/*
* Gets the name of the class the instance provides information for
*
* @return The class name
*/
public String getClassName() {
return className;
}
/*
* Gets the setter for a property as a Method object
*
* @param propertyName - the property
* @return The Method
*/
public Method getSetter(String propertyName) {
Method method = (Method) setMethods.get(propertyName);
if (method == null) {
throw new RuntimeException("There is no WRITEABLE property named '" + propertyName + "' in class '" + className + "'");
}
return method;
}
/*
* Gets the getter for a property as a Method object
*
* @param propertyName - the property
* @return The Method
*/
public Method getGetter(String propertyName) {
Method method = (Method) getMethods.get(propertyName);
if (method == null) {
throw new RuntimeException("There is no READABLE property named '" + propertyName + "' in class '" + className + "'");
}
return method;
}
/*
* Gets the type for a property setter
*
* @param propertyName - the name of the property
* @return The Class of the propery setter
*/
public Class getSetterType(String propertyName) {
Class clazz = (Class) setTypes.get(propertyName);
if (clazz == null) {
throw new RuntimeException("There is no WRITEABLE property named '" + propertyName + "' in class '" + className + "'");
}
return clazz;
}
/*
* Gets the type for a property getter
*
* @param propertyName - the name of the property
* @return The Class of the propery getter
*/
public Class getGetterType(String propertyName) {
Class clazz = (Class) getTypes.get(propertyName);
if (clazz == null) {
throw new RuntimeException("There is no READABLE property named '" + propertyName + "' in class '" + className + "'");
}
return clazz;
}
/*
* Gets an array of the readable properties for an object
*
* @return The array
*/
public String[] getReadablePropertyNames() {
return readablePropertyNames;
}
/*
* Gets an array of the writeable properties for an object
*
* @return The array
*/
public String[] getWriteablePropertyNames() {
return writeablePropertyNames;
}
/*
* Check to see if a class has a writeable property by name
*
* @param propertyName - the name of the property to check
* @return True if the object has a writeable property by the name
*/
public boolean hasWritableProperty(String propertyName) {
return setMethods.keySet().contains(propertyName);
}
/*
* Check to see if a class has a readable property by name
*
* @param propertyName - the name of the property to check
* @return True if the object has a readable property by the name
*/
public boolean hasReadableProperty(String propertyName) {
return getMethods.keySet().contains(propertyName);
}
/*
 * Tells us if the class passed in is a known common type.
 *
 * @param clazz The class to check
 * @return True if the class is known
 */
public static boolean isKnownType(Class clazz) {
  // List and Set are subinterfaces of Collection, so the previous separate
  // checks for them were unreachable/redundant: Collection.isAssignableFrom
  // already returns true for both. Behavior is unchanged.
  return SIMPLE_TYPE_SET.contains(clazz)
      || Collection.class.isAssignableFrom(clazz)
      || Map.class.isAssignableFrom(clazz)
      || Iterator.class.isAssignableFrom(clazz);
}
/*
* Gets an instance of ClassInfo for the specified class.
*
* @param clazz The class for which to lookup the method cache.
* @return The method cache for the class
*/
public static ClassIntrospector getInstance(Class clazz) {
  if (cacheEnabled) {
    // NOTE(review): synchronizing on the Class object serializes unrelated
    // callers that lock the same publicly reachable monitor; a private lock
    // object (or a check inside CLASS_INFO_MAP, which is already a
    // synchronized map) would be safer — confirm before changing.
    synchronized (clazz) {
      ClassIntrospector cache = (ClassIntrospector) CLASS_INFO_MAP.get(clazz);
      if (cache == null) {
        // Cache miss: introspect once and remember the result.
        cache = new ClassIntrospector(clazz);
        CLASS_INFO_MAP.put(clazz, cache);
      }
      return cache;
    }
  } else {
    return new ClassIntrospector(clazz);
  }
}
public static void setCacheEnabled(boolean cacheEnabled) {
ClassIntrospector.cacheEnabled = cacheEnabled;
}
/*
 * Examines a Throwable object and gets its root cause by repeatedly
 * unwrapping reflection wrappers (InvocationTargetException and
 * UndeclaredThrowableException), including nested ones.
 *
 * @param t - the exception to examine
 * @return The root cause
 */
public static Throwable unwrapThrowable(Throwable t) {
  Throwable t2 = t;
  while (true) {
    // Bug fix: the original tested and cast 't' (the initial argument)
    // instead of 't2', so a wrapper nested inside another wrapper was
    // never unwrapped correctly and an InvocationTargetException wrapping
    // another one caused an infinite loop.
    if (t2 instanceof InvocationTargetException) {
      t2 = ((InvocationTargetException) t2).getTargetException();
    } else if (t2 instanceof UndeclaredThrowableException) {
      t2 = ((UndeclaredThrowableException) t2).getUndeclaredThrowable();
    } else {
      return t2;
    }
  }
}
}
| apache-2.0 |
AmesianX/binnavi | src/main/java/com/google/security/zynamics/reil/translators/arm/ARMSmlaXYTranslator.java | 6769 | /*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.reil.translators.arm;
import com.google.security.zynamics.reil.OperandSize;
import com.google.security.zynamics.reil.ReilHelpers;
import com.google.security.zynamics.reil.ReilInstruction;
import com.google.security.zynamics.reil.translators.ITranslationEnvironment;
import com.google.security.zynamics.reil.translators.InternalTranslationException;
import com.google.security.zynamics.reil.translators.TranslationHelpers;
import com.google.security.zynamics.zylib.disassembly.IInstruction;
import com.google.security.zynamics.zylib.disassembly.IOperandTreeNode;
import java.util.List;
public class ARMSmlaXYTranslator extends ARMBaseTranslator {
@Override
protected void translateCore(final ITranslationEnvironment environment,
    final IInstruction instruction, final List<ReilInstruction> instructions) {
  // SMLA<x><y> Rd, Rm, Rs, Rn: multiply a signed 16-bit half of Rm by a
  // signed 16-bit half of Rs (<x>/<y> select the Bottom or Top half), add
  // Rn, write the low 32 bits to Rd and set Q on signed overflow.
  final IOperandTreeNode registerOperand1 =
      instruction.getOperands().get(0).getRootNode().getChildren().get(0);
  final IOperandTreeNode registerOperand2 =
      instruction.getOperands().get(1).getRootNode().getChildren().get(0);
  final IOperandTreeNode registerOperand3 =
      instruction.getOperands().get(2).getRootNode().getChildren().get(0);
  final IOperandTreeNode registerOperand4 =
      instruction.getOperands().get(3).getRootNode().getChildren().get(0);

  final String sourceRegister1 = (registerOperand1.getValue()); // Rd
  final String sourceRegister2 = (registerOperand2.getValue()); // Rm
  final String sourceRegister3 = (registerOperand3.getValue()); // Rs
  final String sourceRegister4 = (registerOperand4.getValue()); // Rn

  final OperandSize bt = OperandSize.BYTE;
  final OperandSize wd = OperandSize.WORD;
  final OperandSize dw = OperandSize.DWORD;
  final OperandSize qw = OperandSize.QWORD;

  long baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);

  final String isNotOverflowed = environment.getNextVariableString();
  final String operand1 = environment.getNextVariableString();
  final String operand2 = environment.getNextVariableString();
  final String tmpVar1 = environment.getNextVariableString();
  final String tmpVar2 = environment.getNextVariableString();
  final String tmpVar3 = environment.getNextVariableString();
  final String tmpVar4 = environment.getNextVariableString();
  final String tmpVar5 = environment.getNextVariableString();

  if (instruction.getMnemonic().contains("BB")) {
    // SMLABB: operand1 = SignExtend(Rm[15:0]), operand2 = SignExtend(Rs[15:0])
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, sourceRegister2,
        dw, operand1, 16);
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, sourceRegister3,
        dw, operand2, 16);
    // Resynchronize the REIL address after the emitted sign extension. The
    // other three branches do this; omitting it here left baseOffset stale
    // and produced colliding REIL addresses for the instructions below.
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
  } else if (instruction.getMnemonic().contains("BT")) {
    // SMLABT: operand1 = SignExtend(Rm[15:0]), operand2 = SignExtend(Rs[31:16])
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, sourceRegister2,
        dw, operand1, 16);
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, sourceRegister3, wd,
        String.valueOf(-16L), dw, tmpVar1));
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, tmpVar1, dw,
        operand2, 16);
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
  } else if (instruction.getMnemonic().contains("TB")) {
    // SMLATB: operand1 = SignExtend(Rm[31:16]), operand2 = SignExtend(Rs[15:0])
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, sourceRegister2, wd,
        String.valueOf(-16L), dw, tmpVar1));
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, tmpVar1, dw,
        operand1, 16);
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, sourceRegister3,
        dw, operand2, 16);
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
  } else if (instruction.getMnemonic().contains("TT")) {
    // SMLATT: operand1 = SignExtend(Rm[31:16]), operand2 = SignExtend(Rs[31:16]).
    // Bug fix: this branch previously tested contains("TB") a second time,
    // making it unreachable, so SMLATT instructions were never translated.
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, sourceRegister2, wd,
        String.valueOf(-16L), dw, tmpVar1));
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, tmpVar1, dw,
        operand1, 16);
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, sourceRegister3, wd,
        String.valueOf(-16L), dw, tmpVar2));
    Helpers.signExtend(baseOffset, environment, instruction, instructions, dw, tmpVar2, dw,
        operand2, 16);
    baseOffset = ReilHelpers.nextReilAddress(instruction, instructions);
  }

  // Rd = low 32 bits of (operand1 * operand2) + Rn.
  // TODO check if mul is unsigned or not
  instructions.add(ReilHelpers.createMul(baseOffset++, dw, operand1, dw, operand2, qw, tmpVar3));
  instructions.add(ReilHelpers.createAdd(baseOffset++, dw, sourceRegister4, qw, tmpVar3, qw,
      tmpVar4));
  instructions.add(ReilHelpers.createAnd(baseOffset++, qw, tmpVar4, dw,
      String.valueOf(0xFFFFFFFFL), dw, sourceRegister1));

  // Q flag: set when the 64-bit intermediate result does not fit in 32 bits
  // (i.e. the bits above bit 31 are non-zero).
  instructions.add(ReilHelpers.createBsh(baseOffset++, qw, tmpVar4, wd, String.valueOf(-32), dw,
      tmpVar5));
  instructions.add(ReilHelpers.createBisz(baseOffset++, dw, tmpVar5, bt, isNotOverflowed));
  instructions.add(ReilHelpers.createBisz(baseOffset++, bt, isNotOverflowed, bt, "Q"));
}
/**
* SMLA<x><y>{<cond>} <Rd>, <Rm>, <Rs>, <Rn>
*
* Operation:
*
* if ConditionPassed(cond) then if (x == 0) then operand1 = SignExtend(Rm[15:0]) else // x == 1
* operand1 = SignExtend(Rm[31:16]) if (y == 0) then operand2 = SignExtend(Rs[15:0]) else // y ==
* 1 operand2 = SignExtend(Rs[31:16]) Rd = (operand1 * operand2) + Rn if OverflowFrom((operand1 *
* operand2) + Rn) then Q Flag = 1
*/
@Override
public void translate(final ITranslationEnvironment environment, final IInstruction instruction,
    final List<ReilInstruction> instructions) throws InternalTranslationException {
  // Validate arguments and the "SMLA" mnemonic prefix, then delegate to the
  // base class, which handles condition codes and invokes translateCore for
  // the actual SMLA<x><y> semantics.
  TranslationHelpers.checkTranslationArguments(environment, instruction, instructions, "SMLA");
  translateAll(environment, instruction, "SMLA", instructions);
}
}
| apache-2.0 |
liveqmock/platform-tools-idea | java/java-impl/src/com/intellij/codeInspection/ex/GlobalJavaInspectionContextImpl.java | 19817 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* User: anna
* Date: 19-Dec-2007
*/
package com.intellij.codeInspection.ex;
import com.intellij.CommonBundle;
import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspection;
import com.intellij.codeInspection.reference.*;
import com.intellij.codeInspection.ui.InspectionToolPresentation;
import com.intellij.lang.StdLanguages;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.search.*;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.search.searches.MethodReferencesSearch;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Processor;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NotNull;
import java.util.*;
public class GlobalJavaInspectionContextImpl extends GlobalJavaInspectionContext {
private static final Logger LOG = Logger.getInstance("#" + GlobalJavaInspectionContextImpl.class.getName());
private THashMap<SmartPsiElementPointer, List<DerivedMethodsProcessor>> myDerivedMethodsRequests;
private THashMap<SmartPsiElementPointer, List<DerivedClassesProcessor>> myDerivedClassesRequests;
private THashMap<SmartPsiElementPointer, List<UsagesProcessor>> myMethodUsagesRequests;
private THashMap<SmartPsiElementPointer, List<UsagesProcessor>> myFieldUsagesRequests;
private THashMap<SmartPsiElementPointer, List<UsagesProcessor>> myClassUsagesRequests;
@Override
public void enqueueClassUsagesProcessor(RefClass refClass, UsagesProcessor p) {
if (myClassUsagesRequests == null) myClassUsagesRequests = new THashMap<SmartPsiElementPointer, List<UsagesProcessor>>();
enqueueRequestImpl(refClass, myClassUsagesRequests, p);
}
@Override
public void enqueueDerivedClassesProcessor(RefClass refClass, DerivedClassesProcessor p) {
if (myDerivedClassesRequests == null) myDerivedClassesRequests = new THashMap<SmartPsiElementPointer, List<DerivedClassesProcessor>>();
enqueueRequestImpl(refClass, myDerivedClassesRequests, p);
}
@Override
public void enqueueDerivedMethodsProcessor(RefMethod refMethod, DerivedMethodsProcessor p) {
if (refMethod.isConstructor() || refMethod.isStatic()) return;
if (myDerivedMethodsRequests == null) myDerivedMethodsRequests = new THashMap<SmartPsiElementPointer, List<DerivedMethodsProcessor>>();
enqueueRequestImpl(refMethod, myDerivedMethodsRequests, p);
}
@Override
public void enqueueFieldUsagesProcessor(RefField refField, UsagesProcessor p) {
if (myFieldUsagesRequests == null) myFieldUsagesRequests = new THashMap<SmartPsiElementPointer, List<UsagesProcessor>>();
enqueueRequestImpl(refField, myFieldUsagesRequests, p);
}
@Override
public void enqueueMethodUsagesProcessor(RefMethod refMethod, UsagesProcessor p) {
if (myMethodUsagesRequests == null) myMethodUsagesRequests = new THashMap<SmartPsiElementPointer, List<UsagesProcessor>>();
enqueueRequestImpl(refMethod, myMethodUsagesRequests, p);
}
@Override
public EntryPointsManager getEntryPointsManager(final RefManager manager) {
return manager.getExtension(RefJavaManager.MANAGER).getEntryPointsManager();
}
@SuppressWarnings({"UseOfSystemOutOrSystemErr"})
public static boolean isInspectionsEnabled(final boolean online, @NotNull Project project) {
final Module[] modules = ModuleManager.getInstance(project).getModules();
if (online) {
if (modules.length == 0) {
Messages.showMessageDialog(project, InspectionsBundle.message("inspection.no.modules.error.message"),
CommonBundle.message("title.error"), Messages.getErrorIcon());
return false;
}
while (isBadSdk(project, modules)) {
Messages.showMessageDialog(project, InspectionsBundle.message("inspection.no.jdk.error.message"),
CommonBundle.message("title.error"), Messages.getErrorIcon());
final Sdk projectJdk = ProjectSettingsService.getInstance(project).chooseAndSetSdk();
if (projectJdk == null) return false;
}
}
else {
if (modules.length == 0) {
System.err.println(InspectionsBundle.message("inspection.no.modules.error.message"));
return false;
}
if (isBadSdk(project, modules)) {
System.err.println(InspectionsBundle.message("inspection.no.jdk.error.message"));
System.err.println(
InspectionsBundle.message("offline.inspections.jdk.not.found", ProjectRootManager.getInstance(project).getProjectSdkName()));
return false;
}
for (Module module : modules) {
final ModuleRootManager rootManager = ModuleRootManager.getInstance(module);
final OrderEntry[] entries = rootManager.getOrderEntries();
for (OrderEntry entry : entries) {
if (entry instanceof JdkOrderEntry) {
if (!ModuleType.get(module).isValidSdk(module, null)) {
System.err.println(InspectionsBundle.message("offline.inspections.module.jdk.not.found", ((JdkOrderEntry)entry).getJdkName(),
module.getName()));
return false;
}
}
else if (entry instanceof LibraryOrderEntry) {
final LibraryOrderEntry libraryOrderEntry = (LibraryOrderEntry)entry;
final Library library = libraryOrderEntry.getLibrary();
if (library == null || library.getFiles(OrderRootType.CLASSES).length < library.getUrls(OrderRootType.CLASSES).length) {
System.err.println(InspectionsBundle.message("offline.inspections.library.was.not.resolved",
libraryOrderEntry.getPresentableName(), module.getName()));
}
}
}
}
}
return true;
}
private static boolean isBadSdk(final Project project, final Module[] modules) {
boolean anyModuleAcceptsSdk = false;
boolean anyModuleUsesProjectSdk = false;
Sdk projectSdk = ProjectRootManager.getInstance(project).getProjectSdk();
for (Module module : modules) {
if (ModuleRootManager.getInstance(module).isSdkInherited()) {
anyModuleUsesProjectSdk = true;
if (ModuleType.get(module).isValidSdk(module, projectSdk)) {
anyModuleAcceptsSdk = true;
}
}
}
return anyModuleUsesProjectSdk && !anyModuleAcceptsSdk;
}
private static <T extends Processor> void enqueueRequestImpl(RefElement refElement, Map<SmartPsiElementPointer, List<T>> requestMap, T processor) {
List<T> requests = requestMap.get(refElement.getPointer());
if (requests == null) {
requests = new ArrayList<T>();
requestMap.put(refElement.getPointer(), requests);
}
requests.add(processor);
}
@Override
public void cleanup() {
myDerivedMethodsRequests = null;
myDerivedClassesRequests = null;
myMethodUsagesRequests = null;
myFieldUsagesRequests = null;
myClassUsagesRequests = null;
}
public void processSearchRequests(final GlobalInspectionContext context) {
final RefManager refManager = context.getRefManager();
final AnalysisScope scope = refManager.getScope();
final SearchScope searchScope = new GlobalSearchScope(refManager.getProject()) {
@Override
public boolean contains(VirtualFile file) {
return !scope.contains(file) || file.getFileType() != StdFileTypes.JAVA;
}
@Override
public int compare(VirtualFile file1, VirtualFile file2) {
return 0;
}
@Override
public boolean isSearchInModuleContent(@NotNull Module aModule) {
return true;
}
@Override
public boolean isSearchInLibraries() {
return false;
}
};
if (myDerivedClassesRequests != null) {
final List<SmartPsiElementPointer> sortedIDs = getSortedIDs(myDerivedClassesRequests);
for (SmartPsiElementPointer sortedID : sortedIDs) {
final PsiClass psiClass = (PsiClass)dereferenceInReadAction(sortedID);
if (psiClass == null) continue;
context.incrementJobDoneAmount(context.getStdJobDescriptors().FIND_EXTERNAL_USAGES, ApplicationManager.getApplication().runReadAction(
new Computable<String>() {
@Override
public String compute() {
return psiClass.getQualifiedName();
}
}
));
final List<DerivedClassesProcessor> processors = myDerivedClassesRequests.get(sortedID);
LOG.assertTrue(processors != null, psiClass.getClass().getName());
ClassInheritorsSearch.search(psiClass, searchScope, false)
.forEach(createMembersProcessor(processors, scope));
}
myDerivedClassesRequests = null;
}
if (myDerivedMethodsRequests != null) {
final List<SmartPsiElementPointer> sortedIDs = getSortedIDs(myDerivedMethodsRequests);
for (SmartPsiElementPointer sortedID : sortedIDs) {
final PsiMethod psiMethod = (PsiMethod)dereferenceInReadAction(sortedID);
if (psiMethod == null) continue;
final RefMethod refMethod = (RefMethod)refManager.getReference(psiMethod);
context.incrementJobDoneAmount(context.getStdJobDescriptors().FIND_EXTERNAL_USAGES, refManager.getQualifiedName(refMethod));
final List<DerivedMethodsProcessor> processors = myDerivedMethodsRequests.get(sortedID);
LOG.assertTrue(processors != null, psiMethod.getClass().getName());
OverridingMethodsSearch.search(psiMethod, searchScope, true)
.forEach(createMembersProcessor(processors, scope));
}
myDerivedMethodsRequests = null;
}
if (myFieldUsagesRequests != null) {
final List<SmartPsiElementPointer> sortedIDs = getSortedIDs(myFieldUsagesRequests);
for (SmartPsiElementPointer sortedID : sortedIDs) {
final PsiField psiField = (PsiField)dereferenceInReadAction(sortedID);
if (psiField == null) continue;
final List<UsagesProcessor> processors = myFieldUsagesRequests.get(sortedID);
LOG.assertTrue(processors != null, psiField.getClass().getName());
context.incrementJobDoneAmount(context.getStdJobDescriptors().FIND_EXTERNAL_USAGES, refManager.getQualifiedName(refManager.getReference(psiField)));
ReferencesSearch.search(psiField, searchScope, false)
.forEach(new PsiReferenceProcessorAdapter(createReferenceProcessor(processors, context)));
}
myFieldUsagesRequests = null;
}
if (myClassUsagesRequests != null) {
final List<SmartPsiElementPointer> sortedIDs = getSortedIDs(myClassUsagesRequests);
for (SmartPsiElementPointer sortedID : sortedIDs) {
final PsiClass psiClass = (PsiClass)dereferenceInReadAction(sortedID);
if (psiClass == null) continue;
final List<UsagesProcessor> processors = myClassUsagesRequests.get(sortedID);
LOG.assertTrue(processors != null, psiClass.getClass().getName());
context.incrementJobDoneAmount(context.getStdJobDescriptors().FIND_EXTERNAL_USAGES, ApplicationManager.getApplication().runReadAction(
new Computable<String>() {
@Override
public String compute() {
return psiClass.getQualifiedName();
}
}
));
ReferencesSearch.search(psiClass, searchScope, false)
.forEach(new PsiReferenceProcessorAdapter(createReferenceProcessor(processors, context)));
}
myClassUsagesRequests = null;
}
if (myMethodUsagesRequests != null) {
List<SmartPsiElementPointer> sortedIDs = getSortedIDs(myMethodUsagesRequests);
for (SmartPsiElementPointer sortedID : sortedIDs) {
final PsiMethod psiMethod = (PsiMethod)dereferenceInReadAction(sortedID);
if (psiMethod == null) continue;
final List<UsagesProcessor> processors = myMethodUsagesRequests.get(sortedID);
LOG.assertTrue(processors != null, psiMethod.getClass().getName());
context.incrementJobDoneAmount(context.getStdJobDescriptors().FIND_EXTERNAL_USAGES, refManager.getQualifiedName(refManager.getReference(psiMethod)));
MethodReferencesSearch.search(psiMethod, searchScope, true)
.forEach(new PsiReferenceProcessorAdapter(createReferenceProcessor(processors, context)));
}
myMethodUsagesRequests = null;
}
}
private static PsiElement dereferenceInReadAction(final SmartPsiElementPointer sortedID) {
return ApplicationManager.getApplication().runReadAction(new Computable<PsiElement>() {
@Override
public PsiElement compute() {
return sortedID.getElement();
}
});
}
private static <Member extends PsiMember, P extends Processor<Member>> PsiElementProcessorAdapter<Member> createMembersProcessor(final List<P> processors,
final AnalysisScope scope) {
return new PsiElementProcessorAdapter<Member>(new PsiElementProcessor<Member>() {
@Override
public boolean execute(@NotNull Member member) {
if (scope.contains(member)) return true;
final List<P> processorsArrayed = new ArrayList<P>(processors);
for (P processor : processorsArrayed) {
if (!processor.process(member)) {
processors.remove(processor);
}
}
return !processors.isEmpty();
}
});
}
private int getRequestCount() {
int sum = 0;
sum += getRequestListSize(myClassUsagesRequests);
sum += getRequestListSize(myDerivedClassesRequests);
sum += getRequestListSize(myDerivedMethodsRequests);
sum += getRequestListSize(myFieldUsagesRequests);
sum += getRequestListSize(myMethodUsagesRequests);
return sum;
}
private static int getRequestListSize(THashMap list) {
if (list == null) return 0;
return list.size();
}
private static List<SmartPsiElementPointer> getSortedIDs(final Map<SmartPsiElementPointer, ?> requests) {
final List<SmartPsiElementPointer> result = new ArrayList<SmartPsiElementPointer>();
ApplicationManager.getApplication().runReadAction(new Runnable() {
@Override
public void run() {
for (SmartPsiElementPointer id : requests.keySet()) {
if (id != null) {
final PsiElement psi = id.getElement();
if (psi != null) {
result.add(id);
}
}
}
Collections.sort(result, new Comparator<SmartPsiElementPointer>() {
@Override
public int compare(final SmartPsiElementPointer o1, final SmartPsiElementPointer o2) {
PsiElement p1 = o1.getElement();
PsiElement p2 = o2.getElement();
final PsiFile psiFile1 = p1 != null ? p1.getContainingFile() : null;
LOG.assertTrue(psiFile1 != null);
final PsiFile psiFile2 = p2 != null ? p2.getContainingFile() : null;
LOG.assertTrue(psiFile2 != null);
return psiFile1.getName().compareTo(psiFile2.getName());
}
});
}
});
return result;
}
private static PsiReferenceProcessor createReferenceProcessor(@NotNull final List<UsagesProcessor> processors,
final GlobalInspectionContext context) {
return new PsiReferenceProcessor() {
@Override
public boolean execute(PsiReference reference) {
AnalysisScope scope = context.getRefManager().getScope();
if (scope.contains(reference.getElement()) && reference.getElement().getLanguage() == StdLanguages.JAVA ||
PsiTreeUtil.getParentOfType(reference.getElement(), PsiDocComment.class) != null) {
return true;
}
synchronized (processors) {
UsagesProcessor[] processorsArrayed = processors.toArray(new UsagesProcessor[processors.size()]);
for (UsagesProcessor processor : processorsArrayed) {
if (!processor.process(reference)) {
processors.remove(processor);
}
}
}
return !processors.isEmpty();
}
};
}
@Override
public void performPreRunActivities(@NotNull final List<Tools> globalTools,
@NotNull final List<Tools> localTools,
@NotNull final GlobalInspectionContext context) {
getEntryPointsManager(context.getRefManager()).resolveEntryPoints(context.getRefManager());
// UnusedDeclarationInspection should run first
for (int i = 0; i < globalTools.size(); i++) {
InspectionToolWrapper toolWrapper = globalTools.get(i).getTool();
if (UnusedDeclarationInspection.SHORT_NAME.equals(toolWrapper.getShortName())) {
Collections.swap(globalTools, i, 0);
break;
}
}
}
@Override
public void performPostRunActivities(@NotNull List<InspectionToolWrapper> needRepeatSearchRequest, @NotNull final GlobalInspectionContext context) {
JobDescriptor progress = context.getStdJobDescriptors().FIND_EXTERNAL_USAGES;
progress.setTotalAmount(getRequestCount());
do {
processSearchRequests(context);
InspectionToolWrapper[] requestors = needRepeatSearchRequest.toArray(new InspectionToolWrapper[needRepeatSearchRequest.size()]);
InspectionManager inspectionManager = InspectionManager.getInstance(context.getProject());
for (InspectionToolWrapper toolWrapper : requestors) {
boolean result = false;
if (toolWrapper instanceof GlobalInspectionToolWrapper) {
InspectionToolPresentation presentation = ((GlobalInspectionContextImpl)context).getPresentation(toolWrapper);
result = ((GlobalInspectionToolWrapper)toolWrapper).getTool().queryExternalUsagesRequests(inspectionManager, context, presentation);
}
if (!result) {
needRepeatSearchRequest.remove(toolWrapper);
}
}
int oldSearchRequestCount = progress.getTotalAmount();
int oldDoneAmount = progress.getDoneAmount();
int totalAmount = oldSearchRequestCount + getRequestCount();
progress.setTotalAmount(totalAmount);
progress.setDoneAmount(oldDoneAmount);
}
while (!needRepeatSearchRequest.isEmpty());
}
}
| apache-2.0 |
coxthepilot/vitasa | vitasa_apps/a_vitavol/obj/Release/android/src/md51558244f76c53b6aeda52c8a337f2c37/WebViewRenderer.java | 1856 | package md51558244f76c53b6aeda52c8a337f2c37;
public class WebViewRenderer
extends md51558244f76c53b6aeda52c8a337f2c37.ViewRenderer_2
implements
mono.android.IGCUserPeer
{
/** @hide */
public static final String __md_methods;
static {
__md_methods =
"";
mono.android.Runtime.register ("Xamarin.Forms.Platform.Android.WebViewRenderer, Xamarin.Forms.Platform.Android", WebViewRenderer.class, __md_methods);
}
public WebViewRenderer (android.content.Context p0, android.util.AttributeSet p1, int p2)
{
super (p0, p1, p2);
if (getClass () == WebViewRenderer.class)
mono.android.TypeManager.Activate ("Xamarin.Forms.Platform.Android.WebViewRenderer, Xamarin.Forms.Platform.Android", "Android.Content.Context, Mono.Android:Android.Util.IAttributeSet, Mono.Android:System.Int32, mscorlib", this, new java.lang.Object[] { p0, p1, p2 });
}
public WebViewRenderer (android.content.Context p0, android.util.AttributeSet p1)
{
super (p0, p1);
if (getClass () == WebViewRenderer.class)
mono.android.TypeManager.Activate ("Xamarin.Forms.Platform.Android.WebViewRenderer, Xamarin.Forms.Platform.Android", "Android.Content.Context, Mono.Android:Android.Util.IAttributeSet, Mono.Android", this, new java.lang.Object[] { p0, p1 });
}
public WebViewRenderer (android.content.Context p0)
{
super (p0);
if (getClass () == WebViewRenderer.class)
mono.android.TypeManager.Activate ("Xamarin.Forms.Platform.Android.WebViewRenderer, Xamarin.Forms.Platform.Android", "Android.Content.Context, Mono.Android", this, new java.lang.Object[] { p0 });
}
private java.util.ArrayList refList;
public void monodroidAddReference (java.lang.Object obj)
{
if (refList == null)
refList = new java.util.ArrayList ();
refList.add (obj);
}
public void monodroidClearReferences ()
{
if (refList != null)
refList.clear ();
}
}
| apache-2.0 |
fnp/pylucene | lucene-java-3.5.0/lucene/contrib/facet/src/test/org/apache/lucene/facet/FacetTestUtils.java | 6459 | package org.apache.lucene.facet;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.facet.index.CategoryDocumentBuilder;
import org.apache.lucene.facet.index.params.DefaultFacetIndexingParams;
import org.apache.lucene.facet.index.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.search.params.CountFacetRequest;
import org.apache.lucene.facet.search.params.FacetRequest;
import org.apache.lucene.facet.search.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class FacetTestUtils {
public static Directory[][] createIndexTaxonomyDirs(int number) throws IOException {
Directory[][] dirs = new Directory[number][2];
for (int i = 0; i < number; i++) {
dirs[i][0] = LuceneTestCase.newDirectory();
dirs[i][1] = LuceneTestCase.newDirectory();
}
return dirs;
}
public static IndexTaxonomyReaderPair[] createIndexTaxonomyReaderPair(
Directory[][] dirs) throws IOException {
IndexTaxonomyReaderPair[] pairs = new IndexTaxonomyReaderPair[dirs.length];
for (int i = 0; i < dirs.length; i++) {
IndexTaxonomyReaderPair pair = new IndexTaxonomyReaderPair();
pair.indexReader = IndexReader.open(dirs[i][0]);
pair.indexSearcher = new IndexSearcher(pair.indexReader);
pair.taxReader = new DirectoryTaxonomyReader(dirs[i][1]);
pairs[i] = pair;
}
return pairs;
}
public static IndexTaxonomyWriterPair[] createIndexTaxonomyWriterPair(
Directory[][] dirs) throws IOException {
IndexTaxonomyWriterPair[] pairs = new IndexTaxonomyWriterPair[dirs.length];
for (int i = 0; i < dirs.length; i++) {
IndexTaxonomyWriterPair pair = new IndexTaxonomyWriterPair();
pair.indexWriter = new IndexWriter(dirs[i][0], new IndexWriterConfig(
LuceneTestCase.TEST_VERSION_CURRENT, new StandardAnalyzer(
LuceneTestCase.TEST_VERSION_CURRENT)));
pair.taxWriter = new DirectoryTaxonomyWriter(dirs[i][1]);
pair.indexWriter.commit();
pair.taxWriter.commit();
pairs[i] = pair;
}
return pairs;
}
public static Collector[] search(IndexSearcher searcher,
TaxonomyReader taxonomyReader, DefaultFacetIndexingParams iParams,
int k, String... facetNames) throws IOException,
IllegalAccessException, InstantiationException {
Collector[] collectors = new Collector[2];
FacetSearchParams facetSearchParams = new FacetSearchParams(iParams);
Collection<FacetRequest> fRequests = new ArrayList<FacetRequest>();
for (String facetName : facetNames) {
CategoryPath cp = new CategoryPath(facetName);
FacetRequest fq = new CountFacetRequest(cp, k);
facetSearchParams.addFacetRequest(fq);
fRequests.add(fq);
}
TopScoreDocCollector topDocsCollector = TopScoreDocCollector.create(
searcher.getIndexReader().maxDoc(), true);
FacetsCollector facetsCollector = new FacetsCollector(
facetSearchParams, searcher.getIndexReader(), taxonomyReader);
Collector mColl = MultiCollector.wrap(topDocsCollector, facetsCollector);
collectors[0] = topDocsCollector;
collectors[1] = facetsCollector;
searcher.search(new MatchAllDocsQuery(), mColl);
return collectors;
}
public static void add(FacetIndexingParams iParams, RandomIndexWriter iw,
TaxonomyWriter tw, String... strings) throws IOException,
CorruptIndexException {
ArrayList<CategoryPath> cps = new ArrayList<CategoryPath>();
CategoryPath cp = new CategoryPath(strings);
cps.add(cp);
Document d = new Document();
new CategoryDocumentBuilder(tw, iParams).setCategoryPaths(cps).build(d);
d.add(new Field("content", "alpha", Store.YES, Index.ANALYZED,
TermVector.NO));
iw.addDocument(d);
}
public static class IndexTaxonomyReaderPair {
public IndexReader indexReader;
public TaxonomyReader taxReader;
public IndexSearcher indexSearcher;
public void close() throws IOException {
indexSearcher.close();
indexReader.close();
taxReader.close();
}
}
public static class IndexTaxonomyWriterPair {
public IndexWriter indexWriter;
public TaxonomyWriter taxWriter;
public void close() throws IOException {
indexWriter.close();
taxWriter.close();
}
public void commit() throws IOException {
indexWriter.commit();
taxWriter.commit();
}
}
}
| apache-2.0 |
fcrepo4/fcrepo4 | fcrepo-kernel-impl/src/main/java/org/fcrepo/kernel/impl/operations/AbstractNonRdfSourceOperation.java | 4928 | /*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.kernel.impl.operations;
import java.io.InputStream;
import java.net.URI;
import java.util.Collection;
import org.fcrepo.kernel.api.identifiers.FedoraId;
import org.fcrepo.kernel.api.operations.NonRdfSourceOperation;
/**
* An abstract operation for interacting with a non-rdf source
*
* @author bbpennel
*/
public abstract class AbstractNonRdfSourceOperation extends AbstractResourceOperation implements
NonRdfSourceOperation {
private InputStream content;
private URI externalHandlingURI;
private String externalHandlingType;
private String mimeType;
private String filename;
private Collection<URI> digests;
private Long contentSize;
/**
* Constructor for external content.
*
* @param rescId the internal identifier.
* @param externalContentURI the URI of the external content.
* @param externalHandling the type of external content handling (REDIRECT, PROXY)
*/
protected AbstractNonRdfSourceOperation(final FedoraId rescId, final URI externalContentURI,
final String externalHandling) {
super(rescId);
this.externalHandlingURI = externalContentURI;
this.externalHandlingType = externalHandling;
}
/**
* Constructor for internal binaries.
*
* @param rescId the internal identifier.
* @param content the stream of the content.
*/
protected AbstractNonRdfSourceOperation(final FedoraId rescId, final InputStream content) {
super(rescId);
this.content = content;
}
/**
* Basic constructor.
*
* @param rescId The internal Fedora ID.
*/
protected AbstractNonRdfSourceOperation(final FedoraId rescId) {
super(rescId);
}
@Override
public InputStream getContentStream() {
return content;
}
@Override
public String getExternalHandling() {
return externalHandlingType;
}
@Override
public URI getContentUri() {
return externalHandlingURI;
}
@Override
public String getMimeType() {
return mimeType;
}
@Override
public String getFilename() {
return filename;
}
@Override
public Collection<URI> getContentDigests() {
return digests;
}
@Override
public Long getContentSize() {
return contentSize;
}
/**
* @return the content
*/
protected InputStream getContent() {
return content;
}
/**
* @param content the content to set
*/
protected void setContent(final InputStream content) {
this.content = content;
}
/**
* @return the externalHandlingURI
*/
protected URI getExternalHandlingURI() {
return externalHandlingURI;
}
/**
* @param externalHandlingURI the externalHandlingURI to set
*/
protected void setExternalHandlingURI(final URI externalHandlingURI) {
this.externalHandlingURI = externalHandlingURI;
}
/**
* @return the externalHandlingType
*/
protected String getExternalHandlingType() {
return externalHandlingType;
}
/**
* @param externalHandlingType the externalHandlingType to set
*/
protected void setExternalHandlingType(final String externalHandlingType) {
this.externalHandlingType = externalHandlingType;
}
/**
* @return the digests
*/
protected Collection<URI> getDigests() {
return digests;
}
/**
* @param digests the digests to set
*/
protected void setDigests(final Collection<URI> digests) {
this.digests = digests;
}
/**
* @param mimeType the mimeType to set
*/
protected void setMimeType(final String mimeType) {
this.mimeType = mimeType;
}
/**
* @param filename the filename to set
*/
protected void setFilename(final String filename) {
this.filename = filename;
}
/**
* @param contentSize the contentSize to set
*/
protected void setContentSize(final Long contentSize) {
this.contentSize = contentSize;
}
}
| apache-2.0 |
joney000/Competitive-Programming-Java-Implementation | practice/Codechef_DELISH.java | 788 | /* package joney_000 */
import java.util.*;
import java.lang.*;
import java.io.*;
import java.math.*;
class Main
{
public static void main(String[] args)throws Exception
{
BufferedReader br=new BufferedReader(new InputStreamReader(System.in));
BufferedWriter out=new BufferedWriter(new OutputStreamWriter(System.out));
int tests=Integer.parseInt(br.readLine());
for(int t=0;t<tests;t++){
int n=Integer.parseInt(br.readLine());
int num[]=new int[n];
String[] s=br.readLine().split(" ");
for(int i=0;i<n;i++){
num[i]=Integer.parseInt(s[i]);
}
out.write("\n"+);out.flush();
}
}
}
| apache-2.0 |
QBNemo/spring-mvc-showcase | src/main/java/org/springframework/web/servlet/mvc/support/ControllerBeanNameHandlerMapping.java | 3319 | /*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.servlet.mvc.support;
import java.util.ArrayList;
import java.util.List;
import org.springframework.util.StringUtils;
/**
* Implementation of {@link org.springframework.web.servlet.HandlerMapping} that
* follows a simple convention for generating URL path mappings from the <i>bean names</i>
* of registered {@link org.springframework.web.servlet.mvc.Controller} beans
* as well as {@code @Controller} annotated beans.
*
* <p>This is similar to {@link org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping}
* but doesn't expect bean names to follow the URL convention: It turns plain bean names
* into URLs by prepending a slash and optionally applying a specified prefix and/or suffix.
* However, it only does so for well-known {@link #isControllerType controller types},
* as listed above (analogous to {@link ControllerClassNameHandlerMapping}).
*
* @author Juergen Hoeller
* @since 2.5.3
* @see ControllerClassNameHandlerMapping
* @see org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping
*/
public class ControllerBeanNameHandlerMapping extends AbstractControllerUrlHandlerMapping {

	private String urlPrefix = "";

	private String urlSuffix = "";


	/**
	 * Configures an optional prefix prepended to every generated URL mapping,
	 * e.g. "/myapp/". Defaults to the empty string; {@code null} resets it to "".
	 */
	public void setUrlPrefix(String urlPrefix) {
		this.urlPrefix = (urlPrefix == null ? "" : urlPrefix);
	}

	/**
	 * Configures an optional suffix appended to every generated URL mapping,
	 * e.g. ".do". Defaults to the empty string; {@code null} resets it to "".
	 */
	public void setUrlSuffix(String urlSuffix) {
		this.urlSuffix = (urlSuffix == null ? "" : urlSuffix);
	}

	@Override
	protected String[] buildUrlsForHandler(String beanName, Class<?> beanClass) {
		// Map the bean name itself plus every registered alias.
		List<String> mappings = new ArrayList<String>();
		mappings.add(generatePathMapping(beanName));
		for (String alias : getApplicationContext().getAliases(beanName)) {
			mappings.add(generatePathMapping(alias));
		}
		return StringUtils.toStringArray(mappings);
	}

	/**
	 * Builds the URL path for the given bean name: ensures a leading '/', then
	 * applies the configured prefix and suffix unless the name already carries them.
	 */
	protected String generatePathMapping(String beanName) {
		String path = (beanName.startsWith("/") ? beanName : "/" + beanName);
		StringBuilder mapping = new StringBuilder();
		if (!path.startsWith(this.urlPrefix)) {
			mapping.append(this.urlPrefix);
		}
		mapping.append(path);
		if (!path.endsWith(this.urlSuffix)) {
			mapping.append(this.urlSuffix);
		}
		return mapping.toString();
	}

}
| apache-2.0 |
damienmg/bazel | src/main/java/com/google/devtools/build/lib/vfs/UnixGlob.java | 27912 | // Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.vfs;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ForwardingListenableFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.common.util.concurrent.Uninterruptibles;
import com.google.devtools.build.lib.profiler.Profiler;
import com.google.devtools.build.lib.profiler.ProfilerTask;
import com.google.devtools.build.lib.util.Preconditions;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Pattern;
/**
* Implementation of a subset of UNIX-style file globbing, expanding "*" and "?" as wildcards, but
* not [a-z] ranges.
*
* <p><code>**</code> gets special treatment in include patterns. If it is used as a complete path
* segment it matches the filenames in subdirectories recursively.
*
* <p>Importantly, note that the glob matches are in an unspecified order.
*/
public final class UnixGlob {
private UnixGlob() {}
private static List<Path> globInternal(Path base, Collection<String> patterns,
boolean excludeDirectories,
Predicate<Path> dirPred,
boolean checkForInterruption,
FilesystemCalls syscalls,
ThreadPoolExecutor threadPool)
throws IOException, InterruptedException {
GlobVisitor visitor =
(threadPool == null)
? new GlobVisitor(checkForInterruption)
: new GlobVisitor(threadPool, checkForInterruption);
return visitor.glob(base, patterns, excludeDirectories, dirPred, syscalls);
}
private static long globInternalAndReturnNumGlobTasksForTesting(
Path base, Collection<String> patterns,
boolean excludeDirectories,
Predicate<Path> dirPred,
boolean checkForInterruption,
FilesystemCalls syscalls,
ThreadPoolExecutor threadPool) throws IOException, InterruptedException {
GlobVisitor visitor =
(threadPool == null)
? new GlobVisitor(checkForInterruption)
: new GlobVisitor(threadPool, checkForInterruption);
visitor.glob(base, patterns, excludeDirectories, dirPred, syscalls);
return visitor.getNumGlobTasksForTesting();
}
private static Future<List<Path>> globAsyncInternal(
Path base,
Collection<String> patterns,
boolean excludeDirectories,
Predicate<Path> dirPred,
FilesystemCalls syscalls,
boolean checkForInterruption,
ThreadPoolExecutor threadPool) {
Preconditions.checkNotNull(threadPool, "%s %s", base, patterns);
return new GlobVisitor(threadPool, checkForInterruption)
.globAsync(base, patterns, excludeDirectories, dirPred, syscalls);
}
/**
* Checks that each pattern is valid, splits it into segments and checks
* that each segment contains only valid wildcards.
*
* @return list of segment arrays
*/
private static List<String[]> checkAndSplitPatterns(Collection<String> patterns) {
List<String[]> list = Lists.newArrayListWithCapacity(patterns.size());
for (String pattern : patterns) {
String error = checkPatternForError(pattern);
if (error != null) {
throw new IllegalArgumentException(error + " (in glob pattern '" + pattern + "')");
}
Iterable<String> segments = Splitter.on('/').split(pattern);
list.add(Iterables.toArray(segments, String.class));
}
return list;
}
  /**
   * Checks the given glob pattern for syntax errors.
   *
   * @return a human-readable error description, or {@code null} if {@code pattern} is valid
   */
  public static String checkPatternForError(String pattern) {
    if (pattern.isEmpty()) {
      return "pattern cannot be empty";
    }
    if (pattern.charAt(0) == '/') {
      return "pattern cannot be absolute";
    }
    Iterable<String> segments = Splitter.on('/').split(pattern);
    for (String segment : segments) {
      if (segment.isEmpty()) {
        return "empty segment not permitted";
      }
      if (segment.equals(".") || segment.equals("..")) {
        return "segment '" + segment + "' not permitted";
      }
      // '**' is only legal as a complete path segment, never embedded in one.
      if (segment.contains("**") && !segment.equals("**")) {
        return "recursive wildcard must be its own segment";
      }
    }
    return null;
  }
  /**
   * Calls {@link #matches(String, String, Cache) matches(pattern, str, null)},
   * i.e. matches without a compiled-pattern cache.
   */
  public static boolean matches(String pattern, String str) {
    return matches(pattern, str, null);
  }
  /**
   * Returns whether {@code str} matches the glob pattern {@code pattern}. This
   * method may use the {@code patternCache} to speed up the matching process.
   *
   * <p>An empty pattern or an empty string never matches. Several common pattern
   * shapes are handled by fast paths before falling back to a compiled regex.
   *
   * @param pattern a glob pattern
   * @param str the string to match
   * @param patternCache a cache from patterns to compiled Pattern objects, or
   *     {@code null} to skip caching
   */
  public static boolean matches(String pattern, String str,
      Cache<String, Pattern> patternCache) {
    if (pattern.length() == 0 || str.length() == 0) {
      return false;
    }
    // Common case: **
    if (pattern.equals("**")) {
      return true;
    }
    // Common case: *
    if (pattern.equals("*")) {
      return true;
    }
    // If a filename starts with '.', this char must be matched explicitly.
    if (str.charAt(0) == '.' && pattern.charAt(0) != '.') {
      return false;
    }
    // Common case: *.xyz
    if (pattern.charAt(0) == '*' && pattern.lastIndexOf('*') == 0) {
      return str.endsWith(pattern.substring(1));
    }
    // Common case: xyz*
    int lastIndex = pattern.length() - 1;
    // The first clause of this if statement is unnecessary, but is an
    // optimization--charAt runs faster than indexOf.
    if (pattern.charAt(lastIndex) == '*' && pattern.indexOf('*') == lastIndex) {
      return str.startsWith(pattern.substring(0, lastIndex));
    }
    // Slow path: compile (or fetch from the cache) a regex for the pattern.
    Pattern regex = patternCache == null ? null : patternCache.getIfPresent(pattern);
    if (regex == null) {
      regex = makePatternFromWildcard(pattern);
      if (patternCache != null) {
        patternCache.put(pattern, regex);
      }
    }
    return regex.matcher(str).matches();
  }
  /**
   * Returns a regular expression implementing a matcher for "pattern", in which
   * "*" and "?" are wildcards.
   *
   * <p>e.g. "foo*bar?.java" -> "foo.*bar.\\.java"
   */
  private static Pattern makePatternFromWildcard(String pattern) {
    StringBuilder regexp = new StringBuilder();
    for(int i = 0, len = pattern.length(); i < len; i++) {
      char c = pattern.charAt(i);
      switch(c) {
        case '*':
          int toIncrement = 0;
          if (len > i + 1 && pattern.charAt(i + 1) == '*') {
            // The pattern '**' is interpreted to match 0 or more directory separators, not 1 or
            // more. We skip the next * and then find a trailing/leading '/' and get rid of it.
            toIncrement = 1;
            if (len > i + 2 && pattern.charAt(i + 2) == '/') {
              // We have '**/' -- skip the '/'.
              toIncrement = 2;
            } else if (len == i + 2 && i > 0 && pattern.charAt(i - 1) == '/') {
              // We have '/**' -- remove the '/'.
              regexp.delete(regexp.length() - 1, regexp.length());
            }
          }
          // Both '*' and '**' compile down to '.*'; only the '/' handling differs.
          regexp.append(".*");
          i += toIncrement;
          break;
        case '?':
          regexp.append('.');
          break;
        //escape the regexp special characters that are allowed in wildcards
        case '^': case '$': case '|': case '+':
        case '{': case '}': case '[': case ']':
        case '\\': case '.':
          regexp.append('\\');
          regexp.append(c);
          break;
        default:
          regexp.append(c);
          break;
      }
    }
    return Pattern.compile(regexp.toString());
  }
  /**
   * Filesystem calls required for glob(). Abstracted so callers can substitute
   * cached or instrumented implementations.
   */
  public interface FilesystemCalls {
    /**
     * Get directory entries and their types.
     */
    Collection<Dirent> readdir(Path path, Symlinks symlinks) throws IOException;
    /**
     * Return the stat() for the given path, or null.
     */
    FileStatus statIfFound(Path path, Symlinks symlinks) throws IOException;
  }
  /**
   * Default implementation that delegates directly to {@link Path}.
   *
   * <p>NOTE(review): this public field is mutable (not {@code final});
   * presumably some callers replace it — confirm before tightening.
   */
  public static FilesystemCalls DEFAULT_SYSCALLS = new FilesystemCalls() {
    @Override
    public Collection<Dirent> readdir(Path path, Symlinks symlinks) throws IOException {
      return path.readdir(symlinks);
    }
    @Override
    public FileStatus statIfFound(Path path, Symlinks symlinks) throws IOException {
      return path.statIfFound(symlinks);
    }
  };
  // Shared handle on the default syscall layer; used as the initial value for builders.
  public static final AtomicReference<FilesystemCalls> DEFAULT_SYSCALLS_REF =
      new AtomicReference<>(DEFAULT_SYSCALLS);
  /** Returns a new {@link Builder} rooted at the given path. */
  public static Builder forPath(Path path) {
    return new Builder(path);
  }
/**
* Builder class for UnixGlob.
*
*
*/
public static class Builder {
private Path base;
private List<String> patterns;
private boolean excludeDirectories;
private Predicate<Path> pathFilter;
private ThreadPoolExecutor threadPool;
private AtomicReference<? extends FilesystemCalls> syscalls =
new AtomicReference<>(DEFAULT_SYSCALLS);
/**
* Creates a glob builder with the given base path.
*/
public Builder(Path base) {
this.base = base;
this.patterns = Lists.newArrayList();
this.excludeDirectories = false;
this.pathFilter = Predicates.alwaysTrue();
}
/**
* Adds a pattern to include to the glob builder.
*
* <p>For a description of the syntax of the patterns, see {@link UnixGlob}.
*/
public Builder addPattern(String pattern) {
this.patterns.add(pattern);
return this;
}
/**
* Adds a pattern to include to the glob builder.
*
* <p>For a description of the syntax of the patterns, see {@link UnixGlob}.
*/
public Builder addPatterns(String... patterns) {
Collections.addAll(this.patterns, patterns);
return this;
}
/**
* Adds a pattern to include to the glob builder.
*
* <p>For a description of the syntax of the patterns, see {@link UnixGlob}.
*/
public Builder addPatterns(Collection<String> patterns) {
this.patterns.addAll(patterns);
return this;
}
/**
* Sets the FilesystemCalls interface to use on this glob().
*/
public Builder setFilesystemCalls(AtomicReference<? extends FilesystemCalls> syscalls) {
this.syscalls = (syscalls == null)
? new AtomicReference<FilesystemCalls>(DEFAULT_SYSCALLS)
: syscalls;
return this;
}
/**
* If set to true, directories are not returned in the glob result.
*/
public Builder setExcludeDirectories(boolean excludeDirectories) {
this.excludeDirectories = excludeDirectories;
return this;
}
/**
* Sets the threadpool to use for parallel glob evaluation.
* If unset, evaluation is done in-thread.
*/
public Builder setThreadPool(ThreadPoolExecutor pool) {
this.threadPool = pool;
return this;
}
/**
* If set, the given predicate is called for every directory
* encountered. If it returns false, the corresponding item is not
* returned in the output and directories are not traversed either.
*/
public Builder setDirectoryFilter(Predicate<Path> pathFilter) {
this.pathFilter = pathFilter;
return this;
}
/**
* Executes the glob.
*/
public List<Path> glob() throws IOException {
try {
return globInternal(base, patterns, excludeDirectories, pathFilter, false, syscalls.get(),
threadPool);
} catch (InterruptedException e) {
// cannot happen, since we told globInternal not to throw
throw new IllegalStateException(e);
}
}
/**
* Executes the glob and returns the result.
*
* @throws InterruptedException if the thread is interrupted.
*/
public List<Path> globInterruptible() throws IOException, InterruptedException {
return globInternal(base, patterns, excludeDirectories, pathFilter, true, syscalls.get(),
threadPool);
}
@VisibleForTesting
public long globInterruptibleAndReturnNumGlobTasksForTesting()
throws IOException, InterruptedException {
return globInternalAndReturnNumGlobTasksForTesting(base, patterns, excludeDirectories,
pathFilter, true, syscalls.get(), threadPool);
}
/**
* Executes the glob asynchronously. {@link #setThreadPool} must have been called already with a
* non-null argument.
*
* @param checkForInterrupt if the returned future may throw InterruptedException.
*/
public Future<List<Path>> globAsync(boolean checkForInterrupt) {
return globAsyncInternal(
base,
patterns,
excludeDirectories,
pathFilter,
syscalls.get(),
checkForInterrupt,
threadPool);
}
}
  /**
   * Adapts the result of the glob visitation as a Future.
   */
  private static class GlobFuture extends ForwardingListenableFuture<List<Path>> {
    private final GlobVisitor visitor;
    // Whether get() may surface InterruptedException; otherwise it blocks uninterruptibly.
    private final boolean checkForInterrupt;
    private final SettableFuture<List<Path>> delegate = SettableFuture.create();
    public GlobFuture(GlobVisitor visitor, boolean interruptible) {
      this.visitor = visitor;
      this.checkForInterrupt = interruptible;
    }
    @Override
    public List<Path> get() throws InterruptedException, ExecutionException {
      return checkForInterrupt ? super.get() : Uninterruptibles.getUninterruptibly(delegate());
    }
    @Override
    protected ListenableFuture<List<Path>> delegate() {
      return delegate;
    }
    public void setException(IOException exception) {
      delegate.setException(exception);
    }
    public void set(List<Path> paths) {
      delegate.set(paths);
    }
    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
      // Best-effort interrupt of the in-flight visitation.
      // NOTE(review): always reports true; the delegate future is only marked
      // cancelled later, via markCanceled() from the visitor's completion path.
      visitor.cancel();
      return true;
    }
    public void markCanceled() {
      super.cancel(true);
    }
  }
  /**
   * GlobVisitor executes a glob using parallelism, which is useful when
   * the glob() requires many readdir() calls on high latency filesystems.
   */
  private static final class GlobVisitor {
    // These collections are used across workers and must therefore be thread-safe.
    private final Collection<Path> results = Sets.newConcurrentHashSet();
    // Per-visitation cache of compiled wildcard regexes.
    private final Cache<String, Pattern> cache = CacheBuilder.newBuilder().build(
        new CacheLoader<String, Pattern>() {
          @Override
          public Pattern load(String wildcard) {
            return makePatternFromWildcard(wildcard);
          }
        });
    private final GlobFuture result;
    // Null means evaluation happens on the calling thread.
    private final ThreadPoolExecutor executor;
    // totalOps counts every task ever enqueued; pendingOps counts tasks not yet finished.
    private final AtomicLong totalOps = new AtomicLong(0);
    private final AtomicLong pendingOps = new AtomicLong(0);
    // First IOException seen by any worker; short-circuits remaining work.
    private final AtomicReference<IOException> failure = new AtomicReference<>();
    private volatile boolean canceled = false;
    GlobVisitor(
        ThreadPoolExecutor executor,
        boolean failFastOnInterrupt) {
      this.executor = executor;
      this.result = new GlobFuture(this, failFastOnInterrupt);
    }
    GlobVisitor(boolean failFastOnInterrupt) {
      this(null, failFastOnInterrupt);
    }
    /**
     * Performs wildcard globbing: returns the list of filenames that match any of
     * {@code patterns} relative to {@code base}. Directories are traversed if and only if they
     * match {@code dirPred}. The predicate is also called for the root of the traversal. The order
     * of the returned list is unspecified.
     *
     * <p>Patterns may include "*" and "?", but not "[a-z]".
     *
     * <p><code>**</code> gets special treatment in include patterns. If it is
     * used as a complete path segment it matches the filenames in
     * subdirectories recursively.
     *
     * @throws IllegalArgumentException if any glob pattern
     *         {@linkplain #checkPatternForError(String) contains errors} or if any include pattern
     *         segment contains <code>**</code> but not equal to it.
     */
    public List<Path> glob(Path base, Collection<String> patterns,
        boolean excludeDirectories, Predicate<Path> dirPred,
        FilesystemCalls syscalls)
        throws IOException, InterruptedException {
      try {
        return globAsync(base, patterns, excludeDirectories, dirPred, syscalls).get();
      } catch (ExecutionException e) {
        // Unwrap IOExceptions thrown by workers; anything else is unexpected.
        Throwable cause = e.getCause();
        Throwables.propagateIfPossible(cause, IOException.class);
        throw new RuntimeException(e);
      }
    }
    private static boolean isRecursivePattern(String pattern) {
      return "**".equals(pattern);
    }
    /**
     * Same as {@link #glob}, except does so asynchronously and returns a {@link Future} for the
     * result.
     */
    Future<List<Path>> globAsync(
        Path base,
        Collection<String> patterns,
        boolean excludeDirectories,
        Predicate<Path> dirPred,
        FilesystemCalls syscalls) {
      FileStatus baseStat;
      try {
        baseStat = syscalls.statIfFound(base, Symlinks.FOLLOW);
      } catch (IOException e) {
        return Futures.immediateFailedFuture(e);
      }
      // A missing root or no patterns trivially yields an empty result.
      if (baseStat == null || patterns.isEmpty()) {
        return Futures.immediateFuture(Collections.<Path>emptyList());
      }
      List<String[]> splitPatterns = checkAndSplitPatterns(patterns);
      // We do a dumb loop, even though it will likely duplicate logical work (note that the
      // physical filesystem operations are cached). In order to optimize, we would need to keep
      // track of which patterns shared sub-patterns and which did not (for example consider the
      // glob [*/*.java, sub/*.java, */*.txt]).
      pendingOps.incrementAndGet();
      try {
        for (String[] splitPattern : splitPatterns) {
          boolean containsRecursivePattern = false;
          for (String pattern : splitPattern) {
            if (isRecursivePattern(pattern)) {
              containsRecursivePattern = true;
              break;
            }
          }
          // Recursive patterns get a deduplicating context; see RecursiveGlobTaskContext.
          GlobTaskContext context = containsRecursivePattern
              ? new RecursiveGlobTaskContext(splitPattern, excludeDirectories, dirPred, syscalls)
              : new GlobTaskContext(splitPattern, excludeDirectories, dirPred, syscalls);
          context.queueGlob(base, baseStat.isDirectory(), 0);
        }
      } finally {
        decrementAndCheckDone();
      }
      return result;
    }
    /** Should only be called by {@link GlobTaskContext}. */
    private void queueGlob(final Path base, final boolean baseIsDir, final int idx,
        final GlobTaskContext context) {
      enqueue(new Runnable() {
        @Override
        public void run() {
          Profiler.instance().startTask(ProfilerTask.VFS_GLOB, this);
          try {
            reallyGlob(base, baseIsDir, idx, context);
          } catch (IOException e) {
            failure.set(e);
          } finally {
            Profiler.instance().completeTask(ProfilerTask.VFS_GLOB);
          }
        }
        @Override
        public String toString() {
          return String.format(
              "%s glob(include=[%s], exclude_directories=%s)",
              base.getPathString(),
              "\"" + Joiner.on("\", \"").join(context.patternParts) + "\"",
              context.excludeDirectories);
        }
      });
    }
    // Wraps the task with cancellation/failure short-circuiting and op accounting,
    // then runs it inline or on the executor.
    protected void enqueue(final Runnable r) {
      totalOps.incrementAndGet();
      pendingOps.incrementAndGet();
      Runnable wrapped =
          () -> {
            try {
              if (!canceled && failure.get() == null) {
                r.run();
              }
            } finally {
              decrementAndCheckDone();
            }
          };
      if (executor == null) {
        wrapped.run();
      } else {
        executor.execute(wrapped);
      }
    }
    private long getNumGlobTasksForTesting() {
      return totalOps.get();
    }
    protected void cancel() {
      this.canceled = true;
    }
    private void decrementAndCheckDone() {
      if (pendingOps.decrementAndGet() == 0) {
        // We get to 0 iff we are done all the relevant work. This is because we always increment
        // the pending ops count as we're enqueuing, and don't decrement until the task is complete
        // (which includes accounting for any additional tasks that one enqueues).
        if (canceled) {
          result.markCanceled();
        } else if (failure.get() != null) {
          result.setException(failure.get());
        } else {
          result.set(ImmutableList.copyOf(results));
        }
      }
    }
    /** A context for evaluating all the subtasks of a single top-level glob task. */
    private class GlobTaskContext {
      private final String[] patternParts;
      private final boolean excludeDirectories;
      private final Predicate<Path> dirPred;
      private final FilesystemCalls syscalls;
      GlobTaskContext(
          String[] patternParts,
          boolean excludeDirectories,
          Predicate<Path> dirPred,
          FilesystemCalls syscalls) {
        this.patternParts = patternParts;
        this.excludeDirectories = excludeDirectories;
        this.dirPred = dirPred;
        this.syscalls = syscalls;
      }
      protected void queueGlob(Path base, boolean baseIsDir, int patternIdx) {
        GlobVisitor.this.queueGlob(base, baseIsDir, patternIdx, this);
      }
    }
    /**
     * A special implementation of {@link GlobTaskContext} that dedupes glob subtasks. Our naive
     * implementation of recursive patterns means there are multiple ways to enqueue the same
     * logical subtask.
     */
    private class RecursiveGlobTaskContext extends GlobTaskContext {
      // Identity of a subtask: the directory plus the pattern segment index.
      private class GlobTask {
        private final Path base;
        private final int patternIdx;
        private GlobTask(Path base, int patternIdx) {
          this.base = base;
          this.patternIdx = patternIdx;
        }
        @Override
        public boolean equals(Object obj) {
          if (!(obj instanceof GlobTask)) {
            return false;
          }
          GlobTask other = (GlobTask) obj;
          return base.equals(other.base) && patternIdx == other.patternIdx;
        }
        @Override
        public int hashCode() {
          return Objects.hash(base, patternIdx);
        }
      }
      private final Set<GlobTask> visitedGlobSubTasks = Sets.newConcurrentHashSet();
      private RecursiveGlobTaskContext(
          String[] patternParts,
          boolean excludeDirectories,
          Predicate<Path> dirPred,
          FilesystemCalls syscalls) {
        super(patternParts, excludeDirectories, dirPred, syscalls);
      }
      @Override
      protected void queueGlob(Path base, boolean baseIsDir, int patternIdx) {
        if (visitedGlobSubTasks.add(new GlobTask(base, patternIdx))) {
          // This is a unique glob task. For example of how duplicates can arise, consider:
          //   glob(['**/foo.txt'])
          // with the only file being
          //   a/foo.txt
          //
          // there are two ways to reach a/foo.txt: one by recursively globbing 'foo.txt' in the
          // subdirectory 'a', and another other by recursively globbing '**/foo.txt' in the
          // subdirectory 'a'.
          super.queueGlob(base, baseIsDir, patternIdx);
        }
      }
    }
    /**
     * Expressed in Haskell:
     * <pre>
     *   reallyGlob base []     = { base }
     *   reallyGlob base [x:xs] = union { reallyGlob(f, xs) | f results "base/x" }
     * </pre>
     */
    private void reallyGlob(
        Path base,
        boolean baseIsDir,
        int idx,
        GlobTaskContext context) throws IOException {
      if (baseIsDir && !context.dirPred.apply(base)) {
        return;
      }
      if (idx == context.patternParts.length) { // Base case.
        if (!(context.excludeDirectories && baseIsDir)) {
          results.add(base);
        }
        return;
      }
      if (!baseIsDir) {
        // Nothing to find here.
        return;
      }
      final String pattern = context.patternParts[idx];
      // ** is special: it can match nothing at all.
      // For example, x/** matches x, **/y matches y, and x/**/y matches x/y.
      final boolean isRecursivePattern = isRecursivePattern(pattern);
      if (isRecursivePattern) {
        context.queueGlob(base, baseIsDir, idx + 1);
      }
      if (!pattern.contains("*") && !pattern.contains("?")) {
        // We do not need to do a readdir in this case, just a stat.
        Path child = base.getChild(pattern);
        FileStatus status = context.syscalls.statIfFound(child, Symlinks.FOLLOW);
        if (status == null || (!status.isDirectory() && !status.isFile())) {
          // The file is a dangling symlink, fifo, does not exist, etc.
          return;
        }
        boolean childIsDir = status.isDirectory();
        context.queueGlob(child, childIsDir, idx + 1);
        return;
      }
      Collection<Dirent> dents = context.syscalls.readdir(base, Symlinks.FOLLOW);
      for (Dirent dent : dents) {
        Dirent.Type type = dent.getType();
        if (type == Dirent.Type.UNKNOWN) {
          // The file is a dangling symlink, fifo, etc.
          continue;
        }
        boolean childIsDir = (type == Dirent.Type.DIRECTORY);
        String text = dent.getName();
        Path child = base.getChild(text);
        if (isRecursivePattern) {
          // Recurse without shifting the pattern.
          if (childIsDir) {
            context.queueGlob(child, childIsDir, idx);
          }
        }
        if (matches(pattern, text, cache)) {
          // Recurse and consume one segment of the pattern.
          if (childIsDir) {
            context.queueGlob(child, childIsDir, idx + 1);
          } else {
            // Instead of using an async call, just repeat the base case above.
            if (idx + 1 == context.patternParts.length) {
              results.add(child);
            }
          }
        }
      }
    }
  }
}
| apache-2.0 |
goodwinnk/intellij-community | java/java-tests/testSrc/com/intellij/java/codeInspection/UnusedReturnValueLocalTest.java | 2076 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInspection;
import com.intellij.JavaTestUtil;
import com.intellij.codeInspection.unusedReturnValue.UnusedReturnValue;
import com.intellij.codeInspection.unusedReturnValue.UnusedReturnValueLocalInspection;
import com.intellij.psi.PsiModifier;
import com.intellij.testFramework.InspectionTestCase;
// Runs the local UnusedReturnValue inspection against fixture data named after each test method.
public class UnusedReturnValueLocalTest extends InspectionTestCase {
  private UnusedReturnValue myGlobal = new UnusedReturnValue();
  private UnusedReturnValueLocalInspection myTool = new UnusedReturnValueLocalInspection(myGlobal);
  @Override
  protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath() + "/inspection";
  }
  // Test data directory is derived from the (lower-cased) test method name.
  private void doTest() {
    doTest("unusedReturnValue/" + getTestName(true), myTool);
  }
  @Override
  protected void tearDown() throws Exception {
    myGlobal = null;
    myTool = null;
    super.tearDown();
  }
  public void testNonLiteral() {
    doTest();
  }
  public void testHierarchy() {
    doTest();
  }
  public void testMethodReference() {
    doTest();
  }
  // Temporarily enables the builder-pattern option; restored in finally so
  // later tests see the default settings.
  public void testSimpleSetter() {
    try {
      myGlobal.IGNORE_BUILDER_PATTERN = true;
      doTest();
    }
    finally {
      myGlobal.IGNORE_BUILDER_PATTERN = false;
    }
  }
  // Same pattern for the visibility threshold option.
  public void testVisibilitySetting() {
    try {
      myGlobal.highestModifier = PsiModifier.PRIVATE;
      doTest();
    }
    finally {
      myGlobal.highestModifier = UnusedReturnValue.DEFAULT_HIGHEST_MODIFIER;
    }
  }
}
| apache-2.0 |
spinnaker/clouddriver | clouddriver-titus/src/main/java/com/netflix/spinnaker/clouddriver/titus/client/model/HealthStatus.java | 712 | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.clouddriver.titus.client.model;
/** Binary health state reported for a Titus task. */
public enum HealthStatus {
  HEALTHY,
  UNHEALTHY
}
| apache-2.0 |
ursgraf/compiler | src/ch/ntb/inf/deep/host/ICclassFileConstsAndMnemonics.java | 2563 | /*
* Copyright 2011 - 2013 NTB University of Applied Sciences in Technology
* Buchs, Switzerland, http://www.ntb.ch/inf
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ch.ntb.inf.deep.host;
import ch.ntb.inf.deep.classItems.ICclassFileConsts;
// Debug-mnemonic tables for the class-file constants declared in ICclassFileConsts.
// The array indices mirror the corresponding tag/flag bit positions, so the order
// and contents of these arrays must not be changed independently of that interface.
public interface ICclassFileConstsAndMnemonics extends ICclassFileConsts{
  //--- debug strings for: constant pool tags (cpt)
  String[] cptIdents = {
    "ExtSlot",
    "Utf8",
    "?",
    "Integer", "Float", "Long", "Double",
    "Class", "String", "FieldRef", "MethRef",
    "IntfMethRef", "NameAndType"
  };
  //--- debug strings for: access and property flags (apf) for class, field, method
  String[] apfIdents = {
    "public", // 0
    "private", // 1
    "protected", // 2
    "static", // 3
    "final", // 4
    "|Csuper|Msynch", // 5
    "|Mbridge|Fvolatile", // 6
    "|Mvarargs|Ftransient", // 7
    "native", // 8
    "interface", // 9
    "abstract", // 10
    "strict", // 11
    "synthetic|enumArray", // 12
    "annotation", // 13
    "enum", // 14
    //---- deep properties
    "classLoaded", // 15
    "rootClass", // 16
    "declaration", // 17
    "instances", // 18
    "typeTest", // 19
    "|Cmarked|Fconst", // 20
    "|Cextended|FreadAccess", // 21
    "writeAccess", // 22
    "command", //23
    "call", //24
    "excHndCall", //25
    "excHnd", //26
    "interfaceCall", //27
    "new", //28
    "unsafe", //29
    "sysPrimitive", //30
    "synthetic", //31
  };
  // Class-file attribute names, as they appear in the JVM specification.
  String[] attributes = {// class file attributes:
    "ConstantValue", // 4.7.2, p119, items: fields
    "Deprecated", // 4.7.10, p132, items: fields, methods
    "Synthetic", // 4.7.6, p127, items: fields, methods
    "SourceFile", // 4.7.7, p128
    "Code", // 4.7.3, p120, items: methods
    "LocalVariableTable", // 4.7.9, p130
    "LineNumberTable", // 4.7.8, p129
    "Exceptions", // 4.7.4, p123
    "InnerClasses", // 4.7.5, p125
  };
}
| apache-2.0 |
fischmax/rest-schemagen | src/test/java/com/mercateo/common/rest/schemagen/parameter/ParameterTest.java | 3722 | package com.mercateo.common.rest.schemagen.parameter;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.Collections;
import org.junit.Before;
import org.junit.Test;
public class ParameterTest {
    // Fresh builder fixture, re-created before every test.
    private Parameter.Builder<TestClass> builder;
    @Before
    public void setUp() {
        builder = Parameter.builderFor(TestClass.class);
    }
    // A builder with no allowed or default values reports itself as empty.
    @Test
    public void testIsEmptyWithoutArg() {
        assertThat(builder.isEmpty()).isTrue();
    }
    // Adding a single allowed value makes the builder non-empty.
    @Test
    public void testIsEmptyWithSingleAllowedArg() {
        builder.allowValues(new TestClass());
        assertThat(builder.isEmpty()).isFalse();
    }
    // Setting a default value also makes the builder non-empty.
    @Test
    public void testIsEmptyWithDefaultArg() {
        builder.defaultValue(new TestClass());
        assertThat(builder.isEmpty()).isFalse();
    }
    // allowValues with a mapper converts each raw value; the built parameter
    // then yields the mapped object.
    @Test
    public void testAllowValuesWithMapper() {
        TestClass testClass = new TestClass();
        final Parameter<TestClass> parameter = builder.allowValues(Collections.singletonList("foo"),
                s -> {
                    assertThat(s).isEqualTo("foo");
                    return testClass;
                }).build();
        assertThat(parameter.get()).isSameAs(testClass);
    }
@Test
public void testBuildThrowsWithNoArg() {
assertThatThrownBy(builder::build).isInstanceOf(IllegalStateException.class)
.hasMessageContaining(
"parameter should have at least one allowed or default value set");
}
@Test
public void testBuildThrowsWithTwoIdenticalParameters() {
TestClass testClass = new TestClass();
builder.allowValues(testClass);
builder.build();
assertThatThrownBy(builder::build).isInstanceOf(IllegalStateException.class)
.hasMessageContaining("parameter values can only be used once");
}
@Test
public void testContainerHasParameter() {
TestClass testClass = new TestClass();
final Parameter<TestClass> parameter = builder.allowValues(testClass).build();
final CallContext context = parameter.context();
assertThat(context.hasParameter(testClass)).isTrue();
assertThat(context.hasParameter(new TestClass())).isFalse();
assertThat(context.getParameter(testClass)).isSameAs(parameter);
assertThat(context.isEmpty()).isFalse();
}
@Test
public void testBuilderIsEmptyIfAtLeastOneAllowValuesCallHasNoArguments() {
TestClass testClass = new TestClass();
builder.allowValues(testClass).allowValues();
assertThat(builder.isEmpty()).isTrue();
}
@Test
public void testParameterWithAllowedValuesOnly() {
TestClass testClass = new TestClass();
final Parameter<TestClass> parameter = builder.allowValues(testClass).build();
assertThat(parameter.hasAllowedValues()).isTrue();
assertThat(parameter.getAllowedValues()).containsExactly(testClass);
assertThat(parameter.hasDefaultValue()).isFalse();
}
@Test
public void testParameterWithDefaultValuesOnly() {
TestClass testClass = new TestClass();
final Parameter<TestClass> parameter = builder.defaultValue(testClass).build();
assertThat(parameter.hasDefaultValue()).isTrue();
assertThat(parameter.getDefaultValue()).isSameAs(testClass);
assertThat(parameter.hasAllowedValues()).isFalse();
}
@Test
public void testCreateContext() {
final CallContext context = Parameter.createContext();
assertThat(context).isNotNull();
assertThat(context.isEmpty()).isTrue();
}
static class TestClass {
}
} | apache-2.0 |
zwets/flowable-engine | modules/flowable-ui-task/flowable-ui-task-conf/src/main/java/org/flowable/app/servlet/DmnDispatcherServletConfiguration.java | 1159 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.app.servlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
/**
 * Spring dispatcher-servlet configuration for the DMN REST API.
 *
 * <p>Component-scans the {@code org.flowable.rest.dmn.exception} and
 * {@code org.flowable.rest.dmn.service.api} packages and enables Spring's
 * asynchronous method execution ({@code @EnableAsync}). All other servlet
 * wiring is inherited from {@link BaseDispatcherServletConfiguration}
 * (defined elsewhere — presumably the shared MVC setup; confirm there).
 */
@Configuration
@ComponentScan({"org.flowable.rest.dmn.exception", "org.flowable.rest.dmn.service.api"})
@EnableAsync
public class DmnDispatcherServletConfiguration extends BaseDispatcherServletConfiguration {

    // Protected so subclasses can reuse this logger; currently unused here.
    protected static final Logger LOGGER = LoggerFactory.getLogger(DmnDispatcherServletConfiguration.class);
}
| apache-2.0 |
stoksey69/googleads-java-lib | examples/dfp_axis/src/main/java/dfp/axis/v201408/producttemplateservice/CreateProductTemplates.java | 7139 | // Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dfp.axis.v201408.producttemplateservice;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.dfp.axis.factory.DfpServices;
import com.google.api.ads.dfp.axis.v201408.AdUnitTargeting;
import com.google.api.ads.dfp.axis.v201408.Browser;
import com.google.api.ads.dfp.axis.v201408.BrowserTargeting;
import com.google.api.ads.dfp.axis.v201408.CreativePlaceholder;
import com.google.api.ads.dfp.axis.v201408.GeoTargeting;
import com.google.api.ads.dfp.axis.v201408.LineItemType;
import com.google.api.ads.dfp.axis.v201408.Location;
import com.google.api.ads.dfp.axis.v201408.NetworkServiceInterface;
import com.google.api.ads.dfp.axis.v201408.ProductSegmentation;
import com.google.api.ads.dfp.axis.v201408.ProductTemplate;
import com.google.api.ads.dfp.axis.v201408.ProductTemplateServiceInterface;
import com.google.api.ads.dfp.axis.v201408.ProductTemplateTargeting;
import com.google.api.ads.dfp.axis.v201408.ProductType;
import com.google.api.ads.dfp.axis.v201408.RateType;
import com.google.api.ads.dfp.axis.v201408.RoadblockingType;
import com.google.api.ads.dfp.axis.v201408.Size;
import com.google.api.ads.dfp.lib.client.DfpSession;
import com.google.api.client.auth.oauth2.Credential;
import java.util.Random;
/**
* This example creates a product template.
*
* Credentials and properties in {@code fromFile()} are pulled from the
* "ads.properties" file. See README for more info.
*
* Tags: ProductTemplateService.createProductTemplates
* Tags: NetworkService.getCurrentNetwork
*
* @author Nicholas Chen
*/
public class CreateProductTemplates {

  /**
   * Builds a product template (CPM-rated, standard line items, targeting the
   * whole network with US/Hong-Kong geo segmentation and Chrome browser
   * targeting) and creates it on the server, printing the resulting IDs.
   *
   * @param dfpServices the services factory used to obtain API service stubs
   * @param session the authenticated DFP session used for the API calls
   * @throws Exception if any remote API call fails
   */
  public static void runExample(DfpServices dfpServices, DfpSession session) throws Exception {
    // Get the ProductTemplateService.
    ProductTemplateServiceInterface productTemplateService =
        dfpServices.get(session, ProductTemplateServiceInterface.class);

    // Get the NetworkService.
    NetworkServiceInterface networkService =
        dfpServices.get(session, NetworkServiceInterface.class);

    // Create a product template. The random suffix keeps names unique across runs.
    ProductTemplate productTemplate = new ProductTemplate();
    productTemplate.setName("Product template #" + new Random().nextInt(Integer.MAX_VALUE));
    productTemplate.setDescription("This product template creates standard proposal line items "
        + "targeting Chrome browsers with product segmentation on ad units and geo targeting.");

    // Set the name macro which will be used to generate the names of the products.
    // This will create a segmentation based on the line item type, ad unit, and location.
    productTemplate.setNameMacro("<line-item-type> - <ad-unit> - <template-name> - <location>");

    // Set the product type so the created proposal line items will be trafficked in DFP.
    productTemplate.setProductType(ProductType.DFP);

    // Set rate type to create CPM priced proposal line items.
    productTemplate.setRateType(RateType.CPM);

    // Optionally set the creative rotation of the product to serve one or more creatives.
    productTemplate.setRoadblockingType(RoadblockingType.ONE_OR_MORE);

    // Create the master creative placeholder (728x90 leaderboard).
    CreativePlaceholder creativeMasterPlaceholder = new CreativePlaceholder();
    creativeMasterPlaceholder.setSize(new Size(728, 90, false));

    // Create companion creative placeholders (300x250 medium rectangle).
    CreativePlaceholder companionCreativePlaceholder = new CreativePlaceholder();
    companionCreativePlaceholder.setSize(new Size(300, 250, false));

    // Set the size of creatives that can be associated with the product template.
    productTemplate.setCreativePlaceholders(
        new CreativePlaceholder[] {creativeMasterPlaceholder, companionCreativePlaceholder});

    // Set the type of proposal line item to be created from the product template.
    productTemplate.setLineItemType(LineItemType.STANDARD);

    // Get the root ad unit ID used to target the whole site.
    String rootAdUnitId = networkService.getCurrentNetwork().getEffectiveRootAdUnitId();

    // Create ad unit targeting for the root ad unit (i.e. the whole network).
    AdUnitTargeting adUnitTargeting = new AdUnitTargeting();
    adUnitTargeting.setAdUnitId(rootAdUnitId);
    adUnitTargeting.setIncludeDescendants(true);

    // Create geo targeting for the US (location id 2840).
    Location countryLocation = new Location();
    countryLocation.setId(2840L);

    // Create geo targeting for Hong Kong (location id 2344).
    Location regionLocation = new Location();
    regionLocation.setId(2344L);

    GeoTargeting geoTargeting = new GeoTargeting();
    geoTargeting.setTargetedLocations(new Location[] {countryLocation, regionLocation});

    // Add browser targeting to Chrome on the product template distinct from product segmentation.
    Browser chromeBrowser = new Browser();
    chromeBrowser.setId(500072L);
    BrowserTargeting browserTargeting = new BrowserTargeting();
    browserTargeting.setBrowsers(new Browser[] {chromeBrowser});
    ProductTemplateTargeting productTemplateTargeting = new ProductTemplateTargeting();
    productTemplateTargeting.setBrowserTargeting(browserTargeting);
    productTemplate.setTargeting(productTemplateTargeting);

    // Add inventory and geo targeting as product segmentation.
    ProductSegmentation productSegmentation = new ProductSegmentation();
    productSegmentation.setAdUnitSegments(new AdUnitTargeting[] {adUnitTargeting});
    productSegmentation.setGeoSegment(geoTargeting);
    productTemplate.setProductSegmentation(productSegmentation);

    // Create the product template on the server.
    ProductTemplate[] productTemplates =
        productTemplateService.createProductTemplates(new ProductTemplate[] {productTemplate});

    for (ProductTemplate createdProductTemplate : productTemplates) {
      System.out.printf("A product template with ID \"%d\" and name \"%s\" was created.%n",
          createdProductTemplate.getId(), createdProductTemplate.getName());
    }
  }

  /**
   * Entry point: builds an OAuth2 credential and a DFP session from the
   * "ads.properties" file, then runs the example.
   *
   * @throws Exception if credential generation or the API calls fail
   */
  public static void main(String[] args) throws Exception {
    // Generate a refreshable OAuth2 credential.
    Credential oAuth2Credential = new OfflineCredentials.Builder()
        .forApi(Api.DFP)
        .fromFile()
        .build()
        .generateCredential();

    // Construct a DfpSession.
    DfpSession session = new DfpSession.Builder()
        .fromFile()
        .withOAuth2Credential(oAuth2Credential)
        .build();

    DfpServices dfpServices = new DfpServices();

    runExample(dfpServices, session);
  }
}
| apache-2.0 |
vincent-zurczak/roboconf-platform | core/roboconf-core/src/test/java/net/roboconf/core/internal/tests/TestUtils.java | 9643 | /**
* Copyright 2014-2017 Linagora, Université Joseph Fourier, Floralis
*
* The present code is developed in the scope of the joint LINAGORA -
* Université Joseph Fourier - Floralis research program and is designated
* as a "Result" pursuant to the terms and conditions of the LINAGORA
* - Université Joseph Fourier - Floralis research program. Each copyright
* holder of Results enumerated here above fully & independently holds complete
* ownership of the complete Intellectual Property rights applicable to the whole
* of said Results, and may freely exploit it in any manner which does not infringe
* the moral rights of the other copyright holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.roboconf.core.internal.tests;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Handler;
import java.util.logging.LogRecord;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipOutputStream;
import org.junit.Assert;
import net.roboconf.core.dsl.ParsingModelIoTest;
import net.roboconf.core.utils.Utils;
/**
* @author Vincent Zurczak - Linagora
*/
public class TestUtils {

	/**
	 * @return true if the current OS is part of the Linux systems
	 */
	public static boolean isUnix() {
		String os = System.getProperty("os.name").toLowerCase();
		return os.contains( "nix" ) || os.contains( "nux" ) || os.contains( "aix" ) || os.contains( "freebsd" );
	}

	/**
	 * @return true if the current OS is part of the Windows systems
	 */
	public static boolean isWindows() {
		String os = System.getProperty("os.name").toLowerCase();
		return os.contains( "win" );
	}

	/**
	 * Finds test files.
	 * @param dirName must start with '/'
	 * @return a non-null list
	 * @throws IOException if the directory could not be resolved or listed
	 * @throws URISyntaxException if the resource URL is not a valid URI
	 */
	public static List<File> findTestFiles( String dirName ) throws IOException, URISyntaxException {

		// Resolve the directory through the class path of the test classes.
		URL url = ParsingModelIoTest.class.getResource( dirName );
		File dir = new File( url.toURI());
		if( ! dir.exists())
			throw new IOException( "Could not resolve the resource directory." );

		File[] resources = dir.listFiles();
		if( resources == null )
			throw new IOException( "Could not list the resource files." );

		return Arrays.asList( resources );
	}

	/**
	 * Finds a test file.
	 * @param fileName must start with '/'
	 * @return an existing file (never null)
	 * @throws IOException if the file could not be found
	 * @throws URISyntaxException if the resource URL is not a valid URI
	 */
	public static File findTestFile( String fileName ) throws IOException, URISyntaxException {
		return findTestFile( fileName, ParsingModelIoTest.class );
	}

	/**
	 * Finds the location of an application directory located in the "roboconf-core" module.
	 * @param appName the application's name (not null)
	 * @return a non-null file (that may not exist)
	 * @throws IOException if the canonical path could not be resolved
	 */
	public static File findApplicationDirectory( String appName ) throws IOException {

		// This method must support test execution from Maven and IDE (e.g. Eclipse).
		String suffix = "core/roboconf-core/src/test/resources/applications/" + appName;
		File result = new File( "../../" + suffix ).getCanonicalFile();

		return result;
	}

	/**
	 * Finds a test file.
	 * @param fileName must start with '/'
	 * @param clazz a class to search the class path
	 * @return an existing file (never null)
	 * @throws IOException if the file could not be found
	 * @throws URISyntaxException if the resource URL is not a valid URI
	 */
	public static File findTestFile( String fileName, Class<?> clazz )
	throws IOException, URISyntaxException {

		URL url = clazz.getResource( fileName );
		File file;

		if( url == null
				|| ! (file = new File( url.toURI())).exists())
			throw new IOException( "Could not find the resource file." );

		return file;
	}

	/**
	 * @return a non-null map associated a ZIP entry name with its text content
	 * (a null value marks a directory entry)
	 */
	public static Map<String,String> buildZipContent() {

		Map<String,String> entryToContent = new LinkedHashMap<> ();

		entryToContent.put( "readme.txt", "This is a readme file." );
		entryToContent.put( "graph/main.graph", "import facets.graph;\nimport components.graph;" );
		entryToContent.put( "graph/facets.graph", "# nothing yet" );
		entryToContent.put( "graph/components.graph", "# nothing here too" );
		entryToContent.put( "descriptor/application-descriptor.properties", "application-name = Unit Test" );
		entryToContent.put( "instances/initial-deployment.instances", "# No instance" );
		entryToContent.put( "some/very/low/folder/demo.txt", "Whatever..." );

		// Directory entries (no content).
		entryToContent.put( "graph/", null );
		entryToContent.put( "anotherdir/", null );
		entryToContent.put( "anotherdir/deeper/", null );

		return entryToContent;
	}

	/**
	 * Creates a ZIP file from the map.
	 * @param entryToContent a map (key = ZIP entry, value = entry content, null for a directory)
	 * @param targetZipFile the ZIP file to create
	 * @throws IOException if the archive could not be written
	 */
	public static void createZipFile( Map<String,String> entryToContent, File targetZipFile ) throws IOException {

		ZipOutputStream zos = null;
		try {
			zos = new ZipOutputStream( new FileOutputStream( targetZipFile ));
			for( Map.Entry<String,String> entry : entryToContent.entrySet()) {
				zos.putNextEntry( new ZipEntry( entry.getKey()));

				// Only file entries (non-null values) carry content.
				if( entry.getValue() != null ) {
					ByteArrayInputStream is = new ByteArrayInputStream( entry.getValue().getBytes( "UTF-8" ));
					try {
						Utils.copyStreamUnsafelyUseWithCaution( is, zos );

					} finally {
						Utils.closeQuietly( is );
					}
				}

				zos.closeEntry();
			}

		} finally {
			Utils.closeQuietly( zos );
		}
	}

	/**
	 * Compares an assumed ZIP file with a content described in a map.
	 * <p>
	 * The archive is extracted into a unique temporary directory, which is
	 * deleted once the comparison completes (even on failure).
	 * </p>
	 * @param zipFile the ZIP archive to inspect
	 * @param entryToContent the expected entries and contents
	 * @throws ZipException if the file is not a valid archive
	 * @throws IOException if extraction or reading failed
	 */
	public static void compareZipContent( File zipFile, Map<String,String> entryToContent ) throws IOException {

		File tempDir = new File( System.getProperty( "java.io.tmpdir" ), UUID.randomUUID().toString());
		if( ! tempDir.mkdir())
			Assert.fail( "Failed to create a temporary directory." );

		try {
			Utils.extractZipArchive( zipFile, tempDir );
			compareUnzippedContent( tempDir, entryToContent );

		} finally {
			Utils.deleteFilesRecursively( tempDir );
		}
	}

	/**
	 * Compares an assumed ZIP file with a content described in a map.
	 * @param rootDirectory the root directory of the unzipped content
	 * @param entryToContent the map associating entries and content (null for directories)
	 * @throws IOException if a file could not be read
	 */
	public static void compareUnzippedContent( File rootDirectory, Map<String,String> entryToContent ) throws IOException {

		for( Map.Entry<String,String> entry : entryToContent.entrySet()) {
			File extractedFile = new File( rootDirectory, entry.getKey());
			Assert.assertTrue( "Missing entry: " + entry.getKey(), extractedFile.exists());

			// A null value means the entry must be a directory.
			if( entry.getValue() == null ) {
				Assert.assertTrue( entry.getKey() + " was supposed to be a directory.", extractedFile.isDirectory());
				continue;
			}

			Assert.assertTrue( entry.getKey() + " was supposed to be a file.", extractedFile.isFile());
			String fileContent = Utils.readFileContent( extractedFile );
			Assert.assertEquals( entry.getValue(), fileContent );
		}
	}

	/**
	 * Gets the value of an internal field.
	 * <p>
	 * It is sometimes useful during tests to access a field that should remain
	 * private in a normal execution. This method requires permissions to access
	 * private fields.
	 * </p>
	 * <p>
	 * Super class are searched too.
	 * </p>
	 *
	 * @param o the object from which the field must be retrieved
	 * @param fieldName the field name
	 * @param clazz the class of the internal field
	 * @return the internal field's value or null if this field was not found
	 * @throws IllegalAccessException if the field could not be read
	 */
	public static <T> T getInternalField( Object o, String fieldName, Class<T> clazz )
	throws IllegalAccessException {

		Object fieldValue = null;

		// Walk up the class hierarchy until the field is found (or the
		// hierarchy is exhausted, in which case null is returned).
		for( Class<?> c = o.getClass(); c != null && fieldValue == null; c = c.getSuperclass()) {
			try {
				Field field = c.getDeclaredField( fieldName );
				field.setAccessible( true );
				fieldValue = field.get( o );

			} catch( NoSuchFieldException e ) {
				// nothing: try the super class
			}
		}

		return clazz.cast( fieldValue );
	}

	/**
	 * A log handler that writes records in a string buffer.
	 * @author Vincent Zurczak - Linagora
	 */
	public static class StringHandler extends Handler {
		private final StringBuilder sb = new StringBuilder();

		@Override
		public void close() throws SecurityException {
			// nothing
		}

		@Override
		public void flush() {
			// nothing
		}

		@Override
		public void publish( LogRecord rec ) {
			// Only the raw message is kept, one record per line.
			this.sb.append( rec.getMessage() + "\n" );
		}

		public String getLogs() {
			return this.sb.toString();
		}

		public StringBuilder getStringBuilder() {
			return this.sb;
		}
	}
}
| apache-2.0 |
akdasari/SparkCommon | src/main/java/org/sparkcommerce/common/util/WeightUnitOfMeasureType.java | 2259 | /*
* #%L
* SparkCommerce Common Libraries
* %%
* Copyright (C) 2015 Spark Commerce
* %%
*/
package org.sparkcommerce.common.util;
import org.sparkcommerce.common.SparkEnumerationType;
import java.io.Serializable;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* An extendible enumeration of units of measure types.
*
* @author Anand Dasari
*
*/
public class WeightUnitOfMeasureType implements Serializable, SparkEnumerationType {

    private static final long serialVersionUID = 1L;

    // Registry of all instances ever constructed, keyed by their type string.
    // NOTE(review): a plain LinkedHashMap mutated from the constructor is not
    // thread-safe; presumably all instances are created during class
    // initialization — confirm before creating instances at runtime.
    private static final Map<String, WeightUnitOfMeasureType> TYPES = new LinkedHashMap<String, WeightUnitOfMeasureType>();

    public static final WeightUnitOfMeasureType POUNDS = new WeightUnitOfMeasureType("POUNDS", "Pounds");
    public static final WeightUnitOfMeasureType KILOGRAMS = new WeightUnitOfMeasureType("KILOGRAMS", "Kilograms");

    /**
     * @param type the type key, e.g. "POUNDS"
     * @return the registered instance for that key, or null if none exists
     */
    public static WeightUnitOfMeasureType getInstance(final String type) {
        return TYPES.get(type);
    }

    // Machine-readable type key (also the registry key).
    private String type;
    // Human-readable label, e.g. "Pounds".
    private String friendlyType;

    public WeightUnitOfMeasureType() {
        //do nothing
    }

    public WeightUnitOfMeasureType(final String type, final String friendlyType) {
        this.friendlyType = friendlyType;
        setType(type);
    }

    public String getType() {
        return type;
    }

    public String getFriendlyType() {
        return friendlyType;
    }

    private void setType(final String type) {
        this.type = type;
        // First registration wins: a later instance reusing an existing type
        // string is created but not added to the registry.
        if (!TYPES.containsKey(type)){
            TYPES.put(type, this);
        }
    }

    @Override
    public int hashCode() {
        // Hash is derived from the type key only, matching equals below.
        final int prime = 31;
        int result = 1;
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        // Uses isAssignableFrom rather than a strict class comparison,
        // presumably to tolerate subclasses/proxies; note this makes equals
        // asymmetric between a type and its subclass — TODO confirm intended.
        if (!getClass().isAssignableFrom(obj.getClass()))
            return false;
        WeightUnitOfMeasureType other = (WeightUnitOfMeasureType) obj;
        if (type == null) {
            if (other.type != null)
                return false;
        } else if (!type.equals(other.type))
            return false;
        return true;
    }
}
| apache-2.0 |
jayantgolhar/Hadoop-0.21.0 | common/src/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java | 4694 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress.zlib;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.fs.CommonConfigurationKeys;
/**
* A collection of factories to create the right
* zlib/gzip compressor/decompressor instances.
*
*/
/**
 * A collection of factories that select between the native zlib/gzip
 * compressor/decompressor implementations and the pure-Java built-in
 * ones, depending on whether the native library could be loaded and on
 * the job configuration.
 */
public class ZlibFactory {
  private static final Log LOG =
    LogFactory.getLog(ZlibFactory.class);

  private static boolean nativeZlibLoaded = false;

  static {
    // Probe the native library exactly once, at class-load time.
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      nativeZlibLoaded = ZlibCompressor.isNativeZlibLoaded() &&
                         ZlibDecompressor.isNativeZlibLoaded();
      if (nativeZlibLoaded) {
        LOG.info("Successfully loaded & initialized native-zlib library");
      } else {
        LOG.warn("Failed to load/initialize native-zlib library");
      }
    }
  }

  /**
   * Check if native-zlib code is loaded & initialized correctly and
   * can be loaded for this job.
   *
   * @param conf configuration
   * @return <code>true</code> if native-zlib is loaded & initialized
   *         and can be loaded for this job, else <code>false</code>
   */
  public static boolean isNativeZlibLoaded(Configuration conf) {
    // Short-circuit: only consult the configuration when the native
    // library actually loaded.
    return nativeZlibLoaded
        && conf.getBoolean(
               CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
               CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
  }

  /**
   * Return the appropriate type of the zlib compressor.
   *
   * @param conf configuration
   * @return the appropriate type of the zlib compressor.
   */
  public static Class<? extends Compressor>
  getZlibCompressorType(Configuration conf) {
    if (isNativeZlibLoaded(conf)) {
      return ZlibCompressor.class;
    }
    return BuiltInZlibDeflater.class;
  }

  /**
   * Return the appropriate implementation of the zlib compressor.
   *
   * @param conf configuration
   * @return the appropriate implementation of the zlib compressor.
   */
  public static Compressor getZlibCompressor(Configuration conf) {
    if (isNativeZlibLoaded(conf)) {
      return new ZlibCompressor();
    }
    return new BuiltInZlibDeflater();
  }

  /**
   * Return the appropriate type of the zlib decompressor.
   *
   * @param conf configuration
   * @return the appropriate type of the zlib decompressor.
   */
  public static Class<? extends Decompressor>
  getZlibDecompressorType(Configuration conf) {
    if (isNativeZlibLoaded(conf)) {
      return ZlibDecompressor.class;
    }
    return BuiltInZlibInflater.class;
  }

  /**
   * Return the appropriate implementation of the zlib decompressor.
   *
   * @param conf configuration
   * @return the appropriate implementation of the zlib decompressor.
   */
  public static Decompressor getZlibDecompressor(Configuration conf) {
    if (isNativeZlibLoaded(conf)) {
      return new ZlibDecompressor();
    }
    return new BuiltInZlibInflater();
  }

  /** Store the compression strategy in the given configuration. */
  public static void setCompressionStrategy(Configuration conf,
                                            CompressionStrategy strategy) {
    conf.setEnum("zlib.compress.strategy", strategy);
  }

  /** Read the compression strategy from the given configuration. */
  public static CompressionStrategy getCompressionStrategy(Configuration conf) {
    return conf.getEnum("zlib.compress.strategy",
                        CompressionStrategy.DEFAULT_STRATEGY);
  }

  /** Store the compression level in the given configuration. */
  public static void setCompressionLevel(Configuration conf,
                                         CompressionLevel level) {
    conf.setEnum("zlib.compress.level", level);
  }

  /** Read the compression level from the given configuration. */
  public static CompressionLevel getCompressionLevel(Configuration conf) {
    return conf.getEnum("zlib.compress.level",
                        CompressionLevel.DEFAULT_COMPRESSION);
  }
}
| apache-2.0 |
kentonl/EasySRL | src/edu/uw/easysrl/syntax/model/feature/FeatureSet.java | 2652 | package edu.uw.easysrl.syntax.model.feature;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.google.common.collect.ImmutableSet;
import edu.uw.easysrl.syntax.model.feature.Feature.BinaryFeature;
import edu.uw.easysrl.syntax.model.feature.Feature.RootCategoryFeature;
import edu.uw.easysrl.syntax.model.feature.Feature.UnaryRuleFeature;
public class FeatureSet implements Serializable {
/**
*
*/
private static final long serialVersionUID = 4112311242618770896L;
public FeatureSet(final DenseLexicalFeature lexicalCategoryFeatures,
final Collection<BilexicalFeature> dependencyFeatures,
final Collection<ArgumentSlotFeature> argumentSlotFeatures,
final Collection<UnaryRuleFeature> unaryRuleFeatures,
final Collection<PrepositionFeature> prepositionFeatures,
final Collection<RootCategoryFeature> rootFeatures, final Collection<BinaryFeature> binaryFeatures) {
super();
this.lexicalCategoryFeatures = lexicalCategoryFeatures;
this.dependencyFeatures = ImmutableSet.copyOf(dependencyFeatures);
this.argumentSlotFeatures = ImmutableSet.copyOf(argumentSlotFeatures);
this.unaryRuleFeatures = unaryRuleFeatures;
this.prepositionFeatures = ImmutableSet.copyOf(prepositionFeatures);
this.rootFeatures = rootFeatures;
this.binaryFeatures = binaryFeatures;
}
public final transient DenseLexicalFeature lexicalCategoryFeatures;
public final Collection<BilexicalFeature> dependencyFeatures;
public final Collection<ArgumentSlotFeature> argumentSlotFeatures;
public final Collection<UnaryRuleFeature> unaryRuleFeatures;
public final Collection<PrepositionFeature> prepositionFeatures;
public final Collection<RootCategoryFeature> rootFeatures;
public final Collection<BinaryFeature> binaryFeatures;
/**
* Use after de-serializing. Means the supertagger folder can change.
*
* @param supertaggerBeam
*/
public FeatureSet setSupertaggingFeature(final File model, final double supertaggerBeam) throws IOException {
return new FeatureSet(new DenseLexicalFeature(model, supertaggerBeam), dependencyFeatures,
argumentSlotFeatures, unaryRuleFeatures, prepositionFeatures, rootFeatures, binaryFeatures);
}
public Collection<Feature> getAllFeatures() {
final List<Feature> result = new ArrayList<>();
result.add(lexicalCategoryFeatures);
result.addAll(argumentSlotFeatures);
result.addAll(unaryRuleFeatures);
result.addAll(prepositionFeatures);
result.addAll(rootFeatures);
result.addAll(binaryFeatures);
result.addAll(dependencyFeatures);
return result;
}
} | apache-2.0 |
PierreLemordant/alien4cloud | alien4cloud-rest-api/src/main/java/alien4cloud/rest/deployment/DeploymentDTO.java | 734 | package alien4cloud.rest.deployment;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import alien4cloud.model.deployment.Deployment;
import alien4cloud.model.deployment.IDeploymentSource;
import alien4cloud.model.orchestrators.locations.Location;
/**
 * Deployment DTO: bundles a {@link Deployment} with information about the
 * source it was created from and summaries of the locations it relates to.
 *
 * @param <T> the type of the deployment source
 * @author igor ngouagna
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class DeploymentDTO<T extends IDeploymentSource> {
    // The deployment itself.
    private Deployment deployment;
    // The source this deployment was created from (e.g. an application).
    private T source;
    /* summaries of locations related to the deployments */
    private List<Location> locations;
}
| apache-2.0 |
sijie/bookkeeper | bookkeeper-server/src/test/java/org/apache/bookkeeper/bookie/BufferedChannelTest.java | 4943 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.bookie;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.buffer.UnpooledByteBufAllocator;
import java.io.File;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.util.Random;
import org.junit.Assert;
import org.junit.Test;
/**
* Tests for BufferedChannel.
*/
public class BufferedChannelTest {
private static Random rand = new Random();
private static final int INTERNAL_BUFFER_WRITE_CAPACITY = 65536;
private static final int INTERNAL_BUFFER_READ_CAPACITY = 512;
    // Each case below writes 30 entries of 5000 bytes each through
    // testBufferedChannel(byteBufLength, numOfWrites, unpersistedBytesBound,
    // flush, shouldForceWrite); they differ only in the unpersisted-bytes
    // bound and in whether flush()/forceWrite() is invoked afterwards.

    @Test
    public void testBufferedChannelWithNoBoundOnUnpersistedBytes() throws Exception {
        // Unbounded (0), no flush, no force-write.
        testBufferedChannel(5000, 30, 0, false, false);
    }

    @Test
    public void testBufferedChannelWithBoundOnUnpersistedBytes() throws Exception {
        // Bound of 28 entries' worth of bytes, no flush, no force-write.
        testBufferedChannel(5000, 30, 5000 * 28, false, false);
    }

    @Test
    public void testBufferedChannelWithBoundOnUnpersistedBytesAndFlush() throws Exception {
        // Same bound, with an explicit flush.
        testBufferedChannel(5000, 30, 5000 * 28, true, false);
    }

    @Test
    public void testBufferedChannelFlushNoForceWrite() throws Exception {
        testBufferedChannel(5000, 30, 0, true, false);
    }

    @Test
    public void testBufferedChannelForceWriteNoFlush() throws Exception {
        testBufferedChannel(5000, 30, 0, false, true);
    }

    @Test
    public void testBufferedChannelFlushForceWrite() throws Exception {
        testBufferedChannel(5000, 30, 0, true, true);
    }
public void testBufferedChannel(int byteBufLength, int numOfWrites, int unpersistedBytesBound, boolean flush,
boolean shouldForceWrite) throws Exception {
File newLogFile = File.createTempFile("test", "log");
newLogFile.deleteOnExit();
FileChannel fileChannel = new RandomAccessFile(newLogFile, "rw").getChannel();
BufferedChannel logChannel = new BufferedChannel(UnpooledByteBufAllocator.DEFAULT, fileChannel,
INTERNAL_BUFFER_WRITE_CAPACITY, INTERNAL_BUFFER_READ_CAPACITY, unpersistedBytesBound);
ByteBuf dataBuf = generateEntry(byteBufLength);
dataBuf.markReaderIndex();
dataBuf.markWriterIndex();
for (int i = 0; i < numOfWrites; i++) {
logChannel.write(dataBuf);
dataBuf.resetReaderIndex();
dataBuf.resetWriterIndex();
}
if (flush && shouldForceWrite) {
logChannel.flushAndForceWrite(false);
} else if (flush) {
logChannel.flush();
} else if (shouldForceWrite) {
logChannel.forceWrite(false);
}
int expectedNumOfUnpersistedBytes = 0;
if (flush && shouldForceWrite) {
/*
* if flush call is made with shouldForceWrite,
* then expectedNumOfUnpersistedBytes should be zero.
*/
expectedNumOfUnpersistedBytes = 0;
} else if (!flush && shouldForceWrite) {
/*
* if flush is not called then internal write buffer is not flushed,
* but while adding entries to BufferedChannel if writeBuffer has
* reached its capacity then it will call flush method, and the data
* gets added to the file buffer. So though explicitly we are not
* calling flush method, implicitly flush gets called when
* writeBuffer reaches its capacity.
*/
expectedNumOfUnpersistedBytes = (byteBufLength * numOfWrites) % INTERNAL_BUFFER_WRITE_CAPACITY;
} else {
expectedNumOfUnpersistedBytes = (byteBufLength * numOfWrites) - unpersistedBytesBound;
}
if (unpersistedBytesBound > 0) {
Assert.assertEquals("Unpersisted bytes", expectedNumOfUnpersistedBytes, logChannel.getUnpersistedBytes());
}
logChannel.close();
fileChannel.close();
}
private static ByteBuf generateEntry(int length) {
byte[] data = new byte[length];
ByteBuf bb = Unpooled.buffer(length);
rand.nextBytes(data);
bb.writeBytes(data);
return bb;
}
}
| apache-2.0 |
mapr/elasticsearch | core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java | 10459 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action.cat;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RestoreSource;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.test.ESTestCase;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.mock.orig.Mockito.when;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.mockito.Mockito.mock;
/**
 * Verifies that {@code RestRecoveryAction} renders a {@code RecoveryResponse}
 * into the expected _cat table: correct header names/order and one row per
 * successful shard whose cells mirror the mocked {@code RecoveryState}.
 *
 * NOTE: this is a seeded-random ESTestCase; the exact sequence of
 * randomIntBetween()/randomBoolean() calls is part of the test's reproducible
 * behavior, so statement order here must not be changed casually.
 */
public class RestRecoveryActionTests extends ESTestCase {
    public void testRestRecoveryAction() {
        // Build the action under test with a minimal RestController.
        final Settings settings = Settings.EMPTY;
        final RestController restController = new RestController(settings);
        final RestRecoveryAction action = new RestRecoveryAction(settings, restController, restController, null);
        // Random shard counts; at least one shard is forced to be "failed"
        // (clamped at zero successful) so the failed path is represented.
        final int totalShards = randomIntBetween(1, 32);
        final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2));
        final int failedShards = totalShards - successfulShards;
        final boolean detailed = randomBoolean();
        final Map<String, List<RecoveryState>> shardRecoveryStates = new HashMap<>();
        final List<RecoveryState> recoveryStates = new ArrayList<>();
        // One fully mocked RecoveryState per successful shard.
        for (int i = 0; i < successfulShards; i++) {
            final RecoveryState state = mock(RecoveryState.class);
            when(state.getShardId()).thenReturn(new ShardId(new Index("index", "_na_"), i));
            // Timer: randomized elapsed nanos.
            final RecoveryState.Timer timer = mock(RecoveryState.Timer.class);
            when(timer.time()).thenReturn((long)randomIntBetween(1000000, 10 * 1000000));
            when(state.getTimer()).thenReturn(timer);
            when(state.getType()).thenReturn(randomFrom(RecoveryState.Type.values()));
            when(state.getStage()).thenReturn(randomFrom(RecoveryState.Stage.values()));
            // Source node is optionally absent ("n/a" in the table).
            final DiscoveryNode sourceNode = randomBoolean() ? mock(DiscoveryNode.class) : null;
            if (sourceNode != null) {
                when(sourceNode.getHostName()).thenReturn(randomAsciiOfLength(8));
            }
            when(state.getSourceNode()).thenReturn(sourceNode);
            final DiscoveryNode targetNode = mock(DiscoveryNode.class);
            when(targetNode.getHostName()).thenReturn(randomAsciiOfLength(8));
            when(state.getTargetNode()).thenReturn(targetNode);
            // Restore source (snapshot) is optionally absent ("n/a" in the table).
            final RestoreSource restoreSource = randomBoolean() ? mock(RestoreSource.class) : null;
            if (restoreSource != null) {
                final SnapshotId snapshotId = mock(SnapshotId.class);
                when(snapshotId.getRepository()).thenReturn(randomAsciiOfLength(8));
                when(snapshotId.getSnapshot()).thenReturn(randomAsciiOfLength(8));
                when(restoreSource.snapshotId()).thenReturn(snapshotId);
            }
            // File/byte recovery progress for the shard.
            RecoveryState.Index index = mock(RecoveryState.Index.class);
            final int totalRecoveredFiles = randomIntBetween(1, 64);
            when(index.totalRecoverFiles()).thenReturn(totalRecoveredFiles);
            final int recoveredFileCount = randomIntBetween(0, totalRecoveredFiles);
            when(index.recoveredFileCount()).thenReturn(recoveredFileCount);
            when(index.recoveredFilesPercent()).thenReturn((100f * recoveredFileCount) / totalRecoveredFiles);
            when(index.totalFileCount()).thenReturn(randomIntBetween(totalRecoveredFiles, 2 * totalRecoveredFiles));
            final int totalRecoveredBytes = randomIntBetween(1, 1 << 24);
            when(index.totalRecoverBytes()).thenReturn((long)totalRecoveredBytes);
            final int recoveredBytes = randomIntBetween(0, totalRecoveredBytes);
            when(index.recoveredBytes()).thenReturn((long)recoveredBytes);
            when(index.recoveredBytesPercent()).thenReturn((100f * recoveredBytes) / totalRecoveredBytes);
            // NOTE(review): this second stubbing of totalRecoverBytes()
            // overrides the one a few lines above; the assertion below reads
            // the same (re-stubbed) mock so the test stays self-consistent,
            // but confirm the double stubbing is intentional.
            when(index.totalRecoverBytes()).thenReturn((long)randomIntBetween(totalRecoveredBytes, 2 * totalRecoveredBytes));
            when(state.getIndex()).thenReturn(index);
            // Translog replay progress for the shard.
            final RecoveryState.Translog translog = mock(RecoveryState.Translog.class);
            final int translogOps = randomIntBetween(0, 1 << 18);
            when(translog.totalOperations()).thenReturn(translogOps);
            final int translogOpsRecovered = randomIntBetween(0, translogOps);
            when(translog.recoveredOperations()).thenReturn(translogOpsRecovered);
            when(translog.recoveredPercent()).thenReturn(translogOps == 0 ? 100f : (100f * translogOpsRecovered / translogOps));
            when(state.getTranslog()).thenReturn(translog);
            recoveryStates.add(state);
        }
        // Hand the action a shuffled copy: the produced table is expected to
        // be ordered by shard regardless of input order, and the row
        // assertions below index into the original (ordered) list.
        final List<RecoveryState> shuffle = new ArrayList<>(recoveryStates);
        Randomness.shuffle(shuffle);
        shardRecoveryStates.put("index", shuffle);
        final List<ShardOperationFailedException> shardFailures = new ArrayList<>();
        final RecoveryResponse response = new RecoveryResponse(
                totalShards,
                successfulShards,
                failedShards,
                detailed,
                shardRecoveryStates,
                shardFailures);
        final Table table = action.buildRecoveryTable(null, response);
        assertNotNull(table);
        // Header names and their order are part of the _cat API contract.
        List<Table.Cell> headers = table.getHeaders();
        assertThat(headers.get(0).value, equalTo("index"));
        assertThat(headers.get(1).value, equalTo("shard"));
        assertThat(headers.get(2).value, equalTo("time"));
        assertThat(headers.get(3).value, equalTo("type"));
        assertThat(headers.get(4).value, equalTo("stage"));
        assertThat(headers.get(5).value, equalTo("source_host"));
        assertThat(headers.get(6).value, equalTo("target_host"));
        assertThat(headers.get(7).value, equalTo("repository"));
        assertThat(headers.get(8).value, equalTo("snapshot"));
        assertThat(headers.get(9).value, equalTo("files"));
        assertThat(headers.get(10).value, equalTo("files_recovered"));
        assertThat(headers.get(11).value, equalTo("files_percent"));
        assertThat(headers.get(12).value, equalTo("files_total"));
        assertThat(headers.get(13).value, equalTo("bytes"));
        assertThat(headers.get(14).value, equalTo("bytes_recovered"));
        assertThat(headers.get(15).value, equalTo("bytes_percent"));
        assertThat(headers.get(16).value, equalTo("bytes_total"));
        assertThat(headers.get(17).value, equalTo("translog_ops"));
        assertThat(headers.get(18).value, equalTo("translog_ops_recovered"));
        assertThat(headers.get(19).value, equalTo("translog_ops_percent"));
        // Only successful shards produce rows.
        assertThat(table.getRows().size(), equalTo(successfulShards));
        // Each row's cells must reflect the corresponding mocked state; the
        // source node and restore source render as "n/a" when absent.
        for (int i = 0; i < successfulShards; i++) {
            final RecoveryState state = recoveryStates.get(i);
            List<Table.Cell> cells = table.getRows().get(i);
            assertThat(cells.get(0).value, equalTo("index"));
            assertThat(cells.get(1).value, equalTo(i));
            assertThat(cells.get(2).value, equalTo(new TimeValue(state.getTimer().time())));
            assertThat(cells.get(3).value, equalTo(state.getType().name().toLowerCase(Locale.ROOT)));
            assertThat(cells.get(4).value, equalTo(state.getStage().name().toLowerCase(Locale.ROOT)));
            assertThat(cells.get(5).value, equalTo(state.getSourceNode() == null ? "n/a" : state.getSourceNode().getHostName()));
            assertThat(cells.get(6).value, equalTo(state.getTargetNode().getHostName()));
            assertThat(
                cells.get(7).value,
                equalTo(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getRepository()));
            assertThat(
                cells.get(8).value,
                equalTo(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getSnapshot()));
            assertThat(cells.get(9).value, equalTo(state.getIndex().totalRecoverFiles()));
            assertThat(cells.get(10).value, equalTo(state.getIndex().recoveredFileCount()));
            assertThat(cells.get(11).value, equalTo(percent(state.getIndex().recoveredFilesPercent())));
            assertThat(cells.get(12).value, equalTo(state.getIndex().totalFileCount()));
            assertThat(cells.get(13).value, equalTo(state.getIndex().totalRecoverBytes()));
            assertThat(cells.get(14).value, equalTo(state.getIndex().recoveredBytes()));
            assertThat(cells.get(15).value, equalTo(percent(state.getIndex().recoveredBytesPercent())));
            // NOTE(review): totalBytes() is never stubbed above, so both
            // sides of this comparison rely on Mockito's default (0L) —
            // confirm whether stubbing totalBytes() was intended.
            assertThat(cells.get(16).value, equalTo(state.getIndex().totalBytes()));
            assertThat(cells.get(17).value, equalTo(state.getTranslog().totalOperations()));
            assertThat(cells.get(18).value, equalTo(state.getTranslog().recoveredOperations()));
            assertThat(cells.get(19).value, equalTo(percent(state.getTranslog().recoveredPercent())));
        }
    }

    /** Formats a float as a one-decimal percentage, e.g. {@code "42.0%"}. */
    private static String percent(float percent) {
        return String.format(Locale.ROOT, "%1.1f%%", percent);
    }
}
| apache-2.0 |
rmaz/buck | tools/psi-grammar/src/com/facebook/buck/intellij/ideabuck/lang/psi/BcfgNamedElement.java | 2091 | /*
* Copyright 2019-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.intellij.ideabuck.lang.psi;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNameIdentifierOwner;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * A named Buck-config PSI element: one that can be renamed and that exposes the
 * child element holding its name.
 *
 * <p>See
 * http://www.jetbrains.org/intellij/sdk/docs/reference_guide/custom_language_support/references_and_resolve.html
 */
public interface BcfgNamedElement extends PsiNameIdentifierOwner {
  /*
   * Implementation note: these default bodies exist only because the grammar
   * is generated in two passes. The first pass compiles against this
   * interface before the generated subclasses carry their own
   * implementations, so defaults are required for it to build; the second
   * pass then generates the real overrides. A future build improvement would
   * feed the second pass a default-free copy of this file so that a missing
   * override fails at compile time rather than via these AssertionErrors.
   */

  @Override
  default @Nullable PsiElement getNameIdentifier() {
    throw new AssertionError(
        String.format("Forgot to implement %s.getNameIdentifier()!", getClass().getName()));
  }

  @Override
  default PsiElement setName(@NotNull String s) throws IncorrectOperationException {
    throw new AssertionError(
        String.format("Forgot to implement %s.setName(String)!", getClass().getName()));
  }
}
| apache-2.0 |
apache/geronimo-xbean | xbean-spring/src/main/java/org/apache/xbean/spring/context/v2/XBeanXmlBeanFactory.java | 3341 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xbean.spring.context.v2;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.core.io.Resource;
import java.util.Collections;
import java.util.List;
/**
 * A {@link DefaultListableBeanFactory} that reads its bean definitions from an
 * XML resource via the XBean definition reader, optionally applying a list of
 * DOM pre-processors to the document before Spring parses it.
 *
 * <p>All convenience constructors delegate to the most specific one, passing
 * {@link Collections#emptyList()} when no pre-processors are supplied (the
 * type-safe equivalent of the raw {@code Collections.EMPTY_LIST} constant).
 */
public class XBeanXmlBeanFactory extends DefaultListableBeanFactory {
    /**
     * Create a new XBeanXmlBeanFactory with the given resource,
     * which must be parsable using DOM.
     * @param resource XML resource to load bean definitions from
     * @throws BeansException in case of loading or parsing errors
     */
    public XBeanXmlBeanFactory(Resource resource) throws BeansException {
        this(resource, null, Collections.emptyList());
    }

    /**
     * Create a new XBeanXmlBeanFactory with the given input stream,
     * which must be parsable using DOM.
     * @param resource XML resource to load bean definitions from
     * @param parentBeanFactory parent bean factory
     * @throws BeansException in case of loading or parsing errors
     */
    public XBeanXmlBeanFactory(Resource resource, BeanFactory parentBeanFactory) throws BeansException {
        this(resource, parentBeanFactory, Collections.emptyList());
    }

    /**
     * Create a new XBeanXmlBeanFactory with the given input stream,
     * which must be parsable using DOM.
     * @param resource XML resource to load bean definitions from
     * @param xmlPreprocessors the preprocessors to apply the DOM before passing to Spring for processing
     * @throws BeansException in case of loading or parsing errors
     */
    public XBeanXmlBeanFactory(Resource resource, List xmlPreprocessors) throws BeansException {
        this(resource, null, xmlPreprocessors);
    }

    /**
     * Create a new XBeanXmlBeanFactory with the given input stream,
     * which must be parsable using DOM.
     * @param resource XML resource to load bean definitions from
     * @param parentBeanFactory parent bean factory
     * @param xmlPreprocessors the preprocessors to apply the DOM before passing to Spring for processing
     * @throws BeansException in case of loading or parsing errors
     */
    public XBeanXmlBeanFactory(Resource resource, BeanFactory parentBeanFactory, List xmlPreprocessors) throws BeansException {
        super(parentBeanFactory);
        // The reader registers definitions directly into this factory.
        XBeanXmlBeanDefinitionReader reader = new XBeanXmlBeanDefinitionReader(null, this, xmlPreprocessors);
        reader.loadBeanDefinitions(resource);
    }
}
| apache-2.0 |
AndroidX/androidx | security/security-crypto/src/androidTest/java/androidx/security/crypto/EncryptedFileTest.java | 14599 | /*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.security.crypto;
import static androidx.security.crypto.MasterKey.KEYSTORE_PATH_URI;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static java.nio.charset.StandardCharsets.UTF_8;
import android.content.Context;
import android.content.SharedPreferences;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
import com.google.crypto.tink.KeysetHandle;
import com.google.crypto.tink.StreamingAead;
import com.google.crypto.tink.integration.android.AndroidKeysetManager;
import com.google.crypto.tink.streamingaead.AesGcmHkdfStreamingKeyManager;
import com.google.crypto.tink.streamingaead.StreamingAeadConfig;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.KeyStore;
@MediumTest
@RunWith(AndroidJUnit4.class)
public class EncryptedFileTest {
    private Context mContext;
    private MasterKey mMasterKey;

    /**
     * Resets keyset preference files, previously written data files and the
     * Android Keystore master key so every test starts from a clean slate,
     * then creates a fresh AES256-GCM master key.
     */
    @Before
    public void setup() throws Exception {
        mContext = ApplicationProvider.getApplicationContext();

        // Clear both the default and the custom keyset preference files.
        SharedPreferences sharedPreferences = mContext.getSharedPreferences(
                "__androidx_security_crypto_encrypted_file_pref__", Context.MODE_PRIVATE);
        sharedPreferences.edit().clear().commit();
        SharedPreferences customSharedPreferences = mContext.getSharedPreferences(
                "CUSTOMPREFNAME", Context.MODE_PRIVATE);
        customSharedPreferences.edit().clear().commit();

        // Delete old keys for testing
        String filePath = mContext.getFilesDir().getParent() + "/shared_prefs/"
                + "__androidx_security_crypto_encrypted_file_pref__";
        File deletePrefFile = new File(filePath);
        deletePrefFile.delete();
        // NOTE(review): this path lacks a "/" separator, so it points next to
        // the files dir rather than inside it; the explicit deletes below
        // already remove the real data file — confirm this line is vestigial.
        filePath = mContext.getFilesDir().getParent() + "nothing_to_see_here";
        deletePrefFile = new File(filePath);
        deletePrefFile.delete();

        File dataFile = new File(mContext.getFilesDir(), "nothing_to_see_here");
        dataFile.delete();
        dataFile = new File(mContext.getFilesDir(), "nothing_to_see_here_custom");
        dataFile.delete();
        dataFile = new File(mContext.getFilesDir(), "tink_test_file");
        dataFile.delete();

        // Delete MasterKeys
        KeyStore keyStore = KeyStore.getInstance("AndroidKeyStore");
        keyStore.load(null);
        keyStore.deleteEntry(MasterKey.DEFAULT_MASTER_KEY_ALIAS);

        mMasterKey = new MasterKey.Builder(mContext)
                .setKeyScheme(MasterKey.KeyScheme.AES256_GCM)
                .build();
    }

    /**
     * Drains {@code stream} one byte at a time into a byte array, closing the
     * stream when done (or on failure).
     */
    private static byte[] readAllAndClose(InputStream stream) throws IOException {
        try {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            int nextByte = stream.read();
            while (nextByte != -1) {
                buffer.write(nextByte);
                nextByte = stream.read();
            }
            return buffer.toByteArray();
        } finally {
            stream.close();
        }
    }

    /**
     * Round-trips a plaintext through {@link EncryptedFile} and verifies that
     * the on-disk bytes are ciphertext, that reads fail for missing files and
     * that writes fail for already-existing files.
     */
    @Test
    public void testWriteReadEncryptedFile() throws Exception {
        final String fileContent = "Don't tell anyone...";
        final String fileName = "nothing_to_see_here";

        // Write the plaintext through the encrypting stream.
        EncryptedFile encryptedFile = new EncryptedFile.Builder(mContext,
                new File(mContext.getFilesDir(), fileName), mMasterKey,
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();
        OutputStream outputStream = encryptedFile.openFileOutput();
        outputStream.write(fileContent.getBytes(UTF_8));
        outputStream.flush();
        outputStream.close();

        // The raw bytes on disk are ciphertext (printed for manual inspection).
        byte[] rawCipherText = readAllAndClose(mContext.openFileInput(fileName));
        System.out.println("Raw CipherText = " + new String(rawCipherText, UTF_8));

        // Reading back through EncryptedFile must recover the exact plaintext.
        byte[] plainText = readAllAndClose(encryptedFile.openFileInput());
        System.out.println("Decrypted Data: " + new String(plainText, UTF_8));
        Assert.assertEquals(
                "Contents should be equal, data was encrypted.",
                fileContent, new String(plainText, UTF_8));

        // Opening a non-existent file for reading must fail with IOException.
        EncryptedFile existingFileInputCheck = new EncryptedFile.Builder(mContext,
                new File(mContext.getFilesDir(), "FAKE_FILE"), mMasterKey,
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();
        boolean inputFailed = false;
        try {
            existingFileInputCheck.openFileInput();
        } catch (IOException ex) {
            inputFailed = true;
        }
        assertTrue("File should have failed opening.", inputFailed);

        // Opening an already-existing encrypted file for writing must fail.
        EncryptedFile existingFileOutputCheck = new EncryptedFile.Builder(mContext,
                new File(mContext.getFilesDir(), fileName), mMasterKey,
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();
        boolean outputFailed = false;
        try {
            existingFileOutputCheck.openFileOutput();
        } catch (IOException ex) {
            outputFailed = true;
        }
        assertTrue("File should have failed writing.", outputFailed);
    }

    /**
     * Same round-trip and failure-path checks as above, but exercising the
     * deprecated {@code Builder(File, Context, keyAlias, scheme)} overload.
     */
    @SuppressWarnings("deprecation")
    @Test
    public void testWriteReadEncryptedFileWithAlias() throws Exception {
        final String fileContent = "Don't tell anyone...";
        final String fileName = "nothing_to_see_here";

        // Write through the deprecated (File, Context, keyAlias) builder.
        EncryptedFile encryptedFile = new EncryptedFile.Builder(new File(mContext.getFilesDir(),
                fileName), mContext, mMasterKey.getKeyAlias(),
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();
        OutputStream outputStream = encryptedFile.openFileOutput();
        outputStream.write(fileContent.getBytes(UTF_8));
        outputStream.flush();
        outputStream.close();

        byte[] rawCipherText = readAllAndClose(mContext.openFileInput(fileName));
        System.out.println("Raw CipherText = " + new String(rawCipherText, UTF_8));

        byte[] plainText = readAllAndClose(encryptedFile.openFileInput());
        System.out.println("Decrypted Data: " + new String(plainText, UTF_8));
        Assert.assertEquals(
                "Contents should be equal, data was encrypted.",
                fileContent, new String(plainText, UTF_8));

        // Opening a non-existent file for reading must fail with IOException.
        EncryptedFile existingFileInputCheck = new EncryptedFile.Builder(
                new File(mContext.getFilesDir(), "FAKE_FILE"), mContext,
                mMasterKey.getKeyAlias(), EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();
        boolean inputFailed = false;
        try {
            existingFileInputCheck.openFileInput();
        } catch (IOException ex) {
            inputFailed = true;
        }
        assertTrue("File should have failed opening.", inputFailed);

        // Opening an already-existing encrypted file for writing must fail.
        EncryptedFile existingFileOutputCheck = new EncryptedFile.Builder(
                new File(mContext.getFilesDir(), fileName), mContext, mMasterKey.getKeyAlias(),
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();
        boolean outputFailed = false;
        try {
            existingFileOutputCheck.openFileOutput();
        } catch (IOException ex) {
            outputFailed = true;
        }
        assertTrue("File should have failed writing.", outputFailed);
    }

    /** Reading a file that does not exist must throw FileNotFoundException. */
    @Test
    public void testReadNonExistingFileThrows() throws Exception {
        final File nonExisting = new File(mContext.getFilesDir(), "non-existing.data");
        if (nonExisting.exists()) {
            assertTrue(nonExisting.delete());
        }

        EncryptedFile encryptedFile = new EncryptedFile.Builder(
                mContext,
                nonExisting,
                mMasterKey,
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();

        // try-with-resources so the stream is not leaked if the open
        // unexpectedly succeeds (the failure path of this test).
        try (FileInputStream stream = encryptedFile.openFileInput()) {
            fail("Successfully opened file that should not exist");
        } catch (FileNotFoundException fnf) {
            // Pass
        }
    }

    /**
     * Round-trips a plaintext using a custom keyset alias and preference file,
     * and verifies the keyset was stored under those custom names.
     */
    @Test
    public void testWriteReadEncryptedFileCustomPrefs() throws Exception {
        final String fileContent = "Don't tell anyone...!!!!!";
        final String fileName = "nothing_to_see_here_custom";

        // Write using a custom keyset alias and preference file name.
        EncryptedFile encryptedFile = new EncryptedFile.Builder(mContext,
                new File(mContext.getFilesDir(), fileName), mMasterKey,
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .setKeysetAlias("CustomKEYALIAS")
                .setKeysetPrefName("CUSTOMPREFNAME")
                .build();
        OutputStream outputStream = encryptedFile.openFileOutput();
        outputStream.write(fileContent.getBytes(UTF_8));
        outputStream.flush();
        outputStream.close();

        byte[] rawCipherText = readAllAndClose(mContext.openFileInput(fileName));
        System.out.println("Raw CipherText = " + new String(rawCipherText, UTF_8));

        byte[] plainText = readAllAndClose(encryptedFile.openFileInput());
        System.out.println("Decrypted Data: " + new String(plainText, UTF_8));
        Assert.assertEquals(
                "Contents should be equal, data was encrypted.",
                fileContent, new String(plainText, UTF_8));

        // The keyset must have been persisted under the custom alias/pref name.
        SharedPreferences sharedPreferences = mContext.getSharedPreferences("CUSTOMPREFNAME",
                Context.MODE_PRIVATE);
        boolean containsKeyset = sharedPreferences.contains("CustomKEYALIAS");
        assertTrue("Keyset should have existed.", containsKeyset);
    }

    /**
     * Writes with {@link EncryptedFile} and decrypts directly with Tink's
     * streaming AEAD (sharing the same keyset and using the file name as
     * associated data) to prove on-disk interoperability.
     */
    @SuppressWarnings("deprecation")
    @Test
    public void tinkTest() throws Exception {
        final String fileContent = "Don't tell anyone...";
        final String fileName = "tink_test_file";
        File file = new File(mContext.getFilesDir(), fileName);

        // Write through EncryptedFile.
        EncryptedFile encryptedFile = new EncryptedFile.Builder(mContext, file, mMasterKey,
                EncryptedFile.FileEncryptionScheme.AES256_GCM_HKDF_4KB)
                .build();
        OutputStream outputStream = encryptedFile.openFileOutput();
        outputStream.write(fileContent.getBytes(UTF_8));
        outputStream.flush();
        outputStream.close();

        // Load the same keyset EncryptedFile used, protected by the same
        // master key, and build a raw Tink StreamingAead from it.
        StreamingAeadConfig.register();
        KeysetHandle streamingAeadKeysetHandle = new AndroidKeysetManager.Builder()
                .withKeyTemplate(AesGcmHkdfStreamingKeyManager.aes256GcmHkdf4KBTemplate())
                .withSharedPref(mContext,
                        "__androidx_security_crypto_encrypted_file_keyset__",
                        "__androidx_security_crypto_encrypted_file_pref__")
                .withMasterKeyUri(KEYSTORE_PATH_URI + mMasterKey.getKeyAlias())
                .build().getKeysetHandle();
        StreamingAead streamingAead = com.google.crypto.tink.streamingaead.StreamingAeadFactory
                .getPrimitive(streamingAeadKeysetHandle);

        // Decrypt with Tink directly; the associated data is the file name.
        FileInputStream fileInputStream = new FileInputStream(file);
        byte[] plainText = readAllAndClose(streamingAead.newDecryptingStream(fileInputStream,
                file.getName().getBytes(UTF_8)));
        System.out.println("Decrypted Data: " + new String(plainText, UTF_8));
        Assert.assertEquals(
                "Contents should be equal, data was encrypted.",
                fileContent, new String(plainText, UTF_8));
    }
}
| apache-2.0 |
gkatsikas/onos | apps/k8s-networking/app/src/test/java/org/onosproject/k8snetworking/web/K8sNetworkingCodecRegisterTest.java | 2841 | /*
* Copyright 2019-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.k8snetworking.web;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import org.junit.Test;
import org.onlab.junit.TestUtils;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.JsonCodec;
import org.onosproject.k8snetworking.api.K8sNetwork;
import org.onosproject.k8snetworking.api.K8sPort;
import org.onosproject.k8snetworking.codec.K8sNetworkCodec;
import org.onosproject.k8snetworking.codec.K8sPortCodec;
import org.onosproject.k8snode.api.K8sNode;
import java.util.Map;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
 * Unit tests for the kubernetes networking codec register component.
 */
public class K8sNetworkingCodecRegisterTest {

    /**
     * Tests codec register activation and deactivation.
     */
    @Test
    public void testActivateDeactivate() {
        CodecService codecService = new TestCodecService();
        K8sNetworkingCodecRegister register = new K8sNetworkingCodecRegister();
        TestUtils.setField(register, "codecService", codecService);

        // Activation must register the codecs for both network and port models.
        register.activate();
        assertEquals(K8sNetworkCodec.class.getName(),
                codecService.getCodec(K8sNetwork.class).getClass().getName());
        assertEquals(K8sPortCodec.class.getName(),
                codecService.getCodec(K8sPort.class).getClass().getName());

        // Deactivation must leave no codec registered.
        register.deactivate();
        assertNull(codecService.getCodec(K8sNode.class));
    }

    /** Minimal in-memory CodecService backed by a map keyed on entity class name. */
    private static class TestCodecService implements CodecService {

        private final Map<String, JsonCodec> registry = Maps.newConcurrentMap();

        @Override
        public Set<Class<?>> getCodecs() {
            return ImmutableSet.of();
        }

        @Override
        public <T> JsonCodec<T> getCodec(Class<T> entityClass) {
            return registry.get(entityClass.getName());
        }

        @Override
        public <T> void registerCodec(Class<T> entityClass, JsonCodec<T> codec) {
            registry.put(entityClass.getName(), codec);
        }

        @Override
        public void unregisterCodec(Class<?> entityClass) {
            registry.remove(entityClass.getName());
        }
    }
}
| apache-2.0 |
msebire/intellij-community | plugins/gradle/jps-plugin/src/org/jetbrains/jps/gradle/model/impl/GradleResourceFileFilter.java | 2488 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.gradle.model.impl;
import com.intellij.openapi.util.io.FileUtil;
import org.gradle.api.file.RelativePath;
import org.gradle.api.internal.file.pattern.PatternMatcherFactory;
import org.gradle.api.specs.Spec;
import org.gradle.api.specs.Specs;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.regex.Pattern;
/**
* @author Vladislav.Soroka
*/
public class GradleResourceFileFilter implements FileFilter {
private final FilePattern myFilePattern;
private final File myRoot;
private final Spec<RelativePath> myFileFilterSpec;
public GradleResourceFileFilter(@NotNull File rootFile, @NotNull FilePattern filePattern) {
myFilePattern = filePattern;
myRoot = rootFile;
myFileFilterSpec = getAsSpec();
}
@Override
public boolean accept(@NotNull File file) {
final String relPath = FileUtil.getRelativePath(myRoot, file);
return relPath != null && isIncluded(relPath);
}
private boolean isIncluded(@NotNull String relativePath) {
RelativePath path = new RelativePath(true, relativePath.split(Pattern.quote(File.separator)));
return myFileFilterSpec.isSatisfiedBy(path);
}
private Spec<RelativePath> getAsSpec() {
return Specs.intersect(getAsIncludeSpec(true), Specs.negate(getAsExcludeSpec(true)));
}
private Spec<RelativePath> getAsExcludeSpec(boolean caseSensitive) {
Collection<String> allExcludes = new LinkedHashSet<>(myFilePattern.excludes);
List<Spec<RelativePath>> matchers = new ArrayList<>();
for (String exclude : allExcludes) {
Spec<RelativePath> patternMatcher = PatternMatcherFactory.getPatternMatcher(false, caseSensitive, exclude);
matchers.add(patternMatcher);
}
if (matchers.isEmpty()) {
return Specs.satisfyNone();
}
return Specs.union(matchers);
}
private Spec<RelativePath> getAsIncludeSpec(boolean caseSensitive) {
List<Spec<RelativePath>> matchers = new ArrayList<>();
for (String include : myFilePattern.includes) {
Spec<RelativePath> patternMatcher = PatternMatcherFactory.getPatternMatcher(true, caseSensitive, include);
matchers.add(patternMatcher);
}
return Specs.union(matchers);
}
} | apache-2.0 |
AludraTest/aludratest | src/main/java/org/aludratest/service/locator/element/LabelLocator.java | 978 | /*
* Copyright (C) 2010-2014 Hamburg Sud and the contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.aludratest.service.locator.element;
/**
 * Locates a GUI element by its visible text content.
 * NOTE(review): the original javadoc said "link element" while the class is
 * named LabelLocator — confirm against the concrete GUI service
 * implementations which element kinds this locator is actually applied to.
 * @author Marcel Malitz
 * @author Volker Bergmann
 */
public class LabelLocator extends GUIElementLocator {

    /** Constructor.
     * @param text the visible text identifying the related element. */
    public LabelLocator(String text) {
        super(text);
    }
}
| apache-2.0 |
iperdomo/keycloak | adapters/oidc/tomcat/tomcat-core/src/main/java/org/keycloak/adapters/tomcat/AbstractKeycloakAuthenticatorValve.java | 12048 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.adapters.tomcat;
import org.apache.catalina.Context;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleEvent;
import org.apache.catalina.LifecycleListener;
import org.apache.catalina.Manager;
import org.apache.catalina.authenticator.FormAuthenticator;
import org.apache.catalina.connector.Request;
import org.apache.catalina.connector.Response;
import org.keycloak.KeycloakSecurityContext;
import org.keycloak.adapters.AdapterDeploymentContext;
import org.keycloak.adapters.AdapterTokenStore;
import org.keycloak.adapters.KeycloakConfigResolver;
import org.keycloak.adapters.KeycloakDeployment;
import org.keycloak.adapters.KeycloakDeploymentBuilder;
import org.keycloak.adapters.NodesRegistrationManagement;
import org.keycloak.adapters.PreAuthActionsHandler;
import org.keycloak.adapters.RefreshableKeycloakSecurityContext;
import org.keycloak.adapters.spi.AuthChallenge;
import org.keycloak.adapters.spi.AuthOutcome;
import org.keycloak.adapters.spi.HttpFacade;
import org.keycloak.constants.AdapterConstants;
import org.keycloak.enums.TokenStore;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Keycloak authentication valve: plugs Keycloak OIDC authentication into the
 * Tomcat/Catalina request pipeline. Subclasses supply container-version
 * specific pieces (principal factory, error-page forwarding).
 *
 * @author <a href="mailto:ungarida@gmail.com">Davide Ungari</a>
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public abstract class AbstractKeycloakAuthenticatorValve extends FormAuthenticator implements LifecycleListener {

    // Request-note key under which the per-request AdapterTokenStore is cached.
    public static final String TOKEN_STORE_NOTE = "TOKEN_STORE_NOTE";

	private final static Logger log = Logger.getLogger(""+AbstractKeycloakAuthenticatorValve.class);

    // Tracks HTTP sessions so Keycloak-initiated logouts can invalidate them.
    protected CatalinaUserSessionManagement userSessionManagement = new CatalinaUserSessionManagement();
    // Resolves the KeycloakDeployment for each request (fixed or per-request).
    protected AdapterDeploymentContext deploymentContext;
    // Registers this cluster node with the Keycloak server when required.
    protected NodesRegistrationManagement nodesRegistrationManagement;

    /**
     * Reacts to container lifecycle events: disables FormAuthenticator's
     * principal caching on START, initializes the adapter after the context
     * has started, and shuts down node registration before stop.
     */
    @Override
    public void lifecycleEvent(LifecycleEvent event) {
        if (Lifecycle.START_EVENT.equals(event.getType())) {
            cache = false;
        } else if (Lifecycle.AFTER_START_EVENT.equals(event.getType())) {
            keycloakInit();
        } else if (event.getType() == Lifecycle.BEFORE_STOP_EVENT) {
            beforeStop();
        }
    }

    /**
     * Logs the request's Keycloak session out: ends the server-side session
     * (for refreshable contexts), clears the token store, and removes the
     * security context and user principal from the request.
     */
    protected void logoutInternal(Request request) {
        KeycloakSecurityContext ksc = (KeycloakSecurityContext)request.getAttribute(KeycloakSecurityContext.class.getName());
        if (ksc != null) {
            CatalinaHttpFacade facade = new OIDCCatalinaHttpFacade(request, null);
            KeycloakDeployment deployment = deploymentContext.resolveDeployment(facade);
            if (ksc instanceof RefreshableKeycloakSecurityContext) {
                ((RefreshableKeycloakSecurityContext) ksc).logout(deployment);
            }

            AdapterTokenStore tokenStore = getTokenStore(request, facade, deployment);
            tokenStore.logout();
            request.removeAttribute(KeycloakSecurityContext.class.getName());
        }
        request.setUserPrincipal(null);
    }

    /** Stops background node registration; invoked before the container stops. */
    protected void beforeStop() {
        if (nodesRegistrationManagement != null) {
            nodesRegistrationManagement.stop();
        }
    }

    @SuppressWarnings("UseSpecificCatch")
    public void keycloakInit() {
        // Possible scenarios:
        // 1) The deployment has a keycloak.config.resolver specified and it exists:
        //    Outcome: adapter uses the resolver
        // 2) The deployment has a keycloak.config.resolver and isn't valid (doesn't exists, isn't a resolver, ...) :
        //    Outcome: adapter is left unconfigured
        // 3) The deployment doesn't have a keycloak.config.resolver , but has a keycloak.json (or equivalent)
        //    Outcome: adapter uses it
        // 4) The deployment doesn't have a keycloak.config.resolver nor keycloak.json (or equivalent)
        //    Outcome: adapter is left unconfigured

        String configResolverClass = context.getServletContext().getInitParameter("keycloak.config.resolver");
        if (configResolverClass != null) {
            try {
                // Load the resolver with the webapp's class loader so application classes are visible.
                KeycloakConfigResolver configResolver = (KeycloakConfigResolver) context.getLoader().getClassLoader().loadClass(configResolverClass).newInstance();
                deploymentContext = new AdapterDeploymentContext(configResolver);
                log.log(Level.INFO, "Using {0} to resolve Keycloak configuration on a per-request basis.", configResolverClass);
            } catch (Exception ex) {
                // An empty KeycloakDeployment is unconfigured, so all requests will be denied.
                log.log(Level.FINE, "The specified resolver {0} could NOT be loaded. Keycloak is unconfigured and will deny all requests. Reason: {1}", new Object[]{configResolverClass, ex.getMessage()});
                deploymentContext = new AdapterDeploymentContext(new KeycloakDeployment());
            }
        } else {
            InputStream configInputStream = getConfigInputStream(context);
            KeycloakDeployment kd;
            if (configInputStream == null) {
                log.fine("No adapter configuration. Keycloak is unconfigured and will deny all requests.");
                kd = new KeycloakDeployment();
            } else {
                kd = KeycloakDeploymentBuilder.build(configInputStream);
            }
            deploymentContext = new AdapterDeploymentContext(kd);
            log.fine("Keycloak is using a per-deployment configuration.");
        }

        // Expose the deployment context to the webapp and chain the
        // authenticated-actions valve after this one.
        context.getServletContext().setAttribute(AdapterDeploymentContext.class.getName(), deploymentContext);
        AuthenticatedActionsValve actions = new AuthenticatedActionsValve(deploymentContext, getNext(), getContainer());
        setNext(actions);

        nodesRegistrationManagement = new NodesRegistrationManagement();
    }

    /**
     * Returns the adapter configuration passed inline as a servlet context
     * init parameter, or null when that parameter is absent.
     */
    private static InputStream getJSONFromServletContext(ServletContext servletContext) {
        String json = servletContext.getInitParameter(AdapterConstants.AUTH_DATA_PARAM_NAME);
        if (json == null) {
            return null;
        }
        log.finest("**** using " + AdapterConstants.AUTH_DATA_PARAM_NAME);
        log.finest(json);
        return new ByteArrayInputStream(json.getBytes());
    }

    /**
     * Locates the adapter configuration, trying in order: inline servlet
     * context parameter, the keycloak.config.file init parameter, and finally
     * the conventional /WEB-INF/keycloak.json resource.
     */
    private static InputStream getConfigInputStream(Context context) {
        InputStream is = getJSONFromServletContext(context.getServletContext());
        if (is == null) {
            String path = context.getServletContext().getInitParameter("keycloak.config.file");
            if (path == null) {
                log.finest("**** using /WEB-INF/keycloak.json");
                is = context.getServletContext().getResourceAsStream("/WEB-INF/keycloak.json");
            } else {
                try {
                    is = new FileInputStream(path);
                } catch (FileNotFoundException e) {
                    log.log(Level.SEVERE, "NOT FOUND {0}", path);
                    throw new RuntimeException(e);
                }
            }
        }
        return is;
    }

    /**
     * Valve entry point: handles Keycloak pre-auth actions (admin callbacks,
     * logout endpoints, ...); if none applies, refreshes the current token if
     * needed and passes the request on down the pipeline.
     */
    @Override
    public void invoke(Request request, Response response) throws IOException, ServletException {
        try {
            CatalinaHttpFacade facade = new OIDCCatalinaHttpFacade(request, response);
            Manager sessionManager = request.getContext().getManager();
            CatalinaUserSessionManagementWrapper sessionManagementWrapper = new CatalinaUserSessionManagementWrapper(userSessionManagement, sessionManager);
            PreAuthActionsHandler handler = new PreAuthActionsHandler(sessionManagementWrapper, deploymentContext, facade);
            if (handler.handleRequest()) {
                // Request was fully handled by a pre-auth action; do not continue the pipeline.
                return;
            }
            checkKeycloakSession(request, facade);
            super.invoke(request, response);
        } finally {
            // NOTE(review): intentionally empty — kept only to preserve the
            // original control flow; there is no cleanup to perform here.
        }
    }

    /** Creates the container-version specific principal factory. */
    protected abstract GenericPrincipalFactory createPrincipalFactory();
    /** Forwards to the configured error page; container-version specific. */
    protected abstract boolean forwardToErrorPageInternal(Request request, HttpServletResponse response, Object loginConfig) throws IOException;

    /**
     * Runs Keycloak authentication for the request.
     *
     * @return true when the request is authenticated and processing may
     *         continue; false when unconfigured, challenged, or already ended.
     */
    protected boolean authenticateInternal(Request request, HttpServletResponse response, Object loginConfig) throws IOException {
        CatalinaHttpFacade facade = new OIDCCatalinaHttpFacade(request, response);
        KeycloakDeployment deployment = deploymentContext.resolveDeployment(facade);
        if (deployment == null || !deployment.isConfigured()) {
            // Unconfigured adapter: deny (see keycloakInit scenarios above).
            return false;
        }
        AdapterTokenStore tokenStore = getTokenStore(request, facade, deployment);

        nodesRegistrationManagement.tryRegister(deployment);

        CatalinaRequestAuthenticator authenticator = createRequestAuthenticator(request, facade, deployment, tokenStore);
        AuthOutcome outcome = authenticator.authenticate();
        if (outcome == AuthOutcome.AUTHENTICATED) {
            if (facade.isEnded()) {
                return false;
            }
            return true;
        }
        // Not authenticated: send the challenge (e.g. redirect to login) if one was produced.
        AuthChallenge challenge = authenticator.getChallenge();
        if (challenge != null) {
            challenge.challenge(facade);
        }
        return false;
    }

    /** Factory hook allowing subclasses to customize the request authenticator. */
    protected CatalinaRequestAuthenticator createRequestAuthenticator(Request request, CatalinaHttpFacade facade, KeycloakDeployment deployment, AdapterTokenStore tokenStore) {
        return new CatalinaRequestAuthenticator(deployment, tokenStore, facade, request, createPrincipalFactory());
    }

    /**
     * Checks that access token is still valid. Will attempt refresh of token if it is not.
     *
     * @param request
     */
    protected void checkKeycloakSession(Request request, HttpFacade facade) {
        KeycloakDeployment deployment = deploymentContext.resolveDeployment(facade);
        AdapterTokenStore tokenStore = getTokenStore(request, facade, deployment);
        tokenStore.checkCurrentToken();
    }

    /** Saves the current request so it can be replayed after authentication. */
    public void keycloakSaveRequest(Request request) throws IOException {
        saveRequest(request, request.getSessionInternal(true));
    }

    /** Restores a previously saved request; wraps the checked IOException. */
    public boolean keycloakRestoreRequest(Request request) {
        try {
            return restoreRequest(request, request.getSessionInternal());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the token store for this request, creating and caching it in a
     * request note on first use. Session- or cookie-backed depending on the
     * deployment's configured TokenStore type.
     */
    protected AdapterTokenStore getTokenStore(Request request, HttpFacade facade, KeycloakDeployment resolvedDeployment) {
        AdapterTokenStore store = (AdapterTokenStore)request.getNote(TOKEN_STORE_NOTE);
        if (store != null) {
            return store;
        }

        if (resolvedDeployment.getTokenStore() == TokenStore.SESSION) {
            store = createSessionTokenStore(request, resolvedDeployment);
        } else {
            store = new CatalinaCookieTokenStore(request, facade, resolvedDeployment, createPrincipalFactory());
        }

        request.setNote(TOKEN_STORE_NOTE, store);
        return store;
    }

    private AdapterTokenStore createSessionTokenStore(Request request, KeycloakDeployment resolvedDeployment) {
        AdapterTokenStore store;
        store = new CatalinaSessionTokenStore(request, resolvedDeployment, userSessionManagement, createPrincipalFactory(), this);
        return store;
    }
}
hurricup/intellij-community | java/testFramework/src/com/intellij/projectView/BaseProjectViewTestCase.java | 3709 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.projectView;
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.projectView.impl.AbstractProjectTreeStructure;
import com.intellij.ide.projectView.impl.ClassesTreeStructureProvider;
import com.intellij.ide.projectView.impl.nodes.PackageElementNode;
import com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode;
import com.intellij.ide.util.treeView.AbstractTreeStructure;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.ui.Queryable;
import com.intellij.psi.PsiDirectory;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.ProjectViewTestUtil;
import com.intellij.testFramework.TestSourceBasedTestCase;
import org.jetbrains.annotations.NonNls;
import javax.swing.*;
/**
 * Base fixture for project-view tests: creates a {@link TestProjectTreeStructure}
 * per test and offers helpers for comparing rendered tree structures against
 * expected textual dumps.
 */
public abstract class BaseProjectViewTestCase extends TestSourceBasedTestCase {
  // Project-view tree structure under test; recreated for each test in setUp().
  protected TestProjectTreeStructure myStructure;
  // Controls which node details are printed when trees are dumped; null means defaults.
  protected Queryable.PrintInfo myPrintInfo;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    myStructure = new TestProjectTreeStructure(myProject, getTestRootDisposable());
  }

  @Override
  protected void tearDown() throws Exception {
    // Drop the reference before the platform fixture tears down, so the
    // structure (and the project it holds) is not leaked between tests.
    myStructure = null;
    super.tearDown();
  }

  /** Compares the subtree rooted at the directory with {@code expected}, printing at most 17 rows. */
  protected void assertStructureEqual(PsiDirectory packageDirectory, @NonNls String expected) {
    // 17 appears to be a historical default row limit for these dumps — TODO confirm.
    assertStructureEqual(packageDirectory, expected, 17, myStructure);
  }

  protected void assertStructureEqual(PsiDirectory packageDirectory, @NonNls String expected, int maxRowCount) {
    assertStructureEqual(packageDirectory, expected, maxRowCount, myStructure);
  }

  /** Installs the standard tree-structure providers (classes view) on the structure. */
  protected void useStandardProviders() {
    getProjectTreeStructure().setProviders(new ClassesTreeStructureProvider(myProject));
  }

  protected AbstractProjectTreeStructure getProjectTreeStructure() {
    return myStructure;
  }

  private void assertStructureEqual(PsiDirectory root, String expected, int maxRowCount, AbstractTreeStructure structure) {
    assertNotNull(root);
    PsiDirectoryNode rootNode = new PsiDirectoryNode(myProject, root, (ViewSettings)structure);
    ProjectViewTestUtil.assertStructureEqual(myStructure, expected, maxRowCount, PlatformTestUtil.createComparator(myPrintInfo), rootNode, myPrintInfo);
  }

  /** Compares a Swing list model's rendered content with {@code expected}. */
  protected static void assertListsEqual(ListModel model, String expected) {
    assertEquals(expected, PlatformTestUtil.print(model));
  }

  /** Verifies the structure's contains() behavior; package nodes expose their files. */
  public static void checkContainsMethod(final Object rootElement, final AbstractTreeStructure structure) {
    ProjectViewTestUtil.checkContainsMethod(rootElement, structure, kid -> {
      if (kid instanceof PackageElementNode) {
        return ((PackageElementNode)kid).getVirtualFiles();
      }
      return null;
    });
  }

  @Override
  protected String getTestPath() {
    return "projectView";
  }

  protected static String getPackageRelativePath() {
    return "com/package1";
  }

  protected PsiDirectory getPackageDirectory() {
    return getPackageDirectory(getPackageRelativePath());
  }

  @Override
  protected String getTestDataPath() {
    return PathManagerEx.getTestDataPath(getClass());
  }
}
| apache-2.0 |
kierarad/gocd | plugin-infra/go-plugin-config-repo/src/test/java/com/thoughtworks/go/plugin/configrepo/contract/material/CRPluggableScmMaterialTest.java | 3496 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.configrepo.contract.material;
import com.google.gson.JsonObject;
import com.thoughtworks.go.plugin.configrepo.contract.AbstractCRTest;
import org.junit.Test;
import java.util.Arrays;
import java.util.Map;
import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.is;
/**
 * Serialization round-trip and validation examples for
 * {@link CRPluggableScmMaterial}.
 */
public class CRPluggableScmMaterialTest extends AbstractCRTest<CRPluggableScmMaterial> {

    private final CRPluggableScmMaterial pluggableGit;
    private final CRPluggableScmMaterial pluggableGitWith2Filters;
    private final CRPluggableScmMaterial simplePluggableGit;
    private final CRPluggableScmMaterial simpleNamedPluggableGit;
    private final CRPluggableScmMaterial pluggableGitWithFilter;
    private final CRPluggableScmMaterial invalidNoScmId;

    public CRPluggableScmMaterialTest() {
        // Fully specified materials, with zero, one and two filter directories.
        pluggableGit = new CRPluggableScmMaterial("myPluggableGit", "someScmGitRepositoryId", "destinationDir", null);
        pluggableGitWithFilter = new CRPluggableScmMaterial("myPluggableGit", "someScmGitRepositoryId", "destinationDir", Arrays.asList("mydir"));
        pluggableGitWith2Filters = new CRPluggableScmMaterial("myPluggableGit", "someScmGitRepositoryId", "destinationDir", Arrays.asList("dir1", "dir2"));

        // Minimal materials built via setters.
        simplePluggableGit = new CRPluggableScmMaterial();
        simplePluggableGit.setScmId("mygit-id");

        simpleNamedPluggableGit = new CRPluggableScmMaterial();
        simpleNamedPluggableGit.setScmId("mygit-id");
        simpleNamedPluggableGit.setName("myGitMaterial");

        // Missing the mandatory scmId, so this one must fail validation.
        invalidNoScmId = new CRPluggableScmMaterial();
    }

    @Override
    public void addGoodExamples(Map<String, CRPluggableScmMaterial> examples) {
        examples.put("pluggableGit", pluggableGit);
        examples.put("pluggableGitWith2Filters", pluggableGitWith2Filters);
        examples.put("simplePluggableGit", simplePluggableGit);
        examples.put("simpleNamedPluggableGit", simpleNamedPluggableGit);
        examples.put("pluggableGitWithFilter", pluggableGitWithFilter);
    }

    @Override
    public void addBadExamples(Map<String, CRPluggableScmMaterial> examples) {
        examples.put("invalidNoScmId", invalidNoScmId);
    }

    @Test
    public void shouldAppendTypeFieldWhenSerializingMaterials() {
        // The polymorphic adapter must stamp the concrete type name on the JSON.
        JsonObject serialized = (JsonObject) gson.toJsonTree((CRMaterial) pluggableGit);
        assertThat(serialized.get("type").getAsString(), is(CRPluggableScmMaterial.TYPE_NAME));
    }

    @Test
    public void shouldHandlePolymorphismWhenDeserializing() {
        // Serialize via the base type and ensure the round trip restores equality.
        CRMaterial original = pluggableGit;
        String asJson = gson.toJson(original);
        CRPluggableScmMaterial roundTripped = (CRPluggableScmMaterial) gson.fromJson(asJson, CRMaterial.class);
        assertThat("Deserialized value should equal to value before serialization",
                roundTripped, is(original));
    }
}
| apache-2.0 |
luiseduardohdbackup/buck | src/com/facebook/buck/cli/FetchCommand.java | 6238 | /*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import com.facebook.buck.command.Build;
import com.facebook.buck.file.Downloader;
import com.facebook.buck.file.HttpDownloader;
import com.facebook.buck.file.RemoteFileDescription;
import com.facebook.buck.json.BuildFileParseException;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetException;
import com.facebook.buck.model.Pair;
import com.facebook.buck.parser.ParserConfig;
import com.facebook.buck.rules.ActionGraph;
import com.facebook.buck.rules.BuildEvent;
import com.facebook.buck.rules.CachingBuildEngine;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetGraphToActionGraph;
import com.facebook.buck.rules.keys.AbiRuleKeyBuilderFactory;
import com.facebook.buck.rules.keys.DependencyFileRuleKeyBuilderFactory;
import com.facebook.buck.rules.keys.InputBasedRuleKeyBuilderFactory;
import com.facebook.buck.step.AdbOptions;
import com.facebook.buck.step.TargetDevice;
import com.facebook.buck.step.TargetDeviceOptions;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.net.Proxy;
/**
 * Buck command that downloads the remote resources (remote_file targets and
 * similar) reachable from the given build targets, without performing a full
 * build of anything else.
 */
public class FetchCommand extends BuildCommand {

  @Override
  public int runWithoutHelp(CommandRunnerParams params) throws IOException, InterruptedException {

    if (getArguments().isEmpty()) {
      params.getConsole().printBuildFailure("Must specify at least one build target to fetch.");
      return 1;
    }

    // Post the build started event, setting it to the Parser recorded start time if appropriate.
    BuildEvent.Started started = BuildEvent.started(getArguments());
    if (params.getParser().getParseStartTime().isPresent()) {
      params.getBuckEventBus().post(
          started,
          params.getParser().getParseStartTime().get());
    } else {
      params.getBuckEventBus().post(started);
    }

    // The fetch transformer replaces normal rule creation so that only
    // downloadable rules are materialized; it records which targets those are.
    FetchTargetNodeToBuildRuleTransformer ruleGenerator = createFetchTransformer(params);
    TargetGraphToActionGraph transformer = new TargetGraphToActionGraph(
        params.getBuckEventBus(),
        ruleGenerator,
        params.getFileHashCache());

    ActionGraph actionGraph;
    ImmutableSet<BuildTarget> buildTargets;
    try {
      // Parse the command-line specs into a target graph, then lower it to an
      // action graph; parse/graph failures are reported without a stack trace.
      Pair<ImmutableSet<BuildTarget>, TargetGraph> result = params.getParser()
          .buildTargetGraphForTargetNodeSpecs(
              parseArgumentsAsTargetNodeSpecs(
                  params.getBuckConfig(),
                  params.getRepository().getFilesystem().getIgnorePaths(),
                  getArguments()),
              new ParserConfig(params.getBuckConfig()),
              params.getBuckEventBus(),
              params.getConsole(),
              params.getEnvironment(),
              getEnableProfiling());
      actionGraph = transformer.apply(result.getSecond());
      buildTargets = ruleGenerator.getDownloadableTargets();
    } catch (BuildTargetException | BuildFileParseException e) {
      params.getConsole().printBuildFailureWithoutStacktrace(e);
      return 1;
    }

    int exitCode;
    SourcePathResolver pathResolver = new SourcePathResolver(transformer.getRuleResolver());
    // try-with-resources ensures the thread pool and the Build are shut down
    // even when the fetch fails part-way.
    try (CommandThreadManager pool =
        new CommandThreadManager("Fetch", getConcurrencyLimit(params.getBuckConfig()));
         Build build = createBuild(
             params.getBuckConfig(),
             actionGraph,
             params.getAndroidPlatformTargetSupplier(),
             new CachingBuildEngine(
                 pool.getExecutor(),
                 params.getFileHashCache(),
                 getBuildEngineMode().or(params.getBuckConfig().getBuildEngineMode()),
                 params.getBuckConfig().getBuildDepFiles(),
                 new InputBasedRuleKeyBuilderFactory(
                     params.getFileHashCache(),
                     pathResolver),
                 new AbiRuleKeyBuilderFactory(
                     params.getFileHashCache(),
                     pathResolver),
                 new DependencyFileRuleKeyBuilderFactory(
                     params.getFileHashCache(),
                     pathResolver)),
             getArtifactCache(params),
             params.getConsole(),
             params.getBuckEventBus(),
             Optional.<TargetDevice>absent(),
             params.getPlatform(),
             params.getEnvironment(),
             params.getObjectMapper(),
             params.getClock(),
             Optional.<AdbOptions>absent(),
             Optional.<TargetDeviceOptions>absent())) {
      // "Build" only the downloadable targets collected above.
      exitCode = build.executeAndPrintFailuresToEventBus(
          buildTargets,
          isKeepGoing(),
          params.getBuckEventBus(),
          params.getConsole().getAnsi(),
          getPathToBuildReport(params.getBuckConfig()));
    }

    params.getBuckEventBus().post(BuildEvent.finished(started, exitCode));

    return exitCode;
  }

  @Override
  public boolean isReadOnly() {
    // Fetching writes downloaded artifacts, so this command is not read-only.
    return false;
  }

  /**
   * Builds a transformer that knows only about remote_file rules, backed by an
   * HTTP downloader configured with the optional download.maven_repo setting.
   */
  private FetchTargetNodeToBuildRuleTransformer createFetchTransformer(CommandRunnerParams params) {
    Optional<String> defaultMavenRepo = params.getBuckConfig().getValue("download", "maven_repo");
    Downloader downloader = new HttpDownloader(Optional.<Proxy>absent(), defaultMavenRepo);
    Description<?> description = new RemoteFileDescription(downloader);
    return new FetchTargetNodeToBuildRuleTransformer(
        ImmutableSet.<Description<?>>of(description)
    );
  }

  @Override
  public String getShortDescription() {
    return "downloads remote resources to your local machine";
  }
}
| apache-2.0 |
flofreud/aws-sdk-java | aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/model/transform/ProjectionJsonUnmarshaller.java | 3164 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.dynamodbv2.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * Projection JSON Unmarshaller: reads a DynamoDB Projection object
 * (ProjectionType plus optional NonKeyAttributes list) from a streaming JSON
 * parse context. Generated-style code; the depth bookkeeping tells the loop
 * when the object being parsed has been fully consumed.
 */
public class ProjectionJsonUnmarshaller implements
        Unmarshaller<Projection, JsonUnmarshallerContext> {

    public Projection unmarshall(JsonUnmarshallerContext context)
            throws Exception {
        Projection projection = new Projection();

        // Remember where this object starts so nested/outer scopes can be told apart.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this object live exactly one level below the object itself.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL)
            // A JSON null maps to a null Projection, not an empty one.
            return null;

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("ProjectionType", targetDepth)) {
                    context.nextToken();
                    projection.setProjectionType(context.getUnmarshaller(
                            String.class).unmarshall(context));
                }
                if (context.testExpression("NonKeyAttributes", targetDepth)) {
                    context.nextToken();
                    projection
                            .setNonKeyAttributes(new ListUnmarshaller<String>(
                                    context.getUnmarshaller(String.class))
                                    .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we close back out to (or above) the depth we started at,
                // but only within the same parent element.
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return projection;
    }

    private static ProjectionJsonUnmarshaller instance;

    /**
     * Returns the shared stateless unmarshaller instance.
     * NOTE(review): lazy init is unsynchronized; a racing thread may observe a
     * duplicate construction, which is benign here since the class is stateless
     * (same pattern as other generated unmarshallers).
     */
    public static ProjectionJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new ProjectionJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
NguyenAnhDuc/fpt-qa | src/main/java/fpt/qa/intent/detection/qc/Data.java | 9429 | /*
Copyright (C) 2006 by
Xuan-Hieu Phan
Email: hieuxuan@ecei.tohoku.ac.jp
pxhieu@gmail.com
URL: http://www.hori.ecei.tohoku.ac.jp/~hieuxuan
Graduate School of Information Sciences,
Tohoku University
*/
package fpt.qa.intent.detection.qc;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
public class Data {
    // Global options (paths, counts); numCps/numLabels are updated as maps are read.
    Option option = null;

    // Label string -> integer id, and the inverse mapping (raw maps — legacy code).
    public Map lbStr2Int = null;
    public Map lbInt2Str = null;

    // Context-predicate string -> integer id, and the inverse mapping.
    public Map cpStr2Int = null;
    public Map cpInt2Str = null;

    // Training, test and (presumably) unlabeled observation lists — confirm element type at call sites.
    public List trnData = null;
    public List tstData = null;
    public List ulbData = null;

    /** Creates a data holder bound to the given option set. */
    public Data(Option option) {
        this.option = option;
    }
    /**
     * Reads the context predicate maps (predicate string <-> integer id) from
     * the model file: a size line, then that many "&lt;predicate&gt; &lt;id&gt;" lines,
     * then the separator line. Rebuilds both directions of the mapping and
     * updates {@code option.numCps}. Returns early (leaving maps partially
     * filled) when the section is malformed.
     *
     * @param fin reader positioned at the start of the context-predicate section
     * @throws IOException if reading from the model file fails
     */
    public void readCpMaps(BufferedReader fin) throws IOException {
        // Reset (or lazily create) both directions of the mapping.
        if (cpStr2Int != null) {
            cpStr2Int.clear();
        } else {
            cpStr2Int = new HashMap();
        }

        if (cpInt2Str != null) {
            cpInt2Str.clear();
        } else {
            cpInt2Str = new HashMap();
        }

        String line;

        // get size of the map
        if ((line = fin.readLine()) == null) {
            System.out.println("No context predicate map size information");
            return;
        }

        // NOTE(review): a non-numeric size line will throw NumberFormatException here.
        int numCps = Integer.parseInt(line);
        if (numCps <= 0) {
            System.out.println("Invalid context predicate mapping size");
            return;
        }

        System.out.println("Reading the context predicate maps ...");

        for (int i = 0; i < numCps; i++) {
            line = fin.readLine();
            if (line == null) {
                System.out.println("Invalid context predicate mapping line");
                return;
            }

            StringTokenizer strTok = new StringTokenizer(line, " \t\r\n");
            if (strTok.countTokens() != 2) {
                // Malformed entries are skipped, not fatal.
                continue;
            }

            String cpStr = strTok.nextToken();
            String cpInt = strTok.nextToken();

            cpStr2Int.put(cpStr, new Integer(cpInt));
            cpInt2Str.put(new Integer(cpInt), cpStr);
        }

        System.out.println("Reading context predicate maps (" +
                    Integer.toString(cpStr2Int.size()) + " entries) completed!");

        // read the line ###...
        fin.readLine();

        option.numCps = cpStr2Int.size();
    }
public int numCps() {
if (cpStr2Int == null) {
return 0;
} else {
return cpStr2Int.size();
}
}
public void writeCpMaps(Dictionary dict, PrintWriter fout) throws IOException {
Iterator it;
if (cpStr2Int == null) {
return;
}
int count = 0;
for (it = cpStr2Int.keySet().iterator(); it.hasNext(); ) {
String cpStr = (String)it.next();
Integer cpInt = (Integer)cpStr2Int.get(cpStr);
Element elem = (Element)dict.dict.get(cpInt);
if (elem != null) {
if (elem.chosen == 1) {
count++;
}
}
}
// write the map size
fout.println(Integer.toString(count));
for (it = cpStr2Int.keySet().iterator(); it.hasNext(); ) {
String cpStr = (String)it.next();
Integer cpInt = (Integer)cpStr2Int.get(cpStr);
Element elem = (Element)dict.dict.get(cpInt);
if (elem != null) {
if (elem.chosen == 1) {
fout.println(cpStr + " " + cpInt.toString());
}
}
}
// write the line ###...
fout.println(Option.modelSeparator);
}
/**
 * Reads the label map section (label string &lt;-&gt; integer index) from a
 * model file. Format: a line with the entry count, then one
 * "labelString labelIndex" pair per line, then a separator line which is
 * consumed. On success option.numLabels is updated.
 *
 * @param fin reader positioned at the start of the label map section; the
 *            caller owns and closes it
 * @throws IOException if reading from the underlying stream fails
 */
public void readLbMaps(BufferedReader fin) throws IOException {
    // reset (or lazily create) both directions of the label map
    if (lbStr2Int != null) {
        lbStr2Int.clear();
    } else {
        lbStr2Int = new HashMap();
    }
    if (lbInt2Str != null) {
        lbInt2Str.clear();
    } else {
        lbInt2Str = new HashMap();
    }
    String line;
    // get size of the map
    if ((line = fin.readLine()) == null) {
        System.out.println("No label map size information");
        return;
    }
    int numLabels = Integer.parseInt(line);
    if (numLabels <= 0) {
        System.out.println("Invalid label mapping size");
        return;
    }
    // the original message wrongly said "context predicate maps" here
    System.out.println("Reading the label maps ...");
    for (int i = 0; i < numLabels; i++) {
        line = fin.readLine();
        if (line == null) {
            System.out.println("Invalid label mapping line");
            return;
        }
        StringTokenizer strTok = new StringTokenizer(line, " \t\r\n");
        if (strTok.countTokens() != 2) {
            // skip malformed lines rather than aborting the whole read
            continue;
        }
        String lbStr = strTok.nextToken();
        // parse the index once and reuse the same Integer in both maps
        Integer lbIdx = Integer.valueOf(strTok.nextToken());
        lbStr2Int.put(lbStr, lbIdx);
        lbInt2Str.put(lbIdx, lbStr);
    }
    System.out.println("Reading label maps (" +
        Integer.toString(lbStr2Int.size()) + " entries) completed!");
    // consume the trailing separator line ###...
    fin.readLine();
    option.numLabels = lbStr2Int.size();
}
/** Returns the number of class labels, or 0 if the label map has not been built yet. */
public int numLabels() {
    return (lbStr2Int == null) ? 0 : lbStr2Int.size();
}
/**
 * Writes the label map section of a model file: the entry count, one
 * "labelString labelIndex" pair per line, then the model separator line.
 *
 * @param fout destination writer; the caller owns and closes it
 * @throws IOException declared for interface symmetry with the other writers
 */
public void writeLbMaps(PrintWriter fout) throws IOException {
    if (lbStr2Int == null) {
        return;
    }
    // entry count first
    fout.println(Integer.toString(lbStr2Int.size()));
    Iterator keys = lbStr2Int.keySet().iterator();
    while (keys.hasNext()) {
        Object lbStr = keys.next();
        fout.println(lbStr + " " + lbStr2Int.get(lbStr).toString());
    }
    // terminating separator line ###...
    fout.println(Option.modelSeparator);
}
/**
 * Reads the training data file, building the context-predicate and label
 * maps on the fly. Each line is whitespace-separated: every token except
 * the last is a context predicate, the last token is the class label.
 * Lines with fewer than two tokens are skipped. Unseen predicates and
 * labels are assigned the next free integer index. On success
 * option.numCps, option.numLabels and option.numTrainExps are updated.
 *
 * @param dataFile path of the training data file
 */
public void readTrnData(String dataFile) {
    // reset (or lazily create) the four maps and the training example list
    if (cpStr2Int != null) {
        cpStr2Int.clear();
    } else {
        cpStr2Int = new HashMap();
    }
    if (cpInt2Str != null) {
        cpInt2Str.clear();
    } else {
        cpInt2Str = new HashMap();
    }
    if (lbStr2Int != null) {
        lbStr2Int.clear();
    } else {
        lbStr2Int = new HashMap();
    }
    if (lbInt2Str != null) {
        lbInt2Str.clear();
    } else {
        lbInt2Str = new HashMap();
    }
    if (trnData != null) {
        trnData.clear();
    } else {
        trnData = new ArrayList();
    }
    // open data file
    BufferedReader fin = null;
    try {
        fin = new BufferedReader(new FileReader(dataFile));
        System.out.println("Reading training data ...");
        String line;
        while ((line = fin.readLine()) != null) {
            StringTokenizer strTok = new StringTokenizer(line, " \t\r\n");
            int len = strTok.countTokens();
            if (len <= 1) {
                // skip this invalid line
                continue;
            }
            List strCps = new ArrayList();
            for (int i = 0; i < len - 1; i++) {
                strCps.add(strTok.nextToken());
            }
            String labelStr = strTok.nextToken();
            List intCps = new ArrayList();
            for (int i = 0; i < strCps.size(); i++) {
                String cpStr = (String) strCps.get(i);
                Integer cpInt = (Integer) cpStr2Int.get(cpStr);
                if (cpInt != null) {
                    intCps.add(cpInt);
                } else {
                    // unseen predicate: assign the next free index once and
                    // register the same Integer in both map directions
                    Integer newCp = new Integer(cpStr2Int.size());
                    intCps.add(newCp);
                    cpStr2Int.put(cpStr, newCp);
                    cpInt2Str.put(newCp, cpStr);
                }
            }
            Integer labelInt = (Integer) lbStr2Int.get(labelStr);
            if (labelInt == null) {
                // unseen label: assign the next free label index
                labelInt = new Integer(lbStr2Int.size());
                lbStr2Int.put(labelStr, labelInt);
                lbInt2Str.put(labelInt, labelStr);
            }
            int[] cps = new int[intCps.size()];
            for (int i = 0; i < cps.length; i++) {
                cps[i] = ((Integer) intCps.get(i)).intValue();
            }
            Observation obsr = new Observation(labelInt.intValue(), cps);
            // add this observation to the data
            trnData.add(obsr);
        }
        System.out.println("Reading " + Integer.toString(trnData.size()) +
            " training data examples completed!");
    } catch (IOException e) {
        System.out.println(e.toString());
        return;
    } finally {
        // always release the file handle (the original version leaked it)
        if (fin != null) {
            try {
                fin.close();
            } catch (IOException e) {
                // nothing sensible to do if close itself fails
            }
        }
    }
    option.numCps = cpStr2Int.size();
    option.numLabels = lbStr2Int.size();
    option.numTrainExps = trnData.size();
}
/**
 * Reads the testing data file using the maps built from the training data.
 * Predicates not present in cpStr2Int are silently dropped; an unknown
 * label aborts the read. On success option.numTestExps is updated.
 *
 * @param dataFile path of the testing data file
 */
public void readTstData(String dataFile) {
    // reset (or lazily create) the testing example list
    if (tstData != null) {
        tstData.clear();
    } else {
        tstData = new ArrayList();
    }
    // open data file
    BufferedReader fin = null;
    try {
        fin = new BufferedReader(new FileReader(dataFile));
        System.out.println("Reading testing data ...");
        String line;
        while ((line = fin.readLine()) != null) {
            StringTokenizer strTok = new StringTokenizer(line, " \t\r\n");
            int len = strTok.countTokens();
            if (len <= 1) {
                // skip this invalid line
                continue;
            }
            List strCps = new ArrayList();
            for (int i = 0; i < len - 1; i++) {
                strCps.add(strTok.nextToken());
            }
            String labelStr = strTok.nextToken();
            List intCps = new ArrayList();
            for (int i = 0; i < strCps.size(); i++) {
                String cpStr = (String) strCps.get(i);
                Integer cpInt = (Integer) cpStr2Int.get(cpStr);
                // predicates unseen at training time are dropped at test time
                if (cpInt != null) {
                    intCps.add(cpInt);
                }
            }
            Integer labelInt = (Integer) lbStr2Int.get(labelStr);
            if (labelInt == null) {
                System.out.println("Reading testing observation, label not found or invalid");
                System.out.println("Label: " + labelStr);
                return;
            }
            int[] cps = new int[intCps.size()];
            for (int i = 0; i < cps.length; i++) {
                cps[i] = ((Integer) intCps.get(i)).intValue();
            }
            Observation obsr = new Observation(labelInt.intValue(), cps);
            // add this observation to the data
            tstData.add(obsr);
        }
        System.out.println("Reading " + Integer.toString(tstData.size()) +
            " testing data examples completed!");
    } catch (IOException e) {
        System.out.println(e.toString());
        return;
    } finally {
        // always release the file handle (the original version leaked it)
        if (fin != null) {
            try {
                fin.close();
            } catch (IOException e) {
                // nothing sensible to do if close itself fails
            }
        }
    }
    option.numTestExps = tstData.size();
}
/*
public void writeTstData(String dataFile) {
}
public void readUlbData(String dataFile) {
}
public void writeUlbDataWithModelLabel(String dataFile) {
}
*/
} // end of class Data
| apache-2.0 |
lummyare/lummyare-test | java/client/src/org/openqa/selenium/WebDriverCommandProcessor.java | 14835 | /*
Copyright 2007-2009 Selenium committers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.google.common.collect.Maps;
import com.thoughtworks.selenium.CommandProcessor;
import com.thoughtworks.selenium.SeleniumException;
import org.openqa.selenium.internal.WrapsDriver;
import org.openqa.selenium.internal.seleniumemulation.*;
import java.util.Map;
/**
* A CommandProcessor which delegates commands down to an underlying webdriver instance.
*/
/**
 * A CommandProcessor which delegates commands down to an underlying webdriver instance.
 */
public class WebDriverCommandProcessor implements CommandProcessor, WrapsDriver {
    // Maps Selenese command names (as received from clients) to their
    // WebDriver-backed emulations; populated by setUpMethodMap().
    private final Map<String, SeleneseCommand<?>> seleneseMethods = Maps.newHashMap();
    // Base URL used by open()/openWindow() and by the script mutator.
    private final String baseUrl;
    // Runs every command with a timeout (30s default; see the setTimeout command).
    private final Timer timer;
    // Rewrites Selenium-RC style JavaScript so that it runs under WebDriver.
    private final CompoundMutator scriptMutator;
    private boolean enableAlertOverrides = true;
    // Supplies the WebDriver lazily when start() is called.
    private Supplier<WebDriver> maker;
    private WebDriver driver;

    /**
     * Wraps an already-created driver. The driver must support JavaScript.
     *
     * @param baseUrl base URL for resolving relative URLs in commands
     * @param driver the driver to delegate commands to
     */
    public WebDriverCommandProcessor(String baseUrl, WebDriver driver) {
        // ExplodingSupplier ensures start() cannot create a second driver.
        this(baseUrl, new ExplodingSupplier());
        this.driver = driver;
        assertDriverSupportsJavascript(driver);
        setUpMethodMap();
    }

    /**
     * Defers driver creation until {@link #start()} is called.
     *
     * @param baseUrl base URL for resolving relative URLs in commands
     * @param maker supplier used to create the driver on start()
     */
    public WebDriverCommandProcessor(String baseUrl, Supplier<WebDriver> maker) {
        this.maker = maker;
        this.baseUrl = baseUrl;
        this.timer = new Timer(30000);
        this.scriptMutator = new CompoundMutator(baseUrl);
    }

    public WebDriver getWrappedDriver() {
        return driver;
    }

    // Not applicable: there is no remote-control server when emulating over WebDriver.
    public String getRemoteControlServerLocation() {
        throw new UnsupportedOperationException();
    }

    /**
     * Executes the named command and returns its result as a String
     * (null results stay null rather than becoming "null").
     */
    public String doCommand(String commandName, String[] args) {
        Object val = execute(commandName, args);
        if (val == null) {
            return null;
        }
        return val.toString();
    }

    public void setExtensionJs(String s) {
        throw new UnsupportedOperationException();
    }

    public void start() {
        // Cast disambiguates between the String and Object overloads.
        start((Object) null);
    }

    public void start(String s) {
        throw new UnsupportedOperationException("Unsure how to process: " + s);
    }

    /**
     * Creates the driver from the supplier and wires up the command map.
     * Only one session may be active at a time.
     */
    public void start(Object o) {
        if (driver != null) {
            throw new SeleniumException("You may not start more than one session at a time");
        }
        driver = maker.get();
        assertDriverSupportsJavascript(driver);
        setUpMethodMap();
    }

    /** Stops the timer, quits the driver (if any) and clears the session. */
    public void stop() {
        timer.stop();
        if (driver != null) {
            driver.quit();
        }
        driver = null;
    }

    public String getString(String commandName, String[] args) {
        return (String) execute(commandName, args);
    }

    public String[] getStringArray(String commandName, String[] args) {
        return (String[]) execute(commandName, args);
    }

    public Number getNumber(String commandName, String[] args) {
        return (Number) execute(commandName, args);
    }

    public Number[] getNumberArray(String s, String[] strings) {
        throw new UnsupportedOperationException();
    }

    public boolean getBoolean(String commandName, String[] args) {
        return (Boolean) execute(commandName, args);
    }

    public boolean[] getBooleanArray(String s, String[] strings) {
        throw new UnsupportedOperationException();
    }

    // Looks up the command by name and runs it under the shared timeout.
    private Object execute(String commandName, final String[] args) {
        final SeleneseCommand<?> command = seleneseMethods.get(commandName);
        if (command == null) {
            throw new UnsupportedOperationException(commandName);
        }
        return timer.run(command, driver, args);
    }

    public void addMutator(ScriptMutator mutator) {
        scriptMutator.addMutator(mutator);
    }

    public boolean isMethodAvailable(String methodName) {
        return seleneseMethods.containsKey(methodName);
    }

    // Allows callers to register or override command implementations.
    public void addMethod(String methodName, SeleneseCommand<?> command) {
        seleneseMethods.put(methodName, command);
    }

    public SeleneseCommand<?> getMethod(String methodName) {
        return seleneseMethods.get(methodName);
    }

    @VisibleForTesting
    protected void assertDriverSupportsJavascript(WebDriver driver) {
        if (!(driver instanceof JavascriptExecutor)) {
            throw new IllegalStateException("Driver instance must support JS.");
        }
        if (!(driver instanceof HasCapabilities)) {
            // Might be proxy. Bail.
            return;
        }
        if (!((HasCapabilities) driver).getCapabilities().isJavascriptEnabled()) {
            throw new IllegalStateException("JS support must be enabled.");
        }
    }

    /**
     * Sets whether to enable emulation of Selenium's alert handling functions or
     * to preserve WebDriver's alert handling. This has no effect after calling
     * {@link #start()}, because the map of commands is built at start time.
     */
    public void setEnableAlertOverrides(boolean enableAlertOverrides) {
        this.enableAlertOverrides = enableAlertOverrides;
    }

    // Registers every supported Selenese command name against its
    // WebDriver-backed implementation. The key strings are the wire-level
    // command names and must not be changed.
    private void setUpMethodMap() {
        JavascriptLibrary javascriptLibrary = new JavascriptLibrary();
        ElementFinder elementFinder = new ElementFinder(javascriptLibrary);
        KeyState keyState = new KeyState();
        AlertOverride alertOverride = new AlertOverride(enableAlertOverrides);
        Windows windows = new Windows(driver);

        // Note the we use the names used by the CommandProcessor
        seleneseMethods.put("addLocationStrategy", new AddLocationStrategy(elementFinder));
        seleneseMethods.put("addSelection", new AddSelection(javascriptLibrary, elementFinder));
        seleneseMethods.put("allowNativeXpath", new AllowNativeXPath());
        seleneseMethods.put("altKeyDown", new AltKeyDown(keyState));
        seleneseMethods.put("altKeyUp", new AltKeyUp(keyState));
        seleneseMethods.put("assignId", new AssignId(javascriptLibrary, elementFinder));
        seleneseMethods.put("attachFile", new AttachFile(elementFinder));
        seleneseMethods.put("captureScreenshotToString", new CaptureScreenshotToString());
        seleneseMethods.put("click", new Click(alertOverride, elementFinder));
        seleneseMethods.put("clickAt", new ClickAt(alertOverride, elementFinder));
        seleneseMethods.put("check", new Check(alertOverride, elementFinder));
        seleneseMethods.put("chooseCancelOnNextConfirmation", new SetNextConfirmationState(false));
        seleneseMethods.put("chooseOkOnNextConfirmation", new SetNextConfirmationState(true));
        seleneseMethods.put("close", new Close());
        seleneseMethods.put("createCookie", new CreateCookie());
        seleneseMethods.put("controlKeyDown", new ControlKeyDown(keyState));
        seleneseMethods.put("controlKeyUp", new ControlKeyUp(keyState));
        seleneseMethods.put("deleteAllVisibleCookies", new DeleteAllVisibleCookies());
        seleneseMethods.put("deleteCookie", new DeleteCookie());
        seleneseMethods.put("deselectPopUp", new DeselectPopUp(windows));
        seleneseMethods.put("doubleClick", new DoubleClick(alertOverride, elementFinder));
        seleneseMethods.put("dragdrop", new DragAndDrop(elementFinder));
        seleneseMethods.put("dragAndDrop", new DragAndDrop(elementFinder));
        seleneseMethods.put("dragAndDropToObject", new DragAndDropToObject(elementFinder));
        seleneseMethods.put("fireEvent", new FireEvent(elementFinder, javascriptLibrary));
        seleneseMethods.put("focus", new FireNamedEvent(elementFinder, javascriptLibrary, "focus"));
        seleneseMethods.put("getAlert", new GetAlert(alertOverride));
        seleneseMethods.put("getAllButtons", new GetAllButtons());
        seleneseMethods.put("getAllFields", new GetAllFields());
        seleneseMethods.put("getAllLinks", new GetAllLinks());
        seleneseMethods.put("getAllWindowNames", new GetAllWindowNames());
        seleneseMethods.put("getAllWindowTitles", new GetAllWindowTitles());
        seleneseMethods.put("getAttribute", new GetAttribute(javascriptLibrary, elementFinder));
        seleneseMethods.put("getAttributeFromAllWindows", new GetAttributeFromAllWindows());
        seleneseMethods.put("getBodyText", new GetBodyText());
        seleneseMethods.put("getConfirmation", new GetConfirmation(alertOverride));
        seleneseMethods.put("getCookie", new GetCookie());
        seleneseMethods.put("getCookieByName", new GetCookieByName());
        seleneseMethods.put("getElementHeight", new GetElementHeight(elementFinder));
        seleneseMethods.put("getElementIndex", new GetElementIndex(elementFinder,
            javascriptLibrary));
        seleneseMethods.put("getElementPositionLeft", new GetElementPositionLeft(elementFinder));
        seleneseMethods.put("getElementPositionTop", new GetElementPositionTop(elementFinder));
        seleneseMethods.put("getElementWidth", new GetElementWidth(elementFinder));
        seleneseMethods.put("getEval", new GetEval(scriptMutator));
        seleneseMethods.put("getExpression", new GetExpression());
        seleneseMethods.put("getHtmlSource", new GetHtmlSource());
        seleneseMethods.put("getLocation", new GetLocation());
        seleneseMethods.put("getSelectedId", new FindFirstSelectedOptionProperty(javascriptLibrary,
            elementFinder, "id"));
        seleneseMethods.put("getSelectedIds", new FindSelectedOptionProperties(javascriptLibrary,
            elementFinder, "id"));
        seleneseMethods.put("getSelectedIndex", new FindFirstSelectedOptionProperty(javascriptLibrary,
            elementFinder, "index"));
        seleneseMethods.put("getSelectedIndexes", new FindSelectedOptionProperties(javascriptLibrary,
            elementFinder, "index"));
        seleneseMethods.put("getSelectedLabel", new FindFirstSelectedOptionProperty(javascriptLibrary,
            elementFinder, "text"));
        seleneseMethods.put("getSelectedLabels", new FindSelectedOptionProperties(javascriptLibrary,
            elementFinder, "text"));
        seleneseMethods.put("getSelectedValue", new FindFirstSelectedOptionProperty(javascriptLibrary,
            elementFinder, "value"));
        seleneseMethods.put("getSelectedValues", new FindSelectedOptionProperties(javascriptLibrary,
            elementFinder, "value"));
        seleneseMethods.put("getSelectOptions", new GetSelectOptions(javascriptLibrary, elementFinder));
        seleneseMethods.put("getSpeed", new NoOp("0"));
        seleneseMethods.put("getTable", new GetTable(elementFinder, javascriptLibrary));
        seleneseMethods.put("getText", new GetText(javascriptLibrary, elementFinder));
        seleneseMethods.put("getTitle", new GetTitle());
        seleneseMethods.put("getValue", new GetValue(elementFinder));
        seleneseMethods.put("getXpathCount", new GetXpathCount());
        seleneseMethods.put("getCssCount", new GetCssCount());
        seleneseMethods.put("goBack", new GoBack());
        seleneseMethods.put("highlight", new Highlight(elementFinder, javascriptLibrary));
        seleneseMethods.put("isAlertPresent", new IsAlertPresent(alertOverride));
        seleneseMethods.put("isChecked", new IsChecked(elementFinder));
        seleneseMethods.put("isConfirmationPresent", new IsConfirmationPresent(alertOverride));
        seleneseMethods.put("isCookiePresent", new IsCookiePresent());
        seleneseMethods.put("isEditable", new IsEditable(elementFinder));
        seleneseMethods.put("isElementPresent", new IsElementPresent(elementFinder));
        seleneseMethods.put("isOrdered", new IsOrdered(elementFinder, javascriptLibrary));
        seleneseMethods.put("isSomethingSelected", new IsSomethingSelected(javascriptLibrary));
        seleneseMethods.put("isTextPresent", new IsTextPresent(javascriptLibrary));
        seleneseMethods.put("isVisible", new IsVisible(elementFinder));
        seleneseMethods.put("keyDown", new KeyEvent(elementFinder, javascriptLibrary, keyState,
            "doKeyDown"));
        seleneseMethods.put("keyPress", new TypeKeys(alertOverride, elementFinder));
        seleneseMethods.put("keyUp",
            new KeyEvent(elementFinder, javascriptLibrary, keyState, "doKeyUp"));
        seleneseMethods.put("metaKeyDown", new MetaKeyDown(keyState));
        seleneseMethods.put("metaKeyUp", new MetaKeyUp(keyState));
        seleneseMethods.put("mouseOver", new MouseEvent(elementFinder, javascriptLibrary, "mouseover"));
        seleneseMethods.put("mouseOut", new MouseEvent(elementFinder, javascriptLibrary, "mouseout"));
        seleneseMethods.put("mouseDown", new MouseEvent(elementFinder, javascriptLibrary, "mousedown"));
        seleneseMethods.put("mouseDownAt", new MouseEventAt(elementFinder, javascriptLibrary,
            "mousedown"));
        seleneseMethods.put("mouseMove", new MouseEvent(elementFinder, javascriptLibrary, "mousemove"));
        seleneseMethods.put("mouseMoveAt", new MouseEventAt(elementFinder, javascriptLibrary,
            "mousemove"));
        seleneseMethods.put("mouseUp", new MouseEvent(elementFinder, javascriptLibrary, "mouseup"));
        seleneseMethods.put("mouseUpAt", new MouseEventAt(elementFinder, javascriptLibrary, "mouseup"));
        seleneseMethods.put("open", new Open(baseUrl));
        seleneseMethods.put("openWindow", new OpenWindow(baseUrl, new GetEval(scriptMutator)));
        seleneseMethods.put("refresh", new Refresh());
        seleneseMethods.put("removeAllSelections", new RemoveAllSelections(elementFinder));
        seleneseMethods.put("removeSelection", new RemoveSelection(javascriptLibrary, elementFinder));
        seleneseMethods.put("runScript", new RunScript(scriptMutator));
        seleneseMethods.put("select",
            new SelectOption(alertOverride, javascriptLibrary, elementFinder));
        seleneseMethods.put("selectFrame", new SelectFrame(windows));
        seleneseMethods.put("selectPopUp", new SelectPopUp(windows));
        seleneseMethods.put("selectWindow", new SelectWindow(windows));
        seleneseMethods.put("setBrowserLogLevel", new NoOp(null));
        seleneseMethods.put("setContext", new NoOp(null));
        seleneseMethods.put("setSpeed", new NoOp(null));
        seleneseMethods.put("setTimeout", new SetTimeout(timer));
        seleneseMethods.put("shiftKeyDown", new ShiftKeyDown(keyState));
        seleneseMethods.put("shiftKeyUp", new ShiftKeyUp(keyState));
        seleneseMethods.put("submit", new Submit(alertOverride, elementFinder));
        seleneseMethods.put("type",
            new Type(alertOverride, javascriptLibrary, elementFinder, keyState));
        seleneseMethods.put("typeKeys", new TypeKeys(alertOverride, elementFinder));
        seleneseMethods.put("uncheck", new Uncheck(alertOverride, elementFinder));
        seleneseMethods.put("useXpathLibrary", new UseXPathLibrary());
        seleneseMethods.put("waitForCondition", new WaitForCondition(scriptMutator));
        seleneseMethods.put("waitForFrameToLoad", new NoOp(null));
        seleneseMethods.put("waitForPageToLoad", new WaitForPageToLoad());
        seleneseMethods.put("waitForPopUp", new WaitForPopup(windows));
        seleneseMethods.put("windowFocus", new WindowFocus(javascriptLibrary));
        seleneseMethods.put("windowMaximize", new WindowMaximize(javascriptLibrary));
    }
}
| apache-2.0 |
d0k1/jmeter | src/jorphan/org/apache/jorphan/collections/ListedHashTree.java | 6216 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jorphan.collections;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.jorphan.util.JMeterError;
/**
* ListedHashTree is a different implementation of the {@link HashTree}
* collection class. In the ListedHashTree, the order in which values are added
* is preserved (not to be confused with {@link SortedHashTree}, which sorts
* the order of the values using the compare() function). Any listing of nodes
* or iteration through the list of nodes of a ListedHashTree will be given in
* the order in which the nodes were added to the tree.
*
* @see HashTree
*/
public class ListedHashTree extends HashTree implements Serializable, Cloneable {
    private static final long serialVersionUID = 240L;

    // Insertion order of the keys in `data`; every mutation of `data` must
    // keep this list in sync.
    private final List<Object> order;

    public ListedHashTree() {
        super();
        order = new LinkedList<>();
    }

    /** Creates a tree with a single key mapped to an empty subtree. */
    public ListedHashTree(Object key) {
        this();
        data.put(key, new ListedHashTree());
        order.add(key);
    }

    /** Creates a tree with one empty subtree per key, preserving iteration order. */
    public ListedHashTree(Collection<?> keys) {
        this();
        for (Object temp : keys) {
            data.put(temp, new ListedHashTree());
            order.add(temp);
        }
    }

    /** Creates a tree with one empty subtree per key, preserving array order. */
    public ListedHashTree(Object[] keys) {
        this();
        for (Object key : keys) {
            data.put(key, new ListedHashTree());
            order.add(key);
        }
    }

    /** {@inheritDoc} */
    @Override
    public Object clone() {
        ListedHashTree newTree = new ListedHashTree();
        cloneTree(newTree);
        return newTree;
    }

    /** {@inheritDoc} */
    @Override
    public void set(Object key, Object value) {
        // Record insertion order only for keys not already present.
        if (!data.containsKey(key)) {
            order.add(key);
        }
        super.set(key, value);
    }

    /** {@inheritDoc} */
    @Override
    public void set(Object key, HashTree t) {
        if (!data.containsKey(key)) {
            order.add(key);
        }
        super.set(key, t);
    }

    /** {@inheritDoc} */
    @Override
    public void set(Object key, Object[] values) {
        if (!data.containsKey(key)) {
            order.add(key);
        }
        super.set(key, values);
    }

    /** {@inheritDoc} */
    @Override
    public void set(Object key, Collection<?> values) {
        if (!data.containsKey(key)) {
            order.add(key);
        }
        super.set(key, values);
    }

    /** {@inheritDoc} */
    @Override
    public void replaceKey(Object currentKey, Object newKey) {
        HashTree tree = getTree(currentKey);
        data.remove(currentKey);
        data.put(newKey, tree);
        // find order.indexOf(currentKey) using == rather than equals()
        // there may be multiple entries which compare equals (Bug 50898)
        // This will be slightly slower than the built-in method,
        // but replace() is not used frequently.
        int entry=-1;
        for (int i=0; i < order.size(); i++) {
            Object ent = order.get(i);
            if (ent == currentKey) {
                entry = i;
                break;
            }
        }
        if (entry == -1) {
            throw new JMeterError("Impossible state, data key not present in order: "+currentKey.getClass());
        }
        order.set(entry, newKey);
    }

    /** {@inheritDoc} */
    @Override
    public HashTree createNewTree() {
        return new ListedHashTree();
    }

    /** {@inheritDoc} */
    @Override
    public HashTree createNewTree(Object key) {
        return new ListedHashTree(key);
    }

    /** {@inheritDoc} */
    @Override
    public HashTree createNewTree(Collection<?> values) {
        return new ListedHashTree(values);
    }

    /** {@inheritDoc} */
    @Override
    public HashTree add(Object key) {
        // Create a fresh subtree for a new key; existing keys return their subtree.
        if (!data.containsKey(key)) {
            HashTree newTree = createNewTree();
            data.put(key, newTree);
            order.add(key);
            return newTree;
        }
        return getTree(key);
    }

    /** {@inheritDoc} */
    @Override
    public Collection<Object> list() {
        // Returns the live insertion-order list (not a copy).
        return order;
    }

    /** {@inheritDoc} */
    @Override
    public HashTree remove(Object key) {
        order.remove(key);
        return data.remove(key);
    }

    /** {@inheritDoc} */
    @Override
    public Object[] getArray() {
        return order.toArray();
    }

    /** {@inheritDoc} */
    // Make sure the hashCode depends on the order as well
    @Override
    public int hashCode() {
        int hc = 17;
        hc = hc * 37 + (order == null ? 0 : order.hashCode());
        hc = hc * 37 + super.hashCode();
        return hc;
    }

    /** {@inheritDoc} */
    @Override
    public boolean equals(Object o) {
        // Two ListedHashTrees are equal only if contents AND insertion order match.
        if (!(o instanceof ListedHashTree)) {
            return false;
        }
        ListedHashTree lht = (ListedHashTree) o;
        return (super.equals(lht) && order.equals(lht.order));
    }

    // Default serialization; kept explicit so the serialized form is pinned.
    private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
        ois.defaultReadObject();
    }

    private void writeObject(ObjectOutputStream oos) throws IOException {
        oos.defaultWriteObject();
    }

    /** {@inheritDoc} */
    @Override
    public void clear() {
        super.clear();
        order.clear();
    }
}
| apache-2.0 |
spasam/spring-ldap | core/src/test/java/org/springframework/ldap/filter/NotPresentFilterTest.java | 2122 | /*
* Copyright 2005-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.ldap.filter;
import junit.framework.TestCase;
import org.springframework.ldap.filter.AndFilter;
import org.springframework.ldap.filter.NotFilter;
import com.gargoylesoftware.base.testing.EqualsTester;
/**
* Unit tests for the NotPresentFilter class.
*
* @author Ulrik Sandberg
*/
public class NotPresentFilterTest extends TestCase {
public void testNotPresentFilter() {
NotPresentFilter filter = new NotPresentFilter("foo");
assertEquals("(!(foo=*))", filter.encode());
NotFilter notFilter = new NotFilter(new NotPresentFilter("foo"));
assertEquals("(!(!(foo=*)))", notFilter.encode());
AndFilter andFilter = new AndFilter();
andFilter.and(new NotPresentFilter("foo"));
andFilter.and(new NotPresentFilter("bar"));
assertEquals("(&(!(foo=*))(!(bar=*)))", andFilter.encode());
andFilter = new AndFilter();
andFilter.and(new NotPresentFilter("foo"));
andFilter.and(new NotFilter(new NotPresentFilter("bar")));
assertEquals("(&(!(foo=*))(!(!(bar=*))))", andFilter.encode());
}
public void testEquals() {
String attribute = "foo";
NotPresentFilter originalObject = new NotPresentFilter(attribute);
NotPresentFilter identicalObject = new NotPresentFilter(attribute);
NotPresentFilter differentObject = new NotPresentFilter("bar");
NotPresentFilter subclassObject = new NotPresentFilter(attribute) {
};
new EqualsTester(originalObject, identicalObject, differentObject,
subclassObject);
}
} | apache-2.0 |
gelldur/android-beacon-library | src/main/java/org/altbeacon/beacon/service/BeaconService.java | 22166 | /**
* Radius Networks, Inc.
* http://www.radiusnetworks.com
*
* @author David G. Young
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.altbeacon.beacon.service;
import android.annotation.TargetApi;
import android.app.Service;
import android.bluetooth.BluetoothDevice;
import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.os.AsyncTask;
import android.os.Binder;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import org.altbeacon.beacon.Beacon;
import org.altbeacon.beacon.BeaconManager;
import org.altbeacon.beacon.BeaconParser;
import org.altbeacon.beacon.BuildConfig;
import org.altbeacon.beacon.Region;
import org.altbeacon.beacon.distance.DistanceCalculator;
import org.altbeacon.beacon.distance.ModelSpecificDistanceCalculator;
import org.altbeacon.beacon.logging.LogManager;
import org.altbeacon.beacon.service.scanner.CycledLeScanCallback;
import org.altbeacon.beacon.service.scanner.CycledLeScanner;
import org.altbeacon.beacon.service.scanner.NonBeaconLeScanCallback;
import org.altbeacon.bluetooth.BluetoothCrashResolver;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
/**
* @author dyoung
*/
@TargetApi(5)
public class BeaconService extends Service {
public static final String TAG = "BeaconService";
// Per-region ranging/monitoring bookkeeping.
private final Map<Region, RangeState> rangedRegionState = new HashMap<Region, RangeState>();
private final Map<Region, MonitorState> monitoredRegionState = new HashMap<Region, MonitorState>();
int trackedBeaconsPacketCount;
private final Handler handler = new Handler();
// Incremented in onBind(), decremented in onUnbind().
private int bindCount = 0;
private BluetoothCrashResolver bluetoothCrashResolver;
private DistanceCalculator defaultDistanceCalculator = null;
private BeaconManager beaconManager;
private List<BeaconParser> beaconParsers;
private CycledLeScanner mCycledScanner;
private boolean mBackgroundFlag = false;
private final GattBeaconTracker mGattBeaconTracker = new GattBeaconTracker();
// Fixed-size pool created in onCreate() (cores + 1 threads).
private ExecutorService mExecutor;

/*
 * The scan period is how long we wait between restarting the BLE advertisement scans
 * Each time we restart we only see the unique advertisements once (e.g. unique beacons)
 * So if we want updates, we have to restart.  For updates at 1Hz, ideally we
 * would restart scanning that often to get the same update rate.  The trouble is that when you
 * restart scanning, it is not instantaneous, and you lose any beacon packets that were in the
 * air during the restart.  So the more frequently you restart, the more packets you lose.  The
 * frequency is therefore a tradeoff.  Testing with 14 beacons, transmitting once per second,
 * here are the counts I got for various values of the SCAN_PERIOD:
 *
 * Scan period     Avg beacons      % missed
 *    1s               6                 57
 *    2s              10                 29
 *    3s              12                 14
 *    5s              14                  0
 *
 * Also, because beacons transmit once per second, the scan period should not be an even multiple
 * of seconds, because then it may always miss a beacon that is synchronized with when it is stopping
 * scanning.
 *
 */

// Loaded reflectively from org.altbeacon.beacon.SimulatedScanData in onCreate(), if present.
private List<Beacon> simulatedScanData = null;
/**
 * Class used for the client Binder.  Because we know this service always
 * runs in the same process as its clients, we don't need to deal with IPC.
 */
public class BeaconBinder extends Binder {
    public BeaconBinder getService;
    public BeaconService getService() {
        LogManager.i(TAG, "getService of BeaconBinder called");
        // Return this instance of LocalService so clients can call public methods
        return BeaconService.this;
    }
}
/**
 * Message codes understood by {@link IncomingHandler#handleMessage}.
 * Clients send these through the Messenger returned from onBind(), with a
 * StartRMData instance as the message's obj payload.
 * (The previous comment, "display a message", was Android-sample boilerplate.)
 */
public static final int MSG_START_RANGING = 2;
public static final int MSG_STOP_RANGING = 3;
public static final int MSG_START_MONITORING = 4;
public static final int MSG_STOP_MONITORING = 5;
public static final int MSG_SET_SCAN_PERIODS = 6;
/**
 * Handles client messages delivered through the Messenger returned from
 * onBind(). Static with a weak service reference so a queued Message cannot
 * keep a destroyed BeaconService alive.
 */
static class IncomingHandler extends Handler {
    private final WeakReference<BeaconService> mService;

    IncomingHandler(BeaconService service) {
        mService = new WeakReference<BeaconService>(service);
    }

    @Override
    public void handleMessage(Message msg) {
        BeaconService service = mService.get();
        if (service == null) {
            // Service already gone; nothing to do for any message type.
            return;
        }
        // msg.obj is cast inside the cases that need it, so an unrecognized
        // message whose obj is not a StartRMData reaches super.handleMessage()
        // instead of throwing ClassCastException up front.
        switch (msg.what) {
            case MSG_START_RANGING: {
                LogManager.i(TAG, "start ranging received");
                StartRMData startRMData = (StartRMData) msg.obj;
                service.startRangingBeaconsInRegion(startRMData.getRegionData(), new org.altbeacon.beacon.service.Callback(startRMData.getCallbackPackageName()));
                service.setScanPeriods(startRMData.getScanPeriod(), startRMData.getBetweenScanPeriod(), startRMData.getBackgroundFlag());
                break;
            }
            case MSG_STOP_RANGING: {
                LogManager.i(TAG, "stop ranging received");
                StartRMData startRMData = (StartRMData) msg.obj;
                service.stopRangingBeaconsInRegion(startRMData.getRegionData());
                service.setScanPeriods(startRMData.getScanPeriod(), startRMData.getBetweenScanPeriod(), startRMData.getBackgroundFlag());
                break;
            }
            case MSG_START_MONITORING: {
                LogManager.i(TAG, "start monitoring received");
                StartRMData startRMData = (StartRMData) msg.obj;
                service.startMonitoringBeaconsInRegion(startRMData.getRegionData(), new org.altbeacon.beacon.service.Callback(startRMData.getCallbackPackageName()));
                service.setScanPeriods(startRMData.getScanPeriod(), startRMData.getBetweenScanPeriod(), startRMData.getBackgroundFlag());
                break;
            }
            case MSG_STOP_MONITORING: {
                LogManager.i(TAG, "stop monitoring received");
                StartRMData startRMData = (StartRMData) msg.obj;
                service.stopMonitoringBeaconsInRegion(startRMData.getRegionData());
                service.setScanPeriods(startRMData.getScanPeriod(), startRMData.getBetweenScanPeriod(), startRMData.getBackgroundFlag());
                break;
            }
            case MSG_SET_SCAN_PERIODS: {
                LogManager.i(TAG, "set scan intervals received");
                StartRMData startRMData = (StartRMData) msg.obj;
                service.setScanPeriods(startRMData.getScanPeriod(), startRMData.getBetweenScanPeriod(), startRMData.getBackgroundFlag());
                break;
            }
            default:
                super.handleMessage(msg);
        }
    }
}
/**
 * Target we publish for clients to send messages to IncomingHandler.
 * Its IBinder is handed out from {@link #onBind}.
 */
final Messenger mMessenger = new Messenger(new IncomingHandler(this));
/**
 * When binding to the service, we return an interface to our messenger
 * for sending messages to the service.
 */
@Override
public IBinder onBind(Intent intent) {
    LogManager.i(TAG, "binding");
    // Track how many clients are bound (balanced by onUnbind).
    bindCount++;
    return mMessenger.getBinder();
}
/**
 * Called when the last client unbinds.
 *
 * @return {@code false} so {@code onRebind} is not used; a re-binding client
 *         goes through {@code onBind} again
 */
@Override
public boolean onUnbind(Intent intent) {
    LogManager.i(TAG, "unbinding");
    bindCount--;
    return false;
}
/**
 * One-time service initialization: starts the Bluetooth crash watchdog,
 * creates the private scan-processing executor, builds the cycled LE
 * scanner, wires the distance calculator, and optionally loads simulated
 * scan data via reflection (a test-only hook).
 */
@Override
public void onCreate() {
    LogManager.i(TAG, "beaconService version %s is starting up", BuildConfig.VERSION_NAME );
    bluetoothCrashResolver = new BluetoothCrashResolver(this);
    bluetoothCrashResolver.start();
    // Create a private executor so we don't compete with threads used by AsyncTask
    // This uses fewer threads than the default executor so it won't hog CPU
    mExecutor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() + 1);
    mCycledScanner = CycledLeScanner.createScanner(this, BeaconManager.DEFAULT_FOREGROUND_SCAN_PERIOD,
            BeaconManager.DEFAULT_FOREGROUND_BETWEEN_SCAN_PERIOD, mBackgroundFlag, mCycledLeScanCallback, bluetoothCrashResolver);
    beaconManager = BeaconManager.getInstanceForApplication(getApplicationContext());
    beaconParsers = beaconManager.getBeaconParsers();
    defaultDistanceCalculator = new ModelSpecificDistanceCalculator(this, BeaconManager.getDistanceModelUpdateUrl());
    Beacon.setDistanceCalculator(defaultDistanceCalculator);
    // Look for simulated scan data. The class is resolved reflectively so the
    // library does not require it at compile time; absence is the normal case.
    try {
        Class klass = Class.forName("org.altbeacon.beacon.SimulatedScanData");
        java.lang.reflect.Field f = klass.getField("beacons");
        this.simulatedScanData = (List<Beacon>) f.get(null);
    } catch (ClassNotFoundException e) {
        LogManager.d(TAG, "No org.altbeacon.beacon.SimulatedScanData class exists.");
    } catch (Exception e) {
        LogManager.e(e, TAG, "Cannot get simulated Scan data. Make sure your org.altbeacon.beacon.SimulatedScanData class defines a field with the signature 'public static List<Beacon> beacons'");
    }
}
/**
 * Tears down scanning: stops the crash watchdog, clears any pending handler
 * messages, and stops the cycled scanner. No-ops below API 18, where BLE
 * scanning is unavailable.
 */
@Override
@TargetApi(18)
public void onDestroy() {
    if (android.os.Build.VERSION.SDK_INT < 18) {
        LogManager.w(TAG, "Not supported prior to API 18.");
        return;
    }
    bluetoothCrashResolver.stop();
    LogManager.i(TAG, "onDestroy called. stopping scanning");
    handler.removeCallbacksAndMessages(null);
    mCycledScanner.stop();
}
/**
* methods for clients
*/
/**
 * Registers (or replaces) a ranging request for the given region and makes
 * sure the scanner is running. The map is keyed on region equality, so a
 * stale entry for an equal region is evicted first to avoid retaining the
 * old key object.
 */
public void startRangingBeaconsInRegion(Region region, Callback callback) {
    synchronized (rangedRegionState) {
        RangeState displaced = rangedRegionState.remove(region);
        if (displaced != null) {
            LogManager.i(TAG, "Already ranging that region -- will replace existing region.");
        }
        rangedRegionState.put(region, new RangeState(callback));
        LogManager.d(TAG, "Currently ranging %s regions.", rangedRegionState.size());
    }
    mCycledScanner.start();
}
/**
 * Deregisters a ranging request; stops the scanner when nothing is being
 * ranged or monitored any more.
 */
public void stopRangingBeaconsInRegion(Region region) {
    int rangedRegionCount;
    synchronized (rangedRegionState) {
        rangedRegionState.remove(region);
        rangedRegionCount = rangedRegionState.size();
        LogManager.d(TAG, "Currently ranging %s regions.", rangedRegionState.size());
    }
    // NOTE(review): monitoredRegionState.size() is read here without holding the
    // monitoredRegionState lock, unlike the symmetric check in
    // stopMonitoringBeaconsInRegion — presumably a benign race, but confirm.
    if (rangedRegionCount == 0 && monitoredRegionState.size() == 0) {
        mCycledScanner.stop();
    }
}
/**
 * Registers (or replaces) a monitoring request for the given region and
 * makes sure the scanner is running. An equal region already present is
 * evicted first so its key object is not retained by the map.
 */
public void startMonitoringBeaconsInRegion(Region region, Callback callback) {
    LogManager.d(TAG, "startMonitoring called");
    synchronized (monitoredRegionState) {
        MonitorState displaced = monitoredRegionState.remove(region);
        if (displaced != null) {
            LogManager.i(TAG, "Already monitoring that region -- will replace existing region monitor.");
        }
        monitoredRegionState.put(region, new MonitorState(callback));
    }
    LogManager.d(TAG, "Currently monitoring %s regions.", monitoredRegionState.size());
    mCycledScanner.start();
}
/**
 * Deregisters a monitoring request; stops the scanner when nothing is being
 * monitored or ranged any more.
 */
public void stopMonitoringBeaconsInRegion(Region region) {
    int monitoredRegionCount;
    LogManager.d(TAG, "stopMonitoring called");
    synchronized (monitoredRegionState) {
        monitoredRegionState.remove(region);
        monitoredRegionCount = monitoredRegionState.size();
    }
    LogManager.d(TAG, "Currently monitoring %s regions.", monitoredRegionState.size());
    // NOTE(review): rangedRegionState.size() is read without its lock here —
    // mirror image of the race noted in stopRangingBeaconsInRegion; confirm.
    if (monitoredRegionCount == 0 && rangedRegionState.size() == 0) {
        mCycledScanner.stop();
    }
}
/**
 * Forwards new scan timing to the cycled scanner.
 *
 * @param scanPeriod        active scan duration in milliseconds
 * @param betweenScanPeriod idle time between scan cycles in milliseconds
 * @param backgroundFlag    whether the client is in background mode
 */
public void setScanPeriods(long scanPeriod, long betweenScanPeriod, boolean backgroundFlag) {
    mCycledScanner.setScanPeriods(scanPeriod, betweenScanPeriod, backgroundFlag);
}
/**
 * Receives raw LE scan results from the cycled scanner. Each scan record is
 * parsed off-thread on the private executor; at the end of every scan cycle
 * the accumulated monitor/range state is flushed to clients and any simulated
 * beacon data is injected (debug builds only).
 */
protected final CycledLeScanCallback mCycledLeScanCallback = new CycledLeScanCallback() {
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    @Override
    public void onLeScan(BluetoothDevice device, int rssi, byte[] scanRecord) {
        NonBeaconLeScanCallback nonBeaconLeScanCallback = beaconManager.getNonBeaconLeScanCallback();
        try {
            new ScanProcessor(nonBeaconLeScanCallback).executeOnExecutor(mExecutor,
                    new ScanData(device, rssi, scanRecord));
        }
        catch (RejectedExecutionException e) {
            // The executor queue is full; dropping results is preferable to blocking
            // the Bluetooth callback thread.
            LogManager.w(TAG, "Ignoring scan result because we cannot keep up.");
        }
    }

    @Override
    public void onCycleEnd() {
        processExpiredMonitors();
        processRangeData();
        // If we want to use simulated scanning data, do it here. This is used for testing in an emulator
        if (simulatedScanData != null) {
            // if simulatedScanData is provided, it will be seen every scan cycle. *in addition* to anything actually seen in the air
            // it will not be used if we are not in debug mode
            LogManager.w(TAG, "Simulated scan data is deprecated and will be removed in a future release. Please use the new BeaconSimulator interface instead.");
            // BUGFIX: test the flag with "&", not "&=". The compound assignment
            // wrote back into getApplicationInfo().flags, permanently clearing
            // every flag bit other than FLAG_DEBUGGABLE.
            if (0 != (getApplicationInfo().flags & ApplicationInfo.FLAG_DEBUGGABLE)) {
                for (Beacon beacon : simulatedScanData) {
                    processBeaconFromScan(beacon);
                }
            } else {
                LogManager.w(TAG, "Simulated scan data provided, but ignored because we are not running in debug mode. Please remove simulated scan data for production.");
            }
        }
        if (BeaconManager.getBeaconSimulator() != null) {
            // if simulatedScanData is provided, it will be seen every scan cycle. *in addition* to anything actually seen in the air
            // it will not be used if we are not in debug mode
            if (BeaconManager.getBeaconSimulator().getBeacons() != null) {
                // BUGFIX: "&" instead of "&=" here as well (see above).
                if (0 != (getApplicationInfo().flags & ApplicationInfo.FLAG_DEBUGGABLE)) {
                    for (Beacon beacon : BeaconManager.getBeaconSimulator().getBeacons()) {
                        processBeaconFromScan(beacon);
                    }
                } else {
                    LogManager.w(TAG, "Beacon simulations provided, but ignored because we are not running in debug mode. Please remove beacon simulations for production.");
                }
            } else {
                LogManager.w(TAG, "getBeacons is returning null. No simulated beacons to report.");
            }
        }
    }
};
/**
 * Flushes one scan cycle's worth of ranging results: for every ranged
 * region, finalizes its beacon set and delivers it through the region's
 * registered callback.
 */
private void processRangeData() {
    synchronized (rangedRegionState) {
        for (Region rangedRegion : rangedRegionState.keySet()) {
            RangeState state = rangedRegionState.get(rangedRegion);
            LogManager.d(TAG, "Calling ranging callback");
            RangingData data = new RangingData(state.finalizeBeacons(), rangedRegion);
            state.getCallback().call(BeaconService.this, "rangingData", data);
        }
    }
}
/**
 * Notifies clients of monitored regions whose beacons have just stopped
 * being seen (region-exit events) at the end of a scan cycle.
 */
private void processExpiredMonitors() {
    synchronized (monitoredRegionState) {
        for (Region monitoredRegion : monitoredRegionState.keySet()) {
            MonitorState monitorState = monitoredRegionState.get(monitoredRegion);
            if (!monitorState.isNewlyOutside()) {
                continue;
            }
            LogManager.d(TAG, "found a monitor that expired: %s", monitoredRegion);
            MonitoringData data = new MonitoringData(monitorState.isInside(), monitoredRegion);
            monitorState.getCallback().call(BeaconService.this, "monitoringData", data);
        }
    }
}
/**
 * Routes one parsed beacon detection into monitoring and ranging state:
 * records stats, runs the GATT extra-data tracker (which may suppress the
 * detection by returning null), fires region-entry callbacks for newly
 * matched monitored regions, and feeds the beacon into each matching
 * ranged region's RangeState.
 */
private void processBeaconFromScan(Beacon beacon) {
    if (Stats.getInstance().isEnabled()) {
        Stats.getInstance().log(beacon);
    }
    trackedBeaconsPacketCount++;
    if (LogManager.isVerboseLoggingEnabled()) {
        LogManager.d(TAG,
                "beacon detected : %s", beacon.toString());
    }
    beacon = mGattBeaconTracker.track(beacon);
    // If this is a Gatt beacon that should be ignored, it will be set to null as a result of
    // the above
    if (beacon == null) {
        if (LogManager.isVerboseLoggingEnabled()) {
            LogManager.d(TAG,
                    "not processing detections for GATT extra data beacon");
        }
    }
    else {
        // Monitoring: match against monitored regions under the monitor lock,
        // but fire callbacks outside it so alien code does not run while locked.
        List<Region> matchedRegions = null;
        synchronized(monitoredRegionState) {
            matchedRegions = matchingRegions(beacon,
                    monitoredRegionState.keySet());
        }
        Iterator<Region> matchedRegionIterator = matchedRegions.iterator();
        while (matchedRegionIterator.hasNext()) {
            Region region = matchedRegionIterator.next();
            // state may be null if the region was unregistered concurrently.
            MonitorState state = monitoredRegionState.get(region);
            // markInside() is true only on a transition, so the entry callback
            // fires once per region entry, not once per detection.
            if (state != null && state.markInside()) {
                state.getCallback().call(BeaconService.this, "monitoringData",
                        new MonitoringData(state.isInside(), region));
            }
        }
        LogManager.d(TAG, "looking for ranging region matches for this beacon");
        synchronized (rangedRegionState) {
            matchedRegions = matchingRegions(beacon, rangedRegionState.keySet());
            matchedRegionIterator = matchedRegions.iterator();
            while (matchedRegionIterator.hasNext()) {
                Region region = matchedRegionIterator.next();
                LogManager.d(TAG, "matches ranging region: %s", region);
                RangeState rangeState = rangedRegionState.get(region);
                if (rangeState != null) {
                    rangeState.addBeacon(beacon);
                }
            }
        }
    }
}
/**
 * Immutable value object carrying one raw LE scan result from the Bluetooth
 * callback thread to the background {@link ScanProcessor}. Fields are final
 * so the instance is safely published across the executor boundary.
 */
private class ScanData {
    final BluetoothDevice device;
    final int rssi;
    final byte[] scanRecord;

    public ScanData(BluetoothDevice device, int rssi, byte[] scanRecord) {
        this.device = device;
        this.rssi = rssi;
        this.scanRecord = scanRecord;
    }
}
/**
 * Background task that parses one raw scan record. The first
 * {@link BeaconParser} that recognizes the advertisement wins; recognized
 * beacons are fed into region processing, unrecognized ones are forwarded to
 * the optional non-beacon callback. The previous empty overrides of
 * onPreExecute/onPostExecute/onProgressUpdate were removed — AsyncTask's
 * defaults are already no-ops.
 */
private class ScanProcessor extends AsyncTask<ScanData, Void, Void> {
    final DetectionTracker mDetectionTracker = DetectionTracker.getInstance();
    private final NonBeaconLeScanCallback mNonBeaconLeScanCallback;

    public ScanProcessor(NonBeaconLeScanCallback nonBeaconLeScanCallback) {
        mNonBeaconLeScanCallback = nonBeaconLeScanCallback;
    }

    @Override
    protected Void doInBackground(ScanData... params) {
        ScanData scanData = params[0];
        Beacon beacon = null;
        // Try each configured parser until one decodes the advertisement.
        for (BeaconParser parser : BeaconService.this.beaconParsers) {
            beacon = parser.fromScanData(scanData.scanRecord,
                    scanData.rssi, scanData.device);
            if (beacon != null) {
                break;
            }
        }
        if (beacon != null) {
            mDetectionTracker.recordDetection();
            processBeaconFromScan(beacon);
        } else {
            if (mNonBeaconLeScanCallback != null) {
                mNonBeaconLeScanCallback.onNonBeaconLeScan(scanData.device, scanData.rssi, scanData.scanRecord);
            }
        }
        return null;
    }
}
/**
 * Returns the subset of {@code regions} whose criteria match the given
 * beacon. Null entries are tolerated because another thread may remove a
 * region from the backing collection while we iterate over it.
 */
private List<Region> matchingRegions(Beacon beacon, Collection<Region> regions) {
    List<Region> matched = new ArrayList<Region>();
    for (Region candidate : regions) {
        if (candidate == null) {
            // Removed concurrently by another thread; skip it.
            continue;
        }
        if (candidate.matchesBeacon(beacon)) {
            matched.add(candidate);
        } else {
            LogManager.d(TAG, "This region (%s) does not match beacon: %s", candidate, beacon);
        }
    }
    return matched;
}
}
| apache-2.0 |
karreiro/uberfire | uberfire-extensions/uberfire-layout-editor/uberfire-layout-editor-client/src/main/java/org/uberfire/ext/layout/editor/client/infra/DragHelperComponentColumn.java | 3870 | /*
* Copyright 2015 JBoss, by Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.layout.editor.client.infra;
import java.util.function.Supplier;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import com.google.gwt.core.client.GWT;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Widget;
import org.gwtbootstrap3.client.ui.Modal;
import org.jboss.errai.common.client.dom.DOMUtil;
import org.jboss.errai.common.client.dom.Document;
import org.jboss.errai.common.client.dom.HTMLElement;
import org.uberfire.ext.layout.editor.api.editor.LayoutComponent;
import org.uberfire.ext.layout.editor.api.editor.LayoutTemplate;
import org.uberfire.ext.layout.editor.client.api.HasModalConfiguration;
import org.uberfire.ext.layout.editor.client.api.LayoutDragComponent;
import org.uberfire.ext.layout.editor.client.api.ModalConfigurationContext;
import org.uberfire.ext.layout.editor.client.api.RenderingContext;
import org.uberfire.mvp.Command;
import static org.jboss.errai.common.client.dom.DOMUtil.addCSSClass;
/**
 * Helper for layout-editor component columns: lazily resolves the drag
 * component bean for a {@link LayoutComponent}, builds its preview DOM
 * element, and opens its configuration modal when the component supports one.
 */
@Dependent
public class DragHelperComponentColumn {

    @Inject
    LayoutDragComponentHelper helper;

    @Inject
    Document document;

    private LayoutDragComponent layoutDragComponent;
    private LayoutComponent layoutComponent;

    /** Resolves (once) and caches the drag component bean for this layout component. */
    public LayoutDragComponent getLayoutDragComponent() {
        if (layoutDragComponent == null) {
            String dragTypeName = layoutComponent.getDragTypeName();
            layoutDragComponent = helper.lookupDragTypeBean(dragTypeName);
        }
        return layoutDragComponent;
    }

    /** @return true when the resolved drag component exposes a configuration modal. */
    public boolean hasModalConfiguration() {
        return getLayoutDragComponent() instanceof HasModalConfiguration;
    }

    public void setLayoutComponent(LayoutComponent layoutComponent) {
        this.layoutComponent = layoutComponent;
    }

    /** Builds the preview DOM for this component, wrapped in a GWT FlowPanel. */
    public HTMLElement getPreviewElement(Widget context) {
        HTMLElement previewDiv = document.createElement("div");
        addCSSClass(previewDiv,
                    "uf-perspective-col");
        addCSSClass(previewDiv,
                    "uf-le-overflow");
        FlowPanel wrapper = GWT.create(FlowPanel.class);
        wrapper.getElement().addClassName("uf-perspective-col");
        RenderingContext renderingContext = new RenderingContext(layoutComponent,
                                                                 context);
        wrapper.add(getLayoutDragComponent().getPreviewWidget(renderingContext).asWidget());
        DOMUtil.appendWidgetToElement(previewDiv,
                                      wrapper);
        return previewDiv;
    }

    /** Opens the component's configuration modal with the given finish/cancel callbacks. */
    public void showConfigModal(Command configurationFinish,
                                Command configurationCanceled,
                                Supplier<LayoutTemplate> currentLayoutTemplateSupplier) {
        ModalConfigurationContext ctx = new ModalConfigurationContext(layoutComponent,
                                                                      configurationFinish,
                                                                      configurationCanceled,
                                                                      currentLayoutTemplateSupplier);
        Modal configModal = ((HasModalConfiguration) getLayoutDragComponent()).getConfigurationModal(ctx);
        configModal.show();
    }
}
| apache-2.0 |
aeshell/aesh | aesh/src/main/java/org/aesh/command/activator/CommandActivatorProvider.java | 1055 | /*
* JBoss, Home of Professional Open Source
* Copyright 2014 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.aesh.command.activator;
/**
* @author jdenise@redhat.com
*/
public interface CommandActivatorProvider<CA extends CommandActivator> {
    /**
     * Wraps or augments the given activator with provider-specific behavior.
     *
     * @param commandActivator the activator to enhance; implementations decide
     *                         whether to wrap it or return it adapted
     * @return the enhanced activator of the provider's concrete type
     */
    CA enhanceCommandActivator(CommandActivator commandActivator);
}
| apache-2.0 |
cyberdrcarr/guvnor | guvnor-webapp-drools/src/main/java/org/drools/guvnor/shared/simulation/SimulationPathModel.java | 3985 | /*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.shared.simulation;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import org.drools.guvnor.shared.api.PortableObject;
import java.util.Random;
import java.util.TreeMap;
/**
* This is a DTO.
*/
/**
 * DTO describing one simulation path: an ordered sequence of steps keyed by
 * their distance (in milliseconds) from the path start. Step distances must
 * be unique within a path because they are the map keys.
 */
@XStreamAlias("SimulationPathModel")
public class SimulationPathModel implements PortableObject {

    /** Creates a path pre-populated with one step (plus temporary demo data). */
    public static SimulationPathModel createNew() {
        SimulationPathModel path = new SimulationPathModel();
        path.addStep(SimulationStepModel.createNew(path));
        todoCreateTestdata(path);
        return path;
    }

    // TODO remove me
    private static void todoCreateTestdata(SimulationPathModel path) {
        long nextDistanceMillis = 1000;
        Random random = new Random();
        for (int i = 0; i < 5; i++) {
            nextDistanceMillis += random.nextInt(2000) + 1000;
            if (random.nextBoolean()) {
                SimulationStepModel step = SimulationStepModel.createNew(path);
                step.setDistanceMillis(nextDistanceMillis);
                path.addStep(step);
            }
        }
    }

    private String name;
    // Steps keyed (and therefore sorted) by distanceMillis.
    // TODO the field type should be NavigableMap - http://code.google.com/p/google-web-toolkit/issues/detail?id=4236
    private TreeMap<Long, SimulationStepModel> steps = new TreeMap<Long, SimulationStepModel>();

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    // TODO the return type should be NavigableMap - http://code.google.com/p/google-web-toolkit/issues/detail?id=4236
    public TreeMap<Long, SimulationStepModel> getSteps() {
        return steps;
    }

    /**
     * Adds a step owned by this path. A missing distance is generated; a
     * duplicate distance is rejected because it is the map key.
     */
    public void addStep(SimulationStepModel step) {
        checkStepBelongsToThisPath(step);
        if (step.getDistanceMillis() == null) {
            generateStepDistanceMillis(step);
        }
        if (steps.containsKey(step.getDistanceMillis())) {
            throw new IllegalArgumentException("The simulation step's distanceMillis ("
                    + step.getDistanceMillis() + ") is not unique.");
        }
        steps.put(step.getDistanceMillis(), step);
    }

    /** Removes a step owned by this path, keyed by its current distanceMillis. */
    public void removeStep(SimulationStepModel step) {
        checkStepBelongsToThisPath(step);
        Long distanceMillis = step.getDistanceMillis();
        if (!steps.containsKey(distanceMillis)) {
            throw new IllegalArgumentException("The simulation step's distanceMillis ("
                    + distanceMillis + ") is not known. It probably changed while it was in the Map.");
        }
        steps.remove(distanceMillis);
    }

    // Shared ownership guard for addStep/removeStep (was duplicated inline).
    private void checkStepBelongsToThisPath(SimulationStepModel step) {
        if (step.getPath() != this) {
            throw new IllegalArgumentException("The simulation step's path ("
                    + step.getPath() + ") is not this path (" + this + ").");
        }
    }

    // Assigns the next free distance: 0 for an empty path, else last + 1 second.
    private void generateStepDistanceMillis(SimulationStepModel step) {
        Long stepDistanceMillis;
        if (steps.isEmpty()) {
            stepDistanceMillis = 0L;
        } else {
            stepDistanceMillis = steps.lastKey() + 1000L;
        }
        step.setDistanceMillis(stepDistanceMillis);
    }

    /** @return the distance of the farthest step, or 0 for an empty path. */
    public long getMaximumDistanceMillis() {
        if (steps.isEmpty()) {
            return 0L;
        }
        return steps.lastKey();
    }
}
| apache-2.0 |
mbiarnes/uberfire | uberfire-extensions/uberfire-preferences-ui-client/src/main/java/org/uberfire/ext/preferences/client/central/tree/TreeHierarchyStructurePresenter.java | 9165 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.preferences.client.central.tree;
import java.util.ArrayList;
import java.util.Collection;
import javax.enterprise.context.Dependent;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.ErrorCallback;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.jboss.errai.ioc.client.api.ManagedInstance;
import org.uberfire.client.mvp.PlaceManager;
import org.uberfire.client.mvp.UberElement;
import org.uberfire.ext.preferences.client.central.hierarchy.HierarchyItemPresenter;
import org.uberfire.ext.preferences.client.central.hierarchy.HierarchyStructurePresenter;
import org.uberfire.ext.preferences.client.central.hierarchy.HierarchyStructureView;
import org.uberfire.ext.preferences.client.event.HierarchyItemFormInitializationEvent;
import org.uberfire.ext.preferences.client.event.PreferencesCentralSaveEvent;
import org.uberfire.ext.preferences.client.utils.PreferenceFormBeansInfo;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.ParameterizedCommand;
import org.uberfire.preferences.shared.PreferenceScope;
import org.uberfire.preferences.shared.bean.BasePreference;
import org.uberfire.preferences.shared.bean.BasePreferencePortable;
import org.uberfire.preferences.shared.bean.PreferenceBeanServerStore;
import org.uberfire.preferences.shared.bean.PreferenceBeanStore;
import org.uberfire.preferences.shared.bean.PreferenceHierarchyElement;
import org.uberfire.preferences.shared.impl.PreferenceScopeResolutionStrategyInfo;
import org.uberfire.workbench.events.NotificationEvent;
/**
 * Tree-style presenter for the preferences hierarchy: asks the server to
 * build the hierarchy for a root preference, renders it as internal/leaf
 * tree items, and persists all edited preferences when a save event fires.
 */
@TreeView
@Dependent
public class TreeHierarchyStructurePresenter implements HierarchyStructurePresenter {
    private final View view;
    private final Caller<PreferenceBeanServerStore> preferenceBeanServerStoreCaller;
    private final ManagedInstance<TreeHierarchyInternalItemPresenter> treeHierarchyInternalItemPresenterProvider;
    private final ManagedInstance<TreeHierarchyLeafItemPresenter> treeHierarchyLeafItemPresenterProvider;
    private final Event<HierarchyItemFormInitializationEvent> hierarchyItemFormInitializationEvent;
    private final PlaceManager placeManager;
    private final PreferenceBeanStore store;
    private final Event<NotificationEvent> notification;
    private final PreferenceFormBeansInfo preferenceFormBeansInfo;
    // Root item presenter of the rendered tree (internal node or leaf).
    private HierarchyItemPresenter hierarchyItem;
    // Hierarchy returned by the server for the requested root preference.
    private PreferenceHierarchyElement<?> preferenceElement;
    // Optional scope configuration captured at init() and reused on save.
    private PreferenceScopeResolutionStrategyInfo customScopeResolutionStrategyInfo;
    private PreferenceScope scope;
    @Inject
    public TreeHierarchyStructurePresenter(final View view,
                                           final Caller<PreferenceBeanServerStore> preferenceBeanServerStoreCaller,
                                           final ManagedInstance<TreeHierarchyInternalItemPresenter> treeHierarchyInternalItemPresenterProvider,
                                           final ManagedInstance<TreeHierarchyLeafItemPresenter> treeHierarchyLeafItemPresenterProvider,
                                           final Event<HierarchyItemFormInitializationEvent> hierarchyItemFormInitializationEvent,
                                           final PlaceManager placeManager,
                                           final PreferenceBeanStore store,
                                           final Event<NotificationEvent> notification,
                                           final PreferenceFormBeansInfo preferenceFormBeansInfo) {
        this.view = view;
        this.preferenceBeanServerStoreCaller = preferenceBeanServerStoreCaller;
        this.treeHierarchyInternalItemPresenterProvider = treeHierarchyInternalItemPresenterProvider;
        this.treeHierarchyLeafItemPresenterProvider = treeHierarchyLeafItemPresenterProvider;
        this.hierarchyItemFormInitializationEvent = hierarchyItemFormInitializationEvent;
        this.placeManager = placeManager;
        this.store = store;
        this.notification = notification;
        this.preferenceFormBeansInfo = preferenceFormBeansInfo;
    }
    /**
     * Fetches the hierarchy for {@code rootIdentifier} from the server (with
     * the custom scope strategy when one is given), then builds the tree and
     * initializes the view. The scope arguments are retained for save time.
     */
    @Override
    public void init(final String rootIdentifier,
                     final PreferenceScopeResolutionStrategyInfo customScopeResolutionStrategyInfo,
                     final PreferenceScope scope) {
        final TreeHierarchyStructurePresenter presenter = this;
        this.customScopeResolutionStrategyInfo = customScopeResolutionStrategyInfo;
        this.scope = scope;
        final RemoteCallback<PreferenceHierarchyElement<?>> successCallback = rootPreference -> {
            preferenceElement = rootPreference;
            setupHierarchyItem(rootPreference);
            view.init(presenter);
        };
        final ErrorCallback<Object> errorCallback = (message, throwable) -> {
            throw new RuntimeException(throwable);
        };
        if (customScopeResolutionStrategyInfo != null) {
            preferenceBeanServerStoreCaller.call(successCallback,
                                                 errorCallback).buildHierarchyStructureForPreference(rootIdentifier,
                                                                                                     customScopeResolutionStrategyInfo);
        } else {
            preferenceBeanServerStoreCaller.call(successCallback,
                                                 errorCallback).buildHierarchyStructureForPreference(rootIdentifier);
        }
    }
    /**
     * CDI observer: persists every collected preference when the preferences
     * screen fires its save event, choosing the store overload that matches
     * the scope configuration captured in init(). Outcome is reported via a
     * success/error notification.
     */
    public void saveEvent(@Observes PreferencesCentralSaveEvent event) {
        final Collection<BasePreferencePortable<? extends BasePreference<?>>> preferencesToSave = getPreferencesToSave(preferenceElement);
        final Command successCallback = () -> notification.fire(new NotificationEvent(view.getSaveSuccessMessage(),
                                                                                      NotificationEvent.NotificationType.SUCCESS));
        final ParameterizedCommand<Throwable> errorCallback = parameter -> notification.fire(new NotificationEvent(view.getSaveErrorMessage(parameter.getMessage()),
                                                                                                                   NotificationEvent.NotificationType.ERROR));
        if (scope != null) {
            store.save(preferencesToSave,
                       scope,
                       successCallback,
                       errorCallback);
        } else if (customScopeResolutionStrategyInfo != null) {
            store.save(preferencesToSave,
                       customScopeResolutionStrategyInfo,
                       successCallback,
                       errorCallback);
        } else {
            store.save(preferencesToSave,
                       successCallback,
                       errorCallback);
        }
    }
    // Picks an internal-node or leaf presenter for the root element and
    // auto-selects it when the element is selectable.
    void setupHierarchyItem(final PreferenceHierarchyElement<?> rootPreference) {
        if (rootPreference.hasChildren()) {
            hierarchyItem = treeHierarchyInternalItemPresenterProvider.get();
        } else {
            hierarchyItem = treeHierarchyLeafItemPresenterProvider.get();
        }
        hierarchyItem.init(rootPreference,
                           0,
                           !rootPreference.isSelectable());
        if (rootPreference.isSelectable()) {
            hierarchyItem.fireSelect();
        }
    }
    // Recursively collects portable preferences to persist. Note: only the
    // root element's own portable preference is added directly; children
    // contribute through the recursive calls.
    Collection<BasePreferencePortable<? extends BasePreference<?>>> getPreferencesToSave(final PreferenceHierarchyElement<?> preferenceElement) {
        Collection<BasePreferencePortable<? extends BasePreference<?>>> preferencesToSave = new ArrayList<>();
        if (preferenceElement.isRoot()) {
            preferencesToSave.add((BasePreferencePortable<? extends BasePreference<?>>) preferenceElement.getPortablePreference());
        }
        preferenceElement.getChildren().forEach(childElement -> {
            preferencesToSave.addAll(getPreferencesToSave(childElement));
        });
        return preferencesToSave;
    }
    public HierarchyItemPresenter getHierarchyItem() {
        return hierarchyItem;
    }
    @Override
    public View getView() {
        return view;
    }
    /** View contract for this presenter, including save notification texts. */
    public interface View extends HierarchyStructureView,
                                  UberElement<TreeHierarchyStructurePresenter> {
        String getTranslation(String key);
        String getSaveSuccessMessage();
        String getSaveErrorMessage(String message);
    }
}
| apache-2.0 |
hanst/elasticsearch | src/test/java/org/elasticsearch/search/scroll/SearchScrollTests.java | 28502 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.scroll;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.search.*;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.search.RestClearScrollAction;
import org.elasticsearch.rest.action.search.RestSearchScrollAction;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.junit.Test;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class SearchScrollTests extends ElasticsearchIntegrationTest {
/**
 * Scrolls through 100 sorted documents spread over 3 shards in pages of 35:
 * expects 35 + 35 + 30 hits, with sort values strictly increasing across
 * pages. The scroll context is always cleared in a finally block.
 */
@Test
public void testSimpleScrollQueryThenFetch() throws Exception {
    client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 3)).execute().actionGet();
    // Wait once for the cluster to settle (this call was accidentally duplicated).
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    for (int i = 0; i < 100; i++) {
        client().prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject().field("field", i).endObject()).execute().actionGet();
    }
    client().admin().indices().prepareRefresh().execute().actionGet();
    SearchResponse searchResponse = client().prepareSearch()
            .setQuery(matchAllQuery())
            .setSize(35)
            .setScroll(TimeValue.timeValueMinutes(2))
            .addSort("field", SortOrder.ASC)
            .execute().actionGet();
    try {
        long counter = 0;
        // Page 1: first 35 hits in ascending sort order.
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
        assertThat(searchResponse.getHits().hits().length, equalTo(35));
        for (SearchHit hit : searchResponse.getHits()) {
            assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
        }
        // Page 2: next 35 hits continue the sequence.
        searchResponse = client().prepareSearchScroll(searchResponse.getScrollId())
                .setScroll(TimeValue.timeValueMinutes(2))
                .execute().actionGet();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
        assertThat(searchResponse.getHits().hits().length, equalTo(35));
        for (SearchHit hit : searchResponse.getHits()) {
            assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
        }
        // Page 3: the remaining 30 hits.
        searchResponse = client().prepareSearchScroll(searchResponse.getScrollId())
                .setScroll(TimeValue.timeValueMinutes(2))
                .execute().actionGet();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
        assertThat(searchResponse.getHits().hits().length, equalTo(30));
        for (SearchHit hit : searchResponse.getHits()) {
            assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
        }
    } finally {
        clearScroll(searchResponse.getScrollId());
    }
}
/**
 * Scrolls with a page size (3) smaller than the shard count over documents
 * routed unevenly across 3 shards (roughly 61/30/9 split). Expects 33 full
 * pages of 3, then a final page of 1, then an empty page, all in ascending
 * sort order.
 */
@Test
public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws Exception {
    client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 3)).execute().actionGet();
    // Wait once for the cluster to settle (this call was accidentally duplicated).
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    for (int i = 0; i < 100; i++) {
        // Route docs unevenly: ids 0-60 -> "0", 61-90 -> "2", 91-99 -> "1".
        String routing = "0";
        if (i > 90) {
            routing = "1";
        } else if (i > 60) {
            routing = "2";
        }
        client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", i).setRouting(routing).execute().actionGet();
    }
    client().admin().indices().prepareRefresh().execute().actionGet();
    SearchResponse searchResponse = client().prepareSearch()
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(matchAllQuery())
            .setSize(3)
            .setScroll(TimeValue.timeValueMinutes(2))
            .addSort("field", SortOrder.ASC)
            .execute().actionGet();
    try {
        long counter = 0;
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
        assertThat(searchResponse.getHits().hits().length, equalTo(3));
        for (SearchHit hit : searchResponse.getHits()) {
            assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
        }
        // 32 more full pages of 3 (99 docs consumed in total so far).
        for (int i = 0; i < 32; i++) {
            searchResponse = client().prepareSearchScroll(searchResponse.getScrollId())
                    .setScroll(TimeValue.timeValueMinutes(2))
                    .execute().actionGet();
            assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
            assertThat(searchResponse.getHits().hits().length, equalTo(3));
            for (SearchHit hit : searchResponse.getHits()) {
                assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
            }
        }
        // and now, the last one is one
        searchResponse = client().prepareSearchScroll(searchResponse.getScrollId())
                .setScroll(TimeValue.timeValueMinutes(2))
                .execute().actionGet();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        for (SearchHit hit : searchResponse.getHits()) {
            assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
        }
        // a the last is zero
        searchResponse = client().prepareSearchScroll(searchResponse.getScrollId())
                .setScroll(TimeValue.timeValueMinutes(2))
                .execute().actionGet();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
        assertThat(searchResponse.getHits().hits().length, equalTo(0));
        for (SearchHit hit : searchResponse.getHits()) {
            assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
        }
    } finally {
        clearScroll(searchResponse.getScrollId());
    }
}
@Test
public void testScrollAndUpdateIndex() throws Exception {
    // Scrolls over all matching documents while re-indexing each hit with
    // a modified "message" field, then verifies that after a refresh the
    // term counts have flipped from "test" to "update" for all 500 docs.
    client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 5)).execute().actionGet();
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    for (int i = 0; i < 500; i++) {
        client().prepareIndex("test", "tweet", Integer.toString(i)).setSource(
                jsonBuilder().startObject().field("user", "kimchy").field("postDate", System.currentTimeMillis()).field("message", "test").endObject()).execute().actionGet();
    }
    client().admin().indices().prepareRefresh().execute().actionGet();
    // Baseline counts; each assertion is issued twice in the original
    // (presumably to exercise repeated count requests — kept as-is).
    assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(500l));
    assertThat(client().prepareCount().setQuery(termQuery("message", "test")).execute().actionGet().getCount(), equalTo(500l));
    assertThat(client().prepareCount().setQuery(termQuery("message", "test")).execute().actionGet().getCount(), equalTo(500l));
    assertThat(client().prepareCount().setQuery(termQuery("message", "update")).execute().actionGet().getCount(), equalTo(0l));
    assertThat(client().prepareCount().setQuery(termQuery("message", "update")).execute().actionGet().getCount(), equalTo(0l));
    SearchResponse searchResponse = client().prepareSearch()
            .setQuery(queryStringQuery("user:kimchy"))
            .setSize(35)
            .setScroll(TimeValue.timeValueMinutes(2))
            .addSort("postDate", SortOrder.ASC)
            .execute().actionGet();
    try {
        // Drain the scroll page by page, overwriting each document's
        // "message" with "update" as we go.
        do {
            for (SearchHit searchHit : searchResponse.getHits().hits()) {
                Map<String, Object> map = searchHit.sourceAsMap();
                map.put("message", "update");
                client().prepareIndex("test", "tweet", searchHit.id()).setSource(map).execute().actionGet();
            }
            searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).execute().actionGet();
        } while (searchResponse.getHits().hits().length > 0);
        client().admin().indices().prepareRefresh().execute().actionGet();
        // After the concurrent updates, every doc must carry "update".
        assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(500l));
        assertThat(client().prepareCount().setQuery(termQuery("message", "test")).execute().actionGet().getCount(), equalTo(0l));
        assertThat(client().prepareCount().setQuery(termQuery("message", "test")).execute().actionGet().getCount(), equalTo(0l));
        assertThat(client().prepareCount().setQuery(termQuery("message", "update")).execute().actionGet().getCount(), equalTo(500l));
        assertThat(client().prepareCount().setQuery(termQuery("message", "update")).execute().actionGet().getCount(), equalTo(500l));
    } finally {
        clearScroll(searchResponse.getScrollId());
    }
}
@Test
public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception {
    // Runs two independent scrolls over the same 100 documents, advances
    // each by one page, then clears both scroll ids explicitly and checks
    // that further use of either id yields NOT_FOUND.
    client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 3)).execute().actionGet();
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    for (int i = 0; i < 100; i++) {
        client().prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject().field("field", i).endObject()).execute().actionGet();
    }
    client().admin().indices().prepareRefresh().execute().actionGet();
    SearchResponse searchResponse1 = client().prepareSearch()
            .setQuery(matchAllQuery())
            .setSize(35)
            .setScroll(TimeValue.timeValueMinutes(2))
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .addSort("field", SortOrder.ASC)
            .execute().actionGet();
    SearchResponse searchResponse2 = client().prepareSearch()
            .setQuery(matchAllQuery())
            .setSize(35)
            .setScroll(TimeValue.timeValueMinutes(2))
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .addSort("field", SortOrder.ASC)
            .execute().actionGet();
    // Separate counters verify each scroll advances independently in
    // ascending "field" order.
    long counter1 = 0;
    long counter2 = 0;
    assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse1.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse1.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
    }
    assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse2.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse2.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
    }
    // Advance both scrolls one page each.
    searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId())
            .setScroll(TimeValue.timeValueMinutes(2))
            .execute().actionGet();
    searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId())
            .setScroll(TimeValue.timeValueMinutes(2))
            .execute().actionGet();
    assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse1.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse1.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
    }
    assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse2.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse2.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
    }
    // Clear both ids in a single request; at least one context must be
    // freed and the overall status is OK.
    ClearScrollResponse clearResponse = client().prepareClearScroll()
            .addScrollId(searchResponse1.getScrollId())
            .addScrollId(searchResponse2.getScrollId())
            .execute().actionGet();
    assertThat(clearResponse.isSucceeded(), is(true));
    assertThat(clearResponse.getNumFreed(), greaterThan(0));
    assertThat(clearResponse.status(), equalTo(RestStatus.OK));
    // Cleared scroll ids must no longer be usable.
    assertThrows(client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)), RestStatus.NOT_FOUND);
    assertThrows(client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)), RestStatus.NOT_FOUND);
}
@Test
public void testClearNonExistentScrollId() throws Exception {
    // Clearing a syntactically valid but unknown scroll id must not fail:
    // it reports success with zero freed contexts and NOT_FOUND status.
    createIndex("idx");
    ClearScrollResponse response = client().prepareClearScroll()
            .addScrollId("cXVlcnlUaGVuRmV0Y2g7MzsyOlpBRC1qOUhrUjhhZ0NtQWUxU2FuWlE7MjpRcjRaNEJ2R1JZV1VEMW02ZGF1LW5ROzI6S0xUal9lZDRTd3lWNUhUU2VSb01CQTswOw==")
            .get();
    // Whether we actually clear a scroll, we can't know, since that information isn't serialized in the
    // free search context response, which is returned from each node we want to clear a particular scroll.
    assertThat(response.isSucceeded(), is(true));
    assertThat(response.getNumFreed(), equalTo(0));
    assertThat(response.status(), equalTo(RestStatus.NOT_FOUND));
}
@Test
public void testClearIllegalScrollId() throws Exception {
    // Malformed scroll ids must be rejected with
    // ElasticsearchIllegalArgumentException rather than accepted.
    createIndex("idx");
    // Base64-decodable, but the decoded payload is not a valid scroll id.
    try {
        client().prepareClearScroll().addScrollId("c2Nhbjs2OzM0NDg1ODpzRlBLc0FXNlNyNm5JWUc1").get();
        fail();
    } catch (ElasticsearchIllegalArgumentException e) {
    }
    try {
        // Fails during base64 decoding (Base64-encoded string must have at least four characters)
        client().prepareClearScroll().addScrollId("a").get();
        fail();
    } catch (ElasticsearchIllegalArgumentException e) {
    }
    // With assertions enabled (-ea) this trips an internal AssertionError,
    // surfaced as an UncategorizedExecutionException.
    try {
        client().prepareClearScroll().addScrollId("abcabc").get();
        fail();
        // if running without -ea this will also throw ElasticsearchIllegalArgumentException
    } catch (UncategorizedExecutionException e) {
        assertThat(e.getRootCause(), instanceOf(AssertionError.class));
    }
}
@Test
public void testSimpleScrollQueryThenFetch_clearAllScrollIds() throws Exception {
    // Same setup as the explicit clear-by-id test, but the contexts are
    // released with the special "_all" scroll id.
    client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 3)).execute().actionGet();
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    for (int i = 0; i < 100; i++) {
        client().prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject().field("field", i).endObject()).execute().actionGet();
    }
    client().admin().indices().prepareRefresh().execute().actionGet();
    SearchResponse searchResponse1 = client().prepareSearch()
            .setQuery(matchAllQuery())
            .setSize(35)
            .setScroll(TimeValue.timeValueMinutes(2))
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .addSort("field", SortOrder.ASC)
            .execute().actionGet();
    SearchResponse searchResponse2 = client().prepareSearch()
            .setQuery(matchAllQuery())
            .setSize(35)
            .setScroll(TimeValue.timeValueMinutes(2))
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .addSort("field", SortOrder.ASC)
            .execute().actionGet();
    long counter1 = 0;
    long counter2 = 0;
    assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse1.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse1.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
    }
    assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse2.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse2.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
    }
    // Advance both scrolls one page each before clearing.
    searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId())
            .setScroll(TimeValue.timeValueMinutes(2))
            .execute().actionGet();
    searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId())
            .setScroll(TimeValue.timeValueMinutes(2))
            .execute().actionGet();
    assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse1.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse1.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
    }
    assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
    assertThat(searchResponse2.getHits().hits().length, equalTo(35));
    for (SearchHit hit : searchResponse2.getHits()) {
        assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
    }
    // "_all" releases every open scroll context on the cluster.
    ClearScrollResponse clearResponse = client().prepareClearScroll().addScrollId("_all")
            .execute().actionGet();
    assertThat(clearResponse.isSucceeded(), is(true));
    assertThat(clearResponse.getNumFreed(), greaterThan(0));
    assertThat(clearResponse.status(), equalTo(RestStatus.OK));
    // Both previous scroll ids are now invalid; checked via the transport
    // client of the internal test cluster.
    assertThrows(internalCluster().transportClient().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)), RestStatus.NOT_FOUND);
    assertThrows(internalCluster().transportClient().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)), RestStatus.NOT_FOUND);
}
@Test
// https://github.com/elasticsearch/elasticsearch/issues/4156
public void testDeepPaginationWithOneDocIndexAndDoNotBlowUp() throws Exception {
    // Regression test: a size of Integer.MAX_VALUE on a one-document
    // index must not blow up, for every search type.
    client().prepareIndex("index", "type", "1")
            .setSource("field", "value")
            .setRefresh(true)
            .execute().get();
    for (SearchType searchType : SearchType.values()) {
        SearchRequestBuilder builder = client().prepareSearch("index")
                .setSearchType(searchType)
                .setQuery(QueryBuilders.matchAllQuery())
                .setSize(Integer.MAX_VALUE);
        // SCAN always needs a scroll; for the other types (except COUNT)
        // randomly exercise both the scroll and non-scroll path.
        // Note: '&&' binds tighter than '||' here.
        if (searchType == SearchType.SCAN || searchType != SearchType.COUNT && randomBoolean()) {
            builder.setScroll("1m");
        }
        SearchResponse response = builder.execute().actionGet();
        try {
            ElasticsearchAssertions.assertHitCount(response, 1l);
        } finally {
            // Only a scroll search carries a scroll id to clean up.
            String scrollId = response.getScrollId();
            if (scrollId != null) {
                clearScroll(scrollId);
            }
        }
    }
}
@Test
public void testThatNonExistingScrollIdReturnsCorrectException() throws Exception {
    // Using a scroll id whose context has already been cleared must fail
    // with NOT_FOUND rather than some other error.
    client().prepareIndex("index", "type", "1").setSource("field", "value").execute().get();
    refresh();
    SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get();
    assertThat(searchResponse.getScrollId(), is(notNullValue()));
    ClearScrollResponse clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get();
    assertThat(clearScrollResponse.isSucceeded(), is(true));
    // The transport client is used so the status code is checked end-to-end.
    assertThrows(internalCluster().transportClient().prepareSearchScroll(searchResponse.getScrollId()), RestStatus.NOT_FOUND);
}
@Test
public void testStringSortMissingAscTerminates() throws Exception {
    // A scroll sorted on a string field that no document has ("no_field")
    // must still terminate: the first page returns the doc (placed by the
    // missing-value policy) and the next page is empty — for both
    // "_last" and "_first".
    assertAcked(prepareCreate("test")
            .setSettings(ImmutableSettings.settingsBuilder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0))
            .addMapping("test", "no_field", "type=string", "some_field", "type=string"));
    client().prepareIndex("test", "test", "1").setSource("some_field", "test").get();
    refresh();
    // missing("_last"): doc sorts after all present values.
    SearchResponse response = client().prepareSearch("test")
            .setTypes("test")
            .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last"))
            .setScroll("1m")
            .get();
    assertHitCount(response, 1);
    assertSearchHits(response, "1");
    // Second page must be empty — the scroll has to terminate.
    response = client().prepareSearchScroll(response.getScrollId()).get();
    assertSearchResponse(response);
    assertHitCount(response, 1);
    assertNoSearchHits(response);
    // missing("_first"): doc sorts before all present values.
    response = client().prepareSearch("test")
            .setTypes("test")
            .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first"))
            .setScroll("1m")
            .get();
    assertHitCount(response, 1);
    assertSearchHits(response, "1");
    response = client().prepareSearchScroll(response.getScrollId()).get();
    assertHitCount(response, 1);
    assertThat(response.getHits().getHits().length, equalTo(0));
}
@Test
public void testParseSearchScrollRequest() throws Exception {
    // A body carrying both supported fields must populate the request.
    BytesReference body = XContentFactory.jsonBuilder()
            .startObject()
            .field("scroll_id", "SCROLL_ID")
            .field("scroll", "1m")
            .endObject().bytes();
    SearchScrollRequest request = new SearchScrollRequest();
    RestSearchScrollAction.buildFromContent(body, request);
    assertThat(request.scrollId(), equalTo("SCROLL_ID"));
    assertThat(request.scroll().keepAlive(), equalTo(TimeValue.parseTimeValue("1m", null)));
}
@Test
public void testParseSearchScrollRequestWithInvalidJsonThrowsException() throws Exception {
    // A body that is not an object of fields must be rejected.
    BytesReference malformed = XContentFactory.jsonBuilder().startObject()
            .value("invalid_json").endObject().bytes();
    SearchScrollRequest target = new SearchScrollRequest();
    try {
        RestSearchScrollAction.buildFromContent(malformed, target);
        fail("expected parseContent failure");
    } catch (Exception expected) {
        assertThat(expected, instanceOf(ElasticsearchIllegalArgumentException.class));
        assertThat(expected.getMessage(), equalTo("Failed to parse request body"));
    }
}
@Test
public void testParseSearchScrollRequestWithUnknownParamThrowsException() throws Exception {
    // A body containing an unrecognized field must be rejected with a
    // message naming the offending parameter.
    BytesReference bodyWithUnknownField = XContentFactory.jsonBuilder().startObject()
            .field("scroll_id", "value_2")
            .field("unknown", "keyword")
            .endObject().bytes();
    SearchScrollRequest target = new SearchScrollRequest();
    try {
        RestSearchScrollAction.buildFromContent(bodyWithUnknownField, target);
        fail("expected parseContent failure");
    } catch (Exception expected) {
        assertThat(expected, instanceOf(ElasticsearchIllegalArgumentException.class));
        assertThat(expected.getMessage(), startsWith("Unknown parameter [unknown]"));
    }
}
@Test
public void testParseClearScrollRequest() throws Exception {
    // An array of scroll ids must be copied, in order, onto the request.
    BytesReference body = XContentFactory.jsonBuilder().startObject()
            .array("scroll_id", "value_1", "value_2")
            .endObject().bytes();
    ClearScrollRequest request = new ClearScrollRequest();
    RestClearScrollAction.buildFromContent(body, request);
    assertThat(request.scrollIds(), contains("value_1", "value_2"));
}
@Test
public void testParseClearScrollRequestWithInvalidJsonThrowsException() throws Exception {
    // A body that is not an object of fields must be rejected.
    BytesReference malformed = XContentFactory.jsonBuilder().startObject()
            .value("invalid_json").endObject().bytes();
    ClearScrollRequest target = new ClearScrollRequest();
    try {
        RestClearScrollAction.buildFromContent(malformed, target);
        fail("expected parseContent failure");
    } catch (Exception expected) {
        assertThat(expected, instanceOf(ElasticsearchIllegalArgumentException.class));
        assertThat(expected.getMessage(), equalTo("Failed to parse request body"));
    }
}
@Test
public void testParseClearScrollRequestWithUnknownParamThrowsException() throws Exception {
    // A body containing an unrecognized field must be rejected with a
    // message naming the offending parameter.
    BytesReference bodyWithUnknownField = XContentFactory.jsonBuilder().startObject()
            .array("scroll_id", "value_1", "value_2")
            .field("unknown", "keyword")
            .endObject().bytes();
    ClearScrollRequest target = new ClearScrollRequest();
    try {
        RestClearScrollAction.buildFromContent(bodyWithUnknownField, target);
        fail("expected parseContent failure");
    } catch (Exception expected) {
        assertThat(expected, instanceOf(ElasticsearchIllegalArgumentException.class));
        assertThat(expected.getMessage(), startsWith("Unknown parameter [unknown]"));
    }
}
}
| apache-2.0 |
papicella/snappy-store | lgpl/gemfire-trove/src/main/java/com/gemstone/gnu/trove/TLongHashingStrategy.java | 1812 | ///////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2002, Eric D. Friedman All Rights Reserved.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
///////////////////////////////////////////////////////////////////////////////
package com.gemstone.gnu.trove;
import java.io.Serializable;
/**
 * Interface to support pluggable hashing strategies in maps and sets.
 * Implementors can use this interface to make the trove hashing
 * algorithms use an optimal strategy when computing hashcodes.
 * <p>
 * Extends {@link java.io.Serializable} so that strategies can be
 * serialized together with the collections that hold them.
 *
 * Created: Sun Nov 4 08:56:06 2001
 *
 * @author Eric D. Friedman
 * @version $Id: TLongHashingStrategy.java,v 1.3 2002/09/22 21:53:42 ericdf Exp $
 */
public interface TLongHashingStrategy extends Serializable {
    /**
     * Computes a hash code for the specified long. Implementors
     * can use the long's own value or a custom scheme designed to
     * minimize collisions for a known set of input.
     *
     * @param val long for which the hashcode is to be computed
     * @return the hashCode
     */
    // NOTE: the redundant "public" modifier was removed — interface
    // members are implicitly public (Checkstyle: RedundantModifier).
    int computeHashCode(long val);
} // TLongHashingStrategy
| apache-2.0 |
catholicon/jackrabbit-oak | oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/LoopbackBlobStoreTest.java | 12572 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.upgrade.cli.blob;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.oak.spi.blob.BlobOptions;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.io.InputStream;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertNotNull;
/**
 * Tests for {@code LoopbackBlobStore}. The store under test is read-only:
 * writes throw {@link UnsupportedOperationException}, and every blob id
 * resolves to a stream containing the UTF-8 bytes of the id itself, so
 * expectations can be phrased directly in terms of the requested blob id.
 */
@RunWith(JUnitParamsRunner.class)
public class LoopbackBlobStoreTest {

    @Test(expected = UnsupportedOperationException.class)
    public void writingBinariesIsNotSupported() throws IOException {
        final BlobStore blobStore = new LoopbackBlobStore();
        blobStore.writeBlob(adaptToUtf8InputStream("Test"));
    }

    @Test(expected = UnsupportedOperationException.class)
    public void writingBinariesWithBlobOptsIsNotSupported() throws IOException {
        final BlobStore blobStore = new LoopbackBlobStore();
        blobStore.writeBlob(adaptToUtf8InputStream("Test"), new BlobOptions());
    }

    @Test
    @Parameters(method = "blobIds")
    public void getBlobIdShouldReturnTheSameValuePassedExceptOfNull(
            final String blobId) {
        final BlobStore blobStore = new LoopbackBlobStore();
        assertEquals(blobId, blobStore.getBlobId(blobId));
    }

    @SuppressWarnings("ConstantConditions")
    @Test(expected = NullPointerException.class)
    public void getBlobIdShouldThrowAnExceptionWhenNullIsPassed() {
        new LoopbackBlobStore().getBlobId(null);
    }

    @Test
    @Parameters(method = "blobIds")
    public void getReferenceShouldReturnTheSameValuePassedExceptOfNull(
            final String blobId) {
        final BlobStore blobStore = new LoopbackBlobStore();
        assertEquals(blobId, blobStore.getReference(blobId));
    }

    @SuppressWarnings("ConstantConditions")
    @Test(expected = NullPointerException.class)
    public void getReferenceShouldThrowAnExceptionWhenNullIsPassed() {
        new LoopbackBlobStore().getReference(null);
    }

    @Test
    @Parameters(method = "blobIds")
    public void getBlobLengthShouldAlwaysReturnRealLengthOfBlobThatWillBeReturned(
            final String blobId) throws IOException {
        // Length must equal the byte length of the id itself.
        final BlobStore store = new LoopbackBlobStore();
        assertEquals(blobId.getBytes().length, store.getBlobLength(blobId));
    }

    @Test(expected = NullPointerException.class)
    public void getBlobLengthShouldAlwaysThrowAnExceptionWhenNullBlobIdIsPassed()
            throws IOException {
        new LoopbackBlobStore().getBlobLength(null);
    }

    @Test(expected = NullPointerException.class)
    public void getInputStreamShouldAlwaysThrowAnExceptionWhenNullBlobIdIsPassed()
            throws IOException {
        new LoopbackBlobStore().getInputStream(null);
    }

    @Test
    @Parameters(method = "blobIds")
    public void shouldAlwaysReturnStreamOfRequestedBlobIdUtf8BinRepresentation(
            final String blobId) throws IOException {
        final BlobStore store = new LoopbackBlobStore();
        final InputStream inputStream = store.getInputStream(blobId);
        assertNotNull(inputStream);
        // The stream content is expected to be the blob id itself.
        assertEquals(IOUtils.toString(inputStream, "UTF-8"), blobId);
    }

    @Test
    @Parameters(method = "blobIdsReads")
    public void shouldAlwaysFillBufferWithRequestedBlobIdUtf8BinRepresentation(
            final String blobId,
            int offsetToRead,
            int bufSize,
            int bufOffset,
            int lengthToRead,
            final String expectedBufferContent,
            final int expectedNumberOfBytesRead) throws IOException {
        final BlobStore blobStore = new LoopbackBlobStore();
        final byte[] buffer = new byte[bufSize];
        final int bytesRead = blobStore.readBlob(
                blobId, offsetToRead, buffer, bufOffset, lengthToRead);
        assertEquals(bytesRead, expectedNumberOfBytesRead);
        // Untouched buffer space (NUL bytes) is rendered as '#' so it can
        // be matched against the expected textual representation.
        assertEquals(expectedBufferContent,
                encodeBufferFreeSpace(IOUtils.toString(buffer, "UTF-8")));
    }

    @Test(expected = UnsupportedOperationException.class)
    @Parameters(method = "blobIdsFailedBufferReadsCases")
    public void getInputStreamShouldAlwaysReturnExceptionIfBufferTooSmall(
            final String blobId,
            int offsetToRead,
            int bufSize,
            int bufOffset,
            int lengthToRead) throws IOException {
        new LoopbackBlobStore().readBlob(
                blobId, offsetToRead, new byte[bufSize], bufOffset, lengthToRead);
    }

    @Test(expected = IllegalArgumentException.class)
    @Parameters(method = "blobIdsFailedOffsetReadsCases")
    public void getInputStreamShouldAlwaysReturnExceptionIfBinaryOffsetIsBad(
            final String blobId,
            int offsetToRead,
            int bufSize,
            int bufOffset,
            int lengthToRead) throws IOException {
        new LoopbackBlobStore().readBlob(
                blobId, offsetToRead, new byte[bufSize], bufOffset, lengthToRead);
    }

    /**
     * Cases: blobId, offsetToRead, bufSize, bufOffset, lengthToRead,
     * expectedBufferContent, expectedNumberOfBytesRead.
     */
    @SuppressWarnings("unused")
    private Object blobIdsReads() {
        return new Object[]{
                new Object[]{"", 0, 0, 0, 0, "", 0},
                new Object[]{"", 0, 0, 0, 1, "", 0},
                new Object[]{"IDX1", 0, 4, 0, 4, "IDX1", 4},
                new Object[]{"IDX1", 4, 0, 0, 4, "", 0},
                new Object[]{"IDX1", 4, 4, 0, 4, "####", 0},
                new Object[]{"IDX1", 0, 5, 0, 4, "IDX1#", 4},
                new Object[]{"IDX1", 1, 4, 0, 3, "DX1#", 3},
                new Object[]{"IDX1", 1, 4, 0, 4, "DX1#", 3},
                new Object[]{"ID2XXXXXXXXXXXYYZYZYYXYZYZYXYZQ", 10, 20, 3, 10, "###XXXXYYZYZY#######", 10},
                new Object[]{"ID2XXXXXXXXXXXYYZY", 10, 20, 3, 10, "###XXXXYYZY#########", 8},
                new Object[]{"ID2XXXXXXXXXXXYYZY", 10, 20, 3, 10, "###XXXXYYZY#########", 8},
                new Object[]{"ID2XXXXXXXXXXXYYZY", 10, 11, 3, 10, "###XXXXYYZY", 8},
                new Object[]{"ID2XXXXXXXXXXXYYZY", 10, 11, 2, 10, "##XXXXYYZY#", 8},
                new Object[]{"ID2XXXXXXXXXXXYYZY", 10, 11, 1, 10, "#XXXXYYZY##", 8},
        };
    }

    /** Cases: blobId, offsetToRead, bufferSize, bufferOffset, lengthToRead. */
    @SuppressWarnings("unused")
    private Object blobIdsFailedBufferReadsCases() {
        return new Object[]{
                new Object[]{" ", 0, 0, 0, 1},
                new Object[]{"IDX1", 0, 3, 0, 4},
                new Object[]{"IDX1", 1, 3, 2, 3},
                new Object[]{"IDX1", 1, 2, 0, 3},
                new Object[]{"ID2XXXXXXXXXXXYYZY", 10, 0, 30, 10},
        };
    }

    /** Cases: blobId, offsetToRead, bufferSize, bufferOffset, lengthToRead. */
    @SuppressWarnings("unused")
    private Object blobIdsFailedOffsetReadsCases() {
        return new Object[]{
                new Object[]{"", 1, 50, 0, 0},
                new Object[]{"IDX1", 5, 50, 0, 3},
                new Object[]{"IDX1", 6, 50, 0, 4},
                new Object[]{"ID2XXXXXXXXXXXYYZY", 30, 50, 1, 10},
        };
    }

    /** Blob ids exercised by the happy-path parameterized tests. */
    @SuppressWarnings("unused")
    private Object blobIds() {
        return new Object[]{
                new Object[]{""},
                new Object[]{"IDX1"},
                new Object[]{"ID2XXXXXXXXXXXYYZYZYYXYZYZYXYZQ"},
                new Object[]{"ABCQ"}
        };
    }

    /** Renders NUL padding in a decoded buffer as '#' for readable diffs. */
    private String encodeBufferFreeSpace(final String actualInputStreamAsString) {
        return actualInputStreamAsString.replace('\0', '#');
    }

    /** Wraps the given text in an input stream holding its UTF-8 bytes. */
    private InputStream adaptToUtf8InputStream(final String string)
            throws IOException {
        return IOUtils.toInputStream(string,
                "UTF-8");
    }
}
| apache-2.0 |
Darsstar/framework | server/src/main/java/com/vaadin/server/communication/PushRequestHandler.java | 10848 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.server.communication;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import org.atmosphere.cache.UUIDBroadcasterCache;
import org.atmosphere.client.TrackMessageSizeInterceptor;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereFramework;
import org.atmosphere.cpr.AtmosphereFramework.AtmosphereHandlerWrapper;
import org.atmosphere.cpr.AtmosphereHandler;
import org.atmosphere.cpr.AtmosphereInterceptor;
import org.atmosphere.cpr.AtmosphereRequestImpl;
import org.atmosphere.cpr.AtmosphereResponseImpl;
import org.atmosphere.interceptor.HeartbeatInterceptor;
import org.atmosphere.util.VoidAnnotationProcessor;
import com.vaadin.server.ServiceException;
import com.vaadin.server.ServletPortletHelper;
import com.vaadin.server.SessionExpiredHandler;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinResponse;
import com.vaadin.server.VaadinServletRequest;
import com.vaadin.server.VaadinServletResponse;
import com.vaadin.server.VaadinServletService;
import com.vaadin.server.VaadinSession;
import com.vaadin.shared.communication.PushConstants;
/**
* Handles requests to open a push (bidirectional) communication channel between
* the client and the server. After the initial request, communication through
* the push channel is managed by {@link PushAtmosphereHandler} and
* {@link PushHandler}
*
* @author Vaadin Ltd
* @since 7.1
*/
public class PushRequestHandler
implements SessionExpiredHandler {
private AtmosphereFramework atmosphere;
private PushHandler pushHandler;
/**
 * Creates a push request handler for the given service: builds the
 * {@link PushHandler}, obtains an Atmosphere framework (reusing a
 * pre-initialized instance when one exists, otherwise initializing one
 * here), and wires every {@code PushAtmosphereHandler} to the push
 * handler.
 *
 * @param service
 *            the servlet service this handler serves push requests for
 * @throws ServiceException
 *             declared by the signature; NOTE(review): Atmosphere
 *             initialization failures are caught and logged below
 *             (leaving push disabled) rather than thrown — confirm
 *             whether any other path can actually raise this
 */
public PushRequestHandler(VaadinServletService service)
        throws ServiceException {
    // Tear down Atmosphere when the Vaadin service is destroyed.
    service.addServiceDestroyListener(event -> destroy());
    final ServletConfig vaadinServletConfig = service.getServlet()
            .getServletConfig();
    pushHandler = createPushHandler(service);
    atmosphere = getPreInitializedAtmosphere(vaadinServletConfig);
    if (atmosphere == null) {
        // Not initialized by JSR356WebsocketInitializer
        getLogger().fine("Initializing Atmosphere for servlet "
                + vaadinServletConfig.getServletName());
        try {
            atmosphere = initAtmosphere(vaadinServletConfig);
        } catch (Exception e) {
            // Push is simply disabled on failure; the servlet itself
            // keeps working.
            getLogger().log(Level.WARNING,
                    "Failed to initialize Atmosphere for "
                            + service.getServlet().getServletName()
                            + ". Push will not work.",
                    e);
            return;
        }
    } else {
        getLogger().fine("Using pre-initialized Atmosphere for servlet "
                + vaadinServletConfig.getServletName());
    }
    // Propagate the configured long-polling suspend timeout (-1 when the
    // init parameter is absent).
    pushHandler.setLongPollingSuspendTimeout(
            atmosphere.getAtmosphereConfig().getInitParameter(
                    com.vaadin.server.Constants.SERVLET_PARAMETER_PUSH_SUSPEND_TIMEOUT_LONGPOLLING,
                    -1));
    for (AtmosphereHandlerWrapper handlerWrapper : atmosphere
            .getAtmosphereHandlers().values()) {
        AtmosphereHandler handler = handlerWrapper.atmosphereHandler;
        if (handler instanceof PushAtmosphereHandler) {
            // Map the (possibly pre-initialized) handler to the actual push
            // handler
            ((PushAtmosphereHandler) handler).setPushHandler(pushHandler);
        }
    }
}
/**
 * Creates a push handler for this request handler.
 * <p>
 * Create your own request handler and override this method if you want to
 * customize the {@link PushHandler}, e.g. to dynamically decide the suspend
 * timeout.
 *
 * @since 7.6
 * @param service
 *            the vaadin service
 * @return the push handler to use for this service
 */
protected PushHandler createPushHandler(VaadinServletService service) {
    // Default implementation: one PushHandler bound to the given service.
    return new PushHandler(service);
}
/** Returns the logger for this class (resolved on each call, not cached). */
private static Logger getLogger() {
    // The redundant "final" modifier was removed: static methods cannot
    // be overridden, so "final" has no effect (Checkstyle:
    // RedundantModifier).
    return Logger.getLogger(PushRequestHandler.class.getName());
}
/**
 * Looks up an AtmosphereFramework instance that
 * {@link JSR356WebsocketInitializer} may have stored in the servlet
 * context during the servlet context init phase.
 *
 * @param vaadinServletConfig
 *            the servlet configuration whose servlet name keys the
 *            context-attribute lookup
 * @return the pre-initialized framework, or {@code null} when no
 *         framework was stored under the expected attribute name
 */
private AtmosphereFramework getPreInitializedAtmosphere(
        ServletConfig vaadinServletConfig) {
    final String servletName = vaadinServletConfig.getServletName();
    final Object candidate = vaadinServletConfig.getServletContext()
            .getAttribute(
                    JSR356WebsocketInitializer.getAttributeName(servletName));
    return candidate instanceof AtmosphereFramework
            ? (AtmosphereFramework) candidate
            : null;
}
    /**
     * Initializes Atmosphere for the given ServletConfiguration
     *
     * @since 7.5.0
     * @param vaadinServletConfig
     *            The servlet configuration for the servlet which should have
     *            Atmosphere support
     * @return the fully initialized Atmosphere framework instance
     * @throws RuntimeException
     *             wrapping the {@code ServletException} if Atmosphere fails to
     *             initialize
     */
    static AtmosphereFramework initAtmosphere(
            final ServletConfig vaadinServletConfig) {
        AtmosphereFramework atmosphere = new AtmosphereFramework(false, false) {
            @Override
            protected void analytics() {
                // Overridden to disable version number check
            }

            @Override
            public AtmosphereFramework addInitParameter(String name,
                    String value) {
                // Values provided in the servlet configuration (e.g. web.xml
                // init-params) take precedence over the defaults set below.
                if (vaadinServletConfig.getInitParameter(name) == null) {
                    super.addInitParameter(name, value);
                }
                return this;
            }
        };

        // All push requests are routed to the (possibly pre-initialized)
        // PushAtmosphereHandler.
        atmosphere.addAtmosphereHandler("/*", new PushAtmosphereHandler());
        atmosphere.addInitParameter(ApplicationConfig.BROADCASTER_CACHE,
                UUIDBroadcasterCache.class.getName());
        atmosphere.addInitParameter(ApplicationConfig.ANNOTATION_PROCESSOR,
                VoidAnnotationProcessor.class.getName());
        atmosphere.addInitParameter(ApplicationConfig.PROPERTY_SESSION_SUPPORT,
                "true");
        atmosphere.addInitParameter(ApplicationConfig.MESSAGE_DELIMITER,
                String.valueOf(PushConstants.MESSAGE_DELIMITER));
        atmosphere.addInitParameter(
                ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER,
                "false");

        // Disable heartbeat (it does not emit correct events client side)
        // https://github.com/Atmosphere/atmosphere-javascript/issues/141
        atmosphere.addInitParameter(
                ApplicationConfig.DISABLE_ATMOSPHEREINTERCEPTORS,
                HeartbeatInterceptor.class.getName());

        // The same buffer size limit is applied to the websocket buffer and to
        // the maximum text/binary message sizes.
        final String bufferSize = String
                .valueOf(PushConstants.WEBSOCKET_BUFFER_SIZE);
        atmosphere.addInitParameter(ApplicationConfig.WEBSOCKET_BUFFER_SIZE,
                bufferSize);
        atmosphere.addInitParameter(ApplicationConfig.WEBSOCKET_MAXTEXTSIZE,
                bufferSize);
        atmosphere.addInitParameter(ApplicationConfig.WEBSOCKET_MAXBINARYSIZE,
                bufferSize);
        atmosphere.addInitParameter(
                ApplicationConfig.PROPERTY_ALLOW_SESSION_TIMEOUT_REMOVAL,
                "false");
        // This prevents Atmosphere from recreating a broadcaster after it has
        // already been destroyed when the servlet is being undeployed
        // (see #20026)
        atmosphere.addInitParameter(ApplicationConfig.RECOVER_DEAD_BROADCASTER,
                "false");
        // Disable Atmosphere's message about commercial support
        atmosphere.addInitParameter("org.atmosphere.cpr.showSupportMessage",
                "false");

        try {
            atmosphere.init(vaadinServletConfig);

            // Ensure the client-side knows how to split the message stream
            // into individual messages when using certain transports
            AtmosphereInterceptor trackMessageSize = new TrackMessageSizeInterceptor();
            trackMessageSize.configure(atmosphere.getAtmosphereConfig());
            atmosphere.interceptor(trackMessageSize);
        } catch (ServletException e) {
            throw new RuntimeException("Atmosphere init failed", e);
        }
        return atmosphere;
    }
@Override
public boolean handleRequest(VaadinSession session, VaadinRequest request,
VaadinResponse response) throws IOException {
if (!ServletPortletHelper.isPushRequest(request)) {
return false;
}
if (request instanceof VaadinServletRequest) {
if (atmosphere == null) {
response.sendError(500,
"Atmosphere initialization failed. No push available.");
return true;
}
try {
atmosphere.doCometSupport(
AtmosphereRequestImpl
.wrap((VaadinServletRequest) request),
AtmosphereResponseImpl
.wrap((VaadinServletResponse) response));
} catch (ServletException e) {
// TODO PUSH decide how to handle
throw new RuntimeException(e);
}
} else {
throw new IllegalArgumentException(
"Portlets not currently supported");
}
return true;
}
public void destroy() {
atmosphere.destroy();
}
/*
* (non-Javadoc)
*
* @see
* com.vaadin.server.SessionExpiredHandler#handleSessionExpired(com.vaadin
* .server.VaadinRequest, com.vaadin.server.VaadinResponse)
*/
@Override
public boolean handleSessionExpired(VaadinRequest request,
VaadinResponse response) throws IOException {
// Websockets request must be handled by accepting the websocket
// connection and then sending session expired so we let
// PushRequestHandler handle it
return handleRequest(null, request, response);
}
}
| apache-2.0 |
Pushjet/Pushjet-Android | gradle/wrapper/dists/gradle-1.12-all/4ff8jj5a73a7zgj5nnzv1ubq0/gradle-1.12/src/cpp/org/gradle/nativebinaries/platform/internal/PlatformInternal.java | 815 | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.nativebinaries.platform.internal;
import org.gradle.nativebinaries.platform.Platform;
/**
 * Internal extension of {@link Platform} exposing details that are not part of
 * the public platform API.
 */
public interface PlatformInternal extends Platform {
    /**
     * Returns a string describing this platform for compatibility comparisons.
     * NOTE(review): exact semantics are not visible here -- presumably two
     * platforms with equal compatibility strings are binary compatible;
     * confirm against the callers of this method.
     */
    String getCompatibilityString();
}
| bsd-2-clause |
oetting/jodd | jodd-bean/src/main/java/jodd/typeconverter/impl/BooleanArrayConverter.java | 6595 | // Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.typeconverter.impl;
import jodd.typeconverter.TypeConverter;
import jodd.typeconverter.TypeConverterManagerBean;
import jodd.util.StringUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Converts given object to <code>boolean[]</code>.
*/
public class BooleanArrayConverter implements TypeConverter<boolean[]> {
protected final TypeConverterManagerBean typeConverterManagerBean;
public BooleanArrayConverter(TypeConverterManagerBean typeConverterManagerBean) {
this.typeConverterManagerBean = typeConverterManagerBean;
}
public boolean[] convert(Object value) {
if (value == null) {
return null;
}
Class valueClass = value.getClass();
if (valueClass.isArray() == false) {
// source is not an array
return convertValueToArray(value);
}
// source is an array
return convertArrayToArray(value);
}
/**
* Converts type using type converter manager.
*/
protected boolean convertType(Object value) {
return typeConverterManagerBean.convertType(value, boolean.class).booleanValue();
}
/**
* Creates an array with single element.
*/
protected boolean[] convertToSingleElementArray(Object value) {
return new boolean[] {convertType(value)};
}
/**
* Converts non-array value to array. Detects various
* collection types and iterates them to make conversion
* and to create target array.
*/
protected boolean[] convertValueToArray(Object value) {
if (value instanceof List) {
List list = (List) value;
boolean[] target = new boolean[list.size()];
for (int i = 0; i < list.size(); i++) {
Object element = list.get(i);
target[i] = convertType(element);
}
return target;
}
if (value instanceof Collection) {
Collection collection = (Collection) value;
boolean[] target = new boolean[collection.size()];
int i = 0;
for (Object element : collection) {
target[i] = convertType(element);
i++;
}
return target;
}
if (value instanceof Iterable) {
Iterable iterable = (Iterable) value;
ArrayList<Boolean> booleanArrayList = new ArrayList<>();
for (Object element : iterable) {
boolean convertedValue = convertType(element);
booleanArrayList.add(Boolean.valueOf(convertedValue));
}
boolean[] array = new boolean[booleanArrayList.size()];
for (int i = 0; i < booleanArrayList.size(); i++) {
Boolean b = booleanArrayList.get(i);
array[i] = b.booleanValue();
}
return array;
}
if (value instanceof CharSequence) {
String[] strings = StringUtil.splitc(value.toString(), ArrayConverter.NUMBER_DELIMITERS);
return convertArrayToArray(strings);
}
// everything else:
return convertToSingleElementArray(value);
}
/**
* Converts array value to array.
*/
protected boolean[] convertArrayToArray(Object value) {
Class valueComponentType = value.getClass().getComponentType();
if (valueComponentType == boolean.class) {
// equal types, no conversion needed
return (boolean[]) value;
}
boolean[] result;
if (valueComponentType.isPrimitive()) {
// convert primitive array to target array
result = convertPrimitiveArrayToArray(value, valueComponentType);
} else {
// convert object array to target array
Object[] array = (Object[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = convertType(array[i]);
}
}
return result;
}
/**
* Converts primitive array to target array.
*/
protected boolean[] convertPrimitiveArrayToArray(Object value, Class primitiveComponentType) {
boolean[] result = null;
if (primitiveComponentType == boolean[].class) {
return (boolean[]) value;
}
if (primitiveComponentType == int.class) {
int[] array = (int[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] != 0;
}
}
else if (primitiveComponentType == long.class) {
long[] array = (long[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] != 0;
}
}
else if (primitiveComponentType == float.class) {
float[] array = (float[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] != 0;
}
}
else if (primitiveComponentType == double.class) {
double[] array = (double[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] != 0;
}
}
else if (primitiveComponentType == short.class) {
short[] array = (short[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] != 0;
}
}
else if (primitiveComponentType == byte.class) {
byte[] array = (byte[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] != 0;
}
}
else if (primitiveComponentType == char.class) {
char[] array = (char[]) value;
result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i] != 0;
}
}
return result;
}
} | bsd-2-clause |
Chilledheart/chromium | chrome/android/java/src/org/chromium/chrome/browser/enhancedbookmarks/EnhancedBookmarkRecyclerView.java | 5528 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.enhancedbookmarks;
import android.content.Context;
import android.graphics.Rect;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.View;
import android.widget.Checkable;
import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.R;
import org.chromium.components.bookmarks.BookmarkId;
import java.util.List;
/**
 * Container for all bookmark items shown in enhanced bookmark manager.
 */
public class EnhancedBookmarkRecyclerView extends RecyclerView implements
        EnhancedBookmarkUIObserver {
    private EnhancedBookmarkDelegate mDelegate;
    private View mEmptyView;
    private RecyclerView.ItemDecoration mVerticalSpaceItemDecoration;

    /**
     * Provides a way to override the default spacing between 2 items in RecyclerView.
     */
    private static class VerticalSpaceItemDecoration extends RecyclerView.ItemDecoration {
        private final int mSpacingPx;

        /**
         * @param spacingPx Extra space, in pixels, appended below each item.
         */
        public VerticalSpaceItemDecoration(int spacingPx) {
            this.mSpacingPx = spacingPx;
        }

        @Override
        public void getItemOffsets(Rect outRect, View view, RecyclerView parent,
                RecyclerView.State state) {
            outRect.bottom = mSpacingPx;
        }
    }

    /**
     * Constructs a new instance of enhanced bookmark recycler view.
     */
    public EnhancedBookmarkRecyclerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setLayoutManager(new LinearLayoutManager(context));
        setHasFixedSize(true);
    }

    /**
     * Sets the view to be shown if there are no items in adapter.
     */
    void setEmptyView(View emptyView) {
        mEmptyView = emptyView;
    }

    // RecyclerView implementation

    @Override
    public void setAdapter(final Adapter adapter) {
        super.setAdapter(adapter);
        // Keep the empty view in sync with every kind of adapter content change.
        adapter.registerAdapterDataObserver(new AdapterDataObserver() {
            @Override
            public void onChanged() {
                super.onChanged();
                updateEmptyViewVisibility(adapter);
            }

            @Override
            public void onItemRangeInserted(int positionStart, int itemCount) {
                super.onItemRangeInserted(positionStart, itemCount);
                updateEmptyViewVisibility(adapter);
            }

            @Override
            public void onItemRangeRemoved(int positionStart, int itemCount) {
                super.onItemRangeRemoved(positionStart, itemCount);
                updateEmptyViewVisibility(adapter);
            }
        });
        updateEmptyViewVisibility(adapter);
    }

    @VisibleForTesting
    @Override
    public EnhancedBookmarkItemsAdapter getAdapter() {
        return (EnhancedBookmarkItemsAdapter) super.getAdapter();
    }

    /**
     * Unlike ListView or GridView, RecyclerView does not provide default empty
     * view implementation. We need to check it ourselves.
     */
    private void updateEmptyViewVisibility(Adapter adapter) {
        mEmptyView.setVisibility(adapter.getItemCount() == 0 ? View.VISIBLE : View.GONE);
    }

    // EnhancedBookmarkUIObserver implementations

    @Override
    public void onEnhancedBookmarkDelegateInitialized(EnhancedBookmarkDelegate delegate) {
        mDelegate = delegate;
        mDelegate.addUIObserver(this);
        EnhancedBookmarkItemsAdapter adapter = new EnhancedBookmarkItemsAdapter(getContext());
        adapter.onEnhancedBookmarkDelegateInitialized(mDelegate);
        setAdapter(adapter);
    }

    @Override
    public void onDestroy() {
        mDelegate.removeUIObserver(this);
    }

    @Override
    public void onAllBookmarksStateSet() {
        scrollToPosition(0);
        // Restores to the default vertical spacing.
        removeSpacingOverride();
    }

    @Override
    public void onFolderStateSet(BookmarkId folder) {
        scrollToPosition(0);
        // Restores to the default vertical spacing.
        removeSpacingOverride();
    }

    @Override
    public void onFilterStateSet(EnhancedBookmarkFilter filter) {
        assert filter == EnhancedBookmarkFilter.OFFLINE_PAGES;
        scrollToPosition(0);

        // For "Saved offline" filter view, more spacing is needed between 2 items since the added
        // line to show offline page size eats up the default spacing.
        if (mVerticalSpaceItemDecoration == null) {
            // Resolve the dimension resource to pixels. Passing the raw resource id
            // (as the previous code did) would be interpreted as a huge pixel offset.
            int spacingPx = getResources().getDimensionPixelSize(
                    R.dimen.offline_page_item_vertical_spacing);
            mVerticalSpaceItemDecoration = new VerticalSpaceItemDecoration(spacingPx);
            addItemDecoration(mVerticalSpaceItemDecoration);
        }
    }

    /** Removes the extra vertical spacing override, if any, restoring the default. */
    private void removeSpacingOverride() {
        if (mVerticalSpaceItemDecoration != null) {
            removeItemDecoration(mVerticalSpaceItemDecoration);
            mVerticalSpaceItemDecoration = null;
        }
    }

    @Override
    public void onSelectionStateChange(List<BookmarkId> selectedBookmarks) {
        if (!mDelegate.isSelectionEnabled()) {
            // Selection mode ended: clear the checked state of all visible rows.
            for (int i = 0; i < getLayoutManager().getChildCount(); ++i) {
                View child = getLayoutManager().getChildAt(i);
                if (child instanceof Checkable) ((Checkable) child).setChecked(false);
            }
        }
    }
}
| bsd-3-clause |
hzhao/lemur-galago | core/src/main/java/org/lemurproject/galago/core/index/merge/DocumentLengthsMerger.java | 2973 | // BSD License (http://lemurproject.org/galago-license)
package org.lemurproject.galago.core.index.merge;
import org.lemurproject.galago.core.index.disk.DiskLengthsWriter;
import org.lemurproject.galago.core.retrieval.iterator.LengthsIterator;
import org.lemurproject.galago.core.retrieval.processing.ScoringContext;
import org.lemurproject.galago.core.types.FieldLengthData;
import org.lemurproject.galago.tupleflow.Processor;
import org.lemurproject.galago.tupleflow.TupleFlowParameters;
import org.lemurproject.galago.utility.CmpUtil;
import java.io.IOException;
import java.util.List;
import java.util.PriorityQueue;
/**
 * Merges per-document length data from several index parts into a single
 * lengths part, remapping document identifiers through the document mapping.
 *
 * @author sjh
 */
public class DocumentLengthsMerger extends GenericIndexMerger<FieldLengthData> {

  public DocumentLengthsMerger(TupleFlowParameters p) throws Exception {
    super(p);
  }

  @Override
  public boolean mappingKeys() {
    return false; // keys are not mappable (they are not document identifiers)
  }

  @Override
  public Processor<FieldLengthData> createIndexWriter(TupleFlowParameters parameters) throws IOException {
    // Output is written through the standard on-disk lengths writer.
    return new DiskLengthsWriter(parameters);
  }

  @Override
  public void performValueMerge(byte[] key, List<KeyIteratorWrapper> keyIterators) throws IOException {
    // Order the per-part length iterators by their first (remapped) document id.
    PriorityQueue<LengthIteratorWrapper> lenQueue = new PriorityQueue<>();
    for (KeyIteratorWrapper wrapper : keyIterators) {
      lenQueue.offer(new LengthIteratorWrapper(this.partIds.get(wrapper), (LengthsIterator) wrapper.getIterator().getValueIterator(), this.mappingReader));
    }

    // NOTE(review): each wrapper is drained completely before the next one is
    // polled. This yields globally ordered output only if the remapped
    // document-id ranges of the merged parts do not interleave -- confirm that
    // the document mapping guarantees this.
    while (!lenQueue.isEmpty()) {
      LengthIteratorWrapper head = lenQueue.poll();
      while (!head.isDone()) {
        this.writer.process(new FieldLengthData(key, head.currentDocument, head.currentLength));
        head.next();
      }
    }
  }

  /**
   * Wraps a single part's lengths iterator, exposing the current document id
   * (remapped into the merged index's id space) and its cached length.
   * Wrappers order themselves by their current remapped document id.
   */
  private static class LengthIteratorWrapper implements Comparable<LengthIteratorWrapper> {

    int indexId;
    ScoringContext sc;
    LengthsIterator iterator;
    long currentDocument;
    int currentLength;
    DocumentMappingReader mapping;

    private LengthIteratorWrapper(int indexId, LengthsIterator iterator, DocumentMappingReader mapping) {
      this.indexId = indexId;
      this.iterator = iterator;
      this.mapping = mapping;
      this.sc = new ScoringContext();

      // initialization
      load();
    }

    // Advances past the current candidate and caches the next document/length.
    public void next() throws IOException {
      iterator.movePast(iterator.currentCandidate());
      if (!iterator.isDone()) {
        load();
      }
    }

    // changes the document numbers in the extent array
    private void load() {
      long currentIdentifier = iterator.currentCandidate();
      sc.document = currentIdentifier;
      // Remap the part-local identifier into the merged index's id space.
      this.currentDocument = mapping.map(indexId, currentIdentifier);
      this.currentLength = iterator.length(sc);
    }

    public boolean isDone() {
      return iterator.isDone();
    }

    @Override
    public int compareTo(LengthIteratorWrapper other) {
      return CmpUtil.compare(currentDocument, other.currentDocument);
    }
  }
}
| bsd-3-clause |
nwjs/chromium.src | components/image_fetcher/android/java/src/org/chromium/components/image_fetcher/ImageFetcher.java | 10016 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.image_fetcher;
import android.graphics.Bitmap;
import android.media.ThumbnailUtils;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.Callback;
import jp.tomorrowkey.android.gifplayer.BaseGifImage;
/**
 * Blueprint and some implementation for image fetching. Use ImageFetcherFactory for any
 * ImageFetcher instantiation.
 */
public abstract class ImageFetcher {
    // All UMA client names collected here to prevent duplicates. While adding a new client, please
    // update the histogram suffix ImageFetcherClients in histograms.xml as well.
    public static final String ANSWER_SUGGESTIONS_UMA_CLIENT_NAME = "AnswerSuggestions";
    public static final String ASSISTANT_DETAILS_UMA_CLIENT_NAME = "AssistantDetails";
    public static final String ASSISTANT_INFO_BOX_UMA_CLIENT_NAME = "AssistantInfoBox";
    public static final String AUTOFILL_CARD_ART_UMA_CLIENT_NAME = "AutofillCardArt";
    public static final String CRYPTIDS_UMA_CLIENT_NAME = "Cryptids";
    public static final String ENTITY_SUGGESTIONS_UMA_CLIENT_NAME = "EntitySuggestions";
    public static final String FEED_UMA_CLIENT_NAME = "Feed";
    public static final String LIGHTWEIGHT_REACTIONS_UMA_CLIENT_NAME = "LightweightReactions";
    public static final String NTP_ANIMATED_LOGO_UMA_CLIENT_NAME = "NewTabPageAnimatedLogo";
    public static final String PRICE_DROP_NOTIFICATION = "PriceDropNotification";
    public static final String POWER_BOOKMARKS_CLIENT_NAME = "PowerBookmarks";
    public static final String QUERY_TILE_UMA_CLIENT_NAME = "QueryTiles";
    public static final String VIDEO_TUTORIALS_IPH_UMA_CLIENT_NAME = "VideoTutorialsIPH";
    public static final String VIDEO_TUTORIALS_LIST_UMA_CLIENT_NAME = "VideoTutorialsList";
    public static final String WEB_ID_ACCOUNT_SELECTION_UMA_CLIENT_NAME = "WebIDAccountSelection";
    public static final String WEB_NOTES_UMA_CLIENT_NAME = "WebNotes";

    /**
     * Encapsulates image fetching customization options. Supports a subset of the native
     * ImageFetcherParams. The image resizing is done in Java.
     */
    public static class Params {
        // 0 means "do not resize" (see the width/height field docs below).
        static final int DEFAULT_IMAGE_SIZE = 0;
        // Sentinel for "no custom cache expiration interval".
        static final int INVALID_EXPIRATION_INTERVAL = 0;

        /**
         * Creates image fetcher parameters. The image will not be resized.
         * @See {@link #Params(String, String, int, int, int)}.
         */
        public static Params create(final String url, String clientName) {
            return new Params(url, clientName, DEFAULT_IMAGE_SIZE, DEFAULT_IMAGE_SIZE,
                    INVALID_EXPIRATION_INTERVAL);
        }

        /**
         * Creates image fetcher parameters with image size specified.
         * @See {@link #Params(String, String, int, int, int)}.
         */
        public static Params create(final String url, String clientName, int width, int height) {
            return new Params(url, clientName, width, height, INVALID_EXPIRATION_INTERVAL);
        }

        /**
         * Only used in rare cases. Creates image fetcher parameters that keeps the cache file for a
         * certain period of time.
         * @See {@link #Params(String, String, int, int, int)}.
         */
        public static Params createWithExpirationInterval(final String url, String clientName,
                int width, int height, int expirationIntervalMinutes) {
            assert expirationIntervalMinutes > INVALID_EXPIRATION_INTERVAL
                : "Must specify a positive expiration interval, or use other constructors.";
            return new Params(url, clientName, width, height, expirationIntervalMinutes);
        }

        // Private: instances are created through the static factories above.
        private Params(String url, String clientName, int width, int height,
                int expirationIntervalMinutes) {
            assert expirationIntervalMinutes >= INVALID_EXPIRATION_INTERVAL
                : "Expiration interval should be non negative.";
            this.url = url;
            this.clientName = clientName;
            this.width = width;
            this.height = height;
            this.expirationIntervalMinutes = expirationIntervalMinutes;
        }

        // Keep equals()/hashCode() in sync with the field list below.
        @Override
        public boolean equals(Object other) {
            if (other == this) return true;
            if (!(other instanceof ImageFetcher.Params)) return false;

            ImageFetcher.Params otherParams = (ImageFetcher.Params) other;
            return url.equals(otherParams.url) && clientName.equals(otherParams.clientName)
                    && width == otherParams.width && height == otherParams.height
                    && expirationIntervalMinutes == otherParams.expirationIntervalMinutes;
        }

        @Override
        public int hashCode() {
            int result = (url != null) ? url.hashCode() : 0;
            result = 31 * result + ((clientName != null) ? clientName.hashCode() : 0);
            result = 31 * result + width;
            result = 31 * result + height;
            result = 31 * result + expirationIntervalMinutes;
            return result;
        }

        /**
         * The url to fetch the image from.
         */
        public final String url;

        /**
         * Name of the cached image fetcher client to report UMA metrics for.
         */
        public final String clientName;

        /**
         * The new bitmap's desired width (in pixels). If the given value is <= 0, the image won't
         * be scaled.
         */
        public final int width;

        /**
         * The new bitmap's desired height (in pixels). If the given value is <= 0, the image won't
         * be scaled.
         */
        public final int height;

        /**
         * Only specifies in rare cases to keep the cache file on disk for certain period of time.
         * Measured in minutes. Any value <= 0 will be ignored.
         */
        public final int expirationIntervalMinutes;
    }

    /** Base class that can be used for testing. */
    public abstract static class ImageFetcherForTesting extends ImageFetcher {
        public ImageFetcherForTesting() {}
    }

    // Singleton ImageFetcherBridge.
    private ImageFetcherBridge mImageFetcherBridge;

    /** Copy-constructor to support composite instances of ImageFetcher. */
    public ImageFetcher(ImageFetcher imageFetcher) {
        mImageFetcherBridge = imageFetcher.getImageFetcherBridge();
    }

    /** Base constructor that takes an ImageFetcherBridge. */
    public ImageFetcher(ImageFetcherBridge imageFetcherBridge) {
        mImageFetcherBridge = imageFetcherBridge;
    }

    /** Test constructor */
    private ImageFetcher() {}

    protected ImageFetcherBridge getImageFetcherBridge() {
        return mImageFetcherBridge;
    }

    /**
     * Try to resize the given image if the conditions are met.
     *
     * @param bitmap The input bitmap, will be recycled if scaled.
     * @param width The desired width of the output.
     * @param height The desired height of the output.
     *
     * @return The resized image, or the original image if the conditions aren't met.
     */
    @VisibleForTesting
    public static Bitmap resizeImage(@Nullable Bitmap bitmap, int width, int height) {
        // NOTE(review): resizing is skipped when *either* dimension already
        // matches the target ('&&' between the two '!=' checks). If the intent
        // is to resize whenever the size differs at all, this should be '||'
        // -- confirm against callers before changing.
        if (bitmap != null && width > 0 && height > 0 && bitmap.getWidth() != width
                && bitmap.getHeight() != height) {
            /* The resizing rules are the as follows:
               (1) The image will be scaled up (if smaller) in a way that maximizes the area of the
               source bitmap that's in the destination bitmap.
               (2) A crop is made in the middle of the bitmap for the given size (width, height).
               The x/y are placed appropriately (conceptually just think of it as a properly sized
               chunk taken from the middle). */
            return ThumbnailUtils.extractThumbnail(
                    bitmap, width, height, ThumbnailUtils.OPTIONS_RECYCLE_INPUT);
        } else {
            return bitmap;
        }
    }

    /**
     * Report an event metric.
     *
     * @param clientName Name of the cached image fetcher client to report UMA metrics for.
     * @param eventId The event to be reported
     */
    public void reportEvent(String clientName, @ImageFetcherEvent int eventId) {
        mImageFetcherBridge.reportEvent(clientName, eventId);
    }

    /**
     * Fetch the gif for the given url.
     *
     * @param params The parameters to specify image fetching details. If using CachedImageFetcher
     *         to fetch images and gifs, use separate {@link Params#clientName} for them.
     * @param callback The function which will be called when the image is ready; will be called
     *         with null result if fetching fails.
     */
    public abstract void fetchGif(
            final ImageFetcher.Params params, Callback<BaseGifImage> callback);

    /**
     * Fetches the image based on customized parameters specified.
     *
     * @param params The parameters to specify image fetching details.
     * @param callback The function which will be called when the image is ready; will be called
     *         with null result if fetching fails;
     */
    public abstract void fetchImage(final Params params, Callback<Bitmap> callback);

    /**
     * Clear the cache of any bitmaps that may be in-memory.
     */
    public abstract void clear();

    /**
     * Returns the type of Image Fetcher this is based on class arrangements. See
     * image_fetcher_service.h for a detailed description of the available configurations.
     *
     * @return the type of the image fetcher this class maps to in native.
     */
    public abstract @ImageFetcherConfig int getConfig();

    /**
     * Destroy method, called to clear resources to prevent leakage.
     */
    public abstract void destroy();
}
| bsd-3-clause |
yubo/program | ds/demo/projects/xj/release/1.2.6/src/edu/usfca/xj/appkit/utils/XJDialogProgress.java | 4478 | /*
[The "BSD licence"]
Copyright (c) 2005 Jean Bovet
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.usfca.xj.appkit.utils;
import edu.usfca.xj.appkit.frame.XJDialog;
import edu.usfca.xj.appkit.frame.XJFrame;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
public class XJDialogProgress extends XJDialog {

    protected XJDialogProgressDelegate delegate;

    protected JLabel infoLabel;
    protected JProgressBar progressBar;
    protected JButton cancelButton;

    public XJDialogProgress(XJFrame owner, boolean modal) {
        super(owner == null ? null : owner.getJavaContainer(), modal);
        init();
    }

    public XJDialogProgress(XJFrame owner) {
        super(owner == null ? null : owner.getJavaContainer(), false);
        init();
    }

    public XJDialogProgress(Container owner, boolean modal) {
        super(owner, modal);
        init();
    }

    public XJDialogProgress(Container owner) {
        super(owner, false);
        init();
    }

    /** Builds the dialog and wires the cancel button to the delegate. */
    public void init() {
        setResizable(false);
        setSize(400, 90);
        initComponents();

        cancelButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent event) {
                // Forward cancellation to the delegate, if one is registered.
                if (delegate != null)
                    delegate.dialogDidCancel();
            }
        });
    }

    /** Enables or disables the cancel button. */
    public void setCancellable(boolean flag) {
        cancelButton.setEnabled(flag);
    }

    /** Switches the progress bar between indeterminate and determinate mode. */
    public void setIndeterminate(boolean flag) {
        if (flag) {
            // Reset value and maximum before entering indeterminate mode.
            setProgress(0);
            setProgressMax(0);
        }
        progressBar.setIndeterminate(flag);
    }

    public void setDelegate(XJDialogProgressDelegate delegate) {
        this.delegate = delegate;
    }

    /** Updates the informational text shown above the progress bar. */
    public void setInfo(String info) {
        infoLabel.setText(info);
    }

    /** Convenience overload that truncates the value to an int. */
    public void setProgress(float value) {
        setProgress((int) value);
    }

    public void setProgress(int value) {
        progressBar.setValue(value);
    }

    public void setProgressMax(int value) {
        progressBar.setMaximum(value);
    }

    /** Lays out the info label, progress bar and cancel button in a grid. */
    private void initComponents() {
        setTitle("Operation in progress");

        infoLabel = new JLabel();
        progressBar = new JProgressBar();
        cancelButton = new JButton("Cancel");

        Container contentPane = getContentPane();
        contentPane.setLayout(new GridBagLayout());

        // Info label: row 0, spanning both columns, stretched horizontally.
        GridBagConstraints labelConstraints = new GridBagConstraints();
        labelConstraints.gridx = 0;
        labelConstraints.gridy = 0;
        labelConstraints.gridwidth = 2;
        labelConstraints.anchor = GridBagConstraints.WEST;
        labelConstraints.fill = GridBagConstraints.HORIZONTAL;
        labelConstraints.weightx = 1;
        labelConstraints.insets = new Insets(20, 20, 0, 0);
        contentPane.add(infoLabel, labelConstraints);

        // Progress bar: row 1, first column, takes the remaining width.
        GridBagConstraints barConstraints = new GridBagConstraints();
        barConstraints.gridx = 0;
        barConstraints.gridy = 1;
        barConstraints.gridwidth = 1;
        barConstraints.anchor = GridBagConstraints.WEST;
        barConstraints.fill = GridBagConstraints.HORIZONTAL;
        barConstraints.weightx = 1;
        barConstraints.insets = new Insets(0, 20, 20, 10);
        contentPane.add(progressBar, barConstraints);

        // Cancel button: row 1, second column, natural size, centered.
        GridBagConstraints buttonConstraints = new GridBagConstraints();
        buttonConstraints.gridx = 1;
        buttonConstraints.gridy = 1;
        buttonConstraints.anchor = GridBagConstraints.CENTER;
        buttonConstraints.fill = GridBagConstraints.NONE;
        buttonConstraints.weightx = 0;
        buttonConstraints.insets = new Insets(0, 0, 20, 20);
        contentPane.add(cancelButton, buttonConstraints);
    }
}
| bsd-3-clause |