repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
iainmcgin/OpenYOLO-Android
spi/javatests/java/org/openyolo/spi/assetlinks/data/WebAssetStatementDeserializerTest.java
1656
/* * Copyright 2016 The OpenYOLO Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openyolo.spi.assetlinks.data; import static junit.framework.Assert.assertNotNull; import static junit.framework.Assert.assertTrue; import java.util.List; import org.json.JSONObject; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import org.valid4j.errors.RequireViolation; /** * Tests for {@link WebAssetStatementDeserializer}. */ @RunWith(RobolectricTestRunner.class) @Config(manifest = Config.NONE) public class WebAssetStatementDeserializerTest { @Test(expected = RequireViolation.class) public void testNullJson() { new WebAssetStatementDeserializer().deserialize(null); } @Test public void testNoTarget() { JSONObject json = new JSONObject(); final List<WebSiteAssetStatement> assetStatements = new WebAssetStatementDeserializer() .deserialize(json); assertNotNull(assetStatements); assertTrue(assetStatements.isEmpty()); } }
apache-2.0
coding0011/elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java
18616
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authc.support.mapper;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.xpack.core.security.ScrollHelper;
import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction;
import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest;
import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest;
import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel;
import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames;
import org.elasticsearch.xpack.security.authc.support.CachingRealm;
import org.elasticsearch.xpack.security.authc.support.UserRoleMapper;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static org.elasticsearch.action.DocWriteResponse.Result.CREATED;
import static org.elasticsearch.action.DocWriteResponse.Result.DELETED;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;
import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS;
import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted;
import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed;

/**
 * This store reads + writes {@link ExpressionRoleMapping role mappings} in an Elasticsearch
 * {@link RestrictedIndicesNames#SECURITY_MAIN_ALIAS index}.
 * <br>
 * The store is responsible for all read and write operations as well as
 * {@link #resolveRoles(UserData, ActionListener) resolving roles}.
 * <p>
 * No caching is done by this class, it is handled at a higher level and no polling for changes
 * is done by this class. Modification operations make a best effort attempt to clear the cache
 * on all nodes for the user that was modified.
 */
public class NativeRoleMappingStore implements UserRoleMapper {

    private static final Logger logger = LogManager.getLogger(NativeRoleMappingStore.class);

    // Field/value used to tag role-mapping documents in the security index.
    static final String DOC_TYPE_FIELD = "doc_type";
    static final String DOC_TYPE_ROLE_MAPPING = "role-mapping";

    // Document ids are the mapping name prefixed with "role-mapping_".
    private static final String ID_PREFIX = DOC_TYPE_ROLE_MAPPING + "_";

    // Listener that deliberately ignores both outcomes; used when a realm-cache
    // refresh is triggered by index state changes and nobody awaits the result.
    private static final ActionListener<Object> NO_OP_ACTION_LISTENER = new ActionListener<Object>() {
        @Override
        public void onResponse(Object o) {
            // nothing
        }

        @Override
        public void onFailure(Exception e) {
            // nothing
        }
    };

    private final Settings settings;
    private final Client client;
    private final SecurityIndexManager securityIndex;
    private final ScriptService scriptService;
    // Realms whose caches must be cleared when a mapping changes; thread-safe list.
    private final List<String> realmsToRefresh = new CopyOnWriteArrayList<>();

    public NativeRoleMappingStore(Settings settings, Client client, SecurityIndexManager securityIndex,
                                  ScriptService scriptService) {
        this.settings = settings;
        this.client = client;
        this.securityIndex = securityIndex;
        this.scriptService = scriptService;
    }

    // Strips the "role-mapping_" prefix from a document id to recover the mapping name.
    private String getNameFromId(String id) {
        assert id.startsWith(ID_PREFIX);
        return id.substring(ID_PREFIX.length());
    }

    // Inverse of getNameFromId: builds the document id for a mapping name.
    private String getIdForName(String name) {
        return ID_PREFIX + name;
    }

    /**
     * Loads all mappings from the index.
     * Declared {@code protected} so it can be overridden for unit testing.
     */
    protected void loadMappings(ActionListener<List<ExpressionRoleMapping>> listener) {
        if (securityIndex.isIndexUpToDate() == false) {
            listener.onFailure(new IllegalStateException(
                "Security index is not on the current version - the native realm will not be operational until " +
                "the upgrade API is run on the security index"));
            return;
        }
        final QueryBuilder query = QueryBuilders.termQuery(DOC_TYPE_FIELD, DOC_TYPE_ROLE_MAPPING);
        // Capture the caller's thread context so results are delivered under it,
        // while the search itself runs under the security origin (see stash below).
        final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
        try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(SECURITY_ORIGIN)) {
            SearchRequest request = client.prepareSearch(SECURITY_MAIN_ALIAS)
                    .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                    .setQuery(query)
                    .setSize(1000)
                    .setFetchSource(true)
                    .request();
            request.indicesOptions().ignoreUnavailable();
            ScrollHelper.fetchAllByEntity(client, request,
                new ContextPreservingActionListener<>(supplier, ActionListener.wrap((Collection<ExpressionRoleMapping> mappings) ->
                    // Unparseable documents come back as null (see buildMapping) and are dropped here.
                    listener.onResponse(mappings.stream().filter(Objects::nonNull).collect(Collectors.toList())),
                ex -> {
                    // On failure the listener still gets a (empty) response rather than the error.
                    logger.error(new ParameterizedMessage("failed to load role mappings from index [{}] skipping all mappings.",
                        SECURITY_MAIN_ALIAS), ex);
                    listener.onResponse(Collections.emptyList());
                })),
                doc -> buildMapping(getNameFromId(doc.getId()), doc.getSourceRef()));
        }
    }

    /**
     * Parses a single role-mapping document. Returns {@code null} (and logs a warning)
     * when the document cannot be parsed, so callers can skip it.
     */
    protected ExpressionRoleMapping buildMapping(String id, BytesReference source) {
        try (InputStream stream = source.streamInput();
             XContentParser parser = XContentType.JSON.xContent()
                 .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
            return ExpressionRoleMapping.parse(id, parser);
        } catch (Exception e) {
            logger.warn(new ParameterizedMessage("Role mapping [{}] cannot be parsed and will be skipped", id), e);
            return null;
        }
    }

    /**
     * Stores (create or update) a single mapping in the index
     */
    public void putRoleMapping(PutRoleMappingRequest request, ActionListener<Boolean> listener) {
        modifyMapping(request.getName(), this::innerPutMapping, request, listener);
    }

    /**
     * Deletes a named mapping from the index
     */
    public void deleteRoleMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
        modifyMapping(request.getName(), this::innerDeleteMapping, request, listener);
    }

    // Common wrapper for put/delete: checks the index version, delegates to `inner`,
    // and on success refreshes the caches of the registered realms before responding.
    private <Request, Result> void modifyMapping(String name, CheckedBiConsumer<Request, ActionListener<Result>, Exception> inner,
                                                 Request request, ActionListener<Result> listener) {
        if (securityIndex.isIndexUpToDate() == false) {
            listener.onFailure(new IllegalStateException(
                "Security index is not on the current version - the native realm will not be operational until " +
                "the upgrade API is run on the security index"));
        } else {
            try {
                inner.accept(request, ActionListener.wrap(r -> refreshRealms(listener, r), listener::onFailure));
            } catch (Exception e) {
                logger.error(new ParameterizedMessage("failed to modify role-mapping [{}]", name), e);
                listener.onFailure(e);
            }
        }
    }

    // Indexes the mapping document; responds `true` when the document was newly created.
    private void innerPutMapping(PutRoleMappingRequest request, ActionListener<Boolean> listener) {
        final ExpressionRoleMapping mapping = request.getMapping();
        securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            final XContentBuilder xContentBuilder;
            try {
                xContentBuilder = mapping.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS, true);
            } catch (IOException e) {
                listener.onFailure(e);
                return;
            }
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                client.prepareIndex(SECURITY_MAIN_ALIAS, SINGLE_MAPPING_NAME, getIdForName(mapping.getName()))
                    .setSource(xContentBuilder)
                    .setRefreshPolicy(request.getRefreshPolicy())
                    .request(),
                new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse indexResponse) {
                        boolean created = indexResponse.getResult() == CREATED;
                        listener.onResponse(created);
                    }

                    @Override
                    public void onFailure(Exception e) {
                        logger.error(new ParameterizedMessage("failed to put role-mapping [{}]", mapping.getName()), e);
                        listener.onFailure(e);
                    }
                }, client::index);
        });
    }

    // Deletes the mapping document; responds `true` only when a document was actually removed.
    private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
        final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze();
        if (frozenSecurityIndex.indexExists() == false) {
            // No index means nothing to delete - not an error.
            listener.onResponse(false);
        } else if (securityIndex.isAvailable() == false) {
            listener.onFailure(frozenSecurityIndex.getUnavailableReason());
        } else {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareDelete(SECURITY_MAIN_ALIAS, SINGLE_MAPPING_NAME, getIdForName(request.getName()))
                        .setRefreshPolicy(request.getRefreshPolicy())
                        .request(),
                    new ActionListener<DeleteResponse>() {
                        @Override
                        public void onResponse(DeleteResponse deleteResponse) {
                            boolean deleted = deleteResponse.getResult() == DELETED;
                            listener.onResponse(deleted);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            logger.error(new ParameterizedMessage("failed to delete role-mapping [{}]", request.getName()), e);
                            listener.onFailure(e);
                        }
                    }, client::delete);
            });
        }
    }

    /**
     * Retrieves one or more mappings from the index.
     * If <code>names</code> is <code>null</code> or {@link Set#isEmpty empty}, then this retrieves all mappings.
     * Otherwise it retrieves the specified mappings by name.
     */
    public void getRoleMappings(Set<String> names, ActionListener<List<ExpressionRoleMapping>> listener) {
        if (names == null || names.isEmpty()) {
            getMappings(listener);
        } else {
            // Load everything, then filter client-side by name.
            getMappings(new ActionListener<List<ExpressionRoleMapping>>() {
                @Override
                public void onResponse(List<ExpressionRoleMapping> mappings) {
                    final List<ExpressionRoleMapping> filtered = mappings.stream()
                            .filter(m -> names.contains(m.getName()))
                            .collect(Collectors.toList());
                    listener.onResponse(filtered);
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            });
        }
    }

    // Loads mappings when the index is available; otherwise responds with an empty list.
    private void getMappings(ActionListener<List<ExpressionRoleMapping>> listener) {
        if (securityIndex.isAvailable()) {
            loadMappings(listener);
        } else {
            logger.info("The security index is not yet available - no role mappings can be loaded");
            if (logger.isDebugEnabled()) {
                logger.debug("Security Index [{}] [exists: {}] [available: {}] [mapping up to date: {}]",
                    SECURITY_MAIN_ALIAS,
                    securityIndex.indexExists(),
                    securityIndex.isAvailable(),
                    securityIndex.isMappingUpToDate()
                );
            }
            listener.onResponse(Collections.emptyList());
        }
    }

    /**
     * Provides usage statistics for this store.
     * The resulting map contains the keys
     * <ul>
     * <li><code>size</code> - The total number of mappings stored in the index</li>
     * <li><code>enabled</code> - The number of mappings that are
     * {@link ExpressionRoleMapping#isEnabled() enabled}</li>
     * </ul>
     */
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        if (securityIndex.isAvailable() == false) {
            reportStats(listener, Collections.emptyList());
        } else {
            getMappings(ActionListener.wrap(mappings -> reportStats(listener, mappings), listener::onFailure));
        }
    }

    // Builds the stats map documented on usageStats and responds with it.
    private void reportStats(ActionListener<Map<String, Object>> listener, List<ExpressionRoleMapping> mappings) {
        Map<String, Object> usageStats = new HashMap<>();
        usageStats.put("size", mappings.size());
        usageStats.put("enabled", mappings.stream().filter(ExpressionRoleMapping::isEnabled).count());
        listener.onResponse(usageStats);
    }

    /**
     * Refreshes realm caches when the security index recovers from red, is deleted,
     * or changes its up-to-date status, since stored mappings may have changed.
     */
    public void onSecurityIndexStateChange(SecurityIndexManager.State previousState, SecurityIndexManager.State currentState) {
        if (isMoveFromRedToNonRed(previousState, currentState) || isIndexDeleted(previousState, currentState)
            || previousState.isIndexUpToDate != currentState.isIndexUpToDate) {
            refreshRealms(NO_OP_ACTION_LISTENER, null);
        }
    }

    // Clears the caches of all registered realms, then forwards `result` to the listener.
    // Cache-clear failures are reported via listener.onFailure.
    private <Result> void refreshRealms(ActionListener<Result> listener, Result result) {
        if (realmsToRefresh.isEmpty()) {
            listener.onResponse(result);
            return;
        }
        final String[] realmNames = this.realmsToRefresh.toArray(Strings.EMPTY_ARRAY);
        executeAsyncWithOrigin(client, SECURITY_ORIGIN, ClearRealmCacheAction.INSTANCE, new ClearRealmCacheRequest().realms(realmNames),
            ActionListener.wrap(
                response -> {
                    logger.debug((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage(
                        "Cleared cached in realms [{}] due to role mapping change", Arrays.toString(realmNames)));
                    listener.onResponse(result);
                },
                ex -> {
                    logger.warn(new ParameterizedMessage("Failed to clear cache for realms [{}]", Arrays.toString(realmNames)), ex);
                    listener.onFailure(ex);
                }));
    }

    /**
     * Resolves the union of role names produced by all enabled mappings whose
     * expression matches the user's model.
     */
    @Override
    public void resolveRoles(UserData user, ActionListener<Set<String>> listener) {
        getRoleMappings(null, ActionListener.wrap(
            mappings -> {
                final ExpressionModel model = user.asModel();
                final Set<String> roles = mappings.stream()
                    .filter(ExpressionRoleMapping::isEnabled)
                    .filter(m -> m.getExpression().match(model))
                    .flatMap(m -> {
                        final Set<String> roleNames = m.getRoleNames(scriptService, model);
                        logger.trace("Applying role-mapping [{}] to user-model [{}] produced role-names [{}]",
                            m.getName(), model, roleNames);
                        return roleNames.stream();
                    })
                    .collect(Collectors.toSet());
                logger.debug("Mapping user [{}] to roles [{}]", user, roles);
                listener.onResponse(roles);
            },
            listener::onFailure
        ));
    }

    /**
     * Indicates that the provided realm should have its cache cleared if this store is updated
     * (that is, {@link #putRoleMapping(PutRoleMappingRequest, ActionListener)} or
     * {@link #deleteRoleMapping(DeleteRoleMappingRequest, ActionListener)} are called).
     * @see ClearRealmCacheAction
     */
    @Override
    public void refreshRealmOnChange(CachingRealm realm) {
        realmsToRefresh.add(realm.name());
    }
}
apache-2.0
shun634501730/java_source_cn
src_en/com/sun/org/apache/xerces/internal/impl/dv/xs/YearMonthDurationDV.java
2247
/*
 * Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 2004,2005 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sun.org.apache.xerces.internal.impl.dv.xs;

import java.math.BigInteger;
import javax.xml.datatype.DatatypeConstants;
import javax.xml.datatype.Duration;
import com.sun.org.apache.xerces.internal.impl.dv.InvalidDatatypeValueException;
import com.sun.org.apache.xerces.internal.impl.dv.ValidationContext;

/**
 * Used to validate the &lt;yearMonthDuration&gt; type
 *
 * @xerces.internal
 *
 * @author Ankit Pasricha, IBM
 *
 * @version $Id: YearMonthDurationDV.java,v 1.6 2010-11-01 04:39:47 joehw Exp $
 */
class YearMonthDurationDV extends DurationDV {

    /**
     * Parses {@code content} as a yearMonthDuration; any parse failure is
     * reported as an InvalidDatatypeValueException for cvc-datatype-valid.1.2.1.
     */
    public Object getActualValue(String content, ValidationContext context)
            throws InvalidDatatypeValueException {
        try {
            return parse(content, DurationDV.YEARMONTHDURATION_TYPE);
        } catch (Exception ex) {
            throw new InvalidDatatypeValueException("cvc-datatype-valid.1.2.1",
                    new Object[]{content, "yearMonthDuration"});
        }
    }

    /**
     * Builds a Duration from the year/month fields; a negative year or month
     * makes the whole duration negative, and undefined fields are passed as null.
     */
    protected Duration getDuration(DateTimeData date) {
        final boolean negative = date.year < 0 || date.month < 0;
        final int sign = negative ? -1 : 1;
        return datatypeFactory.newDuration(!negative,
                date.year == DatatypeConstants.FIELD_UNDEFINED ? null : BigInteger.valueOf(sign * date.year),
                date.month == DatatypeConstants.FIELD_UNDEFINED ? null : BigInteger.valueOf(sign * date.month),
                null,
                null,
                null,
                null);
    }
}
apache-2.0
tyler-travis/openstorefront
server/openstorefront/openstorefront-web/src/main/java/edu/usu/sdl/openstorefront/web/test/system/AsyncProxyTest.java
2955
/* * Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.usu.sdl.openstorefront.web.test.system; import edu.usu.sdl.openstorefront.core.api.LookupService; import edu.usu.sdl.openstorefront.core.api.model.AsyncTaskCallback; import edu.usu.sdl.openstorefront.core.api.model.TaskFuture; import edu.usu.sdl.openstorefront.core.api.model.TaskRequest; import edu.usu.sdl.openstorefront.core.entity.ErrorTypeCode; import edu.usu.sdl.openstorefront.service.manager.AsyncTaskManager; import edu.usu.sdl.openstorefront.web.test.BaseTestCase; import java.util.ArrayList; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; /** * * @author dshurtleff */ public class AsyncProxyTest extends BaseTestCase { private List<ErrorTypeCode> errorTypeCodes = new ArrayList<>(); public AsyncProxyTest() { this.description = "Async Proxy Test"; } @Override protected void runInternalTest() { results.append("Call back style: <br>"); TaskRequest taskRequest = new TaskRequest(); taskRequest.setAllowMultiple(true); taskRequest.setName(UUID.randomUUID().toString()); taskRequest.setCallback(new AsyncTaskCallback() { @Override public void beforeExecute(TaskFuture taskFuture) { } @Override public void afterExecute(TaskFuture taskFuture) { try { results.append("Runnning in callback: <br>"); List<ErrorTypeCode> errorTypeCodesLocal = (List<ErrorTypeCode>) taskFuture.getFuture().get(); 
errorTypeCodesLocal.forEach(code -> { results.append(code.getCode()).append(" - ").append(code.getDescription()).append("<br>"); }); } catch (InterruptedException | ExecutionException ex) { throw new RuntimeException(ex); } } }); LookupService asyncLookup = service.getAsyncProxy(service.getLookupService(), taskRequest); asyncLookup.findLookup(ErrorTypeCode.class); results.append("Lookup style: <br>"); TaskFuture taskFuture = AsyncTaskManager.getTaskByName(taskRequest.getName()); try { errorTypeCodes = (List<ErrorTypeCode>) taskFuture.getFuture().get(); errorTypeCodes.forEach(code -> { results.append(code.getCode()).append(" - ").append(code.getDescription()).append("<br>"); }); Thread.sleep(100); } catch (InterruptedException | ExecutionException ex) { throw new RuntimeException(ex); } } }
apache-2.0
samaitra/ignite
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/transactions/AtomicVolatilePartitionCounterStateConsistencyTest.java
2030
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.transactions; import java.util.Collection; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.junit.Ignore; /** */ public class AtomicVolatilePartitionCounterStateConsistencyTest extends AtomicPartitionCounterStateConsistencyTest { /** {@inheritDoc} */ @Override protected boolean persistenceEnabled() { return false; } /** {@inheritDoc} */ @Override protected int partitions() { return 1024; } /** {@inheritDoc} */ @Ignore @Override public void testSingleThreadedUpdateOrder() throws Exception { // Not applicable for volatile mode. } /** {@inheritDoc} */ @Ignore @Override public void testPartitionConsistencyCancelledRebalanceCoordinatorIsDemander() throws Exception { // Not applicable for volatile mode. } /** {@inheritDoc} */ @Ignore @Override public void testLateAffinityChangeDuringExchange() throws Exception { // Not applicable for volatile mode. } /** {@inheritDoc} */ @Override protected void forceCheckpoint(Collection<Ignite> nodes) throws IgniteCheckedException { // No-op. } }
apache-2.0
Nmishin/jagger
dbapi/src/main/java/com/griddynamics/jagger/dbapi/parameter/GroupKey.java
2536
/*
 * Copyright (c) 2010-2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved
 * http://www.griddynamics.com
 *
 * This library is free software; you can redistribute it and/or modify it under the terms of
 * the Apache License; either
 * version 2.0 of the License, or any later version.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.griddynamics.jagger.dbapi.parameter;

import com.google.common.base.Objects;

/**
 * Immutable pair of display names used as a grouping key.
 * <p>
 * The single-argument constructor uses the same value for both names.
 * Both names may be {@code null}; equality and hashing are null-safe.
 */
public class GroupKey {

    // Fields are final: instances are used as keys, so they must not mutate
    // after construction (there were never any setters).
    private final String upperName;
    private final String leftName;

    /**
     * Creates a key whose upper and left names are both {@code upperName}.
     */
    public GroupKey(String upperName) {
        this.upperName = upperName;
        this.leftName = upperName;
    }

    /**
     * Creates a key with distinct upper and left names.
     */
    public GroupKey(String upperName, String leftName) {
        this.upperName = upperName;
        this.leftName = leftName;
    }

    public String getUpperName() {
        return upperName;
    }

    public String getLeftName() {
        return leftName;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        GroupKey groupKey = (GroupKey) o;

        // java.util.Objects is fully qualified to avoid a clash with the
        // com.google.common.base.Objects import used by toString().
        return java.util.Objects.equals(upperName, groupKey.upperName)
                && java.util.Objects.equals(leftName, groupKey.leftName);
    }

    @Override
    public int hashCode() {
        // Same formula as before: 31 * hash(upperName) + hash(leftName),
        // with null hashing to 0 - so existing hash-based collections are unaffected.
        return 31 * java.util.Objects.hashCode(upperName) + java.util.Objects.hashCode(leftName);
    }

    @Override
    public String toString() {
        // NOTE(review): Guava's Objects.toStringHelper was deprecated and moved to
        // MoreObjects in Guava 18+ - switch once the project's Guava version is confirmed.
        return Objects.toStringHelper(this)
                .add("upperName", upperName)
                .add("leftName", leftName)
                .toString();
    }
}
apache-2.0
ryanemerson/activemq-artemis
tests/activemq5-unit-tests/src/test/java/org/apache/activemq/openwire/v4/RemoveSubscriptionInfoTest.java
2049
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.openwire.v4;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.activemq.openwire.*;
import org.apache.activemq.command.*;

/**
 * Test case for the OpenWire marshalling for RemoveSubscriptionInfo
 *
 *
 * NOTE!: This file is auto generated - do not modify!
 * if you need to make a change, please see the modify the groovy scripts in the
 * under src/gram/script and then use maven openwire:generate to regenerate
 * this file.
 *
 *
 */
public class RemoveSubscriptionInfoTest extends BaseCommandTestSupport {

    public static RemoveSubscriptionInfoTest SINGLETON = new RemoveSubscriptionInfoTest();

    public Object createObject() throws Exception {
        RemoveSubscriptionInfo instance = new RemoveSubscriptionInfo();
        populateObject(instance);
        return instance;
    }

    protected void populateObject(Object object) throws Exception {
        super.populateObject(object);
        RemoveSubscriptionInfo command = (RemoveSubscriptionInfo) object;
        command.setConnectionId(createConnectionId("ConnectionId:1"));
        command.setSubscriptionName("SubcriptionName:2");
        command.setClientId("ClientId:3");
    }
}
apache-2.0
shivpun/spring-framework
spring-context/src/main/java/org/springframework/cache/annotation/CacheConfig.java
2865
/* * Copyright 2002-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cache.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * {@code @CacheConfig} provides a mechanism for sharing common cache-related * settings at the class level. * * <p>When this annotation is present on a given class, it provides a set * of default settings for any cache operation defined in that class. * * @author Stephane Nicoll * @author Sam Brannen * @since 4.1 */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface CacheConfig { /** * Names of the default caches to consider for caching operations defined * in the annotated class. * <p>If none is set at the operation level, these are used instead of the default. * <p>May be used to determine the target cache (or caches), matching the * qualifier value or the bean names of a specific bean definition. */ String[] cacheNames() default {}; /** * The bean name of the default {@link org.springframework.cache.interceptor.KeyGenerator} to * use for the class. * <p>If none is set at the operation level, this one is used instead of the default. * <p>The key generator is mutually exclusive with the use of a custom key. 
When such key is * defined for the operation, the value of this key generator is ignored. */ String keyGenerator() default ""; /** * The bean name of the custom {@link org.springframework.cache.CacheManager} to use to * create a default {@link org.springframework.cache.interceptor.CacheResolver} if none * is set already. * <p>If no resolver and no cache manager are set at the operation level, and no cache * resolver is set via {@link #cacheResolver}, this one is used instead of the default. * @see org.springframework.cache.interceptor.SimpleCacheResolver */ String cacheManager() default ""; /** * The bean name of the custom {@link org.springframework.cache.interceptor.CacheResolver} to use. * <p>If no resolver and no cache manager are set at the operation level, this one is used * instead of the default. */ String cacheResolver() default ""; }
apache-2.0
jensim/astrix
astrix-context/src/main/java/com/avanza/astrix/beans/ft/FaultToleranceServiceBeanProxyFactory.java
1277
/* * Copyright 2014 Avanza Bank AB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.avanza.astrix.beans.ft; import com.avanza.astrix.beans.core.AstrixBeanKey; import com.avanza.astrix.beans.core.BeanProxy; import com.avanza.astrix.beans.service.ServiceBeanProxyFactory; /** * * @author Elias Lindholm * */ final class FaultToleranceServiceBeanProxyFactory implements ServiceBeanProxyFactory { private final BeanFaultToleranceFactory ftFactory; public FaultToleranceServiceBeanProxyFactory(BeanFaultToleranceFactory ftFactory) { this.ftFactory = ftFactory; } @Override public BeanProxy create(AstrixBeanKey<?> beanKey) { return ftFactory.createFaultToleranceProxy(beanKey); } @Override public int order() { return 1; } }
apache-2.0
freeVM/freeVM
enhanced/java/classlib/modules/lang-management/src/test/api/java/org/apache/harmony/lang/management/tests/java/lang/management/MemoryMXBeanTest.java
2895
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.harmony.lang.management.tests.java.lang.management;

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;

import junit.framework.TestCase;

/**
 * Tests the platform {@link MemoryMXBean} obtained from
 * {@link ManagementFactory}: heap/non-heap usage invariants, the pending
 * finalization count, and the verbose flag.
 */
public class MemoryMXBeanTest extends TestCase {

    private MemoryMXBean mb;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mb = ManagementFactory.getMemoryMXBean();
        assertNotNull(mb);
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /*
     * Test method for 'java.lang.management.MemoryMXBean.getHeapMemoryUsage()'
     */
    public void testGetHeapMemoryUsage() {
        MemoryUsage mu = mb.getHeapMemoryUsage();
        assertNotNull(mu);
        assertTrue(mu.getCommitted() >= mu.getUsed());
        // MemoryUsage.getMax() returns -1 when the maximum is undefined, in
        // which case comparing against it is meaningless. The non-heap test
        // below already guarded for this; apply the same guard here.
        if (mu.getMax() != -1) {
            assertTrue(mu.getCommitted() <= mu.getMax());
            assertTrue(mu.getUsed() <= mu.getMax());
        }
    }

    /*
     * Test method for 'java.lang.management.MemoryMXBean.getNonHeapMemoryUsage()'
     */
    public void testGetNonHeapMemoryUsage() {
        MemoryUsage mu = mb.getNonHeapMemoryUsage();
        assertNotNull(mu);
        assertTrue(mu.getCommitted() >= mu.getUsed());
        if (mu.getMax() != -1) {
            // If max is defined then used and committed will always
            // be less than or equal to it
            assertTrue(mu.getCommitted() <= mu.getMax());
            assertTrue(mu.getUsed() <= mu.getMax());
        }
    }

    /*
     * Test method for 'java.lang.management.MemoryMXBean.getObjectPendingFinalizationCount()'
     */
    public void testGetObjectPendingFinalizationCount() {
        // The count is never negative.
        assertTrue(mb.getObjectPendingFinalizationCount() > -1);
    }

    /*
     * Test method for 'java.lang.management.MemoryMXBean.setVerbose(boolean)'
     */
    public void testSetVerbose() {
        boolean initialVal = mb.isVerbose();
        mb.setVerbose(!initialVal);
        assertTrue(mb.isVerbose() != initialVal);
        // Restore the original setting so other tests are unaffected.
        mb.setVerbose(initialVal);
        assertTrue(mb.isVerbose() == initialVal);
    }
}
apache-2.0
ops4j/org.ops4j.pax.exam2
containers/pax-exam-container-remote/src/main/java/org/ops4j/pax/exam/container/remote/Parser.java
1966
/*
 * Copyright (C) 2010 Toni Menzel
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ops4j.pax.exam.container.remote;

import static org.ops4j.pax.exam.OptionUtils.filter;

import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.container.remote.options.RBCLookupTimeoutOption;
import org.ops4j.pax.exam.container.remote.options.RBCPortOption;

/**
 * Minimal parser for the rbcremote fragment. Extracts the remote host, RMI
 * port and lookup timeout from the supplied options.
 */
public class Parser {

    private String host;
    private Integer port;
    // Defaults to 0 when no RBCLookupTimeoutOption is present.
    private long timeout;

    /**
     * @param options options to scan; when an option type occurs more than
     *                once, the last occurrence wins
     * @throws IllegalArgumentException if no {@link RBCPortOption} supplied
     *                                  both a host and a port
     */
    public Parser(Option[] options) {
        extractArguments(filter(RBCPortOption.class, options));
        extractArguments(filter(RBCLookupTimeoutOption.class, options));
        // The original code validated these with "assert", which is a no-op
        // unless the JVM runs with -ea, letting a missing RBCPortOption slip
        // through as nulls. Fail fast explicitly instead.
        if (port == null) {
            throw new IllegalArgumentException("Port must be provided via an RBCPortOption.");
        }
        if (host == null) {
            throw new IllegalArgumentException("Host must be provided via an RBCPortOption.");
        }
    }

    private void extractArguments(RBCLookupTimeoutOption[] options) {
        for (RBCLookupTimeoutOption op : options) {
            timeout = op.getTimeout();
        }
    }

    private void extractArguments(RBCPortOption[] rbcPortOptions) {
        for (RBCPortOption op : rbcPortOptions) {
            host = op.getHost();
            port = op.getPort();
        }
    }

    /** @return remote host, never null. */
    public String getHost() {
        return host;
    }

    /** @return RMI port, never null. */
    public Integer getRMIPort() {
        return port;
    }

    /**
     * @return lookup timeout value (units as defined by
     *         {@link RBCLookupTimeoutOption} — presumably milliseconds; confirm
     *         against its documentation)
     * @deprecated name contains a typo; use {@link #getRMILookupTimeout()}.
     */
    @Deprecated
    public long getRMILookupTimpout() {
        return getRMILookupTimeout();
    }

    /**
     * Correctly-spelled replacement for {@link #getRMILookupTimpout()}.
     *
     * @return lookup timeout value
     */
    public long getRMILookupTimeout() {
        return timeout;
    }

    /** @return RMI port, same value as {@link #getRMIPort()}. */
    public Integer getPort() {
        return port;
    }
}
apache-2.0
huminzhi/SalesforceMobileSDK-Android
libs/SmartStore/src/com/salesforce/androidsdk/smartstore/store/IndexSpec.java
5296
/*
 * Copyright (c) 2012, salesforce.com, inc.
 * All rights reserved.
 * Redistribution and use of this software in source and binary forms, with or
 * without modification, are permitted provided that the following conditions
 * are met:
 * - Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright notice,
 *   this list of conditions and the following disclaimer in the documentation
 *   and/or other materials provided with the distribution.
 * - Neither the name of salesforce.com, inc. nor the names of its contributors
 *   may be used to endorse or promote products derived from this software without
 *   specific prior written permission of salesforce.com, inc.
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package com.salesforce.androidsdk.smartstore.store;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import com.salesforce.androidsdk.smartstore.store.SmartStore.Type;

/**
 * Simple class to represent index spec: the indexed path, the index type and
 * (optionally) the backing column name. Instances are immutable and
 * JSON-serializable.
 */
public class IndexSpec {
    public final String path;
    public final Type type;
    public final String columnName;

    /** Creates a spec with an undefined column name. */
    public IndexSpec(String path, Type type) {
        this.path = path;
        this.type = type;
        this.columnName = null; // undefined
    }

    public IndexSpec(String path, Type type, String columnName) {
        this.path = path;
        this.type = type;
        this.columnName = columnName;
    }

    @Override
    public int hashCode() {
        int result = 17;
        result = 31 * result + path.hashCode();
        result = 31 * result + type.hashCode();
        // columnName is optional; only fold it in when defined so that the
        // two-arg and three-arg(null) constructors hash identically.
        if (columnName != null)
            result = 31 * result + columnName.hashCode();
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null)
            return false;
        if (obj == this)
            return true;
        if (!(obj instanceof IndexSpec))
            return false;
        IndexSpec rhs = (IndexSpec) obj;
        boolean result = true;
        result = result && path.equals(rhs.path);
        result = result && type.equals(rhs.type);
        // Null-safe comparison of the optional column name.
        if (columnName == null)
            result = result && (columnName == rhs.columnName);
        else
            result = result && columnName.equals(rhs.columnName);
        return result;
    }

    /**
     * @return path | type
     */
    public String getPathType() {
        return path + "|" + type;
    }

    /**
     * @return JSONObject for this IndexSpec; note that JSONObject.put drops
     *         null values, so "columnName" is omitted when undefined
     * @throws JSONException
     */
    public JSONObject toJSON() throws JSONException {
        JSONObject json = new JSONObject();
        json.put("path", path);
        json.put("type", type);
        json.put("columnName", columnName);
        return json;
    }

    /**
     * @param indexSpecs
     * @return JSONArray for the array of IndexSpec's
     * @throws JSONException
     */
    public static JSONArray toJSON(IndexSpec[] indexSpecs) throws JSONException {
        JSONArray json = new JSONArray();
        for (IndexSpec indexSpec : indexSpecs) {
            json.put(indexSpec.toJSON());
        }
        return json;
    }

    /**
     * @param jsonArray
     * @return IndexSpec[] from a JSONArray
     * @throws JSONException
     */
    public static IndexSpec[] fromJSON(JSONArray jsonArray) throws JSONException {
        List<IndexSpec> list = new ArrayList<IndexSpec>();
        for (int i = 0; i < jsonArray.length(); i++) {
            list.add(IndexSpec.fromJSON(jsonArray.getJSONObject(i)));
        }
        return list.toArray(new IndexSpec[0]);
    }

    /**
     * Return IndexSpec given JSONObject
     * @param json
     * @return
     * @throws JSONException
     */
    public static IndexSpec fromJSON(JSONObject json) throws JSONException {
        // BUG FIX: optString(key) returns "" for a missing key, while toJSON()
        // omits "columnName" when it is null. The round trip therefore turned
        // a null columnName into "", breaking equals() with the original spec.
        // Passing an explicit null default preserves the round trip.
        return new IndexSpec(json.getString("path"),
                Type.valueOf(json.getString("type")),
                json.optString("columnName", null));
    }

    /**
     * @param indexSpecs
     * @return map index spec path to index spec
     */
    public static Map<String, IndexSpec> mapForIndexSpecs(IndexSpec[] indexSpecs) {
        Map<String, IndexSpec> map = new HashMap<String, IndexSpec>();
        for (IndexSpec indexSpec : indexSpecs) {
            map.put(indexSpec.path, indexSpec);
        }
        return map;
    }

    /**
     * @param indexSpecs
     * @return true if at least one of the indexSpec is of type full_text
     */
    public static boolean hasFTS(IndexSpec[] indexSpecs) {
        for (IndexSpec indexSpec : indexSpecs) {
            if (indexSpec.type == Type.full_text) {
                return true;
            }
        }
        return false;
    }
}
apache-2.0
MikeThomsen/nifi
nifi-registry/nifi-registry-core/nifi-registry-web-ui/src/test/java/org/apache/nifi/registry/ui/ITCreateDuplicateBucket.java
9295
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.registry.ui;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.Alert;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.NoAlertPresentException;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import io.github.bonigarcia.wdm.WebDriverManager;

import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Selenium UI integration test: creating a bucket named "ABC", then attempting
 * to create a second bucket with the same name, must surface an error dialog
 * and leave exactly one bucket in the list.
 *
 * NOTE(review): assumes a NiFi Registry instance is already running at
 * http://localhost:18080/nifi-registry with no pre-existing buckets — confirm
 * against the IT harness setup.
 */
public class ITCreateDuplicateBucket {
    private WebDriver driver;
    private String baseUrl;
    // Consumed (and reset) by closeAlertAndGetItsText() to decide between
    // accepting and dismissing the next browser alert.
    private boolean acceptNextAlert = true;
    private WebDriverWait wait;
    // Accumulates soft-failure messages; checked at the end of tearDown().
    private StringBuffer verificationErrors = new StringBuffer();

    /** Downloads/locates chromedriver and opens a fresh Chrome session. */
    @Before
    public void setUp() throws Exception {
        WebDriverManager.chromedriver().setup();
        driver = new ChromeDriver();
        baseUrl = "http://localhost:18080/nifi-registry";
        // All explicit waits below share this 30-second ceiling.
        wait = new WebDriverWait(driver, 30);
    }

    /**
     * Creates bucket "ABC", attempts to create a duplicate "ABC", dismisses the
     * resulting error dialog and verifies the bucket count is still 1.
     */
    @Test
    public void testCreateDuplicateBucket() throws Exception {
        // go directly to settings by URL
        driver.get(baseUrl + "/#/administration/workflow");

        // wait for administration route to load
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='no-buckets-message']")));

        // confirm new bucket button exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='new-bucket-button']")));

        // select new bucket button
        WebElement newBucketButton = driver.findElement(By.cssSelector("[data-automation-id='new-bucket-button']"));
        newBucketButton.click();

        // wait for new bucket dialog
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog")));

        // confirm bucket name field exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input")));

        // place cursor in bucket name field
        WebElement bucketNameInput = driver.findElement(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input"));
        bucketNameInput.clear();

        // name the bucket ABC
        bucketNameInput.sendKeys("ABC");

        // confirm create bucket button exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='create-new-bucket-button']")));

        // select create bucket button
        WebElement createNewBucketButton = driver.findElement(By.cssSelector("[data-automation-id='create-new-bucket-button']"));
        createNewBucketButton.click();

        // wait for create bucket dialog to close
        wait.until(ExpectedConditions.invisibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog")));

        // verify bucket added
        List<WebElement> bucketCount = driver.findElements(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container > div"));
        assertEquals(1, bucketCount.size());

        // confirm new bucket button exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='new-bucket-button']")));

        // select new bucket button
        newBucketButton = driver.findElement(By.cssSelector("[data-automation-id='new-bucket-button']"));
        newBucketButton.click();

        // wait for new bucket dialog
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog")));

        // confirm bucket name field exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input")));

        // place cursor in bucket name field
        bucketNameInput = wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input")));
        bucketNameInput.clear();

        // name the bucket ABC again
        bucketNameInput.sendKeys("ABC");

        // confirm create bucket button exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='create-new-bucket-button']")));

        // select create bucket button
        createNewBucketButton = driver.findElement(By.cssSelector("[data-automation-id='create-new-bucket-button']"));
        createNewBucketButton.click();

        // wait for the new bucket dialog to close
        wait.until(ExpectedConditions.invisibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog")));

        // wait for error dialog
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.cdk-overlay-pane")));

        // confirm the duplicate bucket error
        WebElement selectOKButton = wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.cdk-overlay-pane")));
        Actions actions = new Actions(driver);
        actions.moveToElement(selectOKButton).click().build().perform();

        // wait for the confirm dialog to close
        wait.until(ExpectedConditions.invisibilityOfElementLocated(By.cssSelector("div.cdk-overlay-pane")));

        // verify bucket ABC still there
        bucketCount = driver.findElements(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container > div"));
        assertEquals(1, bucketCount.size());
    }

    /**
     * Bucket cleanup: selects all buckets, deletes them via the actions menu,
     * waits for the empty-state message, then quits the browser and reports
     * any accumulated soft-verification errors.
     */
    @After
    public void tearDown() throws Exception {
        // bucket cleanup
        // confirm all buckets checkbox exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container-column-header div.mat-checkbox-inner-container")));

        // select all buckets checkbox
        WebElement selectAllCheckbox = driver.findElement(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container-column-header div.mat-checkbox-inner-container"));
        selectAllCheckbox.click();

        // confirm actions drop down menu exists
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-workflow-administration-perspective-buckets-container button.mat-fds-primary")));

        // select actions drop down
        WebElement selectActions = driver.findElement(By.cssSelector("#nifi-registry-workflow-administration-perspective-buckets-container button.mat-fds-primary"));
        selectActions.click();

        // select delete (clicked via JS because the menu item may be obscured
        // from a native click)
        WebElement selectDeleteBucket = driver.findElement(By.cssSelector("div.mat-menu-content button.mat-menu-item"));
        JavascriptExecutor executor = (JavascriptExecutor)driver;
        executor.executeScript("arguments[0].click();", selectDeleteBucket);

        // verify bucket deleted
        WebElement confirmDelete = wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.fds-dialog-actions button.mat-fds-warn")));
        confirmDelete.click();

        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='no-buckets-message']")));

        driver.quit();
        String verificationErrorString = verificationErrors.toString();
        if (!"".equals(verificationErrorString)) {
            fail(verificationErrorString);
        }
    }

    // Helper: true when the element can currently be located (no waiting).
    private boolean isElementPresent(By by) {
        try {
            driver.findElement(by);
            return true;
        } catch (NoSuchElementException e) {
            return false;
        }
    }

    // Helper: true when a browser alert is currently open.
    private boolean isAlertPresent() {
        try {
            driver.switchTo().alert();
            return true;
        } catch (NoAlertPresentException e) {
            return false;
        }
    }

    // Helper: accepts or dismisses the current alert (per acceptNextAlert),
    // returns its text, and always re-arms acceptNextAlert for the next call.
    private String closeAlertAndGetItsText() {
        try {
            Alert alert = driver.switchTo().alert();
            String alertText = alert.getText();
            if (acceptNextAlert) {
                alert.accept();
            } else {
                alert.dismiss();
            }
            return alertText;
        } finally {
            acceptNextAlert = true;
        }
    }
}
apache-2.0
keizer619/siddhi
modules/siddhi-core/src/main/java/org/wso2/siddhi/core/util/extension/holder/WindowProcessorExtensionHolder.java
1383
/*
 * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.siddhi.core.util.extension.holder;

import org.wso2.siddhi.core.config.ExecutionPlanContext;
import org.wso2.siddhi.core.query.processor.stream.window.WindowProcessor;

/**
 * Lazily-initialized singleton extension holder for {@link WindowProcessor}
 * extensions.
 */
public class WindowProcessorExtensionHolder extends AbstractExtensionHolder {

    private static WindowProcessorExtensionHolder instance;

    private WindowProcessorExtensionHolder(ExecutionPlanContext executionPlanContext) {
        super(WindowProcessor.class, executionPlanContext);
    }

    /**
     * Returns the shared holder, creating it on first use.
     * <p>
     * Synchronized to fix an unsynchronized lazy-initialization race in which
     * two threads calling concurrently could each create an instance.
     * <p>
     * NOTE(review): the holder is bound to the {@code executionPlanContext}
     * supplied on the first call; later callers passing a different context
     * still receive the first instance. Confirm this first-wins behaviour is
     * intended.
     *
     * @param executionPlanContext context used only when the instance is first created
     * @return the singleton holder
     */
    public static synchronized WindowProcessorExtensionHolder getInstance(ExecutionPlanContext executionPlanContext) {
        if (instance == null) {
            instance = new WindowProcessorExtensionHolder(executionPlanContext);
        }
        return instance;
    }
}
apache-2.0
peterl1084/framework
uitest/src/main/java/com/vaadin/tests/elements/abstracttextfield/AbstractFieldElementSetValueReadOnly.java
1989
package com.vaadin.tests.elements.abstracttextfield; import com.vaadin.server.VaadinRequest; import com.vaadin.tests.components.AbstractTestUI; import com.vaadin.ui.AbstractField; import com.vaadin.ui.AbstractMultiSelect; import com.vaadin.ui.AbstractSingleSelect; import com.vaadin.ui.CheckBox; import com.vaadin.ui.CheckBoxGroup; import com.vaadin.ui.ComboBox; import com.vaadin.ui.DateField; import com.vaadin.ui.ListSelect; import com.vaadin.ui.NativeSelect; import com.vaadin.ui.PasswordField; import com.vaadin.ui.RadioButtonGroup; import com.vaadin.ui.RichTextArea; import com.vaadin.ui.Slider; import com.vaadin.ui.TextArea; import com.vaadin.ui.TextField; import com.vaadin.ui.TwinColSelect; public class AbstractFieldElementSetValueReadOnly extends AbstractTestUI { private AbstractField<?>[] fields = { new TextArea(), new TextField(), new DateField(), new PasswordField(), new CheckBox(), new RichTextArea(), new Slider() }; private AbstractMultiSelect<?>[] multiSelects = { new ListSelect(), new CheckBoxGroup(), new TwinColSelect() }; private AbstractSingleSelect<?>[] singleSelects = { new ComboBox(), new NativeSelect(), new RadioButtonGroup() }; @Override protected void setup(VaadinRequest request) { for (AbstractField field : fields) { field.setReadOnly(true); addComponent(field); } for (AbstractMultiSelect multiSelect : multiSelects) { multiSelect.setReadOnly(true); addComponent(multiSelect); } for (AbstractSingleSelect singleSelect : singleSelects) { singleSelect.setReadOnly(true); addComponent(singleSelect); } } @Override protected String getTestDescription() { return "When vaadin element is set ReadOnly, setValue() method should raise an exception"; } @Override protected Integer getTicketNumber() { return 14068; } }
apache-2.0
dsyang/buck
src/com/facebook/buck/rules/query/QueryCache.java
2062
/*
 * Copyright 2017-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.facebook.buck.rules.query;

import com.facebook.buck.query.CachingQueryEvaluator;
import com.facebook.buck.query.QueryEvaluator;
import com.facebook.buck.query.QueryException;
import com.facebook.buck.query.QueryExpression;
import com.facebook.buck.rules.TargetGraph;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.Objects;
import java.util.concurrent.ExecutionException;

/** Cache that evaluates and stores the result of a dependency {@link Query}. */
public class QueryCache {

  // One lazily-created evaluator per target graph; the evaluator itself
  // caches evaluated query expressions.
  private final LoadingCache<TargetGraph, CachingQueryEvaluator> evaluators;

  public QueryCache() {
    evaluators = CacheBuilder.newBuilder().build(CacheLoader.from(CachingQueryEvaluator::new));
  }

  /** Returns (creating on demand) the evaluator associated with the given graph. */
  QueryEvaluator getQueryEvaluator(TargetGraph targetGraph) {
    try {
      return evaluators.get(targetGraph);
    } catch (ExecutionException e) {
      // The loader cannot realistically fail, but get() declares this anyway.
      throw new RuntimeException("Failed to obtain query evaluator", e);
    }
  }

  /** Test hook: true iff the parsed query is already cached for this graph. */
  @VisibleForTesting
  boolean isPresent(TargetGraph targetGraph, GraphEnhancementQueryEnvironment env, Query query)
      throws ExecutionException, QueryException {
    CachingQueryEvaluator cached = evaluators.getIfPresent(targetGraph);
    if (Objects.isNull(cached)) {
      return false;
    }
    return cached.isPresent(QueryExpression.parse(query.getQuery(), env));
  }
}
apache-2.0
akalankapagoda/andes
modules/andes-core/broker/src/main/java/org/wso2/andes/server/management/MBeanInvocationHandlerImpl.java
11273
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.wso2.andes.server.management;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.util.Set;

import javax.management.Attribute;
import javax.management.JMException;
import javax.management.MBeanInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanServer;
import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.ObjectName;
import javax.management.remote.JMXConnectionNotification;
import javax.management.remote.JMXPrincipal;
import javax.management.remote.MBeanServerForwarder;
import javax.security.auth.Subject;

import org.apache.log4j.Logger;
import org.wso2.andes.server.logging.actors.ManagementActor;
import org.wso2.andes.server.logging.messages.ManagementConsoleMessages;
import org.wso2.andes.server.registry.ApplicationRegistry;
import org.wso2.andes.server.security.SecurityManager;
import org.wso2.andes.server.security.access.Operation;

/**
 * This class can be used by the JMXConnectorServer as an InvocationHandler
 * for the mbean operations. It delegates JMX access decisions to the
 * SecurityPlugin.
 *
 * Also listens for JMX connection open/close/fail notifications and logs them
 * via the management actor (see {@link #handleNotification}).
 */
public class MBeanInvocationHandlerImpl implements InvocationHandler, NotificationListener
{
    private static final Logger _logger = Logger.getLogger(MBeanInvocationHandlerImpl.class);

    // ObjectName of the MBeanServer's own delegate bean; calls naming it are
    // always passed through without authorization (see invoke()).
    private final static String DELEGATE = "JMImplementation:type=MBeanServerDelegate";
    // Underlying server; set once via the proxied "setMBeanServer" call.
    private MBeanServer _mbs;
    private static ManagementActor _logActor;

    /**
     * Creates an {@link MBeanServerForwarder} proxy whose calls are routed
     * through a new instance of this handler. Also initializes the shared
     * management log actor.
     */
    public static MBeanServerForwarder newProxyInstance()
    {
        final InvocationHandler handler = new MBeanInvocationHandlerImpl();
        final Class<?>[] interfaces = new Class[] { MBeanServerForwarder.class };

        _logActor = new ManagementActor(ApplicationRegistry.getInstance().getRootMessageLogger());

        Object proxy = Proxy.newProxyInstance(MBeanServerForwarder.class.getClassLoader(), interfaces, handler);
        return MBeanServerForwarder.class.cast(proxy);
    }

    /**
     * Intercepts every MBeanServer call made through the proxy and applies
     * access control before delegating to the real server.
     *
     * Order of checks is significant:
     * 1. getMBeanServer/setMBeanServer are handled locally.
     * 2. Calls with no Subject (local connector-server work) or targeting the
     *    MBeanServer delegate pass through unchecked.
     * 3. createMBean/unregisterMBean are denied for all users.
     * 4. queryNames is always allowed.
     * 5. Everything else requires a JMXPrincipal and an ACCESS/UPDATE/EXECUTE
     *    authorization decision from the (possibly per-virtual-host)
     *    SecurityManager.
     */
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable
    {
        final String methodName = getMethodName(method, args);

        if (methodName.equals("getMBeanServer"))
        {
            return _mbs;
        }

        if (methodName.equals("setMBeanServer"))
        {
            if (args[0] == null)
            {
                throw new IllegalArgumentException("Null MBeanServer");
            }
            if (_mbs != null)
            {
                throw new IllegalArgumentException("MBeanServer object already initialized");
            }
            _mbs = (MBeanServer) args[0];
            return null;
        }

        // Retrieve Subject from current AccessControlContext
        AccessControlContext acc = AccessController.getContext();
        Subject subject = Subject.getSubject(acc);

        try
        {
            // Allow operations performed locally on behalf of the connector server itself
            if (subject == null)
            {
                return method.invoke(_mbs, args);
            }

            if (args == null || DELEGATE.equals(args[0]))
            {
                return method.invoke(_mbs, args);
            }

            // Restrict access to "createMBean" and "unregisterMBean" to any user
            if (methodName.equals("createMBean") || methodName.equals("unregisterMBean"))
            {
                _logger.debug("User trying to create or unregister an MBean");
                throw new SecurityException("Access denied: " + methodName);
            }

            // Allow querying available object names
            if (methodName.equals("queryNames"))
            {
                return method.invoke(_mbs, args);
            }

            // Retrieve JMXPrincipal from Subject
            Set<JMXPrincipal> principals = subject.getPrincipals(JMXPrincipal.class);
            if (principals == null || principals.isEmpty())
            {
                throw new SecurityException("Access denied: no JMX principal");
            }

            // Save the subject
            SecurityManager.setThreadSubject(subject);

            // Get the component, type and impact, which may be null
            String type = getType(method, args);
            String vhost = getVirtualHost(method, args);
            int impact = getImpact(method, args);

            // Get the security manager for the virtual host (if set)
            SecurityManager security;
            if (vhost == null)
            {
                security = ApplicationRegistry.getInstance().getSecurityManager();
            }
            else
            {
                security = ApplicationRegistry.getInstance().getVirtualHostRegistry().getVirtualHost(vhost).getSecurityManager();
            }

            if (isAccessMethod(methodName) || impact == MBeanOperationInfo.INFO)
            {
                // Check for read-only method invocation permission
                if (!security.authoriseMethod(Operation.ACCESS, type, methodName))
                {
                    throw new SecurityException("Permission denied: Access " + methodName);
                }
            }
            else if (isUpdateMethod(methodName))
            {
                // Check for setting properties permission
                if (!security.authoriseMethod(Operation.UPDATE, type, methodName))
                {
                    throw new SecurityException("Permission denied: Update " + methodName);
                }
            }
            else
            {
                // Check for invoking/executing method action/operation permission
                if (!security.authoriseMethod(Operation.EXECUTE, type, methodName))
                {
                    throw new SecurityException("Permission denied: Execute " + methodName);
                }
            }

            // Actually invoke the method
            return method.invoke(_mbs, args);
        }
        catch (InvocationTargetException e)
        {
            // Unwrap so the caller sees the MBean's own exception, not the
            // reflection wrapper.
            throw e.getTargetException();
        }
    }

    // Extracts the "type" key property from the target ObjectName (args[0]),
    // or null when the first argument is not an ObjectName. Only called after
    // invoke() has established args != null.
    private String getType(Method method, Object[] args)
    {
        if (args[0] instanceof ObjectName)
        {
            ObjectName object = (ObjectName) args[0];
            String type = object.getKeyProperty("type");

            return type;
        }
        return null;
    }

    // Extracts the "VirtualHost" key property from the target ObjectName,
    // unquoting it if necessary; null when absent or not an ObjectName.
    private String getVirtualHost(Method method, Object[] args)
    {
        if (args[0] instanceof ObjectName)
        {
            ObjectName object = (ObjectName) args[0];
            String vhost = object.getKeyProperty("VirtualHost");

            if(vhost != null)
            {
                try
                {
                    //if the name is quoted in the ObjectName, unquote it
                    vhost = ObjectName.unquote(vhost);
                }
                catch(IllegalArgumentException e)
                {
                    //ignore, this just means the name is not quoted
                    //and can be left unchanged
                }
            }

            return vhost;
        }
        return null;
    }

    // Maps a generic MBeanServer call (getAttribute/setAttribute/invoke) onto
    // the logical MBean method name it represents, so the access checks apply
    // to the real operation rather than the reflective wrapper.
    private String getMethodName(Method method, Object[] args)
    {
        String methodName = method.getName();

        // if arguments are set, try and work out real method name
        if (args != null && args.length >= 1 && args[0] instanceof ObjectName)
        {
            if (methodName.equals("getAttribute"))
            {
                methodName = "get" + (String) args[1];
            }
            else if (methodName.equals("setAttribute"))
            {
                methodName = "set" + ((Attribute) args[1]).getName();
            }
            else if (methodName.equals("invoke"))
            {
                methodName = (String) args[1];
            }
        }

        return methodName;
    }

    // Looks up the MBeanOperationInfo impact (e.g. INFO vs ACTION) of the
    // invoked operation; returns -1 when it cannot be determined.
    private int getImpact(Method method, Object[] args)
    {
        //handle invocation of other methods on mbeans
        if ((args[0] instanceof ObjectName) && (method.getName().equals("invoke")))
        {
            //get invoked method name
            String mbeanMethod = (args.length > 1) ? (String) args[1] : null;
            if (mbeanMethod == null)
            {
                return -1;
            }

            try
            {
                //Get the impact attribute
                MBeanInfo mbeanInfo = _mbs.getMBeanInfo((ObjectName) args[0]);
                if (mbeanInfo != null)
                {
                    MBeanOperationInfo[] opInfos = mbeanInfo.getOperations();
                    for (MBeanOperationInfo opInfo : opInfos)
                    {
                        if (opInfo.getName().equals(mbeanMethod))
                        {
                            return opInfo.getImpact();
                        }
                    }
                }
            }
            catch (JMException ex)
            {
                _logger.error("Unable to determine mbean impact for method : " + mbeanMethod, ex);
            }
        }

        return -1;
    }

    // True for read-only accessors exposed through the MBeanServer API.
    private boolean isAccessMethod(String methodName)
    {
        //handle standard get/query/is methods from MBeanServer
        return (methodName.startsWith("query") || methodName.startsWith("get") || methodName.startsWith("is"));
    }

    // True for mutating setters exposed through the MBeanServer API.
    private boolean isUpdateMethod(String methodName)
    {
        //handle standard set methods from MBeanServer
        return methodName.startsWith("set");
    }

    /**
     * Logs JMX connection lifecycle events. Only RMI connections are serviced
     * here; the connection id has the form "rmi://host user number", e.g.
     * "rmi://169.24.29.116 guest 3", from which the user name is extracted.
     */
    public void handleNotification(Notification notification, Object handback)
    {
        assert notification instanceof JMXConnectionNotification;

        // Connection id format: "rmi://169.24.29.116 guest 3" — split on
        // spaces; the second token is the user name.
        String[] connectionData = ((JMXConnectionNotification) notification).getConnectionId().split(" ");
        String user = connectionData[1];

        if (notification.getType().equals(JMXConnectionNotification.OPENED))
        {
            _logActor.message(ManagementConsoleMessages.OPEN(user));
        }
        else if (notification.getType().equals(JMXConnectionNotification.CLOSED) ||
                 notification.getType().equals(JMXConnectionNotification.FAILED))
        {
            _logActor.message(ManagementConsoleMessages.CLOSE());
        }
    }
}
apache-2.0
batfish/batfish
projects/batfish-common-protocol/src/main/java/org/batfish/datamodel/vendor_family/juniper/JuniperFamily.java
3192
package org.batfish.datamodel.vendor_family.juniper; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.annotations.VisibleForTesting; import java.io.Serializable; import java.util.Collections; import java.util.SortedMap; import java.util.TreeMap; import org.batfish.datamodel.AaaAuthenticationLoginList; import org.batfish.datamodel.AuthenticationMethod; import org.batfish.datamodel.Line; public class JuniperFamily implements Serializable { private static final String PROP_LINES = "lines"; private static final String PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD = "rootAuthenticationEncryptedPassword"; private static final String PROP_SYSTEM_AUTHENTICATION_ORDER = "systemAuthenticationOrder"; private static final String PROP_TACPLUS_SERVERS = "tacplusServers"; @VisibleForTesting public static final String CONSOLE_LINE_NAME = "console"; @VisibleForTesting public static final String AUXILIARY_LINE_NAME = "auxiliary"; private SortedMap<String, Line> _lines; private String _rootAuthenticationEncryptedPassword; private AaaAuthenticationLoginList _systemAuthenticationOrder; private SortedMap<String, TacplusServer> _tacplusServers; public JuniperFamily() { _lines = new TreeMap<>(); _tacplusServers = new TreeMap<>(); _systemAuthenticationOrder = // default authentication order is just password authentication new AaaAuthenticationLoginList( Collections.singletonList(AuthenticationMethod.PASSWORD), true); // Juniper has by default the console and aux lines enabled Line console = new Line(CONSOLE_LINE_NAME); console.setAaaAuthenticationLoginList(_systemAuthenticationOrder); _lines.put(CONSOLE_LINE_NAME, console); Line aux = new Line(AUXILIARY_LINE_NAME); aux.setAaaAuthenticationLoginList(_systemAuthenticationOrder); _lines.put(AUXILIARY_LINE_NAME, aux); } @JsonProperty(PROP_LINES) public SortedMap<String, Line> getLines() { return _lines; } @JsonProperty(PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD) public String 
getRootAuthenticationEncryptedPassword() { return _rootAuthenticationEncryptedPassword; } @JsonProperty(PROP_SYSTEM_AUTHENTICATION_ORDER) public AaaAuthenticationLoginList getSystemAuthenticationOrder() { return _systemAuthenticationOrder; } @JsonProperty(PROP_TACPLUS_SERVERS) public SortedMap<String, TacplusServer> getTacplusServers() { return _tacplusServers; } @JsonProperty(PROP_LINES) public void setLines(SortedMap<String, Line> lines) { _lines = lines; } @JsonProperty(PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD) public void setRootAuthenticationEncryptedPassword(String rootAuthenticationEncryptedPassword) { _rootAuthenticationEncryptedPassword = rootAuthenticationEncryptedPassword; } @JsonProperty(PROP_SYSTEM_AUTHENTICATION_ORDER) public void setSystemAuthenticationOrder(AaaAuthenticationLoginList authenticationOrder) { _systemAuthenticationOrder = authenticationOrder; } @JsonProperty(PROP_TACPLUS_SERVERS) public void setTacplusServers(SortedMap<String, TacplusServer> tacplusServers) { _tacplusServers = tacplusServers; } }
apache-2.0
nmldiegues/stibt
infinispan/core/src/main/java/org/infinispan/container/entries/package-info.java
1532
/* * JBoss, Home of Professional Open Source * Copyright 2010 Red Hat Inc. and/or its affiliates and other * contributors as indicated by the @author tags. All rights reserved. * See the copyright.txt in the distribution for a full listing of * individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ /** * Entries which are stored in data containers. This package contains different implementations of * entries based on the information needed to store an entry. Certain entries need more information - such as timestamps * and lifespans, if they are used - than others, and the appropriate implementation is selected dynamically. This * helps minimize Infinispan's memory requirements without storing unnecessary metadata. */ package org.infinispan.container.entries;
apache-2.0
ty1er/incubator-asterixdb
asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Util.java
8750
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.asterix.external.classad;

import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Random;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.asterix.om.base.AMutableInt32;

/**
 * Assorted ClassAd helper routines: escape-sequence conversion, random number
 * helpers, time/offset formatting, and Gregorian-calendar day arithmetic.
 */
public class Util {

    // Matches a backslash followed by 1-3 octal digits (first digit 0-3, so the
    // value fits in a byte). Used by the second, regex-based pass of
    // convertEscapes() to substitute octal escapes left in place by the first pass.
    private static final Pattern OCTAL = Pattern.compile("\\\\([0-3][0-7]{0,2})");

    // convert escapes in-place
    // the string can only shrink while converting escapes so we can safely convert in-place.
    /**
     * Converts C-style escape sequences ({@code \b \f \n \r \t \\} and octal
     * {@code \nnn}) in {@code text} to their literal characters, in place.
     * Simple escapes are rewritten in a single forward pass; octal escapes are
     * only flagged here and substituted afterwards via the {@link #OCTAL} regex.
     *
     * @param text the mutable string to convert; modified in place
     * @return false if an octal escape denotes 0 or a value above 255, true otherwise
     */
    public static boolean convertEscapes(AMutableCharArrayString text) {
        boolean validStr = true;
        if (text.getLength() == 0) {
            return true;
        }
        int dest = 0;
        boolean hasOctal = false;
        for (int source = 0; source < text.getLength(); ++source) {
            char ch = text.charAt(source);
            // scan for escapes, a terminating slash cannot be an escape
            if (ch == '\\' && source < text.getLength() - 1) {
                ++source; // skip the \ character
                ch = text.charAt(source);
                // The escape part should be re-validated
                switch (ch) {
                    case 'b':
                        ch = '\b';
                        break;
                    case 'f':
                        ch = '\f';
                        break;
                    case 'n':
                        ch = '\n';
                        break;
                    case 'r':
                        ch = '\r';
                        break;
                    case 't':
                        ch = '\t';
                        break;
                    case '\\':
                        ch = '\\';
                        break;
                    default:
                        if (Lexer.isodigit(ch)) {
                            // Octal escape: keep the "\nnn" text intact (advance dest
                            // past the backslash) and resolve it in the regex pass below.
                            hasOctal = true;
                            ++dest;
                        }
                        break;
                }
            }
            if (dest == source) {
                // no need to assign ch to text when we haven't seen any escapes yet.
                // text[dest] = ch;
                ++dest;
            } else {
                // NOTE(review): once an escape has been seen, each kept character is
                // shifted left by erasing at dest and rewriting; --source compensates
                // for the shrink before the loop's ++source. This ordering is delicate;
                // verify against AMutableCharArrayString.erase/setChar semantics.
                try {
                    text.erase(dest);
                    text.setChar(dest, ch);
                    ++dest;
                    --source;
                } catch (Throwable th) {
                    th.printStackTrace();
                }
            }
        }
        // Trim any tail left over after in-place compaction.
        if (dest < text.getLength()) {
            text.erase(dest);
            text.setLength(dest);
        }
        // silly, but to fulfill the original contract for this function
        // we need to remove the last character in the string if it is a '\0'
        // (earlier logic guaranteed that a '\0' can ONLY be the last character)
        if (text.getLength() > 0 && (text.charAt(text.getLength() - 1) == '\0')) {
            text.erase(text.getLength() - 1);
        }
        if (hasOctal) {
            // Second pass: replace each "\nnn" with the character it encodes.
            Matcher m = OCTAL.matcher(text.toString());
            StringBuffer out = new StringBuffer();
            while (m.find()) {
                int octet = Integer.parseInt(m.group(1), 8);
                // Reject NUL and anything that does not fit in one byte.
                if (octet == 0 || octet > 255) {
                    return false;
                }
                m.appendReplacement(out, String.valueOf((char) octet));
            }
            m.appendTail(out);
            // Re-decode the byte sequence as UTF-8 so multi-byte octal sequences
            // collapse into the characters they encode.
            text.setValue(new String(out.toString().getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8));
        }
        return validStr;
    }

    // Shared RNG, seeded once from the wall clock at class load.
    public static Random initialized = new Random((new Date()).getTime());

    /** Returns a uniformly distributed random int from the shared RNG. */
    public static int getRandomInteger() {
        return initialized.nextInt();
    }

    /** Returns a uniformly distributed random double in [0, 1) from the shared RNG. */
    public static double getRandomReal() {
        return initialized.nextDouble();
    }

    /** Returns the UTC offset carried by {@code clock} (units as defined by ClassAdTime). */
    public static int timezoneOffset(ClassAdTime clock) {
        return clock.getOffset();
    }

    /** Copies {@code now} into {@code localtm} using the default-locale calendar and marks it absolute. */
    public static void getLocalTime(ClassAdTime now, ClassAdTime localtm) {
        localtm.setValue(Calendar.getInstance(), now);
        localtm.isAbsolute(true);
    }

    /**
     * Appends {@code atime} to {@code buffer} as an ISO-8601-style absolute time
     * with explicit offset, e.g. {@code 2004-01-01T00:00:00+11:00}.
     */
    public static void absTimeToString(ClassAdTime atime, AMutableCharArrayString buffer) {
        DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
        //"yyyy-MM-dd'T'HH:mm:ss"
        //2004-01-01T00:00:00+11:00
        formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
        buffer.appendString(formatter.format(atime.getCalendar().getTime()));
        // NOTE(review): the hour field divides the offset by 3,600,000 (i.e. treats
        // it as milliseconds) but the minute field divides by 60 — the units look
        // inconsistent; verify against ClassAdTime.getOffset() before relying on
        // non-whole-hour offsets.
        buffer.appendString(
                (atime.getOffset() >= 0 ? "+" : "-") + String.format("%02d", (Math.abs(atime.getOffset()) / 3600000))
                        + ":" + String.format("%02d", ((Math.abs(atime.getOffset() / 60) % 60))));
    }

    /**
     * Appends a relative time to {@code buffer} as [-][D+]HH:MM:SS with fractional
     * seconds when present.
     *
     * NOTE(review): despite the name, {@code rsecs} appears to be in milliseconds —
     * the code splits it with {@code % 1000} / {@code / 1000} — confirm with callers.
     */
    public static void relTimeToString(long rsecs, AMutableCharArrayString buffer) {
        double fractional_seconds;
        int days, hrs, mins;
        double secs;
        if (rsecs < 0) {
            buffer.appendChar('-');
            rsecs = -rsecs;
        }
        fractional_seconds = rsecs % 1000;
        // The int variables briefly hold remainders in seconds and are then divided
        // down to whole days/hours/minutes — the evaluation order below matters.
        days = (int) (rsecs / 1000);
        hrs = days % 86400;
        mins = hrs % 3600;
        secs = (mins % 60) + (fractional_seconds / 1000.0);
        days = days / 86400;
        hrs = hrs / 3600;
        mins = mins / 60;
        if (days != 0) {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%d+%02d:%02d:%02d", days, hrs, mins, (int) secs));
            } else {
                buffer.appendString(String.format("%d+%02d:%02d:%g", days, hrs, mins, secs));
            }
        } else if (hrs != 0) {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%02d:%02d:%02d", hrs, mins, (int) secs));
            } else {
                buffer.appendString(String.format("%02d:%02d:%02g", hrs, mins, secs));
            }
        } else if (mins != 0) {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%02d:%02d", mins, (int) secs));
            } else {
                buffer.appendString(String.format("%02d:%02g", mins, secs));
            }
            return;
        } else {
            if (fractional_seconds == 0) {
                buffer.appendString(String.format("%02d", (int) secs));
            } else {
                buffer.appendString(String.format("%02g", secs));
            }
        }
    }

    /**
     * Computes the day of week ({@code fixed % 7}) and zero-based day of year for
     * the given Gregorian date, writing the results into the out-parameters.
     */
    public static void dayNumbers(int year, int month, int day, AMutableInt32 weekday, AMutableInt32 yearday) {
        int fixed = fixedFromGregorian(year, month, day);
        int jan1_fixed = fixedFromGregorian(year, 1, 1);
        weekday.setValue(fixed % 7);
        yearday.setValue(fixed - jan1_fixed);
        return;
    }

    /**
     * Converts a Gregorian date to a fixed (continuous) day number using the
     * standard 365*(y-1) + leap-day-correction formula, with a month adjustment
     * for dates after February.
     */
    public static int fixedFromGregorian(int year, int month, int day) {
        int fixed;
        int month_adjustment;
        if (month <= 2) {
            month_adjustment = 0;
        } else if (isLeapYear(year)) {
            month_adjustment = -1;
        } else {
            month_adjustment = -2;
        }
        fixed = 365 * (year - 1) + ((year - 1) / 4) - ((year - 1) / 100) + ((year - 1) / 400)
                + ((367 * month - 362) / 12) + month_adjustment + day;
        return fixed;
    }

    /**
     * Gregorian leap-year test: divisible by 4 and not a non-quadricentennial
     * century (years that are 100, 200 or 300 mod 400 are not leap years).
     */
    public static boolean isLeapYear(int year) {
        int mod4;
        int mod400;
        boolean leap_year;
        mod4 = year % 4;
        mod400 = year % 400;
        if (mod4 == 0 && mod400 != 100 && mod400 != 200 && mod400 != 300) {
            leap_year = true;
        } else {
            leap_year = false;
        }
        return leap_year;
    }

    /** Returns -1 for negative infinity, 1 for positive infinity, 0 otherwise. */
    public static int isInf(double x) {
        if (Double.isInfinite(x)) {
            return (x < 0.0) ? (-1) : 1;
        }
        return 0;
    }

    /** Returns true iff {@code x} is NaN. */
    public static boolean isNan(double x) {
        return Double.isNaN(x);
    }
}
apache-2.0
dharshanaw/carbon-identity-framework
components/entitlement/org.wso2.carbon.identity.entitlement.common/src/main/java/org/wso2/carbon/identity/entitlement/common/dto/ElementCountDTO.java
1931
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.identity.entitlement.common.dto; public class ElementCountDTO { private int subElementCount; private int attributeDesignatorsElementCount; private int attributeValueElementCount; private int attributeSelectorElementCount; public int getSubElementCount() { return subElementCount; } public void setSubElementCount(int subElementCount) { this.subElementCount = subElementCount; } public int getAttributeSelectorElementCount() { return attributeSelectorElementCount; } public void setAttributeSelectorElementCount(int attributeSelectorElementCount) { this.attributeSelectorElementCount = attributeSelectorElementCount; } public int getAttributeValueElementCount() { return attributeValueElementCount; } public void setAttributeValueElementCount(int attributeValueElementCount) { this.attributeValueElementCount = attributeValueElementCount; } public int getAttributeDesignatorsElementCount() { return attributeDesignatorsElementCount; } public void setAttributeDesignatorsElementCount(int attributeDesignatorsElementCount) { this.attributeDesignatorsElementCount = attributeDesignatorsElementCount; } }
apache-2.0
sijie/bookkeeper
stream/distributedlog/common/src/main/java/org/apache/distributedlog/common/concurrent/AsyncSemaphore.java
5678
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.distributedlog.common.concurrent; import java.util.LinkedList; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.RejectedExecutionException; import java.util.function.Supplier; import javax.annotation.concurrent.GuardedBy; import org.apache.bookkeeper.common.concurrent.FutureUtils; import org.apache.distributedlog.common.util.Permit; /** * An AsyncSemaphore is a traditional semaphore but with asynchronous * execution. * * <p>Grabbing a permit returns a `Future[Permit]`. * * <p>Basic usage: * {{{ * val semaphore = new AsyncSemaphore(n) * ... * semaphore.acquireAndRun() { * somethingThatReturnsFutureT() * } * }}} * * <p>Calls to acquire() and acquireAndRun are serialized, and tickets are * given out fairly (in order of arrival). 
*/ public class AsyncSemaphore { private final Optional<Integer> maxWaiters; private final Permit semaphorePermit = new Permit() { @Override public void release() { releasePermit(this); } }; @GuardedBy("this") private Optional<Throwable> closed = Optional.empty(); @GuardedBy("this") private final LinkedList<CompletableFuture<Permit>> waitq; @GuardedBy("this") private int availablePermits; public AsyncSemaphore(int initialPermits, Optional<Integer> maxWaiters) { this.availablePermits = initialPermits; this.waitq = new LinkedList<>(); this.maxWaiters = maxWaiters; } private synchronized void releasePermit(Permit permit) { CompletableFuture<Permit> next = waitq.pollFirst(); if (null != next) { next.complete(permit); } else { availablePermits += 1; } } private CompletableFuture<Permit> newFuturePermit() { return FutureUtils.value(semaphorePermit); } /** * Acquire a [[Permit]], asynchronously. * * <p>Be sure to `permit.release()` in a * - `finally` block of your `onSuccess` callback * - `ensure` block of your future chain * * <p>Interrupting this future is only advisory, and will not release the permit * if the future has already been satisfied. * * @note This method always return the same instance of [[Permit]]. * @return a `Future[Permit]` when the `Future` is satisfied, computation can proceed, * or a Future.Exception[RejectedExecutionException]` if the configured maximum * number of waiters would be exceeded. 
*/ public synchronized CompletableFuture<Permit> acquire() { if (closed.isPresent()) { return FutureUtils.exception(closed.get()); } if (availablePermits > 0) { availablePermits -= 1; return newFuturePermit(); } else { if (maxWaiters.isPresent() && waitq.size() >= maxWaiters.get()) { return FutureUtils.exception(new RejectedExecutionException("Max waiters exceeded")); } else { CompletableFuture<Permit> future = FutureUtils.createFuture(); future.whenComplete((value, cause) -> { synchronized (AsyncSemaphore.this) { waitq.remove(future); } }); waitq.addLast(future); return future; } } } /** * Fail the semaphore and stop it from distributing further permits. Subsequent * attempts to acquire a permit fail with `exc`. This semaphore's queued waiters * are also failed with `exc`. */ public synchronized void fail(Throwable exc) { closed = Optional.of(exc); for (CompletableFuture<Permit> future : waitq) { future.cancel(true); } waitq.clear(); } /** * Execute the function asynchronously when a permit becomes available. * * <p>If the function throws a non-fatal exception, the exception is returned as part of the Future. * For all exceptions, the permit would be released before returning. * * @return a Future[T] equivalent to the return value of the input function. If the configured * maximum value of waitq is reached, Future.Exception[RejectedExecutionException] is * returned. */ public <T> CompletableFuture<T> acquireAndRun(Supplier<CompletableFuture<T>> func) { return acquire().thenCompose(permit -> { CompletableFuture<T> future; try { future = func.get(); future.whenComplete((value, cause) -> permit.release()); return future; } catch (Throwable cause) { permit.release(); throw cause; } }); } }
apache-2.0
pk1057/asterisk-java
src/test/java/org/asteriskjava/pbx/agi/RateLimiterTest.java
647
package org.asteriskjava.pbx.agi; import static org.junit.Assert.assertTrue; import org.asteriskjava.pbx.agi.RateLimiter; import org.junit.Test; public class RateLimiterTest { @Test public void test() throws InterruptedException { long now = System.currentTimeMillis(); RateLimiter limiter = new RateLimiter(3); for (int i = 0; i < 15; i++) { limiter.acquire(); System.out.println(System.currentTimeMillis()); Thread.sleep(100); } // this should have taken around 5 seconds assertTrue(System.currentTimeMillis() - now > 4000L); } }
apache-2.0
idea4bsd/idea4bsd
platform/platform-impl/src/com/intellij/openapi/vfs/encoding/EncodingUtil.java
11775
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vfs.encoding;

import com.intellij.AppTopics;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileDocumentManagerAdapter;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectLocator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;

/**
 * Helpers for re-reading ("reload") or re-saving ("convert") a file in a
 * different charset, plus the eligibility checks the encoding UI actions use.
 */
public class EncodingUtil {

  // Verdict of a safety probe: safe, lossy-but-possible, or impossible.
  enum Magic8 {
    ABSOLUTELY,
    WELL_IF_YOU_INSIST,
    NO_WAY
  }

  // check if file can be loaded in the encoding correctly:
  // returns true if bytes on disk, converted to text with the charset, converted back to bytes matched
  static Magic8 isSafeToReloadIn(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytes, @NotNull Charset charset) {
    // file has BOM but the charset hasn't
    byte[] bom = virtualFile.getBOM();
    if (bom != null && !CharsetToolkit.canHaveBom(charset, bom)) return Magic8.NO_WAY;

    // the charset has mandatory BOM (e.g. UTF-xx) but the file hasn't or has wrong
    byte[] mandatoryBom = CharsetToolkit.getMandatoryBom(charset);
    if (mandatoryBom != null && !ArrayUtil.startsWith(bytes, mandatoryBom)) return Magic8.NO_WAY;

    // Decode the on-disk bytes in the candidate charset ...
    String loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, charset).toString();

    // ... normalize line separators to the file's own separator ...
    String separator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null);
    String toSave = StringUtil.convertLineSeparators(loaded, separator);

    String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile);
    if (failReason != null && CharsetToolkit.UTF8_CHARSET.equals(virtualFile.getCharset()) && !CharsetToolkit.UTF8_CHARSET.equals(charset)) {
      return Magic8.NO_WAY; // can't reload utf8-autodetected file in another charset
    }

    // ... then re-encode and compare with the original bytes (round-trip test).
    byte[] bytesToSave;
    try {
      bytesToSave = toSave.getBytes(charset);
    }
    catch (UnsupportedOperationException e) {
      return Magic8.NO_WAY;
    }
    if (bom != null && !ArrayUtil.startsWith(bytesToSave, bom)) {
      bytesToSave = ArrayUtil.mergeArrays(bom, bytesToSave); // for 2-byte encodings String.getBytes(Charset) adds BOM automatically
    }

    // Bytes differ -> unsafe; bytes match but displayed text changes -> lossy.
    return !Arrays.equals(bytesToSave, bytes) ? Magic8.NO_WAY :
           loaded.equals(text) ? Magic8.ABSOLUTELY :
           Magic8.WELL_IF_YOU_INSIST;
  }

  // Checks whether the in-memory text can be saved in the given charset:
  // encodes the text and decodes it back, comparing text (lossless?) and
  // bytes (identical to disk?).
  static Magic8 isSafeToConvertTo(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytesOnDisk, @NotNull Charset charset) {
    try {
      String lineSeparator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null);
      String textToSave = lineSeparator.equals("\n") ? text : StringUtil.convertLineSeparators(text, lineSeparator);

      Pair<Charset, byte[]> chosen = LoadTextUtil.chooseMostlyHarmlessCharset(virtualFile.getCharset(), charset, textToSave);

      byte[] saved = chosen.second;

      CharSequence textLoadedBack = LoadTextUtil.getTextByBinaryPresentation(saved, charset);

      return !text.equals(textLoadedBack.toString()) ? Magic8.NO_WAY :
             Arrays.equals(saved, bytesOnDisk) ? Magic8.ABSOLUTELY :
             Magic8.WELL_IF_YOU_INSIST;
    }
    catch (UnsupportedOperationException e) { // unsupported encoding
      return Magic8.NO_WAY;
    }
  }

  // Saves the document's current text to disk re-encoded in the given charset,
  // then records the charset as the file's encoding. Shows an error hint/dialog
  // on failure instead of throwing.
  static void saveIn(@NotNull final Document document,
                     final Editor editor,
                     @NotNull final VirtualFile virtualFile,
                     @NotNull final Charset charset) {
    FileDocumentManager documentManager = FileDocumentManager.getInstance();
    documentManager.saveDocument(document);
    final Project project = ProjectLocator.getInstance().guessProjectForFile(virtualFile);
    boolean writable = project == null ? virtualFile.isWritable() : ReadonlyStatusHandler.ensureFilesWritable(project, virtualFile);
    if (!writable) {
      CommonRefactoringUtil.showErrorHint(project, editor, "Cannot save the file " + virtualFile.getPresentableUrl(), "Unable to Save", null);
      return;
    }

    // first, save the file in the new charset and then mark the file as having the correct encoding
    try {
      ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<Object, IOException>() {
        @Override
        public Object compute() throws IOException {
          virtualFile.setCharset(charset);
          LoadTextUtil.write(project, virtualFile, virtualFile, document.getText(), document.getModificationStamp());
          return null;
        }
      });
    }
    catch (IOException io) {
      Messages.showErrorDialog(project, io.getMessage(), "Error Writing File");
    }

    // suppressReloadDuring prevents the encoding change itself from triggering
    // a document reload while we record the new encoding.
    EncodingProjectManagerImpl.suppressReloadDuring(() -> EncodingManager.getInstance().setEncoding(virtualFile, charset));
  }

  // Re-reads the file from disk interpreting its bytes in the given charset.
  // If no document is cached, only the encoding association is updated.
  static void reloadIn(@NotNull final VirtualFile virtualFile, @NotNull final Charset charset) {
    final FileDocumentManager documentManager = FileDocumentManager.getInstance();
    //Project project = ProjectLocator.getInstance().guessProjectForFile(myFile);
    //if (documentManager.isFileModified(myFile)) {
    //  int result = Messages.showDialog(project, "File is modified. Reload file anyway?", "File is Modified", new String[]{"Reload", "Cancel"}, 0, AllIcons.General.WarningDialog);
    //  if (result != 0) return;
    //}

    if (documentManager.getCachedDocument(virtualFile) == null) {
      // no need to reload document
      EncodingManager.getInstance().setEncoding(virtualFile, charset);
      return;
    }

    // Subscribe so that the new encoding is recorded at the moment the content
    // reload actually happens (and the stale auto-detection flag is cleared).
    final Disposable disposable = Disposer.newDisposable();
    MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(disposable);
    connection.subscribe(AppTopics.FILE_DOCUMENT_SYNC, new FileDocumentManagerAdapter() {
      @Override
      public void beforeFileContentReload(VirtualFile file, @NotNull Document document) {
        if (!file.equals(virtualFile)) return;
        Disposer.dispose(disposable); // disconnect
        EncodingManager.getInstance().setEncoding(file, charset);
        LoadTextUtil.setCharsetWasDetectedFromBytes(file, null);
      }
    });

    // if file was modified, the user will be asked here
    try {
      EncodingProjectManagerImpl.suppressReloadDuring(() -> ((VirtualFileListener)documentManager).contentsChanged(
        new VirtualFileEvent(null, virtualFile, virtualFile.getName(), virtualFile.getParent())));
    }
    finally {
      Disposer.dispose(disposable);
    }
  }

  // returns (hardcoded charset from the file type, explanation) or (null, null) if file type does not restrict encoding
  @NotNull
  private static Pair<Charset, String> checkHardcodedCharsetFileType(@NotNull VirtualFile virtualFile) {
    FileType fileType = virtualFile.getFileType();
    if (fileType.isBinary()) return Pair.create(null, "binary file");
    // in lesser IDEs all special file types are plain text so check for that first
    if (fileType == FileTypes.PLAIN_TEXT) return Pair.create(null, null);

    if (fileType == StdFileTypes.GUI_DESIGNER_FORM) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA GUI Designer form");
    if (fileType == StdFileTypes.IDEA_MODULE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA module file");
    if (fileType == StdFileTypes.IDEA_PROJECT) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA project file");
    if (fileType == StdFileTypes.IDEA_WORKSPACE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA workspace file");

    if (fileType == StdFileTypes.PROPERTIES) return Pair.create(virtualFile.getCharset(), ".properties file");

    if (fileType == StdFileTypes.XML || fileType == StdFileTypes.JSPX) {
      return Pair.create(virtualFile.getCharset(), "XML file");
    }
    return Pair.create(null, null);
  }

  @NotNull
  // returns pair (existing charset (null means N/A); failReason: null means enabled, notnull means disabled and contains error message)
  public static Pair<Charset, String> checkCanReload(@NotNull VirtualFile virtualFile) {
    if (virtualFile.isDirectory()) {
      return Pair.create(null, "file is a directory");
    }
    FileDocumentManager documentManager = FileDocumentManager.getInstance();
    Document document = documentManager.getDocument(virtualFile);
    if (document == null) return Pair.create(null, "binary file");
    Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile);
    Charset existing = charsetFromContent;
    String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile);
    if (failReason != null) {
      // no point changing encoding if it was auto-detected
      existing = virtualFile.getCharset();
    }
    else if (charsetFromContent != null) {
      // encoding declared inside the file content (e.g. an XML prolog) wins
      failReason = "hard coded in text";
    }
    else {
      Pair<Charset, String> fileTypeCheck = checkHardcodedCharsetFileType(virtualFile);
      if (fileTypeCheck.second != null) {
        failReason = fileTypeCheck.second;
        existing = fileTypeCheck.first;
      }
    }
    if (failReason != null) {
      return Pair.create(existing, failReason);
    }
    return Pair.create(virtualFile.getCharset(), null);
  }

  // Checks whether the "convert encoding" action applies to the file.
  @Nullable("null means enabled, notnull means disabled and contains error message")
  static String checkCanConvert(@NotNull VirtualFile virtualFile) {
    if (virtualFile.isDirectory()) {
      return "file is a directory";
    }

    String failReason = null;
    Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile);
    if (charsetFromContent != null) {
      failReason = "Encoding is hard-coded in the text";
    }
    else {
      Pair<Charset, String> check = checkHardcodedCharsetFileType(virtualFile);
      if (check.second != null) {
        failReason = check.second;
      }
    }
    if (failReason != null) {
      return failReason;
    }
    return null;
  }

  // null means enabled, (current charset, error description) otherwise
  // (enabled when either convert or reload is possible for the file)
  @Nullable
  public static Pair<Charset, String> checkSomeActionEnabled(@NotNull VirtualFile selectedFile) {
    String saveError = checkCanConvert(selectedFile);
    if (saveError == null) return null;
    Pair<Charset, String> reloadError = checkCanReload(selectedFile);
    if (reloadError.second == null) return null;
    return Pair.create(reloadError.first, saveError);
  }
}
apache-2.0
OpenSOC/opensoc-streaming
OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/TopologyRunner.java
36860
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.opensoc.topology.runner;

import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;

import oi.thekraken.grok.api.Grok;

import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.hdfs.bolt.HdfsBolt;
import org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat;
import org.apache.storm.hdfs.bolt.format.DelimitedRecordFormat;
import org.apache.storm.hdfs.bolt.format.FileNameFormat;
import org.apache.storm.hdfs.bolt.format.RecordFormat;
import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
import org.apache.storm.hdfs.common.rotation.MoveFileAction;
import org.json.simple.JSONObject;

import storm.kafka.BrokerHosts;
import storm.kafka.KafkaSpout;
import storm.kafka.SpoutConfig;
import storm.kafka.ZkHosts;
import storm.kafka.bolt.KafkaBolt;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.Grouping;
import backtype.storm.spout.RawScheme;
import backtype.storm.spout.SchemeAsMultiScheme;
import backtype.storm.topology.BoltDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

import com.esotericsoftware.kryo.serializers.FieldSerializer;
import com.esotericsoftware.kryo.serializers.MapSerializer;
import com.opensoc.alerts.TelemetryAlertsBolt;
import com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter;
import com.opensoc.alerts.interfaces.AlertsAdapter;
import com.opensoc.enrichment.adapters.cif.CIFHbaseAdapter;
import com.opensoc.enrichment.adapters.geo.GeoMysqlAdapter;
import com.opensoc.enrichment.adapters.host.HostFromPropertiesFileAdapter;
import com.opensoc.enrichment.adapters.whois.WhoisHBaseAdapter;
import com.opensoc.enrichment.adapters.threat.ThreatHbaseAdapter;
import com.opensoc.enrichment.common.GenericEnrichmentBolt;
import com.opensoc.enrichment.interfaces.EnrichmentAdapter;
import com.opensoc.hbase.HBaseBolt;
import com.opensoc.hbase.HBaseStreamPartitioner;
import com.opensoc.hbase.TupleTableConfig;
import com.opensoc.helpers.topology.Cli;
import com.opensoc.helpers.topology.SettingsLoader;
import com.opensoc.index.interfaces.IndexAdapter;
import com.opensoc.indexing.TelemetryIndexingBolt;
import com.opensoc.json.serialization.JSONKryoSerializer;

/**
 * Base runner for OpenSOC Storm topologies.
 *
 * <p>Reads a {@code topology.conf} (plus environment/topology identifier files),
 * conditionally wires up a spout and a chain of enrichment/alert/indexing/output
 * bolts according to the {@code *.enabled} flags in that configuration, and then
 * submits the topology either to an in-process {@link LocalCluster} or to a real
 * cluster via {@link StormSubmitter}.
 *
 * <p>Concrete subclasses supply the telemetry-specific parsing bolt and test
 * spout via {@link #initializeParsingBolt(String, String)} and
 * {@link #initializeTestingSpout(String)}.
 */
public abstract class TopologyRunner {

    /** Topology configuration loaded from {@code topology.conf}. */
    protected Configuration config;
    /** Storm topology under construction. */
    protected TopologyBuilder builder;
    /** Storm runtime configuration submitted with the topology. */
    protected Config conf;
    /** Run inside an in-process LocalCluster instead of submitting remotely. */
    protected boolean local_mode = true;
    /** Enables Storm debug logging. */
    protected boolean debug = true;
    /** Root directory of the configuration tree (CLI override or default). */
    protected String config_path = null;
    protected String default_config_path = "OpenSOC_Configs";
    // NOTE(review): the per-component init methods assign this flag but its value
    // is never checked before submission — a failed optional component is only
    // visible through its stack trace.
    protected boolean success = false;

    // Chains of already-wired component names; each initializer hooks onto the
    // last element of the relevant stack (i.e. the most recently added upstream).
    protected Stack<String> messageComponents = new Stack<String>();
    protected Stack<String> errorComponents = new Stack<String>();
    protected Stack<String> alertComponents = new Stack<String>();
    protected Stack<String> dataComponents = new Stack<String>();
    protected Stack<String> terminalComponents = new Stack<String>();

    /**
     * Builds and submits the topology.
     *
     * @param args   command-line arguments (parsed by {@link Cli})
     * @param subdir subdirectory under {@code <config_path>/topologies} holding
     *               this topology's {@code topology.conf} and identifiers
     * @throws Exception if configuration loading or topology submission fails
     */
    public void initTopology(String args[], String subdir) throws Exception {
        Cli command_line = new Cli(args);
        command_line.parse();

        System.out.println("[OpenSOC] Starting topology deployment...");

        debug = command_line.isDebug();
        System.out.println("[OpenSOC] Debug mode set to: " + debug);

        local_mode = command_line.isLocal_mode();
        System.out.println("[OpenSOC] Local mode set to: " + local_mode);

        if (command_line.getPath() != null) {
            config_path = command_line.getPath();
            System.out.println("[OpenSOC] Setting config path to external config path: "
                    + config_path);
        } else {
            config_path = default_config_path;
            System.out.println("[OpenSOC] Initializing from default internal config path: "
                    + config_path);
        }

        String topology_conf_path = config_path + "/topologies/" + subdir + "/topology.conf";
        String environment_identifier_path = config_path
                + "/topologies/environment_identifier.conf";
        String topology_identifier_path = config_path + "/topologies/" + subdir
                + "/topology_identifier.conf";

        System.out.println("[OpenSOC] Looking for environment identifier: "
                + environment_identifier_path);
        System.out.println("[OpenSOC] Looking for topology identifier: "
                + topology_identifier_path);
        System.out.println("[OpenSOC] Looking for topology config: " + topology_conf_path);

        config = new PropertiesConfiguration(topology_conf_path);

        // (sic) "Idnetifier" is the actual SettingsLoader API spelling.
        JSONObject environment_identifier = SettingsLoader
                .loadEnvironmentIdnetifier(environment_identifier_path);
        JSONObject topology_identifier = SettingsLoader
                .loadTopologyIdnetifier(topology_identifier_path);

        String topology_name = SettingsLoader.generateTopologyName(
                environment_identifier, topology_identifier);

        System.out.println("[OpenSOC] Initializing Topology: " + topology_name);

        builder = new TopologyBuilder();
        conf = new Config();
        conf.registerSerialization(JSONObject.class, MapSerializer.class);
        conf.setDebug(debug);

        // ---- Spout: either a synthetic test spout or the Kafka spout ----
        System.out.println("[OpenSOC] Initializing Spout: " + topology_name);
        if (command_line.isGenerator_spout()) {
            String component_name = config.getString("spout.test.name",
                    "DefaultTopologySpout");
            success = initializeTestingSpout(component_name);
            messageComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "spout.test");
        }
        if (!command_line.isGenerator_spout()) {
            String component_name = config.getString("spout.kafka.name",
                    "DefaultTopologyKafkaSpout");
            success = initializeKafkaSpout(component_name);
            messageComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "spout.kafka");
        }

        // ---- Parser bolt (feeds the message, error and raw-data streams) ----
        if (config.getBoolean("bolt.parser.enabled", true)) {
            String component_name = config.getString("bolt.parser.name",
                    "DefaultTopologyParserBot");
            success = initializeParsingBolt(topology_name, component_name);
            messageComponents.add(component_name);
            errorComponents.add(component_name);
            dataComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.parser");
        }

        // ---- Optional enrichment bolts, chained onto the message stream ----
        if (config.getBoolean("bolt.enrichment.geo.enabled", false)) {
            String component_name = config.getString("bolt.enrichment.geo.name",
                    "DefaultGeoEnrichmentBolt");
            success = initializeGeoEnrichment(topology_name, component_name);
            messageComponents.add(component_name);
            errorComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                    "bolt.enrichment.geo");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "mysql");
        }
        if (config.getBoolean("bolt.enrichment.host.enabled", false)) {
            String component_name = config.getString("bolt.enrichment.host.name",
                    "DefaultHostEnrichmentBolt");
            // NOTE(review): this path is hard-coded to the default config tree and
            // ignores an external --config_path override — confirm intent.
            success = initializeHostsEnrichment(topology_name, component_name,
                    "OpenSOC_Configs/etc/whitelists/known_hosts.conf");
            messageComponents.add(component_name);
            errorComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                    "bolt.enrichment.host");
        }
        if (config.getBoolean("bolt.enrichment.whois.enabled", false)) {
            String component_name = config.getString("bolt.enrichment.whois.name",
                    "DefaultWhoisEnrichmentBolt");
            success = initializeWhoisEnrichment(topology_name, component_name);
            messageComponents.add(component_name);
            errorComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                    "bolt.enrichment.whois");
        }
        if (config.getBoolean("bolt.enrichment.cif.enabled", false)) {
            String component_name = config.getString("bolt.enrichment.cif.name",
                    "DefaultCIFEnrichmentBolt");
            success = initializeCIFEnrichment(topology_name, component_name);
            messageComponents.add(component_name);
            errorComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                    "bolt.enrichment.cif");
        }
        if (config.getBoolean("bolt.enrichment.threat.enabled", false)) {
            String component_name = config.getString("bolt.enrichment.threat.name",
                    "DefaultThreatEnrichmentBolt");
            success = initializeThreatEnrichment(topology_name, component_name);
            messageComponents.add(component_name);
            errorComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                    "bolt.enrichment.threat");
        }

        // ---- Alerts generation and (optionally) alert indexing ----
        if (config.getBoolean("bolt.alerts.enabled", false)) {
            String component_name = config.getString("bolt.alerts.name",
                    "DefaultAlertsBolt");
            success = initializeAlerts(topology_name, component_name,
                    config_path + "/topologies/" + subdir + "/alerts.xml",
                    environment_identifier, topology_identifier);
            messageComponents.add(component_name);
            errorComponents.add(component_name);
            alertComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.alerts");
        }
        // NOTE(review): no default supplied here — a missing key throws rather
        // than disabling the component, unlike the other feature flags above.
        if (config.getBoolean("bolt.alerts.indexing.enabled")
                && config.getBoolean("bolt.alerts.enabled")) {
            String component_name = config.getString("bolt.alerts.indexing.name",
                    "DefaultAlertsBolt");
            success = initializeAlertIndexing(component_name);
            terminalComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
                    "bolt.alerts.indexing");
        }

        // ---- Terminal/output components ----
        if (config.getBoolean("bolt.kafka.enabled", false)) {
            String component_name = config.getString("bolt.kafka.name", "DefaultKafkaBolt");
            success = initializeKafkaBolt(component_name);
            terminalComponents.add(component_name);
            System.out.println("[OpenSOC] Component " + component_name + " initialized");
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.kafka");
        }
        if (config.getBoolean("bolt.indexing.enabled", true)) {
            String component_name = config.getString("bolt.indexing.name",
                    "DefaultIndexingBolt");
            success = initializeIndexingBolt(component_name);
            errorComponents.add(component_name);
            terminalComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.indexing");
        }
        if (config.getBoolean("bolt.hdfs.enabled", false)) {
            String component_name = config.getString("bolt.hdfs.name", "DefaultHDFSBolt");
            success = initializeHDFSBolt(topology_name, component_name);
            terminalComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.hdfs");
        }
        if (config.getBoolean("bolt.error.indexing.enabled")) {
            String component_name = config.getString("bolt.error.indexing.name",
                    "DefaultErrorIndexingBolt");
            success = initializeErrorIndexBolt(component_name);
            terminalComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.error");
        }
        if (config.containsKey("bolt.hbase.enabled")
                && config.getBoolean("bolt.hbase.enabled")) {
            String component_name = config.getString("bolt.hbase.name", "DefaultHbaseBolt");
            String shuffleType = config.getString("bolt.hbase.shuffle.type", "direct");
            success = initializeHbaseBolt(component_name, shuffleType);
            terminalComponents.add(component_name);
            System.out.println("[OpenSOC] ------Component " + component_name
                    + " initialized with the following settings:");
            SettingsLoader.printConfigOptions((PropertiesConfiguration) config, "bolt.hbase");
        }

        // ---- Summary and submission ----
        System.out.println("[OpenSOC] Topology Summary: ");
        System.out.println("[OpenSOC] Message Stream: "
                + printComponentStream(messageComponents));
        System.out.println("[OpenSOC] Alerts Stream: "
                + printComponentStream(alertComponents));
        System.out.println("[OpenSOC] Error Stream: "
                + printComponentStream(errorComponents));
        System.out.println("[OpenSOC] Data Stream: "
                + printComponentStream(dataComponents));
        System.out.println("[OpenSOC] Terminal Components: "
                + printComponentStream(terminalComponents));

        if (local_mode) {
            conf.setNumWorkers(config.getInt("num.workers"));
            // Serialize execution within the local cluster for determinism.
            conf.setMaxTaskParallelism(1);
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology(topology_name, conf, builder.createTopology());
        } else {
            conf.setNumWorkers(config.getInt("num.workers"));
            conf.setNumAckers(config.getInt("num.ackers"));
            StormSubmitter.submitTopology(topology_name, conf, builder.createTopology());
        }
    }

    /** Renders a component chain as "a -> b -> [TERMINAL COMPONENT]" for the summary. */
    private String printComponentStream(List<String> messageComponents) {
        StringBuilder print_string = new StringBuilder();
        for (String component : messageComponents) {
            print_string.append(component + " -> ");
        }
        print_string.append("[TERMINAL COMPONENT]");
        return print_string.toString();
    }

    /**
     * Wires the HBase output bolt onto the raw-data stream.
     *
     * <p>BUG FIX: bolt construction and {@code builder.setBolt} registration were
     * previously inside the column-family loop, so a "bolt.hbase.table.fields"
     * value with more than one column family registered the same component id
     * multiple times (Storm rejects duplicate ids) and configured batching before
     * all columns were added. The bolt is now created once, after all column
     * families/qualifiers have been applied.
     *
     * @param name        component id to register
     * @param shuffleType "custom_object" for region-aware partitioning, otherwise
     *                    fields-grouping on "pcap_id"
     * @return always {@code true}; any failure is printed and terminates the JVM
     */
    public boolean initializeHbaseBolt(String name, String shuffleType) {
        try {
            String messageUpstreamComponent = dataComponents.get(dataComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            String tableName = config.getString("bolt.hbase.table.name").toString();
            TupleTableConfig hbaseBoltConfig = new TupleTableConfig(tableName,
                    config.getString("bolt.hbase.table.key.tuple.field.name").toString(),
                    config.getString("bolt.hbase.table.timestamp.tuple.field.name").toString());

            String allColumnFamiliesColumnQualifiers = config.getString(
                    "bolt.hbase.table.fields").toString();
            // Expected in the form "<cf1>:<cq11>,<cq12>,<cq13>|<cf2>:<cq21>,<cq22>|..."
            // (commons-lang split treats "\\|" as the set of chars {'\\','|'}, which
            // still splits correctly on '|').
            String[] tokenizedColumnFamiliesWithColumnQualifiers = StringUtils.split(
                    allColumnFamiliesColumnQualifiers, "\\|");
            for (String tokenizedColumnFamilyWithColumnQualifiers : tokenizedColumnFamiliesWithColumnQualifiers) {
                String[] cfCqTokens = StringUtils.split(
                        tokenizedColumnFamilyWithColumnQualifiers, ":");
                String columnFamily = cfCqTokens[0];
                String[] columnQualifiers = StringUtils.split(cfCqTokens[1], ",");
                for (String columnQualifier : columnQualifiers) {
                    hbaseBoltConfig.addColumn(columnFamily, columnQualifier);
                }
            }

            // hbaseBoltConfig.setDurability(Durability.valueOf(conf.get(
            // "storm.topology.pcap.bolt.hbase.durability").toString()));
            hbaseBoltConfig.setBatch(Boolean.valueOf(config.getString(
                    "bolt.hbase.enable.batching").toString()));

            HBaseBolt hbase_bolt = new HBaseBolt(hbaseBoltConfig,
                    config.getString("kafka.zk.list"),
                    config.getString("kafka.zk.port"));
            hbase_bolt.setAutoAck(true);

            BoltDeclarer declarer = builder.setBolt(name, hbase_bolt,
                    config.getInt("bolt.hbase.parallelism.hint"))
                    .setNumTasks(config.getInt("bolt.hbase.num.tasks"));

            if (Grouping._Fields.CUSTOM_OBJECT.toString().equalsIgnoreCase(shuffleType)) {
                // NOTE(review): this reads the refresh interval from the Storm conf
                // map rather than the topology config — it NPEs unless the key was
                // placed into `conf` elsewhere; confirm the intended source.
                declarer.customGrouping(messageUpstreamComponent, "pcap_data_stream",
                        new HBaseStreamPartitioner(hbaseBoltConfig.getTableName(), 0,
                                Integer.parseInt(conf.get(
                                        "bolt.hbase.partitioner.region.info.refresh.interval.mins")
                                        .toString())));
            } else if (Grouping._Fields.DIRECT.toString().equalsIgnoreCase(shuffleType)) {
                declarer.fieldsGrouping(messageUpstreamComponent, "pcap_data_stream",
                        new Fields("pcap_id"));
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /**
     * Wires the error-index bolt, which receives the "error" stream of every
     * component registered in {@link #errorComponents}.
     */
    private boolean initializeErrorIndexBolt(String component_name) {
        try {
            Class<?> loaded_class = Class.forName(config.getString("bolt.error.indexing.adapter"));
            IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();

            String dateFormat = "yyyy.MM";
            // NOTE(review): this reads the *alerts* timestamp key inside the
            // *error* index bolt — looks like a copy-paste slip; confirm whether
            // "bolt.error.indexing.timestamp" was intended before changing.
            if (config.containsKey("bolt.alerts.indexing.timestamp")) {
                dateFormat = config.getString("bolt.alerts.indexing.timestamp");
            }

            TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
                    .withIndexIP(config.getString("es.ip"))
                    .withIndexPort(config.getInt("es.port"))
                    .withClusterName(config.getString("es.clustername"))
                    .withIndexName(config.getString("bolt.error.indexing.indexname"))
                    .withDocumentName(config.getString("bolt.error.indexing.documentname"))
                    .withIndexTimestamp(dateFormat)
                    .withBulk(config.getInt("bolt.error.indexing.bulk"))
                    .withIndexAdapter(adapter)
                    .withMetricConfiguration(config);

            BoltDeclarer declarer = builder.setBolt(component_name, indexing_bolt,
                    config.getInt("bolt.error.indexing.parallelism.hint"))
                    .setNumTasks(config.getInt("bolt.error.indexing.num.tasks"));

            for (String component : errorComponents) {
                declarer.shuffleGrouping(component, "error");
            }
            return true;
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
    }

    /** Wires the Kafka spout that feeds raw telemetry into the topology. */
    private boolean initializeKafkaSpout(String name) {
        try {
            BrokerHosts zk = new ZkHosts(config.getString("kafka.zk"));
            String input_topic = config.getString("spout.kafka.topic");
            SpoutConfig kafkaConfig = new SpoutConfig(zk, input_topic, "", input_topic);
            kafkaConfig.scheme = new SchemeAsMultiScheme(new RawScheme());
            kafkaConfig.forceFromStart = true; // was Boolean.valueOf("True")
            kafkaConfig.startOffsetTime = -1;

            builder.setSpout(name, new KafkaSpout(kafkaConfig),
                    config.getInt("spout.kafka.parallelism.hint"))
                    .setNumTasks(config.getInt("spout.kafka.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /** Subclasses wire the telemetry-specific parser bolt. */
    abstract boolean initializeParsingBolt(String topology_name, String name);

    /** Subclasses wire a synthetic spout used when --generator_spout is set. */
    abstract boolean initializeTestingSpout(String name);

    /** Wires the GeoIP (MySQL-backed) enrichment bolt onto the message stream. */
    private boolean initializeGeoEnrichment(String topology_name, String name) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            String[] keys_from_settings = config.getStringArray("bolt.enrichment.geo.fields");
            List<String> geo_keys = new ArrayList<String>(Arrays.asList(keys_from_settings));

            GeoMysqlAdapter geo_adapter = new GeoMysqlAdapter(
                    config.getString("mysql.ip"), config.getInt("mysql.port"),
                    config.getString("mysql.username"),
                    config.getString("mysql.password"),
                    config.getString("bolt.enrichment.geo.adapter.table"));

            GenericEnrichmentBolt geo_enrichment = new GenericEnrichmentBolt()
                    .withEnrichmentTag(config.getString("bolt.enrichment.geo.enrichment_tag"))
                    .withOutputFieldName(topology_name)
                    .withAdapter(geo_adapter)
                    .withMaxTimeRetain(config.getInt("bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES"))
                    .withMaxCacheSize(config.getInt("bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM"))
                    .withKeys(geo_keys)
                    .withMetricConfiguration(config);

            builder.setBolt(name, geo_enrichment,
                    config.getInt("bolt.enrichment.geo.parallelism.hint"))
                    .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key"))
                    .setNumTasks(config.getInt("bolt.enrichment.geo.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /**
     * Wires the known-hosts enrichment bolt, backed by a properties whitelist.
     *
     * @param hosts_path path to the known_hosts.conf whitelist file
     */
    private boolean initializeHostsEnrichment(String topology_name, String name,
            String hosts_path) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            // The keys are themselves read from config ("source.ip"/"dest.ip" name
            // the message fields carrying the addresses).
            List<String> hosts_keys = new ArrayList<String>();
            hosts_keys.add(config.getString("source.ip"));
            hosts_keys.add(config.getString("dest.ip"));

            Map<String, JSONObject> known_hosts = SettingsLoader.loadKnownHosts(hosts_path);

            HostFromPropertiesFileAdapter host_adapter = new HostFromPropertiesFileAdapter(
                    known_hosts);

            GenericEnrichmentBolt host_enrichment = new GenericEnrichmentBolt()
                    .withEnrichmentTag(config.getString("bolt.enrichment.host.enrichment_tag"))
                    .withAdapter(host_adapter)
                    .withMaxTimeRetain(config.getInt("bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES"))
                    .withMaxCacheSize(config.getInt("bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM"))
                    .withOutputFieldName(topology_name)
                    .withKeys(hosts_keys)
                    .withMetricConfiguration(config);

            builder.setBolt(name, host_enrichment,
                    config.getInt("bolt.enrichment.host.parallelism.hint"))
                    .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key"))
                    .setNumTasks(config.getInt("bolt.enrichment.host.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /**
     * Wires the alerts bolt. The adapter class is loaded reflectively from
     * "bolt.alerts.adapter" and constructed with a Map of options whose keys are
     * prefixed by the adapter class name in the topology config.
     */
    private boolean initializeAlerts(String topology_name, String name,
            String alerts_path, JSONObject environment_identifier,
            JSONObject topology_identifier) {
        try {
            Class<?> loaded_class = Class.forName(config.getString("bolt.alerts.adapter"));
            Constructor<?> constructor = loaded_class.getConstructor(Map.class);

            Map<String, String> settings = SettingsLoader.getConfigOptions(
                    (PropertiesConfiguration) config,
                    config.getString("bolt.alerts.adapter") + ".");

            System.out.println("Adapter Settings: ");
            SettingsLoader.printOptionalSettings(settings);

            AlertsAdapter alerts_adapter = (AlertsAdapter) constructor.newInstance(settings);

            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            JSONObject alerts_identifier = SettingsLoader.generateAlertsIdentifier(
                    environment_identifier, topology_identifier);

            TelemetryAlertsBolt alerts_bolt = new TelemetryAlertsBolt()
                    .withIdentifier(alerts_identifier)
                    .withMaxCacheSize(1000)
                    .withMaxTimeRetain(3600)
                    .withAlertsAdapter(alerts_adapter)
                    .withOutputFieldName("message")
                    .withMetricConfiguration(config);

            builder.setBolt(name, alerts_bolt, config.getInt("bolt.alerts.parallelism.hint"))
                    .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key"))
                    .setNumTasks(config.getInt("bolt.alerts.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /** Wires the Elasticsearch bolt that indexes the "alert" stream. */
    private boolean initializeAlertIndexing(String name) {
        try {
            String messageUpstreamComponent = alertComponents
                    .get(alertComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            Class<?> loaded_class = Class.forName(config.getString("bolt.alerts.indexing.adapter"));
            IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();

            String dateFormat = "yyyy.MM.dd";
            if (config.containsKey("bolt.alerts.indexing.timestamp")) {
                dateFormat = config.getString("bolt.alerts.indexing.timestamp");
            }

            TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
                    .withIndexIP(config.getString("es.ip"))
                    .withIndexPort(config.getInt("es.port"))
                    .withClusterName(config.getString("es.clustername"))
                    .withIndexName(config.getString("bolt.alerts.indexing.indexname"))
                    .withDocumentName(config.getString("bolt.alerts.indexing.documentname"))
                    .withIndexTimestamp(dateFormat)
                    .withBulk(config.getInt("bolt.alerts.indexing.bulk"))
                    .withIndexAdapter(adapter)
                    .withMetricConfiguration(config);

            String alerts_name = config.getString("bolt.alerts.indexing.name");
            // NOTE(review): parallelism/num.tasks come from the generic
            // "bolt.indexing.*" keys rather than "bolt.alerts.indexing.*", and the
            // bolt is registered under the config value, not the `name` parameter
            // — both look like copy-paste artifacts; confirm before changing.
            builder.setBolt(alerts_name, indexing_bolt,
                    config.getInt("bolt.indexing.parallelism.hint"))
                    .shuffleGrouping(messageUpstreamComponent, "alert")
                    .setNumTasks(config.getInt("bolt.indexing.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /** Wires the Kafka output bolt that republishes enriched messages. */
    private boolean initializeKafkaBolt(String name) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            Map<String, String> kafka_broker_properties = new HashMap<String, String>();
            kafka_broker_properties.put("zk.connect", config.getString("kafka.zk"));
            kafka_broker_properties.put("metadata.broker.list", config.getString("kafka.br"));
            kafka_broker_properties.put("serializer.class",
                    "com.opensoc.json.serialization.JSONKafkaSerializer");
            kafka_broker_properties.put("key.serializer.class",
                    "kafka.serializer.StringEncoder");

            String output_topic = config.getString("bolt.kafka.topic");

            // KafkaBolt reads both of these from the Storm conf at prepare() time.
            conf.put("kafka.broker.properties", kafka_broker_properties);
            conf.put("topic", output_topic);

            builder.setBolt(name, new KafkaBolt<String, JSONObject>(),
                    config.getInt("bolt.kafka.parallelism.hint"))
                    .shuffleGrouping(messageUpstreamComponent, "message")
                    .setNumTasks(config.getInt("bolt.kafka.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /** Wires the WHOIS (HBase-backed) enrichment bolt onto the message stream. */
    private boolean initializeWhoisEnrichment(String topology_name, String name) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            String[] keys_from_settings = config.getString("bolt.enrichment.whois.fields")
                    .split(",");
            List<String> whois_keys = new ArrayList<String>(Arrays.asList(keys_from_settings));

            EnrichmentAdapter whois_adapter = new WhoisHBaseAdapter(
                    config.getString("bolt.enrichment.whois.hbase.table.name"),
                    config.getString("kafka.zk.list"),
                    config.getString("kafka.zk.port"));

            GenericEnrichmentBolt whois_enrichment = new GenericEnrichmentBolt()
                    .withEnrichmentTag(config.getString("bolt.enrichment.whois.enrichment_tag"))
                    .withOutputFieldName(topology_name)
                    .withAdapter(whois_adapter)
                    .withMaxTimeRetain(config.getInt("bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES"))
                    .withMaxCacheSize(config.getInt("bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM"))
                    .withKeys(whois_keys)
                    .withMetricConfiguration(config);

            builder.setBolt(name, whois_enrichment,
                    config.getInt("bolt.enrichment.whois.parallelism.hint"))
                    .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key"))
                    .setNumTasks(config.getInt("bolt.enrichment.whois.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /** Wires the main Elasticsearch indexing bolt for enriched messages. */
    private boolean initializeIndexingBolt(String name) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            Class<?> loaded_class = Class.forName(config.getString("bolt.indexing.adapter"));
            IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();

            Map<String, String> settings = SettingsLoader.getConfigOptions(
                    (PropertiesConfiguration) config, "optional.settings.bolt.index.search.");
            if (settings != null && settings.size() > 0) {
                adapter.setOptionalSettings(settings);
                System.out.println("[OpenSOC] Index Bolt picket up optional settings:");
                SettingsLoader.printOptionalSettings(settings);
            }

            // dateFormat defaults to hourly if not specified
            String dateFormat = "yyyy.MM.dd.hh";
            if (config.containsKey("bolt.indexing.timestamp")) {
                dateFormat = config.getString("bolt.indexing.timestamp");
            }

            TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
                    .withIndexIP(config.getString("es.ip"))
                    .withIndexPort(config.getInt("es.port"))
                    .withClusterName(config.getString("es.clustername"))
                    .withIndexName(config.getString("bolt.indexing.indexname"))
                    .withIndexTimestamp(dateFormat)
                    .withDocumentName(config.getString("bolt.indexing.documentname"))
                    .withBulk(config.getInt("bolt.indexing.bulk"))
                    .withIndexAdapter(adapter)
                    .withMetricConfiguration(config);

            builder.setBolt(name, indexing_bolt,
                    config.getInt("bolt.indexing.parallelism.hint"))
                    .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key"))
                    .setNumTasks(config.getInt("bolt.indexing.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /** Wires the threat-intel (HBase-backed) enrichment bolt. */
    private boolean initializeThreatEnrichment(String topology_name, String name) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            String[] fields = config.getStringArray("bolt.enrichment.threat.fields");
            List<String> threat_keys = new ArrayList<String>(Arrays.asList(fields));

            // The original chained .withEnrichmentTag(...) twice with the same
            // config value; the duplicate call has been removed.
            GenericEnrichmentBolt threat_enrichment = new GenericEnrichmentBolt()
                    .withEnrichmentTag(config.getString("bolt.enrichment.threat.enrichment_tag"))
                    .withAdapter(new ThreatHbaseAdapter(
                            config.getString("kafka.zk.list"),
                            config.getString("kafka.zk.port"),
                            config.getString("bolt.enrichment.threat.tablename")))
                    .withOutputFieldName(topology_name)
                    .withKeys(threat_keys)
                    .withMaxTimeRetain(config.getInt("bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES"))
                    .withMaxCacheSize(config.getInt("bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM"))
                    .withMetricConfiguration(config);

            builder.setBolt(name, threat_enrichment,
                    config.getInt("bolt.enrichment.threat.parallelism.hint"))
                    .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key"))
                    .setNumTasks(config.getInt("bolt.enrichment.threat.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /** Wires the CIF (Collective Intelligence Framework) enrichment bolt. */
    private boolean initializeCIFEnrichment(String topology_name, String name) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            // CIF lookups run against IP, hostname and e-mail fields combined.
            List<String> cif_keys = new ArrayList<String>();
            String[] ipFields = config.getStringArray("bolt.enrichment.cif.fields.ip");
            cif_keys.addAll(Arrays.asList(ipFields));
            String[] hostFields = config.getStringArray("bolt.enrichment.cif.fields.host");
            cif_keys.addAll(Arrays.asList(hostFields));
            String[] emailFields = config.getStringArray("bolt.enrichment.cif.fields.email");
            cif_keys.addAll(Arrays.asList(emailFields));

            GenericEnrichmentBolt cif_enrichment = new GenericEnrichmentBolt()
                    .withEnrichmentTag(config.getString("bolt.enrichment.cif.enrichment_tag"))
                    .withAdapter(new CIFHbaseAdapter(
                            config.getString("kafka.zk.list"),
                            config.getString("kafka.zk.port"),
                            config.getString("bolt.enrichment.cif.tablename")))
                    .withOutputFieldName(topology_name)
                    .withKeys(cif_keys)
                    .withMaxTimeRetain(config.getInt("bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES"))
                    .withMaxCacheSize(config.getInt("bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM"))
                    .withMetricConfiguration(config);

            builder.setBolt(name, cif_enrichment,
                    config.getInt("bolt.enrichment.cif.parallelism.hint"))
                    .fieldsGrouping(messageUpstreamComponent, "message", new Fields("key"))
                    .setNumTasks(config.getInt("bolt.enrichment.cif.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }

    /**
     * Wires the HDFS output bolt: delimited records, count-based sync,
     * size-based rotation, and a post-rotation move into the finished directory.
     */
    private boolean initializeHDFSBolt(String topology_name, String name) {
        try {
            String messageUpstreamComponent = messageComponents
                    .get(messageComponents.size() - 1);

            System.out.println("[OpenSOC] ------" + name + " is initializing from "
                    + messageUpstreamComponent);

            RecordFormat format = new DelimitedRecordFormat()
                    .withFieldDelimiter(config.getString("bolt.hdfs.field.delimiter").toString())
                    .withFields(new Fields("message"));

            // sync the file system after every x number of tuples
            SyncPolicy syncPolicy = new CountSyncPolicy(
                    Integer.valueOf(config.getString("bolt.hdfs.batch.size").toString()));

            // rotate files when they reach certain size
            FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(
                    Float.valueOf(config.getString("bolt.hdfs.file.rotation.size.in.mb").toString()),
                    Units.MB);

            FileNameFormat fileNameFormat = new DefaultFileNameFormat()
                    .withPath(config.getString("bolt.hdfs.wip.file.path").toString());

            // Post rotate action: move completed files out of the WIP directory.
            MoveFileAction moveFileAction = (new MoveFileAction())
                    .toDestination(config.getString("bolt.hdfs.finished.file.path").toString());

            HdfsBolt hdfsBolt = new HdfsBolt()
                    .withFsUrl(config.getString("bolt.hdfs.file.system.url").toString())
                    .withFileNameFormat(fileNameFormat)
                    .withRecordFormat(format)
                    .withRotationPolicy(rotationPolicy)
                    .withSyncPolicy(syncPolicy)
                    .addRotationAction(moveFileAction);
            if (config.getString("bolt.hdfs.compression.codec.class") != null) {
                hdfsBolt.withCompressionCodec(
                        config.getString("bolt.hdfs.compression.codec.class").toString());
            }

            builder.setBolt(name, hdfsBolt, config.getInt("bolt.hdfs.parallelism.hint"))
                    .shuffleGrouping(messageUpstreamComponent, "message")
                    .setNumTasks(config.getInt("bolt.hdfs.num.tasks"));
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }
        return true;
    }
}
apache-2.0
OSS-TheWeatherCompany/dasein-cloud-core
src/main/java/org/dasein/cloud/VisibleScope.java
1298
/** * Copyright (C) 2009-2014 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud; /** * Defines what level data gets cached at. * <p>Created by AndyLyall: 02/25/14 13:35 PM</p> * @author Andy Lyall * @version 2014.03 initial version * @since 2014.03 */ public enum VisibleScope { /** * Resource is visibile across the entire account */ ACCOUNT_GLOBAL, /** * Resource is visible across one whole region */ ACCOUNT_REGION, /** * Resource is visible across one whole datacenter */ ACCOUNT_DATACENTER }
apache-2.0
chtyim/cdap
cdap-cli/src/main/java/co/cask/cdap/cli/completer/element/HttpMethodPrefixCompleter.java
3087
/*
 * Copyright © 2014 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License
 */

package co.cask.cdap.cli.completer.element;

import co.cask.cdap.api.service.http.ServiceHttpEndpoint;
import co.cask.cdap.cli.CLIConfig;
import co.cask.cdap.cli.ProgramIdArgument;
import co.cask.cdap.cli.util.ArgumentParser;
import co.cask.cdap.client.ServiceClient;
import co.cask.cdap.common.NotFoundException;
import co.cask.cdap.common.UnauthorizedException;
import co.cask.cdap.proto.Id;
import co.cask.common.cli.completers.PrefixCompleter;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;

/**
 * Prefix completer for Http methods.
 *
 * <p>Parses the service id out of a partially typed "call service" command and
 * feeds the service's available HTTP methods into the wrapped
 * {@link EndpointCompleter}.
 */
public class HttpMethodPrefixCompleter extends PrefixCompleter {

  private static final String PROGRAM_ID = "programId";
  private static final String PATTERN = String.format("call service <%s>", PROGRAM_ID);

  private final ServiceClient serviceClient;
  private final EndpointCompleter completer;
  private final CLIConfig cliConfig;

  public HttpMethodPrefixCompleter(final ServiceClient serviceClient, final CLIConfig cliConfig,
                                   String prefix, EndpointCompleter completer) {
    super(prefix, completer);
    this.cliConfig = cliConfig;
    this.serviceClient = serviceClient;
    this.completer = completer;
  }

  /**
   * Populates the endpoint completer with the HTTP methods of the service named
   * in {@code buffer} (if it can be parsed), then delegates to the standard
   * prefix completion.
   */
  @Override
  public int complete(String buffer, int cursor, List<CharSequence> candidates) {
    Map<String, String> arguments = ArgumentParser.getArguments(buffer, PATTERN);
    ProgramIdArgument programIdArgument = ArgumentParser.parseProgramId(arguments.get(PROGRAM_ID));
    if (programIdArgument != null) {
      Id.Service service = Id.Service.from(cliConfig.getCurrentNamespace(),
                                           programIdArgument.getAppId(),
                                           programIdArgument.getProgramId());
      completer.setEndpoints(getMethods(service));
    } else {
      completer.setEndpoints(Collections.<String>emptyList());
    }
    return super.complete(buffer, cursor, candidates);
  }

  /**
   * Returns the distinct HTTP methods exposed by the given service, in
   * first-seen order. Lookup failures yield whatever was collected so far
   * (possibly empty) — completion is best-effort.
   */
  public Collection<String> getMethods(Id.Service serviceId) {
    // LinkedHashSet dedupes in O(1) per endpoint (the original used
    // ArrayList.contains, which is O(n) per check) while preserving order.
    Collection<String> httpMethods = new LinkedHashSet<String>();
    try {
      for (ServiceHttpEndpoint endpoint : serviceClient.getEndpoints(serviceId)) {
        httpMethods.add(endpoint.getMethod());
      }
    } catch (IOException | UnauthorizedException | NotFoundException ignored) {
      // Tab-completion must never fail the CLI; an unreachable or missing
      // service simply produces no method suggestions.
    }
    return httpMethods;
  }
}
apache-2.0
hurricup/intellij-community
plugins/javaFX/src/org/jetbrains/plugins/javaFX/fxml/refs/JavaFxControllerFieldSearcher.java
4457
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.fxml.refs; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.SearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.PsiUtilCore; import com.intellij.psi.xml.XmlAttribute; import com.intellij.psi.xml.XmlAttributeValue; import com.intellij.util.Processor; import com.intellij.util.QueryExecutor; import org.jetbrains.annotations.NotNull; import org.jetbrains.plugins.javaFX.fxml.FxmlConstants; import org.jetbrains.plugins.javaFX.indexing.JavaFxControllerClassIndex; import java.util.List; /** * User: anna * Date: 3/29/13 */ public class JavaFxControllerFieldSearcher implements QueryExecutor<PsiReference, ReferencesSearch.SearchParameters>{ @Override public boolean execute(@NotNull final ReferencesSearch.SearchParameters queryParameters, @NotNull final Processor<PsiReference> consumer) { final PsiElement elementToSearch = queryParameters.getElementToSearch(); if (elementToSearch instanceof PsiField) { final PsiField field = (PsiField)elementToSearch; final PsiClass containingClass = 
ApplicationManager.getApplication().runReadAction(new Computable<PsiClass>() { @Override public PsiClass compute() { return field.getContainingClass(); } }); if (containingClass != null) { final String qualifiedName = ApplicationManager.getApplication().runReadAction(new Computable<String>() { @Override public String compute() { return containingClass.getQualifiedName(); } }); if (qualifiedName != null) { Project project = PsiUtilCore.getProjectInReadAction(containingClass); final List<PsiFile> fxmlWithController = JavaFxControllerClassIndex.findFxmlWithController(project, qualifiedName); for (final PsiFile file : fxmlWithController) { ApplicationManager.getApplication().runReadAction(() -> { final String fieldName = field.getName(); if (fieldName == null) return; final VirtualFile virtualFile = file.getViewProvider().getVirtualFile(); final SearchScope searchScope = queryParameters.getEffectiveSearchScope(); boolean contains = searchScope instanceof LocalSearchScope ? ((LocalSearchScope)searchScope).isInScope(virtualFile) : ((GlobalSearchScope)searchScope).contains(virtualFile); if (contains) { file.accept(new XmlRecursiveElementVisitor() { @Override public void visitXmlAttributeValue(final XmlAttributeValue value) { final PsiReference reference = value.getReference(); if (reference != null) { final PsiElement resolve = reference.resolve(); if (resolve instanceof XmlAttributeValue) { final PsiElement parent = resolve.getParent(); if (parent instanceof XmlAttribute) { final XmlAttribute attribute = (XmlAttribute)parent; if (FxmlConstants.FX_ID.equals(attribute.getName()) && fieldName.equals(attribute.getValue())) { consumer.process(reference); } } } } } }); } }); } } } } return true; } }
apache-2.0
Donnerbart/hazelcast
hazelcast/src/main/java/com/hazelcast/map/impl/querycache/subscriber/NodeQueryCacheConfigurator.java
3996
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map.impl.querycache.subscriber; import com.hazelcast.config.Config; import com.hazelcast.config.MapConfig; import com.hazelcast.config.QueryCacheConfig; import com.hazelcast.internal.config.ConfigUtils; import com.hazelcast.map.impl.querycache.QueryCacheConfigurator; import com.hazelcast.map.impl.querycache.QueryCacheEventService; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; /** * Node side implementation of {@link QueryCacheConfigurator}. 
* * @see QueryCacheConfigurator */ public class NodeQueryCacheConfigurator extends AbstractQueryCacheConfigurator { private final Config config; public NodeQueryCacheConfigurator(Config config, ClassLoader configClassLoader, QueryCacheEventService eventService) { super(configClassLoader, eventService); this.config = config; } @Override public QueryCacheConfig getOrCreateConfiguration(String mapName, String cacheName, String cacheId) { MapConfig mapConfig = config.getMapConfig(mapName); QueryCacheConfig queryCacheConfig = findQueryCacheConfigFromMapConfig(mapConfig, cacheName); if (queryCacheConfig != null) { setPredicateImpl(queryCacheConfig); setEntryListener(mapName, cacheId, queryCacheConfig); return queryCacheConfig; } QueryCacheConfig newConfig = new QueryCacheConfig(cacheName); mapConfig.getQueryCacheConfigs().add(newConfig); return newConfig; } @Override public QueryCacheConfig getOrNull(String mapName, String cacheName, String cacheId) { MapConfig mapConfig = config.getMapConfigOrNull(mapName); if (mapConfig == null) { return null; } QueryCacheConfig queryCacheConfig = findQueryCacheConfigFromMapConfig(mapConfig, cacheName); if (queryCacheConfig != null) { setPredicateImpl(queryCacheConfig); setEntryListener(mapName, cacheId, queryCacheConfig); return queryCacheConfig; } return queryCacheConfig; } private QueryCacheConfig findQueryCacheConfigFromMapConfig(MapConfig mapConfig, String cacheName) { List<QueryCacheConfig> queryCacheConfigs = mapConfig.getQueryCacheConfigs(); Map<String, QueryCacheConfig> allQueryCacheConfigs = new HashMap<String, QueryCacheConfig>(queryCacheConfigs.size()); for (QueryCacheConfig queryCacheConfig : queryCacheConfigs) { allQueryCacheConfigs.put(queryCacheConfig.getName(), queryCacheConfig); } return ConfigUtils.lookupByPattern(config.getConfigPatternMatcher(), allQueryCacheConfigs, cacheName); } @Override public void removeConfiguration(String mapName, String cacheName) { MapConfig mapConfig = config.getMapConfig(mapName); 
List<QueryCacheConfig> queryCacheConfigs = mapConfig.getQueryCacheConfigs(); if (queryCacheConfigs == null || queryCacheConfigs.isEmpty()) { return; } Iterator<QueryCacheConfig> iterator = queryCacheConfigs.iterator(); while (iterator.hasNext()) { QueryCacheConfig config = iterator.next(); if (config.getName().equals(cacheName)) { iterator.remove(); } } } }
apache-2.0
jmandawg/camel
components/camel-twitter/src/main/java/org/apache/camel/component/twitter/springboot/TwitterComponentConfiguration.java
3496
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.twitter.springboot;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * This component integrates with Twitter to send tweets or search for tweets
 * and more.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@ConfigurationProperties(prefix = "camel.component.twitter")
public class TwitterComponentConfiguration {

    /**
     * The access token
     */
    private String accessToken;
    /**
     * The access token secret
     */
    private String accessTokenSecret;
    /**
     * The consumer key
     */
    private String consumerKey;
    /**
     * The consumer secret
     */
    private String consumerSecret;
    /**
     * The http proxy host which can be used for the camel-twitter.
     */
    private String httpProxyHost;
    /**
     * The http proxy user which can be used for the camel-twitter.
     */
    private String httpProxyUser;
    /**
     * The http proxy password which can be used for the camel-twitter.
     */
    private String httpProxyPassword;
    /**
     * The http proxy port which can be used for the camel-twitter.
     */
    // NOTE(review): primitive int defaults to 0 when the property is unset —
    // confirm the component treats 0 as "no proxy port" downstream.
    private int httpProxyPort;

    public String getAccessToken() {
        return accessToken;
    }

    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    public String getAccessTokenSecret() {
        return accessTokenSecret;
    }

    public void setAccessTokenSecret(String accessTokenSecret) {
        this.accessTokenSecret = accessTokenSecret;
    }

    public String getConsumerKey() {
        return consumerKey;
    }

    public void setConsumerKey(String consumerKey) {
        this.consumerKey = consumerKey;
    }

    public String getConsumerSecret() {
        return consumerSecret;
    }

    public void setConsumerSecret(String consumerSecret) {
        this.consumerSecret = consumerSecret;
    }

    public String getHttpProxyHost() {
        return httpProxyHost;
    }

    public void setHttpProxyHost(String httpProxyHost) {
        this.httpProxyHost = httpProxyHost;
    }

    public String getHttpProxyUser() {
        return httpProxyUser;
    }

    public void setHttpProxyUser(String httpProxyUser) {
        this.httpProxyUser = httpProxyUser;
    }

    public String getHttpProxyPassword() {
        return httpProxyPassword;
    }

    public void setHttpProxyPassword(String httpProxyPassword) {
        this.httpProxyPassword = httpProxyPassword;
    }

    public int getHttpProxyPort() {
        return httpProxyPort;
    }

    public void setHttpProxyPort(int httpProxyPort) {
        this.httpProxyPort = httpProxyPort;
    }
}
apache-2.0
gh351135612/presto
presto-hive/src/main/java/com/facebook/presto/hive/HiveWriteUtils.java
42741
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.hive.HdfsEnvironment.HdfsContext; import com.facebook.presto.hive.metastore.Database; import com.facebook.presto.hive.metastore.Partition; import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore; import com.facebook.presto.hive.metastore.Storage; import com.facebook.presto.hive.metastore.Table; import com.facebook.presto.hive.s3.PrestoS3FileSystem; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaNotFoundException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.StandardErrorCode; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.type.BigintType; import com.facebook.presto.spi.type.BooleanType; import com.facebook.presto.spi.type.CharType; import com.facebook.presto.spi.type.DateType; import com.facebook.presto.spi.type.DecimalType; import com.facebook.presto.spi.type.Decimals; import com.facebook.presto.spi.type.DoubleType; import com.facebook.presto.spi.type.IntegerType; import com.facebook.presto.spi.type.RealType; import com.facebook.presto.spi.type.SmallintType; import com.facebook.presto.spi.type.TimestampType; import com.facebook.presto.spi.type.TinyintType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.VarbinaryType; import com.facebook.presto.spi.type.VarcharType; import com.google.common.base.Throwables; import 
com.google.common.collect.ImmutableList; import com.google.common.primitives.Shorts; import com.google.common.primitives.SignedBytes; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FilterFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.ProtectMode; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.Serializer; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.ByteWritable; import org.apache.hadoop.io.BytesWritable; import 
org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reporter; import org.joda.time.DateTimeZone; import java.io.IOException; import java.math.BigInteger; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.concurrent.TimeUnit; import static com.facebook.presto.hive.HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR; import static com.facebook.presto.hive.HiveUtil.checkCondition; import static com.facebook.presto.hive.HiveUtil.isArrayType; import static com.facebook.presto.hive.HiveUtil.isMapType; import static com.facebook.presto.hive.HiveUtil.isRowType; import static com.facebook.presto.hive.metastore.MetastoreUtil.getProtectMode; import static com.facebook.presto.hive.metastore.MetastoreUtil.verifyOnline; import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED; import static com.facebook.presto.spi.type.Chars.isCharType; import static com.google.common.base.Strings.padEnd; import static java.lang.Float.intBitsToFloat; import static java.lang.Math.toIntExact; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.UUID.randomUUID; import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.COMPRESSRESULT; import static org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector; import static 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableByteObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableIntObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo;
import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getVarcharTypeInfo;
import static org.joda.time.DateTimeZone.UTC;

/**
 * Static helpers for the Hive connector write path: creating Hive record
 * writers and serializers, mapping Presto {@code Type}s to Hive
 * {@code ObjectInspector}s, converting Presto {@code Block} values into the
 * objects Hive expects, and validating/locating write targets on the
 * file system.
 */
public final class HiveWriteUtils
{
    // rwxrwxrwx: staging/target directories are created world-accessible
    @SuppressWarnings("OctalInteger")
    private static final FsPermission ALL_PERMISSIONS = new FsPermission((short) 0777);

    private HiveWriteUtils()
    {
    }

    /**
     * Instantiates the named {@code HiveOutputFormat} reflectively and returns a
     * record writer for {@code target}, honoring Hive's compress-result setting.
     *
     * @throws PrestoException with {@code HIVE_WRITER_DATA_ERROR} if the output
     *         format cannot be instantiated or the writer cannot be created
     */
    public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName)
    {
        try {
            boolean compress = HiveConf.getBoolVar(conf, COMPRESSRESULT);
            Object writer = Class.forName(outputFormatName).getConstructor().newInstance();
            return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL);
        }
        catch (IOException | ReflectiveOperationException e) {
            throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
        }
    }

    /**
     * Reflectively creates and initializes the named Hive serializer.
     * The {@code Serializer} interface is deprecated in Hive, hence the suppression.
     */
    @SuppressWarnings("deprecation")
    public static Serializer initializeSerializer(Configuration conf, Properties properties, String serializerName)
    {
        try {
            Serializer result = (Serializer) Class.forName(serializerName).getConstructor().newInstance();
            result.initialize(conf, properties);
            return result;
        }
        catch (SerDeException | ReflectiveOperationException e) {
            // Throwables.propagate is deprecated; this is the recommended replacement
            // with identical behavior (rethrow unchecked, otherwise wrap)
            Throwables.throwIfUnchecked(e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the Hive object inspector for standard Java values of the given Presto type.
     * NOTE(review): varchar and char return writable inspectors here, unlike the other
     * primitives — confirm this asymmetry is intended before changing it.
     *
     * @throws IllegalArgumentException for unsupported types
     */
    public static ObjectInspector getJavaObjectInspector(Type type)
    {
        if (type.equals(BooleanType.BOOLEAN)) {
            return javaBooleanObjectInspector;
        }
        else if (type.equals(BigintType.BIGINT)) {
            return javaLongObjectInspector;
        }
        else if (type.equals(IntegerType.INTEGER)) {
            return javaIntObjectInspector;
        }
        else if (type.equals(SmallintType.SMALLINT)) {
            return javaShortObjectInspector;
        }
        else if (type.equals(TinyintType.TINYINT)) {
            return javaByteObjectInspector;
        }
        else if (type.equals(RealType.REAL)) {
            return javaFloatObjectInspector;
        }
        else if (type.equals(DoubleType.DOUBLE)) {
            return javaDoubleObjectInspector;
        }
        else if (type instanceof VarcharType) {
            return writableStringObjectInspector;
        }
        else if (type instanceof CharType) {
            return writableHiveCharObjectInspector;
        }
        else if (type.equals(VarbinaryType.VARBINARY)) {
            return javaByteArrayObjectInspector;
        }
        else if (type.equals(DateType.DATE)) {
            return javaDateObjectInspector;
        }
        else if (type.equals(TimestampType.TIMESTAMP)) {
            return javaTimestampObjectInspector;
        }
        else if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return getPrimitiveJavaObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
        }
        else if (isArrayType(type)) {
            return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0)));
        }
        else if (isMapType(type)) {
            ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0));
            ObjectInspector valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1));
            return ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector);
        }
        else if (isRowType(type)) {
            return ObjectInspectorFactory.getStandardStructObjectInspector(
                    type.getTypeSignature().getParameters().stream()
                            .map(parameter -> parameter.getNamedTypeSignature().getName())
                            .collect(toList()),
                    type.getTypeParameters().stream()
                            .map(HiveWriteUtils::getJavaObjectInspector)
                            .collect(toList()));
        }
        throw new IllegalArgumentException("unsupported type: " + type);
    }

    /**
     * Converts the value at {@code position} of {@code block} to the Java object
     * matching the inspectors from {@link #getJavaObjectInspector}. Returns
     * {@code null} for SQL NULL. Maps and arrays are returned as unmodifiable views.
     *
     * @throws PrestoException with {@code NOT_SUPPORTED} for unsupported types
     */
    public static Object getField(Type type, Block block, int position)
    {
        if (block.isNull(position)) {
            return null;
        }
        if (BooleanType.BOOLEAN.equals(type)) {
            return type.getBoolean(block, position);
        }
        if (BigintType.BIGINT.equals(type)) {
            return type.getLong(block, position);
        }
        if (IntegerType.INTEGER.equals(type)) {
            return (int) type.getLong(block, position);
        }
        if (SmallintType.SMALLINT.equals(type)) {
            return (short) type.getLong(block, position);
        }
        if (TinyintType.TINYINT.equals(type)) {
            return (byte) type.getLong(block, position);
        }
        if (RealType.REAL.equals(type)) {
            // REAL is stored as the int bit pattern of the float
            return intBitsToFloat((int) type.getLong(block, position));
        }
        if (DoubleType.DOUBLE.equals(type)) {
            return type.getDouble(block, position);
        }
        if (type instanceof VarcharType) {
            return new Text(type.getSlice(block, position).getBytes());
        }
        if (type instanceof CharType) {
            // CHAR values are space-padded to the declared length
            CharType charType = (CharType) type;
            return new Text(padEnd(type.getSlice(block, position).toStringUtf8(), charType.getLength(), ' '));
        }
        if (VarbinaryType.VARBINARY.equals(type)) {
            return type.getSlice(block, position).getBytes();
        }
        if (DateType.DATE.equals(type)) {
            // DATE is stored as days since epoch; shift into the JVM default zone
            // because java.sql.Date is zone-local
            long days = type.getLong(block, position);
            return new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), TimeUnit.DAYS.toMillis(days)));
        }
        if (TimestampType.TIMESTAMP.equals(type)) {
            long millisUtc = type.getLong(block, position);
            return new Timestamp(millisUtc);
        }
        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return getHiveDecimal(decimalType, block, position);
        }
        if (isArrayType(type)) {
            Type elementType = type.getTypeParameters().get(0);
            Block arrayBlock = block.getObject(position, Block.class);
            List<Object> list = new ArrayList<>(arrayBlock.getPositionCount());
            for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
                Object element = getField(elementType, arrayBlock, i);
                list.add(element);
            }
            return Collections.unmodifiableList(list);
        }
        if (isMapType(type)) {
            Type keyType = type.getTypeParameters().get(0);
            Type valueType = type.getTypeParameters().get(1);
            // map blocks interleave keys and values: even positions are keys
            Block mapBlock = block.getObject(position, Block.class);
            Map<Object, Object> map = new HashMap<>();
            for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
                Object key = getField(keyType, mapBlock, i);
                Object value = getField(valueType, mapBlock, i + 1);
                map.put(key, value);
            }
            return Collections.unmodifiableMap(map);
        }
        if (isRowType(type)) {
            Block rowBlock = block.getObject(position, Block.class);
            List<Type> fieldTypes = type.getTypeParameters();
            checkCondition(fieldTypes.size() == rowBlock.getPositionCount(), StandardErrorCode.GENERIC_INTERNAL_ERROR, "Expected row value field count does not match type field count");
            List<Object> row = new ArrayList<>(rowBlock.getPositionCount());
            for (int i = 0; i < rowBlock.getPositionCount(); i++) {
                Object element = getField(fieldTypes.get(i), rowBlock, i);
                row.add(element);
            }
            return Collections.unmodifiableList(row);
        }
        throw new PrestoException(NOT_SUPPORTED, "unsupported type: " + type);
    }

    /**
     * Verifies that the table accepts writes (managed unless non-managed writes
     * are enabled, online, not read-only, not sorted, not skewed).
     */
    public static void checkTableIsWritable(Table table, boolean writesToNonManagedTablesEnabled)
    {
        if (!writesToNonManagedTablesEnabled && !table.getTableType().equals(MANAGED_TABLE.toString())) {
            throw new PrestoException(NOT_SUPPORTED, "Cannot write to non-managed Hive table");
        }
        checkWritable(
                new SchemaTableName(table.getDatabaseName(), table.getTableName()),
                Optional.empty(),
                getProtectMode(table),
                table.getParameters(),
                table.getStorage());
    }

    /** Verifies that the given partition accepts writes (same checks as for tables). */
    public static void checkPartitionIsWritable(String partitionName, Partition partition)
    {
        checkWritable(
                new SchemaTableName(partition.getDatabaseName(), partition.getTableName()),
                Optional.of(partitionName),
                getProtectMode(partition),
                partition.getParameters(),
                partition.getStorage());
    }

    /**
     * Shared write-target validation for tables and partitions.
     *
     * @throws HiveReadOnlyException if the target is protected read-only
     * @throws PrestoException with {@code NOT_SUPPORTED} for sorted or skewed storage
     */
    private static void checkWritable(
            SchemaTableName tableName,
            Optional<String> partitionName,
            ProtectMode protectMode,
            Map<String, String> parameters,
            Storage storage)
    {
        String tablePartitionDescription = "Table '" + tableName + "'";
        if (partitionName.isPresent()) {
            tablePartitionDescription += " partition '" + partitionName.get() + "'";
        }

        // verify online
        verifyOnline(tableName, partitionName, protectMode, parameters);

        // verify not read only
        if (protectMode.readOnly) {
            throw new HiveReadOnlyException(tableName, partitionName);
        }

        // verify sorting
        if (storage.isSorted()) {
            throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed sorted tables is not supported. %s", tablePartitionDescription));
        }

        // verify skew info
        if (storage.isSkewed()) {
            throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed tables with skew is not supported. %s", tablePartitionDescription));
        }
    }

    /**
     * Resolves the default location for a new table: {@code <database location>/<tableName>}.
     * Existence/directory checks are skipped on S3, where directories are synthetic.
     *
     * @throws PrestoException with {@code HIVE_DATABASE_LOCATION_ERROR} if the
     *         database location is unset, missing, or not a directory
     */
    public static Path getTableDefaultLocation(HdfsContext context, SemiTransactionalHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, String schemaName, String tableName)
    {
        Optional<String> location = getDatabase(metastore, schemaName).getLocation();
        if (!location.isPresent() || location.get().isEmpty()) {
            throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not set", schemaName));
        }

        Path databasePath = new Path(location.get());
        if (!isS3FileSystem(context, hdfsEnvironment, databasePath)) {
            if (!pathExists(context, hdfsEnvironment, databasePath)) {
                throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location does not exist: %s", schemaName, databasePath));
            }
            if (!isDirectory(context, hdfsEnvironment, databasePath)) {
                throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not a directory: %s", schemaName, databasePath));
            }
        }

        return new Path(databasePath, tableName);
    }

    /** Looks up the database in the metastore or throws {@link SchemaNotFoundException}. */
    private static Database getDatabase(SemiTransactionalHiveMetastore metastore, String database)
    {
        return metastore.getDatabase(database).orElseThrow(() -> new SchemaNotFoundException(database));
    }

    /** Returns whether {@code path} exists, wrapping I/O failures in {@code HIVE_FILESYSTEM_ERROR}. */
    public static boolean pathExists(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            return hdfsEnvironment.getFileSystem(context, path).exists(path);
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
        }
    }

    /** Returns whether the file system backing {@code path} is Presto's S3 file system. */
    public static boolean isS3FileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path)) instanceof PrestoS3FileSystem;
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
        }
    }

    /** Returns whether {@code path} is backed by Hadoop's ViewFileSystem (checked by class name). */
    public static boolean isViewFileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            // Hadoop 1.x does not have the ViewFileSystem class
            return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path))
                    .getClass().getName().equals("org.apache.hadoop.fs.viewfs.ViewFileSystem");
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
        }
    }

    /** Unwraps nested {@link FilterFileSystem} layers to reach the underlying implementation. */
    private static FileSystem getRawFileSystem(FileSystem fileSystem)
    {
        if (fileSystem instanceof FilterFileSystem) {
            return getRawFileSystem(((FilterFileSystem) fileSystem).getRawFileSystem());
        }
        return fileSystem;
    }

    /** Returns whether {@code path} is a directory, wrapping I/O failures. */
    private static boolean isDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            return hdfsEnvironment.getFileSystem(context, path).isDirectory(path);
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
        }
    }

    /**
     * Creates (and returns) a unique temporary staging directory on the same
     * file system as {@code targetPath}.
     */
    public static Path createTemporaryPath(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path targetPath)
    {
        // use a per-user temporary directory to avoid permission problems
        String temporaryPrefix = "/tmp/presto-" + context.getIdentity().getUser();

        // use relative temporary directory on ViewFS
        if (isViewFileSystem(context, hdfsEnvironment, targetPath)) {
            temporaryPrefix = ".hive-staging";
        }

        // create a temporary directory on the same filesystem
        Path temporaryRoot = new Path(targetPath, temporaryPrefix);
        Path temporaryPath = new Path(temporaryRoot, randomUUID().toString());

        createDirectory(context, hdfsEnvironment, temporaryPath);

        return temporaryPath;
    }

    /**
     * Creates {@code path} (including parents) with full permissions, then
     * re-applies the permissions explicitly because the default umask
     * overrides them on creation.
     */
    public static void createDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
    {
        try {
            if (!hdfsEnvironment.getFileSystem(context, path).mkdirs(path, ALL_PERMISSIONS)) {
                throw new IOException("mkdirs returned false");
            }
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to create directory: " + path, e);
        }

        // explicitly set permission since the default umask overrides it on creation
        try {
            hdfsEnvironment.getFileSystem(context, path).setPermission(path, ALL_PERMISSIONS);
        }
        catch (IOException e) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to set permission on directory: " + path, e);
        }
    }

    /** Returns whether the connector can write values of the given Hive type. */
    public static boolean isWritableType(HiveType hiveType)
    {
        return isWritableType(hiveType.getTypeInfo());
    }

    // Recursively checks primitives and the element/key/value/field types of containers.
    private static boolean isWritableType(TypeInfo typeInfo)
    {
        switch (typeInfo.getCategory()) {
            case PRIMITIVE:
                PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
                return isWritablePrimitiveType(primitiveCategory);
            case MAP:
                MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                return isWritableType(mapTypeInfo.getMapKeyTypeInfo()) && isWritableType(mapTypeInfo.getMapValueTypeInfo());
            case LIST:
                ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
                return isWritableType(listTypeInfo.getListElementTypeInfo());
            case STRUCT:
                StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                return structTypeInfo.getAllStructFieldTypeInfos().stream().allMatch(HiveWriteUtils::isWritableType);
        }
        return false;
    }

    // Whitelist of Hive primitive categories the write path supports.
    private static boolean isWritablePrimitiveType(PrimitiveCategory primitiveCategory)
    {
        switch (primitiveCategory) {
            case BOOLEAN:
            case LONG:
            case INT:
            case SHORT:
            case BYTE:
            case FLOAT:
            case DOUBLE:
            case STRING:
            case DATE:
            case TIMESTAMP:
            case BINARY:
            case DECIMAL:
            case VARCHAR:
            case CHAR:
                return true;
        }
        return false;
    }

    /** Returns one row-column inspector per type, in order. */
    public static List<ObjectInspector> getRowColumnInspectors(List<Type> types)
    {
        return types.stream()
                .map(HiveWriteUtils::getRowColumnInspector)
                .collect(toList());
    }

    /**
     * Returns the writable object inspector used for top-level row columns of the
     * given Presto type; containers reuse {@link #getJavaObjectInspector}.
     *
     * @throws IllegalArgumentException for unsupported types
     */
    public static ObjectInspector getRowColumnInspector(Type type)
    {
        if (type.equals(BooleanType.BOOLEAN)) {
            return writableBooleanObjectInspector;
        }

        if (type.equals(BigintType.BIGINT)) {
            return writableLongObjectInspector;
        }

        if (type.equals(IntegerType.INTEGER)) {
            return writableIntObjectInspector;
        }

        if (type.equals(SmallintType.SMALLINT)) {
            return writableShortObjectInspector;
        }

        if (type.equals(TinyintType.TINYINT)) {
            return writableByteObjectInspector;
        }

        if (type.equals(RealType.REAL)) {
            return writableFloatObjectInspector;
        }

        if (type.equals(DoubleType.DOUBLE)) {
            return writableDoubleObjectInspector;
        }

        if (type instanceof VarcharType) {
            VarcharType varcharType = (VarcharType) type;
            int varcharLength = varcharType.getLength();
            // VARCHAR columns with the length less than or equal to 65535 are supported natively by Hive
            if (varcharLength <= HiveVarchar.MAX_VARCHAR_LENGTH) {
                return getPrimitiveWritableObjectInspector(getVarcharTypeInfo(varcharLength));
            }
            // Unbounded VARCHAR is not supported by Hive.
            // Values for such columns must be stored as STRING in Hive
            else if (varcharLength == VarcharType.UNBOUNDED_LENGTH) {
                return writableStringObjectInspector;
            }
        }

        if (isCharType(type)) {
            CharType charType = (CharType) type;
            int charLength = charType.getLength();
            return getPrimitiveWritableObjectInspector(getCharTypeInfo(charLength));
        }

        if (type.equals(VarbinaryType.VARBINARY)) {
            return writableBinaryObjectInspector;
        }

        if (type.equals(DateType.DATE)) {
            return writableDateObjectInspector;
        }

        if (type.equals(TimestampType.TIMESTAMP)) {
            return writableTimestampObjectInspector;
        }

        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return getPrimitiveWritableObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
        }

        if (isArrayType(type) || isMapType(type) || isRowType(type)) {
            return getJavaObjectInspector(type);
        }

        throw new IllegalArgumentException("unsupported type: " + type);
    }

    /**
     * Creates the {@link FieldSetter} that copies values of {@code type} from a
     * block position into the given struct field.
     *
     * @throws IllegalArgumentException for unsupported types
     */
    public static FieldSetter createFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
    {
        if (type.equals(BooleanType.BOOLEAN)) {
            return new BooleanFieldSetter(rowInspector, row, field);
        }

        if (type.equals(BigintType.BIGINT)) {
            return new BigintFieldSetter(rowInspector, row, field);
        }

        if (type.equals(IntegerType.INTEGER)) {
            return new IntFieldSetter(rowInspector, row, field);
        }

        if (type.equals(SmallintType.SMALLINT)) {
            return new SmallintFieldSetter(rowInspector, row, field);
        }

        if (type.equals(TinyintType.TINYINT)) {
            return new TinyintFieldSetter(rowInspector, row, field);
        }

        if (type.equals(RealType.REAL)) {
            return new FloatFieldSetter(rowInspector, row, field);
        }

        if (type.equals(DoubleType.DOUBLE)) {
            return new DoubleFieldSetter(rowInspector, row, field);
        }

        if (type instanceof VarcharType) {
            return new VarcharFieldSetter(rowInspector, row, field, type);
        }

        if (type instanceof CharType) {
            return new CharFieldSetter(rowInspector, row, field, type);
        }

        if (type.equals(VarbinaryType.VARBINARY)) {
            return new BinaryFieldSetter(rowInspector, row, field);
        }

        if (type.equals(DateType.DATE)) {
            return new DateFieldSetter(rowInspector, row, field);
        }

        if (type.equals(TimestampType.TIMESTAMP)) {
            return new TimestampFieldSetter(rowInspector, row, field);
        }

        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            return new DecimalFieldSetter(rowInspector, row, field, decimalType);
        }

        if (isArrayType(type)) {
            return new ArrayFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0));
        }

        if (isMapType(type)) {
            return new MapFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0), type.getTypeParameters().get(1));
        }

        if (isRowType(type)) {
            return new RowFieldSetter(rowInspector, row, field, type.getTypeParameters());
        }

        throw new IllegalArgumentException("unsupported type: " + type);
    }

    /**
     * Copies one value from a block position into a struct field. Implementations
     * reuse a single mutable writable per setter to avoid per-row allocation.
     */
    public abstract static class FieldSetter
    {
        protected final SettableStructObjectInspector rowInspector;
        protected final Object row;
        protected final StructField field;

        protected FieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            this.rowInspector = requireNonNull(rowInspector, "rowInspector is null");
            this.row = requireNonNull(row, "row is null");
            this.field = requireNonNull(field, "field is null");
        }

        public abstract void setField(Block block, int position);
    }

    private static class BooleanFieldSetter
            extends FieldSetter
    {
        private final BooleanWritable value = new BooleanWritable();

        public BooleanFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(BooleanType.BOOLEAN.getBoolean(block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    // renamed from BigintFieldBuilder for consistency with the other *FieldSetter classes
    private static class BigintFieldSetter
            extends FieldSetter
    {
        private final LongWritable value = new LongWritable();

        public BigintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(BigintType.BIGINT.getLong(block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class IntFieldSetter
            extends FieldSetter
    {
        private final IntWritable value = new IntWritable();

        public IntFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(toIntExact(IntegerType.INTEGER.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class SmallintFieldSetter
            extends FieldSetter
    {
        private final ShortWritable value = new ShortWritable();

        public SmallintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(Shorts.checkedCast(SmallintType.SMALLINT.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class TinyintFieldSetter
            extends FieldSetter
    {
        private final ByteWritable value = new ByteWritable();

        public TinyintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(SignedBytes.checkedCast(TinyintType.TINYINT.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class DoubleFieldSetter
            extends FieldSetter
    {
        private final DoubleWritable value = new DoubleWritable();

        public DoubleFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(DoubleType.DOUBLE.getDouble(block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class FloatFieldSetter
            extends FieldSetter
    {
        private final FloatWritable value = new FloatWritable();

        public FloatFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            // REAL is stored as the int bit pattern of the float
            value.set(intBitsToFloat((int) RealType.REAL.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class VarcharFieldSetter
            extends FieldSetter
    {
        private final Text value = new Text();
        private final Type type;

        public VarcharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
        {
            super(rowInspector, row, field);
            this.type = type;
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(type.getSlice(block, position).getBytes());
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class CharFieldSetter
            extends FieldSetter
    {
        private final Text value = new Text();
        private final Type type;

        public CharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
        {
            super(rowInspector, row, field);
            this.type = type;
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(type.getSlice(block, position).getBytes());
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class BinaryFieldSetter
            extends FieldSetter
    {
        private final BytesWritable value = new BytesWritable();

        public BinaryFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            byte[] bytes = VarbinaryType.VARBINARY.getSlice(block, position).getBytes();
            value.set(bytes, 0, bytes.length);
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class DateFieldSetter
            extends FieldSetter
    {
        private final DateWritable value = new DateWritable();

        public DateFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            // DATE is stored as days since epoch
            value.set(toIntExact(DateType.DATE.getLong(block, position)));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class TimestampFieldSetter
            extends FieldSetter
    {
        private final TimestampWritable value = new TimestampWritable();

        public TimestampFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
        {
            super(rowInspector, row, field);
        }

        @Override
        public void setField(Block block, int position)
        {
            long millisUtc = TimestampType.TIMESTAMP.getLong(block, position);
            value.setTime(millisUtc);
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    private static class DecimalFieldSetter
            extends FieldSetter
    {
        private final HiveDecimalWritable value = new HiveDecimalWritable();
        private final DecimalType decimalType;

        public DecimalFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, DecimalType decimalType)
        {
            super(rowInspector, row, field);
            this.decimalType = decimalType;
        }

        @Override
        public void setField(Block block, int position)
        {
            value.set(getHiveDecimal(decimalType, block, position));
            rowInspector.setStructFieldData(row, field, value);
        }
    }

    // Short decimals are stored as an unscaled long; long decimals as an encoded slice.
    private static HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position)
    {
        BigInteger unscaledValue;
        if (decimalType.isShort()) {
            unscaledValue = BigInteger.valueOf(decimalType.getLong(block, position));
        }
        else {
            unscaledValue = Decimals.decodeUnscaledValue(decimalType.getSlice(block, position));
        }
        return HiveDecimal.create(unscaledValue, decimalType.getScale());
    }

    private static class ArrayFieldSetter
            extends FieldSetter
    {
        private final Type elementType;

        public ArrayFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type elementType)
        {
            super(rowInspector, row, field);
            this.elementType = requireNonNull(elementType, "elementType is null");
        }

        @Override
        public void setField(Block block, int position)
        {
            Block arrayBlock = block.getObject(position, Block.class);

            List<Object> list = new ArrayList<>(arrayBlock.getPositionCount());
            for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
                Object element = getField(elementType, arrayBlock, i);
                list.add(element);
            }

            rowInspector.setStructFieldData(row, field, list);
        }
    }

    private static class MapFieldSetter
            extends FieldSetter
    {
        private final Type keyType;
        private final Type valueType;

        public MapFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type keyType, Type valueType)
        {
            super(rowInspector, row, field);
            this.keyType = requireNonNull(keyType, "keyType is null");
            this.valueType = requireNonNull(valueType, "valueType is null");
        }

        @Override
        public void setField(Block block, int position)
        {
            // map blocks interleave keys and values: even positions are keys
            Block mapBlock = block.getObject(position, Block.class);
            Map<Object, Object> map = new HashMap<>(mapBlock.getPositionCount() * 2);
            for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
                Object key = getField(keyType, mapBlock, i);
                Object value = getField(valueType, mapBlock, i + 1);
                map.put(key, value);
            }

            rowInspector.setStructFieldData(row, field, map);
        }
    }

    private static class RowFieldSetter
            extends FieldSetter
    {
        private final List<Type> fieldTypes;

        public RowFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, List<Type> fieldTypes)
        {
            super(rowInspector, row, field);
            this.fieldTypes = ImmutableList.copyOf(fieldTypes);
        }

        @Override
        public void setField(Block block, int position)
        {
            Block rowBlock = block.getObject(position, Block.class);

            // TODO reuse row object and use FieldSetters, like we do at the top level
            // Ideally, we'd use the same recursive structure starting from the top, but
            // this requires modeling row types in the same way we model table rows
            // (multiple blocks vs all fields packed in a single block)
            List<Object> value = new ArrayList<>(fieldTypes.size());
            for (int i = 0; i < fieldTypes.size(); i++) {
                Object element = getField(fieldTypes.get(i), rowBlock, i);
                value.add(element);
            }

            rowInspector.setStructFieldData(row, field, value);
        }
    }
}
apache-2.0
dahlstrom-g/intellij-community
plugins/InspectionGadgets/src/com/siyeh/ig/logging/LoggerInitializedWithForeignClassInspection.java
9735
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.siyeh.ig.logging; import com.intellij.codeInspection.CommonQuickFixBundle; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.codeInspection.ui.ListTable; import com.intellij.codeInspection.ui.ListWrappingTableModel; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.InvalidDataException; import com.intellij.openapi.util.WriteExternalException; import com.intellij.psi.*; import com.intellij.psi.util.PsiUtil; import com.intellij.util.xmlb.Accessor; import com.intellij.util.xmlb.SerializationFilterBase; import com.intellij.util.xmlb.XmlSerializer; import com.siyeh.InspectionGadgetsBundle; import com.siyeh.ig.BaseInspection; import com.siyeh.ig.BaseInspectionVisitor; import com.siyeh.ig.InspectionGadgetsFix; import com.siyeh.ig.PsiReplacementUtil; import com.siyeh.ig.psiutils.ClassUtils; import com.siyeh.ig.psiutils.CommentTracker; import com.siyeh.ig.ui.UiUtils; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class LoggerInitializedWithForeignClassInspection extends BaseInspection { @NonNls private static final String DEFAULT_FACTORY_CLASS_NAMES = // Log4J 1 "org.apache.log4j.Logger," + // SLF4J "org.slf4j.LoggerFactory," + // Apache Commons Logging "org.apache.commons.logging.LogFactory," + // Java Util Logging "java.util.logging.Logger," + // Log4J 2 "org.apache.logging.log4j.LogManager"; @NonNls private static final String DEFAULT_FACTORY_METHOD_NAMES = //Log4J 1 "getLogger," + // SLF4J "getLogger," + // Apache Commons Logging "getLog," + // Java Util Logging "getLogger," + // Log4J 2 "getLogger"; protected final List<String> loggerFactoryClassNames = new 
ArrayList<>(); protected final List<String> loggerFactoryMethodNames = new ArrayList<>(); @SuppressWarnings("PublicField") public String loggerClassName = DEFAULT_FACTORY_CLASS_NAMES; @SuppressWarnings("PublicField") public @NonNls String loggerFactoryMethodName = DEFAULT_FACTORY_METHOD_NAMES; { parseString(loggerClassName, loggerFactoryClassNames); parseString(loggerFactoryMethodName, loggerFactoryMethodNames); } @Override public JComponent createOptionsPanel() { final ListTable table = new ListTable( new ListWrappingTableModel(Arrays.asList(loggerFactoryClassNames, loggerFactoryMethodNames), InspectionGadgetsBundle.message("logger.factory.class.name"), InspectionGadgetsBundle.message("logger.factory.method.name"))); final String title = InspectionGadgetsBundle.message("logger.initialized.with.foreign.options.title"); return UiUtils.createAddRemoveTreeClassChooserPanel(table, title); } @Override @NotNull protected String buildErrorString(Object... infos) { return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.problem.descriptor"); } @Override @Nullable protected InspectionGadgetsFix buildFix(Object... 
infos) { return new LoggerInitializedWithForeignClassFix((String)infos[0]); } @Override public BaseInspectionVisitor buildVisitor() { return new LoggerInitializedWithForeignClassVisitor(); } @Override public void readSettings(@NotNull Element element) throws InvalidDataException { super.readSettings(element); parseString(loggerClassName, loggerFactoryClassNames); parseString(loggerFactoryMethodName, loggerFactoryMethodNames); if (loggerFactoryClassNames.size() != loggerFactoryMethodNames.size() || loggerFactoryClassNames.isEmpty()) { parseString(DEFAULT_FACTORY_CLASS_NAMES, loggerFactoryClassNames); parseString(DEFAULT_FACTORY_METHOD_NAMES, loggerFactoryMethodNames); } } @Override public void writeSettings(@NotNull Element element) throws WriteExternalException { loggerClassName = formatString(loggerFactoryClassNames); loggerFactoryMethodName = formatString(loggerFactoryMethodNames); if (loggerFactoryMethodName.equals(DEFAULT_FACTORY_METHOD_NAMES) && loggerClassName.equals(DEFAULT_FACTORY_CLASS_NAMES)) { // to prevent changing inspection profile with new default, which is mistakenly always written because of bug in serialization below. 
loggerFactoryMethodName = "getLogger," + "getLogger," + "getLog," + "getLogger"; // these broken settings are restored correctly in readSettings() } XmlSerializer.serializeInto(this, element, new SerializationFilterBase() { @Override protected boolean accepts(@NotNull Accessor accessor, @NotNull Object bean, @Nullable Object beanValue) { final @NonNls String factoryName = accessor.getName(); if ("loggerClassName".equals(factoryName) && DEFAULT_FACTORY_CLASS_NAMES.equals(beanValue)) { return false; } if ("loggerFactoryMethodNames".equals(factoryName) && DEFAULT_FACTORY_METHOD_NAMES.equals(beanValue)) { return false; } return true; } }); } private static final class LoggerInitializedWithForeignClassFix extends InspectionGadgetsFix { private final String newClassName; private LoggerInitializedWithForeignClassFix(String newClassName) { this.newClassName = newClassName; } @Override @NotNull public String getName() { return CommonQuickFixBundle.message("fix.replace.with.x", newClassName+".class"); } @NotNull @Override public String getFamilyName() { return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.fix.family.name"); } @Override protected void doFix(Project project, ProblemDescriptor descriptor) { final PsiElement element = descriptor.getPsiElement(); if (!(element instanceof PsiClassObjectAccessExpression)) { return; } final PsiClassObjectAccessExpression classObjectAccessExpression = (PsiClassObjectAccessExpression)element; PsiReplacementUtil.replaceExpression(classObjectAccessExpression, newClassName + ".class", new CommentTracker()); } } private class LoggerInitializedWithForeignClassVisitor extends BaseInspectionVisitor { @Override public void visitClassObjectAccessExpression(PsiClassObjectAccessExpression expression) { super.visitClassObjectAccessExpression(expression); PsiElement parent = expression.getParent(); if (parent instanceof PsiReferenceExpression) { final PsiReferenceExpression referenceExpression = 
(PsiReferenceExpression)parent; if (!expression.equals(referenceExpression.getQualifierExpression())) { return; } @NonNls final String name = referenceExpression.getReferenceName(); if (!"getName".equals(name)) { return; } final PsiElement grandParent = referenceExpression.getParent(); if (!(grandParent instanceof PsiMethodCallExpression)) { return; } final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent; final PsiExpressionList list = methodCallExpression.getArgumentList(); if (!list.isEmpty()) { return; } parent = methodCallExpression.getParent(); } if (!(parent instanceof PsiExpressionList)) { return; } final PsiElement grandParent = parent.getParent(); if (!(grandParent instanceof PsiMethodCallExpression)) { return; } final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent; final PsiExpressionList argumentList = methodCallExpression.getArgumentList(); final PsiExpression[] expressions = argumentList.getExpressions(); if (expressions.length != 1) { return; } PsiClass containingClass = ClassUtils.getContainingClass(expression); while (containingClass instanceof PsiAnonymousClass) { containingClass = ClassUtils.getContainingClass(containingClass); } if (containingClass == null) { return; } final String containingClassName = containingClass.getName(); if (containingClassName == null) { return; } final PsiMethod method = methodCallExpression.resolveMethod(); if (method == null) { return; } final PsiClass aClass = method.getContainingClass(); if (aClass == null) { return; } final String className = aClass.getQualifiedName(); final int index = loggerFactoryClassNames.indexOf(className); if (index < 0) { return; } final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression(); final String referenceName = methodExpression.getReferenceName(); final String loggerFactoryMethodName = loggerFactoryMethodNames.get(index); if (!loggerFactoryMethodName.equals(referenceName)) { 
return; } final PsiTypeElement operand = expression.getOperand(); final PsiClass initializerClass = PsiUtil.resolveClassInClassTypeOnly(operand.getType()); if (initializerClass == null) { return; } if (containingClass.equals(initializerClass)) { return; } registerError(expression, containingClassName); } } }
apache-2.0
pluto-build/java-util
src/test/java/com/cedarsoftware/util/TestInetAddressUtilities.java
1810
package com.cedarsoftware.util; import org.junit.Assert; import org.junit.Test; import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; import java.net.InetAddress; /** * useful InetAddress Utilities * * @author Kenneth Partlow * <br> * Copyright (c) Cedar Software LLC * <br><br> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <br><br> * http://www.apache.org/licenses/LICENSE-2.0 * <br><br> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ public class TestInetAddressUtilities { @Test public void testMapUtilitiesConstructor() throws Exception { Constructor<InetAddressUtilities> con = InetAddressUtilities.class.getDeclaredConstructor(); Assert.assertEquals(Modifier.PRIVATE, con.getModifiers() & Modifier.PRIVATE); con.setAccessible(true); Assert.assertNotNull(con.newInstance()); } @Test public void testGetIpAddress() throws Exception { byte[] bytes = InetAddress.getLocalHost().getAddress(); Assert.assertArrayEquals(bytes, InetAddressUtilities.getIpAddress()); } @Test public void testGetLocalHost() throws Exception { String name = InetAddress.getLocalHost().getHostName(); Assert.assertEquals(name, InetAddressUtilities.getHostName()); } }
apache-2.0
dahlstrom-g/intellij-community
java/java-tests/testSrc/com/intellij/java/psi/formatter/java/JavaFormatterAlignmentTest.java
30615
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.java.psi.formatter.java; import com.intellij.ide.highlighter.JavaFileType; import com.intellij.openapi.util.TextRange; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.util.IncorrectOperationException; import static com.intellij.formatting.FormatterTestUtils.Action.REFORMAT_WITH_CONTEXT; /** * Is intended to hold specific java formatting tests for alignment settings ( * {@code Project Settings - Code Style - Alignment and Braces}). * * @author Denis Zhdanov */ public class JavaFormatterAlignmentTest extends AbstractJavaFormatterTest { public void testChainedMethodsAlignment() { // Inspired by IDEA-30369 getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).CONTINUATION_INDENT_SIZE = 8; doTest(); } public void testMethodAndChainedField() { // Inspired by IDEA-79806 getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; doMethodTest( "Holder.INSTANCE\n" + " .foo();", "Holder.INSTANCE\n" + " .foo();" ); } public void testChainedMethodWithComments() { getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; doMethodTest("AAAAA.b()\n" + ".c() // comment after line\n" + ".d()\n" + ".e();", "AAAAA.b()\n" + " .c() // comment after line\n" + " .d()\n" + " .e();"); } public void testChainedMethodWithBlockComment() { getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; doTextTest("class X {\n" + " public void test() {\n" + " AAAAAA.b()\n" + ".c()\n" + ".d()\n" + " /* simple block comment */\n" + ".e();\n" + " }\n" + "}", "class X {\n" + " public void test() {\n" + " AAAAAA.b()\n" + " .c()\n" + " .d()\n" + " /* simple block comment */\n" + " .e();\n" + " }\n" + "}"); } public void testMultipleMethodAnnotationsCommentedInTheMiddle() { 
getSettings().BLANK_LINES_AFTER_CLASS_HEADER = 1; getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).INDENT_SIZE = 4; // Inspired by IDEA-53942 doTextTest( "public class Test {\n" + " @Override\n" + "// @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" + " @XmlTransient\n" + " void foo() {\n" + "}\n" + "}", "public class Test {\n" + "\n" + " @Override\n" + "// @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" + " @XmlTransient\n" + " void foo() {\n" + " }\n" + "}" ); } public void testTernaryOperator() { // Inspired by IDEADEV-13018 getSettings().ALIGN_MULTILINE_TERNARY_OPERATION = true; doMethodTest("int i = a ? x\n" + ": y;", "int i = a ? x\n" + " : y;"); } public void testMethodCallArgumentsAndSmartTabs() throws IncorrectOperationException { // Inspired by IDEADEV-20144. getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).SMART_TABS = true; getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE).USE_TAB_CHARACTER = true; doTextTest("class Foo {\n" + " void foo() {\n" + " bar(new Object[] {\n" + " \"hello1\",\n" + " \"hello2\", add(\"hello3\",\n" + " \"world\")\n" + "});" + " }}", "class Foo {\n" + "\tvoid foo() {\n" + "\t\tbar(new Object[]{\n" + "\t\t\t\t\"hello1\",\n" + "\t\t\t\t\"hello2\", add(\"hello3\",\n" + "\t\t\t\t \"world\")\n" + "\t\t});\n" + "\t}\n" + "}"); } public void testArrayInitializer() throws IncorrectOperationException { // Inspired by IDEADEV-16136 getSettings().ARRAY_INITIALIZER_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS; getSettings().ALIGN_MULTILINE_ARRAY_INITIALIZER_EXPRESSION = true; doTextTest( "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\", \"AssignmentToCollectionOrArrayFieldFromParameter\", \"ReturnOfCollectionOrArrayField\"})\n" + "public class Some {\n" + "}", "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\",\n" + " 
\"AssignmentToCollectionOrArrayFieldFromParameter\",\n" + " \"ReturnOfCollectionOrArrayField\"})\n" + "public class Some {\n" + "}"); } public void testMethodBrackets() { // Inspired by IDEA-53013 getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = true; getSettings().ALIGN_MULTILINE_PARENTHESIZED_EXPRESSION = false; getSettings().ALIGN_MULTILINE_PARAMETERS = true; getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true; getSettings().METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE = true; doClassTest( "public void foo(int i,\n" + " int j) {\n" + "}\n" + "\n" + " public void bar() {\n" + " foo(1,\n" + " 2);\n" + " }", "public void foo(int i,\n" + " int j\n" + " ) {\n" + "}\n" + "\n" + "public void bar() {\n" + " foo(1,\n" + " 2\n" + " );\n" + "}" ); // Inspired by IDEA-55306 getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = false; getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = false; String method = "executeCommand(new Command<Boolean>() {\n" + " public Boolean run() throws ExecutionException {\n" + " return doInterrupt();\n" + " }\n" + "});"; doMethodTest(method, method); } public void testFieldInColumnsAlignment() { // Inspired by IDEA-55147 getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true; getSettings().FIELD_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP; getSettings().VARIABLE_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP; doTextTest( "public class FormattingTest {\n" + "\n" + " int start = 1;\n" + " double end = 2;\n" + "\n" + " int i2 = 1;\n" + " double dd2,\n" + " dd3 = 2;\n" + "\n" + " // asd\n" + " char ccc3 = 'a';\n" + " double ddd31, ddd32 = 1;\n" + "\n" + " private\n" + " final String s4 = \"\";\n" + " private\n" + " transient int i4 = 1;\n" + "\n" + " private final String s5 = \"xxx\";\n" + " private transient int iiii5 = 1;\n" + " /*sdf*/\n" + " @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" + "}", "public class FormattingTest {\n" + "\n" + " int start = 1;\n" + " double end = 
2;\n" + "\n" + " int i2 = 1;\n" + " double dd2,\n" + " dd3 = 2;\n" + "\n" + " // asd\n" + " char ccc3 = 'a';\n" + " double ddd31, ddd32 = 1;\n" + "\n" + " private\n" + " final String s4 = \"\";\n" + " private\n" + " transient int i4 = 1;\n" + "\n" + " private final String s5 = \"xxx\";\n" + " private transient int iiii5 = 1;\n" + " /*sdf*/\n" + " @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" + "}" ); } public void testTabsAndFieldsInColumnsAlignment() { // Inspired by IDEA-56242 getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true; getIndentOptions().USE_TAB_CHARACTER = true; doTextTest( "public class Test {\n" + "\tprivate Long field2 = null;\n" + "\tprivate final Object field1 = null;\n" + "\tprivate int i = 1;\n" + "}", "public class Test {\n" + "\tprivate Long field2 = null;\n" + "\tprivate final Object field1 = null;\n" + "\tprivate int i = 1;\n" + "}" ); } public void testDoNotAlignIfNotEnabled() { getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = false; doTextTest( "public class Test {\n" + "private Long field2 = null;\n" + "private final Object field1 = null;\n" + "private int i = 1;\n" + "}", "public class Test {\n" + " private Long field2 = null;\n" + " private final Object field1 = null;\n" + " private int i = 1;\n" + "}" ); } public void testAnnotatedAndNonAnnotatedFieldsInColumnsAlignment() { // Inspired by IDEA-60237 getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true; doTextTest( "public class Test {\n" + " @Id\n" + " private final String name;\n" + " @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" + " private String value;\n" + " private boolean required;\n" + " private String unsetValue;\n" + "}", "public class Test {\n" + " @Id\n" + " private final String name;\n" + " @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" + " private String value;\n" + " private boolean required;\n" + " private String unsetValue;\n" + "}" ); } public void testAlignThrowsKeyword() { // Inspired by IDEA-63820 getSettings().ALIGN_THROWS_KEYWORD = true; doClassTest( 
"public void test()\n" + " throws Exception {}", "public void test()\n" + "throws Exception {\n" + "}" ); getSettings().ALIGN_THROWS_KEYWORD = false; doClassTest( "public void test()\n" + " throws Exception {}", "public void test()\n" + " throws Exception {\n" + "}" ); } public void testAlignResourceList() { getSettings().KEEP_SIMPLE_BLOCKS_IN_ONE_LINE = true; getSettings().ALIGN_MULTILINE_RESOURCES = true; doMethodTest("try (MyResource r1 = null;\n" + "MyResource r2 = null) { }", "try (MyResource r1 = null;\n" + " MyResource r2 = null) { }"); getSettings().ALIGN_MULTILINE_RESOURCES = false; doMethodTest("try (MyResource r1 = null;\n" + "MyResource r2 = null) { }", "try (MyResource r1 = null;\n" + " MyResource r2 = null) { }"); } public void testChainedMethodCallsAfterFieldsChain_WithAlignment() { getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS; doMethodTest( "a.current.current.current.getThis().getThis().getThis();", "a.current.current.current.getThis()\n" + " .getThis()\n" + " .getThis();" ); doMethodTest( "a.current.current.current.getThis().getThis().getThis().current.getThis().getThis().getThis().getThis();", "a.current.current.current.getThis()\n" + " .getThis()\n" + " .getThis().current.getThis()\n" + " .getThis()\n" + " .getThis()\n" + " .getThis();" ); String onlyMethodCalls = "getThis().getThis().getThis();"; String formatedMethodCalls = "getThis().getThis()\n" + " .getThis();"; doMethodTest(onlyMethodCalls, formatedMethodCalls); } public void testChainedMethodCallsAfterFieldsChain_WithoutAlignment() { getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false; getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS; doMethodTest( "a.current.current.current.getThis().getThis().getThis();", "a.current.current.current.getThis()\n" + " .getThis()\n" + " .getThis();" ); } public void testChainedMethodCalls_WithChopDownIfLongOption() { 
getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ON_EVERY_ITEM; // it's equal to "Chop down if long" getSettings().RIGHT_MARGIN = 50; String before = "a.current.current.getThis().getThis().getThis().getThis().getThis();"; doMethodTest( before, "a.current.current.getThis()\n" + " .getThis()\n" + " .getThis()\n" + " .getThis()\n" + " .getThis();" ); getSettings().RIGHT_MARGIN = 80; doMethodTest(before, before); } public void testChainedMethodCalls_WithWrapIfNeededOption() { getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false; getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; getSettings().RIGHT_MARGIN = 50; String before = "a.current.current.getThis().getThis().getThis().getThis();"; doMethodTest( before, "a.current.current.getThis().getThis()\n" + " .getThis().getThis();" ); getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; doMethodTest( before, "a.current.current.getThis().getThis()\n" + " .getThis().getThis();" ); getSettings().RIGHT_MARGIN = 75; doMethodTest(before, before); } public void testAlignMethodCalls_PassedAsParameters_InMethodCall() { getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; doMethodTest( "test(call1(),\n" + " call2(),\n" + " call3());\n", "test(call1(),\n" + " call2(),\n" + " call3());\n" ); } public void testLocalVariablesAlignment() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( "int a = 2;\n" + "String myString = \"my string\"", "int a = 2;\n" + "String myString = \"my string\"" ); } public void testAlignOnlyDeclarationStatements() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( " String s;\n" + " int a = 2;\n" + "s = \"abs\";\n" + "long stamp = 12;", "String s;\n" + "int a = 2;\n" + "s = \"abs\";\n" + "long stamp = 12;" ); } public void testAlignFieldDeclarations() { getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true; doClassTest( "char a = '2';\n" + "int aaaaa = 
3;\n" + "String b;", "char a = '2';\n" + "int aaaaa = 3;\n" + "String b;"); } public void testAlignVarDeclarations() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( "char a = '2';\n" + "int aaaaa = 3;\n" + "String b;", "char a = '2';\n" + "int aaaaa = 3;\n" + "String b;"); } public void testDoNotAlignWhenBlankLine() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( "int a = 2;\n" + "\n" + "String myString = \"my string\"", "int a = 2;\n" + "\n" + "String myString = \"my string\"" ); } public void testDoNotAlignWhenGroupInterrupted() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( "int a = 2;\n" + "System.out.println(\"hi!\")\n" + "String myString = \"my string\"", "int a = 2;\n" + "System.out.println(\"hi!\")\n" + "String myString = \"my string\"" ); } public void testDoNotAlignMultiDeclarations() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( " int a, b = 2;\n" + "String myString = \"my string\"", "int a, b = 2;\n" + "String myString = \"my string\"" ); } public void testDoNotAlignMultilineParams() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( "int a = 12;\n" + " Runnable runnable = new Runnable() {\n" + " @Override\n" + " public void run() {\n" + " System.out.println(\"AAA!\");\n" + " }\n" + "};", "int a = 12;\n" + "Runnable runnable = new Runnable() {\n" + " @Override\n" + " public void run() {\n" + " System.out.println(\"AAA!\");\n" + " }\n" + "};" ); doMethodTest( " Runnable runnable = new Runnable() {\n" + " @Override\n" + " public void run() {\n" + " System.out.println(\"AAA!\");\n" + " }\n" + "};\n" + "int c = 12;", "Runnable runnable = new Runnable() {\n" + " @Override\n" + " public void run() {\n" + " System.out.println(\"AAA!\");\n" + " }\n" + "};\n" + "int c = 12;" ); doMethodTest( " int ac = 99;\n" + "Runnable runnable = new Runnable() {\n" + " @Override\n" + " public void run() {\n" + " 
System.out.println(\"AAA!\");\n" + " }\n" + "};\n" + "int c = 12;", "int ac = 99;\n" + "Runnable runnable = new Runnable() {\n" + " @Override\n" + " public void run() {\n" + " System.out.println(\"AAA!\");\n" + " }\n" + "};\n" + "int c = 12;" ); } public void testDoNotAlign_IfFirstMultiline() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( "int\n" + " i = 0;\n" + "int[] a = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" + "int var1 = 1;\n" + "int var2 = 2;", "int\n" + " i = 0;\n" + "int[] a = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" + "int var1 = 1;\n" + "int var2 = 2;" ); } public void testAlign_InMethod() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doClassTest( "public void run() {\n" + "\n" + " int a = 2;\n" + " String superString = \"\";\n" + "\n" + " test(call1(), call2(), call3());\n" + " }", "public void run() {\n" + "\n" + " int a = 2;\n" + " String superString = \"\";\n" + "\n" + " test(call1(), call2(), call3());\n" + "}" ); doClassTest( "public void run() {\n" + "\n" + " test(call1(), call2(), call3());\n" + "\n" + " int a = 2;\n" + " String superString = \"\";\n" + "}", "public void run() {\n" + "\n" + " test(call1(), call2(), call3());\n" + "\n" + " int a = 2;\n" + " String superString = \"\";\n" + "}"); } public void test_Shift_All_AlignedParameters() { myLineRange = new TextRange(2, 2); getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; doTextTest( REFORMAT_WITH_CONTEXT, "public class Test {\n" + "\n" + " public void fooooo(String foo,\n" + " String booo,\n" + " String kakadoo) {\n" + "\n" + " }\n" + "\n" + "}", "public class Test {\n" + "\n" + " public void fooooo(String foo,\n" + " String booo,\n" + " String kakadoo) {\n" + "\n" + " }\n" + "\n" + "}" ); } public void test_Align_UnselectedField_IfNeeded() { myLineRange = new TextRange(2, 2); getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true; doTextTest( REFORMAT_WITH_CONTEXT, "public class Test {\n" + " public int i = 1;\n" + " public String 
iiiiiiiiii = 2;\n" + "}", "public class Test {\n" + " public int i = 1;\n" + " public String iiiiiiiiii = 2;\n" + "}" ); } public void test_Align_UnselectedVariable_IfNeeded() { myLineRange = new TextRange(3, 3); getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doTextTest( REFORMAT_WITH_CONTEXT, "public class Test {\n" + " public void test() {\n" + " int s = 2;\n" + " String sssss = 3;\n" + " }\n" + "}", "public class Test {\n" + " public void test() {\n" + " int s = 2;\n" + " String sssss = 3;\n" + " }\n" + "}" ); } public void test_Align_ConsecutiveVars_InsideIfBlock() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( "if (a > 2) {\n" + "int a=2;\n" + "String name=\"Yarik\";\n" + "}\n", "if (a > 2) {\n" + " int a = 2;\n" + " String name = \"Yarik\";\n" + "}\n" ); } public void test_Align_ConsecutiveVars_InsideForBlock() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( " for (int i = 0; i < 10; i++) {\n" + " int a=2;\n" + " String name=\"Xa\";\n" + " }\n", "for (int i = 0; i < 10; i++) {\n" + " int a = 2;\n" + " String name = \"Xa\";\n" + "}\n" ); } public void test_Align_ConsecutiveVars_InsideTryBlock() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( " try {\n" + " int x = getX();\n" + " String name = \"Ha\";\n" + " }\n" + " catch (IOException exception) {\n" + " int y = 12;\n" + " String test = \"Test\";\n" + " }\n" + " finally {\n" + " int z = 12;\n" + " String zzzz = \"pnmhd\";\n" + " }\n", "try {\n" + " int x = getX();\n" + " String name = \"Ha\";\n" + "} catch (IOException exception) {\n" + " int y = 12;\n" + " String test = \"Test\";\n" + "} finally {\n" + " int z = 12;\n" + " String zzzz = \"pnmhd\";\n" + "}\n" ); } public void test_Align_ConsecutiveVars_InsideCodeBlock() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doMethodTest( " System.out.println(\"AAAA\");\n" + " int a = 2;\n" + " \n" + " {\n" + " int x=2;\n" + " String name=3;\n" + " 
}\n", "System.out.println(\"AAAA\");\n" + "int a = 2;\n" + "\n" + "{\n" + " int x = 2;\n" + " String name = 3;\n" + "}\n" ); } public void test_AlignComments_BetweenChainedMethodCalls() { getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; doMethodTest( "ActionBarPullToRefresh.from(getActivity())\n" + " // Mark the ListView as pullable\n" + " .theseChildrenArePullable(eventsListView)\n" + " // Set the OnRefreshListener\n" + " .listener(this)\n" + " // Use the AbsListView delegate for StickyListHeadersListView\n" + " .useViewDelegate(StickyListHeadersListView.class, new AbsListViewDelegate())\n" + " // Finally commit the setup to our PullToRefreshLayout\n" + " .setup(mPullToRefreshLayout);", "ActionBarPullToRefresh.from(getActivity())\n" + " // Mark the ListView as pullable\n" + " .theseChildrenArePullable(eventsListView)\n" + " // Set the OnRefreshListener\n" + " .listener(this)\n" + " // Use the AbsListView delegate for StickyListHeadersListView\n" + " .useViewDelegate(StickyListHeadersListView.class, new AbsListViewDelegate())\n" + " // Finally commit the setup to our PullToRefreshLayout\n" + " .setup(mPullToRefreshLayout);" ); } public void test_AlignComments_2() { getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true; doClassTest( "public String returnWithBuilder2() {\n" + " return MoreObjects\n" + " .toStringHelper(this)\n" + " .add(\"value\", value)\n" + " // comment\n" + " .toString();\n" + " }", "public String returnWithBuilder2() {\n" + " return MoreObjects\n" + " .toStringHelper(this)\n" + " .add(\"value\", value)\n" + " // comment\n" + " .toString();\n" + "}" ); } public void test_AlignSubsequentOneLineMethods() { getSettings().KEEP_SIMPLE_METHODS_IN_ONE_LINE = true; getSettings().ALIGN_SUBSEQUENT_SIMPLE_METHODS = true; doTextTest( "public class Test {\n" + "\n" + " public void testSuperDuperFuckerMother() { System.out.println(\"AAA\"); }\n" + "\n" + " public void testCounterMounter() { System.out.println(\"XXXX\"); }\n" + "\n" + "}", "public class Test 
{\n" + "\n" + " public void testSuperDuperFuckerMother() { System.out.println(\"AAA\"); }\n" + "\n" + " public void testCounterMounter() { System.out.println(\"XXXX\"); }\n" + "\n" + "}" ); } public void test_alignAssignments() { getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true; doTextTest( "public class Test {\n" + " void foo(int a, int xyz) {\n" + " a = 9999;\n" + " xyz = 1;\n" + " }\n" + "}", "public class Test {\n" + " void foo(int a, int xyz) {\n" + " a = 9999;\n" + " xyz = 1;\n" + " }\n" + "}" ); } public void test_alignMultilineAssignments() { getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true; getSettings().ALIGN_MULTILINE_ASSIGNMENT = true; doTextTest( "public class Test {\n" + " void foo(int a, int xyz) {\n" + " a = 9999;\n" + " xyz = a = \n" + " a = 12;\n" + " }\n" + "}", "public class Test {\n" + " void foo(int a, int xyz) {\n" + " a = 9999;\n" + " xyz = a =\n" + " a = 12;\n" + " }\n" + "}" ); } public void test_alignMultilineAssignmentsMixedWithDeclaration() { getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true; getSettings().ALIGN_MULTILINE_ASSIGNMENT = true; getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; doTextTest( "public class Test {\n" + " void foo(int a, int xyz, int bc) {\n" + " bc = 9999;\n" + " a = 9999;\n" + " int basdf = 1234;\n" + " int as = 3;\n" + " xyz = a = \n" + " a = 12;\n" + " }\n" + "}", "public class Test {\n" + " void foo(int a, int xyz, int bc) {\n" + " bc = 9999;\n" + " a = 9999;\n" + " int basdf = 1234;\n" + " int as = 3;\n" + " xyz = a =\n" + " a = 12;\n" + " }\n" + "}" ); } public void test_alignAssignmentsFields() { getSettings().ALIGN_CONSECUTIVE_ASSIGNMENTS = true; doTextTest( "public class Test {\n" + " void foo(A a, int xyz) {\n" + " a.bar = 9999;\n" + " xyz = 1;\n" + " }\n" + "}", "public class Test {\n" + " void foo(A a, int xyz) {\n" + " a.bar = 9999;\n" + " xyz = 1;\n" + " }\n" + "}" ); } public void test_alignMultilineTextBlock() { getJavaSettings().ALIGN_MULTILINE_TEXT_BLOCKS = true; doTextTest( 
"public class Test {\n" + " void foo() {\n" + " String block = \"\"\"\n" + " text\n" + " block\n" + " \"\"\";\n" + " }\n" + "}", "public class Test {\n" + " void foo() {\n" + " String block = \"\"\"\n" + " text\n" + " block\n" + " \"\"\";\n" + " }\n" + "}" ); } @SuppressWarnings("unused") public void _testIdea199677() { getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true; getSettings().CALL_PARAMETERS_WRAP = 2; getSettings().CALL_PARAMETERS_LPAREN_ON_NEXT_LINE = true; getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true; doTextTest( "public class Main {\n" + "\n" + " public static void main(String[] args) {\n" + " int one = 1;\n" + " int a_million_dollars = 1000000;\n" + "\n" + " doSomething(one, a_million_dollars);\n" + " }\n" + "\n" + " private static void doSomething(int one, int two) {\n" + " }\n" + "\n" + "}", "public class Main {\n" + "\n" + " public static void main(String[] args) {\n" + " int one = 1;\n" + " int a_million_dollars = 1000000;\n" + "\n" + " doSomething(\n" + " one,\n" + " a_million_dollars\n" + " );\n" + " }\n" + "\n" + " private static void doSomething(int one, int two) {\n" + " }\n" + "\n" + "}" ); } }
apache-2.0
kidebit/AudioBlurp
appinventor/appengine/tests/com/google/appinventor/server/FileExporterImplTest.java
7269
// -*- mode: java; c-basic-offset: 2; -*- // Copyright 2009-2011 Google, All Rights reserved // Copyright 2011-2012 MIT, All rights reserved // Released under the Apache License, Version 2.0 // http://www.apache.org/licenses/LICENSE-2.0 package com.google.appinventor.server; import com.google.appinventor.server.storage.StorageIo; import com.google.appinventor.server.storage.StorageIoInstanceHolder; import com.google.appinventor.server.storage.UnauthorizedAccessException; import com.google.appinventor.shared.rpc.project.Project; import com.google.appinventor.shared.rpc.project.ProjectSourceZip; import com.google.appinventor.shared.rpc.project.RawFile; import com.google.appinventor.shared.rpc.project.TextFile; import com.google.appinventor.shared.storage.StorageUtil; import com.google.common.io.ByteStreams; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; /** * Tests for {@link FileExporterImpl}. * */ public class FileExporterImplTest extends LocalDatastoreTestCase { private static final String USER_ID = "1"; // The following represent a fake project, containing both source and // output files, for the purpose of testing. private static final String FAKE_PROJECT_TYPE = "FakeProjectType"; private static final String PROJECT_NAME = "Project1"; private static final String FORM1_NAME = "Screen1"; private static final String FORM1_QUALIFIED_NAME = "com.yourdomain." 
+ FORM1_NAME; private static final String FORM1_CONTENT = "Form A\nEnd Form"; private static final String IMAGE1_NAME = "Image.jpg"; private static final byte[] IMAGE_CONTENT = { (byte) 0, (byte) 1, (byte) 32, (byte) 255}; private static final String TARGET1_NAME = "Project1.apk"; private static final String TARGET1_QUALIFIED_NAME = "build/target1/" + TARGET1_NAME; private static final byte[] TARGET1_CONTENT = "pk1".getBytes(); private static final String TARGET2_NAME = "Project2.pak"; private static final String TARGET2_QUALIFIED_NAME = "build/target2/" + TARGET2_NAME; private static final byte[] TARGET2_CONTENT = "pk2".getBytes(); private static final String SETTINGS = ""; private static final String HISTORY = "1:History"; private StorageIo storageIo; private FileExporterImpl exporter; private long projectId; @Override protected void setUp() throws Exception { super.setUp(); storageIo = StorageIoInstanceHolder.INSTANCE; exporter = new FileExporterImpl(); Project project = new Project(PROJECT_NAME); project.setProjectType(FAKE_PROJECT_TYPE); project.setProjectHistory(HISTORY); project.addTextFile(new TextFile(FORM1_QUALIFIED_NAME, "")); projectId = storageIo.createProject(USER_ID, project, SETTINGS); storageIo.uploadFile(projectId, FORM1_QUALIFIED_NAME, USER_ID, FORM1_CONTENT, StorageUtil.DEFAULT_CHARSET); storageIo.addSourceFilesToProject(USER_ID, projectId, false, IMAGE1_NAME); storageIo.uploadRawFile(projectId, IMAGE1_NAME, USER_ID, true, IMAGE_CONTENT); storageIo.addOutputFilesToProject(USER_ID, projectId, TARGET1_QUALIFIED_NAME); storageIo.uploadRawFile(projectId, TARGET1_QUALIFIED_NAME, USER_ID, true, TARGET1_CONTENT); storageIo.addOutputFilesToProject(USER_ID, projectId, TARGET2_QUALIFIED_NAME); storageIo.uploadRawFile(projectId, TARGET2_QUALIFIED_NAME, USER_ID, true, TARGET2_CONTENT); } private Map<String, byte[]> testExportProjectSourceZipHelper(ProjectSourceZip project) throws IOException { ZipInputStream zis = new ZipInputStream(new 
ByteArrayInputStream(project.getContent())); Map<String, byte[]> content = new HashMap<String, byte[]>(); ZipEntry zipEntry; while ((zipEntry = zis.getNextEntry()) != null) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ByteStreams.copy(zis, baos); content.put(zipEntry.getName(), baos.toByteArray()); } assertEquals(content.size(), project.getFileCount()); assertTrue(content.containsKey(FORM1_QUALIFIED_NAME)); assertTrue(content.containsKey(IMAGE1_NAME)); assertFalse(content.containsKey(TARGET1_NAME)); assertEquals(FORM1_CONTENT, new String(content.get(FORM1_QUALIFIED_NAME), StorageUtil.DEFAULT_CHARSET)); assertTrue(Arrays.equals(IMAGE_CONTENT, content.get(IMAGE1_NAME))); return content; } public void testExportProjectSourceZipWithoutHistory() throws IOException { ProjectSourceZip project = exporter.exportProjectSourceZip(USER_ID, projectId, false, false, null); Map<String, byte[]> content = testExportProjectSourceZipHelper(project); assertEquals(2, content.size()); /* Do not expect remix history when includeProjectHistory parameter is false * as in the publish case. 
*/ assertFalse(content.containsKey(FileExporter.REMIX_INFORMATION_FILE_PATH)); } // TODO(user): Add test with properly formatted history public void testExportProjectSourceZipWithHistory() throws IOException { ProjectSourceZip project = exporter.exportProjectSourceZip(USER_ID, projectId, true, false, null); Map<String, byte[]> content = testExportProjectSourceZipHelper(project); assertEquals(3, content.size()); // Expect the remix file to be in assertTrue(content.containsKey(FileExporter.REMIX_INFORMATION_FILE_PATH)); assertEquals(HISTORY, new String(content.get(FileExporter.REMIX_INFORMATION_FILE_PATH), StorageUtil.DEFAULT_CHARSET)); } public void testExportProjectSourceZipWithNonExistingProject() throws IOException { try { exporter.exportProjectSourceZip(USER_ID, projectId + 1, false, false, null); fail(); } catch (Exception e) { assertTrue(e instanceof IllegalArgumentException || e.getCause() instanceof IllegalArgumentException); } } public void testExportProjectOutputFileWithTarget() throws IOException { RawFile file = exporter.exportProjectOutputFile(USER_ID, projectId, "target1"); assertEquals(TARGET1_NAME, file.getFileName()); assertTrue(Arrays.equals(TARGET1_CONTENT, file.getContent())); } public void testExportProjectOutputFileWithNonExistingTraget() throws IOException { try { exporter.exportProjectOutputFile(USER_ID, projectId, "target3"); fail(); } catch (IllegalArgumentException e) { // expected } } public void testExportFile() throws IOException { RawFile file = exporter.exportFile(USER_ID, projectId, FORM1_QUALIFIED_NAME); assertEquals(FORM1_QUALIFIED_NAME, file.getFileName()); assertEquals(FORM1_CONTENT, new String(file.getContent(), StorageUtil.DEFAULT_CHARSET)); } public void testExportFileWithNonExistingFile() throws IOException { final String nonExistingFileName = FORM1_QUALIFIED_NAME + "1"; try { exporter.exportFile(USER_ID, projectId, nonExistingFileName); fail(); } catch (RuntimeException e) { // expected // note that FileExporter throws an 
explicit RuntimeException } } // TODO(user): Add test of exportAllProjectsSourceZip(). }
apache-2.0
tombujok/hazelcast
hazelcast-client/src/main/java/com/hazelcast/client/spi/ClientPartitionService.java
1185
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.spi; import com.hazelcast.core.Partition; import com.hazelcast.nio.Address; import com.hazelcast.nio.serialization.Data; /** * Partition service for Hazelcast clients. * * Allows to retrieve information about the partition count, the partition owner or the partitionId of a key. */ public interface ClientPartitionService { Address getPartitionOwner(int partitionId); int getPartitionId(Data key); int getPartitionId(Object key); int getPartitionCount(); Partition getPartition(int partitionId); }
apache-2.0
zer0se7en/netty
handler/src/test/java/io/netty/handler/ssl/PemEncodedTest.java
3656
/* * Copyright 2016 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.ssl; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeFalse; import static org.junit.Assume.assumeTrue; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.security.PrivateKey; import io.netty.buffer.UnpooledByteBufAllocator; import org.junit.Test; import io.netty.handler.ssl.util.SelfSignedCertificate; import io.netty.util.ReferenceCountUtil; public class PemEncodedTest { @Test public void testPemEncodedOpenSsl() throws Exception { testPemEncoded(SslProvider.OPENSSL); } @Test public void testPemEncodedOpenSslRef() throws Exception { testPemEncoded(SslProvider.OPENSSL_REFCNT); } private static void testPemEncoded(SslProvider provider) throws Exception { assumeTrue(OpenSsl.isAvailable()); assumeFalse(OpenSsl.useKeyManagerFactory()); PemPrivateKey pemKey; PemX509Certificate pemCert; SelfSignedCertificate ssc = new SelfSignedCertificate(); try { pemKey = PemPrivateKey.valueOf(toByteArray(ssc.privateKey())); pemCert = PemX509Certificate.valueOf(toByteArray(ssc.certificate())); } finally { ssc.delete(); } SslContext context = SslContextBuilder.forServer(pemKey, pemCert) .sslProvider(provider) .build(); assertEquals(1, pemKey.refCnt()); assertEquals(1, pemCert.refCnt()); try { assertTrue(context instanceof 
ReferenceCountedOpenSslContext); } finally { ReferenceCountUtil.release(context); assertRelease(pemKey); assertRelease(pemCert); } } @Test(expected = IllegalArgumentException.class) public void testEncodedReturnsNull() throws Exception { PemPrivateKey.toPEM(UnpooledByteBufAllocator.DEFAULT, true, new PrivateKey() { @Override public String getAlgorithm() { return null; } @Override public String getFormat() { return null; } @Override public byte[] getEncoded() { return null; } }); } private static void assertRelease(PemEncoded encoded) { assertTrue(encoded.release()); } private static byte[] toByteArray(File file) throws Exception { FileInputStream in = new FileInputStream(file); try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) != -1) { baos.write(buf, 0, len); } } finally { baos.close(); } return baos.toByteArray(); } finally { in.close(); } } }
apache-2.0
zhihuij/druid
processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
6989
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.query.filter; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; import com.google.common.primitives.Floats; import io.druid.common.guava.GuavaUtils; import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.filter.DimensionPredicateFilter; import io.druid.segment.filter.SelectorFilter; import java.nio.ByteBuffer; import java.util.Objects; /** */ public class SelectorDimFilter implements DimFilter { private final String dimension; private final String value; private final ExtractionFn extractionFn; private final Object initLock = new Object(); private DruidLongPredicate longPredicate; private DruidFloatPredicate floatPredicate; @JsonCreator public SelectorDimFilter( @JsonProperty("dimension") String dimension, 
@JsonProperty("value") String value, @JsonProperty("extractionFn") ExtractionFn extractionFn ) { Preconditions.checkArgument(dimension != null, "dimension must not be null"); this.dimension = dimension; this.value = Strings.nullToEmpty(value); this.extractionFn = extractionFn; } @Override public byte[] getCacheKey() { byte[] dimensionBytes = StringUtils.toUtf8(dimension); byte[] valueBytes = (value == null) ? new byte[]{} : StringUtils.toUtf8(value); byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey(); return ByteBuffer.allocate(3 + dimensionBytes.length + valueBytes.length + extractionFnBytes.length) .put(DimFilterUtils.SELECTOR_CACHE_ID) .put(dimensionBytes) .put(DimFilterUtils.STRING_SEPARATOR) .put(valueBytes) .put(DimFilterUtils.STRING_SEPARATOR) .put(extractionFnBytes) .array(); } @Override public DimFilter optimize() { return new InDimFilter(dimension, ImmutableList.of(value), extractionFn).optimize(); } @Override public Filter toFilter() { if (extractionFn == null) { return new SelectorFilter(dimension, value); } else { final String valueOrNull = Strings.emptyToNull(value); final DruidPredicateFactory predicateFactory = new DruidPredicateFactory() { @Override public Predicate<String> makeStringPredicate() { return Predicates.equalTo(valueOrNull); } @Override public DruidLongPredicate makeLongPredicate() { initLongPredicate(); return longPredicate; } @Override public DruidFloatPredicate makeFloatPredicate() { initFloatPredicate(); return floatPredicate; } }; return new DimensionPredicateFilter(dimension, predicateFactory, extractionFn); } } @JsonProperty public String getDimension() { return dimension; } @JsonProperty public String getValue() { return value; } @JsonProperty public ExtractionFn getExtractionFn() { return extractionFn; } @Override public String toString() { if (extractionFn != null) { return String.format("%s(%s) = %s", extractionFn, dimension, value); } else { return String.format("%s = %s", dimension, 
value); } } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } SelectorDimFilter that = (SelectorDimFilter) o; if (!dimension.equals(that.dimension)) { return false; } if (value != null ? !value.equals(that.value) : that.value != null) { return false; } return extractionFn != null ? extractionFn.equals(that.extractionFn) : that.extractionFn == null; } @Override public RangeSet<String> getDimensionRangeSet(String dimension) { if (!Objects.equals(getDimension(), dimension) || getExtractionFn() != null) { return null; } RangeSet<String> retSet = TreeRangeSet.create(); retSet.add(Range.singleton(Strings.nullToEmpty(value))); return retSet; } @Override public int hashCode() { int result = dimension.hashCode(); result = 31 * result + (value != null ? value.hashCode() : 0); result = 31 * result + (extractionFn != null ? extractionFn.hashCode() : 0); return result; } private void initLongPredicate() { if (longPredicate != null) { return; } synchronized (initLock) { if (longPredicate != null) { return; } final Long valueAsLong = GuavaUtils.tryParseLong(value); if (valueAsLong == null) { longPredicate = DruidLongPredicate.ALWAYS_FALSE; } else { // store the primitive, so we don't unbox for every comparison final long unboxedLong = valueAsLong.longValue(); longPredicate = new DruidLongPredicate() { @Override public boolean applyLong(long input) { return input == unboxedLong; } }; } } } private void initFloatPredicate() { if (floatPredicate != null) { return; } synchronized (initLock) { if (floatPredicate != null) { return; } final Float valueAsFloat = Floats.tryParse(value); if (valueAsFloat == null) { floatPredicate = DruidFloatPredicate.ALWAYS_FALSE; } else { final int floatBits = Float.floatToIntBits(valueAsFloat); floatPredicate = new DruidFloatPredicate() { @Override public boolean applyFloat(float input) { return Float.floatToIntBits(input) == floatBits; } }; } } } }
apache-2.0
Ant-Droid/android_frameworks_base_OLD
location/lib/java/com/android/location/provider/ActivityRecognitionProviderWatcher.java
3055
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package com.android.location.provider; import android.annotation.NonNull; import android.annotation.Nullable; import android.hardware.location.IActivityRecognitionHardware; import android.hardware.location.IActivityRecognitionHardwareWatcher; import android.os.Binder; import android.os.IBinder; import android.os.Process; import android.os.RemoteException; import android.util.Log; /** * A watcher class for Activity-Recognition instances. * * @deprecated use {@link ActivityRecognitionProviderClient} instead. 
*/ @Deprecated public class ActivityRecognitionProviderWatcher { private static final String TAG = "ActivityRecognitionProviderWatcher"; private static ActivityRecognitionProviderWatcher sWatcher; private static final Object sWatcherLock = new Object(); private ActivityRecognitionProvider mActivityRecognitionProvider; private ActivityRecognitionProviderWatcher() {} public static ActivityRecognitionProviderWatcher getInstance() { synchronized (sWatcherLock) { if (sWatcher == null) { sWatcher = new ActivityRecognitionProviderWatcher(); } return sWatcher; } } private IActivityRecognitionHardwareWatcher.Stub mWatcherStub = new IActivityRecognitionHardwareWatcher.Stub() { @Override public void onInstanceChanged(IActivityRecognitionHardware instance) { int callingUid = Binder.getCallingUid(); if (callingUid != Process.SYSTEM_UID) { Log.d(TAG, "Ignoring calls from non-system server. Uid: " + callingUid); return; } try { mActivityRecognitionProvider = new ActivityRecognitionProvider(instance); } catch (RemoteException e) { Log.e(TAG, "Error creating Hardware Activity-Recognition", e); } } }; /** * Gets the binder needed to interact with proxy provider in the platform. */ @NonNull public IBinder getBinder() { return mWatcherStub; } /** * Gets an object that supports the functionality of {@link ActivityRecognitionProvider}. * * @return Non-null value if the functionality is supported by the platform, false otherwise. */ @Nullable public ActivityRecognitionProvider getActivityRecognitionProvider() { return mActivityRecognitionProvider; } }
apache-2.0
tombujok/hazelcast
hazelcast/src/test/java/com/hazelcast/internal/adapter/ICacheReplaceEntryProcessor.java
1482
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.adapter; import javax.cache.processor.EntryProcessor; import javax.cache.processor.EntryProcessorException; import javax.cache.processor.MutableEntry; import java.io.Serializable; public class ICacheReplaceEntryProcessor implements EntryProcessor<Integer, String, String>, Serializable { private static final long serialVersionUID = -396575576353368113L; @Override public String process(MutableEntry<Integer, String> entry, Object... arguments) throws EntryProcessorException { String value = entry.getValue(); if (value == null) { return null; } String oldString = (String) arguments[0]; String newString = (String) arguments[1]; String result = value.replace(oldString, newString); entry.setValue(result); return result; } }
apache-2.0
jandppw/ppwcode-recovered-from-google-code
java/vernacular/persistence/dev/d20081014-1359/src/main/java/org/ppwcode/vernacular/persistence_III/AbstractPersistentBean.java
2465
/*<license> Copyright 2005 - $Date$ by PeopleWare n.v.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. </license>*/ package org.ppwcode.vernacular.persistence_III; import static org.ppwcode.metainfo_I.License.Type.APACHE_V2; import java.io.Serializable; import org.ppwcode.metainfo_I.Copyright; import org.ppwcode.metainfo_I.License; import org.ppwcode.metainfo_I.vcs.SvnInfo; import org.ppwcode.vernacular.semantics_VI.bean.AbstractRousseauBean; /** * A partial implementation of the interface {@link PersistentBean}. * * @author Nele Smeets * @author Ruben Vandeginste * @author Jan Dockx * @author PeopleWare n.v. * * @mudo We now have a dependency here on JPA via annotations. Also, the listener is defined in a subpackage, which * depends on this package. This introduces a cycle! This is a bad idea. Like this, you always need the JPA * libraries, even if they are annotations, because the annotations are loaded in the import statements too * (at least under 1.5). Thus, the annotations must go, and we need to use the xml files. 
*/ @Copyright("2004 - $Date$, PeopleWare n.v.") @License(APACHE_V2) @SvnInfo(revision = "$Revision$", date = "$Date$") public abstract class AbstractPersistentBean<_Id_ extends Serializable> extends AbstractRousseauBean implements PersistentBean<_Id_> { /*<property name="id">*/ //------------------------------------------------------------------ public final _Id_ getPersistenceId() { return $persistenceId; } public final boolean hasSamePersistenceId(final PersistentBean<_Id_> other) { return (other != null) && ((getPersistenceId() == null) ? other.getPersistenceId() == null : getPersistenceId().equals(other.getPersistenceId())); } public final void setPersistenceId(final _Id_ persistenceId) { $persistenceId = persistenceId; } // @Id // @GeneratedValue // @Column(name="persistenceId") private _Id_ $persistenceId; /*</property>*/ }
apache-2.0
aurbroszniowski/ehcache3
impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java
17637
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ehcache.config.builders; import org.ehcache.CacheManager; import org.ehcache.PersistentCacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.Configuration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.EhcacheManager; import org.ehcache.core.spi.store.heap.SizeOfEngine; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceCreationConfiguration; import java.io.File; import java.util.Collection; import java.util.HashSet; import java.util.Set; import static java.util.Collections.emptySet; import static java.util.Collections.unmodifiableSet; import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder; import 
static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; /** * The {@code CacheManagerBuilder} enables building cache managers using a fluent style. * <p> * As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new * instance without modifying the one on which the method was called. * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. */ public class CacheManagerBuilder<T extends CacheManager> implements Builder<T> { private final ConfigurationBuilder configBuilder; private final Set<Service> services; /** * Builds a {@link CacheManager} or a subtype of it and initializes it if requested. * * @param init whether the returned {@code CacheManager} is to be initialized or not * @return a {@code CacheManager} or a subtype of it */ public T build(final boolean init) { final T cacheManager = newCacheManager(services, configBuilder.build()); if(init) { cacheManager.init(); } return cacheManager; } /** * Builds a {@link CacheManager} or a subtype of it uninitialized. * * @return a {@code CacheManager} or a subtype of it uninitialized */ @Override public T build() { return build(false); } private CacheManagerBuilder() { this.configBuilder = newConfigurationBuilder(); this.services = emptySet(); } private CacheManagerBuilder(CacheManagerBuilder<T> builder, Set<Service> services) { this.configBuilder = builder.configBuilder; this.services = unmodifiableSet(services); } private CacheManagerBuilder(CacheManagerBuilder<T> builder, ConfigurationBuilder configBuilder) { this.configBuilder = configBuilder; this.services = builder.services; } /** * Creates a new {@link CacheManager} based on the provided configuration. 
* The returned {@code CacheManager} is uninitialized. * * @param configuration the configuration to use * @return a {@code CacheManager} */ public static CacheManager newCacheManager(final Configuration configuration) { return new EhcacheManager(configuration); } T newCacheManager(Collection<Service> services, final Configuration configuration) { final EhcacheManager ehcacheManager = new EhcacheManager(configuration, services); return cast(ehcacheManager); } @SuppressWarnings("unchecked") T cast(EhcacheManager ehcacheManager) { return (T) ehcacheManager; } /** * Adds a {@link CacheConfiguration} linked to the specified alias to the returned builder. * * @param alias the cache alias * @param configuration the {@code CacheConfiguration} * @param <K> the cache key type * @param <V> the cache value type * @return a new builder with the added cache configuration * * @see CacheConfigurationBuilder */ public <K, V> CacheManagerBuilder<T> withCache(String alias, CacheConfiguration<K, V> configuration) { return new CacheManagerBuilder<>(this, configBuilder.addCache(alias, configuration)); } /** * Convenience method to add a {@link CacheConfiguration} linked to the specified alias to the returned builder by * building it from the provided {@link Builder}. * * @param alias the cache alias * @param configurationBuilder the {@code Builder} to get {@code CacheConfiguration} from * @param <K> the cache key type * @param <V> the cache value type * @return a new builder with the added cache configuration * * @see CacheConfigurationBuilder */ public <K, V> CacheManagerBuilder<T> withCache(String alias, Builder<? extends CacheConfiguration<K, V>> configurationBuilder) { return withCache(alias, configurationBuilder.build()); } /** * Specializes the returned {@link CacheManager} subtype through a specific {@link CacheManagerConfiguration} which * will optionally add configurations to the returned builder. 
* * @param cfg the {@code CacheManagerConfiguration} to use * @param <N> the subtype of {@code CacheManager} * @return a new builder ready to build a more specific subtype of cache manager * * @see #persistence(String) * @see PersistentCacheManager * @see CacheManagerPersistenceConfiguration */ public <N extends T> CacheManagerBuilder<N> with(CacheManagerConfiguration<N> cfg) { return cfg.builder(this); } /** * Convenience method to specialize the returned {@link CacheManager} subtype through a {@link CacheManagerConfiguration} * built using the provided {@link Builder}. * * @param cfgBuilder the {@code Builder} to get the {@code CacheManagerConfiguration} from * @return a new builder ready to build a more specific subtype of cache manager * * @see CacheConfigurationBuilder */ public <N extends T> CacheManagerBuilder<N> with(Builder<? extends CacheManagerConfiguration<N>> cfgBuilder) { return with(cfgBuilder.build()); } /** * Adds a {@link Service} instance to the returned builder. * <p> * The service instance will be used by the constructed {@link CacheManager}. * * @param service the {@code Service} to add * @return a new builder with the added service */ public CacheManagerBuilder<T> using(Service service) { Set<Service> newServices = new HashSet<>(services); newServices.add(service); return new CacheManagerBuilder<>(this, newServices); } /** * Adds a default {@link Copier} for the specified type to the returned builder. * * @param clazz the {@code Class} for which the copier is * @param copier the {@code Copier} instance * @param <C> the type which can be copied * @return a new builder with the added default copier */ public <C> CacheManagerBuilder<T> withCopier(Class<C> clazz, Class<? 
extends Copier<C>> copier) { DefaultCopyProviderConfiguration service = configBuilder.findServiceByClass(DefaultCopyProviderConfiguration.class); if (service == null) { service = new DefaultCopyProviderConfiguration(); service.addCopierFor(clazz, copier); return new CacheManagerBuilder<>(this, configBuilder.addService(service)); } else { DefaultCopyProviderConfiguration newConfig = new DefaultCopyProviderConfiguration(service); newConfig.addCopierFor(clazz, copier, true); return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig)); } } /** * Adds a default {@link Serializer} for the specified type to the returned builder. * * @param clazz the {@code Class} for which the serializer is * @param serializer the {@code Serializer} instance * @param <C> the type which can be serialized * @return a new builder with the added default serializer */ public <C> CacheManagerBuilder<T> withSerializer(Class<C> clazz, Class<? extends Serializer<C>> serializer) { DefaultSerializationProviderConfiguration service = configBuilder.findServiceByClass(DefaultSerializationProviderConfiguration.class); if (service == null) { service = new DefaultSerializationProviderConfiguration(); service.addSerializerFor(clazz, serializer); return new CacheManagerBuilder<>(this, configBuilder.addService(service)); } else { DefaultSerializationProviderConfiguration newConfig = new DefaultSerializationProviderConfiguration(service); newConfig.addSerializerFor(clazz, serializer, true); return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig)); } } /** * Adds a default {@link SizeOfEngine} configuration, that limits the max object graph to * size, to the returned builder. 
* * @param size the max object graph size * @return a new builder with the added configuration */ public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectGraph(long size) { DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class); if (configuration == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, size))); } else { ConfigurationBuilder builder = configBuilder.removeService(configuration); return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(configuration .getMaxObjectSize(), configuration.getUnit(), size))); } } /** * Adds a default {@link SizeOfEngine} configuration, that limits the max object size, to * the returned builder. * * @param size the max object size * @param unit the max object size unit * @return a new builder with the added configuration */ public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectSize(long size, MemoryUnit unit) { DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class); if (configuration == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, DEFAULT_OBJECT_GRAPH_SIZE))); } else { ConfigurationBuilder builder = configBuilder.removeService(configuration); return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, configuration .getMaxObjectGraphSize()))); } } /** * Adds a {@link WriteBehindProviderConfiguration}, that specifies the thread pool to use, to the returned builder. 
* * @param threadPoolAlias the thread pool alias * @return a new builder with the added configuration * * @see PooledExecutionServiceConfigurationBuilder */ public CacheManagerBuilder<T> withDefaultWriteBehindThreadPool(String threadPoolAlias) { WriteBehindProviderConfiguration config = configBuilder.findServiceByClass(WriteBehindProviderConfiguration.class); if (config == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new WriteBehindProviderConfiguration(threadPoolAlias))); } else { ConfigurationBuilder builder = configBuilder.removeService(config); return new CacheManagerBuilder<>(this, builder.addService(new WriteBehindProviderConfiguration(threadPoolAlias))); } } /** * Adds a {@link OffHeapDiskStoreProviderConfiguration}, that specifies the thread pool to use, to the returned * builder. * * @param threadPoolAlias the thread pool alias * @return a new builder with the added configuration * * @see PooledExecutionServiceConfigurationBuilder */ public CacheManagerBuilder<T> withDefaultDiskStoreThreadPool(String threadPoolAlias) { OffHeapDiskStoreProviderConfiguration config = configBuilder.findServiceByClass(OffHeapDiskStoreProviderConfiguration.class); if (config == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))); } else { ConfigurationBuilder builder = configBuilder.removeService(config); return new CacheManagerBuilder<>(this, builder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))); } } /** * Adds a {@link CacheEventDispatcherFactoryConfiguration}, that specifies the thread pool to use, to the returned * builder. 
* * @param threadPoolAlias the thread pool alias * @return a new builder with the added configuration * * @see PooledExecutionServiceConfigurationBuilder */ public CacheManagerBuilder<T> withDefaultEventListenersThreadPool(String threadPoolAlias) { CacheEventDispatcherFactoryConfiguration config = configBuilder.findServiceByClass(CacheEventDispatcherFactoryConfiguration.class); if (config == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias))); } else { ConfigurationBuilder builder = configBuilder.removeService(config); return new CacheManagerBuilder<>(this, builder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias))); } } /** * Adds a {@link ServiceCreationConfiguration} to the returned builder. * <p> * These configurations are used to load services and configure them at creation time. * * @param serviceConfiguration the {@code ServiceCreationConfiguration} to use * @return a new builder with the added configuration */ public CacheManagerBuilder<T> using(ServiceCreationConfiguration<?> serviceConfiguration) { return new CacheManagerBuilder<>(this, configBuilder.addService(serviceConfiguration)); } /** * Replaces an existing {@link ServiceCreationConfiguration} of the same type on the returned builder. * <p> * Duplicate service creation configuration will cause a cache manager to fail to initialize. 
* * @param overwriteServiceConfiguration the new {@code ServiceCreationConfiguration} to use * @return a new builder with the replaced configuration */ public CacheManagerBuilder<T> replacing(ServiceCreationConfiguration<?> overwriteServiceConfiguration) { ServiceCreationConfiguration<?> existingConfiguration = configBuilder.findServiceByClass(overwriteServiceConfiguration.getClass()); return new CacheManagerBuilder<>(this, configBuilder.removeService(existingConfiguration) .addService(overwriteServiceConfiguration)); } /** * Adds a {@link ClassLoader}, to use for non Ehcache types, to the returned builder * * @param classLoader the class loader to use * @return a new builder with the added class loader */ public CacheManagerBuilder<T> withClassLoader(ClassLoader classLoader) { return new CacheManagerBuilder<>(this, configBuilder.withClassLoader(classLoader)); } /** * Creates a new {@code CacheManagerBuilder} * * @return the cache manager builder */ public static CacheManagerBuilder<CacheManager> newCacheManagerBuilder() { return new CacheManagerBuilder<>(); } /** * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual * level of persistence is configured on the disk resource pool per cache. * * @param rootDirectory the root directory to use for disk storage * @return a {@code CacheManagerConfiguration} * * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) * @see #with(CacheManagerConfiguration) * @see PersistentCacheManager */ public static CacheManagerConfiguration<PersistentCacheManager> persistence(String rootDirectory) { return persistence(new File(rootDirectory)); } /** * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual * level of persistence is configured on the disk resource pool per cache. 
* * @param rootDirectory the root directory to use for disk storage * @return a {@code CacheManagerConfiguration} * * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) * @see #with(CacheManagerConfiguration) * @see PersistentCacheManager */ public static CacheManagerConfiguration<PersistentCacheManager> persistence(File rootDirectory) { return new CacheManagerPersistenceConfiguration(rootDirectory); } }
apache-2.0
ern/elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DelayedDataCheckConfigTests.java
4017
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.core.ml.datafeed; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; public class DelayedDataCheckConfigTests extends AbstractSerializingTestCase<DelayedDataCheckConfig> { @Override protected DelayedDataCheckConfig createTestInstance(){ return createRandomizedConfig(100); } @Override protected Writeable.Reader<DelayedDataCheckConfig> instanceReader() { return DelayedDataCheckConfig::new; } @Override protected DelayedDataCheckConfig doParseInstance(XContentParser parser) { return DelayedDataCheckConfig.STRICT_PARSER.apply(parser, null); } public void testConstructor() { expectThrows(IllegalArgumentException.class, () -> new DelayedDataCheckConfig(true, TimeValue.MINUS_ONE)); expectThrows(IllegalArgumentException.class, () -> new DelayedDataCheckConfig(true, TimeValue.timeValueHours(25))); } public void testEnabledDelayedDataCheckConfig() { DelayedDataCheckConfig delayedDataCheckConfig = DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(5)); assertThat(delayedDataCheckConfig.isEnabled(), equalTo(true)); assertThat(delayedDataCheckConfig.getCheckWindow(), equalTo(TimeValue.timeValueHours(5))); } public void testDisabledDelayedDataCheckConfig() { DelayedDataCheckConfig delayedDataCheckConfig = DelayedDataCheckConfig.disabledDelayedDataCheckConfig(); assertThat(delayedDataCheckConfig.isEnabled(), equalTo(false)); 
assertThat(delayedDataCheckConfig.getCheckWindow(), equalTo(null)); } public void testDefaultDelayedDataCheckConfig() { DelayedDataCheckConfig delayedDataCheckConfig = DelayedDataCheckConfig.defaultDelayedDataCheckConfig(); assertThat(delayedDataCheckConfig.isEnabled(), equalTo(true)); assertThat(delayedDataCheckConfig.getCheckWindow(), is(nullValue())); } public static DelayedDataCheckConfig createRandomizedConfig(long bucketSpanMillis) { boolean enabled = randomBoolean(); TimeValue timeWindow = null; if (enabled || randomBoolean()) { // time span is required to be at least 1 millis, so we use a custom method to generate a time value here timeWindow = new TimeValue(randomLongBetween(bucketSpanMillis,bucketSpanMillis*2)); } return new DelayedDataCheckConfig(enabled, timeWindow); } @Override protected DelayedDataCheckConfig mutateInstance(DelayedDataCheckConfig instance) throws IOException { boolean enabled = instance.isEnabled(); TimeValue timeWindow = instance.getCheckWindow(); switch (between(0, 1)) { case 0: enabled = enabled == false; if (randomBoolean()) { timeWindow = TimeValue.timeValueMillis(randomLongBetween(1, 1000)); } else { timeWindow = null; } break; case 1: if (timeWindow == null) { timeWindow = TimeValue.timeValueMillis(randomLongBetween(1, 1000)); } else { timeWindow = new TimeValue(timeWindow.getMillis() + between(10, 100)); } enabled = true; break; default: throw new AssertionError("Illegal randomisation branch"); } return new DelayedDataCheckConfig(enabled, timeWindow); } }
apache-2.0
piyushsh/choco3
choco-solver/src/main/java/org/chocosolver/solver/thread/AbstractParallelSlave.java
3193
/** * Copyright (c) 2014, * Charles Prud'homme (TASC, INRIA Rennes, LINA CNRS UMR 6241), * Jean-Guillaume Fages (COSLING S.A.S.). * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the <organization> nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.chocosolver.solver.thread; /** * Slave born to be mastered and work in parallel * * @author Jean-Guillaume Fages */ public abstract class AbstractParallelSlave<P extends AbstractParallelMaster> { //*********************************************************************************** // VARIABLES //*********************************************************************************** public P master; public final int id; //*********************************************************************************** // CONSTRUCTORS //*********************************************************************************** /** * Create a slave born to be mastered and work in parallel * * @param master master solver * @param id slave unique name */ public AbstractParallelSlave(P master, int id) { this.master = master; this.id = id; } //*********************************************************************************** // SUB-PROBLEM SOLVING //*********************************************************************************** /** * Creates a new thread to work in parallel */ public void workInParallel() { Thread t = new Thread() { @Override public void run() { work(); master.wishGranted(); } }; t.start(); } /** * do something */ public abstract void work(); }
bsd-3-clause
opticod/BuildmLearn-Toolkit-Android
source-code/app/src/main/java/org/buildmlearn/toolkit/flashcardtemplate/data/DataUtils.java
1360
package org.buildmlearn.toolkit.flashcardtemplate.data; import org.w3c.dom.Document; import org.xml.sax.SAXException; import java.io.File; import java.io.IOException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; /** * Created by Anupam (opticod) on 10/8/16. */ /** * @brief Contains xml data utils for flash card template's simulator. */ public class DataUtils { public static String[] readTitleAuthor() { String result[] = new String[2]; DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setValidating(false); DocumentBuilder db; Document doc; try { File fXmlFile = new File(org.buildmlearn.toolkit.flashcardtemplate.Constants.XMLFileName); db = dbf.newDocumentBuilder(); doc = db.parse(fXmlFile); doc.normalize(); result[0] = doc.getElementsByTagName("title").item(0).getChildNodes() .item(0).getNodeValue(); result[1] = doc.getElementsByTagName("name").item(0).getChildNodes() .item(0).getNodeValue(); } catch (ParserConfigurationException | SAXException | IOException e) { e.printStackTrace(); } return result; } }
bsd-3-clause
chrisrico/XChange
xchange-bitmarket/src/test/java/org/knowm/xchange/bitmarket/BitMarketAssert.java
6414
package org.knowm.xchange.bitmarket; import static org.assertj.core.api.Assertions.assertThat; import java.math.BigDecimal; import java.util.List; import java.util.Map; import org.knowm.xchange.bitmarket.dto.account.BitMarketBalance; import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketOrderBook; import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketTicker; import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketTrade; import org.knowm.xchange.bitmarket.dto.trade.BitMarketOrder; import org.knowm.xchange.dto.account.Balance; import org.knowm.xchange.dto.marketdata.OrderBook; import org.knowm.xchange.dto.marketdata.Ticker; import org.knowm.xchange.dto.marketdata.Trade; import org.knowm.xchange.dto.trade.LimitOrder; import org.knowm.xchange.dto.trade.UserTrade; public class BitMarketAssert { public static void assertEquals(Balance o1, Balance o2) { assertThat(o1.getCurrency()).isEqualTo(o2.getCurrency()); assertThat(o1.getTotal()).isEqualTo(o2.getTotal()); assertThat(o1.getAvailable()).isEqualTo(o2.getAvailable()); assertThat(o1.getFrozen()).isEqualTo(o2.getFrozen()); } public static void assertEquals(Trade o1, Trade o2) { assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getPrice()).isEqualTo(o2.getPrice()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); assertThat(o1.getId()).isEqualTo(o2.getId()); } public static void assertEquals(UserTrade o1, UserTrade o2) { assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getPrice()).isEqualTo(o2.getPrice()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); assertThat(o1.getId()).isEqualTo(o2.getId()); assertThat(o1.getOrderId()).isEqualTo(o2.getOrderId()); 
assertThat(o1.getFeeAmount()).isEqualTo(o2.getFeeAmount()); assertThat(o1.getFeeCurrency()).isEqualTo(o2.getFeeCurrency()); } public static void assertEquals(LimitOrder o1, LimitOrder o2) { assertThat(o1.getId()).isEqualTo(o2.getId()); assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getLimitPrice()).isEqualTo(o2.getLimitPrice()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); } public static void assertEqualsWithoutTimestamp(LimitOrder o1, LimitOrder o2) { assertThat(o1.getId()).isEqualTo(o2.getId()); assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getLimitPrice()).isEqualTo(o2.getLimitPrice()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); } public static void assertEquals(Ticker o1, Ticker o2) { assertThat(o1.getBid()).isEqualTo(o2.getBid()); assertThat(o1.getAsk()).isEqualTo(o2.getAsk()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getHigh()).isEqualTo(o2.getHigh()); assertThat(o1.getLast()).isEqualTo(o2.getLast()); assertThat(o1.getLow()).isEqualTo(o2.getLow()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); assertThat(o1.getVolume()).isEqualTo(o2.getVolume()); assertThat(o1.getVwap()).isEqualTo(o2.getVwap()); } public static void assertEquals(OrderBook o1, OrderBook o2) { assertThat(o1.getTimeStamp()).isEqualTo(o2.getTimeStamp()); assertEquals(o1.getAsks(), o2.getAsks()); assertEquals(o1.getBids(), o2.getBids()); } public static void assertEquals(List<LimitOrder> o1, List<LimitOrder> o2) { assertThat(o1.size()).isEqualTo(o2.size()); for (int i = 0; i < o1.size(); i++) { assertEqualsWithoutTimestamp(o1.get(i), o2.get(i)); } } public static void assertEquals(BitMarketOrder o1, BitMarketOrder o2) { assertThat(o1.getId()).isEqualTo(o2.getId()); 
assertThat(o1.getMarket()).isEqualTo(o2.getMarket()); assertThat(o1.getAmount()).isEqualTo(o2.getAmount()); assertThat(o1.getRate()).isEqualTo(o2.getRate()); assertThat(o1.getFiat()).isEqualTo(o2.getFiat()); assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getTime()).isEqualTo(o2.getTime()); } public static void assertEquals(BitMarketOrderBook o1, BitMarketOrderBook o2) { assertEquals(o1.getAsks(), o2.getAsks()); assertEquals(o1.getBids(), o2.getBids()); assertThat(o1.toString()).isEqualTo(o2.toString()); } public static void assertEquals(BitMarketTicker o1, BitMarketTicker o2) { assertThat(o1.getAsk()).isEqualTo(o2.getAsk()); assertThat(o1.getBid()).isEqualTo(o2.getBid()); assertThat(o1.getLast()).isEqualTo(o2.getLast()); assertThat(o1.getLow()).isEqualTo(o2.getLow()); assertThat(o1.getHigh()).isEqualTo(o2.getHigh()); assertThat(o1.getVwap()).isEqualTo(o2.getVwap()); assertThat(o1.getVolume()).isEqualTo(o2.getVolume()); assertThat(o1.toString()).isEqualTo(o2.toString()); } public static void assertEquals(BitMarketTrade o1, BitMarketTrade o2) { assertThat(o1.getTid()).isEqualTo(o2.getTid()); assertThat(o1.getPrice()).isEqualTo(o2.getPrice()); assertThat(o1.getAmount()).isEqualTo(o2.getAmount()); assertThat(o1.getDate()).isEqualTo(o2.getDate()); assertThat(o1.toString()).isEqualTo(o2.toString()); } public static void assertEquals(BitMarketBalance o1, BitMarketBalance o2) { assertEquals(o1.getAvailable(), o2.getAvailable()); assertEquals(o1.getBlocked(), o2.getBlocked()); } private static void assertEquals(Map<String, BigDecimal> o1, Map<String, BigDecimal> o2) { assertThat(o1.size()).isEqualTo(o2.size()); for (String key : o1.keySet()) { assertThat(o1.get(key)).isEqualTo(o2.get(key)); } } private static void assertEquals(BigDecimal[][] o1, BigDecimal[][] o2) { assertThat(o1.length).isEqualTo(o2.length); for (int i = 0; i < o1.length; i++) { assertThat(o1[i].length).isEqualTo(o2[i].length); for (int j = 0; j < o1[i].length; j++) { 
assertThat(o1[i][j]).isEqualTo(o2[i][j]); } } } }
mit
chrisrico/XChange
xchange-exx/src/test/java/org/knowm/xchange/test/exx/AccountServiceIntegration.java
1369
package org.knowm.xchange.test.exx; import java.io.IOException; import org.knowm.xchange.Exchange; import org.knowm.xchange.ExchangeFactory; import org.knowm.xchange.ExchangeSpecification; import org.knowm.xchange.exx.EXXExchange; import org.knowm.xchange.service.account.AccountService; /** * kevinobamatheus@gmail.com * * @author kevingates */ public class AccountServiceIntegration { public static void main(String[] args) { try { getAssetInfo(); } catch (IOException e) { e.printStackTrace(); } } private static void getAssetInfo() throws IOException { String apiKey = ""; String secretKey = ""; Exchange exchange = ExchangeFactory.INSTANCE.createExchange(EXXExchange.class.getName()); ExchangeSpecification exchangeSpecification = exchange.getDefaultExchangeSpecification(); exchangeSpecification.setSslUri("https://trade.exx.com"); exchangeSpecification.setApiKey(apiKey); exchangeSpecification.setSecretKey(secretKey); exchange.applySpecification(exchangeSpecification); AccountService accountService = exchange.getAccountService(); try { System.out.println("accountInfo"); System.out.println(accountService.getAccountInfo()); System.out.println(accountService.getAccountInfo().getWallets()); } catch (IOException e) { e.printStackTrace(); } } }
mit
yangqiang1223/AndroidBinding
Core/AndroidBinding/src/gueei/binding/DependentObservable.java
1894
package gueei.binding; import java.util.Collection; import java.util.ArrayList; public abstract class DependentObservable<T> extends Observable<T> implements Observer{ protected IObservable<?>[] mDependents; public DependentObservable(Class<T> type, IObservable<?>... dependents) { super(type); for(IObservable<?> o : dependents){ o.subscribe(this); } this.mDependents = dependents; this.onPropertyChanged(null, new ArrayList<Object>()); } // This is provided in case the constructor can't be used. // Not intended for normal usage public void addDependents(IObservable<?>... dependents){ IObservable<?>[] temp = mDependents; mDependents = new IObservable<?>[temp.length + dependents.length]; int len = temp.length; for(int i=0; i<len; i++){ mDependents[i] = temp[i]; } int len2 = dependents.length; for(int i=0; i<len2; i++){ mDependents[i+len] = dependents[i]; dependents[i].subscribe(this); } this.onPropertyChanged(null, new ArrayList<Object>()); } public abstract T calculateValue(Object... args) throws Exception; public final void onPropertyChanged(IObservable<?> prop, Collection<Object> initiators) { dirty = true; initiators.add(this); this.notifyChanged(initiators); } private boolean dirty = false; @Override public T get() { if (dirty){ int len = mDependents.length; Object[] values = new Object[len]; for(int i=0; i<len; i++){ values[i] = mDependents[i].get(); } try{ T value = this.calculateValue(values); this.setWithoutNotify(value); }catch(Exception e){ BindingLog.exception ("DependentObservable.CalculateValue()", e); } dirty = false; } return super.get(); } public boolean isDirty() { return dirty; } public void setDirty(boolean dirty) { this.dirty = dirty; } }
mit
zfu/java-design-patterns
visitor/src/main/java/com/iluwatar/Sergeant.java
321
package com.iluwatar;

/**
 * Sergeant unit — a concrete node in the unit composite that lets a
 * {@link UnitVisitor} dispatch sergeant-specific behaviour (Visitor pattern).
 */
public class Sergeant extends Unit {

    public Sergeant(Unit... children) {
        super(children);
    }

    /**
     * Double-dispatch hook: visit this sergeant first, then let the
     * superclass propagate the visitor to the child units.
     */
    @Override
    public void accept(UnitVisitor visitor) {
        visitor.visitSergeant(this);
        super.accept(visitor);
    }

    @Override
    public String toString() {
        return "sergeant";
    }
}
mit
Ori-Libhaber/che-core
ide/che-core-ide-app/src/main/java/org/eclipse/che/ide/actions/SaveAllAction.java
3624
/*******************************************************************************
 * Copyright (c) 2012-2015 Codenvy, S.A.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *   Codenvy, S.A. - initial API and implementation
 *******************************************************************************/
package org.eclipse.che.ide.actions;

import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.inject.Inject;
import com.google.inject.Singleton;

import org.eclipse.che.api.analytics.client.logger.AnalyticsEventLogger;
import org.eclipse.che.ide.Resources;
import org.eclipse.che.ide.api.action.ActionEvent;
import org.eclipse.che.ide.api.action.ProjectAction;
import org.eclipse.che.ide.api.editor.EditorAgent;
import org.eclipse.che.ide.api.editor.EditorInput;
import org.eclipse.che.ide.api.editor.EditorPartPresenter;
import org.eclipse.che.ide.api.editor.EditorWithAutoSave;
import org.eclipse.che.ide.util.loging.Log;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Action that saves every dirty editor currently open for the project.
 *
 * @author Evgen Vidolob
 */
@Singleton
public class SaveAllAction extends ProjectAction {

    private final EditorAgent editorAgent;
    private final AnalyticsEventLogger eventLogger;

    @Inject
    public SaveAllAction(EditorAgent editorAgent, Resources resources, AnalyticsEventLogger eventLogger) {
        super("Save All", "Save all changes for project", resources.save());
        this.editorAgent = editorAgent;
        this.eventLogger = eventLogger;
    }

    /** {@inheritDoc} */
    @Override
    public void actionPerformed(ActionEvent e) {
        eventLogger.log(this);
        // Snapshot the opened editors into a mutable list: save(...) removes
        // entries from it as each asynchronous save completes.
        Collection<EditorPartPresenter> values = editorAgent.getOpenedEditors().values();
        List<EditorPartPresenter> editors = new ArrayList<>(values);
        save(editors);
    }

    // Saves the head of the list, then recurses on the remainder once the
    // asynchronous save completes. A failed save is logged and skipped so the
    // remaining editors are still saved. Clean (non-dirty) editors are skipped
    // synchronously.
    private void save(final List<EditorPartPresenter> editors) {
        if (editors.isEmpty()) {
            return;
        }
        final EditorPartPresenter editorPartPresenter = editors.get(0);
        if (editorPartPresenter.isDirty()) {
            editorPartPresenter.doSave(new AsyncCallback<EditorInput>() {
                @Override
                public void onFailure(Throwable caught) {
                    Log.error(SaveAllAction.class, caught);
                    //try to save other files
                    editors.remove(editorPartPresenter);
                    save(editors);
                }

                @Override
                public void onSuccess(EditorInput result) {
                    editors.remove(editorPartPresenter);
                    save(editors);
                }
            });
        } else {
            editors.remove(editorPartPresenter);
            save(editors);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void updateProjectAction(ActionEvent e) {
        boolean hasDirtyEditor = false;
        for (EditorPartPresenter editor : editorAgent.getOpenedEditors().values()) {
            // Editors with auto-save enabled never need an explicit save.
            if (editor instanceof EditorWithAutoSave) {
                if (((EditorWithAutoSave)editor).isAutoSaveEnabled()) {
                    continue;
                }
            }
            if (editor.isDirty()) {
                hasDirtyEditor = true;
                break;
            }
        }
        // Only enable/show the action when there is something to save.
        e.getPresentation().setEnabledAndVisible(hasDirtyEditor);
    }
}
epl-1.0
royleexhFake/mayloon-portingtool
net.sourceforge.jseditor/src-jseditor/net/sourceforge/jseditor/editors/PredicateWordRule.java
3101
/* * Created on May 13, 2003 *======================================================================== * Modifications history *======================================================================== * $Log: PredicateWordRule.java,v $ * Revision 1.2 2003/05/30 20:53:09 agfitzp * 0.0.2 : Outlining is now done as the user types. Some other bug fixes. * *======================================================================== */ package net.sourceforge.jseditor.editors; import org.eclipse.jface.text.rules.ICharacterScanner; import org.eclipse.jface.text.rules.IPredicateRule; import org.eclipse.jface.text.rules.IToken; import org.eclipse.jface.text.rules.Token; import org.eclipse.jface.text.rules.WordRule; import org.eclipse.jface.text.rules.IWordDetector; /** * @author fitzpata */ public class PredicateWordRule extends WordRule implements IPredicateRule { /* (non-Javadoc) * @see org.eclipse.jface.text.rules.IPredicateRule#getSuccessToken() */ protected IToken successToken = Token.UNDEFINED; public void addWords(String[] tokens, IToken token) { for (int i = 0; i < tokens.length; i++) { addWord(tokens[i], token); } } public IToken getSuccessToken() { return successToken; } /* (non-Javadoc) * @see org.eclipse.jface.text.rules.IPredicateRule#evaluate(org.eclipse.jface.text.rules.ICharacterScanner, boolean) */ public IToken evaluate(ICharacterScanner scanner, boolean resume) { successToken = this.evaluate(scanner, resume);//true); return successToken; } /** * Creates a rule which, with the help of an word detector, will return the token * associated with the detected word. If no token has been associated, the scanner * will be rolled back and an undefined token will be returned in order to allow * any subsequent rules to analyze the characters. 
* * @param detector the word detector to be used by this rule, may not be <code>null</code> * * @see #addWord */ public PredicateWordRule(IWordDetector detector) { super(detector); } /** * Creates a rule which, with the help of an word detector, will return the token * associated with the detected word. If no token has been associated, the * specified default token will be returned. * * @param detector the word detector to be used by this rule, may not be <code>null</code> * @param defaultToken the default token to be returned on success * if nothing else is specified, may not be <code>null</code> * * @see #addWord */ public PredicateWordRule(IWordDetector detector, IToken defaultToken) { super(detector, defaultToken); } public PredicateWordRule(IWordDetector detector, String tokenString, IToken tokenType) { super(detector); this.addWord(tokenString, tokenType); } public PredicateWordRule(IWordDetector detector, String[] tokens, IToken tokenType) { super(detector); this.addWords(tokens, tokenType); } public PredicateWordRule(IWordDetector detector, IToken defaultToken, String[] tokens, IToken tokenType) { super(detector, defaultToken); this.addWords(tokens, tokenType); } }
epl-1.0
aljiru/che-core
platform-api/che-core-api-core/src/test/java/org/eclipse/che/api/core/util/ProcessUtilTest.java
3462
/*******************************************************************************
 * Copyright (c) 2012-2015 Codenvy, S.A.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *   Codenvy, S.A. - initial API and implementation
 *******************************************************************************/
package org.eclipse.che.api.core.util;

import org.testng.Assert;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/** @author andrew00x */
public class ProcessUtilTest {

    /**
     * Starts a shell process that would sleep for 10 seconds, kills it after
     * ~1 second, and verifies the pumping thread unblocks well before the
     * natural 10-second completion (threshold: 3 seconds).
     *
     * <p>NOTE(review): requires /bin/bash, so this test is POSIX-only.
     */
    @Test
    public void testKill() throws Exception {
        final Process p = Runtime.getRuntime().exec(new String[]{"/bin/bash", "-c", "sleep 10; echo wake\\ up"});
        final List<String> stdout = new ArrayList<>();
        final List<String> stderr = new ArrayList<>();
        // Single-element array lets the anonymous thread publish the exception.
        final IOException[] processError = new IOException[1];
        final CountDownLatch latch = new CountDownLatch(1);
        final long start = System.currentTimeMillis();
        new Thread() {
            public void run() {
                try {
                    // Blocks pumping the process output until it terminates.
                    ProcessUtil.process(p,
                                        new LineConsumer() {
                                            @Override
                                            public void writeLine(String line) throws IOException {
                                                stdout.add(line);
                                            }

                                            @Override
                                            public void close() throws IOException {
                                            }
                                        },
                                        new LineConsumer() {
                                            @Override
                                            public void writeLine(String line) throws IOException {
                                                stderr.add(line);
                                            }

                                            @Override
                                            public void close() throws IOException {
                                            }
                                        }
                    );
                } catch (IOException e) {
                    processError[0] = e; // throw when kill process
                } finally {
                    latch.countDown();
                }
            }
        }.start();

        Thread.sleep(1000); // give time to start process
        Assert.assertTrue(ProcessUtil.isAlive(p), "Process is not started.");

        ProcessUtil.kill(p); // kill process

        latch.await(15, TimeUnit.SECONDS); // should not stop here if process killed
        final long end = System.currentTimeMillis();

        // System process sleeps 10 seconds. It is safety to check we done in less then 3 sec.
        Assert.assertTrue((end - start) < 3000, "Fail kill process");
        System.out.println(processError[0]);
        //processError[0].printStackTrace();
        System.out.println(stdout);
        System.out.println(stderr);
    }
}
epl-1.0
rgom/Pydev
plugins/com.python.pydev.refactoring/src/com/python/pydev/refactoring/wizards/rename/visitors/FindCallVisitor.java
2648
/**
 * Copyright (c) 2005-2012 by Appcelerator, Inc. All Rights Reserved.
 * Licensed under the terms of the Eclipse Public License (EPL).
 * Please see the license.txt included with this distribution for details.
 * Any modifications to this file must keep this entire header intact.
 */
package com.python.pydev.refactoring.wizards.rename.visitors;

import java.util.Stack;

import org.python.pydev.parser.jython.SimpleNode;
import org.python.pydev.parser.jython.Visitor;
import org.python.pydev.parser.jython.ast.Call;
import org.python.pydev.parser.jython.ast.Name;
import org.python.pydev.parser.jython.ast.NameTok;

/**
 * This visitor is used to find a call given its ast.
 *
 * <p>Two search modes, depending on the constructor used:
 * <ul>
 *   <li>by {@link Name}: matches the Call whose {@code func} is that exact node;</li>
 *   <li>by {@link NameTok}: matches the innermost enclosing Call of that exact
 *       NameTok node (e.g. the name inside an attribute access).</li>
 * </ul>
 * Matching is by node identity ({@code ==}), not by equality.
 *
 * @author Fabio
 */
public class FindCallVisitor extends Visitor {

    // Exactly one of 'name' / 'nameTok' is set, depending on the constructor.
    private Name name;
    private NameTok nameTok;

    // The result; once set, traversal short-circuits.
    private Call call;

    // Stack of Call nodes currently being visited, so that when the NameTok is
    // found we can take the innermost enclosing Call (the stack top).
    private Stack<Call> lastCall = new Stack<Call>();

    public FindCallVisitor(Name name) {
        this.name = name;
    }

    public FindCallVisitor(NameTok nameTok) {
        this.nameTok = nameTok;
    }

    /** @return the matched Call, or null if not (yet) found. */
    public Call getCall() {
        return call;
    }

    @Override
    public Object visitCall(Call node) throws Exception {
        if (this.call != null) {
            return null;
        }
        if (node.func == name) {
            //check the name (direct)
            this.call = node;
        } else if (nameTok != null) {
            //check the name tok (inside of attribute)
            lastCall.push(node);
            Object r = super.visitCall(node);
            lastCall.pop();
            if (this.call != null) {
                return null;
            }
            return r;
        }
        if (this.call != null) {
            return null;
        }
        return super.visitCall(node);
    }

    @Override
    public Object visitNameTok(NameTok node) throws Exception {
        if (node == nameTok) {
            // Found the token: the enclosing Call is the current stack top.
            if (lastCall.size() > 0) {
                call = lastCall.peek();
            }
            return null;
        }
        return super.visitNameTok(node);
    }

    /** Convenience: find the innermost Call enclosing the given NameTok under root. */
    public static Call findCall(NameTok nametok, SimpleNode root) {
        FindCallVisitor visitor = new FindCallVisitor(nametok);
        try {
            visitor.traverse(root);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return visitor.call;
    }

    /** Convenience: find the Call whose func is the given Name node under root. */
    public static Call findCall(Name name, SimpleNode root) {
        FindCallVisitor visitor = new FindCallVisitor(name);
        try {
            visitor.traverse(root);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return visitor.call;
    }
}
epl-1.0
royleexhFake/mayloon-portingtool
com.intel.ide.eclipse.mpt/src/com/intel/ide/eclipse/mpt/classpath/ContactedClasses.java
4878
package com.intel.ide.eclipse.mpt.classpath;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;

import com.intel.ide.eclipse.mpt.launching.J2SCyclicProjectUtils;

/**
 * Classpath container backed by a ".z.js" (contacted/concatenated classes)
 * resource file. Parses the resource list from the file into unit classes
 * (.js), nested contacted bundles (.z.js) and CSS resources, and renders the
 * corresponding HTML script/link tags.
 */
public class ContactedClasses extends Resource implements IExternalResource, IClasspathContainer {

    private String binRelativePath;
    // Raw lists, matching the surrounding API: classList holds
    // ContactedUnitClass entries, externalList holds nested ContactedClasses
    // and CSSResource entries.
    private List classList;
    private List externalList;

    /** Parses the backing file, populating classList/externalList. */
    public void load() {
        File file = getAbsoluteFile();
        classList = new ArrayList();
        externalList = new ArrayList();
        if (file.exists()) {
            Properties props = PathUtil.loadJZ(file);
            String[] reses = PathUtil.getResources(props);
            binRelativePath = props.getProperty(PathUtil.J2S_OUTPUT_PATH);
            for (int i = 0; i < reses.length; i++) {
                if (reses[i] != null) {
                    String res = reses[i].trim();
                    if (res.endsWith(".z.js")) {
                        // Nested contacted bundle.
                        ContactedClasses jz = new ContactedClasses();
                        jz.setFolder(this.getAbsoluteFolder());
                        jz.setRelativePath(res);
                        jz.setParent(this);
                        externalList.add(jz);
                    } else if (res.endsWith(".js")) {
                        // Plain unit class (must be checked after ".z.js").
                        ContactedUnitClass unit = new ContactedUnitClass();
                        unit.setFolder(this.getAbsoluteFolder());
                        unit.setRelativePath(res);
                        unit.parseClassName();
                        unit.setParent(this);
                        classList.add(unit);
                    } else if (res.endsWith(".css")) {
                        CSSResource css = new CSSResource();
                        css.setFolder(this.getAbsoluteFolder());
                        css.setRelativePath(res);
                        css.setParent(this);
                        externalList.add(css);
                    }
                }
            }
        }
    }

    /** Serializes the children back into the properties representation. */
    public void store(Properties props) {
        Resource[] reses = getChildren();
        StringBuffer buf = new StringBuffer();
        for (int i = 0; i < reses.length; i++) {
            String str = reses[i].toResourceString();
            buf.append(str);
            if (i != reses.length - 1) {
                buf.append(",");
            }
        }
        props.setProperty(PathUtil.J2S_RESOURCES_LIST, buf.toString());
        props.setProperty(PathUtil.J2S_OUTPUT_PATH, binRelativePath);
    }

    /** Returns external resources first, then unit classes (lazy-loads). */
    public Resource[] getChildren() {
        if (externalList == null || classList == null) {
            this.load();
        }
        int size = externalList.size();
        Resource[] res = new Resource[classList.size() + size];
        for (int i = 0; i < size; i++) {
            res[i] = (Resource) externalList.get(i);
        }
        for (int i = 0; i < classList.size(); i++) {
            res[i + size] = (Resource) classList.get(i);
        }
        return res;
    }

    public ContactedUnitClass[] getClasses() {
        // BUG FIX: the previous code did toArray(new ContactedClasses[0]) and
        // then cast to ContactedUnitClass[], which can never succeed
        // (ArrayStoreException / ClassCastException at runtime). Allocate the
        // correct component type instead.
        return (ContactedUnitClass[]) classList.toArray(new ContactedUnitClass[0]);
    }

    public IExternalResource[] getExternals() {
        return (IExternalResource[]) externalList.toArray(new IExternalResource[0]);
    }

    public String getBinRelativePath() {
        return binRelativePath;
    }

    public void setBinRelativePath(String binRelativePath) {
        this.binRelativePath = binRelativePath;
    }

    /**
     * Renders the HTML tags for this bundle: nested externals first, then a
     * script tag for this bundle itself. Returns "" for .j2x resources, for
     * bundles nested more than one level deep, and for .z.js bundles whose
     * composite parent has a non-empty bin-relative path.
     */
    public String toHTMLString() {
        if (getRelativePath() != null && getRelativePath().endsWith(".j2x")) {
            return "";
        }
        Resource p = this.getParent();
        if (p != null) {
            if (p instanceof ContactedClasses) {
                Resource pp = p.getParent();
                if (pp != null && pp instanceof ContactedClasses) {
                    // Grandchild bundles are not emitted.
                    return "";
                }
            }
        }
        StringBuffer buf = new StringBuffer();
        if (externalList == null) {
            this.load();
        }
        for (Iterator iter = externalList.iterator(); iter.hasNext();) {
            Resource res = (Resource) iter.next();
            // Cycle guard: skip resources already visited in this pass.
            if (!J2SCyclicProjectUtils.visit(res)) {
                continue;
            }
            buf.append(res.toHTMLString());
        }
        buf.append("<script type=\"text/javascript\" src=\"");
        String binFolder = getBinRelativePath();
        if (binFolder != null) {
            String binPath = binFolder.trim();
            if (binPath.length() != 0) {
                buf.append(binPath);
                if (!binPath.endsWith("/")) {
                    buf.append("/");
                }
            }
        }
        if (p != null) {
            if (p instanceof ContactedClasses) {
                // Prefix with the parent bundle's folder.
                ContactedClasses cc = (ContactedClasses) p;
                String path = cc.getRelativePath();
                int idx = path.lastIndexOf('/');
                if (idx != -1) {
                    buf.append(path.substring(0, idx + 1));
                }
            } else if (p instanceof CompositeResources) {
                CompositeResources cc = (CompositeResources) p;
                String binRelative = cc.getBinRelativePath();
                if (binRelative != null) {
                    if (binRelative.length() != 0 && getRelativePath().endsWith(".z.js")) {
                        return "";
                    }
                    buf.append(binRelative);
                }
            }
        }
        buf.append(getRelativePath());
        buf.append("\"></script>\r\n");
        return buf.toString();
    }

    /** Returns the canonical path for .j2x resources, "" otherwise. */
    public String toJ2XString() {
        if (getName().endsWith(".j2x")) {
            try {
                return getAbsoluteFile().getCanonicalPath();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return "";
    }

    public int getType() {
        return CONTAINER;
    }
}
epl-1.0
AdmireTheDistance/android_libcore
ojluni/src/main/java/java/nio/HeapLongBuffer.java
6185
/*
 * Copyright (C) 2014 The Android Open Source Project
 * Copyright (c) 2000, 2008, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package java.nio;

/**
 * A read/write HeapLongBuffer: a LongBuffer backed by an on-heap long[]
 * array. Read-only views are the same class with isReadOnly set.
 */
class HeapLongBuffer extends LongBuffer {

    // For speed these fields are actually declared in X-Buffer;
    // these declarations are here as documentation
    /*
    protected final long[] hb;
    protected final int offset;
    */

    HeapLongBuffer(int cap, int lim) {            // package-private
        this(cap, lim, false);
    }

    HeapLongBuffer(int cap, int lim, boolean isReadOnly) {  // package-private
        super(-1, 0, lim, cap, new long[cap], 0);
        this.isReadOnly = isReadOnly;
    }

    HeapLongBuffer(long[] buf, int off, int len) {  // package-private
        this(buf, off, len, false);
    }

    HeapLongBuffer(long[] buf, int off, int len, boolean isReadOnly) {  // package-private
        super(-1, off, off + len, buf.length, buf, 0);
        this.isReadOnly = isReadOnly;
    }

    protected HeapLongBuffer(long[] buf,
                             int mark, int pos, int lim, int cap,
                             int off) {
        this(buf, mark, pos, lim, cap, off, false);
    }

    protected HeapLongBuffer(long[] buf,
                             int mark, int pos, int lim, int cap,
                             int off, boolean isReadOnly) {
        super(mark, pos, lim, cap, buf, off);
        this.isReadOnly = isReadOnly;
    }

    // Slice shares the backing array; window = [position, limit) of this buffer.
    public LongBuffer slice() {
        return new HeapLongBuffer(hb,
                                  -1,
                                  0,
                                  this.remaining(),
                                  this.remaining(),
                                  this.position() + offset,
                                  isReadOnly);
    }

    // Duplicate shares the backing array and copies mark/position/limit.
    public LongBuffer duplicate() {
        return new HeapLongBuffer(hb,
                                  this.markValue(),
                                  this.position(),
                                  this.limit(),
                                  this.capacity(),
                                  offset,
                                  isReadOnly);
    }

    public LongBuffer asReadOnlyBuffer() {
        return new HeapLongBuffer(hb,
                                  this.markValue(),
                                  this.position(),
                                  this.limit(),
                                  this.capacity(),
                                  offset,
                                  true);
    }

    // Translates a logical index into an index in the backing array.
    protected int ix(int i) {
        return i + offset;
    }

    public long get() {
        return hb[ix(nextGetIndex())];
    }

    public long get(int i) {
        return hb[ix(checkIndex(i))];
    }

    // Bulk get: copies into dst and advances the position.
    public LongBuffer get(long[] dst, int offset, int length) {
        checkBounds(offset, length, dst.length);
        if (length > remaining())
            throw new BufferUnderflowException();
        System.arraycopy(hb, ix(position()), dst, offset, length);
        position(position() + length);
        return this;
    }

    public boolean isDirect() {
        return false;
    }

    public boolean isReadOnly() {
        return isReadOnly;
    }

    public LongBuffer put(long x) {
        if (isReadOnly) {
            throw new ReadOnlyBufferException();
        }
        hb[ix(nextPutIndex())] = x;
        return this;
    }

    public LongBuffer put(int i, long x) {
        if (isReadOnly) {
            throw new ReadOnlyBufferException();
        }
        hb[ix(checkIndex(i))] = x;
        return this;
    }

    // Bulk put from an array: copies src and advances the position.
    public LongBuffer put(long[] src, int offset, int length) {
        if (isReadOnly) {
            throw new ReadOnlyBufferException();
        }
        checkBounds(offset, length, src.length);
        if (length > remaining())
            throw new BufferOverflowException();
        System.arraycopy(src, offset, hb, ix(position()), length);
        position(position() + length);
        return this;
    }

    // Bulk put from another buffer. Fast paths for heap-to-heap (arraycopy)
    // and direct sources (bulk get); falls back to the generic loop otherwise.
    public LongBuffer put(LongBuffer src) {
        if (isReadOnly) {
            throw new ReadOnlyBufferException();
        }
        if (src instanceof HeapLongBuffer) {
            if (src == this)
                throw new IllegalArgumentException();
            HeapLongBuffer sb = (HeapLongBuffer) src;
            int n = sb.remaining();
            if (n > remaining())
                throw new BufferOverflowException();
            System.arraycopy(sb.hb, sb.ix(sb.position()),
                             hb, ix(position()), n);
            sb.position(sb.position() + n);
            position(position() + n);
        } else if (src.isDirect()) {
            int n = src.remaining();
            if (n > remaining())
                throw new BufferOverflowException();
            src.get(hb, ix(position()), n);
            position(position() + n);
        } else {
            super.put(src);
        }
        return this;
    }

    // Moves the remaining elements to the front and prepares for writing.
    public LongBuffer compact() {
        if (isReadOnly) {
            throw new ReadOnlyBufferException();
        }
        System.arraycopy(hb, ix(position()), hb, ix(0), remaining());
        position(remaining());
        limit(capacity());
        discardMark();
        return this;
    }

    public ByteOrder order() {
        return ByteOrder.nativeOrder();
    }
}
gpl-2.0
manjeetk09/GoogleScrapper
opennlp/tools/doccat/DocumentCategorizerContextGenerator.java
1622
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package opennlp.tools.doccat;

import java.util.Collection;
import java.util.LinkedList;
import java.util.Map;

/**
 * Context generator for document categorizer: aggregates the features produced
 * by a fixed set of {@link FeatureGenerator}s for a document.
 */
class DocumentCategorizerContextGenerator {

  // Immutable after construction; made final for clarity and safety.
  private final FeatureGenerator[] mFeatureGenerators;

  DocumentCategorizerContextGenerator(FeatureGenerator... featureGenerators) {
    mFeatureGenerators = featureGenerators;
  }

  /**
   * Collects the features produced by every configured generator, in
   * generator order.
   *
   * @param text the document tokens
   * @param extraInformation additional information passed through to each generator
   * @return all extracted features
   */
  public String[] getContext(String[] text, Map<String, Object> extraInformation) {
    Collection<String> context = new LinkedList<>();

    for (FeatureGenerator featureGenerator : mFeatureGenerators) {
      Collection<String> extractedFeatures =
          featureGenerator.extractFeatures(text, extraInformation);
      context.addAll(extractedFeatures);
    }

    // toArray(new String[0]) is the idiomatic form; avoids the presized-array
    // anti-pattern and an extra size() traversal on LinkedList.
    return context.toArray(new String[0]);
  }
}
gpl-2.0
JetBrains/jdk8u_jdk
src/share/classes/javax/crypto/CipherOutputStream.java
8241
/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package javax.crypto;

import java.io.*;

/**
 * A CipherOutputStream is composed of an OutputStream and a Cipher so
 * that write() methods first process the data before writing them out
 * to the underlying OutputStream.  The cipher must be fully
 * initialized before being used by a CipherOutputStream.
 *
 * <p> For example, if the cipher is initialized for encryption, the
 * CipherOutputStream will attempt to encrypt data before writing out the
 * encrypted data.
 *
 * <p> This class adheres strictly to the semantics, especially the
 * failure semantics, of its ancestor classes
 * java.io.OutputStream and java.io.FilterOutputStream.  This class
 * has exactly those methods specified in its ancestor classes, and
 * overrides them all.  Moreover, this class catches all exceptions
 * that are not thrown by its ancestor classes.  In particular, this
 * class catches BadPaddingException and other exceptions thrown by
 * failed integrity checks during decryption. These exceptions are not
 * re-thrown, so the client will not be informed that integrity checks
 * failed. Because of this behavior, this class may not be suitable
 * for use with decryption in an authenticated mode of operation (e.g. GCM)
 * if the application requires explicit notification when authentication
 * fails. Such an application can use the Cipher API directly as an
 * alternative to using this class.
 *
 * <p> It is crucial for a programmer using this class not to use
 * methods that are not defined or overriden in this class (such as a
 * new method or constructor that is later added to one of the super
 * classes), because the design and implementation of those methods
 * are unlikely to have considered security impact with regard to
 * CipherOutputStream.
 *
 * @author  Li Gong
 * @see     java.io.OutputStream
 * @see     java.io.FilterOutputStream
 * @see     javax.crypto.Cipher
 * @see     javax.crypto.CipherInputStream
 *
 * @since 1.4
 */
public class CipherOutputStream extends FilterOutputStream {

    // the cipher engine to use to process stream data
    private Cipher cipher;

    // the underlying output stream
    private OutputStream output;

    /* the buffer holding one byte of incoming data */
    private byte[] ibuffer = new byte[1];

    // the buffer holding data ready to be written out
    private byte[] obuffer;

    // stream status
    private boolean closed = false;

    /**
     *
     * Constructs a CipherOutputStream from an OutputStream and a
     * Cipher.
     * <br>Note: if the specified output stream or cipher is
     * null, a NullPointerException may be thrown later when
     * they are used.
     *
     * @param os  the OutputStream object
     * @param c   an initialized Cipher object
     */
    public CipherOutputStream(OutputStream os, Cipher c) {
        super(os);
        output = os;
        cipher = c;
    }
    // NOTE(review): the original had a stray ';' (empty statement) after this
    // constructor body and after write(int); removed — no behavioral effect.

    /**
     * Constructs a CipherOutputStream from an OutputStream without
     * specifying a Cipher. This has the effect of constructing a
     * CipherOutputStream using a NullCipher.
     * <br>Note: if the specified output stream is null, a
     * NullPointerException may be thrown later when it is used.
     *
     * @param os  the OutputStream object
     */
    protected CipherOutputStream(OutputStream os) {
        super(os);
        output = os;
        cipher = new NullCipher();
    }

    /**
     * Writes the specified byte to this output stream.
     *
     * @param      b   the <code>byte</code>.
     * @exception  IOException  if an I/O error occurs.
     * @since      JCE1.2
     */
    public void write(int b) throws IOException {
        ibuffer[0] = (byte) b;
        obuffer = cipher.update(ibuffer, 0, 1);
        if (obuffer != null) {
            output.write(obuffer);
            obuffer = null;
        }
    }

    /**
     * Writes <code>b.length</code> bytes from the specified byte array
     * to this output stream.
     * <p>
     * The <code>write</code> method of
     * <code>CipherOutputStream</code> calls the <code>write</code>
     * method of three arguments with the three arguments
     * <code>b</code>, <code>0</code>, and <code>b.length</code>.
     *
     * @param      b   the data.
     * @exception  NullPointerException if <code>b</code> is null.
     * @exception  IOException  if an I/O error occurs.
     * @see        javax.crypto.CipherOutputStream#write(byte[], int, int)
     * @since JCE1.2
     */
    public void write(byte b[]) throws IOException {
        write(b, 0, b.length);
    }

    /**
     * Writes <code>len</code> bytes from the specified byte array
     * starting at offset <code>off</code> to this output stream.
     *
     * @param      b     the data.
     * @param      off   the start offset in the data.
     * @param      len   the number of bytes to write.
     * @exception  IOException  if an I/O error occurs.
     * @since      JCE1.2
     */
    public void write(byte b[], int off, int len) throws IOException {
        obuffer = cipher.update(b, off, len);
        if (obuffer != null) {
            output.write(obuffer);
            obuffer = null;
        }
    }

    /**
     * Flushes this output stream by forcing any buffered output bytes
     * that have already been processed by the encapsulated cipher object
     * to be written out.
     *
     * <p>Any bytes buffered by the encapsulated cipher
     * and waiting to be processed by it will not be written out. For example,
     * if the encapsulated cipher is a block cipher, and the total number of
     * bytes written using one of the <code>write</code> methods is less than
     * the cipher's block size, no bytes will be written out.
     *
     * @exception  IOException  if an I/O error occurs.
     * @since      JCE1.2
     */
    public void flush() throws IOException {
        if (obuffer != null) {
            output.write(obuffer);
            obuffer = null;
        }
        output.flush();
    }

    /**
     * Closes this output stream and releases any system resources
     * associated with this stream.
     * <p>
     * This method invokes the <code>doFinal</code> method of the encapsulated
     * cipher object, which causes any bytes buffered by the encapsulated
     * cipher to be processed. The result is written out by calling the
     * <code>flush</code> method of this output stream.
     * <p>
     * This method resets the encapsulated cipher object to its initial state
     * and calls the <code>close</code> method of the underlying output
     * stream.
     *
     * @exception  IOException  if an I/O error occurs.
     * @since      JCE1.2
     */
    public void close() throws IOException {
        if (closed) {
            return;
        }

        closed = true;
        try {
            obuffer = cipher.doFinal();
        } catch (IllegalBlockSizeException | BadPaddingException e) {
            // Deliberate swallow per the class contract documented above:
            // integrity failures are not reported through this stream.
            obuffer = null;
        }
        try {
            flush();
        } catch (IOException ignored) {}
        out.close();
    }
}
gpl-2.0
mdaniel/svn-caucho-com-resin
modules/jaxb/src/javax/xml/bind/annotation/XmlAccessType.java
1117
/*
 * Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
 *
 * This file is part of Resin(R) Open Source
 *
 * Each copy or derived work must preserve the copyright notice and this
 * notice unmodified.
 *
 * Resin Open Source is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * Resin Open Source is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
 * of NON-INFRINGEMENT.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Resin Open Source; if not, write to the
 *
 *   Free Software Foundation, Inc.
 *   59 Temple Place, Suite 330
 *   Boston, MA 02111-1307  USA
 *
 * @author Scott Ferguson
 */

package javax.xml.bind.annotation;

/**
 * Specifies which members of a JAXB-bound class are serialized by default.
 */
public enum XmlAccessType {
  /** Every non-static, non-transient field is bound. */
  FIELD,
  /** No field or property is bound unless explicitly annotated. */
  NONE,
  /** Every getter/setter pair is bound. */
  PROPERTY,
  /** Every public getter/setter pair and public field is bound. */
  PUBLIC_MEMBER;
}
gpl-2.0
hustodemon/spacewalk
java/code/src/com/redhat/rhn/taskomatic/task/RhnQueueJob.java
4855
/**
 * Copyright (c) 2009--2012 Red Hat, Inc.
 *
 * This software is licensed to you under the GNU General Public License,
 * version 2 (GPLv2). There is NO WARRANTY for this software, express or
 * implied, including the implied warranties of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
 * along with this software; if not, see
 * http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
 *
 * Red Hat trademarks are not licensed under GPLv2. No permission is
 * granted to use or replicate Red Hat trademarks that are incorporated
 * in this software or its documentation.
 */
package com.redhat.rhn.taskomatic.task;

import com.redhat.rhn.common.conf.Config;
import com.redhat.rhn.common.hibernate.HibernateFactory;
import com.redhat.rhn.taskomatic.TaskoRun;
import com.redhat.rhn.taskomatic.task.threaded.TaskQueue;
import com.redhat.rhn.taskomatic.task.threaded.TaskQueueFactory;

import org.apache.log4j.FileAppender;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

import java.io.IOException;

/**
 * Custom Quartz Job implementation which only allows one thread to
 * run at a time. All other threads return without performing any work.
 * This policy was chosen instead of blocking so as to reduce threading
 * problems inside Quartz itself.
 *
 * @version $Rev $
 *
 */
public abstract class RhnQueueJob implements RhnJob {

    // The TaskoRun row tracking this job execution's status and log files.
    private TaskoRun jobRun = null;

    /** @return the logger of the concrete job subclass */
    protected abstract Logger getLogger();

    /**
     * {@inheritDoc}
     */
    public void appendExceptionToLogError(Exception e) {
        getLogger().error(e.getMessage());
        getLogger().error(e.getCause());
    }

    // Redirects this job's logger to the run-specific stdout log file;
    // falls back to no file logging if the appender cannot be created.
    private void logToNewFile() {
        PatternLayout pattern =
            new PatternLayout(DEFAULT_LOGGING_LAYOUT);
        try {
            getLogger().removeAllAppenders();
            FileAppender appender = new FileAppender(pattern,
                jobRun.buildStdOutputLogPath());
            getLogger().addAppender(appender);
        }
        catch (IOException e) {
            getLogger().warn("Logging to file disabled");
        }
    }

    /**
     * {@inheritDoc}
     */
    public void execute(JobExecutionContext ctx, TaskoRun runIn)
        throws JobExecutionException {
        setJobRun(runIn);
        try {
            execute(ctx);
        }
        catch (Exception e) {
            // Roll back any half-done work before marking the run failed.
            if (HibernateFactory.getSession().getTransaction().isActive()) {
                HibernateFactory.rollbackTransaction();
                HibernateFactory.closeSession();
            }
            appendExceptionToLogError(e);
            jobRun.saveStatus(TaskoRun.STATUS_FAILED);
        }
        HibernateFactory.commitTransaction();
        HibernateFactory.closeSession();
    }

    /**
     * {@inheritDoc}
     */
    public void execute(JobExecutionContext ctx)
        throws JobExecutionException {
        TaskQueueFactory factory = TaskQueueFactory.get();
        String queueName = getQueueName();
        TaskQueue queue = factory.getQueue(queueName);
        if (queue == null) {
            try {
                queue = factory.createQueue(queueName, getDriverClass(), getLogger());
            }
            catch (Exception e) {
                getLogger().error(e);
                return;
            }
        }
        // Only one run may own the queue; changeRun() decides whether this
        // run takes over or is skipped as a duplicate.
        if (queue.changeRun(jobRun)) {
            jobRun.start();
            HibernateFactory.commitTransaction();
            HibernateFactory.closeSession();
            logToNewFile();
            getLogger().debug("Starting run " + jobRun.getId());
        }
        else {
            // close current run
            TaskoRun run = (TaskoRun) HibernateFactory.reload(jobRun);
            run.appendToOutputLog("Run with id " + queue.getQueueRun().getId() +
                    " handles the whole task queue.");
            run.skipped();
            HibernateFactory.commitTransaction();
            HibernateFactory.closeSession();
        }
        int defaultItems = 3;
        if (queueName.equals("channel_repodata")) {
            defaultItems = 1;
        }
        // Per-queue worker cap, configurable via taskomatic.<queue>_max_work_items.
        int maxWorkItems = Config.get().getInt("taskomatic." + queueName +
                "_max_work_items", defaultItems);
        if (queue.getQueueSize() < maxWorkItems) {
            queue.run(this);
        }
        else {
            getLogger().debug("Maximum number of workers already put ... skipping.");
        }
    }

    /**
     * @return Returns the run.
     */
    public TaskoRun getRun() {
        return jobRun;
    }

    /**
     * @param runIn The run to set.
     */
    public void setJobRun(TaskoRun runIn) {
        jobRun = runIn;
    }

    /** @return the queue-driver class used to create this job's TaskQueue */
    protected abstract Class getDriverClass();

    /** @return the name of the task queue this job feeds */
    protected abstract String getQueueName();
}
gpl-2.0
ekummerfeld/GdistanceP
tetrad-lib/src/main/java/edu/cmu/tetrad/search/mb/Mmhc.java
4266
///////////////////////////////////////////////////////////////////////////////
// For information as to what this class does, see the Javadoc, below.       //
// Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,       //
// 2007, 2008, 2009, 2010, 2014, 2015 by Peter Spirtes, Richard Scheines,    //
// Joseph Ramsey, and Clark Glymour.                                         //
//                                                                           //
// This program is free software; you can redistribute it and/or modify      //
// it under the terms of the GNU General Public License as published by      //
// the Free Software Foundation; either version 2 of the License, or         //
// (at your option) any later version.                                       //
//                                                                           //
// This program is distributed in the hope that it will be useful,           //
// but WITHOUT ANY WARRANTY; without even the implied warranty of            //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the             //
// GNU General Public License for more details.                              //
//                                                                           //
// You should have received a copy of the GNU General Public License         //
// along with this program; if not, write to the Free Software               //
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA   //
///////////////////////////////////////////////////////////////////////////////

package edu.cmu.tetrad.search.mb;

import edu.cmu.tetrad.data.DataSet;
import edu.cmu.tetrad.data.IKnowledge;
import edu.cmu.tetrad.data.Knowledge2;
import edu.cmu.tetrad.graph.EdgeListGraph;
import edu.cmu.tetrad.graph.Graph;
import edu.cmu.tetrad.graph.Node;
import edu.cmu.tetrad.search.FgesOrienter;
import edu.cmu.tetrad.search.GraphSearch;
import edu.cmu.tetrad.search.IndependenceTest;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Implements the MMHC (Max-Min Hill Climbing) hybrid algorithm: the
 * parents/children set of each variable is estimated with MMMB, the union of
 * those sets forms an undirected skeleton, and the edges are then oriented
 * with a score-based orienter over the supplied data set.
 *
 * @author Joseph Ramsey (this version).
 */
public class Mmhc implements GraphSearch {

    /**
     * The independence test used for the PC search.
     */
    private IndependenceTest independenceTest;

    /**
     * The maximum number of nodes conditioned on in the search; -1 means
     * unlimited. (Always assigned by the constructor.)
     */
    private int depth;

    /** The data set used by the final score-based orientation step. */
    private DataSet data;

    /** Background knowledge; stored but not consulted by this search. */
    private IKnowledge knowledge = new Knowledge2();

    /** Wall-clock duration of the most recent {@link #search()}, in ms. */
    private long elapsedTime = 0;

    //=============================CONSTRUCTORS==========================//

    public Mmhc(IndependenceTest test, DataSet dataSet) {
        this.depth = -1;
        this.independenceTest = test;
        this.data = dataSet;
    }

    //==============================PUBLIC METHODS========================//

    public IndependenceTest getIndependenceTest() {
        return independenceTest;
    }

    public int getDepth() {
        return depth;
    }

    /**
     * @return the elapsed wall-clock time of the most recent search, in
     * milliseconds, or 0 if no search has been run yet. (Previously this
     * method always returned 0.)
     */
    public long getElapsedTime() {
        return elapsedTime;
    }

    /**
     * Runs PC starting with a fully connected graph over all of the variables
     * in the domain of the independence test.
     */
    public Graph search() {
        long start = System.currentTimeMillis();

        List<Node> variables = independenceTest.getVariables();
        Mmmb mmmb = new Mmmb(independenceTest, getDepth(), true);
        Map<Node, List<Node>> pc = new HashMap<>();

        // Estimate the parents/children set of every variable.
        for (Node x : variables) {
            pc.put(x, mmmb.getPc(x));
        }

        Graph graph = new EdgeListGraph();

        for (Node x : variables) {
            graph.addNode(x);
        }

        // The union of the PC sets forms the undirected skeleton.
        for (Node x : variables) {
            for (Node y : pc.get(x)) {
                if (!graph.isAdjacentTo(x, y)) {
                    graph.addUndirectedEdge(x, y);
                }
            }
        }

        // Orient the skeleton with a score-based hill climb over the data.
        FgesOrienter orienter = new FgesOrienter(data);
        orienter.orient(graph);

        this.elapsedTime = System.currentTimeMillis() - start;

        return graph;
    }

    public IKnowledge getKnowledge() {
        return knowledge;
    }

    public void setKnowledge(IKnowledge knowledge) {
        if (knowledge == null) {
            throw new NullPointerException();
        }

        this.knowledge = knowledge;
    }

    public void setDepth(int depth) {
        this.depth = depth;
    }
}
gpl-2.0
rex-xxx/mt6572_x201
external/apache-harmony/security/src/test/api/java/org/apache/harmony/security/tests/java/security/cert/CertificateExpiredExceptionTest.java
3216
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @author Vera Y. Petrashkova
 */

package org.apache.harmony.security.tests.java.security.cert;

import java.security.cert.CertificateExpiredException;

import junit.framework.TestCase;

/**
 * Tests for <code>CertificateExpiredException</code> class constructors and
 * methods.
 */
public class CertificateExpiredExceptionTest extends TestCase {

    public static void main(String[] args) {
    }

    /**
     * Constructor for CertificateExpiredExceptionTests.
     *
     * @param arg0 the test name
     */
    public CertificateExpiredExceptionTest(String arg0) {
        super(arg0);
    }

    static String[] msgs = {
            "",
            "Check new message",
            "Check new message Check new message Check new message Check new message Check new message" };

    static Throwable tCause = new Throwable("Throwable for exception");

    /**
     * Test for <code>CertificateExpiredException()</code> constructor.
     * Asserts that the no-arg constructor leaves both the detail message and
     * the cause unset.
     */
    public void testCertificateExpiredException01() {
        CertificateExpiredException exception = new CertificateExpiredException();
        assertNull("getMessage() must return null.", exception.getMessage());
        assertNull("getCause() must return null", exception.getCause());
    }

    /**
     * Test for <code>CertificateExpiredException(String)</code> constructor.
     * Asserts that each non-null detail message is stored verbatim while the
     * cause stays unset.
     */
    public void testCertificateExpiredException02() {
        for (String message : msgs) {
            CertificateExpiredException exception =
                    new CertificateExpiredException(message);
            assertEquals("getMessage() must return: ".concat(message),
                    exception.getMessage(), message);
            assertNull("getCause() must return null", exception.getCause());
        }
    }

    /**
     * Test for <code>CertificateExpiredException(String)</code> constructor.
     * Asserts that a null detail message is accepted and reported back as
     * null.
     */
    public void testCertificateExpiredException03() {
        String msg = null;
        CertificateExpiredException exception = new CertificateExpiredException(msg);
        assertNull("getMessage() must return null.", exception.getMessage());
        assertNull("getCause() must return null", exception.getCause());
    }
}
gpl-2.0
WelcomeHUME/svn-caucho-com-resin
modules/resin/src/com/caucho/ejb/cfg/EjbJar.java
5125
/*
 * Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
 *
 * This file is part of Resin(R) Open Source
 *
 * Each copy or derived work must preserve the copyright notice and this
 * notice unmodified.
 *
 * Resin Open Source is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * Resin Open Source is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
 * of NON-INFRINGEMENT.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Resin Open Source; if not, write to the
 *   Free Software Foundation, Inc.
 *   59 Temple Place, Suite 330
 *   Boston, MA 02111-1307  USA
 *
 * @author Scott Ferguson
 */

package com.caucho.ejb.cfg;

import com.caucho.config.ConfigException;
import com.caucho.config.types.DescriptionGroupConfig;
import com.caucho.config.types.Signature;
import com.caucho.util.L10N;
import com.caucho.vfs.Path;

import java.util.*;

import javax.annotation.PostConstruct;

/**
 * Configuration for an ejb bean.
 *
 * Models the top-level &lt;ejb-jar&gt; element of an ejb-jar.xml deployment
 * descriptor. The create*/set* methods below are invoked reflectively by
 * Resin's configuration binding while the descriptor is parsed.
 */
public class EjbJar extends DescriptionGroupConfig {
  private static final L10N L = new L10N(EjbJar.class);

  // Owning EJB configuration, shared with every child config object.
  private final EjbConfig _config;

  // Name of the EJB module this descriptor belongs to.
  private String _ejbModuleName;
  // Root path of the module; only used for toString() diagnostics here.
  private Path _rootPath;

  // True if annotations should be ignored (metadata-complete="true").
  private boolean _isMetadataComplete;
  // True if this descriptor should be parsed but not applied.
  private boolean _isSkip;

  public EjbJar(EjbConfig config, String ejbModuleName, Path rootPath)
  {
    _config = config;
    _ejbModuleName = ejbModuleName;
    _rootPath = rootPath;
  }

  /**
   * Returns the name of the EJB module this descriptor configures.
   */
  public String getModuleName()
  {
    return _ejbModuleName;
  }

  /**
   * Sets the module name from the descriptor's &lt;module-name&gt; element.
   */
  public void setModuleName(String moduleName)
  {
    _ejbModuleName = moduleName;
  }

  /**
   * Accepts the descriptor's version attribute; intentionally ignored.
   */
  public void setVersion(String version)
  {
  }

  /**
   * Accepts the xsi:schemaLocation attribute; intentionally ignored.
   */
  public void setSchemaLocation(String value)
  {
  }

  /**
   * Marks this descriptor to be skipped during deployment.
   */
  public void setSkip(boolean isSkip)
  {
    _isSkip = isSkip;
  }

  /**
   * Returns true if this descriptor should be skipped.
   */
  public boolean isSkip()
  {
    return _isSkip;
  }

  /**
   * Sets the metadata-complete flag (annotations ignored when true).
   */
  public void setMetadataComplete(boolean isMetadataComplete)
  {
    _isMetadataComplete = isMetadataComplete;
  }

  /**
   * Returns the metadata-complete flag.
   */
  public boolean isMetadataComplete()
  {
    return _isMetadataComplete;
  }

  /**
   * Creates the config object for the &lt;enterprise-beans&gt; element.
   */
  public EjbEnterpriseBeans createEnterpriseBeans()
    throws ConfigException
  {
    return new EjbEnterpriseBeans(_config, this, _ejbModuleName);
  }

  /**
   * Creates the config object for the &lt;interceptors&gt; element.
   */
  public InterceptorsConfig createInterceptors()
    throws ConfigException
  {
    return new InterceptorsConfig(_config);
  }

  /**
   * Creates the config object for the &lt;relationships&gt; element.
   */
  public Relationships createRelationships()
    throws ConfigException
  {
    return new Relationships(_config);
  }

  /**
   * Creates the config object for the &lt;assembly-descriptor&gt; element.
   */
  public AssemblyDescriptor createAssemblyDescriptor()
    throws ConfigException
  {
    return new AssemblyDescriptor(this, _config);
  }

  /**
   * Accepts a query function; intentionally a no-op at this level.
   */
  public void addQueryFunction(QueryFunction fun)
  {
  }

  /**
   * Accepts a boolean-literal setting; intentionally a no-op at this level.
   */
  public void setBooleanLiteral(BooleanLiteral literal)
  {
  }

  /**
   * Creates the config object for a &lt;container-transaction&gt; element.
   */
  ContainerTransaction createContainerTransaction()
  {
    return new ContainerTransaction(this, _config);
  }

  /**
   * Creates the config object for a &lt;method-permission&gt; element.
   */
  MethodPermission createMethodPermission()
  {
    return new MethodPermission(_config);
  }

  public String toString()
  {
    return getClass().getSimpleName() + "[" + _rootPath.getFullPath() + "]";
  }

  /**
   * Configuration for a &lt;method-permission&gt; element: binds a set of
   * security roles to a bean method pattern. Applied in {@link #init()}
   * after the descriptor has been fully parsed.
   */
  public class MethodPermission {
    EjbConfig _config;
    MethodSignature _method;
    ArrayList<String> _roles;

    MethodPermission(EjbConfig config)
    {
      _config = config;
    }

    /**
     * Accepts the description text; intentionally ignored.
     */
    public void setDescription(String description)
    {
    }

    /**
     * Accepts the &lt;unchecked&gt; flag; intentionally ignored.
     */
    public void setUnchecked(boolean unchecked)
    {
    }

    /**
     * Adds a role name; the list is created lazily on first use.
     */
    public void setRoleName(String roleName)
    {
      if (_roles == null)
        _roles = new ArrayList<String>();

      _roles.add(roleName);
    }

    /**
     * Sets the method signature the permission applies to.
     */
    public void setMethod(MethodSignature method)
    {
      _method = method;
    }

    /**
     * Called by the config machinery once all child elements are set;
     * resolves the target bean and attaches the roles to its method pattern.
     */
    @PostConstruct
    public void init()
      throws ConfigException
    {
      // A skipped descriptor must not touch the bean configuration.
      if (isSkip())
        return;

      EjbBean bean = _config.getBeanConfig(_method.getEJBName());

      if (bean == null)
        throw new ConfigException(L.l("'{0}' is an unknown bean.",
                                      _method.getEJBName()));

      EjbMethodPattern method = bean.createMethod(_method);

      if (_roles != null)
        method.setRoles(_roles);
    }
  }

  /**
   * Configuration for an EJB-QL query function: pairs a function signature
   * with its SQL body, bound together in {@link #init()}.
   */
  public static class QueryFunction {
    FunctionSignature _sig;
    String _sql;

    /**
     * Sets the function signature parsed from the descriptor.
     */
    public void setSignature(Signature sig)
      throws ConfigException
    {
      _sig = new FunctionSignature(sig.getSignature());
    }

    public FunctionSignature getSignature()
    {
      return _sig;
    }

    /**
     * Sets the SQL body of the function.
     */
    public void setSQL(String sql)
      throws ConfigException
    {
      _sql = sql;
    }

    public String getSQL()
    {
      return _sql;
    }

    /**
     * Attaches the SQL to the signature once both have been configured.
     */
    @PostConstruct
    public void init()
    {
      _sig.setSQL(_sql);
    }
  }

  /**
   * Placeholder config object for the &lt;relationships&gt; element.
   */
  public static class Relationships {
    EjbConfig _config;

    Relationships(EjbConfig config)
    {
      _config = config;
    }
  }
}
gpl-2.0
shygoo/project64
Android/app/src/main/java/emu/project64/input/TouchController.java
20746
package emu.project64.input;

import java.util.Set;

import emu.project64.AndroidDevice;
import emu.project64.input.map.TouchMap;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Point;
import android.os.Vibrator;
import android.util.SparseIntArray;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;

/**
 * A class for generating N64 controller commands from a touchscreen.
 *
 * Tracks per-pointer touch state, maps touch coordinates to N64 buttons and
 * the analog stick through a {@link TouchMap}, and supports two "auto-hold"
 * gestures (long-press and slide-off) for latching buttons down.
 */
public class TouchController extends AbstractController implements OnTouchListener
{
    /** Callback interface notified of analog-stick and auto-hold changes. */
    public interface OnStateChangedListener
    {
        /**
         * Called after the analog stick values have changed.
         *
         * @param axisFractionX The x-axis fraction, between -1 and 1, inclusive.
         * @param axisFractionY The y-axis fraction, between -1 and 1, inclusive.
         */
        public void onAnalogChanged( float axisFractionX, float axisFractionY );

        /**
         * Called after auto-hold button state changed.
         *
         * @param pressed The auto-hold state.
         * @param index The index of the auto-hold mask.
         */
        public void onAutoHold( boolean pressed, int index );
    }

    /** Auto-hold disabled entirely. */
    public static final int AUTOHOLD_METHOD_DISABLED = 0;
    /** Auto-hold engaged by long-pressing a button. */
    public static final int AUTOHOLD_METHOD_LONGPRESS = 1;
    /** Auto-hold engaged by sliding off a button before lifting. */
    public static final int AUTOHOLD_METHOD_SLIDEOUT = 2;

    /** The number of milliseconds to wait before auto-holding (long-press method). */
    private static final int AUTOHOLD_LONGPRESS_TIME = 1000;

    /** The pattern vibration when auto-hold is engaged. */
    private static final long[] AUTOHOLD_VIBRATE_PATTERN = { 0, 50, 50, 50 };

    /** The number of milliseconds of vibration when pressing a key. */
    private static final int FEEDBACK_VIBRATE_TIME = 50;

    /** The maximum number of pointers to query. */
    // NOTE(review): per-pointer arrays below are indexed by pointer id with no
    // bounds check; assumes Android pointer ids stay below 256 — confirm.
    private static final int MAX_POINTER_IDS = 256;

    /** The state change listener. */
    private final OnStateChangedListener mListener;

    /** The map from screen coordinates to N64 controls. */
    private final TouchMap mTouchMap;

    /** The map from pointer ids to N64 controls. */
    private final SparseIntArray mPointerMap = new SparseIntArray();

    /** The method used for auto-holding buttons. */
    private final int mAutoHoldMethod;

    /** The set of auto-holdable buttons. */
    private final Set<Integer> mAutoHoldables;

    /** Whether touchscreen feedback is enabled. */
    private final boolean mTouchscreenFeedback;

    /** The touch state of each pointer. True indicates down, false indicates up. */
    private final boolean[] mTouchState = new boolean[MAX_POINTER_IDS];

    /** The x-coordinate of each pointer, between 0 and (screenwidth-1), inclusive. */
    private final int[] mPointerX = new int[MAX_POINTER_IDS];

    /** The y-coordinate of each pointer, between 0 and (screenheight-1), inclusive. */
    private final int[] mPointerY = new int[MAX_POINTER_IDS];

    /** The pressed start time of each pointer. */
    private final long[] mStartTime = new long[MAX_POINTER_IDS];

    /** The time between press and release of each pointer. */
    private final long[] mElapsedTime = new long[MAX_POINTER_IDS];

    /**
     * The identifier of the pointer associated with the analog stick. -1 indicates the stick has
     * been released.
     */
    private int mAnalogPid = -1;

    /** The touch event source to listen to, or 0 to listen to all sources. */
    private int mSourceFilter = 0;

    /** The haptic feedback device, or null if vibration is unavailable. */
    private Vibrator mVibrator = null;

    /**
     * Instantiates a new touch controller.
     *
     * @param touchMap The map from touch coordinates to N64 controls.
     * @param view The view receiving touch event data.
     * @param listener The listener for controller state changes.
     * @param vibrator The haptic feedback device. MUST BE NULL if vibrate permission not granted.
     * @param autoHoldMethod The method for auto-holding buttons.
     * @param touchscreenFeedback True if haptic feedback should be used.
     * @param autoHoldableButtons The N64 commands that correspond to auto-holdable buttons.
     */
    public TouchController( TouchMap touchMap, View view, OnStateChangedListener listener,
            Vibrator vibrator, int autoHoldMethod, boolean touchscreenFeedback,
            Set<Integer> autoHoldableButtons )
    {
        mListener = listener;
        mTouchMap = touchMap;
        mVibrator = vibrator;
        mAutoHoldMethod = autoHoldMethod;
        mTouchscreenFeedback = touchscreenFeedback;
        mAutoHoldables = autoHoldableButtons;

        // Register this controller as the view's touch handler.
        view.setOnTouchListener( this );
    }

    /**
     * Sets the touch event source filter.
     *
     * @param source The source to listen to, or 0 to listen to all sources.
     */
    public void setSourceFilter( int source )
    {
        mSourceFilter = source;
    }

    /*
     * (non-Javadoc)
     *
     * @see android.view.View.OnTouchListener#onTouch(android.view.View, android.view.MotionEvent)
     */
    @SuppressLint( "ClickableViewAccessibility" )
    @Override
    @TargetApi( 9 )
    public boolean onTouch( View view, MotionEvent event )
    {
        // Filter by source, if applicable
        int source = AndroidDevice.IS_GINGERBREAD ? event.getSource() : 0;
        if( mSourceFilter != 0 && mSourceFilter != source )
            return false;

        int action = event.getAction();
        int actionCode = action & MotionEvent.ACTION_MASK;

        int pid = -1;

        switch( actionCode )
        {
            case MotionEvent.ACTION_POINTER_DOWN:
                // A non-primary touch has been made
                // NOTE(review): shifting the raw action assumes no flag bits
                // above the pointer-index byte; getActionIndex() would be the
                // documented equivalent — confirm.
                pid = event.getPointerId( action >> MotionEvent.ACTION_POINTER_INDEX_SHIFT );
                mStartTime[pid] = System.currentTimeMillis();
                mTouchState[pid] = true;
                break;
            case MotionEvent.ACTION_POINTER_UP:
                // A non-primary touch has been released
                pid = event.getPointerId( action >> MotionEvent.ACTION_POINTER_INDEX_SHIFT );
                mElapsedTime[pid] = System.currentTimeMillis() - mStartTime[pid];
                mTouchState[pid] = false;
                break;
            case MotionEvent.ACTION_DOWN:
                // A touch gesture has started (e.g. analog stick movement)
                for( int i = 0; i < event.getPointerCount(); i++ )
                {
                    pid = event.getPointerId( i );
                    mStartTime[pid] = System.currentTimeMillis();
                    mTouchState[pid] = true;
                }
                break;
            case MotionEvent.ACTION_UP:
            case MotionEvent.ACTION_CANCEL:
                // A touch gesture has ended or canceled (e.g. analog stick movement)
                for( int i = 0; i < event.getPointerCount(); i++ )
                {
                    pid = event.getPointerId( i );
                    mElapsedTime[pid] = System.currentTimeMillis() - mStartTime[pid];
                    mTouchState[pid] = false;
                }
                break;
            default:
                break;
        }

        // Update the coordinates of down pointers and record max PID for speed
        int maxPid = -1;
        for( int i = 0; i < event.getPointerCount(); i++ )
        {
            pid = event.getPointerId( i );
            if( pid > maxPid )
                maxPid = pid;
            if( mTouchState[pid] )
            {
                mPointerX[pid] = (int) event.getX( i );
                mPointerY[pid] = (int) event.getY( i );
            }
        }

        // Process each touch
        processTouches( mTouchState, mPointerX, mPointerY, mElapsedTime, maxPid );

        return true;
    }

    /**
     * Sets the N64 controller state based on where the screen is (multi-) touched. Values outside
     * the ranges listed below are safe.
     *
     * @param touchstate The touch state of each pointer. True indicates down, false indicates up.
     * @param pointerX The x-coordinate of each pointer, between 0 and (screenwidth-1), inclusive.
     * @param pointerY The y-coordinate of each pointer, between 0 and (screenheight-1), inclusive.
     * @param maxPid Maximum ID of the pointers that have changed (speed optimization).
     */
    private void processTouches( boolean[] touchstate, int[] pointerX, int[] pointerY,
            long[] elapsedTime, int maxPid )
    {
        boolean analogMoved = false;

        // Process each pointer in sequence
        for( int pid = 0; pid <= maxPid; pid++ )
        {
            // Release analog if its pointer is not touching the screen
            if( pid == mAnalogPid && !touchstate[pid] )
            {
                analogMoved = true;
                mAnalogPid = -1;
                mState.axisFractionX = 0;
                mState.axisFractionY = 0;
            }

            // Process button inputs
            if( pid != mAnalogPid )
                processButtonTouch( touchstate[pid], pointerX[pid], pointerY[pid],
                        elapsedTime[pid], pid );

            // Process analog inputs
            if( touchstate[pid] && processAnalogTouch( pid, pointerX[pid], pointerY[pid] ) )
                analogMoved = true;
        }

        // Call the super method to send the input to the core
        notifyChanged();

        // Update the skin if the virtual analog stick moved
        if( analogMoved && mListener != null )
            mListener.onAnalogChanged( mState.axisFractionX, mState.axisFractionY );
    }

    /**
     * Process a touch as if intended for a button. Values outside the ranges listed below are safe.
     *
     * @param touched Whether the button is pressed or not.
     * @param xLocation The x-coordinate of the touch, between 0 and (screenwidth-1), inclusive.
     * @param yLocation The y-coordinate of the touch, between 0 and (screenheight-1), inclusive.
     * @param pid The identifier of the touch pointer.
     */
    private void processButtonTouch( boolean touched, int xLocation, int yLocation,
            long timeElapsed, int pid )
    {
        // Determine the index of the button that was pressed
        int index = touched
                ? mTouchMap.getButtonPress( xLocation, yLocation )
                : mPointerMap.get( pid, TouchMap.UNMAPPED );

        // Update the pointer map
        if( !touched )
        {
            // Finger lifted off screen, forget what this pointer was touching
            mPointerMap.delete( pid );
        }
        else
        {
            // Determine where the finger came from if is was slid
            int prevIndex = mPointerMap.get( pid, TouchMap.UNMAPPED );

            // Finger touched somewhere on screen, remember what this pointer is touching
            mPointerMap.put( pid, index );

            if( prevIndex != index )
            {
                // Finger slid from somewhere else, act accordingly
                // There are three possibilities:
                // - old button --> new button
                // - nothing --> new button
                // - old button --> nothing

                // Reset this pointer's start time
                mStartTime[pid] = System.currentTimeMillis();

                if( prevIndex != TouchMap.UNMAPPED )
                {
                    // Slid off a valid button
                    if( !isAutoHoldable( prevIndex ) || mAutoHoldMethod == AUTOHOLD_METHOD_DISABLED )
                    {
                        // Slid off a non-auto-hold button
                        setTouchState( prevIndex, false );
                    }
                    else
                    {
                        // Slid off an auto-hold button
                        switch( mAutoHoldMethod )
                        {
                            case AUTOHOLD_METHOD_LONGPRESS:
                                // Using long-press method, release auto-hold button
                                if( mListener != null )
                                    mListener.onAutoHold( false, prevIndex );
                                setTouchState( prevIndex, false );
                                break;

                            case AUTOHOLD_METHOD_SLIDEOUT:
                                // Using slide-off method, engage auto-hold button
                                if( mVibrator != null )
                                {
                                    mVibrator.cancel();
                                    mVibrator.vibrate( AUTOHOLD_VIBRATE_PATTERN, -1 );
                                }
                                if( mListener != null )
                                    mListener.onAutoHold( true, prevIndex );
                                setTouchState( prevIndex, true );
                                break;
                        }
                    }
                }
            }
        }

        if( index != TouchMap.UNMAPPED )
        {
            // Finger is on a valid button

            // Provide simple vibration feedback for any valid button when first touched
            if( touched && mTouchscreenFeedback && mVibrator != null )
            {
                boolean firstTouched;
                if( index < NUM_N64_BUTTONS )
                {
                    // Single button pressed
                    firstTouched = !mState.buttons[index];
                }
                else
                {
                    // Two d-pad buttons pressed simultaneously
                    switch( index )
                    {
                        case TouchMap.DPD_RU:
                            firstTouched = !( mState.buttons[DPD_R] && mState.buttons[DPD_U] );
                            break;
                        case TouchMap.DPD_RD:
                            firstTouched = !( mState.buttons[DPD_R] && mState.buttons[DPD_D] );
                            break;
                        case TouchMap.DPD_LD:
                            firstTouched = !( mState.buttons[DPD_L] && mState.buttons[DPD_D] );
                            break;
                        case TouchMap.DPD_LU:
                            firstTouched = !( mState.buttons[DPD_L] && mState.buttons[DPD_U] );
                            break;
                        default:
                            firstTouched = false;
                            break;
                    }
                }

                if( firstTouched )
                {
                    mVibrator.cancel();
                    mVibrator.vibrate( FEEDBACK_VIBRATE_TIME );
                }
            }

            // Set the controller state accordingly
            if( touched || !isAutoHoldable( index ) || mAutoHoldMethod == AUTOHOLD_METHOD_DISABLED )
            {
                // Finger just touched a button (any kind) OR
                // Finger just lifted off non-auto-holdable button
                setTouchState( index, touched );
                // Do not provide auto-hold feedback yet
            }
            else
            {
                // Finger just lifted off an auto-holdable button
                switch( mAutoHoldMethod )
                {
                    case AUTOHOLD_METHOD_SLIDEOUT:
                        // Release auto-hold button if using slide-off method
                        if( mListener != null )
                            mListener.onAutoHold( false, index );
                        setTouchState( index, false );
                        break;

                    case AUTOHOLD_METHOD_LONGPRESS:
                        if( timeElapsed < AUTOHOLD_LONGPRESS_TIME )
                        {
                            // Release auto-hold if short-pressed
                            if( mListener != null )
                                mListener.onAutoHold( false, index );
                            setTouchState( index, false );
                        }
                        else
                        {
                            // Engage auto-hold if long-pressed
                            if( mVibrator != null )
                            {
                                mVibrator.cancel();
                                mVibrator.vibrate( AUTOHOLD_VIBRATE_PATTERN, -1 );
                            }
                            if( mListener != null )
                                mListener.onAutoHold( true, index );
                            setTouchState( index, true );
                        }
                        break;
                }
            }
        }
    }

    /**
     * Checks if the button mapped to an N64 command is auto-holdable.
     *
     * @param commandIndex The index to the N64 command.
     *
     * @return True if the button mapped to the command is auto-holdable.
     */
    private boolean isAutoHoldable( int commandIndex )
    {
        return mAutoHoldables != null && mAutoHoldables.contains( commandIndex );
    }

    /**
     * Sets the state of a button, and handles the D-Pad diagonals.
     *
     * @param index Which button is affected.
     * @param touched Whether the button is pressed or not.
     */
    private void setTouchState( int index, boolean touched )
    {
        // Set the button state
        if( index < AbstractController.NUM_N64_BUTTONS )
        {
            // A single button was pressed
            mState.buttons[index] = touched;
        }
        else
        {
            // Two d-pad buttons pressed simultaneously
            switch( index )
            {
                case TouchMap.DPD_RU:
                    mState.buttons[DPD_R] = touched;
                    mState.buttons[DPD_U] = touched;
                    break;
                case TouchMap.DPD_RD:
                    mState.buttons[DPD_R] = touched;
                    mState.buttons[DPD_D] = touched;
                    break;
                case TouchMap.DPD_LD:
                    mState.buttons[DPD_L] = touched;
                    mState.buttons[DPD_D] = touched;
                    break;
                case TouchMap.DPD_LU:
                    mState.buttons[DPD_L] = touched;
                    mState.buttons[DPD_U] = touched;
                    break;
                default:
                    break;
            }
        }
    }

    /**
     * Process a touch as if intended for the analog stick. Values outside the ranges listed below
     * are safe.
     *
     * @param pointerId The pointer identifier.
     * @param xLocation The x-coordinate of the touch, between 0 and (screenwidth-1), inclusive.
     * @param yLocation The y-coordinate of the touch, between 0 and (screenheight-1), inclusive.
     *
     * @return True, if the analog state changed.
     */
    private boolean processAnalogTouch( int pointerId, int xLocation, int yLocation )
    {
        // Get the cartesian displacement of the analog stick
        Point point = mTouchMap.getAnalogDisplacement( xLocation, yLocation );

        // Compute the pythagorean displacement of the stick
        int dX = point.x;
        int dY = point.y;
        float displacement = (float) Math.sqrt( ( dX * dX ) + ( dY * dY ) );

        // "Capture" the analog control
        if( mTouchMap.isInCaptureRange( displacement ) )
            mAnalogPid = pointerId;

        if( pointerId == mAnalogPid )
        {
            // User is controlling the analog stick

            // Limit range of motion to an octagon (like the real N64 controller)
            point = mTouchMap.getConstrainedDisplacement( dX, dY );
            dX = point.x;
            dY = point.y;
            displacement = (float) Math.sqrt( ( dX * dX ) + ( dY * dY ) );

            // Fraction of full-throttle, between 0 and 1, inclusive
            float p = mTouchMap.getAnalogStrength( displacement );

            // Store the axis values in the super fields (screen y is inverted)
            mState.axisFractionX = p * dX / displacement;
            mState.axisFractionY = -p * dY / displacement;

            // Analog state changed
            return true;
        }

        // Analog state did not change
        return false;
    }
}
gpl-2.0
faarwa/EngSocP5
zxing/core/test/src/com/google/zxing/aztec/AztecBlackBox1TestCase.java
1076
/*
 * Copyright 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.zxing.aztec;

import com.google.zxing.BarcodeFormat;
import com.google.zxing.common.AbstractBlackBoxTestCase;

/**
 * Black-box decoding test over the "aztec-1" sample image set, exercising the
 * {@link AztecReader} at four image rotations.
 *
 * @author David Olivier
 */
public final class AztecBlackBox1TestCase extends AbstractBlackBoxTestCase {

  public AztecBlackBox1TestCase() {
    super("test/data/blackbox/aztec-1", new AztecReader(), BarcodeFormat.AZTEC);
    // One expectation per rotation (0/90/180/270 degrees).
    // NOTE(review): presumably the two 12s are the must-pass counts for the
    // normal and try-harder decode modes — confirm against
    // AbstractBlackBoxTestCase.addTest.
    addTest(12, 12, 0.0f);
    addTest(12, 12, 90.0f);
    addTest(12, 12, 180.0f);
    addTest(12, 12, 270.0f);
  }

}
gpl-3.0
tr4656/Hungry
src/org/apache/commons/io/filefilter/PrefixFileFilter.java
7155
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.io.filefilter;

import java.io.File;
import java.io.Serializable;
import java.util.List;

import org.apache.commons.io.IOCase;

/**
 * Filters filenames for a certain prefix.
 * <p>
 * For example, to print all files and directories in the
 * current directory whose name starts with <code>Test</code>:
 *
 * <pre>
 * File dir = new File(".");
 * String[] files = dir.list( new PrefixFileFilter("Test") );
 * for ( int i = 0; i &lt; files.length; i++ ) {
 *     System.out.println(files[i]);
 * }
 * </pre>
 *
 * @since Commons IO 1.0
 * @version $Revision: 1005099 $ $Date: 2010-10-06 17:13:01 +0100 (Wed, 06 Oct 2010) $
 *
 * @author Stephen Colebourne
 * @author Federico Barbieri
 * @author Serge Knystautas
 * @author Peter Donald
 * @see FileFilterUtils#prefixFileFilter(String)
 * @see FileFilterUtils#prefixFileFilter(String, IOCase)
 */
public class PrefixFileFilter extends AbstractFileFilter implements Serializable {

    /** The filename prefixes to search for; never null once constructed. */
    private final String[] prefixes;

    /** Whether the comparison is case sensitive. */
    private final IOCase caseSensitivity;

    /**
     * Constructs a new Prefix file filter for a single prefix.
     *
     * @param prefix  the prefix to allow, must not be null
     * @throws IllegalArgumentException if the prefix is null
     */
    public PrefixFileFilter(String prefix) {
        this(prefix, IOCase.SENSITIVE);
    }

    /**
     * Constructs a new Prefix file filter for a single prefix
     * specifying case-sensitivity.
     *
     * @param prefix  the prefix to allow, must not be null
     * @param caseSensitivity  how to handle case sensitivity, null means case-sensitive
     * @throws IllegalArgumentException if the prefix is null
     * @since Commons IO 1.4
     */
    public PrefixFileFilter(String prefix, IOCase caseSensitivity) {
        if (prefix == null) {
            throw new IllegalArgumentException("The prefix must not be null");
        }
        this.prefixes = new String[] {prefix};
        this.caseSensitivity = (caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity);
    }

    /**
     * Constructs a new Prefix file filter for any of an array of prefixes.
     * <p>
     * The array is not cloned, so could be changed after constructing the
     * instance. This would be inadvisable however.
     *
     * @param prefixes  the prefixes to allow, must not be null
     * @throws IllegalArgumentException if the prefix array is null
     */
    public PrefixFileFilter(String[] prefixes) {
        this(prefixes, IOCase.SENSITIVE);
    }

    /**
     * Constructs a new Prefix file filter for any of an array of prefixes
     * specifying case-sensitivity.
     * <p>
     * The array is not cloned, so could be changed after constructing the
     * instance. This would be inadvisable however.
     *
     * @param prefixes  the prefixes to allow, must not be null
     * @param caseSensitivity  how to handle case sensitivity, null means case-sensitive
     * @throws IllegalArgumentException if the prefix is null
     * @since Commons IO 1.4
     */
    public PrefixFileFilter(String[] prefixes, IOCase caseSensitivity) {
        if (prefixes == null) {
            throw new IllegalArgumentException("The array of prefixes must not be null");
        }
        // Defensive copy so later mutation of the caller's array has no effect.
        this.prefixes = new String[prefixes.length];
        System.arraycopy(prefixes, 0, this.prefixes, 0, prefixes.length);
        this.caseSensitivity = (caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity);
    }

    /**
     * Constructs a new Prefix file filter for a list of prefixes.
     *
     * @param prefixes  the prefixes to allow, must not be null
     * @throws IllegalArgumentException if the prefix list is null
     * @throws ClassCastException if the list does not contain Strings
     */
    public PrefixFileFilter(List<String> prefixes) {
        this(prefixes, IOCase.SENSITIVE);
    }

    /**
     * Constructs a new Prefix file filter for a list of prefixes
     * specifying case-sensitivity.
     *
     * @param prefixes  the prefixes to allow, must not be null
     * @param caseSensitivity  how to handle case sensitivity, null means case-sensitive
     * @throws IllegalArgumentException if the prefix list is null
     * @throws ClassCastException if the list does not contain Strings
     * @since Commons IO 1.4
     */
    public PrefixFileFilter(List<String> prefixes, IOCase caseSensitivity) {
        if (prefixes == null) {
            throw new IllegalArgumentException("The list of prefixes must not be null");
        }
        this.prefixes = prefixes.toArray(new String[prefixes.size()]);
        this.caseSensitivity = (caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity);
    }

    /**
     * Checks to see if the filename starts with the prefix.
     *
     * @param file  the File to check
     * @return true if the filename starts with one of our prefixes
     */
    @Override
    public boolean accept(File file) {
        String name = file.getName();
        for (String prefix : prefixes) {
            if (caseSensitivity.checkStartsWith(name, prefix)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks to see if the filename starts with the prefix.
     *
     * @param file  the File directory
     * @param name  the filename
     * @return true if the filename starts with one of our prefixes
     */
    @Override
    public boolean accept(File file, String name) {
        for (String prefix : prefixes) {
            if (caseSensitivity.checkStartsWith(name, prefix)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Provide a String representation of this file filter.
     *
     * @return a String representation
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder();
        buffer.append(super.toString());
        buffer.append("(");
        // prefixes is final and every constructor either throws or assigns a
        // non-null array, so the previous null guard here was dead code.
        for (int i = 0; i < prefixes.length; i++) {
            if (i > 0) {
                buffer.append(",");
            }
            buffer.append(prefixes[i]);
        }
        buffer.append(")");
        return buffer.toString();
    }

}
gpl-3.0
jorgevasquezp/mucommander
src/main/com/mucommander/commons/file/filter/PathFilter.java
1047
/**
 * This file is part of muCommander, http://www.mucommander.com
 * Copyright (C) 2002-2010 Maxence Bernard
 *
 * muCommander is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * muCommander is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package com.mucommander.commons.file.filter;

/**
 * <code>PathFilter</code> is a {@link FileFilter} that operates on absolute file paths.
 * <p>
 * This is a marker interface: it declares no members of its own and inherits
 * all matching behavior from {@link StringCriterionFilter}. Implementations
 * are expected to apply their string criterion to a file's absolute path.
 *
 * @see AbstractPathFilter
 * @author Maxence Bernard
 */
public interface PathFilter extends StringCriterionFilter {
}
gpl-3.0
Niky4000/UsefulUtils
projects/others/eclipse-platform-parent/eclipse.jdt.core-master/org.eclipse.jdt.core.tests.model/workspace/Formatter/test133/A_in.java
5799
/*******************************************************************************
 * Copyright (c) 2000, 2003 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Common Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/cpl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package org.eclipse.jdt.internal.codeassist.impl;

import java.util.Iterator;
import java.util.Map;

import org.eclipse.jdt.core.compiler.CharOperation;

/**
 * Holds the code-assist (content assist) options, initialized either with
 * defaults or from an externally supplied settings map keyed by the
 * {@code OPTION_*} identifiers below.
 */
public class AssistOptions {
	/**
	 * Option IDs
	 */
	public static final String OPTION_PerformVisibilityCheck =
		"org.eclipse.jdt.core.codeComplete.visibilityCheck"; 	//$NON-NLS-1$
	public static final String OPTION_ForceImplicitQualification =
		"org.eclipse.jdt.core.codeComplete.forceImplicitQualification"; 	//$NON-NLS-1$
	public static final String OPTION_FieldPrefixes =
		"org.eclipse.jdt.core.codeComplete.fieldPrefixes"; 	//$NON-NLS-1$
	public static final String OPTION_StaticFieldPrefixes =
		"org.eclipse.jdt.core.codeComplete.staticFieldPrefixes"; 	//$NON-NLS-1$
	public static final String OPTION_LocalPrefixes =
		"org.eclipse.jdt.core.codeComplete.localPrefixes"; 	//$NON-NLS-1$
	public static final String OPTION_ArgumentPrefixes =
		"org.eclipse.jdt.core.codeComplete.argumentPrefixes"; 	//$NON-NLS-1$
	public static final String OPTION_FieldSuffixes =
		"org.eclipse.jdt.core.codeComplete.fieldSuffixes"; 	//$NON-NLS-1$
	public static final String OPTION_StaticFieldSuffixes =
		"org.eclipse.jdt.core.codeComplete.staticFieldSuffixes"; 	//$NON-NLS-1$
	public static final String OPTION_LocalSuffixes =
		"org.eclipse.jdt.core.codeComplete.localSuffixes"; 	//$NON-NLS-1$
	public static final String OPTION_ArgumentSuffixes =
		"org.eclipse.jdt.core.codeComplete.argumentSuffixes"; 	//$NON-NLS-1$
	public static final String ENABLED = "enabled"; //$NON-NLS-1$
	public static final String DISABLED = "disabled"; //$NON-NLS-1$

	public boolean checkVisibility = false;
	public boolean forceImplicitQualification = false;
	// Prefix/suffix lists are null when unset or set to the empty string.
	public char[][] fieldPrefixes = null;
	public char[][] staticFieldPrefixes = null;
	public char[][] localPrefixes = null;
	public char[][] argumentPrefixes = null;
	public char[][] fieldSuffixes = null;
	public char[][] staticFieldSuffixes = null;
	public char[][] localSuffixes = null;
	public char[][] argumentSuffixes = null;

	/**
	 * Initializing the assist options with default settings
	 */
	public AssistOptions() {
		// Initializing the assist options with default settings
	}

	/**
	 * Initializing the assist options with external settings
	 *
	 * @param settings map of option ID to option value; may be null, in which
	 *     case the defaults are kept. Non-String keys or values are ignored.
	 */
	public AssistOptions(Map settings) {
		if (settings == null)
			return;

		// filter options which are related to the assist component
		Iterator entries = settings.entrySet().iterator();
		while (entries.hasNext()) {
			Map.Entry entry = (Map.Entry) entries.next();
			if (!(entry.getKey() instanceof String))
				continue;
			if (!(entry.getValue() instanceof String))
				continue;
			String optionID = (String) entry.getKey();
			String optionValue = (String) entry.getValue();

			if (optionID.equals(OPTION_PerformVisibilityCheck)) {
				// Unrecognized values leave the current setting untouched.
				if (optionValue.equals(ENABLED)) {
					this.checkVisibility = true;
				} else if (optionValue.equals(DISABLED)) {
					this.checkVisibility = false;
				}
			} else if (optionID.equals(OPTION_ForceImplicitQualification)) {
				if (optionValue.equals(ENABLED)) {
					this.forceImplicitQualification = true;
				} else if (optionValue.equals(DISABLED)) {
					this.forceImplicitQualification = false;
				}
			} else if (optionID.equals(OPTION_FieldPrefixes)) {
				this.fieldPrefixes = splitOption(optionValue);
			} else if (optionID.equals(OPTION_StaticFieldPrefixes)) {
				this.staticFieldPrefixes = splitOption(optionValue);
			} else if (optionID.equals(OPTION_LocalPrefixes)) {
				this.localPrefixes = splitOption(optionValue);
			} else if (optionID.equals(OPTION_ArgumentPrefixes)) {
				this.argumentPrefixes = splitOption(optionValue);
			} else if (optionID.equals(OPTION_FieldSuffixes)) {
				this.fieldSuffixes = splitOption(optionValue);
			} else if (optionID.equals(OPTION_StaticFieldSuffixes)) {
				this.staticFieldSuffixes = splitOption(optionValue);
			} else if (optionID.equals(OPTION_LocalSuffixes)) {
				this.localSuffixes = splitOption(optionValue);
			} else if (optionID.equals(OPTION_ArgumentSuffixes)) {
				this.argumentSuffixes = splitOption(optionValue);
			}
		}
	}

	/**
	 * Converts a comma-separated prefix/suffix option value into a trimmed
	 * char-array list, or null when the value is empty.
	 *
	 * @param optionValue the raw option value, never null
	 * @return the split value, or null for the empty string
	 */
	private static char[][] splitOption(String optionValue) {
		if (optionValue.length() == 0) {
			return null;
		}
		return CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
	}
}
gpl-3.0
dkmorb/axoloti
src/main/java/axoloti/menus/PopulatePatchMenu.java
2367
/**
 * Copyright (C) 2013 - 2016 Johannes Taelman
 *
 * This file is part of Axoloti.
 *
 * Axoloti is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Axoloti is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Axoloti. If not, see <http://www.gnu.org/licenses/>.
 */
package axoloti.menus;

import axoloti.MainFrame;
import java.io.File;
import java.io.FilenameFilter;
import java.util.Arrays;
import javax.swing.JMenu;
import javax.swing.JMenuItem;

/**
 *
 * @author jtaelman
 */
public class PopulatePatchMenu {

    /**
     * Recursively fills {@code parent} with one sub-menu per subdirectory of
     * {@code path} and one item per file ending in {@code ext}. Each item's
     * action command is "open:" followed by the file path, dispatched to
     * {@link MainFrame#mainframe}. Empty sub-menus are omitted; a disabled
     * placeholder item is added when {@code path} is not a directory.
     *
     * @param parent menu to populate
     * @param path   directory to scan
     * @param ext    filename extension to match, including the dot (e.g. ".axh")
     */
    static void PopulatePatchMenu(JMenu parent, String path, String ext) {
        File dir = new File(path);
        if (!dir.isDirectory()) {
            JMenuItem mi = new JMenuItem("no help patches found");
            mi.setEnabled(false);
            parent.add(mi);
            return;
        }
        final String extension = ext;

        File[] files = dir.listFiles(new java.io.FileFilter() {
            @Override
            public boolean accept(File pathname) {
                return pathname.isDirectory();
            }
        });
        // listFiles() returns null on an I/O error even for a directory;
        // guard to avoid a NullPointerException in Arrays.sort.
        if (files != null) {
            Arrays.sort(files);
            for (File subdir : files) {
                JMenu fm = new JMenu(subdir.getName());
                PopulatePatchMenu(fm, subdir.getPath(), extension);
                // Skip sub-menus that contain no matching files.
                if (fm.getItemCount() > 0) {
                    parent.add(fm);
                }
            }
        }

        String filenames[] = dir.list(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return (name.endsWith(extension));
            }
        });
        if (filenames != null) {
            Arrays.sort(filenames);
            for (String fn : filenames) {
                // Strip the extension to form the display name. The original
                // hard-coded 4 characters here, which silently assumed a
                // 4-character extension such as ".axh".
                String fn2 = fn.substring(0, fn.length() - extension.length());
                JMenuItem fm = new JMenuItem(fn2);
                fm.setActionCommand("open:" + path + File.separator + fn);
                fm.addActionListener(MainFrame.mainframe);
                parent.add(fm);
            }
        }
    }
}
gpl-3.0
pabalexa/calibre2opds
OpdsOutput/src/main/java/com/l2fprod/common/swing/plaf/ComponentAddon.java
1604
/**
 * L2FProd.com Common Components 7.3 License.
 *
 * Copyright 2005-2007 L2FProd.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.l2fprod.common.swing.plaf;

/**
 * Each new component type of the library will contribute an addon to
 * the LookAndFeelAddons. A <code>ComponentAddon</code> is the
 * equivalent of a {@link javax.swing.LookAndFeel}but focused on one
 * component. <br>
 *
 * @author <a href="mailto:fred@L2FProd.com">Frederic Lavigne</a>
 */
public interface ComponentAddon {

  /**
   * @return the name of this addon
   */
  String getName();

  /**
   * Initializes this addon (i.e register UI classes, colors, fonts,
   * borders, any UIResource used by the component class). When
   * initializing, the addon can register different resources based on
   * the addon or the current look and feel.
   *
   * @param addon the current addon
   */
  void initialize(LookAndFeelAddons addon);

  /**
   * Uninitializes this addon, releasing whatever was registered in
   * {@link #initialize(LookAndFeelAddons)}.
   *
   * @param addon the current addon
   */
  void uninitialize(LookAndFeelAddons addon);
}
gpl-3.0
routeKIT/routeKIT
src/org/itadaki/bzip2/HuffmanAllocator.java
6762
/* * Copyright (c) 2011 Matthew Francis * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.itadaki.bzip2; /** * An in-place, length restricted Canonical Huffman code length allocator * * Based on the algorithm proposed by R. L. Milidiú, A. A. Pessoa and E. S. 
Laber in "In-place * Length-Restricted Prefix Coding" (see: http://www-di.inf.puc-rio.br/~laber/public/spire98.ps) * and incorporating additional ideas from the implementation of "shcodec" by Simakov Alexander * (see: http://webcenter.ru/~xander/) */ public class HuffmanAllocator { /** * FIRST() function * @param array The code length array * @param i The input position * @param nodesToMove The number of internal nodes to be relocated * @return The smallest {@code k} such that {@code nodesToMove <= k <= i} and * {@code i <= (array[k] % array.length)} */ private static int first (final int[] array, int i, final int nodesToMove) { final int length = array.length; final int limit = i; int k = array.length - 2; while ((i >= nodesToMove) && ((array[i] % length) > limit)) { k = i; i -= (limit - i + 1); } i = Math.max (nodesToMove - 1, i); while (k > (i + 1)) { int temp = (i + k) >> 1; if ((array[temp] % length) > limit) { k = temp; } else { i = temp; } } return k; } /** * Fills the code array with extended parent pointers * @param array The code length array */ private static void setExtendedParentPointers (final int[] array) { final int length = array.length; array[0] += array[1]; for (int headNode = 0, tailNode = 1, topNode = 2; tailNode < (length - 1); tailNode++) { int temp; if ((topNode >= length) || (array[headNode] < array[topNode])) { temp = array[headNode]; array[headNode++] = tailNode; } else { temp = array[topNode++]; } if ((topNode >= length) || ((headNode < tailNode) && (array[headNode] < array[topNode]))) { temp += array[headNode]; array[headNode++] = tailNode + length; } else { temp += array[topNode++]; } array[tailNode] = temp; } } /** * Finds the number of nodes to relocate in order to achieve a given code length limit * @param array The code length array * @param maximumLength The maximum bit length for the generated codes * @return The number of nodes to relocate */ private static int findNodesToRelocate (final int[] array, final int maximumLength) { 
int currentNode = array.length - 2; for (int currentDepth = 1; (currentDepth < (maximumLength - 1)) && (currentNode > 1); currentDepth++) { currentNode = first (array, currentNode - 1, 0); } return currentNode; } /** * A final allocation pass with no code length limit * @param array The code length array */ private static void allocateNodeLengths (final int[] array) { int firstNode = array.length - 2; int nextNode = array.length - 1; for (int currentDepth = 1, availableNodes = 2; availableNodes > 0; currentDepth++) { final int lastNode = firstNode; firstNode = first (array, lastNode - 1, 0); for (int i = availableNodes - (lastNode - firstNode); i > 0; i--) { array[nextNode--] = currentDepth; } availableNodes = (lastNode - firstNode) << 1; } } /** * A final allocation pass that relocates nodes in order to achieve a maximum code length limit * @param array The code length array * @param nodesToMove The number of internal nodes to be relocated * @param insertDepth The depth at which to insert relocated nodes */ private static void allocateNodeLengthsWithRelocation (final int[] array, final int nodesToMove, final int insertDepth) { int firstNode = array.length - 2; int nextNode = array.length - 1; int currentDepth = (insertDepth == 1) ? 2 : 1; int nodesLeftToMove = (insertDepth == 1) ? nodesToMove - 2 : nodesToMove; for (int availableNodes = currentDepth << 1; availableNodes > 0; currentDepth++) { final int lastNode = firstNode; firstNode = (firstNode <= nodesToMove) ? 
firstNode : first (array, lastNode - 1, nodesToMove); int offset = 0; if (currentDepth >= insertDepth) { offset = Math.min (nodesLeftToMove, 1 << (currentDepth - insertDepth)); } else if (currentDepth == (insertDepth - 1)) { offset = 1; if ((array[firstNode]) == lastNode) { firstNode++; } } for (int i = availableNodes - (lastNode - firstNode + offset); i > 0; i--) { array[nextNode--] = currentDepth; } nodesLeftToMove -= offset; availableNodes = (lastNode - firstNode + offset) << 1; } } /** * Allocates Canonical Huffman code lengths in place based on a sorted frequency array * @param array On input, a sorted array of symbol frequencies; On output, an array of Canonical * Huffman code lengths * @param maximumLength The maximum code length. Must be at least {@code ceil(log2(array.length))} */ public static void allocateHuffmanCodeLengths (final int[] array, final int maximumLength) { switch (array.length) { case 2: array[1] = 1; case 1: array[0] = 1; return; } /* Pass 1 : Set extended parent pointers */ setExtendedParentPointers (array); /* Pass 2 : Find number of nodes to relocate in order to achieve maximum code length */ int nodesToRelocate = findNodesToRelocate (array, maximumLength); /* Pass 3 : Generate code lengths */ if ((array[0] % array.length) >= nodesToRelocate) { allocateNodeLengths (array); } else { int insertDepth = maximumLength - (32 - Integer.numberOfLeadingZeros (nodesToRelocate - 1)); allocateNodeLengthsWithRelocation (array, nodesToRelocate, insertDepth); } } /** * Non-instantiable */ private HuffmanAllocator() { } }
gpl-3.0
jmcPereira/overture
ide/core/src/main/java/org/overture/ide/core/resources/ModelBuildPath.java
6618
/*
 * #%~
 * org.overture.ide.core
 * %%
 * Copyright (C) 2008 - 2014 Overture
 * %%
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public
 * License along with this program.  If not, see
 * <http://www.gnu.org/licenses/gpl-3.0.html>.
 * #~%
 */
package org.overture.ide.core.resources;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Vector;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.overture.ide.core.VdmCore;
import org.overture.ide.internal.core.ResourceManager;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * Model build path for a VDM project, persisted in the project-local
 * {@code .modelpath} XML file. Tracks the source containers, the output
 * container and the library container. Mutating accessors are synchronized.
 */
public class ModelBuildPath
{
	final IVdmProject vdmProject;
	final IProject project;
	// Location of the persisted <project>/.modelpath file.
	final File modelPathFile;
	List<IContainer> srcPaths = new Vector<IContainer>();
	IContainer output;
	IContainer library;

	/**
	 * Creates the build path for {@code project}, loading {@code .modelpath}
	 * if present; otherwise the project root is used as the single source
	 * container, with defaults "generated" (output) and "lib" (library).
	 */
	public ModelBuildPath(IVdmProject project)
	{
		this.vdmProject = project;
		this.project = (IProject) this.vdmProject.getAdapter(IProject.class);
		IPath base = this.project.getLocation();
		base = base.append(".modelpath");
		this.modelPathFile = base.toFile();
		this.output = this.project.getFolder("generated");
		this.library = this.project.getFolder("lib");
		parse();
	}

	/** @return true if a persisted .modelpath file exists */
	private boolean hasModelPath()
	{
		return this.modelPathFile.exists();
	}

	/** @return the fallback source container (the project root) */
	private IContainer getDefaultModelSrcPath()
	{
		return this.project;
	}

	/**
	 * @return a defensive copy of the source containers
	 */
	public List<IContainer> getModelSrcPaths()
	{
		List<IContainer> tmp = new Vector<IContainer>(srcPaths.size());
		tmp.addAll(srcPaths);
		return tmp;
	}

	public synchronized IContainer getOutput()
	{
		return this.output;
	}

	public synchronized IContainer getLibrary()
	{
		return this.library;
	}

	/**
	 * Loads the build path from {@code .modelpath}, falling back to the
	 * project root as source container when the file is missing, empty or
	 * unparsable.
	 */
	private synchronized void parse()
	{
		if (!hasModelPath())
		{
			srcPaths.add(getDefaultModelSrcPath());
			return;
		}
		try
		{
			File file = this.modelPathFile;
			DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
			DocumentBuilder db = dbf.newDocumentBuilder();
			Document doc = db.parse(file);
			doc.getDocumentElement().normalize();
			NodeList nodeLst = doc.getElementsByTagName("modelpathentry");
			for (int s = 0; s < nodeLst.getLength(); s++)
			{
				Node fstNode = nodeLst.item(s);
				if (fstNode.getNodeType() == Node.ELEMENT_NODE)
				{
					Node kindAttribute = fstNode.getAttributes().getNamedItem("kind");
					String kindValue = kindAttribute.getNodeValue();
					if (kindValue != null)
					{
						if (kindValue.equals("src"))
						{
							Node pathAttribute = fstNode.getAttributes().getNamedItem("path");
							String pathValue = pathAttribute.getNodeValue();
							// "." denotes the project root itself.
							if (pathValue.equals("."))
							{
								add(getDefaultModelSrcPath());
							} else
							{
								add(this.project.getFolder(pathValue));
							}
						} else if (kindValue.equals("output"))
						{
							Node pathAttribute = fstNode.getAttributes().getNamedItem("path");
							String pathValue = pathAttribute.getNodeValue();
							output = this.project.getFolder(pathValue);
						} else if (kindValue.equals("library"))
						{
							Node pathAttribute = fstNode.getAttributes().getNamedItem("path");
							String pathValue = pathAttribute.getNodeValue();
							library = this.project.getFolder(pathValue);
						}
					}
				}
			}
			if (srcPaths.isEmpty())
			{
				srcPaths.add(getDefaultModelSrcPath());
			}
		} catch (Exception e)
		{
			VdmCore.log("Failed to parse .modelpath file", e);
		}
	}

	public synchronized void setOutput(IContainer container)
	{
		this.output = container;
	}

	public synchronized void setLibrary(IContainer container)
	{
		this.library = container;
	}

	/**
	 * Adds a source container. Adding the project root replaces all existing
	 * entries; adding a folder that is a prefix of every existing entry also
	 * clears them first. Duplicates are ignored.
	 */
	public synchronized void add(IContainer container)
	{
		if (container instanceof IProject)
		{
			srcPaths.clear();
		} else if (container instanceof IFolder)
		{
			String fullPath = container.getProjectRelativePath().toString();
			boolean flag = true;
			for (IContainer s : srcPaths)
			{
				flag = flag
						&& s.getProjectRelativePath().toString().startsWith(fullPath);
			}
			if (flag)
			{
				srcPaths.clear();
			}
		}

		if (!srcPaths.contains(container))
		{
			srcPaths.add(container);
		}
	}

	public synchronized void remove(IContainer container)
	{
		if (srcPaths.contains(container))
		{
			srcPaths.remove(container);
		}
	}

	public synchronized boolean contains(IContainer container)
	{
		return srcPaths.contains(container);
	}

	/**
	 * Serializes the build path to {@code .modelpath} and notifies the
	 * {@link ResourceManager} to sync.
	 *
	 * @throws CoreException propagated from the resource-manager sync
	 */
	public synchronized void save() throws CoreException
	{
		// NOTE(review): paths are written without XML escaping — fine for
		// typical folder names, but paths containing '"' or '&' would
		// produce invalid XML. Confirm whether such names can occur.
		StringBuilder sb = new StringBuilder();
		sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
		sb.append("<modelpath>\n");

		for (IContainer src : srcPaths)
		{
			if (src.getProjectRelativePath().toString().length() > 0)
			{
				sb.append("\t<modelpathentry kind=\"src\" path=\""
						+ src.getProjectRelativePath() + "\"/>\n");
			} else if (src instanceof IProject)
			{
				sb.append("\t<modelpathentry kind=\"src\" path=\".\"/>\n");
			}
		}

		if (output != null
				&& output.getProjectRelativePath().toString().length() > 0)
		{
			sb.append("\t<modelpathentry kind=\"output\" path=\""
					+ output.getProjectRelativePath() + "\"/>\n");
		}

		if (library != null
				&& library.getProjectRelativePath().toString().length() > 0)
		{
			sb.append("\t<modelpathentry kind=\"library\" path=\""
					+ library.getProjectRelativePath() + "\"/>\n");
		}

		sb.append("</modelpath>");

		// try-with-resources replaces the original manual try/finally close.
		try (PrintWriter out = new PrintWriter(new FileWriter(this.modelPathFile)))
		{
			out.println(sb.toString());
		} catch (IOException e)
		{
			VdmCore.log("Failed to save .modelpath file", e);
		}

		ResourceManager.getInstance().syncBuildPath(vdmProject);
	}

	/**
	 * Reload the build path and discard any un-saved changes
	 */
	public void reload()
	{
		parse();
	}
}
gpl-3.0
jembi/openmrs-core
api/src/main/java/org/openmrs/annotation/OpenmrsProfile.java
1065
/**
 * This Source Code Form is subject to the terms of the Mozilla Public License,
 * v. 2.0. If a copy of the MPL was not distributed with this file, You can
 * obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
 * the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
 *
 * Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
 * graphic logo is a trademark of OpenMRS Inc.
 */
package org.openmrs.annotation;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Place it on classes which you want to be beans created conditionally based on
 * OpenMRS version and/or started modules.
 *
 * @since 1.10, 1.9.8, 1.8.5, 1.7.5
 */
@Target( { ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface OpenmrsProfile {

	// OpenMRS version condition; empty means no version restriction.
	// NOTE(review): the accepted expression syntax (exact vs. range) is not
	// visible here — confirm against the annotation's processor.
	public String openmrsVersion() default "";

	// Module IDs that must be started for the bean to be created; empty
	// means no module requirement.
	public String[] modules() default {};
}
mpl-2.0
cswhite2000/ProjectAres
Util/core/src/main/java/tc/oc/commons/core/plugin/PluginFacet.java
1116
package tc.oc.commons.core.plugin;

import java.util.Set;

import tc.oc.commons.core.commands.CommandRegistry;
import tc.oc.commons.core.commands.Commands;
import tc.oc.commons.core.commands.NestedCommands;
import tc.oc.minecraft.api.event.Activatable;
import tc.oc.commons.core.inject.Facet;

/**
 * Something that needs to be enabled and disabled (along with a plugin).
 *
 * Each plugin has a private set of facets, configured through a {@link PluginFacetBinder}.
 * To get the instances, @Inject a {@link Set< PluginFacet >}.
 *
 * Facets are automatically enabled and disabled at the same time as the
 * plugin they are bound to.
 *
 * If a facet implements the {@link tc.oc.minecraft.api.event.Listener} interface,
 * it will also be registered to receive events.
 *
 * If it implements {@link Commands} or {@link NestedCommands}, it will be registered
 * through a {@link CommandRegistry}.
 *
 * Specific plugins may do other automatic things with their own facets, but we
 * don't yet have a framework for extending facets across all plugins.
 *
 * This is a marker interface: it declares no members of its own, inheriting
 * its lifecycle contract from {@link Facet} and {@link Activatable}.
 */
public interface PluginFacet extends Facet, Activatable {
}
agpl-3.0
onursumer/cbioportal
service/src/test/java/org/cbioportal/service/impl/ExpressionEnrichmentServiceImplTest.java
18869
package org.cbioportal.service.impl; import java.math.BigDecimal; import java.util.*; import org.cbioportal.model.*; import org.cbioportal.model.meta.GenericAssayMeta; import org.cbioportal.persistence.MolecularDataRepository; import org.cbioportal.service.GeneService; import org.cbioportal.service.GenericAssayService; import org.cbioportal.service.MolecularProfileService; import org.cbioportal.service.SampleService; import org.cbioportal.service.exception.MolecularProfileNotFoundException; import org.cbioportal.service.util.ExpressionEnrichmentUtil; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.Spy; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) public class ExpressionEnrichmentServiceImplTest extends BaseServiceImplTest { @InjectMocks private ExpressionEnrichmentServiceImpl enrichmentServiceImpl; @Mock private SampleService sampleService; @Mock private MolecularProfileService molecularProfileService; @Mock private MolecularDataRepository molecularDataRepository; @Mock private GeneService geneService; @Spy @InjectMocks private ExpressionEnrichmentUtil expressionEnrichmentUtil; @Mock private GenericAssayService genericAssayService; CancerStudy cancerStudy = new CancerStudy(); MolecularProfile geneMolecularProfile = new MolecularProfile(); MolecularProfileSamples molecularProfileSamples = new MolecularProfileSamples(); List<Sample> samples = new ArrayList<>(); Map<String, List<MolecularProfileCaseIdentifier>> molecularProfileCaseSets = new HashMap<>(); Map<String, List<MolecularProfileCaseIdentifier>> molecularProfilePatientLevelCaseSets = new HashMap<>(); // patient level only data public static final String SAMPLE_ID5 = "sample_id5"; @Before public void setup() throws MolecularProfileNotFoundException { 
cancerStudy.setReferenceGenome(ReferenceGenome.HOMO_SAPIENS_DEFAULT_GENOME_NAME); cancerStudy.setCancerStudyIdentifier(STUDY_ID); geneMolecularProfile.setCancerStudyIdentifier(STUDY_ID); geneMolecularProfile.setStableId(MOLECULAR_PROFILE_ID); geneMolecularProfile.setCancerStudy(cancerStudy); molecularProfileSamples.setMolecularProfileId(MOLECULAR_PROFILE_ID); molecularProfileSamples.setCommaSeparatedSampleIds("1,2,3,4"); Sample sample1 = new Sample(); sample1.setStableId(SAMPLE_ID1); sample1.setInternalId(1); sample1.setCancerStudyIdentifier(STUDY_ID); sample1.setPatientId(1); samples.add(sample1); Sample sample2 = new Sample(); sample2.setStableId(SAMPLE_ID2); sample2.setInternalId(2); sample2.setCancerStudyIdentifier(STUDY_ID); sample2.setPatientId(2); samples.add(sample2); Sample sample3 = new Sample(); sample3.setStableId(SAMPLE_ID3); sample3.setInternalId(3); sample3.setCancerStudyIdentifier(STUDY_ID); sample3.setPatientId(3); samples.add(sample3); Sample sample4 = new Sample(); sample4.setStableId(SAMPLE_ID4); sample4.setInternalId(4); sample4.setCancerStudyIdentifier(STUDY_ID); sample4.setPatientId(4); samples.add(sample4); List<MolecularProfileCaseIdentifier> alteredSampleIdentifieres = new ArrayList<>(); List<MolecularProfileCaseIdentifier> unalteredSampleIdentifieres = new ArrayList<>(); List<MolecularProfileCaseIdentifier> unalteredPatientLevelSampleIdentifieres = new ArrayList<>(); MolecularProfileCaseIdentifier caseIdentifier1 = new MolecularProfileCaseIdentifier(); caseIdentifier1.setMolecularProfileId(MOLECULAR_PROFILE_ID); caseIdentifier1.setCaseId(SAMPLE_ID1); alteredSampleIdentifieres.add(caseIdentifier1); MolecularProfileCaseIdentifier caseIdentifier2 = new MolecularProfileCaseIdentifier(); caseIdentifier2.setMolecularProfileId(MOLECULAR_PROFILE_ID); caseIdentifier2.setCaseId(SAMPLE_ID2); alteredSampleIdentifieres.add(caseIdentifier2); MolecularProfileCaseIdentifier caseIdentifier3 = new MolecularProfileCaseIdentifier(); 
caseIdentifier3.setMolecularProfileId(MOLECULAR_PROFILE_ID); caseIdentifier3.setCaseId(SAMPLE_ID3); unalteredSampleIdentifieres.add(caseIdentifier3); unalteredPatientLevelSampleIdentifieres.add(caseIdentifier3); MolecularProfileCaseIdentifier caseIdentifier4 = new MolecularProfileCaseIdentifier(); caseIdentifier4.setMolecularProfileId(MOLECULAR_PROFILE_ID); caseIdentifier4.setCaseId(SAMPLE_ID4); unalteredSampleIdentifieres.add(caseIdentifier4); unalteredPatientLevelSampleIdentifieres.add(caseIdentifier4); // patient level only data MolecularProfileCaseIdentifier caseIdentifier5 = new MolecularProfileCaseIdentifier(); caseIdentifier5.setMolecularProfileId(MOLECULAR_PROFILE_ID); caseIdentifier5.setCaseId(SAMPLE_ID5); unalteredPatientLevelSampleIdentifieres.add(caseIdentifier5); molecularProfileCaseSets.put("altered samples", alteredSampleIdentifieres); molecularProfileCaseSets.put("unaltered samples", unalteredSampleIdentifieres); molecularProfilePatientLevelCaseSets.put("altered samples", alteredSampleIdentifieres); molecularProfilePatientLevelCaseSets.put("unaltered samples", unalteredPatientLevelSampleIdentifieres); Mockito.when(molecularProfileService.getMolecularProfile(MOLECULAR_PROFILE_ID)) .thenReturn(geneMolecularProfile); Mockito.when(molecularDataRepository.getCommaSeparatedSampleIdsOfMolecularProfile(MOLECULAR_PROFILE_ID)) .thenReturn(molecularProfileSamples); Mockito.when(sampleService.fetchSamples(Arrays.asList(STUDY_ID, STUDY_ID, STUDY_ID, STUDY_ID), Arrays.asList(SAMPLE_ID3, SAMPLE_ID4, SAMPLE_ID1, SAMPLE_ID2), "ID")).thenReturn(samples); } @Test public void getGenomicEnrichments() throws Exception { geneMolecularProfile.setMolecularAlterationType(MolecularProfile.MolecularAlterationType.MRNA_EXPRESSION); List<GeneMolecularAlteration> molecularDataList = new ArrayList<GeneMolecularAlteration>(); GeneMolecularAlteration geneMolecularAlteration1 = new GeneMolecularAlteration(); geneMolecularAlteration1.setEntrezGeneId(ENTREZ_GENE_ID_2); 
geneMolecularAlteration1.setValues("2,3,2.1,3"); molecularDataList.add(geneMolecularAlteration1); GeneMolecularAlteration geneMolecularAlteration2 = new GeneMolecularAlteration(); geneMolecularAlteration2.setEntrezGeneId(ENTREZ_GENE_ID_3); geneMolecularAlteration2.setValues("1.1,5,2.3,3"); molecularDataList.add(geneMolecularAlteration2); Mockito.when(molecularDataRepository.getGeneMolecularAlterationsIterableFast(MOLECULAR_PROFILE_ID)) .thenReturn(molecularDataList); List<Gene> expectedGeneList = new ArrayList<>(); Gene gene1 = new Gene(); gene1.setEntrezGeneId(ENTREZ_GENE_ID_2); gene1.setHugoGeneSymbol(HUGO_GENE_SYMBOL_2); expectedGeneList.add(gene1); Gene gene2 = new Gene(); gene2.setEntrezGeneId(ENTREZ_GENE_ID_3); gene2.setHugoGeneSymbol(HUGO_GENE_SYMBOL_3); expectedGeneList.add(gene2); Mockito.when(geneService.fetchGenes(Arrays.asList("2", "3"), "ENTREZ_GENE_ID", "SUMMARY")) .thenReturn(expectedGeneList); List<GenomicEnrichment> result = enrichmentServiceImpl.getGenomicEnrichments(MOLECULAR_PROFILE_ID, molecularProfileCaseSets, EnrichmentType.SAMPLE); Assert.assertEquals(2, result.size()); GenomicEnrichment expressionEnrichment = result.get(0); Assert.assertEquals(ENTREZ_GENE_ID_2, expressionEnrichment.getEntrezGeneId()); Assert.assertEquals(HUGO_GENE_SYMBOL_2, expressionEnrichment.getHugoGeneSymbol()); Assert.assertEquals(null, expressionEnrichment.getCytoband()); Assert.assertEquals(2, expressionEnrichment.getGroupsStatistics().size()); GroupStatistics unalteredGroupStats = expressionEnrichment.getGroupsStatistics().get(0); Assert.assertEquals("unaltered samples", unalteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.55"), unalteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.6363961030678927"), unalteredGroupStats.getStandardDeviation()); GroupStatistics alteredGroupStats = expressionEnrichment.getGroupsStatistics().get(1); Assert.assertEquals("altered samples", alteredGroupStats.getName()); Assert.assertEquals(new 
BigDecimal("2.5"), alteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.7071067811865476"), alteredGroupStats.getStandardDeviation()); Assert.assertEquals(new BigDecimal("0.9475795430163914"), expressionEnrichment.getpValue()); expressionEnrichment = result.get(1); Assert.assertEquals(ENTREZ_GENE_ID_3, expressionEnrichment.getEntrezGeneId()); Assert.assertEquals(HUGO_GENE_SYMBOL_3, expressionEnrichment.getHugoGeneSymbol()); Assert.assertEquals(null, expressionEnrichment.getCytoband()); Assert.assertEquals(2, expressionEnrichment.getGroupsStatistics().size()); unalteredGroupStats = expressionEnrichment.getGroupsStatistics().get(0); Assert.assertEquals("unaltered samples", unalteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.65"), unalteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.4949747468305834"), unalteredGroupStats.getStandardDeviation()); alteredGroupStats = expressionEnrichment.getGroupsStatistics().get(1); Assert.assertEquals("altered samples", alteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("3.05"), alteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("2.7577164466275352"), alteredGroupStats.getStandardDeviation()); Assert.assertEquals(new BigDecimal("0.8716148250471419"), expressionEnrichment.getpValue()); } @Test public void getGenericAssayEnrichments() throws Exception { geneMolecularProfile.setMolecularAlterationType(MolecularProfile.MolecularAlterationType.GENERIC_ASSAY); List<GenericAssayMolecularAlteration> molecularDataList = new ArrayList<GenericAssayMolecularAlteration>(); GenericAssayMolecularAlteration genericAssayMolecularAlteration1 = new GenericAssayMolecularAlteration(); genericAssayMolecularAlteration1.setGenericAssayStableId(HUGO_GENE_SYMBOL_1); genericAssayMolecularAlteration1.setValues("2,3,2.1,3"); molecularDataList.add(genericAssayMolecularAlteration1); GenericAssayMolecularAlteration genericAssayMolecularAlteration2 = new 
GenericAssayMolecularAlteration(); genericAssayMolecularAlteration2.setGenericAssayStableId(HUGO_GENE_SYMBOL_2); genericAssayMolecularAlteration2.setValues("1.1,5,2.3,3"); molecularDataList.add(genericAssayMolecularAlteration2); Mockito.when(molecularDataRepository.getGenericAssayMolecularAlterationsIterable(MOLECULAR_PROFILE_ID, null, "SUMMARY")).thenReturn(molecularDataList); Mockito.when(genericAssayService.getGenericAssayMetaByStableIdsAndMolecularIds( Arrays.asList(HUGO_GENE_SYMBOL_1, HUGO_GENE_SYMBOL_2), Arrays.asList(MOLECULAR_PROFILE_ID, MOLECULAR_PROFILE_ID), "SUMMARY")) .thenReturn(Arrays.asList(new GenericAssayMeta(HUGO_GENE_SYMBOL_1), new GenericAssayMeta(HUGO_GENE_SYMBOL_2))); List<GenericAssayEnrichment> result = enrichmentServiceImpl.getGenericAssayEnrichments(MOLECULAR_PROFILE_ID, molecularProfileCaseSets, EnrichmentType.SAMPLE); Assert.assertEquals(2, result.size()); GenericAssayEnrichment genericAssayEnrichment = result.get(0); Assert.assertEquals(HUGO_GENE_SYMBOL_1, genericAssayEnrichment.getStableId()); Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size()); GroupStatistics unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0); Assert.assertEquals("unaltered samples", unalteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.55"), unalteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.6363961030678927"), unalteredGroupStats.getStandardDeviation()); GroupStatistics alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1); Assert.assertEquals("altered samples", alteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.5"), alteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.7071067811865476"), alteredGroupStats.getStandardDeviation()); Assert.assertEquals(new BigDecimal("0.9475795430163914"), genericAssayEnrichment.getpValue()); genericAssayEnrichment = result.get(1); Assert.assertEquals(HUGO_GENE_SYMBOL_2, 
genericAssayEnrichment.getStableId()); Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size()); unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0); Assert.assertEquals("unaltered samples", unalteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.65"), unalteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.4949747468305834"), unalteredGroupStats.getStandardDeviation()); alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1); Assert.assertEquals("altered samples", alteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("3.05"), alteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("2.7577164466275352"), alteredGroupStats.getStandardDeviation()); Assert.assertEquals(new BigDecimal("0.8716148250471419"), genericAssayEnrichment.getpValue()); } @Test public void getGenericAssayPatientLevelEnrichments() throws Exception { geneMolecularProfile.setMolecularAlterationType(MolecularProfile.MolecularAlterationType.GENERIC_ASSAY); geneMolecularProfile.setPatientLevel(true); List<GenericAssayMolecularAlteration> molecularDataList = new ArrayList<GenericAssayMolecularAlteration>(); GenericAssayMolecularAlteration genericAssayMolecularAlteration1 = new GenericAssayMolecularAlteration(); genericAssayMolecularAlteration1.setGenericAssayStableId(HUGO_GENE_SYMBOL_1); genericAssayMolecularAlteration1.setValues("2,3,2.1,3,3,3"); molecularDataList.add(genericAssayMolecularAlteration1); GenericAssayMolecularAlteration genericAssayMolecularAlteration2 = new GenericAssayMolecularAlteration(); genericAssayMolecularAlteration2.setGenericAssayStableId(HUGO_GENE_SYMBOL_2); genericAssayMolecularAlteration2.setValues("1.1,5,2.3,3,3"); molecularDataList.add(genericAssayMolecularAlteration2); Mockito.when(molecularDataRepository.getGenericAssayMolecularAlterationsIterable(MOLECULAR_PROFILE_ID, null, "SUMMARY")).thenReturn(molecularDataList); 
Mockito.when(genericAssayService.getGenericAssayMetaByStableIdsAndMolecularIds( Arrays.asList(HUGO_GENE_SYMBOL_1, HUGO_GENE_SYMBOL_2), Arrays.asList(MOLECULAR_PROFILE_ID, MOLECULAR_PROFILE_ID), "SUMMARY")) .thenReturn(Arrays.asList(new GenericAssayMeta(HUGO_GENE_SYMBOL_1), new GenericAssayMeta(HUGO_GENE_SYMBOL_2))); // add 5th sample which is the second sample of patient 4 Sample sample5 = new Sample(); sample5.setStableId(SAMPLE_ID5); sample5.setInternalId(5); sample5.setCancerStudyIdentifier(STUDY_ID); sample5.setPatientId(4); samples.add(sample5); Mockito.when(sampleService.fetchSamples(Arrays.asList(STUDY_ID, STUDY_ID, STUDY_ID, STUDY_ID, STUDY_ID), Arrays.asList(SAMPLE_ID3, SAMPLE_ID4, SAMPLE_ID5, SAMPLE_ID1, SAMPLE_ID2), "ID")).thenReturn(samples); List<GenericAssayEnrichment> result = enrichmentServiceImpl.getGenericAssayEnrichments(MOLECULAR_PROFILE_ID, molecularProfilePatientLevelCaseSets, EnrichmentType.SAMPLE); Assert.assertEquals(2, result.size()); GenericAssayEnrichment genericAssayEnrichment = result.get(0); Assert.assertEquals(HUGO_GENE_SYMBOL_1, genericAssayEnrichment.getStableId()); Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size()); GroupStatistics unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0); Assert.assertEquals("unaltered samples", unalteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.55"), unalteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.6363961030678927"), unalteredGroupStats.getStandardDeviation()); GroupStatistics alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1); Assert.assertEquals("altered samples", alteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.5"), alteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.7071067811865476"), alteredGroupStats.getStandardDeviation()); Assert.assertEquals(new BigDecimal("0.9475795430163914"), genericAssayEnrichment.getpValue()); 
genericAssayEnrichment = result.get(1); Assert.assertEquals(HUGO_GENE_SYMBOL_2, genericAssayEnrichment.getStableId()); Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size()); unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0); Assert.assertEquals("unaltered samples", unalteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("2.65"), unalteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("0.4949747468305834"), unalteredGroupStats.getStandardDeviation()); alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1); Assert.assertEquals("altered samples", alteredGroupStats.getName()); Assert.assertEquals(new BigDecimal("3.05"), alteredGroupStats.getMeanExpression()); Assert.assertEquals(new BigDecimal("2.7577164466275352"), alteredGroupStats.getStandardDeviation()); Assert.assertEquals(new BigDecimal("0.8716148250471419"), genericAssayEnrichment.getpValue()); } }
agpl-3.0
hpehl/testsuite
basic/src/test/java/org/jboss/hal/testsuite/test/configuration/undertow/ServletContainerTestCase.java
5479
package org.jboss.hal.testsuite.test.configuration.undertow; import org.apache.commons.lang.RandomStringUtils; import org.jboss.arquillian.graphene.page.Page; import org.jboss.arquillian.junit.Arquillian; import org.jboss.hal.testsuite.category.Shared; import org.jboss.hal.testsuite.page.config.UndertowServletPage; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.wildfly.extras.creaper.core.online.operations.Address; import org.wildfly.extras.creaper.core.online.operations.OperationException; import java.io.IOException; import java.util.concurrent.TimeoutException; @RunWith(Arquillian.class) @Category(Shared.class) public class ServletContainerTestCase extends UndertowTestCaseAbstract { @Page private UndertowServletPage page; //identifiers private static final String ALLOW_NON_STANDARD_WRAPPERS = "allow-non-standard-wrappers"; private static final String DEFAULT_BUFFER_CACHE = "default-buffer-cache"; private static final String DEFAULT_ENCODING = "default-encoding"; private static final String DEFAULT_SESSION_TIMEOUT = "default-session-timeout"; private static final String DIRECTORY_LISTING = "directory-listing"; private static final String DISABLE_CACHING_FOR_SECURED_PAGES = "disable-caching-for-secured-pages"; private static final String EAGER_FILTER_INITIALIZATION = "eager-filter-initialization"; private static final String IGNORE_FLUSH = "ignore-flush"; private static final String STACK_TRACE_ON_ERROR = "stack-trace-on-error"; private static final String USE_LISTENER_ENCODING = "use-listener-encoding"; //values private static final String STACK_TRACE_ON_ERROR_VALUE = "all"; private static final String SERVLET_CONTAINER = "servlet-container_" + RandomStringUtils.randomAlphanumeric(5); private static final Address SERVLET_CONTAINER_ADDRESS = UNDERTOW_ADDRESS.and("servlet-container", SERVLET_CONTAINER); 
@BeforeClass public static void setUp() throws InterruptedException, IOException, TimeoutException { operations.add(SERVLET_CONTAINER_ADDRESS); } @Before public void before() { page.navigate(); page.selectServletContainer(SERVLET_CONTAINER); } @AfterClass public static void tearDown() throws InterruptedException, IOException, TimeoutException, OperationException { operations.remove(SERVLET_CONTAINER_ADDRESS); } @Test public void setAllowNonStandardWrappersToTrue() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, ALLOW_NON_STANDARD_WRAPPERS, true); } @Test public void setAllowNonStandardWrappersToFalse() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, ALLOW_NON_STANDARD_WRAPPERS, false); } @Test public void editDefaultBufferCache() throws Exception { editTextAndVerify(SERVLET_CONTAINER_ADDRESS, DEFAULT_BUFFER_CACHE, undertowOps.createBufferCache()); } @Test public void editDefaultEncoding() throws Exception { editTextAndVerify(SERVLET_CONTAINER_ADDRESS, DEFAULT_ENCODING); } @Test public void editDefaultSessionTimeout() throws Exception { editTextAndVerify(SERVLET_CONTAINER_ADDRESS, DEFAULT_SESSION_TIMEOUT, 42); } @Test public void editDefaultSessionTimeoutInvalid() throws Exception { verifyIfErrorAppears(DEFAULT_SESSION_TIMEOUT, "54sdfg"); } @Test public void setDirectoryListingToTrue() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DIRECTORY_LISTING, true); } @Test public void setDirectoryListingToFalse() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DIRECTORY_LISTING, false); } @Test public void setDisableCachingForSecuredPagesToTrue() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DISABLE_CACHING_FOR_SECURED_PAGES, true); } @Test public void setDisableCachingForSecuredPagesToFalse() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, DISABLE_CACHING_FOR_SECURED_PAGES, false); } @Test public void setIgnoreFlushToTrue() throws Exception { 
editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, IGNORE_FLUSH, true); } @Test public void setIgnoreFlushToFalse() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, IGNORE_FLUSH, false); } @Test public void setEagerFilterInitializationToTrue() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, EAGER_FILTER_INITIALIZATION, true); } @Test public void setEagerFilterInitializationToFalse() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, EAGER_FILTER_INITIALIZATION, false); } @Test public void selectStackTraceOnError() throws Exception { selectOptionAndVerify(SERVLET_CONTAINER_ADDRESS, STACK_TRACE_ON_ERROR, STACK_TRACE_ON_ERROR_VALUE); } @Test public void setUseListenerEncodingToTrue() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, USE_LISTENER_ENCODING, true); } @Test public void setUseListenerEncodingToFalse() throws Exception { editCheckboxAndVerify(SERVLET_CONTAINER_ADDRESS, USE_LISTENER_ENCODING, false); } }
lgpl-2.1
ljo/exist
src/org/exist/management/DummyAgent.java
1815
/* * eXist Open Source Native XML Database * Copyright (C) 2001-07 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * * $Id$ */ package org.exist.management; import org.exist.management.impl.PerInstanceMBean; import org.exist.storage.BrokerPool; import org.exist.util.DatabaseConfigurationException; /** * A dummy agent which will be used if JMX is disabled. It just acts as an empty * placeholder. */ public class DummyAgent implements Agent { @Override public void initDBInstance(final BrokerPool instance) { // do nothing } @Override public void closeDBInstance(final BrokerPool instance) { // nothing to do } @Override public void addMBean(final PerInstanceMBean mbean) throws DatabaseConfigurationException { // just do nothing } @Override public void changeStatus(final BrokerPool instance, final TaskStatus actualStatus) { // nothing to do } @Override public void updateStatus(final BrokerPool instance, final int percentage) { // nothing to do } }
lgpl-2.1
Alfresco/alfresco-repository
src/main/java/org/alfresco/repo/management/subsystems/LuceneChildApplicationContextFactory.java
1669
/* * #%L * Alfresco Repository * %% * Copyright (C) 2005 - 2016 Alfresco Software Limited * %% * This file is part of the Alfresco software. * If the software was purchased under a paid Alfresco license, the terms of * the paid license agreement will prevail. Otherwise, the software is * provided under the following open source license terms: * * Alfresco is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Alfresco is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Alfresco. If not, see <http://www.gnu.org/licenses/>. * #L% */ package org.alfresco.repo.management.subsystems; import java.io.IOException; /** * @author Andy * */ public class LuceneChildApplicationContextFactory extends ChildApplicationContextFactory { /* (non-Javadoc) * @see org.alfresco.repo.management.subsystems.ChildApplicationContextFactory#createInitialState() */ @Override protected PropertyBackedBeanState createInitialState() throws IOException { return new ApplicationContextState(true); } protected void destroy(boolean isPermanent) { super.destroy(isPermanent); doInit(); } }
lgpl-3.0
Alfresco/alfresco-repository
src/main/java/org/alfresco/repo/template/LuceneSearchResultsMap.java
2177
/* * #%L * Alfresco Repository * %% * Copyright (C) 2005 - 2016 Alfresco Software Limited * %% * This file is part of the Alfresco software. * If the software was purchased under a paid Alfresco license, the terms of * the paid license agreement will prevail. Otherwise, the software is * provided under the following open source license terms: * * Alfresco is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Alfresco is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Alfresco. If not, see <http://www.gnu.org/licenses/>. * #L% */ package org.alfresco.repo.template; import java.io.StringReader; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.model.ContentModel; import org.alfresco.service.ServiceRegistry; import org.alfresco.service.cmr.repository.ContentReader; import org.alfresco.service.cmr.repository.NodeRef; import org.dom4j.Document; import org.dom4j.Element; import org.dom4j.io.SAXReader; /** * Provides functionality to execute a Lucene search string and return TemplateNode objects. * * @author Kevin Roast */ public class LuceneSearchResultsMap extends BaseSearchResultsMap { /** * Constructor * * @param parent The parent TemplateNode to execute searches from * @param services The ServiceRegistry to use */ public LuceneSearchResultsMap(TemplateNode parent, ServiceRegistry services) { super(parent, services); } /** * @see org.alfresco.repo.template.BaseTemplateMap#get(java.lang.Object) */ public Object get(Object key) { // execute the search return query(key.toString()); } }
lgpl-3.0
ruhan1/pnc
common/src/test/java/org/jboss/pnc/common/util/StreamCollectorsTest.java
1999
/** * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.common.util; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Stream; import static org.assertj.core.api.Assertions.assertThat; /** * Author: Michal Szynkiewicz, michal.l.szynkiewicz@gmail.com * Date: 9/15/16 * Time: 1:37 PM */ public class StreamCollectorsTest { @Test public void shouldFlattenTwoLists() { List<String> listOne = Arrays.asList("one-1", "one-2", "one-3"); List<String> listTwo = Arrays.asList("two-1", "two-2"); List<String> actual = Stream.of(listOne, listTwo).collect(StreamCollectors.toFlatList()); List<String> expected = new ArrayList<>(listOne); expected.addAll(listTwo); assertThat(actual).hasSameElementsAs(expected); } @Test public void shouldFlattenOneList() { List<String> listOne = Arrays.asList("one-1", "one-2", "one-3"); List<String> actual = Stream.of(listOne).collect(StreamCollectors.toFlatList()); assertThat(actual).hasSameElementsAs(listOne); } @Test public void shouldFlattenNoList() { List<String> actual = Stream.<List<String>>of().collect(StreamCollectors.toFlatList()); assertThat(actual).isNotNull().isEmpty(); } }
apache-2.0
jarst/camel
tests/camel-itest-karaf/src/test/java/org/apache/camel/itest/karaf/CamelChronicleTest.java
1206
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.itest.karaf; import org.junit.Test; import org.junit.Ignore; import org.junit.runner.RunWith; import org.ops4j.pax.exam.junit.PaxExam; @Ignore @RunWith(PaxExam.class) public class CamelChronicleTest extends BaseKarafTest { public static final String COMPONENT = "chronicle"; @Test public void test() throws Exception { testComponent(COMPONENT); } }
apache-2.0
wso2/carbon-identity-framework
components/identity-mgt/org.wso2.carbon.identity.mgt/src/main/java/org/wso2/carbon/identity/mgt/util/Utils.java
16553
/* * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.mgt.util; import org.apache.axiom.om.util.Base64; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.neethi.Policy; import org.apache.neethi.PolicyEngine; import org.wso2.carbon.CarbonConstants; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.identity.base.IdentityException; import org.wso2.carbon.identity.mgt.IdentityMgtConfig; import org.wso2.carbon.identity.mgt.constants.IdentityMgtConstants; import org.wso2.carbon.identity.mgt.dto.UserDTO; import org.wso2.carbon.identity.mgt.internal.IdentityMgtServiceComponent; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.user.api.Tenant; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.api.UserStoreManager; import org.wso2.carbon.user.core.UserCoreConstants; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.user.core.tenant.TenantManager; import org.wso2.carbon.user.core.util.UserCoreUtil; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import 
org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.io.ByteArrayInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; /** * */ public class Utils { private static final Log log = LogFactory.getLog(Utils.class); private Utils() { } public static UserDTO processUserId(String userId) throws IdentityException { if (userId == null || userId.trim().length() < 1) { throw IdentityException.error("Can not proceed with out a user id"); } UserDTO userDTO = new UserDTO(userId); if (!IdentityMgtConfig.getInstance().isSaasEnabled()) { validateTenant(userDTO); } userDTO.setTenantId(getTenantId(userDTO.getTenantDomain())); return userDTO; } public static void validateTenant(UserDTO user) throws IdentityException { if (user.getTenantDomain() != null && !user.getTenantDomain().isEmpty()) { if (!user.getTenantDomain().equals( PrivilegedCarbonContext.getThreadLocalCarbonContext() .getTenantDomain())) { throw IdentityException.error( "Failed access to unauthorized tenant domain"); } user.setTenantId(getTenantId(user.getTenantDomain())); } } /** * gets no of verified user challenges * * @param userDTO bean class that contains user and tenant Information * @return no of verified challenges * @throws IdentityException if fails */ public static int getVerifiedChallenges(UserDTO userDTO) throws IdentityException { int noOfChallenges = 0; try { UserRegistry registry = IdentityMgtServiceComponent.getRegistryService(). 
getConfigSystemRegistry(MultitenantConstants.SUPER_TENANT_ID); String identityKeyMgtPath = IdentityMgtConstants.IDENTITY_MANAGEMENT_CHALLENGES + RegistryConstants.PATH_SEPARATOR + userDTO.getUserId() + RegistryConstants.PATH_SEPARATOR + userDTO.getUserId(); Resource resource; if (registry.resourceExists(identityKeyMgtPath)) { resource = registry.get(identityKeyMgtPath); String property = resource.getProperty(IdentityMgtConstants.VERIFIED_CHALLENGES); if (property != null) { return Integer.parseInt(property); } } } catch (RegistryException e) { log.error("Error while processing userKey", e); } return noOfChallenges; } /** * gets the tenant id from the tenant domain * * @param domain - tenant domain name * @return tenantId * @throws IdentityException if fails or tenant doesn't exist */ public static int getTenantId(String domain) throws IdentityException { int tenantId; TenantManager tenantManager = IdentityMgtServiceComponent.getRealmService().getTenantManager(); if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(domain)) { tenantId = MultitenantConstants.SUPER_TENANT_ID; if (log.isDebugEnabled()) { String msg = "Domain is not defined implicitly. 
So it is Super Tenant domain."; log.debug(msg); } } else { try { tenantId = tenantManager.getTenantId(domain); if (tenantId < 1 && tenantId != MultitenantConstants.SUPER_TENANT_ID) { String msg = "This action can not be performed by the users in non-existing domains."; log.error(msg); throw IdentityException.error(msg); } } catch (org.wso2.carbon.user.api.UserStoreException e) { String msg = "Error in retrieving tenant id of tenant domain: " + domain + "."; log.error(msg, e); throw IdentityException.error(msg, e); } } return tenantId; } /** * Get the claims from the user store manager * * @param userName user name * @param tenantId tenantId * @param claim claim name * @return claim value * @throws IdentityException if fails */ public static String getClaimFromUserStoreManager(String userName, int tenantId, String claim) throws IdentityException { org.wso2.carbon.user.core.UserStoreManager userStoreManager = null; RealmService realmService = IdentityMgtServiceComponent.getRealmService(); String claimValue = ""; try { if (realmService.getTenantUserRealm(tenantId) != null) { userStoreManager = (org.wso2.carbon.user.core.UserStoreManager) realmService.getTenantUserRealm(tenantId). 
getUserStoreManager(); } } catch (Exception e) { String msg = "Error retrieving the user store manager for tenant id : " + tenantId; log.error(msg, e); throw IdentityException.error(msg, e); } try { if (userStoreManager != null) { Map<String, String> claimsMap = userStoreManager .getUserClaimValues(userName, new String[]{claim}, UserCoreConstants.DEFAULT_PROFILE); if (claimsMap != null && !claimsMap.isEmpty()) { claimValue = claimsMap.get(claim); } } return claimValue; } catch (Exception e) { String msg = "Unable to retrieve the claim for user : " + userName; log.error(msg, e); throw IdentityException.error(msg, e); } } public static Map<String,String> getClaimsFromUserStoreManager(String userName, int tenantId, String[] claims) throws IdentityException { Map<String, String> claimValues = new HashMap<>(); org.wso2.carbon.user.core.UserStoreManager userStoreManager = null; RealmService realmService = IdentityMgtServiceComponent.getRealmService(); try { if (realmService.getTenantUserRealm(tenantId) != null) { userStoreManager = (org.wso2.carbon.user.core.UserStoreManager) realmService.getTenantUserRealm(tenantId). getUserStoreManager(); } } catch (UserStoreException e) { throw IdentityException.error("Error retrieving the user store manager for tenant id : " + tenantId, e); } try { if (userStoreManager != null) { claimValues = userStoreManager.getUserClaimValues(userName, claims, UserCoreConstants.DEFAULT_PROFILE); } } catch (Exception e) { throw IdentityException.error("Unable to retrieve the claim for user : " + userName, e); } return claimValues; } /** * get email address from user store * * @param userName user name * @param tenantId tenant id * @return email address */ public static String getEmailAddressForUser(String userName, int tenantId) { String email = null; try { if (log.isDebugEnabled()) { log.debug("Retrieving email address from user profile."); } Tenant tenant = IdentityMgtServiceComponent.getRealmService(). 
getTenantManager().getTenant(tenantId); if (tenant != null && tenant.getAdminName().equals(userName)) { email = tenant.getEmail(); } if (email == null || email.trim().length() < 1) { email = getClaimFromUserStoreManager(userName, tenantId, UserCoreConstants.ClaimTypeURIs.EMAIL_ADDRESS); } if ((email == null || email.trim().length() < 1) && MultitenantUtils.isEmailUserName()) { email = UserCoreUtil.removeDomainFromName(userName); } } catch (Exception e) { String msg = "Unable to retrieve an email address associated with the given user : " + userName; log.warn(msg, e); // It is common to have users with no email address defined. } return email; } /** * Update Password with the user input * * @return true - if password was successfully reset * @throws IdentityException */ public static boolean updatePassword(String userId, int tenantId, String password) throws IdentityException { String tenantDomain = null; if (userId == null || userId.trim().length() < 1 || password == null || password.trim().length() < 1) { String msg = "Unable to find the required information for updating password"; log.error(msg); throw IdentityException.error(msg); } try { UserStoreManager userStoreManager = IdentityMgtServiceComponent. 
getRealmService().getTenantUserRealm(tenantId).getUserStoreManager(); userStoreManager.updateCredentialByAdmin(userId, password); if (log.isDebugEnabled()) { String msg = "Password is updated for user: " + userId; log.debug(msg); } return true; } catch (UserStoreException e) { String msg = "Error in changing the password, user name: " + userId + " domain: " + tenantDomain + "."; log.error(msg, e); throw IdentityException.error(msg, e); } } /** * @param value * @return * @throws UserStoreException */ public static String doHash(String value) throws UserStoreException { try { String digsestFunction = "SHA-256"; MessageDigest dgst = MessageDigest.getInstance(digsestFunction); byte[] byteValue = dgst.digest(value.getBytes()); return Base64.encode(byteValue); } catch (NoSuchAlgorithmException e) { log.error(e.getMessage(), e); throw new UserStoreException(e.getMessage(), e); } } /** * Set claim to user store manager * * @param userName user name * @param tenantId tenant id * @param claim claim uri * @param value claim value * @throws IdentityException if fails */ public static void setClaimInUserStoreManager(String userName, int tenantId, String claim, String value) throws IdentityException { org.wso2.carbon.user.core.UserStoreManager userStoreManager = null; RealmService realmService = IdentityMgtServiceComponent.getRealmService(); try { if (realmService.getTenantUserRealm(tenantId) != null) { userStoreManager = (org.wso2.carbon.user.core.UserStoreManager) realmService.getTenantUserRealm(tenantId). 
getUserStoreManager(); } } catch (Exception e) { String msg = "Error retrieving the user store manager for the tenant"; log.error(msg, e); throw IdentityException.error(msg, e); } try { if (userStoreManager != null) { String oldValue = userStoreManager.getUserClaimValue(userName, claim, null); if (oldValue == null || !oldValue.equals(value)) { Map<String,String> claimMap = new HashMap<String,String>(); claimMap.put(claim, value); userStoreManager.setUserClaimValues(userName, claimMap, UserCoreConstants.DEFAULT_PROFILE); } } } catch (Exception e) { String msg = "Unable to set the claim for user : " + userName; log.error(msg, e); throw IdentityException.error(msg, e); } } public static String getUserStoreDomainName(String userName) { int index; String userDomain; if ((index = userName.indexOf(CarbonConstants.DOMAIN_SEPARATOR)) >= 0) { // remove domain name if exist userDomain = userName.substring(0, index); } else { userDomain = UserCoreConstants.PRIMARY_DEFAULT_DOMAIN_NAME; } return userDomain; } public static String[] getChallengeUris() { //TODO return new String[]{IdentityMgtConstants.DEFAULT_CHALLENGE_QUESTION_URI01, IdentityMgtConstants.DEFAULT_CHALLENGE_QUESTION_URI02}; } public static Policy getSecurityPolicy() { String policyString = " <wsp:Policy wsu:Id=\"UTOverTransport\" xmlns:wsp=\"http://schemas.xmlsoap.org/ws/2004/09/policy\"\n" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">\n" + " <wsp:ExactlyOne>\n" + " <wsp:All>\n" + " <sp:TransportBinding xmlns:sp=\"http://schemas.xmlsoap.org/ws/2005/07/securitypolicy\">\n" + " <wsp:Policy>\n" + " <sp:TransportToken>\n" + " <wsp:Policy>\n" + " <sp:HttpsToken RequireClientCertificate=\"true\"/>\n" + " </wsp:Policy>\n" + " </sp:TransportToken>\n" + " <sp:AlgorithmSuite>\n" + " <wsp:Policy>\n" + " <sp:Basic256/>\n" + " </wsp:Policy>\n" + " </sp:AlgorithmSuite>\n" + " <sp:Layout>\n" + " <wsp:Policy>\n" + " <sp:Lax/>\n" + " </wsp:Policy>\n" + " </sp:Layout>\n" + " 
<sp:IncludeTimestamp/>\n" + " </wsp:Policy>\n" + " </sp:TransportBinding>\n" + " </wsp:All>\n" + " </wsp:ExactlyOne>\n" + " </wsp:Policy>"; return PolicyEngine.getPolicy(new ByteArrayInputStream(policyString.getBytes())); } }
apache-2.0
yuruki/camel
components/camel-twitter/src/main/java/org/apache/camel/component/twitter/search/TwitterSearchEndpoint.java
2347
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.twitter.search; import org.apache.camel.Consumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.component.twitter.AbstractTwitterEndpoint; import org.apache.camel.component.twitter.TwitterConfiguration; import org.apache.camel.component.twitter.TwitterHelper; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriPath; import org.apache.camel.util.ObjectHelper; /** * The Twitter Search component consumes search results. */ @UriEndpoint(firstVersion = "2.10.0", scheme = "twitter-search", title = "Twitter Search", syntax = "twitter-search:keywords", consumerClass = SearchConsumerHandler.class, label = "api,social") public class TwitterSearchEndpoint extends AbstractTwitterEndpoint { @UriPath(description = "The search keywords. 
Multiple values can be separated with comma.") @Metadata(required = "true") private String keywords; public TwitterSearchEndpoint(String uri, String remaining, TwitterSearchComponent component, TwitterConfiguration properties) { super(uri, component, properties); this.keywords = remaining; } @Override public Producer createProducer() throws Exception { return new SearchProducer(this, keywords); } @Override public Consumer createConsumer(Processor processor) throws Exception { return TwitterHelper.createConsumer(processor, this, new SearchConsumerHandler(this, keywords)); } }
apache-2.0
jomarko/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-api/src/main/java/org/kie/workbench/common/stunner/bpmn/definition/EndCompensationEvent.java
4567
/* * Copyright 2018 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.stunner.bpmn.definition; import java.util.Objects; import javax.validation.Valid; import org.jboss.errai.common.client.api.annotations.MapsTo; import org.jboss.errai.common.client.api.annotations.Portable; import org.jboss.errai.databinding.client.api.Bindable; import org.kie.workbench.common.forms.adf.definitions.annotations.FieldParam; import org.kie.workbench.common.forms.adf.definitions.annotations.FormDefinition; import org.kie.workbench.common.forms.adf.definitions.annotations.FormField; import org.kie.workbench.common.forms.adf.definitions.settings.FieldPolicy; import org.kie.workbench.common.stunner.bpmn.definition.property.background.BackgroundSet; import org.kie.workbench.common.stunner.bpmn.definition.property.dimensions.CircleDimensionSet; import org.kie.workbench.common.stunner.bpmn.definition.property.dimensions.Radius; import org.kie.workbench.common.stunner.bpmn.definition.property.event.compensation.CompensationEventExecutionSet; import org.kie.workbench.common.stunner.bpmn.definition.property.font.FontSet; import org.kie.workbench.common.stunner.bpmn.definition.property.general.BPMNGeneralSet; import org.kie.workbench.common.stunner.core.definition.annotation.Definition; import org.kie.workbench.common.stunner.core.definition.annotation.Property; import org.kie.workbench.common.stunner.core.definition.annotation.morph.Morph; 
import org.kie.workbench.common.stunner.core.util.HashUtil; import static org.kie.workbench.common.forms.adf.engine.shared.formGeneration.processing.fields.fieldInitializers.nestedForms.AbstractEmbeddedFormsInitializer.COLLAPSIBLE_CONTAINER; import static org.kie.workbench.common.forms.adf.engine.shared.formGeneration.processing.fields.fieldInitializers.nestedForms.AbstractEmbeddedFormsInitializer.FIELD_CONTAINER_PARAM; @Portable @Bindable @Definition @Morph(base = BaseEndEvent.class) @FormDefinition( startElement = "general", policy = FieldPolicy.ONLY_MARKED, defaultFieldSettings = {@FieldParam(name = FIELD_CONTAINER_PARAM, value = COLLAPSIBLE_CONTAINER)} ) public class EndCompensationEvent extends BaseEndEvent { @Property @FormField(afterElement = "general") @Valid private CompensationEventExecutionSet executionSet; public EndCompensationEvent() { this(new BPMNGeneralSet(""), new BackgroundSet(), new FontSet(), new CircleDimensionSet(new Radius()), new CompensationEventExecutionSet()); } public EndCompensationEvent(final @MapsTo("general") BPMNGeneralSet general, final @MapsTo("backgroundSet") BackgroundSet backgroundSet, final @MapsTo("fontSet") FontSet fontSet, final @MapsTo("dimensionsSet") CircleDimensionSet dimensionsSet, final @MapsTo("executionSet") CompensationEventExecutionSet executionSet) { super(general, backgroundSet, fontSet, dimensionsSet); this.executionSet = executionSet; } public CompensationEventExecutionSet getExecutionSet() { return executionSet; } public void setExecutionSet(CompensationEventExecutionSet executionSet) { this.executionSet = executionSet; } @Override public int hashCode() { return HashUtil.combineHashCodes(super.hashCode(), Objects.hashCode(executionSet)); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o instanceof EndCompensationEvent) { EndCompensationEvent other = (EndCompensationEvent) o; return super.equals(other) && Objects.equals(executionSet, other.executionSet); } return false; } }
apache-2.0
issaclee/silkroad
zeppelin-server/src/testpjava/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
7524
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.rest; import static org.junit.Assert.assertEquals; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.io.FileUtils; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.scheduler.Job.Status; import org.apache.zeppelin.server.ZeppelinServer; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import com.google.gson.Gson; /** * Test against spark cluster. 
* Spark cluster is started by CI server using testing/startSparkCluster.sh */ public class ZeppelinSparkClusterTest extends AbstractTestRestApi { Gson gson = new Gson(); @BeforeClass public static void init() throws Exception { AbstractTestRestApi.startUp(); } @AfterClass public static void destroy() throws Exception { AbstractTestRestApi.shutDown(); } private void waitForFinish(Paragraph p) { while (p.getStatus() != Status.FINISHED && p.getStatus() != Status.ERROR && p.getStatus() != Status.ABORT) { try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } } } @Test public void basicRDDTransformationAndActionTest() throws IOException { // create new note Note note = ZeppelinServer.notebook.createNote(); // run markdown paragraph, again Paragraph p = note.addParagraph(); Map config = p.getConfig(); config.put("enabled", true); p.setConfig(config); p.setText("%spark print(sc.parallelize(1 to 10).reduce(_ + _))"); note.run(p.getId()); waitForFinish(p); assertEquals(Status.FINISHED, p.getStatus()); assertEquals("55", p.getResult().message()); ZeppelinServer.notebook.removeNote(note.id()); } @Test public void pySparkTest() throws IOException { // create new note Note note = ZeppelinServer.notebook.createNote(); int sparkVersion = getSparkVersionNumber(note); if (isPyspark() && sparkVersion >= 12) { // pyspark supported from 1.2.1 // run markdown paragraph, again Paragraph p = note.addParagraph(); Map config = p.getConfig(); config.put("enabled", true); p.setConfig(config); p.setText("%pyspark print(sc.parallelize(range(1, 11)).reduce(lambda a, b: a + b))"); note.run(p.getId()); waitForFinish(p); assertEquals(Status.FINISHED, p.getStatus()); assertEquals("55\n", p.getResult().message()); } ZeppelinServer.notebook.removeNote(note.id()); } @Test public void pySparkAutoConvertOptionTest() throws IOException { // create new note Note note = ZeppelinServer.notebook.createNote(); int sparkVersion = getSparkVersionNumber(note); if (isPyspark() && 
sparkVersion >= 14) { // auto_convert enabled from spark 1.4 // run markdown paragraph, again Paragraph p = note.addParagraph(); Map config = p.getConfig(); config.put("enabled", true); p.setConfig(config); p.setText("%pyspark\nfrom pyspark.sql.functions import *\n" + "print(sqlContext.range(0, 10).withColumn('uniform', rand(seed=10) * 3.14).count())"); note.run(p.getId()); waitForFinish(p); assertEquals(Status.FINISHED, p.getStatus()); assertEquals("10\n", p.getResult().message()); } ZeppelinServer.notebook.removeNote(note.id()); } @Test public void zRunTest() throws IOException { // create new note Note note = ZeppelinServer.notebook.createNote(); Paragraph p0 = note.addParagraph(); Map config0 = p0.getConfig(); config0.put("enabled", true); p0.setConfig(config0); p0.setText("%spark z.run(1)"); Paragraph p1 = note.addParagraph(); Map config1 = p1.getConfig(); config1.put("enabled", true); p1.setConfig(config1); p1.setText("%spark val a=10"); Paragraph p2 = note.addParagraph(); Map config2 = p2.getConfig(); config2.put("enabled", true); p2.setConfig(config2); p2.setText("%spark print(a)"); note.run(p0.getId()); waitForFinish(p0); assertEquals(Status.FINISHED, p0.getStatus()); note.run(p2.getId()); waitForFinish(p2); assertEquals(Status.FINISHED, p2.getStatus()); assertEquals("10", p2.getResult().message()); ZeppelinServer.notebook.removeNote(note.id()); } @Test public void pySparkDepLoaderTest() throws IOException { // create new note Note note = ZeppelinServer.notebook.createNote(); if (isPyspark() && getSparkVersionNumber(note) >= 14) { // restart spark interpreter List<InterpreterSetting> settings = ZeppelinServer.notebook.getBindedInterpreterSettings(note.id()); for (InterpreterSetting setting : settings) { if (setting.getGroup().equals("spark")) { ZeppelinServer.notebook.getInterpreterFactory().restart(setting.id()); break; } } // load dep Paragraph p0 = note.addParagraph(); Map config = p0.getConfig(); config.put("enabled", true); p0.setConfig(config); 
p0.setText("%dep z.load(\"com.databricks:spark-csv_2.11:1.2.0\")"); note.run(p0.getId()); waitForFinish(p0); assertEquals(Status.FINISHED, p0.getStatus()); // write test csv file File tmpFile = File.createTempFile("test", "csv"); FileUtils.write(tmpFile, "a,b\n1,2"); // load data using libraries from dep loader Paragraph p1 = note.addParagraph(); p1.setConfig(config); p1.setText("%pyspark\n" + "from pyspark.sql import SQLContext\n" + "print(sqlContext.read.format('com.databricks.spark.csv')" + ".load('"+ tmpFile.getAbsolutePath() +"').count())"); note.run(p1.getId()); waitForFinish(p1); assertEquals(Status.FINISHED, p1.getStatus()); assertEquals("2\n", p1.getResult().message()); } } /** * Get spark version number as a numerical value. * eg. 1.1.x => 11, 1.2.x => 12, 1.3.x => 13 ... */ private int getSparkVersionNumber(Note note) { Paragraph p = note.addParagraph(); Map config = p.getConfig(); config.put("enabled", true); p.setConfig(config); p.setText("%spark print(sc.version)"); note.run(p.getId()); waitForFinish(p); assertEquals(Status.FINISHED, p.getStatus()); String sparkVersion = p.getResult().message(); System.out.println("Spark version detected " + sparkVersion); String[] split = sparkVersion.split("\\."); int version = Integer.parseInt(split[0]) * 10 + Integer.parseInt(split[1]); return version; } }
apache-2.0
gfyoung/elasticsearch
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
10810
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.watcher.notification.email; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.watcher.crypto.CryptoService; import org.elasticsearch.xpack.watcher.notification.NotificationService; import javax.mail.MessagingException; import java.util.Arrays; import java.util.List; /** * A component to store email credentials and handle sending email notifications. 
*/ public class EmailService extends NotificationService<Account> { private static final Setting<String> SETTING_DEFAULT_ACCOUNT = Setting.simpleString("xpack.notification.email.default_account", Property.Dynamic, Property.NodeScope); private static final Setting.AffixSetting<String> SETTING_PROFILE = Setting.affixKeySetting("xpack.notification.email.account.", "profile", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<Settings> SETTING_EMAIL_DEFAULTS = Setting.affixKeySetting("xpack.notification.email.account.", "email_defaults", (key) -> Setting.groupSetting(key + ".", Property.Dynamic, Property.NodeScope)); // settings that can be configured as smtp properties private static final Setting.AffixSetting<Boolean> SETTING_SMTP_AUTH = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.auth", (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<Boolean> SETTING_SMTP_STARTTLS_ENABLE = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.starttls.enable", (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<Boolean> SETTING_SMTP_STARTTLS_REQUIRED = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.starttls.required", (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<String> SETTING_SMTP_HOST = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.host", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<Integer> SETTING_SMTP_PORT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.port", (key) -> Setting.intSetting(key, 587, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<String> SETTING_SMTP_USER = 
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.user", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<String> SETTING_SMTP_PASSWORD = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.password", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); private static final Setting.AffixSetting<SecureString> SETTING_SECURE_PASSWORD = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.secure_password", (key) -> SecureSetting.secureString(key, null)); private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_TIMEOUT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.timeout", (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_CONNECTION_TIMEOUT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.connection_timeout", (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_WRITE_TIMEOUT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.write_timeout", (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<String> SETTING_SMTP_LOCAL_ADDRESS = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.local_address", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<String> SETTING_SMTP_SSL_TRUST_ADDRESS = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.ssl.trust", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<Integer> SETTING_SMTP_LOCAL_PORT = 
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.local_port", (key) -> Setting.intSetting(key, 25, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<Boolean> SETTING_SMTP_SEND_PARTIAL = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.send_partial", (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting<Boolean> SETTING_SMTP_WAIT_ON_QUIT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.wait_on_quit", (key) -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)); private final CryptoService cryptoService; public EmailService(Settings settings, @Nullable CryptoService cryptoService, ClusterSettings clusterSettings) { super(settings, "email", clusterSettings, EmailService.getSettings()); this.cryptoService = cryptoService; // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_PROFILE, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_EMAIL_DEFAULTS, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_AUTH, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_STARTTLS_ENABLE, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_STARTTLS_REQUIRED, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_HOST, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PORT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_USER, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PASSWORD, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_PASSWORD, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_TIMEOUT, (s, o) -> 
{}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_CONNECTION_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WRITE_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SSL_TRUST_ADDRESS, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_ADDRESS, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_PORT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SEND_PARTIAL, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WAIT_ON_QUIT, (s, o) -> {}, (s, o) -> {}); // do an initial load reload(settings); } @Override protected Account createAccount(String name, Settings accountSettings) { Account.Config config = new Account.Config(name, accountSettings); return new Account(config, cryptoService, logger); } public EmailSent send(Email email, Authentication auth, Profile profile, String accountName) throws MessagingException { Account account = getAccount(accountName); if (account == null) { throw new IllegalArgumentException("failed to send email with subject [" + email.subject() + "] via account [" + accountName + "]. 
account does not exist"); } return send(email, auth, profile, account); } private EmailSent send(Email email, Authentication auth, Profile profile, Account account) throws MessagingException { assert account != null; try { email = account.send(email, auth, profile); } catch (MessagingException me) { throw new MessagingException("failed to send email with subject [" + email.subject() + "] via account [" + account.name() + "]", me); } return new EmailSent(account.name(), email); } public static class EmailSent { private final String account; private final Email email; public EmailSent(String account, Email email) { this.account = account; this.email = email; } public String account() { return account; } public Email email() { return email; } } public static List<Setting<?>> getSettings() { return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_PROFILE, SETTING_EMAIL_DEFAULTS, SETTING_SMTP_AUTH, SETTING_SMTP_HOST, SETTING_SMTP_PASSWORD, SETTING_SMTP_PORT, SETTING_SMTP_STARTTLS_ENABLE, SETTING_SMTP_USER, SETTING_SMTP_STARTTLS_REQUIRED, SETTING_SMTP_TIMEOUT, SETTING_SMTP_CONNECTION_TIMEOUT, SETTING_SMTP_WRITE_TIMEOUT, SETTING_SMTP_LOCAL_ADDRESS, SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS, SETTING_SECURE_PASSWORD); } }
apache-2.0
idea4bsd/idea4bsd
java/java-analysis-impl/src/com/intellij/codeInspection/dataFlow/instructions/MethodCallInstruction.java
7013
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Created by IntelliJ IDEA.
 * User: max
 * Date: Jan 26, 2002
 * Time: 10:48:52 PM
 * To change template for new class use
 * Code Style | Class Templates options (Tools | IDE Options).
 */
package com.intellij.codeInspection.dataFlow.instructions;

import com.intellij.codeInspection.dataFlow.*;
import com.intellij.codeInspection.dataFlow.value.DfaValue;
import com.intellij.psi.*;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Data-flow instruction modeling a call-like operation. Despite the name it covers
 * four flavors (see {@link MethodType}): a regular method/constructor call, a boxing
 * conversion, an unboxing conversion, and a cast.
 *
 * <p>Instances are immutable except for the two {@code myOnly*Args} flags, which are
 * accumulated across data-flow states via {@link #updateOfNullable} to support the
 * {@code Optional.ofNullable} inspections.
 */
public class MethodCallInstruction extends Instruction {
  @Nullable private final PsiCall myCall;                 // null for BOXING/UNBOXING/CAST pseudo-calls
  @Nullable private final PsiType myType;                 // result type of the operation, if known
  @NotNull private final PsiExpression[] myArgs;          // argument expressions (empty for non-call flavors)
  private final boolean myShouldFlushFields;              // whether field states must be invalidated after this call
  @NotNull private final PsiElement myContext;            // the PSI element this instruction was created for
  @Nullable private final PsiMethod myTargetMethod;       // resolved callee, if resolution succeeded
  private final List<MethodContract> myContracts;
  private final MethodType myMethodType;
  @Nullable private final DfaValue myPrecalculatedReturnValue;
  private final boolean myOfNullable;                     // true if this is an Optional.ofNullable(...) call
  private final boolean myVarArgCall;
  private final Map<PsiExpression, Nullness> myArgRequiredNullability;
  // Accumulators for Optional.ofNullable argument tracking; both start optimistic
  // and are cleared as contradicting states are observed.
  private boolean myOnlyNullArgs = true;
  private boolean myOnlyNotNullArgs = true;

  /** The kind of operation this instruction models. */
  public enum MethodType {
    BOXING, UNBOXING, REGULAR_METHOD_CALL, CAST
  }

  /**
   * Creates a pseudo-call instruction (boxing, unboxing or cast) for the given
   * expression. No real call site exists, so call-related state is left empty.
   *
   * @param context    the expression being converted
   * @param methodType the conversion kind (expected: BOXING, UNBOXING or CAST)
   * @param resultType the type produced by the conversion, or null if unknown
   */
  public MethodCallInstruction(@NotNull PsiExpression context, MethodType methodType, @Nullable PsiType resultType) {
    myContext = context;
    myContracts = Collections.emptyList();
    myMethodType = methodType;
    myCall = null;
    myArgs = PsiExpression.EMPTY_ARRAY;
    myType = resultType;
    myShouldFlushFields = false;
    myPrecalculatedReturnValue = null;
    myTargetMethod = null;
    myVarArgCall = false;
    myOfNullable = false;
    myArgRequiredNullability = Collections.emptyMap();
  }

  /**
   * Creates an instruction for a real call site, resolving the target method and
   * precomputing varargs/nullability information for its arguments.
   *
   * @param call                     the call (method call or constructor invocation)
   * @param precalculatedReturnValue a known return value, or null if it must be computed
   * @param contracts                method contracts applicable to the callee
   */
  public MethodCallInstruction(@NotNull PsiCall call, @Nullable DfaValue precalculatedReturnValue, List<MethodContract> contracts) {
    myContext = call;
    myContracts = contracts;
    myMethodType = MethodType.REGULAR_METHOD_CALL;
    myCall = call;
    final PsiExpressionList argList = call.getArgumentList();
    myArgs = argList != null ? argList.getExpressions() : PsiExpression.EMPTY_ARRAY;
    myType = myCall instanceof PsiCallExpression ? ((PsiCallExpression)myCall).getType() : null;

    JavaResolveResult result = call.resolveMethodGenerics();
    myTargetMethod = (PsiMethod)result.getElement();

    PsiSubstitutor substitutor = result.getSubstitutor();
    if (argList != null && myTargetMethod != null) {
      PsiParameter[] parameters = myTargetMethod.getParameterList().getParameters();
      myVarArgCall = isVarArgCall(myTargetMethod, substitutor, myArgs, parameters);
      myArgRequiredNullability = calcArgRequiredNullability(substitutor, parameters);
    } else {
      myVarArgCall = false;
      myArgRequiredNullability = Collections.emptyMap();
    }

    // Array construction (new T[...]) has no side effects on fields; pure methods
    // (per ControlFlowAnalyzer) also leave fields intact. Everything else flushes.
    myShouldFlushFields = !(call instanceof PsiNewExpression && myType != null && myType.getArrayDimensions() > 0) && !isPureCall();
    myPrecalculatedReturnValue = precalculatedReturnValue;
    myOfNullable = call instanceof PsiMethodCallExpression && DfaOptionalSupport.resolveOfNullable((PsiMethodCallExpression)call) != null;
  }

  /**
   * Maps each checked argument expression to the nullability required by the
   * corresponding (substituted) parameter type. For a varargs call the last slot
   * is excluded from checking.
   */
  private Map<PsiExpression, Nullness> calcArgRequiredNullability(PsiSubstitutor substitutor, PsiParameter[] parameters) {
    int checkedCount = Math.min(myArgs.length, parameters.length) - (myVarArgCall ? 1 : 0);

    Map<PsiExpression, Nullness> map = ContainerUtil.newHashMap();
    for (int i = 0; i < checkedCount; i++) {
      map.put(myArgs[i], DfaPsiUtil.getElementNullability(substitutor.substitute(parameters[i].getType()), parameters[i]));
    }
    return map;
  }

  /**
   * Returns true if the given call invokes a varargs method in its "expanded" form,
   * i.e. the variable arguments are passed individually rather than as an array.
   * When arg count equals param count, the call is only treated as expanded if the
   * last argument is not assignable to the (substituted) array parameter type.
   */
  public static boolean isVarArgCall(PsiMethod method, PsiSubstitutor substitutor, PsiExpression[] args, PsiParameter[] parameters) {
    if (!method.isVarArgs()) {
      return false;
    }

    int argCount = args.length;
    int paramCount = parameters.length;
    if (argCount > paramCount) {
      return true;
    }

    if (paramCount > 0 && argCount == paramCount) {
      PsiType lastArgType = args[argCount - 1].getType();
      if (lastArgType != null && !substitutor.substitute(parameters[paramCount - 1].getType()).isAssignableFrom(lastArgType)) {
        return true;
      }
    }
    return false;
  }

  // True when the resolved target is known to be side-effect free.
  private boolean isPureCall() {
    if (myTargetMethod == null) return false;
    return ControlFlowAnalyzer.isPure(myTargetMethod);
  }

  /** @return the result type of this operation, or null if unknown */
  @Nullable
  public PsiType getResultType() {
    return myType;
  }

  /** @return the argument expressions; empty for boxing/unboxing/cast instructions */
  @NotNull
  public PsiExpression[] getArgs() {
    return myArgs;
  }

  public MethodType getMethodType() {
    return myMethodType;
  }

  /** @return whether field states should be invalidated after executing this instruction */
  public boolean shouldFlushFields() {
    return myShouldFlushFields;
  }

  /** @return the resolved callee, or null if resolution failed or this is not a real call */
  @Nullable
  public PsiMethod getTargetMethod() {
    return myTargetMethod;
  }

  public boolean isVarArgCall() {
    return myVarArgCall;
  }

  /**
   * @return the nullability the callee requires for the given argument expression,
   *         or null if the argument is not checked (e.g. a vararg slot)
   */
  @Nullable
  public Nullness getArgRequiredNullability(@NotNull PsiExpression arg) {
    return myArgRequiredNullability.get(arg);
  }

  public List<MethodContract> getContracts() {
    return myContracts;
  }

  @Override
  public DfaInstructionState[] accept(DataFlowRunner runner, DfaMemoryState stateBefore, InstructionVisitor visitor) {
    return visitor.visitMethodCall(this, runner, stateBefore);
  }

  /** @return the call PSI element, or null for boxing/unboxing/cast instructions */
  @Nullable
  public PsiCall getCallExpression() {
    return myCall;
  }

  @NotNull
  public PsiElement getContext() {
    return myContext;
  }

  @Nullable
  public DfaValue getPrecalculatedReturnValue() {
    return myPrecalculatedReturnValue;
  }

  public String toString() {
    return myMethodType == MethodType.UNBOXING ? "UNBOX" : myMethodType == MethodType.BOXING ? "BOX" : "CALL_METHOD: " + (myCall == null ? "null" : myCall.getText());
  }

  /**
   * Records the observed nullness of the argument of an {@code Optional.ofNullable}
   * call in the given memory state. Returns false if this instruction is not an
   * {@code ofNullable} call.
   */
  public boolean updateOfNullable(DfaMemoryState memState, DfaValue arg) {
    if (!myOfNullable) return false;

    if (!memState.isNotNull(arg)) {
      myOnlyNotNullArgs = false;
    }
    if (!memState.isNull(arg)) {
      myOnlyNullArgs = false;
    }
    return true;
  }

  /** @return true if every observed ofNullable argument was null (Optional is always empty) */
  public boolean isOptionalAlwaysNullProblem() {
    return myOfNullable && myOnlyNullArgs;
  }

  /** @return true if every observed ofNullable argument was non-null (Optional is always present) */
  public boolean isOptionalAlwaysNotNullProblem() {
    return myOfNullable && myOnlyNotNullArgs;
  }
}
apache-2.0
shyTNT/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201408/LiveStreamEvent.java
10126
package com.google.api.ads.dfp.jaxws.v201408;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;


/**
 * 
 *             A {@code LiveStreamEvent} encapsulates all the information necessary
 *             to enable DAI (Dynamic Ad Insertion) into a live video stream.
 *             
 *             <p>This includes information such as the start and expected end time of
 *             the event, the URL of the actual content for DFP to pull and insert ads into,
 *             as well as the metadata necessary to generate ad requests during the event.
 *           
 * 
 * <p>Java class for LiveStreamEvent complex type.
 * 
 * <p>The following schema fragment specifies the expected content contained within this class.
 * 
 * <pre>
 * &lt;complexType name="LiveStreamEvent">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
 *         &lt;element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="status" type="{https://www.google.com/apis/ads/publisher/v201408}LiveStreamEventStatus" minOccurs="0"/>
 *         &lt;element name="creationDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
 *         &lt;element name="lastModifiedDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
 *         &lt;element name="startDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
 *         &lt;element name="endDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
 *         &lt;element name="totalEstimatedConcurrentUsers" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
 *         &lt;element name="contentUrls" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;element name="adTags" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;element name="liveStreamEventCode" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 * 
 * <p>NOTE (review): this class is generated by JAXB/XJC from the DFP v201408 service
 * definition. Do not hand-edit behavior here; regenerate from the WSDL instead.
 * 
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LiveStreamEvent", propOrder = {
    "id",
    "name",
    "description",
    "status",
    "creationDateTime",
    "lastModifiedDateTime",
    "startDateTime",
    "endDateTime",
    "totalEstimatedConcurrentUsers",
    "contentUrls",
    "adTags",
    "liveStreamEventCode"
})
public class LiveStreamEvent {

    protected Long id;
    protected String name;
    protected String description;
    @XmlSchemaType(name = "string")
    protected LiveStreamEventStatus status;
    protected DateTime creationDateTime;
    protected DateTime lastModifiedDateTime;
    protected DateTime startDateTime;
    protected DateTime endDateTime;
    protected Long totalEstimatedConcurrentUsers;
    // Lazily created by the accessor below, per standard JAXB codegen.
    protected List<String> contentUrls;
    // Lazily created by the accessor below, per standard JAXB codegen.
    protected List<String> adTags;
    protected String liveStreamEventCode;

    /**
     * Gets the value of the id property.
     * 
     * @return
     *     possible object is
     *     {@link Long }
     *     
     */
    public Long getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     * 
     * @param value
     *     allowed object is
     *     {@link Long }
     *     
     */
    public void setId(Long value) {
        this.id = value;
    }

    /**
     * Gets the value of the name property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the description property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the value of the description property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setDescription(String value) {
        this.description = value;
    }

    /**
     * Gets the value of the status property.
     * 
     * @return
     *     possible object is
     *     {@link LiveStreamEventStatus }
     *     
     */
    public LiveStreamEventStatus getStatus() {
        return status;
    }

    /**
     * Sets the value of the status property.
     * 
     * @param value
     *     allowed object is
     *     {@link LiveStreamEventStatus }
     *     
     */
    public void setStatus(LiveStreamEventStatus value) {
        this.status = value;
    }

    /**
     * Gets the value of the creationDateTime property.
     * 
     * @return
     *     possible object is
     *     {@link DateTime }
     *     
     */
    public DateTime getCreationDateTime() {
        return creationDateTime;
    }

    /**
     * Sets the value of the creationDateTime property.
     * 
     * @param value
     *     allowed object is
     *     {@link DateTime }
     *     
     */
    public void setCreationDateTime(DateTime value) {
        this.creationDateTime = value;
    }

    /**
     * Gets the value of the lastModifiedDateTime property.
     * 
     * @return
     *     possible object is
     *     {@link DateTime }
     *     
     */
    public DateTime getLastModifiedDateTime() {
        return lastModifiedDateTime;
    }

    /**
     * Sets the value of the lastModifiedDateTime property.
     * 
     * @param value
     *     allowed object is
     *     {@link DateTime }
     *     
     */
    public void setLastModifiedDateTime(DateTime value) {
        this.lastModifiedDateTime = value;
    }

    /**
     * Gets the value of the startDateTime property.
     * 
     * @return
     *     possible object is
     *     {@link DateTime }
     *     
     */
    public DateTime getStartDateTime() {
        return startDateTime;
    }

    /**
     * Sets the value of the startDateTime property.
     * 
     * @param value
     *     allowed object is
     *     {@link DateTime }
     *     
     */
    public void setStartDateTime(DateTime value) {
        this.startDateTime = value;
    }

    /**
     * Gets the value of the endDateTime property.
     * 
     * @return
     *     possible object is
     *     {@link DateTime }
     *     
     */
    public DateTime getEndDateTime() {
        return endDateTime;
    }

    /**
     * Sets the value of the endDateTime property.
     * 
     * @param value
     *     allowed object is
     *     {@link DateTime }
     *     
     */
    public void setEndDateTime(DateTime value) {
        this.endDateTime = value;
    }

    /**
     * Gets the value of the totalEstimatedConcurrentUsers property.
     * 
     * @return
     *     possible object is
     *     {@link Long }
     *     
     */
    public Long getTotalEstimatedConcurrentUsers() {
        return totalEstimatedConcurrentUsers;
    }

    /**
     * Sets the value of the totalEstimatedConcurrentUsers property.
     * 
     * @param value
     *     allowed object is
     *     {@link Long }
     *     
     */
    public void setTotalEstimatedConcurrentUsers(Long value) {
        this.totalEstimatedConcurrentUsers = value;
    }

    /**
     * Gets the value of the contentUrls property.
     * 
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the contentUrls property.
     * 
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContentUrls().add(newItem);
     * </pre>
     * 
     * 
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     * 
     * 
     */
    public List<String> getContentUrls() {
        if (contentUrls == null) {
            contentUrls = new ArrayList<String>();
        }
        return this.contentUrls;
    }

    /**
     * Gets the value of the adTags property.
     * 
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the adTags property.
     * 
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getAdTags().add(newItem);
     * </pre>
     * 
     * 
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     * 
     * 
     */
    public List<String> getAdTags() {
        if (adTags == null) {
            adTags = new ArrayList<String>();
        }
        return this.adTags;
    }

    /**
     * Gets the value of the liveStreamEventCode property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getLiveStreamEventCode() {
        return liveStreamEventCode;
    }

    /**
     * Sets the value of the liveStreamEventCode property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setLiveStreamEventCode(String value) {
        this.liveStreamEventCode = value;
    }

}
apache-2.0
GoogleChromeLabs/chromeos_smart_card_connector
third_party/closure-compiler/src/test/com/google/javascript/jscomp/PartialCompilationTest.java
8229
/*
 * Copyright 2017 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import static com.google.common.base.Predicates.not;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.NamedType;
import com.google.javascript.rhino.jstype.NoType;
import com.google.javascript.rhino.jstype.ObjectType;
import com.google.javascript.rhino.jstype.UnionType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Tests exercising {@link CompilerOptions#assumeForwardDeclaredForMissingTypes} and {@link
 * DiagnosticGroups#MISSING_SOURCES_WARNINGS}.
 *
 * <p>Each test compiles a snippet that references types or namespaces with no definition in the
 * compilation unit, and asserts that the only diagnostics produced fall into the
 * "missing sources" category (i.e. partial compilation succeeds).
 */
@RunWith(JUnit4.class)
public class PartialCompilationTest {

  // The compiler from the most recent assertPartialCompilationSucceeds call; tests inspect
  // its top scope to verify how unresolved types were modeled.
  private Compiler compiler;

  /**
   * Asserts that the given lines of code compile and only give errors matching the {@link
   * DiagnosticGroups#MISSING_SOURCES_WARNINGS} category.
   */
  private void assertPartialCompilationSucceeds(String... code) throws Exception {
    compiler = new Compiler();
    // Escalate everything to ERROR so nothing slips through as a warning,
    // and silence console output during the test run.
    compiler.setErrorManager(
        new BasicErrorManager() {
          @Override
          public void report(CheckLevel level, JSError error) {
            super.report(CheckLevel.ERROR, error);
          }

          @Override
          public void println(CheckLevel level, JSError error) {
            /* no-op */
          }

          @Override
          protected void printSummary() {
            /* no-op */
          }
        });
    CompilerOptions options = new CompilerOptions();
    options.setAssumeForwardDeclaredForMissingTypes(true);
    options.setStrictModeInput(true);
    options.setPreserveDetailedSourceInfo(true);
    CompilationLevel.ADVANCED_OPTIMIZATIONS.setOptionsForCompilationLevel(options);
    compiler.init(
        ImmutableList.of(),
        Collections.singletonList(SourceFile.fromCode("input.js", Joiner.on('\n').join(code))),
        options);
    compiler.parse();
    compiler.check();
    // Any error NOT in the missing-sources category is a real failure.
    ImmutableList<JSError> sourcesErrors =
        compiler.getErrors().stream()
            .filter(not(DiagnosticGroups.MISSING_SOURCES_WARNINGS::matches))
            .collect(toImmutableList());
    assertThat(sourcesErrors).isEmpty();
  }

  @Test
  public void testUsesMissingCode() throws Exception {
    assertPartialCompilationSucceeds(
        "goog.provide('missing_code_user');",
        "goog.require('some.thing.Missing');",
        "missing_code_user.fnUsesMissingNs = function() {",
        "  missing_code_user.missingNamespace.foo();",
        "  missingTopLevelNamespace.bar();",
        "};");
  }

  @Test
  public void testMissingType_variable() throws Exception {
    assertPartialCompilationSucceeds("/** @type {!some.thing.Missing} */ var foo;");
  }

  @Test
  public void testMissingType_assignment() throws Exception {
    assertPartialCompilationSucceeds(
        "/** @type {!some.thing.Missing} */ var foo;", // line break
        "/** @type {number} */ var bar = foo;");
  }

  @Test
  public void testMissingRequire() throws Exception {
    assertPartialCompilationSucceeds(
        "goog.provide('missing_extends');", // line break
        "goog.require('some.thing.Missing');");
  }

  @Test
  public void testMissingExtends() throws Exception {
    assertPartialCompilationSucceeds(
        "goog.provide('missing_extends');",
        "/** @constructor @extends {some.thing.Missing} */",
        "missing_extends.Extends = function() {}");
  }

  @Test
  public void testMissingExtends_template() throws Exception {
    assertPartialCompilationSucceeds(
        "goog.provide('missing_extends');",
        "/** @constructor @extends {some.thing.Missing<string>} x */",
        "missing_extends.Extends = function() {}");
  }

  @Test
  public void testMissingType_typedefAlias() throws Exception {
    assertPartialCompilationSucceeds("/** @typedef {string} */ var typedef;");
  }

  @Test
  public void testMissingType_typedefField() throws Exception {
    assertPartialCompilationSucceeds("/** @typedef {some.thing.Missing} */ var typedef;");
  }

  @Test
  public void testMissingEs6Externs() throws Exception {
    assertPartialCompilationSucceeds("let foo = {a, b};");
  }

  // An unresolved templatized type should be modeled as a NoResolvedType that still
  // carries its reference name and (recursively unresolved) template arguments.
  @Test
  public void testUnresolvedGenerics() throws Exception {
    assertPartialCompilationSucceeds(
        "/** @type {!some.thing.Missing<string, !AlsoMissing<!More>>} */", "var x;");
    TypedVar x = compiler.getTopScope().getSlot("x");
    assertWithMessage("type %s", x.getType()).that(x.getType().isNoResolvedType()).isTrue();

    NoType templatizedType = (NoType) x.getType();
    assertThat(templatizedType.getReferenceName()).isEqualTo("some.thing.Missing");
    ImmutableList<JSType> templateTypes = templatizedType.getTemplateTypes();
    assertThat(templateTypes.get(0).isString()).isTrue();
    assertThat(templateTypes.get(1).isObject()).isTrue();

    ObjectType alsoMissing = (ObjectType) templateTypes.get(1);
    assertThat(alsoMissing.getReferenceName()).isEqualTo("AlsoMissing");
    assertThat(alsoMissing.getTemplateTypes()).hasSize(1);

    ObjectType more = (ObjectType) alsoMissing.getTemplateTypes().get(0);
    assertThat(more.getReferenceName()).isEqualTo("More");
  }

  // A union of two unresolved names becomes {Foo, Bar, null}: the nullable default
  // adds the null alternative, and each named alternative stays unresolved.
  @Test
  public void testUnresolvedUnions() throws Exception {
    assertPartialCompilationSucceeds("/** @type {some.thing.Foo|some.thing.Bar} */", "var x;");
    TypedVar x = compiler.getTopScope().getSlot("x");
    assertWithMessage("type %s", x.getType()).that(x.getType().isUnionType()).isTrue();

    UnionType unionType = (UnionType) x.getType();
    Collection<JSType> alternatives = unionType.getAlternates();
    assertThat(alternatives).hasSize(3);

    int nullTypeCount = 0;
    List<String> namedTypes = new ArrayList<>();
    for (JSType alternative : alternatives) {
      assertThat(alternative.isNamedType() || alternative.isNullType()).isTrue();
      if (alternative.isNamedType()) {
        assertThat(alternative.isNoResolvedType()).isTrue();
        namedTypes.add(((NamedType) alternative).getReferenceName());
      }
      if (alternative.isNullType()) {
        nullTypeCount++;
      }
    }
    assertThat(nullTypeCount).isEqualTo(1);
    assertThat(namedTypes).containsExactly("some.thing.Foo", "some.thing.Bar");
  }

  @Test
  public void testUnresolvedGenerics_defined() throws Exception {
    assertPartialCompilationSucceeds(
        "/** @param {!some.thing.Missing<string>} x */",
        "function useMissing(x) {}",
        "/** @const {!some.thing.Missing<string>} */",
        "var x;",
        "/** @constructor @template T */",
        "some.thing.Missing = function () {}",
        "function missingInside() {",
        "  useMissing(new some.thing.Missing());",
        "}");
  }

  // Even when the base class is unresolved, fields declared on the subclass
  // constructor must remain visible on its 'this' type.
  @Test
  public void testUnresolvedBaseClassDoesNotHideFields() throws Exception {
    assertPartialCompilationSucceeds(
        "/** @constructor @extends {MissingBase} */",
        "var Klass = function () {",
        "  /** @type {string} */",
        "  this.foo;",
        "};");

    TypedVar x = compiler.getTopScope().getSlot("Klass");
    JSType type = x.getType();
    assertThat(type.isFunctionType()).isTrue();

    FunctionType fType = (FunctionType) type;
    assertThat(fType.getTypeOfThis().hasProperty("foo")).isTrue();
  }
}
apache-2.0
ravipesala/incubator-carbondata
core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/dimension/legacy/HighCardDictDimensionIndexCodec.java
3503
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.carbondata.core.datastore.page.encoding.dimension.legacy;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorage;
import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
import org.apache.carbondata.core.datastore.compression.Compressor;
import org.apache.carbondata.core.datastore.compression.CompressorFactory;
import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.datastore.page.encoding.ColumnPageEncoder;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.format.Encoding;

/**
 * Legacy index-storage codec for high-cardinality dictionary dimension columns.
 * Produces an encoder that (optionally) builds an inverted index over the page
 * data and compresses the flattened byte pages with the page's configured
 * compressor.
 */
public class HighCardDictDimensionIndexCodec extends IndexStorageCodec {
  /**
   * whether this column is varchar data type(long string)
   */
  private boolean isVarcharType;

  /**
   * @param isSort          whether the page data is sorted (inherited flag, used by the indexer)
   * @param isInvertedIndex whether to build an inverted index for this column
   * @param isVarcharType   whether the column is a varchar (long string) column
   */
  public HighCardDictDimensionIndexCodec(boolean isSort, boolean isInvertedIndex,
      boolean isVarcharType) {
    super(isSort, isInvertedIndex);
    this.isVarcharType = isVarcharType;
  }

  @Override
  public String getName() {
    return "HighCardDictDimensionIndexCodec";
  }

  /**
   * Creates a one-shot encoder for a column page. The encoder stores its results
   * in the inherited {@code compressedDataPage} / {@code indexStorage} fields of
   * {@link IndexStorageEncoder}.
   */
  @Override
  public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
    return new IndexStorageEncoder() {
      @Override
      protected void encodeIndexStorage(ColumnPage input) {
        BlockIndexerStorage<byte[][]> indexStorage;
        byte[][] data = input.getByteArrayPage();
        boolean isDictionary = input.isLocalDictGeneratedPage();
        if (isInvertedIndex) {
          // Inverted-index path: the indexer may also rewrite/sort the data page.
          indexStorage =
              new BlockIndexerStorageForShort(data, isDictionary, !isDictionary, isSort);
        } else {
          indexStorage =
              new BlockIndexerStorageForNoInvertedIndexForShort(data, isDictionary);
        }
        // Flatten the (possibly reordered) page and compress it with the
        // compressor configured for this column page.
        byte[] flattened = ByteUtil.flatten(indexStorage.getDataPage());
        Compressor compressor = CompressorFactory.getInstance().getCompressor(
            input.getColumnCompressorName());
        super.compressedDataPage = compressor.compressByte(flattened);
        super.indexStorage = indexStorage;
      }

      @Override
      protected List<Encoding> getEncodingList() {
        List<Encoding> encodings = new ArrayList<>();
        if (isVarcharType) {
          encodings.add(Encoding.DIRECT_COMPRESS_VARCHAR);
        } else if (indexStorage.getRowIdPageLengthInBytes() > 0) {
          // A non-empty row-id page means an inverted index was actually written.
          encodings.add(Encoding.INVERTED_INDEX);
        }
        if (indexStorage.getDataRlePageLengthInBytes() > 0) {
          encodings.add(Encoding.RLE);
        }
        return encodings;
      }
    };
  }
}
apache-2.0
zstackorg/zstack
sdk/src/main/java/org/zstack/sdk/BackupStorageMigrateImageResult.java
330
package org.zstack.sdk;

import org.zstack.sdk.ImageInventory;

/**
 * Result payload returned by the "migrate image between backup storages" API call.
 * Carries the inventory of the migrated image.
 */
public class BackupStorageMigrateImageResult {
    /** Inventory of the migrated image; public for SDK serialization. */
    public ImageInventory inventory;

    /** @return the inventory of the migrated image */
    public ImageInventory getInventory() {
        return inventory;
    }

    /** @param inventory the inventory of the migrated image */
    public void setInventory(ImageInventory inventory) {
        this.inventory = inventory;
    }
}
apache-2.0
ehsan/js-symbolic-executor
closure-compiler/src/com/google/javascript/rhino/SimpleErrorReporter.java
3775
/*
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Rhino code, released
 * May 6, 1999.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1997-1999
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *   Bob Jervis
 *   Google Inc.
 *
 * Alternatively, the contents of this file may be used under the terms of
 * the GNU General Public License Version 2 or later (the "GPL"), in which
 * case the provisions of the GPL are applicable instead of those above. If
 * you wish to allow use of your version of this file only under the terms of
 * the GPL and not to allow others to use your version of this file under the
 * MPL, indicate your decision by deleting the provisions above and replacing
 * them with the notice and other provisions required by the GPL. If you do
 * not delete the provisions above, a recipient may use your version of this
 * file under either the MPL or the GPL.
 *
 * ***** END LICENSE BLOCK ***** */

package com.google.javascript.rhino;

import java.util.ArrayList;
import java.util.List;

/**
 * A simple {@link ErrorReporter} that collects warnings and errors and makes
 * them accessible via {@link #errors()} and {@link #warnings()}.
 *
 * <p>Both collections are created lazily: until the first warning (or error) is
 * reported, the corresponding accessor returns {@code null}.
 */
public class SimpleErrorReporter implements ErrorReporter {

    // Lazily initialized; null means "nothing reported yet".
    private List<String> warnings = null;
    private List<String> errors = null;

    public void warning(String message, String sourceName, int line,
                        String lineSource, int lineOffset) {
        if (warnings == null) {
            warnings = new ArrayList<String>();
        }
        warnings.add(describe(message, sourceName, line, lineSource, lineOffset));
    }

    public void error(String message, String sourceName, int line,
                      String lineSource, int lineOffset) {
        if (errors == null) {
            errors = new ArrayList<String>();
        }
        errors.add(describe(message, sourceName, line, lineSource, lineOffset));
    }

    public EvaluatorException runtimeError(String message, String sourceName,
                                           int line, String lineSource,
                                           int lineOffset) {
        return new EvaluatorException(
            message, sourceName, line, lineSource, lineOffset);
    }

    /**
     * Returns the list of errors, or {@code null} if there were none.
     */
    public List<String> errors() {
        return errors;
    }

    /**
     * Returns the list of warnings, or {@code null} if there were none.
     */
    public List<String> warnings() {
        return warnings;
    }

    /**
     * Formats a diagnostic by delegating to {@link RhinoException}'s message
     * construction, attaching whichever location details are available.
     */
    private String describe(String message, String sourceName, int line,
                            String lineSource, int lineOffset) {
        RhinoException exception = new RhinoException(message);
        if (sourceName != null) {
            exception.initSourceName(sourceName);
        }
        if (lineSource != null) {
            exception.initLineSource(lineSource);
        }
        if (line > 0) {
            exception.initLineNumber(line);
        }
        if (lineOffset > 0) {
            exception.initColumnNumber(lineOffset);
        }
        return exception.getMessage();
    }
}
apache-2.0
dennishuo/hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
59670
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import static org.apache.hadoop.fs.permission.AclEntryScope.*; import static org.apache.hadoop.fs.permission.AclEntryType.*; import static org.apache.hadoop.fs.permission.FsAction.*; import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.*; import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Random; import java.util.concurrent.Callable; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.io.FileUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSInotifyEventInputStream; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType; import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException; import org.apache.hadoop.hdfs.util.XMLUtils.Stanza; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.PathUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.apache.log4j.Level; import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.LogManager; import org.apache.log4j.spi.LoggingEvent; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.mockito.Mockito; import 
org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;

/**
 * This class tests the creation and validation of a checkpoint.
 */
@RunWith(Parameterized.class)
public class TestEditLog {

  static {
    GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
  }

  /**
   * Runs every test twice: once with the synchronous edit log and once with
   * asynchronous edit logging (DFS_NAMENODE_EDITS_ASYNC_LOGGING) enabled.
   */
  @Parameters
  public static Collection<Object[]> data() {
    Collection<Object[]> params = new ArrayList<Object[]>();
    params.add(new Object[]{ Boolean.FALSE });
    params.add(new Object[]{ Boolean.TRUE });
    return params;
  }

  // Current value of the async-logging parameter; static because getConf()
  // is static and is also called from static helpers such as setupEdits().
  private static boolean useAsyncEditLog;

  public TestEditLog(Boolean async) {
    useAsyncEditLog = async;
  }

  /** Build a configuration reflecting the current async-logging parameter. */
  public static Configuration getConf() {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_EDITS_ASYNC_LOGGING,
        useAsyncEditLog);
    return conf;
  }

  /**
   * A garbage mkdir op which is used for testing
   * {@link EditLogFileInputStream#scanEditLog(File, long, boolean)}
   */
  public static class GarbageMkdirOp extends FSEditLogOp {
    public GarbageMkdirOp() {
      super(FSEditLogOpCodes.OP_MKDIR);
    }

    @Override
    void resetSubFields() {
      // nop
    }

    @Override
    void readFields(DataInputStream in, int logVersion) throws IOException {
      throw new IOException("cannot decode GarbageMkdirOp");
    }

    @Override
    public void writeFields(DataOutputStream out) throws IOException {
      // write in some garbage content
      Random random = new Random();
      byte[] content = new byte[random.nextInt(16) + 1];
      random.nextBytes(content);
      out.write(content);
    }

    @Override
    protected void toXml(ContentHandler contentHandler) throws SAXException {
      throw new UnsupportedOperationException(
          "Not supported for GarbageMkdirOp");
    }

    @Override
    void fromXml(Stanza st) throws InvalidXmlException {
      throw new UnsupportedOperationException(
          "Not supported for GarbageMkdirOp");
    }
  }

  static final Log LOG = LogFactory.getLog(TestEditLog.class);

  static final int NUM_DATA_NODES = 0;

  // This test creates NUM_THREADS threads and each thread does
  // 2 * NUM_TRANSACTIONS Transactions concurrently.
  static final int NUM_TRANSACTIONS = 100;
  static final int NUM_THREADS = 100;

  static final File TEST_DIR = PathUtils.getTestDir(TestEditLog.class);

  /** An edits log with 3 edits from 0.20 - the result of
   * a fresh namesystem followed by hadoop fs -touchz /myfile */
  static final byte[] HADOOP20_SOME_EDITS =
    StringUtils.hexStringToByte((
      "ffff ffed 0a00 0000 0000 03fa e100 0000" +
      "0005 0007 2f6d 7966 696c 6500 0133 000d" +
      "3132 3932 3331 3634 3034 3138 3400 0d31" +
      "3239 3233 3136 3430 3431 3834 0009 3133" +
      "3432 3137 3732 3800 0000 0004 746f 6464" +
      "0a73 7570 6572 6772 6f75 7001 a400 1544" +
      "4653 436c 6965 6e74 5f2d 3136 3136 3535" +
      "3738 3931 000b 3137 322e 3239 2e35 2e33" +
      "3209 0000 0005 0007 2f6d 7966 696c 6500" +
      "0133 000d 3132 3932 3331 3634 3034 3138" +
      "3400 0d31 3239 3233 3136 3430 3431 3834" +
      "0009 3133 3432 3137 3732 3800 0000 0004" +
      "746f 6464 0a73 7570 6572 6772 6f75 7001" +
      "a4ff 0000 0000 0000 0000 0000 0000 0000"
    ).replace(" ",""));

  static {
    // No need to fsync for the purposes of tests. This makes
    // the tests run much faster.
    EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
  }

  static final byte TRAILER_BYTE = FSEditLogOpCodes.OP_INVALID.getOpCode();

  private static final int CHECKPOINT_ON_STARTUP_MIN_TXNS = 100;

  //
  // an object that does a bunch of transactions
  //
  static class Transactions implements Runnable {
    final FSNamesystem namesystem;
    final int numTransactions;
    final short replication = 3;
    final long blockSize = 64;
    final int startIndex;

    Transactions(FSNamesystem ns, int numTx, int startIdx) {
      namesystem = ns;
      numTransactions = numTx;
      startIndex = startIdx;
    }

    // add a bunch of transactions.
    // Each iteration logs an open/close pair for a distinct "/filenameN"
    // path and syncs, so one run produces 2 * numTransactions edits.
    @Override
    public void run() {
      PermissionStatus p = namesystem.createFsOwnerPermissions(
                                          new FsPermission((short)0777));
      FSEditLog editLog = namesystem.getEditLog();

      for (int i = 0; i < numTransactions; i++) {
        INodeFile inode = new INodeFile(namesystem.dir.allocateNewInodeId(),
            null, p, 0L, 0L, BlockInfo.EMPTY_ARRAY, replication, blockSize);
        inode.toUnderConstruction("", "");

        editLog.logOpenFile("/filename" + (startIndex + i), inode, false, false);
        editLog.logCloseFile("/filename" + (startIndex + i), inode);
        editLog.logSync();
      }
    }
  }

  /**
   * Construct FSEditLog with default configuration, taking editDirs from NNStorage
   *
   * @param storage Storage object used by namenode
   */
  private static FSEditLog getFSEditLog(NNStorage storage) throws IOException {
    Configuration conf = getConf();
    // Make sure the edits dirs are set in the provided configuration object.
    conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
             StringUtils.join(",", storage.getEditsDirectories()));
    FSEditLog log = FSEditLog.newInstance(
        conf, storage, FSNamesystem.getNamespaceEditsDirs(conf));
    return log;
  }

  /**
   * Test case for an empty edit log from a prior version of Hadoop.
   */
  @Test
  public void testPreTxIdEditLogNoEdits() throws Exception {
    FSNamesystem namesys = Mockito.mock(FSNamesystem.class);
    namesys.dir = Mockito.mock(FSDirectory.class);
    long numEdits = testLoad(
        StringUtils.hexStringToByte("ffffffed"), // just version number
        namesys);
    assertEquals(0, numEdits);
  }

  /**
   * Test case for loading a very simple edit log from a format
   * prior to the inclusion of edit transaction IDs in the log.
   */
  @Test
  public void testPreTxidEditLogWithEdits() throws Exception {
    Configuration conf = getConf();
    MiniDFSCluster cluster = null;

    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
      cluster.waitActive();
      final FSNamesystem namesystem = cluster.getNamesystem();

      long numEdits = testLoad(HADOOP20_SOME_EDITS, namesystem);
      assertEquals(3, numEdits);
      // Sanity check the edit
      HdfsFileStatus fileInfo =
          namesystem.getFileInfo("/myfile", false, false, false);
      assertEquals("supergroup", fileInfo.getGroup());
      assertEquals(3, fileInfo.getReplication());
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }

  /** Load the given serialized edits into the namesystem, returning the
   *  number of edit log transactions that were read. */
  private long testLoad(byte[] data, FSNamesystem namesys) throws IOException {
    FSEditLogLoader loader = new FSEditLogLoader(namesys, 0);
    return loader.loadFSEdits(new EditLogByteInputStream(data), 1);
  }

  /**
   * Simple test for writing to and rolling the edit log.
   */
  @Test
  public void testSimpleEditLog() throws IOException {
    // start a cluster
    Configuration conf = getConf();
    MiniDFSCluster cluster = null;
    FileSystem fileSys = null;

    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build();
      cluster.waitActive();
      fileSys = cluster.getFileSystem();
      final FSNamesystem namesystem = cluster.getNamesystem();
      FSImage fsimage = namesystem.getFSImage();
      final FSEditLog editLog = fsimage.getEditLog();

      assertExistsInStorageDirs(
          cluster, NameNodeDirType.EDITS,
          NNStorage.getInProgressEditsFileName(1));

      editLog.logSetReplication("fakefile", (short) 1);
      editLog.logSync();

      editLog.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);

      assertExistsInStorageDirs(
          cluster, NameNodeDirType.EDITS,
          NNStorage.getFinalizedEditsFileName(1,3));
      assertExistsInStorageDirs(
          cluster, NameNodeDirType.EDITS,
          NNStorage.getInProgressEditsFileName(4));

      editLog.logSetReplication("fakefile", (short) 2);
      editLog.logSync();

      editLog.close();
    } finally {
      if(fileSys != null) fileSys.close();
      if(cluster != null) cluster.shutdown();
    }
  }

  /**
   * Tests transaction
logging in dfs. */ @Test public void testMultiThreadedEditLog() throws IOException { testEditLog(2048); // force edit buffer to automatically sync on each log of edit log entry testEditLog(1); } private void assertExistsInStorageDirs(MiniDFSCluster cluster, NameNodeDirType dirType, String filename) { NNStorage storage = cluster.getNamesystem().getFSImage().getStorage(); for (StorageDirectory sd : storage.dirIterable(dirType)) { File f = new File(sd.getCurrentDir(), filename); assertTrue("Expect that " + f + " exists", f.exists()); } } /** * Test edit log with different initial buffer size * * @param initialSize initial edit log buffer size * @throws IOException */ private void testEditLog(int initialSize) throws IOException { // start a cluster Configuration conf = getConf(); MiniDFSCluster cluster = null; FileSystem fileSys = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); final FSNamesystem namesystem = cluster.getNamesystem(); for (Iterator<URI> it = cluster.getNameDirs(0).iterator(); it.hasNext(); ) { File dir = new File(it.next().getPath()); System.out.println(dir); } FSImage fsimage = namesystem.getFSImage(); FSEditLog editLog = fsimage.getEditLog(); // set small size of flush buffer editLog.setOutputBufferCapacity(initialSize); // Roll log so new output buffer size takes effect // we should now be writing to edits_inprogress_3 fsimage.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION); // Remember the current lastInodeId and will reset it back to test // loading editlog segments.The transactions in the following allocate new // inode id to write to editlogs but doesn't create ionde in namespace long originalLastInodeId = namesystem.dir.getLastInodeId(); // Create threads and make them run transactions concurrently. 
Thread threadId[] = new Thread[NUM_THREADS]; for (int i = 0; i < NUM_THREADS; i++) { Transactions trans = new Transactions(namesystem, NUM_TRANSACTIONS, i*NUM_TRANSACTIONS); threadId[i] = new Thread(trans, "TransactionThread-" + i); threadId[i].start(); } // wait for all transactions to get over for (int i = 0; i < NUM_THREADS; i++) { try { threadId[i].join(); } catch (InterruptedException e) { i--; // retry } } // Reopen some files as for append Transactions trans = new Transactions(namesystem, NUM_TRANSACTIONS, NUM_TRANSACTIONS / 2); trans.run(); // Roll another time to finalize edits_inprogress_3 fsimage.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION); long expectedTxns = ((NUM_THREADS+1) * 2 * NUM_TRANSACTIONS) + 2; // +2 for start/end txns // Verify that we can read in all the transactions that we have written. // If there were any corruptions, it is likely that the reading in // of these transactions will throw an exception. // namesystem.dir.resetLastInodeIdWithoutChecking(originalLastInodeId); for (Iterator<StorageDirectory> it = fsimage.getStorage().dirIterator(NameNodeDirType.EDITS); it.hasNext();) { FSEditLogLoader loader = new FSEditLogLoader(namesystem, 0); File editFile = NNStorage.getFinalizedEditsFile(it.next(), 3, 3 + expectedTxns - 1); assertTrue("Expect " + editFile + " exists", editFile.exists()); System.out.println("Verifying file: " + editFile); long numEdits = loader.loadFSEdits( new EditLogFileInputStream(editFile), 3); int numLeases = namesystem.leaseManager.countLease(); System.out.println("Number of outstanding leases " + numLeases); assertEquals(0, numLeases); assertTrue("Verification for " + editFile + " failed. " + "Expected " + expectedTxns + " transactions. 
"+ "Found " + numEdits + " transactions.", numEdits == expectedTxns); } } finally { try { if(fileSys != null) fileSys.close(); if(cluster != null) cluster.shutdown(); } catch (Throwable t) { LOG.error("Couldn't shut down cleanly", t); } } } private void doLogEdit(ExecutorService exec, final FSEditLog log, final String filename) throws Exception { exec.submit(new Callable<Void>() { @Override public Void call() { log.logSetReplication(filename, (short)1); return null; } }).get(); } private void doCallLogSync(ExecutorService exec, final FSEditLog log) throws Exception { exec.submit(new Callable<Void>() { @Override public Void call() { log.logSync(); return null; } }).get(); } private void doCallLogSyncAll(ExecutorService exec, final FSEditLog log) throws Exception { exec.submit(new Callable<Void>() { @Override public Void call() throws Exception { log.logSyncAll(); return null; } }).get(); } @Test public void testSyncBatching() throws Exception { if (useAsyncEditLog) { // semantics are completely differently since edits will be auto-synced return; } // start a cluster Configuration conf = getConf(); MiniDFSCluster cluster = null; FileSystem fileSys = null; ExecutorService threadA = Executors.newSingleThreadExecutor(); ExecutorService threadB = Executors.newSingleThreadExecutor(); try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); final FSNamesystem namesystem = cluster.getNamesystem(); FSImage fsimage = namesystem.getFSImage(); final FSEditLog editLog = fsimage.getEditLog(); assertEquals("should start with only the BEGIN_LOG_SEGMENT txn synced", 1, editLog.getSyncTxId()); // Log an edit from thread A doLogEdit(threadA, editLog, "thread-a 1"); assertEquals("logging edit without syncing should do not affect txid", 1, editLog.getSyncTxId()); // Log an edit from thread B doLogEdit(threadB, editLog, "thread-b 1"); assertEquals("logging edit without syncing should do not affect 
txid", 1, editLog.getSyncTxId()); // Now ask to sync edit from B, which should sync both edits. doCallLogSync(threadB, editLog); assertEquals("logSync from second thread should bump txid up to 3", 3, editLog.getSyncTxId()); // Now ask to sync edit from A, which was already batched in - thus // it should increment the batch count metric doCallLogSync(threadA, editLog); assertEquals("logSync from first thread shouldn't change txid", 3, editLog.getSyncTxId()); //Should have incremented the batch count exactly once assertCounter("TransactionsBatchedInSync", 1L, getMetrics("NameNodeActivity")); } finally { threadA.shutdown(); threadB.shutdown(); if(fileSys != null) fileSys.close(); if(cluster != null) cluster.shutdown(); } } /** * Test what happens with the following sequence: * * Thread A writes edit * Thread B calls logSyncAll * calls close() on stream * Thread A calls logSync * * This sequence is legal and can occur if enterSafeMode() is closely * followed by saveNamespace. */ @Test public void testBatchedSyncWithClosedLogs() throws Exception { // start a cluster Configuration conf = getConf(); MiniDFSCluster cluster = null; FileSystem fileSys = null; ExecutorService threadA = Executors.newSingleThreadExecutor(); ExecutorService threadB = Executors.newSingleThreadExecutor(); try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); final FSNamesystem namesystem = cluster.getNamesystem(); FSImage fsimage = namesystem.getFSImage(); final FSEditLog editLog = fsimage.getEditLog(); // Log an edit from thread A doLogEdit(threadA, editLog, "thread-a 1"); // async log is doing batched syncs in background. 
logSync just ensures // the edit is durable, so the txid may increase prior to sync if (!useAsyncEditLog) { assertEquals("logging edit without syncing should do not affect txid", 1, editLog.getSyncTxId()); } // logSyncAll in Thread B doCallLogSyncAll(threadB, editLog); assertEquals("logSyncAll should sync thread A's transaction", 2, editLog.getSyncTxId()); // Close edit log editLog.close(); // Ask thread A to finish sync (which should be a no-op) doCallLogSync(threadA, editLog); } finally { threadA.shutdown(); threadB.shutdown(); if(fileSys != null) fileSys.close(); if(cluster != null) cluster.shutdown(); } } @Test public void testEditChecksum() throws Exception { // start a cluster Configuration conf = getConf(); MiniDFSCluster cluster = null; FileSystem fileSys = null; cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); final FSNamesystem namesystem = cluster.getNamesystem(); FSImage fsimage = namesystem.getFSImage(); final FSEditLog editLog = fsimage.getEditLog(); fileSys.mkdirs(new Path("/tmp")); Iterator<StorageDirectory> iter = fsimage.getStorage(). 
dirIterator(NameNodeDirType.EDITS); LinkedList<StorageDirectory> sds = new LinkedList<StorageDirectory>(); while (iter.hasNext()) { sds.add(iter.next()); } editLog.close(); cluster.shutdown(); for (StorageDirectory sd : sds) { File editFile = NNStorage.getFinalizedEditsFile(sd, 1, 3); assertTrue(editFile.exists()); long fileLen = editFile.length(); LOG.debug("Corrupting Log File: " + editFile + " len: " + fileLen); RandomAccessFile rwf = new RandomAccessFile(editFile, "rw"); rwf.seek(fileLen-4); // seek to checksum bytes int b = rwf.readInt(); rwf.seek(fileLen-4); rwf.writeInt(b+1); rwf.close(); } try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).format(false).build(); fail("should not be able to start"); } catch (IOException e) { // expected assertNotNull("Cause of exception should be ChecksumException", e.getCause()); assertEquals("Cause of exception should be ChecksumException", ChecksumException.class, e.getCause().getClass()); } } /** * Test what happens if the NN crashes when it has has started but * had no transactions written. */ @Test public void testCrashRecoveryNoTransactions() throws Exception { testCrashRecovery(0); } /** * Test what happens if the NN crashes when it has has started and * had a few transactions written */ @Test public void testCrashRecoveryWithTransactions() throws Exception { testCrashRecovery(150); } /** * Do a test to make sure the edit log can recover edits even after * a non-clean shutdown. This does a simulated crash by copying over * the edits directory while the NN is still running, then shutting it * down, and restoring that edits directory. 
   */
  private void testCrashRecovery(int numTransactions) throws Exception {
    MiniDFSCluster cluster = null;
    Configuration conf = getConf();
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_TXNS_KEY,
        CHECKPOINT_ON_STARTUP_MIN_TXNS);

    try {
      LOG.info("\n===========================================\n" +
               "Starting empty cluster");

      cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(NUM_DATA_NODES)
        .format(true)
        .build();
      cluster.waitActive();

      FileSystem fs = cluster.getFileSystem();
      for (int i = 0; i < numTransactions; i++) {
        fs.mkdirs(new Path("/test" + i));
      }

      // Directory layout looks like:
      // test/data/dfs/nameN/current/{fsimage_N,edits_...}
      File nameDir = new File(cluster.getNameDirs(0).iterator().next().getPath());
      File dfsDir = nameDir.getParentFile();
      assertEquals(dfsDir.getName(), "dfs"); // make sure we got right dir

      LOG.info("Copying data directory aside to a hot backup");
      File backupDir = new File(dfsDir.getParentFile(), "dfs.backup-while-running");
      FileUtils.copyDirectory(dfsDir, backupDir);

      LOG.info("Shutting down cluster #1");
      cluster.shutdown();
      cluster = null;

      // Now restore the backup
      FileUtil.fullyDeleteContents(dfsDir);
      dfsDir.delete();
      backupDir.renameTo(dfsDir);

      // Directory layout looks like:
      // test/data/dfs/nameN/current/{fsimage_N,edits_...}
      File currentDir = new File(nameDir, "current");

      // We should see the file as in-progress
      File editsFile = new File(currentDir,
          NNStorage.getInProgressEditsFileName(1));
      assertTrue("Edits file " + editsFile + " should exist", editsFile.exists());

      File imageFile = FSImageTestUtil.findNewestImageFile(
          currentDir.getAbsolutePath());
      assertNotNull("No image found in " + nameDir, imageFile);
      assertEquals(NNStorage.getImageFileName(0), imageFile.getName());

      // Try to start a new cluster
      LOG.info("\n===========================================\n" +
          "Starting same cluster after simulated crash");
      cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(NUM_DATA_NODES)
        .format(false)
        .build();
      cluster.waitActive();

      // We should still have the files we wrote prior to the simulated crash
      fs = cluster.getFileSystem();
      for (int i = 0; i < numTransactions; i++) {
        assertTrue(fs.exists(new Path("/test" + i)));
      }

      long expectedTxId;
      if (numTransactions > CHECKPOINT_ON_STARTUP_MIN_TXNS) {
        // It should have saved a checkpoint on startup since there
        // were more unfinalized edits than configured
        expectedTxId = numTransactions + 1;
      } else {
        // otherwise, it shouldn't have made a checkpoint
        expectedTxId = 0;
      }
      imageFile = FSImageTestUtil.findNewestImageFile(
          currentDir.getAbsolutePath());
      assertNotNull("No image found in " + nameDir, imageFile);
      assertEquals(NNStorage.getImageFileName(expectedTxId),
          imageFile.getName());

      // Started successfully. Shut it down and make sure it can restart.
      cluster.shutdown();
      cluster = null;
      cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(NUM_DATA_NODES)
        .format(false)
        .build();
      cluster.waitActive();
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  // should succeed - only one corrupt log dir
  @Test
  public void testCrashRecoveryEmptyLogOneDir() throws Exception {
    doTestCrashRecoveryEmptyLog(false, true, true);
  }

  // should fail - seen_txid updated to 3, but no log dir contains txid 3
  @Test
  public void testCrashRecoveryEmptyLogBothDirs() throws Exception {
    doTestCrashRecoveryEmptyLog(true, true, false);
  }

  // should succeed - only one corrupt log dir
  @Test
  public void testCrashRecoveryEmptyLogOneDirNoUpdateSeenTxId()
      throws Exception {
    doTestCrashRecoveryEmptyLog(false, false, true);
  }

  // should succeed - both log dirs corrupt, but seen_txid never updated
  @Test
  public void testCrashRecoveryEmptyLogBothDirsNoUpdateSeenTxId()
      throws Exception {
    doTestCrashRecoveryEmptyLog(true, false, true);
  }

  /**
   * Test that the NN handles the corruption properly
   * after it crashes just after creating an edit log
   * (ie before writing START_LOG_SEGMENT).
In the case * that all logs have this problem, it should mark them * as corrupt instead of trying to finalize them. * * @param inBothDirs if true, there will be a truncated log in * both of the edits directories. If false, the truncated log * will only be in one of the directories. In both cases, the * NN should fail to start up, because it's aware that txid 3 * was reached, but unable to find a non-corrupt log starting there. * @param updateTransactionIdFile if true update the seen_txid file. * If false, it will not be updated. This will simulate a case where * the NN crashed between creating the new segment and updating the * seen_txid file. * @param shouldSucceed true if the test is expected to succeed. */ private void doTestCrashRecoveryEmptyLog(boolean inBothDirs, boolean updateTransactionIdFile, boolean shouldSucceed) throws Exception { // start a cluster Configuration conf = getConf(); MiniDFSCluster cluster = null; cluster = new MiniDFSCluster.Builder(conf) .numDataNodes(NUM_DATA_NODES).build(); cluster.shutdown(); Collection<URI> editsDirs = cluster.getNameEditsDirs(0); for (URI uri : editsDirs) { File dir = new File(uri.getPath()); File currentDir = new File(dir, "current"); // We should start with only the finalized edits_1-2 GenericTestUtils.assertGlobEquals(currentDir, "edits_.*", NNStorage.getFinalizedEditsFileName(1, 2)); // Make a truncated edits_3_inprogress File log = new File(currentDir, NNStorage.getInProgressEditsFileName(3)); EditLogFileOutputStream stream = new EditLogFileOutputStream(conf, log, 1024); try { stream.create(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION); if (!inBothDirs) { break; } NNStorage storage = new NNStorage(conf, Collections.<URI>emptyList(), Lists.newArrayList(uri)); if (updateTransactionIdFile) { storage.writeTransactionIdFileToStorage(3); } storage.close(); } finally { stream.close(); } } try { cluster = new MiniDFSCluster.Builder(conf) .numDataNodes(NUM_DATA_NODES).format(false).build(); if (!shouldSucceed) { 
fail("Should not have succeeded in startin cluster"); } } catch (IOException ioe) { if (shouldSucceed) { LOG.info("Should have succeeded in starting cluster, but failed", ioe); throw ioe; } else { GenericTestUtils.assertExceptionContains( "Gap in transactions. Expected to be able to read up until " + "at least txid 3 but unable to find any edit logs containing " + "txid 3", ioe); } } finally { cluster.shutdown(); } } private static class EditLogByteInputStream extends EditLogInputStream { private final InputStream input; private final long len; private int version; private FSEditLogOp.Reader reader = null; private FSEditLogLoader.PositionTrackingInputStream tracker = null; public EditLogByteInputStream(byte[] data) throws IOException { len = data.length; input = new ByteArrayInputStream(data); BufferedInputStream bin = new BufferedInputStream(input); DataInputStream in = new DataInputStream(bin); version = EditLogFileInputStream.readLogVersion(in, true); tracker = new FSEditLogLoader.PositionTrackingInputStream(in); in = new DataInputStream(tracker); reader = FSEditLogOp.Reader.create(in, tracker, version); } @Override public long getFirstTxId() { return HdfsServerConstants.INVALID_TXID; } @Override public long getLastTxId() { return HdfsServerConstants.INVALID_TXID; } @Override public long length() throws IOException { return len; } @Override public long getPosition() { return tracker.getPos(); } @Override protected FSEditLogOp nextOp() throws IOException { return reader.readOp(false); } @Override public int getVersion(boolean verifyVersion) throws IOException { return version; } @Override public void close() throws IOException { input.close(); } @Override public String getName() { return "AnonEditLogByteInputStream"; } @Override public boolean isInProgress() { return true; } @Override public void setMaxOpSize(int maxOpSize) { reader.setMaxOpSize(maxOpSize); } @Override public boolean isLocalLog() { return true; } } @Test public void testFailedOpen() throws 
Exception { File logDir = new File(TEST_DIR, "testFailedOpen"); logDir.mkdirs(); FSEditLog log = FSImageTestUtil.createStandaloneEditLog(logDir); try { FileUtil.setWritable(logDir, false); log.openForWrite(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION); fail("Did no throw exception on only having a bad dir"); } catch (IOException ioe) { GenericTestUtils.assertExceptionContains( "too few journals successfully started", ioe); } finally { FileUtil.setWritable(logDir, true); log.close(); } } /** * Regression test for HDFS-1112/HDFS-3020. Ensures that, even if * logSync isn't called periodically, the edit log will sync itself. */ @Test public void testAutoSync() throws Exception { File logDir = new File(TEST_DIR, "testAutoSync"); logDir.mkdirs(); FSEditLog log = FSImageTestUtil.createStandaloneEditLog(logDir); String oneKB = StringUtils.byteToHexString( new byte[500]); try { log.openForWrite(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION); NameNodeMetrics mockMetrics = Mockito.mock(NameNodeMetrics.class); log.setMetricsForTests(mockMetrics); for (int i = 0; i < 400; i++) { log.logDelete(oneKB, 1L, false); } // After ~400KB, we're still within the 512KB buffer size Mockito.verify(mockMetrics, Mockito.times(0)).addSync(Mockito.anyLong()); // After ~400KB more, we should have done an automatic sync for (int i = 0; i < 400; i++) { log.logDelete(oneKB, 1L, false); } Mockito.verify(mockMetrics, Mockito.times(1)).addSync(Mockito.anyLong()); } finally { log.close(); } } /** * Tests the getEditLogManifest function using mock storage for a number * of different situations. 
   */
  @Test
  public void testEditLogManifestMocks() throws IOException {
    NNStorage storage;
    FSEditLog log;

    // Simple case - different directories have the same
    // set of logs, with an in-progress one at end
    storage = mockStorageWithEdits(
        "[1,100]|[101,200]|[201,]",
        "[1,100]|[101,200]|[201,]");
    log = getFSEditLog(storage);
    log.initJournalsForWrite();
    assertEquals("[[1,100], [101,200]] CommittedTxId: 200",
        log.getEditLogManifest(1).toString());
    assertEquals("[[101,200]] CommittedTxId: 200",
        log.getEditLogManifest(101).toString());

    // Another simple case, different directories have different
    // sets of files
    storage = mockStorageWithEdits(
        "[1,100]|[101,200]",
        "[1,100]|[201,300]|[301,400]"); // nothing starting at 101
    log = getFSEditLog(storage);
    log.initJournalsForWrite();
    assertEquals("[[1,100], [101,200], [201,300], [301,400]]" +
        " CommittedTxId: 400", log.getEditLogManifest(1).toString());

    // Case where one directory has an earlier finalized log, followed
    // by a gap. The returned manifest should start after the gap.
    storage = mockStorageWithEdits(
        "[1,100]|[301,400]", // gap from 101 to 300
        "[301,400]|[401,500]");
    log = getFSEditLog(storage);
    log.initJournalsForWrite();
    assertEquals("[[301,400], [401,500]] CommittedTxId: 500",
        log.getEditLogManifest(1).toString());

    // Case where different directories have different length logs
    // starting at the same txid - should pick the longer one
    storage = mockStorageWithEdits(
        "[1,100]|[101,150]", // short log at 101
        "[1,50]|[101,200]"); // short log at 1
    log = getFSEditLog(storage);
    log.initJournalsForWrite();
    assertEquals("[[1,100], [101,200]] CommittedTxId: 200",
        log.getEditLogManifest(1).toString());
    assertEquals("[[101,200]] CommittedTxId: 200",
        log.getEditLogManifest(101).toString());

    // Case where the first storage has an inprogress while
    // the second has finalised that file (i.e. the first failed
    // recently)
    storage = mockStorageWithEdits(
        "[1,100]|[101,]",
        "[1,100]|[101,200]");
    log = getFSEditLog(storage);
    log.initJournalsForWrite();
    assertEquals("[[1,100], [101,200]] CommittedTxId: 200",
        log.getEditLogManifest(1).toString());
    assertEquals("[[101,200]] CommittedTxId: 200",
        log.getEditLogManifest(101).toString());
  }

  /**
   * Create a mock NNStorage object with several directories, each directory
   * holding edit logs according to a specification. Each directory
   * is specified by a pipe-separated string. For example:
   * <code>[1,100]|[101,200]</code> specifies a directory which
   * includes two finalized segments, one from 1-100, and one from 101-200.
   * The syntax <code>[1,]</code> specifies an in-progress log starting at
   * txid 1.
   */
  private NNStorage mockStorageWithEdits(String... editsDirSpecs)
      throws IOException {
    List<StorageDirectory> sds = Lists.newArrayList();
    List<URI> uris = Lists.newArrayList();

    NNStorage storage = Mockito.mock(NNStorage.class);
    for (String dirSpec : editsDirSpecs) {
      List<String> files = Lists.newArrayList();

      String[] logSpecs = dirSpec.split("\\|");
      for (String logSpec : logSpecs) {
        Matcher m = Pattern.compile("\\[(\\d+),(\\d+)?\\]").matcher(logSpec);
        assertTrue("bad spec: " + logSpec, m.matches());
        if (m.group(2) == null) {
          files.add(NNStorage.getInProgressEditsFileName(
              Long.parseLong(m.group(1))));
        } else {
          files.add(NNStorage.getFinalizedEditsFileName(
              Long.parseLong(m.group(1)),
              Long.parseLong(m.group(2))));
        }
      }
      StorageDirectory sd = FSImageTestUtil.mockStorageDirectory(
          NameNodeDirType.EDITS, false,
          files.toArray(new String[0]));
      sds.add(sd);
      URI u = URI.create("file:///storage"+ Math.random());
      Mockito.doReturn(sd).when(storage).getStorageDirectory(u);
      uris.add(u);
    }

    Mockito.doReturn(sds).when(storage).dirIterable(NameNodeDirType.EDITS);
    Mockito.doReturn(uris).when(storage).getEditsDirectories();
    return storage;
  }

  /**
   * Specification for a failure during #setupEdits
   */
  static class AbortSpec {
    final int roll;
    final int logindex;

    /**
     * Construct the failure specification.
     * @param roll number to fail after. e.g. 1 to fail after the first roll
     * @param logindex index of journal to fail.
     */
    AbortSpec(int roll, int logindex) {
      this.roll = roll;
      this.logindex = logindex;
    }
  }

  final static int TXNS_PER_ROLL = 10;
  final static int TXNS_PER_FAIL = 2;

  /**
   * Set up directories for tests.
   *
   * Each rolled file is 10 txns long.
   * A failed file is 2 txns long.
   *
   * @param editUris directories to create edit logs in
   * @param numrolls number of times to roll the edit log during setup
   * @param closeOnFinish whether to close the edit log after setup
   * @param abortAtRolls Specifications for when to fail, see AbortSpec
   */
  public static NNStorage setupEdits(List<URI> editUris, int numrolls,
      boolean closeOnFinish, AbortSpec... abortAtRolls) throws IOException {
    List<AbortSpec> aborts = new ArrayList<AbortSpec>(Arrays.asList(abortAtRolls));
    NNStorage storage = new NNStorage(getConf(),
        Collections.<URI>emptyList(),
        editUris);
    storage.format(new NamespaceInfo());
    FSEditLog editlog = getFSEditLog(storage);
    // open the edit log and add two transactions
    // logGenerationStamp is used, simply because it doesn't
    // require complex arguments.
    editlog.initJournalsForWrite();
    editlog.openForWrite(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
    for (int i = 2; i < TXNS_PER_ROLL; i++) {
      editlog.logGenerationStamp((long) 0);
    }
    editlog.logSync();

    // Go into edit log rolling loop.
    // On each roll, the abortAtRolls abort specs are
    // checked to see if an abort is required. If so the
    // the specified journal is aborted. It will be brought
    // back into rotation automatically by rollEditLog
    for (int i = 0; i < numrolls; i++) {
      editlog.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);

      editlog.logGenerationStamp((long) i);
      editlog.logSync();

      while (aborts.size() > 0
          && aborts.get(0).roll == (i+1)) {
        AbortSpec spec = aborts.remove(0);
        editlog.getJournals().get(spec.logindex).abort();
      }

      for (int j = 3; j < TXNS_PER_ROLL; j++) {
        editlog.logGenerationStamp((long) i);
      }
      editlog.logSync();
    }

    if (closeOnFinish) {
      editlog.close();
    }

    FSImageTestUtil.logStorageContents(LOG, storage);
    return storage;
  }

  /**
   * Set up directories for tests.
   *
   * Each rolled file is 10 txns long.
   * A failed file is 2 txns long.
   *
   * @param editUris directories to create edit logs in
   * @param numrolls number of times to roll the edit log during setup
   * @param abortAtRolls Specifications for when to fail, see AbortSpec
   */
  public static NNStorage setupEdits(List<URI> editUris, int numrolls,
      AbortSpec... abortAtRolls) throws IOException {
    return setupEdits(editUris, numrolls, true, abortAtRolls);
  }

  /**
   * Test loading an editlog which has had both its storage fail
   * on alternating rolls. Two edit log directories are created.
   * The first one fails on odd rolls, the second on even. Test
   * that we are able to load the entire editlog regardless.
*/ @Test public void testAlternatingJournalFailure() throws IOException { File f1 = new File(TEST_DIR + "/alternatingjournaltest0"); File f2 = new File(TEST_DIR + "/alternatingjournaltest1"); List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI()); NNStorage storage = setupEdits(editUris, 10, new AbortSpec(1, 0), new AbortSpec(2, 1), new AbortSpec(3, 0), new AbortSpec(4, 1), new AbortSpec(5, 0), new AbortSpec(6, 1), new AbortSpec(7, 0), new AbortSpec(8, 1), new AbortSpec(9, 0), new AbortSpec(10, 1)); long totaltxnread = 0; FSEditLog editlog = getFSEditLog(storage); editlog.initJournalsForWrite(); long startTxId = 1; Iterable<EditLogInputStream> editStreams = editlog.selectInputStreams(startTxId, TXNS_PER_ROLL*11); for (EditLogInputStream edits : editStreams) { FSEditLogLoader.EditLogValidation val = FSEditLogLoader.scanEditLog(edits, Long.MAX_VALUE); long read = (val.getEndTxId() - edits.getFirstTxId()) + 1; LOG.info("Loading edits " + edits + " read " + read); assertEquals(startTxId, edits.getFirstTxId()); startTxId += read; totaltxnread += read; } editlog.close(); storage.close(); assertEquals(TXNS_PER_ROLL*11, totaltxnread); } /** * Test loading an editlog with gaps. A single editlog directory * is set up. On of the edit log files is deleted. This should * fail when selecting the input streams as it will not be able * to select enough streams to load up to 4*TXNS_PER_ROLL. * There should be 4*TXNS_PER_ROLL transactions as we rolled 3 * times. 
*/ @Test public void testLoadingWithGaps() throws IOException { File f1 = new File(TEST_DIR + "/gaptest0"); List<URI> editUris = ImmutableList.of(f1.toURI()); NNStorage storage = setupEdits(editUris, 3); final long startGapTxId = 1*TXNS_PER_ROLL + 1; final long endGapTxId = 2*TXNS_PER_ROLL; File[] files = new File(f1, "current").listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name.startsWith(NNStorage.getFinalizedEditsFileName(startGapTxId, endGapTxId))) { return true; } return false; } }); assertEquals(1, files.length); assertTrue(files[0].delete()); FSEditLog editlog = getFSEditLog(storage); editlog.initJournalsForWrite(); long startTxId = 1; try { editlog.selectInputStreams(startTxId, 4*TXNS_PER_ROLL); fail("Should have thrown exception"); } catch (IOException ioe) { GenericTestUtils.assertExceptionContains( "Gap in transactions. Expected to be able to read up until " + "at least txid 40 but unable to find any edit logs containing " + "txid 11", ioe); } } /** * Test that we can read from a byte stream without crashing. 
* */ static void validateNoCrash(byte garbage[]) throws IOException { final File TEST_LOG_NAME = new File(TEST_DIR, "test_edit_log"); EditLogFileOutputStream elfos = null; EditLogFileInputStream elfis = null; try { elfos = new EditLogFileOutputStream(getConf(), TEST_LOG_NAME, 0); elfos.create(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION); elfos.writeRaw(garbage, 0, garbage.length); elfos.setReadyToFlush(); elfos.flushAndSync(true); elfos.close(); elfos = null; elfis = new EditLogFileInputStream(TEST_LOG_NAME); // verify that we can read everything without killing the JVM or // throwing an exception other than IOException try { while (true) { FSEditLogOp op = elfis.readOp(); if (op == null) break; } } catch (IOException e) { } catch (Throwable t) { fail("Caught non-IOException throwable " + StringUtils.stringifyException(t)); } } finally { if ((elfos != null) && (elfos.isOpen())) elfos.close(); if (elfis != null) elfis.close(); } } static byte[][] invalidSequenecs = null; /** * "Fuzz" test for the edit log. * * This tests that we can read random garbage from the edit log without * crashing the JVM or throwing an unchecked exception. */ @Test public void testFuzzSequences() throws IOException { final int MAX_GARBAGE_LENGTH = 512; final int MAX_INVALID_SEQ = 5000; // The seed to use for our random number generator. When given the same // seed, Java.util.Random will always produce the same sequence of values. // This is important because it means that the test is deterministic and // repeatable on any machine. 
final int RANDOM_SEED = 123; Random r = new Random(RANDOM_SEED); for (int i = 0; i < MAX_INVALID_SEQ; i++) { byte[] garbage = new byte[r.nextInt(MAX_GARBAGE_LENGTH)]; r.nextBytes(garbage); validateNoCrash(garbage); } } private static long readAllEdits(Collection<EditLogInputStream> streams, long startTxId) throws IOException { FSEditLogOp op; long nextTxId = startTxId; long numTx = 0; for (EditLogInputStream s : streams) { while (true) { op = s.readOp(); if (op == null) break; if (op.getTransactionId() != nextTxId) { throw new IOException("out of order transaction ID! expected " + nextTxId + " but got " + op.getTransactionId() + " when " + "reading " + s.getName()); } numTx++; nextTxId = op.getTransactionId() + 1; } } return numTx; } /** * Test edit log failover. If a single edit log is missing, other * edits logs should be used instead. */ @Test public void testEditLogFailOverFromMissing() throws IOException { File f1 = new File(TEST_DIR + "/failover0"); File f2 = new File(TEST_DIR + "/failover1"); List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI()); NNStorage storage = setupEdits(editUris, 3); final long startErrorTxId = 1*TXNS_PER_ROLL + 1; final long endErrorTxId = 2*TXNS_PER_ROLL; File[] files = new File(f1, "current").listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name.startsWith(NNStorage.getFinalizedEditsFileName(startErrorTxId, endErrorTxId))) { return true; } return false; } }); assertEquals(1, files.length); assertTrue(files[0].delete()); FSEditLog editlog = getFSEditLog(storage); editlog.initJournalsForWrite(); long startTxId = 1; Collection<EditLogInputStream> streams = null; try { streams = editlog.selectInputStreams(startTxId, 4*TXNS_PER_ROLL); readAllEdits(streams, startTxId); } catch (IOException e) { LOG.error("edit log failover didn't work", e); fail("Edit log failover didn't work"); } finally { IOUtils.cleanup(null, streams.toArray(new EditLogInputStream[0])); } } /** * Test edit log 
failover from a corrupt edit log */ @Test public void testEditLogFailOverFromCorrupt() throws IOException { File f1 = new File(TEST_DIR + "/failover0"); File f2 = new File(TEST_DIR + "/failover1"); List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI()); NNStorage storage = setupEdits(editUris, 3); final long startErrorTxId = 1*TXNS_PER_ROLL + 1; final long endErrorTxId = 2*TXNS_PER_ROLL; File[] files = new File(f1, "current").listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name.startsWith(NNStorage.getFinalizedEditsFileName(startErrorTxId, endErrorTxId))) { return true; } return false; } }); assertEquals(1, files.length); long fileLen = files[0].length(); LOG.debug("Corrupting Log File: " + files[0] + " len: " + fileLen); RandomAccessFile rwf = new RandomAccessFile(files[0], "rw"); rwf.seek(fileLen-4); // seek to checksum bytes int b = rwf.readInt(); rwf.seek(fileLen-4); rwf.writeInt(b+1); rwf.close(); FSEditLog editlog = getFSEditLog(storage); editlog.initJournalsForWrite(); long startTxId = 1; Collection<EditLogInputStream> streams = null; try { streams = editlog.selectInputStreams(startTxId, 4*TXNS_PER_ROLL); readAllEdits(streams, startTxId); } catch (IOException e) { LOG.error("edit log failover didn't work", e); fail("Edit log failover didn't work"); } finally { IOUtils.cleanup(null, streams.toArray(new EditLogInputStream[0])); } } /** * Test creating a directory with lots and lots of edit log segments */ @Test public void testManyEditLogSegments() throws IOException { final int NUM_EDIT_LOG_ROLLS = 1000; // start a cluster Configuration conf = getConf(); MiniDFSCluster cluster = null; FileSystem fileSys = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); final FSNamesystem namesystem = cluster.getNamesystem(); FSImage fsimage = namesystem.getFSImage(); final FSEditLog editLog = fsimage.getEditLog(); for (int 
i = 0; i < NUM_EDIT_LOG_ROLLS; i++){ editLog.logSetReplication("fakefile" + i, (short)(i % 3)); assertExistsInStorageDirs( cluster, NameNodeDirType.EDITS, NNStorage.getInProgressEditsFileName((i * 3) + 1)); editLog.logSync(); editLog.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION); assertExistsInStorageDirs( cluster, NameNodeDirType.EDITS, NNStorage.getFinalizedEditsFileName((i * 3) + 1, (i * 3) + 3)); } editLog.close(); } finally { if(fileSys != null) fileSys.close(); if(cluster != null) cluster.shutdown(); } // How long does it take to read through all these edit logs? long startTime = Time.now(); try { cluster = new MiniDFSCluster.Builder(conf). numDataNodes(NUM_DATA_NODES).build(); cluster.waitActive(); } finally { if (cluster != null) { cluster.shutdown(); } } long endTime = Time.now(); double delta = ((float)(endTime - startTime)) / 1000.0; LOG.info(String.format("loaded %d edit log segments in %.2f seconds", NUM_EDIT_LOG_ROLLS, delta)); } /** * Edit log op instances are cached internally using thread-local storage. * This test checks that the cached instances are reset in between different * transactions processed on the same thread, so that we don't accidentally * apply incorrect attributes to an inode. * * @throws IOException if there is an I/O error */ @Test public void testResetThreadLocalCachedOps() throws IOException { Configuration conf = new HdfsConfiguration(); conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true); // Set single handler thread, so all transactions hit same thread-local ops. conf.setInt(DFSConfigKeys.DFS_NAMENODE_HANDLER_COUNT_KEY, 1); MiniDFSCluster cluster = null; FileSystem fileSys = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); // Create /dir1 with a default ACL. 
Path dir1 = new Path("/dir1"); fileSys.mkdirs(dir1); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(DEFAULT, USER, "foo", READ_EXECUTE)); fileSys.modifyAclEntries(dir1, aclSpec); // /dir1/dir2 is expected to clone the default ACL. Path dir2 = new Path("/dir1/dir2"); fileSys.mkdirs(dir2); // /dir1/file1 is expected to clone the default ACL. Path file1 = new Path("/dir1/file1"); fileSys.create(file1).close(); // /dir3 is not a child of /dir1, so must not clone the default ACL. Path dir3 = new Path("/dir3"); fileSys.mkdirs(dir3); // /file2 is not a child of /dir1, so must not clone the default ACL. Path file2 = new Path("/file2"); fileSys.create(file2).close(); // Restart and assert the above stated expectations. IOUtils.cleanup(LOG, fileSys); cluster.restartNameNode(); fileSys = cluster.getFileSystem(); assertFalse(fileSys.getAclStatus(dir1).getEntries().isEmpty()); assertFalse(fileSys.getAclStatus(dir2).getEntries().isEmpty()); assertFalse(fileSys.getAclStatus(file1).getEntries().isEmpty()); assertTrue(fileSys.getAclStatus(dir3).getEntries().isEmpty()); assertTrue(fileSys.getAclStatus(file2).getEntries().isEmpty()); } finally { IOUtils.cleanup(LOG, fileSys); if (cluster != null) { cluster.shutdown(); } } } class TestAppender extends AppenderSkeleton { private final List<LoggingEvent> log = new ArrayList<>(); @Override public boolean requiresLayout() { return false; } @Override protected void append(final LoggingEvent loggingEvent) { log.add(loggingEvent); } @Override public void close() { } public List<LoggingEvent> getLog() { return new ArrayList<>(log); } } /** * * @throws Exception */ @Test public void testReadActivelyUpdatedLog() throws Exception { final TestAppender appender = new TestAppender(); LogManager.getRootLogger().addAppender(appender); Configuration conf = new HdfsConfiguration(); conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true); // Set single handler thread, so all transactions hit same thread-local ops. 
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HANDLER_COUNT_KEY, 1); MiniDFSCluster cluster = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); cluster.waitActive(); FSImage fsimage = cluster.getNamesystem().getFSImage(); StorageDirectory sd = fsimage.getStorage().getStorageDir(0); final DistributedFileSystem fileSys = cluster.getFileSystem(); DFSInotifyEventInputStream events = fileSys.getInotifyEventStream(); fileSys.mkdirs(new Path("/test")); fileSys.mkdirs(new Path("/test/dir1")); fileSys.delete(new Path("/test/dir1"), true); fsimage.getEditLog().logSync(); fileSys.mkdirs(new Path("/test/dir2")); final File inProgressEdit = NNStorage.getInProgressEditsFile(sd, 1); assertTrue(inProgressEdit.exists()); EditLogFileInputStream elis = new EditLogFileInputStream(inProgressEdit); FSEditLogOp op; long pos = 0; while (true) { op = elis.readOp(); if (op != null && op.opCode != FSEditLogOpCodes.OP_INVALID) { pos = elis.getPosition(); } else { break; } } elis.close(); assertTrue(pos > 0); RandomAccessFile rwf = new RandomAccessFile(inProgressEdit, "rw"); rwf.seek(pos); assertEquals(rwf.readByte(), (byte) -1); rwf.seek(pos + 1); rwf.writeByte(2); rwf.close(); events.poll(); String pattern = "Caught exception after reading (.*) ops"; Pattern r = Pattern.compile(pattern); final List<LoggingEvent> log = appender.getLog(); for (LoggingEvent event : log) { Matcher m = r.matcher(event.getRenderedMessage()); if (m.find()) { fail("Should not try to read past latest syned edit log op"); } } } finally { if (cluster != null) { cluster.shutdown(); } LogManager.getRootLogger().removeAppender(appender); } } }
apache-2.0
ehsan/js-symbolic-executor
cvc3/java/src/cvc3/Embedded.java
2909
package cvc3; import java.util.*; import java.io.*; /** Wrapper for a c++ object as a java Object. see README for details on garbage collection, i.e. interplay of delete, finalize, and EmbeddedManager to destruct the embedded c++ object. */ public abstract class Embedded { // load jni c++ library static { System.loadLibrary("cvc3jni"); /* // for debugging: stop here by waiting for a key press, // and attach c++ debugger System.out.println("Loadded cvc3jni"); try { BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); br.readLine(); } catch (IOException ioe) { } */ } /// Attributes // embedded object protected Object d_embedded; // embedded object manager private final EmbeddedManager d_embeddedManager; /// Constructor // initialize with embedded object and EmbeddedManager // if EmbeddedManager is null then delete must be called before // Embedded is garbage collected protected Embedded(Object Embedded, EmbeddedManager embeddedManager) { //System.out.println("Create: Embedded"); assert(Embedded != null); d_embedded = Embedded; d_embeddedManager = embeddedManager; } // access to embedded c++ object public synchronized Object embedded() { return d_embedded; } // access to EmbeddedManager (might be null if none used) public EmbeddedManager embeddedManager() { return d_embeddedManager; } // check if already destructed // (or queued for destruction in embeddedManager) public synchronized boolean isDeleted() { return (d_embedded == null); } // delete embedded object or enqueue it for deletion public synchronized void delete() throws Cvc3Exception { if (isDeleted()) return; // no embedded manager, so should be in main thread: // destruct right away if (d_embeddedManager == null) { EmbeddedManager.jniDelete(d_embedded); } // could be in finalizer, so queue in embeddedManager; // unless the embeddedManager is already deleted, // then its (and this') ValidityChecker has been delete. 
// assuming this is an Expr or a Theorem it's embedded object // has then already been deleted as well. else { synchronized(d_embeddedManager) { if (!d_embeddedManager.isDeleted()) { d_embeddedManager.register(this); } } } d_embedded = null; } // ensure that delete is called if finalization occurs public void finalize() throws Throwable { try { // no embeddedManager, so deleted should have been called if (d_embeddedManager == null) { if (d_embedded != null) { assert(false); // System.out.println("Embedded.Finalizer: should never be called"); throw new Error("Embedded.Finalizer: should never be called"); } } else if (!d_embeddedManager.isDeleted()) { delete(); } } finally { super.finalize(); } } }
apache-2.0
shun634501730/java_source_cn
src_en/com/sun/java/swing/plaf/windows/WindowsScrollBarUI.java
18587
/* * Copyright (c) 1997, 2008, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package com.sun.java.swing.plaf.windows; import java.awt.*; import java.awt.event.*; import java.awt.image.*; import java.lang.ref.*; import java.util.*; import javax.swing.plaf.basic.*; import javax.swing.*; import javax.swing.plaf.ComponentUI; import static com.sun.java.swing.plaf.windows.TMSchema.*; import static com.sun.java.swing.plaf.windows.XPStyle.Skin; /** * Windows rendition of the component. * <p> * <strong>Warning:</strong> * Serialized objects of this class will not be compatible with * future Swing releases. The current serialization support is appropriate * for short term storage or RMI between applications running the same * version of Swing. A future release of Swing will provide support for * long term persistence. */ public class WindowsScrollBarUI extends BasicScrollBarUI { private Grid thumbGrid; private Grid highlightGrid; private Dimension horizontalThumbSize; private Dimension verticalThumbSize; /** * Creates a UI for a JScrollBar. 
* * @param c the text field * @return the UI */ public static ComponentUI createUI(JComponent c) { return new WindowsScrollBarUI(); } protected void installDefaults() { super.installDefaults(); XPStyle xp = XPStyle.getXP(); if (xp != null) { scrollbar.setBorder(null); horizontalThumbSize = getSize(scrollbar, xp, Part.SBP_THUMBBTNHORZ); verticalThumbSize = getSize(scrollbar, xp, Part.SBP_THUMBBTNVERT); } else { horizontalThumbSize = null; verticalThumbSize = null; } } private static Dimension getSize(Component component, XPStyle xp, Part part) { Skin skin = xp.getSkin(component, part); return new Dimension(skin.getWidth(), skin.getHeight()); } @Override protected Dimension getMinimumThumbSize() { if ((horizontalThumbSize == null) || (verticalThumbSize == null)) { return super.getMinimumThumbSize(); } return JScrollBar.HORIZONTAL == scrollbar.getOrientation() ? horizontalThumbSize : verticalThumbSize; } public void uninstallUI(JComponent c) { super.uninstallUI(c); thumbGrid = highlightGrid = null; } protected void configureScrollBarColors() { super.configureScrollBarColors(); Color color = UIManager.getColor("ScrollBar.trackForeground"); if (color != null && trackColor != null) { thumbGrid = Grid.getGrid(color, trackColor); } color = UIManager.getColor("ScrollBar.trackHighlightForeground"); if (color != null && trackHighlightColor != null) { highlightGrid = Grid.getGrid(color, trackHighlightColor); } } protected JButton createDecreaseButton(int orientation) { return new WindowsArrowButton(orientation, UIManager.getColor("ScrollBar.thumb"), UIManager.getColor("ScrollBar.thumbShadow"), UIManager.getColor("ScrollBar.thumbDarkShadow"), UIManager.getColor("ScrollBar.thumbHighlight")); } protected JButton createIncreaseButton(int orientation) { return new WindowsArrowButton(orientation, UIManager.getColor("ScrollBar.thumb"), UIManager.getColor("ScrollBar.thumbShadow"), UIManager.getColor("ScrollBar.thumbDarkShadow"), UIManager.getColor("ScrollBar.thumbHighlight")); } /** * 
{@inheritDoc} * @since 1.6 */ @Override protected ArrowButtonListener createArrowButtonListener(){ // we need to repaint the entire scrollbar because state change for each // button causes a state change for the thumb and other button on Vista if(XPStyle.isVista()) { return new ArrowButtonListener() { public void mouseEntered(MouseEvent evt) { repaint(); super.mouseEntered(evt); } public void mouseExited(MouseEvent evt) { repaint(); super.mouseExited(evt); } private void repaint() { scrollbar.repaint(); } }; } else { return super.createArrowButtonListener(); } } protected void paintTrack(Graphics g, JComponent c, Rectangle trackBounds){ boolean v = (scrollbar.getOrientation() == JScrollBar.VERTICAL); XPStyle xp = XPStyle.getXP(); if (xp != null) { JScrollBar sb = (JScrollBar)c; State state = State.NORMAL; // Pending: Implement rollover (hot) and pressed if (!sb.isEnabled()) { state = State.DISABLED; } Part part = v ? Part.SBP_LOWERTRACKVERT : Part.SBP_LOWERTRACKHORZ; xp.getSkin(sb, part).paintSkin(g, trackBounds, state); } else if (thumbGrid == null) { super.paintTrack(g, c, trackBounds); } else { thumbGrid.paint(g, trackBounds.x, trackBounds.y, trackBounds.width, trackBounds.height); if (trackHighlight == DECREASE_HIGHLIGHT) { paintDecreaseHighlight(g); } else if (trackHighlight == INCREASE_HIGHLIGHT) { paintIncreaseHighlight(g); } } } protected void paintThumb(Graphics g, JComponent c, Rectangle thumbBounds) { boolean v = (scrollbar.getOrientation() == JScrollBar.VERTICAL); XPStyle xp = XPStyle.getXP(); if (xp != null) { JScrollBar sb = (JScrollBar)c; State state = State.NORMAL; if (!sb.isEnabled()) { state = State.DISABLED; } else if (isDragging) { state = State.PRESSED; } else if (isThumbRollover()) { state = State.HOT; } else if (XPStyle.isVista()) { if ((incrButton != null && incrButton.getModel().isRollover()) || (decrButton != null && decrButton.getModel().isRollover())) { state = State.HOVER; } } // Paint thumb Part thumbPart = v ? 
Part.SBP_THUMBBTNVERT : Part.SBP_THUMBBTNHORZ; xp.getSkin(sb, thumbPart).paintSkin(g, thumbBounds, state); // Paint gripper Part gripperPart = v ? Part.SBP_GRIPPERVERT : Part.SBP_GRIPPERHORZ; Skin skin = xp.getSkin(sb, gripperPart); Insets gripperInsets = xp.getMargin(c, thumbPart, null, Prop.CONTENTMARGINS); if (gripperInsets == null || (v && (thumbBounds.height - gripperInsets.top - gripperInsets.bottom >= skin.getHeight())) || (!v && (thumbBounds.width - gripperInsets.left - gripperInsets.right >= skin.getWidth()))) { skin.paintSkin(g, thumbBounds.x + (thumbBounds.width - skin.getWidth()) / 2, thumbBounds.y + (thumbBounds.height - skin.getHeight()) / 2, skin.getWidth(), skin.getHeight(), state); } } else { super.paintThumb(g, c, thumbBounds); } } protected void paintDecreaseHighlight(Graphics g) { if (highlightGrid == null) { super.paintDecreaseHighlight(g); } else { Insets insets = scrollbar.getInsets(); Rectangle thumbR = getThumbBounds(); int x, y, w, h; if (scrollbar.getOrientation() == JScrollBar.VERTICAL) { x = insets.left; y = decrButton.getY() + decrButton.getHeight(); w = scrollbar.getWidth() - (insets.left + insets.right); h = thumbR.y - y; } else { x = decrButton.getX() + decrButton.getHeight(); y = insets.top; w = thumbR.x - x; h = scrollbar.getHeight() - (insets.top + insets.bottom); } highlightGrid.paint(g, x, y, w, h); } } protected void paintIncreaseHighlight(Graphics g) { if (highlightGrid == null) { super.paintDecreaseHighlight(g); } else { Insets insets = scrollbar.getInsets(); Rectangle thumbR = getThumbBounds(); int x, y, w, h; if (scrollbar.getOrientation() == JScrollBar.VERTICAL) { x = insets.left; y = thumbR.y + thumbR.height; w = scrollbar.getWidth() - (insets.left + insets.right); h = incrButton.getY() - y; } else { x = thumbR.x + thumbR.width; y = insets.top; w = incrButton.getX() - x; h = scrollbar.getHeight() - (insets.top + insets.bottom); } highlightGrid.paint(g, x, y, w, h); } } /** * {@inheritDoc} * @since 1.6 */ @Override 
protected void setThumbRollover(boolean active) { boolean old = isThumbRollover(); super.setThumbRollover(active); // we need to repaint the entire scrollbar because state change for thumb // causes state change for incr and decr buttons on Vista if(XPStyle.isVista() && active != old) { scrollbar.repaint(); } } /** * WindowsArrowButton is used for the buttons to position the * document up/down. It differs from BasicArrowButton in that the * preferred size is always a square. */ private class WindowsArrowButton extends BasicArrowButton { public WindowsArrowButton(int direction, Color background, Color shadow, Color darkShadow, Color highlight) { super(direction, background, shadow, darkShadow, highlight); } public WindowsArrowButton(int direction) { super(direction); } public void paint(Graphics g) { XPStyle xp = XPStyle.getXP(); if (xp != null) { ButtonModel model = getModel(); Skin skin = xp.getSkin(this, Part.SBP_ARROWBTN); State state = null; boolean jointRollover = XPStyle.isVista() && (isThumbRollover() || (this == incrButton && decrButton.getModel().isRollover()) || (this == decrButton && incrButton.getModel().isRollover())); // normal, rollover, pressed, disabled if (model.isArmed() && model.isPressed()) { switch (direction) { case NORTH: state = State.UPPRESSED; break; case SOUTH: state = State.DOWNPRESSED; break; case WEST: state = State.LEFTPRESSED; break; case EAST: state = State.RIGHTPRESSED; break; } } else if (!model.isEnabled()) { switch (direction) { case NORTH: state = State.UPDISABLED; break; case SOUTH: state = State.DOWNDISABLED; break; case WEST: state = State.LEFTDISABLED; break; case EAST: state = State.RIGHTDISABLED; break; } } else if (model.isRollover() || model.isPressed()) { switch (direction) { case NORTH: state = State.UPHOT; break; case SOUTH: state = State.DOWNHOT; break; case WEST: state = State.LEFTHOT; break; case EAST: state = State.RIGHTHOT; break; } } else if (jointRollover) { switch (direction) { case NORTH: state = 
State.UPHOVER; break; case SOUTH: state = State.DOWNHOVER; break; case WEST: state = State.LEFTHOVER; break; case EAST: state = State.RIGHTHOVER; break; } } else { switch (direction) { case NORTH: state = State.UPNORMAL; break; case SOUTH: state = State.DOWNNORMAL; break; case WEST: state = State.LEFTNORMAL; break; case EAST: state = State.RIGHTNORMAL; break; } } skin.paintSkin(g, 0, 0, getWidth(), getHeight(), state); } else { super.paint(g); } } public Dimension getPreferredSize() { int size = 16; if (scrollbar != null) { switch (scrollbar.getOrientation()) { case JScrollBar.VERTICAL: size = scrollbar.getWidth(); break; case JScrollBar.HORIZONTAL: size = scrollbar.getHeight(); break; } size = Math.max(size, 5); } return new Dimension(size, size); } } /** * This should be pulled out into its own class if more classes need to * use it. * <p> * Grid is used to draw the track for windows scrollbars. Grids * are cached in a HashMap, with the key being the rgb components * of the foreground/background colors. Further the Grid is held through * a WeakRef so that it can be freed when no longer needed. As the * Grid is rather expensive to draw, it is drawn in a BufferedImage. */ private static class Grid { private static final int BUFFER_SIZE = 64; private static HashMap<String, WeakReference<Grid>> map; private BufferedImage image; static { map = new HashMap<String, WeakReference<Grid>>(); } public static Grid getGrid(Color fg, Color bg) { String key = fg.getRGB() + " " + bg.getRGB(); WeakReference<Grid> ref = map.get(key); Grid grid = (ref == null) ? 
null : ref.get(); if (grid == null) { grid = new Grid(fg, bg); map.put(key, new WeakReference<Grid>(grid)); } return grid; } public Grid(Color fg, Color bg) { int cmap[] = { fg.getRGB(), bg.getRGB() }; IndexColorModel icm = new IndexColorModel(8, 2, cmap, 0, false, -1, DataBuffer.TYPE_BYTE); image = new BufferedImage(BUFFER_SIZE, BUFFER_SIZE, BufferedImage.TYPE_BYTE_INDEXED, icm); Graphics g = image.getGraphics(); try { g.setClip(0, 0, BUFFER_SIZE, BUFFER_SIZE); paintGrid(g, fg, bg); } finally { g.dispose(); } } /** * Paints the grid into the specified Graphics at the specified * location. */ public void paint(Graphics g, int x, int y, int w, int h) { Rectangle clipRect = g.getClipBounds(); int minX = Math.max(x, clipRect.x); int minY = Math.max(y, clipRect.y); int maxX = Math.min(clipRect.x + clipRect.width, x + w); int maxY = Math.min(clipRect.y + clipRect.height, y + h); if (maxX <= minX || maxY <= minY) { return; } int xOffset = (minX - x) % 2; for (int xCounter = minX; xCounter < maxX; xCounter += BUFFER_SIZE) { int yOffset = (minY - y) % 2; int width = Math.min(BUFFER_SIZE - xOffset, maxX - xCounter); for (int yCounter = minY; yCounter < maxY; yCounter += BUFFER_SIZE) { int height = Math.min(BUFFER_SIZE - yOffset, maxY - yCounter); g.drawImage(image, xCounter, yCounter, xCounter + width, yCounter + height, xOffset, yOffset, xOffset + width, yOffset + height, null); if (yOffset != 0) { yCounter -= yOffset; yOffset = 0; } } if (xOffset != 0) { xCounter -= xOffset; xOffset = 0; } } } /** * Actually renders the grid into the Graphics <code>g</code>. 
*/ private void paintGrid(Graphics g, Color fg, Color bg) { Rectangle clipRect = g.getClipBounds(); g.setColor(bg); g.fillRect(clipRect.x, clipRect.y, clipRect.width, clipRect.height); g.setColor(fg); g.translate(clipRect.x, clipRect.y); int width = clipRect.width; int height = clipRect.height; int xCounter = clipRect.x % 2; for (int end = width - height; xCounter < end; xCounter += 2) { g.drawLine(xCounter, 0, xCounter + height, height); } for (int end = width; xCounter < end; xCounter += 2) { g.drawLine(xCounter, 0, width, width - xCounter); } int yCounter = ((clipRect.x % 2) == 0) ? 2 : 1; for (int end = height - width; yCounter < end; yCounter += 2) { g.drawLine(0, yCounter, width, yCounter + width); } for (int end = height; yCounter < end; yCounter += 2) { g.drawLine(0, yCounter, height - yCounter, height); } g.translate(-clipRect.x, -clipRect.y); } } }
apache-2.0
apurtell/hbase
hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestDeadServerMetricRegionChore.java
2127
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master.assignment; import static org.junit.Assert.fail; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; /** * Testcase for HBASE-23682. 
*/ @Category({ MasterTests.class, MediumTests.class }) public class TestDeadServerMetricRegionChore { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestDeadServerMetricRegionChore.class); protected HBaseTestingUtil util; @Before public void setUp() throws Exception { util = new HBaseTestingUtil(); // Disable DeadServerMetricRegionChore util.getConfiguration() .setInt(AssignmentManager.DEAD_REGION_METRIC_CHORE_INTERVAL_MSEC_CONF_KEY, -1); } @After public void tearDown() throws Exception { this.util.shutdownMiniCluster(); } @Test public void testDeadServerMetricRegionChore() throws Exception { try { this.util.startMiniCluster(); } catch (Exception e) { fail("Start cluster failed"); } } }
apache-2.0
facebook/presto
presto-thrift-api/src/main/java/com/facebook/presto/thrift/api/udf/UdfExecutionFailureInfo.java
2491
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.thrift.api.udf;

import com.facebook.drift.annotations.ThriftConstructor;
import com.facebook.drift.annotations.ThriftField;
import com.facebook.drift.annotations.ThriftStruct;
import com.google.common.collect.ImmutableList;

import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

import java.util.List;

import static com.facebook.drift.annotations.ThriftField.Recursiveness.TRUE;
import static com.facebook.drift.annotations.ThriftField.Requiredness.OPTIONAL;
import static java.util.Objects.requireNonNull;

/**
 * Thrift-serializable description of a failure raised during remote UDF
 * execution: the exception type name, message, an optional (recursive) cause,
 * any suppressed failures, and the stack trace as formatted strings.
 * Immutable; list fields are defensively copied on construction.
 */
@Immutable
@ThriftStruct
public class UdfExecutionFailureInfo
{
    // Fully-qualified name of the exception type.
    private final String type;
    // Exception message; required non-null at construction (but see getMessage below).
    private final String message;
    // Failure that caused this one; null when there is no cause (recursion terminator).
    private final UdfExecutionFailureInfo cause;
    // Failures suppressed while handling this one (may be empty).
    private final List<UdfExecutionFailureInfo> suppressed;
    // Stack trace frames rendered as strings.
    private final List<String> stack;

    /**
     * @param type       exception type name, must not be null
     * @param message    exception message, must not be null
     * @param cause      causing failure, or null for the root cause
     * @param suppressed suppressed failures; copied into an immutable list
     * @param stack      formatted stack frames; copied into an immutable list
     */
    @ThriftConstructor
    public UdfExecutionFailureInfo(
            String type,
            String message,
            @Nullable UdfExecutionFailureInfo cause,
            List<UdfExecutionFailureInfo> suppressed,
            List<String> stack)
    {
        this.type = requireNonNull(type, "type is null");
        this.message = requireNonNull(message, "message is null");
        this.cause = cause;
        this.suppressed = ImmutableList.copyOf(suppressed);
        this.stack = ImmutableList.copyOf(stack);
    }

    @ThriftField(1)
    public String getType()
    {
        return type;
    }

    // NOTE(review): annotated @Nullable although the constructor rejects null
    // messages — the annotation is likely for Thrift wire compatibility;
    // confirm whether deserialized instances can carry a null message.
    @Nullable
    @ThriftField(2)
    public String getMessage()
    {
        return message;
    }

    // Recursive optional field: the chain of causes bottoms out at null.
    @Nullable
    @ThriftField(value = 3, isRecursive = TRUE, requiredness = OPTIONAL)
    public UdfExecutionFailureInfo getCause()
    {
        return cause;
    }

    @ThriftField(4)
    public List<UdfExecutionFailureInfo> getSuppressed()
    {
        return suppressed;
    }

    @ThriftField(5)
    public List<String> getStack()
    {
        return stack;
    }
}
apache-2.0
dke-knu/i2am
rdma-based-storm/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordSpout.java
2496
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.storm.mongodb.topology;

import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichSpout;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

import java.util.Map;
import java.util.Random;
import java.util.UUID;

/**
 * Example spout that endlessly emits random fruit words on a single field
 * named "word", tagging each tuple with a random UUID as the message id.
 */
public class WordSpout implements IRichSpout {
    boolean isDistributed;
    SpoutOutputCollector collector;
    // Single reusable RNG; previously a new Random was allocated on every
    // nextTuple() call, which is wasteful in a hot emit loop.
    private final Random rand = new Random();

    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };

    public WordSpout() {
        this(true);
    }

    public WordSpout(boolean isDistributed) {
        this.isDistributed = isDistributed;
    }

    public boolean isDistributed() {
        return this.isDistributed;
    }

    @SuppressWarnings("rawtypes")
    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void close() {

    }

    /** Emits one randomly chosen word with a fresh UUID message id. */
    @Override
    public void nextTuple() {
        final String word = words[rand.nextInt(words.length)];
        this.collector.emit(new Values(word), UUID.randomUUID());
        // Yield so this tight emit loop does not starve other threads.
        Thread.yield();
    }

    @Override
    public void ack(Object msgId) {

    }

    @Override
    public void fail(Object msgId) {

    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }

    @Override
    public void activate() {
    }

    @Override
    public void deactivate() {
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
apache-2.0
jackyglony/msgpack-java
msgpack-core/src/main/java/org/msgpack/core/MessageIntegerOverflowException.java
1614
// // MessagePack for Java // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.msgpack.core; import java.math.BigInteger; /** * This error is thrown when the user tries to read an integer value * using a smaller types. For example, calling MessageUnpacker.unpackInt() for an integer value * that is larger than Integer.MAX_VALUE will cause this exception. */ public class MessageIntegerOverflowException extends MessageTypeException { private final BigInteger bigInteger; public MessageIntegerOverflowException(BigInteger bigInteger) { super(); this.bigInteger = bigInteger; } public MessageIntegerOverflowException(long value) { this(BigInteger.valueOf(value)); } public MessageIntegerOverflowException(String message, BigInteger bigInteger) { super(message); this.bigInteger = bigInteger; } public BigInteger getBigInteger() { return bigInteger; } @Override public String getMessage() { return bigInteger.toString(); } }
apache-2.0
daidong/DominoHBase
hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
4805
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.balancer;

import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test the load balancer that is created by default.
 */
@Category(MediumTests.class)
public class TestDefaultLoadBalancer extends BalancerTestBase {
  private static final Log LOG = LogFactory.getLog(TestDefaultLoadBalancer.class);

  private static LoadBalancer loadBalancer;

  @BeforeClass
  public static void beforeAllTests() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // slop of 0 forces a perfectly balanced result, which is what
    // assertClusterAsBalanced checks for below
    conf.set("hbase.regions.slop", "0");
    loadBalancer = new DefaultLoadBalancer();
    loadBalancer.setConf(conf);
  }

  // int[testnum][servernumber] -> numregions
  // Each inner array is one mock cluster: element i is the region count
  // hosted by server i before balancing.
  int[][] clusterStateMocks = new int[][] {
      // 1 node
      new int[] { 0 },
      new int[] { 1 },
      new int[] { 10 },
      // 2 node
      new int[] { 0, 0 },
      new int[] { 2, 0 },
      new int[] { 2, 1 },
      new int[] { 2, 2 },
      new int[] { 2, 3 },
      new int[] { 2, 4 },
      new int[] { 1, 1 },
      new int[] { 0, 1 },
      new int[] { 10, 1 },
      new int[] { 14, 1432 },
      new int[] { 47, 53 },
      // 3 node
      new int[] { 0, 1, 2 },
      new int[] { 1, 2, 3 },
      new int[] { 0, 2, 2 },
      new int[] { 0, 3, 0 },
      new int[] { 0, 4, 0 },
      new int[] { 20, 20, 0 },
      // 4 node
      new int[] { 0, 1, 2, 3 },
      new int[] { 4, 0, 0, 0 },
      new int[] { 5, 0, 0, 0 },
      new int[] { 6, 6, 0, 0 },
      new int[] { 6, 2, 0, 0 },
      new int[] { 6, 1, 0, 0 },
      new int[] { 6, 0, 0, 0 },
      new int[] { 4, 4, 4, 7 },
      new int[] { 4, 4, 4, 8 },
      new int[] { 0, 0, 0, 7 },
      // 5 node
      new int[] { 1, 1, 1, 1, 4 },
      // more nodes
      new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
      new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 10 },
      new int[] { 6, 6, 5, 6, 6, 6, 6, 6, 6, 1 },
      new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 54 },
      new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 55 },
      new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 56 },
      new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 },
      new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 8 },
      new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 9 },
      new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 10 },
      new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 123 },
      new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 155 },
      new int[] { 0, 0, 144, 1, 1, 1, 1, 1123, 133, 138, 12, 1444 },
      new int[] { 0, 0, 144, 1, 0, 4, 1, 1123, 133, 138, 12, 1444 },
      new int[] { 1538, 1392, 1561, 1557, 1535, 1553, 1385, 1542, 1619 }
  };

  /**
   * Test the load balancing algorithm.
   *
   * Invariant is that all servers should be hosting either floor(average) or
   * ceiling(average)
   *
   * @throws Exception
   */
  @Test
  public void testBalanceCluster() throws Exception {

    for (int[] mockCluster : clusterStateMocks) {
      Map<ServerName, List<HRegionInfo>> servers = mockClusterServers(mockCluster);
      List<ServerAndLoad> list = convertToList(servers);
      LOG.info("Mock Cluster : " + printMock(list) + " " + printStats(list));
      // Run the balancer and replay its plans against the mock load.
      List<RegionPlan> plans = loadBalancer.balanceCluster(servers);
      List<ServerAndLoad> balancedCluster = reconcile(list, plans);
      LOG.info("Mock Balance : " + printMock(balancedCluster));
      assertClusterAsBalanced(balancedCluster);
      // Return regions/servers to the BalancerTestBase pools for the next mock.
      for (Map.Entry<ServerName, List<HRegionInfo>> entry : servers.entrySet()) {
        returnRegions(entry.getValue());
        returnServer(entry.getKey());
      }
    }

  }
}
apache-2.0
rmetzger/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/FunctionDescriptorValidator.java
1990
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.descriptors;

import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.ValidationException;

import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

/** Validator for {@link FunctionDescriptor}. */
@Internal
public class FunctionDescriptorValidator implements DescriptorValidator {

    public static final String FROM = "from";
    public static final String FROM_VALUE_CLASS = "class";
    public static final String FROM_VALUE_PYTHON = "python";

    /**
     * Validates the function descriptor properties: the 'from' key must be
     * present and hold one of the supported sources ('class' or 'python'),
     * and the source-specific properties are validated accordingly.
     */
    @Override
    public void validate(DescriptorProperties properties) {
        // 'from' is mandatory — reject early when it is missing.
        if (!properties.containsKey(FROM)) {
            throw new ValidationException("Could not find 'from' property for function.");
        }

        // Per-source validation, dispatched by the value of 'from'.
        Map<String, Consumer<String>> perSourceValidators = new HashMap<>();
        perSourceValidators.put(
                FROM_VALUE_CLASS, unused -> new ClassInstanceValidator().validate(properties));
        perSourceValidators.put(
                FROM_VALUE_PYTHON, unused -> new PythonFunctionValidator().validate(properties));

        properties.validateEnum(FROM, false, perSourceValidators);
    }
}
apache-2.0