repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
manovotn/core
tests-arquillian/src/test/java/org/jboss/weld/tests/ws/interceptors/POJOEndpointImpl.java
1578
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2012, Red Hat Middleware LLC, and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.jboss.weld.tests.ws.interceptors;

import javax.jws.WebService;

/**
 * POJO JAX-WS endpoint used by the surrounding test to verify that an
 * interceptor is applied to web-service invocations.
 *
 * @author <a href="mailto:ropalka@redhat.com">Richard Opalka</a>
 */
@WebService(name = "POJOEndpoint", serviceName="POJOEndpointService", targetNamespace = WebServiceInterceptorTest.TARGET_NAMESPACE)
public class POJOEndpointImpl implements EndpointIface {

    // Presumably set to true by POJOInterceptor when it wraps an invocation
    // — TODO(review): confirm against the interceptor class, not visible here.
    static boolean interceptorCalled;

    /**
     * Echoes {@code message}, appending a marker suffix when the interceptor
     * flag has been raised.
     */
    @POJOInterceptor
    public String echo(final String message) {
        if (interceptorCalled) {
            return message + " (including POJO interceptor)";
        }
        return message;
    }
}
apache-2.0
mdogan/hazelcast
hazelcast/src/main/java/com/hazelcast/internal/services/TransactionalService.java
1157
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hazelcast.internal.services;

import com.hazelcast.spi.annotation.PrivateApi;
import com.hazelcast.transaction.TransactionalObject;
import com.hazelcast.transaction.impl.Transaction;

import java.util.UUID;

/**
 * An interface that can be implemented by SPI services that want to deal with transactions.
 */
@PrivateApi
public interface TransactionalService {

    /**
     * Creates the transactional object with the given name, bound to the
     * supplied transaction.
     *
     * @param name        name of the object to create
     * @param transaction the transaction the object participates in
     * @param <T>         concrete {@link TransactionalObject} subtype returned
     * @return the transactional object
     */
    <T extends TransactionalObject> T createTransactionalObject(String name, Transaction transaction);

    /**
     * Rolls back this service's part of the transaction identified by
     * {@code transactionId}.
     *
     * @param transactionId ID of the transaction to roll back
     */
    void rollbackTransaction(UUID transactionId);
}
apache-2.0
damienmg/bazel
src/main/java/com/google/devtools/build/skyframe/DirtyBuildingState.java
9812
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;

import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.util.GroupedList;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.skyframe.NodeEntry.DirtyState;
import java.util.Collection;
import java.util.Set;

/**
 * State for a node that has been dirtied, and will be checked to see if it needs re-evaluation, and
 * either marked clean or re-evaluated.
 *
 * <p>This class is public only for the benefit of alternative graph implementations outside of the
 * package.
 */
public abstract class DirtyBuildingState {
  /**
   * The state of a dirty node. A node is marked dirty in the DirtyBuildingState constructor, and
   * goes into either the state {@link DirtyState#CHECK_DEPENDENCIES} or {@link
   * DirtyState#NEEDS_REBUILDING}, depending on whether the caller specified that the node was
   * itself changed or not. Never null.
   */
  private DirtyState dirtyState;

  /**
   * The dependencies requested (with group markers) last time the node was built (and below, the
   * value last time the node was built). They will be compared to dependencies requested on this
   * build to check whether this node has changed in {@link NodeEntry#setValue}. If they are null,
   * it means that this node is being built for the first time. See {@link
   * InMemoryNodeEntry#directDeps} for more on dependency group storage.
   *
   * <p>Public only for the use of alternative graph implementations.
   */
  public abstract GroupedList<SkyKey> getLastBuildDirectDeps() throws InterruptedException;

  /**
   * The number of groups of the dependencies requested last time when the node was built.
   *
   * <p>Getting the number of last-built dependencies should not throw {@link InterruptedException}.
   */
  protected abstract int getNumOfGroupsInLastBuildDirectDeps();

  /**
   * The value of the node the last time it was built.
   *
   * <p>Public only for the use of alternative graph implementations.
   */
  public abstract SkyValue getLastBuildValue() throws InterruptedException;

  /**
   * Group of children to be checked next in the process of determining if this entry needs to be
   * re-evaluated. Used by {@link DirtyBuildingState#getNextDirtyDirectDeps} and {@link
   * #signalDepInternal}.
   */
  protected int dirtyDirectDepIndex;

  protected DirtyBuildingState(boolean isChanged) {
    dirtyState = isChanged ? DirtyState.NEEDS_REBUILDING : DirtyState.CHECK_DEPENDENCIES;
    // We need to iterate through the deps to see if they have changed, or to remove them if one
    // has. Initialize the iterating index.
    dirtyDirectDepIndex = 0;
  }

  /** Creates the default implementation, which stores the last-built deps and value in memory. */
  static DirtyBuildingState create(
      boolean isChanged, GroupedList<SkyKey> lastBuildDirectDeps, SkyValue lastBuildValue) {
    return new FullDirtyBuildingState(isChanged, lastBuildDirectDeps, lastBuildValue);
  }

  /** Transitions CHECK_DEPENDENCIES -> NEEDS_REBUILDING; only legal before any dep was checked. */
  final void markChanged() {
    Preconditions.checkState(dirtyState == DirtyState.CHECK_DEPENDENCIES, this);
    Preconditions.checkState(dirtyDirectDepIndex == 0, "Unexpected evaluation: %s", this);
    dirtyState = DirtyState.NEEDS_REBUILDING;
  }

  /** Transitions straight to REBUILDING; only legal once every dep group has been checked. */
  final void forceChanged() {
    Preconditions.checkState(dirtyState == DirtyState.CHECK_DEPENDENCIES, this);
    Preconditions.checkState(getNumOfGroupsInLastBuildDirectDeps() == dirtyDirectDepIndex, this);
    dirtyState = DirtyState.REBUILDING;
  }

  /** True once this node is known to need (or be undergoing) a rebuild. */
  final boolean isChanged() {
    return dirtyState == DirtyState.NEEDS_REBUILDING || dirtyState == DirtyState.REBUILDING;
  }

  private void checkFinishedBuildingWhenAboutToSetValue() {
    Preconditions.checkState(
        dirtyState == DirtyState.VERIFIED_CLEAN || dirtyState == DirtyState.REBUILDING,
        "not done building %s",
        this);
  }

  /**
   * If this node is not yet known to need rebuilding, sets {@link #dirtyState} to {@link
   * DirtyState#NEEDS_REBUILDING} if the child has changed, and {@link DirtyState#VERIFIED_CLEAN} if
   * the child has not changed and this was the last child to be checked (as determined by {@code
   * isReady} and comparing {@link #dirtyDirectDepIndex} and {@link
   * DirtyBuildingState#getNumOfGroupsInLastBuildDirectDeps()}.
   */
  final void signalDepInternal(boolean childChanged, boolean isReady) {
    Preconditions.checkState(
        isChanged() || (dirtyState == DirtyState.CHECK_DEPENDENCIES && dirtyDirectDepIndex > 0),
        "Unexpected not evaluating: %s",
        this);
    if (!isChanged()) {
      // Synchronization isn't needed here because the only caller is NodeEntry, which does it
      // through the synchronized method signalDep(Version).
      if (childChanged) {
        dirtyState = DirtyState.NEEDS_REBUILDING;
      } else if (dirtyState == DirtyState.CHECK_DEPENDENCIES
          && isReady
          && getNumOfGroupsInLastBuildDirectDeps() == dirtyDirectDepIndex) {
        // No other dep already marked this as NEEDS_REBUILDING, no deps outstanding, and this was
        // the last block of deps to be checked.
        dirtyState = DirtyState.VERIFIED_CLEAN;
      }
    }
  }

  /**
   * Returns true if {@code newValue}.equals the value from the last time this node was built.
   * Should only be used by {@link NodeEntry#setValue}.
   *
   * <p>Changes in direct deps do <i>not</i> force this to return false. Only the value is
   * considered.
   */
  final boolean unchangedFromLastBuild(SkyValue newValue) throws InterruptedException {
    checkFinishedBuildingWhenAboutToSetValue();
    // NotComparableSkyValue instances are never considered equal to a previous build.
    return !(newValue instanceof NotComparableSkyValue) && getLastBuildValue().equals(newValue);
  }

  /**
   * Returns true if the deps requested during this evaluation ({@code directDeps}) are exactly
   * those requested the last time this node was built, in the same order.
   */
  final boolean depsUnchangedFromLastBuild(GroupedList<SkyKey> directDeps)
      throws InterruptedException {
    checkFinishedBuildingWhenAboutToSetValue();
    return getLastBuildDirectDeps().equals(directDeps);
  }

  /** True if the node had no direct deps the last time it was built. */
  final boolean noDepsLastBuild() {
    return getNumOfGroupsInLastBuildDirectDeps() == 0;
  }

  /** @see NodeEntry#getDirtyState() */
  final DirtyState getDirtyState() {
    return dirtyState;
  }

  /**
   * Gets the next children to be re-evaluated to see if this dirty node needs to be re-evaluated.
   *
   * <p>See {@link NodeEntry#getNextDirtyDirectDeps}.
   */
  final Collection<SkyKey> getNextDirtyDirectDeps() throws InterruptedException {
    Preconditions.checkState(dirtyState == DirtyState.CHECK_DEPENDENCIES, this);
    Preconditions.checkState(dirtyDirectDepIndex < getNumOfGroupsInLastBuildDirectDeps(), this);
    // Note: post-increment advances the cursor past the group being returned.
    return getLastBuildDirectDeps().get(dirtyDirectDepIndex++);
  }

  /**
   * Returns the remaining direct deps that have not been checked. If {@code preservePosition} is
   * true, this method is non-mutating. If {@code preservePosition} is false, the caller must
   * process the returned set, and so subsequent calls to this method will return the empty set.
   */
  Set<SkyKey> getAllRemainingDirtyDirectDeps(boolean preservePosition) throws InterruptedException {
    ImmutableSet.Builder<SkyKey> result = ImmutableSet.builder();
    for (int ind = dirtyDirectDepIndex; ind < getNumOfGroupsInLastBuildDirectDeps(); ind++) {
      result.addAll(getLastBuildDirectDeps().get(ind));
    }
    if (!preservePosition) {
      // Exhaust the cursor so later calls see nothing remaining.
      dirtyDirectDepIndex = getNumOfGroupsInLastBuildDirectDeps();
    }
    return result.build();
  }

  /** Transitions NEEDS_REBUILDING -> REBUILDING. */
  protected void markRebuilding() {
    Preconditions.checkState(dirtyState == DirtyState.NEEDS_REBUILDING, this);
    dirtyState = DirtyState.REBUILDING;
  }

  protected MoreObjects.ToStringHelper getStringHelper() {
    return MoreObjects.toStringHelper(this)
        .add("dirtyState", dirtyState)
        .add("dirtyDirectDepIndex", dirtyDirectDepIndex);
  }

  @Override
  public String toString() {
    return getStringHelper().toString();
  }

  /** Default implementation keeping the last-built deps and value as in-memory fields. */
  private static class FullDirtyBuildingState extends DirtyBuildingState {
    private final GroupedList<SkyKey> lastBuildDirectDeps;
    private final SkyValue lastBuildValue;

    private FullDirtyBuildingState(
        boolean isChanged, GroupedList<SkyKey> lastBuildDirectDeps, SkyValue lastBuildValue) {
      super(isChanged);
      this.lastBuildDirectDeps = lastBuildDirectDeps;
      // A dirty-not-changed node must have had children; otherwise nothing could have dirtied it.
      Preconditions.checkState(
          isChanged || getNumOfGroupsInLastBuildDirectDeps() > 0,
          "%s is being marked dirty, not changed, but has no children that could have dirtied it",
          this);
      this.lastBuildValue = lastBuildValue;
    }

    @Override
    public SkyValue getLastBuildValue() {
      return lastBuildValue;
    }

    @Override
    public GroupedList<SkyKey> getLastBuildDirectDeps() throws InterruptedException {
      return lastBuildDirectDeps;
    }

    @Override
    protected int getNumOfGroupsInLastBuildDirectDeps() {
      return lastBuildDirectDeps.listSize();
    }

    @Override
    protected MoreObjects.ToStringHelper getStringHelper() {
      return super.getStringHelper()
          .add("lastBuildDirectDeps", lastBuildDirectDeps)
          .add("lastBuildValue", lastBuildValue);
    }
  }
}
apache-2.0
apache/npanday
plugins/netplugins/NPanday.Plugin.SysRef/javabinding/src/main/java/NPanday/Plugin/SysRef/SysRefMojo.java
3822
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package NPanday.Plugin.SysRef;

import npanday.plugin.FieldAnnotation;
import org.apache.maven.artifact.factory.ArtifactFactory;

// NOTE(review): the javadoc @phase/@goal/@parameter/@component tags below are
// Maven plugin metadata read by the plugin tooling — do not edit or remove them.

/**
 * Java binding mojo that delegates execution to the NPanday.Plugin.SysRef .NET plugin.
 *
 * @phase SysRef
 * @goal prepare
 */
public class SysRefMojo extends npanday.plugin.AbstractMojo
{
    /**
     * @parameter expression = "${settings.localRepository}"
     */
    @FieldAnnotation()
    public java.lang.String repository;

    /**
     * @parameter expression = "${project}"
     */
    @FieldAnnotation()
    public org.apache.maven.project.MavenProject mavenProject;

    /**
     * @parameter expression = "${project}"
     */
    private org.apache.maven.project.MavenProject project;

    /**
     * @parameter expression = "${settings.localRepository}"
     */
    private String localRepository;

    /**
     * @parameter expression = "${vendor}"
     */
    private String vendor;

    /**
     * @parameter expression = "${vendorVersion}"
     */
    private String vendorVersion;

    /**
     * @parameter expression = "${frameworkVersion}"
     */
    private String frameworkVersion;

    /**
     * @component
     */
    private npanday.executable.NetPluginExecutableFactory netPluginExecutableFactory;

    /**
     * @component
     */
    private npanday.plugin.PluginContext pluginContext;

    /**
     * @component
     */
    private ArtifactFactory artifactFactory;

    /**
     * @parameter expression="${plugin.version}"
     * @required
     * @readonly
     */
    private String pluginVersion;

    // Identifies the .NET-side mojo artifact that this Java binding resolves.
    public String getMojoArtifactId()
    {
        return "NPanday.Plugin.SysRef";
    }

    public String getMojoGroupId()
    {
        return "org.apache.npanday.plugins";
    }

    // Fully-qualified name of the .NET mojo class to invoke.
    public String getClassName()
    {
        return "NPanday.Plugin.SysRef.SysRefMojo";
    }

    public npanday.plugin.PluginContext getNetPluginContext()
    {
        return pluginContext;
    }

    public npanday.executable.NetPluginExecutableFactory getNetPluginExecutableFactory()
    {
        return netPluginExecutableFactory;
    }

    public org.apache.maven.project.MavenProject getMavenProject()
    {
        return project;
    }

    public String getLocalRepository()
    {
        return localRepository;
    }

    public String getVendorVersion()
    {
        return vendorVersion;
    }

    public String getVendor()
    {
        return vendor;
    }

    public String getFrameworkVersion()
    {
        return frameworkVersion;
    }

    /**
     * The version of the .NET plugin to resolve, will typically match that of the Java wrapper.
     */
    @Override
    protected String getPluginVersion()
    {
        return pluginVersion;
    }

    public ArtifactFactory getArtifactFactory()
    {
        return artifactFactory;
    }
}
apache-2.0
xmlunit/xmlunit
xmlunit-core/src/main/java/org/xmlunit/diff/ComparisonControllers.java
1985
/* This file is licensed to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.xmlunit.diff; /** * Controllers used for the base cases. */ public final class ComparisonControllers { /** * Does not stop the comparison at all. */ public static final ComparisonController Default = new ComparisonController() { @Override public boolean stopDiffing(Difference ignored) { return false; } }; /** * Makes the comparison stop as soon as the first "real" * difference is encountered. */ public static final ComparisonController StopWhenDifferent = new StopComparisonController(ComparisonResult.DIFFERENT); /** * Makes the comparison stop as soon as the first * difference is encountered even if it is similar. */ public static final ComparisonController StopWhenSimilar = new StopComparisonController(ComparisonResult.SIMILAR); private ComparisonControllers() { } private static final class StopComparisonController implements ComparisonController { final ComparisonResult minimumComparisonResult; public StopComparisonController(ComparisonResult minimumComparisonResult) { this.minimumComparisonResult = minimumComparisonResult; } @Override public boolean stopDiffing(Difference d) { return d.getResult().ordinal() >= minimumComparisonResult.ordinal(); } } }
apache-2.0
tufangorel/hazelcast
hazelcast/src/main/java/com/hazelcast/internal/networking/OutboundFrame.java
1907
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.networking; /** * Represents a payload to can be written to a {@link Channel} * * There are different types of OutboundFrame: * <ol> * <li>Packet: for member to member and old-client to member communication</li> * <li>TextMessage: for memcached and rest communication</li> * <li>ClientMessage: for the new client to member communication</li> * </ol> * * Till so far, all communication over a single connection, will be of a single * Frame-class. E.g. member to member only uses Packets. * * There is no need for an InboundFrame interface. * * @see com.hazelcast.nio.serialization.Data * @see Channel#write(OutboundFrame) */ public interface OutboundFrame { /** * Checks if this Frame is urgent. * * Frames that are urgent, have priority above regular frames. This is useful * to implement System Operations so that they can be send faster than regular * operations; especially when the system is under load you want these operations * have precedence. * * @return true if urgent, false otherwise. */ boolean isUrgent(); /** * Returns the frame length. This includes header and payload size. * * @return The frame length. */ int getFrameLength(); }
apache-2.0
betfair/cougar
cougar-test/cougar-normal-code-tests/src/test/java/com/betfair/cougar/tests/updatedcomponenttests/responsetypes/primitiveresponsetypes/i32/rpc/RPCi32OutOfBoundsResponseTest.java
4249
/*
 * Copyright 2013, The Sporting Exchange Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Originally from UpdatedComponentTests/ResponseTypes/PrimitiveResponseTypes/i32/RPC/RPC_i32OutOfBoundsResponse.xls;
package com.betfair.cougar.tests.updatedcomponenttests.responsetypes.primitiveresponsetypes.i32.rpc;

import com.betfair.testing.utils.cougar.assertions.AssertionUtils;
import com.betfair.testing.utils.cougar.beans.HttpCallBean;
import com.betfair.testing.utils.cougar.beans.HttpResponseBean;
import com.betfair.testing.utils.cougar.helpers.CougarHelpers;
import com.betfair.testing.utils.cougar.manager.AccessLogRequirement;
import com.betfair.testing.utils.cougar.manager.CougarManager;

import org.testng.annotations.Test;

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;

/**
 * Test that the Cougar service returns a correct error when value passed for i32 is out of type bounds (Batched JSON)
 */
public class RPCi32OutOfBoundsResponseTest {
    @Test
    public void doTest() throws Exception {
        // Set up the Http Call Bean to make the request.
        final CougarManager manager = CougarManager.getInstance();
        final HttpCallBean httpCallBean = manager.getNewHttpCallBean("87.248.113.14");
        // Set Cougar Fault Controller attributes.
        manager.setCougarFaultControllerJMXMBeanAttrbiute("DetailedFaults", "false");
        // Use JSON batching for this call.
        httpCallBean.setJSONRPC(true);

        // Build the two batched requests; both echo a value above the i32 range.
        final Map[] batchedRequests = new Map[2];
        for (int i = 0; i < batchedRequests.length; i++) {
            final Map request = new HashMap();
            request.put("method","i32SimpleTypeEcho");
            request.put("params","[21474836470]");
            request.put("id", String.valueOf(i + 1));
            batchedRequests[i] = request;
        }
        httpCallBean.setBatchedRequests(batchedRequests);

        // Capture the current time for the access-log check below.
        final Timestamp requestTime = new Timestamp(System.currentTimeMillis());

        // Make the JSON call to the operation, requesting a JSON response.
        manager.makeRestCougarHTTPCall(httpCallBean, com.betfair.testing.utils.cougar.enums.CougarMessageProtocolRequestTypeEnum.RESTJSON, com.betfair.testing.utils.cougar.enums.CougarMessageContentTypeEnum.JSON);

        // Retrieve the batched response and convert it to a map for comparison
        // (the order of batched responses cannot be relied on).
        final HttpResponseBean responseBean = httpCallBean.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTJSONJSON);
        final Map<String, Object> responseMap = new CougarHelpers().convertBatchedResponseToMap(responseBean);

        AssertionUtils.multiAssertEquals("{\"id\":1,\"error\":{\"message\":\"DSC-0044\",\"code\":-32602},\"jsonrpc\":\"2.0\"}", responseMap.get("response1"));
        AssertionUtils.multiAssertEquals("{\"id\":2,\"error\":{\"message\":\"DSC-0044\",\"code\":-32602},\"jsonrpc\":\"2.0\"}", responseMap.get("response2"));
        AssertionUtils.multiAssertEquals(200, responseMap.get("httpStatusCode"));
        AssertionUtils.multiAssertEquals("OK", responseMap.get("httpStatusText"));

        // Pause the test to allow the logs to be filled
        // generalHelpers.pauseTest(500L);
        // Check the log entries are as expected.
        manager.verifyAccessLogEntriesAfterDate(requestTime, new AccessLogRequirement("87.248.113.14", "/json-rpc", "Ok"));
    }
}
apache-2.0
apache/commons-configuration
src/test/java/org/apache/commons/configuration2/builder/TestDefaultParametersManager.java
9447
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.configuration2.builder;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;

import java.util.Map;

import org.apache.commons.configuration2.builder.fluent.FileBasedBuilderParameters;
import org.apache.commons.configuration2.builder.fluent.Parameters;
import org.apache.commons.configuration2.builder.fluent.PropertiesBuilderParameters;
import org.apache.commons.configuration2.builder.fluent.XMLBuilderParameters;
import org.apache.commons.configuration2.convert.ListDelimiterHandler;
import org.apache.commons.configuration2.tree.ExpressionEngine;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Test class for {@code DefaultParametersManager}.
 *
 */
public class TestDefaultParametersManager {
    /**
     * A test defaults handler implementation for testing the initialization of parameters objects with default values. This
     * class sets some hard-coded default values.
     */
    private static class FileBasedDefaultsHandler implements DefaultParametersHandler<FileBasedBuilderParameters> {
        @Override
        public void initializeDefaults(final FileBasedBuilderParameters parameters) {
            parameters.setThrowExceptionOnMissing(true).setEncoding(DEF_ENCODING).setListDelimiterHandler(listHandler);
        }
    }

    /** Constant for the default encoding. */
    private static final String DEF_ENCODING = "UTF-8";

    /** A test list delimiter handler. */
    private static ListDelimiterHandler listHandler;

    /**
     * Checks whether the expected default values have been set on a parameters object.
     *
     * @param map the map with parameters
     */
    private static void checkDefaultValues(final Map<String, Object> map) {
        assertEquals("Wrong delimiter handler", listHandler, map.get("listDelimiterHandler"));
        assertEquals("Wrong exception flag value", Boolean.TRUE, map.get("throwExceptionOnMissing"));
        final FileBasedBuilderParametersImpl fbparams = FileBasedBuilderParametersImpl.fromParameters(map);
        assertEquals("Wrong encoding", DEF_ENCODING, fbparams.getFileHandler().getEncoding());
    }

    /**
     * Checks that no default values have been set on a parameters object.
     *
     * @param map the map with parameters
     */
    private static void checkNoDefaultValues(final Map<String, Object> map) {
        assertFalse("Got base properties", map.containsKey("throwExceptionOnMissing"));
        final FileBasedBuilderParametersImpl fbParams = FileBasedBuilderParametersImpl.fromParameters(map, true);
        assertNull("Got an encoding", fbParams.getFileHandler().getEncoding());
    }

    // Creates the shared mock list delimiter handler used by the defaults handler above.
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        listHandler = EasyMock.createMock(ListDelimiterHandler.class);
    }

    /** An object for creating new parameters objects. */
    private Parameters parameters;

    /** The manager to be tested. */
    private DefaultParametersManager manager;

    @Before
    public void setUp() throws Exception {
        parameters = new Parameters();
        manager = new DefaultParametersManager();
    }

    /**
     * Tests whether default values are set for newly created parameters objects.
     */
    @Test
    public void testApplyDefaults() {
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, new FileBasedDefaultsHandler());
        final FileBasedBuilderParameters params = parameters.fileBased();
        manager.initializeParameters(params);
        final Map<String, Object> map = params.getParameters();
        checkDefaultValues(map);
    }

    /**
     * Tests whether multiple handlers can be registered for the same classes and whether they are called in the correct
     * order.
     */
    @Test
    public void testApplyDefaultsMultipleHandlers() {
        final ExpressionEngine engine = EasyMock.createMock(ExpressionEngine.class);
        manager.registerDefaultsHandler(XMLBuilderParameters.class, parameters -> parameters.setThrowExceptionOnMissing(false)
            .setListDelimiterHandler(EasyMock.createMock(ListDelimiterHandler.class)).setExpressionEngine(engine));
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, new FileBasedDefaultsHandler());
        final XMLBuilderParameters params = parameters.xml();
        manager.initializeParameters(params);
        final Map<String, Object> map = params.getParameters();
        // The later FileBasedDefaultsHandler must win for the shared properties.
        checkDefaultValues(map);
        assertSame("Expression engine not set", engine, map.get("expressionEngine"));
    }

    /**
     * Tests whether default values are also applied when a sub parameters class is created.
     */
    @Test
    public void testApplyDefaultsOnSubClass() {
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, new FileBasedDefaultsHandler());
        final XMLBuilderParameters params = parameters.xml();
        manager.initializeParameters(params);
        final Map<String, Object> map = params.getParameters();
        checkDefaultValues(map);
    }

    /**
     * Tests that default values are only applied if the start class provided at registration time matches.
     */
    @Test
    public void testApplyDefaultsStartClass() {
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, new FileBasedDefaultsHandler(), XMLBuilderParameters.class);
        final XMLBuilderParameters paramsXml = parameters.xml();
        manager.initializeParameters(paramsXml);
        Map<String, Object> map = paramsXml.getParameters();
        checkDefaultValues(map);
        // Properties parameters are outside the registered start class: no defaults expected.
        final PropertiesBuilderParameters paramsProps = parameters.properties();
        manager.initializeParameters(paramsProps);
        map = paramsProps.getParameters();
        checkNoDefaultValues(map);
    }

    /**
     * Tests whether initializeParameters() ignores null input. (We can only test that no exception is thrown.)
     */
    @Test
    public void testInitializeParametersNull() {
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, new FileBasedDefaultsHandler());
        manager.initializeParameters(null);
    }

    /**
     * Tries to register a default handler without a class.
     */
    @Test(expected = IllegalArgumentException.class)
    public void testRegisterDefaultsHandlerNoClass() {
        manager.registerDefaultsHandler(null, new FileBasedDefaultsHandler());
    }

    /**
     * Tries to register a null default handler.
     */
    @Test(expected = IllegalArgumentException.class)
    public void testRegisterDefaultsHandlerNoHandler() {
        manager.registerDefaultsHandler(BasicBuilderProperties.class, null);
    }

    /**
     * Tests whether all occurrences of a given defaults handler can be removed.
     */
    @Test
    public void testUnregisterDefaultsHandlerAll() {
        final FileBasedDefaultsHandler handler = new FileBasedDefaultsHandler();
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, handler, XMLBuilderParameters.class);
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, handler, PropertiesBuilderParameters.class);
        manager.unregisterDefaultsHandler(handler);
        final XMLBuilderParameters paramsXml = parameters.xml();
        manager.initializeParameters(paramsXml);
        checkNoDefaultValues(paramsXml.getParameters());
        final PropertiesBuilderParameters paramsProps = parameters.properties();
        manager.initializeParameters(paramsProps);
        checkNoDefaultValues(paramsProps.getParameters());
    }

    /**
     * Tests whether a specific occurrence of a defaults handler can be removed.
     */
    @Test
    public void testUnregisterDefaultsHandlerSpecific() {
        final FileBasedDefaultsHandler handler = new FileBasedDefaultsHandler();
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, handler, XMLBuilderParameters.class);
        manager.registerDefaultsHandler(FileBasedBuilderParameters.class, handler, PropertiesBuilderParameters.class);
        manager.unregisterDefaultsHandler(handler, PropertiesBuilderParameters.class);
        // Only the Properties registration was removed; XML should still get defaults.
        final XMLBuilderParameters paramsXml = parameters.xml();
        manager.initializeParameters(paramsXml);
        checkDefaultValues(paramsXml.getParameters());
        final PropertiesBuilderParameters paramsProps = parameters.properties();
        manager.initializeParameters(paramsProps);
        checkNoDefaultValues(paramsProps.getParameters());
    }
}
apache-2.0
apache/continuum
continuum-core/src/main/java/org/apache/maven/continuum/buildcontroller/DefaultBuildController.java
28838
package org.apache.maven.continuum.buildcontroller; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.continuum.dao.BuildDefinitionDao; import org.apache.continuum.dao.BuildResultDao; import org.apache.continuum.dao.ProjectDao; import org.apache.continuum.dao.ProjectGroupDao; import org.apache.continuum.dao.ProjectScmRootDao; import org.apache.continuum.model.project.ProjectScmRoot; import org.apache.continuum.utils.ContinuumUtils; import org.apache.continuum.utils.build.BuildTrigger; import org.apache.maven.continuum.core.action.AbstractContinuumAction; import org.apache.maven.continuum.core.action.ExecuteBuilderContinuumAction; import org.apache.maven.continuum.execution.ContinuumBuildExecutor; import org.apache.maven.continuum.execution.ContinuumBuildExecutorConstants; import org.apache.maven.continuum.execution.manager.BuildExecutorManager; import org.apache.maven.continuum.model.project.BuildDefinition; import org.apache.maven.continuum.model.project.BuildResult; import org.apache.maven.continuum.model.project.Project; import org.apache.maven.continuum.model.project.ProjectDependency; import org.apache.maven.continuum.model.project.ProjectGroup; import org.apache.maven.continuum.model.scm.ChangeFile; import 
org.apache.maven.continuum.model.scm.ChangeSet; import org.apache.maven.continuum.model.scm.ScmResult; import org.apache.maven.continuum.notification.ContinuumNotificationDispatcher; import org.apache.maven.continuum.project.ContinuumProjectState; import org.apache.maven.continuum.store.ContinuumObjectNotFoundException; import org.apache.maven.continuum.store.ContinuumStoreException; import org.apache.maven.continuum.utils.WorkingDirectoryService; import org.apache.maven.scm.ScmException; import org.apache.maven.scm.repository.ScmRepositoryException; import org.codehaus.plexus.action.ActionManager; import org.codehaus.plexus.action.ActionNotFoundException; import org.codehaus.plexus.component.annotations.Component; import org.codehaus.plexus.component.annotations.Requirement; import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * @author <a href="mailto:trygvis@inamo.no">Trygve Laugst&oslash;l</a> */ @Component( role = org.apache.maven.continuum.buildcontroller.BuildController.class, hint = "default" ) public class DefaultBuildController implements BuildController { private static final Logger log = LoggerFactory.getLogger( DefaultBuildController.class ); @Requirement private BuildDefinitionDao buildDefinitionDao; @Requirement private BuildResultDao buildResultDao; @Requirement private ProjectDao projectDao; @Requirement private ProjectGroupDao projectGroupDao; @Requirement private ProjectScmRootDao projectScmRootDao; @Requirement private ContinuumNotificationDispatcher notifierDispatcher; @Requirement private ActionManager actionManager; @Requirement private WorkingDirectoryService workingDirectoryService; @Requirement private BuildExecutorManager buildExecutorManager; // ---------------------------------------------------------------------- // BuildController Implementation // 
---------------------------------------------------------------------- /** * @param projectId * @param buildDefinitionId * @param buildTrigger * @param scmResult * @throws TaskExecutionException */ public void build( int projectId, int buildDefinitionId, BuildTrigger buildTrigger, ScmResult scmResult ) throws TaskExecutionException { log.info( "Initializing build" ); BuildContext context = initializeBuildContext( projectId, buildDefinitionId, buildTrigger, scmResult ); // ignore this if AlwaysBuild ? if ( !checkScmResult( context ) ) { log.info( "Error updating from SCM, not building" ); return; } log.info( "Starting build of " + context.getProject().getName() ); startBuild( context ); try { checkProjectDependencies( context ); if ( !shouldBuild( context ) ) { return; } Map<String, Object> actionContext = context.getActionContext(); try { performAction( "update-project-from-working-directory", context ); } catch ( TaskExecutionException e ) { updateBuildResult( context, ContinuumUtils.throwableToString( e ) ); //just log the error but don't stop the build from progressing in order not to suppress any build result messages there log.error( "Error executing action update-project-from-working-directory '", e ); } performAction( "execute-builder", context ); performAction( "deploy-artifact", context ); context.setCancelled( ExecuteBuilderContinuumAction.isCancelled( actionContext ) ); String s = AbstractContinuumAction.getBuildId( actionContext, null ); if ( s != null && !context.isCancelled() ) { try { context.setBuildResult( buildResultDao.getBuildResult( Integer.valueOf( s ) ) ); } catch ( NumberFormatException e ) { throw new TaskExecutionException( "Internal error: build id not an integer", e ); } catch ( ContinuumObjectNotFoundException e ) { throw new TaskExecutionException( "Internal error: Cannot find build result", e ); } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error loading build result", e ); } } } finally { endBuild( 
context ); } } /** * Checks if the build should be marked as ERROR and notifies the end of the build. * * @param context * @throws TaskExecutionException */ private void endBuild( BuildContext context ) throws TaskExecutionException { Project project = context.getProject(); try { if ( project.getState() != ContinuumProjectState.NEW && project.getState() != ContinuumProjectState.CHECKEDOUT && project.getState() != ContinuumProjectState.OK && project.getState() != ContinuumProjectState.FAILED && project.getState() != ContinuumProjectState.ERROR && !context.isCancelled() ) { try { String s = AbstractContinuumAction.getBuildId( context.getActionContext(), null ); if ( s != null ) { BuildResult buildResult = buildResultDao.getBuildResult( Integer.valueOf( s ) ); project.setState( buildResult.getState() ); projectDao.updateProject( project ); } } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error storing the project", e ); } } } finally { if ( !context.isCancelled() ) { notifierDispatcher.buildComplete( project, context.getBuildDefinition(), context.getBuildResult() ); } } } private void updateBuildResult( BuildContext context, String error ) throws TaskExecutionException { BuildResult build = context.getBuildResult(); if ( build == null ) { build = makeAndStoreBuildResult( context, error ); } else { updateBuildResult( build, context ); build.setError( error ); try { buildResultDao.updateBuildResult( build ); build = buildResultDao.getBuildResult( build.getId() ); context.setBuildResult( build ); } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error updating build result", e ); } } context.getProject().setState( build.getState() ); try { projectDao.updateProject( context.getProject() ); } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error updating project", e ); } } private void updateBuildResult( BuildResult build, BuildContext context ) { if ( build.getScmResult() == null && 
context.getScmResult() != null ) { build.setScmResult( context.getScmResult() ); } if ( build.getModifiedDependencies() == null && context.getModifiedDependencies() != null ) { build.setModifiedDependencies( context.getModifiedDependencies() ); } } private void startBuild( BuildContext context ) throws TaskExecutionException { Project project = context.getProject(); project.setOldState( project.getState() ); project.setState( ContinuumProjectState.BUILDING ); try { projectDao.updateProject( project ); } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error persisting project", e ); } notifierDispatcher.buildStarted( project, context.getBuildDefinition() ); } /** * Initializes a BuildContext for the build. * * @param projectId * @param buildDefinitionId * @param buildTrigger * @param scmResult * @return * @throws TaskExecutionException */ @SuppressWarnings( "unchecked" ) protected BuildContext initializeBuildContext( int projectId, int buildDefinitionId, BuildTrigger buildTrigger, ScmResult scmResult ) throws TaskExecutionException { BuildContext context = new BuildContext(); context.setStartTime( System.currentTimeMillis() ); Map actionContext = context.getActionContext(); try { Project project = projectDao.getProject( projectId ); context.setProject( project ); BuildDefinition buildDefinition = buildDefinitionDao.getBuildDefinition( buildDefinitionId ); BuildTrigger newBuildTrigger = buildTrigger; if ( newBuildTrigger.getTrigger() == ContinuumProjectState.TRIGGER_SCHEDULED ) { newBuildTrigger.setTriggeredBy( buildDefinition.getSchedule().getName() ); } context.setBuildTrigger( newBuildTrigger ); context.setBuildDefinition( buildDefinition ); BuildResult oldBuildResult = buildResultDao.getLatestBuildResultForBuildDefinition( projectId, buildDefinitionId ); context.setOldBuildResult( oldBuildResult ); context.setScmResult( scmResult ); // CONTINUUM-2193 ProjectGroup projectGroup = project.getProjectGroup(); List<ProjectScmRoot> scmRoots = 
projectScmRootDao.getProjectScmRootByProjectGroup( projectGroup.getId() ); String projectScmUrl = project.getScmUrl(); String projectScmRootAddress = ""; for ( ProjectScmRoot projectScmRoot : scmRoots ) { projectScmRootAddress = projectScmRoot.getScmRootAddress(); if ( projectScmUrl.startsWith( projectScmRoot.getScmRootAddress() ) ) { AbstractContinuumAction.setProjectScmRootUrl( actionContext, projectScmRoot.getScmRootAddress() ); break; } } if ( project.isCheckedOutInSingleDirectory() ) { List<Project> projectsInGroup = projectGroupDao.getProjectGroupWithProjects( projectGroup.getId() ).getProjects(); List<Project> projectsWithCommonScmRoot = new ArrayList<Project>(); for ( Project projectInGroup : projectsInGroup ) { if ( projectInGroup.getScmUrl().startsWith( projectScmRootAddress ) ) { projectsWithCommonScmRoot.add( projectInGroup ); } } AbstractContinuumAction.setListOfProjectsInGroupWithCommonScmRoot( actionContext, projectsWithCommonScmRoot ); } // CONTINUUM-1871 olamy if continuum is killed during building oldBuildResult will have a endTime 0 // this means all changes since the project has been loaded in continuum will be in memory // now we will load all BuildResult with an Id bigger or equals than the oldBuildResult one //if ( oldBuildResult != null ) //{ // context.setOldScmResult( // getOldScmResults( projectId, oldBuildResult.getBuildNumber(), oldBuildResult.getEndTime() ) ); //} } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error initializing the build context", e ); } // Map<String, Object> actionContext = context.getActionContext(); AbstractContinuumAction.setProjectId( actionContext, projectId ); AbstractContinuumAction.setProject( actionContext, context.getProject() ); AbstractContinuumAction.setBuildDefinitionId( actionContext, buildDefinitionId ); AbstractContinuumAction.setBuildDefinition( actionContext, context.getBuildDefinition() ); AbstractContinuumAction.setBuildTrigger( actionContext, buildTrigger ); 
AbstractContinuumAction.setScmResult( actionContext, context.getScmResult() ); if ( context.getOldBuildResult() != null ) { AbstractContinuumAction.setOldBuildId( actionContext, context.getOldBuildResult().getId() ); } return context; } private void performAction( String actionName, BuildContext context ) throws TaskExecutionException { String error; TaskExecutionException exception; try { log.info( "Performing action " + actionName ); actionManager.lookup( actionName ).execute( context.getActionContext() ); return; } catch ( ActionNotFoundException e ) { error = ContinuumUtils.throwableToString( e ); exception = new TaskExecutionException( "Error looking up action '" + actionName + "'", e ); } catch ( ScmRepositoryException e ) { error = getValidationMessages( e ) + "\n" + ContinuumUtils.throwableToString( e ); exception = new TaskExecutionException( "SCM error while executing '" + actionName + "'", e ); } catch ( ScmException e ) { error = ContinuumUtils.throwableToString( e ); exception = new TaskExecutionException( "SCM error while executing '" + actionName + "'", e ); } catch ( Exception e ) { exception = new TaskExecutionException( "Error executing action '" + actionName + "'", e ); error = ContinuumUtils.throwableToString( exception ); } // TODO: clean this up. We catch the original exception from the action, and then update the buildresult // for it - we need to because of the specialized error message for SCM. // If updating the buildresult fails, log the previous error and throw the new one. // If updating the buildresult succeeds, throw the original exception. The build result should NOT // be updated again - a TaskExecutionException is final, no further action should be taken upon it. 
try { updateBuildResult( context, error ); } catch ( TaskExecutionException e ) { log.error( "Error updating build result after receiving the following exception: ", exception ); throw e; } throw exception; } protected boolean shouldBuild( BuildContext context ) throws TaskExecutionException { BuildDefinition buildDefinition = context.getBuildDefinition(); if ( buildDefinition.isAlwaysBuild() ) { log.info( "AlwaysBuild configured, building" ); return true; } if ( context.getOldBuildResult() == null ) { log.info( "The project has never been built with the current build definition, building" ); return true; } Project project = context.getProject(); //CONTINUUM-1428 if ( project.getOldState() == ContinuumProjectState.ERROR || context.getOldBuildResult().getState() == ContinuumProjectState.ERROR ) { log.info( "Latest state was 'ERROR', building" ); return true; } if ( context.getBuildTrigger().getTrigger() == ContinuumProjectState.TRIGGER_FORCED ) { log.info( "The project build is forced, building" ); return true; } boolean shouldBuild = false; boolean allChangesUnknown = true; if ( project.getOldState() != ContinuumProjectState.NEW && project.getOldState() != ContinuumProjectState.CHECKEDOUT && context.getBuildTrigger().getTrigger() != ContinuumProjectState.TRIGGER_FORCED && project.getState() != ContinuumProjectState.NEW && project.getState() != ContinuumProjectState.CHECKEDOUT ) { // Check SCM changes if ( context.getScmResult() != null ) { allChangesUnknown = checkAllChangesUnknown( context.getScmResult().getChanges() ); } if ( allChangesUnknown ) { if ( context.getScmResult() != null && !context.getScmResult().getChanges().isEmpty() ) { log.info( "The project was not built because all changes are unknown (maybe local modifications or ignored files not defined in your SCM tool." ); } else { log.info( "The project was not built because no changes were detected in sources since the last build." 
); } } // Check dependencies changes if ( context.getModifiedDependencies() != null && !context.getModifiedDependencies().isEmpty() ) { log.info( "Found dependencies changes, building" ); shouldBuild = true; } } // Check changes if ( !shouldBuild && ( ( !allChangesUnknown && context.getScmResult() != null && !context.getScmResult().getChanges().isEmpty() ) || project.getExecutorId().equals( ContinuumBuildExecutorConstants.MAVEN_TWO_BUILD_EXECUTOR ) ) ) { try { ContinuumBuildExecutor executor = buildExecutorManager.getBuildExecutor( project.getExecutorId() ); Map<String, Object> actionContext = context.getActionContext(); List<Project> projectsWithCommonScmRoot = AbstractContinuumAction.getListOfProjectsInGroupWithCommonScmRoot( actionContext ); String projectScmRootUrl = AbstractContinuumAction.getProjectScmRootUrl( actionContext, project.getScmUrl() ); if ( executor == null ) { log.warn( "No continuum build executor found for project " + project.getId() + " with executor '" + project.getExecutorId() + "'" ); } else if ( context.getScmResult() != null ) { shouldBuild = executor.shouldBuild( context.getScmResult().getChanges(), project, workingDirectoryService.getWorkingDirectory( project, projectScmRootUrl, projectsWithCommonScmRoot ), context.getBuildDefinition() ); } } catch ( Exception e ) { updateBuildResult( context, ContinuumUtils.throwableToString( e ) ); throw new TaskExecutionException( "Can't determine if the project should build or not", e ); } } if ( shouldBuild ) { log.info( "Changes found in the current project, building" ); } else { project.setState( project.getOldState() ); project.setOldState( 0 ); try { projectDao.updateProject( project ); } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error storing project", e ); } log.info( "No changes in the current project, not building" ); } return shouldBuild; } private boolean checkAllChangesUnknown( List<ChangeSet> changes ) { for ( ChangeSet changeSet : changes ) { 
List<ChangeFile> changeFiles = changeSet.getFiles(); for ( ChangeFile changeFile : changeFiles ) { if ( !"unknown".equalsIgnoreCase( changeFile.getStatus() ) ) { return false; } } } return true; } private String getValidationMessages( ScmRepositoryException ex ) { List<String> messages = ex.getValidationMessages(); StringBuffer message = new StringBuffer(); if ( messages != null && !messages.isEmpty() ) { for ( Iterator<String> i = messages.iterator(); i.hasNext(); ) { message.append( i.next() ); if ( i.hasNext() ) { message.append( System.getProperty( "line.separator" ) ); } } } return message.toString(); } protected void checkProjectDependencies( BuildContext context ) { if ( context.getOldBuildResult() == null ) { return; } try { Project project = projectDao.getProjectWithDependencies( context.getProject().getId() ); List<ProjectDependency> dependencies = project.getDependencies(); if ( dependencies == null ) { dependencies = new ArrayList<ProjectDependency>(); } if ( project.getParent() != null ) { dependencies.add( project.getParent() ); } if ( dependencies.isEmpty() ) { return; } List<ProjectDependency> modifiedDependencies = new ArrayList<ProjectDependency>(); for ( ProjectDependency dep : dependencies ) { Project dependencyProject = projectDao.getProject( dep.getGroupId(), dep.getArtifactId(), dep.getVersion() ); if ( dependencyProject != null ) { long nbBuild = buildResultDao.getNbBuildResultsInSuccessForProject( dependencyProject.getId(), context.getOldBuildResult().getEndTime() ); if ( nbBuild > 0 ) { log.debug( "Dependency changed: " + dep.getGroupId() + ":" + dep.getArtifactId() + ":" + dep.getVersion() ); modifiedDependencies.add( dep ); } else { log.debug( "Dependency not changed: " + dep.getGroupId() + ":" + dep.getArtifactId() + ":" + dep.getVersion() ); } } else { log.debug( "Skip non Continuum project: " + dep.getGroupId() + ":" + dep.getArtifactId() + ":" + dep.getVersion() ); } } context.setModifiedDependencies( modifiedDependencies ); 
AbstractContinuumAction.setUpdatedDependencies( context.getActionContext(), modifiedDependencies ); } catch ( ContinuumStoreException e ) { log.warn( "Can't get the project dependencies", e ); } } // ---------------------------------------------------------------------- // // ---------------------------------------------------------------------- private BuildResult makeAndStoreBuildResult( BuildContext context, String error ) throws TaskExecutionException { // Project project, ScmResult scmResult, long startTime, int trigger ) // project, scmResult, startTime, trigger ); BuildResult build = new BuildResult(); build.setState( ContinuumProjectState.ERROR ); build.setTrigger( context.getBuildTrigger().getTrigger() ); build.setUsername( context.getBuildTrigger().getTriggeredBy() ); build.setStartTime( context.getStartTime() ); build.setEndTime( System.currentTimeMillis() ); updateBuildResult( build, context ); build.setScmResult( context.getScmResult() ); build.setBuildDefinition( context.getBuildDefinition() ); if ( error != null ) { build.setError( error ); } try { buildResultDao.addBuildResult( context.getProject(), build ); build = buildResultDao.getBuildResult( build.getId() ); context.setBuildResult( build ); return build; } catch ( ContinuumStoreException e ) { throw new TaskExecutionException( "Error storing build result", e ); } } /** * Check to see if there was a error while checking out/updating the project * * @param context The build context * @return true if scm result is ok * @throws TaskExecutionException */ private boolean checkScmResult( BuildContext context ) throws TaskExecutionException { Project project = context.getProject(); int projectGroupId = project.getProjectGroup().getId(); List<ProjectScmRoot> scmRoots = projectScmRootDao.getProjectScmRootByProjectGroup( projectGroupId ); for ( ProjectScmRoot projectScmRoot : scmRoots ) { if ( project.getScmUrl().startsWith( projectScmRoot.getScmRootAddress() ) ) { if ( projectScmRoot.getState() == 
ContinuumProjectState.UPDATED ) { return true; } break; } } return false; } }
apache-2.0
RussellSpitzer/incubator-tinkerpop
gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/structure/io/Storage.java
5268
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.structure.io;

import org.apache.tinkerpop.gremlin.process.computer.KeyValue;
import org.apache.tinkerpop.gremlin.structure.Vertex;

import java.util.Iterator;
import java.util.List;

/**
 * Storage is a standard API that providers can implement to allow "file-system"-based access to data sources.
 * The methods provided by Storage are similar in form and behavior to standard Linux operating system commands.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public interface Storage {

    /**
     * List all the data sources in the root directory.
     *
     * @return the data sources in the root directory
     */
    public List<String> ls();

    /**
     * List all the data sources at the specified location.
     *
     * @param location a location
     * @return the data sources at the specified location
     */
    public List<String> ls(final String location);

    /**
     * Recursively copy all the data sources from the source location to the target location.
     *
     * @param sourceLocation the source location
     * @param targetLocation the target location
     * @return whether data sources were copied
     */
    public boolean cp(final String sourceLocation, final String targetLocation);

    /**
     * Determine whether the specified location has a data source.
     *
     * @param location a location to check
     * @return whether that location has a data source.
     */
    public boolean exists(final String location);

    /**
     * Recursively remove the data source at the specified location.
     *
     * @param location the location of the data source
     * @return whether a data source was removed.
     */
    public boolean rm(final String location);

    /**
     * Get a string representation of all the lines at the data source location
     * (delegates to {@link #head(String, int)} with {@code Integer.MAX_VALUE}).
     *
     * @param location the data source location
     * @return an iterator of lines
     */
    public default Iterator<String> head(final String location) {
        return this.head(location, Integer.MAX_VALUE);
    }

    /**
     * Get a string representation of the specified number of lines at the data source location.
     *
     * @param location   the data source location
     * @param totalLines the total number of lines to retrieve
     * @return an iterator of lines.
     */
    public Iterator<String> head(final String location, final int totalLines);

    /**
     * Get the vertices at the specified graph location.
     *
     * @param location    the location of the graph (or the root location and search will be made)
     * @param parserClass the class of the parser that understands the graph format
     * @param totalLines  the total number of lines of the graph to return (i.e. an upper bound on the vertices yielded)
     * @return an iterator of vertices.
     */
    public Iterator<Vertex> head(final String location, final Class parserClass, final int totalLines);

    /**
     * Get all the vertices at the specified graph location
     * (delegates to {@link #head(String, Class, int)} with {@code Integer.MAX_VALUE}).
     *
     * @param location    the location of the graph (or the root location and search will be made)
     * @param parserClass the class of the parser that understands the graph format
     * @return an iterator of vertices.
     */
    public default Iterator<Vertex> head(final String location, final Class parserClass) {
        return this.head(location, parserClass, Integer.MAX_VALUE);
    }

    /**
     * Get the {@link KeyValue} data at the specified memory location.
     *
     * @param location    the root location of the data
     * @param memoryKey   the memory key
     * @param parserClass the class of the parser that understands the memory format
     * @param totalLines  the total number of key-values to return
     * @return an iterator of key-values.
     */
    public <K, V> Iterator<KeyValue<K, V>> head(final String location, final String memoryKey, final Class parserClass, final int totalLines);

    /**
     * Get all the {@link KeyValue} data at the specified memory location
     * (delegates to {@link #head(String, String, Class, int)} with {@code Integer.MAX_VALUE}).
     *
     * @param location    the root location of the data
     * @param memoryKey   the memory key
     * @param parserClass the class of the parser that understands the memory format
     * @return an iterator of key-values.
     */
    public default <K, V> Iterator<KeyValue<K, V>> head(final String location, final String memoryKey, final Class parserClass) {
        return this.head(location, memoryKey, parserClass, Integer.MAX_VALUE);
    }
}
apache-2.0
kierarad/gocd
server/src/test-integration/java/com/thoughtworks/go/server/transaction/GoCDSqlSessionDaoSupportTest.java
3962
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.transaction; import com.thoughtworks.go.config.GoConfigDao; import com.thoughtworks.go.domain.User; import com.thoughtworks.go.server.cache.GoCache; import com.thoughtworks.go.server.dao.DatabaseAccessHelper; import com.thoughtworks.go.server.dao.UserDao; import com.thoughtworks.go.util.GoConfigFileHelper; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:/applicationContext-global.xml", "classpath:/applicationContext-dataLocalAccess.xml", "classpath:/testPropertyConfigurer.xml", "classpath:/spring-all-servlet.xml", }) public class GoCDSqlSessionDaoSupportTest { @Autowired private GoConfigDao goConfigDao; @Autowired private GoCache goCache; @Autowired private DatabaseAccessHelper dbHelper; @Autowired private TransactionTemplate transactionTemplate; @Autowired private UserDao userDao; private GoConfigFileHelper configHelper = new GoConfigFileHelper(); private TransactionCacheAssertionUtil assertionUtil; @Before public void setUp() throws 
Exception { assertionUtil = new TransactionCacheAssertionUtil(goCache, transactionTemplate); configHelper.usingCruiseConfigDao(goConfigDao); configHelper.onSetUp(); dbHelper.onSetUp(); goCache.clear(); } @After public void tearDown() throws Exception { dbHelper.onTearDown(); configHelper.onTearDown(); } @Test public void shouldOptOutOfCacheServing_forInsert() { assertionUtil.assertCacheBehaviourInTxn(new TransactionCacheAssertionUtil.DoInTxn() { @Override public void invoke() { userDao.saveOrUpdate(new User("loser", "Massive Loser", "boozer@loser.com")); } }); assertThat(userDao.findUser("loser").getEmail(), is("boozer@loser.com")); } @Test public void shouldNotOptOutOfCacheServing_whenQueryingObjects() { final User loser = new User("loser"); userDao.saveOrUpdate(loser); final User[] loadedUser = new User[1]; assertThat(assertionUtil.doInTxnWithCachePut(new TransactionCacheAssertionUtil.DoInTxn() { @Override public void invoke() { loadedUser[0] = userDao.findUser(loser.getName()); } }), is("boozer")); assertThat(loadedUser[0].getName(), is("loser")); } @Test public void shouldNotOptOutOfCacheServing_whenQueryingList() { final User loser = new User("loser"); userDao.saveOrUpdate(loser); final User[] loadedUser = new User[1]; assertThat(assertionUtil.doInTxnWithCachePut(new TransactionCacheAssertionUtil.DoInTxn() { @Override public void invoke() { loadedUser[0] = userDao.allUsers().get(0); } }), is("boozer")); assertThat(loadedUser[0].getName(), is("loser")); } }
apache-2.0
mpollmeier/tinkerpop3
giraph-gremlin/src/main/java/com/tinkerpop/gremlin/giraph/process/computer/GiraphMap.java
2276
package com.tinkerpop.gremlin.giraph.process.computer; import com.tinkerpop.gremlin.giraph.process.computer.util.GremlinWritable; import com.tinkerpop.gremlin.giraph.process.computer.util.MapReduceHelper; import com.tinkerpop.gremlin.process.computer.MapReduce; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Mapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; /** * @author Marko A. Rodriguez (http://markorodriguez.com) */ public final class GiraphMap extends Mapper<NullWritable, GiraphComputeVertex, GremlinWritable, GremlinWritable> { private static final Logger LOGGER = LoggerFactory.getLogger(GiraphMap.class); private MapReduce mapReduce; private GiraphMap() { } @Override public void setup(final Mapper<NullWritable, GiraphComputeVertex, GremlinWritable, GremlinWritable>.Context context) { this.mapReduce = MapReduceHelper.getMapReduce(context.getConfiguration()); } @Override public void map(final NullWritable key, final GiraphComputeVertex value, final Mapper<NullWritable, GiraphComputeVertex, GremlinWritable, GremlinWritable>.Context context) throws IOException, InterruptedException { this.mapReduce.map(value.getBaseVertex(), new GiraphMapEmitter<>(context)); } public static class GiraphMapEmitter<K, V> implements MapReduce.MapEmitter<K, V> { final Mapper<NullWritable, GiraphComputeVertex, GremlinWritable, GremlinWritable>.Context context; final GremlinWritable<K> keyWritable = new GremlinWritable<>(); final GremlinWritable<V> valueWritable = new GremlinWritable<>(); public GiraphMapEmitter(final Mapper<NullWritable, GiraphComputeVertex, GremlinWritable, GremlinWritable>.Context context) { this.context = context; } @Override public void emit(final K key, final V value) { this.keyWritable.set(key); this.valueWritable.set(value); try { this.context.write(this.keyWritable, this.valueWritable); } catch (final Exception e) { LOGGER.error(e.getMessage()); throw new 
IllegalStateException(e.getMessage(), e); } } } }
apache-2.0
epheatt/spark-solr
src/main/java/com/lucidworks/spark/util/ScalaUtil.java
9914
package com.lucidworks.spark.util; import org.apache.log4j.Logger; import org.apache.spark.mllib.linalg.Matrices; import org.apache.spark.mllib.linalg.Vectors; import org.apache.spark.sql.types.ArrayType; import org.apache.spark.sql.types.Decimal; import org.apache.spark.sql.types.StructField; import org.apache.spark.sql.types.StructType; import java.io.Serializable; import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; public class ScalaUtil implements Serializable { public static Logger log = Logger.getLogger(ScalaUtil.class); public static String optionalParam(scala.collection.immutable.Map<String,String> config, String param, String defaultValue) { scala.Option<String> opt = config.get(param); String val = (opt != null && !opt.isEmpty()) ? (String)opt.get() : null; return (val == null || val.trim().isEmpty()) ? defaultValue : val; } public static String requiredParam(scala.collection.immutable.Map<String,String> config, String param) { String val = optionalParam(config, param, null); if (val == null) throw new IllegalArgumentException(param+" parameter is required!"); return val; } public static Object getArrayToString(org.apache.spark.sql.types.DataType dataType, Object value) { if (dataType.typeName().equals("array")) { org.apache.spark.sql.types.ArrayType a = (org.apache.spark.sql.types.ArrayType) dataType; org.apache.spark.sql.types.DataType e = a.elementType(); int arraysize = 0; Object[] ab1 = new Object[arraysize]; if (value instanceof scala.collection.mutable.WrappedArray) { scala.collection.mutable.WrappedArray ab = (scala.collection.mutable.WrappedArray) value; arraysize = ab.size(); ab1 = new Object[ab.size()]; ab.deep().copyToArray(ab1); } if (value instanceof scala.collection.mutable.ArrayBuffer) { scala.collection.mutable.ArrayBuffer ab = (scala.collection.mutable.ArrayBuffer) value; arraysize = ab.size(); //ab1 = new 
Object[ab.size()]; ab1 = ab.array(); } Object[] d; if (arraysize > 0) { d = new Object[arraysize]; for (int i = 0; i < ab1.length; i++) { if (e.typeName().equals("array")) { d[i] = getArrayToString(e, ab1[i]); } else { d[i] = ab1[i]; } } } else { d = new String[]{}; } return Arrays.toString(d); } return ""; } public static String getArraySchema(org.apache.spark.sql.types.DataType dType) { if (((org.apache.spark.sql.types.ArrayType) dType).elementType().typeName().equals("array")) { return dType.typeName() + ":" + getArraySchema(((org.apache.spark.sql.types.ArrayType) dType).elementType()); } else { return dType.typeName() + ":" + ((org.apache.spark.sql.types.ArrayType) dType).elementType().typeName(); } } public static String getFieldTypeMapping(StructType s, String fieldName) { scala.collection.Iterator x = s.iterator(); while (x.hasNext()) { StructField f = (StructField) x.next(); if (f.name().equals(fieldName) && !f.dataType().typeName().toString().toLowerCase().equals("struct")) { if (f.dataType().typeName().toLowerCase().equals("array")) { if (((ArrayType) f.dataType()).elementType().typeName().toLowerCase().equals("array")) { return (f.dataType().typeName() + ":" + (getFieldTypeMapping((ArrayType) (((ArrayType) f.dataType()).elementType()), fieldName))); } else { return (f.dataType().typeName() + ":" + ((ArrayType) f.dataType()).elementType().typeName()); } } else { return f.dataType().typeName(); } } else { if (f.dataType().typeName().toString().toLowerCase().equals("struct")) { String fieldType = getFieldTypeMapping((StructType) f.dataType(), fieldName); if (!fieldType.equals("")) { return fieldType; } } } } return ""; } public static String getFieldTypeMapping(ArrayType d, String fieldName) { if (d.elementType().typeName().toLowerCase().equals("array")) { getFieldTypeMapping((ArrayType) d.elementType(), fieldName); } return (d.typeName() + ":" + d.elementType().typeName()); } public static Integer convertToInteger(String s) { return Integer.parseInt(s); } 
public static Double convertToDouble(String s) { return Double.parseDouble(s); } public static Float convertToFloat(String s) { return Float.parseFloat(s); } public static Short convertToShort(String s) { return Short.parseShort(s); } public static Long convertToLong(String s) { return Long.parseLong(s); } public static Decimal convertToDecimal(String s) { return Decimal.apply(s); } public static Boolean convertToBoolean(String s) { return Boolean.parseBoolean(s); } public static Timestamp convertToTimestamp(String s){ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss.SSS"); Timestamp timestamp = null; try { timestamp = new Timestamp(dateFormat.parse(s).getTime()); } catch (ParseException e) { e.printStackTrace(); } return timestamp; } public static Date convertToDate(String s){ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss.SSS"); Date date = null; try { date = dateFormat.parse(s); } catch (ParseException e) { e.printStackTrace(); } return date; } public static org.apache.spark.mllib.linalg.Vector convertToVector(String s) { return Vectors.parse(s); } public static org.apache.spark.mllib.linalg.Matrix convertToMatrix(String s) { String[] data = s.split(":"); String dataArray = data[2]; String[] items = dataArray.replaceFirst("\\[", "").substring(0,dataArray.replaceFirst("\\[", "").lastIndexOf("]")).split(","); double[] doubleArray = new double[items.length]; for (int i = 0; i<items.length; i++) { doubleArray[i] = Double.parseDouble(items[i]); } return Matrices.dense(Integer.parseInt(data[0]), Integer.parseInt(data[1]), doubleArray); } public static Object[] getArrayFromString(String type, String s, int fromIdx, ArrayList<Object[]> ret) { if (type.contains(":") && type.split(":")[1].equals("array")) { fromIdx = type.indexOf(":", fromIdx); type = type.substring(fromIdx+1, type.length()); String[] items = s.replaceFirst("\\[", "").substring(0,s.replaceFirst("\\[", "").lastIndexOf("]")).split("\\],"); ArrayList<Object[]> 
ret1 = new ArrayList<Object[]>(); for (int i=0; i<items.length; i++) { if (i == items.length -1 ) { ret1.add(getArrayFromString(type, items[i], fromIdx, ret1)); } else { ret1.add(getArrayFromString(type, items[i] + "]", fromIdx, ret1)); } } ret.add(ret1.toArray()); return ret1.toArray(); } String[] items = s.replaceFirst("\\[", "").substring(0,s.replaceFirst("\\[", "").lastIndexOf("]")).split(","); if (type.split(":")[1].equals("integer")) { return convertToIntegerArray(items); } else if (type.split(":")[1].equals("double")) { return convertToDoubleArray(items); } else if (type.split(":")[1].equals("float")) { return convertToFloatArray(items); } else if (type.split(":")[1].equals("short")) { return convertToShortArray(items); } else if (type.split(":")[1].equals("long")) { return convertToLongArray(items); } else { return items; } } public static Integer[] convertToIntegerArray(String[] s) { Integer[] results = new Integer[s.length]; for (int i = 0; i < s.length; i++) { try { results[i] = Integer.parseInt(s[i]); } catch (NumberFormatException nfe) { log.error("Unable to convert String array to integer array"); }; } return results; } public static Double[] convertToDoubleArray(String[] s) { Double[] results = new Double[s.length]; for (int i = 0; i < s.length; i++) { try { results[i] = Double.parseDouble(s[i]); } catch (NumberFormatException nfe) { log.error("Unable to convert String array to double array"); }; } return results; } public static Float[] convertToFloatArray(String[] s) { Float[] results = new Float[s.length]; for (int i = 0; i < s.length; i++) { try { results[i] = Float.parseFloat(s[i]); } catch (NumberFormatException nfe) { log.error("Unable to convert String array to float array"); }; } return results; } public static Short[] convertToShortArray(String[] s) { Short[] results = new Short[s.length]; for (int i = 0; i < s.length; i++) { try { results[i] = Short.parseShort(s[i]); } catch (NumberFormatException nfe) { log.error("Unable to convert String 
array to short array"); }; } return results; } public static Long[] convertToLongArray(String[] s) { Long[] results = new Long[s.length]; for (int i = 0; i < s.length; i++) { try { results[i] = Long.parseLong(s[i]); } catch (NumberFormatException nfe) { log.error("Unable to convert string array to long array"); }; } return results; } public static Boolean[] convertToBooleanArray(String[] s) { Boolean[] results = new Boolean[s.length]; for (int i = 0; i < s.length; i++) { try { results[i] = Boolean.parseBoolean(s[i]); } catch (NumberFormatException nfe) { log.error("Unable to convert string array to boolean array"); }; } return results; } }
apache-2.0
LiuJianan/giraphpp-1
target/munged/test/org/apache/giraph/lib/TestTextDoubleDoubleAdjacencyListVertexInputFormat.java
9604
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.giraph.lib;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.giraph.graph.BasicVertex;
import org.apache.giraph.graph.BspUtils;
import org.apache.giraph.graph.Edge;
import org.apache.giraph.graph.EdgeListVertex;
import org.apache.giraph.graph.GiraphJob;
import org.apache.giraph.graph.GraphState;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.junit.Before;
import org.junit.Test;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/**
 * Tests for TextDoubleDoubleAdjacencyListVertexInputFormat: malformed lines,
 * the happy path, a line sanitizer, and a custom field separator. The
 * underlying Hadoop RecordReader is mocked to feed one line per test.
 */
public class TestTextDoubleDoubleAdjacencyListVertexInputFormat {

  // Mocked reader that yields exactly one record (see setUp stubbing).
  private RecordReader<LongWritable, Text> rr;
  private Configuration conf;
  private TaskAttemptContext tac;
  private GraphState<Text, DoubleWritable, DoubleWritable, BooleanWritable> graphState;

  @Before
  public void setUp() throws IOException, InterruptedException {
    rr = mock(RecordReader.class);
    // First nextKeyValue() call returns true (one record), then false (EOF).
    when(rr.nextKeyValue()).thenReturn(true).thenReturn(false);
    conf = new Configuration();
    conf.setClass(GiraphJob.VERTEX_CLASS, DummyVertex.class, BasicVertex.class);
    conf.setClass(GiraphJob.VERTEX_INDEX_CLASS, Text.class, Writable.class);
    conf.setClass(GiraphJob.VERTEX_VALUE_CLASS, DoubleWritable.class, Writable.class);
    graphState = mock(GraphState.class);
    tac = mock(TaskAttemptContext.class);
    when(tac.getConfiguration()).thenReturn(conf);
  }

  @Test
  public void testIndexMustHaveValue() throws IOException, InterruptedException {
    // A line with only an index and no vertex value must be rejected.
    String input = "hi";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable> vr =
        new TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable>(rr);
    vr.initialize(null, tac);
    try {
      vr.nextVertex();
      vr.getCurrentVertex();
      fail("Should have thrown an IllegalArgumentException");
    } catch (IllegalArgumentException iae) {
      assertTrue(iae.getMessage().startsWith("Line did not split correctly: "));
    }
  }

  @Test
  public void testEdgesMustHaveValues() throws IOException, InterruptedException {
    // A trailing edge id with no edge value must be rejected.
    String input = "index\t55.66\tindex2";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable> vr =
        new TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable>(rr);
    vr.initialize(null, tac);
    try {
      vr.nextVertex();
      vr.getCurrentVertex();
      fail("Should have thrown an IllegalArgumentException");
    } catch (IllegalArgumentException iae) {
      assertTrue(iae.getMessage().startsWith("Line did not split correctly: "));
    }
  }

  /**
   * Injects a GraphState into a vertex via reflection, since
   * BasicVertex.setGraphState is not publicly accessible from here.
   */
  public static void setGraphState(BasicVertex vertex, GraphState graphState) throws Exception {
    Class<? extends BasicVertex> c = BasicVertex.class;
    Method m = c.getDeclaredMethod("setGraphState", GraphState.class);
    m.setAccessible(true);
    m.invoke(vertex, graphState);
  }

  /**
   * Builds an expected vertex from the given id/value/edges and asserts the
   * actual vertex matches it.
   */
  public static <I extends WritableComparable, V extends Writable, E extends Writable,
      M extends Writable> void assertValidVertex(Configuration conf,
      GraphState<I, V, E, M> graphState,
      BasicVertex<I, V, E, M> actual,
      I expectedId,
      V expectedValue,
      Edge<I, E>... edges) throws Exception {
    BasicVertex<I, V, E, M> expected = BspUtils.createVertex(conf);
    setGraphState(expected, graphState);
    // FIXME! maybe can't work if not instantiated properly
    Map<I, E> edgeMap = Maps.newHashMap();
    for(Edge<I, E> edge : edges) {
      edgeMap.put(edge.getDestVertexId(), edge.getEdgeValue());
    }
    expected.initialize(expectedId, expectedValue, edgeMap, null);
    assertValid(expected, actual);
  }

  /**
   * Asserts two vertices agree on id, value, edge count and (order-insensitive)
   * edge set.
   */
  public static <I extends WritableComparable, V extends Writable, E extends Writable,
      M extends Writable> void assertValid(BasicVertex<I, V, E, M> expected,
      BasicVertex<I, V, E, M> actual) {
    assertEquals(expected.getVertexId(), actual.getVertexId());
    assertEquals(expected.getVertexValue(), actual.getVertexValue());
    assertEquals(expected.getNumEdges(), actual.getNumEdges());
    List<Edge<I, E>> expectedEdges = Lists.newArrayList();
    List<Edge<I, E>> actualEdges = Lists.newArrayList();
    // Iterating a vertex yields its destination vertex ids.
    for(I actualDestId : actual) {
      actualEdges.add(new Edge<I, E>(actualDestId, actual.getEdgeValue(actualDestId)));
    }
    for(I expectedDestId : expected) {
      expectedEdges.add(new Edge<I, E>(expectedDestId, expected.getEdgeValue(expectedDestId)));
    }
    // Sort both sides so edge comparison is independent of iteration order.
    Collections.sort(expectedEdges);
    Collections.sort(actualEdges);
    for(int i = 0; i < expectedEdges.size(); i++) {
      assertEquals(expectedEdges.get(i), actualEdges.get(i));
    }
  }

  @Test
  public void testHappyPath() throws Exception {
    // index \t value \t (edgeId \t edgeValue)*
    String input = "Hi\t0\tCiao\t1.123\tBomdia\t2.234\tOla\t3.345";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable> vr =
        new TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable>(rr);
    vr.initialize(null, tac);
    assertTrue("Should have been able to add a vertex", vr.nextVertex());
    BasicVertex<Text, DoubleWritable, DoubleWritable, BooleanWritable> vertex = vr.getCurrentVertex();
    setGraphState(vertex, graphState);
    assertValidVertex(conf, graphState, vertex,
        new Text("Hi"), new DoubleWritable(0),
        new Edge<Text, DoubleWritable>(new Text("Ciao"), new DoubleWritable(1.123d)),
        new Edge<Text, DoubleWritable>(new Text("Bomdia"), new DoubleWritable(2.234d)),
        new Edge<Text, DoubleWritable>(new Text("Ola"), new DoubleWritable(3.345d)));
    assertEquals(vertex.getNumOutEdges(), 3);
  }

  @Test
  public void testLineSanitizer() throws Exception {
    String input = "Bye\t0.01\tCiao\t1.001\tTchau\t2.0001\tAdios\t3.00001";
    // Sanitizer applied to each raw line before parsing; here it upper-cases.
    AdjacencyListVertexReader.LineSanitizer toUpper =
        new AdjacencyListVertexReader.LineSanitizer() {
          @Override
          public String sanitize(String s) {
            return s.toUpperCase();
          }
        };
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable> vr =
        new TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable>(rr, toUpper);
    vr.initialize(null, tac);
    assertTrue("Should have been able to read vertex", vr.nextVertex());
    BasicVertex<Text, DoubleWritable, DoubleWritable, BooleanWritable> vertex = vr.getCurrentVertex();
    setGraphState(vertex, graphState);
    assertValidVertex(conf, graphState, vertex,
        new Text("BYE"), new DoubleWritable(0.01d),
        new Edge<Text, DoubleWritable>(new Text("CIAO"), new DoubleWritable(1.001d)),
        new Edge<Text, DoubleWritable>(new Text("TCHAU"), new DoubleWritable(2.0001d)),
        new Edge<Text, DoubleWritable>(new Text("ADIOS"), new DoubleWritable(3.00001d)));
    assertEquals(vertex.getNumOutEdges(), 3);
  }

  @Test
  public void testDifferentSeparators() throws Exception {
    // Use ':' instead of the default tab as field separator.
    String input = "alpha:42:beta:99";
    when(rr.getCurrentValue()).thenReturn(new Text(input));
    conf.set(AdjacencyListVertexReader.LINE_TOKENIZE_VALUE, ":");
    TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable> vr =
        new TextDoubleDoubleAdjacencyListVertexInputFormat.VertexReader<BooleanWritable>(rr);
    vr.initialize(null, tac);
    assertTrue("Should have been able to read vertex", vr.nextVertex());
    BasicVertex<Text, DoubleWritable, DoubleWritable, BooleanWritable> vertex = vr.getCurrentVertex();
    setGraphState(vertex, graphState);
    assertValidVertex(conf, graphState, vertex,
        new Text("alpha"), new DoubleWritable(42d),
        new Edge<Text, DoubleWritable>(new Text("beta"), new DoubleWritable(99d)));
    assertEquals(vertex.getNumOutEdges(), 1);
  }

  /** Minimal vertex implementation so the input format can instantiate one. */
  public static class DummyVertex
      extends EdgeListVertex<Text, DoubleWritable, DoubleWritable, BooleanWritable> {
    @Override
    public void compute(Iterator<BooleanWritable> msgIterator) throws IOException {
      // ignore
    }
  }
}
apache-2.0
dustinstanley/katharsis-framework
katharsis-spring/src/test/java/io/katharsis/spring/domain/repository/TaskToProjectRepository.java
1118
package io.katharsis.spring.domain.repository;

import io.katharsis.queryParams.QueryParams;
import io.katharsis.repository.RelationshipRepository;
import io.katharsis.spring.domain.model.Project;
import io.katharsis.spring.domain.model.Task;
import org.springframework.stereotype.Component;

/**
 * Stub relationship repository wiring {@link Task} to {@link Project} for the
 * Spring integration tests. Mutating operations are intentional no-ops and
 * lookups return {@code null}; the tests only need this repository to exist
 * as a discoverable bean.
 */
@Component
public class TaskToProjectRepository implements RelationshipRepository<Task, Long, Project, Long> {

    @Override
    public void setRelation(Task task, Long projectId, String fieldName) {
        // intentional no-op: stub has no backing store
    }

    @Override
    public void setRelations(Task task, Iterable<Long> projectId, String fieldName) {
        // intentional no-op
    }

    @Override
    public void addRelations(Task source, Iterable<Long> targetIds, String fieldName) {
        // intentional no-op
    }

    @Override
    public void removeRelations(Task source, Iterable<Long> targetIds, String fieldName) {
        // intentional no-op
    }

    @Override
    public Project findOneTarget(Long sourceId, String fieldName, QueryParams requestParams) {
        // No backing store: nothing to find.
        return null;
    }

    @Override
    public Iterable<Project> findManyTargets(Long sourceId, String fieldName, QueryParams requestParams) {
        // No backing store: nothing to find.
        return null;
    }
}
apache-2.0
apurtell/hadoop
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/resourcemanager/MockAMLauncher.java
4477
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.sls.resourcemanager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEvent; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl; import org.apache.hadoop.yarn.sls.SLSRunner; import org.apache.hadoop.yarn.sls.appmaster.AMSimulator; import java.util.Map; public class 
MockAMLauncher extends ApplicationMasterLauncher implements EventHandler<AMLauncherEvent> { private static final Logger LOG = LoggerFactory.getLogger( MockAMLauncher.class); private Map<ApplicationId, AMSimulator> appIdAMSim; SLSRunner se; public MockAMLauncher(SLSRunner se, RMContext rmContext, Map<ApplicationId, AMSimulator> appIdAMSim) { super(rmContext); this.appIdAMSim = appIdAMSim; this.se = se; } @Override protected void serviceInit(Configuration conf) throws Exception { // Do nothing } @Override protected void serviceStart() throws Exception { // Do nothing } @Override protected void serviceStop() throws Exception { // Do nothing } private void setupAMRMToken(RMAppAttempt appAttempt) { // Setup AMRMToken Token<AMRMTokenIdentifier> amrmToken = super.context.getAMRMTokenSecretManager().createAndGetAMRMToken( appAttempt.getAppAttemptId()); ((RMAppAttemptImpl) appAttempt).setAMRMToken(amrmToken); } @Override @SuppressWarnings("unchecked") public void handle(AMLauncherEvent event) { ApplicationId appId = event.getAppAttempt().getAppAttemptId().getApplicationId(); // find AMSimulator AMSimulator ams = appIdAMSim.get(appId); if (ams == null) { throw new YarnRuntimeException( "Didn't find any AMSimulator for applicationId=" + appId); } Container amContainer = event.getAppAttempt().getMasterContainer(); switch (event.getType()) { case LAUNCH: try { setupAMRMToken(event.getAppAttempt()); // Notify RMAppAttempt to change state super.context.getDispatcher().getEventHandler().handle( new RMAppAttemptEvent(event.getAppAttempt().getAppAttemptId(), RMAppAttemptEventType.LAUNCHED)); ams.notifyAMContainerLaunched( event.getAppAttempt().getMasterContainer()); LOG.info("Notify AM launcher launched:" + amContainer.getId()); se.getNmMap().get(amContainer.getNodeId()) .addNewContainer(amContainer, -1); return; } catch (Exception e) { throw new YarnRuntimeException(e); } case CLEANUP: se.getNmMap().get(amContainer.getNodeId()) .cleanupContainer(amContainer.getId()); break; 
default: throw new YarnRuntimeException( "Didn't find any AMSimulator for applicationId=" + appId); } } }
apache-2.0
apache/solr
solr/solrj/src/java/org/apache/solr/client/solrj/request/QueryRequest.java
2240
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.client.solrj.request; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.SolrParams; /** * @since solr 1.3 */ public class QueryRequest extends SolrRequest<QueryResponse> { private SolrParams query; public QueryRequest() { super(METHOD.GET, null); } public QueryRequest(SolrParams q) { super(METHOD.GET, null); query = q; } public QueryRequest(SolrParams q, METHOD method) { super(method, null); query = q; } /** Use the params 'QT' parameter if it exists */ @Override public String getPath() { String qt = query == null ? 
null : query.get(CommonParams.QT); if (qt == null) { qt = super.getPath(); } if (qt != null && qt.startsWith("/")) { return qt; } return "/select"; } // --------------------------------------------------------------------------------- // --------------------------------------------------------------------------------- @Override protected QueryResponse createResponse(SolrClient client) { return new QueryResponse(client); } @Override public SolrParams getParams() { return query; } @Override public String getRequestType() { return SolrRequestType.QUERY.toString(); } }
apache-2.0
sterlp/training
spring-redis/src/test/java/org/sterl/training/redis/TestObjectHashMapper.java
3841
package org.sterl.training.redis;

import java.time.Instant;
import java.util.Map;
import java.util.Map.Entry;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.redis.connection.ReactiveRedisConnectionFactory;
import org.springframework.data.redis.core.ReactiveHashOperations;
import org.springframework.data.redis.core.ReactiveRedisTemplate;
import org.springframework.data.redis.hash.ObjectHashMapper;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.SerializationException;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import org.springframework.lang.Nullable;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.sterl.training.redis.model.CachedEntity;
import org.sterl.training.redis.service.model.Person;

import reactor.core.publisher.Mono;

/**
 * Exploratory tests for Spring Data Redis {@link ObjectHashMapper}: mapping an
 * object to a byte[]/byte[] hash, and round-tripping an entity through a
 * reactive Redis hash using pass-through serializers.
 */
@ExtendWith(SpringExtension.class)
@SpringBootTest
class TestObjectHashMapper {

    @Autowired ReactiveRedisConnectionFactory factory;
    @Autowired ObjectHashMapper objectMapper;

    @Test
    void test() {
        // Demonstrates the raw byte[]->byte[] hash representation produced
        // by ObjectHashMapper; output is inspected manually, not asserted.
        Person p = new Person("1", "Foo");
        final Map<byte[], byte[]> hash = objectMapper.toHash(p);
        System.out.println(hash);
    }

    // noop Serializer, copied from spring — passes byte arrays through
    // unchanged so ObjectHashMapper's raw bytes reach Redis untouched.
    enum ByteArrayRedisSerializer implements RedisSerializer<byte[]> {
        INSTANCE;

        @Nullable
        @Override
        public byte[] serialize(@Nullable byte[] bytes) throws SerializationException {
            return bytes;
        }

        @Nullable
        @Override
        public byte[] deserialize(@Nullable byte[] bytes) throws SerializationException {
            return bytes;
        }
    }

    @Test
    void testReactiveObjectMapper() throws Exception {
        final StringRedisSerializer keySerializer = new StringRedisSerializer();
        // the key itself can be a string
        RedisSerializationContext.RedisSerializationContextBuilder<String, byte[]> builder =
                RedisSerializationContext.newSerializationContext(keySerializer);
        // ensure we don't convert values and keys for the hash, as ObjectHashMapper
        // uses byte arrays
        RedisSerializationContext<String, byte[]> context = builder
                .value(ByteArrayRedisSerializer.INSTANCE)
                .hashKey(ByteArrayRedisSerializer.INSTANCE)
                .hashValue(ByteArrayRedisSerializer.INSTANCE)
                .build();
        // build the redis reactive template and get the opsForHash
        final ReactiveRedisTemplate<String, byte[]> template =
                new ReactiveRedisTemplate<>(factory, context);
        final ReactiveHashOperations<String, byte[], byte[]> opsForHash = template.opsForHash();

        final CachedEntity entry = CachedEntity.builder()
                .id("Muster_id")
                .payload("Muster")
                .cacheTime(Instant.now())
                .build();

        // save
        Mono.just(entry)
                // emulate spring data <hash-name>:<id> as key
                .flatMap(e -> opsForHash.putAll("entity:" + e.getId(), objectMapper.toHash(e)))
                .block();

        // NOTE(review): subscribe() is asynchronous and nothing waits for it,
        // so the test method may return before the read completes — consider
        // collecting with block() if this ever needs to assert the result.
        opsForHash.entries("entity:Muster_id")
                .collectMap(Entry::getKey, Entry::getValue)
                .subscribe(m -> {
                    // class not needed, as it written into the hash by the ObjectHashMapper
                    // but if set not cast is needed
                    final Object result = objectMapper.fromHash(m);
                    System.out.println(result);
                });
    }
}
apache-2.0
apache/jena
jena-arq/src/main/java/org/apache/jena/sparql/algebra/Algebra.java
6574
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jena.sparql.algebra;

import java.util.Iterator;

import org.apache.jena.graph.Graph;
import org.apache.jena.graph.Node;
import org.apache.jena.query.ARQ;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.Query;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.shared.PrefixMapping;
import org.apache.jena.sparql.algebra.optimize.Optimize;
import org.apache.jena.sparql.core.DatasetGraph;
import org.apache.jena.sparql.core.DatasetGraphFactory;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.Plan;
import org.apache.jena.sparql.engine.QueryEngineFactory;
import org.apache.jena.sparql.engine.QueryEngineRegistry;
import org.apache.jena.sparql.engine.QueryIterator;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingBuilder;
import org.apache.jena.sparql.engine.binding.BindingRoot;
import org.apache.jena.sparql.engine.ref.QueryEngineRef;
import org.apache.jena.sparql.sse.Item;
import org.apache.jena.sparql.sse.SSE;
import org.apache.jena.sparql.sse.builders.BuilderOp;
import org.apache.jena.sparql.syntax.Element;
import org.apache.jena.sparql.util.Context;

/**
 * Utilities to produce SPARQL algebra: compiling queries/patterns to algebra
 * expressions, optimizing and quad-transforming them, parsing/reading SSE
 * forms, executing algebra directly, and the SPARQL binding merge rule.
 */
public class Algebra
{
    // -------- Optimize

    /** Apply static transformations to a query to optimize it */
    public static Op optimize(Op op) {
        return optimize(op, null);
    }

    /** Apply static transformations to a query to optimize it */
    public static Op optimize(Op op, Context context) {
        if ( context == null )
            context = ARQ.getContext();
        // Call-through to somewhere to manage all the optimizations
        if ( op == null )
            return null;
        return Optimize.optimize(op, context);
    }

    // -------- Compile

    /** Compile a query - pattern and modifiers. Null-safe: null in, null out. */
    public static Op compile(Query query) {
        if ( query == null )
            return null;
        return new AlgebraGenerator().compile(query);
    }

    /** Compile a pattern. Null-safe: null in, null out. */
    public static Op compile(Element elt) {
        if ( elt == null )
            return null;
        return new AlgebraGenerator().compile(elt);
    }

    /** Turn an algebra expression into quadpattern form */
    public static Op toQuadForm(Op op) {
        return AlgebraQuad.quadize(op);
    }

    /** Turn an algebra expression into quadblock form */
    public static Op toQuadBlockForm(Op op) {
        return AlgebraQuad.quadizeBlock(op);
    }

    // -------- SSE uses these operations ...

    /** Read an algebra expression from a file of SSE. */
    static public Op read(String filename) {
        Item item = SSE.readFile(filename);
        return parse(item);
    }

    /** Parse an algebra expression from an SSE string. */
    static public Op parse(String string) {
        Item item = SSE.parse(string);
        return parse(item);
    }

    /** Parse an algebra expression from an SSE string, resolving prefixes with pmap. */
    static public Op parse(String string, PrefixMapping pmap) {
        Item item = SSE.parse(string, pmap);
        return parse(item);
    }

    /** Build an algebra expression from a parsed SSE item. */
    static public Op parse(Item item) {
        Op op = BuilderOp.build(item);
        return op;
    }

    // -------- Execute

    /** Execute an algebra expression over a dataset using the standard engine. */
    static public QueryIterator exec(Op op, Dataset ds) {
        return exec(op, ds.asDatasetGraph());
    }

    /** Execute an algebra expression over a model's graph. */
    static public QueryIterator exec(Op op, Model model) {
        return exec(op, model.getGraph());
    }

    /** Execute an algebra expression over a single graph (wrapped as a dataset). */
    static public QueryIterator exec(Op op, Graph graph) {
        return exec(op, DatasetGraphFactory.wrap(graph));
    }

    /** Execute an algebra expression, selecting the engine via the registry. */
    static public QueryIterator exec(Op op, DatasetGraph ds) {
        QueryEngineFactory f = QueryEngineRegistry.findFactory(op, ds, null);
        Plan plan = f.create(op, ds, BindingRoot.create(), null);
        return plan.iterator();
    }

    // Reference engine

    /** Execute with the reference engine (simple, specification-oriented evaluation). */
    static public QueryIterator execRef(Op op, Dataset ds) {
        return execRef(op, ds.asDatasetGraph());
    }

    /** Execute with the reference engine over a model's graph. */
    static public QueryIterator execRef(Op op, Model model) {
        return execRef(op, model.getGraph());
    }

    /** Execute with the reference engine over a single graph. */
    static public QueryIterator execRef(Op op, Graph graph) {
        return execRef(op, DatasetGraphFactory.wrap(graph));
    }

    /** Execute with the reference engine over a dataset graph. */
    static public QueryIterator execRef(Op op, DatasetGraph dsg) {
        QueryEngineRef qe = new QueryEngineRef(op, dsg, ARQ.getContext().copy());
        return qe.getPlan().iterator();
    }

    /**
     * This is the SPARQL merge rule.
     * Returns the merged binding, or null when the two bindings are not
     * compatible (some shared variable bound to different terms).
     */
    public static Binding merge(Binding bindingLeft, Binding bindingRight) {
        // Test to see if compatible: Iterate over variables in left
        boolean matches = compatible(bindingLeft, bindingRight);

        if ( !matches )
            return null;

        // If compatible, merge. Iterate over variables in right but not in left.
        BindingBuilder b = Binding.builder(bindingLeft);
        for ( Iterator<Var> vIter = bindingRight.vars() ; vIter.hasNext() ; ) {
            Var v = vIter.next();
            Node n = bindingRight.get(v);
            if ( !bindingLeft.contains(v) )
                b.add(v, n);
        }
        return b.build();
    }

    /**
     * Two bindings are compatible when every variable they share is bound to
     * the same term (unbound on either side never conflicts).
     */
    public static boolean compatible(Binding bindingLeft, Binding bindingRight) {
        // Test to see if compatible: Iterate over variables in left
        for ( Iterator<Var> vIter = bindingLeft.vars() ; vIter.hasNext() ; ) {
            Var v = vIter.next();
            Node nLeft = bindingLeft.get(v);
            Node nRight = bindingRight.get(v);

            if ( nRight != null && !nRight.equals(nLeft) )
                return false;
        }
        return true;
    }

    /** True when the two bindings share no variables at all. */
    public static boolean disjoint(Binding binding1, Binding binding2) {
        Iterator<Var> iterVar1 = binding1.vars();
        for ( ; iterVar1.hasNext() ; ) {
            Var v = iterVar1.next();
            if ( binding2.contains(v) )
                return false;
        }
        return true;
    }
}
apache-2.0
ChinmaySKulkarni/hbase
hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
16123
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.hadoop.hbase.io.compress; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.io.util.BlockIOUtils; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionInputStream; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.compress.DoNotPool; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Compression related stuff. * Copied from hadoop-3315 tfile. 
*/ @InterfaceAudience.Private public final class Compression { private static final Logger LOG = LoggerFactory.getLogger(Compression.class); /** * Prevent the instantiation of class. */ private Compression() { super(); } static class FinishOnFlushCompressionStream extends FilterOutputStream { public FinishOnFlushCompressionStream(CompressionOutputStream cout) { super(cout); } @Override public void write(byte b[], int off, int len) throws IOException { out.write(b, off, len); } @Override public void flush() throws IOException { CompressionOutputStream cout = (CompressionOutputStream) out; cout.finish(); cout.flush(); cout.resetState(); } } /** * Returns the classloader to load the Codec class from. */ private static ClassLoader getClassLoaderForCodec() { ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (cl == null) { cl = Compression.class.getClassLoader(); } if (cl == null) { cl = ClassLoader.getSystemClassLoader(); } if (cl == null) { throw new RuntimeException("A ClassLoader to load the Codec could not be determined"); } return cl; } /** * Compression algorithms. The ordinal of these cannot change or else you * risk breaking all existing HFiles out there. Even the ones that are * not compressed! (They use the NONE algorithm) */ @edu.umd.cs.findbugs.annotations.SuppressWarnings( value="SE_TRANSIENT_FIELD_NOT_RESTORED", justification="We are not serializing so doesn't apply (not sure why transient though)") @InterfaceAudience.Public public static enum Algorithm { LZO("lzo") { // Use base type to avoid compile-time dependencies. 
private volatile transient CompressionCodec lzoCodec; private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (lzoCodec == null) { synchronized (lock) { if (lzoCodec == null) { lzoCodec = buildCodec(conf); } } } return lzoCodec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = getClassLoaderForCodec().loadClass("com.hadoop.compression.lzo.LzoCodec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, new Configuration(conf)); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }, GZ("gz") { private volatile transient GzipCodec codec; private final transient Object lock = new Object(); @Override DefaultCodec getCodec(Configuration conf) { if (codec == null) { synchronized (lock) { if (codec == null) { codec = buildCodec(conf); } } } return codec; } private GzipCodec buildCodec(Configuration conf) { GzipCodec gzcodec = new ReusableStreamGzipCodec(); gzcodec.setConf(new Configuration(conf)); return gzcodec; } }, NONE("none") { @Override DefaultCodec getCodec(Configuration conf) { return null; } @Override public synchronized InputStream createDecompressionStream( InputStream downStream, Decompressor decompressor, int downStreamBufferSize) throws IOException { if (downStreamBufferSize > 0) { return new BufferedInputStream(downStream, downStreamBufferSize); } return downStream; } @Override public synchronized OutputStream createCompressionStream( OutputStream downStream, Compressor compressor, int downStreamBufferSize) throws IOException { if (downStreamBufferSize > 0) { return new BufferedOutputStream(downStream, downStreamBufferSize); } return downStream; } }, SNAPPY("snappy") { // Use base type to avoid compile-time dependencies. 
private volatile transient CompressionCodec snappyCodec; private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (snappyCodec == null) { synchronized (lock) { if (snappyCodec == null) { snappyCodec = buildCodec(conf); } } } return snappyCodec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.SnappyCodec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }, LZ4("lz4") { // Use base type to avoid compile-time dependencies. private volatile transient CompressionCodec lz4Codec; private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (lz4Codec == null) { synchronized (lock) { if (lz4Codec == null) { lz4Codec = buildCodec(conf); } } } return lz4Codec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.Lz4Codec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }, BZIP2("bzip2") { // Use base type to avoid compile-time dependencies. 
private volatile transient CompressionCodec bzipCodec; private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (bzipCodec == null) { synchronized (lock) { if (bzipCodec == null) { bzipCodec = buildCodec(conf); } } } return bzipCodec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.BZip2Codec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }, ZSTD("zstd") { // Use base type to avoid compile-time dependencies. private volatile transient CompressionCodec zStandardCodec; private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (zStandardCodec == null) { synchronized (lock) { if (zStandardCodec == null) { zStandardCodec = buildCodec(conf); } } } return zStandardCodec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.ZStandardCodec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }; private final Configuration conf; private final String compressName; /** data input buffer size to absorb small reads from application. */ private static final int DATA_IBUF_SIZE = 1 * 1024; /** data output buffer size to absorb small writes from application. 
*/ private static final int DATA_OBUF_SIZE = 4 * 1024; Algorithm(String name) { this.conf = new Configuration(); this.conf.setBoolean("io.native.lib.available", true); this.compressName = name; } abstract CompressionCodec getCodec(Configuration conf); public InputStream createDecompressionStream( InputStream downStream, Decompressor decompressor, int downStreamBufferSize) throws IOException { CompressionCodec codec = getCodec(conf); // Set the internal buffer size to read from down stream. if (downStreamBufferSize > 0) { ((Configurable)codec).getConf().setInt("io.file.buffer.size", downStreamBufferSize); } CompressionInputStream cis = codec.createInputStream(downStream, decompressor); BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE); return bis2; } public OutputStream createCompressionStream( OutputStream downStream, Compressor compressor, int downStreamBufferSize) throws IOException { OutputStream bos1 = null; if (downStreamBufferSize > 0) { bos1 = new BufferedOutputStream(downStream, downStreamBufferSize); } else { bos1 = downStream; } CompressionOutputStream cos = createPlainCompressionStream(bos1, compressor); BufferedOutputStream bos2 = new BufferedOutputStream(new FinishOnFlushCompressionStream(cos), DATA_OBUF_SIZE); return bos2; } /** * Creates a compression stream without any additional wrapping into * buffering streams. 
*/ public CompressionOutputStream createPlainCompressionStream( OutputStream downStream, Compressor compressor) throws IOException { CompressionCodec codec = getCodec(conf); ((Configurable)codec).getConf().setInt("io.file.buffer.size", 32 * 1024); return codec.createOutputStream(downStream, compressor); } public Compressor getCompressor() { CompressionCodec codec = getCodec(conf); if (codec != null) { Compressor compressor = CodecPool.getCompressor(codec); if (LOG.isTraceEnabled()) LOG.trace("Retrieved compressor " + compressor + " from pool."); if (compressor != null) { if (compressor.finished()) { // Somebody returns the compressor to CodecPool but is still using it. LOG.warn("Compressor obtained from CodecPool is already finished()"); } compressor.reset(); } return compressor; } return null; } public void returnCompressor(Compressor compressor) { if (compressor != null) { if (LOG.isTraceEnabled()) LOG.trace("Returning compressor " + compressor + " to pool."); CodecPool.returnCompressor(compressor); } } public Decompressor getDecompressor() { CompressionCodec codec = getCodec(conf); if (codec != null) { Decompressor decompressor = CodecPool.getDecompressor(codec); if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor + " from pool."); if (decompressor != null) { if (decompressor.finished()) { // Somebody returns the decompressor to CodecPool but is still using it. 
LOG.warn("Deompressor obtained from CodecPool is already finished()"); } decompressor.reset(); } return decompressor; } return null; } public void returnDecompressor(Decompressor decompressor) { if (decompressor != null) { if (LOG.isTraceEnabled()) LOG.trace("Returning decompressor " + decompressor + " to pool."); CodecPool.returnDecompressor(decompressor); if (decompressor.getClass().isAnnotationPresent(DoNotPool.class)) { if (LOG.isTraceEnabled()) LOG.trace("Ending decompressor " + decompressor); decompressor.end(); } } } public String getName() { return compressName; } } public static Algorithm getCompressionAlgorithmByName(String compressName) { Algorithm[] algos = Algorithm.class.getEnumConstants(); for (Algorithm a : algos) { if (a.getName().equals(compressName)) { return a; } } throw new IllegalArgumentException("Unsupported compression algorithm name: " + compressName); } /** * Get names of supported compression algorithms. * * @return Array of strings, each represents a supported compression * algorithm. Currently, the following compression algorithms are supported. */ public static String[] getSupportedAlgorithms() { Algorithm[] algos = Algorithm.class.getEnumConstants(); String[] ret = new String[algos.length]; int i = 0; for (Algorithm a : algos) { ret[i++] = a.getName(); } return ret; } /** * Decompresses data from the given stream using the configured compression algorithm. It will * throw an exception if the dest buffer does not have enough space to hold the decompressed data. 
* @param dest the output buffer * @param bufferedBoundedStream a stream to read compressed data from, bounded to the exact amount * of compressed data * @param uncompressedSize uncompressed data size, header not included * @param compressAlgo compression algorithm used * @throws IOException if any IO error happen */ public static void decompress(ByteBuff dest, InputStream bufferedBoundedStream, int uncompressedSize, Compression.Algorithm compressAlgo) throws IOException { if (dest.remaining() < uncompressedSize) { throw new IllegalArgumentException("Output buffer does not have enough space to hold " + uncompressedSize + " decompressed bytes, available: " + dest.remaining()); } Decompressor decompressor = null; try { decompressor = compressAlgo.getDecompressor(); try (InputStream is = compressAlgo.createDecompressionStream(bufferedBoundedStream, decompressor, 0)) { BlockIOUtils.readFullyWithHeapBuffer(is, dest, uncompressedSize); } } finally { if (decompressor != null) { compressAlgo.returnDecompressor(decompressor); } } } }
apache-2.0
EvilMcJerkface/atlasdb
atlasdb-client/src/main/java/com/palantir/atlasdb/persister/JsonNodePersister.java
1738
/* * (c) Copyright 2018 Palantir Technologies Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.persister; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.palantir.atlasdb.annotation.Reusable; import com.palantir.atlasdb.persist.api.Persister; import com.palantir.common.base.Throwables; import java.io.IOException; @Reusable public class JsonNodePersister implements Persister<JsonNode> { static final ObjectMapper mapper = new ObjectMapper(); @Override public byte[] persistToBytes(JsonNode jsonNode) { try { return mapper.writeValueAsBytes(jsonNode); } catch (JsonProcessingException e) { throw Throwables.throwUncheckedException(e); } } @Override public JsonNode hydrateFromBytes(byte[] input) { try { return mapper.readTree(input); } catch (IOException e) { throw Throwables.throwUncheckedException(e); } } @Override public Class<JsonNode> getPersistingClassType() { return JsonNode.class; } }
apache-2.0
runfriends/PurchaseNear
purchasenear/purchasenear-user/src/main/java/cn/purchasenear/v1/user/regist/_UserServiceOperationsNC.java
626
// ********************************************************************** // // Copyright (c) 2003-2013 ZeroC, Inc. All rights reserved. // // This copy of Ice is licensed to you under the terms described in the // ICE_LICENSE file included in this distribution. // // ********************************************************************** // // Ice version 3.5.1 // // <auto-generated> // // Generated from file `UserService.ice' // // Warning: do not edit this file. // // </auto-generated> // package cn.purchasenear.v1.user.regist; public interface _UserServiceOperationsNC { ActivateInfo regist(ClientInfo info); }
apache-2.0
RyanTech/okhttp
okhttp-protocols/src/test/java/com/squareup/okhttp/internal/spdy/MockSpdyPeer.java
8654
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.okhttp.internal.spdy; import com.squareup.okhttp.internal.Util; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.ServerSocket; import java.net.Socket; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import static java.util.concurrent.Executors.defaultThreadFactory; /** Replays prerecorded outgoing frames and records incoming frames. 
*/ public final class MockSpdyPeer implements Closeable { private int frameCount = 0; private final boolean client; private final ByteArrayOutputStream bytesOut = new ByteArrayOutputStream(); private final FrameWriter frameWriter; private final List<OutFrame> outFrames = new ArrayList<OutFrame>(); private final BlockingQueue<InFrame> inFrames = new LinkedBlockingQueue<InFrame>(); private int port; private final Executor executor = Executors.newCachedThreadPool(defaultThreadFactory()); private ServerSocket serverSocket; private Socket socket; public MockSpdyPeer(boolean client) { this.client = client; this.frameWriter = Variant.SPDY3.newWriter(bytesOut, client); } public void acceptFrame() { frameCount++; } public FrameWriter sendFrame() { outFrames.add(new OutFrame(frameCount++, bytesOut.size(), Integer.MAX_VALUE)); return frameWriter; } /** * Sends a manually-constructed frame. This is useful to test frames that * won't be generated naturally. */ public void sendFrame(byte[] frame) throws IOException { outFrames.add(new OutFrame(frameCount++, bytesOut.size(), Integer.MAX_VALUE)); bytesOut.write(frame); } /** * Sends a frame, truncated to {@code truncateToLength} bytes. This is only * useful for testing error handling as the truncated frame will be * malformed. 
*/ public FrameWriter sendTruncatedFrame(int truncateToLength) { outFrames.add(new OutFrame(frameCount++, bytesOut.size(), truncateToLength)); return frameWriter; } public int getPort() { return port; } public InFrame takeFrame() throws InterruptedException { return inFrames.take(); } public void play() throws IOException { if (serverSocket != null) throw new IllegalStateException(); serverSocket = new ServerSocket(0); serverSocket.setReuseAddress(true); this.port = serverSocket.getLocalPort(); executor.execute(new Runnable() { @Override public void run() { try { readAndWriteFrames(); } catch (IOException e) { throw new RuntimeException(e); } } }); } private void readAndWriteFrames() throws IOException { if (socket != null) throw new IllegalStateException(); socket = serverSocket.accept(); OutputStream out = socket.getOutputStream(); InputStream in = socket.getInputStream(); FrameReader reader = Variant.SPDY3.newReader(in, client); Iterator<OutFrame> outFramesIterator = outFrames.iterator(); byte[] outBytes = bytesOut.toByteArray(); OutFrame nextOutFrame = null; for (int i = 0; i < frameCount; i++) { if (nextOutFrame == null && outFramesIterator.hasNext()) { nextOutFrame = outFramesIterator.next(); } if (nextOutFrame != null && nextOutFrame.sequence == i) { int start = nextOutFrame.start; int truncateToLength = nextOutFrame.truncateToLength; int end; if (outFramesIterator.hasNext()) { nextOutFrame = outFramesIterator.next(); end = nextOutFrame.start; } else { end = outBytes.length; } // write a frame int length = Math.min(end - start, truncateToLength); out.write(outBytes, start, length); } else { // read a frame InFrame inFrame = new InFrame(i, reader); reader.nextFrame(inFrame); inFrames.add(inFrame); } } Util.closeQuietly(socket); } public Socket openSocket() throws IOException { return new Socket("localhost", port); } @Override public void close() throws IOException { Socket socket = this.socket; if (socket != null) { socket.close(); this.socket = null; } 
ServerSocket serverSocket = this.serverSocket; if (serverSocket != null) { serverSocket.close(); this.serverSocket = null; } } private static class OutFrame { private final int sequence; private final int start; private final int truncateToLength; private OutFrame(int sequence, int start, int truncateToLength) { this.sequence = sequence; this.start = start; this.truncateToLength = truncateToLength; } } public static class InFrame implements FrameReader.Handler { public final int sequence; public final FrameReader reader; public int type = -1; public boolean clearPrevious; public boolean outFinished; public boolean inFinished; public int streamId; public int associatedStreamId; public int priority; public ErrorCode errorCode; public int deltaWindowSize; public List<String> nameValueBlock; public byte[] data; public Settings settings; public HeadersMode headersMode; public InFrame(int sequence, FrameReader reader) { this.sequence = sequence; this.reader = reader; } @Override public void settings(boolean clearPrevious, Settings settings) { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_SETTINGS; this.clearPrevious = clearPrevious; this.settings = settings; } @Override public void headers(boolean outFinished, boolean inFinished, int streamId, int associatedStreamId, int priority, List<String> nameValueBlock, HeadersMode headersMode) { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_HEADERS; this.outFinished = outFinished; this.inFinished = inFinished; this.streamId = streamId; this.associatedStreamId = associatedStreamId; this.priority = priority; this.nameValueBlock = nameValueBlock; this.headersMode = headersMode; } @Override public void data(boolean inFinished, int streamId, InputStream in, int length) throws IOException { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_DATA; this.inFinished = inFinished; this.streamId = streamId; this.data = new byte[length]; 
Util.readFully(in, this.data); } @Override public void rstStream(int streamId, ErrorCode errorCode) { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_RST_STREAM; this.streamId = streamId; this.errorCode = errorCode; } @Override public void ping(boolean reply, int payload1, int payload2) { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_PING; this.streamId = payload1; } @Override public void noop() { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_NOOP; } @Override public void goAway(int lastGoodStreamId, ErrorCode errorCode) { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_GOAWAY; this.streamId = lastGoodStreamId; this.errorCode = errorCode; } @Override public void windowUpdate(int streamId, int deltaWindowSize, boolean endFlowControl) { if (this.type != -1) throw new IllegalStateException(); this.type = Spdy3.TYPE_WINDOW_UPDATE; this.streamId = streamId; this.deltaWindowSize = deltaWindowSize; } @Override public void priority(int streamId, int priority) { throw new UnsupportedOperationException(); } } }
apache-2.0
fredsa/playn-samples
paymentsdemo/core/src/main/java/playn/sample/payments/java/PaymentsDemoJava.java
983
/** * Copyright 2010 The PlayN Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package playn.sample.payments.java; import playn.core.PlayN; import playn.java.JavaPlatform; import playn.sample.payments.core.PaymentsDemo; public class PaymentsDemoJava { public static void main(String[] args) { JavaPlatform platform = JavaPlatform.register(); platform.assets().setPathPrefix("playn/sample/payments/resources"); PlayN.run(new PaymentsDemo()); } }
apache-2.0
AndroidX/constraintlayout
constraintlayout/core/src/main/java/androidx/constraintlayout/core/widgets/analyzer/GuidelineReference.java
4574
/* * Copyright (C) 2019 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.constraintlayout.core.widgets.analyzer; import androidx.constraintlayout.core.widgets.ConstraintWidget; import androidx.constraintlayout.core.widgets.Guideline; class GuidelineReference extends WidgetRun { GuidelineReference(ConstraintWidget widget) { super(widget); widget.mHorizontalRun.clear(); widget.mVerticalRun.clear(); this.orientation = ((Guideline) widget).getOrientation(); } @Override void clear() { start.clear(); } @Override void reset() { start.resolved = false; end.resolved = false; } @Override boolean supportsWrapComputation() { return false; } private void addDependency( androidx.constraintlayout.core.widgets.analyzer.DependencyNode node) { start.mDependencies.add(node); node.mTargets.add(start); } @Override public void update(Dependency dependency) { if (!start.readyToSolve) { return; } if (start.resolved) { return; } // ready to solve, centering. 
androidx.constraintlayout.core.widgets.analyzer.DependencyNode startTarget = start.mTargets.get(0); Guideline guideline = (Guideline) mWidget; int startPos = (int) (0.5f + startTarget.value * guideline.getRelativePercent()); start.resolve(startPos); } @Override void apply() { Guideline guideline = (Guideline) mWidget; int relativeBegin = guideline.getRelativeBegin(); int relativeEnd = guideline.getRelativeEnd(); float percent = guideline.getRelativePercent(); if (guideline.getOrientation() == ConstraintWidget.VERTICAL) { if (relativeBegin != -1) { start.mTargets.add(mWidget.mParent.mHorizontalRun.start); mWidget.mParent.mHorizontalRun.start.mDependencies.add(start); start.mMargin = relativeBegin; } else if (relativeEnd != -1) { start.mTargets.add(mWidget.mParent.mHorizontalRun.end); mWidget.mParent.mHorizontalRun.end.mDependencies.add(start); start.mMargin = -relativeEnd; } else { start.delegateToWidgetRun = true; start.mTargets.add(mWidget.mParent.mHorizontalRun.end); mWidget.mParent.mHorizontalRun.end.mDependencies.add(start); } // FIXME -- if we move the DependencyNode directly // in the ConstraintAnchor we'll be good. addDependency(mWidget.mHorizontalRun.start); addDependency(mWidget.mHorizontalRun.end); } else { if (relativeBegin != -1) { start.mTargets.add(mWidget.mParent.mVerticalRun.start); mWidget.mParent.mVerticalRun.start.mDependencies.add(start); start.mMargin = relativeBegin; } else if (relativeEnd != -1) { start.mTargets.add(mWidget.mParent.mVerticalRun.end); mWidget.mParent.mVerticalRun.end.mDependencies.add(start); start.mMargin = -relativeEnd; } else { start.delegateToWidgetRun = true; start.mTargets.add(mWidget.mParent.mVerticalRun.end); mWidget.mParent.mVerticalRun.end.mDependencies.add(start); } // FIXME -- if we move the DependencyNode directly // in the ConstraintAnchor we'll be good. 
addDependency(mWidget.mVerticalRun.start); addDependency(mWidget.mVerticalRun.end); } } @Override public void applyToWidget() { Guideline guideline = (Guideline) mWidget; if (guideline.getOrientation() == ConstraintWidget.VERTICAL) { mWidget.setX(start.value); } else { mWidget.setY(start.value); } } }
apache-2.0
deleidos/digitaledge-platform
webapp-alertsapi/src/main/java/com/deleidos/rtws/webapp/alertsapi/servlet/enunciate/WatchListServiceImpl.java
22400
/** * Apache License * Version 2.0, January 2004 * http://www.apache.org/licenses/ * * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION * * 1. Definitions. * * "License" shall mean the terms and conditions for use, reproduction, * and distribution as defined by Sections 1 through 9 of this document. * * "Licensor" shall mean the copyright owner or entity authorized by * the copyright owner that is granting the License. * * "Legal Entity" shall mean the union of the acting entity and all * other entities that control, are controlled by, or are under common * control with that entity. For the purposes of this definition, * "control" means (i) the power, direct or indirect, to cause the * direction or management of such entity, whether by contract or * otherwise, or (ii) ownership of fifty percent (50%) or more of the * outstanding shares, or (iii) beneficial ownership of such entity. * * "You" (or "Your") shall mean an individual or Legal Entity * exercising permissions granted by this License. * * "Source" form shall mean the preferred form for making modifications, * including but not limited to software source code, documentation * source, and configuration files. * * "Object" form shall mean any form resulting from mechanical * transformation or translation of a Source form, including but * not limited to compiled object code, generated documentation, * and conversions to other media types. * * "Work" shall mean the work of authorship, whether in Source or * Object form, made available under the License, as indicated by a * copyright notice that is included in or attached to the work * (an example is provided in the Appendix below). * * "Derivative Works" shall mean any work, whether in Source or Object * form, that is based on (or derived from) the Work and for which the * editorial revisions, annotations, elaborations, or other modifications * represent, as a whole, an original work of authorship. 
For the purposes * of this License, Derivative Works shall not include works that remain * separable from, or merely link (or bind by name) to the interfaces of, * the Work and Derivative Works thereof. * * "Contribution" shall mean any work of authorship, including * the original version of the Work and any modifications or additions * to that Work or Derivative Works thereof, that is intentionally * submitted to Licensor for inclusion in the Work by the copyright owner * or by an individual or Legal Entity authorized to submit on behalf of * the copyright owner. For the purposes of this definition, "submitted" * means any form of electronic, verbal, or written communication sent * to the Licensor or its representatives, including but not limited to * communication on electronic mailing lists, source code control systems, * and issue tracking systems that are managed by, or on behalf of, the * Licensor for the purpose of discussing and improving the Work, but * excluding communication that is conspicuously marked or otherwise * designated in writing by the copyright owner as "Not a Contribution." * * "Contributor" shall mean Licensor and any individual or Legal Entity * on behalf of whom a Contribution has been received by Licensor and * subsequently incorporated within the Work. * * 2. Grant of Copyright License. Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * copyright license to reproduce, prepare Derivative Works of, * publicly display, publicly perform, sublicense, and distribute the * Work and such Derivative Works in Source or Object form. * * 3. Grant of Patent License. 
Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * (except as stated in this section) patent license to make, have made, * use, offer to sell, sell, import, and otherwise transfer the Work, * where such license applies only to those patent claims licensable * by such Contributor that are necessarily infringed by their * Contribution(s) alone or by combination of their Contribution(s) * with the Work to which such Contribution(s) was submitted. If You * institute patent litigation against any entity (including a * cross-claim or counterclaim in a lawsuit) alleging that the Work * or a Contribution incorporated within the Work constitutes direct * or contributory patent infringement, then any patent licenses * granted to You under this License for that Work shall terminate * as of the date such litigation is filed. * * 4. Redistribution. You may reproduce and distribute copies of the * Work or Derivative Works thereof in any medium, with or without * modifications, and in Source or Object form, provided that You * meet the following conditions: * * (a) You must give any other recipients of the Work or * Derivative Works a copy of this License; and * * (b) You must cause any modified files to carry prominent notices * stating that You changed the files; and * * (c) You must retain, in the Source form of any Derivative Works * that You distribute, all copyright, patent, trademark, and * attribution notices from the Source form of the Work, * excluding those notices that do not pertain to any part of * the Derivative Works; and * * (d) If the Work includes a "NOTICE" text file as part of its * distribution, then any Derivative Works that You distribute must * include a readable copy of the attribution notices contained * within such NOTICE file, excluding those notices that do not * pertain to any part of the Derivative Works, in at least one * of 
the following places: within a NOTICE text file distributed * as part of the Derivative Works; within the Source form or * documentation, if provided along with the Derivative Works; or, * within a display generated by the Derivative Works, if and * wherever such third-party notices normally appear. The contents * of the NOTICE file are for informational purposes only and * do not modify the License. You may add Your own attribution * notices within Derivative Works that You distribute, alongside * or as an addendum to the NOTICE text from the Work, provided * that such additional attribution notices cannot be construed * as modifying the License. * * You may add Your own copyright statement to Your modifications and * may provide additional or different license terms and conditions * for use, reproduction, or distribution of Your modifications, or * for any such Derivative Works as a whole, provided Your use, * reproduction, and distribution of the Work otherwise complies with * the conditions stated in this License. * * 5. Submission of Contributions. Unless You explicitly state otherwise, * any Contribution intentionally submitted for inclusion in the Work * by You to the Licensor shall be under the terms and conditions of * this License, without any additional terms or conditions. * Notwithstanding the above, nothing herein shall supersede or modify * the terms of any separate license agreement you may have executed * with Licensor regarding such Contributions. * * 6. Trademarks. This License does not grant permission to use the trade * names, trademarks, service marks, or product names of the Licensor, * except as required for reasonable and customary use in describing the * origin of the Work and reproducing the content of the NOTICE file. * * 7. Disclaimer of Warranty. 
Unless required by applicable law or * agreed to in writing, Licensor provides the Work (and each * Contributor provides its Contributions) on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied, including, without limitation, any warranties or conditions * of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A * PARTICULAR PURPOSE. You are solely responsible for determining the * appropriateness of using or redistributing the Work and assume any * risks associated with Your exercise of permissions under this License. * * 8. Limitation of Liability. In no event and under no legal theory, * whether in tort (including negligence), contract, or otherwise, * unless required by applicable law (such as deliberate and grossly * negligent acts) or agreed to in writing, shall any Contributor be * liable to You for damages, including any direct, indirect, special, * incidental, or consequential damages of any character arising as a * result of this License or out of the use or inability to use the * Work (including but not limited to damages for loss of goodwill, * work stoppage, computer failure or malfunction, or any and all * other commercial damages or losses), even if such Contributor * has been advised of the possibility of such damages. * * 9. Accepting Warranty or Additional Liability. While redistributing * the Work or Derivative Works thereof, You may choose to offer, * and charge a fee for, acceptance of support, warranty, indemnity, * or other liability obligations and/or rights consistent with this * License. However, in accepting such obligations, You may act only * on Your own behalf and on Your sole responsibility, not on behalf * of any other Contributor, and only if You agree to indemnify, * defend, and hold each Contributor harmless for any liability * incurred by, or claims asserted against, such Contributor by reason * of your accepting any such warranty or additional liability. 
* * END OF TERMS AND CONDITIONS * * APPENDIX: How to apply the Apache License to your work. * * To apply the Apache License to your work, attach the following * boilerplate notice, with the fields enclosed by brackets "{}" * replaced with your own identifying information. (Don't include * the brackets!) The text should be enclosed in the appropriate * comment syntax for the file format. We also recommend that a * file or class name and description of purpose be included on the * same "printed page" as the copyright notice for easier * identification within third-party archives. * * Copyright {yyyy} {name of copyright owner} * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package com.deleidos.rtws.webapp.alertsapi.servlet.enunciate;

import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.ws.rs.Path;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;

import net.sf.json.JSONArray;
import net.sf.json.JSONException;
import net.sf.json.JSONObject;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.log4j.Logger;

import com.deleidos.rtws.commons.exception.PermissionDeniedException;
import com.deleidos.rtws.commons.model.response.ErrorResponse;
import com.deleidos.rtws.commons.model.response.StandardResponse;
import com.deleidos.rtws.commons.model.user.Filter;
import com.deleidos.rtws.commons.model.user.Subscription;
import com.deleidos.rtws.commons.model.user.WatchListFilter;
import com.deleidos.rtws.webapp.alertsapi.client.NamedFilterRestClient;
import com.deleidos.rtws.webapp.alertsapi.client.NamedFilterUsersRestClient;
import com.deleidos.rtws.webapp.alertsapi.client.NamedFilterWatchlistRestClient;
import com.deleidos.rtws.webapp.alertsapi.client.RestClientException;

/**
 * WatchListServiceImpl is an implementation of the WatchListService interface.
 * It is used to manipulate the watch lists used by a NamedFilterRestClient.
 */
@Path("/watchlist")
public class WatchListServiceImpl implements WatchListService {

    /** The logger. */
    private Logger logger = Logger.getLogger(WatchListServiceImpl.class);

    private NamedFilterRestClient filterClient;

    /**
     * Sets the NamedFilterRestClient.
     *
     * @param filterClient the new filter client
     */
    public void setFilterClient(NamedFilterRestClient filterClient) {
        this.filterClient = filterClient;
    }

    private NamedFilterUsersRestClient userClient;

    /**
     * Sets the NamedFilterUsersRestClient.
     *
     * @param userClient the new user client
     */
    public void setUserClient(NamedFilterUsersRestClient userClient) {
        this.userClient = userClient;
    }

    private NamedFilterWatchlistRestClient watchlistClient;

    /**
     * Sets the NamedFilterWatchlistRestClient.
     *
     * @param watchlistClient the new watch list client
     */
    public void setWatchlistClient(NamedFilterWatchlistRestClient watchlistClient) {
        this.watchlistClient = watchlistClient;
    }

    /** Lazily creates the filter client when none was injected. */
    private void ensureFilterClient() {
        if (filterClient == null) {
            setFilterClient(new NamedFilterRestClient());
        }
    }

    /** Lazily creates the user client when none was injected. */
    private void ensureUserClient() {
        if (userClient == null) {
            setUserClient(new NamedFilterUsersRestClient());
        }
    }

    /** Lazily creates the watch list client when none was injected. */
    private void ensureWatchlistClient() {
        if (watchlistClient == null) {
            setWatchlistClient(new NamedFilterWatchlistRestClient());
        }
    }

    /**
     * Logs and builds the standard "Unknown user" 500 error response
     * (previously duplicated in three methods).
     *
     * @param username the unresolvable user name
     * @return an ErrorResponse with header code 500
     */
    private ErrorResponse unknownUserResponse(String username) {
        logger.warn("Unknown user " + username);
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage("Unknown user " + username);
        return response;
    }

    /** Builds a 400 Bad Request WebApplicationException carrying an ErrorResponse entity. */
    private static WebApplicationException badRequest(String message) {
        ErrorResponse errResponse = new ErrorResponse();
        errResponse.setStandardHeaderCode(Status.BAD_REQUEST.getStatusCode());
        errResponse.setMessage(message);
        return new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(errResponse).build());
    }

    /** Builds a 500 Internal Server Error WebApplicationException carrying an ErrorResponse entity. */
    private static WebApplicationException serverError(String message) {
        ErrorResponse errResponse = new ErrorResponse();
        errResponse.setStandardHeaderCode(Status.INTERNAL_SERVER_ERROR.getStatusCode());
        errResponse.setMessage(message);
        return new WebApplicationException(Response.serverError().entity(errResponse).build());
    }

    /**
     * Add a filter to the user's watch list.
     *
     * @param username the name of the user to add the filter to
     * @param id Filter ID of the filter to add
     * @param color Color to use for the filter
     * @param email the format for the email
     * @return a StandardResponse, or an ErrorResponse when the user is unknown
     * @throws PermissionDeniedException
     */
    public StandardResponse<?> addFilterByUser(String username, Long id, Long color, String email)
            throws PermissionDeniedException {
        ensureUserClient();
        ensureWatchlistClient();

        // Get user key by username
        Long userKey = userClient.getUserKey(username);
        if (userKey == null) {
            return unknownUserResponse(username);
        }

        WatchListFilter filter = new WatchListFilter();
        filter.setKey(id);
        filter.setColor(color);
        filter.setEmail(email);

        // Create WatchListFilter
        return watchlistClient.createWatchListFilter(userKey, filter);
    }

    /**
     * Get the user's filter watch list.
     *
     * @param username the name of the user to get the watch list for
     * @return a collection of watch list filters; empty when the user or list is unknown
     * @throws PermissionDeniedException
     */
    public Collection<WatchListFilter> getWatchListByUser(String username) throws PermissionDeniedException {
        ensureFilterClient();
        ensureUserClient();
        ensureWatchlistClient();

        // Get user key by username
        Long userKey = userClient.getUserKey(username);
        if (userKey == null) {
            return new ArrayList<WatchListFilter>();
        }

        // Get WatchListFilters by user key
        Map<Long, WatchListFilter> watchlist = watchlistClient.getByUserKey(userKey);
        if (watchlist == null) {
            return new ArrayList<WatchListFilter>();
        }

        // Enrich WatchListFilters with matching subclass Filter bean properties
        Collection<Filter> allFilters = filterClient.getAll();
        for (Filter filter : allFilters) {
            if (watchlist.containsKey(filter.getKey())) {
                WatchListFilter watch = watchlist.get(filter.getKey());
                try {
                    BeanUtils.copyProperties(watch, filter);
                } catch (Exception oops) {
                    // Best-effort enrichment; previously swallowed silently — now at least logged.
                    logger.debug("Unable to copy filter properties for key " + filter.getKey(), oops);
                }
            }
        }
        return watchlist.values();
    }

    /**
     * Remove a filter from the user's watch list.
     *
     * @param username the name of the user to remove the filter from
     * @param id the Filter ID of the filter to remove
     * @return a StandardResponse, or an ErrorResponse when the user is unknown
     * @throws PermissionDeniedException
     */
    public StandardResponse<?> deleteFilterByUser(String username, Long id) throws PermissionDeniedException {
        ensureUserClient();
        ensureWatchlistClient();

        // Get user key by username
        Long userKey = userClient.getUserKey(username);
        if (userKey == null) {
            return unknownUserResponse(username);
        }

        // Delete WatchListFilter
        return watchlistClient.deleteWatchListFilter(userKey, id);
    }

    /**
     * Modify a filter in the user's watch list.
     *
     * @param username the name of the user for the filter to be modified
     * @param id the Filter ID of the filter to be modified
     * @param color the Color for the filter to be modified
     * @param email the format for the email
     * @return a StandardResponse, or an ErrorResponse when the user is unknown
     * @throws PermissionDeniedException
     * @throws UnsupportedEncodingException
     */
    public StandardResponse<?> updateFilterByUser(String username, Long id, Long color, String email)
            throws PermissionDeniedException, UnsupportedEncodingException {
        ensureUserClient();
        ensureWatchlistClient();

        // Get user key by username
        Long userKey = userClient.getUserKey(username);
        if (userKey == null) {
            return unknownUserResponse(username);
        }

        WatchListFilter filter = new WatchListFilter();
        filter.setKey(id);
        filter.setColor(color);
        filter.setEmail(email);

        // Update WatchListFilter
        return watchlistClient.updateWatchListFilter(userKey, filter);
    }

    /**
     * List the subscriptions attached to a filter.
     *
     * @param filterId the filter to query; required
     * @return the filter's subscriptions
     * @throws WebApplicationException 400 when filterId is null, 500 on client failure
     */
    public List<Subscription> getFilterSubscriptions(Long filterId) {
        if (filterId == null) {
            throw badRequest("filterId is required");
        }
        ensureWatchlistClient();
        try {
            return watchlistClient.listFilterSubscriptions(filterId);
        } catch (RestClientException restClientException) {
            throw serverError(restClientException.getMessage());
        }
    }

    /**
     * Parses a JSON array of subscriptions.
     *
     * @param subscriptionsJson the JSON payload; may be null
     * @return the parsed subscriptions, or null when the input is null or the array is empty
     *         (preserves the original behavior of passing null downstream in those cases)
     * @throws WebApplicationException 400 when the JSON is malformed or an entry has no subscriber id
     */
    private ArrayList<Subscription> parseSubscriptions(String subscriptionsJson) {
        if (subscriptionsJson == null) {
            return null;
        }
        ArrayList<Subscription> subscriptions = null;
        try {
            JSONArray subscriptionsJsonArray = JSONArray.fromObject(subscriptionsJson);
            Iterator<?> subscriptionIterator = subscriptionsJsonArray.iterator();
            while (subscriptionIterator.hasNext()) {
                JSONObject row = (JSONObject) subscriptionIterator.next();
                Subscription currSubscription = null;
                if (!row.isNullObject()) {
                    currSubscription = (Subscription) JSONObject.toBean(row, Subscription.class);
                    // A subscription without a subscriber id is treated as invalid.
                    if (currSubscription.getSubscriberId() == null) {
                        currSubscription = null;
                    }
                }
                if (currSubscription == null) {
                    throw badRequest("Invalid subscription was specified");
                }
                if (subscriptions == null) {
                    subscriptions = new ArrayList<Subscription>();
                }
                subscriptions.add(currSubscription);
            }
        } catch (JSONException jsonException) {
            throw badRequest("subscriptionsJson is invalid");
        }
        return subscriptions;
    }

    /**
     * Replace the subscriptions attached to a filter.
     *
     * @param filterId the filter to update; required
     * @param subscriptionsJson JSON array of subscriptions; may be null
     * @return 200 OK on success
     * @throws WebApplicationException 400 on bad input, 500 on client failure
     */
    public Response updateFilterSubscriptions(Long filterId, String subscriptionsJson) {
        if (filterId == null) {
            throw badRequest("filterId is required");
        }
        ArrayList<Subscription> subscriptions = parseSubscriptions(subscriptionsJson);
        ensureWatchlistClient();
        try {
            watchlistClient.updateFilterSubscriptions(filterId, subscriptions);
            return Response.ok().build();
        } catch (RestClientException restClientException) {
            throw serverError(restClientException.getMessage());
        }
    }
}
apache-2.0
sdgdsffdsfff/util
src/com/jing/test/XMLUtilTest.java
718
package com.jing.test;

import java.io.File;
import java.io.IOException;

import javax.xml.parsers.ParserConfigurationException;

import org.xml.sax.SAXException;

import com.jing.io.XMLUtil;

/**
 * Manual smoke test for {@link XMLUtil}: loads a local deployment descriptor
 * and prints the second "welcome-file" node.
 */
public class XMLUtilTest {

    /**
     * Entry point. Expects a readable XML file at the hard-coded path.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        try {
            XMLUtil util = new XMLUtil(new File("d://web.xml"));
            System.out.println(util.readNodeList("welcome-file").item(1));
        } catch (ParserConfigurationException | SAXException | IOException e) {
            // Multi-catch replaces three identical auto-generated catch blocks.
            e.printStackTrace();
        }
    }
}
apache-2.0
chirino/activemq
activemq-runtime-config/src/test/java/org/apache/activemq/java/JavaAuthorizationTest.java
10436
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.java;

import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

import org.apache.activemq.AbstractAuthorizationTest;
import org.apache.activemq.broker.BrokerPlugin;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.filter.DestinationMapEntry;
import org.apache.activemq.plugin.java.JavaRuntimeConfigurationBroker;
import org.apache.activemq.plugin.java.JavaRuntimeConfigurationPlugin;
import org.apache.activemq.security.AuthorizationEntry;
import org.apache.activemq.security.AuthorizationPlugin;
import org.apache.activemq.security.DefaultAuthorizationMap;
import org.apache.activemq.security.JaasAuthenticationPlugin;
import org.apache.activemq.security.TempDestinationAuthorizationEntry;
import org.junit.Test;

/**
 * Verifies that authorization maps can be swapped at runtime through the
 * JavaRuntimeConfigurationBroker without restarting the broker.
 */
public class JavaAuthorizationTest extends AbstractAuthorizationTest {

    /** Seconds to wait for a runtime-config update to propagate. */
    public static final int SLEEP = 2; // seconds

    String configurationSeed = "authorizationTest";

    private JavaRuntimeConfigurationBroker javaConfigBroker;

    /**
     * Starts the given broker with JAAS authentication, an (initially empty)
     * authorization plugin, and the runtime-configuration plugin, then grabs
     * the runtime-config broker filter for later map updates.
     *
     * @param brokerService the broker to configure and start
     */
    public void startBroker(BrokerService brokerService) throws Exception {
        this.brokerService = brokerService;

        JaasAuthenticationPlugin authenticationPlugin = new JaasAuthenticationPlugin();
        authenticationPlugin.setConfiguration("activemq-domain");

        AuthorizationPlugin authorizationPlugin = new AuthorizationPlugin();
        DefaultAuthorizationMap authorizationMap = new DefaultAuthorizationMap();
        authorizationPlugin.setMap(authorizationMap);

        brokerService.setPlugins(new BrokerPlugin[]{new JavaRuntimeConfigurationPlugin(),
                authenticationPlugin, authorizationPlugin});
        brokerService.setPersistent(false);
        brokerService.start();
        brokerService.waitUntilStarted();

        javaConfigBroker =
                (JavaRuntimeConfigurationBroker) brokerService.getBroker().getAdaptor(JavaRuntimeConfigurationBroker.class);
    }

    /** Updating users-only map to users+guests map widens access accordingly. */
    @Test
    public void testMod() throws Exception {
        DefaultAuthorizationMap authorizationMap = buildUsersMap();
        BrokerService brokerService = new BrokerService();
        startBroker(brokerService);
        assertTrue("broker alive", brokerService.isStarted());

        javaConfigBroker.updateAuthorizationMap(authorizationMap);

        assertAllowed("user", "USERS.A");
        assertDenied("user", "GUESTS.A");
        assertDeniedTemp("guest");

        authorizationMap = buildUsersGuestsMap();
        javaConfigBroker.updateAuthorizationMap(authorizationMap);
        TimeUnit.SECONDS.sleep(SLEEP);

        assertAllowed("user", "USERS.A");
        assertAllowed("guest", "GUESTS.A");
        assertDenied("user", "GUESTS.A");
        assertAllowedTemp("guest");
    }

    /** Replacing users+guests map with users-only map revokes guest access. */
    @Test
    public void testModRm() throws Exception {
        DefaultAuthorizationMap authorizationMap = buildUsersGuestsMap();
        BrokerService brokerService = new BrokerService();
        startBroker(brokerService);
        assertTrue("broker alive", brokerService.isStarted());

        javaConfigBroker.updateAuthorizationMap(authorizationMap);
        TimeUnit.SECONDS.sleep(SLEEP);

        assertAllowed("user", "USERS.A");
        assertAllowed("guest", "GUESTS.A");
        assertDenied("user", "GUESTS.A");
        assertAllowedTemp("guest");

        authorizationMap = buildUsersMap();
        javaConfigBroker.updateAuthorizationMap(authorizationMap);
        TimeUnit.SECONDS.sleep(SLEEP);

        assertAllowed("user", "USERS.A");
        assertDenied("user", "GUESTS.A");
        assertDeniedTemp("guest");
    }

    /** Wildcard (">"-suffixed) destination entries grant and deny as expected. */
    @Test
    public void testWildcard() throws Exception {
        DefaultAuthorizationMap authorizationMap = buildWildcardUsersGuestsMap();
        BrokerService brokerService = new BrokerService();
        startBroker(brokerService);
        assertTrue("broker alive", brokerService.isStarted());

        javaConfigBroker.updateAuthorizationMap(authorizationMap);
        TimeUnit.SECONDS.sleep(SLEEP);

        final String ALL_USERS = "ALL.USERS.>";
        final String ALL_GUESTS = "ALL.GUESTS.>";

        assertAllowed("user", ALL_USERS);
        assertAllowed("guest", ALL_GUESTS);
        assertDenied("user", ALL_USERS + "," + ALL_GUESTS);
        assertDenied("guest", ALL_GUESTS + "," + ALL_USERS);

        final String ALL_PREFIX = "ALL.>";
        assertDenied("user", ALL_PREFIX);
        assertDenied("guest", ALL_PREFIX);

        assertAllowed("user", "ALL.USERS.A");
        assertAllowed("user", "ALL.USERS.A,ALL.USERS.B");
        assertAllowed("guest", "ALL.GUESTS.A");
        assertAllowed("guest", "ALL.GUESTS.A,ALL.GUESTS.B");

        assertDenied("user", "USERS.>");
        assertDenied("guest", "GUESTS.>");

        assertAllowedTemp("guest");
    }

    /**
     * Builds a map granting wildcard "ALL.USERS.>" / "ALL.GUESTS.>" destinations
     * to users and guests respectively.
     *
     * @return the populated authorization map
     */
    private DefaultAuthorizationMap buildWildcardUsersGuestsMap() throws Exception {
        DefaultAuthorizationMap authorizationMap = new DefaultAuthorizationMap();
        @SuppressWarnings("rawtypes")
        List<DestinationMapEntry> entries = new ArrayList<>();
        entries.add(buildQueueAuthorizationEntry(">", "admins", "admins", "admins"));
        entries.add(buildQueueAuthorizationEntry("ALL.USERS.>", "users", "users", "users"));
        entries.add(buildQueueAuthorizationEntry("ALL.GUESTS.>", "guests", "guests,users", "guests,users"));

        entries.add(buildTopicAuthorizationEntry(">", "admins", "admins", "admins"));
        entries.add(buildTopicAuthorizationEntry("ALL.USERS.>", "users", "users", "users"));
        entries.add(buildTopicAuthorizationEntry("ALL.GUESTS.>", "guests", "guests,users", "guests,users"));
        entries.add(buildTopicAuthorizationEntry("ActiveMQ.Advisory.>", "guests,users", "guests,users", "guests,users"));

        authorizationMap.setAuthorizationEntries(entries);
        authorizationMap.setTempDestinationAuthorizationEntry(buildTempEntry("tempDestinationAdmins,guests"));
        return authorizationMap;
    }

    /**
     * Builds a map granting "USERS.>" destinations to users only; guests have
     * no temp-destination rights.
     *
     * @return the populated authorization map
     */
    private DefaultAuthorizationMap buildUsersMap() throws Exception {
        DefaultAuthorizationMap authorizationMap = new DefaultAuthorizationMap();
        @SuppressWarnings("rawtypes")
        List<DestinationMapEntry> entries = new ArrayList<>();
        entries.add(buildQueueAuthorizationEntry(">", "admins", "admins", "admins"));
        entries.add(buildQueueAuthorizationEntry("USERS.>", "users", "users", "users"));

        entries.add(buildTopicAuthorizationEntry(">", "admins", "admins", "admins"));
        entries.add(buildTopicAuthorizationEntry("USERS.>", "users", "users", "users"));
        entries.add(buildTopicAuthorizationEntry("ActiveMQ.Advisory.>", "guests,users", "guests,users", "guests,users"));

        authorizationMap.setAuthorizationEntries(entries);
        authorizationMap.setTempDestinationAuthorizationEntry(buildTempEntry("tempDestinationAdmins"));
        return authorizationMap;
    }

    /**
     * Builds a map granting "USERS.>" to users and "GUESTS.>" to guests and
     * users; guests may also use temp destinations.
     *
     * @return the populated authorization map
     */
    private DefaultAuthorizationMap buildUsersGuestsMap() throws Exception {
        DefaultAuthorizationMap authorizationMap = new DefaultAuthorizationMap();
        @SuppressWarnings("rawtypes")
        List<DestinationMapEntry> entries = new ArrayList<>();
        entries.add(buildQueueAuthorizationEntry(">", "admins", "admins", "admins"));
        entries.add(buildQueueAuthorizationEntry("USERS.>", "users", "users", "users"));
        entries.add(buildQueueAuthorizationEntry("GUESTS.>", "guests", "guests,users", "guests,users"));

        entries.add(buildTopicAuthorizationEntry(">", "admins", "admins", "admins"));
        entries.add(buildTopicAuthorizationEntry("USERS.>", "users", "users", "users"));
        entries.add(buildTopicAuthorizationEntry("GUESTS.>", "guests", "guests,users", "guests,users"));
        entries.add(buildTopicAuthorizationEntry("ActiveMQ.Advisory.>", "guests,users", "guests,users", "guests,users"));

        authorizationMap.setAuthorizationEntries(entries);
        authorizationMap.setTempDestinationAuthorizationEntry(buildTempEntry("tempDestinationAdmins,guests"));
        return authorizationMap;
    }

    /**
     * Builds a temp-destination entry granting read/write/admin to the given
     * roles (previously duplicated across the three map builders).
     *
     * @param roles comma-separated role list
     * @return the populated temp-destination entry
     */
    private TempDestinationAuthorizationEntry buildTempEntry(String roles) {
        TempDestinationAuthorizationEntry tempEntry = new TempDestinationAuthorizationEntry();
        tempEntry.setRead(roles);
        tempEntry.setWrite(roles);
        tempEntry.setAdmin(roles);
        return tempEntry;
    }

    /** Builds a queue authorization entry for the given destination and role lists. */
    private AuthorizationEntry buildQueueAuthorizationEntry(String queue, String read, String write, String admin)
            throws Exception {
        AuthorizationEntry entry = new AuthorizationEntry();
        entry.setQueue(queue);
        entry.setRead(read);
        entry.setWrite(write);
        entry.setAdmin(admin);
        return entry;
    }

    /** Builds a topic authorization entry for the given destination and role lists. */
    private AuthorizationEntry buildTopicAuthorizationEntry(String topic, String read, String write, String admin)
            throws Exception {
        AuthorizationEntry entry = new AuthorizationEntry();
        entry.setTopic(topic);
        entry.setRead(read);
        entry.setWrite(write);
        entry.setAdmin(admin);
        return entry;
    }
}
apache-2.0
nirmal070125/siddhi
modules/siddhi-core/src/main/java/org/wso2/siddhi/core/query/processor/SchedulingProcessor.java
934
/*
 * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.siddhi.core.query.processor;

import org.wso2.siddhi.core.util.Scheduler;

/**
 * A {@link Processor} whose execution is driven by a {@link Scheduler};
 * implementations expose the scheduler instance via the accessor pair below.
 */
public interface SchedulingProcessor extends Processor {

    /**
     * Returns the scheduler currently associated with this processor.
     */
    Scheduler getScheduler();

    /**
     * Associates a scheduler with this processor.
     *
     * @param scheduler the scheduler to use
     */
    void setScheduler(Scheduler scheduler);
}
apache-2.0
SmarterApp/TechnologyReadinessTool
core-components/src/main/java/net/techreadiness/service/RoleServiceImpl.java
9832
package net.techreadiness.service; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import javax.inject.Inject; import javax.jws.WebService; import net.techreadiness.annotation.CoreDataModificationStatus; import net.techreadiness.annotation.CoreDataModificationStatus.ModificationType; import net.techreadiness.persistence.dao.EntityDAO.EntityTypeCode; import net.techreadiness.persistence.dao.EntityFieldDAO; import net.techreadiness.persistence.dao.PermissionDAO; import net.techreadiness.persistence.dao.RoleDAO; import net.techreadiness.persistence.dao.RoleDelegationDAO; import net.techreadiness.persistence.dao.RolePermissionDAO; import net.techreadiness.persistence.dao.ScopeDAO; import net.techreadiness.persistence.domain.EntityFieldDO; import net.techreadiness.persistence.domain.RoleDO; import net.techreadiness.persistence.domain.RoleDelegationDO; import net.techreadiness.persistence.domain.RolePermissionDO; import net.techreadiness.persistence.domain.ScopeDO; import net.techreadiness.security.CorePermissionCodes; import net.techreadiness.security.PermissionCode; import net.techreadiness.service.common.ValidationError; import net.techreadiness.service.exception.FaultInfo; import net.techreadiness.service.exception.ValidationServiceException; import net.techreadiness.service.object.Role; import net.techreadiness.service.object.Scope; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.google.common.collect.Sets;

/**
 * Transactional {@code RoleService} implementation backed by the persistence-layer DAOs.
 * Exposed both as a Spring service and as a JAX-WS web service endpoint.
 * Mapping between DO (persistence) and service objects is delegated to
 * {@code getMappingService()} inherited from {@code BaseServiceWithValidationImpl}.
 */
@WebService
@Service
@Transactional
public class RoleServiceImpl extends BaseServiceWithValidationImpl implements RoleService {

    @Inject
    private RoleDAO roleDAO;
    @Inject
    private ScopeDAO scopeDAO;
    @Inject
    private PermissionDAO permissionDAO;
    @Inject
    private RolePermissionDAO rolePermissionDAO;
    @Inject
    private RoleDelegationDAO roleDelegationDAO;
    @Inject
    private UserService userService;
    @Inject
    private EntityFieldDAO entityFieldDAO;

    /** Loads a single role by primary key and maps it to the service-layer {@link Role}. */
    @Override
    public Role getById(ServiceContext context, Long roleId) {
        return getMappingService().map(roleDAO.getById(roleId));
    }

    /** Returns the ids of all permissions currently associated with the given role. */
    @Override
    public Set<Long> findAssociatedPermissionIds(ServiceContext context, Long roleId) {
        return roleDAO.getAssociatedPermissionIds(roleId);
    }

    /**
     * Reconciles the role's permission associations with {@code permissions}:
     * associations not in the set are deleted, missing ones are created.
     * A null {@code roleId} is a silent no-op.
     */
    @Override
    @CoreDataModificationStatus(modificationType = ModificationType.UPDATE, entityClass = RolePermissionDO.class)
    public void updateRolePermissions(ServiceContext context, Long roleId, Set<Long> permissions) {
        if (roleId == null) {
            return;
        }
        Set<Long> currentPermissions = Sets.newHashSet();
        Collection<RolePermissionDO> rolePermissions = rolePermissionDAO.findPermissionsForRole(roleId);
        for (RolePermissionDO rolePermissionDO : rolePermissions) {
            currentPermissions.add(rolePermissionDO.getPermission().getPermissionId());
        }
        // Sets.difference returns live views; both are only read below, never mutated.
        Set<Long> toAdd = Sets.difference(permissions, currentPermissions);
        Set<Long> toRemove = Sets.difference(currentPermissions, permissions);
        for (Long permissionId : toRemove) {
            RolePermissionDO rpDO = rolePermissionDAO.getByRoleIdPermissionId(roleId, permissionId);
            if (rpDO != null) {
                rolePermissionDAO.delete(rpDO);
            }
        }
        for (Long permissionId : toAdd) {
            // Re-check existence so a concurrent/duplicate add does not create a second row.
            RolePermissionDO rpDO = rolePermissionDAO.getByRoleIdPermissionId(roleId, permissionId);
            if (rpDO == null) {
                RolePermissionDO rolePermissionDO = new RolePermissionDO();
                rolePermissionDO.setRole(roleDAO.getById(roleId));
                rolePermissionDO.setPermission(permissionDAO.getById(permissionId));
                rolePermissionDAO.create(rolePermissionDO);
            }
        }
    }

    /**
     * Checks uniqueness of the role's category/name/code combination across the
     * role scope's ancestors and descendants.
     */
    @Override
    public boolean isUniqueRoleCategoryNameCodeByScope(ServiceContext context, Role role) {
        Collection<ScopeDO> pathScopes = scopeDAO.getAncestorsAndDescendants(role.getScope().getScopeId());
        // RoleDO roleDO = roleDAO.getById(role.getRoleId());
        return roleDAO.isUniqueRoleCategoryNameCodeByScopes(role, pathScopes);
    }

    /**
     * Lists the roles visible from the context's scope. Users holding
     * CORE_SEARCH_IGNORE_ROLECONFER bypass the role-confer restriction in the DAO query.
     */
    @Override
    public List<Role> findRolesFromScope(ServiceContext context) {
        PermissionCode[] ignoreConferPerm = { CorePermissionCodes.CORE_SEARCH_IGNORE_ROLECONFER };
        return getMappingService().mapFromDOList(
                roleDAO.findRolesFromScope(context.getScopeId(), context.getUserId(),
                        userService.hasPermission(context, ignoreConferPerm)));
    }

    /**
     * Deletes every role-permission association referencing any of the given
     * permission ids. A null set is a silent no-op.
     */
    @Override
    public void unassignPermissions(ServiceContext context, Set<Long> permissionIdSet) {
        if (permissionIdSet == null) {
            return;
        }
        for (Long permissionId : permissionIdSet) {
            Collection<RolePermissionDO> rolePermissions = rolePermissionDAO.getRolePermissionsByPermission(permissionId);
            if (rolePermissions != null) {
                for (RolePermissionDO rolePermissionDO : rolePermissions) {
                    rolePermissionDAO.delete(rolePermissionDO);
                }
            }
        }
    }

    /** Searches roles visible from the context's scope by a free-text term. */
    @Override
    public List<Role> findRolesBySearchTerm(ServiceContext context, String term) {
        PermissionCode[] ignoreConferPerm = { CorePermissionCodes.CORE_SEARCH_IGNORE_ROLECONFER };
        return getMappingService().mapFromDOList(
                roleDAO.getRolesBySearchTerm(context.getScopeId(), term, context.getUserId(),
                        userService.hasPermission(context, ignoreConferPerm)));
    }

    /**
     * Validates a role: generic field validation plus uniqueness of code, name,
     * and the category/name/code combination within the role's scope. The
     * "same roleId" check excludes the record itself when updating.
     *
     * @throws ValidationServiceException if any validation error was collected
     */
    @Override
    public void validateRole(ServiceContext context, Map<String, String> map, Role role) {
        List<ValidationError> errors = performValidation(map, context.getScopeId(), EntityTypeCode.ROLE);
        RoleDO dbRole = roleDAO.getRoleByCode(role.getScope().getScopeId(), role.getCode());
        if (dbRole != null) {
            if (role.getRoleId() == null || !role.getRoleId().equals(dbRole.getRoleId())) {
                EntityFieldDO code = entityFieldDAO.findByScopeAndTypeAndCode(role.getScope().getId(), EntityTypeCode.ROLE, "code");
                String message = messageSource.getMessage("validation.role.code.alreadyExists", null, null);
                ValidationError e = new ValidationError(code.getCode(), code.getName(), message, "validation.role.code.alreadyExists", message);
                errors.add(e);
            }
        }
        dbRole = roleDAO.getRoleByName(role.getScope().getScopeId(), role.getName());
        if (dbRole != null) {
            if (role.getRoleId() == null || !role.getRoleId().equals(dbRole.getRoleId())) {
                EntityFieldDO name = entityFieldDAO.findByScopeAndTypeAndCode(role.getScope().getId(), EntityTypeCode.ROLE, "name");
                String message = messageSource.getMessage("validation.role.name.alreadyExists", null, null);
                ValidationError e = new ValidationError(name.getCode(), name.getName(), message, "validation.role.name.alreadyExists", message);
                errors.add(e);
            }
        }
        if (!isUniqueRoleCategoryNameCodeByScope(context, role)) {
            EntityFieldDO name = entityFieldDAO.findByScopeAndTypeAndCode(role.getScope().getId(), EntityTypeCode.ROLE, "name");
            String message = messageSource.getMessage("validation.role.uniqueNameAndCodeAndCategory", null, null);
            ValidationError e = new ValidationError(name.getCode(), name.getName(), message, "validation.role.uniqueNameAndCodeAndCategory", message);
            errors.add(e);
        }
        // NOTE(review): if performValidation() ever returned null this would throw with
        // null attributeErrors (and errors.add above would already have NPE'd). The guard
        // was presumably meant to be "errors != null && !errors.isEmpty()" — confirm intent.
        if (errors == null || !errors.isEmpty()) {
            FaultInfo faultInfo = new FaultInfo();
            faultInfo.setMessage("Role failed validation.");
            faultInfo.setAttributeErrors(errors);
            throw new ValidationServiceException(faultInfo);
        }
    }

    /**
     * Updates an existing role in place under {@code scopeId}, validating before
     * persisting. Note: mutates the passed-in {@code role}'s scope as a side effect.
     */
    @Override
    @Transactional
    @CoreDataModificationStatus(modificationType = ModificationType.UPDATE, entityClass = RolePermissionDO.class)
    public Role update(ServiceContext context, Long scopeId, Role role) {
        RoleDO roleDO = roleDAO.getById(role.getRoleId());
        ScopeDO scopeDO = scopeDAO.getById(scopeId);
        Scope scope = getMappingService().map(scopeDO);
        role.setScope(scope);
        roleDO.setCategory(role.getCategory());
        roleDO.setName(role.getName());
        roleDO.setShortName(role.getShortName());
        roleDO.setCode(role.getCode());
        roleDO.setDescription(role.getDescription());
        roleDO.setDisplayOrder(role.getDisplayOrder());
        roleDO.setScope(scopeDO);
        validateRole(context, roleDO.getAsMap(), role);
        return getMappingService().map(roleDAO.update(roleDO));
    }

    /** Creates a new role in the context's scope after validation. */
    @Override
    @Transactional
    public Role create(ServiceContext context, Role role) {
        RoleDO roleDO = getMappingService().map(role);
        roleDO.setScope(scopeDAO.getById(context.getScopeId()));
        // Re-map from the DO so the validated Role carries the scope just assigned.
        role = getMappingService().map(roleDO);
        validateRole(context, roleDO.getAsMap(), role);
        return getMappingService().map(roleDAO.create(roleDO));
    }

    /** Bulk lookup of roles by id. */
    @Override
    public List<Role> findByIds(ServiceContext context, Collection<Long> roleIds) {
        return getMappingService().mapFromDOList(roleDAO.findById(roleIds));
    }

    /** Delegates to the DAO; presumably maps role codes/names to confer flags — confirm against RoleDAO. */
    @Override
    public Map<String, Boolean> getRoleConferAsMap(ServiceContext context) {
        return roleDAO.getRoleConferAsMap(context);
    }

    /** True when {@code delegRoleId} is registered as a delegation of {@code roleId}. */
    @Override
    public Boolean isDelegated(Long roleId, Long delegRoleId) {
        return roleDAO.isDelegated(roleId, delegRoleId);
    }

    /**
     * Applies delegation changes for a role: removes the associations in
     * {@code delDelegationSet}, then adds those in {@code addDelegationSet}
     * (skipping ones that already exist). Null roleId or null sets are no-ops.
     */
    @Override
    public void updateRoleDelegations(ServiceContext context, Long roleId, Set<Long> addDelegationSet, Set<Long> delDelegationSet) {
        if (roleId == null) {
            return;
        }
        if (delDelegationSet != null) {
            for (Long delegRoleId : delDelegationSet) {
                RoleDelegationDO rdDO = roleDelegationDAO.getByRoleIdDelegRoleId(roleId, delegRoleId);
                if (rdDO != null) {
                    roleDelegationDAO.delete(rdDO);
                }
            }
        }
        if (addDelegationSet != null) {
            for (Long delegRoleId : addDelegationSet) {
                RoleDelegationDO rdDO = roleDelegationDAO.getByRoleIdDelegRoleId(roleId, delegRoleId);
                if (rdDO == null) {
                    RoleDelegationDO roleDelegationDO = new RoleDelegationDO();
                    roleDelegationDO.setRole(roleDAO.getById(roleId));
                    roleDelegationDO.setDelegRole(roleDAO.getById(delegRoleId));
                    roleDelegationDAO.create(roleDelegationDO);
                }
            }
        }
    }

    /** Looks up a role by code within the context's scope; may map a null DO. */
    @Override
    public Role getRoleByCode(ServiceContext context, String code) {
        RoleDO role = roleDAO.getRoleByCode(context.getScopeId(), code);
        return getMappingService().map(role);
    }
}
apache-2.0
gdtlf/msg
msg-platform/src/main/java/org/mybatis/generator/plugins/OraclePaginationPlugin.java
14468
package org.mybatis.generator.plugins; import java.util.List; import org.mybatis.generator.api.CommentGenerator; import org.mybatis.generator.api.IntrospectedTable; import org.mybatis.generator.api.PluginAdapter; import org.mybatis.generator.api.dom.java.Field; import org.mybatis.generator.api.dom.java.FullyQualifiedJavaType; import org.mybatis.generator.api.dom.java.InnerClass; import org.mybatis.generator.api.dom.java.JavaVisibility; import org.mybatis.generator.api.dom.java.Method; import org.mybatis.generator.api.dom.java.Parameter; import org.mybatis.generator.api.dom.java.TopLevelClass; import org.mybatis.generator.api.dom.xml.Attribute; import org.mybatis.generator.api.dom.xml.Document; import org.mybatis.generator.api.dom.xml.TextElement; import org.mybatis.generator.api.dom.xml.XmlElement;

/**
 * MyBatis Generator plugin that adds Oracle ROWNUM-based pagination to generated
 * artifacts: {@code begin}/{@code end} bounds on the Example class, free-form
 * criterion helpers on {@code GeneratedCriteria}, and reusable prefix/suffix
 * {@code <sql>} fragments wrapped around selectByExample.
 */
public class OraclePaginationPlugin extends PluginAdapter {

    @Override
    public boolean modelExampleClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
        // Add begin/end fields plus accessors used by the pagination SQL fragments.
        addPage(topLevelClass, introspectedTable, "begin");
        addPage(topLevelClass, introspectedTable, "end");
        // Add column-name-parameterized criterion helpers to GeneratedCriteria.
        addGeneratedCriteriaMethod(topLevelClass, introspectedTable);
        return super.modelExampleClassGenerated(topLevelClass, introspectedTable);
    }

    /**
     * Adds the full family of free-form criterion methods (andIsNull, andEqualTo,
     * andBetween, ...) to the Example class's GeneratedCriteria inner class.
     * Note: the generated parameter name "filed" is kept as-is (historical typo for
     * "field") so the emitted source stays identical for existing consumers.
     */
    private void addGeneratedCriteriaMethod(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
        InnerClass criteria = null;
        // Locate the GeneratedCriteria inner class produced by the core generator.
        for (InnerClass innerClass : topLevelClass.getInnerClasses()) {
            if ("GeneratedCriteria".equals(innerClass.getType().getShortName())) { //$NON-NLS-1$
                criteria = innerClass;
                break;
            }
        }
        if (criteria == null) {
            // Defensive: nothing to extend (previously this would have thrown an NPE).
            return;
        }
        addCriterionMethod(criteria, "andIsNull", "addCriterion(filed + \" is null\");", "filed");
        addCriterionMethod(criteria, "andIsNotNull", "addCriterion(filed + \" is not null\");", "filed");
        addCriterionMethod(criteria, "andEqualTo", "addCriterion(filed + \" =\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andNotEqualTo", "addCriterion(filed + \" <>\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andGreaterThan", "addCriterion(filed + \" > \", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andGreaterThanOrEqualTo", "addCriterion(filed + \" >=\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andLessThan", "addCriterion(filed + \" <\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andLessThanOrEqualTo", "addCriterion(filed + \" <=\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andIn", "addCriterion(filed + \" in\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andNotIn", "addCriterion(filed + \" not in\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andBetween", "addCriterion(filed + \" between\", value1, value2, filed);", "filed", "value1", "value2");
        addCriterionMethod(criteria, "andNotBetween", "addCriterion(filed + \" not between\", value1, value2, filed);", "filed", "value1", "value2");
        addCriterionMethod(criteria, "andLike", "addCriterion(filed + \" like\", value, filed);", "filed", "value");
        addCriterionMethod(criteria, "andNotLike", "addCriterion(filed + \" not like\", value, filed);", "filed", "value");
    }

    /**
     * Builds one public Criteria-returning method with String parameters, a single
     * criterion body line, and the standard {@code return (Criteria)this;} tail.
     *
     * @param criteria   the GeneratedCriteria inner class to extend
     * @param name       generated method name
     * @param bodyLine   the addCriterion(...) statement to emit
     * @param paramNames names of the String parameters, in order
     */
    private void addCriterionMethod(InnerClass criteria, String name, String bodyLine, String... paramNames) {
        Method method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName(name);
        method.setReturnType(FullyQualifiedJavaType.getCriteriaInstance());
        for (String paramName : paramNames) {
            method.addParameter(new Parameter(FullyQualifiedJavaType.getStringInstance(), paramName)); //$NON-NLS-1$
        }
        method.addBodyLine(bodyLine);
        method.addBodyLine("return (Criteria)this;"); //$NON-NLS-1$
        criteria.addMethod(method);
    }

    @Override
    public boolean sqlMapDocumentGenerated(Document document, IntrospectedTable introspectedTable) {
        XmlElement parentElement = document.getRootElement();
        // Generate the opening half of the pagination statement.
        XmlElement paginationPrefixElement = new XmlElement("sql");
        paginationPrefixElement.addAttribute(new Attribute("id", "OracleDialectPrefix"));
        XmlElement pageStart = new XmlElement("if");
        pageStart.addAttribute(new Attribute("test", "begin != null and end != null"));
        pageStart.addElement(new TextElement(
                "select * from ( select row_.*, rownum rownum_ from ( "));
        paginationPrefixElement.addElement(pageStart);
        parentElement.addElement(paginationPrefixElement);
        // Generate the closing half of the pagination statement.
        XmlElement paginationSuffixElement = new XmlElement("sql");
        paginationSuffixElement.addAttribute(new Attribute("id", "OracleDialectSuffix"));
        XmlElement pageEnd = new XmlElement("if");
        pageEnd.addAttribute(new Attribute("test", "begin != null and end != null"));
        pageEnd.addElement(new TextElement(
                "<![CDATA[ ) row_ ) where rownum_ >= #{begin} and rownum_ <= #{end} ]]>"));
        paginationSuffixElement.addElement(pageEnd);
        parentElement.addElement(paginationSuffixElement);
        return super.sqlMapDocumentGenerated(document, introspectedTable);
    }

    @Override
    public boolean sqlMapSelectByExampleWithoutBLOBsElementGenerated(XmlElement element,
            IntrospectedTable introspectedTable) {
        XmlElement pageStart = new XmlElement("include"); //$NON-NLS-1$
        pageStart.addAttribute(new Attribute("refid", "OracleDialectPrefix"));
        element.getElements().add(0, pageStart);
        XmlElement pageEnd = new XmlElement("include"); //$NON-NLS-1$
        pageEnd.addAttribute(new Attribute("refid", "OracleDialectSuffix"));
        element.getElements().add(pageEnd);
        // FIX: previously delegated to super.sqlMapUpdateByExampleWithoutBLOBsElementGenerated,
        // i.e. the UPDATE hook was invoked from the SELECT callback. Delegate to the matching hook.
        return super.sqlMapSelectByExampleWithoutBLOBsElementGenerated(element, introspectedTable);
    }

    /**
     * Adds an Integer pagination bound field (named {@code name}) with getter and
     * setter to the generated Example class.
     *
     * @param topLevelClass     the Example class under construction
     * @param introspectedTable table metadata, used for generated comments
     * @param name              field name ("begin" or "end")
     */
    private void addPage(TopLevelClass topLevelClass, IntrospectedTable introspectedTable, String name) {
        // FIX: dropped addImportedType(new FullyQualifiedJavaType("int")) — importing a
        // primitive is meaningless, and java.lang.Integer needs no import.
        CommentGenerator commentGenerator = context.getCommentGenerator();
        Field field = new Field();
        field.setVisibility(JavaVisibility.PROTECTED);
        field.setType(new FullyQualifiedJavaType("Integer"));
        field.setName(name);
        commentGenerator.addFieldComment(field, introspectedTable);
        topLevelClass.addField(field);
        char c = name.charAt(0);
        String camel = Character.toUpperCase(c) + name.substring(1);
        Method method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setName("set" + camel);
        method.addParameter(new Parameter(new FullyQualifiedJavaType("Integer"), name));
        method.addBodyLine("this." + name + "=" + name + ";");
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);
        method = new Method();
        method.setVisibility(JavaVisibility.PUBLIC);
        method.setReturnType(new FullyQualifiedJavaType("Integer"));
        method.setName("get" + camel);
        method.addBodyLine("return " + name + ";");
        commentGenerator.addGeneralMethodComment(method, introspectedTable);
        topLevelClass.addMethod(method);
    }

    /**
     * This plugin is always valid - no properties are required
     */
    @Override
    public boolean validate(List<String> warnings) {
        return true;
    }
}
apache-2.0
renmeng8875/projects
Hibernate-source/源代码及重要说明/Hibernate相关资料/hibernate-3.2.0.ga/hibernate-3.2/src/org/hibernate/loader/custom/sql/SQLCustomQuery.java
8067
//$Id: SQLCustomQuery.java 10018 2006-06-15 05:21:06Z steve.ebersole@jboss.com $
package org.hibernate.loader.custom.sql; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.HashMap; import org.hibernate.HibernateException; import org.hibernate.engine.query.sql.NativeSQLQueryReturn; import org.hibernate.engine.SessionFactoryImplementor; import org.hibernate.loader.custom.CustomQuery; import org.hibernate.persister.collection.SQLLoadableCollection; import org.hibernate.persister.entity.SQLLoadable; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory;

/**
 * Implements Hibernate's built-in support for native SQL queries.
 * <p/>
 * This support is built on top of the notion of "custom queries"...
 * <p/>
 * Construction processes the declared query returns, substitutes alias/column
 * placeholders in the SQL, and records named-parameter bind points and query
 * spaces. (The superseded pre-refactor implementation that used to live here as
 * commented-out code has been removed; see VCS history.)
 *
 * @author Gavin King
 * @author Max Andersen
 * @author Steve Ebersole
 */
public class SQLCustomQuery implements CustomQuery {

	public static final Log log = LogFactory.getLog( SQLCustomQuery.class );

	// Final processed SQL with alias placeholders expanded.
	private final String sql;
	// Query spaces (tables) affected, used for cache invalidation.
	private final Set querySpaces = new HashSet();
	// Named parameter name -> bind position(s) in the processed SQL.
	private final Map namedParameterBindPoints = new HashMap();
	// Return descriptors (scalars, entities, collections) in result order.
	private final List customQueryReturns = new ArrayList();

	public String getSQL() {
		return sql;
	}

	public Set getQuerySpaces() {
		return querySpaces;
	}

	public Map getNamedParameterBindPoints() {
		return namedParameterBindPoints;
	}

	public List getCustomQueryReturns() {
		return customQueryReturns;
	}

	/**
	 * Builds a custom query from a native SQL string and its declared returns.
	 *
	 * @param sqlQuery the native SQL, possibly containing alias placeholders
	 * @param queryReturns the declared scalar/entity/collection returns
	 * @param additionalQuerySpaces extra query spaces to register, may be null
	 * @param factory the session factory
	 * @throws HibernateException on processing failure
	 */
	public SQLCustomQuery(
			final String sqlQuery,
	        final NativeSQLQueryReturn[] queryReturns,
	        final Collection additionalQuerySpaces,
	        final SessionFactoryImplementor factory) throws HibernateException {

		log.trace( "starting processing of sql query [" + sqlQuery + "]" );
		SQLQueryReturnProcessor processor = new SQLQueryReturnProcessor(queryReturns, factory);
		SQLQueryReturnProcessor.ResultAliasContext aliasContext = processor.process();

		// Expand alias/property placeholders and collect named parameter positions.
		SQLQueryParser parser = new SQLQueryParser( sqlQuery, new ParserContext( aliasContext ) );
		this.sql = parser.process();
		this.namedParameterBindPoints.putAll( parser.getNamedParameters() );

		// Return descriptors depend on whether the query used explicit aliases.
		customQueryReturns.addAll( processor.generateCustomReturns( parser.queryHasAliases() ) );

		if ( additionalQuerySpaces != null ) {
			querySpaces.addAll( additionalQuerySpaces );
		}
	}

	/**
	 * Adapts the processor's alias context to the parser's callback interface.
	 */
	private static class ParserContext implements SQLQueryParser.ParserContext {

		private final SQLQueryReturnProcessor.ResultAliasContext aliasContext;

		public ParserContext(SQLQueryReturnProcessor.ResultAliasContext aliasContext) {
			this.aliasContext = aliasContext;
		}

		public boolean isEntityAlias(String alias) {
			return getEntityPersisterByAlias( alias ) != null;
		}

		public SQLLoadable getEntityPersisterByAlias(String alias) {
			return aliasContext.getEntityPersister( alias );
		}

		public String getEntitySuffixByAlias(String alias) {
			return aliasContext.getEntitySuffix( alias );
		}

		public boolean isCollectionAlias(String alias) {
			return getCollectionPersisterByAlias( alias ) != null;
		}

		public SQLLoadableCollection getCollectionPersisterByAlias(String alias) {
			return aliasContext.getCollectionPersister( alias );
		}

		public String getCollectionSuffixByAlias(String alias) {
			return aliasContext.getCollectionSuffix( alias );
		}

		public Map getPropertyResultsMapByAlias(String alias) {
			return aliasContext.getPropertyResultsMap( alias );
		}
	}
}
apache-2.0
apache/incubator-shardingsphere
shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/distsql/exception/resource/ResourceDefinitionViolationException.java
1302
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.infra.distsql.exception.resource;

import org.apache.shardingsphere.infra.distsql.exception.DistSQLException;

/**
 * Resource definition violation exception.
 */
public abstract class ResourceDefinitionViolationException extends DistSQLException {

    private static final long serialVersionUID = -2686784350802985974L;

    /**
     * Constructs the exception with a DistSQL error code and human-readable reason,
     * both forwarded unchanged to {@link DistSQLException}.
     *
     * @param errorCode error code
     * @param reason reason of the violation
     */
    public ResourceDefinitionViolationException(final int errorCode, final String reason) {
        super(errorCode, reason);
    }
}
apache-2.0
tabish121/proton4j
protonj2-test-driver/src/main/java/org/apache/qpid/protonj2/test/driver/matchers/transport/BeginMatcher.java
6043
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.qpid.protonj2.test.driver.matchers.transport;

import static org.hamcrest.CoreMatchers.equalTo;

import java.util.Map;

import org.apache.qpid.protonj2.test.driver.codec.primitives.Symbol;
import org.apache.qpid.protonj2.test.driver.codec.primitives.UnsignedInteger;
import org.apache.qpid.protonj2.test.driver.codec.primitives.UnsignedShort;
import org.apache.qpid.protonj2.test.driver.codec.transport.Begin;
import org.apache.qpid.protonj2.test.driver.codec.util.TypeMapper;
import org.apache.qpid.protonj2.test.driver.matchers.ListDescribedTypeMatcher;
import org.hamcrest.Matcher;

/**
 * Hamcrest matcher for AMQP Begin performatives. Each {@code withX} overload
 * binds an expectation to the corresponding {@link Begin.Field}; the primitive
 * overloads are conveniences that wrap the value in {@code equalTo(...)} after
 * converting it to the AMQP wire type.
 */
public class BeginMatcher extends ListDescribedTypeMatcher {

    public BeginMatcher() {
        // Field count and descriptor identify the Begin list type being matched.
        super(Begin.Field.values().length, Begin.DESCRIPTOR_CODE, Begin.DESCRIPTOR_SYMBOL);
    }

    @Override
    protected Class<?> getDescribedTypeClass() {
        return Begin.class;
    }

    //----- Type specific with methods that perform simple equals checks

    public BeginMatcher withRemoteChannel(int remoteChannel) {
        // Channel numbers are unsigned shorts on the wire.
        return withRemoteChannel(equalTo(UnsignedShort.valueOf((short) remoteChannel)));
    }

    public BeginMatcher withRemoteChannel(UnsignedShort remoteChannel) {
        return withRemoteChannel(equalTo(remoteChannel));
    }

    public BeginMatcher withNextOutgoingId(int nextOutgoingId) {
        return withNextOutgoingId(equalTo(UnsignedInteger.valueOf(nextOutgoingId)));
    }

    public BeginMatcher withNextOutgoingId(long nextOutgoingId) {
        return withNextOutgoingId(equalTo(UnsignedInteger.valueOf(nextOutgoingId)));
    }

    public BeginMatcher withNextOutgoingId(UnsignedInteger nextOutgoingId) {
        return withNextOutgoingId(equalTo(nextOutgoingId));
    }

    public BeginMatcher withIncomingWindow(int incomingWindow) {
        return withIncomingWindow(equalTo(UnsignedInteger.valueOf(incomingWindow)));
    }

    public BeginMatcher withIncomingWindow(long incomingWindow) {
        return withIncomingWindow(equalTo(UnsignedInteger.valueOf(incomingWindow)));
    }

    public BeginMatcher withIncomingWindow(UnsignedInteger incomingWindow) {
        return withIncomingWindow(equalTo(incomingWindow));
    }

    public BeginMatcher withOutgoingWindow(int outgoingWindow) {
        return withOutgoingWindow(equalTo(UnsignedInteger.valueOf(outgoingWindow)));
    }

    public BeginMatcher withOutgoingWindow(long outgoingWindow) {
        return withOutgoingWindow(equalTo(UnsignedInteger.valueOf(outgoingWindow)));
    }

    public BeginMatcher withOutgoingWindow(UnsignedInteger outgoingWindow) {
        return withOutgoingWindow(equalTo(outgoingWindow));
    }

    public BeginMatcher withHandleMax(int handleMax) {
        return withHandleMax(equalTo(UnsignedInteger.valueOf(handleMax)));
    }

    public BeginMatcher withHandleMax(long handleMax) {
        return withHandleMax(equalTo(UnsignedInteger.valueOf(handleMax)));
    }

    public BeginMatcher withHandleMax(UnsignedInteger handleMax) {
        return withHandleMax(equalTo(handleMax));
    }

    public BeginMatcher withOfferedCapabilities(String... offeredCapabilities) {
        // Capabilities travel as Symbol arrays; convert from the String convenience form.
        return withOfferedCapabilities(equalTo(TypeMapper.toSymbolArray(offeredCapabilities)));
    }

    public BeginMatcher withOfferedCapabilities(Symbol... offeredCapabilities) {
        return withOfferedCapabilities(equalTo(offeredCapabilities));
    }

    public BeginMatcher withDesiredCapabilities(String... desiredCapabilities) {
        return withDesiredCapabilities(equalTo(TypeMapper.toSymbolArray(desiredCapabilities)));
    }

    public BeginMatcher withDesiredCapabilities(Symbol... desiredCapabilities) {
        return withDesiredCapabilities(equalTo(desiredCapabilities));
    }

    public BeginMatcher withPropertiesMap(Map<Symbol, Object> properties) {
        // Map is already Symbol-keyed; no conversion needed.
        return withProperties(equalTo(properties));
    }

    public BeginMatcher withProperties(Map<String, Object> properties) {
        // Convert String keys to Symbols to match the wire representation.
        return withProperties(equalTo(TypeMapper.toSymbolKeyedMap(properties)));
    }

    //----- Matcher based with methods for more complex validation

    public BeginMatcher withRemoteChannel(Matcher<?> m) {
        addFieldMatcher(Begin.Field.REMOTE_CHANNEL, m);
        return this;
    }

    public BeginMatcher withNextOutgoingId(Matcher<?> m) {
        addFieldMatcher(Begin.Field.NEXT_OUTGOING_ID, m);
        return this;
    }

    public BeginMatcher withIncomingWindow(Matcher<?> m) {
        addFieldMatcher(Begin.Field.INCOMING_WINDOW, m);
        return this;
    }

    public BeginMatcher withOutgoingWindow(Matcher<?> m) {
        addFieldMatcher(Begin.Field.OUTGOING_WINDOW, m);
        return this;
    }

    public BeginMatcher withHandleMax(Matcher<?> m) {
        addFieldMatcher(Begin.Field.HANDLE_MAX, m);
        return this;
    }

    public BeginMatcher withOfferedCapabilities(Matcher<?> m) {
        addFieldMatcher(Begin.Field.OFFERED_CAPABILITIES, m);
        return this;
    }

    public BeginMatcher withDesiredCapabilities(Matcher<?> m) {
        addFieldMatcher(Begin.Field.DESIRED_CAPABILITIES, m);
        return this;
    }

    public BeginMatcher withProperties(Matcher<?> m) {
        addFieldMatcher(Begin.Field.PROPERTIES, m);
        return this;
    }
}
apache-2.0
ef-labs/vertigo
core/src/main/java/net/kuujo/vertigo/network/builder/impl/OutputBuilderImpl.java
2317
/* * Copyright 2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.kuujo.vertigo.network.builder.impl; import io.vertx.core.json.JsonObject; import net.kuujo.vertigo.network.builder.OutputBuilder; import net.kuujo.vertigo.network.builder.OutputPortBuilder; /** * Output builder implementation. * * @author <a href="http://github.com/kuujo">Jordan Halterman</a> */ public class OutputBuilderImpl implements OutputBuilder { private final ComponentBuilderImpl component; public OutputBuilderImpl(ComponentBuilderImpl component) { this.component = component; } @Override public OutputPortBuilder port(String name) { return new OutputPortBuilderImpl(component, component.component.getOutput().addPort(name)); } @Override public OutputBuilder identifier(String identifier) { component.identifier(identifier); return this; } @Override public OutputBuilder config(JsonObject config) { component.config(config); return this; } @Override public OutputBuilder worker() { component.worker(); return this; } @Override public OutputBuilder worker(boolean worker) { component.worker(worker); return this; } @Override public OutputBuilder multiThreaded() { component.multiThreaded(); return this; } @Override public OutputBuilder multiThreaded(boolean multiThreaded) { component.multiThreaded(multiThreaded); return this; } @Override public OutputBuilder stateful() { component.stateful(); return this; } @Override public OutputBuilder stateful(boolean stateful) { 
component.stateful(stateful); return this; } @Override public OutputBuilder replicas(int replicas) { component.replicas(replicas); return this; } }
apache-2.0
IllusionRom-deprecated/android_platform_tools_idea
xml/impl/src/com/intellij/psi/formatter/xml/AbstractXmlBlock.java
18024
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.formatter.xml; import com.intellij.formatting.*; import com.intellij.lang.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.psi.formatter.WhiteSpaceFormattingStrategy; import com.intellij.psi.formatter.WhiteSpaceFormattingStrategyFactory; import com.intellij.psi.formatter.common.AbstractBlock; import com.intellij.psi.impl.source.SourceTreeToPsiMap; import com.intellij.psi.impl.source.tree.LeafElement; import com.intellij.psi.impl.source.tree.TreeElement; import com.intellij.psi.impl.source.tree.TreeUtil; import com.intellij.psi.search.PsiElementProcessor; import com.intellij.psi.templateLanguages.TemplateLanguageFileViewProvider; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; import com.intellij.psi.xml.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.List; public abstract class AbstractXmlBlock extends AbstractBlock { protected XmlFormattingPolicy myXmlFormattingPolicy; protected XmlInjectedLanguageBlockBuilder myInjectedBlockBuilder; private final boolean myPreserveSpace; protected AbstractXmlBlock(final ASTNode node, final Wrap wrap, final 
Alignment alignment, final XmlFormattingPolicy policy) { this(node, wrap, alignment, policy, false); } protected AbstractXmlBlock(final ASTNode node, final Wrap wrap, final Alignment alignment, final XmlFormattingPolicy policy, final boolean preserveSpace) { super(node, wrap, alignment); myXmlFormattingPolicy = policy; if (node.getTreeParent() == null) { myXmlFormattingPolicy.setRootBlock(node, this); } myInjectedBlockBuilder = new XmlInjectedLanguageBlockBuilder(myXmlFormattingPolicy); myPreserveSpace = shouldPreserveSpace(node, preserveSpace); } /** * Handles xml:space='preserve|default' attribute. * See <a href="http://www.w3.org/TR/2004/REC-xml-20040204/#sec-white-space">Extensible Markup Language (XML) 1.0 (Third Edition), * White Space Handling</a> * * @return True if the space must be preserved (xml:space='preserve'), false if the attribute * contains 'default'. If the attribute is not defined, return the current value. */ private static boolean shouldPreserveSpace(ASTNode node, boolean defaultValue) { if (node.getPsi() instanceof XmlTag) { XmlTag tag = (XmlTag)node.getPsi(); if (tag != null) { XmlAttribute spaceAttr = tag.getAttribute("xml:space"); if (spaceAttr != null) { String value = spaceAttr.getValue(); if ("preserve".equals(value)) { return true; } if ("default".equals(value)) { return false; } } } } return defaultValue; } public boolean isPreserveSpace() { return myPreserveSpace; } public static WrapType getWrapType(final int type) { if (type == CommonCodeStyleSettings.DO_NOT_WRAP) return WrapType.NONE; if (type == CommonCodeStyleSettings.WRAP_ALWAYS) return WrapType.ALWAYS; if (type == CommonCodeStyleSettings.WRAP_AS_NEEDED) return WrapType.NORMAL; return WrapType.CHOP_DOWN_IF_LONG; } protected Alignment chooseAlignment(final ASTNode child, final Alignment attrAlignment, final Alignment textAlignment) { if (myNode.getElementType() == XmlElementType.XML_TEXT) return getAlignment(); final IElementType elementType = child.getElementType(); if 
(elementType == XmlElementType.XML_ATTRIBUTE && myXmlFormattingPolicy.getShouldAlignAttributes()) return attrAlignment; if (elementType == XmlElementType.XML_TEXT && myXmlFormattingPolicy.getShouldAlignText()) return textAlignment; return null; } private Wrap getTagEndWrapping(final XmlTag parent) { return Wrap.createWrap(myXmlFormattingPolicy.getWrappingTypeForTagEnd(parent), true); } protected Wrap chooseWrap(final ASTNode child, final Wrap tagBeginWrap, final Wrap attrWrap, final Wrap textWrap) { if (myNode.getElementType() == XmlElementType.XML_TEXT) return textWrap; final IElementType elementType = child.getElementType(); if (elementType == XmlElementType.XML_ATTRIBUTE) return attrWrap; if (elementType == XmlTokenType.XML_START_TAG_START) return tagBeginWrap; if (elementType == XmlTokenType.XML_END_TAG_START) { final PsiElement parent = SourceTreeToPsiMap.treeElementToPsi(child.getTreeParent()); if (parent instanceof XmlTag) { final XmlTag tag = (XmlTag)parent; if (canWrapTagEnd(tag)) { return getTagEndWrapping(tag); } } return null; } if (elementType == XmlElementType.XML_TEXT || elementType == XmlTokenType.XML_DATA_CHARACTERS) return textWrap; return null; } protected boolean canWrapTagEnd(final XmlTag tag) { return tag.getSubTags().length > 0; } protected XmlTag getTag() { return getTag(myNode); } protected static XmlTag getTag(final ASTNode node) { final PsiElement element = SourceTreeToPsiMap.treeElementToPsi(node); if (element instanceof XmlTag) { return (XmlTag)element; } else { return null; } } protected Wrap createTagBeginWrapping(final XmlTag tag) { return Wrap.createWrap(myXmlFormattingPolicy.getWrappingTypeForTagBegin(tag), true); } @Nullable protected ASTNode processChild(List<Block> result, final ASTNode child, final Wrap wrap, final Alignment alignment, final Indent indent) { final Language myLanguage = myNode.getPsi().getLanguage(); final PsiElement childPsi = child.getPsi(); final Language childLanguage = childPsi.getLanguage(); if 
(useMyFormatter(myLanguage, childLanguage, childPsi)) { XmlTag tag = getAnotherTreeTag(child); if (tag != null && containsTag(tag) && doesNotIntersectSubTagsWith(tag)) { ASTNode currentChild = createAnotherTreeNode(result, child, tag, indent, wrap, alignment); if (currentChild == null) { return null; } while (currentChild != null && currentChild.getTreeParent() != myNode && currentChild.getTreeParent() != child.getTreeParent()) { currentChild = processAllChildrenFrom(result, currentChild, wrap, alignment, indent); if (currentChild != null && (currentChild.getTreeParent() == myNode || currentChild.getTreeParent() == child.getTreeParent())) { return currentChild; } if (currentChild != null) { currentChild = currentChild.getTreeParent(); } } return currentChild; } processSimpleChild(child, indent, result, wrap, alignment); return child; } else { myInjectedBlockBuilder.addInjectedLanguageBlockWrapper(result, child, indent, 0, null); return child; } } protected boolean doesNotIntersectSubTagsWith(final PsiElement tag) { final TextRange tagRange = tag.getTextRange(); final XmlTag[] subTags = getSubTags(); for (XmlTag subTag : subTags) { final TextRange subTagRange = subTag.getTextRange(); if (subTagRange.getEndOffset() < tagRange.getStartOffset()) continue; if (subTagRange.getStartOffset() > tagRange.getEndOffset()) return true; if (tagRange.getStartOffset() > subTagRange.getStartOffset() && tagRange.getEndOffset() < subTagRange.getEndOffset()) return false; if (tagRange.getEndOffset() > subTagRange.getStartOffset() && tagRange.getEndOffset() < subTagRange.getEndOffset()) return false; } return true; } private XmlTag[] getSubTags() { if (myNode instanceof XmlTag) { return ((XmlTag)myNode.getPsi()).getSubTags(); } else if (myNode.getPsi() instanceof XmlElement) { return collectSubTags((XmlElement)myNode.getPsi()); } else { return new XmlTag[0]; } } private static XmlTag[] collectSubTags(final XmlElement node) { final List<XmlTag> result = new ArrayList<XmlTag>(); 
node.processElements(new PsiElementProcessor() { public boolean execute(@NotNull final PsiElement element) { if (element instanceof XmlTag) { result.add((XmlTag)element); } return true; } }, node); return result.toArray(new XmlTag[result.size()]); } protected boolean containsTag(final PsiElement tag) { final ASTNode closingTagStart = XmlChildRole.CLOSING_TAG_START_FINDER.findChild(myNode); final ASTNode startTagStart = XmlChildRole.START_TAG_END_FINDER.findChild(myNode); if (closingTagStart == null && startTagStart == null) { return tag.getTextRange().getEndOffset() <= myNode.getTextRange().getEndOffset(); } else if (closingTagStart == null) { return false; } else { return tag.getTextRange().getEndOffset() <= closingTagStart.getTextRange().getEndOffset(); } } private ASTNode processAllChildrenFrom(final List<Block> result, @NotNull final ASTNode child, final Wrap wrap, final Alignment alignment, final Indent indent) { ASTNode resultNode = child; ASTNode currentChild = child.getTreeNext(); while (currentChild != null && currentChild.getElementType() != XmlTokenType.XML_END_TAG_START) { if (!containsWhiteSpacesOnly(currentChild)) { currentChild = processChild(result, currentChild, wrap, alignment, indent); resultNode = currentChild; } if (currentChild != null) { currentChild = currentChild.getTreeNext(); } } return resultNode; } protected void processSimpleChild(final ASTNode child, final Indent indent, final List<Block> result, final Wrap wrap, final Alignment alignment) { if (isXmlTag(child)) { result.add(createTagBlock(child, indent != null ? indent : Indent.getNoneIndent(), wrap, alignment)); } else if (child.getElementType() == XmlElementType.XML_DOCTYPE) { result.add( new XmlBlock(child, wrap, alignment, myXmlFormattingPolicy, indent, null, isPreserveSpace()) { protected Wrap getDefaultWrap(final ASTNode node) { final IElementType type = node.getElementType(); return type == XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN ? 
Wrap.createWrap(getWrapType(myXmlFormattingPolicy.getAttributesWrap()), false) : null; } } ); } else { result.add(createSimpleChild(child, indent, wrap, alignment)); } } protected XmlBlock createSimpleChild(final ASTNode child, final Indent indent, final Wrap wrap, final Alignment alignment) { return new XmlBlock(child, wrap, alignment, myXmlFormattingPolicy, indent, null, isPreserveSpace()); } protected XmlTagBlock createTagBlock(final ASTNode child, final Indent indent, final Wrap wrap, final Alignment alignment) { return new XmlTagBlock(child, wrap, alignment, myXmlFormattingPolicy, indent != null ? indent : Indent.getNoneIndent(), isPreserveSpace()); } @Nullable protected XmlTag findXmlTagAt(final ASTNode child, final int startOffset) { return null; } @Nullable protected ASTNode createAnotherTreeNode(final List<Block> result, final ASTNode child, PsiElement tag, final Indent indent, final Wrap wrap, final Alignment alignment) { return null; } @Nullable protected Block createAnotherTreeTagBlock(final PsiElement tag, final Indent childIndent) { return null; } protected XmlFormattingPolicy createPolicyFor() { return myXmlFormattingPolicy; } @Nullable protected XmlTag getAnotherTreeTag(final ASTNode child) { return null; } protected boolean isXmlTag(final ASTNode child) { return isXmlTag(child.getPsi()); } protected boolean isXmlTag(final PsiElement psi) { return psi instanceof XmlTag; } protected boolean useMyFormatter(final Language myLanguage, final Language childLanguage, final PsiElement childPsi) { if (myLanguage == childLanguage || childLanguage == StdFileTypes.HTML.getLanguage() || childLanguage == StdFileTypes.XHTML.getLanguage() || childLanguage == StdFileTypes.XML.getLanguage()) { return true; } final FormattingModelBuilder childFormatter = LanguageFormatting.INSTANCE.forLanguage(childLanguage); return childFormatter == null || childFormatter instanceof DelegatingFormattingModelBuilder && 
((DelegatingFormattingModelBuilder)childFormatter).dontFormatMyModel(); } protected boolean isJspxJavaContainingNode(final ASTNode child) { return false; } public abstract boolean insertLineBreakBeforeTag(); public abstract boolean removeLineBreakBeforeTag(); protected Spacing createDefaultSpace(boolean forceKeepLineBreaks, final boolean inText) { boolean shouldKeepLineBreaks = getShouldKeepLineBreaks(inText, forceKeepLineBreaks); return Spacing.createSpacing(0, Integer.MAX_VALUE, 0, shouldKeepLineBreaks, myXmlFormattingPolicy.getKeepBlankLines()); } private boolean getShouldKeepLineBreaks(final boolean inText, final boolean forceKeepLineBreaks) { if (forceKeepLineBreaks) { return true; } if (inText && myXmlFormattingPolicy.getShouldKeepLineBreaksInText()) { return true; } if (!inText && myXmlFormattingPolicy.getShouldKeepLineBreaks()) { return true; } return false; } public abstract boolean isTextElement(); private static final Logger LOG = Logger.getInstance("#com.intellij.psi.formatter.xml.AbstractXmlBlock"); protected void createJspTextNode(final List<Block> localResult, final ASTNode child, final Indent indent) { } @Nullable protected static ASTNode findChildAfter(@NotNull final ASTNode child, final int endOffset) { TreeElement fileNode = TreeUtil.getFileElement((TreeElement)child); final LeafElement leaf = fileNode.findLeafElementAt(endOffset); if (leaf != null && leaf.getStartOffset() == endOffset && endOffset > 0) { return fileNode.findLeafElementAt(endOffset - 1); } return leaf; } public boolean isLeaf() { return (isComment(myNode)) || myNode.getElementType() == TokenType.WHITE_SPACE || myNode.getElementType() == XmlTokenType.XML_DATA_CHARACTERS || myNode.getElementType() == XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN; } private static boolean isComment(final ASTNode node) { final PsiElement psiElement = SourceTreeToPsiMap.treeElementToPsi(node); if (psiElement instanceof PsiComment) return true; final ParserDefinition parserDefinition = 
LanguageParserDefinitions.INSTANCE.forLanguage(psiElement.getLanguage()); if (parserDefinition == null) return false; final TokenSet commentTokens = parserDefinition.getCommentTokens(); return commentTokens.contains(node.getElementType()); } public void setXmlFormattingPolicy(final XmlFormattingPolicy xmlFormattingPolicy) { myXmlFormattingPolicy = xmlFormattingPolicy; } protected boolean buildInjectedPsiBlocks(List<Block> result, final ASTNode child, Wrap wrap, Alignment alignment, Indent indent) { if (myInjectedBlockBuilder.addInjectedBlocks(result, child, wrap, alignment, indent)) { return true; } PsiFile containingFile = child.getPsi().getContainingFile(); FileViewProvider fileViewProvider = containingFile.getViewProvider(); if (fileViewProvider instanceof TemplateLanguageFileViewProvider) { Language templateLanguage = ((TemplateLanguageFileViewProvider)fileViewProvider).getTemplateDataLanguage(); PsiElement at = fileViewProvider.findElementAt(child.getStartOffset(), templateLanguage); if (at instanceof XmlToken) { at = at.getParent(); } // TODO: several comments if (at instanceof PsiComment && at.getTextRange().equals(child.getTextRange()) && at.getNode() != child) { return buildInjectedPsiBlocks(result, at.getNode(), wrap, alignment, indent); } } return false; } public boolean isCDATAStart() { return myNode.getElementType() == XmlTokenType.XML_CDATA_START; } public boolean isCDATAEnd() { return myNode.getElementType() == XmlTokenType.XML_CDATA_END; } public static boolean containsWhiteSpacesOnly(ASTNode node) { WhiteSpaceFormattingStrategy strategy = WhiteSpaceFormattingStrategyFactory.getStrategy(node.getPsi().getLanguage()); String nodeText = node.getText(); int length = nodeText.length(); return strategy.check(nodeText, 0, length) >= length; } }
apache-2.0
jexp/idea2
plugins/properties/src/com/intellij/lang/properties/psi/impl/PropertiesFileImpl.java
6819
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.lang.properties.psi.impl; import com.intellij.extapi.psi.PsiFileBase; import com.intellij.lang.ASTFactory; import com.intellij.lang.ASTNode; import com.intellij.lang.properties.PropertiesUtil; import com.intellij.lang.properties.ResourceBundle; import com.intellij.lang.properties.ResourceBundleImpl; import com.intellij.lang.properties.parsing.PropertiesElementTypes; import com.intellij.lang.properties.psi.PropertiesFile; import com.intellij.lang.properties.psi.Property; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.util.Computable; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.impl.source.tree.ChangeUtil; import com.intellij.psi.impl.source.tree.TreeElement; import com.intellij.psi.tree.TokenSet; import com.intellij.util.IncorrectOperationException; import com.intellij.util.SmartList; import gnu.trove.THashMap; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; public class PropertiesFileImpl extends PsiFileBase implements PropertiesFile { private static final TokenSet outPropertiesListSet = TokenSet.create(PropertiesElementTypes.PROPERTIES_LIST); private Map<String,List<Property>> 
myPropertiesMap; private List<Property> myProperties; public PropertiesFileImpl(FileViewProvider viewProvider) { super(viewProvider, StdFileTypes.PROPERTIES.getLanguage()); } @NotNull public FileType getFileType() { return StdFileTypes.PROPERTIES; } @NonNls public String toString() { return "Properties file:" + getName(); } @NotNull public List<Property> getProperties() { synchronized (PsiLock.LOCK) { ensurePropertiesLoaded(); return myProperties; } } private ASTNode getPropertiesList() { final ASTNode[] nodes = getNode().getChildren(outPropertiesListSet); return nodes.length > 0 ? nodes[0]:null; } private void ensurePropertiesLoaded() { if (myPropertiesMap != null) { return; } final ASTNode[] props = getPropertiesList().getChildren(PropertiesElementTypes.PROPERTIES); myPropertiesMap = new LinkedHashMap<String, List<Property>>(); myProperties = new ArrayList<Property>(props.length); for (final ASTNode prop : props) { final Property property = (Property)prop.getPsi(); String key = property.getUnescapedKey(); List<Property> list = myPropertiesMap.get(key); if (list == null) { list = new SmartList<Property>(); myPropertiesMap.put(key, list); } list.add(property); myProperties.add(property); } } public Property findPropertyByKey(@NotNull String key) { synchronized (PsiLock.LOCK) { ensurePropertiesLoaded(); List<Property> list = myPropertiesMap.get(key); return list == null ? null : list.get(0); } } @NotNull public List<Property> findPropertiesByKey(@NotNull String key) { synchronized (PsiLock.LOCK) { ensurePropertiesLoaded(); List<Property> list = myPropertiesMap.get(key); return list == null ? 
Collections.<Property>emptyList() : list; } } @NotNull public ResourceBundle getResourceBundle() { VirtualFile virtualFile = getVirtualFile(); if (!isValid() || virtualFile == null) { return ResourceBundleImpl.NULL; } String baseName = PropertiesUtil.getBaseName(virtualFile); PsiDirectory directory = ApplicationManager.getApplication().runReadAction(new Computable<PsiDirectory>() { @Nullable public PsiDirectory compute() { return getContainingFile().getContainingDirectory(); }}); if (directory == null) return ResourceBundleImpl.NULL; return new ResourceBundleImpl(directory.getVirtualFile(), baseName); } @NotNull public Locale getLocale() { return PropertiesUtil.getLocale(getVirtualFile()); } public PsiElement add(@NotNull PsiElement element) throws IncorrectOperationException { if (element instanceof Property) { throw new IncorrectOperationException("Use addProperty() instead"); } return super.add(element); } @NotNull public PsiElement addProperty(@NotNull Property property) throws IncorrectOperationException { if (haveToAddNewLine()) { insertLinebreakBefore(null); } final TreeElement copy = ChangeUtil.copyToElement(property); getPropertiesList().addChild(copy); return copy.getPsi(); } @NotNull public PsiElement addPropertyAfter(@NotNull final Property property, @Nullable final Property anchor) throws IncorrectOperationException { final TreeElement copy = ChangeUtil.copyToElement(property); List<Property> properties = getProperties(); ASTNode anchorBefore = anchor == null ? properties.isEmpty() ? 
null : properties.get(0).getNode() : anchor.getNode().getTreeNext(); if (anchorBefore != null) { if (anchorBefore.getElementType() == TokenType.WHITE_SPACE) { anchorBefore = anchorBefore.getTreeNext(); } } if (anchorBefore == null && haveToAddNewLine()) { insertLinebreakBefore(null); } getPropertiesList().addChild(copy, anchorBefore); if (anchorBefore != null) { insertLinebreakBefore(anchorBefore); } return copy.getPsi(); } private void insertLinebreakBefore(final ASTNode anchorBefore) { getPropertiesList().addChild(ASTFactory.whitespace("\n"), anchorBefore); } private boolean haveToAddNewLine() { ASTNode lastChild = getPropertiesList().getLastChildNode(); return lastChild != null && !lastChild.getText().endsWith("\n"); } @NotNull public Map<String, String> getNamesMap() { Map<String, String> result = new THashMap<String, String>(); for (Property property : getProperties()) { result.put(property.getUnescapedKey(), property.getValue()); } return result; } @Override public void clearCaches() { super.clearCaches(); synchronized (PsiLock.LOCK) { myPropertiesMap = null; myProperties = null; } } }
apache-2.0
qmwu2000/cat2
cat-core-message/src/main/java/org/unidal/cat/message/storage/BlockWriter.java
247
package org.unidal.cat.message.storage; import java.util.concurrent.BlockingQueue; import org.unidal.helper.Threads.Task; public interface BlockWriter extends Task { public void initialize(int hour, int index, BlockingQueue<Block> queue); }
apache-2.0
xorware/android_frameworks_base
packages/SystemUI/src/com/android/systemui/recents/events/activity/ToggleRecentsEvent.java
907
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.systemui.recents.events.activity; import com.android.systemui.recents.events.EventBus; /** * This is sent when the user taps on the Overview button to toggle the Recents activity. */ public class ToggleRecentsEvent extends EventBus.Event { // Simple event }
apache-2.0
gustavoanatoly/flink
flink-libraries/flink-table/src/test/java/org/apache/flink/table/api/java/utils/UserDefinedAggFunctions.java
3740
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.api.java.utils; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.functions.AggregateFunction; import java.util.Iterator; /** * Test aggregator functions. */ public class UserDefinedAggFunctions { /** * Accumulator for test requiresOver. */ public static class Accumulator0 extends Tuple2<Long, Integer>{} /** * Test for requiresOver. */ public static class OverAgg0 extends AggregateFunction<Long, Accumulator0> { @Override public Accumulator0 createAccumulator() { return new Accumulator0(); } @Override public Long getValue(Accumulator0 accumulator) { return 1L; } //Overloaded accumulate method public void accumulate(Accumulator0 accumulator, long iValue, int iWeight) { } @Override public boolean requiresOver() { return true; } } /** * Accumulator for WeightedAvg. */ public static class WeightedAvgAccum extends Tuple2<Long, Integer> { public long sum = 0; public int count = 0; } /** * Base class for WeightedAvg. 
*/ public static class WeightedAvg extends AggregateFunction<Long, WeightedAvgAccum> { @Override public WeightedAvgAccum createAccumulator() { return new WeightedAvgAccum(); } @Override public Long getValue(WeightedAvgAccum accumulator) { if (accumulator.count == 0) { return null; } else { return accumulator.sum / accumulator.count; } } // overloaded accumulate method public void accumulate(WeightedAvgAccum accumulator, long iValue, int iWeight) { accumulator.sum += iValue * iWeight; accumulator.count += iWeight; } //Overloaded accumulate method public void accumulate(WeightedAvgAccum accumulator, int iValue, int iWeight) { accumulator.sum += iValue * iWeight; accumulator.count += iWeight; } } /** * A WeightedAvg class with merge method. */ public static class WeightedAvgWithMerge extends WeightedAvg { public void merge(WeightedAvgAccum acc, Iterable<WeightedAvgAccum> it) { Iterator<WeightedAvgAccum> iter = it.iterator(); while (iter.hasNext()) { WeightedAvgAccum a = iter.next(); acc.count += a.count; acc.sum += a.sum; } } } /** * A WeightedAvg class with merge and reset method. */ public static class WeightedAvgWithMergeAndReset extends WeightedAvgWithMerge { public void resetAccumulator(WeightedAvgAccum acc) { acc.count = 0; acc.sum = 0L; } } /** * A WeightedAvg class with retract method. */ public static class WeightedAvgWithRetract extends WeightedAvg { //Overloaded retract method public void retract(WeightedAvgAccum accumulator, long iValue, int iWeight) { accumulator.sum -= iValue * iWeight; accumulator.count -= iWeight; } //Overloaded retract method public void retract(WeightedAvgAccum accumulator, int iValue, int iWeight) { accumulator.sum -= iValue * iWeight; accumulator.count -= iWeight; } } }
apache-2.0
LAX1DUDE/JDA
src/main/java/net/dv8tion/jda/core/events/package-info.java
1369
/* * Copyright 2015-2017 Austin Keener & Michael Ritter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * The core events that are fired by this library, informing * the end-user about the connection state of the current JDA instance. * * <p>This package contains all implementations of {@link net.dv8tion.jda.core.events.Event Event}. * <br>These are specific depending on the event that has been received by the {@link net.dv8tion.jda.core.requests.WebSocketClient WebSocketClient} * * <p>All events are forwarded by an {@link net.dv8tion.jda.core.hooks.IEventManager IEventManager} implementation. * <br>Some events are specific for JDA internal events such as the {@link net.dv8tion.jda.core.events.ReadyEvent ReadyEvent} * which is only fired when JDA finishes to setup its internal cache. */ package net.dv8tion.jda.core.events;
apache-2.0
jonasrk/rheem
rheem-basic/src/main/java/org/qcri/rheem/basic/mapping/Mappings.java
544
package org.qcri.rheem.basic.mapping;

import org.qcri.rheem.core.mapping.Mapping;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

/**
 * Register for the components provided in the basic plugin.
 *
 * <p>This is a constant-holder class: it exposes the {@link Mapping}s contributed by the
 * basic plugin as immutable registries.</p>
 */
public class Mappings {

    /**
     * {@link Mapping}s for the basic operators.
     * Exposed as an unmodifiable collection so clients cannot tamper with the registry.
     */
    public static final Collection<Mapping> BASIC_MAPPINGS = Collections.unmodifiableList(
            Arrays.<Mapping>asList(
                    new ReduceByMapping(),
                    new MaterializedGroupByMapping(),
                    new GlobalReduceMapping()
            )
    );

    /**
     * {@link Mapping}s for the graph operators.
     */
    public static final Collection<Mapping> GRAPH_MAPPINGS = Collections.unmodifiableList(
            Arrays.<Mapping>asList(
                    new PageRankMapping()
            )
    );

    /**
     * Constant-holder class; not meant to be instantiated.
     */
    private Mappings() {
    }
}
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-dlp/v2/1.31.0/com/google/api/services/dlp/v2/model/GooglePrivacyDlpV2PrivacyMetric.java
5750
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.dlp.v2.model;

/**
 * Privacy metric to compute for reidentification risk analysis.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Cloud Data Loss Prevention (DLP) API. For a detailed
 * explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p>NOTE(review): this file is generated — regenerate rather than hand-edit. The six
 * {@code *Config} fields appear to be alternative metric choices (presumably a proto
 * {@code oneof}, i.e. set exactly one per request) — confirm against the DLP API reference.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GooglePrivacyDlpV2PrivacyMetric extends com.google.api.client.json.GenericJson {

  /**
   * Categorical stats
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GooglePrivacyDlpV2CategoricalStatsConfig categoricalStatsConfig;

  /**
   * delta-presence
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GooglePrivacyDlpV2DeltaPresenceEstimationConfig deltaPresenceEstimationConfig;

  /**
   * K-anonymity
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GooglePrivacyDlpV2KAnonymityConfig kAnonymityConfig;

  /**
   * k-map
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GooglePrivacyDlpV2KMapEstimationConfig kMapEstimationConfig;

  /**
   * l-diversity
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GooglePrivacyDlpV2LDiversityConfig lDiversityConfig;

  /**
   * Numerical stats
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GooglePrivacyDlpV2NumericalStatsConfig numericalStatsConfig;

  /**
   * Categorical stats
   * @return value or {@code null} for none
   */
  public GooglePrivacyDlpV2CategoricalStatsConfig getCategoricalStatsConfig() {
    return categoricalStatsConfig;
  }

  /**
   * Categorical stats
   * @param categoricalStatsConfig categoricalStatsConfig or {@code null} for none
   */
  public GooglePrivacyDlpV2PrivacyMetric setCategoricalStatsConfig(GooglePrivacyDlpV2CategoricalStatsConfig categoricalStatsConfig) {
    this.categoricalStatsConfig = categoricalStatsConfig;
    return this;
  }

  /**
   * delta-presence
   * @return value or {@code null} for none
   */
  public GooglePrivacyDlpV2DeltaPresenceEstimationConfig getDeltaPresenceEstimationConfig() {
    return deltaPresenceEstimationConfig;
  }

  /**
   * delta-presence
   * @param deltaPresenceEstimationConfig deltaPresenceEstimationConfig or {@code null} for none
   */
  public GooglePrivacyDlpV2PrivacyMetric setDeltaPresenceEstimationConfig(GooglePrivacyDlpV2DeltaPresenceEstimationConfig deltaPresenceEstimationConfig) {
    this.deltaPresenceEstimationConfig = deltaPresenceEstimationConfig;
    return this;
  }

  /**
   * K-anonymity
   * @return value or {@code null} for none
   */
  public GooglePrivacyDlpV2KAnonymityConfig getKAnonymityConfig() {
    return kAnonymityConfig;
  }

  /**
   * K-anonymity
   * @param kAnonymityConfig kAnonymityConfig or {@code null} for none
   */
  public GooglePrivacyDlpV2PrivacyMetric setKAnonymityConfig(GooglePrivacyDlpV2KAnonymityConfig kAnonymityConfig) {
    this.kAnonymityConfig = kAnonymityConfig;
    return this;
  }

  /**
   * k-map
   * @return value or {@code null} for none
   */
  public GooglePrivacyDlpV2KMapEstimationConfig getKMapEstimationConfig() {
    return kMapEstimationConfig;
  }

  /**
   * k-map
   * @param kMapEstimationConfig kMapEstimationConfig or {@code null} for none
   */
  public GooglePrivacyDlpV2PrivacyMetric setKMapEstimationConfig(GooglePrivacyDlpV2KMapEstimationConfig kMapEstimationConfig) {
    this.kMapEstimationConfig = kMapEstimationConfig;
    return this;
  }

  /**
   * l-diversity
   * @return value or {@code null} for none
   */
  public GooglePrivacyDlpV2LDiversityConfig getLDiversityConfig() {
    return lDiversityConfig;
  }

  /**
   * l-diversity
   * @param lDiversityConfig lDiversityConfig or {@code null} for none
   */
  public GooglePrivacyDlpV2PrivacyMetric setLDiversityConfig(GooglePrivacyDlpV2LDiversityConfig lDiversityConfig) {
    this.lDiversityConfig = lDiversityConfig;
    return this;
  }

  /**
   * Numerical stats
   * @return value or {@code null} for none
   */
  public GooglePrivacyDlpV2NumericalStatsConfig getNumericalStatsConfig() {
    return numericalStatsConfig;
  }

  /**
   * Numerical stats
   * @param numericalStatsConfig numericalStatsConfig or {@code null} for none
   */
  public GooglePrivacyDlpV2PrivacyMetric setNumericalStatsConfig(GooglePrivacyDlpV2NumericalStatsConfig numericalStatsConfig) {
    this.numericalStatsConfig = numericalStatsConfig;
    return this;
  }

  @Override
  public GooglePrivacyDlpV2PrivacyMetric set(String fieldName, Object value) {
    return (GooglePrivacyDlpV2PrivacyMetric) super.set(fieldName, value);
  }

  @Override
  public GooglePrivacyDlpV2PrivacyMetric clone() {
    return (GooglePrivacyDlpV2PrivacyMetric) super.clone();
  }

}
apache-2.0
KalicyZhou/incubator-weex
android/sdk/src/main/java/com/taobao/weex/appfram/storage/WXSQLiteOpenHelper.java
7541
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.taobao.weex.appfram.storage;

import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;

import com.taobao.weex.utils.WXLogUtils;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

/**
 * {@link SQLiteOpenHelper} backing Weex's key/value storage module.
 *
 * <p>Maintains a single table ({@code default_wx_storage}) with columns for key, value,
 * last-touch timestamp and a persistent flag. Handles the v1 -> v2 schema migration and
 * retries database opening with a delete-and-recreate fallback.</p>
 */
public class WXSQLiteOpenHelper extends SQLiteOpenHelper {

    private static final String DATABASE_NAME = "WXStorage";
    private static final int DATABASE_VERSION = 2;

    static final String TAG_STORAGE = "weex_storage";

    // Upper bound for the database file size; 5 * 10 * 1024 * 1024 = 50 MB.
    private long mMaximumDatabaseSize = 5 * 10 * 1024 * 1024L;//50mb

    // NOTE(review): SimpleDateFormat is not thread-safe; this shared static instance is safe
    // only if every caller uses it from a single thread — confirm the call sites.
    static SimpleDateFormat sDateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.getDefault());

    private Context mContext;
    // Cached writable database; lazily (re)opened by ensureDatabase().
    private SQLiteDatabase mDb;

    static final String TABLE_STORAGE = "default_wx_storage";
    static final String COLUMN_KEY = "key";
    static final String COLUMN_VALUE = "value";
    static final String COLUMN_TIMESTAMP = "timestamp";
    static final String COLUMN_PERSISTENT = "persistent";

    // Pause between open retries in ensureDatabase().
    private static final int SLEEP_TIME_MS = 30;

    private static final String STATEMENT_CREATE_TABLE = "CREATE TABLE IF NOT EXISTS "
            + TABLE_STORAGE
            + " ("
            + COLUMN_KEY
            + " TEXT PRIMARY KEY,"
            + COLUMN_VALUE
            + " TEXT NOT NULL,"
            + COLUMN_TIMESTAMP
            + " TEXT NOT NULL,"
            + COLUMN_PERSISTENT
            + " INTEGER DEFAULT 0"
            + ")";

    public WXSQLiteOpenHelper(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
        this.mContext = context;
    }

    /**
     * retrieve sqlite database
     *
     * @return a {@link SQLiteDatabase} instance or null if retrieve fails.
     * */
    public
    @Nullable
    SQLiteDatabase getDatabase() {
        ensureDatabase();
        return mDb;
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(STATEMENT_CREATE_TABLE);
    }

    /**
     * Migrates the storage table between schema versions.
     *
     * version 1:
     *
     * ----------------
     * | key | value |
     * ---------------
     *
     * version 2:
     *
     * ----------------------------------------
     * | key | value | timestamp | persistent |
     * ----------------------------------------
     *
     * <p>The 1 -> 2 upgrade adds the timestamp/persistent columns inside a transaction and
     * backfills them ("now" / 0). If the upgrade fails, or for any other version pair, the
     * database is dropped and recreated (all data lost).</p>
     **/
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        if (oldVersion != newVersion) {
            if(newVersion == 2 && oldVersion == 1){
                WXLogUtils.d(TAG_STORAGE,"storage is updating from version "+oldVersion+" to version "+newVersion);
                boolean updateResult = true;
                try {
                    long start = System.currentTimeMillis();
                    db.beginTransaction();
                    // update table structure
                    String SQL_ADD_COLUMN_TIMESTAMP = "ALTER TABLE "+TABLE_STORAGE+" ADD COLUMN "+COLUMN_TIMESTAMP+" TEXT;";
                    WXLogUtils.d(TAG_STORAGE,"exec sql : "+ SQL_ADD_COLUMN_TIMESTAMP);
                    db.execSQL(SQL_ADD_COLUMN_TIMESTAMP);

                    String SQL_ADD_COLUMN_PERSISTENT = "ALTER TABLE "+TABLE_STORAGE+" ADD COLUMN "+COLUMN_PERSISTENT+" INTEGER;";
                    WXLogUtils.d(TAG_STORAGE,"exec sql : "+ SQL_ADD_COLUMN_PERSISTENT);
                    db.execSQL(SQL_ADD_COLUMN_PERSISTENT);

                    // update timestamp & persistent
                    String SQL_UPDATE_TABLE = "UPDATE "+TABLE_STORAGE+" SET "+ COLUMN_TIMESTAMP+" = '"+sDateFormatter.format(new Date())+"' , "+ COLUMN_PERSISTENT +" = 0";
                    WXLogUtils.d(TAG_STORAGE,"exec sql : "+ SQL_UPDATE_TABLE);
                    db.execSQL(SQL_UPDATE_TABLE);

                    db.setTransactionSuccessful();
                    long time = System.currentTimeMillis() - start;
                    WXLogUtils.d(TAG_STORAGE,"storage updated success ("+time+"ms)");
                }catch (Exception e){
                    WXLogUtils.d(TAG_STORAGE,"storage updated failed from version "+oldVersion+" to version "+newVersion+","+e.getMessage());
                    updateResult = false;
                }finally {
                    db.endTransaction();
                }
                //rollback
                if(!updateResult){
                    WXLogUtils.d(TAG_STORAGE,"storage is rollback,all data will be removed");
                    deleteDB();
                    onCreate(db);
                }
            }else{
                // Unknown version jump: drop everything and start fresh.
                deleteDB();
                onCreate(db);
            }
        }
    }

    // Lazily opens the writable database, retrying once after deleting a corrupt file.
    synchronized void ensureDatabase() {
        if (mDb != null && mDb.isOpen()) {
            return;
        }
        // Sometimes retrieving the database fails. We do 2 retries: first without database deletion
        // and then with deletion.
        for (int tries = 0; tries < 2; tries++) {
            try {
                if (tries > 0) {
                    //delete db and recreate
                    deleteDB();
                }
                mDb = getWritableDatabase();
                break;
            } catch (SQLiteException e) {
                e.printStackTrace();
            }
            // Wait before retrying.
            try {
                Thread.sleep(SLEEP_TIME_MS);
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
            }
        }
        if(mDb == null){
            return;
        }

        createTableIfNotExists(mDb);

        mDb.setMaximumSize(mMaximumDatabaseSize);
    }

    // Adjusts the database size cap; applies immediately if the database is already open.
    public synchronized void setMaximumSize(long size) {
        mMaximumDatabaseSize = size;
        if (mDb != null) {
            mDb.setMaximumSize(mMaximumDatabaseSize);
        }
    }

    // Closes and deletes the database file; returns true on successful deletion.
    private boolean deleteDB() {
        closeDatabase();
        return mContext.deleteDatabase(DATABASE_NAME);
    }

    public synchronized void closeDatabase() {
        if (mDb != null && mDb.isOpen()) {
            mDb.close();
            mDb = null;
        }
    }

    // Creates the storage table if it is missing (checked via sqlite_master).
    private void createTableIfNotExists(@NonNull SQLiteDatabase db) {
        Cursor cursor = null;
        try {
            cursor = db.rawQuery("SELECT DISTINCT tbl_name FROM sqlite_master WHERE tbl_name = '"+TABLE_STORAGE+"'", null);
            if(cursor != null && cursor.getCount() > 0) {
                return;
            }
            db.execSQL(STATEMENT_CREATE_TABLE);
        }catch (Exception e){
            e.printStackTrace();
        }finally {
            if(cursor != null){
                cursor.close();
            }
        }
    }
}
apache-2.0
scnakandala/derby
java/client/org/apache/derby/client/net/NetConfiguration.java
6710
/*

   Derby - Class org.apache.derby.client.net.NetConfiguration

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

*/
package org.apache.derby.client.net;

import org.apache.derby.iapi.reference.DRDAConstants;
import org.apache.derby.client.am.Version;

/**
 * DRDA/DDM protocol constants used by the Derby network client: manager levels,
 * fixed field lengths, security mechanism codes, and the product identifier
 * (PRDID/SRVRLSLV) derived from the driver version at class-load time.
 */
public class NetConfiguration {
    // ---------------------------------------------------------------------------

    // Value to use when padding non-character data in ddm objects.
    static final byte NON_CHAR_DDM_DATA_PAD_BYTE = 0x00;

    // Maximum size of External Name.
    static final int EXTNAM_MAXSIZE = 255;

    // Minimum agent level required by protocol.
    static final int MIN_AGENT_MGRLVL = 3;

    // Minimum communications tcpip manager level required by protocol.
    static final int MIN_CMNTCPIP_MGRLVL = 5;

    // Minimum LU6.2 Conversational Communications Manager
    static final int MIN_CMNAPPC_MGRLVL = 3;

    // Minimum rdb manager level required by protocol.
    static final int MIN_RDB_MGRLVL = 3;

    // Minimum secmgr manager level required by protocol.
    static final int MIN_SECMGR_MGRLVL = 5;

    // Minimum sqlam manager level required by protocol.
    static final int MIN_SQLAM_MGRLVL = 4;

    // Minimum xa manager level required by protocol.
    static final int MIN_XAMGR_MGRLVL = 7;

    // Minimum syncpoint manager level required by protocol.
    static final int MIN_SYNCPTMGR_MGRLVL = 5;

    // Minimum resync manager level required by protocol.
    static final int MIN_RSYNCMGR_MGRLVL = 5;

    // Minimum unicodemgr manager level required by protocol
    static final int MIN_UNICODE_MGRLVL = 0;

    // Maximum Password size.
    static final int PASSWORD_MAXSIZE = 255;

    // Fixed PRDDTA application id fixed length.
    static final int PRDDTA_APPL_ID_FIXED_LEN = 20;

    // PRDDTA Accounting Suffix Length byte offset.
    static final int PRDDTA_ACCT_SUFFIX_LEN_BYTE = 55;

    // PRDDTA Length byte offset.
    static final int PRDDTA_LEN_BYTE = 0;

    // Maximum PRDDTA size.
    static final int PRDDTA_MAXSIZE = 255;

    // PRDDTA platform id.
    static final String PRDDTA_PLATFORM_ID = "JVM ";

    // Fixed PRDDTA user id fixed length.
    static final int PRDDTA_USER_ID_FIXED_LEN = 8;

    // Identifier Length for fixed length rdb name
    static final int PKG_IDENTIFIER_FIXED_LEN = 18;

    // Maximum RDBNAM Identifier Length
    // this used to be 255 prior to DERBY-4805 fix
    static final int RDBNAM_MAX_LEN = 1024;

    // Maximum RDB Identifier Length
    static final int PKG_IDENTIFIER_MAX_LEN = 255;

    // Fixed pkgcnstkn length
    static final int PKGCNSTKN_FIXED_LEN = 8;

    // Maximum length of a security token.
    // Anything greater than 32763 bytes of SECTKN would require extended length DDMs.
    // This seems like an impossible upper bound limit right now so set
    // max to 32763 and cross bridge later.
    static final int SECTKN_MAXSIZE = 32763;  // this was 255

    // Server class name of the ClientDNC product.
    static final String SRVCLSNM_JVM = "QDERBY/JVM";

    // Maximum size of SRVNAM Name.
    static final int SRVNAM_MAXSIZE = 255;

    // Manager is NA or not used.
    static final int MGRLVL_NA = 0;

    // Manager Level 5 constant.
    static final int MGRLVL_5 = 0x05;

    // Manager Level 7 constant.
    static final int MGRLVL_7 = 0x07;

    // Indicates userid/encrypted password security mechanism.
    public static final int SECMEC_EUSRIDPWD = 0x09;

    // Indicates userid only security mechanism.
    public static final int SECMEC_USRIDONL = 0x04;

    // Indicates userid/encrypted password security mechanism.
    public static final int SECMEC_USRENCPWD = 0x07;

    // Indicates userid/password security mechanism.
    public static final int SECMEC_USRIDPWD = 0x03;

    //Indicates Encrypted userid and Encrypted Security-sensitive Data security mechanism
    public static final int SECMEC_EUSRIDDTA = 0x0C;

    //Indicates Encrypted userid,Encrypted password and Encrypted Security-sensitive Data security mechanism
    public static final int SECMEC_EUSRPWDDTA = 0x0D;

    // Indicates userid with strong password substitute security mechanism.
    public static final int SECMEC_USRSSBPWD = 0x08;

    // list of security mechanisms supported by this driver
    static final int[] SECMGR_SECMECS = {NetConfiguration.SECMEC_EUSRIDPWD,
                                         NetConfiguration.SECMEC_USRENCPWD,
                                         NetConfiguration.SECMEC_USRIDPWD,
                                         NetConfiguration.SECMEC_USRIDONL,
                                         NetConfiguration.SECMEC_EUSRIDDTA,
                                         NetConfiguration.SECMEC_EUSRPWDDTA,
                                         NetConfiguration.SECMEC_USRSSBPWD};

    // IEEE ASCII constant.
    static final String SYSTEM_ASC = "QTDSQLASC";

    // Maximum size of User Name.
    static final int USRID_MAXSIZE = 255;

    // Product id of the ClientDNC.
    static final String PRDID;

    // The server release level of this product.
    // It will be prefixed with PRDID
    static final String SRVRLSLV;

    // Initialize PRDID and SRVRLSLV
    static {
        int majorVersion = Version.getMajorVersion();
        int minorVersion = Version.getMinorVersion();
        int protocolMaintVersion = Version.getProtocolMaintVersion();

        // PRDID format as Network Server expects it: DNCMMmx
        // MM = major version
        // mm = minor version
        // x = protocol MaintenanceVersion
        String prdId = DRDAConstants.DERBY_DRDA_CLIENT_ID;
        if (majorVersion < 10) {
            prdId += "0";
        }
        prdId += majorVersion;
        if (minorVersion < 10) {
            prdId += "0";
        }
        prdId += minorVersion;
        prdId += protocolMaintVersion;
        PRDID = prdId;
        SRVRLSLV = prdId + "/" + Version.getDriverVersion();
    }
}
apache-2.0
google/error-prone
core/src/main/java/com/google/errorprone/bugpatterns/nullness/NullnessUtils.java
22525
/* * Copyright 2017 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns.nullness; import static com.google.common.collect.Lists.reverse; import static com.google.errorprone.bugpatterns.nullness.NullnessUtils.NullCheck.Polarity.IS_NOT_NULL; import static com.google.errorprone.bugpatterns.nullness.NullnessUtils.NullCheck.Polarity.IS_NULL; import static com.google.errorprone.bugpatterns.nullness.NullnessUtils.NullableAnnotationToUse.annotationToBeImported; import static com.google.errorprone.bugpatterns.nullness.NullnessUtils.NullableAnnotationToUse.annotationWithoutImporting; import static com.google.errorprone.fixes.SuggestedFix.emptyFix; import static com.google.errorprone.matchers.Matchers.instanceMethod; import static com.google.errorprone.suppliers.Suppliers.JAVA_LANG_VOID_TYPE; import static com.google.errorprone.util.ASTHelpers.getSymbol; import static com.google.errorprone.util.ASTHelpers.getType; import static com.google.errorprone.util.ASTHelpers.stripParentheses; import static com.sun.source.tree.Tree.Kind.ARRAY_TYPE; import static com.sun.source.tree.Tree.Kind.IDENTIFIER; import static com.sun.source.tree.Tree.Kind.NULL_LITERAL; import static com.sun.source.tree.Tree.Kind.PARAMETERIZED_TYPE; import static com.sun.tools.javac.parser.Tokens.TokenKind.DOT; import com.google.common.collect.ImmutableSet; import com.google.errorprone.ErrorProneFlags; import com.google.errorprone.VisitorState; import 
com.google.errorprone.bugpatterns.nullness.NullnessUtils.NullCheck.Polarity; import com.google.errorprone.fixes.SuggestedFix; import com.google.errorprone.fixes.SuggestedFixes; import com.google.errorprone.matchers.Matcher; import com.google.errorprone.util.FindIdentifiers; import com.sun.source.tree.AnnotatedTypeTree; import com.sun.source.tree.ArrayTypeTree; import com.sun.source.tree.AssignmentTree; import com.sun.source.tree.BinaryTree; import com.sun.source.tree.BlockTree; import com.sun.source.tree.ConditionalExpressionTree; import com.sun.source.tree.ExpressionTree; import com.sun.source.tree.IdentifierTree; import com.sun.source.tree.IfTree; import com.sun.source.tree.MethodInvocationTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.ParameterizedTypeTree; import com.sun.source.tree.ParenthesizedTree; import com.sun.source.tree.StatementTree; import com.sun.source.tree.Tree; import com.sun.source.tree.TypeCastTree; import com.sun.source.tree.VariableTree; import com.sun.source.util.SimpleTreeVisitor; import com.sun.source.util.TreePath; import com.sun.source.util.Trees; import com.sun.tools.javac.code.Kinds.KindSelector; import com.sun.tools.javac.code.Symbol; import com.sun.tools.javac.code.Symbol.ClassSymbol; import com.sun.tools.javac.code.Symbol.VarSymbol; import com.sun.tools.javac.code.Type; import com.sun.tools.javac.processing.JavacProcessingEnvironment; import java.util.Set; import javax.annotation.Nullable; import javax.lang.model.element.Name; /** * Static utility methods for common functionality in the nullable checkers. 
* * @author awturner@google.com (Andy Turner) */ class NullnessUtils { private NullnessUtils() {} private static final Matcher<ExpressionTree> OPTIONAL_OR_NULL = instanceMethod().onDescendantOf("com.google.common.base.Optional").named("orNull"); private static final Matcher<ExpressionTree> OPTIONAL_OR_ELSE = instanceMethod().onDescendantOf("java.util.Optional").named("orElse"); /** * Returns a {@link SuggestedFix} to add a {@code Nullable} annotation to the given method's * return type. */ static SuggestedFix fixByAddingNullableAnnotationToReturnType( VisitorState state, MethodTree method) { return fixByAddingNullableAnnotationToElementOrType( state, method, method.getReturnType(), "nullness:return"); } /** * Returns a {@link SuggestedFix} to add a {@code Nullable} annotation to the given variable's * type. */ static SuggestedFix fixByAddingNullableAnnotationToType( VisitorState state, VariableTree variable) { return fixByAddingNullableAnnotationToElementOrType( state, variable, variable.getType(), /* suppressionToRemove= */ null); } private static SuggestedFix fixByAddingNullableAnnotationToElementOrType( VisitorState state, Tree elementTree, Tree typeTree, @Nullable String suppressionToRemove) { NullableAnnotationToUse nullableAnnotationToUse = pickNullableAnnotation(state); switch (applyOnlyIfAlreadyInScope(state)) { case TRUE: if (!nullableAnnotationToUse.isAlreadyInScope()) { return emptyFix(); } break; case IF_NOT: if (nullableAnnotationToUse.isAlreadyInScope()) { return emptyFix(); } break; default: break; } if (!nullableAnnotationToUse.isTypeUse()) { return nullableAnnotationToUse.fixPrefixingOnto(elementTree, state, suppressionToRemove); } return fixByAddingKnownTypeUseNullableAnnotation( state, typeTree, nullableAnnotationToUse, suppressionToRemove); } /** * Returns a {@link SuggestedFix} to add a <b>type-use</b> {@code Nullable} annotation to the * given tree. 
The tree should be a "type-use-only" location, like a type argument or a bounds of * a type parameter or wildcard. Prefer to use {@link #fixByAddingNullableAnnotationToReturnType} * and {@link #fixByAddingNullableAnnotationToType} instead of this method when applicable. */ static SuggestedFix fixByAnnotatingTypeUseOnlyLocationWithNullableAnnotation( VisitorState state, Tree typeTree) { NullableAnnotationToUse nullableAnnotationToUse = pickNullableAnnotation(state); if (!nullableAnnotationToUse.isTypeUse()) { return emptyFix(); } return fixByAddingKnownTypeUseNullableAnnotation( state, typeTree, nullableAnnotationToUse, /* suppressionToRemove= */ null); } private static SuggestedFix fixByAddingKnownTypeUseNullableAnnotation( VisitorState state, Tree typeTree, NullableAnnotationToUse nullableAnnotationToUse, @Nullable String suppressionToRemove) { if (typeTree.getKind() == PARAMETERIZED_TYPE) { typeTree = ((ParameterizedTypeTree) typeTree).getType(); } switch (typeTree.getKind()) { case ARRAY_TYPE: Tree beforeBrackets; for (beforeBrackets = typeTree; beforeBrackets.getKind() == ARRAY_TYPE; beforeBrackets = ((ArrayTypeTree) beforeBrackets).getType()) {} // For an explanation of "int @Foo [][] f," etc., see JLS 4.11. return nullableAnnotationToUse.fixPostfixingOnto( beforeBrackets, state, suppressionToRemove); case MEMBER_SELECT: int lastDot = reverse(state.getOffsetTokensForNode(typeTree)).stream() .filter(t -> t.kind() == DOT) .findFirst() .get() .pos(); return nullableAnnotationToUse.fixPostfixingOnto(lastDot, state, suppressionToRemove); case ANNOTATED_TYPE: return nullableAnnotationToUse.fixPrefixingOnto( ((AnnotatedTypeTree) typeTree).getAnnotations().get(0), state, suppressionToRemove); case IDENTIFIER: return nullableAnnotationToUse.fixPrefixingOnto(typeTree, state, suppressionToRemove); default: throw new AssertionError( "unexpected kind for type tree: " + typeTree.getKind() + " for " + typeTree); } // TODO(cpovirk): Remove any @NonNull, etc. 
annotation that is present? } @com.google.auto.value.AutoValue // fully qualified to work around JDK-7177813(?) in JDK8 build abstract static class NullableAnnotationToUse { static NullableAnnotationToUse annotationToBeImported(String qualifiedName, boolean isTypeUse) { return new AutoValue_NullnessUtils_NullableAnnotationToUse( qualifiedName, qualifiedName.replaceFirst(".*[.]", ""), isTypeUse, /*isAlreadyInScope=*/ false); } static NullableAnnotationToUse annotationWithoutImporting( String name, boolean isTypeUse, boolean isAlreadyInScope) { return new AutoValue_NullnessUtils_NullableAnnotationToUse( null, name, isTypeUse, isAlreadyInScope); } /** * Returns a {@link SuggestedFix} to add a {@code Nullable} annotation after the given position. */ final SuggestedFix fixPostfixingOnto( int position, VisitorState state, @Nullable String suppressionToRemove) { return prepareBuilder(state, suppressionToRemove) .replace(position + 1, position + 1, " @" + use() + " ") .build(); } /** Returns a {@link SuggestedFix} to add a {@code Nullable} annotation after the given tree. */ final SuggestedFix fixPostfixingOnto( Tree tree, VisitorState state, @Nullable String suppressionToRemove) { return prepareBuilder(state, suppressionToRemove) .postfixWith(tree, " @" + use() + " ") .build(); } /** * Returns a {@link SuggestedFix} to add a {@code Nullable} annotation before the given tree. 
*/ final SuggestedFix fixPrefixingOnto( Tree tree, VisitorState state, @Nullable String suppressionToRemove) { return prepareBuilder(state, suppressionToRemove).prefixWith(tree, "@" + use() + " ").build(); } @Nullable abstract String importToAdd(); abstract String use(); abstract boolean isTypeUse(); abstract boolean isAlreadyInScope(); private SuggestedFix.Builder prepareBuilder( VisitorState state, @Nullable String suppressionToRemove) { SuggestedFix.Builder builder = SuggestedFix.builder(); if (importToAdd() != null) { builder.addImport(importToAdd()); } if (applyRemoveSuppressWarnings(state)) { SuggestedFixes.removeSuppressWarnings(builder, state, suppressionToRemove); } return builder; } } private static NullableAnnotationToUse pickNullableAnnotation(VisitorState state) { /* * TODO(cpovirk): Instead of hardcoding these two annotations, pick the one that seems most * appropriate for each user: * * - Look for usages in other files in the compilation? * * - Look for imports of other annotations that are part of an artifact that also contains * @Nullable (e.g., javax.annotation.Nonnull). * * - Call getSymbolFromString. (But that may return transitive dependencies that will cause * compilation to fail strict-deps checking.) * * - Among available candidates, prefer type-usage annotations. * * - When we suggest a jsr305 annotation, might we want to suggest @CheckForNull over @Nullable? * It's more verbose, but it's more obviously a declaration annotation, and it's the * annotation that is *technically* defined to produce the behaviors that users want. (But do * tools like Dagger recognize it?) */ Symbol sym = FindIdentifiers.findIdent("Nullable", state, KindSelector.VAL_TYP); ErrorProneFlags flags = state.errorProneOptions().getFlags(); String defaultType = flags .get("Nullness:DefaultNullnessAnnotation") .orElse( state.isAndroidCompatible() ? 
"androidx.annotation.Nullable" : "javax.annotation.Nullable"); if (sym != null) { ClassSymbol classSym = (ClassSymbol) sym; if (classSym.isAnnotationType()) { // We've got an existing annotation called Nullable. We can use this. return annotationWithoutImporting( "Nullable", isTypeUse(classSym.className()), /*isAlreadyInScope=*/ true); } else { // It's not an annotation type. We have to fully-qualify the import. return annotationWithoutImporting( defaultType, isTypeUse(defaultType), /*isAlreadyInScope=*/ false); } } // There is no symbol already. Import and use. return annotationToBeImported(defaultType, isTypeUse(defaultType)); } private static boolean isTypeUse(String className) { /* * TODO(b/205115472): Make this tri-state ({type-use, declaration, both}) and avoid using "both" * annotations in any cases in which they would be ambiguous (e.g., arrays/elements). */ switch (className) { case "libcore.util.Nullable": case "org.checkerframework.checker.nullness.compatqual.NullableType": case "org.checkerframework.checker.nullness.qual.Nullable": case "org.jspecify.nullness.Nullable": return true; default: // TODO(cpovirk): Detect type-use-ness from the class symbol if it's available? return false; } } @Nullable static NullCheck getNullCheck(ExpressionTree tree) { tree = stripParentheses(tree); Polarity polarity; switch (tree.getKind()) { case EQUAL_TO: polarity = IS_NULL; break; case NOT_EQUAL_TO: polarity = IS_NOT_NULL; break; default: return null; } BinaryTree equalityTree = (BinaryTree) tree; ExpressionTree nullChecked; if (equalityTree.getRightOperand().getKind() == NULL_LITERAL) { nullChecked = equalityTree.getLeftOperand(); } else if (equalityTree.getLeftOperand().getKind() == NULL_LITERAL) { nullChecked = equalityTree.getRightOperand(); } else { return null; } Name name = nullChecked.getKind() == IDENTIFIER ? ((IdentifierTree) nullChecked).getName() : null; Symbol symbol = getSymbol(nullChecked); VarSymbol varSymbol = symbol instanceof VarSymbol ? 
(VarSymbol) symbol : null; return new AutoValue_NullnessUtils_NullCheck(name, varSymbol, polarity); } /** * A check of a variable against {@code null}, like {@code foo == null}. * * <p>This class exposes the variable in two forms: the {@link VarSymbol} (if available) and the * {@link Name} (if the null check was performed on a bare identifier, like {@code foo}). Many * callers restrict themselves to bare identifiers because it's easy and safe: Using {@code * Symbol} might lead code to assume that a null check of {@code foo.bar} guarantees something * about {@code otherFoo.bar}, which is represented by the same symbol. * * <p>Even when restricting themselves to bare identifiers, callers should be wary when examining * code that might: * * <ul> * <li>assign a new value to the identifier after the null check but before some usage * <li>declare a new identifier that hides the old * </ul> * * TODO(cpovirk): What our callers really care about is not "bare identifiers" but "this * particular 'instance' of a variable," so we could generalize to cover more cases of that. For * example, we could probably assume that a null check of {@code foo.bar} ensures that {@code * foo.bar} is non-null in the future. One case that might be particularly useful is {@code * this.bar}. We might even go further, assuming that {@code foo.bar()} will continue to have the * same value in some cases. */ @com.google.auto.value.AutoValue // fully qualified to work around JDK-7177813(?) in JDK8 build abstract static class NullCheck { /** * Returns the bare identifier that was checked against {@code null}, if the null check took * that form. Prefer this over {@link #varSymbolButUsuallyPreferBareIdentifier} in most cases, * as discussed in the class documentation. */ @Nullable abstract Name bareIdentifier(); /** Returns the symbol that was checked against {@code null}. 
*/ @Nullable abstract VarSymbol varSymbolButUsuallyPreferBareIdentifier(); abstract Polarity polarity(); boolean bareIdentifierMatches(ExpressionTree other) { return other.getKind() == IDENTIFIER && bareIdentifier() != null && bareIdentifier().equals(((IdentifierTree) other).getName()); } ExpressionTree nullCase(ConditionalExpressionTree tree) { return polarity() == IS_NULL ? tree.getTrueExpression() : tree.getFalseExpression(); } StatementTree nullCase(IfTree tree) { return polarity() == IS_NULL ? tree.getThenStatement() : tree.getElseStatement(); } enum Polarity { IS_NULL, IS_NOT_NULL, } } static boolean hasDefinitelyNullBranch( ExpressionTree tree, Set<VarSymbol> definitelyNullVars, /* * TODO(cpovirk): Compute varsProvenNullByParentIf inside this method, using the TreePath from * an instance of VisitorState, which must be an instance with the current path instead of * stateForCompilationUnit? (This would also let us eliminate the `tree` parameter, since that * would be accessible through getLeaf().) But we'll need to be consistent about whether we * pass the path of the expression or its enclosing statement. 
*/ ImmutableSet<Name> varsProvenNullByParentIf, VisitorState stateForCompilationUnit) { return new SimpleTreeVisitor<Boolean, Void>() { @Override public Boolean visitAssignment(AssignmentTree tree, Void unused) { return visit(tree.getExpression(), unused); } @Override public Boolean visitConditionalExpression(ConditionalExpressionTree tree, Void unused) { return visit(tree.getTrueExpression(), unused) || visit(tree.getFalseExpression(), unused) || isTernaryXIfXIsNull(tree); } @Override public Boolean visitIdentifier(IdentifierTree tree, Void unused) { return super.visitIdentifier(tree, unused) || varsProvenNullByParentIf.contains(tree.getName()); } @Override public Boolean visitMethodInvocation(MethodInvocationTree tree, Void unused) { return super.visitMethodInvocation(tree, unused) || isOptionalOrNull(tree); } @Override public Boolean visitParenthesized(ParenthesizedTree tree, Void unused) { return visit(tree.getExpression(), unused); } // TODO(cpovirk): visitSwitchExpression @Override public Boolean visitTypeCast(TypeCastTree tree, Void unused) { return visit(tree.getExpression(), unused); } @Override protected Boolean defaultAction(Tree tree, Void unused) { /* * This covers not only "Void" and "CAP#1 extends Void" but also the null literal. (It * covers the null literal even through parenthesized expressions. Still, we end up * needing special handling for parenthesized expressions for cases like `(foo ? bar : * null)`.) */ return isVoid(getType(tree), stateForCompilationUnit) || definitelyNullVars.contains(getSymbol(tree)); } boolean isOptionalOrNull(MethodInvocationTree tree) { return OPTIONAL_OR_NULL.matches(tree, stateForCompilationUnit) || (OPTIONAL_OR_ELSE.matches(tree, stateForCompilationUnit) && tree.getArguments().get(0).getKind() == NULL_LITERAL); /* * TODO(cpovirk): Instead of checking only for NULL_LITERAL, call hasDefinitelyNullBranch? * But consider whether that would interfere with the TODO at the top of that method. 
*/ } }.visit(tree, null); } /** Returns true if this is {@code x == null ? x : ...} or similar. */ private static boolean isTernaryXIfXIsNull(ConditionalExpressionTree tree) { NullCheck nullCheck = getNullCheck(tree.getCondition()); if (nullCheck == null) { return false; } ExpressionTree needsToBeKnownNull = nullCheck.nullCase(tree); return nullCheck.bareIdentifierMatches(needsToBeKnownNull); } static boolean isVoid(Type type, VisitorState state) { return type != null && state.getTypes().isSubtype(type, JAVA_LANG_VOID_TYPE.get(state)); } /** Returns x if the path's leaf is the only statement inside {@code if (x == null) { ... }}. */ static ImmutableSet<Name> varsProvenNullByParentIf(TreePath path) { Tree parent = path.getParentPath().getLeaf(); if (!(parent instanceof BlockTree)) { return ImmutableSet.of(); } if (((BlockTree) parent).getStatements().size() > 1) { return ImmutableSet.of(); } Tree grandparent = path.getParentPath().getParentPath().getLeaf(); if (!(grandparent instanceof IfTree)) { return ImmutableSet.of(); } IfTree ifTree = (IfTree) grandparent; NullCheck nullCheck = getNullCheck(ifTree.getCondition()); if (nullCheck == null) { return ImmutableSet.of(); } if (parent != nullCheck.nullCase(ifTree)) { return ImmutableSet.of(); } if (nullCheck.bareIdentifier() == null) { return ImmutableSet.of(); } return ImmutableSet.of(nullCheck.bareIdentifier()); } @Nullable static VariableTree findDeclaration(VisitorState state, Symbol sym) { JavacProcessingEnvironment javacEnv = JavacProcessingEnvironment.instance(state.context); TreePath declPath = Trees.instance(javacEnv).getPath(sym); // Skip fields declared in other compilation units since we can't make a fix for them here. 
if (declPath != null && declPath.getCompilationUnit() == state.getPath().getCompilationUnit() && (declPath.getLeaf() instanceof VariableTree)) { return (VariableTree) declPath.getLeaf(); } return null; } private enum OnlyIfInScope { IF_NOT, FALSE, TRUE } private static OnlyIfInScope applyOnlyIfAlreadyInScope(VisitorState state) { return state .errorProneOptions() .getFlags() .getEnum("Nullness:OnlyIfAnnotationAlreadyInScope", OnlyIfInScope.class) .orElse(OnlyIfInScope.FALSE); } private static boolean applyRemoveSuppressWarnings(VisitorState state) { return state .errorProneOptions() .getFlags() .getBoolean("Nullness:RemoveSuppressWarnings") .orElse(false); } }
apache-2.0
AndreyBurikhin/big-data-plugin
src/org/pentaho/di/job/entries/sqoop/SqoopConfig.java
20618
/*******************************************************************************
 *
 * Pentaho Big Data
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.job.entries.sqoop;

import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.ArgumentWrapper;
import org.pentaho.di.job.BlockableJobConfig;
import org.pentaho.di.job.CommandLineArgument;
import org.pentaho.di.job.JobEntryMode;
import org.pentaho.di.job.Password;
import org.pentaho.ui.xul.XulEventSource;
import org.pentaho.ui.xul.util.AbstractModelList;

/**
 * A collection of configuration objects for a Sqoop job entry.
 *
 * <p>Fields annotated with {@code @CommandLineArgument} are translated into Sqoop command-line
 * options; setters fire {@code PropertyChangeEvent}s (via the inherited {@code pcs}) keyed by the
 * property-name constants below so the XUL UI can stay in sync.
 */
public abstract class SqoopConfig extends BlockableJobConfig implements XulEventSource, Cloneable {
  // Property-name constants used both as bean property names and as PropertyChangeEvent keys.
  public static final String NAMENODE_HOST = "namenodeHost";
  public static final String NAMENODE_PORT = "namenodePort";
  public static final String JOBTRACKER_HOST = "jobtrackerHost";
  public static final String JOBTRACKER_PORT = "jobtrackerPort";
  public static final String DATABASE = "database";
  public static final String SCHEMA = "schema";

  // Common arguments
  public static final String CONNECT = "connect";
  public static final String USERNAME = "username";
  public static final String PASSWORD = "password";
  public static final String VERBOSE = "verbose";
  public static final String CONNECTION_MANAGER = "connectionManager";
  public static final String DRIVER = "driver";
  public static final String CONNECTION_PARAM_FILE = "connectionParamFile";
  public static final String HADOOP_HOME = "hadoopHome";

  // Output line formatting arguments
  public static final String ENCLOSED_BY = "enclosedBy";
  public static final String ESCAPED_BY = "escapedBy";
  public static final String FIELDS_TERMINATED_BY = "fieldsTerminatedBy";
  public static final String LINES_TERMINATED_BY = "linesTerminatedBy";
  public static final String OPTIONALLY_ENCLOSED_BY = "optionallyEnclosedBy";
  public static final String MYSQL_DELIMITERS = "mysqlDelimiters";

  // Input parsing arguments
  public static final String INPUT_ENCLOSED_BY = "inputEnclosedBy";
  public static final String INPUT_ESCAPED_BY = "inputEscapedBy";
  public static final String INPUT_FIELDS_TERMINATED_BY = "inputFieldsTerminatedBy";
  public static final String INPUT_LINES_TERMINATED_BY = "inputLinesTerminatedBy";
  public static final String INPUT_OPTIONALLY_ENCLOSED_BY = "inputOptionallyEnclosedBy";

  // Code generation arguments
  public static final String BIN_DIR = "binDir";
  public static final String CLASS_NAME = "className";
  public static final String JAR_FILE = "jarFile";
  public static final String OUTDIR = "outdir";
  public static final String PACKAGE_NAME = "packageName";
  public static final String MAP_COLUMN_JAVA = "mapColumnJava";

  // Shared Input/Export options
  public static final String TABLE = "table";
  public static final String NUM_MAPPERS = "numMappers";
  public static final String COMMAND_LINE = "commandLine";
  public static final String MODE = "mode";

  private String namenodeHost;
  private String namenodePort;
  private String jobtrackerHost;
  private String jobtrackerPort;
  private String database;
  private String schema;

  // Properties to support toggling between quick setup and advanced mode in the UI. These should never be saved.
  private transient String connectFromAdvanced;
  private transient String usernameFromAdvanced;
  private transient String passwordFromAdvanced;

  // Represents the last visible state of the UI and the execution mode.
  private String mode;

  // Common arguments
  @CommandLineArgument( name = CONNECT )
  private String connect;

  @CommandLineArgument( name = "connection-manager" )
  private String connectionManager;

  @CommandLineArgument( name = DRIVER )
  private String driver;

  @CommandLineArgument( name = USERNAME )
  private String username;

  // @Password marks this value for obfuscation when serialized/displayed.
  @CommandLineArgument( name = PASSWORD )
  @Password
  private String password;

  // flag = true: emitted as a bare switch with no value.
  @CommandLineArgument( name = VERBOSE, flag = true )
  private String verbose;

  @CommandLineArgument( name = "connection-param-file" )
  private String connectionParamFile;

  @CommandLineArgument( name = "hadoop-home" )
  private String hadoopHome;

  // Output line formatting arguments
  @CommandLineArgument( name = "enclosed-by" )
  private String enclosedBy;

  @CommandLineArgument( name = "escaped-by" )
  private String escapedBy;

  @CommandLineArgument( name = "fields-terminated-by" )
  private String fieldsTerminatedBy;

  @CommandLineArgument( name = "lines-terminated-by" )
  private String linesTerminatedBy;

  @CommandLineArgument( name = "optionally-enclosed-by" )
  private String optionallyEnclosedBy;

  @CommandLineArgument( name = "mysql-delimiters", flag = true )
  private String mysqlDelimiters;

  // Input parsing arguments
  @CommandLineArgument( name = "input-enclosed-by" )
  private String inputEnclosedBy;

  @CommandLineArgument( name = "input-escaped-by" )
  private String inputEscapedBy;

  @CommandLineArgument( name = "input-fields-terminated-by" )
  private String inputFieldsTerminatedBy;

  @CommandLineArgument( name = "input-lines-terminated-by" )
  private String inputLinesTerminatedBy;

  @CommandLineArgument( name = "input-optionally-enclosed-by" )
  private String inputOptionallyEnclosedBy;

  // Code generation arguments
  @CommandLineArgument( name = "bindir" )
  private String binDir;

  @CommandLineArgument( name = "class-name" )
  private String className;

  @CommandLineArgument( name = "jar-file" )
  private String jarFile;

  @CommandLineArgument( name = OUTDIR )
  private String outdir;

  @CommandLineArgument( name = "package-name" )
  private String packageName;

  @CommandLineArgument( name = "map-column-java" )
  private String mapColumnJava;

  // Shared Input/Export options
  @CommandLineArgument( name = TABLE )
  private String table;

  @CommandLineArgument( name = "num-mappers" )
  private String numMappers;

  // Raw command line used in "advanced" mode; not itself a Sqoop argument.
  private String commandLine;

  /**
   * @return all known arguments for this config object. Some arguments may be synthetic and represent properties
   *         directly set on this config object for the purpose of showing them in the list view of the UI.
   */
  public AbstractModelList<ArgumentWrapper> getAdvancedArgumentsList() {
    final AbstractModelList<ArgumentWrapper> items = new AbstractModelList<ArgumentWrapper>();

    // All @CommandLineArgument-annotated fields, discovered reflectively.
    items.addAll( SqoopUtils.findAllArguments( this ) );
    try {
      // Synthetic entries: plain bean properties surfaced in the UI list via their getter/setter
      // Method objects. The method names here must match the accessors declared below.
      items.add( new ArgumentWrapper( NAMENODE_HOST, BaseMessages.getString( getClass(), "NamenodeHost.Label" ),
          false, this, getClass().getMethod( "getNamenodeHost" ),
          getClass().getMethod( "setNamenodeHost", String.class ) ) );
      items.add( new ArgumentWrapper( NAMENODE_PORT, BaseMessages.getString( getClass(), "NamenodePort.Label" ),
          false, this, getClass().getMethod( "getNamenodePort" ),
          getClass().getMethod( "setNamenodePort", String.class ) ) );
      items.add( new ArgumentWrapper( JOBTRACKER_HOST, BaseMessages.getString( getClass(), "JobtrackerHost.Label" ),
          false, this, getClass().getMethod( "getJobtrackerHost" ),
          getClass().getMethod( "setJobtrackerHost", String.class ) ) );
      items.add( new ArgumentWrapper( JOBTRACKER_PORT, BaseMessages.getString( getClass(), "JobtrackerPort.Label" ),
          false, this, getClass().getMethod( "getJobtrackerPort" ),
          getClass().getMethod( "setJobtrackerPort", String.class ) ) );
      items.add( new ArgumentWrapper( BLOCKING_EXECUTION, BaseMessages
          .getString( getClass(), "BlockingExecution.Label" ), false, this,
          getClass().getMethod( "getBlockingExecution" ),
          getClass().getMethod( "setBlockingExecution", String.class ) ) );
      items.add( new ArgumentWrapper( BLOCKING_POLLING_INTERVAL, BaseMessages.getString( getClass(),
          "BlockingPollingInterval.Label" ), false, this,
          getClass().getMethod( "getBlockingPollingInterval" ),
          getClass().getMethod( "setBlockingPollingInterval", String.class ) ) );
    } catch ( NoSuchMethodException ex ) {
      // Indicates a programming error (accessor renamed without updating this list).
      throw new RuntimeException( ex );
    }

    return items;
  }

  @Override
  public SqoopConfig clone() {
    return (SqoopConfig) super.clone();
  }

  /**
   * Silently set the following properties: {@code database, connect, username, password}.
   * "Silently" means no PropertyChangeEvents are fired, unlike the individual setters.
   *
   * @param database
   *          Database name
   * @param connect
   *          Connection string (JDBC connection URL)
   * @param username
   *          Username
   * @param password
   *          Password
   */
  public void setConnectionInfo( String database, String connect, String username, String password ) {
    this.database = database;
    this.connect = connect;
    this.username = username;
    this.password = password;
  }

  /**
   * Copy connection information from temporary "advanced" fields into annotated argument fields.
   */
  public void copyConnectionInfoFromAdvanced() {
    // Advanced mode has no named database; clear it so the raw connect string wins.
    database = null;
    connect = getConnectFromAdvanced();
    username = getUsernameFromAdvanced();
    password = getPasswordFromAdvanced();
  }

  /**
   * Copy the current connection information into the "advanced" fields. These are temporary session properties used to
   * aid the user during configuration via UI.
   */
  public void copyConnectionInfoToAdvanced() {
    setConnectFromAdvanced( getConnect() );
    setUsernameFromAdvanced( getUsername() );
    setPasswordFromAdvanced( getPassword() );
  }

  // All getters/setters below this line. Setters fire a PropertyChangeEvent keyed by the
  // matching property-name constant; the transient *FromAdvanced setters intentionally do not.

  public String getNamenodeHost() {
    return namenodeHost;
  }

  public void setNamenodeHost( String namenodeHost ) {
    String old = this.namenodeHost;
    this.namenodeHost = namenodeHost;
    pcs.firePropertyChange( NAMENODE_HOST, old, this.namenodeHost );
  }

  public String getNamenodePort() {
    return namenodePort;
  }

  public void setNamenodePort( String namenodePort ) {
    String old = this.namenodePort;
    this.namenodePort = namenodePort;
    pcs.firePropertyChange( NAMENODE_PORT, old, this.namenodePort );
  }

  public String getJobtrackerHost() {
    return jobtrackerHost;
  }

  public void setJobtrackerHost( String jobtrackerHost ) {
    String old = this.jobtrackerHost;
    this.jobtrackerHost = jobtrackerHost;
    pcs.firePropertyChange( JOBTRACKER_HOST, old, this.jobtrackerHost );
  }

  public String getJobtrackerPort() {
    return jobtrackerPort;
  }

  public void setJobtrackerPort( String jobtrackerPort ) {
    String old = this.jobtrackerPort;
    this.jobtrackerPort = jobtrackerPort;
    pcs.firePropertyChange( JOBTRACKER_PORT, old, this.jobtrackerPort );
  }

  public String getDatabase() {
    return database;
  }

  public void setDatabase( String database ) {
    String old = this.database;
    this.database = database;
    pcs.firePropertyChange( DATABASE, old, this.database );
  }

  public String getSchema() {
    return schema;
  }

  public void setSchema( String schema ) {
    String old = this.schema;
    this.schema = schema;
    pcs.firePropertyChange( SCHEMA, old, this.schema );
  }

  public String getConnect() {
    return connect;
  }

  public void setConnect( String connect ) {
    String old = this.connect;
    this.connect = connect;
    pcs.firePropertyChange( CONNECT, old, this.connect );
  }

  public String getUsername() {
    return username;
  }

  public void setUsername( String username ) {
    String old = this.username;
    this.username = username;
    pcs.firePropertyChange( USERNAME, old, this.username );
  }

  public String getPassword() {
    return password;
  }

  public void setPassword( String password ) {
    String old = this.password;
    this.password = password;
    pcs.firePropertyChange( PASSWORD, old, this.password );
  }

  public String getConnectFromAdvanced() {
    return connectFromAdvanced;
  }

  public void setConnectFromAdvanced( String connectFromAdvanced ) {
    this.connectFromAdvanced = connectFromAdvanced;
  }

  public String getUsernameFromAdvanced() {
    return usernameFromAdvanced;
  }

  public void setUsernameFromAdvanced( String usernameFromAdvanced ) {
    this.usernameFromAdvanced = usernameFromAdvanced;
  }

  public String getPasswordFromAdvanced() {
    return passwordFromAdvanced;
  }

  public void setPasswordFromAdvanced( String passwordFromAdvanced ) {
    this.passwordFromAdvanced = passwordFromAdvanced;
  }

  public String getConnectionManager() {
    return connectionManager;
  }

  public void setConnectionManager( String connectionManager ) {
    String old = this.connectionManager;
    this.connectionManager = connectionManager;
    pcs.firePropertyChange( CONNECTION_MANAGER, old, this.connectionManager );
  }

  public String getDriver() {
    return driver;
  }

  public void setDriver( String driver ) {
    String old = this.driver;
    this.driver = driver;
    pcs.firePropertyChange( DRIVER, old, this.driver );
  }

  public String getVerbose() {
    return verbose;
  }

  public void setVerbose( String verbose ) {
    String old = this.verbose;
    this.verbose = verbose;
    pcs.firePropertyChange( VERBOSE, old, this.verbose );
  }

  public String getConnectionParamFile() {
    return connectionParamFile;
  }

  public void setConnectionParamFile( String connectionParamFile ) {
    String old = this.connectionParamFile;
    this.connectionParamFile = connectionParamFile;
    pcs.firePropertyChange( CONNECTION_PARAM_FILE, old, this.connectionParamFile );
  }

  public String getHadoopHome() {
    return hadoopHome;
  }

  public void setHadoopHome( String hadoopHome ) {
    String old = this.hadoopHome;
    this.hadoopHome = hadoopHome;
    pcs.firePropertyChange( HADOOP_HOME, old, this.hadoopHome );
  }

  public String getEnclosedBy() {
    return enclosedBy;
  }

  public void setEnclosedBy( String enclosedBy ) {
    String old = this.enclosedBy;
    this.enclosedBy = enclosedBy;
    pcs.firePropertyChange( ENCLOSED_BY, old, this.enclosedBy );
  }

  public String getEscapedBy() {
    return escapedBy;
  }

  public void setEscapedBy( String escapedBy ) {
    String old = this.escapedBy;
    this.escapedBy = escapedBy;
    pcs.firePropertyChange( ESCAPED_BY, old, this.escapedBy );
  }

  public String getFieldsTerminatedBy() {
    return fieldsTerminatedBy;
  }

  public void setFieldsTerminatedBy( String fieldsTerminatedBy ) {
    String old = this.fieldsTerminatedBy;
    this.fieldsTerminatedBy = fieldsTerminatedBy;
    pcs.firePropertyChange( FIELDS_TERMINATED_BY, old, this.fieldsTerminatedBy );
  }

  public String getLinesTerminatedBy() {
    return linesTerminatedBy;
  }

  public void setLinesTerminatedBy( String linesTerminatedBy ) {
    String old = this.linesTerminatedBy;
    this.linesTerminatedBy = linesTerminatedBy;
    pcs.firePropertyChange( LINES_TERMINATED_BY, old, this.linesTerminatedBy );
  }

  public String getOptionallyEnclosedBy() {
    return optionallyEnclosedBy;
  }

  public void setOptionallyEnclosedBy( String optionallyEnclosedBy ) {
    String old = this.optionallyEnclosedBy;
    this.optionallyEnclosedBy = optionallyEnclosedBy;
    pcs.firePropertyChange( OPTIONALLY_ENCLOSED_BY, old, this.optionallyEnclosedBy );
  }

  public String getMysqlDelimiters() {
    return mysqlDelimiters;
  }

  public void setMysqlDelimiters( String mysqlDelimiters ) {
    String old = this.mysqlDelimiters;
    this.mysqlDelimiters = mysqlDelimiters;
    pcs.firePropertyChange( MYSQL_DELIMITERS, old, this.mysqlDelimiters );
  }

  public String getInputEnclosedBy() {
    return inputEnclosedBy;
  }

  public void setInputEnclosedBy( String inputEnclosedBy ) {
    String old = this.inputEnclosedBy;
    this.inputEnclosedBy = inputEnclosedBy;
    pcs.firePropertyChange( INPUT_ENCLOSED_BY, old, this.inputEnclosedBy );
  }

  public String getInputEscapedBy() {
    return inputEscapedBy;
  }

  public void setInputEscapedBy( String inputEscapedBy ) {
    String old = this.inputEscapedBy;
    this.inputEscapedBy = inputEscapedBy;
    pcs.firePropertyChange( INPUT_ESCAPED_BY, old, this.inputEscapedBy );
  }

  public String getInputFieldsTerminatedBy() {
    return inputFieldsTerminatedBy;
  }

  public void setInputFieldsTerminatedBy( String inputFieldsTerminatedBy ) {
    String old = this.inputFieldsTerminatedBy;
    this.inputFieldsTerminatedBy = inputFieldsTerminatedBy;
    pcs.firePropertyChange( INPUT_FIELDS_TERMINATED_BY, old, this.inputFieldsTerminatedBy );
  }

  public String getInputLinesTerminatedBy() {
    return inputLinesTerminatedBy;
  }

  public void setInputLinesTerminatedBy( String inputLinesTerminatedBy ) {
    String old = this.inputLinesTerminatedBy;
    this.inputLinesTerminatedBy = inputLinesTerminatedBy;
    pcs.firePropertyChange( INPUT_LINES_TERMINATED_BY, old, this.inputLinesTerminatedBy );
  }

  public String getInputOptionallyEnclosedBy() {
    return inputOptionallyEnclosedBy;
  }

  public void setInputOptionallyEnclosedBy( String inputOptionallyEnclosedBy ) {
    String old = this.inputOptionallyEnclosedBy;
    this.inputOptionallyEnclosedBy = inputOptionallyEnclosedBy;
    pcs.firePropertyChange( INPUT_OPTIONALLY_ENCLOSED_BY, old, this.inputOptionallyEnclosedBy );
  }

  public String getBinDir() {
    return binDir;
  }

  public void setBinDir( String binDir ) {
    String old = this.binDir;
    this.binDir = binDir;
    pcs.firePropertyChange( BIN_DIR, old, this.binDir );
  }

  public String getClassName() {
    return className;
  }

  public void setClassName( String className ) {
    String old = this.className;
    this.className = className;
    pcs.firePropertyChange( CLASS_NAME, old, this.className );
  }

  public String getJarFile() {
    return jarFile;
  }

  public void setJarFile( String jarFile ) {
    String old = this.jarFile;
    this.jarFile = jarFile;
    pcs.firePropertyChange( JAR_FILE, old, this.jarFile );
  }

  public String getOutdir() {
    return outdir;
  }

  public void setOutdir( String outdir ) {
    String old = this.outdir;
    this.outdir = outdir;
    pcs.firePropertyChange( OUTDIR, old, this.outdir );
  }

  public String getPackageName() {
    return packageName;
  }

  public void setPackageName( String packageName ) {
    String old = this.packageName;
    this.packageName = packageName;
    pcs.firePropertyChange( PACKAGE_NAME, old, this.packageName );
  }

  public String getMapColumnJava() {
    return mapColumnJava;
  }

  public void setMapColumnJava( String mapColumnJava ) {
    String old = this.mapColumnJava;
    this.mapColumnJava = mapColumnJava;
    pcs.firePropertyChange( MAP_COLUMN_JAVA, old, this.mapColumnJava );
  }

  public String getTable() {
    return table;
  }

  public void setTable( String table ) {
    String old = this.table;
    this.table = table;
    pcs.firePropertyChange( TABLE, old, this.table );
  }

  public String getNumMappers() {
    return numMappers;
  }

  public void setNumMappers( String numMappers ) {
    String old = this.numMappers;
    this.numMappers = numMappers;
    pcs.firePropertyChange( NUM_MAPPERS, old, this.numMappers );
  }

  public String getCommandLine() {
    return commandLine;
  }

  public void setCommandLine( String commandLine ) {
    String old = this.commandLine;
    this.commandLine = commandLine;
    pcs.firePropertyChange( COMMAND_LINE, old, this.commandLine );
  }

  public String getMode() {
    return mode;
  }

  /**
   * @return the current UI/execution mode as a {@link JobEntryMode}; falls back to
   *         {@link JobEntryMode#QUICK_SETUP} when the stored string is missing or invalid.
   */
  public JobEntryMode getModeAsEnum() {
    try {
      return JobEntryMode.valueOf( getMode() );
    } catch ( Exception ex ) {
      // Not a valid ui mode, return the default
      return JobEntryMode.QUICK_SETUP;
    }
  }

  /**
   * Sets the mode based on the enum value
   *
   * @param mode
   *          the mode to store (persisted via its {@code name()})
   */
  public void setMode( JobEntryMode mode ) {
    setMode( mode.name() );
  }

  public void setMode( String mode ) {
    String old = this.mode;
    this.mode = mode;
    pcs.firePropertyChange( MODE, old, this.mode );
  }
}
apache-2.0
avranju/qpid-jms
qpid-jms-client/src/main/java/org/apache/qpid/jms/JmsConnectionListener.java
2486
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.qpid.jms;

import java.net.URI;

import org.apache.qpid.jms.message.JmsInboundMessageDispatch;

/**
 * Provides an interface for clients to listen to events related to
 * a JmsConnection.
 */
public interface JmsConnectionListener {

    /**
     * Called when a connection has been successfully established.
     *
     * This method is never called more than once when using a fault tolerant
     * connection; instead the connection will signal interrupted and restored.
     *
     * @param remoteURI
     *        The URI of the Broker this client is now connected to.
     */
    void onConnectionEstablished(URI remoteURI);

    /**
     * Called when an unrecoverable error occurs and the Connection must be closed.
     *
     * @param error
     *        The error that triggered the failure.
     */
    void onConnectionFailure(Throwable error);

    /**
     * Called when the Connection to the remote peer is lost.
     *
     * @param remoteURI
     *        The URI of the Broker previously connected to.
     */
    void onConnectionInterrupted(URI remoteURI);

    /**
     * Called when normal communication has been restored to a remote peer.
     *
     * @param remoteURI
     *        The URI of the Broker that this client is now connected to.
     */
    void onConnectionRestored(URI remoteURI);

    /**
     * Called when a Connection is notified that a new Message has arrived for
     * one of its currently active subscriptions.
     *
     * @param envelope
     *        The envelope that contains the incoming message and its delivery information.
     */
    void onInboundMessage(JmsInboundMessageDispatch envelope);
}
apache-2.0
rrenomeron/cas
core/cas-server-core-monitor/src/test/java/org/apereo/cas/AllTestsSuite.java
616
package org.apereo.cas;

import org.apereo.cas.monitor.CacheHealthIndicatorTests;
import org.apereo.cas.monitor.MemoryHealthIndicatorTests;
import org.apereo.cas.monitor.PoolHealthIndicatorTests;
import org.apereo.cas.monitor.SessionHealthIndicatorTests;

import org.junit.platform.suite.api.SelectClasses;

/**
 * This is {@link AllTestsSuite}.
 * JUnit Platform suite that aggregates the core-monitor health indicator tests
 * so they can be executed as a single unit.
 *
 * @author Auto-generated by Gradle Build
 * @since 6.0.0-RC3
 */
@SelectClasses({
    MemoryHealthIndicatorTests.class,
    PoolHealthIndicatorTests.class,
    SessionHealthIndicatorTests.class,
    CacheHealthIndicatorTests.class
})
public class AllTestsSuite {
}
apache-2.0
snicoll/initializr
initializr-generator/src/test/java/io/spring/initializr/generator/io/IndentingWriterTests.java
2907
/*
 * Copyright 2012-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.spring.initializr.generator.io;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.List;

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link IndentingWriter}.
 *
 * @author Andy Wilkinson
 */
class IndentingWriterTests {

	private final StringWriter out = new StringWriter();

	private final IndentingWriter writer = new IndentingWriter(this.out);

	@Test
	void linesAreNotIndentedByDefault() {
		// Without entering an indented section, lines are written flush left.
		for (String line : new String[] { "a", "b", "c" }) {
			this.writer.println(line);
		}
		assertThat(readLines()).containsSequence("a", "b", "c");
	}

	@Test
	void linesCanBeIndented() {
		this.writer.println("a");
		this.writer.indented(() -> {
			this.writer.println("b");
		});
		this.writer.println("c");
		assertThat(readLines()).containsSequence("a", "    b", "c");
	}

	@Test
	void blankLinesAreNotIndented() {
		// An empty println inside an indented section must stay truly empty.
		this.writer.println("a");
		this.writer.indented(() -> {
			this.writer.println("b");
			this.writer.println();
		});
		this.writer.println("c");
		assertThat(readLines()).containsSequence("a", "    b", "", "c");
	}

	@Test
	void useOfPrintDoesNotAddIndent() {
		// Only the first write of a line gets the indent; print() continues the same line.
		this.writer.println("a");
		this.writer.indented(() -> {
			this.writer.print("b");
			this.writer.print("b");
			this.writer.println("b");
		});
		this.writer.println("c");
		assertThat(readLines()).containsSequence("a", "    bbb", "c");
	}

	@Test
	void customIndentStrategyIsUsed() throws IOException {
		// A tab-based strategy should be applied once per indentation level.
		try (IndentingWriter tabWriter = new IndentingWriter(this.out, new SimpleIndentStrategy("\t"))) {
			tabWriter.println("a");
			tabWriter.indented(() -> {
				tabWriter.println("b");
				tabWriter.indented(() -> {
					tabWriter.print("c");
					tabWriter.println("e");
				});
			});
		}
		assertThat(readLines()).containsSequence("a", "\tb", "\t\tce");
	}

	private List<String> readLines() {
		// Split on either LF or CRLF so the assertions are platform-independent.
		return Arrays.asList(this.out.toString().split("\\r?\\n"));
	}

}
apache-2.0
lizhanhui/data_druid
extensions-contrib/virtual-columns/src/main/java/io/druid/segment/MapVirtualColumn.java
8401
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.druid.segment;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.metamx.common.StringUtils;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.filter.DimFilterUtils;
import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.column.ColumnCapabilitiesImpl;
import io.druid.segment.column.ValueType;
import io.druid.segment.data.IndexedInts;
import io.druid.segment.virtual.VirtualColumnCacheHelper;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Virtual column that pairs up two multi-value dimensions — a "key" dimension
 * and a "value" dimension — row by row, exposing them either as a whole
 * {@code Map<String, String>} (when queried by the bare output name) or as a
 * single looked-up value (when queried with dot notation, e.g.
 * {@code outputName.someKey}).
 */
public class MapVirtualColumn implements VirtualColumn
{
  private final String outputName;
  private final String keyDimension;
  private final String valueDimension;

  @JsonCreator
  public MapVirtualColumn(
      @JsonProperty("keyDimension") String keyDimension,
      @JsonProperty("valueDimension") String valueDimension,
      @JsonProperty("outputName") String outputName
  )
  {
    Preconditions.checkArgument(keyDimension != null, "key dimension should not be null");
    Preconditions.checkArgument(valueDimension != null, "value dimension should not be null");
    Preconditions.checkArgument(outputName != null, "output name should not be null");

    this.keyDimension = keyDimension;
    this.valueDimension = valueDimension;
    this.outputName = outputName;
  }

  /**
   * Builds an object selector for either the full map (no sub-column in
   * {@code dimension}) or a single map value (dot-notation sub-column).
   */
  @Override
  public ObjectColumnSelector makeObjectColumnSelector(String dimension, ColumnSelectorFactory factory)
  {
    final DimensionSelector keySelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(keyDimension));
    final DimensionSelector valueSelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(valueDimension));

    // rhs is the part after the first dot; null means "give me the whole map".
    final String subColumnName = VirtualColumns.splitColumnName(dimension).rhs;

    if (subColumnName == null) {
      // Whole-map selector: zips the two dimensions positionally. Rows where
      // one side is longer are truncated to the shorter length.
      return new ObjectColumnSelector<Map>()
      {
        @Override
        public Class classOfObject()
        {
          return Map.class;
        }

        @Override
        public Map get()
        {
          final IndexedInts keyIndices = keySelector.getRow();
          final IndexedInts valueIndices = valueSelector.getRow();
          if (keyIndices == null || valueIndices == null) {
            return null;
          }
          final int limit = Math.min(keyIndices.size(), valueIndices.size());
          final Map<String, String> map = Maps.newHashMapWithExpectedSize(limit);
          for (int i = 0; i < limit; i++) {
            map.put(
                keySelector.lookupName(keyIndices.get(i)),
                valueSelector.lookupName(valueIndices.get(i))
            );
          }
          return map;
        }
      };
    }

    IdLookup keyIdLookup = keySelector.idLookup();
    if (keyIdLookup != null) {
      // Fast path: the key dimension has a dictionary, so the sub-column name
      // can be resolved to a single id once, and rows compared by int id.
      final int keyId = keyIdLookup.lookupId(subColumnName);
      if (keyId < 0) {
        // Key never occurs anywhere in the column: constant-null selector.
        return NullStringObjectColumnSelector.instance();
      }
      return new ObjectColumnSelector<String>()
      {
        @Override
        public Class classOfObject()
        {
          return String.class;
        }

        @Override
        public String get()
        {
          final IndexedInts keyIndices = keySelector.getRow();
          final IndexedInts valueIndices = valueSelector.getRow();
          if (keyIndices == null || valueIndices == null) {
            return null;
          }
          final int limit = Math.min(keyIndices.size(), valueIndices.size());
          for (int i = 0; i < limit; i++) {
            if (keyIndices.get(i) == keyId) {
              return valueSelector.lookupName(valueIndices.get(i));
            }
          }
          return null;
        }
      };
    } else {
      // Slow path: no id lookup available, compare key strings per row.
      return new ObjectColumnSelector<String>()
      {
        @Override
        public Class classOfObject()
        {
          return String.class;
        }

        @Override
        public String get()
        {
          final IndexedInts keyIndices = keySelector.getRow();
          final IndexedInts valueIndices = valueSelector.getRow();
          if (keyIndices == null || valueIndices == null) {
            return null;
          }
          final int limit = Math.min(keyIndices.size(), valueIndices.size());
          for (int i = 0; i < limit; i++) {
            if (Objects.equals(keySelector.lookupName(keyIndices.get(i)), subColumnName)) {
              return valueSelector.lookupName(valueIndices.get(i));
            }
          }
          return null;
        }
      };
    }
  }

  @Override
  public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec, ColumnSelectorFactory factory)
  {
    // Could probably do something useful here if the column name is dot-style. But for now just return nothing.
    return null;
  }

  @Override
  public FloatColumnSelector makeFloatColumnSelector(String columnName, ColumnSelectorFactory factory)
  {
    // Map values are strings; no numeric selector is offered.
    return null;
  }

  @Override
  public LongColumnSelector makeLongColumnSelector(String columnName, ColumnSelectorFactory factory)
  {
    return null;
  }

  @Override
  public DoubleColumnSelector makeDoubleColumnSelector(String columnName, ColumnSelectorFactory factory)
  {
    return null;
  }

  @Override
  public ColumnCapabilities capabilities(String columnName)
  {
    // Bare output name -> the whole map (COMPLEX); dotted name -> one STRING value.
    final ValueType valueType = columnName.indexOf('.') < 0 ? ValueType.COMPLEX : ValueType.STRING;
    return new ColumnCapabilitiesImpl().setType(valueType);
  }

  @Override
  public List<String> requiredColumns()
  {
    return ImmutableList.of(keyDimension, valueDimension);
  }

  @Override
  public boolean usesDotNotation()
  {
    return true;
  }

  /**
   * Cache key layout: 1-byte type id, then the three UTF-8 names separated by
   * the standard string separator (hence the {@code 3 +} in the allocation:
   * one type byte plus two separator bytes).
   */
  @Override
  public byte[] getCacheKey()
  {
    byte[] key = StringUtils.toUtf8(keyDimension);
    byte[] value = StringUtils.toUtf8(valueDimension);
    byte[] output = StringUtils.toUtf8(outputName);

    return ByteBuffer.allocate(3 + key.length + value.length + output.length)
                     .put(VirtualColumnCacheHelper.CACHE_TYPE_ID_MAP)
                     .put(key).put(DimFilterUtils.STRING_SEPARATOR)
                     .put(value).put(DimFilterUtils.STRING_SEPARATOR)
                     .put(output)
                     .array();
  }

  @JsonProperty
  public String getKeyDimension()
  {
    return keyDimension;
  }

  @JsonProperty
  public String getValueDimension()
  {
    return valueDimension;
  }

  @Override
  @JsonProperty
  public String getOutputName()
  {
    return outputName;
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (!(o instanceof MapVirtualColumn)) {
      return false;
    }

    MapVirtualColumn that = (MapVirtualColumn) o;

    if (!keyDimension.equals(that.keyDimension)) {
      return false;
    }
    if (!valueDimension.equals(that.valueDimension)) {
      return false;
    }
    if (!outputName.equals(that.outputName)) {
      return false;
    }

    return true;
  }

  @Override
  public int hashCode()
  {
    int result = keyDimension.hashCode();
    result = 31 * result + valueDimension.hashCode();
    result = 31 * result + outputName.hashCode();
    return result;
  }

  @Override
  public String toString()
  {
    return "MapVirtualColumn{" +
           "keyDimension='" + keyDimension + '\'' +
           ", valueDimension='" + valueDimension + '\'' +
           ", outputName='" + outputName + '\'' +
           '}';
  }
}
apache-2.0
apache/olingo-odata4
lib/server-api/src/main/java/org/apache/olingo/server/api/ODataLibraryException.java
5208
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.olingo.server.api;

import java.util.Arrays;
import java.util.Formatter;
import java.util.Locale;
import java.util.MissingFormatArgumentException;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

import org.apache.olingo.commons.api.ex.ODataException;

/**
 * Abstract superclass of all translatable server exceptions.
 * <p>Message texts are looked up in a {@link ResourceBundle} (named by
 * {@link #getBundleName()}) under the key
 * {@code <SimpleClassName>.<MessageKey>} and formatted with the parameters
 * given at construction time.</p>
 */
public abstract class ODataLibraryException extends ODataException {

  private static final long serialVersionUID = -1210541002198287561L;
  private static final Locale DEFAULT_LOCALE = Locale.ENGLISH;
  protected static final String DEFAULT_SERVER_BUNDLE_NAME = "server-core-exceptions-i18n";

  /** Key for the exception text in the resource bundle. */
  public interface MessageKey {
    /** Gets this key. */
    String getKey();
  }

  // Both fields describe an immutable message; they are never reassigned.
  private final MessageKey messageKey;
  private final Object[] parameters;

  /**
   * Creates a translatable exception.
   * @param developmentMessage non-translated message for developers
   * @param messageKey key of the translatable message text (may be null)
   * @param parameters replacement values for place holders in the message text
   */
  protected ODataLibraryException(final String developmentMessage, final MessageKey messageKey,
      final String... parameters) {
    super(developmentMessage);
    this.messageKey = messageKey;
    this.parameters = parameters;
  }

  /**
   * Creates a translatable exception with a cause.
   * @param developmentMessage non-translated message for developers
   * @param cause the cause of this exception
   * @param messageKey key of the translatable message text (may be null)
   * @param parameters replacement values for place holders in the message text
   */
  protected ODataLibraryException(final String developmentMessage, final Throwable cause,
      final MessageKey messageKey,
      final String... parameters) {
    super(developmentMessage, cause);
    this.messageKey = messageKey;
    this.parameters = parameters;
  }

  @Override
  public String getLocalizedMessage() {
    return getTranslatedMessage(DEFAULT_LOCALE).getMessage();
  }

  @Override
  public String toString() {
    return getMessage();
  }

  /** Gets the message key. */
  public MessageKey getMessageKey() {
    return messageKey;
  }

  /**
   * Gets the translated message text for a given locale (or the default locale if not available),
   * returning the developer message text if none is found.
   * @param locale the preferred {@link Locale}
   * @return the error message
   */
  public ODataErrorMessage getTranslatedMessage(final Locale locale) {
    // Fall back to the developer message when there is no key or no bundle.
    if (messageKey != null) {
      ResourceBundle bundle = createResourceBundle(locale);
      if (bundle != null) {
        return buildMessage(bundle, locale);
      }
    }
    return new ODataErrorMessage(getMessage(), DEFAULT_LOCALE);
  }

  /**
   * <p>Gets the name of the {@link ResourceBundle} containing the exception texts.</p>
   * <p>The key for an exception text is the concatenation of the exception-class name and
   * the {@link MessageKey}, separated by a dot.</p>
   * @return the name of the resource bundle
   */
  protected abstract String getBundleName();

  private ResourceBundle createResourceBundle(final Locale locale) {
    try {
      return ResourceBundle.getBundle(getBundleName(), locale == null ? DEFAULT_LOCALE : locale);
    } catch (final MissingResourceException e) {
      // Missing bundle is handled by the caller via the developer-message fallback.
      return null;
    }
  }

  private ODataErrorMessage buildMessage(final ResourceBundle bundle, final Locale locale) {
    String message = null;
    StringBuilder builder = new StringBuilder();
    Formatter f = new Formatter(builder, locale);
    try {
      message = bundle.getString(getClass().getSimpleName() + '.' + messageKey.getKey());
      f.format(message, parameters);
      Locale usedLocale = bundle.getLocale();
      if (Locale.ROOT.equals(usedLocale)) {
        // ResourceBundle reports ROOT for the default bundle; we expose ENGLISH.
        usedLocale = DEFAULT_LOCALE;
      }
      return new ODataErrorMessage(builder.toString(), usedLocale);
    } catch (MissingResourceException e) {
      return new ODataErrorMessage("Missing message for key '" + messageKey.getKey() + "'!", DEFAULT_LOCALE);
    } catch (MissingFormatArgumentException e) {
      return new ODataErrorMessage("Missing replacement for place holder in message '" + message
          + "' for following arguments '" + Arrays.toString(parameters) + "'!", DEFAULT_LOCALE);
    } finally {
      f.close();
    }
  }

  /** Error message text and {@link Locale} used for it. */
  public static class ODataErrorMessage {
    private final String message;
    private final Locale locale;

    public ODataErrorMessage(final String message, final Locale usedLocale) {
      this.message = message;
      locale = usedLocale;
    }

    /** Gets the message text. */
    public String getMessage() {
      return message;
    }

    /** Gets the {@link Locale} used for this message. */
    public Locale getLocale() {
      return locale;
    }
  }
}
apache-2.0
ma459006574/pentaho-kettle
test/org/pentaho/di/trans/steps/textfileoutput/TextFileOutputSplittingTest.java
5824
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.textfileoutput;

import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemException;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;

import java.util.List;

import static org.junit.Assert.assertEquals;

/**
 * The input transformation contains 3 steps. The last one is TextFileOutput step. It obtains the vector [1,2,...,5].
 * Split threshold is set to 3. The step creates in-memory files following this template: {@linkplain
 * TextFileOutputSplittingTest#OUTPUT_DIR}. The class covers different cases depending on header's and footer's
 * existence flags.
 *
 * @author Andrey Khayrutdinov
 */
public class TextFileOutputSplittingTest {

  // "ram://" is Apache VFS's in-memory file system, so no disk files are created.
  private static final String OUTPUT_DIR = "ram://pdi-12847";

  // Expected header/footer line content.
  // NOTE(review): both render as the literal "value"; presumably that is what the
  // .ktr's header/footer emit — confirm against the test transformation file.
  private static final String H = "value";
  private static final String F = "value";

  private TransMeta transMeta;

  @BeforeClass
  public static void initKettle() throws Exception {
    KettleEnvironment.init( false );
  }

  @Before
  public void setUp() throws Exception {
    // Loads the regression transformation for PDI-12847 fresh before each test.
    transMeta = new TransMeta( "testfiles/org/pentaho/di/trans/steps/textfileoutput/pdi-12847.ktr" );
    transMeta.setTransformationType( TransMeta.TransformationType.Normal );
  }

  @After
  public void tearDown() throws Exception {
    transMeta = null;
    // Remove all split files from the in-memory folder so tests stay independent.
    FileObject folder = getFolder();
    for ( FileObject fileObject : folder.getChildren() ) {
      fileObject.delete();
    }
  }

  @Test
  public void splitWithNone() throws Exception {
    runTransformation( transMeta );

    FileObject[] children = getFolder().getChildren();
    // 2 files => [1,2,3], [4,5]
    assertEquals( 2, children.length );

    assertSplitFileIsCorrect( children[ 0 ], "data.txt_0", "1", "2", "3" );
    assertSplitFileIsCorrect( children[ 1 ], "data.txt_1", "4", "5" );
  }

  @Test
  public void splitWithHeader() throws Exception {
    TextFileOutputMeta meta = pickupTextFileOutputMeta();
    meta.setHeaderEnabled( true );

    runTransformation( transMeta );

    FileObject[] children = getFolder().getChildren();
    // 3 files => [h,1,2], [h,3,4], [h,5]
    // The header counts against the 3-line split threshold, hence only 2 data rows per file.
    assertEquals( 3, children.length );

    assertSplitFileIsCorrect( children[ 0 ], "data.txt_0", H, "1", "2" );
    assertSplitFileIsCorrect( children[ 1 ], "data.txt_1", H, "3", "4" );
    assertSplitFileIsCorrect( children[ 2 ], "data.txt_2", H, "5" );
  }

  @Test
  public void splitWithFooter() throws Exception {
    TextFileOutputMeta meta = pickupTextFileOutputMeta();
    meta.setFooterEnabled( true );

    runTransformation( transMeta );

    FileObject[] children = getFolder().getChildren();
    // 3 files => [1,2,f], [3,4,f], [5,f]
    assertEquals( 3, children.length );

    assertSplitFileIsCorrect( children[ 0 ], "data.txt_0", "1", "2", F );
    assertSplitFileIsCorrect( children[ 1 ], "data.txt_1", "3", "4", F );
    assertSplitFileIsCorrect( children[ 2 ], "data.txt_2", "5", F );
  }

  @Test
  public void splitWithBoth() throws Exception {
    TextFileOutputMeta meta = pickupTextFileOutputMeta();
    meta.setHeaderEnabled( true );
    meta.setFooterEnabled( true );

    runTransformation( transMeta );

    FileObject[] children = getFolder().getChildren();
    // 5 files => [h,1,f], [h,2,f], ..., [h,5,f]
    // Header + footer leave room for exactly one data row per 3-line file.
    assertEquals( 5, children.length );

    for ( int i = 0; i < children.length; i++ ) {
      assertSplitFileIsCorrect( children[ i ], "data.txt_" + i, H, Integer.toString( i + 1 ), F );
    }
  }

  // The TextFileOutput step is the third (index 2) step of the transformation.
  private TextFileOutputMeta pickupTextFileOutputMeta() throws Exception {
    return (TextFileOutputMeta) transMeta.getSteps().get( 2 ).getStepMetaInterface();
  }

  // Runs the transformation to completion and asserts it finished without errors.
  private static void runTransformation( TransMeta transMeta ) throws Exception {
    Trans trans = new Trans( transMeta );
    trans.prepareExecution( null );
    trans.startThreads();
    trans.waitUntilFinished();
    assertEquals( 0, trans.getErrors() );
  }

  private static FileObject getFolder() throws FileSystemException {
    return KettleVFS.getInstance().getFileSystemManager().resolveFile( OUTPUT_DIR );
  }

  // Asserts both the generated file name and its exact line-by-line content.
  private static void assertSplitFileIsCorrect( FileObject file, String expectedName, String... expectedLines )
    throws Exception {
    List<String> content = readContentOf( file );
    assertEquals( expectedName, file.getName().getBaseName() );
    assertEquals( expectedLines.length, content.size() );
    for ( int i = 0; i < content.size(); i++ ) {
      assertEquals( expectedLines[ i ], content.get( i ) );
    }
  }

  @SuppressWarnings( "unchecked" )
  private static List<String> readContentOf( FileObject fileObject ) throws Exception {
    // commons-io 1.x returns a raw List, hence the unchecked suppression above.
    return IOUtils.readLines( fileObject.getContent().getInputStream() );
  }
}
apache-2.0
azkaoru/migration-tool
src/tubame.portability/src/tubame/portability/plugin/editor/JbmEditorPart.java
9098
/*
 * JbmEditorPart.java
 * Created on 2013/06/28
 *
 * Copyright (C) 2011-2013 Nippon Telegraph and Telephone Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package tubame.portability.plugin.editor;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.core.resources.IProject;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.viewers.IDoubleClickListener;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IFileEditorInput;
import org.eclipse.ui.WorkbenchException;
import org.eclipse.ui.part.EditorPart;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import tubame.portability.exception.JbmException;
import tubame.portability.logic.CheckListInformationFactory;
import tubame.portability.logic.JbmAccessFactory;
import tubame.portability.model.JbmEditorEnum;
import tubame.portability.model.JbmEditorMigrationRow;
import tubame.portability.model.MigrationEditorRow;
import tubame.portability.plugin.action.JbmRowDelete;
import tubame.portability.plugin.action.MigrationRowDelete;
import tubame.portability.plugin.view.WorkStatusView;
import tubame.portability.util.PluginUtil;
import tubame.portability.util.StringUtil;
import tubame.portability.util.resource.MessageUtil;
import tubame.portability.util.resource.ResourceUtil;

/**
 * Eclipse editor that a hierarchical view of the results of the search
 * function.<br/>
 * Sort the search results record, link to the original source file
 * transplantation, <br/>
 * delete records, visual confirmation status change, a link to the portability
 * study guidelines,<br/>
 * save the file after editing.<br/>
 * The search results screen editing function display, and display using the
 * perspective of their own.<br/>
 */
public class JbmEditorPart extends AbstractJbmEditorPart {
    /**
     * Logger
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(JbmEditorPart.class);

    // Column widths (in pixels) for the editor's tree columns.

    /**
     * Hearing column size
     */
    private static final int HIARING_COLUMN_SIZE = 100;
    /**
     * Visual confirmation column size
     */
    private static final int VISUAL_COLUMN_SIZE = 100;
    /**
     * Guide chapter number column size
     */
    private static final int CHAPTER_NO_COLUMN_SIZE = 60;
    /**
     * Difficulty column size
     */
    private static final int DIFFICULTY_COLUMN_SIZE = 70;
    /**
     * Line number column size
     */
    private static final int ROW_COLUMN_SIZE = 30;
    /**
     * File Name column size
     */
    private static final int TARGET_FILE_PATH_COLUMN_SIZE = 100;
    /**
     * Major item column size
     */
    private static final int BIG_ITEM_COLUMN_SIZE = 120;
    /**
     * Middle item column size
     */
    private static final int MIDDLE_ITEM_COLUMN_SIZE = 120;
    /**
     * Number of column size
     */
    private static final int HIT_COLUMN_SIZE = 60;
    /**
     * No column size
     */
    private static final int NO_COLUMN_SIZE = 50;
    /**
     * Line number size
     */
    private static final int LINE_COLUMN_SIZE = 80;
    /**
     * Line number basis size
     */
    private static final int LINE_BASIS_COLUMN_SIZE = 120;
    /**
     * Total line size
     */
    private static final int TOTAL_LINE_COLUMN_SIZE = 80;

    /**
     * {@inheritDoc}
     *
     * Persists the editor's current tree model back to the .jbm file and
     * refreshes the work-status view.
     * NOTE(review): returns {@code true} even when the save fails (the error
     * is only shown in a dialog) — confirm that callers treat the return value
     * as "handled" rather than "saved successfully".
     */
    @SuppressWarnings("unchecked")
    @Override
    public boolean save(String fileName, TreeViewer treeViewer) {
        LOGGER.info(MessageUtil.INF_SAVE);
        try {
            JbmAccessFactory.getJbmWriteFacade().update(fileName,
                    (List<JbmEditorMigrationRow>) treeViewer.getInput());
            WorkStatusView.out(false, null);
            LOGGER.debug(MessageUtil.INF_SAVED);
        } catch (JbmException e) {
            // Failed to save
            PluginUtil.viewErrorDialog(ResourceUtil.JBM_EDITOR,
                    MessageUtil.ERR_SAVE + StringUtil.LINE_SEPARATOR + e.getMessage(), e);
        }
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * Reads the .jbm file and widens the row list to the editor's generic
     * {@link MigrationEditorRow} type.
     */
    @Override
    public List<MigrationEditorRow> load(String fileName) throws JbmException {
        List<JbmEditorMigrationRow> list = JbmAccessFactory.getJbmReadFacade().read(fileName);
        List<MigrationEditorRow> resultList = new ArrayList<MigrationEditorRow>();
        resultList.addAll(list);
        return resultList;
    }

    /**
     * {@inheritDoc}
     *
     * Installs a dynamically rebuilt context menu on the tree.
     */
    @Override
    public void contextMenu(TreeViewer treeViewer) {
        MenuManager menuMgr = new MenuManager("#PopupMenu"); //$NON-NLS-1$
        // Rebuild the menu each time it is shown so its contents reflect selection.
        menuMgr.setRemoveAllWhenShown(true);
        menuMgr.addMenuListener(new ContextMenuListener(this));
        Menu menu = menuMgr.createContextMenu(treeViewer.getControl());
        treeViewer.getControl().setMenu(menu);
    }

    /**
     * {@inheritDoc}
     *
     * Creates all tree columns. Columns created with {@code selectionListener}
     * are sortable; those created with {@code null} are not.
     */
    @Override
    public void addColumn(Tree tree) {
        SelectionListener selectionListener = new JbmEditorSortListener(this);
        super.createTreeColumn(tree, JbmEditorEnum.INDEX_NO.getName(), SWT.NULL,
                JbmEditorPart.NO_COLUMN_SIZE, selectionListener);
        super.createTreeColumn(tree, JbmEditorEnum.BIG_ITEM.getName(), SWT.NULL,
                JbmEditorPart.BIG_ITEM_COLUMN_SIZE, selectionListener);
        super.createTreeColumn(tree, JbmEditorEnum.MIDDLE_ITEM.getName(), SWT.NULL,
                JbmEditorPart.MIDDLE_ITEM_COLUMN_SIZE, selectionListener);
        super.createTreeColumn(tree, JbmEditorEnum.DIFFICULTY.getName(), SWT.NULL,
                JbmEditorPart.DIFFICULTY_COLUMN_SIZE, selectionListener);
        super.createTreeColumn(tree, JbmEditorEnum.HIT_NUM.getName(), SWT.RIGHT,
                JbmEditorPart.HIT_COLUMN_SIZE, selectionListener);
        super.createTreeColumn(tree, JbmEditorEnum.TARGET_FILE_PATH.getName(), SWT.NULL,
                JbmEditorPart.TARGET_FILE_PATH_COLUMN_SIZE, null);
        super.createTreeColumn(tree, JbmEditorEnum.ROW_NO.getName(), SWT.RIGHT,
                JbmEditorPart.ROW_COLUMN_SIZE, null);
        // Number of line (with sort)
        super.createTreeColumn(tree, JbmEditorEnum.LINE_NUM.getName(), SWT.NULL,
                JbmEditorPart.LINE_COLUMN_SIZE, selectionListener);
        // Line number basis
        // NOTE(review): SWT.SELECTED is not a column style constant; SWT.NULL
        // was probably intended here — confirm before changing.
        super.createTreeColumn(tree, JbmEditorEnum.LINE_NUM_BASIS.getName(), SWT.SELECTED,
                JbmEditorPart.LINE_BASIS_COLUMN_SIZE, null);
        // Total line
        super.createTreeColumn(tree, JbmEditorEnum.TOTAL_LINE.getName(), SWT.NULL,
                JbmEditorPart.TOTAL_LINE_COLUMN_SIZE, selectionListener);
        super.createTreeColumn(tree, JbmEditorEnum.CHAPTER_NO.getName(), SWT.NULL,
                JbmEditorPart.CHAPTER_NO_COLUMN_SIZE, null);
        // super.createTreeColumn(tree,
        // JbmEditorEnum.VISUAL_CONFIRM_ITEM.getName(), SWT.NULL, 0, null); //
        // // super.createTreeColumn(tree, JbmEditorEnum.HIARING_ITEM.getName(),
        // SWT.NULL, 0, null);
        super.createTreeColumn(tree, JbmEditorEnum.VISUAL_CONFIRM_STATSU_ITEM.getName(), SWT.NULL,
                JbmEditorPart.VISUAL_COLUMN_SIZE, selectionListener);
        super.createTreeColumn(tree, JbmEditorEnum.HIARING_STATUS.getName(), SWT.NULL,
                JbmEditorPart.HIARING_COLUMN_SIZE, selectionListener);
    }

    /**
     * {@inheritDoc}
     *
     * Resolves the enclosing project from the editor input (when available)
     * so double-clicks can open the referenced source file.
     */
    @Override
    public IDoubleClickListener getDoubleClickListener(MigrationEditorOperation editor) {
        IProject project = null;
        if (editor instanceof EditorPart) {
            EditorPart editorPart = (EditorPart) editor;
            project = ((IFileEditorInput) editorPart.getEditorInput()).getFile().getProject();
        }
        return new JbmEditorDoubleClickListener(editor, project);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public MigrationRowDelete getRowDeleteExecutor() {
        return new JbmRowDelete();
    }

    /**
     * {@inheritDoc}
     *
     * Prepares the checklist reader and opens the search perspective before
     * the editor UI is built.
     */
    @Override
    public void preInit() {
        try {
            // For check list view update information,
            // initialize the checklist file information acquisition class
            CheckListInformationFactory.getCheckListInformationFacade().initCheckListInformationReader();
            PluginUtil.openSeachPerspective();
            LOGGER.info(ResourceUtil.JBM_EDITOR + MessageUtil.INF_START);
        } catch (WorkbenchException e) {
            // Perspective could not be opened; abort editor initialization.
            throw new RuntimeException(e);
        }
    }

    /**
     * {@inheritDoc}
     *
     * Pushes row data to the status view and fully expands the tree once the
     * editor controls exist.
     */
    @Override
    public void postCreatePartControl() {
        List<JbmEditorMigrationRow> topList = getInput();
        for (JbmEditorMigrationRow row : topList) {
            row.updateWriteData();
        }
        WorkStatusView.out(false, topList);
        // Expand all tree level
        super.getTreeViewer().expandAll();
    }

    /**
     * Get @ see JbmEditorMigrationRow list from the View data.<br/>
     *
     * @return @see JbmEditorMigrationRwo list
     */
    @SuppressWarnings("unchecked")
    private List<JbmEditorMigrationRow> getInput() {
        TreeViewer treeViewer = getTreeViewer();
        return (List<JbmEditorMigrationRow>) treeViewer.getInput();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getTitle() {
        return ResourceUtil.JBM_EDITOR;
    }

    /**
     * {@inheritDoc}
     *
     * Refreshes the work-status view whenever the editor regains focus.
     */
    @Override
    public void setFocus() {
        WorkStatusView.out(false, getInput());
        super.setFocus();
    }
}
apache-2.0
tectronics/stickycode
net.stickycode.mockwire/sticky-bdd/src/main/java/net/stickycode/mockwire/bdd/StickyMockitoStubberImpl.java
3159
/** * Copyright (c) 2010 RedEngine Ltd, http://www.redengine.co.nz. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ /** * Copyright (c) 2010 RedEngine Ltd, http://www.redengine.co.nz. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ package net.stickycode.mockwire.bdd; import org.mockito.stubbing.Answer; import org.mockito.stubbing.Stubber; public class StickyMockitoStubberImpl implements StickyMockitoStubber { private final Stubber mockitoStubber; public StickyMockitoStubberImpl(Stubber mockitoStubber) { this.mockitoStubber = mockitoStubber; } /* (non-Javadoc) * @see org.mockitousage.customization.StickyMockitoMockito.StickyMockitoStubber#given(java.lang.Object) */ public <T> T given(T mock) { return mockitoStubber.when(mock); } /* (non-Javadoc) * @see org.mockitousage.customization.StickyMockitoMockito.StickyMockitoStubber#willAnswer(org.mockito.stubbing.Answer) */ public StickyMockitoStubber willAnswer(Answer answer) { return new StickyMockitoStubberImpl(mockitoStubber.doAnswer(answer)); } /* (non-Javadoc) * @see org.mockitousage.customization.StickyMockitoMockito.StickyMockitoStubber#willNothing() */ public StickyMockitoStubber willDoNothing() { return new StickyMockitoStubberImpl(mockitoStubber.doNothing()); } /* (non-Javadoc) * @see org.mockitousage.customization.StickyMockitoMockito.StickyMockitoStubber#willReturn(java.lang.Object) */ public StickyMockitoStubber willReturn(Object toBeReturned) { return new StickyMockitoStubberImpl(mockitoStubber.doReturn(toBeReturned)); } /* (non-Javadoc) * @see org.mockitousage.customization.StickyMockitoMockito.StickyMockitoStubber#willThrow(java.lang.Throwable) */ public StickyMockitoStubber willThrow(Throwable toBeThrown) { return new StickyMockitoStubberImpl(mockitoStubber.doThrow(toBeThrown)); } }
apache-2.0
robertnishihara/ray
java/api/src/main/java/io/ray/api/placementgroup/PlacementGroup.java
392
package io.ray.api.placementgroup; /** * A placement group is used to place interdependent actors according to a specific strategy * {@link PlacementStrategy}. * When a placement group is created, the corresponding actor slots and resources are preallocated. * A placement group consists of one or more bundles plus a specific placement strategy. */ public interface PlacementGroup { }
apache-2.0
dbarowy/java-aws-mturk
src/com/amazonaws/mturk/addon/HITDataReader.java
4017
/*
 * Copyright 2007-2012 Amazon Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://aws.amazon.com/apache2.0
 *
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
 * OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amazonaws.mturk.addon;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.log4j.Logger;

import com.amazonaws.mturk.util.FileUtil;

/**
 * The HITDataReader class provides a structured way to access HIT data stored in a file.
 * It assumes that the data is stored essentially as a table with rows and columns.
 * The top row is assumed to be field names, and the rows below it are data.
 */
@Deprecated
public class HITDataReader implements HITDataInput {

  public static final int HIT_ID_FIELD_IND = 0;
  public static final int HIT_TYPE_ID_FIELD_IND = 1;
  public final static String DEFAULT_DELIM = "\t";
  public final static String HIT_ID_HEADER = "hitid";

  protected FileUtil fileImporter = null;
  // Raw file lines; rows[0] holds the header (field names).
  protected String[] rows = null;
  // Lazily initialized from rows[0] on first access.
  protected String[] fieldNames = null;
  protected String delim = null;

  protected static Logger log = Logger.getLogger(HITDataReader.class);

  /** Creates an empty reader using the default (tab) delimiter. */
  public HITDataReader() {
    super();
    this.delim = DEFAULT_DELIM;
    // initialize rowValues only when rowValues is accessed
  }

  /**
   * Reads the given file, assuming the first row contains field names and
   * the delimiter is {@link #DEFAULT_DELIM}.
   */
  public HITDataReader(String fileName) throws IOException {
    // Constructor that assumes the first row contains fieldNames and the delimeter is DEFAULT_DELIM
    this(fileName, DEFAULT_DELIM);
  }

  /**
   * Reads the given file with an explicit column delimiter (a regex, as used
   * by {@link String#split(String)}).
   */
  public HITDataReader(String fileName, String delim) throws IOException {
    super(); // Base constructor
    this.fileImporter = new FileUtil(fileName);
    this.delim = delim;
    // initialize( if we aren't given first names, then first row contains fields )
    this.setRows(fileImporter.getLines());
    this.fill();
  }

  /** Returns the backing file name, or null when no file was loaded. */
  public String getFileName() {
    if (this.fileImporter == null) {
      return null;
    }
    return this.fileImporter.getFileName();
  }

  /** Returns the header row split into field names (lazily cached). */
  public String[] getFieldNames() {
    if (this.fieldNames == null) {
      this.fieldNames = this.getRowValues(0);
    }
    return fieldNames;
  }

  /** Returns the number of rows, including the header row. */
  public int getNumRows() {
    if (this.rows == null) {
      return 0;
    }
    return rows.length;
  }

  public String getDelimeter() {
    return this.delim;
  }

  /**
   * Returns row {@code rowNum} split on the delimiter, or null when the row
   * does not exist.
   */
  public String[] getRowValues(int rowNum) {
    String row = this.getRow(rowNum);
    if (row != null) {
      return row.split(this.delim);
    }
    return null;
  }

  /**
   * Returns row {@code rowNum} as a fieldName -> value map, or null when the
   * file has no header or no such row. Rows with fewer columns than the
   * header yield a map containing only the columns present.
   */
  public Map<String, String> getRowAsMap(int rowNum) {
    String[] fieldNames = this.getFieldNames();
    String[] rowValues = this.getRowValues(rowNum);

    if (fieldNames == null || fieldNames.length == 0) {
      log.info("No field names were found in your HIT Input. Your first row in your input file must contain field names for each column.");
      return null;
    }

    if (rowValues == null || rowValues.length == 0) {
      log.info("No input rows were found in your HIT Input. Your input file must contain at least one row of input.");
      return null;
    }

    HashMap<String, String> rowValueMap = new HashMap<String, String>();
    // Bound by the shorter array: a short data row must not throw
    // ArrayIndexOutOfBoundsException.
    int count = Math.min(fieldNames.length, rowValues.length);
    for (int i = 0; i < count; i++) {
      rowValueMap.put(fieldNames[i], rowValues[i]);
    }
    return rowValueMap;
  }

  public void setRows(String[] rows) {
    this.rows = rows;
  }

  /**
   * Returns the raw line at {@code rowNum}, or null when out of range.
   * Fixed off-by-one: the previous check ({@code rows.length >= rowNum})
   * accepted {@code rowNum == rows.length} (and negative indices) and then
   * threw ArrayIndexOutOfBoundsException.
   */
  public String getRow(int rowNum) {
    if (this.rows != null && rowNum >= 0 && rowNum < this.rows.length) {
      return this.rows[rowNum];
    }
    return null;
  }

  // Caches the header row's field names once rows are loaded.
  private void fill() {
    if (this.rows != null) {
      this.fieldNames = rows[0].split(this.delim);
    }
  }
}
apache-2.0
Tinker-S/FaceBarCodeDemo
src/com/google/zxing/ResultMetadataType.java
2551
/* * Copyright 2008 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing; /** * Represents some type of metadata about the result of the decoding that the * decoder wishes to communicate back to the caller. * * @author Sean Owen */ public enum ResultMetadataType { /** * Unspecified, application-specific metadata. Maps to an unspecified * {@link Object}. */ OTHER, /** * Denotes the likely approximate orientation of the barcode in the image. * This value is given as degrees rotated clockwise from the normal, upright * orientation. For example a 1D barcode which was found by reading * top-to-bottom would be said to have orientation "90". This key maps to an * {@link Integer} whose value is in the range [0,360). */ ORIENTATION, /** * <p> * 2D barcode formats typically encode text, but allow for a sort of 'byte * mode' which is sometimes used to encode binary data. While {@link Result} * makes available the complete raw bytes in the barcode for these formats, * it does not offer the bytes from the byte segments alone. * </p> * * <p> * This maps to a {@link java.util.List} of byte arrays corresponding to the * raw bytes in the byte segments in the barcode, in order. * </p> */ BYTE_SEGMENTS, /** * Error correction level used, if applicable. The value type depends on the * format, but is typically a String. */ ERROR_CORRECTION_LEVEL, /** * For some periodicals, indicates the issue number as an {@link Integer}. 
*/ ISSUE_NUMBER, /** * For some products, indicates the suggested retail price in the barcode as * a formatted {@link String}. */ SUGGESTED_PRICE, /** * For some products, the possible country of manufacture as a * {@link String} denoting the ISO country code. Some map to multiple * possible countries, like "US/CA". */ POSSIBLE_COUNTRY, /** * For some products, the extension text */ UPC_EAN_EXTENSION, /** * PDF417-specific metadata */ PDF417_EXTRA_METADATA, }
apache-2.0
learning-layers/Expert-Identification-Service
src/main/i5/las2peer/services/servicePackage/visualizer/GraphMl2GEXFConverter.java
2804
package i5.las2peer.services.servicePackage.visualizer; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; import org.gephi.graph.api.GraphController; import org.gephi.graph.api.GraphModel; import org.gephi.graph.api.Node; import org.gephi.io.exporter.api.ExportController; import org.gephi.io.importer.api.Container; import org.gephi.io.importer.api.ImportController; import org.gephi.io.processor.plugin.DefaultProcessor; import org.gephi.layout.plugin.force.StepDisplacement; import org.gephi.layout.plugin.force.yifanHu.YifanHuLayout; import org.gephi.project.api.ProjectController; import org.gephi.project.api.Workspace; import org.openide.util.Lookup; /** * @author sathvik * */ public class GraphMl2GEXFConverter { GraphModel graphModel; public GraphMl2GEXFConverter() { } /** * * @param srcpath * @throws IOException */ public void convert(String srcpath) throws IOException { File graphmlFile = new File(srcpath); // Init a project - and therefore a workspace ProjectController pc = Lookup.getDefault().lookup(ProjectController.class); pc.newProject(); Workspace workspace = pc.getCurrentWorkspace(); // get import controller ImportController importController = Lookup.getDefault().lookup(ImportController.class); // Import file Container container = importController.importFile(graphmlFile); // Append imported data to GraphAPI importController.process(container, new DefaultProcessor(), workspace); graphModel = Lookup.getDefault().lookup(GraphController.class).getModel(); } public void colorNodes(LinkedHashMap<String, Double> nodes) { ArrayList<String> keyList = new ArrayList<String>(); keyList.addAll(nodes.keySet()); for (Node n : graphModel.getGraph().getNodes()) { String nodeId = n.getNodeData().getId(); if (nodes.keySet().contains(nodeId)) { int index = keyList.indexOf(nodeId); n.getNodeData().setColor((float) 1, 0, 0); n.getNodeData().setSize((float) (60 / Math.log(index + 2))); } else { 
n.getNodeData().setColor(0, (float) 0.2, 0); } } } public void applyLayout() { YifanHuLayout layout = new YifanHuLayout(null, new StepDisplacement(1f)); layout.setGraphModel(graphModel); layout.initAlgo(); layout.resetPropertiesValues(); layout.setOptimalDistance(200f); for (int i = 0; i < 100 && layout.canAlgo(); i++) { layout.goAlgo(); } layout.endAlgo(); } /** * * @param destpath * @throws IOException */ public void export(String destpath) throws IOException { System.out.println("EXPORTING ...."); // Export graph to GEXF ExportController ec = Lookup.getDefault().lookup(ExportController.class); ec.exportFile(new File(destpath + ".gexf")); } }
apache-2.0
jgarman/autopsy
Core/src/org/sleuthkit/autopsy/filesearch/DateSearchPanel.java
18152
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.filesearch;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JFormattedTextField;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;

/**
 * Subpanel with controls for file data filtering: a date range (with calendar
 * pickers), a time-zone selector, and checkboxes selecting which timestamps
 * (modified/changed/accessed/created) the range applies to. The layout code is
 * generated by the NetBeans Form Editor.
 */
class DateSearchPanel extends javax.swing.JPanel {

    // Format used both to parse text typed into the date fields and to render
    // dates picked from the calendar buttons.
    DateFormat dateFormat;
    // Time-zone display strings backing the combo box.
    List<String> timeZones;

    DateSearchPanel(DateFormat dateFormat, List<String> timeZones) {
        this.dateFormat = dateFormat;
        this.timeZones = timeZones;

        initComponents();
        customizeComponents();
    }

    /**
     * Wires a shared cut/copy/paste/select-all context menu to both date text
     * fields.
     */
    private void customizeComponents(){
        dateFromTextField.setComponentPopupMenu(rightClickMenu);
        dateToTextField.setComponentPopupMenu(rightClickMenu);
        ActionListener actList = new ActionListener(){
            @Override
            public void actionPerformed(ActionEvent e){
                JMenuItem jmi = (JMenuItem) e.getSource();
                /* Because there are two text fields, we have to determine which invoked the popupmenu */
                JFormattedTextField jftf = (JFormattedTextField) ((JPopupMenu) jmi.getParent()).getInvoker();
                if(jmi.equals(cutMenuItem))
                    jftf.cut();
                else if(jmi.equals(copyMenuItem))
                    jftf.copy();
                else if(jmi.equals(pasteMenuItem))
                    jftf.paste();
                else if(jmi.equals(selectAllMenuItem))
                    jftf.selectAll();
            }
        };
        cutMenuItem.addActionListener(actList);
        copyMenuItem.addActionListener(actList);
        pasteMenuItem.addActionListener(actList);
        selectAllMenuItem.addActionListener(actList);
    }

    // --- accessors used by the enclosing search filter to read the UI state ---

    JCheckBox getAccessedCheckBox() {
        return accessedCheckBox;
    }

    JCheckBox getChangedCheckBox() {
        return changedCheckBox;
    }

    JCheckBox getCreatedCheckBox() {
        return createdCheckBox;
    }

    JCheckBox getDateCheckBox() {
        return dateCheckBox;
    }

    JFormattedTextField getDateFromTextField() {
        return dateFromTextField;
    }

    JFormattedTextField getDateToTextField() {
        return dateToTextField;
    }

    JCheckBox getModifiedCheckBox() {
        return modifiedCheckBox;
    }

    JComboBox<String> getTimeZoneComboBox() {
        return timeZoneComboBox;
    }

    /**
     * Replaces the contents of the time-zone combo box with the given list.
     *
     * @param newTimeZones time-zone display strings to show
     */
    void setTimeZones(List<String> newTimeZones) {
        this.timeZones = newTimeZones;
        this.timeZoneComboBox.removeAllItems();
        for (String tz : newTimeZones) {
            this.timeZoneComboBox.addItem(tz);
        }
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        rightClickMenu = new javax.swing.JPopupMenu();
        cutMenuItem = new javax.swing.JMenuItem();
        copyMenuItem = new javax.swing.JMenuItem();
        pasteMenuItem = new javax.swing.JMenuItem();
        selectAllMenuItem = new javax.swing.JMenuItem();
        dateToTextField = new JFormattedTextField(this.dateFormat);
        jLabel1 = new javax.swing.JLabel();
        jLabel4 = new javax.swing.JLabel();
        dateCheckBox = new javax.swing.JCheckBox();
        timeZoneComboBox = new JComboBox<>(this.timeZones.toArray(new String[this.timeZones.size()]));
        timeZoneComboBox.setRenderer(new DateSearchFilter.ComboBoxRenderer());
        jLabel3 = new javax.swing.JLabel();
        dateFromTextField = new JFormattedTextField(this.dateFormat);
        jLabel2 = new javax.swing.JLabel();
        modifiedCheckBox = new javax.swing.JCheckBox();
        changedCheckBox = new javax.swing.JCheckBox();
        accessedCheckBox = new javax.swing.JCheckBox();
        createdCheckBox = new javax.swing.JCheckBox();
        dateFromButtonCalendar = new org.jbundle.thin.base.screen.jcalendarbutton.JCalendarButton();
        dateToButtonCalendar = new org.jbundle.thin.base.screen.jcalendarbutton.JCalendarButton();

        cutMenuItem.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.cutMenuItem.text")); // NOI18N
        rightClickMenu.add(cutMenuItem);

        copyMenuItem.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.copyMenuItem.text")); // NOI18N
        rightClickMenu.add(copyMenuItem);

        pasteMenuItem.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.pasteMenuItem.text")); // NOI18N
        rightClickMenu.add(pasteMenuItem);

        selectAllMenuItem.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.selectAllMenuItem.text")); // NOI18N
        rightClickMenu.add(selectAllMenuItem);

        dateToTextField.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.dateToTextField.text")); // NOI18N
        dateToTextField.addFocusListener(new java.awt.event.FocusAdapter() {
            public void focusLost(java.awt.event.FocusEvent evt) {
                dateToTextFieldFocusLost(evt);
            }
        });

        jLabel1.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.jLabel1.text")); // NOI18N

        jLabel4.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.jLabel4.text")); // NOI18N

        dateCheckBox.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.dateCheckBox.text")); // NOI18N

        jLabel3.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.jLabel3.text")); // NOI18N

        dateFromTextField.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.dateFromTextField.text")); // NOI18N
        dateFromTextField.addFocusListener(new java.awt.event.FocusAdapter() {
            public void focusLost(java.awt.event.FocusEvent evt) {
                dateFromTextFieldFocusLost(evt);
            }
        });

        jLabel2.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.jLabel2.text")); // NOI18N

        modifiedCheckBox.setSelected(true);
        modifiedCheckBox.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.modifiedCheckBox.text")); // NOI18N

        changedCheckBox.setSelected(true);
        changedCheckBox.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.changedCheckBox.text")); // NOI18N

        accessedCheckBox.setSelected(true);
        accessedCheckBox.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.accessedCheckBox.text")); // NOI18N

        createdCheckBox.setSelected(true);
        createdCheckBox.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.createdCheckBox.text")); // NOI18N

        dateFromButtonCalendar.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.dateFromButtonCalendar.text")); // NOI18N
        dateFromButtonCalendar.addPropertyChangeListener(new java.beans.PropertyChangeListener() {
            public void propertyChange(java.beans.PropertyChangeEvent evt) {
                dateFromPopupChanged(evt);
            }
        });

        dateToButtonCalendar.setText(org.openide.util.NbBundle.getMessage(DateSearchPanel.class, "DateSearchPanel.dateToButtonCalendar.text")); // NOI18N
        dateToButtonCalendar.addPropertyChangeListener(new java.beans.PropertyChangeListener() {
            public void propertyChange(java.beans.PropertyChangeEvent evt) {
                dateToPopupChanged(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addComponent(dateCheckBox)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(dateFromTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 92, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(0, 0, 0)
                .addComponent(dateFromButtonCalendar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(jLabel1)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(dateToTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 92, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(0, 0, 0)
                .addComponent(dateToButtonCalendar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGroup(layout.createSequentialGroup()
                .addGap(21, 21, 21)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(changedCheckBox)
                            .addComponent(modifiedCheckBox))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(accessedCheckBox)
                            .addComponent(createdCheckBox)))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jLabel4)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(timeZoneComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, 193, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGroup(layout.createSequentialGroup()
                        .addGap(21, 21, 21)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jLabel3)
                            .addComponent(jLabel2)))))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(dateCheckBox)
                        .addComponent(dateFromTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addComponent(dateToButtonCalendar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel1)
                        .addComponent(dateToTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addComponent(dateFromButtonCalendar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(4, 4, 4)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel4)
                    .addComponent(timeZoneComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(modifiedCheckBox)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(changedCheckBox))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(accessedCheckBox)
                        .addGap(23, 23, 23))
                    .addComponent(createdCheckBox))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(jLabel2)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jLabel3)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
    }// </editor-fold>//GEN-END:initComponents

    // Keeps the "from" calendar button in sync when the user edits the text
    // field directly; unparseable text is silently ignored.
    private void dateFromTextFieldFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_dateFromTextFieldFocusLost
        // set the "from" calendar button to listen to change in the text field
        String fromDateString = this.dateFromTextField.getText();
        if (!fromDateString.equals("")) {
            try {
                Date fromDate = dateFormat.parse(fromDateString);
                dateFromButtonCalendar.setTargetDate(fromDate);
            } catch (ParseException ex) {
                // for now, no need to show the error message to the user here
            }
        }
    }//GEN-LAST:event_dateFromTextFieldFocusLost

    // Keeps the "to" calendar button in sync when the user edits the text
    // field directly; unparseable text is silently ignored.
    private void dateToTextFieldFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_dateToTextFieldFocusLost
        // set the "to" calendar button to listen to change in the text field
        String toDateString = this.dateToTextField.getText();
        if (!toDateString.equals("")) {
            try {
                Date toDate = dateFormat.parse(toDateString);
                dateToButtonCalendar.setTargetDate(toDate);
            } catch (ParseException ex) {
                // for now, no need to show the error message to the user here
            }
        }
    }//GEN-LAST:event_dateToTextFieldFocusLost

    // Fired by the "from" calendar popup; only Date-valued property changes
    // update the text field.
    private void dateFromPopupChanged(java.beans.PropertyChangeEvent evt) {//GEN-FIRST:event_dateFromPopupChanged
        if (evt.getNewValue() instanceof Date) {
            setFromDate((Date) evt.getNewValue());
        }
    }//GEN-LAST:event_dateFromPopupChanged

    // Fired by the "to" calendar popup; only Date-valued property changes
    // update the text field.
    private void dateToPopupChanged(java.beans.PropertyChangeEvent evt) {//GEN-FIRST:event_dateToPopupChanged
        if (evt.getNewValue() instanceof Date) {
            setToDate((Date) evt.getNewValue());
        }
    }//GEN-LAST:event_dateToPopupChanged

    /**
     * Sets the "from" date text field and calendar button from the given date.
     * A null date clears the text field.
     * @param date The date object
     */
    private void setFromDate(Date date) {
        String dateStringResult = "";
        if (date != null) {
            dateStringResult = dateFormat.format(date);
        }

        dateFromTextField.setText(dateStringResult);
        dateFromButtonCalendar.setTargetDate(date);
    }

    /**
     * Sets the "to" date text field and calendar button from the given date.
     * A null date clears the text field.
     * @param date The date object
     */
    private void setToDate(Date date) {
        String dateStringResult = "";
        if (date != null) {
            dateStringResult = dateFormat.format(date);
        }

        dateToTextField.setText(dateStringResult);
        dateToButtonCalendar.setTargetDate(date);
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JCheckBox accessedCheckBox;
    private javax.swing.JCheckBox changedCheckBox;
    private javax.swing.JMenuItem copyMenuItem;
    private javax.swing.JCheckBox createdCheckBox;
    private javax.swing.JMenuItem cutMenuItem;
    private javax.swing.JCheckBox dateCheckBox;
    private org.jbundle.thin.base.screen.jcalendarbutton.JCalendarButton dateFromButtonCalendar;
    private javax.swing.JFormattedTextField dateFromTextField;
    private org.jbundle.thin.base.screen.jcalendarbutton.JCalendarButton dateToButtonCalendar;
    private javax.swing.JFormattedTextField dateToTextField;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JCheckBox modifiedCheckBox;
    private javax.swing.JMenuItem pasteMenuItem;
    private javax.swing.JPopupMenu rightClickMenu;
    private javax.swing.JMenuItem selectAllMenuItem;
    private javax.swing.JComboBox<String> timeZoneComboBox;
    // End of variables declaration//GEN-END:variables

    /**
     * Registers a listener fired when Enter is pressed in either date field.
     */
    void addActionListener(ActionListener l) {
        dateFromTextField.addActionListener(l);
        dateToTextField.addActionListener(l);
    }
}
apache-2.0
akirakw/asakusafw
info/cli/src/main/java/com/asakusafw/info/cli/list/ListBatchCommand.java
3355
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.info.cli.list; import java.io.IOException; import java.io.PrintWriter; import java.nio.file.Path; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.asakusafw.info.BatchInfo; import com.asakusafw.info.cli.common.ApplicationBaseDirectoryParameter; import com.asakusafw.utils.jcommander.common.HelpParameter; import com.asakusafw.utils.jcommander.common.OutputParameter; import com.asakusafw.utils.jcommander.common.VerboseParameter; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; import com.fasterxml.jackson.databind.ObjectMapper; /** * A command for printing list of batch. 
* @since 0.10.0 */ @Parameters( commandNames = "batch", commandDescriptionKey = "command.generate-list-batch", resourceBundle = "com.asakusafw.info.cli.jcommander" ) public class ListBatchCommand implements Runnable { static final Logger LOG = LoggerFactory.getLogger(ListBatchCommand.class); @ParametersDelegate final HelpParameter helpParameter = new HelpParameter(); @ParametersDelegate final ApplicationBaseDirectoryParameter batchappsParameter = new ApplicationBaseDirectoryParameter(); @ParametersDelegate final VerboseParameter verboseParameter = new VerboseParameter(); @ParametersDelegate final OutputParameter outputParameter = new OutputParameter(); @Override public void run() { LOG.debug("starting {}", getClass().getSimpleName()); try (PrintWriter writer = outputParameter.open()) { List<Path> applications = batchappsParameter.getEntries(); if (verboseParameter.isRequired()) { ObjectMapper mapper = new ObjectMapper(); for (Path batchapp : applications) { Path infoFile = ApplicationBaseDirectoryParameter.findInfo(batchapp).get(); try { BatchInfo info = mapper.readValue(infoFile.toFile(), BatchInfo.class); Map<String, Object> members = new LinkedHashMap<>(); members.put("class", info.getDescriptionClass()); members.put("comment", info.getComment()); writer.printf("%s:%n", info.getId()); ListUtil.printBlock(writer, 4, members); } catch (IOException e) { LOG.error("error occurred while loading batch information: {}", infoFile, e); } } } else { applications.forEach(it -> writer.println(ListUtil.getName(it))); } } } }
apache-2.0
alexduch/grobid
grobid-trainer/src/main/java/org/grobid/trainer/WapitiTrainer.java
1906
package org.grobid.trainer; import org.grobid.core.GrobidModel; import org.grobid.core.jni.WapitiModel; import org.grobid.core.GrobidModels; import org.grobid.trainer.SegmentationTrainer; import java.math.BigDecimal; import java.io.File; /** * User: zholudev * Date: 3/20/14 */ public class WapitiTrainer implements GenericTrainer { public static final String WAPITI = "wapiti"; // default training parameters (only exploited by Wapiti) protected double epsilon = 0.00001; // default size of the interval for stopping criterion protected int window = 20; // default similar to CRF++ protected int nbMaxIterations = 2000; // by default maximum of training iterations @Override public void train(File template, File trainingData, File outputModel, int numThreads, GrobidModel model) { System.out.println("\tepsilon: " + epsilon); System.out.println("\twindow: " + window); System.out.println("\tnb max iterations: " + nbMaxIterations); System.out.println("\tnb threads: " + numThreads); WapitiModel.train(template, trainingData, outputModel, "--nthread " + numThreads + // " --algo sgd-l1" + " -e " + BigDecimal.valueOf(epsilon).toPlainString() + " -w " + window + " -i " + nbMaxIterations ); } @Override public String getName() { return WAPITI; } @Override public void setEpsilon(double epsilon) { this.epsilon = epsilon; } @Override public void setWindow(int window) { this.window = window; } @Override public double getEpsilon() { return epsilon; } @Override public int getWindow() { return window; } @Override public void setNbMaxIterations(int interations) { this.nbMaxIterations = interations; } @Override public int getNbMaxIterations() { return nbMaxIterations; } }
apache-2.0
AttwellBrian/dagger
compiler/src/it/functional-tests/src/test/java/test/NonComponentDependencyTest.java
1158
/* * Copyright (C) 2015 The Dagger Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package test; import static com.google.common.truth.Truth.assertThat; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @RunWith(JUnit4.class) public class NonComponentDependencyTest { @Test public void testThing() { NonComponentDependencyComponent component = DaggerNonComponentDependencyComponent.builder() .thingComponent(new NonComponentDependencyComponent.ThingComponentImpl()) .build(); assertThat(component).isNotNull(); assertThat(component.thingTwo()).isNotNull(); } }
apache-2.0
antoinesd/weld-core
tests-arquillian/src/test/java/org/jboss/weld/tests/security/members/SimpleDecorator.java
1085
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2013, Red Hat, Inc., and individual contributors
 * by the @authors tag. See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.weld.tests.security.members;

import javax.decorator.Decorator;
import javax.decorator.Delegate;
import javax.inject.Inject;

/**
 * A minimal CDI decorator for {@link Simple}; its {@link #ping()} deliberately
 * does nothing (i.e. it swallows the call rather than forwarding to the
 * delegate), which is sufficient for the enclosing test fixture.
 */
@Decorator
public class SimpleDecorator implements Simple {

    // Injection point for the decorated instance, as required by the
    // CDI decorator contract; unused because ping() does not forward.
    @Inject
    @Delegate
    private Simple delegate;

    @Override
    public void ping() {
    }
}
apache-2.0
OrienteerDW/Orienteer
orienteer-core/src/main/java/org/orienteer/core/boot/loader/util/OMetadataUpdater.java
16345
package org.orienteer.core.boot.loader.util;

import com.google.common.collect.Lists;
import org.apache.http.util.Args;
import org.orienteer.core.boot.loader.util.artifact.OArtifact;
import org.orienteer.core.boot.loader.util.artifact.OArtifactReference;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.nio.file.Path;
import java.util.List;

/**
 * Utility class for updating metadata.xml - the XML registry of Orienteer module artifacts.
 * Every public operation reads the document from {@link #pathToMetadata}, mutates the DOM
 * in memory and writes the result back via {@code saveDocument}.
 */
class OMetadataUpdater extends AbstractXmlUtil {

    private final Path pathToMetadata;

    /** XPath expression selecting every child element of the root &lt;metadata&gt; tag (one per module). */
    private static final String ALL_MODULES_EXP = String.format("/%s/*", MetadataTag.METADATA.get());

    /**
     * Constructor
     * @param pathToMetadata {@link Path} of metadata.xml
     * @throws IllegalArgumentException if pathToMetadata is null
     */
    OMetadataUpdater(Path pathToMetadata) {
        Args.notNull(pathToMetadata, "pathToMetadata");
        this.pathToMetadata = pathToMetadata;
    }

    /**
     * Create new metadata.xml with oArtifacts
     * @param oArtifacts list of {@link OArtifact} to write to metadata.xml
     * @throws IllegalArgumentException if oArtifacts is null
     */
    void create(List<OArtifact> oArtifacts) {
        Args.notNull(oArtifacts, "oArtifacts");
        Document document = createNewDocument();
        if (document == null) documentCannotCreateException(pathToMetadata);
        Element root = document.createElement(MetadataTag.METADATA.get());
        document.appendChild(root);
        addArtifacts(oArtifacts, document);
        saveDocument(document, pathToMetadata);
    }

    /**
     * Update one artifact in metadata.xml (the jar path is left untouched).
     * @param oArtifact {@link OArtifact} to update
     * @throws IllegalArgumentException if oArtifact is null
     */
    void update(OArtifact oArtifact) {
        Args.notNull(oArtifact, "oArtifact");
        update(oArtifact, false);
    }

    /**
     * Update several artifacts in metadata.xml (jar paths are left untouched).
     * @param oArtifacts list of {@link OArtifact} to update
     * @throws IllegalArgumentException if oArtifacts is null
     */
    void update(List<OArtifact> oArtifacts) {
        Args.notNull(oArtifacts, "oArtifacts");
        update(oArtifacts, false);
    }

    /**
     * Update one artifact in metadata.xml.
     * @param oArtifact {@link OArtifact} to update
     * @param updateJar true - the load flag and jar path are updated;
     *                  false - the whole artifact element is updated
     * @throws IllegalArgumentException if oArtifact is null
     */
    void update(OArtifact oArtifact, boolean updateJar) {
        Args.notNull(oArtifact, "oArtifact");
        update(Lists.newArrayList(oArtifact), updateJar);
    }

    /**
     * Update metadata.xml.
     * Artifacts already present are updated in place; artifacts not yet present are appended.
     * @param oArtifacts list of {@link OArtifact} to update
     * @param updateJar true - only the load flag and jar path are updated;
     *                  false - the whole artifact element is updated
     * @throws IllegalArgumentException if oArtifacts is null
     */
    @SuppressWarnings("unchecked")
    void update(List<OArtifact> oArtifacts, boolean updateJar) {
        Args.notNull(oArtifacts, "oArtifacts");
        Document document = readDocumentFromFile(pathToMetadata);
        if (document == null) documentCannotReadException(pathToMetadata);
        NodeList nodeList = executeExpression(ALL_MODULES_EXP, document);
        List<OArtifact> updatedModules = Lists.newArrayList();
        if (nodeList != null) {
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node node = nodeList.item(i);
                // Fixed: use Node.ELEMENT_NODE (same value, but consistent with the other methods
                // of this class which all compare against the Node constant).
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) node;
                    Element dependencyElement = (Element) element.getElementsByTagName(MetadataTag.DEPENDENCY.get()).item(0);
                    OArtifact oArtifact = containsInModulesConfigsList(dependencyElement, oArtifacts);
                    if (oArtifact != null) {
                        if (updateJar) {
                            changeArtifactLoadAndJar(element, oArtifact);
                        } else {
                            changeArtifactElement(element, oArtifact);
                        }
                        updatedModules.add(oArtifact);
                    }
                }
            }
        }
        // Any artifact that was not matched above is new - append it to the document.
        if (updatedModules.size() != oArtifacts.size()) {
            addArtifacts(difference(updatedModules, oArtifacts), document);
        }
        saveDocument(document, pathToMetadata);
    }

    /**
     * Replace artifactForReplace in metadata.xml by newArtifact.
     * @param artifactForReplace artifact to replace
     * @param newArtifact new artifact
     * @throws IllegalArgumentException if artifactForReplace or newArtifact is null
     */
    @SuppressWarnings("unchecked")
    void update(OArtifact artifactForReplace, OArtifact newArtifact) {
        Args.notNull(artifactForReplace, "artifactForUpdate");
        Args.notNull(newArtifact, "newArtifact");
        Document document = readDocumentFromFile(pathToMetadata);
        if (document == null) documentCannotReadException(pathToMetadata);
        NodeList nodeList = executeExpression(ALL_MODULES_EXP, document);
        if (nodeList != null) {
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node node = nodeList.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) node;
                    Element dependencyElement = (Element) element.getElementsByTagName(MetadataTag.DEPENDENCY.get()).item(0);
                    OArtifact module = containsInModulesConfigsList(dependencyElement, Lists.newArrayList(artifactForReplace));
                    if (module != null) {
                        changeArtifactElement(element, newArtifact);
                        break;
                    }
                }
            }
            saveDocument(document, pathToMetadata);
        }
    }

    /**
     * Delete oArtifact from metadata.xml.
     * @param oArtifact {@link OArtifact} to delete from metadata.xml
     * @throws IllegalArgumentException if oArtifact is null
     */
    @SuppressWarnings("unchecked")
    void delete(OArtifact oArtifact) {
        Args.notNull(oArtifact, "oArtifact");
        Document document = readDocumentFromFile(pathToMetadata);
        if (document == null) documentCannotReadException(pathToMetadata);
        NodeList nodeList = executeExpression(ALL_MODULES_EXP, document);
        if (nodeList != null) {
            Element root = document.getDocumentElement();
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node node = nodeList.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) node;
                    Element dependencyElement = (Element) element.getElementsByTagName(MetadataTag.DEPENDENCY.get()).item(0);
                    if (isNecessaryElement(dependencyElement, oArtifact)) {
                        root.removeChild(element);
                        break;
                    }
                }
            }
            saveDocument(document, pathToMetadata);
        }
    }

    /**
     * Delete a list of {@link OArtifact} from metadata.xml.
     * @param oArtifacts list of {@link OArtifact} to delete from metadata.xml
     * @throws IllegalArgumentException if oArtifacts is null
     */
    @SuppressWarnings("unchecked")
    void delete(List<OArtifact> oArtifacts) {
        Args.notNull(oArtifacts, "oArtifacts");
        Document document = readDocumentFromFile(pathToMetadata);
        if (document == null) documentCannotReadException(pathToMetadata);
        NodeList nodeList = executeExpression(ALL_MODULES_EXP, document);
        if (nodeList != null) {
            Element root = document.getDocumentElement();
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node node = nodeList.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) node;
                    Element dependencyElement = (Element) element.getElementsByTagName(MetadataTag.DEPENDENCY.get()).item(0);
                    OArtifact metadata = containsInModulesConfigsList(dependencyElement, oArtifacts);
                    if (metadata != null) {
                        root.removeChild(element);
                    }
                }
            }
            saveDocument(document, pathToMetadata);
        }
    }

    /**
     * Add artifacts to {@link Document} document.
     * @param oArtifacts list of {@link OArtifact} to add to the document
     * @param document {@link Document} of metadata.xml
     */
    private void addArtifacts(List<OArtifact> oArtifacts, Document document) {
        for (OArtifact artifact : oArtifacts) {
            addArtifact(artifact, document);
        }
    }

    /**
     * Add one artifact as a new &lt;module&gt; element under the document root.
     * @param oArtifact {@link OArtifact} to add to the document
     * @param document {@link Document} of metadata.xml
     */
    private void addArtifact(OArtifact oArtifact, Document document) {
        Element root = document.getDocumentElement();
        Element module = document.createElement(MetadataTag.MODULE.get());
        root.appendChild(module);
        Element load = document.createElement(MetadataTag.LOAD.get());
        load.appendChild(document.createTextNode(Boolean.toString(oArtifact.isLoad())));
        module.appendChild(load);
        Element trusted = document.createElement(MetadataTag.TRUSTED.get());
        trusted.appendChild(document.createTextNode(Boolean.toString(oArtifact.isTrusted())));
        module.appendChild(trusted);
        module.appendChild(createMavenDependency(oArtifact.getArtifactReference(), document));
    }

    /**
     * Rewrite the load/trusted flags and the Maven dependency of an existing artifact element.
     * @param artifactElement {@link Element} of the artifact in metadata.xml
     * @param oArtifact {@link OArtifact} carrying the new values
     */
    @SuppressWarnings("unchecked")
    private void changeArtifactElement(Element artifactElement, OArtifact oArtifact) {
        NodeList nodeList = artifactElement.getElementsByTagName("*");
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                Element element = (Element) node;
                MetadataTag tag = MetadataTag.getByName(element.getTagName());
                switch (tag) {
                    case LOAD:
                        element.setTextContent(Boolean.toString(oArtifact.isLoad()));
                        break;
                    case TRUSTED:
                        element.setTextContent(Boolean.toString(oArtifact.isTrusted()));
                        break;
                    case DEPENDENCY:
                        changeMavenDependency(element, oArtifact.getArtifactReference());
                        break;
                }
            }
        }
    }

    /**
     * Update the artifact's load flag and jar file path only.
     * @param artifactElement {@link Element} of the artifact
     * @param oArtifact {@link OArtifact} carrying the new values
     */
    @SuppressWarnings("unchecked")
    private void changeArtifactLoadAndJar(Element artifactElement, OArtifact oArtifact) {
        Element jar = (Element) artifactElement.getElementsByTagName(MetadataTag.JAR.get()).item(0);
        Element load = (Element) artifactElement.getElementsByTagName(MetadataTag.LOAD.get()).item(0);
        Document document = artifactElement.getOwnerDocument();
        load.setTextContent(Boolean.toString(oArtifact.isLoad()));
        String jarPath = oArtifact.getArtifactReference().getFile().getAbsolutePath();
        if (jar == null) {
            Element jarElement = document.createElement(MetadataTag.JAR.get());
            jarElement.appendChild(document.createTextNode(jarPath));
            // Bug fix: the freshly created <jar> element was never attached to the document,
            // so the jar path silently vanished. Attach it under <dependency> to mirror the
            // layout produced by createMavenDependency; fall back to the artifact element if
            // no <dependency> child exists.
            Element dependency = (Element) artifactElement.getElementsByTagName(MetadataTag.DEPENDENCY.get()).item(0);
            if (dependency != null) {
                dependency.appendChild(jarElement);
            } else {
                artifactElement.appendChild(jarElement);
            }
        } else {
            jar.setTextContent(jarPath);
        }
    }

    /**
     * Rewrite the children of a &lt;dependency&gt; element from an {@link OArtifactReference}.
     * @param dependency {@link Element} of the dependency
     * @param artifactReference {@link OArtifactReference} carrying the new values
     */
    private void changeMavenDependency(Element dependency, OArtifactReference artifactReference) {
        NodeList nodeList = dependency.getElementsByTagName("*");
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                Element element = (Element) node;
                MetadataTag tag = MetadataTag.getByName(element.getTagName());
                switch (tag) {
                    case GROUP_ID:
                        element.setTextContent(artifactReference.getGroupId());
                        break;
                    case ARTIFACT_ID:
                        element.setTextContent(artifactReference.getArtifactId());
                        break;
                    case VERSION:
                        element.setTextContent(artifactReference.getVersion());
                        break;
                    case REPOSITORY:
                        element.setTextContent(artifactReference.getRepository());
                        break;
                    case DESCRIPTION:
                        element.setTextContent(artifactReference.getDescription());
                        break;
                    case JAR:
                        element.setTextContent(artifactReference.getFile().getAbsolutePath());
                        break;
                }
            }
        }
    }

    /**
     * Returns true when the dependency element's groupId/artifactId match the module's reference.
     */
    private boolean isNecessaryElement(Element dependencyElement, OArtifact module) {
        Element groupElement = (Element) dependencyElement.getElementsByTagName(MetadataTag.GROUP_ID.get()).item(0);
        Element artifactElement = (Element) dependencyElement.getElementsByTagName(MetadataTag.ARTIFACT_ID.get()).item(0);
        OArtifactReference artifact = module.getArtifactReference();
        return groupElement.getTextContent().equals(artifact.getGroupId())
                && artifactElement.getTextContent().equals(artifact.getArtifactId());
    }

    /**
     * Returns the first config from modulesConfigs matching the dependency element, or null.
     */
    private OArtifact containsInModulesConfigsList(Element dependencyElement, List<OArtifact> modulesConfigs) {
        for (OArtifact moduleConfig : modulesConfigs) {
            if (isNecessaryElement(dependencyElement, moduleConfig)) return moduleConfig;
        }
        return null;
    }

    /**
     * Returns the elements of list2 that are not contained in list1 (relies on OArtifact.equals).
     */
    private List<OArtifact> difference(List<OArtifact> list1, List<OArtifact> list2) {
        List<OArtifact> result = Lists.newArrayList();
        for (OArtifact module : list2) {
            if (!list1.contains(module)) result.add(module);
        }
        return result;
    }

    /**
     * Build a &lt;dependency&gt; element (groupId, artifactId, version, description, jar)
     * from a Maven artifact reference.
     * @param artifactReference {@link OArtifactReference} which is a Maven dependency
     * @param document {@link Document} of metadata.xml
     * @return {@link Element} with the Maven dependency
     */
    private Element createMavenDependency(OArtifactReference artifactReference, Document document) {
        Element mavenElement = document.createElement(MetadataTag.DEPENDENCY.get());
        Element groupId = document.createElement(MetadataTag.GROUP_ID.get());
        groupId.appendChild(document.createTextNode(artifactReference.getGroupId()));
        mavenElement.appendChild(groupId);
        Element artifactId = document.createElement(MetadataTag.ARTIFACT_ID.get());
        artifactId.appendChild(document.createTextNode(artifactReference.getArtifactId()));
        mavenElement.appendChild(artifactId);
        Element version = document.createElement(MetadataTag.VERSION.get());
        version.appendChild(document.createTextNode(artifactReference.getVersion()));
        mavenElement.appendChild(version);
        Element description = document.createElement(MetadataTag.DESCRIPTION.get());
        description.appendChild(document.createTextNode(artifactReference.getDescription()));
        mavenElement.appendChild(description);
        Element jar = document.createElement(MetadataTag.JAR.get());
        jar.appendChild(document.createTextNode(artifactReference.getFile().getAbsolutePath()));
        mavenElement.appendChild(jar);
        return mavenElement;
    }
}
apache-2.0
OrienteerBAP/wicket-orientdb
wicket-orientdb/src/main/java/ru/ydn/wicket/wicketorientdb/model/OPropertyCustomModel.java
854
package ru.ydn.wicket.wicketorientdb.model; import org.apache.wicket.model.IModel; import com.orientechnologies.orient.core.metadata.schema.OProperty; /** * {@link IModel} to get and set dynamically custom properties of an {@link OProperty} */ public class OPropertyCustomModel extends AbstractCustomValueModel<OProperty, String, String> { private static final long serialVersionUID = 1L; public OPropertyCustomModel(IModel<OProperty> mainObjectModel, IModel<String> propertyModel) { super(mainObjectModel, propertyModel); } @Override public Class<String> getObjectClass() { return String.class; } @Override protected String getValue(OProperty object, String param) { return object.getCustom(param); } @Override protected void setValue(OProperty object, String param, String value) { object.setCustom(param, value); } }
apache-2.0
huanzhou/jeecms6
src/main/java/com/jeecms/cms/manager/assist/CmsSqlserverDataBackMng.java
1016
package com.jeecms.cms.manager.assist;

import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import com.jeecms.cms.entity.back.CmsField;

/**
 * Manager interface for backing up SQL Server databases: enumerating tables,
 * columns and databases, generating DDL, and toggling constraint checks.
 * NOTE(review): method semantics below are inferred from names/signatures only —
 * confirm against the implementing class.
 */
public interface CmsSqlserverDataBackMng {
	/** Lists the table names of the current catalog. */
	public List<String> listTabels();
	/** Lists the field descriptors of the given table. */
	public List<CmsField> listFields(String tablename);
	/** Lists the column names of the given table. */
	public List<String> getColumns(String tablename);
	/** Lists the database (catalog) names visible to the connection. */
	public List<String> listDataBases();
	/** Builds the CREATE TABLE DDL for the given table. */
	public String createTableDDL(String tablename);
	/** Appends/builds constraint DDL for the given table on top of {@code sql}. */
	public String createConstraintDDL(String sql, String tablename);
	/** Reads all rows of the given table; one Object[] per row. */
	public List<Object[]> createTableData(String tablename);
	/** Returns the connection's current default catalog. */
	public String getDefaultCatalog() throws SQLException;
	/** Switches the connection's default catalog. */
	public void setDefaultCatalog(String catalog)throws SQLException;
	/** SQL that disables constraint checking on the given table (presumably NOCHECK CONSTRAINT). */
	public String getNoCheckConstraintSql(String tablename);
	/** SQL that re-enables constraint checking on the given table. */
	public String getCheckConstraintSql(String tablename);
	/** Foreign keys of other tables that reference the given table (key/value meaning: see impl). */
	public Map<String, String> getBeReferForeignKeyFromTable(String tablename);
	/** Returns the FK constraint definition of the given table by constraint name. */
	public String getFKConstraintByName(String tablename,String fkConstraintName);
	/** Executes an arbitrary SQL statement; returns success flag. */
	public Boolean executeSQL(String sql);
}
apache-2.0
asurve/arvind-sysml
src/main/java/org/apache/sysml/lops/CumulativeOffsetBinary.java
4609
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.lops; import org.apache.sysml.lops.Aggregate.OperationTypes; import org.apache.sysml.lops.LopProperties.ExecLocation; import org.apache.sysml.lops.LopProperties.ExecType; import org.apache.sysml.lops.compile.JobType; import org.apache.sysml.parser.Expression.*; public class CumulativeOffsetBinary extends Lop { private OperationTypes _op; private double _initValue = 0; public CumulativeOffsetBinary(Lop data, Lop offsets, DataType dt, ValueType vt, OperationTypes op, ExecType et) throws LopsException { super(Lop.Type.CumulativeOffsetBinary, dt, vt); checkSupportedOperations(op); _op = op; init(data, offsets, dt, vt, et); } public CumulativeOffsetBinary(Lop data, Lop offsets, DataType dt, ValueType vt, double init, OperationTypes op, ExecType et) throws LopsException { super(Lop.Type.CumulativeOffsetBinary, dt, vt); checkSupportedOperations(op); _op = op; //in case of Spark, CumulativeOffset includes CumulativeSplit and hence needs the init value _initValue = init; init(data, offsets, dt, vt, et); } private void init(Lop input1, Lop input2, DataType dt, ValueType vt, ExecType et) { this.addInput(input1); this.addInput(input2); input1.addOutput(this); input2.addOutput(this); 
if( et == ExecType.MR ) { //setup MR parameters boolean breaksAlignment = true; boolean aligner = false; boolean definesMRJob = false; lps.addCompatibility(JobType.GMR); lps.addCompatibility(JobType.DATAGEN); lps.setProperties(inputs, et, ExecLocation.Reduce, breaksAlignment, aligner, definesMRJob); } else //Spark/CP { //setup Spark parameters boolean breaksAlignment = false; boolean aligner = false; boolean definesMRJob = false; lps.addCompatibility(JobType.INVALID); lps.setProperties( inputs, et, ExecLocation.ControlProgram, breaksAlignment, aligner, definesMRJob ); } } public String toString() { return "CumulativeOffsetBinary"; } private void checkSupportedOperations(OperationTypes op) throws LopsException { //sanity check for supported aggregates if( !(op == OperationTypes.KahanSum || op == OperationTypes.Product || op == OperationTypes.Min || op == OperationTypes.Max) ) { throw new LopsException("Unsupported aggregate operation type: "+op); } } private String getOpcode() { switch( _op ) { case KahanSum: return "bcumoffk+"; case Product: return "bcumoff*"; case Min: return "bcumoffmin"; case Max: return "bcumoffmax"; default: return null; } } @Override public String getInstructions(int input_index1, int input_index2, int output_index) throws LopsException { StringBuilder sb = new StringBuilder(); sb.append( getExecType() ); sb.append( OPERAND_DELIMITOR ); sb.append( getOpcode() ); sb.append( OPERAND_DELIMITOR ); sb.append( getInputs().get(0).prepInputOperand(input_index1) ); sb.append( OPERAND_DELIMITOR ); sb.append( getInputs().get(1).prepInputOperand(input_index2) ); sb.append( OPERAND_DELIMITOR ); sb.append( this.prepOutputOperand(output_index) ); return sb.toString(); } @Override public String getInstructions(String input1, String input2, String output) throws LopsException { StringBuilder sb = new StringBuilder(); sb.append( getExecType() ); sb.append( OPERAND_DELIMITOR ); sb.append( getOpcode() ); sb.append( OPERAND_DELIMITOR ); sb.append( 
getInputs().get(0).prepInputOperand(input1) ); sb.append( OPERAND_DELIMITOR ); sb.append( getInputs().get(1).prepInputOperand(input2) ); sb.append( OPERAND_DELIMITOR ); sb.append( this.prepOutputOperand(output) ); if( getExecType() == ExecType.SPARK ) { sb.append( OPERAND_DELIMITOR ); sb.append( _initValue ); } return sb.toString(); } }
apache-2.0
knoguchi/druid
extensions-core/kinesis-indexing-service/src/main/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTuningConfig.java
8378
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.indexing.kinesis.supervisor;

import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.indexing.kinesis.KinesisIndexTaskTuningConfig;
import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskTuningConfig;
import org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorTuningConfig;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
import org.joda.time.Duration;
import org.joda.time.Period;

import javax.annotation.Nullable;
import java.io.File;

/**
 * Tuning configuration for the Kinesis supervisor. Extends the per-task tuning
 * config with supervisor-only settings (worker/chat thread counts, chat retries,
 * HTTP and shutdown timeouts) and can be converted back into a task-level config
 * via {@link #convertToTaskTuningConfig()}.
 */
public class KinesisSupervisorTuningConfig extends KinesisIndexTaskTuningConfig
    implements SeekableStreamSupervisorTuningConfig
{
  // Supervisor-only settings; the remaining parameters are forwarded to the superclass.
  private final Integer workerThreads;
  private final Integer chatThreads;
  private final Long chatRetries;      // defaulted to DEFAULT_CHAT_RETRIES when absent
  private final Duration httpTimeout;  // defaulted to DEFAULT_HTTP_TIMEOUT when absent
  private final Duration shutdownTimeout; // defaulted to DEFAULT_SHUTDOWN_TIMEOUT when absent

  /**
   * Jackson-bound constructor; every parameter is optional in JSON.
   * NOTE: the parameter order must match the superclass constructor exactly —
   * keep the two lists in sync when adding fields.
   */
  public KinesisSupervisorTuningConfig(
      @JsonProperty("maxRowsInMemory") Integer maxRowsInMemory,
      @JsonProperty("maxBytesInMemory") Long maxBytesInMemory,
      @JsonProperty("maxRowsPerSegment") Integer maxRowsPerSegment,
      @JsonProperty("maxTotalRows") Long maxTotalRows,
      @JsonProperty("intermediatePersistPeriod") Period intermediatePersistPeriod,
      @JsonProperty("basePersistDirectory") File basePersistDirectory,
      @JsonProperty("maxPendingPersists") Integer maxPendingPersists,
      @JsonProperty("indexSpec") IndexSpec indexSpec,
      @JsonProperty("buildV9Directly") Boolean buildV9Directly,
      @JsonProperty("reportParseExceptions") Boolean reportParseExceptions,
      @JsonProperty("handoffConditionTimeout") Long handoffConditionTimeout,
      @JsonProperty("resetOffsetAutomatically") Boolean resetOffsetAutomatically,
      @JsonProperty("skipSequenceNumberAvailabilityCheck") Boolean skipSequenceNumberAvailabilityCheck,
      @JsonProperty("segmentWriteOutMediumFactory") @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
      @JsonProperty("workerThreads") Integer workerThreads,
      @JsonProperty("chatThreads") Integer chatThreads,
      @JsonProperty("chatRetries") Long chatRetries,
      @JsonProperty("httpTimeout") Period httpTimeout,
      @JsonProperty("shutdownTimeout") Period shutdownTimeout,
      @JsonProperty("recordBufferSize") Integer recordBufferSize,
      @JsonProperty("recordBufferOfferTimeout") Integer recordBufferOfferTimeout,
      @JsonProperty("recordBufferFullWait") Integer recordBufferFullWait,
      @JsonProperty("fetchSequenceNumberTimeout") Integer fetchSequenceNumberTimeout,
      @JsonProperty("fetchThreads") Integer fetchThreads,
      @JsonProperty("logParseExceptions") @Nullable Boolean logParseExceptions,
      @JsonProperty("maxParseExceptions") @Nullable Integer maxParseExceptions,
      @JsonProperty("maxSavedParseExceptions") @Nullable Integer maxSavedParseExceptions,
      @JsonProperty("maxRecordsPerPoll") @Nullable Integer maxRecordsPerPoll,
      @JsonProperty("intermediateHandoffPeriod") Period intermediateHandoffPeriod
  )
  {
    super(
        maxRowsInMemory,
        maxBytesInMemory,
        maxRowsPerSegment,
        maxTotalRows,
        intermediatePersistPeriod,
        basePersistDirectory,
        maxPendingPersists,
        indexSpec,
        buildV9Directly,
        reportParseExceptions,
        handoffConditionTimeout,
        resetOffsetAutomatically,
        skipSequenceNumberAvailabilityCheck,
        recordBufferSize,
        recordBufferOfferTimeout,
        recordBufferFullWait,
        fetchSequenceNumberTimeout,
        fetchThreads,
        segmentWriteOutMediumFactory,
        logParseExceptions,
        maxParseExceptions,
        maxSavedParseExceptions,
        maxRecordsPerPoll,
        intermediateHandoffPeriod
    );

    this.workerThreads = workerThreads;
    this.chatThreads = chatThreads;
    // Apply supervisor-level defaults for the fields that have them.
    this.chatRetries = (chatRetries != null ? chatRetries : DEFAULT_CHAT_RETRIES);
    this.httpTimeout = SeekableStreamSupervisorTuningConfig.defaultDuration(httpTimeout, DEFAULT_HTTP_TIMEOUT);
    this.shutdownTimeout = SeekableStreamSupervisorTuningConfig.defaultDuration(
        shutdownTimeout,
        DEFAULT_SHUTDOWN_TIMEOUT
    );
  }

  @Override
  @JsonProperty
  public Integer getWorkerThreads()
  {
    return workerThreads;
  }

  @Override
  @JsonProperty
  public Integer getChatThreads()
  {
    return chatThreads;
  }

  @Override
  @JsonProperty
  public Long getChatRetries()
  {
    return chatRetries;
  }

  @Override
  @JsonProperty
  public Duration getHttpTimeout()
  {
    return httpTimeout;
  }

  @Override
  @JsonProperty
  public Duration getShutdownTimeout()
  {
    return shutdownTimeout;
  }

  @Override
  public String toString()
  {
    return "KinesisSupervisorTuningConfig{" +
           "maxRowsInMemory=" + getMaxRowsInMemory() +
           ", maxBytesInMemory=" + getMaxBytesInMemory() +
           ", maxRowsPerSegment=" + getMaxRowsPerSegment() +
           ", maxTotalRows=" + getMaxTotalRows() +
           ", intermediatePersistPeriod=" + getIntermediatePersistPeriod() +
           ", basePersistDirectory=" + getBasePersistDirectory() +
           ", maxPendingPersists=" + getMaxPendingPersists() +
           ", indexSpec=" + getIndexSpec() +
           ", reportParseExceptions=" + isReportParseExceptions() +
           ", handoffConditionTimeout=" + getHandoffConditionTimeout() +
           ", resetOffsetAutomatically=" + isResetOffsetAutomatically() +
           ", skipSequenceNumberAvailabilityCheck=" + isSkipSequenceNumberAvailabilityCheck() +
           ", workerThreads=" + workerThreads +
           ", chatThreads=" + chatThreads +
           ", chatRetries=" + chatRetries +
           ", httpTimeout=" + httpTimeout +
           ", shutdownTimeout=" + shutdownTimeout +
           ", recordBufferSize=" + getRecordBufferSize() +
           ", recordBufferOfferTimeout=" + getRecordBufferOfferTimeout() +
           ", recordBufferFullWait=" + getRecordBufferFullWait() +
           ", fetchSequenceNumberTimeout=" + getFetchSequenceNumberTimeout() +
           ", fetchThreads=" + getFetchThreads() +
           ", segmentWriteOutMediumFactory=" + getSegmentWriteOutMediumFactory() +
           ", logParseExceptions=" + isLogParseExceptions() +
           ", maxParseExceptions=" + getMaxParseExceptions() +
           ", maxSavedParseExceptions=" + getMaxSavedParseExceptions() +
           ", maxRecordsPerPoll=" + getMaxRecordsPerPoll() +
           ", intermediateHandoffPeriod=" + getIntermediateHandoffPeriod() +
           '}';
  }

  /**
   * Converts this supervisor config into the task-level tuning config, dropping
   * the supervisor-only fields. The hard-coded {@code true} is the
   * buildV9Directly argument (presumably deprecated/always-on — confirm against
   * KinesisIndexTaskTuningConfig).
   */
  @Override
  public SeekableStreamIndexTaskTuningConfig convertToTaskTuningConfig()
  {
    return new KinesisIndexTaskTuningConfig(
        getMaxRowsInMemory(),
        getMaxBytesInMemory(),
        getMaxRowsPerSegment(),
        getMaxTotalRows(),
        getIntermediatePersistPeriod(),
        getBasePersistDirectory(),
        getMaxPendingPersists(),
        getIndexSpec(),
        true,
        isReportParseExceptions(),
        getHandoffConditionTimeout(),
        isResetOffsetAutomatically(),
        isSkipSequenceNumberAvailabilityCheck(),
        getRecordBufferSize(),
        getRecordBufferOfferTimeout(),
        getRecordBufferFullWait(),
        getFetchSequenceNumberTimeout(),
        getFetchThreads(),
        getSegmentWriteOutMediumFactory(),
        isLogParseExceptions(),
        getMaxParseExceptions(),
        getMaxSavedParseExceptions(),
        getMaxRecordsPerPoll(),
        getIntermediateHandoffPeriod()
    );
  }
}
apache-2.0
apache/uima-uimafit
uimafit-core/src/main/java/org/apache/uima/fit/component/CasAnnotator_ImplBase.java
1590
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.fit.component; import org.apache.uima.UimaContext; import org.apache.uima.fit.component.initialize.ConfigurationParameterInitializer; import org.apache.uima.fit.component.initialize.ExternalResourceInitializer; import org.apache.uima.resource.ResourceInitializationException; /** * Base class for CAS annotators which initializes itself based on annotations. * */ public abstract class CasAnnotator_ImplBase extends org.apache.uima.analysis_component.CasAnnotator_ImplBase { @Override public void initialize(final UimaContext context) throws ResourceInitializationException { super.initialize(context); ConfigurationParameterInitializer.initialize(this, context); ExternalResourceInitializer.initialize(this, context); } }
apache-2.0
letconex/MMT
src/persistence-mysql/src/main/java/eu/modernmt/persistence/mysql/utils/SQLUtils.java
1854
package eu.modernmt.persistence.mysql.utils;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Static JDBC helpers: quiet closing of connections/statements/result sets and
 * best-effort transaction control (commit/rollback/autocommit) that logs
 * failures instead of propagating them.
 *
 * Created by andrea on 26/04/17.
 */
public final class SQLUtils {

    private static final Logger logger = LogManager.getLogger(SQLUtils.class);

    /** Utility class - not instantiable. */
    private SQLUtils() {
    }

    /** Closes the connection, swallowing any exception; null-safe. */
    public static void closeQuietly(Connection connection) {
        try {
            if (connection != null)
                connection.close();
        } catch (Exception e) {
            // intentionally ignored: "quietly" is the contract
        }
    }

    /** Closes the statement, swallowing any exception; null-safe. */
    public static void closeQuietly(Statement statement) {
        try {
            if (statement != null)
                statement.close();
        } catch (Exception e) {
            // intentionally ignored: "quietly" is the contract
        }
    }

    /** Closes the result set, swallowing any exception; null-safe. */
    public static void closeQuietly(ResultSet result) {
        try {
            if (result != null)
                result.close();
        } catch (Exception e) {
            // intentionally ignored: "quietly" is the contract
        }
    }

    /** Commits the transaction; on failure logs (with stack trace) and continues. */
    public static void tryCommit(Connection connection) {
        try {
            connection.commit();
        } catch (SQLException e) {
            // pass the exception so the stack trace is not lost
            logger.error("COULD NOT COMMIT THE TRANSACTION " + connection, e);
        }
    }

    /** Rolls back the transaction; on failure logs (with stack trace) and continues. */
    public static void tryRollback(Connection connection) {
        try {
            connection.rollback();
        } catch (SQLException e) {
            // pass the exception so the stack trace is not lost
            logger.error("COULD NOT ROLLBACK THE TRANSACTION " + connection + ";", e);
        }
    }

    /** Sets autocommit; on failure logs (with stack trace) and continues. */
    public static void trySetAutocommit(Connection connection, boolean value) {
        try {
            connection.setAutoCommit(value);
        } catch (SQLException e) {
            // pass the exception so the stack trace is not lost
            logger.error("Error while setting autocommit to " + value, e);
        }
    }
}
apache-2.0
gastaldi/AsciidocFX
src/main/java/com/kodcu/bean/RecentFiles.java
1160
package com.kodcu.bean;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * Simple bean holding the editor's recently opened files, favorite
 * directories and last working directory. The list getters lazily create
 * an empty list on first access so they never return null.
 *
 * Created by usta on 30.08.2014.
 */
public class RecentFiles {

    private List<String> files;
    private List<String> favoriteDirectories;
    private String workingDirectory;

    /** Creates an empty instance; lists are created lazily. */
    public RecentFiles() {
    }

    /** Creates an instance preloaded with the given file list. */
    public RecentFiles(List<String> files) {
        this.files = files;
    }

    /** Returns the recent-file list, creating an empty one if absent. */
    public List<String> getFiles() {
        if (files == null) {
            files = new ArrayList<>();
        }
        return files;
    }

    public void setFiles(List<String> files) {
        this.files = files;
    }

    /** Returns the favorite-directory list, creating an empty one if absent. */
    public List<String> getFavoriteDirectories() {
        if (favoriteDirectories == null) {
            favoriteDirectories = new ArrayList<>();
        }
        return favoriteDirectories;
    }

    public void setFavoriteDirectories(List<String> favoriteDirectories) {
        this.favoriteDirectories = favoriteDirectories;
    }

    public String getWorkingDirectory() {
        return workingDirectory;
    }

    public void setWorkingDirectory(String workingDirectory) {
        this.workingDirectory = workingDirectory;
    }
}
apache-2.0
Orange-OpenSource/cf-java-client
cloudfoundry-operations/src/main/java/org/cloudfoundry/operations/spaceadmin/SpaceAdmin.java
1150
/*
 * Copyright 2013-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.cloudfoundry.operations.spaceadmin;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

/**
 * Main entry point to the Cloud Foundry Space Admin API.
 * All operations are reactive and lazy: nothing happens until the returned
 * publisher is subscribed to.
 */
public interface SpaceAdmin {

    /**
     * Gets a space quota.
     *
     * @param request the Get Space Quota request
     * @return a {@link Mono} emitting the matching space quota
     */
    Mono<SpaceQuota> get(GetSpaceQuotaRequest request);

    /**
     * Lists the space quotas.
     *
     * @return a {@link Flux} emitting each space quota
     */
    Flux<SpaceQuota> listQuotas();

}
apache-2.0
gpolitis/libjitsi
src/org/jitsi/impl/configuration/HashtableConfigurationStore.java
5835
/*
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.impl.configuration;

import java.util.*;

/**
 * A simple in-memory {@link ConfigurationStore} implementation backed by a
 * single hashtable. Property reads fall back to JVM system properties when the
 * table has no value for a name.
 *
 * @param <T> the hashtable extension that descendants are going to use.
 * @author Lyubomir Marinov
 */
@SuppressWarnings("rawtypes")
public abstract class HashtableConfigurationStore<T extends Hashtable>
    implements ConfigurationStore
{
    /**
     * The <tt>Hashtable</tt> holding this store's name&#8594;value
     * associations; effectively adapted to the <tt>ConfigurationStore</tt>
     * interface by this class.
     */
    protected final T properties;

    /**
     * Creates an instance backed by the given hashtable.
     *
     * @param properties the map used for storing and retrieving properties.
     */
    protected HashtableConfigurationStore(T properties)
    {
        this.properties = properties;
    }

    /**
     * Implements {@link ConfigurationStore#getProperty(String)}. Looks the
     * name up in the backing table first and, failing that, in the JVM system
     * properties.
     *
     * @param name the name of the property to get the value of
     * @return the stored value, the system-property value, or <tt>null</tt>
     * when neither exists
     * @see ConfigurationStore#getProperty(String)
     */
    public Object getProperty(String name)
    {
        Object value = properties.get(name);

        if (value == null)
            value = System.getProperty(name);
        return value;
    }

    /**
     * Implements {@link ConfigurationStore#getPropertyNames()}. Snapshots the
     * names currently present in the backing table.
     *
     * @return an array with the names of all properties that currently have
     * values in this store; empty when the store is empty
     * @see ConfigurationStore#getPropertyNames()
     */
    public String[] getPropertyNames()
    {
        // Synchronize so the size and the copy observe a consistent table.
        synchronized (properties)
        {
            Set<?> names = properties.keySet();
            String[] snapshot = new String[names.size()];

            return names.toArray(snapshot);
        }
    }

    /**
     * Implements {@link ConfigurationStore#isSystemProperty(String)}. A name
     * counts as system when the JVM system properties define it.
     *
     * @param name the name of the property to check
     * @return <tt>true</tt> if the name stands for a system property;
     * <tt>false</tt> otherwise
     * @see ConfigurationStore#isSystemProperty(String)
     */
    public boolean isSystemProperty(String name)
    {
        return System.getProperty(name) != null;
    }

    /**
     * Implements {@link ConfigurationStore#removeProperty(String)}. Drops the
     * association for the given name; a no-op when none exists.
     *
     * @param name the name whose value association is to be removed
     * @see ConfigurationStore#removeProperty(String)
     */
    public void removeProperty(String name)
    {
        properties.remove(name);
    }

    /**
     * Implements
     * {@link ConfigurationStore#setNonSystemProperty(String, Object)}. Stores
     * the value under the given name in the backing table.
     *
     * @param name the name of the non-system property to set
     * @param value the value to associate with <tt>name</tt>
     * @see ConfigurationStore#setNonSystemProperty(String, Object)
     */
    @SuppressWarnings("unchecked")
    public void setNonSystemProperty(String name, Object value)
    {
        properties.put(name, value);
    }

    /**
     * Implements {@link ConfigurationStore#setSystemProperty(String)}. System
     * properties live in the <tt>System</tt> class, so marking a name as
     * system simply removes any locally stored value for it.
     *
     * @param name the name of the property to mark as a system property
     * @see ConfigurationStore#setSystemProperty(String)
     */
    public void setSystemProperty(String name)
    {
        removeProperty(name);
    }
}
apache-2.0
Buble1981/MyDroolsFork
drools-core/src/main/java/org/drools/core/common/SynchronizedRightTupleSets.java
1740
package org.drools.core.common; import org.drools.core.reteoo.BetaMemory; import org.drools.core.reteoo.LeftTuple; import org.drools.core.reteoo.RightTuple; /** * Wraps some methods, to make them thread safe */ public class SynchronizedRightTupleSets extends RightTupleSetsImpl implements RightTupleSets { private final Object lock = new Object(); private BetaMemory bm; public SynchronizedRightTupleSets(BetaMemory bm) { this.bm = bm; } public Object getLock() { return lock; } public boolean addInsert(RightTuple rightTuple) { synchronized (lock) { if ( isEmpty() ) { bm.setNodeDirtyWithoutNotify(); } return super.addInsert(rightTuple); } } public boolean addDelete(RightTuple rightTuple) { synchronized (lock) { if ( isEmpty() ) { bm.setNodeDirtyWithoutNotify(); } return super.addDelete(rightTuple); } } public boolean addUpdate(RightTuple rightTuple) { synchronized (lock) { if ( isEmpty() ) { bm.setNodeDirtyWithoutNotify(); } return super.addUpdate(rightTuple); } } public void resetAll() { synchronized (lock) { super.resetAll(); } } /** * The returned RightTupleSet, is not thread safe. * @return */ public RightTupleSets takeAll() { synchronized (lock) { if ( !isEmpty() ) { bm.setNodeCleanWithoutNotify(); return super.takeAll(); } else { return new RightTupleSetsImpl(); } } } }
apache-2.0
YLimy/ERoadWifi
src/com/e_road/utils/parser/RegisterParser.java
1242
package com.e_road.utils.parser; import com.alibaba.fastjson.JSONException; import com.alibaba.fastjson.JSONObject; import com.e_road.utils.IConstant; import com.e_road.vo.UserInfo; public class RegisterParser extends BaseParser<Object> { /** * 解析注册结果<br> * 用户已存在 :IConstant.REGISTER_EXIST<br> * 注册成功 :IConstant.LOGIN 返回UserInfo对象<br> * 其他 :"注册异常" * * @return IConstant.REGISTER_EXIST<br> * UserInfo<br> * "注册异常" */ @Override public Object parseJson(String paramString) throws JSONException { String result = checkResponse(paramString); if (null != result) { if (result.equals(IConstant.REGISTER_EXIST)) { return IConstant.REGISTER_EXIST; } else if (result.equals(IConstant.RESPONSE_SUCCESS)) { // JSONObject jsonObj = JSONObject.parseObject(paramString); // UserInfo userInfo = jsonObj.getObject("userinfo", // UserInfo.class); UserInfo userInfo = new UserInfo(); return userInfo; } } return "注册失败"; } @Override public Object parseJson4App(String paramString) throws JSONException { // TODO Auto-generated method stub return null; } }
artistic-2.0
ColaMachine/MyBlock
src/main/java/de/matthiasmann/twl/utils/SparseGrid.java
18716
/* * Copyright (c) 2008-2009, Matthias Mann * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Matthias Mann nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package de.matthiasmann.twl.utils; import java.util.Arrays; /** * A 2d sparse grid built using a B+Tree. * Rows are the major axis. Operations on column ranges are slower. 
* * @author Matthias Mann */ public class SparseGrid { public interface GridFunction { public void apply(int row, int column, Entry e); } Node root; int numLevels; public SparseGrid(int pageSize) { root = new Node(pageSize); numLevels = 1; } public Entry get(int row, int column) { if(root.size > 0) { int levels = numLevels; Entry e = root; do { Node node = (Node)e; int pos = node.findPos(row, column, node.size); if(pos == node.size) { return null; } e = node.children[pos]; }while(--levels > 0); assert e != null; if(e.compare(row, column) == 0) { return e; } } return null; } public void set(int row, int column, Entry entry) { entry.row = row; entry.column = column; if(root.size == 0) { root.insertAt(0, entry); root.updateRowColumn(); } else if(!root.insert(entry, numLevels)) { splitRoot(); root.insert(entry, numLevels); } } public Entry remove(int row, int column) { if(root.size == 0) { return null; } Entry e = root.remove(row, column, numLevels); if(e != null) { maybeRemoveRoot(); } return e; } public void insertRows(int row, int count) { if(count > 0 && root.size > 0) { root.insertRows(row, count, numLevels); } } public void insertColumns(int column, int count) { if(count > 0 && root.size > 0) { root.insertColumns(column, count, numLevels); } } public void removeRows(int row, int count) { if(count > 0) { root.removeRows(row, count, numLevels); maybeRemoveRoot(); } } public void removeColumns(int column, int count) { if(count > 0) { root.removeColumns(column, count, numLevels); maybeRemoveRoot(); } } public void iterate(int startRow, int startColumn, int endRow, int endColumn, GridFunction func) { if(root.size > 0) { int levels = numLevels; Entry e = root; Node node; int pos; do { node = (Node)e; pos = node.findPos(startRow, startColumn, node.size-1); e = node.children[pos]; }while(--levels > 0); assert e != null; if(e.compare(startRow, startColumn) < 0) { return; } do { for(int size=node.size ; pos<size ; pos++) { e = node.children[pos]; if(e.row > endRow) { 
return; } if(e.column >= startColumn && e.column <= endColumn) { func.apply(e.row, e.column, e); } } pos = 0; node = node.next; } while(node != null); } } public boolean isEmpty() { return root.size == 0; } public void clear() { Arrays.fill(root.children, null); root.size = 0; numLevels = 1; } private void maybeRemoveRoot() { while(numLevels > 1 && root.size == 1) { root = (Node)root.children[0]; root.prev = null; root.next = null; numLevels--; } if(root.size == 0) { numLevels = 1; } } private void splitRoot() { Node newNode = root.split(); Node newRoot = new Node(root.children.length); newRoot.children[0] = root; newRoot.children[1] = newNode; newRoot.size = 2; root = newRoot; numLevels++; } static class Node extends Entry { final Entry[] children; int size; Node next; Node prev; public Node(int size) { this.children = new Entry[size]; } boolean insert(Entry e, int levels) { if(--levels == 0) { return insertLeaf(e); } for(;;) { int pos = findPos(e.row, e.column, size-1); assert pos < size; Node node = (Node)children[pos]; if(!node.insert(e, levels)) { if(isFull()) { return false; } Node node2 = node.split(); insertAt(pos+1, node2); continue; } updateRowColumn(); return true; } } boolean insertLeaf(Entry e) { int pos = findPos(e.row, e.column, size); if(pos < size) { Entry c = children[pos]; assert c.getClass() != Node.class; int cmp = c.compare(e.row, e.column); if(cmp == 0) { children[pos] = e; return true; } assert cmp > 0; } if(isFull()) { return false; } insertAt(pos, e); return true; } Entry remove(int row, int column, int levels) { if(--levels == 0) { return removeLeaf(row, column); } int pos = findPos(row, column, size-1); assert pos < size; Node node = (Node)children[pos]; Entry e = node.remove(row, column, levels); if(e != null) { if(node.size == 0) { removeNodeAt(pos); } else if(node.isBelowHalf()) { tryMerge(pos); } updateRowColumn(); } return e; } Entry removeLeaf(int row, int column) { int pos = findPos(row, column, size); if(pos == size) { return 
null; } Entry c = children[pos]; assert c.getClass() != Node.class; int cmp = c.compare(row, column); if(cmp == 0) { removeAt(pos); if(pos == size && size > 0) { updateRowColumn(); } return c; } return null; } int findPos(int row, int column, int high) { int low = 0; while(low < high) { int mid = (low + high) >>> 1; Entry e = children[mid]; int cmp = e.compare(row, column); if(cmp > 0) { high = mid; } else if(cmp < 0) { low = mid + 1; } else { return mid; } } return low; } void insertRows(int row, int count, int levels) { if(--levels > 0) { for(int i=size ; i-->0 ;) { Node n = (Node)children[i]; if(n.row < row) { break; } n.insertRows(row, count, levels); } } else { for(int i=size ; i-->0 ;) { Entry e = children[i]; if(e.row < row) { break; } e.row += count; } } updateRowColumn(); } void insertColumns(int column, int count, int levels) { if(--levels > 0) { for(int i=0 ; i<size ; i++) { Node n = (Node)children[i]; n.insertColumns(column, count, levels); } } else { for(int i=0 ; i<size ; i++) { Entry e = children[i]; if(e.column >= column) { e.column += count; } } } updateRowColumn(); } boolean removeRows(int row, int count, int levels) { if(--levels > 0) { boolean needsMerging = false; for(int i=size ; i-->0 ;) { Node n = (Node)children[i]; if(n.row < row) { break; } if(n.removeRows(row, count, levels)) { removeNodeAt(i); } else { needsMerging |= n.isBelowHalf(); } } if(needsMerging && size > 1) { tryMerge(); } } else { for(int i=size ; i-->0 ;) { Entry e = children[i]; if(e.row < row) { break; } e.row -= count; if(e.row < row) { removeAt(i); } } } if(size == 0) { return true; } updateRowColumn(); return false; } boolean removeColumns(int column, int count, int levels) { if(--levels > 0) { boolean needsMerging = false; for(int i=size ; i-->0 ;) { Node n = (Node)children[i]; if(n.removeColumns(column, count, levels)) { removeNodeAt(i); } else { needsMerging |= n.isBelowHalf(); } } if(needsMerging && size > 1) { tryMerge(); } } else { for(int i=size ; i-->0 ;) { Entry 
e = children[i]; if(e.column >= column) { e.column -= count; if(e.column < column) { removeAt(i); } } } } if(size == 0) { return true; } updateRowColumn(); return false; } void insertAt(int idx, Entry what) { System.arraycopy(children, idx, children, idx+1, size-idx); children[idx] = what; if(idx == size++) { updateRowColumn(); } } void removeAt(int idx) { size--; System.arraycopy(children, idx+1, children, idx, size-idx); children[size] = null; } void removeNodeAt(int idx) { Node n = (Node)children[idx]; if(n.next != null) { n.next.prev = n.prev; } if(n.prev != null) { n.prev.next = n.next; } n.next = null; n.prev = null; removeAt(idx); } void tryMerge() { if(size == 2) { tryMerge2(0); } else { for(int i=size-1 ; i-->1 ;) { if(tryMerge3(i)) { i--; } } } } void tryMerge(int pos) { switch (size) { case 0: case 1: // can't merge break; case 2: tryMerge2(0); break; default: if(pos+1 == size) { tryMerge3(pos-1); } else if(pos == 0) { tryMerge3(1); } else { tryMerge3(pos); } break; } } private void tryMerge2(int pos) { Node n1 = (Node)children[pos]; Node n2 = (Node)children[pos+1]; if(n1.isBelowHalf() || n2.isBelowHalf()) { int sumSize = n1.size + n2.size; if(sumSize < children.length) { System.arraycopy(n2.children, 0, n1.children, n1.size, n2.size); n1.size = sumSize; n1.updateRowColumn(); removeNodeAt(pos+1); } else { Object[] temp = collect2(sumSize, n1, n2); distribute2(temp, n1, n2); } } } private boolean tryMerge3(int pos) { Node n0 = (Node)children[pos-1]; Node n1 = (Node)children[pos]; Node n2 = (Node)children[pos+1]; if(n0.isBelowHalf() || n1.isBelowHalf() || n2.isBelowHalf()) { int sumSize = n0.size + n1.size + n2.size; if(sumSize < children.length) { System.arraycopy(n1.children, 0, n0.children, n0.size, n1.size); System.arraycopy(n2.children, 0, n0.children, n0.size+n1.size, n2.size); n0.size = sumSize; n0.updateRowColumn(); removeNodeAt(pos+1); removeNodeAt(pos ); return true; } else { Object[] temp = collect3(sumSize, n0, n1, n2); if(sumSize < 
2*children.length) { distribute2(temp, n0, n1); removeNodeAt(pos+1); } else { distribute3(temp, n0, n1, n2); } } } return false; } private Object[] collect2(int sumSize, Node n0, Node n1) { Object[] temp = new Object[sumSize]; System.arraycopy(n0.children, 0, temp, 0, n0.size); System.arraycopy(n1.children, 0, temp, n0.size, n1.size); return temp; } private Object[] collect3(int sumSize, Node n0, Node n1, Node n2) { Object[] temp = new Object[sumSize]; System.arraycopy(n0.children, 0, temp, 0, n0.size); System.arraycopy(n1.children, 0, temp, n0.size, n1.size); System.arraycopy(n2.children, 0, temp, n0.size + n1.size, n2.size); return temp; } private void distribute2(Object[] src, Node n0, Node n1) { int sumSize = src.length; n0.size = sumSize/2; n1.size = sumSize - n0.size; System.arraycopy(src, 0, n0.children, 0, n0.size); System.arraycopy(src, n0.size, n1.children, 0, n1.size); n0.updateRowColumn(); n1.updateRowColumn(); } private void distribute3(Object[] src, Node n0, Node n1, Node n2) { int sumSize = src.length; n0.size = sumSize/3; n1.size = (sumSize - n0.size) / 2; n2.size = sumSize - (n0.size + n1.size); System.arraycopy(src, 0, n0.children, 0, n0.size); System.arraycopy(src, n0.size, n1.children, 0, n1.size); System.arraycopy(src, n0.size+n1.size, n2.children, 0, n2.size); n0.updateRowColumn(); n1.updateRowColumn(); n2.updateRowColumn(); } boolean isFull() { return size == children.length; } boolean isBelowHalf() { return size*2 < children.length; } Node split() { Node newNode = new Node(children.length); int size1 = size / 2; int size2 = size - size1; System.arraycopy(this.children, size1, newNode.children, 0, size2); Arrays.fill(this.children, size1, this.size, null); newNode.size = size2; newNode.updateRowColumn(); newNode.prev = this; newNode.next = this.next; this.size = size1; this.updateRowColumn(); this.next = newNode; if(newNode.next != null) { newNode.next.prev = newNode; } return newNode; } void updateRowColumn() { Entry e = children[size-1]; 
this.row = e.row; this.column = e.column; } } public static class Entry { int row; int column; int compare(int row, int column) { int diff = this.row - row; if(diff == 0) { diff = this.column - column; } return diff; } } }
bsd-2-clause
interdroid/ibis-ipl
benchmarks/src/ibis/ipl/benchmarks/LogP/LogP.java
8900
package ibis.ipl.benchmarks.LogP; /* $Id$ */ import ibis.ipl.Ibis; import ibis.ipl.IbisCapabilities; import ibis.ipl.IbisFactory; import ibis.ipl.IbisIdentifier; import ibis.ipl.PortType; import ibis.ipl.ReadMessage; import ibis.ipl.ReceivePort; import ibis.ipl.Registry; import ibis.ipl.SendPort; import ibis.ipl.WriteMessage; import java.io.IOException; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; class Sender { SendPort sport; ReceivePort rport; Sender(ReceivePort rport, SendPort sport) { this.rport = rport; this.sport = sport; } void send(int count, int gapCount, int repeat) throws Exception { long time; long sendStart, sendTotal; long recvStart, recvTotal; long nanoStart, nanoTotal; long nanoStartTicks, nanoTotalTicks; int timeCount; System.out.println("LogP:" + " repeat " + repeat + " count " + count + " gapCount " + gapCount + " (times in usec)"); // measure clock overhead of the o_r/o_s tests below timeCount = 100000; sendTotal = 0; nanoStart = System.nanoTime(); nanoStartTicks = Native.timestamp(); for (int i = 0; i < timeCount; i++) { sendStart = System.nanoTime(); sendTotal += System.nanoTime() - sendStart; } nanoTotalTicks = Native.timestamp() - nanoStartTicks; nanoTotal = System.nanoTime() - nanoStart; double clock = (nanoTotal / 1000.0) / timeCount; double ratio = ((double) nanoTotalTicks / (double) nanoTotal); // rdtsc() overhead nanoStart = System.nanoTime(); for (int i = 0; i < timeCount; i++) { sendStart = Native.timestamp(); sendTotal += Native.timestamp() - sendStart; } nanoTotal = System.nanoTime() - nanoStart; double tick = (nanoTotal / 1000.0) / timeCount; System.out.println("LogP:" + " overhead: nano " + String.format("%.3f", clock) + " rdtsc " + String.format("%.3f", tick) + "; rdtsc/nano tickrate ratio " + String.format("%.3f", ratio)); for (int r = 0; r < repeat; r++) { WriteMessage writeMessage; ReadMessage readMessage; // rtt and o_s (send overhead) sendTotal = 0; time = System.nanoTime(); for (int 
i = 0; i < count; i++) { // do send, measuring overhead sendStart = Native.timestamp(); writeMessage = sport.newMessage(); writeMessage.finish(); sendTotal += Native.timestamp() - sendStart; readMessage = rport.receive(); readMessage.finish(); } time = System.nanoTime() - time; double rtt = (time / 1000.0) / count; double sendOverhead = (sendTotal / ratio / 1000.0) / count; time = System.nanoTime(); for (int i = 0; i < gapCount; i++) { writeMessage = sport.newMessage(); writeMessage.finish(); } time = System.nanoTime() - time; double g = (time / 1000.0) / gapCount; // wait till everything is received: readMessage = rport.receive(); readMessage.finish(); // o_r (receive overhead) recvTotal = 0; for (int i = 0; i < count; i++) { writeMessage = sport.newMessage(); writeMessage.finish(); // busywait for 2*RTT so that the next message // should be pending when we read it away recvStart = System.nanoTime() + (long) ((2.0 * rtt) * 1000.0); while (System.nanoTime() < recvStart) { // nothing } // do receive, measuring overhead recvStart = Native.timestamp(); readMessage = rport.receive(); readMessage.finish(); recvTotal += Native.timestamp() - recvStart; } double recvOverhead = (recvTotal / ratio / 1000.0) / count; double ovhd = tick / 2; System.out.println("LogP:" + " RTT " + String.format("%.2f", (rtt - ovhd)) + " os " + String.format("%.2f", (sendOverhead - ovhd)) + " or " + String.format("%.2f", (recvOverhead - ovhd)) + " g " + String.format("%.2f", g)); } } } class ExplicitReceiver { SendPort sport; ReceivePort rport; ExplicitReceiver(ReceivePort rport, SendPort sport) { this.rport = rport; this.sport = sport; } void receive(int count, int gapCount, int repeat) throws IOException { WriteMessage writeMessage; ReadMessage readMessage; for (int r = 0; r < repeat; r++) { // rtt/o_s for (int i = 0; i < count; i++) { readMessage = rport.receive(); readMessage.finish(); writeMessage = sport.newMessage(); writeMessage.finish(); } // g for (int i = 0; i < gapCount; i++) { 
readMessage = rport.receive(); readMessage.finish(); } writeMessage = sport.newMessage(); writeMessage.finish(); // o_r for (int i = 0; i < count; i++) { readMessage = rport.receive(); readMessage.finish(); writeMessage = sport.newMessage(); writeMessage.finish(); } } } } class LogP { static Logger logger = LoggerFactory.getLogger(LogP.class.getName()); static Ibis ibis; static Registry registry; static void usage() { System.out.println("Usage: LogP [-ibis] [-none] [count] [gapcount]"); System.exit(0); } public static void main(String[] args) { int count = -1; int gapCount = -1; int repeat = 10; int rank; boolean ibisSer = false; boolean noneSer = false; /* Parse commandline parameters. */ for (int i = 0; i < args.length; i++) { if (args[i].equals("-repeat")) { i++; repeat = Integer.parseInt(args[i]); } else if (args[i].equals("-ibis")) { ibisSer = true; } else if (args[i].equals("-none")) { noneSer = true; } else { if (count == -1) { count = Integer.parseInt(args[i]); } else if (gapCount == -1) { gapCount = Integer.parseInt(args[i]); } else { usage(); } } } if (count == -1) { count = 10000; } if (gapCount == -1) { if (count <= 10000) { gapCount = count; } else { // by default limit it for the gap since it is one-way gapCount = 10000; } } try { IbisCapabilities s = new IbisCapabilities( IbisCapabilities.ELECTIONS_STRICT ); PortType t = new PortType( noneSer ? 
PortType.SERIALIZATION_BYTE : PortType.SERIALIZATION_OBJECT, PortType.COMMUNICATION_RELIABLE, PortType.CONNECTION_ONE_TO_ONE, PortType.RECEIVE_AUTO_UPCALLS, PortType.RECEIVE_EXPLICIT); Properties p = new Properties(); if (ibisSer) { p.setProperty("ibis.serialization", "ibis"); } else if (noneSer) { } else { p.setProperty("ibis.serialization", "sun"); } ibis = IbisFactory.createIbis(s, p, true, null, t); registry = ibis.registry(); SendPort sport = ibis.createSendPort(t, "send port"); ReceivePort rport; logger.debug("LogP: pre elect"); IbisIdentifier master = registry.elect("logp"); IbisIdentifier remote; logger.debug("LogP: post elect"); if (master.equals(ibis.identifier())) { logger.debug("LogP: I am master"); remote = registry.getElectionResult("client"); rank = 0; } else { logger.debug("LogP: I am slave"); registry.elect("client"); rank = 1; remote = master; } if (rank == 0) { rport = ibis.createReceivePort(t, "master"); rport.enableConnections(); sport.connect(remote, "slave"); Sender sender = new Sender(rport, sport); sender.send(count, gapCount, repeat); } else { sport.connect(remote, "master"); rport = ibis.createReceivePort(t, "slave"); rport.enableConnections(); ExplicitReceiver receiver = new ExplicitReceiver(rport, sport); receiver.receive(count, gapCount, repeat); } /* free the send ports first */ sport.close(); rport.close(); ibis.end(); } catch (Exception e) { System.err.println("Got exception " + e); System.err.println("StackTrace:"); e.printStackTrace(); } } }
bsd-3-clause
ksclarke/basex
basex-core/src/main/java/org/basex/io/in/DecodingInput.java
1103
package org.basex.io.in; import java.io.*; /** * This server-side class wraps an {@link InputStream} filled by a database * client. The incoming bytes are decoded: * <ul> * <li> {@code 0x00} is treated as end of stream, and -1 is returned</li> * <li> {@code 0xFF} is treated as encoding flag and skipped</li> * </ul> * * @author BaseX Team 2005-15, BSD License * @author Christian Gruen */ public final class DecodingInput extends InputStream { /** Input stream. */ private final InputStream input; /** All bytes have been read. */ private boolean more = true; /** * Constructor. * @param input buffer input to be wrapped */ public DecodingInput(final InputStream input) { this.input = input; } @Override public int read() throws IOException { if(more) { final int b = input.read(); if(b != 0) return b == 0xFF ? input.read() : b; more = false; } return -1; } /** * Flushes the remaining client data. * @throws IOException I/O exception */ public void flush() throws IOException { while(read() != -1); } }
bsd-3-clause
OBHITA/Consent2Share
DS4P/access-control-service/common-library/src/main/java/org/hl7/v3/AdxpCity.java
267
package org.hl7.v3; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlType; @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name="adxp.city") public class AdxpCity extends ADXP { }
bsd-3-clause
mapbased/vitess
java/client/src/main/java/com/youtube/vitess/client/Proto.java
9857
package com.youtube.vitess.client; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.primitives.UnsignedLong; import com.google.protobuf.ByteString; import com.youtube.vitess.client.cursor.Cursor; import com.youtube.vitess.client.cursor.SimpleCursor; import com.youtube.vitess.proto.Query; import com.youtube.vitess.proto.Query.BindVariable; import com.youtube.vitess.proto.Query.BoundQuery; import com.youtube.vitess.proto.Query.QueryResult; import com.youtube.vitess.proto.Vtgate.BoundKeyspaceIdQuery; import com.youtube.vitess.proto.Vtgate.BoundShardQuery; import com.youtube.vitess.proto.Vtgate.ExecuteEntityIdsRequest.EntityId; import com.youtube.vitess.proto.Vtrpc.RPCError; import java.math.BigDecimal; import java.sql.SQLException; import java.sql.SQLIntegrityConstraintViolationException; import java.sql.SQLInvalidAuthorizationSpecException; import java.sql.SQLNonTransientException; import java.sql.SQLRecoverableException; import java.sql.SQLSyntaxErrorException; import java.sql.SQLTimeoutException; import java.sql.SQLTransientException; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.annotation.Nullable; /** * Proto contains methods for working with Vitess protobuf messages. */ public class Proto { private static final int MAX_DECIMAL_UNIT = 30; /** * Throws the proper SQLException for an error returned by VTGate. * * <p> * Errors returned by Vitess are documented in the * <a href="https://github.com/youtube/vitess/blob/master/proto/vtrpc.proto">vtrpc proto</a>. 
*/ public static void checkError(RPCError error) throws SQLException { if (error != null) { int errno = getErrno(error.getMessage()); String sqlState = getSQLState(error.getMessage()); switch (error.getCode()) { case SUCCESS: break; case BAD_INPUT: throw new SQLSyntaxErrorException(error.toString(), sqlState, errno); case DEADLINE_EXCEEDED: throw new SQLTimeoutException(error.toString(), sqlState, errno); case INTEGRITY_ERROR: throw new SQLIntegrityConstraintViolationException(error.toString(), sqlState, errno); case TRANSIENT_ERROR: throw new SQLTransientException(error.toString(), sqlState, errno); case UNAUTHENTICATED: throw new SQLInvalidAuthorizationSpecException(error.toString(), sqlState, errno); case NOT_IN_TX: throw new SQLRecoverableException(error.toString(), sqlState, errno); default: throw new SQLNonTransientException("Vitess RPC error: " + error.toString(), sqlState, errno); } } } /** * Extracts the MySQL errno from a Vitess error message, if any. * * <p> * If no errno information is found, it returns {@code 0}. */ public static int getErrno(@Nullable String errorMessage) { if (errorMessage == null) { return 0; } int tagPos = errorMessage.indexOf("(errno "); if (tagPos == -1) { return 0; } int start = tagPos + "(errno ".length(); if (start >= errorMessage.length()) { return 0; } int end = errorMessage.indexOf(')', start); if (end == -1) { return 0; } try { return Integer.parseInt(errorMessage.substring(start, end)); } catch (NumberFormatException e) { return 0; } } /** * Extracts the SQLSTATE from a Vitess error message, if any. * * <p> * If no SQLSTATE information is found, it returns {@code ""}. 
*/ public static String getSQLState(@Nullable String errorMessage) { if (errorMessage == null) { return ""; } int tagPos = errorMessage.indexOf("(sqlstate "); if (tagPos == -1) { return ""; } int start = tagPos + "(sqlstate ".length(); if (start >= errorMessage.length()) { return ""; } int end = errorMessage.indexOf(')', start); if (end == -1) { return ""; } return errorMessage.substring(start, end); } public static BindVariable buildBindVariable(Object value) { if (value instanceof BindVariable) { return (BindVariable) value; } BindVariable.Builder builder = BindVariable.newBuilder(); if (value instanceof Iterable<?>) { // List Bind Vars Iterator<?> itr = ((Iterable<?>) value).iterator(); if (!itr.hasNext()) { throw new IllegalArgumentException("Can't pass empty list as list bind variable."); } builder.setType(Query.Type.TUPLE); while (itr.hasNext()) { TypedValue tval = new TypedValue(itr.next()); builder.addValues(Query.Value.newBuilder().setType(tval.type).setValue(tval.value).build()); } } else { TypedValue tval = new TypedValue(value); builder.setType(tval.type); builder.setValue(tval.value); } return builder.build(); } public static EntityId buildEntityId(byte[] keyspaceId, Object value) { TypedValue tval = new TypedValue(value); return EntityId.newBuilder().setKeyspaceId(ByteString.copyFrom(keyspaceId)).setType(tval.type) .setValue(tval.value).build(); } /** * bindQuery creates a BoundQuery from query and vars. */ public static BoundQuery bindQuery(String query, Map<String, ?> vars) { BoundQuery.Builder boundQueryBuilder = BoundQuery.newBuilder().setSql(query); if (vars != null) { for (Map.Entry<String, ?> entry : vars.entrySet()) { boundQueryBuilder.putBindVariables(entry.getKey(), buildBindVariable(entry.getValue())); } } return boundQueryBuilder.build(); } /** * bindShardQuery creates a BoundShardQuery. 
*/ public static BoundShardQuery bindShardQuery(String keyspace, Iterable<String> shards, BoundQuery query) { return BoundShardQuery.newBuilder().setKeyspace(keyspace).addAllShards(shards).setQuery(query) .build(); } /** * bindShardQuery creates a BoundShardQuery. */ public static BoundShardQuery bindShardQuery(String keyspace, Iterable<String> shards, String query, Map<String, ?> vars) { return bindShardQuery(keyspace, shards, bindQuery(query, vars)); } /** * bindKeyspaceIdQuery creates a BoundKeyspaceIdQuery. */ public static BoundKeyspaceIdQuery bindKeyspaceIdQuery(String keyspace, Iterable<byte[]> keyspaceIds, BoundQuery query) { return BoundKeyspaceIdQuery.newBuilder().setKeyspace(keyspace) .addAllKeyspaceIds(Iterables.transform(keyspaceIds, BYTE_ARRAY_TO_BYTE_STRING)) .setQuery(query).build(); } /** * bindKeyspaceIdQuery creates a BoundKeyspaceIdQuery. */ public static BoundKeyspaceIdQuery bindKeyspaceIdQuery(String keyspace, Iterable<byte[]> keyspaceIds, String query, Map<String, ?> vars) { return bindKeyspaceIdQuery(keyspace, keyspaceIds, bindQuery(query, vars)); } public static List<Cursor> toCursorList(List<QueryResult> queryResults) { ImmutableList.Builder<Cursor> builder = new ImmutableList.Builder<Cursor>(); for (QueryResult queryResult : queryResults) { builder.add(new SimpleCursor(queryResult)); } return builder.build(); } public static final Function<byte[], ByteString> BYTE_ARRAY_TO_BYTE_STRING = new Function<byte[], ByteString>() { @Override public ByteString apply(byte[] from) { return ByteString.copyFrom(from); } }; public static final Function<Map.Entry<byte[], ?>, EntityId> MAP_ENTRY_TO_ENTITY_KEYSPACE_ID = new Function<Map.Entry<byte[], ?>, EntityId>() { @Override public EntityId apply(Map.Entry<byte[], ?> entry) { return buildEntityId(entry.getKey(), entry.getValue()); } }; /** * Represents a type and value in the type system used in query.proto. 
*/ protected static class TypedValue { Query.Type type; ByteString value; TypedValue(Object value) { if (value == null) { this.type = Query.Type.NULL_TYPE; this.value = ByteString.EMPTY; } else if (value instanceof String) { // String this.type = Query.Type.VARCHAR; this.value = ByteString.copyFromUtf8((String) value); } else if (value instanceof byte[]) { // Bytes this.type = Query.Type.VARBINARY; this.value = ByteString.copyFrom((byte[]) value); } else if (value instanceof Integer || value instanceof Long || value instanceof Short || value instanceof Byte) { // Int32, Int64, Short, Byte this.type = Query.Type.INT64; this.value = ByteString.copyFromUtf8(value.toString()); } else if (value instanceof UnsignedLong) { // Uint64 this.type = Query.Type.UINT64; this.value = ByteString.copyFromUtf8(value.toString()); } else if (value instanceof Float || value instanceof Double) { // Float, Double this.type = Query.Type.FLOAT64; this.value = ByteString.copyFromUtf8(value.toString()); } else if (value instanceof Boolean) { // Boolean this.type = Query.Type.INT64; this.value = ByteString.copyFromUtf8(((boolean) value) ? "1" : "0"); } else if (value instanceof BigDecimal) { // BigDecimal BigDecimal bigDecimal = (BigDecimal) value; if (bigDecimal.scale() > MAX_DECIMAL_UNIT) { // MySQL only supports scale up to 30. bigDecimal = bigDecimal.setScale(MAX_DECIMAL_UNIT, BigDecimal.ROUND_HALF_UP); } this.type = Query.Type.DECIMAL; this.value = ByteString.copyFromUtf8(bigDecimal.toPlainString()); } else { throw new IllegalArgumentException( "unsupported type for Query.Value proto: " + value.getClass()); } } } }
bsd-3-clause
anildahiya/sdl_android
android/sdl_android/src/androidTest/java/com/smartdevicelink/test/rpc/enums/TextFieldNameTests.java
7789
package com.smartdevicelink.test.rpc.enums;

import com.smartdevicelink.proxy.rpc.enums.TextFieldName;

import junit.framework.TestCase;

import java.util.Arrays;
import java.util.List;

/**
 * This is a unit test class for the SmartDeviceLink library project class :
 * {@link com.smartdevicelink.proxy.rpc.enums.TextFieldName}
 */
public class TextFieldNameTests extends TestCase {

    /**
     * Verifies that every documented JSON name resolves to a non-null enum value
     * via {@link TextFieldName#valueForString(String)}.
     */
    public void testValidEnums () {
        // One entry per TextFieldName constant; valueForString() must resolve each.
        String[] validNames = {
                "mainField1", "mainField2", "mainField3", "mainField4",
                "statusBar", "mediaClock", "mediaTrack",
                "alertText1", "alertText2", "alertText3",
                "scrollableMessageBody", "initialInteractionText",
                "navigationText1", "navigationText2",
                "ETA", "totalDistance",
                "audioPassThruDisplayText1", "audioPassThruDisplayText2",
                "sliderHeader", "sliderFooter",
                "menuName", "secondaryText", "tertiaryText", "menuTitle",
                "locationName", "locationDescription", "addressLines", "phoneNumber",
        };
        for (String name : validNames) {
            assertNotNull(name + " returned null", TextFieldName.valueForString(name));
        }
    }

    /**
     * Verifies that an invalid assignment is null.
     */
    public void testInvalidEnum () {
        // Wrong capitalization must not resolve and must not throw.
        String example = "mAinField1";
        try {
            TextFieldName temp = TextFieldName.valueForString(example);
            assertNull("Result of valueForString should be null.", temp);
        } catch (IllegalArgumentException exception) {
            fail("Invalid enum throws IllegalArgumentException.");
        }
    }

    /**
     * Verifies that a null assignment is invalid.
     */
    public void testNullEnum () {
        String example = null;
        try {
            TextFieldName temp = TextFieldName.valueForString(example);
            assertNull("Result of valueForString should be null.", temp);
        } catch (NullPointerException exception) {
            fail("Null string throws NullPointerException.");
        }
    }

    /**
     * Verifies the possible enum values of TextFieldName: the declared constants
     * must match this explicit list exactly (no additions, no removals).
     */
    public void testListEnum() {
        List<TextFieldName> enumValueList = Arrays.asList(TextFieldName.values());

        // Referencing each constant directly also guarantees it exists at compile time.
        List<TextFieldName> enumTestList = Arrays.asList(
                TextFieldName.mainField1, TextFieldName.mainField2,
                TextFieldName.mainField3, TextFieldName.mainField4,
                TextFieldName.statusBar, TextFieldName.mediaClock,
                TextFieldName.mediaTrack,
                TextFieldName.alertText1, TextFieldName.alertText2,
                TextFieldName.alertText3,
                TextFieldName.scrollableMessageBody, TextFieldName.initialInteractionText,
                TextFieldName.navigationText1, TextFieldName.navigationText2,
                TextFieldName.ETA, TextFieldName.totalDistance,
                TextFieldName.audioPassThruDisplayText1, TextFieldName.audioPassThruDisplayText2,
                TextFieldName.sliderHeader, TextFieldName.sliderFooter,
                TextFieldName.menuName, TextFieldName.secondaryText,
                TextFieldName.tertiaryText, TextFieldName.menuTitle,
                TextFieldName.locationName, TextFieldName.locationDescription,
                TextFieldName.addressLines, TextFieldName.phoneNumber);

        assertTrue("Enum value list does not match enum class list",
                enumValueList.containsAll(enumTestList) && enumTestList.containsAll(enumValueList));
    }
}
bsd-3-clause
dushmis/Oracle-Cloud
PaaS-SaaS_HealthCareApp/DoctorPatientCRMExtension/HealthCare/HealthCareWSProxyClient/src/com/oracle/ptsdemo/healthcare/wsclient/osc/person/generated/DeletePerson.java
1563
package com.oracle.ptsdemo.healthcare.wsclient.osc.person.generated; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="personParty" type="{http://xmlns.oracle.com/apps/cdm/foundation/parties/personService/}Person"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "personParty" }) @XmlRootElement(name = "deletePerson") public class DeletePerson { @XmlElement(required = true) protected Person personParty; /** * Gets the value of the personParty property. * * @return * possible object is * {@link Person } * */ public Person getPersonParty() { return personParty; } /** * Sets the value of the personParty property. * * @param value * allowed object is * {@link Person } * */ public void setPersonParty(Person value) { this.personParty = value; } }
bsd-3-clause
act262/freeline
freeline-studio-plugin/src/main/java/com/antfortune/freeline/idea/actions/CrashReporterAction.java
788
package com.antfortune.freeline.idea.actions; import com.antfortune.freeline.idea.models.FreelineConfiguration; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.ToggleAction; /** * Created by huangyong on 17/2/14. */ public class CrashReporterAction extends ToggleAction { private FreelineConfiguration mConfigurationStorage; public CrashReporterAction() { mConfigurationStorage = FreelineConfiguration.getInstance(); } @Override public boolean isSelected(AnActionEvent anActionEvent) { return mConfigurationStorage.DISABLE_CRASH_REPORTER; } @Override public void setSelected(AnActionEvent anActionEvent, boolean b) { mConfigurationStorage.DISABLE_CRASH_REPORTER = b; } }
bsd-3-clause
Clashsoft/Dyvil
compiler/src/main/java/dyvilx/tools/compiler/ast/method/intrinsic/SpecialIntrinsicData.java
5193
package dyvilx.tools.compiler.ast.method.intrinsic;

import dyvil.annotation.internal.Nullable;
import dyvil.reflect.Opcodes;
import dyvilx.tools.asm.Label;
import dyvilx.tools.asm.Type;
import dyvilx.tools.compiler.ast.classes.IClass;
import dyvilx.tools.compiler.ast.expression.IValue;
import dyvilx.tools.compiler.ast.method.IMethod;
import dyvilx.tools.compiler.ast.parameter.ArgumentList;
import dyvilx.tools.compiler.ast.structure.Package;
import dyvilx.tools.compiler.backend.method.MethodWriter;
import dyvilx.tools.compiler.backend.exception.BytecodeException;

/**
 * Intrinsic data backed by a pre-encoded, flattened instruction stream.
 * {@link #writeIntrinsic} decodes the {@code instructions} array entry by entry
 * and replays it through a {@link MethodWriter}, resolving string-pool indices
 * and local jump labels along the way.
 */
public class SpecialIntrinsicData implements IntrinsicData
{
	// Method this intrinsic stands in for; passed to IntrinsicData.writeInsn for
	// opcodes not handled inline below.
	private IMethod method;
	// Flattened opcode stream; field/method opcodes are followed by three
	// string-pool indices, jump/load/store/push opcodes by one operand.
	private int[] instructions;
	// String pool referenced by index from the instruction stream.
	private String[] strings;
	// targets[i] == true marks instruction i as a jump target needing a Label.
	// May be null when the stream contains no jumps (see getLabels()).
	private boolean[] targets;

	public SpecialIntrinsicData(IMethod method, int[] instructions, String[] strings, boolean[] targets)
	{
		this.method = method;
		this.instructions = instructions;
		this.strings = strings;
		this.targets = targets;
	}

	/**
	 * Decodes the instruction stream and emits the corresponding bytecode.
	 * Locals used by the stream are remapped above the writer's current local
	 * count and released again at the end via resetLocals.
	 */
	@Override
	public void writeIntrinsic(MethodWriter writer, IValue receiver, ArgumentList arguments, int lineNumber)
		throws BytecodeException
	{
		// Base index for the stream's relative local-variable slots.
		final int varIndex = writer.localCount();
		final int[] ints = this.instructions;
		// insnIndex counts decoded instructions (not array cells) so it lines up
		// with the per-instruction labels array.
		int insnIndex = 0;

		final Label[] labels = this.getLabels();
		Label label;
		for (int i = 0, length = ints.length; i < length; i++)
		{
			// Place a label before this instruction if something jumps here.
			if (labels != null && (label = labels[insnIndex++]) != null)
			{
				writer.visitTargetLabel(label);
			}

			final int opcode = ints[i];
			if (Opcodes.isFieldOpcode(opcode))
			{
				// Field access: three trailing operands are string-pool indices.
				final String owner = this.strings[ints[i + 1]];
				final String name = this.strings[ints[i + 2]];
				final String desc = this.strings[ints[i + 3]];
				writer.visitFieldInsn(opcode, owner, name, desc);
				i += 3;
				continue;
			}
			if (Opcodes.isMethodOpcode(opcode))
			{
				// Method call: same three-operand layout as field access.
				final String owner = this.strings[ints[i + 1]];
				final String name = this.strings[ints[i + 2]];
				final String desc = this.strings[ints[i + 3]];

				writer.visitLineNumber(lineNumber);
				visitMethodInsn(writer, opcode, owner, name, desc);
				i += 3;
				continue;
			}
			if (Opcodes.isJumpOpcode(opcode))
			{
				// Operand is an instruction index; its label must exist because
				// targets flagged it (hence the suppressed null warning).
				//noinspection ConstantConditions
				writer.visitJumpInsn(opcode, labels[ints[i + 1]]);
				i += 1;
				continue;
			}
			if (Opcodes.isLoadOpcode(opcode) || Opcodes.isStoreOpcode(opcode))
			{
				// Stream-relative local slot, offset by the caller's local count.
				writer.visitVarInsn(opcode, varIndex + ints[i + 1]);
				i += 1;
				continue;
			}

			switch (opcode)
			{
			case Opcodes.BIPUSH:
			case Opcodes.SIPUSH:
				// Small int pushes are emitted as LDC of the operand value.
				writer.visitLdcInsn(ints[i + 1]);
				i++;
				continue;
			case Opcodes.LDC:
				// Constant encoded in the string pool with a type-tag prefix.
				final String constant = this.strings[ints[i + 1]];
				writeLDC(writer, constant);
				i++;
				continue;
			}

			// Anything else (incl. receiver/argument loads) is delegated.
			IntrinsicData.writeInsn(writer, this.method, opcode, receiver, arguments, lineNumber);
		}

		// A label may target the position just past the last instruction.
		if (labels != null && (label = labels[insnIndex]) != null)
		{
			writer.visitTargetLabel(label);
		}

		writer.resetLocals(varIndex);
	}

	/**
	 * Builds one Label per flagged jump target.
	 *
	 * @return a sparse Label array parallel to {@code targets}, or {@code null}
	 *         when there are no targets at all
	 */
	@Nullable
	public Label[] getLabels()
	{
		if (this.targets == null)
		{
			return null;
		}

		final int length = this.targets.length;
		if (length <= 0)
		{
			return null;
		}

		final Label[] labels = new Label[length];
		for (int i = 0; i < length; i++)
		{
			if (this.targets[i])
			{
				labels[i] = new Label();
			}
		}
		return labels;
	}

	/**
	 * Emits a method instruction, deriving the "is interface" flag from the
	 * invocation opcode (or, for static/unknown calls, from the resolved class).
	 */
	private static void visitMethodInsn(MethodWriter writer, int opcode, String owner, String name, String desc)
	{
		final boolean isInterface;
		switch (opcode)
		{
		case Opcodes.INVOKEINTERFACE:
			// invokeINTERFACE -> definitely an interface
			isInterface = true;
			break;
		case Opcodes.INVOKESPECIAL:
		case Opcodes.INVOKEVIRTUAL:
			// private or virtual -> can't be an interface
			isInterface = false;
			break;
		case Opcodes.INVOKESTATIC:
			// check the class
		default:
			final IClass iclass = Package.rootPackage.resolveGlobalClass(owner);
			isInterface = iclass != null && iclass.isInterface();
			break;
		}
		writer.visitMethodInsn(opcode, owner, name, desc, isInterface);
	}

	/**
	 * Conditional-jump form: emits the intrinsic, then jumps to {@code dest}
	 * when the produced value is non-zero (IFNE).
	 */
	@Override
	public void writeIntrinsic(MethodWriter writer, Label dest, IValue receiver, ArgumentList arguments, int lineNumber)
		throws BytecodeException
	{
		this.writeIntrinsic(writer, receiver, arguments, lineNumber);
		writer.visitJumpInsn(Opcodes.IFNE, dest);
	}

	/**
	 * Inverted conditional-jump form: jumps to {@code dest} when the produced
	 * value is zero (IFEQ).
	 */
	@Override
	public void writeInvIntrinsic(MethodWriter writer, Label dest, IValue receiver, ArgumentList arguments,
		int lineNumber) throws BytecodeException
	{
		this.writeIntrinsic(writer, receiver, arguments, lineNumber);
		writer.visitJumpInsn(Opcodes.IFEQ, dest);
	}

	/**
	 * Decodes a type-tagged constant from the string pool and emits an LDC.
	 * The first character selects the type: I=int, L=long, F=float, D=double,
	 * S/"/' = string, C = class/type literal. Unknown tags are silently ignored.
	 */
	private static void writeLDC(MethodWriter writer, String constant)
	{
		switch (constant.charAt(0))
		{
		case 'I':
			writer.visitLdcInsn(Integer.parseInt(constant.substring(1)));
			return;
		case 'L':
			writer.visitLdcInsn(Long.parseLong(constant.substring(1)));
			return;
		case 'F':
			writer.visitLdcInsn(Float.parseFloat(constant.substring(1)));
			return;
		case 'D':
			writer.visitLdcInsn(Double.parseDouble(constant.substring(1)));
			return;
		case 'S':
		case '"':
		case '\'':
			writer.visitLdcInsn(constant.substring(1));
			return;
		case 'C':
			writer.visitLdcInsn(Type.getType(constant.substring(1)));
			return;
		}
	}
}
bsd-3-clause
QuietOne/MonkeyBrains
src/monkeystuff/weapon/AbstractFirearmWeapon.java
2666
/**
 * Copyright (c) 2014, jMonkeyEngine All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of 'jMonkeyEngine' nor the names of its contributors may be
 * used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package monkeystuff.weapon;

/**
 * Abstract bullet based weapon with implementation for firearm weapons.
 *
 * @author Tihomir Radosavljević
 * @version 1.0.0
 */
public abstract class AbstractFirearmWeapon extends AbstractBulletBasedWeapon {

    /**
     * Number of bullet that this weapon have left. Set to -1 if infinite.
     */
    protected int numberOfBullets;

    /**
     * Check if there are any bullets in the weapon.
     *
     * @return true if at least one bullet (or unlimited ammo) is available,
     *         false when the weapon is empty
     */
    public boolean isUsable() {
        // Any non-zero count counts as usable, including the -1 "infinite" sentinel.
        return numberOfBullets != 0;
    }

    /**
     * @return remaining bullet count, or -1 for unlimited ammo
     */
    public int getNumberOfBullets() {
        return numberOfBullets;
    }

    /**
     * @param numberOfBullets new bullet count; use -1 for unlimited ammo
     */
    public void setNumberOfBullets(int numberOfBullets) {
        this.numberOfBullets = numberOfBullets;
    }

    /**
     * Adds bullets to the current count (e.g. on ammo pickup).
     *
     * @param numberOfBullets number of bullets to add
     */
    public void addNumberOfBullets(int numberOfBullets) {
        this.numberOfBullets += numberOfBullets;
    }

    /**
     * @return true when the weapon never runs out (count is the -1 sentinel)
     */
    @Override
    protected boolean isUnlimitedUse() {
        return numberOfBullets == -1;
    }

    /**
     * Consumes one bullet per use.
     * NOTE(review): also decrements when the count is the -1 "infinite"
     * sentinel, matching the original behavior — confirm the superclass guards
     * this via isUnlimitedUse() before calling.
     */
    @Override
    protected void useWeapon() {
        numberOfBullets--;
    }
}
bsd-3-clause
roth1002/infer
infer/models/java/src/java/io/ObjectInputStream.java
4256
package java.io;

import com.facebook.infer.models.InferUndefined;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

/**
 * Infer analysis model of {@code java.io.ObjectInputStream}: method bodies are
 * replaced with {@link InferUndefined} stubs that nondeterministically return a
 * value or throw {@link IOException}, so the analyzer can track resource usage
 * and exceptional flow without the real deserialization logic.
 */
public class ObjectInputStream extends InputStream {

    // Fields below mirror the modeled class so field accesses resolve during
    // analysis; they are not read by this model's method bodies.
    // NOTE(review): field layout presumably copied from a JDK/Harmony
    // implementation — confirm against Infer's modeling conventions.
    private InputStream emptyStream;

    private static Object UNSHARED_OBJ;

    private boolean hasPushbackTC;

    private byte pushbackTC;

    private int nestedLevels;

    private int nextHandle;

    // Wrapped source stream; the only field this model actually uses
    // (assigned in the constructor, closed in close()).
    private DataInputStream input;

    private DataInputStream primitiveTypes;

    private InputStream primitiveData;

    private boolean enableResolve;

    private ArrayList<Object> objectsRead;

    private Object currentObject;

    private ObjectStreamClass currentClass;

    private InputValidationDesc[] validations;

    private boolean subclassOverridingImplementation;

    private ClassLoader callerClassLoader;

    private boolean mustResolve;

    private int descriptorHandle;

    private static HashMap<String, Class<?>> PRIMITIVE_CLASSES;

    // Mirrors the validator registration record of the modeled class.
    static class InputValidationDesc {
        ObjectInputValidation validator;

        int priority;
    }

    private static ClassLoader bootstrapLoader;

    private static ClassLoader systemLoader;

    private HashMap<Class<?>, List<Class<?>>> cachedSuperclasses;

    /**
     * Wraps the given stream and may throw IOException, like the real
     * constructor (which reads the stream header).
     */
    public ObjectInputStream(InputStream in) throws IOException {
        this.input = new DataInputStream(in);
        InferUndefined.can_throw_ioexception_void();
    }

    // Protected no-arg constructor for subclasses; intentionally a no-op here.
    protected ObjectInputStream() throws IOException, SecurityException {
    }

    public int available() throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    // Delegates to the wrapped stream so Infer sees the resource released.
    public void close() throws IOException {
        input.close();
    }

    public void defaultReadObject() throws IOException {
        InferUndefined.can_throw_ioexception_void();
    }

    public int read() throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    public int read(byte b[]) throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    public int read(byte b[], int off, int len) throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    public boolean readBoolean() throws IOException {
        return InferUndefined.can_throw_ioexception_boolean();
    }

    public byte readByte() throws IOException {
        return InferUndefined.can_throw_ioexception_byte();
    }

    public char readChar() throws IOException {
        return InferUndefined.can_throw_ioexception_char();
    }

    public double readDouble() throws IOException {
        return InferUndefined.can_throw_ioexception_double();
    }

    // Modeled as always failing; the real method can also return a GetField.
    public ObjectInputStream.GetField readFields() throws IOException {
        throw new IOException();
    }

    public float readFloat() throws IOException {
        return InferUndefined.can_throw_ioexception_float();
    }

    public void readFully(byte[] buf) throws IOException {
        InferUndefined.can_throw_ioexception_void();
    }

    public void readFully(byte[] buf, int off, int len) throws IOException {
        InferUndefined.can_throw_ioexception_void();
    }

    public int readInt() throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    public long readLong() throws IOException {
        return InferUndefined.can_throw_ioexception_long();
    }

    // NOTE(review): unlike the real JDK method, this model does not declare
    // ClassNotFoundException — presumably a deliberate simplification; confirm.
    public final Object readObject() throws IOException {
        return InferUndefined.can_throw_ioexception_object();
    }

    public short readShort() throws IOException {
        return InferUndefined.can_throw_ioexception_short();
    }

    public Object readUnshared() throws IOException, ClassNotFoundException {
        return InferUndefined.can_throw_ioexception_object();
    }

    public int readUnsignedByte() throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    public int readUnsignedShort() throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    public String readUTF() throws IOException {
        return InferUndefined.can_throw_ioexception_string();
    }

    public int skipBytes(int len) throws IOException {
        return InferUndefined.can_throw_ioexception_int();
    }

    // Empty stand-in for the real GetField API; only the type needs to exist.
    public static abstract class GetField {
    }
}
bsd-3-clause
patrickianwilson/vijava-contrib
src/main/java/com/vmware/vim25/VmRelocateSpecEvent.java
1829
/*================================================================================
Copyright (c) 2013 Steve Jin. All Rights Reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* Neither the names of copyright holders nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
================================================================================*/

package com.vmware.vim25;

/**
 * Marker event subtype in the vSphere API data-object hierarchy: it declares no
 * members of its own beyond what it inherits from {@link VmEvent}.
 * NOTE(review): presumably emitted in connection with a VM relocate
 * specification, per its name — the API model defines no extra payload here.
 *
 * @author Steve Jin (http://www.doublecloud.org)
 * @version 5.1
 */
@SuppressWarnings("all")
public class VmRelocateSpecEvent extends VmEvent {
}
bsd-3-clause
tivv/pgjdbc
org/postgresql/jdbc4/AbstractJdbc4DatabaseMetaData.java
4190
/*-------------------------------------------------------------------------
*
* Copyright (c) 2004-2011, PostgreSQL Global Development Group
*
*
*-------------------------------------------------------------------------
*/
package org.postgresql.jdbc4;

import java.sql.*;
import java.util.Vector;

import org.postgresql.core.*;

/**
 * JDBC 4 layer of the database metadata implementation. Most methods either
 * delegate to the JDBC 3 base class with an explicit JDBC-version argument,
 * or report the feature as unimplemented.
 */
public abstract class AbstractJdbc4DatabaseMetaData extends org.postgresql.jdbc3.AbstractJdbc3DatabaseMetaData
{

    public AbstractJdbc4DatabaseMetaData(AbstractJdbc4Connection conn)
    {
        super(conn);
    }

    /** Not implemented by this driver. */
    public RowIdLifetime getRowIdLifetime() throws SQLException
    {
        throw org.postgresql.Driver.notImplemented(this.getClass(), "getRowIdLifetime()");
    }

    public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException
    {
        // 4 = JDBC major version; selects the JDBC4-shaped result columns.
        return getSchemas(4, catalog, schemaPattern);
    }

    public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException
    {
        return true;
    }

    public boolean autoCommitFailureClosesAllResultSets() throws SQLException
    {
        return false;
    }

    /**
     * Builds a driver-side result set describing supported client info
     * properties. Only ApplicationName is reported, and only against
     * servers of version 9.0 or newer.
     */
    public ResultSet getClientInfoProperties() throws SQLException
    {
        Field[] f = new Field[4];
        f[0] = new Field("NAME", Oid.VARCHAR);
        f[1] = new Field("MAX_LEN", Oid.INT4);
        f[2] = new Field("DEFAULT_VALUE", Oid.VARCHAR);
        f[3] = new Field("DESCRIPTION", Oid.VARCHAR);

        // Each element is one row: an array of encoded column values.
        Vector<byte[][]> v = new Vector<byte[][]>();

        if (connection.haveMinimumServerVersion("9.0")) {
            byte[][] tuple = new byte[4][];
            tuple[0] = connection.encodeString("ApplicationName");
            tuple[1] = connection.encodeString(Integer.toString(getMaxNameLength()));
            tuple[2] = connection.encodeString("");
            tuple[3] = connection.encodeString("The name of the application currently utilizing the connection.");
            v.addElement(tuple);
        }

        return (ResultSet) ((BaseStatement)createMetaDataStatement()).createDriverResultSet(f, v);
    }

    public boolean providesQueryObjectGenerator() throws SQLException
    {
        return false;
    }

    /** Not implemented by this driver. */
    public boolean isWrapperFor(Class<?> iface) throws SQLException
    {
        throw org.postgresql.Driver.notImplemented(this.getClass(), "isWrapperFor(Class<?>)");
    }

    /** Not implemented by this driver. */
    public <T> T unwrap(Class<T> iface) throws SQLException
    {
        throw org.postgresql.Driver.notImplemented(this.getClass(), "unwrap(Class<T>)");
    }

    /** Not implemented by this driver. */
    public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern)
    throws SQLException
    {
        throw org.postgresql.Driver.notImplemented(this.getClass(), "getFunction(String, String, String)");
    }

    /** Not implemented by this driver. */
    public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern)
    throws SQLException
    {
        throw org.postgresql.Driver.notImplemented(this.getClass(), "getFunctionColumns(String, String, String, String)");
    }

    public int getJDBCMajorVersion() throws SQLException
    {
        return 4;
    }

    public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException
    {
        return getColumns(4, catalog, schemaPattern, tableNamePattern, columnNamePattern);
    }

    public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) throws SQLException
    {
        return getProcedures(4, catalog, schemaPattern, procedureNamePattern);
    }

    public ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) throws SQLException
    {
        return getProcedureColumns(4, catalog, schemaPattern, procedureNamePattern, columnNamePattern);
    }

    /** Not implemented by this driver. */
    public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException
    {
        throw org.postgresql.Driver.notImplemented(this.getClass(), "getPseudoColumns(String, String, String, String)");
    }

    public boolean generatedKeyAlwaysReturned() throws SQLException
    {
        return true;
    }
}
bsd-3-clause
dushmis/Oracle-Cloud
PaaS-SaaS_HealthCareApp/DoctorPatientCRMExtension/HealthCare/HealthCareWSProxyClient/src/com/oracle/ptsdemo/healthcare/wsclient/osc/opty/generated/XMLInfo.java
1371
package com.oracle.ptsdemo.healthcare.wsclient.osc.opty.generated;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code XMLInfo} complex type from the
 * {@code commonj.sdo/xml} namespace.
 *
 * <p>Schema fragment:
 *
 * <pre>
 * &lt;complexType name="XMLInfo">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;attribute name="xmlElement" type="{commonj.sdo}Boolean" />
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "XMLInfo", namespace = "commonj.sdo/xml")
public class XMLInfo {

    /** Optional {@code xmlElement} attribute; {@code null} when absent. */
    @XmlAttribute
    protected Boolean xmlElement;

    /**
     * Returns the {@code xmlElement} attribute.
     *
     * @return the attribute value, or {@code null} if it was not present
     */
    public Boolean isXmlElement() {
        return xmlElement;
    }

    /**
     * Sets the {@code xmlElement} attribute.
     *
     * @param value the new attribute value; may be {@code null} to unset
     */
    public void setXmlElement(Boolean value) {
        this.xmlElement = value;
    }
}
bsd-3-clause
Clashsoft/Dyvil
compiler/src/main/java/dyvilx/tools/compiler/phase/CompilerPhase.java
886
package dyvilx.tools.compiler.phase;

import dyvil.annotation.internal.NonNull;
import dyvilx.tools.compiler.DyvilCompiler;

import java.util.function.Consumer;

/**
 * A compiler phase backed by a {@link Consumer} callback. Phases order
 * themselves by their numeric ID via {@link #compareTo}.
 */
public class CompilerPhase implements ICompilerPhase {

    /** Ordering key; lower IDs sort first. */
    private final int id;
    /** Display name, also returned by {@link #toString()}. */
    private final String name;
    /** Callback invoked when the phase is applied to a compiler instance. */
    private final Consumer<DyvilCompiler> action;

    public CompilerPhase(int id, String name, Consumer<DyvilCompiler> apply) {
        this.id = id;
        this.name = name;
        this.action = apply;
    }

    @Override
    public int getID() {
        return id;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void apply(DyvilCompiler compiler) {
        action.accept(compiler);
    }

    @Override
    public int compareTo(@NonNull ICompilerPhase o) {
        return Integer.compare(id, o.getID());
    }

    @Override
    public String toString() {
        return name;
    }
}
bsd-3-clause
oci-pronghorn/PronghornIoT
foglight/src/main/java/com/ociweb/iot/grove/thumb_joystick/ThumbJoystickListener.java
132
package com.ociweb.iot.grove.thumb_joystick; public interface ThumbJoystickListener { public void joystickValues(int x, int y); }
bsd-3-clause
NCIP/caaers
caAERS/software/web/src/test/java/gov/nih/nci/cabig/caaers/web/participant/SubjectFlowAbstractTabTestCase.java
2469
/*******************************************************************************
 * Copyright SemanticBits, Northwestern University and Akaza Research
 * 
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/caaers/LICENSE.txt for details.
 ******************************************************************************/
package gov.nih.nci.cabig.caaers.web.participant;

import gov.nih.nci.cabig.caaers.domain.*;
import gov.nih.nci.cabig.caaers.web.WebTestCase;
import gov.nih.nci.cabig.caaers.web.fields.TabWithFields;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.validation.BindException;
import org.springframework.validation.Errors;

import java.util.ArrayList;

/**
 * Base class for subject-flow tab test cases. Before each test it wires up the
 * tab under test, a command holding an empty assignment object graph, and a
 * binding {@link Errors} instance.
 *
 * @author Ion C. Olaru
 */
public abstract class SubjectFlowAbstractTabTestCase extends WebTestCase {

    private static final Log log = LogFactory.getLog(SubjectFlowAbstractTabTestCase.class);

    protected TabWithFields tab;
    protected Errors errors;
    protected ParticipantInputCommand command;
    protected StudyParticipantAssignment assignment;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        tab = createTab();
        command = createCommand();
        errors = new BindException(command, "command");
    }

    /**
     * Builds a command whose assignment graph is fully populated with freshly
     * constructed, empty domain objects.
     */
    protected ParticipantInputCommand createCommand() {
        StudyParticipantAssignment newAssignment = new StudyParticipantAssignment();
        newAssignment.setParticipant(new Participant());

        StudySite site = new StudySite();
        site.setStudy(new LocalStudy());
        site.setOrganization(new LocalOrganization());
        newAssignment.setStudySite(site);

        newAssignment.setPreExistingConditions(new ArrayList<StudyParticipantPreExistingCondition>());
        newAssignment.setConcomitantMedications(new ArrayList<StudyParticipantConcomitantMedication>());
        newAssignment.setPriorTherapies(new ArrayList<StudyParticipantPriorTherapy>());

        ParticipantInputCommand newCommand = new ParticipantInputCommand();
        newCommand.setAssignment(newAssignment);
        return newCommand;
    }

    /** Subclasses supply the concrete tab under test. */
    protected abstract TabWithFields createTab();

    public TabWithFields getTab() {
        return tab;
    }

    public Errors getErrors() {
        return errors;
    }

    public StudyParticipantAssignment getAssignment() {
        return command.getAssignment();
    }
}
bsd-3-clause
jblancoperez/jaxen
src/java/main/org/jaxen/expr/DefaultLocationPath.java
5266
/* * $Header$ * $Revision: 1345 $ * $Date: 2009-10-22 07:25:23 -0400 (Thu, 22 Oct 2009) $ * * ==================================================================== * * Copyright 2000-2002 bob mcwhirter & James Strachan. * All rights reserved. * * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of the Jaxen Project nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * ==================================================================== * This software consists of voluntary contributions made by many * individuals on behalf of the Jaxen Project and was originally * created by bob mcwhirter <bob@werken.com> and * James Strachan <jstrachan@apache.org>. For more information on the * Jaxen Project, please see <http://www.jaxen.org/>. * * $Id: DefaultLocationPath.java 1345 2009-10-22 11:25:23Z elharo $ */ package org.jaxen.expr; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.jaxen.Context; import org.jaxen.ContextSupport; import org.jaxen.JaxenException; abstract class DefaultLocationPath extends DefaultExpr implements LocationPath { private List steps; /** * Create a new empty location path. */ DefaultLocationPath() { this.steps = new LinkedList(); } public void addStep(Step step) { getSteps().add(step); } public List getSteps() { return this.steps; } public Expr simplify() { Iterator stepIter = getSteps().iterator(); Step eachStep = null; while (stepIter.hasNext()) { eachStep = (Step) stepIter.next(); eachStep.simplify(); } return this; } public String getText() { StringBuffer buf = new StringBuffer(); Iterator stepIter = getSteps().iterator(); while (stepIter.hasNext()) { buf.append(((Step) stepIter.next()).getText()); if (stepIter.hasNext()) { buf.append("/"); } } return buf.toString(); } public String toString() { StringBuffer buf = new StringBuffer(); Iterator stepIter = getSteps().iterator(); while (stepIter.hasNext()) { buf.append(stepIter.next().toString()); if (stepIter.hasNext()) { buf.append("/"); } } return buf.toString(); } public boolean isAbsolute() { return false; } public Object evaluate(Context context) throws JaxenException { List nodeSet = context.getNodeSet(); List contextNodeSet = new ArrayList(nodeSet); ContextSupport support = context.getContextSupport(); Context stepContext = new Context(support); Iterator 
stepIter = getSteps().iterator(); while ( stepIter.hasNext() ) { Step eachStep = (Step) stepIter.next(); stepContext.setNodeSet(contextNodeSet); contextNodeSet = eachStep.evaluate(stepContext); // now we need to reverse the list if this is a reverse axis if (isReverseAxis(eachStep)) { Collections.reverse(contextNodeSet); } } if (getSteps().size() > 1 || nodeSet.size() > 1) { Collections.sort(contextNodeSet, new NodeComparator(support.getNavigator())); } return contextNodeSet; } private boolean isReverseAxis(Step step) { int axis = step.getAxis(); return axis == org.jaxen.saxpath.Axis.PRECEDING || axis == org.jaxen.saxpath.Axis.PRECEDING_SIBLING || axis == org.jaxen.saxpath.Axis.ANCESTOR || axis == org.jaxen.saxpath.Axis.ANCESTOR_OR_SELF; } }
bsd-3-clause
jeffbrown/prevayler
extras/facade/src/test/java/org/prevayler/contrib/facade/CampGuideTransactionHint.java
2234
/* * Copyright (c) 2003 Jay Sachs. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * 3. Neither the name "Prevayler" nor the names of its contributors * may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.prevayler.contrib.facade; import java.lang.reflect.Method; import java.util.Date; /** * @author Jacob Kjome [hoju@visi.com] */ public class CampGuideTransactionHint implements TransactionHint { private static final long serialVersionUID = 1644083066884927855L; /** * @see org.prevayler.contrib.facade.TransactionHint#preExecute(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], java.util.Date) */ public void preExecute(Object p_prevalentSystem, Method p_method, Object[] p_args, Date p_timestamp) throws Exception { ((CampGuide) p_prevalentSystem).setTransactionTime(p_timestamp); } }
bsd-3-clause
beiyuxinke/CONNECT
Product/Production/Common/CONNECTCoreLib/src/test/java/gov/hhs/fha/nhinc/webserviceproxy/WebServiceProxyHelperRetryDelayTest.java
4095
/* * Copyright (c) 2009-2015, United States Government, as represented by the Secretary of Health and Human Services. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the United States Government nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package gov.hhs.fha.nhinc.webserviceproxy; import static org.junit.Assert.assertEquals; import gov.hhs.fha.nhinc.properties.PropertyAccessException; import org.jmock.Expectations; import org.jmock.integration.junit4.JMock; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(JMock.class) public class WebServiceProxyHelperRetryDelayTest extends AbstractWebServiceProxyHelpTest { WebServiceProxyHelperProperties oHelper; @Before public void before() throws PropertyAccessException { timeoutExpectation(mockPropertyAccessor, Expectations.returnValue("300")); retryAttemptsExpectation(mockPropertyAccessor, Expectations.returnValue("5")); exceptionExpectation(mockPropertyAccessor, Expectations.returnValue("PropertyAccessException")); } /** * Test the GetRetryDelay method happy path. * * @throws PropertyAccessException */ @Test public void testGetRetryDelayHappyPath() throws PropertyAccessException { retryDelayExpectation(mockPropertyAccessor, Expectations.returnValue("300")); oHelper = new WebServiceProxyHelperProperties(mockPropertyAccessor); int iRetryDelay = oHelper.getRetryDelay(); assertEquals("RetryDelay failed.", 300, iRetryDelay); } /** * Test the GetRetryDelay method with PropertyAccessException. * * @throws PropertyAccessException */ @Test public void testGetRetryDelayPropertyException() throws PropertyAccessException { retryDelayExpectation(mockPropertyAccessor, Expectations.throwException(new PropertyAccessException("Failed to retrieve property."))); oHelper = new WebServiceProxyHelperProperties(mockPropertyAccessor); int iRetryDelay = oHelper.getRetryDelay(); assertEquals("getRetryDelay failed: ", 0, iRetryDelay); } /** * Test the GetRetryDelay method with NumberFormatException. 
* * @throws PropertyAccessException */ @Test public void testGetRetryDelayNumberFormatException() throws PropertyAccessException { retryDelayExpectation(mockPropertyAccessor, Expectations.returnValue("NotANumber")); oHelper = new WebServiceProxyHelperProperties(mockPropertyAccessor); int iRetryDelay = oHelper.getRetryDelay(); assertEquals("getRetryDelay failed: ", 0, iRetryDelay); } }
bsd-3-clause
ksclarke/basex
basex-core/src/main/java/org/basex/query/func/db/DbRetrieve.java
799
package org.basex.query.func.db; import static org.basex.query.QueryError.*; import org.basex.data.*; import org.basex.io.*; import org.basex.query.*; import org.basex.query.value.item.*; import org.basex.util.*; /** * Function implementation. * * @author BaseX Team 2005-15, BSD License * @author Christian Gruen */ public final class DbRetrieve extends DbAccess { @Override public Item item(final QueryContext qc, final InputInfo ii) throws QueryException { final Data data = checkData(qc); final String path = path(1, qc); if(data.inMemory()) throw BXDB_MEM_X.get(info, data.meta.name); final IOFile file = data.meta.binary(path); if(file == null || !file.exists() || file.isDir()) throw WHICHRES_X.get(info, path); return new B64Stream(file, IOERR_X); } }
bsd-3-clause
dushmis/Oracle-Cloud
PaaS-SaaS_DataSync/DataSync/DataSync/DataSync/src/com/oracle/pts/custom/wsclient/generated/DoubleResult.java
1952
package com.oracle.pts.custom.wsclient.generated; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for DoubleResult complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="DoubleResult"> * &lt;complexContent> * &lt;extension base="{http://xmlns.oracle.com/adf/svc/types/}MethodResult"> * &lt;sequence> * &lt;element name="Value" type="{http://www.w3.org/2001/XMLSchema}double" maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "DoubleResult", propOrder = { "value" }) public class DoubleResult extends MethodResult { @XmlElement(name = "Value", type = Double.class) protected List<Double> value; /** * Gets the value of the value property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the value property. * * <p> * For example, to add a new item, do as follows: * <pre> * getValue().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Double } * * */ public List<Double> getValue() { if (value == null) { value = new ArrayList<Double>(); } return this.value; } }
bsd-3-clause
Tietoarkisto/metka
metka/src/main/java/fi/uta/fsd/metkaAmqp/MetkaMessage.java
6237
/************************************************************************************** * Copyright (c) 2013-2015, Finnish Social Science Data Archive/University of Tampere * * * * All rights reserved. * * * * Redistribution and use in source and binary forms, with or without modification, * * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * * this list of conditions and the following disclaimer in the documentation * * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors * * may be used to endorse or promote products derived from this software * * without specific prior written permission. * * * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 * **************************************************************************************/
package fi.uta.fsd.metkaAmqp;

import fi.uta.fsd.metka.enums.Language;
import fi.uta.fsd.metka.enums.ReferenceType;
import fi.uta.fsd.metka.model.configuration.Reference;
import fi.uta.fsd.metka.storage.collecting.ReferenceCollector;
import fi.uta.fsd.metka.storage.repository.enums.SerializationResults;
import fi.uta.fsd.metka.storage.util.JSONUtil;
import fi.uta.fsd.metka.transfer.reference.*;
import fi.uta.fsd.metkaAmqp.factories.PayloadFactory;
import fi.uta.fsd.metkaAmqp.payloads.PayloadObject;
import org.apache.commons.lang3.tuple.Pair;

import java.util.List;

/**
 * An outgoing AMQP message of a given {@code MetkaMessageType}. On
 * {@link #send}, the exchange, resource and event names are resolved through
 * the reference system ("amqp_messages" configuration), the payload is
 * serialized to JSON, and the result is written to the messenger.
 *
 * @param <T> payload type carried by this message
 */
public class MetkaMessage<T extends PayloadObject> {
    // References into the "amqp_messages" configuration used to resolve
    // the exchange/resource for the message family and per-event overrides.
    protected final Reference familyExchangeRef;
    protected final Reference familyResourceRef;
    protected final Reference eventRef;
    protected final Reference eventExchangeRef;
    // Reusable request whose root path is mutated per lookup (see getValue/getTitle).
    protected final ReferencePathRequest request;
    protected final PayloadFactory<T> factory;
    // Key identifying this message's event within the family configuration.
    protected final String messageKey;
    protected final T payload;

    MetkaMessage(MetkaMessageType<T> type, T payload) {
        this.factory = type.getFactory();
        this.payload = payload;
        familyExchangeRef = new Reference("family_exchange_ref", ReferenceType.DEPENDENCY, "amqp_messages", "exchange", null);
        familyResourceRef = new Reference("resource_ref", ReferenceType.DEPENDENCY, "amqp_messages", "resource", null);
        eventRef = new Reference("message_event_ref", ReferenceType.DEPENDENCY, "amqp_messages", "events.key", "event");
        eventExchangeRef = new Reference("message_exchange_ref", ReferenceType.DEPENDENCY, "amqp_messages", "events.key", "exchange");
        this.messageKey = type.getMessage();
        // Root of every lookup: the message family entry in "amqp_messages";
        // returnFirst limits results to the first match.
        request = new ReferencePathRequest();
        request.setLanguage(Language.DEFAULT);
        request.setReturnFirst(true);
        request.setRoot(new ReferencePath(new Reference("family_ref", ReferenceType.JSON, "amqp_messages", "family", null), type.getFamily()));
    }

    /**
     * Resolves routing information, serializes the payload and writes it to
     * the messenger. Silently returns without sending if serialization fails.
     */
    public void send(ReferenceCollector references, JSONUtil json, Messenger.AmqpMessenger messenger) {
        String exchange = getExchange(references);
        String resource = getResource(references);
        String event = getEvent(references);
        String routingKey = factory.buildRoutingKey(resource, event, payload);
        Pair<SerializationResults, String> payloadPair = json.serialize(factory.build(resource, event, payload));
        if(payloadPair.getLeft() != SerializationResults.SERIALIZATION_SUCCESS) {
            // NOTE(review): serialization failure drops the message without
            // logging — presumably intentional best-effort; confirm.
            return;
        }
        // getBytes() uses the platform default charset here — assumed UTF-8
        // in deployment; TODO confirm against the broker's expectations.
        messenger.write(exchange, routingKey, payloadPair.getRight().getBytes());
        messenger.clean();
    }

    // Event-specific exchange override, falling back to the family exchange.
    private String getExchange(ReferenceCollector references) {
        String exchange = getTitle(references, new ReferencePath(eventExchangeRef, messageKey));
        return (exchange != null ? exchange : getValue(references, familyExchangeRef));
    }

    // Resource name configured for the message family.
    private String getResource(ReferenceCollector references) {
        return getValue(references, familyResourceRef);
    }

    // Event name configured for this message's key.
    private String getEvent(ReferenceCollector references) {
        return getTitle(references, new ReferencePath(eventRef, messageKey));
    }

    // Looks up the first option's value for the given reference under the
    // family root; returns null when nothing matches. Mutates the shared
    // request's root path, so lookups must not run concurrently.
    private String getValue(ReferenceCollector references, Reference reference) {
        request.getRoot().setNext(new ReferencePath(reference, null));
        List<ReferenceOption> options = references.handleReferenceRequest(request).getRight();
        if(options.isEmpty()) {
            return null;
        }
        return options.get(0).getValue();
    }

    // Like getValue but returns the first option's title text instead.
    private String getTitle(ReferenceCollector references, ReferencePath path) {
        request.getRoot().setNext(path);
        List<ReferenceOption> options = references.handleReferenceRequest(request).getRight();
        if(options.isEmpty()) {
            return null;
        }
        return options.get(0).getTitle().getValue();
    }
}
bsd-3-clause
PetrF0X/podio-java
src/main/java/com/podio/common/AuthorizationEntityType.java
395
package com.podio.common; import org.codehaus.jackson.annotate.JsonCreator; import org.codehaus.jackson.annotate.JsonValue; public enum AuthorizationEntityType { USER, APP, SYSTEM; @Override @JsonValue public String toString() { return name().toLowerCase(); } @JsonCreator public static AuthorizationEntityType getByName(String value) { return valueOf(value.toUpperCase()); } }
mit