repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
zhangwei5095/memcached-session-manager | core/src/test/java/de/javakaffee/web/msm/RequestTrackingHostValveTest.java | 7744 | /*
* Copyright 2009 Martin Grotzke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package de.javakaffee.web.msm;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import static org.testng.Assert.assertEquals;
import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import org.apache.catalina.Context;
import org.apache.catalina.Host;
import org.apache.catalina.Valve;
import org.apache.catalina.connector.Request;
import org.apache.catalina.connector.Response;
import org.apache.tomcat.util.http.ServerCookie;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import de.javakaffee.web.msm.MemcachedSessionService.SessionManager;
/**
* Test the {@link RequestTrackingHostValve}.
*
* @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a>
* @version $Id$
*/
public abstract class RequestTrackingHostValveTest {

    /** Mocked service that the valve must notify about session backup / request completion. */
    protected MemcachedSessionService _service;
    private RequestTrackingHostValve _sessionTrackerValve;
    private Valve _nextValve;
    private Request _request;
    private Response _response;

    /**
     * Creates fresh mocks for the service, request and response, wires a mocked
     * container hierarchy (context -&gt; host) and installs the valve under test
     * with a mocked "next" valve in the pipeline.
     */
    @BeforeMethod
    public void setUp() throws Exception {
        _service = mock( MemcachedSessionService.class );
        _request = mock( Request.class );
        _response = mock( Response.class );
        final Context _contextContainer = mock(Context.class);
        final Host _hostContainer = mock(Host.class);
        final SessionManager _manager = mock(SessionManager.class);
        when(_service.getManager()).thenReturn(_manager);
        when(_manager.getContext()).thenReturn(_contextContainer);
        when(_contextContainer.getParent()).thenReturn(_hostContainer);
        when(_contextContainer.getPath()).thenReturn("/");
        _sessionTrackerValve = createSessionTrackerValve();
        _nextValve = mock( Valve.class );
        _sessionTrackerValve.setNext( _nextValve );
        _sessionTrackerValve.setContainer(_hostContainer);
        // Default request: a plain GET without query string whose session id did not
        // change and that counts as already processed - tests override what they need.
        when(_request.getRequestURI()).thenReturn( "/someRequest");
        when(_request.getMethod()).thenReturn("GET");
        when(_request.getQueryString()).thenReturn(null);
        when(_request.getContext()).thenReturn(_contextContainer);
        when(_request.getNote(eq(RequestTrackingHostValve.REQUEST_PROCESSED))).thenReturn(Boolean.TRUE);
        when(_request.getNote(eq(RequestTrackingHostValve.SESSION_ID_CHANGED))).thenReturn(Boolean.FALSE);
    }

    /**
     * Creates the valve under test with a pattern ignoring static resources; reading the
     * "Set-Cookie" headers is delegated to the concrete subclass via
     * {@link #getSetCookieHeaders(Response)} (implementation differs per tomcat version).
     */
    @Nonnull
    protected RequestTrackingHostValve createSessionTrackerValve() {
        return new RequestTrackingHostValve(".*\\.(png|gif|jpg|css|js|ico)$", "somesessionid", _service, Statistics.create(),
                new AtomicBoolean( true ), new CurrentRequest()) {
            @Override
            protected String[] getSetCookieHeaders(final Response response) {
                return RequestTrackingHostValveTest.this.getSetCookieHeaders(response);
            }
        };
    }

    /** Reads the "Set-Cookie" response headers - implemented by tomcat-version specific subclasses. */
    protected abstract String[] getSetCookieHeaders(final Response response);

    @AfterMethod
    public void tearDown() throws Exception {
        reset( _service,
                _nextValve,
                _request,
                _response );
    }

    /** The valve must expose the session cookie name it was constructed with. */
    @Test
    public final void testGetSessionCookieName() throws IOException, ServletException {
        final RequestTrackingHostValve cut = new RequestTrackingHostValve(null, "foo", _service, Statistics.create(),
                new AtomicBoolean( true ), new CurrentRequest()) {
            @Override
            protected String[] getSetCookieHeaders(final Response response) {
                final Collection<String> result = response.getHeaders("Set-Cookie");
                return result.toArray(new String[result.size()]);
            }
        };
        assertEquals(cut.getSessionCookieName(), "foo");
    }

    /** After invoke(), the valve must mark the request as processed via a request note. */
    @Test
    public final void testProcessRequestNotePresent() throws IOException, ServletException {
        _sessionTrackerValve.invoke( _request, _response );
        verify( _service, never() ).backupSession( anyString(), anyBoolean(), anyString() );
        // NOTE(review): setUp stubs the note under REQUEST_PROCESSED while this verifies
        // REQUEST_PROCESS - confirm these are intentionally different constants.
        verify(_request).setNote(eq(RequestTrackingHostValve.REQUEST_PROCESS), eq(Boolean.TRUE));
    }

    /** No requested session id and no session cookie in the response: no backup must happen. */
    @Test
    public final void testBackupSessionNotInvokedWhenNoSessionIdPresent() throws IOException, ServletException {
        when( _request.getRequestedSessionId() ).thenReturn( null );
        when( _response.getHeader( eq( "Set-Cookie" ) ) ).thenReturn( null );
        _sessionTrackerValve.invoke( _request, _response );
        verify( _service, never() ).backupSession( anyString(), anyBoolean(), anyString() );
    }

    /** A session cookie set on the response (new session) must trigger a backup of that session. */
    @Test
    public final void testBackupSessionInvokedWhenResponseCookiePresent() throws IOException, ServletException {
        when( _request.getRequestedSessionId() ).thenReturn( null );
        final Cookie cookie = new Cookie( _sessionTrackerValve.getSessionCookieName(), "foo" );
        setupGetResponseSetCookieHeadersExpectations(_response, new String[]{generateCookieString( cookie )});
        _sessionTrackerValve.invoke( _request, _response );
        verify( _service ).backupSession( eq( "foo" ), eq( false), anyString() );
    }

    /**
     * When the session id changed during the request (relocation), the backup must be
     * performed under the new id with the sessionIdChanged flag set.
     */
    @Test
    public final void testChangeSessionIdForRelocatedSession() throws IOException, ServletException {
        final String sessionId = "bar";
        final String newSessionId = "newId";
        when(_request.getNote(eq(RequestTrackingHostValve.SESSION_ID_CHANGED))).thenReturn(Boolean.TRUE);
        when( _request.getRequestedSessionId() ).thenReturn( sessionId );
        final Cookie cookie = new Cookie( _sessionTrackerValve.getSessionCookieName(), newSessionId );
        setupGetResponseSetCookieHeadersExpectations(_response, new String[]{generateCookieString( cookie )});
        _sessionTrackerValve.invoke( _request, _response );
        verify( _service ).backupSession( eq( newSessionId ), eq( true ), anyString() );
    }

    /**
     * Even for resources matching the ignore pattern (here *.gif) the service must be told
     * that the request finished, so per-request bookkeeping is released.
     */
    @Test
    public final void testRequestFinishedShouldBeInvokedForIgnoredResources() throws IOException, ServletException {
        when( _request.getRequestedSessionId() ).thenReturn( "foo" );
        when(_request.getRequestURI()).thenReturn("/pixel.gif");
        _sessionTrackerValve.invoke( _request, _response );
        verify( _service ).requestFinished( eq( "foo" ), anyString() );
    }

    /** Stubs the response mock so that the given Set-Cookie headers are returned - tomcat-version specific. */
    protected abstract void setupGetResponseSetCookieHeadersExpectations(Response response, String[] result);

    /**
     * Renders the given cookie as a "Set-Cookie" header value, the way tomcat would
     * write it to the response.
     */
    @Nonnull
    protected String generateCookieString(final Cookie cookie) {
        final StringBuffer sb = new StringBuffer();
        ServerCookie.appendCookieValue
        (sb, cookie.getVersion(), cookie.getName(), cookie.getValue(),
             cookie.getPath(), cookie.getDomain(), cookie.getComment(),
             cookie.getMaxAge(), cookie.getSecure(), true);
        final String setSessionCookieHeader = sb.toString();
        return setSessionCookieHeader;
    }
}
| apache-2.0 |
ty1er/incubator-asterixdb | hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java | 15726 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.rewriter.rules.subplan;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.ListSet;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
import org.apache.hyracks.algebricks.rewriter.util.PhysicalOptimizationsUtil;
/**
* The rule searches for SUBPLAN operator with a optional PROJECT operator and
* an AGGREGATE followed by a join operator.
*
* <pre>
* Before
*
* plan__parent
* SUBPLAN {
* PROJECT?
* AGGREGATE
* plan__nested_A
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
*
* where $condition does not equal a constant true.
*
* After (This is a general application of the rule, specifics may vary based on the query plan.)
*
* plan__parent
* GROUP_BY {
* PROJECT?
* AGGREGATE
* plan__nested_A
* SELECT( algebricks:not( is_null( $right ) ) )
* NESTED_TUPLE_SOURCE
* }
* SUBPLAN {
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
* </pre>
*
* @author prestonc
*/
public class IntroduceGroupByForSubplanRule implements IAlgebraicRewriteRule {

    /** This rule only fires on the bottom-up (post) pass. */
    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        return false;
    }

    /**
     * Matches SUBPLAN { PROJECT? AGGREGATE ... JOIN ... } and pulls the aggregate part
     * out into a GROUP_BY above the subplan, as described in the class javadoc.
     *
     * @return true iff the plan was rewritten
     */
    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
        if (op0.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
            return false;
        }
        SubplanOperator subplan = (SubplanOperator) op0;
        // Walk to the last nested plan of the subplan; only that one is considered.
        Iterator<ILogicalPlan> plansIter = subplan.getNestedPlans().iterator();
        ILogicalPlan p = null;
        while (plansIter.hasNext()) {
            p = plansIter.next();
        }
        if (p == null) {
            return false;
        }
        if (p.getRoots().size() != 1) {
            return false;
        }
        Mutable<ILogicalOperator> subplanRoot = p.getRoots().get(0);
        AbstractLogicalOperator op1 = (AbstractLogicalOperator) subplanRoot.getValue();
        // botRef tracks the lowest operator of the PROJECT?/AGGREGATE prefix that will be
        // moved into the group-by; its input is later redirected to the new SELECT.
        Mutable<ILogicalOperator> botRef = subplanRoot;
        AbstractLogicalOperator op2;
        // Project is optional
        if (op1.getOperatorTag() != LogicalOperatorTag.PROJECT) {
            op2 = op1;
        } else {
            ProjectOperator project = (ProjectOperator) op1;
            botRef = project.getInputs().get(0);
            op2 = (AbstractLogicalOperator) botRef.getValue();
        }
        if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
            return false;
        }
        AggregateOperator aggregate = (AggregateOperator) op2;
        // Collect the free variables (used but not produced) on the single-input chain
        // from the aggregate down to the join.
        Set<LogicalVariable> free = new HashSet<LogicalVariable>();
        VariableUtilities.getUsedVariables(aggregate, free);
        Mutable<ILogicalOperator> op3Ref = aggregate.getInputs().get(0);
        AbstractLogicalOperator op3 = (AbstractLogicalOperator) op3Ref.getValue();
        while (op3.getInputs().size() == 1) {
            Set<LogicalVariable> prod = new HashSet<LogicalVariable>();
            VariableUtilities.getProducedVariables(op3, prod);
            free.removeAll(prod);
            VariableUtilities.getUsedVariables(op3, free);
            botRef = op3Ref;
            op3Ref = op3.getInputs().get(0);
            op3 = (AbstractLogicalOperator) op3Ref.getValue();
        }
        // The first multi-input operator must be an (outer) join whose condition is not
        // the constant TRUE (see class javadoc).
        if (op3.getOperatorTag() != LogicalOperatorTag.INNERJOIN
                && op3.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
            return false;
        }
        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op3;
        if (join.getCondition().getValue() == ConstantExpression.TRUE) {
            return false;
        }
        VariableUtilities.getUsedVariables(join, free);
        AbstractLogicalOperator b0 = (AbstractLogicalOperator) join.getInputs().get(0).getValue();
        // see if there's an NTS at the end of the pipeline
        NestedTupleSourceOperator outerNts = getNts(b0);
        if (outerNts == null) {
            AbstractLogicalOperator b1 = (AbstractLogicalOperator) join.getInputs().get(1).getValue();
            outerNts = getNts(b1);
            if (outerNts == null) {
                return false;
            }
        }
        // Derive group-by keys: preferably a functional-dependency head that determines
        // all free variables, otherwise fall back to live-and-produced variables below.
        Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
        if (pkVars == null || pkVars.size() < 1) {
            // there is no non-trivial primary key, group-by keys are all live variables
            // that were produced by descendant or self
            ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
            pkVars = new HashSet<LogicalVariable>();
            //get live variables
            VariableUtilities.getLiveVariables(subplanInput, pkVars);
            //get produced variables
            Set<LogicalVariable> producedVars = new HashSet<LogicalVariable>();
            VariableUtilities.getProducedVariablesInDescendantsAndSelf(subplanInput, producedVars);
            //retain the intersection
            pkVars.retainAll(producedVars);
        }
        AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);
        // Pick a variable from the join's right branch to test for "missing", so that
        // non-matching (padded) tuples can be filtered out inside the group-by.
        Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
        LogicalVariable testForNull = null;
        AbstractLogicalOperator right = (AbstractLogicalOperator) rightRef.getValue();
        switch (right.getOperatorTag()) {
            case UNNEST: {
                UnnestOperator innerUnnest = (UnnestOperator) right;
                // Select [ $y != null ]
                testForNull = innerUnnest.getVariable();
                break;
            }
            case RUNNINGAGGREGATE: {
                ILogicalOperator inputToRunningAggregate = right.getInputs().get(0).getValue();
                Set<LogicalVariable> producedVars = new ListSet<LogicalVariable>();
                VariableUtilities.getProducedVariables(inputToRunningAggregate, producedVars);
                if (!producedVars.isEmpty()) {
                    // Select [ $y != null ]
                    testForNull = producedVars.iterator().next();
                }
                break;
            }
            case DATASOURCESCAN: {
                DataSourceScanOperator innerScan = (DataSourceScanOperator) right;
                // Select [ $y != null ]
                if (innerScan.getVariables().size() == 1) {
                    testForNull = innerScan.getVariables().get(0);
                }
                break;
            }
            default:
                break;
        }
        // No suitable variable found: introduce an ASSIGN of TRUE on the right branch
        // so that there is always something to test.
        if (testForNull == null) {
            testForNull = context.newVar();
            AssignOperator tmpAsgn = new AssignOperator(testForNull,
                    new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
            tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
            rightRef.setValue(tmpAsgn);
            context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
        }
        // Build SELECT( not( is-missing( $testForNull ) ) ) to be placed under the
        // aggregate inside the new group-by's nested plan.
        IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.IS_MISSING);
        ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
        IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
        ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
                new MutableObject<ILogicalExpression>(isNullTest));
        SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
                null);
        // Rewire the plan: the group-by replaces the subplan at opRef, the old
        // PROJECT?/AGGREGATE prefix becomes the group-by's nested plan (fed by
        // NTS -> SELECT), and the subplan keeps only the join part.
        GroupByOperator g = new GroupByOperator();
        Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
        NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
        opRef.setValue(g);
        selectNonNull.getInputs().add(new MutableObject<ILogicalOperator>(nts));
        List<Mutable<ILogicalOperator>> prodInpList = botRef.getValue().getInputs();
        prodInpList.clear();
        prodInpList.add(new MutableObject<ILogicalOperator>(selectNonNull));
        ILogicalPlan gPlan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(subplanRoot.getValue()));
        g.getNestedPlans().add(gPlan);
        subplanRoot.setValue(op3Ref.getValue());
        g.getInputs().add(newSubplanRef);
        // All other live variables are carried through as decoration variables.
        HashSet<LogicalVariable> underVars = new HashSet<LogicalVariable>();
        VariableUtilities.getLiveVariables(subplan.getInputs().get(0).getValue(), underVars);
        underVars.removeAll(pkVars);
        Map<LogicalVariable, LogicalVariable> mappedVars = buildVarExprList(pkVars, context, g, g.getGroupByList());
        context.updatePrimaryKeys(mappedVars);
        for (LogicalVariable uv : underVars) {
            g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(null,
                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(uv))));
        }
        // Recompute type environments for the rewired operators.
        OperatorPropertiesUtil.typeOpRec(subplanRoot, context);
        OperatorPropertiesUtil.typeOpRec(gPlan.getRoots().get(0), context);
        context.computeAndSetTypeEnvironmentForOperator(g);
        return true;
    }

    /**
     * Walks down a single-input chain from {@code op} and returns the
     * NESTED_TUPLE_SOURCE at its end, or null if the chain branches first or
     * no NTS is found.
     */
    private NestedTupleSourceOperator getNts(AbstractLogicalOperator op) {
        AbstractLogicalOperator alo = op;
        do {
            if (alo.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
                return (NestedTupleSourceOperator) alo;
            }
            if (alo.getInputs().size() != 1) {
                return null;
            }
            alo = (AbstractLogicalOperator) alo.getInputs().get(0).getValue();
        } while (true);
    }

    /**
     * Computes candidate group-by variables: the head of the first functional
     * dependency whose tail covers all live free variables of {@code op}, or null
     * if no FDs are known or none covers them.
     */
    protected Set<LogicalVariable> computeGbyVars(AbstractLogicalOperator op, Set<LogicalVariable> freeVars,
            IOptimizationContext context) throws AlgebricksException {
        PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(op, context);
        List<FunctionalDependency> fdList = context.getFDList(op);
        if (fdList == null) {
            return null;
        }
        // check if any of the FDs is a key
        List<LogicalVariable> all = new ArrayList<LogicalVariable>();
        VariableUtilities.getLiveVariables(op, all);
        all.retainAll(freeVars);
        for (FunctionalDependency fd : fdList) {
            if (fd.getTail().containsAll(all)) {
                return new HashSet<LogicalVariable>(fd.getHead());
            }
        }
        return null;
    }

    /**
     * For each variable in {@code vars}, creates a fresh variable, adds the pair
     * (old, ref-to-new) to {@code outVeList} (the group-by key list) and substitutes
     * old -&gt; new both in the group-by's nested plans and in the operator below it.
     *
     * @return mapping from each original variable to its replacement
     */
    private Map<LogicalVariable, LogicalVariable> buildVarExprList(Collection<LogicalVariable> vars,
            IOptimizationContext context, GroupByOperator g,
            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> outVeList) throws AlgebricksException {
        Map<LogicalVariable, LogicalVariable> m = new HashMap<LogicalVariable, LogicalVariable>();
        for (LogicalVariable ov : vars) {
            LogicalVariable newVar = context.newVar();
            ILogicalExpression varExpr = new VariableReferenceExpression(newVar);
            outVeList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(ov,
                    new MutableObject<ILogicalExpression>(varExpr)));
            for (ILogicalPlan p : g.getNestedPlans()) {
                for (Mutable<ILogicalOperator> r : p.getRoots()) {
                    OperatorManipulationUtil.substituteVarRec((AbstractLogicalOperator) r.getValue(), ov, newVar, true,
                            context);
                }
            }
            AbstractLogicalOperator opUnder = (AbstractLogicalOperator) g.getInputs().get(0).getValue();
            OperatorManipulationUtil.substituteVarRec(opUnder, ov, newVar, true, context);
            m.put(ov, newVar);
        }
        return m;
    }
}
| apache-2.0 |
byzhang/terrastore | src/main/java/terrastore/store/features/Update.java | 3088 | /**
* Copyright 2009 - 2011 Sergio Bossa (sergio.bossa@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package terrastore.store.features;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.msgpack.MessagePackable;
import org.msgpack.MessageTypeException;
import org.msgpack.MessageUnpackable;
import org.msgpack.Packer;
import org.msgpack.Unpacker;
import terrastore.util.io.MsgPackUtils;
/**
* Update object carrying data about the update function, timeout and parameters.
*
* @author Sergio Bossa
*/
public class Update implements MessagePackable, MessageUnpackable, Serializable {

    private static final long serialVersionUID = 12345678901L;
    //
    private String functionName;
    private long timeoutInMillis;
    private Map<String, Object> parameters;

    /**
     * Creates an update descriptor.
     *
     * @param functionName Name of the update function to execute.
     * @param timeoutInMillis Maximum time in milliseconds allowed for the update.
     * @param parameters Parameters handed to the update function.
     */
    public Update(String functionName, long timeoutInMillis, Map<String, Object> parameters) {
        this.functionName = functionName;
        this.timeoutInMillis = timeoutInMillis;
        this.parameters = parameters;
    }

    /** No-arg constructor required for message unpacking / serialization. */
    public Update() {
    }

    /** @return the name of the update function */
    public String getFunctionName() {
        return functionName;
    }

    /** @return the update timeout in milliseconds */
    public long getTimeoutInMillis() {
        return timeoutInMillis;
    }

    /** @return the parameters passed to the update function */
    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Serializes this update. The wire order (function name, timeout, parameters)
     * must stay in sync with {@link #messageUnpack(Unpacker)}.
     */
    @Override
    public void messagePack(Packer packer) throws IOException {
        MsgPackUtils.packString(packer, functionName);
        MsgPackUtils.packLong(packer, timeoutInMillis);
        MsgPackUtils.packGenericMap(packer, parameters);
    }

    /** Deserializes this update, reading fields in the order written by {@link #messagePack(Packer)}. */
    @Override
    public void messageUnpack(Unpacker unpacker) throws IOException, MessageTypeException {
        functionName = MsgPackUtils.unpackString(unpacker);
        timeoutInMillis = MsgPackUtils.unpackLong(unpacker);
        parameters = MsgPackUtils.unpackGenericMap(unpacker);
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Update)) {
            return false;
        }
        Update that = (Update) obj;
        return new EqualsBuilder().
                append(this.functionName, that.functionName).
                append(this.timeoutInMillis, that.timeoutInMillis).
                append(this.parameters, that.parameters).
                isEquals();
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(functionName).append(timeoutInMillis).append(parameters).toHashCode();
    }
}
| apache-2.0 |
4455jkjh/apktool | dexlib2/src/main/java/org/jf/dexlib2/writer/pool/StringPool.java | 2633 | /*
* Copyright 2012, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib2.writer.pool;
import org.jf.dexlib2.iface.reference.StringReference;
import org.jf.dexlib2.writer.StringSection;
import org.jf.util.ExceptionWithContext;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Pool of interned strings for a dex file being written. Interning records the
 * string with a placeholder index of 0; real indexes are assigned when the pool
 * is sorted and written out.
 */
public class StringPool extends StringTypeBasePool implements StringSection<CharSequence, StringReference> {

    public StringPool(@Nonnull DexPool dexPool) {
        super(dexPool);
    }

    /** Interns the given string with a placeholder index. */
    public void intern(@Nonnull CharSequence string) {
        internedItems.put(string.toString(), 0);
    }

    /** Interns the given string, doing nothing when it is null. */
    public void internNullable(@Nullable CharSequence string) {
        if (string == null) {
            return;
        }
        intern(string);
    }

    /**
     * Looks up the index previously assigned to the referenced string.
     *
     * @throws ExceptionWithContext if the string was never interned
     */
    @Override
    public int getItemIndex(@Nonnull StringReference key) {
        final Integer index = internedItems.get(key.toString());
        if (index != null) {
            return index;
        }
        throw new ExceptionWithContext("Item not found.: %s", key.toString());
    }

    /**
     * @return true when more than 65536 strings are interned, i.e. some index no
     * longer fits into the 16-bit operand of const-string and const-string/jumbo
     * must be used.
     */
    @Override
    public boolean hasJumboIndexes() {
        final int count = internedItems.size();
        return count > 65536;
    }
}
| apache-2.0 |
ern/elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/allocation/TrainedModelAllocationClusterServiceTests.java | 40420 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.ml.inference.allocation;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.NodesShutdownMetadata;
import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction;
import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelAllocationStateAction;
import org.elasticsearch.xpack.core.ml.inference.allocation.AllocationState;
import org.elasticsearch.xpack.core.ml.inference.allocation.RoutingState;
import org.elasticsearch.xpack.core.ml.inference.allocation.RoutingStateAndReason;
import org.elasticsearch.xpack.core.ml.inference.allocation.TrainedModelAllocation;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.job.NodeLoadDetector;
import org.elasticsearch.xpack.ml.process.MlMemoryTracker;
import org.junit.Before;
import java.util.Collections;
import java.util.Set;
import java.util.function.Function;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TrainedModelAllocationClusterServiceTests extends ESTestCase {
    private ClusterService clusterService;
    private NodeLoadDetector nodeLoadDetector;

    /**
     * Wires a mocked {@link ClusterService} exposing the ML memory settings and a
     * {@link NodeLoadDetector} backed by a memory tracker that reports itself as
     * recently refreshed.
     */
    @Before
    public void setupObjects() {
        clusterService = mock(ClusterService.class);
        ClusterSettings clusterSettings = new ClusterSettings(
            Settings.EMPTY,
            Sets.newHashSet(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, MachineLearning.USE_AUTO_MACHINE_MEMORY_PERCENT)
        );
        when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
        MlMemoryTracker memoryTracker = mock(MlMemoryTracker.class);
        when(memoryTracker.isRecentlyRefreshed()).thenReturn(true);
        nodeLoadDetector = new NodeLoadDetector(memoryTracker);
    }
    /**
     * Exercises updateModelRoutingTable: a STARTED update on an existing node/model is
     * applied; updates referencing a missing node or model throw; STOPPED updates are
     * tolerated for missing entries and remove an existing routing entry.
     */
    public void testUpdateModelRoutingTable() {
        String modelId = "existing-model";
        String nodeId = "ml-node-with-room";
        // Cluster with one ML node and one allocation that already routes to that node.
        ClusterState currentState = ClusterState.builder(new ClusterName("testUpdateModelRoutingTable"))
            .nodes(DiscoveryNodes.builder().add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes())).build())
            .metadata(
                Metadata.builder()
                    .putCustom(
                        TrainedModelAllocationMetadata.NAME,
                        TrainedModelAllocationMetadata.Builder.empty()
                            .addNewAllocation(
                                modelId,
                                TrainedModelAllocation.Builder.empty(newParams(modelId, 10_000L)).addNewRoutingEntry(nodeId)
                            )
                            .build()
                    )
                    .build()
            )
            .build();
        // A stopping allocation must not be mutated by routing-table updates.
        assertThatStoppingAllocationPreventsMutation(
            state -> TrainedModelAllocationClusterService.updateModelRoutingTable(
                state,
                new UpdateTrainedModelAllocationStateAction.Request(nodeId, modelId, new RoutingStateAndReason(RoutingState.STARTED, ""))
            ),
            currentState
        );
        // STARTED update on an existing entry is reflected in the routing table.
        ClusterState newState = TrainedModelAllocationClusterService.updateModelRoutingTable(
            currentState,
            new UpdateTrainedModelAllocationStateAction.Request(nodeId, modelId, new RoutingStateAndReason(RoutingState.STARTED, ""))
        );
        assertThat(
            TrainedModelAllocationMetadata.fromState(newState).getModelAllocation(modelId).getNodeRoutingTable().get(nodeId).getState(),
            equalTo(RoutingState.STARTED)
        );
        // STARTED updates for unknown node or model must fail loudly.
        expectThrows(
            ResourceNotFoundException.class,
            () -> TrainedModelAllocationClusterService.updateModelRoutingTable(
                currentState,
                new UpdateTrainedModelAllocationStateAction.Request(
                    "missingNode",
                    modelId,
                    new RoutingStateAndReason(RoutingState.STARTED, "")
                )
            )
        );
        expectThrows(
            ResourceNotFoundException.class,
            () -> TrainedModelAllocationClusterService.updateModelRoutingTable(
                currentState,
                new UpdateTrainedModelAllocationStateAction.Request(
                    nodeId,
                    "missingModel",
                    new RoutingStateAndReason(RoutingState.STARTED, "")
                )
            )
        );
        // TEST Stopped
        // We should allow a "stopped" update on missing models and nodes as entries may have already been deleted
        TrainedModelAllocationClusterService.updateModelRoutingTable(
            currentState,
            new UpdateTrainedModelAllocationStateAction.Request("missingNode", modelId, new RoutingStateAndReason(RoutingState.STOPPED, ""))
        );
        TrainedModelAllocationClusterService.updateModelRoutingTable(
            currentState,
            new UpdateTrainedModelAllocationStateAction.Request(nodeId, "missingModel", new RoutingStateAndReason(RoutingState.STOPPED, ""))
        );
        // A STOPPED update on an existing entry removes it from the routing table.
        ClusterState updateState = TrainedModelAllocationClusterService.updateModelRoutingTable(
            currentState,
            new UpdateTrainedModelAllocationStateAction.Request(nodeId, modelId, new RoutingStateAndReason(RoutingState.STOPPED, ""))
        );
        assertThat(
            TrainedModelAllocationMetadata.fromState(updateState).getModelAllocation(modelId).getNodeRoutingTable(),
            not(hasKey(nodeId))
        );
    }
public void testRemoveAllocation() {
    String modelId = "remove-allocation";
    // Removing an allocation that was never created must fail loudly.
    ClusterState emptyState = ClusterState.builder(new ClusterName("testRemoveAllocation"))
        .metadata(Metadata.builder().build())
        .build();
    expectThrows(
        ResourceNotFoundException.class,
        () -> TrainedModelAllocationClusterService.removeAllocation(emptyState, modelId)
    );
    // Seed a cluster state that holds a single allocation for the model.
    ClusterState stateWithModel = ClusterState.builder(new ClusterName("testRemoveAllocation"))
        .metadata(
            Metadata.builder()
                .putCustom(
                    TrainedModelAllocationMetadata.NAME,
                    TrainedModelAllocationMetadata.Builder.empty()
                        .addNewAllocation(modelId, TrainedModelAllocation.Builder.empty(newParams(modelId, randomNonNegativeLong())))
                        .build()
                )
                .build()
        )
        .build();
    assertThat(TrainedModelAllocationMetadata.fromState(stateWithModel).getModelAllocation(modelId), is(not(nullValue())));
    // After removal the metadata must no longer know the model.
    ClusterState afterRemoval = TrainedModelAllocationClusterService.removeAllocation(stateWithModel, modelId);
    assertThat(TrainedModelAllocationMetadata.fromState(afterRemoval).getModelAllocation(modelId), is(nullValue()));
}
public void testRemoveAllAllocations() {
    // With no allocation metadata present, the call returns a state equal to the input.
    ClusterState emptyState = ClusterState.builder(new ClusterName("testRemoveAllAllocations"))
        .metadata(Metadata.builder().build())
        .build();
    assertThat(TrainedModelAllocationClusterService.removeAllAllocations(emptyState), equalTo(emptyState));
    // With a random set of allocations present, every one of them must be wiped.
    ClusterState stateWithAllocations = ClusterState.builder(new ClusterName("testRemoveAllAllocations"))
        .metadata(
            Metadata.builder()
                .putCustom(
                    TrainedModelAllocationMetadata.NAME,
                    TrainedModelAllocationMetadataTests.randomInstance()
                )
                .build()
        )
        .build();
    ClusterState wiped = TrainedModelAllocationClusterService.removeAllAllocations(stateWithAllocations);
    assertThat(TrainedModelAllocationMetadata.fromState(wiped).modelAllocations(), is(anEmptyMap()));
}
/**
 * Creating a new model allocation routes only to suitable, current-version ML
 * nodes; unsuitable ML nodes get a FAILED routing entry, and non-ML,
 * shutting-down, and old-version nodes get no entry at all.
 */
public void testCreateAllocation() {
    // One of each interesting node flavour: roomy ML node, memory-starved ML
    // node, non-ML node, ML node marked as shutting down, and an old-version ML node.
    ClusterState currentState = ClusterState.builder(new ClusterName("testCreateAllocation"))
        .nodes(
            DiscoveryNodes.builder()
                .add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
                .add(buildNode("ml-node-without-room", true, 1000L))
                .add(buildNode("not-ml-node", false, ByteSizeValue.ofGb(4).getBytes()))
                .add(buildNode("ml-node-shutting-down", true, ByteSizeValue.ofGb(4).getBytes()))
                .add(buildOldNode("old-ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
                .build()
        )
        .metadata(Metadata.builder().putCustom(NodesShutdownMetadata.TYPE, shutdownMetadata("ml-node-shutting-down")))
        .build();
    TrainedModelAllocationClusterService trainedModelAllocationClusterService = createClusterService();
    ClusterState newState = trainedModelAllocationClusterService.createModelAllocation(currentState, newParams("new-model", 150));
    TrainedModelAllocation createdAllocation = TrainedModelAllocationMetadata.fromState(newState).getModelAllocation("new-model");
    assertThat(createdAllocation, is(not(nullValue())));
    // Exactly the two current-version ML nodes get routing entries; the roomy
    // one starts, the starved one is failed with an explanatory reason.
    assertThat(createdAllocation.getNodeRoutingTable().keySet(), hasSize(2));
    assertThat(createdAllocation.getNodeRoutingTable(), hasKey("ml-node-with-room"));
    assertThat(createdAllocation.getNodeRoutingTable().get("ml-node-with-room").getState(), equalTo(RoutingState.STARTING));
    assertThat(createdAllocation.getNodeRoutingTable(), hasKey("ml-node-without-room"));
    assertThat(createdAllocation.getNodeRoutingTable().get("ml-node-without-room").getState(), equalTo(RoutingState.FAILED));
    assertThat(
        createdAllocation.getNodeRoutingTable().get("ml-node-without-room").getReason(),
        containsString("This node has insufficient available memory.")
    );
    // Creating the same allocation a second time is an error.
    expectThrows(
        ResourceAlreadyExistsException.class,
        () -> trainedModelAllocationClusterService.createModelAllocation(newState, newParams("new-model", 150))
    );
}
/**
 * While ML reset mode is active, creating a model allocation must be rejected
 * with an {@link ElasticsearchStatusException}; once reset mode is cleared,
 * the identical request must succeed.
 */
public void testCreateAllocationWhileResetModeIsTrue() {
    TrainedModelAllocationClusterService trainedModelAllocationClusterService = createClusterService();
    expectThrows(
        ElasticsearchStatusException.class,
        () -> trainedModelAllocationClusterService.createModelAllocation(stateWithResetMode(true), newParams("new-model", 150))
    );
    // Shouldn't throw
    trainedModelAllocationClusterService.createModelAllocation(stateWithResetMode(false), newParams("new-model", 150));
}

/**
 * One roomy ML node; only the {@code MlMetadata} reset-mode flag varies.
 * Previously this 8-line construction was duplicated with the cluster name
 * copy-pasted from testCreateAllocation; the name now matches this test.
 */
private static ClusterState stateWithResetMode(boolean isResetMode) {
    return ClusterState.builder(new ClusterName("testCreateAllocationWhileResetModeIsTrue"))
        .nodes(
            DiscoveryNodes.builder()
                .add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
                .build()
        )
        .metadata(Metadata.builder().putCustom(MlMetadata.TYPE, new MlMetadata.Builder().isResetMode(isResetMode).build()))
        .build();
}
/**
 * Node-churn reconciliation: new eligible ML nodes gain STARTING routing
 * entries, memory-starved ML nodes gain FAILED entries, and entries for nodes
 * that are absent or shutting down are dropped; surviving entries keep their state.
 */
public void testAddRemoveAllocationNodes() {
    ClusterState currentState = ClusterState.builder(new ClusterName("testAddRemoveAllocationNodes"))
        .nodes(
            DiscoveryNodes.builder()
                .add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
                .add(buildNode("new-ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
                .add(buildNode("ml-node-without-room", true, 1000L))
                .add(buildNode("not-ml-node", false, ByteSizeValue.ofGb(4).getBytes()))
                .add(buildNode("ml-node-shutting-down", true, ByteSizeValue.ofGb(4).getBytes()))
                .add(buildOldNode("old-versioned-ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
                .build()
        )
        .metadata(
            Metadata.builder()
                .putCustom(NodesShutdownMetadata.TYPE, shutdownMetadata("ml-node-shutting-down"))
                .putCustom(
                    TrainedModelAllocationMetadata.NAME,
                    TrainedModelAllocationMetadata.Builder.empty()
                        // NOTE(review): routing entries below reference
                        // "old-ml-node-with-room" while the discovery node above is
                        // "old-versioned-ml-node-with-room" — possibly intentional
                        // (exercises the removed-node path either way), but confirm.
                        .addNewAllocation(
                            "model-1",
                            TrainedModelAllocation.Builder.empty(newParams("model-1", 10_000))
                                .addNewRoutingEntry("ml-node-with-room")
                                .updateExistingRoutingEntry("ml-node-with-room", started())
                                .addNewRoutingEntry("old-ml-node-with-room")
                                .updateExistingRoutingEntry("old-ml-node-with-room", started())
                                .addNewRoutingEntry("ml-node-shutting-down")
                        )
                        // model-2 has no entry on any surviving current-version node.
                        .addNewAllocation(
                            "model-2",
                            TrainedModelAllocation.Builder.empty(newParams("model-2", 10_000))
                                .addNewRoutingEntry("old-ml-node-with-room")
                                .updateExistingRoutingEntry("old-ml-node-with-room", started())
                        )
                        .build()
                )
        )
        .build();
    TrainedModelAllocationClusterService trainedModelAllocationClusterService = createClusterService();
    // Stopping shouldn't cause any updates
    assertThatStoppingAllocationPreventsMutation(
        trainedModelAllocationClusterService::addRemoveAllocationNodes,
        currentState
    );
    ClusterState modified = trainedModelAllocationClusterService.addRemoveAllocationNodes(currentState);
    TrainedModelAllocationMetadata trainedModelAllocationMetadata = TrainedModelAllocationMetadata.fromState(modified);
    assertThat(trainedModelAllocationMetadata.modelAllocations().keySet(), hasSize(2));
    assertThat(trainedModelAllocationMetadata.modelAllocations(), allOf(hasKey("model-1"), hasKey("model-2")));
    // model-1 keeps its STARTED entry, gains the new node (STARTING) and a
    // FAILED entry for the starved node; stale entries are gone.
    assertThat(trainedModelAllocationMetadata.getModelAllocation("model-1").getNodeRoutingTable().keySet(), hasSize(3));
    assertThat(
        trainedModelAllocationMetadata.getModelAllocation("model-1").getNodeRoutingTable(),
        allOf(hasKey("ml-node-with-room"), hasKey("new-ml-node-with-room"), hasKey("ml-node-without-room"))
    );
    assertNodeState(trainedModelAllocationMetadata, "model-1", "ml-node-with-room", RoutingState.STARTED);
    assertNodeState(trainedModelAllocationMetadata, "model-1", "new-ml-node-with-room", RoutingState.STARTING);
    assertNodeState(trainedModelAllocationMetadata, "model-1", "ml-node-without-room", RoutingState.FAILED);
    // model-2 had no surviving entries, so all three are freshly created.
    assertThat(trainedModelAllocationMetadata.getModelAllocation("model-2").getNodeRoutingTable().keySet(), hasSize(3));
    assertThat(
        trainedModelAllocationMetadata.getModelAllocation("model-2").getNodeRoutingTable(),
        allOf(hasKey("ml-node-with-room"), hasKey("new-ml-node-with-room"), hasKey("ml-node-without-room"))
    );
    assertNodeState(trainedModelAllocationMetadata, "model-2", "ml-node-with-room", RoutingState.STARTING);
    assertNodeState(trainedModelAllocationMetadata, "model-2", "new-ml-node-with-room", RoutingState.STARTING);
    assertNodeState(trainedModelAllocationMetadata, "model-2", "ml-node-without-room", RoutingState.FAILED);
}
public void testShouldAllocateModels() {
    String model1 = "model-1";
    String model2 = "model-2";
    String mlNode1 = "ml-node-with-room";
    String mlNode2 = "new-ml-node-with-room";
    DiscoveryNode mlNode1Node = buildNode(mlNode1, true, ByteSizeValue.ofGb(4).getBytes());
    DiscoveryNode mlNode2Node = buildNode(mlNode2, true, ByteSizeValue.ofGb(4).getBytes());
    ClusterState stateWithTwoNodes = ClusterState.builder(new ClusterName("testShouldAllocateModels"))
        .nodes(DiscoveryNodes.builder().add(mlNode1Node).add(mlNode2Node))
        .build();
    ClusterState stateWithOneNode = ClusterState.builder(new ClusterName("testShouldAllocateModels"))
        .nodes(DiscoveryNodes.builder().add(mlNode1Node))
        .build();
    ClusterState stateWithOneNodeNotMl = ClusterState.builder(new ClusterName("testShouldAllocateModels"))
        .nodes(DiscoveryNodes.builder().add(mlNode1Node).add(buildNode("not-ml-node", false, ByteSizeValue.ofGb(4).getBytes())))
        .build();

    // No allocation metadata in the current state -> nothing could need re-allocating.
    assertThat(
        detectsAllocationChange(
            ClusterState.builder(randomFrom(stateWithOneNodeNotMl, stateWithOneNode, stateWithTwoNodes)).build(),
            withAllocationMetadata(randomFrom(stateWithOneNodeNotMl, stateWithOneNode, stateWithTwoNodes), singleModelMetadata(model1))
        ),
        is(false)
    );
    // Metadata churn without any node changes is ignored.
    ClusterState randomState = randomFrom(stateWithOneNodeNotMl, stateWithOneNode, stateWithTwoNodes);
    assertThat(
        detectsAllocationChange(
            withAllocationMetadata(randomState, TrainedModelAllocationMetadataTests.randomInstance()),
            withAllocationMetadata(randomState, TrainedModelAllocationMetadataTests.randomInstance())
        ),
        is(false)
    );
    // Losing a node that is not even an ML node must not trigger re-allocation.
    assertThat(
        detectsAllocationChange(
            withAllocationMetadata(stateWithOneNode, singleModelMetadata(model1)),
            withAllocationMetadata(stateWithOneNodeNotMl, singleModelMetadata(model1))
        ),
        is(false)
    );
    // Losing an ML node that no model was routed to is equally uninteresting.
    assertThat(
        detectsAllocationChange(
            withAllocationMetadata(stateWithOneNode, singleModelMetadata(model1)),
            withAllocationMetadata(stateWithTwoNodes, singleModelMetadata(model1))
        ),
        is(false)
    );
    // A brand new ML node appearing should trigger re-allocation ...
    assertThat(
        detectsAllocationChange(
            withAllocationMetadata(stateWithTwoNodes, singleModelMetadata(model1)),
            withAllocationMetadata(stateWithOneNode, singleModelMetadata(model1))
        ),
        is(true)
    );
    // ... unless the allocation is already stopping ...
    assertThat(
        detectsAllocationChange(
            withAllocationMetadata(
                stateWithTwoNodes,
                TrainedModelAllocationMetadata.Builder.empty()
                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)).stopAllocation())
                    .build()
            ),
            withAllocationMetadata(stateWithOneNode, singleModelMetadata(model1))
        ),
        is(false)
    );
    // ... or the freshly added ML node is already marked as shutting down.
    assertThat(
        detectsAllocationChange(
            ClusterState.builder(stateWithTwoNodes)
                .metadata(
                    Metadata.builder()
                        .putCustom(TrainedModelAllocationMetadata.NAME, singleModelMetadata(model1))
                        .putCustom(NodesShutdownMetadata.TYPE, shutdownMetadata(mlNode2))
                        .build()
                )
                .build(),
            withAllocationMetadata(stateWithOneNode, singleModelMetadata(model1))
        ),
        is(false)
    );
    // Losing an ML node that models were actually routed to requires re-allocation ...
    assertThat(
        detectsAllocationChange(
            withAllocationMetadata(stateWithOneNode, routedMetadata(model1, model2, mlNode1, mlNode2, false)),
            withAllocationMetadata(stateWithTwoNodes, routedMetadata(model1, model2, mlNode1, mlNode2, false))
        ),
        is(true)
    );
    // ... but not when the allocation routed to the lost node is stopping.
    assertThat(
        detectsAllocationChange(
            withAllocationMetadata(stateWithOneNode, routedMetadata(model1, model2, mlNode1, mlNode2, true)),
            withAllocationMetadata(stateWithTwoNodes, routedMetadata(model1, model2, mlNode1, mlNode2, false))
        ),
        is(false)
    );
}

/** Runs shouldAllocateModels over a synthetic cluster-changed event. */
private static boolean detectsAllocationChange(ClusterState currentState, ClusterState previousState) {
    return TrainedModelAllocationClusterService.shouldAllocateModels(
        new ClusterChangedEvent("test", currentState, previousState)
    );
}

/** Copy of {@code base} whose metadata holds exactly the given allocation metadata. */
private static ClusterState withAllocationMetadata(ClusterState base, TrainedModelAllocationMetadata allocationMetadata) {
    return ClusterState.builder(base)
        .metadata(Metadata.builder().putCustom(TrainedModelAllocationMetadata.NAME, allocationMetadata).build())
        .build();
}

/** Metadata containing one unrouted allocation for {@code modelId} (size 100). */
private static TrainedModelAllocationMetadata singleModelMetadata(String modelId) {
    return TrainedModelAllocationMetadata.Builder.empty()
        .addNewAllocation(modelId, TrainedModelAllocation.Builder.empty(newParams(modelId, 100)))
        .build();
}

/**
 * Metadata with {@code firstModel} routed to {@code firstNode} and
 * {@code secondModel} routed to both nodes, the latter optionally stopping.
 */
private static TrainedModelAllocationMetadata routedMetadata(
    String firstModel,
    String secondModel,
    String firstNode,
    String secondNode,
    boolean secondModelStopping
) {
    TrainedModelAllocation.Builder secondModelAllocation = TrainedModelAllocation.Builder.empty(newParams(secondModel, 100))
        .addNewRoutingEntry(firstNode)
        .addNewRoutingEntry(secondNode);
    if (secondModelStopping) {
        secondModelAllocation = secondModelAllocation.stopAllocation();
    }
    return TrainedModelAllocationMetadata.Builder.empty()
        .addNewAllocation(firstModel, TrainedModelAllocation.Builder.empty(newParams(firstModel, 100)).addNewRoutingEntry(firstNode))
        .addNewAllocation(secondModel, secondModelAllocation)
        .build();
}
public void testSetAllocationToStopping() {
    String modelId = "stopping-allocation";
    // Flipping a non-existent allocation to stopping must fail.
    ClusterState emptyState = ClusterState.builder(new ClusterName("testSetAllocationToStopping"))
        .metadata(Metadata.builder().build())
        .build();
    expectThrows(
        ResourceNotFoundException.class,
        () -> TrainedModelAllocationClusterService.setToStopping(emptyState, modelId)
    );
    // Seed a state holding one allocation, which starts out in state STARTED.
    ClusterState stateWithModel = ClusterState.builder(new ClusterName("testSetAllocationToStopping"))
        .metadata(
            Metadata.builder()
                .putCustom(
                    TrainedModelAllocationMetadata.NAME,
                    TrainedModelAllocationMetadata.Builder.empty()
                        .addNewAllocation(modelId, TrainedModelAllocation.Builder.empty(newParams(modelId, randomNonNegativeLong())))
                        .build()
                )
                .build()
        )
        .build();
    TrainedModelAllocationMetadata before = TrainedModelAllocationMetadata.fromState(stateWithModel);
    assertThat(before.getModelAllocation(modelId), is(not(nullValue())));
    assertThat(before.getModelAllocation(modelId).getAllocationState(), equalTo(AllocationState.STARTED));
    // setToStopping flips only the allocation state; the allocation itself remains.
    ClusterState afterStopping = TrainedModelAllocationClusterService.setToStopping(stateWithModel, modelId);
    assertThat(
        TrainedModelAllocationMetadata.fromState(afterStopping).getModelAllocation(modelId).getAllocationState(),
        equalTo(AllocationState.STOPPING)
    );
}
/**
 * Asserts that {@code mutationFunction} becomes a no-op once every allocation
 * in {@code original} has been flipped to stopping. Does nothing when the
 * state contains no allocations at all.
 *
 * @param mutationFunction the cluster-state transformation under test
 * @param original         the state whose allocations are flipped to stopping
 */
private void assertThatStoppingAllocationPreventsMutation(
    Function<ClusterState, ClusterState> mutationFunction,
    ClusterState original
) {
    TrainedModelAllocationMetadata tempMetadata = TrainedModelAllocationMetadata.fromState(original);
    if (tempMetadata.modelAllocations().isEmpty()) {
        // No allocations -> nothing the mutation could be prevented from touching.
        return;
    }
    // Mark every allocation in the state as stopping.
    TrainedModelAllocationMetadata.Builder builder = TrainedModelAllocationMetadata.builder(original);
    for (String modelId : tempMetadata.modelAllocations().keySet()) {
        builder.getAllocation(modelId).stopAllocation();
    }
    TrainedModelAllocationMetadata metadataWithStopping = builder.build();
    ClusterState originalWithStoppingAllocations = ClusterState.builder(original)
        .metadata(Metadata.builder(original.metadata()).putCustom(TrainedModelAllocationMetadata.NAME, metadataWithStopping).build())
        .build();
    // Applying the mutation must leave the stopping metadata untouched.
    assertThat(
        "setting all allocations to stopping did not prevent mutation",
        TrainedModelAllocationMetadata.fromState(mutationFunction.apply(originalWithStoppingAllocations)),
        equalTo(metadataWithStopping)
    );
}
/**
 * Builds the service under test with empty settings and the test's
 * {@code clusterService}/{@code nodeLoadDetector} fields (declared earlier in
 * this class — presumably mocks; confirm in the class setup).
 */
private TrainedModelAllocationClusterService createClusterService() {
    return new TrainedModelAllocationClusterService(Settings.EMPTY, clusterService, nodeLoadDetector);
}
/** Convenience overload: current-version node. See the four-argument variant. */
private static DiscoveryNode buildNode(String name, boolean isML, long nativeMemory) {
    return buildNode(name, isML, nativeMemory, Version.CURRENT);
}
/**
 * Builds a discovery node advertising the ML node attributes (machine memory,
 * max JVM size, max open jobs) that the allocation service reads.
 *
 * @param name         used as both node name and node id
 * @param isML         true -> the full built-in role set (assumed to include the
 *                     ML role — confirm); false -> only data + master roles
 * @param nativeMemory bytes advertised via the machine-memory node attribute
 * @param version      node version to report
 */
private static DiscoveryNode buildNode(String name, boolean isML, long nativeMemory, Version version) {
    return new DiscoveryNode(
        name,
        name,
        buildNewFakeTransportAddress(),
        MapBuilder.<String, String>newMapBuilder()
            .put(MachineLearning.MACHINE_MEMORY_NODE_ATTR, String.valueOf(nativeMemory))
            .put(MachineLearning.MAX_JVM_SIZE_NODE_ATTR, String.valueOf(10))
            .put(MachineLearning.MAX_OPEN_JOBS_NODE_ATTR, String.valueOf(10))
            .map(),
        isML ? DiscoveryNodeRole.roles() : Set.of(DiscoveryNodeRole.DATA_ROLE, DiscoveryNodeRole.MASTER_ROLE),
        version
    );
}
/** Routing entry for a fully started model on a node, with an empty reason. */
private static RoutingStateAndReason started() {
    return new RoutingStateAndReason(RoutingState.STARTED, "");
}
/** Node pinned to version 7.15.0 — used by these tests as the "old version" ML node. */
private static DiscoveryNode buildOldNode(String name, boolean isML, long nativeMemory) {
    return buildNode(name, isML, nativeMemory, Version.V_7_15_0);
}
/** Minimal deployment task params: only model id and model size matter here. */
private static StartTrainedModelDeploymentAction.TaskParams newParams(String modelId, long modelSize) {
    return new StartTrainedModelDeploymentAction.TaskParams(modelId, modelSize);
}
/** Asserts the routing state recorded for {@code nodeId} in {@code modelId}'s routing table. */
private static void assertNodeState(TrainedModelAllocationMetadata metadata, String modelId, String nodeId, RoutingState routingState) {
    assertThat(metadata.getModelAllocation(modelId).getNodeRoutingTable().get(nodeId).getState(), equalTo(routingState));
}
/** Shutdown metadata marking {@code nodeId} as being removed (type REMOVE). */
private static NodesShutdownMetadata shutdownMetadata(String nodeId) {
    SingleNodeShutdownMetadata nodeShutdown = SingleNodeShutdownMetadata.builder()
        .setNodeId(nodeId)
        .setType(SingleNodeShutdownMetadata.Type.REMOVE)
        .setStartedAtMillis(randomNonNegativeLong())
        .setReason("tests")
        .build();
    return new NodesShutdownMetadata(Collections.singletonMap(nodeId, nodeShutdown));
}
}
| apache-2.0 |
project-ncl/pnc | indy-repository-manager/src/test/java/org/jboss/pnc/indyrepositorymanager/ExcludeInternalRepoByRegexTest.java | 6538 | /**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2022 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.indyrepositorymanager;
import org.apache.commons.io.IOUtils;
import org.commonjava.indy.client.core.Indy;
import org.commonjava.indy.client.core.util.UrlUtils;
import org.commonjava.indy.model.core.Group;
import org.commonjava.indy.model.core.RemoteRepository;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.indy.model.core.StoreType;
import org.jboss.pnc.enums.RepositoryType;
import org.jboss.pnc.indyrepositorymanager.fixture.TestBuildExecution;
import org.jboss.pnc.model.Artifact;
import org.jboss.pnc.spi.repositorymanager.BuildExecution;
import org.jboss.pnc.spi.repositorymanager.RepositoryManagerResult;
import org.jboss.pnc.spi.repositorymanager.model.RepositorySession;
import org.jboss.pnc.test.category.ContainerTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.commonjava.indy.pkg.maven.model.MavenPackageTypeDescriptor.MAVEN_PKG_KEY;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.jboss.pnc.indyrepositorymanager.IndyRepositoryConstants.PUBLIC_GROUP_ID;
import static org.jboss.pnc.indyrepositorymanager.IndyRepositoryConstants.SHARED_IMPORTS_ID;
/**
 * Verifies the "ignored repository patterns" feature: artifacts proxied from a
 * remote repository whose store key matches one of the patterns returned by
 * {@link #getIgnoredRepoPatterns()} must NOT be promoted into the
 * shared-imports hosted repository, while artifacts from a non-matching
 * (external) remote must be.
 */
@Category(ContainerTest.class)
public class ExcludeInternalRepoByRegexTest extends AbstractImportTest {

    private static final String INTERNAL = "internal";
    private static final String EXTERNAL = "external";

    @Override
    protected List<String> getIgnoredRepoPatterns() {
        // Intended to match the "internal" remote's store key (key format
        // assumed to be maven:<type>:<name> — confirm against AbstractImportTest).
        List<String> result = new ArrayList<>();
        result.add("maven:.+:in.+");
        return result;
    }

    @Test
    public void extractBuildArtifacts_ContainsTwoDownloads() throws Exception {
        // create a remote repo pointing at our server fixture's 'repo/test' directory.
        indy.stores()
            .create(
                new RemoteRepository(MAVEN_PKG_KEY, INTERNAL, server.formatUrl(INTERNAL)),
                "Creating internal test remote repo",
                RemoteRepository.class);
        indy.stores()
            .create(
                new RemoteRepository(MAVEN_PKG_KEY, EXTERNAL, server.formatUrl(EXTERNAL)),
                "Creating external test remote repo",
                RemoteRepository.class);
        // Make both remotes resolvable through the public group.
        StoreKey publicKey = new StoreKey(MAVEN_PKG_KEY, StoreType.group, PUBLIC_GROUP_ID);
        StoreKey internalKey = new StoreKey(MAVEN_PKG_KEY, StoreType.remote, INTERNAL);
        StoreKey externalKey = new StoreKey(MAVEN_PKG_KEY, StoreType.remote, EXTERNAL);
        Group publicGroup = indy.stores().load(publicKey, Group.class);
        if (publicGroup == null) {
            publicGroup = new Group(MAVEN_PKG_KEY, PUBLIC_GROUP_ID, internalKey, externalKey);
            indy.stores().create(publicGroup, "creating public group", Group.class);
        } else {
            publicGroup.setConstituents(Arrays.asList(internalKey, externalKey));
            indy.stores().update(publicGroup, "adding test remotes to public group");
        }
        String internalPath = "org/foo/internal/1.0/internal-1.0.pom";
        String externalPath = "org/foo/external/1.1/external-1.1.pom";
        String content = "This is a test " + System.currentTimeMillis();
        // setup the expectation that the remote repo pointing at this server will request this file...and define its
        // content.
        server.expect(server.formatUrl(INTERNAL, internalPath), 200, content);
        server.expect(server.formatUrl(EXTERNAL, externalPath), 200, content);
        // create a dummy non-chained build execution and repo session based on it
        BuildExecution execution = new TestBuildExecution();
        RepositorySession rc = driver.createBuildRepository(
            execution,
            accessToken,
            accessToken,
            RepositoryType.MAVEN,
            Collections.emptyMap(),
            false);
        assertThat(rc, notNullValue());
        String baseUrl = rc.getConnectionInfo().getDependencyUrl();
        // download the two files via the repo session's dependency URL, which will proxy the test http server
        // using the expectations above
        assertThat(download(UrlUtils.buildUrl(baseUrl, internalPath)), equalTo(content));
        assertThat(download(UrlUtils.buildUrl(baseUrl, externalPath)), equalTo(content));
        // extract the build artifacts, which should contain the two imported deps.
        // This will also trigger promoting imported artifacts into the shared-imports hosted repo
        RepositoryManagerResult repositoryManagerResult = rc.extractBuildArtifacts(true);
        List<Artifact> deps = repositoryManagerResult.getDependencies();
        System.out.println(deps);
        // Both downloads are still reported as dependencies of the build...
        assertThat(deps, notNullValue());
        assertThat(deps.size(), equalTo(2));
        Indy indy = driver.getIndy(accessToken);
        StoreKey sharedImportsKey = new StoreKey(MAVEN_PKG_KEY, StoreType.hosted, SHARED_IMPORTS_ID);
        // check that the imports from external locations are available from shared-imports
        InputStream stream = indy.content().get(sharedImportsKey, externalPath);
        String downloaded = IOUtils.toString(stream, (String) null);
        assertThat(downloaded, equalTo(content));
        stream.close();
        // check that the imports from internal/trusted locations are NOT available from shared-imports
        stream = indy.content().get(sharedImportsKey, internalPath);
        assertThat(stream, nullValue());
    }
}
| apache-2.0 |
gureronder/midpoint | testing/longtest/src/test/java/com/evolveum/midpoint/testing/longtest/TestLdapComplex.java | 12396 | /*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.testing.longtest;
import com.evolveum.midpoint.common.LoggingConfigurationManager;
import com.evolveum.midpoint.common.ProfilingConfigurationManager;
import com.evolveum.midpoint.model.impl.sync.ReconciliationTaskHandler;
import com.evolveum.midpoint.model.test.AbstractModelIntegrationTest;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.ResultHandler;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ObjectQueryUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.test.util.MidPointTestConstants;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentPolicyEnforcementType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectTemplateType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.RoleType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemObjectsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.mutable.MutableInt;
import org.opends.server.types.Entry;
import org.opends.server.types.LDIFImportConfig;
import org.opends.server.util.LDIFException;
import org.opends.server.util.LDIFReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.IOException;
import static com.evolveum.midpoint.test.IntegrationTestTools.display;
import static org.testng.AssertJUnit.assertEquals;
/**
* Mix of various tests for issues that are difficult to replicate using dummy resources.
*
* @author Radovan Semancik
*
*/
@ContextConfiguration(locations = {"classpath:ctx-longtest-test-main.xml"})
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class TestLdapComplex extends AbstractModelIntegrationTest {
public static final File TEST_DIR = new File(MidPointTestConstants.TEST_RESOURCES_DIR, "ldap-complex");
public static final File SYSTEM_CONFIGURATION_FILE = new File(COMMON_DIR, "system-configuration.xml");
public static final String SYSTEM_CONFIGURATION_OID = SystemObjectsType.SYSTEM_CONFIGURATION.value();
public static final File USER_TEMPLATE_FILE = new File(TEST_DIR, "user-template.xml");
protected static final File USER_ADMINISTRATOR_FILE = new File(COMMON_DIR, "user-administrator.xml");
protected static final String USER_ADMINISTRATOR_OID = "00000000-0000-0000-0000-000000000002";
protected static final String USER_ADMINISTRATOR_USERNAME = "administrator";
protected static final File ROLE_SUPERUSER_FILE = new File(COMMON_DIR, "role-superuser.xml");
protected static final String ROLE_SUPERUSER_OID = "00000000-0000-0000-0000-000000000004";
protected static final File ROLE_CAPTAIN_FILE = new File(TEST_DIR, "role-captain.xml");
protected static final File ROLE_JUDGE_FILE = new File(TEST_DIR, "role-judge.xml");
protected static final File ROLE_PIRATE_FILE = new File(TEST_DIR, "role-pirate.xml");
protected static final File ROLE_SAILOR_FILE = new File(TEST_DIR, "role-sailor.xml");
protected static final String ROLE_PIRATE_OID = "12345678-d34d-b33f-f00d-555555556603";
protected static final File ROLES_LDIF_FILE = new File(TEST_DIR, "roles.ldif");
protected static final File RESOURCE_OPENDJ_FILE = new File(COMMON_DIR, "resource-opendj-complex.xml");
protected static final String RESOURCE_OPENDJ_NAME = "Localhost OpenDJ";
protected static final String RESOURCE_OPENDJ_OID = "10000000-0000-0000-0000-000000000003";
protected static final String RESOURCE_OPENDJ_NAMESPACE = MidPointConstants.NS_RI;
// Make it at least 1501 so it will go over the 3000 entries size limit
private static final int NUM_LDAP_ENTRIES = 1000;
private static final String LDAP_GROUP_PIRATES_DN = "cn=Pirates,ou=groups,dc=example,dc=com";
protected ResourceType resourceOpenDjType;
protected PrismObject<ResourceType> resourceOpenDj;
@Autowired
private ReconciliationTaskHandler reconciliationTaskHandler;
@Override
protected void startResources() throws Exception {
openDJController.startCleanServer();
}
@AfterClass
public static void stopResources() throws Exception {
openDJController.stop();
}
@Override
public void initSystem(Task initTask, OperationResult initResult) throws Exception {
super.initSystem(initTask, initResult);
modelService.postInit(initResult);
// System Configuration
PrismObject<SystemConfigurationType> config;
try {
config = repoAddObjectFromFile(SYSTEM_CONFIGURATION_FILE, SystemConfigurationType.class, initResult);
} catch (ObjectAlreadyExistsException e) {
throw new ObjectAlreadyExistsException("System configuration already exists in repository;" +
"looks like the previous test haven't cleaned it up", e);
}
LoggingConfigurationManager.configure(
ProfilingConfigurationManager.checkSystemProfilingConfiguration(config),
config.asObjectable().getVersion(), initResult);
// administrator
PrismObject<UserType> userAdministrator = repoAddObjectFromFile(USER_ADMINISTRATOR_FILE, UserType.class, initResult);
repoAddObjectFromFile(ROLE_SUPERUSER_FILE, RoleType.class, initResult);
login(userAdministrator);
// Roles
repoAddObjectFromFile(ROLE_CAPTAIN_FILE, RoleType.class, initResult);
repoAddObjectFromFile(ROLE_JUDGE_FILE, RoleType.class, initResult);
repoAddObjectFromFile(ROLE_PIRATE_FILE, RoleType.class, initResult);
repoAddObjectFromFile(ROLE_SAILOR_FILE, RoleType.class, initResult);
// templates
repoAddObjectFromFile(USER_TEMPLATE_FILE, ObjectTemplateType.class, initResult);
// Resources
resourceOpenDj = importAndGetObjectFromFile(ResourceType.class, RESOURCE_OPENDJ_FILE, RESOURCE_OPENDJ_OID, initTask, initResult);
resourceOpenDjType = resourceOpenDj.asObjectable();
openDJController.setResource(resourceOpenDj);
assumeAssignmentPolicy(AssignmentPolicyEnforcementType.RELATIVE);
openDJController.addEntriesFromLdifFile(ROLES_LDIF_FILE.getPath());
display("initial LDAP content", openDJController.dumpEntries());
}
@Test
public void test100BigImport() throws Exception {
final String TEST_NAME = "test100BigImport";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
loadEntries("u");
Task task = taskManager.createTaskInstance(TestLdapComplex.class.getName() + "." + TEST_NAME);
task.setOwner(getUser(USER_ADMINISTRATOR_OID));
OperationResult result = task.getResult();
// WHEN
TestUtil.displayWhen(TEST_NAME);
//task.setExtensionPropertyValue(SchemaConstants.MODEL_EXTENSION_WORKER_THREADS, 2);
modelService.importFromResource(RESOURCE_OPENDJ_OID,
new QName(RESOURCE_OPENDJ_NAMESPACE, "AccountObjectClass"), task, result);
// THEN
TestUtil.displayThen(TEST_NAME);
OperationResult subresult = result.getLastSubresult();
TestUtil.assertInProgress("importAccountsFromResource result", subresult);
waitForTaskFinish(task, true, 20000 + NUM_LDAP_ENTRIES*2000);
// THEN
TestUtil.displayThen(TEST_NAME);
int userCount = modelService.countObjects(UserType.class, null, null, task, result);
display("Users", userCount);
assertEquals("Unexpected number of users", NUM_LDAP_ENTRIES+4, userCount);
assertUser("u1", task, result);
}
private void assertUser(String name, Task task, OperationResult result) throws com.evolveum.midpoint.util.exception.ObjectNotFoundException, com.evolveum.midpoint.util.exception.SchemaException, com.evolveum.midpoint.util.exception.SecurityViolationException, com.evolveum.midpoint.util.exception.CommunicationException, com.evolveum.midpoint.util.exception.ConfigurationException {
UserType user = findUserByUsername("u1").asObjectable();
display("user " + name, user.asPrismObject());
assertEquals("Wrong number of assignments", 4, user.getAssignment().size());
}
@Test(enabled = false)
public void test120BigReconciliation() throws Exception {
final String TEST_NAME = "test120BigReconciliation";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
Task task = taskManager.createTaskInstance(TestLdapComplex.class.getName() + "." + TEST_NAME);
task.setOwner(getUser(USER_ADMINISTRATOR_OID));
OperationResult result = task.getResult();
// WHEN
TestUtil.displayWhen(TEST_NAME);
//task.setExtensionPropertyValue(SchemaConstants.MODEL_EXTENSION_WORKER_THREADS, 2);
ResourceType resource = modelService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, task, result).asObjectable();
reconciliationTaskHandler.launch(resource,
new QName(RESOURCE_OPENDJ_NAMESPACE, "AccountObjectClass"), task, result);
// THEN
TestUtil.displayThen(TEST_NAME);
// TODO
// OperationResult subresult = result.getLastSubresult();
// TestUtil.assertInProgress("reconciliation launch result", subresult);
waitForTaskFinish(task, true, 20000 + NUM_LDAP_ENTRIES*2000);
// THEN
TestUtil.displayThen(TEST_NAME);
int userCount = modelService.countObjects(UserType.class, null, null, task, result);
display("Users", userCount);
assertEquals("Unexpected number of users", NUM_LDAP_ENTRIES+4, userCount);
assertUser("u1", task, result);
}
private void loadEntries(String prefix) throws LDIFException, IOException {
long ldapPopStart = System.currentTimeMillis();
for(int i=0; i < NUM_LDAP_ENTRIES; i++) {
String name = "user"+i;
Entry entry = createEntry(prefix+i, name);
openDJController.addEntry(entry);
}
long ldapPopEnd = System.currentTimeMillis();
display("Loaded "+NUM_LDAP_ENTRIES+" LDAP entries in "+((ldapPopEnd-ldapPopStart)/1000)+" seconds");
}
private Entry createEntry(String uid, String name) throws IOException, LDIFException {
StringBuilder sb = new StringBuilder();
String dn = "uid="+uid+","+openDJController.getSuffixPeople();
sb.append("dn: ").append(dn).append("\n");
sb.append("objectClass: inetOrgPerson\n");
sb.append("uid: ").append(uid).append("\n");
sb.append("cn: ").append(name).append("\n");
sb.append("sn: ").append(name).append("\n");
LDIFImportConfig importConfig = new LDIFImportConfig(IOUtils.toInputStream(sb.toString(), "utf-8"));
LDIFReader ldifReader = new LDIFReader(importConfig);
Entry ldifEntry = ldifReader.readEntry();
return ldifEntry;
}
private String toDn(String username) {
return "uid="+username+","+OPENDJ_PEOPLE_SUFFIX;
}
}
| apache-2.0 |
papicella/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/services/cache/GfxdConcurrentCache.java | 6422 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.services.cache;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.reference.SQLState;
import com.pivotal.gemfirexd.internal.iapi.services.cache.Cacheable;
import com.pivotal.gemfirexd.internal.iapi.services.cache.CacheableFactory;
/**
* An extension to {@link ConcurrentCache} for GemFireXD that sets the identity
* on a {@link CacheEntry} before inserting into the cache. This is to avoid
* deadlock scenario with DDL read-write locks:
*
* distributed write lock (other VM) -> local write lock -> cache hit with
* existing entry -> {@link CacheEntry#waitUntilIdentityIsSet()}
*
* cache miss -> cache put -> {@link Cacheable#setIdentity(Object)} -> read from
* SYSTABLES -> local read lock
*
* See bug #40683 for more details.
*
* Currently this is only used for <code>TDCacheble</code>s while for other
* {@link Cacheable}s the normal {@link ConcurrentCache} is used.
*
* @see ConcurrentCache
*
* @author swale
*/
final class GfxdConcurrentCache extends ConcurrentCache {

  /**
   * Creates a new cache manager.
   *
   * @param holderFactory
   *          factory which creates <code>Cacheable</code>s
   * @param name
   *          the name of the cache
   * @param initialSize
   *          the initial capacity of the cache
   * @param maxSize
   *          maximum number of elements in the cache
   */
  GfxdConcurrentCache(CacheableFactory holderFactory, String name,
      int initialSize, int maxSize) {
    super(holderFactory, name, initialSize, maxSize);
  }

  // Overrides of ConcurrentCache

  /**
   * Find an object in the cache. If it is not present, add it to the cache. The
   * returned object is kept until <code>release()</code> is called.
   *
   * Unlike the base class, the entry's identity is set BEFORE the entry is
   * published in the cache map (see class javadoc / bug #40683), so other
   * threads never block in waitUntilIdentityIsSet() while this thread holds
   * read locks.
   *
   * @param key
   *          identity of the object to find
   * @return the cached object, or <code>null</code> if it cannot be found
   */
  @Override
  public Cacheable find(Object key) throws StandardException {
    if (stopped) {
      // Cache manager has been shut down; treat every lookup as a miss.
      return null;
    }
    Cacheable item;
    CacheEntry entry = cache.get(key);
    while (true) {
      if (entry != null) {
        // Found an entry in the cache, lock it.
        entry.lock();
        if (entry.isValid()) {
          try {
            // Entry is still valid. Return it.
            item = entry.getCacheable();
            // The object is already cached. Increase the use count and
            // return it.
            entry.keep(true);
            return item;
          } finally {
            entry.unlock();
          }
        }
        else {
          // This entry has been removed from the cache while we were
          // waiting for the lock. Unlock it and try again.
          entry.unlock();
          entry = cache.get(key);
        }
      }
      else {
        // Cache miss. The boolean argument presumably marks the entry's
        // identity as already set (the GemFireXD-specific behavior described
        // in the class javadoc) -- TODO confirm against CacheEntry.
        entry = new CacheEntry(true);
        // Lock the entry before it's inserted in free slot.
        entry.lock();
        try {
          // The object is not cached. Insert the entry into a free
          // slot and retrieve a reusable Cacheable.
          item = insertIntoFreeSlot(key, entry);
        } finally {
          entry.unlock();
        }
        // Set the identity without holding the lock on the entry. If we
        // hold the lock, we may run into a deadlock if the user code in
        // setIdentity() re-enters the buffer manager.
        Cacheable itemWithIdentity = item.setIdentity(key);
        if (itemWithIdentity != null) {
          entry.setCacheable(itemWithIdentity);
          // add the entry to cache
          CacheEntry oldEntry = cache.putIfAbsent(key, entry);
          if (oldEntry != null) {
            // Someone inserted the entry while we created a new
            // one. Retry with the entry currently in the cache.
            // NOTE(review): the Cacheable prepared for the losing entry is
            // abandoned here without an explicit release -- confirm that
            // insertIntoFreeSlot does not require cleanup in this case.
            entry = oldEntry;
          }
          else {
            // We successfully inserted a new entry.
            return itemWithIdentity;
          }
        }
        else {
          // setIdentity() found no backing object for this key.
          // NOTE(review): the entry inserted via insertIntoFreeSlot is never
          // published or invalidated on this path -- verify the free-slot
          // lifecycle handles this.
          return null;
        }
      }
    }
  }

  /**
   * Create an object in the cache. The object is kept until
   * <code>release()</code> is called.
   *
   * @param key
   *          identity of the object to create
   * @param createParameter
   *          parameters passed to <code>Cacheable.createIdentity()</code>
   * @return a reference to the cached object, or <code>null</code> if the
   *         object cannot be created
   * @exception StandardException
   *              if the object is already in the cache, or if some other error
   *              occurs
   * @see Cacheable#createIdentity(Object,Object)
   */
  @Override
  public Cacheable create(Object key, Object createParameter)
      throws StandardException {
    if (stopped) {
      // Cache manager has been shut down; nothing can be created.
      return null;
    }
    Cacheable item;
    // Entry created with identity pre-marked, same as in find().
    CacheEntry entry = new CacheEntry(true);
    // Lock the entry before it's inserted in free slot.
    entry.lock();
    try {
      // The object is not cached. Insert the entry into a free
      // slot and retrieve a reusable Cacheable.
      item = insertIntoFreeSlot(key, entry);
    } finally {
      entry.unlock();
    }
    // Create the identity without holding the lock on the entry.
    // Otherwise, we may run into a deadlock if the user code in
    // createIdentity() re-enters the buffer manager.
    Cacheable itemWithIdentity = item.createIdentity(key, createParameter);
    if (itemWithIdentity != null) {
      entry.setCacheable(itemWithIdentity);
      if (cache.putIfAbsent(key, entry) != null) {
        // We can't create the object if it's already in the cache.
        throw StandardException.newException(SQLState.OBJECT_EXISTS_IN_CACHE,
            name, key);
      }
    }
    return itemWithIdentity;
  }
}
| apache-2.0 |
internetarchive/pig | test/org/apache/pig/test/TestRegisteredJarVisibility.java | 6888 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Ensure classes from a registered jar are available in the UDFContext.
* Please see PIG-2532 for additional details.
*/
public class TestRegisteredJarVisibility {

    private static final Log LOG = LogFactory.getLog(TestRegisteredJarVisibility.class);

    private static final String JAR_FILE_NAME = "test-foo-loader.jar";
    private static final String PACKAGE_NAME = "org.apache.pig.test";

    // Actual data is not important. Reusing an existing input file.
    private static final File INPUT_FILE = new File("test/data/pigunit/top_queries_input_data.txt");

    private static MiniCluster cluster;
    private static File jarFile;

    /**
     * Compiles the loader/schema sources from the test resources directory,
     * packages the resulting classes into a jar (which is deliberately NOT on
     * the test classpath) and starts the mini cluster.
     */
    @BeforeClass()
    public static void setUp() throws IOException {
        String testResourcesDir = "test/resources/" + PACKAGE_NAME.replace(".", "/");
        String testBuildDataDir = "build/test/data";

        // Create the test data directory if needed
        File testDataDir = new File(testBuildDataDir,
                TestRegisteredJarVisibility.class.getCanonicalName());
        testDataDir.mkdirs();

        jarFile = new File(testDataDir, JAR_FILE_NAME);

        File[] javaFiles = new File[]{
                new File(testResourcesDir, "RegisteredJarVisibilityLoader.java"),
                new File(testResourcesDir, "RegisteredJarVisibilitySchema.java")};
        List<File> classFiles = compile(javaFiles);

        // Canonical class name to class file
        Map<String, File> filesToJar = Maps.newHashMap();
        for (File classFile : classFiles) {
            filesToJar.put(
                    PACKAGE_NAME + "." + classFile.getName().replace(".class", ""),
                    classFile);
        }

        jar(filesToJar);

        cluster = MiniCluster.buildCluster();
    }

    @AfterClass()
    public static void tearDown() {
        cluster.shutDown();
    }

    /**
     * Sanity check: the schema class must NOT be reachable from the test
     * classpath, otherwise the visibility test below proves nothing.
     */
    @Test()
    public void testRegisteredJarVisibilitySchemaNotOnClasspath() {
        boolean exceptionThrown = false;
        try {
            Class.forName("org.apache.pig.test.FooSchema");
        } catch (ClassNotFoundException e) {
            exceptionThrown = true;
        }
        Assert.assertTrue(exceptionThrown);
    }

    /**
     * Registers the jar in a Pig script and verifies the loader (and the
     * schema class it references) is visible to the UDFContext. See PIG-2532.
     */
    @Test()
    public void testRegisteredJarVisibility() throws IOException {
        cluster.getFileSystem().copyFromLocalFile(
                new Path("file://" + INPUT_FILE.getAbsolutePath()), new Path(INPUT_FILE.getName()));

        PigServer pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
        String query = "register " + jarFile.getAbsolutePath() + ";\n"
                + "a = load '" + INPUT_FILE.getName()
                + "' using org.apache.pig.test.RegisteredJarVisibilityLoader();";

        LOG.info("Running pig script:\n" + query);
        // Use an explicit charset instead of the platform default
        // (UnsupportedEncodingException is an IOException, already declared).
        pigServer.registerScript(new ByteArrayInputStream(query.getBytes("UTF-8")));

        pigServer.openIterator("a");
        pigServer.shutdown();
    }

    /**
     * Compiles the given java sources in place and returns the class files.
     */
    private static List<File> compile(File[] javaFiles) {
        LOG.info("Compiling: " + Arrays.asList(javaFiles));

        JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
        Iterable<? extends JavaFileObject> compilationUnits1 =
                fileManager.getJavaFileObjects(javaFiles);
        JavaCompiler.CompilationTask task =
                compiler.getTask(null, fileManager, null, null, null, compilationUnits1);
        // Fail fast if compilation did not succeed (return value was ignored
        // before, deferring the failure to a confusing missing-class error).
        Assert.assertTrue("Compilation failed for: " + Arrays.asList(javaFiles), task.call());

        List<File> classFiles = Lists.newArrayList();
        for (File javaFile : javaFiles) {
            File classFile = new File(javaFile.getAbsolutePath().replace(".java", ".class"));
            classFile.deleteOnExit();
            Assert.assertTrue(classFile.exists());
            classFiles.add(classFile);
            LOG.info("Created " + classFile.getAbsolutePath());
        }

        return classFiles;
    }

    /**
     * Create a jar file containing the generated classes.
     *
     * @param filesToJar map of canonical class name to class file
     * @throws IOException on error
     */
    private static void jar(Map<String, File> filesToJar) throws IOException {
        LOG.info("Creating jar file containing: " + filesToJar);

        JarOutputStream jos = new JarOutputStream(new FileOutputStream(jarFile.getAbsolutePath()));
        try {
            for (Map.Entry<String, File> entry : filesToJar.entrySet()) {
                String zipEntryName = entry.getKey().replace(".", "/") + ".class";
                LOG.info("Adding " + zipEntryName + " to " + jarFile.getAbsolutePath());
                jos.putNextEntry(new ZipEntry(zipEntryName));
                InputStream classInputStream = new FileInputStream(entry.getValue().getAbsolutePath());
                try {
                    ByteStreams.copy(classInputStream, jos);
                } finally {
                    classInputStream.close();
                }
            }
        } finally {
            jos.close();
        }

        Assert.assertTrue(jarFile.exists());
        LOG.info("Created " + jarFile.getAbsolutePath());
    }
}
| apache-2.0 |
gastaldi/AsciidocFX | src/main/java/com/kodcu/service/extension/chart/AreaChartBuilderService.java | 1187 | package com.kodcu.service.extension.chart;
import com.kodcu.controller.ApplicationController;
import com.kodcu.other.Current;
import com.kodcu.service.ThreadService;
import javafx.scene.chart.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* Created by usta on 31.03.2015.
*/
@Component("area-bean")
public class AreaChartBuilderService extends XYChartBuilderService {

    private final ThreadService threadService;
    private final Current current;
    private final ApplicationController controller;

    /**
     * Wires the collaborators and hands them to the common XY-chart base class.
     */
    @Autowired
    public AreaChartBuilderService(ThreadService threadService, Current current, ApplicationController controller) {
        super(threadService, current, controller);
        this.threadService = threadService;
        this.current = current;
        this.controller = controller;
    }

    /**
     * Builds the concrete chart for this service: an area chart over two
     * numeric axes.
     */
    @Override
    protected XYChart<Number, Number> createXYChart() {
        NumberAxis horizontalAxis = new NumberAxis();
        NumberAxis verticalAxis = new NumberAxis();
        return new AreaChart<Number, Number>(horizontalAxis, verticalAxis);
    }
}
| apache-2.0 |
wwzhe/dataworks-zeus | schedule/src/main/java/com/taobao/zeus/broadcast/alarm/MailAlarm.java | 4315 | package com.taobao.zeus.broadcast.alarm;
import java.net.InetAddress;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.NoSuchProviderException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.taobao.zeus.model.LogDescriptor;
import com.taobao.zeus.store.UserManager;
import com.taobao.zeus.store.mysql.MysqlLogManager;
import com.taobao.zeus.store.mysql.persistence.ZeusUser;
import com.taobao.zeus.util.Environment;
public class MailAlarm extends AbstractZeusAlarm {

    private static Logger log = LoggerFactory.getLogger(MailAlarm.class);

    @Autowired
    private UserManager userManager;
    @Autowired
    private MysqlLogManager zeusLogManager;

    private static String host = Environment.getHost();// mail server host
    private static String port = Environment.getPort();// SMTP port
    private static String from = Environment.getSendFrom();// sender address
    private static String user = Environment.getUsername();// account user name
    private static String password = Environment.getPassword();// account password

    /**
     * Resolves the e-mail addresses of the given users and sends the alarm
     * message to all of them in a single mail.
     *
     * @param jobId   id of the job the alarm belongs to (used for logging)
     * @param users   uids of the users to notify
     * @param title   mail subject
     * @param content mail body; "&lt;br/&gt;" markers are converted to CRLF
     */
    @Override
    public void alarm(String jobId, List<String> users, String title, String content)
            throws Exception {
        List<ZeusUser> userList = userManager.findListByUidByOrder(users);
        List<String> emails = new ArrayList<String>();
        if (userList != null && userList.size() > 0) {
            // Renamed from "user" to avoid shadowing the static SMTP user field.
            for (ZeusUser zeusUser : userList) {
                String userEmail = zeusUser.getEmail();
                // A single field may hold several addresses separated by ';';
                // only keep entries that look like addresses.
                if (userEmail != null && !userEmail.isEmpty()
                        && userEmail.contains("@")) {
                    if (userEmail.contains(";")) {
                        String[] userEmails = userEmail.split(";");
                        for (String ems : userEmails) {
                            if (ems.contains("@")) {
                                emails.add(ems);
                            }
                        }
                    } else {
                        emails.add(userEmail);
                    }
                }
            }
            if (emails.size() > 0) {
                content = content.replace("<br/>", "\r\n");
                sendEmail(jobId, emails, title, content);
            }
        }
    }

    /**
     * Sends a plain-text UTF-8 mail to the given recipients via SMTP.
     * Failures are logged, never thrown.
     *
     * @param jobId   id of the job, for log correlation only
     * @param emails  recipient addresses (must be non-empty)
     * @param subject mail subject
     * @param body    mail body
     */
    public void sendEmail(String jobId, List<String> emails, String subject,
            String body) {
        Transport transport = null;
        try {
            log.info( "jobId: " + jobId +" begin to send the email!");
            Properties props = new Properties();
            props.put("mail.smtp.host", host);
            props.put("mail.smtp.port", port);
            props.put("mail.smtp.auth", "true");

            // getInstance, not getDefaultInstance: the default instance is
            // created once per JVM with the first Properties it ever sees, so
            // later configuration changes would be silently ignored.
            Session session = Session.getInstance(props, null);
            transport = session.getTransport("smtp");
            transport.connect(host, user, password);

            MimeMessage msg = new MimeMessage(session);
            msg.setSentDate(new Date());
            InternetAddress fromAddress = new InternetAddress(from);
            msg.setFrom(fromAddress);
            InternetAddress[] toAddress = new InternetAddress[emails.size()];
            for (int i = 0; i < emails.size(); i++) {
                toAddress[i] = new InternetAddress(emails.get(i));
            }
            msg.setRecipients(Message.RecipientType.TO, toAddress);
            msg.setSubject(subject, "UTF-8");
            msg.setText(body, "UTF-8");
            msg.saveChanges();
            transport.sendMessage(msg, msg.getAllRecipients());
            log.info("jobId: " + jobId + " send email: " + emails + "; from: " + from + " subject: "
                    + subject + ", send success!");
        } catch (Exception e) {
            // The original code had three catch blocks with identical handling.
            log.error("jobId: " + jobId + " fail to send the mail. ", e);
        } finally {
            // Always release the SMTP connection (was leaked before).
            if (transport != null) {
                try {
                    transport.close();
                } catch (MessagingException e) {
                    log.error("jobId: " + jobId + " fail to close the transport. ", e);
                }
            }
        }
    }
}
| apache-2.0 |
igor-sfdc/felix | tooling/features-maven-plugin/src/test/java/org/apache/felix/karaf/tooling/features/GenerateFeaturesXmlMojoTest.java | 1613 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.felix.karaf.tooling.features;
import org.apache.maven.artifact.Artifact;
import org.easymock.EasyMock;
import static org.easymock.EasyMock.*;
import junit.framework.TestCase;
/**
 * Test cases for {@link GenerateFeaturesXmlMojo}
 */
public class GenerateFeaturesXmlMojoTest extends TestCase {

    /**
     * toString(Artifact) must render Maven coordinates as
     * "groupId/artifactId/version".
     */
    public void testToString() throws Exception {
        // Mock an artifact with fixed coordinates.
        Artifact artifact = EasyMock.createMock(Artifact.class);
        expect(artifact.getGroupId()).andReturn("org.apache.felix.karaf.test");
        expect(artifact.getArtifactId()).andReturn("test-artifact");
        expect(artifact.getVersion()).andReturn("1.2.3");
        replay(artifact);

        assertEquals("org.apache.felix.karaf.test/test-artifact/1.2.3", GenerateFeaturesXmlMojo.toString(artifact));

        // Verify that every recorded expectation was actually consumed
        // (was missing: replay without verify never checks the mock).
        verify(artifact);
    }
}
| apache-2.0 |
ibmkendrick/phonebookdemo-v2 | src/main/java/com/ibmcloud/contest/phonebook/BadRequestException.java | 1011 | /**
* Copyright 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibmcloud.contest.phonebook;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
/**
 * Throw a 400 status code
 * <p>
 * JAX-RS exception that maps to an HTTP 400 (Bad Request) response; throwing
 * it from a resource method aborts the request with that status.
 */
public class BadRequestException extends WebApplicationException {

    private static final long serialVersionUID = 1L;

    /**
     * Builds the exception with an empty response whose status is
     * {@link Status#BAD_REQUEST}.
     */
    public BadRequestException() {
        super(Response.status(Status.BAD_REQUEST).build());
    }
}
| apache-2.0 |
zstackorg/zstack | sdk/src/main/java/org/zstack/sdk/zwatch/monitorgroup/api/CreateMonitorTemplateResult.java | 417 | package org.zstack.sdk.zwatch.monitorgroup.api;
import org.zstack.sdk.zwatch.monitorgroup.entity.MonitorTemplateInventory;
/**
 * Result holder for the "create monitor template" API call: carries the
 * inventory of the template that was created.
 */
public class CreateMonitorTemplateResult {

    // Inventory describing the newly created monitor template.
    public MonitorTemplateInventory inventory;

    public MonitorTemplateInventory getInventory() {
        return inventory;
    }

    public void setInventory(MonitorTemplateInventory inventory) {
        this.inventory = inventory;
    }
}
| apache-2.0 |
google/google-authenticator | mobile/blackberry/src/com/google/authenticator/blackberry/Uri.java | 65026 | /*-
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Modifications:
* -Changed package name
* -Removed Android dependencies
* -Removed/replaced Java SE dependencies
* -Removed/replaced annotations
*/
package com.google.authenticator.blackberry;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.io.ByteArrayOutputStream;
import java.util.Vector;
/**
* Immutable URI reference. A URI reference includes a URI and a fragment, the
* component of the URI following a '#'. Builds and parses URI references
* which conform to
* <a href="http://www.faqs.org/rfcs/rfc2396.html">RFC 2396</a>.
*
* <p>In the interest of performance, this class performs little to no
* validation. Behavior is undefined for invalid input. This class is very
* forgiving--in the face of invalid input, it will return garbage
* rather than throw an exception unless otherwise specified.
*/
public abstract class Uri {
/*
This class aims to do as little up front work as possible. To accomplish
that, we vary the implementation dependending on what the user passes in.
For example, we have one implementation if the user passes in a
URI string (StringUri) and another if the user passes in the
individual components (OpaqueUri).
*Concurrency notes*: Like any truly immutable object, this class is safe
for concurrent use. This class uses a caching pattern in some places where
it doesn't use volatile or synchronized. This is safe to do with ints
because getting or setting an int is atomic. It's safe to do with a String
because the internal fields are final and the memory model guarantees other
threads won't see a partially initialized instance. We are not guaranteed
that some threads will immediately see changes from other threads on
certain platforms, but we don't mind if those threads reconstruct the
cached result. As a result, we get thread safe caching with no concurrency
overhead, which means the most common case, access from a single thread,
is as fast as possible.
From the Java Language spec.:
"17.5 Final Field Semantics
... when the object is seen by another thread, that thread will always
see the correctly constructed version of that object's final fields.
It will also see versions of any object or array referenced by
those final fields that are at least as up-to-date as the final fields
are."
In that same vein, all non-transient fields within Uri
implementations should be final and immutable so as to ensure true
immutability for clients even when they don't use proper concurrency
control.
For reference, from RFC 2396:
"4.3. Parsing a URI Reference
A URI reference is typically parsed according to the four main
components and fragment identifier in order to determine what
components are present and whether the reference is relative or
absolute. The individual components are then parsed for their
subparts and, if not opaque, to verify their validity.
Although the BNF defines what is allowed in each component, it is
ambiguous in terms of differentiating between an authority component
and a path component that begins with two slash characters. The
greedy algorithm is used for disambiguation: the left-most matching
rule soaks up as much of the URI reference string as it is capable of
matching. In other words, the authority component wins."
The "four main components" of a hierarchical URI consist of
<scheme>://<authority><path>?<query>
*/
/**
* NOTE: EMPTY accesses this field during its own initialization, so this
* field *must* be initialized first, or else EMPTY will see a null value!
*
* Placeholder for strings which haven't been cached. This enables us
* to cache null. We intentionally create a new String instance so we can
* compare its identity and there is no chance we will confuse it with
* user data.
*/
private static final String NOT_CACHED = new String("NOT CACHED");
/**
* The empty URI, equivalent to "".
*/
public static final Uri EMPTY = new HierarchicalUri(null, Part.NULL,
PathPart.EMPTY, Part.NULL, Part.NULL);
/**
 * Prevents external subclassing. Uri instances are only created through the
 * implementations bundled with this class (e.g. HierarchicalUri).
 */
private Uri() {}
/**
* Returns true if this URI is hierarchical like "http://google.com".
* Absolute URIs are hierarchical if the scheme-specific part starts with
* a '/'. Relative URIs are always hierarchical.
*/
public abstract boolean isHierarchical();
/**
 * Returns true if this URI is opaque like "mailto:nobody@google.com". The
 * scheme-specific part of an opaque URI cannot start with a '/'.
 */
public boolean isOpaque() {
    // Opaque and hierarchical are mutually exclusive by definition.
    return !isHierarchical();
}
/**
* Returns true if this URI is relative, i.e. if it doesn't contain an
* explicit scheme.
*
* @return true if this URI is relative, false if it's absolute
*/
public abstract boolean isRelative();
/**
* Returns true if this URI is absolute, i.e. if it contains an
* explicit scheme.
*
* @return true if this URI is absolute, false if it's relative
*/
public boolean isAbsolute() {
return !isRelative();
}
/**
* Gets the scheme of this URI. Example: "http"
*
* @return the scheme or null if this is a relative URI
*/
public abstract String getScheme();
/**
* Gets the scheme-specific part of this URI, i.e. everything between the
* scheme separator ':' and the fragment separator '#'. If this is a
* relative URI, this method returns the entire URI. Decodes escaped octets.
*
* <p>Example: "//www.google.com/search?q=android"
*
* @return the decoded scheme-specific-part
*/
public abstract String getSchemeSpecificPart();
/**
* Gets the scheme-specific part of this URI, i.e. everything between the
* scheme separator ':' and the fragment separator '#'. If this is a
* relative URI, this method returns the entire URI. Leaves escaped octets
* intact.
*
* <p>Example: "//www.google.com/search?q=android"
*
* @return the decoded scheme-specific-part
*/
public abstract String getEncodedSchemeSpecificPart();
/**
* Gets the decoded authority part of this URI. For
* server addresses, the authority is structured as follows:
* {@code [ userinfo '@' ] host [ ':' port ]}
*
* <p>Examples: "google.com", "bob@google.com:80"
*
* @return the authority for this URI or null if not present
*/
public abstract String getAuthority();
/**
* Gets the encoded authority part of this URI. For
* server addresses, the authority is structured as follows:
* {@code [ userinfo '@' ] host [ ':' port ]}
*
* <p>Examples: "google.com", "bob@google.com:80"
*
* @return the authority for this URI or null if not present
*/
public abstract String getEncodedAuthority();
/**
* Gets the decoded user information from the authority.
* For example, if the authority is "nobody@google.com", this method will
* return "nobody".
*
* @return the user info for this URI or null if not present
*/
public abstract String getUserInfo();
/**
* Gets the encoded user information from the authority.
* For example, if the authority is "nobody@google.com", this method will
* return "nobody".
*
* @return the user info for this URI or null if not present
*/
public abstract String getEncodedUserInfo();
/**
* Gets the encoded host from the authority for this URI. For example,
* if the authority is "bob@google.com", this method will return
* "google.com".
*
* @return the host for this URI or null if not present
*/
public abstract String getHost();
/**
* Gets the port from the authority for this URI. For example,
* if the authority is "google.com:80", this method will return 80.
*
* @return the port for this URI or -1 if invalid or not present
*/
public abstract int getPort();
/**
* Gets the decoded path.
*
* @return the decoded path, or null if this is not a hierarchical URI
* (like "mailto:nobody@google.com") or the URI is invalid
*/
public abstract String getPath();
/**
* Gets the encoded path.
*
* @return the encoded path, or null if this is not a hierarchical URI
* (like "mailto:nobody@google.com") or the URI is invalid
*/
public abstract String getEncodedPath();
/**
* Gets the decoded query component from this URI. The query comes after
* the query separator ('?') and before the fragment separator ('#'). This
* method would return "q=android" for
* "http://www.google.com/search?q=android".
*
* @return the decoded query or null if there isn't one
*/
public abstract String getQuery();
/**
* Gets the encoded query component from this URI. The query comes after
* the query separator ('?') and before the fragment separator ('#'). This
* method would return "q=android" for
* "http://www.google.com/search?q=android".
*
* @return the encoded query or null if there isn't one
*/
public abstract String getEncodedQuery();
/**
* Gets the decoded fragment part of this URI, everything after the '#'.
*
* @return the decoded fragment or null if there isn't one
*/
public abstract String getFragment();
/**
* Gets the encoded fragment part of this URI, everything after the '#'.
*
* @return the encoded fragment or null if there isn't one
*/
public abstract String getEncodedFragment();
/**
* Gets the decoded path segments.
*
* @return decoded path segments, each without a leading or trailing '/'
*/
public abstract String[] getPathSegments();
/**
* Gets the decoded last segment in the path.
*
* @return the decoded last segment or null if the path is empty
*/
public abstract String getLastPathSegment();
/**
 * Compares this Uri to another object for equality. Returns true if the
 * encoded string representations of this Uri and the given Uri are
 * equal. Case counts. Paths are not normalized. If one Uri specifies a
 * default port explicitly and the other leaves it implicit, they will not
 * be considered equal.
 */
@Override
public boolean equals(Object o) {
    if (o instanceof Uri) {
        Uri that = (Uri) o;
        return toString().equals(that.toString());
    }
    return false;
}

/**
 * Hashes the encoded string represention of this Uri consistently with
 * {@link #equals(Object)}.
 */
@Override
public int hashCode() {
    return toString().hashCode();
}

/**
 * Compares the string representation of this Uri with that of
 * another, ordering by the encoded string form.
 */
public int compareTo(Uri other) {
    String mine = toString();
    return mine.compareTo(other.toString());
}
/**
 * Returns the encoded string representation of this URI.
 * Example: "http://google.com/"
 */
public abstract String toString();

/**
 * Constructs a new builder, copying the attributes from this Uri.
 */
public abstract Builder buildUpon();

/** Index of a component which was not found. Matches String.indexOf(). */
private final static int NOT_FOUND = -1;

/** Placeholder value for an index which hasn't been calculated yet. */
private final static int NOT_CALCULATED = -2;

/**
 * Error message presented when a user tries to treat an opaque URI as
 * hierarchical.
 */
private static final String NOT_HIERARCHICAL
        = "This isn't a hierarchical URI.";

/** Default encoding used for '%'-escaping and unescaping octets. */
private static final String DEFAULT_ENCODING = "UTF-8";

/**
 * Creates a Uri which parses the given encoded URI string.
 *
 * <p>Parsing is lazy: the string is stored as-is and components are
 * extracted on first access.
 *
 * @param uriString an RFC 2396-compliant, encoded URI
 * @throws NullPointerException if uriString is null
 * @return Uri for this given uri string
 */
public static Uri parse(String uriString) {
    return new StringUri(uriString);
}
/**
 * An implementation which wraps a String URI. This URI can be opaque or
 * hierarchical, but we extend AbstractHierarchicalUri in case we need
 * the hierarchical functionality.
 *
 * <p>All components are parsed lazily on first access and cached. The
 * caches use benign data races: fields are either volatile primitives or
 * references whose recomputation is idempotent, so concurrent first
 * accesses may parse twice but always produce the same value.
 */
private static class StringUri extends AbstractHierarchicalUri {

    /** Used in parcelling. */
    static final int TYPE_ID = 1;

    /** URI string representation. */
    private final String uriString;

    private StringUri(String uriString) {
        if (uriString == null) {
            throw new NullPointerException("uriString");
        }
        this.uriString = uriString;
    }

    /** Cached scheme separator index. NOT_CALCULATED until first use. */
    private volatile int cachedSsi = NOT_CALCULATED;

    /** Finds the first ':'. Returns -1 if none found. */
    private int findSchemeSeparator() {
        return cachedSsi == NOT_CALCULATED
                ? cachedSsi = uriString.indexOf(':')
                : cachedSsi;
    }

    /** Cached fragment separator index. NOT_CALCULATED until first use. */
    private volatile int cachedFsi = NOT_CALCULATED;

    /** Finds the first '#'. Returns -1 if none found. */
    private int findFragmentSeparator() {
        // Searching from the scheme separator is safe even when it is
        // NOT_FOUND (-1): indexOf treats a negative fromIndex as 0.
        return cachedFsi == NOT_CALCULATED
                ? cachedFsi = uriString.indexOf('#', findSchemeSeparator())
                : cachedFsi;
    }

    public boolean isHierarchical() {
        int ssi = findSchemeSeparator();

        if (ssi == NOT_FOUND) {
            // All relative URIs are hierarchical.
            return true;
        }

        if (uriString.length() == ssi + 1) {
            // No ssp.
            return false;
        }

        // If the ssp starts with a '/', this is hierarchical.
        return uriString.charAt(ssi + 1) == '/';
    }

    public boolean isRelative() {
        // Relative iff there is no ':' at all, i.e. no explicit scheme.
        // A ':' at index 0 (empty scheme) still counts as absolute here.
        return findSchemeSeparator() == NOT_FOUND;
    }

    /** Cached scheme; NOT_CACHED sentinel compared by identity. */
    private volatile String scheme = NOT_CACHED;

    public String getScheme() {
        boolean cached = (scheme != NOT_CACHED);
        return cached ? scheme : (scheme = parseScheme());
    }

    private String parseScheme() {
        int ssi = findSchemeSeparator();
        return ssi == NOT_FOUND ? null : uriString.substring(0, ssi);
    }

    // Lazily-created scheme-specific part; idempotent, so unsynchronized.
    private Part ssp;

    private Part getSsp() {
        return ssp == null ? ssp = Part.fromEncoded(parseSsp()) : ssp;
    }

    public String getEncodedSchemeSpecificPart() {
        return getSsp().getEncoded();
    }

    public String getSchemeSpecificPart() {
        return getSsp().getDecoded();
    }

    private String parseSsp() {
        int ssi = findSchemeSeparator();
        int fsi = findFragmentSeparator();

        // Return everything between ssi and fsi.
        return fsi == NOT_FOUND
                ? uriString.substring(ssi + 1)
                : uriString.substring(ssi + 1, fsi);
    }

    // Lazily-created authority; idempotent, so unsynchronized.
    private Part authority;

    private Part getAuthorityPart() {
        if (authority == null) {
            String encodedAuthority
                    = parseAuthority(this.uriString, findSchemeSeparator());
            return authority = Part.fromEncoded(encodedAuthority);
        }

        return authority;
    }

    public String getEncodedAuthority() {
        return getAuthorityPart().getEncoded();
    }

    public String getAuthority() {
        return getAuthorityPart().getDecoded();
    }

    // Lazily-created path; idempotent, so unsynchronized.
    private PathPart path;

    private PathPart getPathPart() {
        return path == null
                ? path = PathPart.fromEncoded(parsePath())
                : path;
    }

    public String getPath() {
        return getPathPart().getDecoded();
    }

    public String getEncodedPath() {
        return getPathPart().getEncoded();
    }

    public String[] getPathSegments() {
        return getPathPart().getPathSegments().segments;
    }

    /**
     * Returns the encoded path, or null for opaque URIs (which have no
     * path component).
     */
    private String parsePath() {
        String uriString = this.uriString;
        int ssi = findSchemeSeparator();

        // If the URI is absolute.
        if (ssi > -1) {
            // Is there anything after the ':'?
            boolean schemeOnly = ssi + 1 == uriString.length();
            if (schemeOnly) {
                // Opaque URI.
                return null;
            }

            // A '/' after the ':' means this is hierarchical.
            if (uriString.charAt(ssi + 1) != '/') {
                // Opaque URI.
                return null;
            }
        } else {
            // All relative URIs are hierarchical.
        }

        return parsePath(uriString, ssi);
    }

    // Lazily-created query; idempotent, so unsynchronized.
    private Part query;

    private Part getQueryPart() {
        return query == null
                ? query = Part.fromEncoded(parseQuery()) : query;
    }

    public String getEncodedQuery() {
        return getQueryPart().getEncoded();
    }

    private String parseQuery() {
        // It doesn't make sense to cache this index. We only ever
        // calculate it once.
        int qsi = uriString.indexOf('?', findSchemeSeparator());
        if (qsi == NOT_FOUND) {
            return null;
        }

        int fsi = findFragmentSeparator();

        if (fsi == NOT_FOUND) {
            return uriString.substring(qsi + 1);
        }

        if (fsi < qsi) {
            // The '?' we found lives inside the fragment; there is no
            // real query component. Invalid.
            return null;
        }

        return uriString.substring(qsi + 1, fsi);
    }

    public String getQuery() {
        return getQueryPart().getDecoded();
    }

    // Lazily-created fragment; idempotent, so unsynchronized.
    private Part fragment;

    private Part getFragmentPart() {
        return fragment == null
                ? fragment = Part.fromEncoded(parseFragment()) : fragment;
    }

    public String getEncodedFragment() {
        return getFragmentPart().getEncoded();
    }

    private String parseFragment() {
        int fsi = findFragmentSeparator();
        return fsi == NOT_FOUND ? null : uriString.substring(fsi + 1);
    }

    public String getFragment() {
        return getFragmentPart().getDecoded();
    }

    public String toString() {
        return uriString;
    }

    /**
     * Parses an authority out of the given URI string.
     *
     * @param uriString URI string
     * @param ssi scheme separator index, -1 for a relative URI
     *
     * @return the authority or null if none is found
     */
    static String parseAuthority(String uriString, int ssi) {
        int length = uriString.length();

        // If "//" follows the scheme separator, we have an authority.
        if (length > ssi + 2
                && uriString.charAt(ssi + 1) == '/'
                && uriString.charAt(ssi + 2) == '/') {
            // We have an authority.

            // Look for the start of the path, query, or fragment, or the
            // end of the string.
            int end = ssi + 3;
            LOOP: while (end < length) {
                switch (uriString.charAt(end)) {
                    case '/': // Start of path
                    case '?': // Start of query
                    case '#': // Start of fragment
                        break LOOP;
                }
                end++;
            }

            return uriString.substring(ssi + 3, end);
        } else {
            return null;
        }
    }

    /**
     * Parses a path out of this given URI string.
     *
     * @param uriString URI string
     * @param ssi scheme separator index, -1 for a relative URI
     *
     * @return the path
     */
    static String parsePath(String uriString, int ssi) {
        int length = uriString.length();

        // Find start of path.
        int pathStart;
        if (length > ssi + 2
                && uriString.charAt(ssi + 1) == '/'
                && uriString.charAt(ssi + 2) == '/') {
            // Skip over authority to path.
            pathStart = ssi + 3;
            LOOP: while (pathStart < length) {
                switch (uriString.charAt(pathStart)) {
                    case '?': // Start of query
                    case '#': // Start of fragment
                        return ""; // Empty path.
                    case '/': // Start of path!
                        break LOOP;
                }
                pathStart++;
            }
        } else {
            // Path starts immediately after scheme separator.
            pathStart = ssi + 1;
        }

        // Find end of path.
        int pathEnd = pathStart;
        LOOP: while (pathEnd < length) {
            switch (uriString.charAt(pathEnd)) {
                case '?': // Start of query
                case '#': // Start of fragment
                    break LOOP;
            }
            pathEnd++;
        }

        return uriString.substring(pathStart, pathEnd);
    }

    public Builder buildUpon() {
        if (isHierarchical()) {
            return new Builder()
                    .scheme(getScheme())
                    .authority(getAuthorityPart())
                    .path(getPathPart())
                    .query(getQueryPart())
                    .fragment(getFragmentPart());
        } else {
            return new Builder()
                    .scheme(getScheme())
                    .opaquePart(getSsp())
                    .fragment(getFragmentPart());
        }
    }
}
/**
 * Creates an opaque Uri from the given components. Encodes the ssp
 * which means this method cannot be used to create hierarchical URIs.
 *
 * @param scheme of the URI
 * @param ssp scheme-specific-part, everything between the
 *  scheme separator (':') and the fragment separator ('#'), which will
 *  get encoded
 * @param fragment fragment, everything after the '#', null if undefined,
 *  will get encoded
 *
 * @throws NullPointerException if scheme or ssp is null
 * @return Uri composed of the given scheme, ssp, and fragment
 *
 * @see Builder if you don't want the ssp and fragment to be encoded
 */
public static Uri fromParts(String scheme, String ssp,
        String fragment) {
    if (scheme == null) {
        throw new NullPointerException("scheme");
    }
    if (ssp == null) {
        throw new NullPointerException("ssp");
    }

    // Both parts are treated as decoded text and will be escaped when
    // the URI is rendered. A null fragment is permitted.
    Part sspPart = Part.fromDecoded(ssp);
    Part fragmentPart = Part.fromDecoded(fragment);
    return new OpaqueUri(scheme, sspPart, fragmentPart);
}
/**
 * Opaque URI: a scheme plus an uninterpreted scheme-specific part, e.g.
 * "mailto:nobody@google.com". Opaque URIs have no authority, path, or
 * query components; those accessors all report "absent".
 */
private static class OpaqueUri extends Uri {

    /** Used in parcelling. */
    static final int TYPE_ID = 2;

    private final String scheme;
    private final Part ssp;
    private final Part fragment;

    private OpaqueUri(String scheme, Part ssp, Part fragment) {
        this.scheme = scheme;
        this.ssp = ssp;
        // Normalize a missing fragment to the shared NULL part so the
        // field is never null.
        if (fragment == null) {
            this.fragment = Part.NULL;
        } else {
            this.fragment = fragment;
        }
    }

    public boolean isHierarchical() {
        return false;
    }

    public boolean isRelative() {
        return scheme == null;
    }

    public String getScheme() {
        return scheme;
    }

    public String getEncodedSchemeSpecificPart() {
        return ssp.getEncoded();
    }

    public String getSchemeSpecificPart() {
        return ssp.getDecoded();
    }

    public String getFragment() {
        return fragment.getDecoded();
    }

    public String getEncodedFragment() {
        return fragment.getEncoded();
    }

    // An opaque URI has no hierarchical components, so every accessor
    // below reports absence (null, an empty array, or -1 for the port).

    public String getAuthority() {
        return null;
    }

    public String getEncodedAuthority() {
        return null;
    }

    public String getPath() {
        return null;
    }

    public String getEncodedPath() {
        return null;
    }

    public String getQuery() {
        return null;
    }

    public String getEncodedQuery() {
        return null;
    }

    public String[] getPathSegments() {
        return new String[0];
    }

    public String getLastPathSegment() {
        return null;
    }

    public String getUserInfo() {
        return null;
    }

    public String getEncodedUserInfo() {
        return null;
    }

    public String getHost() {
        return null;
    }

    public int getPort() {
        return -1;
    }

    /** Cached string form; NOT_CACHED sentinel compared by identity. */
    private volatile String cachedString = NOT_CACHED;

    public String toString() {
        String result = cachedString;
        if (result != NOT_CACHED) {
            return result;
        }

        StringBuffer sb = new StringBuffer();
        sb.append(scheme).append(':');
        sb.append(getEncodedSchemeSpecificPart());
        if (!fragment.isEmpty()) {
            sb.append('#').append(fragment.getEncoded());
        }

        result = sb.toString();
        cachedString = result;
        return result;
    }

    public Builder buildUpon() {
        return new Builder()
                .scheme(this.scheme)
                .opaquePart(this.ssp)
                .fragment(this.fragment);
    }
}
/**
 * Wrapper for a decoded path-segment array. Only the first {@code size}
 * entries of {@code segments} are meaningful.
 *
 * <p>NOTE(review): EMPTY carries a null segments array — callers that
 * read {@code segments} directly must tolerate null; verify against
 * PathPart's usage.
 */
static class PathSegments {

    static final PathSegments EMPTY = new PathSegments(null, 0);

    final String[] segments;
    final int size;

    PathSegments(String[] segments, int size) {
        this.segments = segments;
        this.size = size;
    }

    public String get(int index) {
        if (index < size) {
            return segments[index];
        }
        throw new IndexOutOfBoundsException();
    }

    public int size() {
        return size;
    }
}
/**
 * Builds PathSegments. Accumulates segments in a doubling array and
 * produces an exact-size result.
 */
static class PathSegmentsBuilder {

    /** Growth buffer; null until the first segment is added. */
    String[] segments;
    int size = 0;

    /** Appends one decoded segment, growing the buffer as needed. */
    void add(String segment) {
        if (segments == null) {
            segments = new String[4];
        } else if (size + 1 == segments.length) {
            String[] expanded = new String[segments.length * 2];
            System.arraycopy(segments, 0, expanded, 0, segments.length);
            segments = expanded;
        }

        segments[size++] = segment;
    }

    /**
     * Produces the built PathSegments and invalidates this builder.
     *
     * @return PathSegments.EMPTY when no segment was ever added
     */
    PathSegments build() {
        if (segments == null) {
            return PathSegments.EMPTY;
        }

        try {
            // Trim excess growth capacity. The raw array is exposed to
            // callers via getPathSegments(), so without this trim they
            // would observe trailing null slots beyond the real size.
            if (size != segments.length) {
                String[] exact = new String[size];
                System.arraycopy(segments, 0, exact, 0, size);
                segments = exact;
            }
            return new PathSegments(segments, size);
        } finally {
            // Makes sure this doesn't get reused.
            segments = null;
        }
    }
}
/**
 * Support for hierarchical URIs: derives user info, host, and port from
 * the encoded authority string.
 */
private abstract static class AbstractHierarchicalUri extends Uri {

    public String getLastPathSegment() {
        // TODO: If we haven't parsed all of the segments already, just
        // grab the last one directly so we only allocate one string.

        String[] segments = getPathSegments();
        int size = segments.length;
        if (size == 0) {
            return null;
        }
        return segments[size - 1];
    }

    // Lazily-created user info; idempotent, so unsynchronized.
    private Part userInfo;

    private Part getUserInfoPart() {
        return userInfo == null
                ? userInfo = Part.fromEncoded(parseUserInfo()) : userInfo;
    }

    public final String getEncodedUserInfo() {
        return getUserInfoPart().getEncoded();
    }

    private String parseUserInfo() {
        String authority = getEncodedAuthority();
        if (authority == null) {
            return null;
        }

        // Everything before the first '@' is the user info.
        int end = authority.indexOf('@');
        return end == NOT_FOUND ? null : authority.substring(0, end);
    }

    public String getUserInfo() {
        return getUserInfoPart().getDecoded();
    }

    /** Cached host; NOT_CACHED sentinel compared by identity. */
    private volatile String host = NOT_CACHED;

    public String getHost() {
        boolean cached = (host != NOT_CACHED);
        return cached ? host
                : (host = parseHost());
    }

    private String parseHost() {
        String authority = getEncodedAuthority();
        if (authority == null) {
            return null;
        }

        // Parse out user info and then port.
        // NOTE(review): the first ':' after the user info is assumed to
        // be the port separator, so bracketed IPv6 literals like
        // "[::1]:80" are not handled correctly here — confirm callers
        // never pass IPv6 authorities.
        int userInfoSeparator = authority.indexOf('@');
        int portSeparator = authority.indexOf(':', userInfoSeparator);

        String encodedHost = portSeparator == NOT_FOUND
                ? authority.substring(userInfoSeparator + 1)
                : authority.substring(userInfoSeparator + 1, portSeparator);

        return decode(encodedHost);
    }

    /** Cached port; NOT_CALCULATED until first use, -1 when absent/invalid. */
    private volatile int port = NOT_CALCULATED;

    public int getPort() {
        return port == NOT_CALCULATED
                ? port = parsePort()
                : port;
    }

    private int parsePort() {
        String authority = getEncodedAuthority();
        if (authority == null) {
            return -1;
        }

        // Make sure we look for the port separator *after* the user info
        // separator. We have URLs with a ':' in the user info.
        int userInfoSeparator = authority.indexOf('@');
        int portSeparator = authority.indexOf(':', userInfoSeparator);

        if (portSeparator == NOT_FOUND) {
            return -1;
        }

        String portString = decode(authority.substring(portSeparator + 1));
        try {
            return Integer.parseInt(portString);
        } catch (NumberFormatException e) {
            // Non-numeric port text is reported as "not present".
            return -1;
        }
    }
}
/**
 * Hierarchical Uri built from pre-split components (scheme, authority,
 * path, query, fragment). The string form is assembled lazily.
 */
private static class HierarchicalUri extends AbstractHierarchicalUri {

    /** Used in parcelling. */
    static final int TYPE_ID = 3;

    private final String scheme; // can be null
    private final Part authority;
    private final PathPart path;
    private final Part query;
    private final Part fragment;

    private HierarchicalUri(String scheme, Part authority, PathPart path,
            Part query, Part fragment) {
        this.scheme = scheme;
        // Normalize null components to the shared NULL parts so the
        // fields are never null.
        this.authority = Part.nonNull(authority);
        this.path = path == null ? PathPart.NULL : path;
        this.query = Part.nonNull(query);
        this.fragment = Part.nonNull(fragment);
    }

    public boolean isHierarchical() {
        return true;
    }

    public boolean isRelative() {
        return scheme == null;
    }

    public String getScheme() {
        return scheme;
    }

    // Lazily-assembled scheme-specific part; idempotent, so unsynchronized.
    private Part ssp;

    private Part getSsp() {
        return ssp == null
                ? ssp = Part.fromEncoded(makeSchemeSpecificPart()) : ssp;
    }

    public String getEncodedSchemeSpecificPart() {
        return getSsp().getEncoded();
    }

    public String getSchemeSpecificPart() {
        return getSsp().getDecoded();
    }

    /**
     * Creates the encoded scheme-specific part from its sub parts.
     */
    private String makeSchemeSpecificPart() {
        StringBuffer builder = new StringBuffer();
        appendSspTo(builder);
        return builder.toString();
    }

    /** Appends "//authority", path, and "?query" (as present) to builder. */
    private void appendSspTo(StringBuffer builder) {
        String encodedAuthority = authority.getEncoded();
        if (encodedAuthority != null) {
            // Even if the authority is "", we still want to append "//".
            builder.append("//").append(encodedAuthority);
        }

        String encodedPath = path.getEncoded();
        if (encodedPath != null) {
            builder.append(encodedPath);
        }

        if (!query.isEmpty()) {
            builder.append('?').append(query.getEncoded());
        }
    }

    public String getAuthority() {
        return this.authority.getDecoded();
    }

    public String getEncodedAuthority() {
        return this.authority.getEncoded();
    }

    public String getEncodedPath() {
        return this.path.getEncoded();
    }

    public String getPath() {
        return this.path.getDecoded();
    }

    public String getQuery() {
        return this.query.getDecoded();
    }

    public String getEncodedQuery() {
        return this.query.getEncoded();
    }

    public String getFragment() {
        return this.fragment.getDecoded();
    }

    public String getEncodedFragment() {
        return this.fragment.getEncoded();
    }

    public String[] getPathSegments() {
        return this.path.getPathSegments().segments;
    }

    /** Cached string form; NOT_CACHED sentinel compared by identity. */
    private volatile String uriString = NOT_CACHED;

    /**
     * {@inheritDoc}
     */
    public String toString() {
        boolean cached = (uriString != NOT_CACHED);
        return cached ? uriString
                : (uriString = makeUriString());
    }

    private String makeUriString() {
        StringBuffer builder = new StringBuffer();

        if (scheme != null) {
            builder.append(scheme).append(':');
        }

        appendSspTo(builder);

        if (!fragment.isEmpty()) {
            builder.append('#').append(fragment.getEncoded());
        }

        return builder.toString();
    }

    public Builder buildUpon() {
        return new Builder()
                .scheme(scheme)
                .authority(authority)
                .path(path)
                .query(query)
                .fragment(fragment);
    }
}
/**
 * Helper class for building or manipulating URI references. Not safe for
 * concurrent use.
 *
 * <p>An absolute hierarchical URI reference follows the pattern:
 * {@code <scheme>://<authority><absolute path>?<query>#<fragment>}
 *
 * <p>Relative URI references (which are always hierarchical) follow one
 * of two patterns: {@code <relative or absolute path>?<query>#<fragment>}
 * or {@code //<authority><absolute path>?<query>#<fragment>}
 *
 * <p>An opaque URI follows this pattern:
 * {@code <scheme>:<opaque part>#<fragment>}
 *
 * <p>Setting any hierarchical component (authority, path, query) clears
 * the opaque part, and vice versa: the last kind of component set wins.
 */
public static final class Builder {

    private String scheme;
    private Part opaquePart;
    private Part authority;
    private PathPart path;
    private Part query;
    private Part fragment;

    /**
     * Constructs a new Builder.
     */
    public Builder() {}

    /**
     * Sets the scheme.
     *
     * @param scheme name or {@code null} if this is a relative Uri
     */
    public Builder scheme(String scheme) {
        this.scheme = scheme;
        return this;
    }

    Builder opaquePart(Part opaquePart) {
        this.opaquePart = opaquePart;
        return this;
    }

    /**
     * Encodes and sets the given opaque scheme-specific-part.
     *
     * @param opaquePart decoded opaque part
     */
    public Builder opaquePart(String opaquePart) {
        return opaquePart(Part.fromDecoded(opaquePart));
    }

    /**
     * Sets the previously encoded opaque scheme-specific-part.
     *
     * @param opaquePart encoded opaque part
     */
    public Builder encodedOpaquePart(String opaquePart) {
        return opaquePart(Part.fromEncoded(opaquePart));
    }

    Builder authority(Part authority) {
        // This URI will be hierarchical.
        this.opaquePart = null;

        this.authority = authority;
        return this;
    }

    /**
     * Encodes and sets the authority.
     */
    public Builder authority(String authority) {
        return authority(Part.fromDecoded(authority));
    }

    /**
     * Sets the previously encoded authority.
     */
    public Builder encodedAuthority(String authority) {
        return authority(Part.fromEncoded(authority));
    }

    Builder path(PathPart path) {
        // This URI will be hierarchical.
        this.opaquePart = null;

        this.path = path;
        return this;
    }

    /**
     * Sets the path. Leaves '/' characters intact but encodes others as
     * necessary.
     *
     * <p>If the path is not null and doesn't start with a '/', and if
     * you specify a scheme and/or authority, the builder will prepend the
     * given path with a '/'.
     */
    public Builder path(String path) {
        return path(PathPart.fromDecoded(path));
    }

    /**
     * Sets the previously encoded path.
     *
     * <p>If the path is not null and doesn't start with a '/', and if
     * you specify a scheme and/or authority, the builder will prepend the
     * given path with a '/'.
     */
    public Builder encodedPath(String path) {
        return path(PathPart.fromEncoded(path));
    }

    /**
     * Encodes the given segment and appends it to the path.
     */
    public Builder appendPath(String newSegment) {
        return path(PathPart.appendDecodedSegment(path, newSegment));
    }

    /**
     * Appends the given segment to the path.
     */
    public Builder appendEncodedPath(String newSegment) {
        return path(PathPart.appendEncodedSegment(path, newSegment));
    }

    Builder query(Part query) {
        // This URI will be hierarchical.
        this.opaquePart = null;

        this.query = query;
        return this;
    }

    /**
     * Encodes and sets the query.
     */
    public Builder query(String query) {
        return query(Part.fromDecoded(query));
    }

    /**
     * Sets the previously encoded query.
     */
    public Builder encodedQuery(String query) {
        return query(Part.fromEncoded(query));
    }

    Builder fragment(Part fragment) {
        this.fragment = fragment;
        return this;
    }

    /**
     * Encodes and sets the fragment.
     */
    public Builder fragment(String fragment) {
        return fragment(Part.fromDecoded(fragment));
    }

    /**
     * Sets the previously encoded fragment.
     */
    public Builder encodedFragment(String fragment) {
        return fragment(Part.fromEncoded(fragment));
    }

    /**
     * Encodes the key and value and then appends the parameter to the
     * query string.
     *
     * @param key which will be encoded
     * @param value which will be encoded
     */
    public Builder appendQueryParameter(String key, String value) {
        // This URI will be hierarchical.
        this.opaquePart = null;

        String encodedParameter = encode(key, null) + "="
                + encode(value, null);

        if (query == null) {
            query = Part.fromEncoded(encodedParameter);
            return this;
        }

        // Append with '&' unless the existing query is empty.
        String oldQuery = query.getEncoded();
        if (oldQuery == null || oldQuery.length() == 0) {
            query = Part.fromEncoded(encodedParameter);
        } else {
            query = Part.fromEncoded(oldQuery + "&" + encodedParameter);
        }

        return this;
    }

    /**
     * Constructs a Uri with the current attributes.
     *
     * @throws UnsupportedOperationException if the URI is opaque and the
     *  scheme is null
     */
    public Uri build() {
        if (opaquePart != null) {
            if (this.scheme == null) {
                throw new UnsupportedOperationException(
                        "An opaque URI must have a scheme.");
            }

            return new OpaqueUri(scheme, opaquePart, fragment);
        } else {
            // Hierarchical URIs should not return null for getPath().
            PathPart path = this.path;
            if (path == null || path == PathPart.NULL) {
                path = PathPart.EMPTY;
            } else {
                // If we have a scheme and/or authority, the path must
                // be absolute. Prepend it with a '/' if necessary.
                if (hasSchemeOrAuthority()) {
                    path = PathPart.makeAbsolute(path);
                }
            }

            return new HierarchicalUri(
                    scheme, authority, path, query, fragment);
        }
    }

    private boolean hasSchemeOrAuthority() {
        return scheme != null
                || (authority != null && authority != Part.NULL);
    }

    /**
     * {@inheritDoc}
     */
    public String toString() {
        return build().toString();
    }
}
/**
 * Searches the query string for parameter values with the given key.
 *
 * @param key which will be encoded
 *
 * @throws UnsupportedOperationException if this isn't a hierarchical URI
 * @throws NullPointerException if key is null
 *
 * @return a list of decoded values
 */
public String[] getQueryParameters(String key) {
    if (isOpaque()) {
        throw new UnsupportedOperationException(NOT_HIERARCHICAL);
    }

    String query = getEncodedQuery();
    if (query == null) {
        return new String[0];
    }

    String encodedKey;
    try {
        encodedKey = URLEncoder.encode(key, DEFAULT_ENCODING);
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException("AssertionError: " + e);
    }

    // Prepending "&" makes every parameter, including the first, look
    // like "&key=value", so one search pattern covers all of them.
    String haystack = "&" + query;
    String pattern = "&" + encodedKey + "=";

    Vector values = new Vector();
    int searchFrom = 0;
    for (;;) {
        int hit = haystack.indexOf(pattern, searchFrom);
        if (hit == -1) {
            // No more values.
            break;
        }

        // The value runs from just past the pattern to the next '&'
        // (or the end of the query).
        int valueStart = hit + pattern.length();
        int valueEnd = haystack.indexOf('&', valueStart);
        if (valueEnd == -1) {
            valueEnd = haystack.length();
        }

        values.addElement(decode(haystack.substring(valueStart, valueEnd)));
        searchFrom = valueEnd;
    }

    String[] result = new String[values.size()];
    values.copyInto(result);
    return result;
}
/**
 * Searches the query string for the first value with the given key.
 *
 * @param key which will be encoded
 * @throws UnsupportedOperationException if this isn't a hierarchical URI
 * @throws NullPointerException if key is null
 *
 * @return the decoded value or null if no parameter is found
 */
public String getQueryParameter(String key) {
    if (isOpaque()) {
        throw new UnsupportedOperationException(NOT_HIERARCHICAL);
    }

    String query = getEncodedQuery();
    if (query == null) {
        return null;
    }

    String encodedKey;
    try {
        encodedKey = URLEncoder.encode(key, DEFAULT_ENCODING);
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException("AssertionError: " + e);
    }

    // Locate the start of the value: either the key opens the query
    // string, or it appears later preceded by a '&' separator.
    String prefix = encodedKey + "=";
    int start = -1;
    if (query.startsWith(prefix)) {
        start = prefix.length();
    } else {
        int hit = query.indexOf("&" + prefix);
        if (hit != -1) {
            start = hit + 1 + prefix.length();
        }
    }

    if (start == -1) {
        // Not found.
        return null;
    }

    // The value ends at the next '&' or at the end of the query.
    int end = query.indexOf('&', start);
    if (end == -1) {
        end = query.length();
    }

    return decode(query.substring(start, end));
}
/** Upper-case hex digits used when emitting '%'-escaped octets. */
private static final char[] HEX_DIGITS = "0123456789ABCDEF".toCharArray();

/**
 * Encodes characters in the given string as '%'-escaped octets
 * using the UTF-8 scheme. Leaves letters ("A-Z", "a-z"), numbers
 * ("0-9"), and unreserved characters ("_-!.~'()*") intact. Encodes
 * all other characters.
 *
 * @param s string to encode
 * @return an encoded version of s suitable for use as a URI component,
 *  or null if s is null
 */
public static String encode(String s) {
    return encode(s, null);
}
/**
 * Encodes characters in the given string as '%'-escaped octets
 * using the UTF-8 scheme. Leaves letters ("A-Z", "a-z"), numbers
 * ("0-9"), and unreserved characters ("_-!.~'()*") intact. Encodes
 * all other characters with the exception of those specified in the
 * allow argument.
 *
 * @param s string to encode
 * @param allow set of additional characters to allow in the encoded form,
 *  null if no characters should be skipped
 * @return an encoded version of s suitable for use as a URI component,
 *  or null if s is null
 */
public static String encode(String s, String allow) {
    if (s == null) {
        return null;
    }

    // Lazily-initialized buffers.
    StringBuffer encoded = null;

    int oldLength = s.length();

    // This loop alternates between copying over allowed characters and
    // encoding in chunks. This results in fewer method calls and
    // allocations than encoding one character at a time.
    int current = 0;
    while (current < oldLength) {
        // Start in "copying" mode where we copy over allowed chars.

        // Find the next character which needs to be encoded.
        int nextToEncode = current;
        while (nextToEncode < oldLength
                && isAllowed(s.charAt(nextToEncode), allow)) {
            nextToEncode++;
        }

        // If there's nothing more to encode...
        if (nextToEncode == oldLength) {
            if (current == 0) {
                // We didn't need to encode anything! Return the
                // original string unchanged.
                return s;
            } else {
                // Presumably, we've already done some encoding. Flush
                // the remaining allowed tail and finish.
                encoded.append(s.substring(current, oldLength));
                return encoded.toString();
            }
        }

        if (encoded == null) {
            encoded = new StringBuffer();
        }

        if (nextToEncode > current) {
            // Append allowed characters leading up to this point.
            encoded.append(s.substring(current, nextToEncode));
        } else {
            // assert nextToEncode == current
        }

        // Switch to "encoding" mode.

        // Find the next allowed character.
        current = nextToEncode;
        int nextAllowed = current + 1;
        while (nextAllowed < oldLength
                && !isAllowed(s.charAt(nextAllowed), allow)) {
            nextAllowed++;
        }

        // Convert the substring to bytes and encode the bytes as
        // '%'-escaped octets. Converting the whole disallowed run at
        // once keeps multi-char sequences encoded consistently.
        String toEncode = s.substring(current, nextAllowed);
        try {
            byte[] bytes = toEncode.getBytes(DEFAULT_ENCODING);
            int bytesLength = bytes.length;
            for (int i = 0; i < bytesLength; i++) {
                encoded.append('%');
                encoded.append(HEX_DIGITS[(bytes[i] & 0xf0) >> 4]);
                encoded.append(HEX_DIGITS[bytes[i] & 0xf]);
            }
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the platform; this cannot happen.
            throw new RuntimeException("AssertionError: " + e);
        }

        current = nextAllowed;
    }

    // Encoded could still be null at this point if s is empty.
    return encoded == null ? s : encoded.toString();
}
/**
* Returns true if the given character is allowed.
*
* @param c character to check
* @param allow characters to allow
* @return true if the character is allowed or false if it should be
* encoded
*/
private static boolean isAllowed(char c, String allow) {
return (c >= 'A' && c <= 'Z')
|| (c >= 'a' && c <= 'z')
|| (c >= '0' && c <= '9')
|| "_-!.~'()*".indexOf(c) != NOT_FOUND
|| (allow != null && allow.indexOf(c) != NOT_FOUND);
}
    /** Unicode replacement character: \\uFFFD. */
    // NOTE(review): 0xFF 0xFD is *not* the UTF-8 encoding of U+FFFD (that is
    // EF BF BD); decode() relies on the UTF-8 decoder mapping these malformed
    // bytes to replacement characters -- TODO confirm this is the intent.
    private static final byte[] REPLACEMENT = { (byte) 0xFF, (byte) 0xFD };
/**
* Decodes '%'-escaped octets in the given string using the UTF-8 scheme.
* Replaces invalid octets with the unicode replacement character
* ("\\uFFFD").
*
* @param s encoded string to decode
* @return the given string with escaped octets decoded, or null if
* s is null
*/
public static String decode(String s) {
/*
Compared to java.net.URLEncoderDecoder.decode(), this method decodes a
chunk at a time instead of one character at a time, and it doesn't
throw exceptions. It also only allocates memory when necessary--if
there's nothing to decode, this method won't do much.
*/
if (s == null) {
return null;
}
// Lazily-initialized buffers.
StringBuffer decoded = null;
ByteArrayOutputStream out = null;
int oldLength = s.length();
// This loop alternates between copying over normal characters and
// escaping in chunks. This results in fewer method calls and
// allocations than decoding one character at a time.
int current = 0;
while (current < oldLength) {
// Start in "copying" mode where we copy over normal characters.
// Find the next escape sequence.
int nextEscape = s.indexOf('%', current);
if (nextEscape == NOT_FOUND) {
if (decoded == null) {
// We didn't actually decode anything.
return s;
} else {
// Append the remainder and return the decoded string.
decoded.append(s.substring(current, oldLength));
return decoded.toString();
}
}
// Prepare buffers.
if (decoded == null) {
// Looks like we're going to need the buffers...
// We know the new string will be shorter. Using the old length
// may overshoot a bit, but it will save us from resizing the
// buffer.
decoded = new StringBuffer(oldLength);
out = new ByteArrayOutputStream(4);
} else {
// Clear decoding buffer.
out.reset();
}
// Append characters leading up to the escape.
if (nextEscape > current) {
decoded.append(s.substring(current, nextEscape));
current = nextEscape;
} else {
// assert current == nextEscape
}
// Switch to "decoding" mode where we decode a string of escape
// sequences.
// Decode and append escape sequences. Escape sequences look like
// "%ab" where % is literal and a and b are hex digits.
try {
do {
if (current + 2 >= oldLength) {
// Truncated escape sequence.
out.write(REPLACEMENT);
} else {
int a = Character.digit(s.charAt(current + 1), 16);
int b = Character.digit(s.charAt(current + 2), 16);
if (a == -1 || b == -1) {
// Non hex digits.
out.write(REPLACEMENT);
} else {
// Combine the hex digits into one byte and write.
out.write((a << 4) + b);
}
}
// Move passed the escape sequence.
current += 3;
} while (current < oldLength && s.charAt(current) == '%');
// Decode UTF-8 bytes into a string and append it.
decoded.append(new String(out.toByteArray(), DEFAULT_ENCODING));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("AssertionError: " + e);
} catch (IOException e) {
throw new RuntimeException("AssertionError: " + e);
}
}
// If we don't have a buffer, we didn't have to decode anything.
return decoded == null ? s : decoded.toString();
}
    /**
     * Support for part implementations.
     *
     * Holds the encoded and decoded forms of one URI part. Either field may
     * hold the NOT_CACHED sentinel (a distinguished String instance declared
     * elsewhere in this class) until that form has been computed; checks
     * against the sentinel use identity comparison (==/!=) on purpose.
     */
    static abstract class AbstractPart {
        /**
         * Enum which indicates which representation of a given part we have.
         */
        // NOTE(review): implemented as int constants rather than a Java enum;
        // no uses are visible in this chunk of the file.
        static class Representation {
            static final int BOTH = 0;
            static final int ENCODED = 1;
            static final int DECODED = 2;
        }
        // volatile so a lazily-computed form published by one thread is seen
        // by others; the value may be computed more than once under races.
        volatile String encoded;
        volatile String decoded;
        AbstractPart(String encoded, String decoded) {
            this.encoded = encoded;
            this.decoded = decoded;
        }
        /** Returns the encoded form, computing it from the decoded form if needed. */
        abstract String getEncoded();
        /** Returns the decoded form, computing (and caching) it from the encoded form if needed. */
        final String getDecoded() {
            // Identity comparison against the NOT_CACHED sentinel is deliberate.
            boolean hasDecoded = decoded != NOT_CACHED;
            return hasDecoded ? decoded : (decoded = decode(encoded));
        }
    }
    /**
     * Immutable wrapper of encoded and decoded versions of a URI part. Lazily
     * creates the encoded or decoded version from the other.
     *
     * Either field may hold the NOT_CACHED sentinel String until that form is
     * computed; sentinel checks use identity comparison on purpose.
     */
    static class Part extends AbstractPart {
        /** A part with null values. */
        static final Part NULL = new EmptyPart(null);
        /** A part with empty strings for values. */
        static final Part EMPTY = new EmptyPart("");
        private Part(String encoded, String decoded) {
            super(encoded, decoded);
        }
        /** False for regular parts; EmptyPart overrides this to return true. */
        boolean isEmpty() {
            return false;
        }
        /** Returns the encoded form, lazily computing and caching it from the decoded form. */
        String getEncoded() {
            // Identity comparison against the NOT_CACHED sentinel is deliberate.
            boolean hasEncoded = encoded != NOT_CACHED;
            return hasEncoded ? encoded : (encoded = encode(decoded));
        }
        /**
         * Returns given part or {@link #NULL} if the given part is null.
         */
        static Part nonNull(Part part) {
            return part == null ? NULL : part;
        }
        /**
         * Creates a part from the encoded string.
         *
         * @param encoded part string
         */
        static Part fromEncoded(String encoded) {
            return from(encoded, NOT_CACHED);
        }
        /**
         * Creates a part from the decoded string.
         *
         * @param decoded part string
         */
        static Part fromDecoded(String decoded) {
            return from(NOT_CACHED, decoded);
        }
        /**
         * Creates a part from the encoded and decoded strings.
         *
         * @param encoded part string
         * @param decoded part string
         */
        static Part from(String encoded, String decoded) {
            // We have to check both encoded and decoded in case one is
            // NOT_CACHED.
            // (NOT_CACHED is neither null nor empty, so a field holding the
            // sentinel falls through its pair of checks here.)
            if (encoded == null) {
                return NULL;
            }
            if (encoded.length() == 0) {
                return EMPTY;
            }
            if (decoded == null) {
                return NULL;
            }
            if (decoded .length() == 0) {
                return EMPTY;
            }
            return new Part(encoded, decoded);
        }
        /** A Part whose encoded and decoded forms are identical (null or ""). */
        private static class EmptyPart extends Part {
            public EmptyPart(String value) {
                super(value, value);
            }
            /**
             * {@inheritDoc}
             */
            boolean isEmpty() {
                return true;
            }
        }
    }
    /**
     * Immutable wrapper of encoded and decoded versions of a path part. Lazily
     * creates the encoded or decoded version from the other.
     *
     * As with Part, fields may hold the NOT_CACHED sentinel String until the
     * corresponding form is computed; sentinel checks use identity comparison.
     */
    static class PathPart extends AbstractPart {
        /** A part with null values. */
        static final PathPart NULL = new PathPart(null, null);
        /** A part with empty strings for values. */
        static final PathPart EMPTY = new PathPart("", "");
        private PathPart(String encoded, String decoded) {
            super(encoded, decoded);
        }
        /** Returns the encoded path, lazily computed; '/' separators are left unescaped. */
        String getEncoded() {
            boolean hasEncoded = encoded != NOT_CACHED;
            // Don't encode '/'.
            return hasEncoded ? encoded : (encoded = encode(decoded, "/"));
        }
        /**
         * Cached path segments. This doesn't need to be volatile--we don't
         * care if other threads see the result.
         */
        private PathSegments pathSegments;
        /**
         * Gets the individual path segments. Parses them if necessary.
         *
         * @return parsed path segments or null if this isn't a hierarchical
         *  URI
         */
        // NOTE(review): despite the javadoc above, this implementation never
        // returns null -- a null path yields PathSegments.EMPTY instead.
        PathSegments getPathSegments() {
            if (pathSegments != null) {
                return pathSegments;
            }
            String path = getEncoded();
            if (path == null) {
                return pathSegments = PathSegments.EMPTY;
            }
            PathSegmentsBuilder segmentBuilder = new PathSegmentsBuilder();
            int previous = 0;
            int current;
            while ((current = path.indexOf('/', previous)) > -1) {
                // This check keeps us from adding a segment if the path starts
                // '/' and an empty segment for "//".
                if (previous < current) {
                    // Segments are stored in decoded form.
                    String decodedSegment
                            = decode(path.substring(previous, current));
                    segmentBuilder.add(decodedSegment);
                }
                previous = current + 1;
            }
            // Add in the final path segment.
            if (previous < path.length()) {
                segmentBuilder.add(decode(path.substring(previous)));
            }
            return pathSegments = segmentBuilder.build();
        }
        /**
         * Appends an already-encoded segment to oldPart's encoded path,
         * inserting a '/' separator unless one is already present.
         */
        static PathPart appendEncodedSegment(PathPart oldPart,
                String newSegment) {
            // If there is no old path, should we make the new path relative
            // or absolute? I pick absolute.
            if (oldPart == null) {
                // No old path.
                return fromEncoded("/" + newSegment);
            }
            String oldPath = oldPart.getEncoded();
            if (oldPath == null) {
                oldPath = "";
            }
            int oldPathLength = oldPath.length();
            String newPath;
            if (oldPathLength == 0) {
                // No old path.
                newPath = "/" + newSegment;
            } else if (oldPath.charAt(oldPathLength - 1) == '/') {
                newPath = oldPath + newSegment;
            } else {
                newPath = oldPath + "/" + newSegment;
            }
            return fromEncoded(newPath);
        }
        /** Encodes the given decoded segment and appends it to oldPart's path. */
        static PathPart appendDecodedSegment(PathPart oldPart, String decoded) {
            String encoded = encode(decoded);
            // TODO: Should we reuse old PathSegments? Probably not.
            return appendEncodedSegment(oldPart, encoded);
        }
        /**
         * Creates a path from the encoded string.
         *
         * @param encoded part string
         */
        static PathPart fromEncoded(String encoded) {
            return from(encoded, NOT_CACHED);
        }
        /**
         * Creates a path from the decoded string.
         *
         * @param decoded part string
         */
        static PathPart fromDecoded(String decoded) {
            return from(NOT_CACHED, decoded);
        }
        /**
         * Creates a path from the encoded and decoded strings.
         *
         * @param encoded part string
         * @param decoded part string
         */
        // NOTE(review): unlike Part.from, only the encoded string is checked
        // here; presumably because fromDecoded is never passed null/"" by
        // callers -- TODO confirm.
        static PathPart from(String encoded, String decoded) {
            if (encoded == null) {
                return NULL;
            }
            if (encoded.length() == 0) {
                return EMPTY;
            }
            return new PathPart(encoded, decoded);
        }
        /**
         * Prepends path values with "/" if they're present, not empty, and
         * they don't already start with "/".
         */
        static PathPart makeAbsolute(PathPart oldPart) {
            // Identity comparison against the NOT_CACHED sentinel is deliberate.
            boolean encodedCached = oldPart.encoded != NOT_CACHED;
            // We don't care which version we use, and we don't want to force
            // unneccessary encoding/decoding.
            String oldPath = encodedCached ? oldPart.encoded : oldPart.decoded;
            if (oldPath == null || oldPath.length() == 0
                    || oldPath.startsWith("/")) {
                return oldPart;
            }
            // Prepend encoded string if present.
            String newEncoded = encodedCached
                    ? "/" + oldPart.encoded : NOT_CACHED;
            // Prepend decoded string if present.
            boolean decodedCached = oldPart.decoded != NOT_CACHED;
            String newDecoded = decodedCached
                    ? "/" + oldPart.decoded
                    : NOT_CACHED;
            return new PathPart(newEncoded, newDecoded);
        }
    }
/**
* Creates a new Uri by appending an already-encoded path segment to a
* base Uri.
*
* @param baseUri Uri to append path segment to
* @param pathSegment encoded path segment to append
* @return a new Uri based on baseUri with the given segment appended to
* the path
* @throws NullPointerException if baseUri is null
*/
public static Uri withAppendedPath(Uri baseUri, String pathSegment) {
Builder builder = baseUri.buildUpon();
builder = builder.appendEncodedPath(pathSegment);
return builder.build();
}
}
| apache-2.0 |
pivotal-amurmann/geode | geode-core/src/main/java/org/apache/geode/distributed/internal/membership/MemberServices.java | 3644 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal.membership;
import org.apache.geode.distributed.internal.DMStats;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.distributed.internal.LocatorStats;
import org.apache.geode.distributed.internal.membership.gms.NetLocator;
import org.apache.geode.internal.admin.remote.RemoteTransportConfig;
import org.apache.geode.internal.security.SecurityService;
import java.io.File;
import java.net.InetAddress;
/**
 * This is the SPI for a provider of membership services. Implementations
 * create NetMember identities, membership managers and locator handlers for
 * the distributed system.
 *
 * @see org.apache.geode.distributed.internal.membership.NetMember
 */
public interface MemberServices {
  // Interface members are implicitly public and abstract (JLS 9.4); the
  // redundant modifiers of the original declarations have been removed.

  /**
   * Return a new NetMember, possibly for a different host
   *
   * @param i the name of the host for the specified NetMember, the current host (hopefully) if
   *        there are any problems.
   * @param port the membership port
   * @param splitBrainEnabled whether the member has this feature enabled
   * @param canBeCoordinator whether the member can be membership coordinator
   * @param payload the payload to be associated with the resulting object
   * @param version the member's version ordinal -- TODO(review): confirm exact semantics
   * @return the new NetMember
   */
  NetMember newNetMember(InetAddress i, int port, boolean splitBrainEnabled,
      boolean canBeCoordinator, MemberAttributes payload, short version);

  /**
   * Return a new NetMember representing current host
   *
   * @param i an InetAddress referring to the current host
   * @param port the membership port being used
   *
   * @return the new NetMember
   */
  NetMember newNetMember(InetAddress i, int port);

  /**
   * Return a new NetMember representing current host
   *
   * @param s a String referring to the current host
   * @param p the membership port being used
   * @return the new member
   */
  NetMember newNetMember(String s, int p);

  /**
   * Create a new MembershipManager
   *
   * @param listener the listener to notify for callbacks
   * @param config the distribution configuration used to configure the manager
   * @param transport holds configuration information that can be used by the manager to configure
   *        itself
   * @param stats a gemfire statistics collection object for communications stats
   * @param securityService used for authentication of joining members
   *
   * @return a MembershipManager
   */
  MembershipManager newMembershipManager(DistributedMembershipListener listener,
      DistributionConfig config, RemoteTransportConfig transport, DMStats stats,
      SecurityService securityService);

  /**
   * Create the peer location TcpHandler. Currently this is invoked by tests, but it ought to be
   * used by InternalLocator as well.
   *
   * @param bindAddress the address the locator binds to
   * @param stateFile file in which locator state is persisted/recovered
   * @param locatorString the configured locator list
   * @param usePreferredCoordinators whether locators are preferred as membership coordinators
   * @param networkPartitionDetectionEnabled whether network partition detection is enabled
   * @param stats locator statistics holder
   * @param securityUDPDHAlgo Diffie-Hellman algorithm name for secure UDP, if any
   * @return the new NetLocator
   */
  NetLocator newLocatorHandler(InetAddress bindAddress, File stateFile,
      String locatorString, boolean usePreferredCoordinators,
      boolean networkPartitionDetectionEnabled, LocatorStats stats, String securityUDPDHAlgo);
}
| apache-2.0 |
khuxtable/seaglass | src/main/java/com/seaglasslookandfeel/ui/SeaGlassTextPaneUI.java | 6509 | /*
* Copyright (c) 2009 Kathryn Huxtable and Kenneth Orr.
*
* This file is part of the SeaGlass Pluggable Look and Feel.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id: org.eclipse.jdt.ui.prefs 172 2009-10-06 18:31:12Z kathryn@kathrynhuxtable.org $
*/
package com.seaglasslookandfeel.ui;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.beans.PropertyChangeEvent;
import javax.swing.JComponent;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.synth.SynthContext;
import javax.swing.text.Style;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyleContext;
import javax.swing.text.StyledDocument;
import com.seaglasslookandfeel.SeaGlassContext;
/**
* SeaGlass TextPaneUI delegate.
*
* Based on SynthTextPaneUI by Georges Saab and David Karlton.
*
* The only reason this exists is that we had to modify SynthTextPaneUI.
*
* @see javax.swing.plaf.synth.SynthTextPaneUI
*/
public class SeaGlassTextPaneUI extends SeaGlassEditorPaneUI {
    /**
     * Creates a UI for the JTextPane.
     *
     * @param c the JTextPane object (not consulted; a fresh delegate is
     *          returned for every component)
     * @return the UI object
     */
    public static ComponentUI createUI(JComponent c) {
        return new SeaGlassTextPaneUI();
    }
    /**
     * Fetches the name used as a key to lookup properties through the
     * UIManager. This is used as a prefix to all the standard
     * text properties (e.g. "TextPane.font", "TextPane.caretForeground").
     *
     * @return the name ("TextPane")
     */
    @Override
    protected String getPropertyPrefix() {
        return "TextPane";
    }
/**
* Installs the UI for a component. This does the following
* things.
* <ol>
* <li>
* Sets opaqueness of the associated component according to its style,
* if the opaque property has not already been set by the client program.
* <li>
* Installs the default caret and highlighter into the
* associated component. These properties are only set if their
* current value is either {@code null} or an instance of
* {@link UIResource}.
* <li>
* Attaches to the editor and model. If there is no
* model, a default one is created.
* <li>
* Creates the view factory and the view hierarchy used
* to represent the model.
* </ol>
*
* @param c the editor component
* @see javax.swing.plaf.basic.BasicTextUI#installUI
* @see ComponentUI#installUI
*/
@Override
public void installUI(JComponent c) {
super.installUI(c);
updateForeground(c.getForeground());
updateFont(c.getFont());
}
/**
* This method gets called when a bound property is changed
* on the associated JTextComponent. This is a hook
* which UI implementations may change to reflect how the
* UI displays bound properties of JTextComponent subclasses.
* If the font, foreground or document has changed, the
* the appropriate property is set in the default style of
* the document.
*
* @param evt the property change event
*/
@Override
protected void propertyChange(PropertyChangeEvent evt) {
super.propertyChange(evt);
String name = evt.getPropertyName();
if (name.equals("foreground")) {
updateForeground((Color)evt.getNewValue());
} else if (name.equals("font")) {
updateFont((Font)evt.getNewValue());
} else if (name.equals("document")) {
JComponent comp = getComponent();
updateForeground(comp.getForeground());
updateFont(comp.getFont());
}
}
/**
* Update the color in the default style of the document.
*
* @param color the new color to use or null to remove the color attribute
* from the document's style
*/
private void updateForeground(Color color) {
StyledDocument doc = (StyledDocument)getComponent().getDocument();
Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);
if (style == null) {
return;
}
if (color == null) {
style.removeAttribute(StyleConstants.Foreground);
} else {
StyleConstants.setForeground(style, color);
}
}
/**
* Update the font in the default style of the document.
*
* @param font the new font to use or null to remove the font attribute
* from the document's style
*/
private void updateFont(Font font) {
StyledDocument doc = (StyledDocument)getComponent().getDocument();
Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);
if (style == null) {
return;
}
if (font == null) {
style.removeAttribute(StyleConstants.FontFamily);
style.removeAttribute(StyleConstants.FontSize);
style.removeAttribute(StyleConstants.Bold);
style.removeAttribute(StyleConstants.Italic);
} else {
StyleConstants.setFontFamily(style, font.getName());
StyleConstants.setFontSize(style, font.getSize());
StyleConstants.setBold(style, font.isBold());
StyleConstants.setItalic(style, font.isItalic());
}
}
    /**
     * Paints the text pane's background over the component's full bounds by
     * delegating to the SeaGlass painter attached to the synth context.
     */
    @Override
    void paintBackground(SynthContext context, Graphics g, JComponent c) {
        ((SeaGlassContext)context).getPainter().paintTextPaneBackground(context, g, 0, 0,
                                                                        c.getWidth(), c.getHeight());
    }
    /**
     * Paints the text pane's border by delegating to the SeaGlass painter
     * attached to the synth context.
     *
     * @param context the synth context for the component being painted
     * @param g       the graphics to paint on
     * @param x       x origin of the border area
     * @param y       y origin of the border area
     * @param w       width of the border area
     * @param h       height of the border area
     */
    @Override
    public void paintBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        ((SeaGlassContext)context).getPainter().paintTextPaneBorder(context, g, x, y, w, h);
    }
| apache-2.0 |
eric-stanley/qalingo-engine | apis/api-core/api-core-common/src/main/java/org/hoteia/qalingo/core/service/pojo/CustomerPojoService.java | 3861 | /**
* Most of the code in the Qalingo project is copyrighted Hoteia and licensed
* under the Apache License Version 2.0 (release version 0.8.0)
* http://www.apache.org/licenses/LICENSE-2.0
*
* Copyright (c) Hoteia, 2012-2014
* http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com
*
*/
package org.hoteia.qalingo.core.service.pojo;
import java.util.List;
import java.util.Set;
import org.dozer.Mapper;
import org.hoteia.qalingo.core.domain.Customer;
import org.hoteia.qalingo.core.domain.CustomerMarketArea;
import org.hoteia.qalingo.core.domain.CustomerWishlist;
import org.hoteia.qalingo.core.domain.MarketArea;
import org.hoteia.qalingo.core.pojo.customer.CustomerPojo;
import org.hoteia.qalingo.core.pojo.customer.CustomerWishlistPojo;
import org.hoteia.qalingo.core.pojo.util.mapper.PojoUtil;
import org.hoteia.qalingo.core.service.CustomerService;
import org.hoteia.qalingo.core.service.MarketService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service("customerPojoService")
@Transactional(readOnly = true)
public class CustomerPojoService {
private final Logger logger = LoggerFactory.getLogger(getClass());
@Autowired
private Mapper dozerBeanMapper;
@Autowired
protected MarketService marketService;
@Autowired
private CustomerService customerService;
public List<CustomerPojo> getAllCustomers() {
List<Customer> customers = customerService.findCustomers();
logger.debug("Found {} customers", customers.size());
return PojoUtil.mapAll(dozerBeanMapper, customers, CustomerPojo.class);
}
public CustomerPojo getCustomerById(final String id) {
Customer customer = customerService.getCustomerById(id);
logger.debug("Found customer {} for id {}", customer, id);
return customer == null ? null : dozerBeanMapper.map(customer, CustomerPojo.class);
}
public CustomerPojo getCustomerByLoginOrEmail(final String usernameOrEmail) {
Customer customer = customerService.getCustomerByLoginOrEmail(usernameOrEmail);
logger.debug("Found customer {} for usernameOrEmail {}", customer, usernameOrEmail);
return customer == null ? null : dozerBeanMapper.map(customer, CustomerPojo.class);
}
public CustomerPojo getCustomerByPermalink(final String permalink) {
Customer customer = customerService.getCustomerByPermalink(permalink);
logger.debug("Found customer {} for usernameOrEmail {}", customer, permalink);
return customer == null ? null : dozerBeanMapper.map(customer, CustomerPojo.class);
}
@Transactional
public void saveOrUpdate(final CustomerPojo customerJsonPojo) throws Exception {
Customer customer = dozerBeanMapper.map(customerJsonPojo, Customer.class);
logger.info("Saving customer {}", customer);
customerService.saveOrUpdateCustomer(customer);
}
public List<CustomerWishlistPojo> getWishlist(final Customer customer, final MarketArea marketArea) {
final CustomerMarketArea customerMarketArea = customer.getCurrentCustomerMarketArea(marketArea.getId());
Set<CustomerWishlist> wishlistProducts = customerMarketArea.getWishlistProducts();
List<CustomerWishlistPojo> wishlists = PojoUtil.mapAll(dozerBeanMapper, wishlistProducts, CustomerWishlistPojo.class);
return wishlists;
}
    /**
     * Adds the given product sku (within the given catalog category) to the
     * customer's wishlist; pure delegation to the customer service.
     *
     * @throws Exception propagated from the underlying service call
     */
    public void addProductSkuToWishlist(MarketArea marketArea, Customer customer, String catalogCategoryCode, String productSkuCode) throws Exception {
        customerService.addProductSkuToWishlist(marketArea, customer, catalogCategoryCode, productSkuCode);
    }
} | apache-2.0 |
roberthafner/flowable-engine | modules/flowable-engine/src/main/java/org/activiti/engine/impl/context/ExecutionContext.java | 1626 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.context;
import org.activiti.engine.impl.persistence.entity.DeploymentEntity;
import org.activiti.engine.impl.persistence.entity.ExecutionEntity;
import org.activiti.engine.impl.util.ProcessDefinitionUtil;
import org.activiti.engine.repository.ProcessDefinition;
/**
 * Gives convenient access to the entities related to the execution currently
 * being handled: the execution itself, its process instance, the process
 * definition and the deployment the definition originates from.
 *
 * @author Tom Baeyens
 */
public class ExecutionContext {

    protected ExecutionEntity execution;

    public ExecutionContext(ExecutionEntity execution) {
        this.execution = execution;
    }

    /** The execution this context wraps. */
    public ExecutionEntity getExecution() {
        return execution;
    }

    /** The process instance the wrapped execution belongs to. */
    public ExecutionEntity getProcessInstance() {
        return execution.getProcessInstance();
    }

    /** The process definition of the wrapped execution. */
    public ProcessDefinition getProcessDefinition() {
        return ProcessDefinitionUtil.getProcessDefinition(execution.getProcessDefinitionId());
    }

    /** Resolves the deployment containing the execution's process definition. */
    public DeploymentEntity getDeployment() {
        final String deploymentId = getProcessDefinition().getDeploymentId();
        return Context.getCommandContext().getDeploymentEntityManager().findById(deploymentId);
    }
}
| apache-2.0 |
scalanlp/nak | src/main/java/nak/liblinear/Tron.java | 7591 | package nak.liblinear;
import static nak.liblinear.Linear.info;
/**
 * Trust Region Newton Method optimization
 *
 * Minimizes the objective supplied by {@code Function} (value, gradient and
 * Hessian-vector products) with a trust region Newton method; the inner
 * Newton systems are solved approximately by conjugate gradient restricted
 * to the trust region (see trcg).
 */
class Tron {

    // Objective: provides fun(), grad() and Hessian-vector products Hv().
    private final Function fun_obj;
    // Stopping tolerance: iteration ends once |g| <= eps * |g_initial|.
    private final double eps;
    // Upper bound on the number of outer (trust region) iterations.
    private final int max_iter;

    public Tron( final Function fun_obj ) {
        this(fun_obj, 0.1);
    }

    public Tron( final Function fun_obj, double eps ) {
        this(fun_obj, eps, 1000);
    }

    public Tron( final Function fun_obj, double eps, int max_iter ) {
        this.fun_obj = fun_obj;
        this.eps = eps;
        this.max_iter = max_iter;
    }

    /**
     * Runs the optimization and writes the solution into w. The vector is
     * zero-initialized here, so any incoming content of w is ignored.
     *
     * @param w output weight vector; length must be fun_obj.get_nr_variable()
     */
    void tron(double[] w) {
        // Parameters for updating the iterates.
        double eta0 = 1e-4, eta1 = 0.25, eta2 = 0.75;
        // Parameters for updating the trust region size delta.
        double sigma1 = 0.25, sigma2 = 0.5, sigma3 = 4;
        int n = fun_obj.get_nr_variable();
        int i, cg_iter;
        double delta, snorm, one = 1.0;
        double alpha, f, fnew, prered, actred, gs;
        int search = 1, iter = 1;
        double[] s = new double[n];
        double[] r = new double[n];
        double[] w_new = new double[n];
        double[] g = new double[n];
        // Start from the zero vector.
        for (i = 0; i < n; i++)
            w[i] = 0;
        f = fun_obj.fun(w);
        fun_obj.grad(w, g);
        // Initial trust region radius equals the initial gradient norm.
        delta = euclideanNorm(g);
        double gnorm1 = delta;
        double gnorm = gnorm1;
        if (gnorm <= eps * gnorm1) search = 0;
        iter = 1;
        while (iter <= max_iter && search != 0) {
            // Approximately solve the Newton system within the trust region;
            // s is the step, r the CG residual.
            cg_iter = trcg(delta, g, s, r);
            System.arraycopy(w, 0, w_new, 0, n);
            daxpy(one, s, w_new);
            gs = dot(g, s);
            // Predicted reduction of the quadratic model.
            prered = -0.5 * (gs - dot(s, r));
            fnew = fun_obj.fun(w_new);
            // Compute the actual reduction.
            actred = f - fnew;
            // On the first iteration, adjust the initial step bound.
            snorm = euclideanNorm(s);
            if (iter == 1) delta = Math.min(delta, snorm);
            // Compute prediction alpha*snorm of the step.
            if (fnew - f - gs <= 0)
                alpha = sigma3;
            else
                alpha = Math.max(sigma1, -0.5 * (gs / (fnew - f - gs)));
            // Update the trust region bound according to the ratio of actual to
            // predicted reduction.
            if (actred < eta0 * prered)
                delta = Math.min(Math.max(alpha, sigma1) * snorm, sigma2 * delta);
            else if (actred < eta1 * prered)
                delta = Math.max(sigma1 * delta, Math.min(alpha * snorm, sigma2 * delta));
            else if (actred < eta2 * prered)
                delta = Math.max(sigma1 * delta, Math.min(alpha * snorm, sigma3 * delta));
            else
                delta = Math.max(delta, Math.min(alpha * snorm, sigma3 * delta));
            info("iter %2d act %5.3e pre %5.3e delta %5.3e f %5.3e |g| %5.3e CG %3d%n", iter, actred, prered, delta, f, gnorm, cg_iter);
            // Accept the step only if it achieved a sufficient fraction of the
            // predicted reduction; otherwise retry with the shrunken region.
            if (actred > eta0 * prered) {
                iter++;
                System.arraycopy(w_new, 0, w, 0, n);
                f = fnew;
                fun_obj.grad(w, g);
                gnorm = euclideanNorm(g);
                if (gnorm <= eps * gnorm1) break;
            }
            if (f < -1.0e+32) {
                info("WARNING: f < -1.0e+32%n");
                break;
            }
            if (Math.abs(actred) <= 0 && prered <= 0) {
                info("WARNING: actred and prered <= 0%n");
                break;
            }
            if (Math.abs(actred) <= 1.0e-12 * Math.abs(f) && Math.abs(prered) <= 1.0e-12 * Math.abs(f)) {
                info("WARNING: actred and prered too small%n");
                break;
            }
        }
    }

    /**
     * Conjugate gradient solve of the trust region subproblem: computes a
     * step s with |s| <= delta (truncated at the boundary if CG would leave
     * the region) and the corresponding residual r.
     *
     * @return the number of CG iterations performed
     */
    private int trcg(double delta, double[] g, double[] s, double[] r) {
        int n = fun_obj.get_nr_variable();
        double one = 1;
        double[] d = new double[n];
        double[] Hd = new double[n];
        double rTr, rnewTrnew, cgtol;
        // s = 0, r = d = -g.
        for (int i = 0; i < n; i++) {
            s[i] = 0;
            r[i] = -g[i];
            d[i] = r[i];
        }
        cgtol = 0.1 * euclideanNorm(g);
        int cg_iter = 0;
        rTr = dot(r, r);
        while (true) {
            if (euclideanNorm(r) <= cgtol) break;
            cg_iter++;
            fun_obj.Hv(d, Hd);
            double alpha = rTr / dot(d, Hd);
            daxpy(alpha, d, s);
            if (euclideanNorm(s) > delta) {
                info("cg reaches trust region boundary%n");
                // Back the step out, then advance exactly to the boundary by
                // solving |s + alpha*d| = delta for alpha.
                alpha = -alpha;
                daxpy(alpha, d, s);
                double std = dot(s, d);
                double sts = dot(s, s);
                double dtd = dot(d, d);
                double dsq = delta * delta;
                double rad = Math.sqrt(std * std + dtd * (dsq - sts));
                if (std >= 0)
                    alpha = (dsq - sts) / (std + rad);
                else
                    alpha = (rad - std) / dtd;
                daxpy(alpha, d, s);
                alpha = -alpha;
                daxpy(alpha, Hd, r);
                break;
            }
            alpha = -alpha;
            daxpy(alpha, Hd, r);
            rnewTrnew = dot(r, r);
            double beta = rnewTrnew / rTr;
            scale(beta, d);
            daxpy(one, r, d);
            rTr = rnewTrnew;
        }
        return (cg_iter);
    }

    /**
     * constant times a vector plus a vector
     *
     * <pre>
     * vector2 += constant * vector1
     * </pre>
     *
     * @since 1.8
     */
    private static void daxpy(double constant, double vector1[], double vector2[]) {
        if (constant == 0) return;
        assert vector1.length == vector2.length;
        for (int i = 0; i < vector1.length; i++) {
            vector2[i] += constant * vector1[i];
        }
    }

    /**
     * returns the dot product of two vectors
     *
     * @since 1.8
     */
    private static double dot(double vector1[], double vector2[]) {
        double product = 0;
        assert vector1.length == vector2.length;
        for (int i = 0; i < vector1.length; i++) {
            product += vector1[i] * vector2[i];
        }
        return product;
    }

    /**
     * returns the euclidean norm of a vector
     *
     * @since 1.8
     */
    private static double euclideanNorm(double vector[]) {
        int n = vector.length;
        if (n < 1) {
            return 0;
        }
        if (n == 1) {
            return Math.abs(vector[0]);
        }
        // this algorithm is (often) more accurate than just summing up the squares and taking the square-root afterwards
        double scale = 0; // scaling factor that is factored out
        double sum = 1; // basic sum of squares from which scale has been factored out
        for (int i = 0; i < n; i++) {
            if (vector[i] != 0) {
                double abs = Math.abs(vector[i]);
                // try to get the best scaling factor
                if (scale < abs) {
                    double t = scale / abs;
                    sum = 1 + sum * (t * t);
                    scale = abs;
                } else {
                    double t = abs / scale;
                    sum += t * t;
                }
            }
        }
        return scale * Math.sqrt(sum);
    }

    /**
     * scales a vector by a constant
     *
     * @since 1.8
     */
    private static void scale(double constant, double vector[]) {
        if (constant == 1.0) return;
        for (int i = 0; i < vector.length; i++) {
            vector[i] *= constant;
        }
    }
}
| apache-2.0 |
paplorinc/intellij-community | xml/xml-psi-impl/src/com/intellij/psi/impl/source/xml/XmlTagValueImpl.java | 5970 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.xml;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.XmlElementFactory;
import com.intellij.psi.impl.source.xml.behavior.DefaultXmlPsiPolicy;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.xml.*;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Default {@link XmlTagValue} implementation backed by a fixed snapshot of the
 * tag's body children, with lazily computed caches for the text sub-elements,
 * the raw concatenated text and the trimmed, unescaped text.
 */
public class XmlTagValueImpl implements XmlTagValue{
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.xml.XmlTagValueImpl");

  private final XmlTag myTag;
  private final XmlTagChild[] myElements;

  // Lazily computed caches. Declared volatile so a value published by one
  // thread is visible to others; the benign race only causes an idempotent
  // recomputation, never an inconsistent result.
  private volatile XmlText[] myTextElements;
  private volatile String myText;
  private volatile String myTrimmedText;

  public XmlTagValueImpl(@NotNull XmlTagChild[] bodyElements, @NotNull XmlTag tag) {
    myTag = tag;
    myElements = bodyElements;
  }

  @Override
  @NotNull
  public XmlTagChild[] getChildren() {
    return myElements;
  }

  /**
   * @return the {@link XmlText} children among the body elements; cached after
   * the first call.
   */
  @Override
  @NotNull
  public XmlText[] getTextElements() {
    XmlText[] textElements = myTextElements;
    if (textElements == null) {
      textElements = Arrays.stream(myElements)
        .filter(element -> element instanceof XmlText)
        .map(element -> (XmlText)element).toArray(XmlText[]::new);
      myTextElements = textElements = textElements.length == 0 ? XmlText.EMPTY_ARRAY : textElements;
    }
    return textElements;
  }

  /**
   * @return the raw text of all body elements concatenated, markup included;
   * cached after the first call.
   */
  @Override
  @NotNull
  public String getText() {
    String text = myText;
    if (text == null) {
      final StringBuilder consolidatedText = new StringBuilder();
      for (final XmlTagChild element : myElements) {
        consolidatedText.append(element.getText());
      }
      myText = text = consolidatedText.toString();
    }
    return text;
  }

  /**
   * @return the text range covered by the body; for an empty body, an empty
   * range right after the start tag's closing {@code >} (falling back to the
   * tag's end offset when no start-tag end token is found).
   */
  @Override
  @NotNull
  public TextRange getTextRange() {
    if(myElements.length == 0){
      final ASTNode child = XmlChildRole.START_TAG_END_FINDER.findChild( (ASTNode)myTag);
      if(child != null)
        return new TextRange(child.getStartOffset() + 1, child.getStartOffset() + 1);
      return new TextRange(myTag.getTextRange().getEndOffset(), myTag.getTextRange().getEndOffset());
    }
    return new TextRange(myElements[0].getTextRange().getStartOffset(), myElements[myElements.length - 1].getTextRange().getEndOffset());
  }

  /**
   * @return the unescaped values of all text elements concatenated and
   * trimmed; cached after the first call.
   */
  @Override
  @NotNull
  public String getTrimmedText() {
    String trimmedText = myTrimmedText;
    if (trimmedText == null) {
      final StringBuilder consolidatedText = new StringBuilder();
      final XmlText[] textElements = getTextElements();
      for (final XmlText textElement : textElements) {
        consolidatedText.append(textElement.getValue());
      }
      myTrimmedText = trimmedText = consolidatedText.toString().trim();
    }
    return trimmedText;
  }

  @Override
  public void setText(String value) {
    setText(value, false);
  }

  @Override
  public void setEscapedText(String value) {
    setText(value, true);
  }

  /**
   * Replaces the tag's body with the given text: reuses (or creates) the first
   * text element, sets its value, then deletes all other body children.
   *
   * @param value the new body text; {@code null} or empty clears the body
   * @param defaultPolicy when {@code true}, bypass the language-specific PSI
   * policy and store the value via {@link DefaultXmlPsiPolicy} (escaped text)
   */
  private void setText(String value, boolean defaultPolicy) {
    try {
      XmlText text = null;
      if (value != null) {
        final XmlText[] texts = getTextElements();
        if (texts.length == 0) {
          // no text child yet: insert a placeholder to obtain one
          text = (XmlText)myTag.add(XmlElementFactory.getInstance(myTag.getProject()).createDisplayText("x"));
        } else {
          text = texts[0];
        }
        if (StringUtil.isEmpty(value)) {
          text.delete();
        }
        else {
          if (defaultPolicy && text instanceof XmlTextImpl) {
            ((XmlTextImpl)text).doSetValue(value, new DefaultXmlPsiPolicy());
          } else {
            text.setValue(value);
          }
        }
      }

      // remove every former body child except the (possibly reused) text node
      if(myElements.length > 0){
        for (final XmlTagChild child : myElements) {
          if (child != text) {
            child.delete();
          }
        }
      }
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
  }

  /**
   * @return {@code true} if any text element contains a CDATA section.
   */
  @Override
  public boolean hasCDATA() {
    for (XmlText xmlText : getTextElements()) {
      PsiElement[] children = xmlText.getChildren();
      for (PsiElement child : children) {
        if (child.getNode().getElementType() == XmlElementType.XML_CDATA) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Builds an {@link XmlTagValue} from the children of {@code tag} that lie
   * between the end of the start tag and the beginning of the end tag.
   */
  public static XmlTagValue createXmlTagValue(XmlTag tag) {
    final List<XmlTagChild> bodyElements = new ArrayList<>();

    tag.processElements(new PsiElementProcessor<PsiElement>() {
      boolean insideBody;
      @Override
      public boolean execute(@NotNull PsiElement element) {
        final ASTNode treeElement = element.getNode();
        if (insideBody) {
          if (treeElement != null && treeElement.getElementType() == XmlTokenType.XML_END_TAG_START) return false;
          if (!(element instanceof XmlTagChild)) return true;
          bodyElements.add((XmlTagChild)element);
        }
        else if (treeElement != null && treeElement.getElementType() == XmlTokenType.XML_TAG_END) insideBody = true;
        return true;
      }
    }, tag);

    XmlTagChild[] tagChildren = bodyElements.toArray(XmlTagChild.EMPTY_ARRAY);
    return new XmlTagValueImpl(tagChildren, tag);
  }
}
| apache-2.0 |
apache/syncope | core/spring/src/main/java/org/apache/syncope/core/spring/security/AuthDataAccessor.java | 23971 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.spring.security;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.security.auth.login.AccountNotFoundException;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.syncope.common.keymaster.client.api.ConfParamOps;
import org.apache.syncope.common.lib.SyncopeConstants;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.AuditElements;
import org.apache.syncope.common.lib.types.EntitlementsHolder;
import org.apache.syncope.common.lib.types.IdRepoEntitlement;
import org.apache.syncope.core.persistence.api.ImplementationLookup;
import org.apache.syncope.core.persistence.api.dao.AccessTokenDAO;
import org.apache.syncope.core.persistence.api.dao.AnySearchDAO;
import org.apache.syncope.core.persistence.api.entity.AnyType;
import org.apache.syncope.core.persistence.api.entity.resource.Provision;
import org.apache.syncope.core.provisioning.api.utils.RealmUtils;
import org.apache.syncope.core.persistence.api.dao.AnyTypeDAO;
import org.apache.syncope.core.persistence.api.dao.DelegationDAO;
import org.apache.syncope.core.persistence.api.dao.GroupDAO;
import org.apache.syncope.core.persistence.api.dao.RealmDAO;
import org.apache.syncope.core.persistence.api.dao.RoleDAO;
import org.apache.syncope.core.persistence.api.dao.UserDAO;
import org.apache.syncope.core.persistence.api.dao.search.AttrCond;
import org.apache.syncope.core.persistence.api.dao.search.SearchCond;
import org.apache.syncope.core.persistence.api.entity.AccessToken;
import org.apache.syncope.core.persistence.api.entity.Delegation;
import org.apache.syncope.core.persistence.api.entity.DynRealm;
import org.apache.syncope.core.persistence.api.entity.Realm;
import org.apache.syncope.core.persistence.api.entity.Role;
import org.apache.syncope.core.persistence.api.entity.resource.ExternalResource;
import org.apache.syncope.core.persistence.api.entity.user.User;
import org.apache.syncope.core.provisioning.api.AuditManager;
import org.apache.syncope.core.provisioning.api.ConnectorManager;
import org.apache.syncope.core.provisioning.api.MappingManager;
import org.apache.syncope.core.spring.ApplicationContextProvider;
import org.identityconnectors.framework.common.objects.Uid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.security.authentication.AuthenticationCredentialsNotFoundException;
import org.springframework.security.authentication.DisabledException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.web.authentication.session.SessionAuthenticationException;
import org.springframework.transaction.annotation.Transactional;
/**
 * Domain-sensible (via {@code @Transactional}) access to authentication / authorization data.
 *
 * @see JWTAuthenticationProvider
 * @see UsernamePasswordAuthenticationProvider
 * @see SyncopeAuthenticationDetails
 */
public class AuthDataAccessor {

    protected static final Logger LOG = LoggerFactory.getLogger(AuthDataAccessor.class);

    public static final String GROUP_OWNER_ROLE = "GROUP_OWNER";

    protected static final Encryptor ENCRYPTOR = Encryptor.getInstance();

    // authorities granted to the configured anonymous user
    protected static final Set<SyncopeGrantedAuthority> ANONYMOUS_AUTHORITIES =
            Set.of(new SyncopeGrantedAuthority(IdRepoEntitlement.ANONYMOUS));

    // sole authority granted while a user still must change their password
    protected static final Set<SyncopeGrantedAuthority> MUST_CHANGE_PASSWORD_AUTHORITIES =
            Set.of(new SyncopeGrantedAuthority(IdRepoEntitlement.MUST_CHANGE_PASSWORD));

    protected final SecurityProperties securityProperties;

    protected final RealmDAO realmDAO;

    protected final UserDAO userDAO;

    protected final GroupDAO groupDAO;

    protected final AnyTypeDAO anyTypeDAO;

    protected final AnySearchDAO anySearchDAO;

    protected final AccessTokenDAO accessTokenDAO;

    protected final ConfParamOps confParamOps;

    protected final RoleDAO roleDAO;

    protected final DelegationDAO delegationDAO;

    protected final ConnectorManager connectorManager;

    protected final AuditManager auditManager;

    protected final MappingManager mappingManager;

    protected final ImplementationLookup implementationLookup;

    // lazily initialized in getJWTSSOProvider(), keyed by issuer; guarded by 'this'
    private Map<String, JWTSSOProvider> jwtSSOProviders;

    public AuthDataAccessor(
            final SecurityProperties securityProperties,
            final RealmDAO realmDAO,
            final UserDAO userDAO,
            final GroupDAO groupDAO,
            final AnyTypeDAO anyTypeDAO,
            final AnySearchDAO anySearchDAO,
            final AccessTokenDAO accessTokenDAO,
            final ConfParamOps confParamOps,
            final RoleDAO roleDAO,
            final DelegationDAO delegationDAO,
            final ConnectorManager connectorManager,
            final AuditManager auditManager,
            final MappingManager mappingManager,
            final ImplementationLookup implementationLookup) {

        this.securityProperties = securityProperties;
        this.realmDAO = realmDAO;
        this.userDAO = userDAO;
        this.groupDAO = groupDAO;
        this.anyTypeDAO = anyTypeDAO;
        this.anySearchDAO = anySearchDAO;
        this.accessTokenDAO = accessTokenDAO;
        this.confParamOps = confParamOps;
        this.roleDAO = roleDAO;
        this.delegationDAO = delegationDAO;
        this.connectorManager = connectorManager;
        this.auditManager = auditManager;
        this.mappingManager = mappingManager;
        this.implementationLookup = implementationLookup;
    }

    /**
     * Returns the {@link JWTSSOProvider} registered for the given issuer, instantiating
     * all providers reported by the {@link ImplementationLookup} on first access.
     *
     * @param issuer JWT issuer
     * @return matching provider
     * @throws AuthenticationCredentialsNotFoundException if issuer is {@code null} or no provider matches
     */
    public JWTSSOProvider getJWTSSOProvider(final String issuer) {
        synchronized (this) {
            if (jwtSSOProviders == null) {
                jwtSSOProviders = new HashMap<>();

                implementationLookup.getJWTSSOProviderClasses().stream().
                        map(clazz -> (JWTSSOProvider) ApplicationContextProvider.getBeanFactory().
                        createBean(clazz, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, true)).
                        forEach(jwtSSOProvider -> jwtSSOProviders.put(jwtSSOProvider.getIssuer(), jwtSSOProvider));
            }
        }

        if (issuer == null) {
            throw new AuthenticationCredentialsNotFoundException("A null issuer is not permitted");
        }

        JWTSSOProvider provider = jwtSSOProviders.get(issuer);
        if (provider == null) {
            throw new AuthenticationCredentialsNotFoundException(
                    "Could not find any registered JWTSSOProvider for issuer " + issuer);
        }

        return provider;
    }

    /**
     * Resolves and validates the delegation, if any, requested via the given details.
     *
     * @param details authentication details, possibly carrying a "delegated by" user key or username
     * @param delegatedKey key of the user the delegation is granted to
     * @return the key of the valid {@link Delegation} found, or {@code null} if no delegation was requested
     * @throws SessionAuthenticationException if the delegating user or a valid delegation cannot be found
     */
    protected String getDelegationKey(final SyncopeAuthenticationDetails details, final String delegatedKey) {
        if (details.getDelegatedBy() == null) {
            return null;
        }

        String delegatingKey = SyncopeConstants.UUID_PATTERN.matcher(details.getDelegatedBy()).matches()
                ? details.getDelegatedBy()
                : userDAO.findKey(details.getDelegatedBy());
        if (delegatingKey == null) {
            throw new SessionAuthenticationException(
                    "Delegating user " + details.getDelegatedBy() + " cannot be found");
        }

        LOG.debug("Delegation request: delegating:{}, delegated:{}", delegatingKey, delegatedKey);

        return delegationDAO.findValidFor(delegatingKey, delegatedKey).
                orElseThrow(() -> new SessionAuthenticationException(
                "Delegation by " + delegatingKey + " was requested but none found"));
    }

    /**
     * Attempts to authenticate the given credentials against internal storage and pass-through resources (if
     * configured): the first succeeding causes global success.
     *
     * @param domain domain
     * @param authentication given credentials
     * @return {@code null} if no matching user was found, authentication result otherwise
     */
    @Transactional(noRollbackFor = DisabledException.class)
    public Triple<User, Boolean, String> authenticate(final String domain, final Authentication authentication) {
        User user = null;

        // find the user by each configured authentication attribute, in order, until one matches
        String[] authAttrValues = confParamOps.get(
                domain, "authentication.attributes", new String[] { "username" }, String[].class);
        for (int i = 0; user == null && i < authAttrValues.length; i++) {
            if ("username".equals(authAttrValues[i])) {
                user = userDAO.findByUsername(authentication.getName());
            } else {
                AttrCond attrCond = new AttrCond(AttrCond.Type.EQ);
                attrCond.setSchema(authAttrValues[i]);
                attrCond.setExpression(authentication.getName());
                try {
                    List<User> users = anySearchDAO.search(SearchCond.getLeaf(attrCond), AnyTypeKind.USER);
                    if (users.size() == 1) {
                        user = users.get(0);
                    } else {
                        LOG.warn("Search condition {} does not uniquely match a user", attrCond);
                    }
                } catch (IllegalArgumentException e) {
                    LOG.error("While searching user for authentication via {}", attrCond, e);
                }
            }
        }

        Boolean authenticated = null;
        String delegationKey = null;
        if (user != null) {
            authenticated = false;

            if (user.isSuspended() != null && user.isSuspended()) {
                throw new DisabledException("User " + user.getUsername() + " is suspended");
            }

            String[] authStatuses = confParamOps.get(
                    domain, "authentication.statuses", new String[] {}, String[].class);
            if (!ArrayUtils.contains(authStatuses, user.getStatus())) {
                throw new DisabledException("User " + user.getUsername() + " not allowed to authenticate");
            }

            boolean userModified = false;
            authenticated = authenticate(user, authentication.getCredentials().toString());
            if (authenticated) {
                delegationKey = getDelegationKey(
                        SyncopeAuthenticationDetails.class.cast(authentication.getDetails()), user.getKey());

                if (confParamOps.get(domain, "log.lastlogindate", true, Boolean.class)) {
                    user.setLastLoginDate(new Date());
                    userModified = true;
                }

                if (user.getFailedLogins() != 0) {
                    user.setFailedLogins(0);
                    userModified = true;
                }
            } else {
                user.setFailedLogins(user.getFailedLogins() + 1);
                userModified = true;
            }

            if (userModified) {
                userDAO.save(user);
            }
        }

        return Triple.of(user, authenticated, delegationKey);
    }

    /**
     * Verifies the given clear-text password against internal storage first, then against each
     * pass-through resource, stopping at the first success.
     *
     * @param user user attempting authentication
     * @param password clear-text password
     * @return whether authentication succeeded anywhere
     */
    protected boolean authenticate(final User user, final String password) {
        boolean authenticated = ENCRYPTOR.verify(password, user.getCipherAlgorithm(), user.getPassword());
        LOG.debug("{} authenticated on internal storage: {}", user.getUsername(), authenticated);

        for (Iterator<? extends ExternalResource> itor = getPassthroughResources(user).iterator();
                itor.hasNext() && !authenticated;) {

            ExternalResource resource = itor.next();
            String connObjectKey = null;
            try {
                AnyType userType = anyTypeDAO.findUser();
                Provision provision = resource.getProvision(userType).
                        orElseThrow(() -> new AccountNotFoundException(
                        "Unable to locate provision for user type " + userType.getKey()));

                connObjectKey = mappingManager.getConnObjectKeyValue(user, provision).
                        orElseThrow(() -> new AccountNotFoundException(
                        "Unable to locate conn object key value for " + userType.getKey()));

                Uid uid = connectorManager.getConnector(resource).authenticate(connObjectKey, password, null);
                if (uid != null) {
                    authenticated = true;
                }
            } catch (Exception e) {
                // best-effort: any failure on a resource just means "not authenticated there"
                LOG.debug("Could not authenticate {} on {}", user.getUsername(), resource.getKey(), e);
            }
            LOG.debug("{} authenticated on {} as {}: {}",
                    user.getUsername(), resource.getKey(), connObjectKey, authenticated);
        }

        return authenticated;
    }

    /**
     * Computes the external resources to attempt pass-through authentication against: the intersection
     * of the authentication resources from the account policies of (1) the resources assigned to the
     * given user and (2) the realms from the user's realm upwards.
     *
     * @param user user attempting authentication
     * @return resources for pass-through authentication; empty if no applicable account policy is found
     */
    protected Set<? extends ExternalResource> getPassthroughResources(final User user) {
        Set<ExternalResource> result = null;

        // 1. look for assigned resources, pick the ones whose account policy has authentication resources
        for (ExternalResource resource : userDAO.findAllResources(user)) {
            if (resource.getAccountPolicy() != null && !resource.getAccountPolicy().getResources().isEmpty()) {
                if (result == null) {
                    // defensive copy: the retainAll calls below must not mutate the account policy's
                    // own (possibly persistence-managed) resource collection
                    result = new HashSet<>(resource.getAccountPolicy().getResources());
                } else {
                    result.retainAll(resource.getAccountPolicy().getResources());
                }
            }
        }

        // 2. look for realms, pick the ones whose account policy has authentication resources
        for (Realm realm : realmDAO.findAncestors(user.getRealm())) {
            if (realm.getAccountPolicy() != null && !realm.getAccountPolicy().getResources().isEmpty()) {
                if (result == null) {
                    result = new HashSet<>(realm.getAccountPolicy().getResources());
                } else {
                    result.retainAll(realm.getAccountPolicy().getResources());
                }
            }
        }

        return result == null ? Set.of() : result;
    }

    /**
     * @return all known entitlements, each on the root realm (full admin rights)
     */
    protected Set<SyncopeGrantedAuthority> getAdminAuthorities() {
        return EntitlementsHolder.getInstance().getValues().stream().
                map(entitlement -> new SyncopeGrantedAuthority(entitlement, SyncopeConstants.ROOT_REALM)).
                collect(Collectors.toSet());
    }

    /**
     * Builds granted authorities out of the given entitlement -&gt; realms map, normalizing
     * the realm set of each entitlement first.
     */
    protected Set<SyncopeGrantedAuthority> buildAuthorities(final Map<String, Set<String>> entForRealms) {
        Set<SyncopeGrantedAuthority> authorities = new HashSet<>();

        entForRealms.forEach((entitlement, realms) -> {
            Pair<Set<String>, Set<String>> normalized = RealmUtils.normalize(realms);

            SyncopeGrantedAuthority authority = new SyncopeGrantedAuthority(entitlement);
            authority.addRealms(normalized.getLeft());
            authority.addRealms(normalized.getRight());
            authorities.add(authority);
        });

        return authorities;
    }

    /**
     * Computes the authorities for the given user from their (static and dynamic) roles and
     * from the groups they own; a user flagged as "must change password" only gets
     * {@link #MUST_CHANGE_PASSWORD_AUTHORITIES}.
     */
    protected Set<SyncopeGrantedAuthority> getUserAuthorities(final User user) {
        if (user.isMustChangePassword()) {
            return MUST_CHANGE_PASSWORD_AUTHORITIES;
        }

        Map<String, Set<String>> entForRealms = new HashMap<>();

        // Give entitlements as assigned by roles (with static or dynamic realms, where applicable) - assigned
        // either statically and dynamically
        userDAO.findAllRoles(user).stream().
                filter(role -> !GROUP_OWNER_ROLE.equals(role.getKey())).
                forEach(role -> role.getEntitlements().forEach(entitlement -> {
                    Set<String> realms = entForRealms.computeIfAbsent(entitlement, e -> new HashSet<>());

                    realms.addAll(role.getRealms().stream().map(Realm::getFullPath).collect(Collectors.toSet()));
                    // dynamic realms are membership-based, hence CREATE / DELETE do not apply
                    if (!entitlement.endsWith("_CREATE") && !entitlement.endsWith("_DELETE")) {
                        realms.addAll(role.getDynRealms().stream().map(DynRealm::getKey).collect(Collectors.toList()));
                    }
                }));

        // Give group entitlements for owned groups
        groupDAO.findOwnedByUser(user.getKey()).forEach(group -> {
            Role groupOwnerRole = roleDAO.find(GROUP_OWNER_ROLE);
            if (groupOwnerRole == null) {
                LOG.warn("Role {} was not found", GROUP_OWNER_ROLE);
            } else {
                groupOwnerRole.getEntitlements().forEach(entitlement -> {
                    Set<String> realms = entForRealms.computeIfAbsent(entitlement, e -> new HashSet<>());

                    realms.add(RealmUtils.getGroupOwnerRealm(group.getRealm().getFullPath(), group.getKey()));
                });
            }
        });

        return buildAuthorities(entForRealms);
    }

    /**
     * Computes the authorities granted by the roles explicitly set on the given delegation.
     */
    protected Set<SyncopeGrantedAuthority> getDelegatedAuthorities(final Delegation delegation) {
        Map<String, Set<String>> entForRealms = new HashMap<>();

        delegation.getRoles().stream().filter(role -> !GROUP_OWNER_ROLE.equals(role.getKey())).
                forEach(role -> role.getEntitlements().forEach(entitlement -> {
                    Set<String> realms = entForRealms.computeIfAbsent(entitlement, e -> new HashSet<>());

                    realms.addAll(role.getRealms().stream().map(Realm::getFullPath).collect(Collectors.toSet()));
                    // dynamic realms are membership-based, hence CREATE / DELETE do not apply
                    if (!entitlement.endsWith("_CREATE") && !entitlement.endsWith("_DELETE")) {
                        realms.addAll(role.getDynRealms().stream().map(DynRealm::getKey).collect(Collectors.toList()));
                    }
                }));

        return buildAuthorities(entForRealms);
    }

    /**
     * Computes the authorities for the given username: anonymous and admin users get their fixed
     * sets, a requested delegation yields either the delegation's roles or the delegating user's
     * own authorities, any other user gets their own authorities.
     *
     * @param username username to compute authorities for
     * @param delegationKey delegation key, possibly {@code null}
     * @return granted authorities
     * @throws UsernameNotFoundException if the user or the delegation cannot be found
     */
    @Transactional
    public Set<SyncopeGrantedAuthority> getAuthorities(final String username, final String delegationKey) {
        Set<SyncopeGrantedAuthority> authorities;

        if (securityProperties.getAnonymousUser().equals(username)) {
            authorities = ANONYMOUS_AUTHORITIES;
        } else if (securityProperties.getAdminUser().equals(username)) {
            authorities = getAdminAuthorities();
        } else if (delegationKey != null) {
            Delegation delegation = Optional.ofNullable(delegationDAO.find(delegationKey)).
                    orElseThrow(() -> new UsernameNotFoundException(
                    "Could not find delegation " + delegationKey));

            // a delegation without explicit roles grants the delegating user's own authorities
            authorities = delegation.getRoles().isEmpty()
                    ? getUserAuthorities(delegation.getDelegating())
                    : getDelegatedAuthorities(delegation);
        } else {
            User user = Optional.ofNullable(userDAO.findByUsername(username)).
                    orElseThrow(() -> new UsernameNotFoundException(
                    "Could not find any user with username " + username));

            authorities = getUserAuthorities(user);
        }

        return authorities;
    }

    /**
     * Authenticates the given JWT: the admin user is matched against stored access tokens,
     * any other subject is resolved via the {@link JWTSSOProvider} registered for the
     * token's issuer, then checked for suspension, allowed statuses and the
     * "must change password" flag.
     *
     * @param authentication JWT authentication request
     * @return username and granted authorities
     * @throws AuthenticationCredentialsNotFoundException if the token or its user cannot be resolved
     * @throws DisabledException if the resolved user is suspended or has a not-allowed status
     */
    @Transactional
    public Pair<String, Set<SyncopeGrantedAuthority>> authenticate(final JWTAuthentication authentication) {
        String username;
        Set<SyncopeGrantedAuthority> authorities;

        if (securityProperties.getAdminUser().equals(authentication.getClaims().getSubject())) {
            AccessToken accessToken = accessTokenDAO.find(authentication.getClaims().getJWTID());
            if (accessToken == null) {
                throw new AuthenticationCredentialsNotFoundException(
                        "Could not find an Access Token for JWT " + authentication.getClaims().getJWTID());
            }

            username = securityProperties.getAdminUser();
            authorities = getAdminAuthorities();
        } else {
            JWTSSOProvider jwtSSOProvider = getJWTSSOProvider(authentication.getClaims().getIssuer());
            Pair<User, Set<SyncopeGrantedAuthority>> resolved = jwtSSOProvider.resolve(authentication.getClaims());
            if (resolved == null || resolved.getLeft() == null) {
                throw new AuthenticationCredentialsNotFoundException(
                        "Could not find User " + authentication.getClaims().getSubject()
                        + " for JWT " + authentication.getClaims().getJWTID());
            }

            User user = resolved.getLeft();
            String delegationKey = getDelegationKey(authentication.getDetails(), user.getKey());
            username = user.getUsername();
            authorities = resolved.getRight() == null
                    ? Set.of()
                    : delegationKey == null
                            ? resolved.getRight()
                            : getAuthorities(username, delegationKey);
            LOG.debug("JWT {} issued by {} resolved to User {} with authorities {}",
                    authentication.getClaims().getJWTID(),
                    authentication.getClaims().getIssuer(),
                    username + Optional.ofNullable(delegationKey).
                            map(d -> " [under delegation " + delegationKey + "]").orElse(StringUtils.EMPTY),
                    authorities);

            if (BooleanUtils.isTrue(user.isSuspended())) {
                throw new DisabledException("User " + username + " is suspended");
            }

            List<String> authStatuses = List.of(confParamOps.get(authentication.getDetails().getDomain(),
                    "authentication.statuses", new String[] {}, String[].class));
            if (!authStatuses.contains(user.getStatus())) {
                throw new DisabledException("User " + username + " not allowed to authenticate");
            }

            if (BooleanUtils.isTrue(user.isMustChangePassword())) {
                LOG.debug("User {} must change password, resetting authorities", username);
                authorities = MUST_CHANGE_PASSWORD_AUTHORITIES;
            }
        }

        return Pair.of(username, authorities);
    }

    /**
     * Removes the expired access token with the given key.
     */
    @Transactional
    public void removeExpired(final String tokenKey) {
        accessTokenDAO.delete(tokenKey);
    }

    /**
     * Records an authentication attempt in the audit log, mentioning the delegation (if any).
     */
    @Transactional(readOnly = true)
    public void audit(
            final String username,
            final String delegationKey,
            final AuditElements.Result result,
            final Object output,
            final Object... input) {

        auditManager.audit(
                username + Optional.ofNullable(delegationKey).
                        map(d -> " [under delegation " + delegationKey + "]").orElse(StringUtils.EMPTY),
                AuditElements.EventCategoryType.LOGIC, AuditElements.AUTHENTICATION_CATEGORY, null,
                AuditElements.LOGIN_EVENT, result, null, output, input);
    }
}
| apache-2.0 |
papicella/snappy-store | tests/core/src/main/java/admin/keepalive/TestPrms.java | 1450 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package admin.keepalive;
import hydra.*;
//import util.*;
/**
 * Hydra parameter keys used by the Admin API region "keep alive" tests.
 */
public class TestPrms extends BasePrms {

  //---------------------------------------------------------------------
  // Test-specific parameters
  //---------------------------------------------------------------------

  /** (boolean) Controls whether a remote CacheLoader is defined. */
  public static Long defineCacheLoaderRemote;

  /**
   * Reads {@link #defineCacheLoaderRemote}, first from the task table and
   * then from the configuration table.
   *
   * @return the configured value; {@code false} when unset
   */
  public static boolean getDefineCacheLoaderRemote() {
    return tasktab().booleanAt(defineCacheLoaderRemote,
                               tab().booleanAt(defineCacheLoaderRemote, false));
  }
}
| apache-2.0 |
OBIGOGIT/etch | binding-java/runtime/src/main/java/org/apache/etch/bindings/java/msg/ValueFactory.java | 5950 | /* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.etch.bindings.java.msg;
import java.util.Set;
import org.apache.etch.bindings.java.msg.Validator.Level;
/**
 * Defines the value factory which helps the idl compiler serialize and
 * deserialize messages, convert values, etc. Implementations are generated
 * per service and hold the type and field metadata for that service.
 */
public interface ValueFactory
{
	//////////
	// Type //
	//////////

	/**
	 * Translates a type id into the appropriate Type object. If the type does
	 * not exist, and if dynamic typing is enabled, adds it to the dynamic types.
	 * @param id a type id.
	 * @return id translated into the appropriate Type.
	 */
	public Type getType( Integer id );

	/**
	 * Translates a type name into the appropriate Type object. If the type does
	 * not exist, and if dynamic typing is enabled, adds it to the dynamic types.
	 * @param name a type name.
	 * @return name translated into the appropriate Type.
	 */
	public Type getType( String name );

	/**
	 * Adds the type if it doesn't already exist. Use this to dynamically add
	 * types to a ValueFactory. The type is per instance of the ValueFactory,
	 * not global. Not available if dynamic typing is locked
	 * (see {@link #lockDynamicTypes()}).
	 * @param type the type to add.
	 */
	public void addType( Type type );

	/**
	 * Locks the dynamic typing so that no new types may be created by addType
	 * or getType.
	 */
	public void lockDynamicTypes();

	/**
	 * Unlocks the dynamic typing so that new types may be created by addType
	 * or getType.
	 */
	public void unlockDynamicTypes();

	/**
	 * @return a collection of all the types known to this value factory.
	 */
	public Set<Type> getTypes();

	/////////////////////
	// STRING ENCODING //
	/////////////////////

	/**
	 * @return the character encoding to use when serializing strings.
	 */
	public String getStringEncoding();

	////////////////
	// MESSAGE ID //
	////////////////

	/**
	 * @param msg the message whose well-known message-id field is to be
	 * returned.
	 * @return the value of the well-known message-id field. This is a
	 * unique identifier for this message on a particular transport
	 * during a particular session. If there is no well-known message-id
	 * field defined, or if the message-id field has not been set, then
	 * return null.
	 */
	public Long getMessageId( Message msg );

	/**
	 * Sets the value of the well-known message-id field. This is a
	 * unique identifier for this message on a particular transport
	 * during a particular session. If there is no well-known message-id
	 * field defined then nothing is done. If msgid is null, then the
	 * field is cleared.
	 * @param msg the message whose well-known message-id field is to
	 * be set.
	 * @param msgid the value of the well-known message-id field.
	 */
	public void setMessageId( Message msg, Long msgid );

	/**
	 * @return the well-known message field for message id.
	 */
	public Field get_mf__messageId();

	/////////////////
	// IN REPLY TO //
	/////////////////

	/**
	 * @param msg the message whose well-known in-reply-to field is to
	 * be returned.
	 * @return the value of the in-reply-to field, or null if there is
	 * none or if there is no such field defined.
	 */
	public Long getInReplyTo( Message msg );

	/**
	 * @param msg the message whose well-known in-reply-to field is to
	 * be set.
	 * @param msgid the value of the well-known in-reply-to field. If
	 * there is no well-known in-reply-to field defined then nothing
	 * is done. If msgid is null, then the field is cleared.
	 */
	public void setInReplyTo( Message msg, Long msgid );

	/**
	 * @return the well-known message field for in reply to.
	 */
	public Field get_mf__inReplyTo();

	//////////////////////
	// VALUE CONVERSION //
	//////////////////////

	/**
	 * Converts a value to a struct value representation to be exported
	 * to a tagged data output.
	 * @param value a custom type defined by a service, or a well-known
	 * standard type (e.g., date).
	 * @return a struct value representing the value.
	 * @throws UnsupportedOperationException if the type cannot be exported.
	 */
	public StructValue exportCustomValue( Object value )
		throws UnsupportedOperationException;

	/**
	 * Converts a struct value imported from a tagged data input to
	 * a normal type.
	 * @param struct a struct value representation of a custom type, or a
	 * well known standard type.
	 * @return a custom type, or a well known standard type.
	 * @throws UnsupportedOperationException if the type cannot be imported.
	 */
	public Object importCustomValue( StructValue struct )
		throws UnsupportedOperationException;

	/**
	 * @param c the class of a custom value.
	 * @return the struct type of a custom value class.
	 * @throws UnsupportedOperationException if no struct type is registered
	 * for the class.
	 * @see #exportCustomValue(Object)
	 */
	public Type getCustomStructType( Class<?> c )
		throws UnsupportedOperationException;

	/**
	 * @return the well-known message type for an exception thrown by a one-way
	 * message.
	 */
	public Type get_mt__exception();

	/**
	 * @return the validation level of field StructValue.put and TaggedDataOutput.
	 */
	public Level getLevel();

	/**
	 * Sets the validation level of field StructValue.put and TaggedDataOutput.
	 * @param level the new validation level.
	 * @return the old value.
	 */
	public Level setLevel( Level level );
}
| apache-2.0 |
gab1one/imagej-ops | src/main/java/net/imagej/ops/create/img/CreateImgFromInterval.java | 2532 | /*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2014 - 2017 Board of Regents of the University of
* Wisconsin-Madison, University of Konstanz and Brian Northan.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.create.img;
import net.imagej.ops.Ops;
import net.imagej.ops.special.chain.UFViaUFSameIO;
import net.imagej.ops.special.function.Functions;
import net.imagej.ops.special.function.UnaryFunctionOp;
import net.imglib2.Interval;
import net.imglib2.img.Img;
import net.imglib2.type.numeric.real.DoubleType;
import org.scijava.plugin.Plugin;
/**
* Creates an {@link Img} from an {@link Interval} with no additional hints.
* {@link Interval} contents are not copied.
*
* @author Curtis Rueden
*/
@Plugin(type = Ops.Create.Img.class)
public class CreateImgFromInterval extends
	UFViaUFSameIO<Interval, Img<DoubleType>> implements Ops.Create.Img
{

	/**
	 * Builds the delegate op that actually creates the {@link Img}. The element
	 * type defaults to {@link DoubleType}, mirroring the behavior of
	 * {@code CreateImgFromDimsAndType}.
	 */
	@Override
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public UnaryFunctionOp<Interval, Img<DoubleType>> createWorker(
		final Interval input)
	{
		// Look up the dims-and-type flavor of the create op, then cast the
		// raw worker back to the strongly typed signature expected here.
		final UnaryFunctionOp worker = Functions.unary(ops(),
			Ops.Create.Img.class, Img.class, input, new DoubleType());
		return (UnaryFunctionOp<Interval, Img<DoubleType>>) worker;
	}
}
| bsd-2-clause |
steffeli/inf5750-tracker-capture | dhis-support/dhis-support-system/src/main/java/org/hisp/dhis/system/filter/AggregatableDataElementFilter.java | 2620 | package org.hisp.dhis.system.filter;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Sets;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.commons.filter.Filter;
import org.hisp.dhis.dataelement.DataElement;
import java.util.Set;
/**
* @author Lars Helge Overland
*/
public class AggregatableDataElementFilter
    implements Filter<DataElement>
{
    public static final AggregatableDataElementFilter INSTANCE = new AggregatableDataElementFilter();

    // Value types for which data element values can be aggregated.
    private static final Set<ValueType> AGGREGATABLE_VALUE_TYPES = Sets.newHashSet(
        ValueType.BOOLEAN, ValueType.TRUE_ONLY, ValueType.TEXT, ValueType.LONG_TEXT, ValueType.LETTER,
        ValueType.INTEGER, ValueType.INTEGER_POSITIVE, ValueType.INTEGER_NEGATIVE, ValueType.INTEGER_ZERO_OR_POSITIVE,
        ValueType.NUMBER, ValueType.UNIT_INTERVAL, ValueType.PERCENTAGE, ValueType.COORDINATE
    );

    /**
     * Keeps the given data element only if it is non-null and its value type
     * is one of the aggregatable types.
     */
    @Override
    public boolean retain( DataElement object )
    {
        if ( object == null )
        {
            return false;
        }

        return AGGREGATABLE_VALUE_TYPES.contains( object.getValueType() );
    }
}
| bsd-3-clause |
nativelibs4java/BridJ | src/main/java/org/bridj/DefaultNativeList.java | 8041 | /*
* BridJ - Dynamic and blazing-fast native interop for Java.
* http://bridj.googlecode.com/
*
* Copyright (c) 2010-2015, Olivier Chafik (http://ochafik.com/)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Olivier Chafik nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.bridj;
import static org.bridj.Pointer.allocate;
import static org.bridj.Pointer.allocateArray;
import java.util.AbstractList;
import java.util.Collection;
import java.util.RandomAccess;
import org.bridj.Pointer.ListType;
/**
* TODO : smart rewrite by chunks for removeAll and retainAll !
*
* @author ochafik
* @param <T> component type
*/
class DefaultNativeList<T> extends AbstractList<T> implements NativeList<T>, RandomAccess {

    /*
     * For optimization purposes, please look at AbstractList.java and AbstractCollection.java :
     * http://www.koders.com/java/fidCFCB47A1819AB345234CC04B6A1EA7554C2C17C0.aspx?s=iso
     * http://www.koders.com/java/fidA34BB0789922998CD34313EE49D61B06851A4397.aspx?s=iso
     *
     * We've reimplemented more methods than needed on purpose, for performance reasons (mainly using a native-optimized indexOf, that uses memmem and avoids deserializing too many elements)
     */

    /** Storage/growth strategy: Dynamic, FixedCapacity or Unmodifiable. */
    final ListType type;
    /** Element (de)serializer; also provides the element byte size via getTargetSize(). */
    final PointerIO<T> io;
    /** Backing native memory; re-allocated when a Dynamic list grows. */
    volatile Pointer<T> pointer;
    /** Current number of valid elements. */
    volatile long size;

    public Pointer<?> getPointer() {
        return pointer;
    }

    /**
     * Create a native list that uses the provided storage and implementation
     * strategy
     *
     * @param pointer typed pointer providing the backing storage; its valid
     *        element count becomes the initial list size
     * @param type Implementation type
     */
    DefaultNativeList(Pointer<T> pointer, ListType type) {
        if (pointer == null || type == null) {
            throw new IllegalArgumentException("Cannot build a " + getClass().getSimpleName() + " with " + pointer + " and " + type);
        }
        this.io = pointer.getIO("Cannot create a list out of untyped pointer " + pointer);
        this.type = type;
        this.size = pointer.getValidElements();
        this.pointer = pointer;
    }

    /** Throws if this list was created with the Unmodifiable strategy. */
    protected void checkModifiable() {
        if (type == ListType.Unmodifiable) {
            throw new UnsupportedOperationException("This list is unmodifiable");
        }
    }

    /** Narrows a long to int, failing loudly instead of silently truncating. */
    protected int safelyCastLongToInt(long i, String content) {
        if (i > Integer.MAX_VALUE) {
            throw new RuntimeException(content + " is bigger than Java int's maximum value : " + i);
        }
        return (int) i;
    }

    @Override
    public int size() {
        return safelyCastLongToInt(size, "Size of the native list");
    }

    @Override
    public void clear() {
        checkModifiable();
        // Elements are not zeroed; the storage is simply considered empty.
        size = 0;
    }

    @Override
    public T get(int i) {
        if (i >= size || i < 0) {
            throw new IndexOutOfBoundsException("Invalid index : " + i + " (list has size " + size + ")");
        }
        return pointer.get(i);
    }

    @Override
    public T set(int i, T e) {
        checkModifiable();
        if (i >= size || i < 0) {
            throw new IndexOutOfBoundsException("Invalid index : " + i + " (list has size " + size + ")");
        }
        T old = pointer.get(i);
        pointer.set(i, e);
        return old;
    }

    /**
     * Inserts {@code e} at element index {@code i}, shifting the tail
     * [i, size) up by one slot.
     */
    @SuppressWarnings("deprecation")
    void add(long i, T e) {
        checkModifiable();
        if (i > size || i < 0) {
            throw new IndexOutOfBoundsException("Invalid index : " + i + " (list has size " + size + ")");
        }
        requireSize(size + 1);
        if (i < size) {
            // BUGFIX: moveBytesAtOffsetTo works with BYTE offsets/counts (see
            // remove(long), which scales by the element size); the previous
            // code passed raw element indices here, corrupting the buffer.
            final long targetSize = io.getTargetSize();
            pointer.moveBytesAtOffsetTo(i * targetSize, pointer, (i + 1) * targetSize, (size - i) * targetSize);
        }
        pointer.set(i, e);
        size++;
    }

    @Override
    public void add(int i, T e) {
        add((long) i, e);
    }

    /**
     * Ensures the backing storage can hold {@code newSize} elements, growing
     * Dynamic lists geometrically and rejecting growth for the other types.
     */
    protected void requireSize(long newSize) {
        if (newSize > pointer.getValidElements()) {
            switch (type) {
                case Dynamic:
                    // Geometric growth (x1.6) to amortize re-allocations.
                    long nextSize = newSize < 5 ? newSize + 1 : (long) (newSize * 1.6);
                    Pointer<T> newPointer = allocateArray(io, nextSize);
                    pointer.copyTo(newPointer);
                    pointer = newPointer;
                    break;
                case FixedCapacity:
                    throw new UnsupportedOperationException("This list has a fixed capacity, cannot grow its storage");
                case Unmodifiable:
                    // should not happen !
                    checkModifiable();
            }
        }
    }

    /**
     * Removes the element at index {@code i}, shifting the tail (i, size)
     * down by one slot, and returns the removed element.
     */
    @SuppressWarnings("deprecation")
    T remove(long i) {
        checkModifiable();
        if (i >= size || i < 0) {
            throw new IndexOutOfBoundsException("Invalid index : " + i + " (list has size " + size + ")");
        }
        T old = pointer.get(i);
        long targetSize = io.getTargetSize();
        // BUGFIX: the whole tail after i must be shifted down; the previous
        // code moved only a single element's bytes, leaving stale data behind
        // when removing from the middle of the list.
        final long elementsAfter = size - 1 - i;
        if (elementsAfter > 0) {
            pointer.moveBytesAtOffsetTo((i + 1) * targetSize, pointer, i * targetSize, elementsAfter * targetSize);
        }
        size--;
        return old;
    }

    @Override
    public T remove(int i) {
        return remove((long) i);
    }

    @Override
    public boolean remove(Object o) {
        checkModifiable();
        long i = indexOf(o, true, 0);
        if (i < 0) {
            return false;
        }
        remove(i);
        return true;
    }

    /**
     * Native-accelerated search.
     *
     * @param o element to look for
     * @param last if true, find the last occurrence, else the first
     * @param offset element index at which the search starts
     * @return the ELEMENT index of the occurrence (relative to the start of the
     *         list), or -1 if not found
     */
    @SuppressWarnings("unchecked")
    long indexOf(Object o, boolean last, int offset) {
        Pointer<T> pointer = this.pointer;
        // BUGFIX: the previous assertion (offset >= 0 && (last || offset > 0))
        // failed for the legitimate indexOf(o) call (last=false, offset=0).
        assert offset >= 0;
        if (offset > 0) {
            pointer = pointer.next(offset);
        }
        Pointer<T> needle = allocate(io);
        needle.set((T) o);
        Pointer<T> occurrence = last ? pointer.findLast(needle) : pointer.find(needle);
        if (occurrence == null) {
            return -1;
        }
        // BUGFIX: getPeer() differences are BYTE offsets; convert to an element
        // index and re-apply the search offset so the result indexes this list.
        return (occurrence.getPeer() - pointer.getPeer()) / io.getTargetSize() + offset;
    }

    @Override
    public int indexOf(Object o) {
        return safelyCastLongToInt(indexOf(o, false, 0), "Index of the object");
    }

    @Override
    public int lastIndexOf(Object o) {
        return safelyCastLongToInt(indexOf(o, true, 0), "Last index of the object");
    }

    @Override
    public boolean contains(Object o) {
        return indexOf(o) >= 0;
    }

    @Override
    public boolean addAll(int i, Collection<? extends T> clctn) {
        // Presize in one step (also for appends at i == size) before the
        // element-by-element insertion performed by AbstractList.
        if (i >= 0 && i <= size) {
            requireSize(size + clctn.size());
        }
        return super.addAll(i, clctn);
    }

    @Override
    public Object[] toArray() {
        return pointer.validElements(size).toArray();
    }

    @SuppressWarnings("hiding")
    @Override
    public <T> T[] toArray(T[] ts) {
        return pointer.validElements(size).toArray(ts);
    }
}
| bsd-3-clause |
LWJGL-CI/lwjgl3 | modules/lwjgl/lz4/src/generated/java/org/lwjgl/util/lz4/LZ4StreamHC.java | 6358 | /*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.util.lz4;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.util.lz4.LZ4HC.LZ4_STREAMHCSIZE_VOIDP;
/**
* <h3>Layout</h3>
*
* <pre><code>
* union LZ4_streamHC_t {
* size_t table[LZ4_STREAMHCSIZE_VOIDP];
* {@link LZ4HCCCtxInternal struct LZ4HC_CCtx_internal} internal_donotuse;
* }</code></pre>
*/
@NativeType("union LZ4_streamHC_t")
public class LZ4StreamHC extends Struct {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        TABLE,
        INTERNAL_DONOTUSE;

    static {
        // NB: __union(...) — the two members overlay the same storage; the
        // layout's size/alignment accommodate both.
        Layout layout = __union(
            __array(POINTER_SIZE, LZ4_STREAMHCSIZE_VOIDP),
            __member(LZ4HCCCtxInternal.SIZEOF, LZ4HCCCtxInternal.ALIGNOF)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        TABLE = layout.offsetof(0);
        INTERNAL_DONOTUSE = layout.offsetof(1);
    }

    /**
     * Creates a {@code LZ4StreamHC} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public LZ4StreamHC(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** @return a {@link PointerBuffer} view of the {@code table} field. */
    @NativeType("size_t[LZ4_STREAMHCSIZE_VOIDP]")
    public PointerBuffer table() { return ntable(address()); }
    /** @return the value at the specified index of the {@code table} field. */
    @NativeType("size_t")
    public long table(int index) { return ntable(address(), index); }
    /** @return a {@link LZ4HCCCtxInternal} view of the {@code internal_donotuse} field. */
    @NativeType("struct LZ4HC_CCtx_internal")
    public LZ4HCCCtxInternal internal_donotuse() { return ninternal_donotuse(address()); }

    // -----------------------------------

    /** Returns a new {@code LZ4StreamHC} instance for the specified memory address. */
    public static LZ4StreamHC create(long address) {
        return wrap(LZ4StreamHC.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static LZ4StreamHC createSafe(long address) {
        return address == NULL ? null : wrap(LZ4StreamHC.class, address);
    }

    /**
     * Create a {@link LZ4StreamHC.Buffer} instance at the specified memory.
     *
     * @param address the memory address
     * @param capacity the buffer capacity
     */
    public static LZ4StreamHC.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static LZ4StreamHC.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #table}. */
    public static PointerBuffer ntable(long struct) { return memPointerBuffer(struct + LZ4StreamHC.TABLE, LZ4_STREAMHCSIZE_VOIDP); }
    /** Unsafe version of {@link #table(int) table}. */
    public static long ntable(long struct, int index) {
        // check(...) guards against out-of-bounds access into the fixed-size array.
        return memGetAddress(struct + LZ4StreamHC.TABLE + check(index, LZ4_STREAMHCSIZE_VOIDP) * POINTER_SIZE);
    }
    /** Unsafe version of {@link #internal_donotuse}. */
    public static LZ4HCCCtxInternal ninternal_donotuse(long struct) { return LZ4HCCCtxInternal.create(struct + LZ4StreamHC.INTERNAL_DONOTUSE); }

    // -----------------------------------

    /** An array of {@link LZ4StreamHC} structs. */
    public static class Buffer extends StructBuffer<LZ4StreamHC, Buffer> {

        // Factory instance wrapped around an invalid address (-1); used only by
        // getElementFactory() to produce element views, never dereferenced itself.
        private static final LZ4StreamHC ELEMENT_FACTORY = LZ4StreamHC.create(-1L);

        /**
         * Creates a new {@code LZ4StreamHC.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link LZ4StreamHC#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected LZ4StreamHC getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return a {@link PointerBuffer} view of the {@code table} field. */
        @NativeType("size_t[LZ4_STREAMHCSIZE_VOIDP]")
        public PointerBuffer table() { return LZ4StreamHC.ntable(address()); }
        /** @return the value at the specified index of the {@code table} field. */
        @NativeType("size_t")
        public long table(int index) { return LZ4StreamHC.ntable(address(), index); }
        /** @return a {@link LZ4HCCCtxInternal} view of the {@code internal_donotuse} field. */
        @NativeType("struct LZ4HC_CCtx_internal")
        public LZ4HCCCtxInternal internal_donotuse() { return LZ4StreamHC.ninternal_donotuse(address()); }

    }
} | bsd-3-clause |
magicDGS/gatk | src/main/java/org/broadinstitute/hellbender/tools/spark/pathseq/PSBuildReferenceTaxonomyUtils.java | 17481 | package org.broadinstitute.hellbender.tools.spark.pathseq;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Output;
import htsjdk.samtools.SAMSequenceRecord;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import scala.Tuple2;
import java.io.*;
import java.util.*;
import java.util.zip.GZIPInputStream;
public final class PSBuildReferenceTaxonomyUtils {

    protected static final Logger logger = LogManager.getLogger(PSBuildReferenceTaxonomyUtils.class);
    // Matches a vertical bar with optional surrounding whitespace, the column
    // delimiter used both in sequence names and in the NCBI taxdump files.
    private static final String VERTICAL_BAR_DELIMITER_REGEX = "\\s*\\|\\s*";

    /**
     * Build set of accessions contained in the reference.
     * Returns: a map from accession to the name and length of the record. If the sequence name contains the
     * taxonomic ID, it instead gets added to taxIdToProperties. Later we merge both results into taxIdToProperties.
     * Method: First, look for either "taxid|&lt;taxid&gt;|" or "ref|&lt;accession&gt;|" in the sequence name. If neither of
     * those are found, use the first word of the name as the accession.
     */
    protected static Map<String, Tuple2<String, Long>> parseReferenceRecords(final List<SAMSequenceRecord> dictionaryList,
                                                                             final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties) {
        final Map<String, Tuple2<String, Long>> accessionToNameAndLength = new HashMap<>();
        for (final SAMSequenceRecord record : dictionaryList) {
            final String recordName = record.getSequenceName();
            final long recordLength = record.getSequenceLength();
            final String[] tokens = recordName.split(VERTICAL_BAR_DELIMITER_REGEX);
            String recordAccession = null;
            int recordTaxId = PSTree.NULL_NODE;
            // Scan token pairs for "ref|<accession>" and "taxid|<taxid>"; stop once a tax ID is found
            for (int i = 0; i < tokens.length - 1 && recordTaxId == PSTree.NULL_NODE; i++) {
                if (tokens[i].equals("ref")) {
                    recordAccession = tokens[i + 1];
                } else if (tokens[i].equals("taxid")) {
                    recordTaxId = parseTaxonId(tokens[i + 1]);
                }
            }
            if (recordTaxId == PSTree.NULL_NODE) {
                if (recordAccession == null) {
                    final String[] tokens2 = tokens[0].split(" "); //Default accession to first word in the name
                    recordAccession = tokens2[0];
                }
                accessionToNameAndLength.put(recordAccession, new Tuple2<>(recordName, recordLength));
            } else {
                addReferenceAccessionToTaxon(recordTaxId, recordName, recordLength, taxIdToProperties);
            }
        }
        return accessionToNameAndLength;
    }

    /**
     * Parses a taxonomic ID string, wrapping bad input in a user-facing exception.
     */
    private static int parseTaxonId(final String taxonId) {
        try {
            // parseInt avoids the needless boxing of Integer.valueOf
            return Integer.parseInt(taxonId);
        } catch (final NumberFormatException e) {
            throw new UserException.BadInput("Expected taxonomy ID to be an integer but found \"" + taxonId + "\"", e);
        }
    }

    /**
     * Helper classes for defining RefSeq and GenBank catalog formats. Columns should be given as 0-based indices.
     */
    private interface AccessionCatalogFormat {
        int getTaxIdColumn();

        int getAccessionColumn();
    }

    /** RefSeq catalog: tax ID in column 0, accession in column 2. */
    private static final class RefSeqCatalogFormat implements AccessionCatalogFormat {
        private static final int TAX_ID_COLUMN = 0;
        private static final int ACCESSION_COLUMN = 2;

        public int getTaxIdColumn() {
            return TAX_ID_COLUMN;
        }

        public int getAccessionColumn() {
            return ACCESSION_COLUMN;
        }
    }

    /** GenBank catalog: tax ID in column 6, accession in column 1. */
    private static final class GenBankCatalogFormat implements AccessionCatalogFormat {
        private static final int TAX_ID_COLUMN = 6;
        private static final int ACCESSION_COLUMN = 1;

        public int getTaxIdColumn() {
            return TAX_ID_COLUMN;
        }

        public int getAccessionColumn() {
            return ACCESSION_COLUMN;
        }
    }

    /**
     * Builds maps of reference contig accessions to their taxonomic ids and vice versa.
     * Input can be a RefSeq or Genbank catalog file. accNotFound is an initial list of
     * accessions from the reference that have not been successfully looked up; if null,
     * will be initialized to the accToRefInfo key set by default.
     * <p>
     * Returns a collection of reference accessions that could not be found, if any.
     */
    protected static Set<String> parseCatalog(final BufferedReader reader,
                                              final Map<String, Tuple2<String, Long>> accessionToNameAndLength,
                                              final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties,
                                              final boolean bGenBank,
                                              final Set<String> accessionsNotFoundIn) {
        final Set<String> accessionsNotFoundOut;
        try {
            String line;
            final AccessionCatalogFormat catalogFormat = bGenBank ? new GenBankCatalogFormat() : new RefSeqCatalogFormat();
            final int taxIdColumnIndex = catalogFormat.getTaxIdColumn();
            final int accessionColumnIndex = catalogFormat.getAccessionColumn();
            if (accessionsNotFoundIn == null) {
                //If accessionsNotFoundIn is null, this is the first call to parseCatalog, so initialize the set to all accessions
                accessionsNotFoundOut = new HashSet<>(accessionToNameAndLength.keySet());
            } else {
                //Otherwise this is a subsequent call and we continue to look for any remaining accessions
                accessionsNotFoundOut = new HashSet<>(accessionsNotFoundIn);
            }
            final int minColumns = Math.max(taxIdColumnIndex, accessionColumnIndex) + 1;
            long lineNumber = 1;
            while ((line = reader.readLine()) != null && !line.isEmpty()) {
                final String[] tokens = line.trim().split("\t", minColumns + 1);
                if (tokens.length >= minColumns) {
                    final int taxId = parseTaxonId(tokens[taxIdColumnIndex]);
                    final String accession = tokens[accessionColumnIndex];
                    if (accessionToNameAndLength.containsKey(accession)) {
                        final Tuple2<String, Long> nameAndLength = accessionToNameAndLength.get(accession);
                        addReferenceAccessionToTaxon(taxId, nameAndLength._1, nameAndLength._2, taxIdToProperties);
                        accessionsNotFoundOut.remove(accession);
                    }
                } else {
                    throw new UserException.BadInput("Expected at least " + minColumns + " tab-delimited columns in " +
                            "GenBank catalog file, but only found " + tokens.length + " on line " + lineNumber);
                }
                lineNumber++;
            }
        } catch (final IOException e) {
            throw new UserException.CouldNotReadInputFile("Error reading from catalog file", e);
        }
        return accessionsNotFoundOut;
    }

    /**
     * Parses scientific name of each taxon and puts it in taxIdToProperties
     */
    protected static void parseNamesFile(final BufferedReader reader, final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties) {
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                //Split into columns delimited by <TAB>|<TAB>
                final String[] tokens = line.split(VERTICAL_BAR_DELIMITER_REGEX);
                if (tokens.length < 4) {
                    throw new UserException.BadInput("Expected at least 4 columns in tax dump names file but found " + tokens.length);
                }
                final String nameType = tokens[3];
                // Taxa can have several names; keep only the canonical scientific name
                if (nameType.equals("scientific name")) {
                    final int taxId = parseTaxonId(tokens[0]);
                    final String name = tokens[1];
                    if (taxIdToProperties.containsKey(taxId)) {
                        taxIdToProperties.get(taxId).setName(name);
                    } else {
                        taxIdToProperties.put(taxId, new PSPathogenReferenceTaxonProperties(name));
                    }
                }
            }
        } catch (final IOException e) {
            throw new UserException.CouldNotReadInputFile("Error reading from taxonomy dump names file", e);
        }
    }

    /**
     * Gets the rank and parent of each taxon.
     * Returns a Collection of tax ID's found in the nodes file that are not in taxIdToProperties (i.e. were not found in
     * a reference sequence name using the taxid|\&lt;taxid\&gt; tag or the catalog file).
     */
    protected static Collection<Integer> parseNodesFile(final BufferedReader reader, final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties) {
        try {
            final Collection<Integer> taxIdsNotFound = new ArrayList<>();
            String line;
            while ((line = reader.readLine()) != null) {
                final String[] tokens = line.split(VERTICAL_BAR_DELIMITER_REGEX);
                if (tokens.length < 3) {
                    throw new UserException.BadInput("Expected at least 3 columns in tax dump nodes file but found " + tokens.length);
                }
                final int taxId = parseTaxonId(tokens[0]);
                final int parent = parseTaxonId(tokens[1]);
                final String rank = tokens[2];
                final PSPathogenReferenceTaxonProperties taxonProperties;
                if (taxIdToProperties.containsKey(taxId)) {
                    taxonProperties = taxIdToProperties.get(taxId);
                } else {
                    // Placeholder name for taxa never seen in the reference or catalog
                    taxonProperties = new PSPathogenReferenceTaxonProperties("tax_" + taxId);
                    taxIdsNotFound.add(taxId);
                }
                taxonProperties.setRank(rank);
                if (taxId != PSTaxonomyConstants.ROOT_ID) { //keep root's parent set to null
                    taxonProperties.setParent(parent);
                }
                taxIdToProperties.put(taxId, taxonProperties);
            }
            return taxIdsNotFound;
        } catch (final IOException e) {
            throw new UserException.CouldNotReadInputFile("Error reading from taxonomy dump nodes file", e);
        }
    }

    /**
     * Helper function for building the map from tax id to reference contig accession
     */
    private static void addReferenceAccessionToTaxon(final int taxId, final String accession, final long length, final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties) {
        taxIdToProperties.putIfAbsent(taxId, new PSPathogenReferenceTaxonProperties());
        taxIdToProperties.get(taxId).addAccession(accession, length);
    }

    /**
     * Removes nodes not in the tree from the tax_id-to-properties map
     */
    static void removeUnusedTaxIds(final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties,
                                   final PSTree tree) {
        taxIdToProperties.keySet().retainAll(tree.getNodeIDs());
    }

    /**
     * Create reference_name-to-taxid map (just an inversion on taxIdToProperties).
     * Non-virus contigs shorter than {@code minNonVirusContigLength} are excluded.
     */
    protected static Map<String, Integer> buildAccessionToTaxIdMap(final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties,
                                                                   final PSTree tree,
                                                                   final int minNonVirusContigLength) {
        final Map<String, Integer> accessionToTaxId = new HashMap<>();
        for (final int taxId : taxIdToProperties.keySet()) {
            final boolean isVirus = tree.getPathOf(taxId).contains(PSTaxonomyConstants.VIRUS_ID);
            final PSPathogenReferenceTaxonProperties taxonProperties = taxIdToProperties.get(taxId);
            for (final String name : taxonProperties.getAccessions()) {
                // Viruses are always kept; other taxa must meet the length threshold
                if (isVirus || taxonProperties.getAccessionLength(name) >= minNonVirusContigLength) {
                    accessionToTaxId.put(name, taxId);
                }
            }
        }
        return accessionToTaxId;
    }

    /**
     * Returns a PSTree representing a reduced taxonomic tree containing only taxa present in the reference
     */
    protected static PSTree buildTaxonomicTree(final Map<Integer, PSPathogenReferenceTaxonProperties> taxIdToProperties) {

        //Build tree of all taxa
        final PSTree tree = new PSTree(PSTaxonomyConstants.ROOT_ID);
        final Collection<Integer> invalidIds = new HashSet<>(taxIdToProperties.size());
        for (final int taxId : taxIdToProperties.keySet()) {
            if (taxId != PSTaxonomyConstants.ROOT_ID) {
                final PSPathogenReferenceTaxonProperties taxonProperties = taxIdToProperties.get(taxId);
                if (taxonProperties.getName() != null && taxonProperties.getParent() != PSTree.NULL_NODE && taxonProperties.getRank() != null) {
                    tree.addNode(taxId, taxonProperties.getName(), taxonProperties.getParent(), taxonProperties.getTotalLength(), taxonProperties.getRank());
                } else {
                    invalidIds.add(taxId);
                }
            }
        }
        PSUtils.logItemizedWarning(logger, invalidIds, "The following taxonomic IDs did not have name/taxonomy information (this may happen when the catalog and taxdump files are inconsistent)");

        final Set<Integer> unreachableNodes = tree.removeUnreachableNodes();
        if (!unreachableNodes.isEmpty()) {
            PSUtils.logItemizedWarning(logger, unreachableNodes, "Removed " + unreachableNodes.size() + " unreachable tree nodes");
        }

        tree.checkStructure();

        //Trim tree down to nodes corresponding only to reference taxa (and their ancestors)
        final Set<Integer> relevantNodes = new HashSet<>();
        for (final int taxonId : taxIdToProperties.keySet()) {
            if (!taxIdToProperties.get(taxonId).getAccessions().isEmpty() && tree.hasNode(taxonId)) {
                relevantNodes.addAll(tree.getPathOf(taxonId));
            }
        }
        if (relevantNodes.isEmpty()) {
            throw new UserException.BadInput("Did not find any taxa corresponding to reference sequence names.\n\n"
                    + "Check that reference names follow one of the required formats:\n\n"
                    + "\t...|ref|<accession.version>|...\n"
                    + "\t...|taxid|<taxonomy_id>|...\n"
                    + "\t<accession.version><mask>...");
        }
        tree.retainNodes(relevantNodes);

        return tree;
    }

    /**
     * Gets a buffered reader for a gzipped file
     * @param path File path
     * @return Reader for the file
     */
    public static BufferedReader getBufferedReaderGz(final String path) {
        try {
            return new BufferedReader(IOUtils.makeReaderMaybeGzipped(new File(path)));
        } catch (final IOException e) {
            throw new UserException.BadInput("Could not open file " + path, e);
        }
    }

    /**
     * Gets a Reader for a file in a gzipped tarball
     * @param tarPath Path to the tarball
     * @param fileName File within the tarball
     * @return The file's reader, positioned at the start of the requested entry
     */
    public static BufferedReader getBufferedReaderTarGz(final String tarPath, final String fileName) {
        try {
            final TarArchiveInputStream tarStream = new TarArchiveInputStream(new GZIPInputStream(new FileInputStream(tarPath)));
            boolean entryFound = false;
            try {
                TarArchiveEntry entry = tarStream.getNextTarEntry();
                while (entry != null && !entryFound) {
                    if (entry.getName().equals(fileName)) {
                        entryFound = true;
                    } else {
                        entry = tarStream.getNextTarEntry();
                    }
                }
            } finally {
                // BUGFIX: the archive stream was previously leaked when the entry
                // was missing or iteration failed; close it unless the caller
                // will read from it through the returned reader.
                if (!entryFound) {
                    tarStream.close();
                }
            }
            if (!entryFound) {
                throw new UserException.BadInput("Could not find file " + fileName + " in tarball " + tarPath);
            }
            return new BufferedReader(new InputStreamReader(tarStream));
        } catch (final IOException e) {
            throw new UserException.BadInput("Could not open compressed tarball file " + fileName + " in " + tarPath, e);
        }
    }

    /**
     * Writes objects using Kryo to specified local file path.
     * NOTE: using setReferences(false), which must also be set when reading the file. Does not work with nested
     * objects that reference its parent.
     */
    public static void writeTaxonomyDatabase(final String filePath, final PSTaxonomyDatabase taxonomyDatabase) {
        // BUGFIX: try-with-resources guarantees the stream is closed even when
        // serialization throws (it was previously leaked on error).
        try (final Output output = new Output(new FileOutputStream(filePath))) {
            final Kryo kryo = new Kryo();
            kryo.setReferences(false);
            kryo.writeObject(output, taxonomyDatabase);
        } catch (final FileNotFoundException e) {
            throw new UserException.CouldNotCreateOutputFile("Could not serialize objects to file", e);
        }
    }
}
| bsd-3-clause |
PeterWangIntel/chromium-crosswalk | content/public/android/java/src/org/chromium/content/browser/input/AdapterInputConnection.java | 27345 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser.input;
import android.os.SystemClock;
import android.text.Editable;
import android.text.InputType;
import android.text.Selection;
import android.text.TextUtils;
import android.view.KeyCharacterMap;
import android.view.KeyEvent;
import android.view.View;
import android.view.inputmethod.BaseInputConnection;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.ExtractedText;
import android.view.inputmethod.ExtractedTextRequest;
import org.chromium.base.Log;
import org.chromium.base.VisibleForTesting;
import org.chromium.blink_public.web.WebInputEventType;
import org.chromium.blink_public.web.WebTextInputFlags;
import org.chromium.ui.base.ime.TextInputType;
/**
* InputConnection is created by ContentView.onCreateInputConnection.
* It then adapts android's IME to chrome's RenderWidgetHostView using the
* native ImeAdapterAndroid via the class ImeAdapter.
*/
public class AdapterInputConnection extends BaseInputConnection {
    private static final String TAG = "cr.InputConnection";
    private static final boolean DEBUG = false;
    /**
     * Selection value should be -1 if not known. See EditorInfo.java for details.
     */
    public static final int INVALID_SELECTION = -1;
    public static final int INVALID_COMPOSITION = -1;
    private final View mInternalView;
    private final ImeAdapter mImeAdapter;
    // Local mirror of the focused field's text/selection/composition state.
    private final Editable mEditable;
    private boolean mSingleLine;
    // Depth of nested begin/endBatchEdit calls; selection updates are deferred while > 0.
    private int mNumNestedBatchEdits = 0;
    // Pending dead-key accent (combining accent code point), or 0 if none.
    private int mPendingAccent;
    // Last values reported to the InputMethodManager, used to suppress duplicate updates.
    private int mLastUpdateSelectionStart = INVALID_SELECTION;
    private int mLastUpdateSelectionEnd = INVALID_SELECTION;
    private int mLastUpdateCompositionStart = INVALID_COMPOSITION;
    private int mLastUpdateCompositionEnd = INVALID_COMPOSITION;
    @VisibleForTesting
    AdapterInputConnection(View view, ImeAdapter imeAdapter, Editable editable,
            EditorInfo outAttrs) {
        super(view, true);
        mInternalView = view;
        mImeAdapter = imeAdapter;
        mImeAdapter.setInputConnection(this);
        mEditable = editable;
        // The editable passed in might have been in use by a prior keyboard and could have had
        // prior composition spans set. To avoid keyboard conflicts, remove all composing spans
        // when taking ownership of an existing Editable.
        finishComposingText();
        mSingleLine = true;
        outAttrs.imeOptions = EditorInfo.IME_FLAG_NO_FULLSCREEN
                | EditorInfo.IME_FLAG_NO_EXTRACT_UI;
        outAttrs.inputType = EditorInfo.TYPE_CLASS_TEXT
                | EditorInfo.TYPE_TEXT_VARIATION_WEB_EDIT_TEXT;
        int inputType = imeAdapter.getTextInputType();
        int inputFlags = imeAdapter.getTextInputFlags();
        if ((inputFlags & WebTextInputFlags.AutocompleteOff) != 0) {
            outAttrs.inputType |= EditorInfo.TYPE_TEXT_FLAG_NO_SUGGESTIONS;
        }
        if (inputType == TextInputType.TEXT) {
            // Normal text field
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO;
            if ((inputFlags & WebTextInputFlags.AutocorrectOff) == 0) {
                outAttrs.inputType |= EditorInfo.TYPE_TEXT_FLAG_AUTO_CORRECT;
            }
        } else if (inputType == TextInputType.TEXT_AREA
                || inputType == TextInputType.CONTENT_EDITABLE) {
            outAttrs.inputType |= EditorInfo.TYPE_TEXT_FLAG_MULTI_LINE;
            if ((inputFlags & WebTextInputFlags.AutocorrectOff) == 0) {
                outAttrs.inputType |= EditorInfo.TYPE_TEXT_FLAG_AUTO_CORRECT;
            }
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_NONE;
            mSingleLine = false;
        } else if (inputType == TextInputType.PASSWORD) {
            // Password
            outAttrs.inputType = InputType.TYPE_CLASS_TEXT
                    | InputType.TYPE_TEXT_VARIATION_WEB_PASSWORD;
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO;
        } else if (inputType == TextInputType.SEARCH) {
            // Search
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_SEARCH;
        } else if (inputType == TextInputType.URL) {
            // Url
            outAttrs.inputType = InputType.TYPE_CLASS_TEXT
                    | InputType.TYPE_TEXT_VARIATION_URI;
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO;
        } else if (inputType == TextInputType.EMAIL) {
            // Email
            outAttrs.inputType = InputType.TYPE_CLASS_TEXT
                    | InputType.TYPE_TEXT_VARIATION_WEB_EMAIL_ADDRESS;
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_GO;
        } else if (inputType == TextInputType.TELEPHONE) {
            // Telephone
            // Number and telephone do not have both a Tab key and an
            // action in default OSK, so set the action to NEXT
            outAttrs.inputType = InputType.TYPE_CLASS_PHONE;
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_NEXT;
        } else if (inputType == TextInputType.NUMBER) {
            // Number
            outAttrs.inputType = InputType.TYPE_CLASS_NUMBER
                    | InputType.TYPE_NUMBER_VARIATION_NORMAL
                    | InputType.TYPE_NUMBER_FLAG_DECIMAL;
            outAttrs.imeOptions |= EditorInfo.IME_ACTION_NEXT;
        }
        // Handling of autocapitalize. Blink will send the flag taking into account the element's
        // type. This is not using AutocapitalizeNone because Android does not autocapitalize by
        // default and there is no way to express no capitalization.
        // Autocapitalize is meant as a hint to the virtual keyboard.
        if ((inputFlags & WebTextInputFlags.AutocapitalizeCharacters) != 0) {
            outAttrs.inputType |= InputType.TYPE_TEXT_FLAG_CAP_CHARACTERS;
        } else if ((inputFlags & WebTextInputFlags.AutocapitalizeWords) != 0) {
            outAttrs.inputType |= InputType.TYPE_TEXT_FLAG_CAP_WORDS;
        } else if ((inputFlags & WebTextInputFlags.AutocapitalizeSentences) != 0) {
            outAttrs.inputType |= InputType.TYPE_TEXT_FLAG_CAP_SENTENCES;
        }
        // Content editable doesn't use autocapitalize so we need to set it manually.
        if (inputType == TextInputType.CONTENT_EDITABLE) {
            outAttrs.inputType |= InputType.TYPE_TEXT_FLAG_CAP_SENTENCES;
        }
        outAttrs.initialSelStart = Selection.getSelectionStart(mEditable);
        outAttrs.initialSelEnd = Selection.getSelectionEnd(mEditable);
        mLastUpdateSelectionStart = outAttrs.initialSelStart;
        mLastUpdateSelectionEnd = outAttrs.initialSelEnd;
        if (DEBUG) Log.w(TAG, "Constructor called with outAttrs: " + outAttrs);
        Selection.setSelection(mEditable, outAttrs.initialSelStart, outAttrs.initialSelEnd);
        updateSelectionIfRequired();
    }
    /**
     * Updates the AdapterInputConnection's internal representation of the text being edited and
     * its selection and composition properties. The resulting Editable is accessible through the
     * getEditable() method. If the text has not changed, this also calls updateSelection on the
     * InputMethodManager.
     *
     * @param text The String contents of the field being edited.
     * @param selectionStart The character offset of the selection start, or the caret position if
     *                       there is no selection.
     * @param selectionEnd The character offset of the selection end, or the caret position if there
     *                     is no selection.
     * @param compositionStart The character offset of the composition start, or -1 if there is no
     *                         composition.
     * @param compositionEnd The character offset of the composition end, or -1 if there is no
     *                       selection.
     * @param isNonImeChange True when the update was caused by non-IME (e.g. Javascript).
     */
    @VisibleForTesting
    public void updateState(String text, int selectionStart, int selectionEnd, int compositionStart,
            int compositionEnd, boolean isNonImeChange) {
        if (DEBUG) {
            Log.w(TAG, "updateState [" + text + "] [" + selectionStart + " " + selectionEnd + "] ["
                    + compositionStart + " " + compositionEnd + "] [" + isNonImeChange + "]");
        }
        // If this update is from the IME, no further state modification is necessary because the
        // state should have been updated already by the IM framework directly.
        if (!isNonImeChange) return;
        // Non-breaking spaces can cause the IME to get confused. Replace with normal spaces.
        text = text.replace('\u00A0', ' ');
        // Clamp all offsets into the valid range of the new text before applying them.
        selectionStart = Math.min(selectionStart, text.length());
        selectionEnd = Math.min(selectionEnd, text.length());
        compositionStart = Math.min(compositionStart, text.length());
        compositionEnd = Math.min(compositionEnd, text.length());
        String prevText = mEditable.toString();
        boolean textUnchanged = prevText.equals(text);
        if (!textUnchanged) {
            mEditable.replace(0, mEditable.length(), text);
        }
        Selection.setSelection(mEditable, selectionStart, selectionEnd);
        if (compositionStart == compositionEnd) {
            removeComposingSpans(mEditable);
        } else {
            super.setComposingRegion(compositionStart, compositionEnd);
        }
        updateSelectionIfRequired();
    }
    /**
     * @return Editable object which contains the state of current focused editable element.
     */
    @Override
    public Editable getEditable() {
        return mEditable;
    }
    /**
     * Sends selection update to the InputMethodManager unless we are currently in a batch edit or
     * if the exact same selection and composition update was sent already.
     */
    private void updateSelectionIfRequired() {
        if (mNumNestedBatchEdits != 0) return;
        int selectionStart = Selection.getSelectionStart(mEditable);
        int selectionEnd = Selection.getSelectionEnd(mEditable);
        int compositionStart = getComposingSpanStart(mEditable);
        int compositionEnd = getComposingSpanEnd(mEditable);
        // Avoid sending update if we sent an exact update already previously.
        if (mLastUpdateSelectionStart == selectionStart
                && mLastUpdateSelectionEnd == selectionEnd
                && mLastUpdateCompositionStart == compositionStart
                && mLastUpdateCompositionEnd == compositionEnd) {
            return;
        }
        if (DEBUG) {
            Log.w(TAG, "updateSelectionIfRequired [" + selectionStart + " " + selectionEnd + "] ["
                    + compositionStart + " " + compositionEnd + "]");
        }
        // updateSelection should be called every time the selection or composition changes
        // if it happens not within a batch edit, or at the end of each top level batch edit.
        getInputMethodManagerWrapper().updateSelection(
                mInternalView, selectionStart, selectionEnd, compositionStart, compositionEnd);
        mLastUpdateSelectionStart = selectionStart;
        mLastUpdateSelectionEnd = selectionEnd;
        mLastUpdateCompositionStart = compositionStart;
        mLastUpdateCompositionEnd = compositionEnd;
    }
    /**
     * @see BaseInputConnection#setComposingText(java.lang.CharSequence, int)
     */
    @Override
    public boolean setComposingText(CharSequence text, int newCursorPosition) {
        if (DEBUG) Log.w(TAG, "setComposingText [" + text + "] [" + newCursorPosition + "]");
        if (maybePerformEmptyCompositionWorkaround(text)) return true;
        mPendingAccent = 0;
        super.setComposingText(text, newCursorPosition);
        updateSelectionIfRequired();
        return mImeAdapter.checkCompositionQueueAndCallNative(text, newCursorPosition, false);
    }
    /**
     * @see BaseInputConnection#commitText(java.lang.CharSequence, int)
     */
    @Override
    public boolean commitText(CharSequence text, int newCursorPosition) {
        if (DEBUG) Log.w(TAG, "commitText [" + text + "] [" + newCursorPosition + "]");
        if (maybePerformEmptyCompositionWorkaround(text)) return true;
        mPendingAccent = 0;
        super.commitText(text, newCursorPosition);
        updateSelectionIfRequired();
        return mImeAdapter.checkCompositionQueueAndCallNative(text, newCursorPosition,
                text.length() > 0);
    }
    /**
     * @see BaseInputConnection#performEditorAction(int)
     */
    @Override
    public boolean performEditorAction(int actionCode) {
        if (DEBUG) Log.w(TAG, "performEditorAction [" + actionCode + "]");
        if (actionCode == EditorInfo.IME_ACTION_NEXT) {
            restartInput();
            // Send TAB key event
            long timeStampMs = SystemClock.uptimeMillis();
            mImeAdapter.sendSyntheticKeyEvent(
                    WebInputEventType.RawKeyDown, timeStampMs, KeyEvent.KEYCODE_TAB, 0, 0);
        } else {
            mImeAdapter.sendKeyEventWithKeyCode(KeyEvent.KEYCODE_ENTER,
                    KeyEvent.FLAG_SOFT_KEYBOARD | KeyEvent.FLAG_KEEP_TOUCH_MODE
                    | KeyEvent.FLAG_EDITOR_ACTION);
        }
        return true;
    }
    /**
     * @see BaseInputConnection#performContextMenuAction(int)
     */
    @Override
    public boolean performContextMenuAction(int id) {
        if (DEBUG) Log.w(TAG, "performContextMenuAction [" + id + "]");
        switch (id) {
            case android.R.id.selectAll:
                return mImeAdapter.selectAll();
            case android.R.id.cut:
                return mImeAdapter.cut();
            case android.R.id.copy:
                return mImeAdapter.copy();
            case android.R.id.paste:
                return mImeAdapter.paste();
            default:
                return false;
        }
    }
    /**
     * @see BaseInputConnection#getExtractedText(android.view.inputmethod.ExtractedTextRequest,
     *                                           int)
     */
    @Override
    public ExtractedText getExtractedText(ExtractedTextRequest request, int flags) {
        if (DEBUG) Log.w(TAG, "getExtractedText");
        ExtractedText et = new ExtractedText();
        et.text = mEditable.toString();
        et.partialEndOffset = mEditable.length();
        et.selectionStart = Selection.getSelectionStart(mEditable);
        et.selectionEnd = Selection.getSelectionEnd(mEditable);
        et.flags = mSingleLine ? ExtractedText.FLAG_SINGLE_LINE : 0;
        return et;
    }
    /**
     * @see BaseInputConnection#beginBatchEdit()
     */
    @Override
    public boolean beginBatchEdit() {
        if (DEBUG) Log.w(TAG, "beginBatchEdit [" + (mNumNestedBatchEdits == 0) + "]");
        mNumNestedBatchEdits++;
        return true;
    }
    /**
     * @see BaseInputConnection#endBatchEdit()
     */
    @Override
    public boolean endBatchEdit() {
        if (mNumNestedBatchEdits == 0) return false;
        --mNumNestedBatchEdits;
        if (DEBUG) Log.w(TAG, "endBatchEdit [" + (mNumNestedBatchEdits == 0) + "]");
        // Only the outermost endBatchEdit flushes the deferred selection update.
        if (mNumNestedBatchEdits == 0) updateSelectionIfRequired();
        return mNumNestedBatchEdits != 0;
    }
    /**
     * @see BaseInputConnection#deleteSurroundingText(int, int)
     */
    @Override
    public boolean deleteSurroundingText(int beforeLength, int afterLength) {
        return deleteSurroundingTextImpl(beforeLength, afterLength, false);
    }
    /**
     * Check if the given {@code index} is between UTF-16 surrogate pair.
     * @param str The String.
     * @param index The index
     * @return True if the index is between UTF-16 surrogate pair, false otherwise.
     */
    @VisibleForTesting
    static boolean isIndexBetweenUtf16SurrogatePair(CharSequence str, int index) {
        return index > 0 && index < str.length() && Character.isHighSurrogate(str.charAt(index - 1))
                && Character.isLowSurrogate(str.charAt(index));
    }
    private boolean deleteSurroundingTextImpl(
            int beforeLength, int afterLength, boolean fromPhysicalKey) {
        if (DEBUG) {
            Log.w(TAG, "deleteSurroundingText [" + beforeLength + " " + afterLength + " "
                    + fromPhysicalKey + "]");
        }
        if (mPendingAccent != 0) {
            finishComposingText();
        }
        int originalBeforeLength = beforeLength;
        int originalAfterLength = afterLength;
        int selectionStart = Selection.getSelectionStart(mEditable);
        int selectionEnd = Selection.getSelectionEnd(mEditable);
        int availableBefore = selectionStart;
        int availableAfter = mEditable.length() - selectionEnd;
        beforeLength = Math.min(beforeLength, availableBefore);
        afterLength = Math.min(afterLength, availableAfter);
        // Adjust these values even before calling super.deleteSurroundingText() to be consistent
        // with the super class.
        if (isIndexBetweenUtf16SurrogatePair(mEditable, selectionStart - beforeLength)) {
            beforeLength += 1;
        }
        if (isIndexBetweenUtf16SurrogatePair(mEditable, selectionEnd + afterLength)) {
            afterLength += 1;
        }
        super.deleteSurroundingText(beforeLength, afterLength);
        updateSelectionIfRequired();
        // If this was called due to a physical key, no need to generate a key event here as
        // the caller will take care of forwarding the original.
        if (fromPhysicalKey) {
            return true;
        }
        // For single-char deletion calls |ImeAdapter.sendKeyEventWithKeyCode| with the real key
        // code. For multi-character deletion, executes deletion by calling
        // |ImeAdapter.deleteSurroundingText| and sends synthetic key events with a dummy key code.
        int keyCode = KeyEvent.KEYCODE_UNKNOWN;
        if (originalBeforeLength == 1 && originalAfterLength == 0) {
            keyCode = KeyEvent.KEYCODE_DEL;
        } else if (originalBeforeLength == 0 && originalAfterLength == 1) {
            keyCode = KeyEvent.KEYCODE_FORWARD_DEL;
        }
        boolean result = true;
        if (keyCode == KeyEvent.KEYCODE_UNKNOWN) {
            result = mImeAdapter.sendSyntheticKeyEvent(
                    WebInputEventType.RawKeyDown, SystemClock.uptimeMillis(), keyCode, 0, 0);
            result &= mImeAdapter.deleteSurroundingText(beforeLength, afterLength);
            result &= mImeAdapter.sendSyntheticKeyEvent(
                    WebInputEventType.KeyUp, SystemClock.uptimeMillis(), keyCode, 0, 0);
        } else {
            mImeAdapter.sendKeyEventWithKeyCode(
                    keyCode, KeyEvent.FLAG_SOFT_KEYBOARD | KeyEvent.FLAG_KEEP_TOUCH_MODE);
        }
        return result;
    }
    /**
     * @see BaseInputConnection#sendKeyEvent(android.view.KeyEvent)
     */
    @Override
    public boolean sendKeyEvent(KeyEvent event) {
        if (DEBUG) {
            Log.w(TAG, "sendKeyEvent [" + event.getAction() + "] [" + event.getKeyCode() + "] ["
                    + event.getUnicodeChar() + "]");
        }
        int action = event.getAction();
        int keycode = event.getKeyCode();
        int unicodeChar = event.getUnicodeChar();
        // If this isn't a KeyDown event, no need to update composition state; just pass the key
        // event through and return. But note that some keys, such as enter, may actually be
        // handled on ACTION_UP in Blink.
        if (action != KeyEvent.ACTION_DOWN) {
            mImeAdapter.translateAndSendNativeEvents(event);
            return true;
        }
        // If this is backspace/del or if the key has a character representation,
        // need to update the underlying Editable (i.e. the local representation of the text
        // being edited). Some IMEs like Jellybean stock IME and Samsung IME mix in delete
        // KeyPress events instead of calling deleteSurroundingText.
        if (keycode == KeyEvent.KEYCODE_DEL) {
            deleteSurroundingTextImpl(1, 0, true);
        } else if (keycode == KeyEvent.KEYCODE_FORWARD_DEL) {
            deleteSurroundingTextImpl(0, 1, true);
        } else if (keycode == KeyEvent.KEYCODE_ENTER) {
            // Finish text composition when pressing enter, as that may submit a form field.
            // TODO(aurimas): remove this workaround when crbug.com/278584 is fixed.
            finishComposingText();
        } else if ((unicodeChar & KeyCharacterMap.COMBINING_ACCENT) != 0) {
            // Store a pending accent character and make it the current composition.
            int pendingAccent = unicodeChar & KeyCharacterMap.COMBINING_ACCENT_MASK;
            StringBuilder builder = new StringBuilder();
            builder.appendCodePoint(pendingAccent);
            setComposingText(builder.toString(), 1);
            mPendingAccent = pendingAccent;
            return true;
        } else if (mPendingAccent != 0 && unicodeChar != 0) {
            int combined = KeyEvent.getDeadChar(mPendingAccent, unicodeChar);
            if (combined != 0) {
                StringBuilder builder = new StringBuilder();
                builder.appendCodePoint(combined);
                commitText(builder.toString(), 1);
                return true;
            }
            // Noncombinable character; commit the accent character and fall through to sending
            // the key event for the character afterwards.
            finishComposingText();
        }
        replaceSelectionWithUnicodeChar(unicodeChar);
        mImeAdapter.translateAndSendNativeEvents(event);
        return true;
    }
    /**
     * Update the mEditable state to reflect what Blink will do in response to the KeyDown
     * for a unicode-mapped key event.
     * @param unicodeChar The Unicode character to update selection with.
     */
    private void replaceSelectionWithUnicodeChar(int unicodeChar) {
        if (unicodeChar == 0) return;
        int selectionStart = Selection.getSelectionStart(mEditable);
        int selectionEnd = Selection.getSelectionEnd(mEditable);
        // Selection offsets may be reversed; normalize so replace() gets an ordered range.
        if (selectionStart > selectionEnd) {
            int temp = selectionStart;
            selectionStart = selectionEnd;
            selectionEnd = temp;
        }
        mEditable.replace(selectionStart, selectionEnd, Character.toString((char) unicodeChar));
        updateSelectionIfRequired();
    }
    /**
     * @see BaseInputConnection#finishComposingText()
     */
    @Override
    public boolean finishComposingText() {
        if (DEBUG) Log.w(TAG, "finishComposingText");
        mPendingAccent = 0;
        if (getComposingSpanStart(mEditable) == getComposingSpanEnd(mEditable)) {
            return true;
        }
        super.finishComposingText();
        updateSelectionIfRequired();
        mImeAdapter.finishComposingText();
        return true;
    }
    /**
     * @see BaseInputConnection#setSelection(int, int)
     */
    @Override
    public boolean setSelection(int start, int end) {
        if (DEBUG) Log.w(TAG, "setSelection [" + start + " " + end + "]");
        int textLength = mEditable.length();
        if (start < 0 || end < 0 || start > textLength || end > textLength) return true;
        super.setSelection(start, end);
        updateSelectionIfRequired();
        return mImeAdapter.setEditableSelectionOffsets(start, end);
    }
    /**
     * Informs the InputMethodManager and InputMethodSession (i.e. the IME) that the text
     * state is no longer what the IME has and that it needs to be updated.
     */
    void restartInput() {
        if (DEBUG) Log.w(TAG, "restartInput");
        getInputMethodManagerWrapper().restartInput(mInternalView);
        mNumNestedBatchEdits = 0;
        mPendingAccent = 0;
    }
    /**
     * @see BaseInputConnection#setComposingRegion(int, int)
     */
    @Override
    public boolean setComposingRegion(int start, int end) {
        if (DEBUG) Log.w(TAG, "setComposingRegion [" + start + " " + end + "]");
        int textLength = mEditable.length();
        // Order and clamp the requested region into [0, textLength].
        int a = Math.min(start, end);
        int b = Math.max(start, end);
        if (a < 0) a = 0;
        if (b < 0) b = 0;
        if (a > textLength) a = textLength;
        if (b > textLength) b = textLength;
        if (a == b) {
            removeComposingSpans(mEditable);
        } else {
            super.setComposingRegion(a, b);
        }
        updateSelectionIfRequired();
        CharSequence regionText = null;
        if (b > a) {
            regionText = mEditable.subSequence(a, b);
        }
        return mImeAdapter.setComposingRegion(regionText, a, b);
    }
    boolean isActive() {
        return getInputMethodManagerWrapper().isActive(mInternalView);
    }
    private InputMethodManagerWrapper getInputMethodManagerWrapper() {
        return mImeAdapter.getInputMethodManagerWrapper();
    }
    /**
     * This method works around the issue crbug.com/373934 where Blink does not cancel
     * the composition when we send a commit with the empty text.
     *
     * TODO(aurimas) Remove this once crbug.com/373934 is fixed.
     *
     * @param text Text that software keyboard requested to commit.
     * @return Whether the workaround was performed.
     */
    private boolean maybePerformEmptyCompositionWorkaround(CharSequence text) {
        int selectionStart = Selection.getSelectionStart(mEditable);
        int selectionEnd = Selection.getSelectionEnd(mEditable);
        int compositionStart = getComposingSpanStart(mEditable);
        int compositionEnd = getComposingSpanEnd(mEditable);
        if (TextUtils.isEmpty(text) && (selectionStart == selectionEnd)
                && compositionStart != INVALID_COMPOSITION
                && compositionEnd != INVALID_COMPOSITION) {
            beginBatchEdit();
            finishComposingText();
            int selection = Selection.getSelectionStart(mEditable);
            // Delete the whole former composition region [compositionStart, compositionEnd]:
            // |selection - compositionStart| characters before the cursor and
            // |compositionEnd - selection| characters after it. (The previous code passed
            // |selection - compositionEnd|, which is <= 0 and only worked by accident when the
            // cursor sat exactly at the end of the composition.)
            deleteSurroundingText(selection - compositionStart, compositionEnd - selection);
            endBatchEdit();
            return true;
        }
        return false;
    }
    @VisibleForTesting
    static class ImeState {
        public final String text;
        public final int selectionStart;
        public final int selectionEnd;
        public final int compositionStart;
        public final int compositionEnd;
        public ImeState(String text, int selectionStart, int selectionEnd,
                int compositionStart, int compositionEnd) {
            this.text = text;
            this.selectionStart = selectionStart;
            this.selectionEnd = selectionEnd;
            this.compositionStart = compositionStart;
            this.compositionEnd = compositionEnd;
        }
    }
    @VisibleForTesting
    ImeState getImeStateForTesting() {
        String text = mEditable.toString();
        int selectionStart = Selection.getSelectionStart(mEditable);
        int selectionEnd = Selection.getSelectionEnd(mEditable);
        int compositionStart = getComposingSpanStart(mEditable);
        int compositionEnd = getComposingSpanEnd(mEditable);
        return new ImeState(text, selectionStart, selectionEnd, compositionStart, compositionEnd);
    }
}
| bsd-3-clause |
oyachai/HearthSim | src/main/java/com/hearthsim/card/basic/spell/HeroicStrike.java | 1140 | package com.hearthsim.card.basic.spell;
import com.hearthsim.card.spellcard.SpellTargetableCard;
import com.hearthsim.event.effect.EffectCharacter;
import com.hearthsim.event.effect.EffectCharacterBuffTemp;
import com.hearthsim.event.filter.FilterCharacter;
import com.hearthsim.event.filter.FilterCharacterTargetedSpell;
public class HeroicStrike extends SpellTargetableCard {
    // The buff is stateless, so a single shared instance suffices for all casts.
    private static final EffectCharacter PLUS_FOUR_ATTACK_THIS_TURN = new EffectCharacterBuffTemp(4);
    /**
     * Constructor.
     *
     * Defaults to hasBeenUsed = false.
     */
    public HeroicStrike() {
        super();
    }
    /**
     * Heroic Strike may only target the casting player's own hero.
     *
     * @return The self-targeting filter for this spell.
     */
    @Override
    public FilterCharacter getTargetableFilter() {
        return FilterCharacterTargetedSpell.SELF;
    }
    /**
     * Heroic Strike: gives the hero +4 attack this turn.
     *
     * @return The temporary +4 attack buff applied to the targeted hero.
     */
    @Override
    public EffectCharacter getTargetableEffect() {
        return PLUS_FOUR_ATTACK_THIS_TURN;
    }
}
| mit |
oscarguindzberg/multibit-hd | mbhd-core/src/main/java/org/multibit/hd/core/events/CoreEvent.java | 449 | package org.multibit.hd.core.events;
/**
 * <p>Signature (marker) interface to provide the following to the Core Event API:</p>
 * <ul>
 * <li>Identification of core events</li>
 * </ul>
 * <p>A core event should be named using a noun as the first part of the name (e.g. ExchangeRateChangedEvent).</p>
 * <p>A core event can occur at any time and will not be synchronized with other events;
 * implementations must not assume any ordering between events.</p>
 *
 * @since 0.0.1
 *
 */
public interface CoreEvent {
}
| mit |
asdfzt/CPS450-MiniJava | MiniJavaParserWithAST/gen-src/com/bju/cps450/node/TPlus.java | 738 | /* This file was generated by SableCC (http://www.sablecc.org/). */
package com.bju.cps450.node;
import com.bju.cps450.analysis.*;
@SuppressWarnings("nls")
public final class TPlus extends Token {
    /** Creates a '+' token with no position information. */
    public TPlus() {
        super.setText("+");
    }

    /** Creates a '+' token at the given source line and position. */
    public TPlus(int line, int pos) {
        this();
        setLine(line);
        setPos(pos);
    }

    @Override
    public Object clone() {
        // Tokens are cloned by constructing a fresh instance at the same position.
        return new TPlus(getLine(), getPos());
    }

    @Override
    public void apply(Switch sw) {
        ((Analysis) sw).caseTPlus(this);
    }

    /** The text of a fixed token is immutable; any attempt to change it fails. */
    @Override
    public void setText(@SuppressWarnings("unused") String text) {
        throw new RuntimeException("Cannot change TPlus text.");
    }
}
| mit |
ahmedvc/umple | Umplificator/UmplifiedProjects/jhotdraw7/src/main/java/org/jhotdraw/samples/mini/LayouterSample.java | 2276 | /*
* @(#)LayouterSample.java
*
* Copyright (c) 1996-2010 by the original authors of JHotDraw and all its
* contributors. All rights reserved.
*
* You may not use, copy or modify this file, except in compliance with the
* license agreement you entered into with the copyright holders. For details
* see accompanying license terms.
*/
package org.jhotdraw.samples.mini;
import org.jhotdraw.draw.tool.DelegationSelectionTool;
import org.jhotdraw.draw.layouter.VerticalLayouter;
import javax.swing.*;
import org.jhotdraw.draw.*;
/**
* Example showing how to layout two editable text figures and a line figure
* within a graphical composite figure.
*
* @author Werner Randelshofer
* @version $Id: LayouterSample.java 718 2010-11-21 17:49:53Z rawcoder $
*/
public class LayouterSample {
    public static void main(String[] args) {
        // All Swing work must happen on the Event Dispatch Thread.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                createAndShowDrawing();
            }
        });
    }

    /** Builds the laid-out composite figure, the drawing, and the editor window. */
    private static void createAndShowDrawing() {
        // Create a graphical composite figure and add its child figures.
        GraphicalCompositeFigure composite = new GraphicalCompositeFigure();
        composite.add(new TextFigure("Above the line"));
        composite.add(new LineFigure());
        composite.add(new TextFigure("Below the line"));

        // Stack the children vertically, then perform the layout.
        composite.setLayouter(new VerticalLayouter());
        composite.layout();

        // Add the composite figure to a drawing.
        Drawing drawing = new DefaultDrawing();
        drawing.add(composite);

        // Create a frame with a drawing view and a drawing editor.
        JFrame frame = new JFrame("My Drawing");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setSize(400, 300);
        DrawingView view = new DefaultDrawingView();
        view.setDrawing(drawing);
        frame.getContentPane().add(view.getComponent());
        DrawingEditor editor = new DefaultDrawingEditor();
        editor.add(view);
        editor.setTool(new DelegationSelectionTool());
        frame.setVisible(true);
    }
}
| mit |
dlee0113/java-design-patterns | front-controller/src/main/java/com/iluwatar/front/controller/ArcherCommand.java | 204 | package com.iluwatar.front.controller;
/**
 * Front controller command that renders the archer view.
 */
public class ArcherCommand implements Command {
  @Override
  public void process() {
    final ArcherView view = new ArcherView();
    view.display();
  }
}
| mit |
general-language-syntax/GLS | test/integration/ListSortStrings/list sort strings.java | 73 | //
import java.util.Comparator;
abc.sort(Comparator.naturalOrder());
//
| mit |
oscarguindzberg/multibit-hd | mbhd-swing/src/main/java/org/multibit/hd/ui/views/wizards/appearance_settings/AppearanceSettingsWizard.java | 984 | package org.multibit.hd.ui.views.wizards.appearance_settings;
import com.google.common.base.Optional;
import org.multibit.hd.ui.views.wizards.AbstractWizard;
import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView;
import java.util.Map;
/**
* <p>Wizard to provide the following to UI for "appearance" wizard:</p>
* <ol>
* <li>Enter details</li>
* </ol>
*
* @since 0.0.1
*
*/
public class AppearanceSettingsWizard extends AbstractWizard<AppearanceSettingsWizardModel> {

  public AppearanceSettingsWizard(AppearanceSettingsWizardModel model) {
    // Not exiting, and no starting panel override.
    super(model, false, Optional.absent());
  }

  @Override
  protected void populateWizardViewMap(Map<String, AbstractWizardPanelView> wizardViewMap) {
    // Single-panel wizard: register the "enter details" panel under its state name
    final String panelName = AppearanceSettingsState.APPEARANCE_ENTER_DETAILS.name();
    wizardViewMap.put(panelName, new AppearanceSettingsPanelView(this, panelName));
  }
}
| mit |
aryantaheri/controller | opendaylight/netconf/config-netconf-connector/src/main/java/org/opendaylight/controller/netconf/confignetconfconnector/mapping/attributes/toxml/SimpleUnionAttributeWritingStrategy.java | 1436 | /*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.netconf.confignetconfconnector.mapping.attributes.toxml;
import com.google.common.base.Preconditions;
import org.opendaylight.controller.netconf.confignetconfconnector.util.Util;
import org.w3c.dom.Document;
import java.util.List;
import java.util.Map;
public class SimpleUnionAttributeWritingStrategy extends SimpleAttributeWritingStrategy {
    /**
     * @param document
     * @param key
     */
    public SimpleUnionAttributeWritingStrategy(Document document, String key) {
        super(document, key);
    }
    /**
     * Reassembles a union value into a single string. The value arrives as a
     * single-entry map whose sole value is a list of string fragments; the
     * fragments are concatenated in order.
     */
    protected Object preprocess(Object value) {
        Util.checkType(value, Map.class);
        Map<?, ?> wrapper = (Map<?, ?>) value;
        Preconditions.checkArgument(wrapper.size() == 1, "Unexpected number of values in %s, expected 1", value);
        Object fragments = wrapper.values().iterator().next();
        Util.checkType(fragments, List.class);
        StringBuilder joined = new StringBuilder();
        for (Object fragment : (List<?>) fragments) {
            Util.checkType(fragment, String.class);
            joined.append(fragment);
        }
        return joined.toString();
    }
}
| epl-1.0 |
drbgfc/mdht | hl7/plugins/org.openhealthtools.mdht.emf.hl7.mif2/src/org/openhealthtools/mdht/emf/w3c/xhtml/internal/impl/CodeImpl.java | 8990 | /*******************************************************************************
* Copyright (c) 2006, 2009 David A Carlson
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David A Carlson (XMLmodeling.com) - initial API and implementation
*******************************************************************************/
package org.openhealthtools.mdht.emf.w3c.xhtml.internal.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.openhealthtools.mdht.emf.w3c.xhtml.Code;
import org.openhealthtools.mdht.emf.w3c.xhtml.MifClassType;
import org.openhealthtools.mdht.emf.w3c.xhtml.StyleSheet;
import org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Code</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.openhealthtools.mdht.emf.w3c.xhtml.internal.impl.CodeImpl#getClass_ <em>Class</em>}</li>
* <li>{@link org.openhealthtools.mdht.emf.w3c.xhtml.internal.impl.CodeImpl#getLang <em>Lang</em>}</li>
* <li>{@link org.openhealthtools.mdht.emf.w3c.xhtml.internal.impl.CodeImpl#getStyle <em>Style</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class CodeImpl extends InlineImpl implements Code {
/**
* The default value of the '{@link #getClass_() <em>Class</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getClass_()
* @generated
* @ordered
*/
protected static final MifClassType CLASS_EDEFAULT = MifClassType.INSERTED;
/**
* The cached value of the '{@link #getClass_() <em>Class</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getClass_()
* @generated
* @ordered
*/
protected MifClassType class_ = CLASS_EDEFAULT;
/**
* This is true if the Class attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean classESet;
/**
* The default value of the '{@link #getLang() <em>Lang</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getLang()
* @generated
* @ordered
*/
protected static final String LANG_EDEFAULT = null;
/**
* The cached value of the '{@link #getLang() <em>Lang</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getLang()
* @generated
* @ordered
*/
protected String lang = LANG_EDEFAULT;
/**
* The default value of the '{@link #getStyle() <em>Style</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getStyle()
* @generated
* @ordered
*/
protected static final StyleSheet STYLE_EDEFAULT = StyleSheet.REQUIREMENT;
/**
* The cached value of the '{@link #getStyle() <em>Style</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getStyle()
* @generated
* @ordered
*/
protected StyleSheet style = STYLE_EDEFAULT;
/**
* This is true if the Style attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean styleESet;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected CodeImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return XhtmlPackage.Literals.CODE;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public MifClassType getClass_() {
return class_;
}
/**
 * <!-- begin-user-doc -->
 * Sets the Class attribute. A {@code null} argument is normalized to
 * {@link #CLASS_EDEFAULT}; the set-flag is raised and a SET notification is
 * fired to interested adapters (if any).
 * <!-- end-user-doc -->
 * @generated
 */
public void setClass(MifClassType newClass) {
    MifClassType oldClass = class_;
    class_ = newClass == null
        ? CLASS_EDEFAULT
        : newClass;
    boolean oldClassESet = classESet;
    classESet = true;
    if (eNotificationRequired()) {
        eNotify(new ENotificationImpl(
            this, Notification.SET, XhtmlPackage.CODE__CLASS, oldClass, class_, !oldClassESet));
    }
}
/**
 * <!-- begin-user-doc -->
 * Reverts the Class attribute to its unset default and fires an UNSET
 * notification carrying the previous value and set-state.
 * <!-- end-user-doc -->
 * @generated
 */
public void unsetClass() {
    MifClassType oldClass = class_;
    boolean oldClassESet = classESet;
    class_ = CLASS_EDEFAULT;
    classESet = false;
    if (eNotificationRequired()) {
        eNotify(new ENotificationImpl(
            this, Notification.UNSET, XhtmlPackage.CODE__CLASS, oldClass, CLASS_EDEFAULT, oldClassESet));
    }
}
/**
 * <!-- begin-user-doc -->
 * Reports whether the Class attribute has been explicitly set.
 * <!-- end-user-doc -->
 * @generated
 */
public boolean isSetClass() {
    return classESet;
}
/**
 * <!-- begin-user-doc -->
 * Returns the Lang attribute value; may be {@code null} when unspecified.
 * <!-- end-user-doc -->
 * @generated
 */
public String getLang() {
    return lang;
}
/**
 * <!-- begin-user-doc -->
 * Sets the Lang attribute and fires a SET notification. Unlike the Class and
 * Style attributes, Lang is not "unsettable", so no set-flag is tracked.
 * <!-- end-user-doc -->
 * @generated
 */
public void setLang(String newLang) {
    String oldLang = lang;
    lang = newLang;
    if (eNotificationRequired()) {
        eNotify(new ENotificationImpl(this, Notification.SET, XhtmlPackage.CODE__LANG, oldLang, lang));
    }
}
/**
 * <!-- begin-user-doc -->
 * Returns the Style attribute value.
 * <!-- end-user-doc -->
 * @generated
 */
public StyleSheet getStyle() {
    return style;
}
/**
 * <!-- begin-user-doc -->
 * Sets the Style attribute. A {@code null} argument is normalized to
 * {@link #STYLE_EDEFAULT}; the set-flag is raised and a SET notification is
 * fired to interested adapters (if any).
 * <!-- end-user-doc -->
 * @generated
 */
public void setStyle(StyleSheet newStyle) {
    StyleSheet oldStyle = style;
    style = newStyle == null
        ? STYLE_EDEFAULT
        : newStyle;
    boolean oldStyleESet = styleESet;
    styleESet = true;
    if (eNotificationRequired()) {
        eNotify(new ENotificationImpl(
            this, Notification.SET, XhtmlPackage.CODE__STYLE, oldStyle, style, !oldStyleESet));
    }
}
/**
 * <!-- begin-user-doc -->
 * Reverts the Style attribute to its unset default and fires an UNSET
 * notification carrying the previous value and set-state.
 * <!-- end-user-doc -->
 * @generated
 */
public void unsetStyle() {
    StyleSheet oldStyle = style;
    boolean oldStyleESet = styleESet;
    style = STYLE_EDEFAULT;
    styleESet = false;
    if (eNotificationRequired()) {
        eNotify(new ENotificationImpl(
            this, Notification.UNSET, XhtmlPackage.CODE__STYLE, oldStyle, STYLE_EDEFAULT, oldStyleESet));
    }
}
/**
 * <!-- begin-user-doc -->
 * Reports whether the Style attribute has been explicitly set.
 * <!-- end-user-doc -->
 * @generated
 */
public boolean isSetStyle() {
    return styleESet;
}
/**
 * <!-- begin-user-doc -->
 * Reflective read access: dispatches on the feature ID to the typed getter,
 * delegating unknown features to the superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
        case XhtmlPackage.CODE__CLASS:
            return getClass_();
        case XhtmlPackage.CODE__LANG:
            return getLang();
        case XhtmlPackage.CODE__STYLE:
            return getStyle();
    }
    return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * Reflective write access: dispatches on the feature ID to the typed setter,
 * delegating unknown features to the superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eSet(int featureID, Object newValue) {
    switch (featureID) {
        case XhtmlPackage.CODE__CLASS:
            setClass((MifClassType) newValue);
            return;
        case XhtmlPackage.CODE__LANG:
            setLang((String) newValue);
            return;
        case XhtmlPackage.CODE__STYLE:
            setStyle((StyleSheet) newValue);
            return;
    }
    super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * Reflective unset: unsettable attributes (Class, Style) use their unset
 * methods; Lang is simply reset to its default value.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
        case XhtmlPackage.CODE__CLASS:
            unsetClass();
            return;
        case XhtmlPackage.CODE__LANG:
            setLang(LANG_EDEFAULT);
            return;
        case XhtmlPackage.CODE__STYLE:
            unsetStyle();
            return;
    }
    super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Reflective set-state query: unsettable attributes consult their set-flags;
 * Lang is considered set whenever it differs from its default.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case XhtmlPackage.CODE__CLASS:
            return isSetClass();
        case XhtmlPackage.CODE__LANG:
            return LANG_EDEFAULT == null
                ? lang != null
                : !LANG_EDEFAULT.equals(lang);
        case XhtmlPackage.CODE__STYLE:
            return isSetStyle();
    }
    return super.eIsSet(featureID);
}
/**
 * Returns a debug string of the form
 * {@code <super> (class: <value|unset>, lang: <value>, style: <value|unset>)}.
 * Proxies fall back to the superclass representation.
 * <!-- begin-user-doc -->
 * Hand-modified to build the result with {@link StringBuilder} instead of the
 * needlessly synchronized {@link StringBuffer}; the output is unchanged.
 * <!-- end-user-doc -->
 * @generated NOT
 */
@Override
public String toString() {
    if (eIsProxy()) {
        return super.toString();
    }
    StringBuilder result = new StringBuilder(super.toString());
    result.append(" (class: ");
    if (classESet) {
        result.append(class_);
    } else {
        result.append("<unset>");
    }
    result.append(", lang: ");
    result.append(lang);
    result.append(", style: ");
    if (styleESet) {
        result.append(style);
    } else {
        result.append("<unset>");
    }
    result.append(')');
    return result.toString();
}
} // CodeImpl
| epl-1.0 |
boa0332/mercurialeclipse | plugin/src/com/vectrace/MercurialEclipse/commands/RootlessHgCommand.java | 1464 | /*******************************************************************************
* Copyright (c) 2005-2010 VecTrace (Zingo Andersen) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* John Peberdy implementation
*******************************************************************************/
package com.vectrace.MercurialEclipse.commands;
import java.io.File;
import java.util.List;
import org.eclipse.core.runtime.Assert;
/**
 * A command to invoke hg definitely outside of an hg root.
 */
public class RootlessHgCommand extends AbstractShellCommand {

    /**
     * Creates a rootless hg command with no explicit working directory.
     *
     * @param command the hg sub-command to run; must not be {@code null}
     * @param uiName human-readable name displayed while the command runs
     */
    public RootlessHgCommand(String command, String uiName) {
        super(uiName, null, null, false);
        Assert.isNotNull(command);
        this.command = command;
    }

    /**
     * Creates a rootless hg command that executes in the given directory.
     *
     * @param command the hg sub-command to run; must not be {@code null}
     * @param uiName human-readable name displayed while the command runs
     * @param workingDir directory to execute in, or {@code null} for the default
     */
    public RootlessHgCommand(String command, String uiName, File workingDir) {
        super(uiName, null, workingDir, false);
        Assert.isNotNull(command);
        this.command = command;
    }

    // operations

    /**
     * Inserts the non-interactive flag right after the executable so hg never
     * prompts the user.
     *
     * @see com.vectrace.MercurialEclipse.commands.AbstractShellCommand#customizeCommands(java.util.List)
     */
    @Override
    protected void customizeCommands(List<String> cmd) {
        cmd.add(1, "-y");
    }

    /**
     * @see com.vectrace.MercurialEclipse.commands.AbstractShellCommand#getExecutable()
     */
    @Override
    protected String getExecutable() {
        return HgClients.getExecutable();
    }
}
| epl-1.0 |
rfdrake/opennms | smoke-test/src/test/java/org/opennms/smoketest/ChartsPageTest.java | 1723 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2011-2012 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.smoketest;
import org.junit.Before;
import org.junit.Test;
public class ChartsPageTest extends OpenNMSSeleniumTestCase {

    /** Navigates to the Charts page before each test. */
    @Before
    public void setUp() throws Exception {
        super.setUp();
        clickAndWait("link=Charts");
    }

    /** Verifies the page heading and that every sample chart image renders. */
    @Test
    public void testChartsPage() throws Exception {
        waitForText("Charts");
        final String[] chartAltTexts = {
            "sample-bar-chart",
            "sample-bar-chart2",
            "sample-bar-chart3",
        };
        for (final String altText : chartAltTexts) {
            waitForElement("css=img[alt=" + altText + "]");
        }
    }
}
| gpl-2.0 |
91wzhang/sei-jphotoalbum | src/fi/iki/jka/JPhotoDirectory.java | 2070 | /*
* This file is part of JPhotoAlbum.
* Copyright 2004 Jari Karjala <jpkware.com> & Tarja Hakala <hakalat.net>
*
* @version $Id: JPhotoDirectory.java,v 1.1.1.1 2004/05/21 18:24:59 jkarjala Exp $
*/
/** Container for a single photo, may not always contain a real photo, but
* just text element.
* @see JPhotoAlbumLink.java
*/
package fi.iki.jka;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.InputStream;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.FileCacheImageInputStream;
import com.drew.metadata.exif.ExifDirectory;
public class JPhotoDirectory extends JPhoto {

    /** Creates a directory entry with the default owner and no file. */
    public JPhotoDirectory() {
        this(defaultOwner, null);
    }

    /** Creates a directory entry owned by the given collection, with no file. */
    public JPhotoDirectory(JPhotoCollection owner) {
        this(owner, null);
    }

    /** Creates a directory entry for the given file with the default owner. */
    public JPhotoDirectory(File original) {
        this(defaultOwner, original);
    }

    /** Creates a directory entry for the given file and owner. */
    public JPhotoDirectory(JPhotoCollection owner, File original) {
        super(owner, original);
    }

    /**
     * Loads the static folder icon (pics/directory.png from the classpath)
     * that represents a directory in thumbnail views.
     *
     * Fixed: the original leaked the resource stream when ImageReader.read()
     * threw, and would have thrown NoSuchElementException if no PNG reader
     * was registered. Resources are now released in a finally block.
     *
     * @return the folder icon, or null if the reader or resource is missing
     *         or decoding fails
     */
    public BufferedImage getThumbImage() {
        Iterator readers = ImageIO.getImageReadersBySuffix("png");
        if (!readers.hasNext()) {
            System.out.println("getThumbImage: no PNG image reader available");
            return null;
        }
        ImageReader imageReader = (ImageReader)readers.next();
        InputStream ins = getClass().getClassLoader().getResourceAsStream("pics/directory.png");
        if (ins == null) {
            System.out.println("getThumbImage: resource pics/directory.png not found");
            imageReader.dispose();
            return null;
        }
        BufferedImage thumb = null;
        try {
            imageReader.setInput(new FileCacheImageInputStream(ins, null));
            thumb = imageReader.read(0);
        }
        catch (Exception e) {
            System.out.println("getThumbImage:"+e);
        }
        finally {
            imageReader.dispose();
            try {
                ins.close();
            }
            catch (Exception e) {
                // best-effort close; nothing sensible to do here
            }
        }
        return thumb;
    }

    /** Directory icons are cheap to load, so the "cached" variant just loads. */
    public BufferedImage getCachedThumb() {
        return getThumbImage();
    }

    /** No async loading needed for the static icon; loads synchronously. */
    public BufferedImage getThumbImageAsync() {
        return getThumbImage();
    }

    /** Directories have no full-size image. */
    public BufferedImage getFullImage() {
        return null;
    }

    /** Directories carry no EXIF metadata. */
    public ExifDirectory getExifDirectory() {
        return null;
    }

    public String toString() {
        return "Directory='"+getImageName()+"'";
    }
}
| gpl-2.0 |
bigretromike/UniversalMediaServer | src/main/java/net/pms/encoders/TsMuxeRVideo.java | 27024 | /*
* PS3 Media Server, for streaming any medias to your PS3.
* Copyright (C) 2008 A.Brochard
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 2
* of the License only.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package net.pms.encoders;
import com.jgoodies.forms.builder.PanelBuilder;
import com.jgoodies.forms.factories.Borders;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;
import java.awt.ComponentOrientation;
import java.awt.Font;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Locale;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JPanel;
import net.pms.Messages;
import net.pms.PMS;
import net.pms.configuration.DeviceConfiguration;
import net.pms.configuration.PmsConfiguration;
import net.pms.configuration.RendererConfiguration;
import net.pms.dlna.*;
import net.pms.formats.Format;
import net.pms.io.*;
import net.pms.newgui.GuiUtil;
import net.pms.util.CodecUtil;
import net.pms.util.FormLayoutUtil;
import net.pms.util.PlayerUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TsMuxeRVideo extends Player {
private static final Logger LOGGER = LoggerFactory.getLogger(TsMuxeRVideo.class);
// JGoodies FormLayout specs for the settings panel built in config().
private static final String COL_SPEC = "left:pref, 0:grow";
private static final String ROW_SPEC = "p, 3dlu, p, 3dlu, p, 3dlu, p, 3dlu, p, 3dlu, 0:grow";
// Engine identifier used in renderer/engine configuration.
public static final String ID = "tsmuxer";

/**
 * @deprecated Use {@link #TsMuxeRVideo()} instead; the configuration
 * argument is ignored (device-specific configuration is resolved per
 * transcode in {@link #launchTranscode}).
 */
@Deprecated
public TsMuxeRVideo(PmsConfiguration configuration) {
    this();
}

public TsMuxeRVideo() {
}
/**
 * Decides whether a container format should be excluded from this engine.
 * Only containers tsMuxeR can demux are accepted; everything else is
 * excluded. An unmatched (null) extension is not excluded.
 */
@Override
public boolean excludeFormat(Format format) {
    String extension = format.getMatchedExtension();
    if (extension == null) {
        return false;
    }
    switch (extension) {
        case "mp4":
        case "mkv":
        case "ts":
        case "tp":
        case "m2ts":
        case "m2t":
        case "mpg":
        case "evo":
        case "mpeg":
        case "vob":
        case "m2v":
        case "mts":
        case "mov":
            return false;
        default:
            return true;
    }
}
/** Classifies this engine as a simple file-based video player. */
@Override
public int purpose() {
    return VIDEO_SIMPLEFILE_PLAYER;
}

/** Returns the unique engine identifier. */
@Override
public String id() {
    return ID;
}

/** tsMuxeR output supports time-based seeking. */
@Override
public boolean isTimeSeekable() {
    return true;
}

/** No static argument list; commands are built per transcode. */
@Override
public String[] args() {
    return null;
}

/** Path to the tsMuxeR binary from the configuration. */
@Override
public String executable() {
    return configuration.getTsmuxerPath();
}
/**
 * Builds and starts the transcode pipeline: one FFmpeg process per stream
 * (video plus one or more audio tracks) writing into named pipes, and a
 * tsMuxeR process that muxes those pipes into an MPEG-TS stream consumed by
 * the renderer.
 *
 * Rough phases:
 *  1. resolve the device-specific configuration,
 *  2. spawn the video producer (a still-image fake video for
 *     {@link TsMuxeRAudio}, otherwise a stream copy of the input),
 *  3. spawn audio producers, deciding per track between AC-3 remux,
 *     DTS-in-PCM remux, encoded-audio passthrough, LPCM, AAC or AC-3
 *     re-encode,
 *  4. write the tsMuxeR meta file describing every pipe,
 *  5. wire up and start all processes with short staggered delays so the
 *     pipes exist before their consumers attach.
 *
 * @param dlna the resource being played
 * @param media parsed media information for the resource
 * @param params output parameters (renderer, seek position, audio track, ...)
 * @return the wrapper around the running tsMuxeR process tree
 * @throws IOException if the meta file or a pipe cannot be created
 */
@Override
public ProcessWrapper launchTranscode(
    DLNAResource dlna,
    DLNAMediaInfo media,
    OutputParams params
) throws IOException {
    // Use device-specific pms conf; restored before returning.
    PmsConfiguration prev = configuration;
    configuration = (DeviceConfiguration) params.mediaRenderer;
    final String filename = dlna.getSystemName();
    setAudioAndSubs(filename, media, params);

    PipeIPCProcess ffVideoPipe;
    ProcessWrapperImpl ffVideo;
    PipeIPCProcess ffAudioPipe[] = null;
    ProcessWrapperImpl ffAudio[] = null;
    String fps = media.getValidFps(false);

    // Suspiciously small dimensions are treated as unknown (-1) so they are
    // omitted from the tsMuxeR meta file later on.
    int width = media.getWidth();
    int height = media.getHeight();
    if (width < 320 || height < 240) {
        width = -1;
        height = -1;
    }

    // tsMuxeR track type for the video stream; H.264 unless the source is MPEG-2.
    String videoType = "V_MPEG4/ISO/AVC";
    if (media.getCodecV() != null && media.getCodecV().startsWith("mpeg2")) {
        videoType = "V_MPEG-2";
    }

    boolean aacTranscode = false;

    String[] ffmpegCommands;
    if (this instanceof TsMuxeRAudio && media.getFirstAudioTrack() != null) {
        // Audio-only mode: generate a dummy H.264 video from a still image so
        // the TS still contains a video track, and feed the real audio beside it.
        ffVideoPipe = new PipeIPCProcess(System.currentTimeMillis() + "fakevideo", System.currentTimeMillis() + "videoout", false, true);

        // With no end time, "-y" is used as a harmless placeholder argument.
        String timeEndValue1 = "-t";
        String timeEndValue2 = "" + params.timeend;
        if (params.timeend < 1) {
            timeEndValue1 = "-y";
            timeEndValue2 = "-y";
        }

        ffmpegCommands = new String[] {
            configuration.getFfmpegPath(),
            timeEndValue1, timeEndValue2,
            "-loop", "1",
            "-i", "DummyInput.jpg",
            "-f", "h264",
            "-c:v", "libx264",
            "-level", "31",
            "-tune", "zerolatency",
            "-pix_fmt", "yuv420p",
            "-an",
            "-y",
            ffVideoPipe.getInputPipe()
        };

        videoType = "V_MPEG4/ISO/AVC";

        OutputParams ffparams = new OutputParams(configuration);
        ffparams.maxBufferSize = 1;
        ffVideo = new ProcessWrapperImpl(ffmpegCommands, ffparams);

        if (
            filename.toLowerCase().endsWith(".flac") &&
            media.getFirstAudioTrack().getBitsperSample() >= 24 &&
            media.getFirstAudioTrack().getSampleRate() % 48000 == 0
        ) {
            // High-resolution FLAC: decode with the flac binary directly.
            ffAudioPipe = new PipeIPCProcess[1];
            ffAudioPipe[0] = new PipeIPCProcess(System.currentTimeMillis() + "flacaudio", System.currentTimeMillis() + "audioout", false, true);

            String[] flacCmd = new String[] {
                configuration.getFlacPath(),
                "--output-name=" + ffAudioPipe[0].getInputPipe(),
                "-d",
                "-f",
                "-F",
                filename
            };

            ffparams = new OutputParams(configuration);
            ffparams.maxBufferSize = 1;
            ffAudio = new ProcessWrapperImpl[1];
            ffAudio[0] = new ProcessWrapperImpl(flacCmd, ffparams);
        } else {
            // Everything else: decode to WAV/PCM with FFmpeg, upsampling the
            // bit depth / rate to match the source where needed.
            ffAudioPipe = new PipeIPCProcess[1];
            ffAudioPipe[0] = new PipeIPCProcess(System.currentTimeMillis() + "mlpaudio", System.currentTimeMillis() + "audioout", false, true);
            String depth = "pcm_s16le";
            String rate = "48000";

            if (media.getFirstAudioTrack().getBitsperSample() >= 24) {
                depth = "pcm_s24le";
            }

            if (media.getFirstAudioTrack().getSampleRate() > 48000) {
                rate = "" + media.getFirstAudioTrack().getSampleRate();
            }

            String[] flacCmd = new String[] {
                configuration.getFfmpegPath(),
                "-i", filename,
                "-ar", rate,
                "-f", "wav",
                "-acodec", depth,
                "-y",
                ffAudioPipe[0].getInputPipe()
            };

            ffparams = new OutputParams(configuration);
            ffparams.maxBufferSize = 1;
            ffAudio = new ProcessWrapperImpl[1];
            ffAudio[0] = new ProcessWrapperImpl(flacCmd, ffparams);
        }
    } else {
        // Normal video mode: stream-copy the video track into a pipe.
        params.waitbeforestart = 5000;
        params.manageFastStart();

        ffVideoPipe = new PipeIPCProcess(System.currentTimeMillis() + "ffmpegvideo", System.currentTimeMillis() + "videoout", false, true);

        ffmpegCommands = new String[] {
            configuration.getFfmpegPath(),
            "-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
            "-i", filename,
            "-c", "copy",
            "-f", "rawvideo",
            "-y",
            ffVideoPipe.getInputPipe()
        };

        InputFile newInput = new InputFile();
        newInput.setFilename(filename);
        newInput.setPush(params.stdin);

        /**
         * Note: This logic is weird; on one hand we check if the renderer requires videos to be Level 4.1 or below, but then
         * the other function allows the video to exceed those limits.
         * In reality this won't cause problems since renderers typically don't support above 4.1 anyway - nor are many
         * videos encoded higher than that either - but it's worth acknowledging the logic discrepancy.
         */
        if (!media.isVideoWithinH264LevelLimits(newInput, params.mediaRenderer) && params.mediaRenderer.isH264Level41Limited()) {
            LOGGER.info("The video will not play or will show a black screen");
        }

        // Re-inject SPS/PPS headers (Annex B) into the copied stream if available.
        if (media.getH264AnnexB() != null && media.getH264AnnexB().length > 0) {
            StreamModifier sm = new StreamModifier();
            sm.setHeader(media.getH264AnnexB());
            sm.setH264AnnexB(true);
            ffVideoPipe.setModifier(sm);
        }

        OutputParams ffparams = new OutputParams(configuration);
        ffparams.maxBufferSize = 1;
        ffparams.stdin = params.stdin;
        ffVideo = new ProcessWrapperImpl(ffmpegCommands, ffparams);

        // Either mux only the selected track, or all tracks when configured.
        int numAudioTracks = 1;

        if (media.getAudioTracksList() != null && media.getAudioTracksList().size() > 1 && configuration.isMuxAllAudioTracks()) {
            numAudioTracks = media.getAudioTracksList().size();
        }

        boolean singleMediaAudio = media.getAudioTracksList().size() <= 1;

        if (params.aid != null) {
            boolean ac3Remux;
            boolean dtsRemux;
            boolean encodedAudioPassthrough;
            boolean pcm;

            if (numAudioTracks <= 1) {
                // Single audio track: one FFmpeg producer feeding one pipe.
                ffAudioPipe = new PipeIPCProcess[numAudioTracks];
                ffAudioPipe[0] = new PipeIPCProcess(System.currentTimeMillis() + "ffmpegaudio01", System.currentTimeMillis() + "audioout", false, true);

                /**
                 * Disable AC-3 remux for stereo tracks with 384 kbits bitrate as a workaround for this issue:
                 * Some renderers can only play 2-channel AC-3 up to 320 kbits/s.
                 */
                encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
                ac3Remux = params.aid.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough && !params.mediaRenderer.isTranscodeToAAC();
                dtsRemux = configuration.isAudioEmbedDtsInPcm() && params.aid.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;

                pcm = configuration.isAudioUsePCM() &&
                    media.isValidForLPCMTranscoding() &&
                    (
                        params.aid.isLossless() ||
                        (params.aid.isDTS() && params.aid.getAudioProperties().getNumberOfChannels() <= 6) ||
                        params.aid.isTrueHD() ||
                        (
                            !configuration.isMencoderUsePcmForHQAudioOnly() &&
                            (
                                params.aid.isAC3() ||
                                params.aid.isMP3() ||
                                params.aid.isAAC() ||
                                params.aid.isVorbis() ||
                                // params.aid.isWMA() ||
                                params.aid.isMpegAudio()
                            )
                        )
                    ) && params.mediaRenderer.isLPCMPlayable();

                int channels;
                if (ac3Remux) {
                    channels = params.aid.getAudioProperties().getNumberOfChannels(); // AC-3 remux
                } else if (dtsRemux || encodedAudioPassthrough) {
                    channels = 2;
                } else if (pcm) {
                    channels = params.aid.getAudioProperties().getNumberOfChannels();
                } else {
                    channels = configuration.getAudioChannelCount(); // 5.1 max for AC-3 encoding
                }

                if (!ac3Remux && (dtsRemux || pcm || encodedAudioPassthrough)) {
                    // DTS remux or LPCM
                    StreamModifier sm = new StreamModifier();
                    sm.setPcm(pcm);
                    sm.setDtsEmbed(dtsRemux);
                    sm.setEncodedAudioPassthrough(encodedAudioPassthrough);
                    sm.setNbChannels(channels);
                    sm.setSampleFrequency(params.aid.getSampleRate() < 48000 ? 48000 : params.aid.getSampleRate());
                    sm.setBitsPerSample(16);

                    ffmpegCommands = new String[] {
                        configuration.getFfmpegPath(),
                        "-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
                        "-i", filename,
                        "-ac", "" + sm.getNbChannels(),
                        "-f", "ac3",
                        "-c:a", sm.isDtsEmbed() || sm.isEncodedAudioPassthrough() ? "copy" : "pcm",
                        "-y",
                        ffAudioPipe[0].getInputPipe()
                    };

                    // Use PCM trick when media renderer does not support DTS in MPEG
                    if (!params.mediaRenderer.isMuxDTSToMpeg()) {
                        ffAudioPipe[0].setModifier(sm);
                    }
                } else if (!ac3Remux && params.mediaRenderer.isTranscodeToAAC()) {
                    // AAC audio
                    ffmpegCommands = new String[] {
                        configuration.getFfmpegPath(),
                        "-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
                        "-i", filename,
                        "-ac", "" + channels,
                        "-f", "adts",
                        "-c:a", "aac",
                        "-strict", "experimental",
                        "-ab", Math.min(configuration.getAudioBitrate(), 320) + "k",
                        "-y",
                        ffAudioPipe[0].getInputPipe()
                    };
                    aacTranscode = true;
                } else {
                    // AC-3 audio
                    ffmpegCommands = new String[] {
                        configuration.getFfmpegPath(),
                        "-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
                        "-i", filename,
                        "-ac", "" + channels,
                        "-f", "ac3",
                        "-c:a", (ac3Remux) ? "copy" : "ac3",
                        "-ab", String.valueOf(CodecUtil.getAC3Bitrate(configuration, params.aid)) + "k",
                        "-y",
                        ffAudioPipe[0].getInputPipe()
                    };
                }

                ffparams = new OutputParams(configuration);
                ffparams.maxBufferSize = 1;
                ffparams.stdin = params.stdin;
                ffAudio = new ProcessWrapperImpl[numAudioTracks];
                ffAudio[0] = new ProcessWrapperImpl(ffmpegCommands, ffparams);
            } else {
                // Multiple audio tracks: one FFmpeg producer and pipe per track,
                // applying the same per-track codec decision as above.
                ffAudioPipe = new PipeIPCProcess[numAudioTracks];
                ffAudio = new ProcessWrapperImpl[numAudioTracks];
                for (int i = 0; i < media.getAudioTracksList().size(); i++) {
                    DLNAMediaAudio audio = media.getAudioTracksList().get(i);
                    ffAudioPipe[i] = new PipeIPCProcess(System.currentTimeMillis() + "ffmpeg" + i, System.currentTimeMillis() + "audioout" + i, false, true);

                    encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
                    ac3Remux = audio.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough && !params.mediaRenderer.isTranscodeToAAC();
                    dtsRemux = configuration.isAudioEmbedDtsInPcm() && audio.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;

                    pcm = configuration.isAudioUsePCM() &&
                        media.isValidForLPCMTranscoding() &&
                        (
                            audio.isLossless() ||
                            (audio.isDTS() && audio.getAudioProperties().getNumberOfChannels() <= 6) ||
                            audio.isTrueHD() ||
                            (
                                !configuration.isMencoderUsePcmForHQAudioOnly() &&
                                (
                                    audio.isAC3() ||
                                    audio.isMP3() ||
                                    audio.isAAC() ||
                                    audio.isVorbis() ||
                                    // audio.isWMA() ||
                                    audio.isMpegAudio()
                                )
                            )
                        ) && params.mediaRenderer.isLPCMPlayable();

                    int channels;
                    if (ac3Remux) {
                        channels = audio.getAudioProperties().getNumberOfChannels(); // AC-3 remux
                    } else if (dtsRemux || encodedAudioPassthrough) {
                        channels = 2;
                    } else if (pcm) {
                        channels = audio.getAudioProperties().getNumberOfChannels();
                    } else {
                        channels = configuration.getAudioChannelCount(); // 5.1 max for AC-3 encoding
                    }

                    if (!ac3Remux && (dtsRemux || pcm || encodedAudioPassthrough)) {
                        // DTS remux or LPCM
                        StreamModifier sm = new StreamModifier();
                        sm.setPcm(pcm);
                        sm.setDtsEmbed(dtsRemux);
                        sm.setEncodedAudioPassthrough(encodedAudioPassthrough);
                        sm.setNbChannels(channels);
                        sm.setSampleFrequency(audio.getSampleRate() < 48000 ? 48000 : audio.getSampleRate());
                        sm.setBitsPerSample(16);
                        if (!params.mediaRenderer.isMuxDTSToMpeg()) {
                            ffAudioPipe[i].setModifier(sm);
                        }

                        ffmpegCommands = new String[] {
                            configuration.getFfmpegPath(),
                            "-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
                            "-i", filename,
                            "-ac", "" + sm.getNbChannels(),
                            "-f", "ac3",
                            singleMediaAudio ? "-y" : "-map", singleMediaAudio ? "-y" : ("0:a:" + (media.getAudioTracksList().indexOf(audio))),
                            "-c:a", sm.isDtsEmbed() || sm.isEncodedAudioPassthrough() ? "copy" : "pcm",
                            "-y",
                            ffAudioPipe[i].getInputPipe()
                        };
                    } else if (!ac3Remux && params.mediaRenderer.isTranscodeToAAC()) {
                        // AAC audio
                        ffmpegCommands = new String[] {
                            configuration.getFfmpegPath(),
                            "-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
                            "-i", filename,
                            "-ac", "" + channels,
                            "-f", "adts",
                            singleMediaAudio ? "-y" : "-map", singleMediaAudio ? "-y" : ("0:a:" + (media.getAudioTracksList().indexOf(audio))),
                            "-c:a", "aac",
                            "-strict", "experimental",
                            "-ab", Math.min(configuration.getAudioBitrate(), 320) + "k",
                            "-y",
                            ffAudioPipe[i].getInputPipe()
                        };
                        aacTranscode = true;
                    } else {
                        // AC-3 remux or encoding
                        ffmpegCommands = new String[] {
                            configuration.getFfmpegPath(),
                            "-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
                            "-i", filename,
                            "-ac", "" + channels,
                            "-f", "ac3",
                            singleMediaAudio ? "-y" : "-map", singleMediaAudio ? "-y" : ("0:a:" + (media.getAudioTracksList().indexOf(audio))),
                            "-c:a", (ac3Remux) ? "copy" : "ac3",
                            "-ab", String.valueOf(CodecUtil.getAC3Bitrate(configuration, audio)) + "k",
                            "-y",
                            ffAudioPipe[i].getInputPipe()
                        };
                    }

                    ffparams = new OutputParams(configuration);
                    ffparams.maxBufferSize = 1;
                    ffparams.stdin = params.stdin;
                    ffAudio[i] = new ProcessWrapperImpl(ffmpegCommands, ffparams);
                }
            }
        }
    }

    // Write the tsMuxeR meta file: global MUXOPT line, then one line per
    // stream (video first, audio tracks after) referencing the pipes above.
    File f = new File(configuration.getTempFolder(), "pms-tsmuxer.meta");
    params.log = false;

    try (PrintWriter pw = new PrintWriter(f)) {
        pw.print("MUXOPT --no-pcr-on-video-pid");
        pw.print(" --new-audio-pes");
        pw.print(" --no-asyncio");
        pw.print(" --vbr");
        pw.println(" --vbv-len=500");

        String sei = "insertSEI";
        if (
            params.mediaRenderer.isPS3() &&
            media.isWebDl(filename, params)
        ) {
            sei = "forceSEI";
        }
        String videoparams = "level=4.1, " + sei + ", contSPS, track=1";

        if (this instanceof TsMuxeRAudio) {
            videoparams = "track=224";
        }

        if (configuration.isFix25FPSAvMismatch()) {
            fps = "25";
        }

        pw.println(videoType + ", \"" + ffVideoPipe.getOutputPipe() + "\", " + (fps != null ? ("fps=" + fps + ", ") : "") + (width != -1 ? ("video-width=" + width + ", ") : "") + (height != -1 ? ("video-height=" + height + ", ") : "") + videoparams);

        if (ffAudioPipe != null && ffAudioPipe.length == 1) {
            String timeshift = "";
            boolean ac3Remux;
            boolean dtsRemux;
            boolean encodedAudioPassthrough;
            boolean pcm;

            // NOTE(review): these flags are recomputed here with the same
            // expressions as in the producer-setup phase above; keep the two
            // in sync when changing either.
            encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
            ac3Remux = params.aid.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough && !params.mediaRenderer.isTranscodeToAAC();
            dtsRemux = configuration.isAudioEmbedDtsInPcm() && params.aid.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;

            pcm = configuration.isAudioUsePCM() &&
                media.isValidForLPCMTranscoding() &&
                (
                    params.aid.isLossless() ||
                    (params.aid.isDTS() && params.aid.getAudioProperties().getNumberOfChannels() <= 6) ||
                    params.aid.isTrueHD() ||
                    (
                        !configuration.isMencoderUsePcmForHQAudioOnly() &&
                        (
                            params.aid.isAC3() ||
                            params.aid.isMP3() ||
                            params.aid.isAAC() ||
                            params.aid.isVorbis() ||
                            // params.aid.isWMA() ||
                            params.aid.isMpegAudio()
                        )
                    )
                ) && params.mediaRenderer.isLPCMPlayable();

            String type = "A_AC3";
            if (ac3Remux) {
                // AC-3 remux takes priority
                type = "A_AC3";
            } else if (aacTranscode) {
                type = "A_AAC";
            } else {
                if (pcm || this instanceof TsMuxeRAudio) {
                    type = "A_LPCM";
                }

                if (encodedAudioPassthrough || this instanceof TsMuxeRAudio) {
                    type = "A_LPCM";
                }

                if (dtsRemux || this instanceof TsMuxeRAudio) {
                    type = "A_LPCM";

                    if (params.mediaRenderer.isMuxDTSToMpeg()) {
                        type = "A_DTS";
                    }
                }
            }

            // Compensate a source audio delay via tsMuxeR's timeshift, but
            // only when playing from the start (no seek).
            if (params.aid != null && params.aid.getAudioProperties().getAudioDelay() != 0 && params.timeseek == 0) {
                timeshift = "timeshift=" + params.aid.getAudioProperties().getAudioDelay() + "ms, ";
            }

            pw.println(type + ", \"" + ffAudioPipe[0].getOutputPipe() + "\", " + timeshift + "track=2");
        } else if (ffAudioPipe != null) {
            for (int i = 0; i < media.getAudioTracksList().size(); i++) {
                DLNAMediaAudio lang = media.getAudioTracksList().get(i);
                String timeshift = "";
                boolean ac3Remux;
                boolean dtsRemux;
                boolean encodedAudioPassthrough;
                boolean pcm;

                // NOTE(review): unlike the producer-setup phase, this
                // recomputation omits the isTranscodeToAAC() condition from
                // ac3Remux and mixes params.aid with the per-track "lang" in
                // the pcm expression — looks inconsistent; confirm intent.
                encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
                ac3Remux = lang.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough;
                dtsRemux = configuration.isAudioEmbedDtsInPcm() && lang.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;

                pcm = configuration.isAudioUsePCM() &&
                    media.isValidForLPCMTranscoding() &&
                    (
                        lang.isLossless() ||
                        (lang.isDTS() && lang.getAudioProperties().getNumberOfChannels() <= 6) ||
                        lang.isTrueHD() ||
                        (
                            !configuration.isMencoderUsePcmForHQAudioOnly() &&
                            (
                                params.aid.isAC3() ||
                                params.aid.isMP3() ||
                                params.aid.isAAC() ||
                                params.aid.isVorbis() ||
                                // params.aid.isWMA() ||
                                params.aid.isMpegAudio()
                            )
                        )
                    ) && params.mediaRenderer.isLPCMPlayable();

                String type = "A_AC3";
                if (ac3Remux) {
                    // AC-3 remux takes priority
                    type = "A_AC3";
                } else {
                    if (pcm) {
                        type = "A_LPCM";
                    }

                    if (encodedAudioPassthrough) {
                        type = "A_LPCM";
                    }

                    if (dtsRemux) {
                        type = "A_LPCM";

                        if (params.mediaRenderer.isMuxDTSToMpeg()) {
                            type = "A_DTS";
                        }
                    }
                }

                if (lang.getAudioProperties().getAudioDelay() != 0 && params.timeseek == 0) {
                    timeshift = "timeshift=" + lang.getAudioProperties().getAudioDelay() + "ms, ";
                }

                pw.println(type + ", \"" + ffAudioPipe[i].getOutputPipe() + "\", " + timeshift + "track=" + (2 + i));
            }
        }
    }

    // Output pipe that tsMuxeR writes the finished TS into.
    PipeProcess tsPipe = new PipeProcess(System.currentTimeMillis() + "tsmuxerout.ts");

    /**
     * Use the newer version of tsMuxeR on PS3 since other renderers
     * like Panasonic TVs don't always recognize the new output
     */
    String executable = executable();
    if (params.mediaRenderer.isPS3()) {
        executable = configuration.getTsmuxerNewPath();
    }

    String[] cmdArray = new String[]{
        executable,
        f.getAbsolutePath(),
        tsPipe.getInputPipe()
    };

    cmdArray = finalizeTranscoderArgs(
        filename,
        dlna,
        media,
        params,
        cmdArray
    );

    ProcessWrapperImpl p = new ProcessWrapperImpl(cmdArray, params);
    params.maxBufferSize = 100;
    params.input_pipes[0] = tsPipe;
    params.stdin = null;

    // Start the pipe/producer processes with short staggered sleeps so each
    // named pipe exists before its consumer attaches.
    ProcessWrapper pipe_process = tsPipe.getPipeProcess();
    p.attachProcess(pipe_process);
    pipe_process.runInNewThread();

    try {
        Thread.sleep(50);
    } catch (InterruptedException e) {
    }

    tsPipe.deleteLater();

    ProcessWrapper ff_pipe_process = ffVideoPipe.getPipeProcess();
    p.attachProcess(ff_pipe_process);
    ff_pipe_process.runInNewThread();

    try {
        Thread.sleep(50);
    } catch (InterruptedException e) {
    }

    ffVideoPipe.deleteLater();

    p.attachProcess(ffVideo);
    ffVideo.runInNewThread();

    try {
        Thread.sleep(50);
    } catch (InterruptedException e) {
    }

    if (ffAudioPipe != null && params.aid != null) {
        for (int i = 0; i < ffAudioPipe.length; i++) {
            ff_pipe_process = ffAudioPipe[i].getPipeProcess();
            p.attachProcess(ff_pipe_process);
            ff_pipe_process.runInNewThread();

            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
            }

            ffAudioPipe[i].deleteLater();
            p.attachProcess(ffAudio[i]);
            ffAudio[i].runInNewThread();
        }
    }

    try {
        Thread.sleep(100);
    } catch (InterruptedException e) {
    }

    p.runInNewThread();

    // Restore the previous (non-device-specific) configuration.
    configuration = prev;
    return p;
}
/** MIME type of the muxed output stream. */
@Override
public String mimeType() {
    return "video/mpeg";
}

/** Human-readable engine name shown in the UI. */
@Override
public String name() {
    return "tsMuxeR";
}

/** This engine handles video resources. */
@Override
public int type() {
    return Format.VIDEO;
}
// Settings-panel widgets; created in config() and bound to the configuration.
private JCheckBox tsmuxerforcefps;
private JCheckBox muxallaudiotracks;

/**
 * Builds the engine's settings panel: two checkboxes (force FPS in the meta
 * file, mux all audio tracks) whose state is written straight to the
 * configuration when toggled. The layout is flipped for RTL locales.
 */
@Override
public JComponent config() {
    // Apply the orientation for the locale
    ComponentOrientation orientation = ComponentOrientation.getOrientation(PMS.getLocale());
    String colSpec = FormLayoutUtil.getColSpec(COL_SPEC, orientation);

    FormLayout layout = new FormLayout(colSpec, ROW_SPEC);
    PanelBuilder builder = new PanelBuilder(layout);
    builder.border(Borders.EMPTY);
    builder.opaque(false);

    CellConstraints cc = new CellConstraints();

    JComponent cmp = builder.addSeparator(Messages.getString("NetworkTab.5"), FormLayoutUtil.flip(cc.xyw(2, 1, 1), colSpec, orientation));
    cmp = (JComponent) cmp.getComponent(0);
    cmp.setFont(cmp.getFont().deriveFont(Font.BOLD));

    tsmuxerforcefps = new JCheckBox(Messages.getString("TsMuxeRVideo.2"), configuration.isTsmuxerForceFps());
    tsmuxerforcefps.setContentAreaFilled(false);
    tsmuxerforcefps.addItemListener(new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
            configuration.setTsmuxerForceFps(e.getStateChange() == ItemEvent.SELECTED);
        }
    });
    builder.add(GuiUtil.getPreferredSizeComponent(tsmuxerforcefps), FormLayoutUtil.flip(cc.xy(2, 3), colSpec, orientation));

    muxallaudiotracks = new JCheckBox(Messages.getString("TsMuxeRVideo.19"), configuration.isMuxAllAudioTracks());
    muxallaudiotracks.setContentAreaFilled(false);
    muxallaudiotracks.addItemListener(new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
            configuration.setMuxAllAudioTracks(e.getStateChange() == ItemEvent.SELECTED);
        }
    });
    builder.add(GuiUtil.getPreferredSizeComponent(muxallaudiotracks), FormLayoutUtil.flip(cc.xy(2, 5), colSpec, orientation));

    JPanel panel = builder.getPanel();

    // Apply the orientation to the panel and all components in it
    panel.applyComponentOrientation(orientation);

    return panel;
}
	/**
	 * @return {@code false}; subtitles are not supported by this engine
	 *         (see {@code isCompatible}).
	 */
	@Override
	public boolean isInternalSubtitlesSupported() {
		return false;
	}
	/**
	 * @return {@code false}; external subtitles are not supported by this engine.
	 */
	@Override
	public boolean isExternalSubtitlesSupported() {
		return false;
	}
@Override
public boolean isPlayerCompatible(RendererConfiguration mediaRenderer) {
return mediaRenderer != null && mediaRenderer.isMuxH264MpegTS();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isCompatible(DLNAResource resource) {
DLNAMediaSubtitle subtitle = resource.getMediaSubtitle();
// Check whether the subtitle actually has a language defined,
// uninitialized DLNAMediaSubtitle objects have a null language.
if (subtitle != null && subtitle.getLang() != null) {
// The resource needs a subtitle, but PMS does not support subtitles for tsMuxeR.
return false;
}
try {
String audioTrackName = resource.getMediaAudio().toString();
String defaultAudioTrackName = resource.getMedia().getAudioTracksList().get(0).toString();
if (!audioTrackName.equals(defaultAudioTrackName)) {
// PMS only supports playback of the default audio track for tsMuxeR
return false;
}
} catch (NullPointerException e) {
LOGGER.trace("tsMuxeR cannot determine compatibility based on audio track for " + resource.getSystemName());
} catch (IndexOutOfBoundsException e) {
LOGGER.trace("tsMuxeR cannot determine compatibility based on default audio track for " + resource.getSystemName());
}
if (
PlayerUtil.isVideo(resource, Format.Identifier.MKV) ||
PlayerUtil.isVideo(resource, Format.Identifier.MPG)
) {
return true;
}
return false;
}
}
| gpl-2.0 |
FauxFaux/jdk9-jdk | test/java/lang/management/ThreadMXBean/ThreadUserTime.java | 8036 | /*
* Copyright (c) 2004, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 4997799
* @summary Basic test of ThreadMXBean.getThreadUserTime and
* getCurrentThreadUserTime.
* @author Mandy Chung
* @modules java.management
*/
import java.lang.management.*;
public class ThreadUserTime {
    private static final ThreadMXBean mbean = ManagementFactory.getThreadMXBean();
    private static boolean testFailed = false;
    private static boolean done = false;
    private static final Object obj = new Object();
    private static final int NUM_THREADS = 10;
    private static final Thread[] threads = new Thread[NUM_THREADS];
    private static final long[] times = new long[NUM_THREADS];
    // Maximum allowed growth (ns) of a blocked thread's user time between the
    // two samples below. careful about this value
    private static final int DELTA = 100;

    public static void main(String[] argv)
        throws Exception {
        if (!mbean.isCurrentThreadCpuTimeSupported()) {
            return;
        }

        // disable user time
        if (mbean.isThreadCpuTimeEnabled()) {
            mbean.setThreadCpuTimeEnabled(false);
        }

        // While measurement is disabled, both query methods must return -1.
        Thread curThread = Thread.currentThread();
        long t = mbean.getCurrentThreadUserTime();
        if (t != -1) {
            throw new RuntimeException("Invalid CurrentThreadUserTime returned = " +
                t + " expected = -1");
        }

        if (mbean.isThreadCpuTimeSupported()) {
            long t1 = mbean.getThreadUserTime(curThread.getId());
            if (t1 != -1) {
                throw new RuntimeException("Invalid ThreadUserTime returned = " +
                    t1 + " expected = -1");
            }
        }

        // Enable CPU Time measurement
        if (!mbean.isThreadCpuTimeEnabled()) {
            mbean.setThreadCpuTimeEnabled(true);
        }

        if (!mbean.isThreadCpuTimeEnabled()) {
            throw new RuntimeException("ThreadUserTime is expected to be enabled");
        }

        long time = mbean.getCurrentThreadUserTime();
        if (time < 0) {
            throw new RuntimeException("Invalid user time returned = " + time);
        }

        if (!mbean.isThreadCpuTimeSupported()) {
            return;
        }

        // Expected to be time1 >= time
        long time1 = mbean.getThreadUserTime(curThread.getId());
        if (time1 < time) {
            throw new RuntimeException("User time " + time1 +
                " expected >= " + time);
        }
        System.out.println(curThread.getName() +
            " Current Thread User Time = " + time +
            " user time = " + time1);

        for (int i = 0; i < NUM_THREADS; i++) {
            threads[i] = new MyThread("MyThread-" + i);
            threads[i].start();
        }

        waitUntilThreadBlocked();

        // Sample all worker threads' user time once they are parked in wait().
        for (int i = 0; i < NUM_THREADS; i++) {
            times[i] = mbean.getThreadUserTime(threads[i].getId());
        }

        goSleep(200);

        // A blocked thread's user time must not decrease, and may grow by at
        // most DELTA while it performs no user-mode work.
        for (int i = 0; i < NUM_THREADS; i++) {
            long newTime = mbean.getThreadUserTime(threads[i].getId());
            if (times[i] > newTime) {
                throw new RuntimeException("TEST FAILED: " +
                    threads[i].getName() +
                    " previous user time = " + times[i] +
                    " > current user time = " + newTime);
            }
            if ((times[i] + DELTA) < newTime) {
                throw new RuntimeException("TEST FAILED: " +
                    threads[i].getName() +
                    " user time = " + newTime +
                    " previous user time " + times[i] +
                    " out of expected range");
            }

            System.out.println(threads[i].getName() +
                " Previous User Time = " + times[i] +
                " Current User time = " + newTime);
        }

        // Release the workers and wait for them to finish.
        synchronized (obj) {
            done = true;
            obj.notifyAll();
        }

        for (int i = 0; i < NUM_THREADS; i++) {
            try {
                threads[i].join();
            } catch (InterruptedException e) {
                System.out.println("Unexpected exception is thrown.");
                e.printStackTrace(System.out);
                testFailed = true;
                break;
            }
        }
        if (testFailed) {
            throw new RuntimeException("TEST FAILED");
        }

        System.out.println("Test passed");
    }

    private static void goSleep(long ms) throws Exception {
        try {
            Thread.sleep(ms);
        } catch (InterruptedException e) {
            System.out.println("Unexpected exception is thrown.");
            throw e;
        }
    }

    // Polls until every worker thread reports the WAITING state,
    // i.e. is parked in obj.wait().
    private static void waitUntilThreadBlocked()
        throws Exception {
        int count = 0;
        while (count != NUM_THREADS) {
            goSleep(100);
            count = 0;
            for (int i = 0; i < NUM_THREADS; i++) {
                ThreadInfo info = mbean.getThreadInfo(threads[i].getId());
                if (info.getThreadState() == Thread.State.WAITING) {
                    count++;
                }
            }
        }
    }

    static class MyThread extends Thread {
        public MyThread(String name) {
            super(name);
        }

        public void run() {
            // Burn some user-mode CPU before blocking so the sampled user
            // time is non-trivial.
            double sum = 0;
            for (int i = 0; i < 5000; i++) {
                double r = Math.random();
                double x = Math.pow(3, r);
                sum += x - r;
            }
            synchronized (obj) {
                while (!done) {
                    try {
                        obj.wait();
                    } catch (InterruptedException e) {
                        System.out.println("Unexpected exception is thrown.");
                        e.printStackTrace(System.out);
                        testFailed = true;
                        break;
                    }
                }
            }
            sum = 0;
            for (int i = 0; i < 5000; i++) {
                double r = Math.random();
                double x = Math.pow(3, r);
                sum += x - r;
            }

            // Self-consistency: the times reported for the current thread via
            // the two API forms must agree (current <= by-id, sampled later).
            long time1 = mbean.getCurrentThreadCpuTime();
            long utime1 = mbean.getCurrentThreadUserTime();
            long time2 = mbean.getThreadCpuTime(getId());
            long utime2 = mbean.getThreadUserTime(getId());

            System.out.println(getName() + ":");
            System.out.println("CurrentThreadUserTime = " + utime1 +
                " ThreadUserTime = " + utime2);
            System.out.println("CurrentThreadCpuTime = " + time1 +
                " ThreadCpuTime = " + time2);

            if (time1 > time2) {
                throw new RuntimeException("TEST FAILED: " + getName() +
                    " CurrentThreadCpuTime = " + time1 +
                    " > ThreadCpuTime = " + time2);
            }
            if (utime1 > utime2) {
                throw new RuntimeException("TEST FAILED: " + getName() +
                    " CurrentThreadUserTime = " + utime1 +
                    " > ThreadUserTime = " + utime2);
            }
        }
    }
}
| gpl-2.0 |
hanhailong/VCL-Official | vlc-android/src/org/videolan/vlc/gui/tv/SearchFragment.java | 4897 | /*****************************************************************************
* SearchFragment.java
*****************************************************************************
* Copyright © 2014-2015 VLC authors, VideoLAN and VideoLabs
* Author: Geoffrey Métais
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301, USA.
*****************************************************************************/
package org.videolan.vlc.gui.tv;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.support.v17.leanback.widget.ArrayObjectAdapter;
import android.support.v17.leanback.widget.HeaderItem;
import android.support.v17.leanback.widget.ListRow;
import android.support.v17.leanback.widget.ListRowPresenter;
import android.support.v17.leanback.widget.ObjectAdapter;
import android.support.v17.leanback.widget.OnItemViewClickedListener;
import android.support.v17.leanback.widget.Presenter;
import android.support.v17.leanback.widget.Row;
import android.support.v17.leanback.widget.RowPresenter;
import android.text.TextUtils;
import org.videolan.vlc.R;
import org.videolan.vlc.VLCApplication;
import org.videolan.vlc.media.MediaLibrary;
import org.videolan.vlc.media.MediaWrapper;
import java.util.ArrayList;
public class SearchFragment extends android.support.v17.leanback.app.SearchFragment
        implements android.support.v17.leanback.app.SearchFragment.SearchResultProvider {
    private static final String TAG = "SearchFragment";
    // Rows adapter backing getResultsAdapter(); all hits go into a single ListRow.
    private ArrayObjectAdapter mRowsAdapter;
    // Handler bound to the thread that created this fragment; used to post
    // result rows back from the background search.
    private Handler mHandler = new Handler();
    // Reusable background task that performs the actual media-library search.
    private SearchRunnable mDelayedLoad;
    protected Activity mActivity;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mRowsAdapter = new ArrayObjectAdapter(new ListRowPresenter());
        setSearchResultProvider(this);
        setOnItemViewClickedListener(getDefaultItemClickedListener());
        mDelayedLoad = new SearchRunnable();
        mActivity = getActivity();
    }
    @Override
    public ObjectAdapter getResultsAdapter() {
        return mRowsAdapter;
    }
    // Clears any previous results and, for queries longer than 2 characters,
    // schedules a background search over all media types.
    private void queryByWords(String words) {
        mRowsAdapter.clear();
        if (!TextUtils.isEmpty(words) && words.length() > 2) {
            mDelayedLoad.setSearchQuery(words);
            mDelayedLoad.setSearchType(MediaWrapper.TYPE_ALL);
            VLCApplication.runBackground(mDelayedLoad);
        }
    }
    @Override
    public boolean onQueryTextChange(String newQuery) {
        queryByWords(newQuery);
        return true;
    }
    @Override
    public boolean onQueryTextSubmit(String query) {
        queryByWords(query);
        return true;
    }
    // Runs on a background thread: queries the media library, then posts the
    // assembled row to mHandler for insertion into the results adapter.
    // NOTE(review): the 'type' parameter is currently unused — searchMedia()
    // receives only the query string; confirm whether type filtering was intended.
    private void loadRows(String query, int type) {
        ArrayList<MediaWrapper> mediaList = MediaLibrary.getInstance().searchMedia(query);
        final ArrayObjectAdapter listRowAdapter = new ArrayObjectAdapter(new CardPresenter(mActivity));
        listRowAdapter.addAll(0, mediaList);
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                HeaderItem header = new HeaderItem(0, getResources().getString(R.string.search_results));
                mRowsAdapter.add(new ListRow(header, listRowAdapter));
            }
        });
    }
    // Opens the clicked media item; other item types are ignored.
    protected OnItemViewClickedListener getDefaultItemClickedListener() {
        return new OnItemViewClickedListener() {
            @Override
            public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) {
                if (item instanceof MediaWrapper) {
                    TvUtil.openMedia(mActivity, (MediaWrapper) item, row);
                }
            }
        };
    }
    // Carries the pending query and type to the background thread.
    // Fields are volatile because they are written on the UI thread and read
    // on the background thread.
    private class SearchRunnable implements Runnable {
        private volatile String searchQuery;
        private volatile int searchType;
        public SearchRunnable() {}
        public void run() {
            loadRows(searchQuery, searchType);
        }
        public void setSearchQuery(String value) {
            this.searchQuery = value;
        }
        public void setSearchType(int value) {
            this.searchType = value;
        }
    }
}
| gpl-2.0 |
nicholaschum/substratum | app/src/main/java/projekt/substratum/common/Broadcasts.java | 8846 | /*
* Copyright (c) 2016-2019 Projekt Substratum
* This file is part of Substratum.
*
* SPDX-License-Identifier: GPL-3.0-Or-Later
*/
package projekt.substratum.common;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.util.Log;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import projekt.substratum.Substratum;
import projekt.substratum.services.crash.AppCrashReceiver;
import projekt.substratum.services.packages.OverlayFound;
import projekt.substratum.services.packages.OverlayUpdater;
import projekt.substratum.services.packages.PackageModificationDetector;
import projekt.substratum.services.profiles.ScheduledProfileReceiver;
import projekt.substratum.services.system.InterfacerAuthorizationReceiver;
import static projekt.substratum.common.Internal.ENCRYPTION_KEY_EXTRA;
import static projekt.substratum.common.Internal.IV_ENCRYPTION_KEY_EXTRA;
import static projekt.substratum.common.Internal.MAIN_ACTIVITY_RECEIVER;
import static projekt.substratum.common.Internal.OVERLAY_REFRESH;
import static projekt.substratum.common.Internal.THEME_FRAGMENT_REFRESH;
import static projekt.substratum.common.References.ACTIVITY_FINISHER;
import static projekt.substratum.common.References.APP_CRASHED;
import static projekt.substratum.common.References.INTERFACER_PACKAGE;
import static projekt.substratum.common.References.KEY_RETRIEVAL;
import static projekt.substratum.common.References.MANAGER_REFRESH;
import static projekt.substratum.common.References.PACKAGE_ADDED;
import static projekt.substratum.common.References.PACKAGE_FULLY_REMOVED;
import static projekt.substratum.common.References.SUBSTRATUM_LOG;
import static projekt.substratum.common.References.TEMPLATE_RECEIVE_KEYS;
import static projekt.substratum.common.References.scheduledProfileReceiver;
public class Broadcasts {
/**
* Send a localized key message for encryption to take place
*
* @param context Context
* @param encryptionKey Encryption key
* @param ivEncryptKey IV encryption key
*/
private static void sendLocalizedKeyMessage(Context context,
byte[] encryptionKey,
byte[] ivEncryptKey) {
Substratum.log("KeyRetrieval",
"The system has completed the handshake for keys retrieval " +
"and is now passing it to the activity...");
Intent intent = new Intent(KEY_RETRIEVAL);
intent.putExtra(ENCRYPTION_KEY_EXTRA, encryptionKey);
intent.putExtra(IV_ENCRYPTION_KEY_EXTRA, ivEncryptKey);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
/**
* Close Substratum as a whole
*
* @param context Context
*/
public static void sendKillMessage(Context context) {
Substratum.log("SubstratumKiller",
"A crucial action has been conducted by the user and " +
"Substratum is now shutting down!");
Intent intent = new Intent(MAIN_ACTIVITY_RECEIVER);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
/**
* A package was installed, refresh the ThemeFragment
*
* @param context Context
*/
public static void sendRefreshMessage(Context context) {
Substratum.log("ThemeFragmentRefresher",
"A theme has been modified, sending update signal to refresh the list!");
Intent intent = new Intent(THEME_FRAGMENT_REFRESH);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
/**
* A package was installed, refresh the Overlays tab
*
* @param context Context
*/
public static void sendOverlayRefreshMessage(Context context) {
Substratum.log("OverlayRefresher",
"A theme has been modified, sending update signal to refresh the list!");
Intent intent = new Intent(OVERLAY_REFRESH);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
/**
* Activity finisher when a theme was updated
*
* @param context Context
* @param packageName Package of theme to close
*/
public static void sendActivityFinisherMessage(Context context,
String packageName) {
Substratum.log("ThemeInstaller",
"A theme has been installed, sending update signal to app for further processing!");
Intent intent = new Intent(ACTIVITY_FINISHER);
intent.putExtra(Internal.THEME_PID, packageName);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
/**
* A package was installed, refresh the ManagerFragment
*
* @param context Context
*/
public static void sendRefreshManagerMessage(Context context) {
Intent intent = new Intent(MANAGER_REFRESH);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
/**
* Register the implicit intent broadcast receivers
*
* @param context Context
*/
public static void registerBroadcastReceivers(Context context) {
try {
IntentFilter intentPackageAdded = new IntentFilter(PACKAGE_ADDED);
intentPackageAdded.addDataScheme("package");
IntentFilter intentPackageFullyRemoved = new IntentFilter(PACKAGE_FULLY_REMOVED);
intentPackageFullyRemoved.addDataScheme("package");
if (Systems.checkOMS(context)) {
IntentFilter intentAppCrashed = new IntentFilter(APP_CRASHED);
context.getApplicationContext().registerReceiver(
new AppCrashReceiver(), intentAppCrashed);
context.getApplicationContext().registerReceiver(
new OverlayUpdater(), intentPackageAdded);
}
if (Systems.checkThemeInterfacer(context)) {
IntentFilter interfacerAuthorize = new IntentFilter(
INTERFACER_PACKAGE + ".CALLER_AUTHORIZED");
context.getApplicationContext().registerReceiver(
new InterfacerAuthorizationReceiver(), interfacerAuthorize);
}
context.getApplicationContext().registerReceiver(
new OverlayFound(), intentPackageAdded);
context.getApplicationContext().registerReceiver(
new PackageModificationDetector(), intentPackageAdded);
context.getApplicationContext().registerReceiver(
new PackageModificationDetector(), intentPackageFullyRemoved);
Substratum.log(SUBSTRATUM_LOG,
"Successfully registered broadcast receivers for Substratum functionality!");
} catch (Exception e) {
Log.e(SUBSTRATUM_LOG,
"Failed to register broadcast receivers for Substratum functionality...");
}
}
/**
* Register the profile screen off receiver
*
* @param context Context
*/
public static void registerProfileScreenOffReceiver(Context context) {
scheduledProfileReceiver = new ScheduledProfileReceiver();
context.registerReceiver(scheduledProfileReceiver,
new IntentFilter(Intent.ACTION_SCREEN_OFF));
}
/**
* Unload the profile screen off receiver
*
* @param context Context
*/
public static void unregisterProfileScreenOffReceiver(Context context) {
try {
context.unregisterReceiver(scheduledProfileReceiver);
} catch (Exception ignored) {
}
}
/**
* Start the key retrieval receiver to obtain the key from the theme
*
* @param context Context
*/
public static void startKeyRetrievalReceiver(Context context) {
try {
IntentFilter intentGetKeys = new IntentFilter(TEMPLATE_RECEIVE_KEYS);
context.getApplicationContext().registerReceiver(
new KeyRetriever(), intentGetKeys);
Substratum.log(SUBSTRATUM_LOG, "Successfully registered key retrieval receiver!");
} catch (Exception e) {
Log.e(SUBSTRATUM_LOG, "Failed to register key retrieval receiver...");
}
}
/**
* Key Retriever Receiver
*/
public static class KeyRetriever extends BroadcastReceiver {
@Override
public void onReceive(Context context,
Intent intent) {
sendLocalizedKeyMessage(
context,
intent.getByteArrayExtra(ENCRYPTION_KEY_EXTRA),
intent.getByteArrayExtra(IV_ENCRYPTION_KEY_EXTRA));
}
}
} | gpl-3.0 |
y123456yz/reading-and-annotate-rocketmq-3.4.6 | rocketmq-src/RocketMQ-3.4.6/rocketmq-client/src/main/java/com/alibaba/rocketmq/client/hook/SendMessageContext.java | 3205 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.client.hook;
import java.util.Map;
import com.alibaba.rocketmq.client.impl.CommunicationMode;
import com.alibaba.rocketmq.client.producer.SendResult;
import com.alibaba.rocketmq.common.message.Message;
import com.alibaba.rocketmq.common.message.MessageQueue;
/**
 * Mutable holder passed along the message-send path (see this hook package):
 * carries the message being sent, its target queue/broker, and the outcome of
 * the send (result or exception). Plain getters/setters; not thread-safe.
 */
public class SendMessageContext {
    // Producer group on whose behalf the message is sent.
    private String producerGroup;
    // The message being sent.
    private Message message;
    // Target message queue.
    private MessageQueue mq;
    // Address of the broker the message is sent to.
    private String brokerAddr;
    // Host the message originates from.
    private String bornHost;
    // Sync/async/oneway send mode.
    private CommunicationMode communicationMode;
    // Result of the send, when it succeeded.
    private SendResult sendResult;
    // Exception raised by the send, when it failed.
    private Exception exception;
    // Opaque trace context object carried through for message tracing.
    private Object mqTraceContext;
    // Additional free-form properties.
    private Map<String, String> props;
    public String getProducerGroup() {
        return producerGroup;
    }
    public void setProducerGroup(String producerGroup) {
        this.producerGroup = producerGroup;
    }
    public Message getMessage() {
        return message;
    }
    public void setMessage(Message message) {
        this.message = message;
    }
    public MessageQueue getMq() {
        return mq;
    }
    public void setMq(MessageQueue mq) {
        this.mq = mq;
    }
    public String getBrokerAddr() {
        return brokerAddr;
    }
    public void setBrokerAddr(String brokerAddr) {
        this.brokerAddr = brokerAddr;
    }
    public CommunicationMode getCommunicationMode() {
        return communicationMode;
    }
    public void setCommunicationMode(CommunicationMode communicationMode) {
        this.communicationMode = communicationMode;
    }
    public SendResult getSendResult() {
        return sendResult;
    }
    public void setSendResult(SendResult sendResult) {
        this.sendResult = sendResult;
    }
    public Exception getException() {
        return exception;
    }
    public void setException(Exception exception) {
        this.exception = exception;
    }
    public Object getMqTraceContext() {
        return mqTraceContext;
    }
    public void setMqTraceContext(Object mqTraceContext) {
        this.mqTraceContext = mqTraceContext;
    }
    public Map<String, String> getProps() {
        return props;
    }
    public void setProps(Map<String, String> props) {
        this.props = props;
    }
    public String getBornHost() {
        return bornHost;
    }
    public void setBornHost(String bornHost) {
        this.bornHost = bornHost;
    }
}
| gpl-3.0 |
ckaestne/LEADT | workspace/hsqldb/src/org/hsqldb/sample/Testdb.java | 7388 | /* Copyright (c) 2001-2008, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.sample;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import org.hsqldb.jdbc.jdbcDataSource;
/**
* Title: Testdb
* Description: simple hello world db example of a
* standalone persistent db application
*
* every time it runs it adds four more rows to sample_table
* it does a query and prints the results to standard out
*
* Author: Karl Meissner karl@meissnersd.com
*/
public class Testdb {

    /**
     * Our connection to the db - persists for the life of the program.
     * We don't want this garbage collected until we are done.
     */
    Connection conn;

    /**
     * Connects to the database. This will load the db files and start the
     * database if it is not already running.
     *
     * @param db_file_name_prefix used to open or create the files that hold
     *        the state of the db; may contain directory names relative to
     *        the current working directory
     */
    public Testdb(String db_file_name_prefix) throws Exception {    // note more general exception

        jdbcDataSource dataSource = new jdbcDataSource();

        dataSource.setDatabase("jdbc:hsqldb:" + db_file_name_prefix);

        // Bug fix: the connection used to be assigned to a throwaway local
        // variable ("Connection c = ..."), leaving the 'conn' field null and
        // causing a NullPointerException in every later query/update/shutdown.
        conn = dataSource.getConnection("sa", "");
    }

    /** Writes out the db files and performs a clean shutdown. */
    public void shutdown() throws SQLException {

        Statement st = conn.createStatement();

        // db writes out to files and performs clean shuts down
        // otherwise there will be an unclean shutdown
        // when program ends
        st.execute("SHUTDOWN");
        conn.close();    // if there are no other open connection
    }

    //use for SQL command SELECT
    public synchronized void query(String expression) throws SQLException {

        Statement st = null;
        ResultSet rs = null;

        st = conn.createStatement();    // statement objects can be reused with
                                        // repeated calls to execute but we
                                        // choose to make a new one each time
        rs = st.executeQuery(expression);    // run the query

        // do something with the result set.
        dump(rs);
        st.close();    // NOTE!! if you close a statement the associated ResultSet is
                       // closed too
                       // so you should copy the contents to some other object.
                       // the result set is invalidated also if you recycle an Statement
                       // and try to execute some other query before the result set has been
                       // completely examined.
    }

    //use for SQL commands CREATE, DROP, INSERT and UPDATE
    public synchronized void update(String expression) throws SQLException {

        Statement st = null;

        st = conn.createStatement();    // statements

        int i = st.executeUpdate(expression);    // run the query

        if (i == -1) {
            System.out.println("db error : " + expression);
        }

        st.close();
    }    // void update()

    /** Prints every row of the result set to standard out. */
    public static void dump(ResultSet rs) throws SQLException {

        // the order of the rows in a cursor
        // are implementation dependent unless you use the SQL ORDER statement
        ResultSetMetaData meta = rs.getMetaData();
        int colmax = meta.getColumnCount();
        int i;
        Object o = null;

        // the result set is a cursor into the data. You can only
        // point to one row at a time
        // assume we are pointing to BEFORE the first row
        // rs.next() points to next row and returns true
        // or false if there is no next row, which breaks the loop
        for (; rs.next(); ) {
            for (i = 0; i < colmax; ++i) {
                o = rs.getObject(i + 1);    // In SQL the first column is indexed
                                            // with 1 not 0

                // Robustness fix: String.valueOf handles SQL NULL (o == null)
                // by printing "null" instead of throwing a NullPointerException.
                System.out.print(String.valueOf(o) + " ");
            }

            System.out.println(" ");
        }
    }    //void dump( ResultSet rs )

    public static void main(String[] args) {

        Testdb db = null;

        try {
            db = new Testdb("db_file");
        } catch (Exception ex1) {
            ex1.printStackTrace();    // could not start db
            return;                   // bye bye
        }

        try {

            //make an empty table
            //
            // by declaring the id column IDENTITY, the db will automatically
            // generate unique values for new rows- useful for row keys
            db.update(
                "CREATE TABLE sample_table ( id INTEGER IDENTITY, str_col VARCHAR(256), num_col INTEGER)");
        } catch (SQLException ex2) {

            //ignore
            //ex2.printStackTrace(); // second time we run program
            // should throw execption since table
            // already there
            //
            // this will have no effect on the db
        }

        try {

            // add some rows - will create duplicates if run more then once
            // the id column is automatically generated
            db.update(
                "INSERT INTO sample_table(str_col,num_col) VALUES('Ford', 100)");
            db.update(
                "INSERT INTO sample_table(str_col,num_col) VALUES('Toyota', 200)");
            db.update(
                "INSERT INTO sample_table(str_col,num_col) VALUES('Honda', 300)");
            db.update(
                "INSERT INTO sample_table(str_col,num_col) VALUES('GM', 400)");

            // do a query
            db.query("SELECT * FROM sample_table WHERE num_col < 250");

            // at end of program
            db.shutdown();
        } catch (SQLException ex3) {
            ex3.printStackTrace();
        }
    }    // main()
}    // class Testdb
| gpl-3.0 |
kevinwang/minecarft | lwjgl-source-2.8.2/src/java/org/lwjgl/opengl/MacOSXCanvasImplementation.java | 2780 | /*
* Copyright (c) 2002-2008 LWJGL Project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'LWJGL' nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.lwjgl.opengl;
import java.awt.GraphicsConfiguration;
import java.awt.GraphicsDevice;
import java.awt.Canvas;
import org.lwjgl.LWJGLException;
/**
*
* @author elias_naur <elias_naur@users.sourceforge.net>
* @version $Revision: 3632 $
* $Id: MacOSXCanvasImplementation.java 3632 2011-09-03 18:52:45Z spasi $
*/
final class MacOSXCanvasImplementation implements AWTCanvasImplementation {
	public PeerInfo createPeerInfo(Canvas component, PixelFormat pixel_format, ContextAttribs attribs) throws LWJGLException {
		try {
			// First attempt with the boolean flag set to true; on failure,
			// retry with false. The flag's meaning is defined by
			// MacOSXAWTGLCanvasPeerInfo — NOTE(review): presumably it selects
			// a preferred peer mode with a fallback; confirm in that class.
			return new MacOSXAWTGLCanvasPeerInfo(component, pixel_format, attribs, true);
		} catch (LWJGLException e) {
			return new MacOSXAWTGLCanvasPeerInfo(component, pixel_format, attribs, false);
		}
	}

	/**
	 * Find a proper GraphicsConfiguration from the given GraphicsDevice and PixelFormat.
	 *
	 * @return The GraphicsConfiguration corresponding to a visual that matches the pixel format.
	 */
	public GraphicsConfiguration findConfiguration(GraphicsDevice device, PixelFormat pixel_format) throws LWJGLException {
		/*
		 * It seems like the best way is to simply return null
		 */
		return null;
	}
}
| gpl-3.0 |
CellularPrivacy/Android-IMSI-Catcher-Detector | AIMSICD/src/main/java/com/secupwn/aimsicd/ui/drawer/NavDrawerItem.java | 575 | /* Android IMSI-Catcher Detector | (c) AIMSICD Privacy Project
* -----------------------------------------------------------
* LICENSE: http://git.io/vki47 | TERMS: http://git.io/vki4o
* -----------------------------------------------------------
*/
package com.secupwn.aimsicd.ui.drawer;
import android.support.annotation.DrawableRes;
/**
 * Contract for a single entry in the navigation drawer.
 */
public interface NavDrawerItem {
    /** @return the identifier of this drawer entry. */
    int getId();
    /** @return the text displayed for this entry. */
    String getLabel();
    /** Sets the text displayed for this entry. */
    void setLabel(String label);
    /** Sets the drawable resource used as this entry's icon. */
    void setIconId(@DrawableRes int icon);
    /** @return an integer discriminating between drawer item kinds. */
    int getType();
    /** @return whether this entry is enabled. */
    boolean isEnabled();
    /** @return whether selecting this entry should update the action bar title. */
    boolean updateActionBarTitle();
}
| gpl-3.0 |
gihon19/postHotelSonrisa | postHotelSonrisa/src/view/rendes/RoundJTextField.java | 992 | package view.rendes;
import java.awt.Graphics;
import java.awt.Shape;
import java.awt.geom.RoundRectangle2D;
import javax.swing.JTextField;
/**
 * A {@link JTextField} painted with rounded corners. The background, border
 * and mouse hit-testing all use the same rounded-rectangle outline.
 */
public class RoundJTextField extends JTextField {

    private static final long serialVersionUID = 1L;

    /** Corner arc diameter (px) shared by painting and hit-testing. */
    private static final int ARC = 15;

    /** Cached hit-test shape, rebuilt whenever the component size changes. */
    private Shape shape;
    private int shapeWidth = -1;
    private int shapeHeight = -1;

    public RoundJTextField(int size) {
        super(size);
        setOpaque(false); // As suggested by @AVD in comment; we paint the background ourselves.
    }

    @Override
    protected void paintComponent(Graphics g) {
        g.setColor(getBackground());
        g.fillRoundRect(0, 0, getWidth() - 1, getHeight() - 1, ARC, ARC);
        super.paintComponent(g);
    }

    @Override
    protected void paintBorder(Graphics g) {
        g.setColor(getForeground());
        g.drawRoundRect(0, 0, getWidth() - 1, getHeight() - 1, ARC, ARC);
    }

    @Override
    public boolean contains(int x, int y) {
        // Rebuild the cached shape only when the size actually changed.
        // BUG FIX: the old check compared shape.getBounds() (local coords,
        // width-1/height-1) against getBounds() (parent-relative, full size),
        // which never matched, so the shape was re-allocated on every call.
        if (shape == null || shapeWidth != getWidth() || shapeHeight != getHeight()) {
            shapeWidth = getWidth();
            shapeHeight = getHeight();
            shape = new RoundRectangle2D.Float(0, 0, shapeWidth - 1, shapeHeight - 1, ARC, ARC);
        }
        return shape.contains(x, y);
    }
}
jvanz/core | connectivity/com/sun/star/sdbcx/comp/hsqldb/StorageFileAccess.java | 3316 | /*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
package com.sun.star.sdbcx.comp.hsqldb;
import org.hsqldb.lib.FileAccess;
import org.hsqldb.lib.FileSystemRuntimeException;
/**
 * HSQLDB {@link FileAccess} implementation backed by native (JNI) storage
 * routines. All elements are addressed inside the storage unit identified by
 * {@code key}; the actual I/O is delegated to the native methods at the
 * bottom of this class and to the Native*StreamHelper classes.
 */
@SuppressWarnings("ucd")
public class StorageFileAccess implements org.hsqldb.lib.FileAccess{
    // Load the JNI library once, when the class is first used.
    static { NativeLibraries.load(); }
    // NOTE(review): never read in this class — possibly legacy or used elsewhere.
    String ds_name;
    // Identifies the storage unit all element names are resolved against.
    String key;
    /** Creates a new instance of StorageFileAccess */
    public StorageFileAccess(Object key) throws java.lang.Exception{
        this.key = (String)key;
    }
    // Intentional no-op: the storage backend apparently needs no directory
    // hierarchy (directories are never created anywhere in this class).
    public void createParentDirs(String filename) {
    }
    /** Returns whether an element with the given name exists in the storage. */
    public boolean isStreamElement(String elementName) {
        return isStreamElement(key,elementName);
    }
    /** Opens a native-backed input stream for the named element. */
    public java.io.InputStream openInputStreamElement(String streamName) throws java.io.IOException {
        return new NativeInputStreamHelper(key,streamName);
    }
    /** Opens a native-backed output stream for the named element. */
    public java.io.OutputStream openOutputStreamElement(String streamName) throws java.io.IOException {
        return new NativeOutputStreamHelper(key,streamName);
    }
    /**
     * Removes the named element if it exists; I/O failures are rethrown as
     * the unchecked {@link FileSystemRuntimeException} required by HSQLDB.
     */
    public void removeElement(String filename) throws java.util.NoSuchElementException {
        try {
            if ( isStreamElement(key,filename) )
                removeElement(key,filename);
        } catch (java.io.IOException e) {
            throw new FileSystemRuntimeException( e );
        }
    }
    /**
     * Renames oldName to newName. Any existing element under newName is
     * removed first (overwrite semantics); the rename is skipped entirely
     * when oldName does not exist.
     */
    public void renameElement(String oldName, String newName) throws java.util.NoSuchElementException {
        try {
            if ( isStreamElement(key,oldName) ){
                removeElement(key,newName);
                renameElement(key,oldName, newName);
            }
        } catch (java.io.IOException e) {
            throw new FileSystemRuntimeException( e );
        }
    }
    /** Adapter exposing NativeOutputStreamHelper.sync() through FileAccess.FileSync. */
    private class FileSync implements FileAccess.FileSync
    {
        private final NativeOutputStreamHelper os;
        private FileSync(NativeOutputStreamHelper _os)
        {
            os = _os;
        }
        public void sync() throws java.io.IOException
        {
            os.sync();
        }
    }
    // Contract: only streams obtained from openOutputStreamElement() may be
    // passed in — the cast below fails for anything else.
    public FileAccess.FileSync getFileSync(java.io.OutputStream os) throws java.io.IOException
    {
        return new FileSync((NativeOutputStreamHelper)os);
    }
    // Native counterparts implementing the actual storage operations (JNI).
    static native boolean isStreamElement(String key,String elementName);
    static native void removeElement(String key,String filename) throws java.util.NoSuchElementException, java.io.IOException;
    static native void renameElement(String key,String oldName, String newName) throws java.util.NoSuchElementException, java.io.IOException;
}
| gpl-3.0 |
Niall7459/Nukkit | src/main/java/cn/nukkit/redstone/Redstone.java | 8365 | package cn.nukkit.redstone;
import cn.nukkit.block.Block;
import cn.nukkit.block.BlockRedstoneWire;
import cn.nukkit.block.BlockSolid;
import cn.nukkit.math.Vector3;
import java.util.*;
/**
* author: Angelic47
* Nukkit Project
*/
public class Redstone {
public static final int POWER_NONE = 0;
public static final int POWER_WEAKEST = 1;
public static final int POWER_STRONGEST = 16;
//NOTICE: Here POWER_STRONGEST is 16, not 15.
//I set it to 16 in order to calculate the energy in blocks, such as the redstone torch under the cobblestone.
//At that time, the cobblestone's energy is 16, not 15. If you put a redstone wire next to it, the redstone wire will got 15 energy.
//So, POWER_WEAKEST also means that energy in blocks, not redstone wire it self. So set it to 1.
private static final Comparator<UpdateObject> orderIsdn = new Comparator<UpdateObject>() {
@Override
public int compare(UpdateObject o1, UpdateObject o2) {
if (o1.getPopulation() > o2.getPopulation()) {
return -1;
} else if (o1.getPopulation() < o2.getPopulation()) {
return 1;
} else {
return 0;
}
}
};
public static void active(Block source) {
Queue<UpdateObject> updateQueue = new PriorityQueue<>(1, orderIsdn);
int currentLevel = source.getPowerLevel() - 1;
if (currentLevel <= 0) {
return;
}
addToQueue(updateQueue, source);
while (!updateQueue.isEmpty()) {
UpdateObject updatingObj = updateQueue.poll();
Block updating = updatingObj.getLocation();
currentLevel = updatingObj.getPopulation();
if (currentLevel > updating.getPowerLevel()) {
updating.setPowerLevel(currentLevel);
updating.getLevel().setBlock(updating, updating, true, true);
addToQueue(updateQueue, updating);
}
}
}
public static void active(Block source, Map<String, Block> allBlocks) {
Queue<UpdateObject> updateQueue = new PriorityQueue<>(1, orderIsdn);
int currentLevel = source.getPowerLevel() - 1;
if (currentLevel <= 0) {
return;
}
addToQueue(updateQueue, source);
while (!updateQueue.isEmpty()) {
UpdateObject updatingObj = updateQueue.poll();
Block updating = updatingObj.getLocation();
currentLevel = updatingObj.getPopulation();
if (currentLevel > updating.getPowerLevel()) {
updating.setPowerLevel(currentLevel);
updating.getLevel().setBlock(updating, updating, true, true);
if (allBlocks.containsKey(updating.getLocationHash())) {
allBlocks.remove(updating.getLocationHash());
}
addToQueue(updateQueue, updating);
}
}
}
public static void deactive(Block source, int updateLevel) {
//Step 1: find blocks which need to update
Queue<UpdateObject> updateQueue = new PriorityQueue<>(1, orderIsdn);
Queue<UpdateObject> sourceList = new PriorityQueue<>(1, orderIsdn);
Map<String, Block> updateMap = new HashMap<>();
Map<String, Block> closedMap = new HashMap<>();
int currentLevel = updateLevel;
if (currentLevel <= 0) {
return;
}
addToDeactiveQueue(updateQueue, source, closedMap, sourceList, currentLevel);
while (!updateQueue.isEmpty()) {
UpdateObject updateObject = updateQueue.poll();
Block updating = updateObject.getLocation();
currentLevel = updateObject.getPopulation();
if (currentLevel >= updating.getPowerLevel()) {
updating.setPowerLevel(0);
updateMap.put(updating.getLocationHash(), updating);
addToDeactiveQueue(updateQueue, updating, closedMap, sourceList, currentLevel);
} else {
sourceList.add(new UpdateObject(updating.getPowerLevel(), updating));
}
}
//Step 2: recalculate redstone power
while (!sourceList.isEmpty()) {
active(sourceList.poll().getLocation(), updateMap);
}
for (Block block : updateMap.values()) {
block.setPowerLevel(0);
block.getLevel().setBlock(block, block, true, true);
}
}
private static void addToQueue(Queue<UpdateObject> updateQueue, Block location) {
if (location.getPowerLevel() <= 0) {
return;
}
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST, Vector3.SIDE_UP, Vector3.SIDE_DOWN}) {
if (location.getSide(side) instanceof BlockRedstoneWire) {
updateQueue.add(new UpdateObject(location.getPowerLevel() - 1, location.getSide(side)));
}
}
if (location instanceof BlockRedstoneWire) {
Block block = location.getSide(Vector3.SIDE_UP);
if (!(block instanceof BlockSolid)) {
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST}) {
if (block.getSide(side) instanceof BlockRedstoneWire) {
updateQueue.add(new UpdateObject(location.getPowerLevel() - 1, block.getSide(side)));
}
}
}
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_WEST, Vector3.SIDE_EAST, Vector3.SIDE_SOUTH}) {
block = location.getSide(side);
if (!(block instanceof BlockSolid)) {
Block blockDown;
blockDown = block.getSide(Vector3.SIDE_DOWN);
if (blockDown instanceof BlockRedstoneWire) {
updateQueue.add(new UpdateObject(location.getPowerLevel() - 1, blockDown));
}
}
}
}
}
private static void addToDeactiveQueue(Queue<UpdateObject> updateQueue, Block location, Map<String, Block> closedMap, Queue<UpdateObject> sourceList, int updateLevel) {
if (updateLevel < 0) {
return;
}
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST, Vector3.SIDE_UP, Vector3.SIDE_DOWN}) {
if (location.getSide(side).isPowerSource() || (updateLevel == 0 && location.getSide(side).isPowered())) {
sourceList.add(new UpdateObject(location.getPowerLevel(side), location.getSide(side)));
} else if (location.getSide(side) instanceof BlockRedstoneWire) {
if (!closedMap.containsKey(location.getSide(side).getLocationHash())) {
closedMap.put(location.getSide(side).getLocationHash(), location.getSide(side));
updateQueue.add(new UpdateObject(updateLevel - 1, location.getSide(side)));
}
}
}
if (location instanceof BlockRedstoneWire) {
Block block = location.getSide(Vector3.SIDE_UP);
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST}) {
if (block.getSide(side) instanceof BlockRedstoneWire) {
if (!closedMap.containsKey(block.getSide(side).getLocationHash())) {
closedMap.put(block.getSide(side).getLocationHash(), block.getSide(side));
updateQueue.add(new UpdateObject(updateLevel - 1, block.getSide(side)));
}
}
}
Block blockDown;
for (int side : new int[]{Vector3.SIDE_NORTH, Vector3.SIDE_SOUTH, Vector3.SIDE_EAST, Vector3.SIDE_WEST}) {
block = location.getSide(side);
blockDown = block.getSide(Vector3.SIDE_DOWN);
if (blockDown instanceof BlockRedstoneWire) {
if (!closedMap.containsKey(blockDown.getLocationHash())) {
closedMap.put(blockDown.getLocationHash(), blockDown);
updateQueue.add(new UpdateObject(updateLevel - 1, blockDown));
}
}
}
}
}
} | gpl-3.0 |
aebert1/BigTransport | build/tmp/recompileMc/sources/net/minecraft/entity/monster/EntityMob.java | 6264 | package net.minecraft.entity.monster;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityCreature;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.init.SoundEvents;
import net.minecraft.item.ItemAxe;
import net.minecraft.item.ItemStack;
import net.minecraft.util.DamageSource;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.SoundEvent;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.EnumSkyBlock;
import net.minecraft.world.World;
/**
 * Base class for hostile mobs: peaceful-difficulty despawning, hostile sound
 * events, melee attack handling (including enchantments and shield breaking),
 * and light-level-based spawn checks.
 */
public abstract class EntityMob extends EntityCreature implements IMob
{
    public EntityMob(World worldIn)
    {
        super(worldIn);
        this.experienceValue = 5;
    }

    public SoundCategory getSoundCategory()
    {
        return SoundCategory.HOSTILE;
    }

    /**
     * Called frequently so the entity can update its state every tick as required. For example, zombies and skeletons
     * use this to react to sunlight and start to burn.
     */
    public void onLivingUpdate()
    {
        this.updateArmSwingProgress();
        float f = this.getBrightness(1.0F);

        // In bright light (> 0.5) the age counter advances at triple rate
        // (the extra +2 here on top of the normal +1 elsewhere).
        if (f > 0.5F)
        {
            this.entityAge += 2;
        }

        super.onLivingUpdate();
    }

    /**
     * Called to update the entity's position/logic.
     */
    public void onUpdate()
    {
        super.onUpdate();

        // Hostile mobs are removed server-side on peaceful difficulty.
        if (!this.worldObj.isRemote && this.worldObj.getDifficulty() == EnumDifficulty.PEACEFUL)
        {
            this.setDead();
        }
    }

    protected SoundEvent getSwimSound()
    {
        return SoundEvents.entity_hostile_swim;
    }

    protected SoundEvent getSplashSound()
    {
        return SoundEvents.entity_hostile_splash;
    }

    /**
     * Called when the entity is attacked.
     */
    public boolean attackEntityFrom(DamageSource source, float amount)
    {
        return this.isEntityInvulnerable(source) ? false : super.attackEntityFrom(source, amount);
    }

    protected SoundEvent getHurtSound()
    {
        return SoundEvents.entity_hostile_hurt;
    }

    protected SoundEvent getDeathSound()
    {
        return SoundEvents.entity_hostile_death;
    }

    protected SoundEvent getFallSound(int heightIn)
    {
        return heightIn > 4 ? SoundEvents.entity_hostile_big_fall : SoundEvents.entity_hostile_small_fall;
    }

    /**
     * Performs this mob's melee attack: base attack damage plus weapon
     * enchantment bonuses, knockback, fire aspect, and the axe-vs-shield
     * disable chance against blocking players.
     *
     * @return true if the target actually took the damage
     */
    public boolean attackEntityAsMob(Entity entityIn)
    {
        float f = (float)this.getEntityAttribute(SharedMonsterAttributes.ATTACK_DAMAGE).getAttributeValue();
        int i = 0;

        if (entityIn instanceof EntityLivingBase)
        {
            // Creature-type damage bonus (e.g. Smite) and knockback enchantments.
            f += EnchantmentHelper.getModifierForCreature(this.getHeldItemMainhand(), ((EntityLivingBase)entityIn).getCreatureAttribute());
            i += EnchantmentHelper.getKnockbackModifier(this);
        }

        boolean flag = entityIn.attackEntityFrom(DamageSource.causeMobDamage(this), f);

        if (flag)
        {
            if (i > 0 && entityIn instanceof EntityLivingBase)
            {
                // Knock the target away along this mob's facing; the attacker
                // itself is slowed as recoil.
                ((EntityLivingBase)entityIn).knockBack(this, (float)i * 0.5F, (double)MathHelper.sin(this.rotationYaw * 0.017453292F), (double)(-MathHelper.cos(this.rotationYaw * 0.017453292F)));
                this.motionX *= 0.6D;
                this.motionZ *= 0.6D;
            }

            int j = EnchantmentHelper.getFireAspectModifier(this);

            if (j > 0)
            {
                entityIn.setFire(j * 4);
            }

            if (entityIn instanceof EntityPlayer)
            {
                EntityPlayer entityplayer = (EntityPlayer)entityIn;
                ItemStack itemstack = this.getHeldItemMainhand();
                ItemStack itemstack1 = entityplayer.isHandActive() ? entityplayer.getActiveItemStack() : null;

                // Axe vs. raised shield: chance to put the shield on cooldown
                // (scales with the attacker's Efficiency level).
                if (itemstack != null && itemstack1 != null && itemstack.getItem() instanceof ItemAxe && itemstack1.getItem() == Items.shield)
                {
                    float f1 = 0.25F + (float)EnchantmentHelper.getEfficiencyModifier(this) * 0.05F;

                    if (this.rand.nextFloat() < f1)
                    {
                        entityplayer.getCooldownTracker().setCooldown(Items.shield, 100);
                        this.worldObj.setEntityState(entityplayer, (byte)30);
                    }
                }
            }

            this.applyEnchantments(this, entityIn);
        }

        return flag;
    }

    // Darker positions score higher: hostile mobs prefer low-light paths.
    public float getBlockPathWeight(BlockPos pos)
    {
        return 0.5F - this.worldObj.getLightBrightness(pos);
    }

    /**
     * Checks to make sure the light is not too bright where the mob is spawning
     */
    protected boolean isValidLightLevel()
    {
        BlockPos blockpos = new BlockPos(this.posX, this.getEntityBoundingBox().minY, this.posZ);

        if (this.worldObj.getLightFor(EnumSkyBlock.SKY, blockpos) > this.rand.nextInt(32))
        {
            return false;
        }
        else
        {
            int i = this.worldObj.getLightFromNeighbors(blockpos);

            // During thunderstorms the sky darkening is temporarily forced to
            // level 10 so spawns behave as if it were night, then restored.
            if (this.worldObj.isThundering())
            {
                int j = this.worldObj.getSkylightSubtracted();
                this.worldObj.setSkylightSubtracted(10);
                i = this.worldObj.getLightFromNeighbors(blockpos);
                this.worldObj.setSkylightSubtracted(j);
            }

            return i <= this.rand.nextInt(8);
        }
    }

    /**
     * Checks if the entity's current position is a valid location to spawn this entity.
     */
    public boolean getCanSpawnHere()
    {
        return this.worldObj.getDifficulty() != EnumDifficulty.PEACEFUL && this.isValidLightLevel() && super.getCanSpawnHere();
    }

    protected void applyEntityAttributes()
    {
        super.applyEntityAttributes();
        this.getAttributeMap().registerAttribute(SharedMonsterAttributes.ATTACK_DAMAGE);
    }

    /**
     * Entity won't drop items or experience points if this returns false
     */
    protected boolean canDropLoot()
    {
        return true;
    }
}
s20121035/rk3288_android5.1_repo | external/apache-harmony/jdwp/src/test/java/org/apache/harmony/jpda/tests/jdwp/ThreadReference/ThreadGroupDebuggee.java | 4431 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Vitaly A. Provodin
*/
/**
* Created on 18.02.2005
*/
package org.apache.harmony.jpda.tests.jdwp.ThreadReference;
import org.apache.harmony.jpda.tests.framework.DebuggeeSynchronizer;
import org.apache.harmony.jpda.tests.framework.LogWriter;
import org.apache.harmony.jpda.tests.share.JPDADebuggeeSynchronizer;
import org.apache.harmony.jpda.tests.share.SyncDebuggee;
/**
* The class specifies debuggee for <code>org.apache.harmony.jpda.tests.jdwp.ThreadReference.ThreadGroupTest</code>.
* This debuggee is started as follow:
* <ol>
* <li>the tested group <code>TESTED_GROUP</code> is created
* <li>the tested thread <code>TESTED_THREAD</code> is started so this
* thread belongs to that thread group
* </ol>
* For different goals of tests, the debuggee sends the <code>SGNL_READY</code>
* signal to and waits for the <code>SGNL_CONTINUE</code> signal from debugger
* in two places:
* <ul>
* <li>right away when the tested thread has been started
* <li>when the tested thread has been finished
* </ul>
*/
public class ThreadGroupDebuggee extends SyncDebuggee {

    public static final String TESTED_GROUP = "TestedGroup";
    public static final String TESTED_THREAD = "TestedThread";

    // Monitor the main debuggee thread waits on until the tested thread signals start.
    static Object waitForStart = new Object();
    // Monitor the tested thread holds for its whole lifetime (see DebuggeeThread.run).
    static Object waitForFinish = new Object();
    // Dedicated monitor for the timed wait below.
    static Object waitTimeObject = new Object();

    // Sleeps roughly mlsecsTime milliseconds via Object.wait(); interruptions
    // and any other failures are deliberately swallowed (best-effort delay).
    static void waitMlsecsTime(long mlsecsTime) {
        synchronized(waitTimeObject) {
            try {
                waitTimeObject.wait(mlsecsTime);
            } catch (Throwable throwable) {
                // ignore
            }
        }
    }

    public void run() {
        ThreadGroup thrdGroup = new ThreadGroup(TESTED_GROUP);
        DebuggeeThread thrd = new DebuggeeThread(thrdGroup, TESTED_THREAD,
                logWriter, synchronizer);

        // Start the tested thread; waitForStart is held so the child's
        // notifyAll cannot fire before we are waiting.
        synchronized(waitForStart){
            thrd.start();
            try {
                waitForStart.wait();
            } catch (InterruptedException e) {
            }
        }

        // Poll in 100 ms steps until the tested thread terminates.
        while ( thrd.isAlive() ) {
            waitMlsecsTime(100);
        }
//        synchronized(waitForFinish){
        logWriter.println("thread is finished");
//        }
        logWriter.println("send SGNL_READY");
        synchronizer.sendMessage(JPDADebuggeeSynchronizer.SGNL_READY);
        synchronizer.receiveMessage(JPDADebuggeeSynchronizer.SGNL_CONTINUE);
    }

    // The tested thread: signals it has started, then blocks until the
    // debugger side sends SGNL_CONTINUE, so tests can inspect it while alive.
    class DebuggeeThread extends Thread {

        LogWriter logWriter;
        DebuggeeSynchronizer synchronizer;

        public DebuggeeThread(ThreadGroup thrdGroup, String name,
                LogWriter logWriter, DebuggeeSynchronizer synchronizer) {
            super(thrdGroup, name);
            this.logWriter = logWriter;
            this.synchronizer = synchronizer;
        }

        public void run() {
            // waitForFinish is held for the whole lifetime of this thread;
            // waitForStart is taken briefly to wake the parent thread up.
            synchronized(ThreadGroupDebuggee.waitForFinish){
                synchronized(ThreadGroupDebuggee.waitForStart){
                    ThreadGroupDebuggee.waitForStart.notifyAll();
                    logWriter.println(getName() + ": started");
                    synchronizer.sendMessage(JPDADebuggeeSynchronizer.SGNL_READY);
                    logWriter.println(getName() + ": wait for SGNL_CONTINUE");
                    synchronizer.receiveMessage(JPDADebuggeeSynchronizer.SGNL_CONTINUE);
                    logWriter.println(getName() + ": finished");
                }
            }
        }
    }

    public static void main(String [] args) {
        runDebuggee(ThreadGroupDebuggee.class);
    }
}
| gpl-3.0 |
mark47/OESandbox | app/src/us/mn/state/health/lims/codeelementtype/action/CodeElementTypeNextPreviousAction.java | 5173 | /**
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations under
* the License.
*
* The Original Code is OpenELIS code.
*
* Copyright (C) The Minnesota Department of Health. All Rights Reserved.
*/
package us.mn.state.health.lims.codeelementtype.action;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import us.mn.state.health.lims.codeelementtype.dao.CodeElementTypeDAO;
import us.mn.state.health.lims.codeelementtype.daoimpl.CodeElementTypeDAOImpl;
import us.mn.state.health.lims.codeelementtype.valueholder.CodeElementType;
import us.mn.state.health.lims.common.action.BaseAction;
import us.mn.state.health.lims.common.action.BaseActionForm;
import us.mn.state.health.lims.common.exception.LIMSRuntimeException;
import us.mn.state.health.lims.common.log.LogEvent;
import us.mn.state.health.lims.common.util.StringUtil;
/**
* @author diane benz
*
* To change this generated comment edit the template variable "typecomment":
* Window>Preferences>Java>Templates. To enable and disable the creation of type
* comments go to Window>Preferences>Java>Code Generation.
*/
/**
 * Struts action that navigates from the currently displayed CodeElementType
 * record to its next or previous record (ordered by text, not id), driving
 * the enabled/disabled state of the next/previous buttons on the page.
 */
public class CodeElementTypeNextPreviousAction extends BaseAction {

	// NOTE(review): set but never read in this class — kept for parity with
	// sibling actions; confirm before removing.
	private boolean isNew = false;

	protected ActionForward performAction(ActionMapping mapping,
			ActionForm form, HttpServletRequest request,
			HttpServletResponse response) throws Exception {
		// The first job is to determine if we are coming to this action with an
		// ID parameter in the request. If there is no parameter, we are
		// creating a new Analyte.
		// If there is a parameter present, we should bring up an existing
		// Analyte to edit.
		String forward = FWD_SUCCESS;
		request.setAttribute(ALLOW_EDITS_KEY, "true");
		request.setAttribute(PREVIOUS_DISABLED, "false");
		request.setAttribute(NEXT_DISABLED, "false");

		String id = request.getParameter(ID);

		// "0" or empty id means a brand new record.
		if (StringUtil.isNullorNill(id) || "0".equals(id)) {
			isNew = true;
		} else {
			isNew = false;
		}

		BaseActionForm dynaForm = (BaseActionForm) form;

		String start = (String) request.getParameter("startingRecNo");
		String direction = (String) request.getParameter("direction");

		// System.out.println("This is ID from request " + id);
		CodeElementType codeElementType = new CodeElementType();
		codeElementType.setId(id);

		try {
			CodeElementTypeDAO codeElementTypeDAO = new CodeElementTypeDAOImpl();
			//retrieve analyte by id since the name may have changed
			codeElementTypeDAO.getData(codeElementType);

			if (FWD_NEXT.equals(direction)) {
				//bugzilla 1427 pass in name not id
				// The DAO returns the following record(s); fewer than 2 means
				// we just landed on the last one, so disable "next".
				List codeElementTypes = codeElementTypeDAO.getNextCodeElementTypeRecord(codeElementType.getText());
				if (codeElementTypes != null && codeElementTypes.size() > 0) {
					codeElementType = (CodeElementType) codeElementTypes.get(0);
					codeElementTypeDAO.getData(codeElementType);
					if (codeElementTypes.size() < 2) {
						// disable next button
						request.setAttribute(NEXT_DISABLED, "true");
					}
					id = codeElementType.getId();
				} else {
					// just disable next button
					request.setAttribute(NEXT_DISABLED, "true");
				}
			}

			if (FWD_PREVIOUS.equals(direction)) {
				//bugzilla 1427 pass in name not id
				// Mirror of the "next" branch for the previous record.
				List codeElementTypes = codeElementTypeDAO.getPreviousCodeElementTypeRecord(codeElementType.getText());
				if (codeElementTypes != null && codeElementTypes.size() > 0) {
					codeElementType = (CodeElementType) codeElementTypes.get(0);
					codeElementTypeDAO.getData(codeElementType);
					if (codeElementTypes.size() < 2) {
						// disable previous button
						request.setAttribute(PREVIOUS_DISABLED, "true");
					}
					id = codeElementType.getId();
				} else {
					// just disable next button
					request.setAttribute(PREVIOUS_DISABLED, "true");
				}
			}
		} catch (LIMSRuntimeException lre) {
			//bugzilla 2154
			// On DAO failure: log, lock editing, disable both buttons and fail.
			LogEvent.logError("CodeElementTypeNextPreviousAction","performAction()",lre.toString());
			request.setAttribute(ALLOW_EDITS_KEY, "false");
			// disable previous and next
			request.setAttribute(PREVIOUS_DISABLED, "true");
			request.setAttribute(NEXT_DISABLED, "true");
			forward = FWD_FAIL;
		}

		if (forward.equals(FWD_FAIL))
			return mapping.findForward(forward);

		if (codeElementType.getId() != null && !codeElementType.getId().equals("0")) {
			request.setAttribute(ID, codeElementType.getId());
		}

		return getForward(mapping.findForward(forward), id, start);
	}

	// No page title for this navigation-only action.
	protected String getPageTitleKey() {
		return null;
	}

	// No page subtitle for this navigation-only action.
	protected String getPageSubtitleKey() {
		return null;
	}
}
DevinZ1993/Pieces-of-Code | java/Sets/src/SegmentTree.java | 3871 | import java.util.Scanner;
/**
 * Segment tree over long sums with lazy range-add propagation.
 * Supports update(p, r, delta) — add delta to every element in [p, r] —
 * and query(p, r) — sum of elements in [p, r] — both in O(log n).
 */
public class SegmentTree {

    /** One node: inclusive index range [left, right], pending lazy add, range sum. */
    private static class Node {
        public int left, right;
        public long add, sum;

        public Node(int left, int right, long sum) {
            this.left = left;
            this.right = right;
            this.sum = sum;
        }
    }

    private Node[] nodes;
    private int size;

    /** Builds the tree over arr[0 .. n-1]. */
    public SegmentTree(int n, int[] arr) {
        size = (n << 2); // 4n slots always suffice for a heap-stored segment tree
        nodes = new Node[size];
        construct(0, 0, n - 1, arr);
    }

    /** Recursively materializes the node at heap index idx covering [lo, hi]. */
    private void construct(int idx, int lo, int hi, int[] arr) {
        if (lo == hi) {
            nodes[idx] = new Node(lo, hi, arr[lo]);
            return;
        }
        int mid = (lo + hi) / 2;
        construct(2 * idx + 1, lo, mid, arr);
        construct(2 * idx + 2, mid + 1, hi, arr);
        nodes[idx] = new Node(lo, hi, nodes[2 * idx + 1].sum + nodes[2 * idx + 2].sum);
    }

    /**
     * Adds delt to every element with index in [p, r]. The range is clamped
     * to the tree's bounds; an empty clamped range is a no-op.
     */
    public void update(int p, int r, long delt) {
        int lo = Math.max(p, nodes[0].left);
        int hi = Math.min(r, nodes[0].right);
        if (lo <= hi) {
            applyRange(0, lo, hi, delt);
        }
    }

    private void applyRange(int idx, int lo, int hi, long delt) {
        Node cur = nodes[idx];
        if (lo <= cur.left && cur.right <= hi) {
            // Fully covered: record lazily and adjust the aggregate sum.
            cur.add += delt;
            cur.sum += (cur.right - cur.left + 1) * delt;
            return;
        }
        if (cur.add != 0) {
            pushDown(idx);
        }
        int mid = (cur.left + cur.right) / 2;
        if (lo <= mid) {
            applyRange(2 * idx + 1, lo, hi, delt);
        }
        if (hi >= mid + 1) {
            applyRange(2 * idx + 2, lo, hi, delt);
        }
        cur.sum = nodes[2 * idx + 1].sum + nodes[2 * idx + 2].sum;
    }

    /** Pushes the pending lazy add of node idx down to its two children. */
    private void pushDown(int idx) {
        Node cur = nodes[idx];
        Node lc = nodes[2 * idx + 1];
        Node rc = nodes[2 * idx + 2];
        lc.add += cur.add;
        lc.sum += (lc.right - lc.left + 1) * cur.add;
        rc.add += cur.add;
        rc.sum += (rc.right - rc.left + 1) * cur.add;
        cur.add = 0;
    }

    /**
     * Returns the sum of elements in [p, r], or 0 when [p, r] is not fully
     * contained in the tree's index range (matching the original contract).
     */
    public long query(int p, int r) {
        if (nodes[0].left > p || nodes[0].right < r) {
            return 0;
        }
        return rangeSum(0, p, r);
    }

    private long rangeSum(int idx, int lo, int hi) {
        Node cur = nodes[idx];
        if (lo <= cur.left && cur.right <= hi) {
            return cur.sum;
        }
        if (cur.add != 0) {
            pushDown(idx);
        }
        int mid = (cur.left + cur.right) / 2;
        long total = 0;
        if (lo <= mid) {
            total += rangeSum(2 * idx + 1, lo, hi);
        }
        if (hi >= mid + 1) {
            total += rangeSum(2 * idx + 2, lo, hi);
        }
        return total;
    }

    public static void main(String[] args) {
        Main.main(args);
    }
}

class Main {
    /** POJ 3468: http://poj.org/problem?id=3468 */
    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        int n = sc.nextInt();
        int[] initial = new int[n];
        int queries = sc.nextInt();
        for (int idx = 0; idx < n; idx++) {
            initial[idx] = sc.nextInt();
        }
        SegmentTree tree = new SegmentTree(n, initial);
        while (queries-- > 0) {
            String cmd = sc.next();
            if (cmd.equals("C")) {
                int from = sc.nextInt() - 1;
                int to = sc.nextInt() - 1;
                tree.update(from, to, sc.nextInt());
            } else if (cmd.equals("Q")) {
                int from = sc.nextInt() - 1;
                int to = sc.nextInt() - 1;
                System.out.println(tree.query(from, to));
            }
        }
        sc.close();
    }
}
| mpl-2.0 |
ahachete/torodb | torod/torod-core/src/main/java/com/torodb/torod/core/language/querycriteria/IsGreaterOrEqualQueryCriteria.java | 1681 | /*
* This file is part of ToroDB.
*
* ToroDB is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ToroDB is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with ToroDB. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright (c) 2014, 8Kdata Technology
*
*/
package com.torodb.torod.core.language.querycriteria;
import com.torodb.torod.core.language.AttributeReference;
import com.torodb.torod.core.language.querycriteria.utils.QueryCriteriaVisitor;
import com.torodb.torod.core.subdocument.values.Value;
/**
*
*/
/**
 * Query criteria node representing a "greater than or equal" (>=) comparison
 * between an attribute reference and a constant value.
 */
public class IsGreaterOrEqualQueryCriteria extends AttributeAndValueQueryCriteria {

    private static final long serialVersionUID = 1L;

    public IsGreaterOrEqualQueryCriteria(AttributeReference attributeReference, Value<?> val) {
        super(attributeReference, val);
    }

    @Override
    protected int getBaseHash() {
        // Per-subclass seed — presumably mixed into hashCode by the parent
        // class (parent implementation not visible here).
        return 5;
    }

    @Override
    public String toString() {
        return getAttributeReference() + " >= " + getValue();
    }

    /** Visitor double-dispatch hook. */
    @Override
    public <Result, Arg> Result accept(QueryCriteriaVisitor<Result, Arg> visitor, Arg arg) {
        return visitor.visit(this, arg);
    }
}
| agpl-3.0 |
acz-icm/coansys | disambiguation-author/disambiguation-author-logic/src/main/java/pl/edu/icm/coansys/disambiguation/author/pig/merger/MergeDocumentWithOrcid.java | 6921 | /*
* This file is part of CoAnSys project.
* Copyright (c) 2012-2015 ICM-UW
*
* CoAnSys is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* CoAnSys is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with CoAnSys. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.edu.icm.coansys.disambiguation.author.pig.merger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.tools.pigstats.PigStatusReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.edu.icm.coansys.commons.java.DiacriticsRemover;
import pl.edu.icm.coansys.commons.java.StackTraceExtractor;
import pl.edu.icm.coansys.models.DocumentProtos.Author;
import pl.edu.icm.coansys.models.DocumentProtos.BasicMetadata;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentMetadata;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentWrapper;
import pl.edu.icm.coansys.models.DocumentProtos.KeyValue;
/**
*
* @author pdendek
*/
public class MergeDocumentWithOrcid extends EvalFunc<Tuple> {
PigStatusReporter myPigStatusReporter;
private static final Logger logger = LoggerFactory
.getLogger(MergeDocumentWithOrcid.class);
	/**
	 * Declares the UDF output schema: a nested tuple of
	 * (chararray document id, bytearray serialized merged DocumentWrapper).
	 */
	@Override
	public Schema outputSchema(Schema p_input) {
		try {
			return Schema.generateNestedSchema(DataType.TUPLE,
					DataType.CHARARRAY, DataType.BYTEARRAY);
		} catch (FrontendException e) {
			logger.error("Error in creating output schema:", e);
			throw new IllegalStateException(e);
		}
	}
	/**
	 * Merges the author list of a document with its ORCID counterpart.
	 * Input tuple: (doc id, serialized DocumentWrapper, serialized ORCID
	 * DocumentWrapper). Returns (doc id, serialized merged DocumentWrapper),
	 * or null for a malformed input; any failure is rethrown as IOException
	 * with the full stack trace in the message.
	 */
	@Override
	public Tuple exec(Tuple input) throws IOException {
		if (input == null || input.size() != 3) {
			return null;
		}
		try {
			myPigStatusReporter = PigStatusReporter.getInstance();
			//load input tuple
			////doi
			String docId = (String) input.get(0);
			////normal document
			DataByteArray dbaD = (DataByteArray) input.get(1);
			////orcid document
			DataByteArray dbaO = (DataByteArray) input.get(2);
			//load input documents
			DocumentWrapper dwD = DocumentWrapper.parseFrom(dbaD.get());
			List<Author> aDL = dwD.getDocumentMetadata().getBasicMetadata().getAuthorList();
			DocumentWrapper dwO = DocumentWrapper.parseFrom(dbaO.get());
			List<Author> aOL = dwO.getDocumentMetadata().getBasicMetadata().getAuthorList();
			//calculate merged author list
			List<Author> aRL = matchAuthors(docId,aDL,aOL);
			//construct resulting document: clone the base document's metadata
			//and replace only its author list with the merged one
			BasicMetadata.Builder bmR = BasicMetadata.newBuilder(DocumentWrapper.newBuilder(dwD).getDocumentMetadata().getBasicMetadata());
			bmR.clearAuthor();
			bmR.addAllAuthor(aRL);
			DocumentMetadata.Builder dmR = DocumentMetadata.newBuilder(DocumentWrapper.newBuilder(dwD).getDocumentMetadata());
			dmR.setBasicMetadata(bmR);
			DocumentWrapper.Builder dwR = DocumentWrapper.newBuilder(dwD);
			dwR.setDocumentMetadata(dmR);
			//construct resulting tuple
			Tuple result = TupleFactory.getInstance().newTuple();
			result.append(docId);
			result.append(new DataByteArray(dwR.build().toByteArray()));
			return result;
		} catch (Exception e) {
			logger.error("Error in processing input row:", e);
			throw new IOException("Caught exception processing input row:\n"
					+ StackTraceExtractor.getStackTrace(e));
		}
	}
protected List<Author> matchAuthors(String docId, List<Author> base,
List<Author> second) {
List<Author> result = new ArrayList<Author>(base.size());
List<Author> secondCopy = new ArrayList<Author>(second);
boolean changedBln = false;
int changedInt = 0;
logger.error("-------------------------------------------");
logger.error("number of base authors: "+base.size()+"\tnumber of orcid authors");
for (Author author : base) {
Author foundAuthor = null;
for (Author secondAuthor : secondCopy) {
if (
equalsIgnoreCaseIgnoreDiacritics(author.getName(), secondAuthor.getName())
||
//equalsIgnoreCaseIgnoreDiacritics(author.getForenames(), secondAuthor.getForenames()) &&
equalsIgnoreCaseIgnoreDiacritics(author.getSurname(), secondAuthor.getSurname())
){
foundAuthor = secondAuthor;
break;
}
}
if (foundAuthor != null) {
result.add(merge(author,foundAuthor));
changedBln = true;
changedInt++;
if(myPigStatusReporter != null){
Counter c = myPigStatusReporter.getCounter("ORCID Enhancement", "Author Enhanced");
if(c!=null){
c.increment(1);
}
}
} else {
result.add(Author.newBuilder(author).build());
}
}
if(changedBln){
logger.info("------------------------------------------");
logger.info("Changed docId:"+docId);
if(myPigStatusReporter != null){
Counter c = myPigStatusReporter.getCounter("ORCID Enhancement", "Document Enhanced");
if(c!=null){
c.increment(1);
}
}
}
logger.error("number of intersections: "+changedInt);
return result;
}
private Author merge(Author author, Author foundAuthor) {
Author.Builder builder = Author.newBuilder(author);
for(KeyValue kv : foundAuthor.getExtIdList()){
if("orcid-author-id".equals(kv.getKey())){
KeyValue.Builder kvb = KeyValue.newBuilder();
kvb.setKey(kv.getKey());
kvb.setValue(kv.getValue());
builder.addExtId(kvb.build());
logger.info("<k:"+kv.getKey()+"; v:"+kv.getValue()+">");
logger.info("<kc:"+kvb.getKey()+"; vc:"+kvb.getValue()+">");
}
}
Author ret = builder.build();
logger.info("<auth:"+ret.toString()+">");
return ret;
}
private boolean equalsIgnoreCaseIgnoreDiacritics(String firstName,
String secondName) {
if (firstName.isEmpty() || secondName.isEmpty()) {
return false;
}
return DiacriticsRemover.removeDiacritics(firstName).equalsIgnoreCase(
DiacriticsRemover.removeDiacritics(secondName));
}
}
| agpl-3.0 |
roskens/opennms-pre-github | opennms-webapp/src/main/java/org/opennms/web/command/StatisticsReportCommand.java | 1870 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2007-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.web.command;
/**
 * Command object for listing a specific statistics report. This object deserializes query params
 * for a specific report, identified by integer ID.
 *
 * @author <a href="mailto:dj@opennms.org">DJ Gregor</a>
 * @version $Id: $
 * @since 1.8.1
 */
public class StatisticsReportCommand {

    /** Identifier of the requested report; {@code null} until bound from the request. */
    private Integer id;

    /**
     * Returns the report identifier.
     *
     * @return the bound report id, or {@code null} if none was supplied
     */
    public Integer getId() {
        return this.id;
    }

    /**
     * Binds the report identifier.
     *
     * @param id the report id taken from the query parameters
     */
    public void setId(Integer id) {
        this.id = id;
    }
}
| agpl-3.0 |
aihua/opennms | core/tasks/src/main/java/org/opennms/core/tasks/AsyncTask.java | 3889 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2007-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.core.tasks;
import org.springframework.util.Assert;
/**
 * A task that wraps an {@link Async} operation and marks itself completed once
 * the asynchronous work has either produced a value or failed.  An optional
 * {@link Callback} is notified of the outcome before completion is reported.
 *
 * @author ranger
 * @version $Id: $
 */
public class AsyncTask<T> extends AbstractTask {

    private final Async<T> m_async;
    private final Callback<T> m_callback;

    /**
     * Creates an async task without a completion callback.
     *
     * @param coordinator a {@link org.opennms.core.tasks.TaskCoordinator} object.
     * @param parent a {@link org.opennms.core.tasks.ContainerTask} object.
     * @param async a {@link org.opennms.core.tasks.Async} object.
     * @param <T> a T object.
     */
    public AsyncTask(TaskCoordinator coordinator, ContainerTask<?> parent, Async<T> async) {
        this(coordinator, parent, async, null);
    }

    /**
     * Creates an async task whose {@code callback} (may be null) is notified
     * of the result or failure before the task is marked as completed.
     *
     * @param coordinator a {@link org.opennms.core.tasks.TaskCoordinator} object.
     * @param parent a {@link org.opennms.core.tasks.ContainerTask} object.
     * @param async a {@link org.opennms.core.tasks.Async} object.
     * @param callback a {@link org.opennms.core.tasks.Callback} object.
     */
    public AsyncTask(TaskCoordinator coordinator, ContainerTask<?> parent, Async<T> async, Callback<T> callback) {
        super(coordinator, parent);
        Assert.notNull(async, "async parameter must not be null");
        m_async = async;
        m_callback = callback;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return String.valueOf(m_async);
    }

    /** {@inheritDoc} */
    @Override
    protected void doSubmit() {
        final Callback<T> completion = completionCallback();
        try {
            m_async.supplyAsyncThenAccept(completion);
        } catch (Throwable t) {
            // the async operation could not even be started - report the failure
            completion.handleException(t);
        }
    }

    /** Tells the coordinator that this task has finished. */
    private void markTaskAsCompleted() {
        getCoordinator().markTaskAsCompleted(this);
    }

    /**
     * Builds the callback handed to the async operation: it forwards the
     * outcome to the user callback (when present) and always marks this task
     * completed afterwards, even when the user callback throws.
     */
    private Callback<T> completionCallback() {
        return new Callback<T>() {
            @Override
            public void accept(T value) {
                try {
                    if (m_callback != null) {
                        m_callback.accept(value);
                    }
                } finally {
                    markTaskAsCompleted();
                }
            }

            @Override
            public T apply(Throwable cause) {
                try {
                    if (m_callback != null) {
                        m_callback.handleException(cause);
                    }
                } finally {
                    markTaskAsCompleted();
                }
                return null;
            }
        };
    }
}
| agpl-3.0 |
OvercastNetwork/ProjectAres | API/ocn/src/main/java/tc/oc/api/ocn/OCNMapService.java | 1908 | package tc.oc.api.ocn;
import java.util.Collection;
import javax.inject.Singleton;
import com.google.common.util.concurrent.ListenableFuture;
import tc.oc.api.docs.MapRating;
import tc.oc.api.docs.virtual.MapDoc;
import tc.oc.api.docs.virtual.UserDoc;
import tc.oc.api.exceptions.NotFound;
import tc.oc.api.http.HttpOption;
import tc.oc.api.maps.MapRatingsRequest;
import tc.oc.api.maps.MapRatingsResponse;
import tc.oc.api.maps.MapService;
import tc.oc.api.maps.UpdateMapsResponse;
import tc.oc.api.model.HttpModelService;
import tc.oc.commons.core.concurrent.FutureUtils;
import tc.oc.commons.core.stream.Collectors;
@Singleton
class OCNMapService extends HttpModelService<MapDoc, MapDoc> implements MapService {
public ListenableFuture<Object> rate(MapRating rating) {
return this.client().post(memberUri(rating.map_id, "rate"), rating, Object.class, HttpOption.INFINITE_RETRY);
}
public ListenableFuture<MapRatingsResponse> getRatings(MapRatingsRequest request) {
return this.client().post(memberUri(request.map_id, "get_ratings"), request, MapRatingsResponse.class, HttpOption.INFINITE_RETRY);
}
public UpdateMapsResponse updateMaps(Collection<? extends MapDoc> maps) {
final ListenableFuture<MapUpdateMultiResponse> future = updateMulti(maps, MapUpdateMultiResponse.class);
return new UpdateMapsResponse(
(ListenableFuture) future,
maps.stream()
.flatMap(MapDoc::authorAndContributorUuids)
.distinct()
.collect(Collectors.mappingTo(uuid -> FutureUtils.mapSync(
future,
response -> {
final UserDoc.Identity user = response.users_by_uuid.get(uuid);
if(user != null) return user;
throw new NotFound();
}
)))
);
}
}
| agpl-3.0 |
serrapos/opencms-core | src-modules/org/opencms/workplace/tools/content/CmsPropertyDelete.java | 15467 | /*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.workplace.tools.content;
import org.opencms.file.CmsProperty;
import org.opencms.file.CmsPropertyDefinition;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsVfsException;
import org.opencms.i18n.CmsMessages;
import org.opencms.jsp.CmsJspActionElement;
import org.opencms.lock.CmsLock;
import org.opencms.main.CmsException;
import org.opencms.main.OpenCms;
import org.opencms.workplace.CmsDialog;
import org.opencms.workplace.CmsWorkplace;
import org.opencms.workplace.CmsWorkplaceSettings;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.PageContext;
/**
 * Provides methods for the delete property definition dialog.<p>
 *
 * The dialog supports a plain delete of the property definition, and a
 * cascading delete which first removes the property value from every resource
 * that still carries it before dropping the definition itself.<p>
 *
 * @since 6.0.0
 */
public class CmsPropertyDelete extends CmsDialog {

    /** Value for the action: delete cascade. */
    public static final int ACTION_DELETE_CASCADE = 100;

    /** Request parameter value for the action: delete cascade. */
    public static final String DIALOG_DELETE_CASCADE = "deletecascade";

    /** The dialog type. */
    public static final String DIALOG_TYPE = "propertydelete";

    /** Request parameter name for the property name. */
    public static final String PARAM_PROPERTYNAME = "propertyname";

    /** Name of the property definition to be deleted, bound from the request. */
    private String m_paramPropertyName;

    /**
     * Public constructor with JSP action element.<p>
     *
     * @param jsp an initialized JSP action element
     */
    public CmsPropertyDelete(CmsJspActionElement jsp) {

        super(jsp);
    }

    /**
     * Public constructor with JSP variables.<p>
     *
     * @param context the JSP page context
     * @param req the JSP request
     * @param res the JSP response
     */
    public CmsPropertyDelete(PageContext context, HttpServletRequest req, HttpServletResponse res) {

        this(new CmsJspActionElement(context, req, res));
    }

    /**
     * Deletes the property definition.<p>
     *
     * Only removes the definition itself; the delete will fail while resources
     * still reference the property (see {@link #actionDeleteCascade()}).
     *
     * @throws JspException if problems including sub-elements occur
     */
    public void actionDelete() throws JspException {

        // save initialized instance of this class in request attribute for included sub-elements
        getJsp().getRequest().setAttribute(SESSION_WORKPLACE_CLASS, this);
        try {
            getCms().deletePropertyDefinition(getParamPropertyName());
            // close the dialog
            actionCloseDialog();
        } catch (Throwable e) {
            // error while deleting property definition, show error dialog
            includeErrorpage(this, e);
        }
    }

    /**
     * Deletes the property definition by cascading the properties on resources.<p>
     *
     * First removes the property value from every resource carrying it (locking
     * and unlocking each resource as needed), then deletes the definition.
     * Aborts with an error listing the offending resources when any of them is
     * locked by another user.
     *
     * @throws JspException if problems including sub-elements occur
     */
    public void actionDeleteCascade() throws JspException {

        // save initialized instance of this class in request attribute for included sub-elements
        getJsp().getRequest().setAttribute(SESSION_WORKPLACE_CLASS, this);
        try {
            // list of all resources containing this propertydefinition
            List resourcesWithProperty = getCms().readResourcesWithProperty(getParamPropertyName());
            // list of all resources locked by another user, containing this propertydefinition
            List resourcesLockedByOtherUser = getResourcesLockedByOtherUser(resourcesWithProperty);
            // do the following operations only if all of the resources are not locked by another user
            if (resourcesLockedByOtherUser.isEmpty()) {
                // save the site root
                String storedSiteRoot = getCms().getRequestContext().getSiteRoot();
                try {
                    // change to the root site so root paths resolve for all sites
                    getCms().getRequestContext().setSiteRoot("/");
                    Iterator i = resourcesWithProperty.iterator();
                    while (i.hasNext()) {
                        CmsResource resource = (CmsResource)i.next();
                        // read the property object
                        CmsProperty property = getCms().readPropertyObject(
                            resource.getRootPath(),
                            getParamPropertyName(),
                            false);
                        // try to delete the property if it is not the NULL PROPERTY
                        // if the property is the NULL PROPERTY, it only had a shared
                        // value which was deleted at a sibling which was already processed
                        if (!property.isNullProperty()) {
                            CmsLock lock = getCms().getLock(resource);
                            if (lock.isUnlocked()) {
                                // lock the resource for the current (Admin) user
                                getCms().lockResource(resource.getRootPath());
                            }
                            property.setStructureValue(CmsProperty.DELETE_VALUE);
                            property.setResourceValue(CmsProperty.DELETE_VALUE);
                            // write the property with the null value to the resource and cascade it from the definition
                            getCms().writePropertyObject(resource.getRootPath(), property);
                            // unlock the resource
                            getCms().unlockResource(resource.getRootPath());
                        }
                    }
                    // delete the property definition at last
                    getCms().deletePropertyDefinition(getParamPropertyName());
                } finally {
                    // restore the siteroot
                    getCms().getRequestContext().setSiteRoot(storedSiteRoot);
                    // close the dialog
                    // NOTE(review): this finally block runs even when the loop above
                    // threw, so the dialog is closed before the error page is
                    // included by the outer catch - confirm this is intended.
                    actionCloseDialog();
                }
            } else {
                // build an error listing the resources locked by other users
                StringBuffer reason = new StringBuffer();
                reason.append(dialogWhiteBoxStart());
                reason.append(buildResourceList(resourcesLockedByOtherUser, true));
                reason.append(dialogWhiteBoxEnd());
                throw new CmsVfsException(Messages.get().container(
                    Messages.ERR_DEL_PROP_RESOURCES_LOCKED_1,
                    reason.toString()));
            }
        } catch (Throwable e) {
            // error while deleting property definition, show error dialog
            includeErrorpage(this, e);
        }
    }

    /**
     * Builds a HTML list of Resources that use the specified property.<p>
     *
     * @throws CmsException if operation was not successful
     *
     * @return the HTML String for the Resource list
     */
    public String buildResourceList() throws CmsException {

        List resourcesWithProperty = getCms().readResourcesWithProperty(getParamPropertyName());
        return buildResourceList(resourcesWithProperty, false);
    }

    /**
     * Builds a HTML list of Resources.<p>
     *
     * Columns: Type, Name, Uri, Value of the property, locked by(optional).<p>
     *
     * NOTE(review): reverses {@code resourceList} in place, i.e. mutates the
     * caller's list - confirm callers do not depend on the original order.
     *
     * @param resourceList a list of resources
     * @param lockInfo a boolean to decide if the locked info should be shown or not
     * @throws CmsException if operation was not successful
     *
     * @return the HTML String for the Resource list
     */
    public String buildResourceList(List resourceList, boolean lockInfo) throws CmsException {

        // reverse the resource list
        Collections.reverse(resourceList);
        CmsMessages messages = Messages.get().getBundle(getLocale());
        StringBuffer result = new StringBuffer();
        result.append("<table border=\"0\" width=\"100%\" cellpadding=\"1\" cellspacing=\"1\">\n");
        result.append("<tr>\n");
        // Type
        result.append("\t<td style=\"width:5%;\" class=\"textbold\">");
        result.append(messages.key(Messages.GUI_INPUT_TYPE_0));
        result.append("</td>\n");
        // Uri
        result.append("\t<td style=\"width:40%;\" class=\"textbold\">");
        result.append(messages.key(Messages.GUI_INPUT_ADRESS_0));
        result.append("</td>\n");
        // Name
        result.append("\t<td style=\"width:25%;\" class=\"textbold\">");
        result.append(messages.key(Messages.GUI_INPUT_TITLE_0));
        result.append("</td>\n");
        if (!lockInfo) {
            // Property value
            result.append("\t<td style=\"width:30%;\" class=\"textbold\">");
            result.append(messages.key(Messages.GUI_INPUT_PROPERTYVALUE_0));
            result.append("</td>\n");
        }
        if (lockInfo) {
            // lock owner column replaces the property value column
            result.append("\t<td style=\"width:30%;\" class=\"textbold\">");
            result.append(messages.key(Messages.GUI_EXPLORER_LOCKEDBY_0));
            result.append("</td>\n");
            result.append("</tr>\n");
        }
        result.append("</tr>\n");
        result.append("<tr><td colspan=\"4\"><span style=\"height: 6px;\"> </span></td></tr>\n");
        // switch to the root site so root paths resolve, restore afterwards
        String storedSiteRoot = getCms().getRequestContext().getSiteRoot();
        try {
            getCms().getRequestContext().setSiteRoot("/");
            Iterator i = resourceList.iterator();
            while (i.hasNext()) {
                CmsResource resource = (CmsResource)i.next();
                String filetype = OpenCms.getResourceManager().getResourceType(resource.getTypeId()).getTypeName();
                result.append("<tr>\n");
                // file type icon
                result.append("\t<td>");
                result.append("<img src=\"");
                result.append(getSkinUri());
                result.append(CmsWorkplace.RES_PATH_FILETYPES);
                result.append(filetype);
                result.append(".gif\">");
                result.append("</td>\n");
                // file address
                result.append("\t<td>");
                result.append(resource.getRootPath());
                result.append("</td>\n");
                // title
                result.append("\t<td>");
                result.append(getJsp().property(CmsPropertyDefinition.PROPERTY_TITLE, resource.getRootPath(), ""));
                result.append("</td>\n");
                // current value of the property
                if (!lockInfo) {
                    result.append("\t<td>");
                    result.append(getJsp().property(getParamPropertyName(), resource.getRootPath()));
                    result.append("</td>\n");
                }
                // locked by user
                if (lockInfo) {
                    CmsLock lock = getCms().getLock(resource);
                    result.append("\t<td>");
                    result.append(getCms().readUser(lock.getUserId()).getName());
                    result.append("</td>\n");
                }
                result.append("</tr>\n");
            }
            result.append("</table>\n");
        } finally {
            getCms().getRequestContext().setSiteRoot(storedSiteRoot);
        }
        return result.toString();
    }

    /**
     * Builds the html for the property definition select box.<p>
     *
     * @param attributes optional attributes for the &lt;select&gt; tag
     * @return the html for the property definition select box
     */
    public String buildSelectProperty(String attributes) {

        return CmsPropertyChange.buildSelectProperty(getCms(), Messages.get().getBundle(getLocale()).key(
            Messages.GUI_PLEASE_SELECT_0), attributes, "");
    }

    /**
     * Returns the value of the propertyname parameter.<p>
     *
     * @return the value of the propertyname parameter
     */
    public String getParamPropertyName() {

        return m_paramPropertyName;
    }

    /**
     * Sets the value of the propertyname parameter.<p>
     *
     * @param paramPropertyName the value of the propertyname parameter
     */
    public void setParamPropertyName(String paramPropertyName) {

        m_paramPropertyName = paramPropertyName;
    }

    /**
     * @see org.opencms.workplace.CmsWorkplace#initWorkplaceRequestValues(org.opencms.workplace.CmsWorkplaceSettings, javax.servlet.http.HttpServletRequest)
     */
    protected void initWorkplaceRequestValues(CmsWorkplaceSettings settings, HttpServletRequest request) {

        // fill the parameter values in the get/set methods
        fillParamValues(request);
        // set the dialog type
        setParamDialogtype(DIALOG_TYPE);
        // set the action for the JSP switch
        if (DIALOG_OK.equals(getParamAction())) {
            setAction(ACTION_OK);
            // OK confirms the plain delete; include the property name in the title
            setParamTitle(Messages.get().getBundle(getLocale()).key(Messages.GUI_TITLE_PROPERTYDELETE_0)
                + ": "
                + getParamPropertyName());
        } else if (DIALOG_CANCEL.equals(getParamAction())) {
            setAction(ACTION_CANCEL);
        } else if (DIALOG_DELETE_CASCADE.equals(getParamAction())) {
            setAction(ACTION_DELETE_CASCADE);
        } else {
            // first call of the dialog: show the default (selection) view
            setAction(ACTION_DEFAULT);
            // build title for change property value dialog
            setParamTitle(Messages.get().getBundle(getLocale()).key(Messages.GUI_TITLE_PROPERTYDELETE_0));
        }
    }

    /**
     * Returns a list of resources that are locked by another user as the current user.<p>
     *
     * @param resourceList the list of all (mixed) resources
     *
     * @return a list of resources that are locked by another user as the current user
     * @throws CmsException if the getLock operation fails
     */
    private List getResourcesLockedByOtherUser(List resourceList) throws CmsException {

        List lockedResourcesByOtherUser = new ArrayList();
        Iterator i = resourceList.iterator();
        while (i.hasNext()) {
            CmsResource resource = (CmsResource)i.next();
            // get the lock state for the resource
            CmsLock lock = getCms().getLock(resource);
            // add this resource to the list if this is locked by another user
            if (!lock.isUnlocked() && !lock.isOwnedBy(getCms().getRequestContext().getCurrentUser())) {
                lockedResourcesByOtherUser.add(resource);
            }
        }
        return lockedResourcesByOtherUser;
    }
}
| lgpl-2.1 |
jia020/portal-core | src/test/java/org/auscope/portal/core/util/TestDOMUtil.java | 4481 | package org.auscope.portal.core.util;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.auscope.portal.core.test.PortalTestClass;
import org.junit.Assert;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
/**
 * Unit tests for DOMUtil
 *
 * @author Josh Vote
 *
 */
public class TestDOMUtil extends PortalTestClass {

    /**
     * Checks that parsing an XML string and serialising the resulting document
     * reproduces the original string exactly, i.e. the two DOMUtil conversion
     * methods invert each other.
     * @throws SAXException
     * @throws IOException
     * @throws ParserConfigurationException
     * @throws TransformerException
     */
    @Test
    public void testReversibleTransformation() throws ParserConfigurationException, IOException, SAXException, TransformerException {
        final String sourceXml = ResourceUtil
                .loadResourceAsString("org/auscope/portal/core/test/xml/TestXML_NoPrettyPrint.xml");
        final Document parsedDoc = DOMUtil.buildDomFromString(sourceXml);
        final String roundTrippedXml = DOMUtil.buildStringFromDom(parsedDoc, false);
        Assert.assertEquals(sourceXml, roundTrippedXml);
    }

    /**
     * Namespace context mapping the prefixes used by
     * src/test/resources/TestXML_NoPrettyPrint.xml onto their URIs.
     *
     * @author vot002
     *
     */
    public class SimpleXMLNamespace implements NamespaceContext {

        /** Fixed prefix-to-URI lookup table used by this context. */
        private final Map<String, String> prefixToUri = Map.of(
                "test", "http://test.namespace",
                "test2", "http://test2.namespace");

        /**
         * This method returns the uri for all prefixes needed.
         *
         * @param prefix the namespace prefix being resolved
         * @return the registered uri, or {@link XMLConstants#NULL_NS_URI} for unknown prefixes
         */
        @Override
        public String getNamespaceURI(final String prefix) {
            if (prefix == null) {
                throw new IllegalArgumentException("No prefix provided!");
            }
            return prefixToUri.getOrDefault(prefix, XMLConstants.NULL_NS_URI);
        }

        @Override
        public String getPrefix(final String namespaceURI) {
            // Not needed in this context.
            return null;
        }

        @Override
        public Iterator<String> getPrefixes(final String namespaceURI) {
            // Not needed in this context.
            return null;
        }
    }

    /**
     * Simple test to ensure that the DOM object is namespace aware: prefixed
     * XPath queries must resolve, unprefixed ones must not.
     * @throws XPathExpressionException
     * @throws IOException
     * @throws SAXException
     * @throws ParserConfigurationException
     */
    @Test
    public void testDOMObjectNamespace() throws XPathExpressionException, IOException, ParserConfigurationException, SAXException {
        // Parse the namespaced test document
        final String sourceXml = ResourceUtil
                .loadResourceAsString("org/auscope/portal/core/test/xml/TestXML_NoPrettyPrint.xml");
        final Document doc = DOMUtil.buildDomFromString(sourceXml);

        // Build a namespace aware XPath evaluator
        final XPath xPath = XPathFactory.newDefaultInstance().newXPath();
        xPath.setNamespaceContext(new SimpleXMLNamespace());

        // Prefixed queries must find the namespaced children
        final Node child1 = (Node) xPath.compile("test:root/test2:child1").evaluate(doc, XPathConstants.NODE);
        Assert.assertNotNull(child1);
        Assert.assertEquals("child1Value", child1.getTextContent());

        final Node child2 = (Node) xPath.compile("test:root/test2:child2").evaluate(doc, XPathConstants.NODE);
        Assert.assertNotNull(child2);
        Assert.assertEquals("child2Value", child2.getTextContent());

        // An unprefixed query must fail to match the namespaced nodes
        final Node unqualified = (Node) xPath.compile("root/child1").evaluate(doc, XPathConstants.NODE);
        Assert.assertNull(unqualified);
    }
}
| lgpl-3.0 |
jia020/portal-core | src/test/java/org/auscope/portal/core/services/responses/vocab/TestConceptFactory.java | 6879 | package org.auscope.portal.core.services.responses.vocab;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathException;
import org.auscope.portal.core.services.namespaces.VocabNamespaceContext;
import org.auscope.portal.core.test.PortalTestClass;
import org.auscope.portal.core.util.DOMUtil;
import org.auscope.portal.core.util.ResourceUtil;
import org.junit.Assert;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
/**
 * Unit tests for ConceptFactory
 *
 * @author Josh Vote
 *
 */
public class TestConceptFactory extends PortalTestClass {

    /**
     * Asserts that the two concept arrays are equal element by element,
     * comparing each concept's full graph (labels, definition and
     * broader/narrower/related links).
     *
     * @param traversedUrns URNs already compared, used to break cycles
     */
    private void assertSameConcept(Concept[] expected, Concept[] actual, List<String> traversedUrns) {
        String errMsg = String.format("%1$s != %2$s", Arrays.toString(expected), Arrays.toString(actual));
        Assert.assertArrayEquals(errMsg, expected, actual);
        for (int i = 0; i < expected.length; i++) {
            assertSameConcept(expected[i], actual[i], traversedUrns);
        }
    }

    /**
     * Asserts that two concepts (and, recursively, their linked concepts)
     * carry identical data.
     *
     * @param traversedUrns URNs already compared, used to break cycles
     */
    private void assertSameConcept(Concept expected, Concept actual, List<String> traversedUrns) {
        String errMsg = String.format("%1$s != %2$s", expected, actual);
        Assert.assertEquals(errMsg, expected, actual);
        Assert.assertEquals(errMsg, expected.getLabel(), actual.getLabel());
        Assert.assertEquals(errMsg, expected.getPreferredLabel(), actual.getPreferredLabel());
        Assert.assertEquals(errMsg, expected.isHref(), actual.isHref());
        Assert.assertEquals(errMsg, expected.getDefinition(), actual.getDefinition());
        // To deal with cycles in the hierarchy: each URN is only descended once
        if (traversedUrns.contains(expected.getUrn())) {
            return;
        }
        traversedUrns.add(expected.getUrn());
        assertSameConcept(expected.getBroader(), actual.getBroader(), traversedUrns);
        assertSameConcept(expected.getNarrower(), actual.getNarrower(), traversedUrns);
        assertSameConcept(expected.getRelated(), actual.getRelated(), traversedUrns);
    }

    /**
     * Asserts that at least one concept in the array carries the given
     * preferred label.  (FIX: replaces two copies of a hand-rolled
     * search-and-flag loop in testGetConcepts.)
     */
    private void assertContainsPreferredLabel(Concept[] concepts, String prefLabel) {
        for (Concept concept : concepts) {
            if (prefLabel.equals(concept.getPreferredLabel())) {
                return;
            }
        }
        Assert.fail("Must contain: " + prefLabel);
    }

    /**
     * Runs the factory through a standard SISSVoc response XML
     * @throws IOException
     * @throws SAXException
     * @throws ParserConfigurationException
     * @throws XPathException
     */
    @Test
    public void testSISSVocRDF() throws IOException, ParserConfigurationException, SAXException, XPathException {
        //Build our expectation
        Concept concept1 = new Concept("urn:concept:1");
        Concept concept2 = new Concept("urn:concept:2");
        Concept concept3 = new Concept("urn:concept:3");
        Concept concept4 = new Concept("urn:concept:4");
        NamedIndividual ni1 = new NamedIndividual("urn:ni:1");
        NamedIndividual ni2 = new NamedIndividual("urn:ni:2");
        NamedIndividual ni3 = new NamedIndividual("urn:ni:3");

        concept1.setNarrower(new Concept[] {concept2, concept3, ni2});
        concept1.setLabel("LabelConcept1");
        concept1.setPreferredLabel("PrefLabelConcept1");

        concept2.setBroader(new Concept[] {concept1});
        concept2.setRelated(new Concept[] {concept3});
        concept2.setLabel("LabelConcept2");
        concept2.setPreferredLabel("PrefLabelConcept2");
        concept2.setDefinition("DefinitionConcept2");

        concept3.setBroader(new Concept[] {concept1});
        concept3.setRelated(new Concept[] {concept2});
        concept3.setNarrower(new Concept[] {ni1});
        concept3.setLabel("LabelConcept3");
        concept3.setPreferredLabel("PrefLabelConcept3");

        concept4.setNarrower(new Concept[] {ni3});
        concept4.setLabel("LabelConcept4");
        concept4.setPreferredLabel("PrefLabelConcept4");
        concept4.setDefinition("DefinitionConcept4");

        ni1.setBroader(new Concept[] {concept3});
        ni1.setLabel("LabelNamedIndividual1");
        ni1.setPreferredLabel("PrefLabelNamedIndividual1");

        ni2.setBroader(new Concept[] {concept1});
        ni2.setLabel("LabelNamedIndividual2");
        ni2.setPreferredLabel("PrefLabelNamedIndividual2");

        ni3.setBroader(new Concept[] {concept4});
        ni3.setLabel("LabelNamedIndividual3");
        ni3.setPreferredLabel("PrefLabelNamedIndividual3");

        Concept[] expectation = new Concept[] {concept1, concept4};

        //Build our actual list
        String responseXml = ResourceUtil
                .loadResourceAsString("org/auscope/portal/core/test/responses/sissvoc/SISSVocResponse.xml");
        Document responseDoc = DOMUtil.buildDomFromString(responseXml);
        Node rdfNode = (Node) DOMUtil.compileXPathExpr("rdf:RDF", new VocabNamespaceContext()).evaluate(responseDoc,
                XPathConstants.NODE);

        ConceptFactory cf = new ConceptFactory();
        Concept[] actualConcepts = cf.parseFromRDF(rdfNode);

        Assert.assertNotNull(actualConcepts);
        assertSameConcept(expectation, actualConcepts, new ArrayList<String>());
    }

    /**
     * This is a legacy test for the older vocabularyServiceResponse.xml
     *
     * It tests our concepts still return EVEN if we don't define top level concepts
     * @throws IOException
     * @throws SAXException
     * @throws ParserConfigurationException
     * @throws XPathException
     */
    @Test
    public void testGetConcepts() throws IOException, ParserConfigurationException, SAXException, XPathException {
        String responseXml = ResourceUtil
                .loadResourceAsString("org/auscope/portal/core/test/responses/sissvoc/vocabularyServiceResponse.xml");
        Document responseDoc = DOMUtil.buildDomFromString(responseXml);
        Node rdfNode = (Node) DOMUtil.compileXPathExpr("rdf:RDF", new VocabNamespaceContext()).evaluate(responseDoc,
                XPathConstants.NODE);

        ConceptFactory cf = new ConceptFactory();
        Concept[] actualConcepts = cf.parseFromRDF(rdfNode);

        Assert.assertEquals("There are 27 concepts", 27, actualConcepts.length);
        assertContainsPreferredLabel(actualConcepts, "Siltstone - concrete aggregate");
        assertContainsPreferredLabel(actualConcepts, "Gneiss - crusher dust");
    }
}
| lgpl-3.0 |
xpqiu/fnlp | fnlp-core/src/main/java/org/fnlp/util/exception/LoadModelException.java | 1512 | /**
* This file is part of FNLP (formerly FudanNLP).
*
* FNLP is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FNLP is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with FudanNLP. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright 2009-2014 www.fnlp.org. All rights reserved.
*/
package org.fnlp.util.exception;
import java.io.FileNotFoundException;
import java.io.IOException;
public class LoadModelException extends Exception {
private static final long serialVersionUID = -3933859344026018386L;
public LoadModelException(Exception e, String file) {
super(e);
if( e instanceof FileNotFoundException) {
System.out.println("模型文件不存在: "+ file);
} else if (e instanceof ClassNotFoundException) {
System.out.println("模型文件版本错误。");
} else if (e instanceof IOException) {
System.out.println("模型文件读入错误: "+file);
}
e.printStackTrace();
}
public LoadModelException(String msg) {
super(msg);
printStackTrace();
}
} | lgpl-3.0 |
okalmanRH/jboss-activemq-artemis | tests/joram-tests/src/test/java/org/apache/activemq/artemis/common/AbstractAdmin.java | 9253 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.common;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.client.ActiveMQClient;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientRequestor;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.api.core.management.ManagementHelper;
import org.apache.activemq.artemis.api.core.management.ResourceNames;
import org.apache.activemq.artemis.core.remoting.impl.netty.NettyConnectorFactory;
import org.apache.activemq.artemis.tests.util.SpawnedVMSupport;
import org.junit.Assert;
import org.objectweb.jtests.jms.admin.Admin;
/**
 * AbstractAdmin.
 *
 * Base {@link Admin} implementation used by the joram tests. It drives an
 * ActiveMQ Artemis broker through the management address (queue/topic
 * creation and destruction) and can start/stop the broker either in-process
 * or in a spawned VM, depending on {@link #spawnServer}.
 */
public class AbstractAdmin implements Admin {

   protected ClientSession clientSession;

   protected ClientRequestor requestor;

   // whether serverStart()/serverStop() actually act (see SERVER_LIVE_CYCLE_PROPERTY)
   protected boolean serverLifeCycleActive;

   // only set when the broker runs in a spawned VM
   protected Process serverProcess;

   protected ServerLocator serverLocator;

   protected ClientSessionFactory sf;

   // this is a constant to control if we should use a separate VM for the server.
   public static final boolean spawnServer = false;

   /**
    * Determines whether to act or 'no-op' on serverStart() and
    * serverStop(). This is used when testing combinations of client and
    * servers with different versions.
    */
   private static final String SERVER_LIVE_CYCLE_PROPERTY = "org.apache.activemq.artemis.jms.ActiveMQAMQPAdmin.serverLifeCycle";

   public AbstractAdmin() {
      serverLifeCycleActive = Boolean.valueOf(System.getProperty(SERVER_LIVE_CYCLE_PROPERTY, "true"));
   }

   @Override
   public String getName() {
      return getClass().getName();
   }

   /**
    * Opens the management session and requestor against a local Netty acceptor.
    */
   @Override
   public void start() throws Exception {
      serverLocator = ActiveMQClient.createServerLocatorWithoutHA(new TransportConfiguration(NettyConnectorFactory.class.getName()));
      sf = serverLocator.createSessionFactory();
      clientSession = sf.createSession(ActiveMQDefaultConfiguration.getDefaultClusterUser(), ActiveMQDefaultConfiguration.getDefaultClusterPassword(), false, true, true, false, 1);
      requestor = new ClientRequestor(clientSession, ActiveMQDefaultConfiguration.getDefaultManagementAddress());
      clientSession.start();
   }

   /**
    * Closes the requestor, session factory and locator opened by {@link #start()}.
    */
   @Override
   public void stop() throws Exception {
      requestor.close();

      if (sf != null) {
         sf.close();
      }

      if (serverLocator != null) {
         serverLocator.close();
      }

      sf = null;
      serverLocator = null;
   }

   @Override
   public Context createContext() throws NamingException {
      return new InitialContext();
   }

   @Override
   public void createConnectionFactory(final String name) {
      throw new RuntimeException("FIXME NYI createConnectionFactory");
   }

   @Override
   public void deleteConnectionFactory(final String name) {
      throw new RuntimeException("FIXME NYI deleteConnectionFactory");
   }

   @Override
   public void createQueue(final String name) {
      invokeBooleanOperation("createQueue", name, name);
   }

   @Override
   public void deleteQueue(final String name) {
      invokeBooleanOperation("destroyQueue", name);
   }

   @Override
   public void createQueueConnectionFactory(final String name) {
      createConnectionFactory(name);
   }

   @Override
   public void deleteQueueConnectionFactory(final String name) {
      deleteConnectionFactory(name);
   }

   @Override
   public void createTopic(final String name) {
      invokeBooleanOperation("createTopic", name, name);
   }

   @Override
   public void deleteTopic(final String name) {
      invokeBooleanOperation("destroyTopic", name);
   }

   @Override
   public void createTopicConnectionFactory(final String name) {
      createConnectionFactory(name);
   }

   @Override
   public void deleteTopicConnectionFactory(final String name) {
      deleteConnectionFactory(name);
   }

   /**
    * Invokes a JMS server management operation that reports success as a
    * Boolean and asserts that it succeeded. Shared by the queue/topic
    * create/destroy methods, which previously duplicated this logic.
    *
    * @throws IllegalStateException wrapping any invocation failure
    */
   private void invokeBooleanOperation(final String operationName, final Object... parameters) {
      Boolean result;
      try {
         result = (Boolean) invokeSyncOperation(ResourceNames.JMS_SERVER, operationName, parameters);
         Assert.assertEquals(true, result.booleanValue());
      } catch (Exception e) {
         throw new IllegalStateException(e);
      }
   }

   /**
    * Starts the broker. When {@link #spawnServer} is set, a separate VM is
    * forked and its stdout is scanned for an OK/KO handshake line.
    */
   @Override
   public void startServer() throws Exception {
      if (!serverLifeCycleActive) {
         return;
      }

      if (spawnServer) {
         String[] vmArgs = new String[]{};
         serverProcess = SpawnedVMSupport.spawnVM(SpawnedJMSServer.class.getName(), vmArgs, false);
         InputStreamReader isr = new InputStreamReader(serverProcess.getInputStream());

         final BufferedReader br = new BufferedReader(isr);
         String line = null;
         while ((line = br.readLine()) != null) {
            System.out.println("SERVER: " + line);
            if ("OK".equals(line.trim())) {
               // Keep forwarding the spawned server's output in the background.
               new Thread() {
                  @Override
                  public void run() {
                     try {
                        String line1 = null;
                        while ((line1 = br.readLine()) != null) {
                           System.out.println("SERVER: " + line1);
                        }
                     } catch (Exception e) {
                        e.printStackTrace();
                     }
                  }
               }.start();
               return;
            } else if ("KO".equals(line.trim())) {
               // something went wrong with the server, destroy it:
               serverProcess.destroy();
               throw new IllegalStateException("Unable to start the spawned server :" + line);
            }
         }
      } else {
         SpawnedJMSServer.startServer();
      }
   }

   /**
    * Stops the broker, either by sending STOP to the spawned VM's stdin or by
    * stopping the in-process server.
    */
   @Override
   public void stopServer() throws Exception {
      if (!serverLifeCycleActive) {
         return;
      }
      if (spawnServer) {
         OutputStreamWriter osw = new OutputStreamWriter(serverProcess.getOutputStream());
         osw.write("STOP\n");
         osw.flush();
         int exitValue = serverProcess.waitFor();
         if (exitValue != 0) {
            serverProcess.destroy();
         }
      } else {
         SpawnedJMSServer.stopServer();
      }
   }

   /**
    * Sends a management operation over the requestor and returns its result.
    *
    * @throws IllegalStateException if the request fails, times out, or the
    *                               operation reports failure
    */
   protected Object invokeSyncOperation(final String resourceName,
                                        final String operationName,
                                        final Object... parameters) throws Exception {
      ClientMessage message = clientSession.createMessage(false);
      ManagementHelper.putOperationInvocation(message, resourceName, operationName, parameters);
      ClientMessage reply;
      try {
         reply = requestor.request(message, 3000);
      } catch (Exception e) {
         throw new IllegalStateException("Exception while invoking " + operationName + " on " + resourceName, e);
      }
      if (reply == null) {
         throw new IllegalStateException("no reply received when invoking " + operationName + " on " + resourceName);
      }
      if (!ManagementHelper.hasOperationSucceeded(reply)) {
         throw new IllegalStateException("operation failed when invoking " + operationName +
                                            " on " +
                                            resourceName +
                                            ": " +
                                            ManagementHelper.getResult(reply));
      }
      return ManagementHelper.getResult(reply);
   }
}
| apache-2.0 |
apache/qpid-jms | qpid-jms-client/src/test/java/org/apache/qpid/jms/integration/JMSContextIntegrationTest.java | 7660 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.qpid.jms.integration;
import static org.apache.qpid.jms.provider.amqp.AmqpSupport.ANONYMOUS_RELAY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.util.UUID;
import javax.jms.JMSContext;
import javax.jms.JMSProducer;
import org.apache.qpid.jms.test.QpidJmsTestCase;
import org.apache.qpid.jms.test.testpeer.TestAmqpPeer;
import org.apache.qpid.proton.amqp.Binary;
import org.apache.qpid.proton.amqp.Symbol;
import org.junit.Test;
/**
 * Integration tests for the JMSContext API, driven against a scripted test
 * AMQP peer rather than a real broker.
 */
public class JMSContextIntegrationTest extends QpidJmsTestCase {

    private final IntegrationTestFixture testFixture = new IntegrationTestFixture();

    // Offered server capabilities advertising an anonymous relay; constant-named,
    // so declared static final (was a mutable instance field).
    private static final Symbol[] SERVER_ANONYMOUS_RELAY = new Symbol[]{ANONYMOUS_RELAY};

    @Test(timeout = 20000)
    public void testCreateAndCloseContext() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextWithClientId() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, false, null, null, null, true);
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextAndSetClientID() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, false, null, null, null, false);
            context.setClientID(UUID.randomUUID().toString());
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateAutoAckSessionByDefault() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            testPeer.expectBegin();
            context.createTopic("TopicName");
            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextWithTransactedSessionMode() throws Exception {
        Binary txnId = new Binary(new byte[]{(byte) 5, (byte) 6, (byte) 7, (byte) 8});

        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, JMSContext.SESSION_TRANSACTED);
            assertEquals(JMSContext.SESSION_TRANSACTED, context.getSessionMode());

            // Session should be created and a coordinator should be attached since this
            // should be a TX session, then a new TX is declared, once closed the TX should
            // be discharged as a roll back.
            testPeer.expectBegin();
            testPeer.expectCoordinatorAttach();
            testPeer.expectDeclare(txnId);
            testPeer.expectDischarge(txnId, true);
            testPeer.expectEnd();
            testPeer.expectClose();

            context.createTopic("TopicName");
            context.close();

            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextFromContextWithSessionsActive() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            testPeer.expectBegin();
            context.createTopic("TopicName");

            // Create a second should not create a new session yet, once a new connection is
            // create on demand then close of the second context should only close the session
            JMSContext other = context.createContext(JMSContext.CLIENT_ACKNOWLEDGE);
            assertEquals(JMSContext.CLIENT_ACKNOWLEDGE, other.getSessionMode());
            testPeer.expectBegin();
            testPeer.expectEnd();
            other.createTopic("TopicName");
            other.close();
            testPeer.waitForAllHandlersToComplete(1000);

            // Now the connection should close down.
            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testOnlyOneProducerCreatedInSingleContext() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, SERVER_ANONYMOUS_RELAY);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            testPeer.expectBegin();
            testPeer.expectSenderAttach();

            // One producer created should send an attach.
            JMSProducer producer1 = context.createProducer();
            assertNotNull(producer1);

            // An additional one should not result in an attach
            JMSProducer producer2 = context.createProducer();
            assertNotNull(producer2);

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();

            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testEachContextGetsItsOwnProducer() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, SERVER_ANONYMOUS_RELAY);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            testPeer.expectBegin();
            testPeer.expectSenderAttach();
            testPeer.expectBegin();
            testPeer.expectSenderAttach();

            // One producer created should send an attach.
            JMSProducer producer1 = context.createProducer();
            assertNotNull(producer1);

            // An additional one should not result in an attach
            JMSContext other = context.createContext(JMSContext.AUTO_ACKNOWLEDGE);
            JMSProducer producer2 = other.createProducer();
            assertNotNull(producer2);

            testPeer.expectEnd();
            testPeer.expectEnd();
            testPeer.expectClose();

            other.close();
            context.close();

            testPeer.waitForAllHandlersToComplete(1000);
        }
    }
}
| apache-2.0 |
objectiser/camel | components/camel-elasticsearch-rest/src/test/java/org/apache/camel/component/elasticsearch/ElasticsearchRestComponentVerifierExtensionTest.java | 2756 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.elasticsearch;
import java.util.HashMap;
import java.util.Map;
import org.apache.camel.Component;
import org.apache.camel.component.extension.ComponentVerifierExtension;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests the {@link ComponentVerifierExtension} of the elasticsearch-rest
 * component: parameter validation alone succeeds, while connectivity checks
 * against a non-listening endpoint fail.
 */
public class ElasticsearchRestComponentVerifierExtensionTest extends CamelTestSupport {

    // *************************************************
    // Tests (parameters)
    // *************************************************

    @Override
    public boolean isUseRouteBuilder() {
        // The verifier is exercised directly; no routes are needed.
        return false;
    }

    /**
     * Looks up the verifier extension of the elasticsearch-rest component.
     * Shared by both tests, which previously duplicated this lookup.
     */
    private ComponentVerifierExtension verifierExtension() {
        Component component = context().getComponent("elasticsearch-rest");
        return component.getExtension(ComponentVerifierExtension.class).orElseThrow(IllegalStateException::new);
    }

    @Test
    public void testParameters() throws Exception {
        ComponentVerifierExtension verifier = verifierExtension();

        Map<String, Object> parameters = new HashMap<>();
        parameters.put("hostAddresses", "http://localhost:9000");
        parameters.put("clusterName", "es-test");

        // All mandatory options present, so static parameter validation must pass.
        ComponentVerifierExtension.Result result = verifier.verify(ComponentVerifierExtension.Scope.PARAMETERS, parameters);
        Assert.assertEquals(ComponentVerifierExtension.Result.Status.OK, result.getStatus());
    }

    @Test
    public void testConnectivity() throws Exception {
        ComponentVerifierExtension verifier = verifierExtension();

        Map<String, Object> parameters = new HashMap<>();
        parameters.put("hostAddresses", "http://localhost:9000");

        // Nothing listens on this address, so the connectivity probe must fail.
        ComponentVerifierExtension.Result result = verifier.verify(ComponentVerifierExtension.Scope.CONNECTIVITY, parameters);
        Assert.assertEquals(ComponentVerifierExtension.Result.Status.ERROR, result.getStatus());
    }
}
| apache-2.0 |
creamer/cas | core/cas-server-core-services/src/main/java/org/apereo/cas/authentication/DefaultMultifactorTriggerSelectionStrategy.java | 4960 | package org.apereo.cas.authentication;
import com.google.common.base.Splitter;
import org.apereo.cas.authentication.principal.Principal;
import org.apereo.cas.services.MultifactorAuthenticationProvider;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.services.RegisteredServiceMultifactorPolicy;
import org.apereo.cas.util.CollectionUtils;
import org.springframework.util.StringUtils;
import javax.servlet.http.HttpServletRequest;
import java.util.Collection;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
 * Default MFA Trigger selection strategy. This strategy looks for valid triggers in the following order: request
 * parameter, RegisteredService policy, principal attribute.
 *
 * @author Daniel Frett
 * @since 5.0.0
 */
public class DefaultMultifactorTriggerSelectionStrategy implements MultifactorTriggerSelectionStrategy {
    private static final Splitter ATTR_NAMES = Splitter.on(',').trimResults().omitEmptyStrings();

    /** Name of the request parameter carrying an opt-in provider id. */
    private final String requestParameter;

    /** Comma-separated principal attribute names that may trigger MFA globally. */
    private final String globalPrincipalAttributeNameTriggers;

    public DefaultMultifactorTriggerSelectionStrategy(final String attributeNameTriggers, final String requestParameter) {
        this.globalPrincipalAttributeNameTriggers = attributeNameTriggers;
        this.requestParameter = requestParameter;
    }

    /**
     * Resolves the MFA provider to use, trying in order: request parameter,
     * registered-service policy, then global principal attribute triggers.
     * Returns {@link Optional#empty()} when no trigger matches an available provider.
     */
    @Override
    public Optional<String> resolve(final Collection<MultifactorAuthenticationProvider> providers,
                                    final HttpServletRequest request, final RegisteredService service, final Principal principal) {
        Optional<String> provider = Optional.empty();

        // short-circuit if we don't have any available MFA providers
        if (providers == null || providers.isEmpty()) {
            return provider;
        }
        final Set<String> validProviderIds = providers.stream()
                .map(MultifactorAuthenticationProvider::getId)
                .collect(Collectors.toSet());

        // check for an opt-in provider id parameter trigger, we only care about the first value
        if (!provider.isPresent() && request != null) {
            provider = Optional.ofNullable(request.getParameter(requestParameter))
                    .filter(validProviderIds::contains);
        }

        // check for a RegisteredService configured trigger
        if (!provider.isPresent() && service != null) {
            final RegisteredServiceMultifactorPolicy policy = service.getMultifactorPolicy();
            if (shouldApplyRegisteredServiceMultifactorPolicy(policy, principal)) {
                provider = policy.getMultifactorAuthenticationProviders().stream()
                        .filter(validProviderIds::contains)
                        .findFirst();
            }
        }

        // check for principal attribute trigger
        if (!provider.isPresent() && principal != null
                && StringUtils.hasText(globalPrincipalAttributeNameTriggers)) {
            provider = StreamSupport.stream(ATTR_NAMES.split(globalPrincipalAttributeNameTriggers).spliterator(), false)
                    // principal.getAttribute(name).values
                    .map(principal.getAttributes()::get).filter(Objects::nonNull)
                    .map(CollectionUtils::toCollection).flatMap(Set::stream)
                    // validProviderIds.contains((String) value)
                    .filter(String.class::isInstance).map(String.class::cast).filter(validProviderIds::contains)
                    .findFirst();
        }

        // return the resolved trigger
        return provider;
    }

    /**
     * Decides whether the registered-service MFA policy applies: it always
     * applies when no principal attribute name/value restriction is defined or
     * no principal is available; otherwise at least one of the named attributes
     * must match the configured value pattern.
     */
    private static boolean shouldApplyRegisteredServiceMultifactorPolicy(final RegisteredServiceMultifactorPolicy policy, final Principal principal) {
        final String attrName = policy.getPrincipalAttributeNameTrigger();
        final String attrValue = policy.getPrincipalAttributeValueToMatch();

        // Principal attribute name and/or value is not defined
        if (!StringUtils.hasText(attrName) || !StringUtils.hasText(attrValue)) {
            return true;
        }

        // no Principal, we should enforce policy
        if (principal == null) {
            return true;
        }

        // check to see if any of the specified attributes match the attrValue pattern
        final Predicate<String> attrValuePredicate = Pattern.compile(attrValue).asPredicate();
        return StreamSupport.stream(ATTR_NAMES.split(attrName).spliterator(), false)
                .map(principal.getAttributes()::get)
                .filter(Objects::nonNull)
                .map(CollectionUtils::toCollection)
                .flatMap(Set::stream)
                .filter(String.class::isInstance)
                .map(String.class::cast)
                .anyMatch(attrValuePredicate);
    }
}
| apache-2.0 |
apache/jmeter | src/core/src/main/java/org/apache/jmeter/gui/HtmlReportAction.java | 2972 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.gui;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.util.HashSet;
import java.util.Set;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.MenuElement;
import org.apache.jmeter.exceptions.IllegalUserActionException;
import org.apache.jmeter.gui.action.AbstractAction;
import org.apache.jmeter.gui.action.ActionNames;
import org.apache.jmeter.gui.action.ActionRouter;
import org.apache.jmeter.gui.plugin.MenuCreator;
import org.apache.jmeter.util.JMeterUtils;
/**
 * JMeter action/menu contribution that opens the HTML report generation
 * dialog from the Tools menu.
 */
public class HtmlReportAction extends AbstractAction implements MenuCreator {
    // Action names handled by this action; populated once, hence static final.
    private static final Set<String> commands = new HashSet<>();
    private HtmlReportUI htmlReportPanel;

    static {
        commands.add(ActionNames.HTML_REPORT);
    }

    public HtmlReportAction() {
        super();
    }

    @Override
    public void doAction(ActionEvent e) throws IllegalUserActionException {
        htmlReportPanel = new HtmlReportUI();
        htmlReportPanel.showInputDialog(getParentFrame(e));
    }

    @Override
    public Set<String> getActionNames() {
        return commands;
    }

    @Override
    public JMenuItem[] getMenuItemsAtLocation(MENU_LOCATION location) {
        if (location != MENU_LOCATION.TOOLS) {
            return new JMenuItem[0];
        }
        // Use the action name as resource key because the action name is used by JMeterMenuBar too when changing languages.
        JMenuItem menuItem = new JMenuItem(JMeterUtils.getResString(ActionNames.HTML_REPORT), KeyEvent.VK_UNDEFINED);
        menuItem.setName(ActionNames.HTML_REPORT);
        menuItem.setActionCommand(ActionNames.HTML_REPORT);
        menuItem.setAccelerator(null);
        menuItem.addActionListener(ActionRouter.getInstance());
        return new JMenuItem[] { menuItem };
    }

    @Override
    public JMenu[] getTopLevelMenus() {
        return new JMenu[0];
    }

    @Override
    public boolean localeChanged(MenuElement menu) {
        return false;
    }

    @Override
    public void localeChanged() {
        // NOOP
    }

    public HtmlReportUI getHtmlReportPanel() {
        return htmlReportPanel;
    }
}
| apache-2.0 |
camunda/camunda-bpmn-model | src/main/java/org/camunda/bpm/model/bpmn/impl/instance/camunda/CamundaConnectorImpl.java | 3445 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.model.bpmn.impl.instance.camunda;
import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.CAMUNDA_ELEMENT_CONNECTOR;
import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.CAMUNDA_NS;
import org.camunda.bpm.model.bpmn.impl.instance.BpmnModelElementInstanceImpl;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaConnector;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaConnectorId;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaInputOutput;
import org.camunda.bpm.model.xml.ModelBuilder;
import org.camunda.bpm.model.xml.impl.instance.ModelTypeInstanceContext;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder.ModelTypeInstanceProvider;
import org.camunda.bpm.model.xml.type.child.ChildElement;
import org.camunda.bpm.model.xml.type.child.SequenceBuilder;
/**
 * The BPMN connector camunda extension element.
 *
 * Model implementation backing the {@code camunda:connector} extension
 * element, holding a required connector id and an optional input/output
 * mapping.
 *
 * @author Sebastian Menski
 */
public class CamundaConnectorImpl extends BpmnModelElementInstanceImpl implements CamundaConnector {

  // Child-element accessors; assigned during registerType() and shared by all
  // instances of this model element type.
  protected static ChildElement<CamundaConnectorId> camundaConnectorIdChild;
  protected static ChildElement<CamundaInputOutput> camundaInputOutputChild;

  /**
   * Registers this element type and its child-element sequence with the model
   * builder. Must run before instances are created, since it initializes the
   * static child-element accessors used by the getters/setters below.
   */
  public static void registerType(ModelBuilder modelBuilder) {
    ModelElementTypeBuilder typeBuilder = modelBuilder.defineType(CamundaConnector.class, CAMUNDA_ELEMENT_CONNECTOR)
      .namespaceUri(CAMUNDA_NS)
      .instanceProvider(new ModelTypeInstanceProvider<CamundaConnector>() {
        public CamundaConnector newInstance(ModelTypeInstanceContext instanceContext) {
          return new CamundaConnectorImpl(instanceContext);
        }
      });

    SequenceBuilder sequenceBuilder = typeBuilder.sequence();

    // connectorId is mandatory per the camunda extension schema
    camundaConnectorIdChild = sequenceBuilder.element(CamundaConnectorId.class)
      .required()
      .build();

    // inputOutput mapping is optional
    camundaInputOutputChild = sequenceBuilder.element(CamundaInputOutput.class)
      .build();

    typeBuilder.build();
  }

  public CamundaConnectorImpl(ModelTypeInstanceContext instanceContext) {
    super(instanceContext);
  }

  /** Returns the required connector id child element. */
  public CamundaConnectorId getCamundaConnectorId() {
    return camundaConnectorIdChild.getChild(this);
  }

  public void setCamundaConnectorId(CamundaConnectorId camundaConnectorId) {
    camundaConnectorIdChild.setChild(this, camundaConnectorId);
  }

  /** Returns the optional input/output mapping child element, or null if absent. */
  public CamundaInputOutput getCamundaInputOutput() {
    return camundaInputOutputChild.getChild(this);
  }

  public void setCamundaInputOutput(CamundaInputOutput camundaInputOutput) {
    camundaInputOutputChild.setChild(this, camundaInputOutput);
  }

}
| apache-2.0 |
Soya93/Extract-Refactoring | python/educational-core/student/src/com/jetbrains/edu/learning/stepic/StudyCoursesUpdater.java | 2161 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.edu.learning.stepic;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupActivity;
import com.jetbrains.edu.learning.courseGeneration.StudyProjectGenerator;
import org.jetbrains.annotations.NotNull;
import java.util.List;
/**
 * Post-startup activity that refreshes the cached list of Stepik courses on a
 * pooled background thread whenever the local course cache is empty.
 */
public class StudyCoursesUpdater implements StartupActivity {

  /**
   * Finds this activity among the registered post-startup extensions.
   *
   * @throws UnsupportedOperationException if the extension is not registered
   */
  public static StudyCoursesUpdater getInstance() {
    for (StartupActivity activity : Extensions.getExtensions(StartupActivity.POST_STARTUP_ACTIVITY)) {
      if (activity instanceof StudyCoursesUpdater) {
        return (StudyCoursesUpdater) activity;
      }
    }
    throw new UnsupportedOperationException("could not find self");
  }

  @Override
  public void runActivity(@NotNull final Project project) {
    final Application application = ApplicationManager.getApplication();
    // Skip in unit tests and when the cache already holds courses.
    if (application.isUnitTestMode() || !checkNeeded()) {
      return;
    }
    application.executeOnPooledThread(new Runnable() {
      @Override
      public void run() {
        StudyProjectGenerator.flushCache(EduStepicConnector.getCourses());
      }
    });
  }

  /** Returns true when the course cache is empty and a refresh is required. */
  public static boolean checkNeeded() {
    return StudyProjectGenerator.getCoursesFromCache().isEmpty();
  }
}
| apache-2.0 |
AlexMinsk/camunda-bpm-platform | engine/src/test/java/org/camunda/bpm/engine/test/api/runtime/util/IncrementCounterListener.java | 977 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.runtime.util;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.ExecutionListener;
/**
 * Execution listener used by tests to observe how many times the engine has
 * invoked it. The tally lives in a public static field so test code can read
 * (and reset) it between runs; access is not synchronized.
 *
 * @author: Johannes Heinemann
 */
public class IncrementCounterListener implements ExecutionListener {

    /** Number of {@link #notify(DelegateExecution)} calls seen so far. */
    public static int counter = 0;

    @Override
    public void notify(DelegateExecution execution) throws Exception {
        // The execution itself is irrelevant; only the invocation count matters.
        counter = counter + 1;
    }
}
| apache-2.0 |
sbryzak/DeltaSpike | deltaspike/core/api/src/test/java/org/apache/deltaspike/test/api/config/ConfigResolverTest.java | 1482 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.deltaspike.test.api.config;
import org.apache.deltaspike.core.api.config.ConfigResolver;
import org.junit.Assert;
import org.junit.Test;
import java.util.List;
/**
 * Tests for {@link ConfigResolver}: how values for the same key from multiple
 * config sources are overruled and ordered.
 */
public class ConfigResolverTest
{
    /**
     * A single-value lookup returns the overruling value: the "test" key
     * resolves to "test2", not "test1".
     */
    @Test
    public void testOverruledValue()
    {
        String result = ConfigResolver.getPropertyValue("test");
        Assert.assertEquals("test2", result);
    }

    /**
     * All values for a key are returned with the overruling value last,
     * so ordering is from lowest to highest precedence.
     */
    @Test
    public void testOrderOfAllValues()
    {
        List<String> result = ConfigResolver.getAllPropertyValues("test");
        Assert.assertEquals(2, result.size());
        Assert.assertEquals("test1", result.get(0));
        Assert.assertEquals("test2", result.get(1));
    }
}
| apache-2.0 |
spring-projects/spring-loaded | testdata/src/main/java/foo/ControllerB2.java | 162 | package foo;
/**
 * spring-loaded reload-test fixture: a controller whose {@link #foo()} calls
 * through to the superclass and then prints a marker line the harness can
 * observe after a hot reload.
 *
 * NOTE(review): the message says "ControllerB.foo()" although this class is
 * ControllerB2 -- presumably this file is the "reloaded" version of ControllerB
 * and the test asserts on this exact output, so the string must not be changed.
 */
public class ControllerB2 extends grails.TopB {
    public void foo() {
        super.foo();
        System.out.println("ControllerB.foo() running again!");
    }
}
| apache-2.0 |
rhusar/undertow | core/src/main/java/io/undertow/conduits/ReadTimeoutStreamSourceConduit.java | 8603 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.conduits;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.TimeUnit;
import io.undertow.UndertowLogger;
import io.undertow.UndertowMessages;
import io.undertow.UndertowOptions;
import io.undertow.server.OpenListener;
import io.undertow.util.WorkerUtils;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.IoUtils;
import org.xnio.Options;
import org.xnio.StreamConnection;
import org.xnio.XnioExecutor;
import org.xnio.channels.ReadTimeoutException;
import org.xnio.channels.StreamSinkChannel;
import org.xnio.conduits.AbstractStreamSourceConduit;
import org.xnio.conduits.ConduitStreamSourceChannel;
import org.xnio.conduits.ReadReadyHandler;
import org.xnio.conduits.StreamSourceConduit;
/**
 * Wrapper for read timeout. This should always be the first wrapper applied to the underlying channel.
 *
 * <p>Every read records activity and (re)arms a deadline; a scheduled task fires
 * after the deadline and, if no activity bumped it, notifies listeners and closes
 * the connection. Later reads then fail with a {@link ReadTimeoutException}.
 *
 * @author Stuart Douglas
 * @see org.xnio.Options#READ_TIMEOUT
 */
public final class ReadTimeoutStreamSourceConduit extends AbstractStreamSourceConduit<StreamSourceConduit> {

    // Pending expiry task, null when none is scheduled.
    // NOTE(review): not volatile -- presumably only mutated from the connection's
    // I/O thread and the reading thread without contention; confirm.
    private XnioExecutor.Key handle;

    private final StreamConnection connection;

    // Absolute wall-clock deadline in ms; -1 means "no deadline armed".
    private volatile long expireTime = -1;

    private final OpenListener openListener;

    private static final int FUZZ_FACTOR = 50; //we add 50ms to the timeout to make sure the underlying channel has actually timed out

    // Set once the connection has been closed for inactivity; guarded by synchronized(this).
    private volatile boolean expired;

    // Runs on the I/O thread after the timeout elapses. If the deadline was bumped
    // by intervening reads it re-schedules itself; otherwise it marks the conduit
    // expired, invokes read/write listeners (so they can observe the timeout), and
    // only then closes the connection.
    private final Runnable timeoutCommand = new Runnable() {
        @Override
        public void run() {
            handle = null;
            if (expireTime == -1) {
                return;
            }
            long current = System.currentTimeMillis();
            if (current < expireTime) {
                //timeout has been bumped, re-schedule
                handle = WorkerUtils.executeAfter(connection.getIoThread(), timeoutCommand, (expireTime - current) + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
                return;
            }
            UndertowLogger.REQUEST_LOGGER.tracef("Timing out channel %s due to inactivity", connection.getSourceChannel());
            synchronized (ReadTimeoutStreamSourceConduit.this) {
                expired = true;
            }
            boolean readResumed = connection.getSourceChannel().isReadResumed();
            ChannelListener<? super ConduitStreamSourceChannel> readListener = connection.getSourceChannel().getReadListener();
            if (readResumed) {
                ChannelListeners.invokeChannelListener(connection.getSourceChannel(), readListener);
            }
            if (connection.getSinkChannel().isWriteResumed()) {
                ChannelListeners.invokeChannelListener(connection.getSinkChannel(), connection.getSinkChannel().getWriteListener());
            }
            // close only after invoking listeners, to allow space for listener getting ReadTimeoutException
            IoUtils.safeClose(connection);
        }
    };

    public ReadTimeoutStreamSourceConduit(final StreamSourceConduit delegate, StreamConnection connection, OpenListener openListener) {
        super(delegate);
        this.connection = connection;
        this.openListener = openListener;
        // Wrap the delegate's ready handler so termination also cancels any pending
        // timeout task before the usual handling runs.
        final ReadReadyHandler handler = new ReadReadyHandler.ChannelListenerHandler<>(connection.getSourceChannel());
        delegate.setReadReadyHandler(new ReadReadyHandler() {
            @Override
            public void readReady() {
                handler.readReady();
            }

            @Override
            public void forceTermination() {
                cleanup();
                handler.forceTermination();
            }

            @Override
            public void terminated() {
                cleanup();
                handler.terminated();
            }
        });
    }

    /**
     * Records read activity and (re)arms the timeout.
     *
     * <p>EOF or a closed connection cancels the pending timeout. A zero-byte read
     * after the deadline closes the connection and throws the read-timed-out
     * exception. Any other outcome pushes the deadline forward by the effective
     * timeout and schedules the expiry task if none is pending.
     *
     * @param ret byte count returned by the wrapped read (-1 on EOF)
     */
    private void handleReadTimeout(final long ret) throws IOException {
        if (!connection.isOpen()) {
            cleanup();
            return;
        }
        if (ret == -1) {
            cleanup();
            return;
        }
        Integer timeout = getTimeout();
        if (timeout == null || timeout <= 0) {
            // Timeouts disabled; nothing to arm.
            return;
        }
        final long currentTime = System.currentTimeMillis();
        if (ret == 0) {
            final long expireTimeVar = expireTime;
            if (expireTimeVar != -1 && currentTime > expireTimeVar) {
                IoUtils.safeClose(connection);
                throw UndertowMessages.MESSAGES.readTimedOut(this.getTimeout());
            }
        }
        expireTime = currentTime + timeout;
        if (handle == null) {
            handle = connection.getIoThread().executeAfter(timeoutCommand, timeout, TimeUnit.MILLISECONDS);
        }
    }

    @Override
    public long transferTo(final long position, final long count, final FileChannel target) throws IOException {
        checkExpired();
        long ret = super.transferTo(position, count, target);
        handleReadTimeout(ret);
        return ret;
    }

    @Override
    public long transferTo(final long count, final ByteBuffer throughBuffer, final StreamSinkChannel target) throws IOException {
        checkExpired();
        long ret = super.transferTo(count, throughBuffer, target);
        handleReadTimeout(ret);
        return ret;
    }

    @Override
    public long read(final ByteBuffer[] dsts, final int offset, final int length) throws IOException {
        checkExpired();
        long ret = super.read(dsts, offset, length);
        handleReadTimeout(ret);
        return ret;
    }

    @Override
    public int read(final ByteBuffer dst) throws IOException {
        checkExpired();
        int ret = super.read(dst);
        handleReadTimeout(ret);
        return ret;
    }

    @Override
    public void awaitReadable() throws IOException {
        checkExpired();
        // Bound the blocking wait by the effective timeout so a blocked reader
        // cannot out-wait the timeout machinery.
        Integer timeout = getTimeout();
        if (timeout != null && timeout > 0) {
            super.awaitReadable(timeout + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
        } else {
            super.awaitReadable();
        }
    }

    @Override
    public void awaitReadable(long time, TimeUnit timeUnit) throws IOException {
        checkExpired();
        Integer timeout = getTimeout();
        if (timeout != null && timeout > 0) {
            long millis = timeUnit.toMillis(time);
            // Wait no longer than the configured timeout (plus fuzz), even if the
            // caller asked for more.
            super.awaitReadable(Math.min(millis, timeout + FUZZ_FACTOR), TimeUnit.MILLISECONDS);
        } else {
            super.awaitReadable(time, timeUnit);
        }
    }

    /**
     * Effective read timeout in milliseconds: the channel's READ_TIMEOUT option,
     * falling back to (or capped by, when both are set and positive) the
     * UndertowOptions.IDLE_TIMEOUT. May be {@code null} or {@code <= 0}, which
     * callers treat as "disabled".
     */
    private Integer getTimeout() {
        Integer timeout = 0;
        try {
            timeout = connection.getSourceChannel().getOption(Options.READ_TIMEOUT);
        } catch (IOException ignore) {
            // should never happen
        }
        Integer idleTimeout = openListener.getUndertowOptions().get(UndertowOptions.IDLE_TIMEOUT);
        if ((timeout == null || timeout <= 0) && idleTimeout != null) {
            timeout = idleTimeout;
        } else if (timeout != null && idleTimeout != null && idleTimeout > 0) {
            timeout = Math.min(timeout, idleTimeout);
        }
        return timeout;
    }

    @Override
    public void terminateReads() throws IOException {
        checkExpired();
        super.terminateReads();
        cleanup();
    }

    // Cancels any pending timeout task and clears the deadline.
    private void cleanup() {
        if (handle != null) {
            handle.remove();
            handle = null;
            expireTime = -1;
        }
    }

    @Override
    public void suspendReads() {
        super.suspendReads();
        cleanup();
    }

    // Fails fast once the timeout task has closed the connection.
    private void checkExpired() throws ReadTimeoutException {
        synchronized (this) {
            if (expired) {
                throw UndertowMessages.MESSAGES.readTimedOut(System.currentTimeMillis());
            }
        }
    }

    public String toString() {
        return super.toString() + " (next: " + next + ")";
    }
}
| apache-2.0 |
droolsjbpm/jbpm | jbpm-test-coverage/src/test/java/org/jbpm/test/functional/gateway/ParallelGatewayAsyncTest.java | 3008 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.test.functional.gateway;
import java.util.HashMap;
import java.util.Map;
import org.jbpm.executor.ExecutorServiceFactory;
import org.jbpm.test.JbpmTestCase;
import org.jbpm.test.wih.ListWorkItemHandler;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.kie.api.executor.ExecutorService;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.process.ProcessInstance;
import static org.junit.Assert.assertNull;
/**
 * Parallel gateway execution test. 2x parallel fork, 1x join
 *
 * <p>Runs the process in async mode backed by a jBPM executor service, completes
 * the generated human-task work items, and verifies the instance finishes.
 */
public class ParallelGatewayAsyncTest extends JbpmTestCase {

    private static final String PARALLEL_GATEWAY_ASYNC = "org/jbpm/test/functional/gateway/ParallelGatewayAsync.bpmn";
    private static final String PARALLEL_GATEWAY_ASYNC_ID = "org.jbpm.test.functional.gateway.ParallelGatewayAsync";

    private ExecutorService executorService;
    private KieSession kieSession;
    private ListWorkItemHandler wih;

    public ParallelGatewayAsyncTest() {
        // NOTE(review): the two flags are passed positionally -- presumably
        // (setupDataSource, sessionPersistence); confirm against JbpmTestCase.
        super(true, true);
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Poll for async jobs every second so the test completes quickly.
        executorService = ExecutorServiceFactory.newExecutorService(getEmf());
        executorService.setInterval(1);
        executorService.init();
        // Force async continuation for the process and expose the executor to the engine.
        addEnvironmentEntry("AsyncMode", "true");
        addEnvironmentEntry("ExecutorService", executorService);
        // Collects created work items instead of completing them, so the test
        // can complete them explicitly below.
        wih = new ListWorkItemHandler();
        addWorkItemHandler("Human Task", wih);
        kieSession = createKSession(PARALLEL_GATEWAY_ASYNC);
    }

    @After
    public void tearDown() throws Exception {
        // Drain executor state so leftover jobs cannot leak into other tests.
        executorService.clearAllErrors();
        executorService.clearAllRequests();
        executorService.destroy();
        super.tearDown();
    }

    /**
     * Simple parallel gateway test.
     */
    @Test(timeout = 30000)
    public void testParallelGatewayAsync() throws Exception {
        Map<String, Object> inputs = new HashMap<>();
        inputs.put("useHT", Boolean.TRUE);
        inputs.put("mode", "1");
        ProcessInstance pi = kieSession.startProcess(PARALLEL_GATEWAY_ASYNC_ID, inputs);
        // Fixed sleeps give the async executor time to run the forked branches.
        // NOTE(review): sleep-based synchronization is inherently timing-sensitive;
        // the 30s @Test timeout is the only guard against a hang.
        Thread.sleep(3000L);
        wih.getWorkItems().forEach(e -> kieSession.getWorkItemManager().completeWorkItem(e.getId(), e.getParameters()));
        Thread.sleep(1000L);
        // A completed (and persisted) instance is no longer active in the session.
        assertNull(kieSession.getProcessInstance(pi.getId()));
    }
}
| apache-2.0 |
vega113/incubator-wave | wave/src/test/java/org/waveprotocol/wave/client/editor/content/paragraph/RenumbererTestBase.java | 16472 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.client.editor.content.paragraph;
import static org.waveprotocol.wave.client.editor.Editor.ROOT_HANDLER_REGISTRY;
import com.google.gwt.dom.client.Element;
import junit.framework.TestCase;
import org.waveprotocol.wave.client.editor.Editor;
import org.waveprotocol.wave.client.editor.EditorTestingUtil;
import org.waveprotocol.wave.client.editor.content.CMutableDocument;
import org.waveprotocol.wave.client.editor.content.ContentDocElement;
import org.waveprotocol.wave.client.editor.content.ContentDocument;
import org.waveprotocol.wave.client.editor.content.ContentDocument.PermanentMutationHandler;
import org.waveprotocol.wave.client.editor.content.ContentElement;
import org.waveprotocol.wave.client.editor.content.ContentNode;
import org.waveprotocol.wave.client.editor.content.HasImplNodelets;
import org.waveprotocol.wave.client.editor.content.paragraph.OrderedListRenumberer.LevelNumbers;
import org.waveprotocol.wave.client.editor.content.paragraph.Paragraph.Alignment;
import org.waveprotocol.wave.client.editor.content.paragraph.Paragraph.Direction;
import org.waveprotocol.wave.client.scheduler.FinalTaskRunner;
import org.waveprotocol.wave.client.scheduler.Scheduler.Task;
import org.waveprotocol.wave.model.document.indexed.IndexedDocumentImpl;
import org.waveprotocol.wave.model.document.operation.Attributes;
import org.waveprotocol.wave.model.document.operation.impl.DocInitializationBuilder;
import org.waveprotocol.wave.model.document.util.Point;
import org.waveprotocol.wave.model.schema.conversation.ConversationSchemas;
import org.waveprotocol.wave.model.util.CollectionUtils;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
 * Utilities for testing ordered list numbering.
 *
 * A bunch of methods refer to lines by "index". This is index into the
 * conceptual list of lines, so, 0 for the first line, 1 for the second line,
 * and so forth.
 *
 * @author danilatos@google.com (Daniel Danilatos)
 */
public abstract class RenumbererTestBase extends TestCase {

  /**
   * Simple enum for representing a style of line, that maps to the type and
   * li-style type attributes. Contains a representative sample of the types of
   * lines that could possibly have different effects on renumbering.
   */
  enum Type {
    /** No attributes */
    NONE,
    /** t=h1 */
    HEADING,
    /** t=li without listyle */
    LIST,
    /** t=li with listyle = decimal */
    DECIMAL // DECIMAL must come last
  }

  /**
   * Fake renderer that doesn't depend on any DOM stuff.
   */
  ParagraphHtmlRenderer renderer = new ParagraphHtmlRenderer() {
    @Override
    public Element createDomImpl(Renderable element) {
      // No real DOM in these tests.
      return null;
    }

    @Override
    public void updateRendering(HasImplNodelets element, String type, String listStyle, int indent,
        Alignment alignment, Direction direction) {
    }

    @Override
    public void updateListValue(HasImplNodelets element, int value) {
      // The renumberer must only push values that match the line's cached number.
      assertEquals(Line.fromParagraph(((ContentElement) element)).getCachedNumberValue(), value);
    }
  };

  /**
   * Renumberer being tested.
   */
  final OrderedListRenumberer renumberer = new OrderedListRenumberer(renderer);

  /**
   * Batch render task that will get scheduled.
   */
  Task scheduledTask;

  /**
   * Simple fake take runner that just sets {@link #scheduledTask}
   */
  final FinalTaskRunner runner = new FinalTaskRunner() {
    @Override public void scheduleFinally(Task task) {
      // Only one distinct task may ever be pending at a time.
      assertTrue(scheduledTask == null || scheduledTask == task);
      scheduledTask = task;
    }
  };

  /**
   * Same as a regular ParagraphRenderer but tagged with
   * {@link PermanentMutationHandler} so that it gets used even in POJO document mode.
   */
  static class Renderer extends ParagraphRenderer implements PermanentMutationHandler {
    Renderer(ParagraphHtmlRenderer htmlRenderer, OrderedListRenumberer renumberer,
        FinalTaskRunner finalRaskRunner) {
      super(htmlRenderer, renumberer, finalRaskRunner);
      // TODO Auto-generated constructor stub
    }
  }

  ContentDocument content1;
  ContentDocument content2;
  CMutableDocument doc1;
  CMutableDocument doc2;

  /**
   * Current doc being used. For some tests we render more than one doc to test
   * the sharing of a single renumberer between multiple documents.
   */
  CMutableDocument doc;

  /** Number of lines in test documents */
  final int SIZE = 10;

  @Override
  protected void setUp() {
    EditorTestingUtil.setupTestEnvironment();
    ContentDocElement.register(ROOT_HANDLER_REGISTRY, ContentDocElement.DEFAULT_TAGNAME);
    Paragraph.register(ROOT_HANDLER_REGISTRY);
    LineRendering.registerLines(ROOT_HANDLER_REGISTRY);
    LineRendering.registerParagraphRenderer(Editor.ROOT_HANDLER_REGISTRY,
        new Renderer(renderer, renumberer, runner));
    renumberer.updateHtmlEvenWhenNullImplNodelet = true;
    // Build two identical documents: <body> with SIZE empty <line/> elements.
    DocInitializationBuilder builder = new DocInitializationBuilder();
    builder.elementStart("body", Attributes.EMPTY_MAP);
    for (int i = 0; i < SIZE; i++) {
      builder.elementStart("line", Attributes.EMPTY_MAP).elementEnd();
    }
    builder.elementEnd();
    content1 = new ContentDocument(ConversationSchemas.BLIP_SCHEMA_CONSTRAINTS);
    content1.setRegistries(Editor.ROOT_REGISTRIES);
    content1.consume(builder.build());
    doc1 = content1.getMutableDoc();
    content2 = new ContentDocument(ConversationSchemas.BLIP_SCHEMA_CONSTRAINTS);
    content2.setRegistries(Editor.ROOT_REGISTRIES);
    content2.consume(builder.build());
    doc2 = content2.getMutableDoc();
    doc = doc1;
    // Flush the initial render so each test starts from a clean numbering state.
    runTask();
  }

  /**
   * Performs a randomized test of renumbering logic.
   *
   * @param testIterations number of test iterations on the same document. Each
   *        iteration does a substantial amount of work (depending on document
   *        size).
   * @param seed initial random seed.
   */
  void doRandomTest(int testIterations, int seed) {
    // Disable expensive document validation: this test performs a very large
    // number of mutations and only cares about numbering correctness.
    ContentDocument.performExpensiveChecks = false;
    ContentDocument.validateLocalOps = false;
    IndexedDocumentImpl.performValidation = false;
    final int LEVELS = 4;
    final int MAX_RUN = 3;
    final int ITERS_PER_BATCH_RENDER = 6;
    final int DECIMALS_TO_OTHERS = 4; // ratio of decimal bullets to other stuff
    final int UPDATE_TO_ADD_REMOVE = 4; // ratio of updates to node adds/removals
    assertNull(scheduledTask);
    // NOTE(review): maxRand appears to be unused.
    int maxRand = 5;
    Random r = new Random(seed);
    // For each iteration
    for (int iter = 0; iter < testIterations; iter++) {
      info("Iter: " + iter);
      // Repeat several times for a single batch render, to make sure we are
      // able to handle multiple overlapping, redundant updates.
      // Times two because we are alternating between two documents to test
      // the ability of the renumberer to handle more than one document
      // correctly.
      int innerIters = (r.nextInt(ITERS_PER_BATCH_RENDER) + 1) * 2;
      for (int inner = 0; inner < innerIters; inner++) {
        // NOTE(review): the doc1/doc2 alternation described above is currently
        // disabled -- only doc1 is exercised (see commented-out expression).
        doc = doc1; // (inner % 2 == 0) ? doc1 : doc2;
        int totalLines = (doc.size() - 2) / 2;
        Line line = getFirstLine();
        // Pick a random section of the document to perform a bunch of random
        // changes to
        int i = 0;
        int a = r.nextInt(totalLines);
        int b = r.nextInt(totalLines);
        int startSection = Math.min(a, b);
        int endSection = Math.max(a, b);
        while (i < startSection) {
          i++;
          line = line.next();
        }
        while (i < endSection && line != null) {
          // Pick a random indentation to set
          int level = r.nextInt(LEVELS);
          // Length of run of elements to update
          int length;
          // Whether we are making them numbered items or doing something else
          boolean decimal;
          if (r.nextInt(DECIMALS_TO_OTHERS) == 0) {
            // No need making it a long run for non-numbered items.
            length = r.nextInt(2);
            decimal = false;
          } else {
            decimal = true;
            length = r.nextInt(MAX_RUN - 1) + 1;
          }
          while (length > 0 && i < endSection && line != null) {
            boolean fiftyFifty = i % 2 == 0;
            // If we're numbering these lines, then DECIMAL, otherwise choose a
            // random other type.
            Type type = decimal ? Type.DECIMAL : Type.values()[r.nextInt(Type.values().length - 1)];
            // Randomly decide to add/remove, or to update
            if (r.nextInt(UPDATE_TO_ADD_REMOVE) == 0) {
              int index = index(line);
              // Randomly decide to add or remove.
              // Include some constraints to ensure the document doesn't get too small or too large.
              boolean add = index == 0 ||
                  totalLines < SIZE / 2 ? true : (totalLines > SIZE * 2 ? false : r.nextBoolean());
              if (add) {
                line = create(index, type, level, r.nextBoolean());
              } else {
                line = delete(index);
                if (line == null) {
                  // We just deleted the last line.
                  continue;
                }
              }
              assert line != null;
            } else {
              update(index(line), type, level, fiftyFifty);
            }
            length--;
            i++;
            line = line.next();
          }
        }
      }
      check(iter);
    }
  }

  /**
   * @return index for the given line object (0 for the first line, etc).
   */
  int index(Line line) {
    // Document layout is <body> then alternating line elements, so location
    // maps to index via (loc - 1) / 2.
    return (doc.getLocation(line.getLineElement()) - 1) / 2;
  }

  /**
   * @return the line element for the given index.
   */
  ContentElement getLineElement(int index) {
    return doc.locate(index * 2 + 1).getNodeAfter().asElement();
  }

  /**
   * @return the first line object
   */
  Line getFirstLine() {
    return Line.getFirstLineOfContainer(doc.getDocumentElement().getFirstChild().asElement());
  }

  /**
   * Creates and returns a new line.
   *
   * @param createAndUpdateSeparately if true, creates a line, then sets the
   *        attributes as a separate operation. Otherwise, sets them all at
   *        once. We want to test both scenarios.
   */
  Line create(int index, Type type, int indent, boolean createAndUpdateSeparately) {
    // info("Creating @" + index + " " +
    // type + " " + indent + " " + createAndUpdateSeparately);
    Point<ContentNode> loc = doc.locate(index * 2 + 1);
    Line l;
    if (createAndUpdateSeparately) {
      l = Line.fromLineElement(
          doc.createElement(loc, "line", Attributes.EMPTY_MAP));
      update(index, type, indent);
    } else {
      l = Line.fromLineElement(
          doc.createElement(loc, "line", attributes(type, indent, false, true)));
    }
    assertNotNull(l);
    return l;
  }

  /**
   * Deletes the line at the specified index.
   *
   * @return the line that followed the deleted one, or null if it was last.
   */
  Line delete(int index) {
    // info("Deleting @" + index);
    assert index != 0 : "Code doesn't (yet) support killing the initial line";
    ContentElement e = getLineElement(index);
    Line line = Line.fromLineElement(e).next();
    doc.deleteNode(e);
    return line;
  }

  /**
   * Updates the attributes of the line at the specified index.
   */
  void update(int index, Type type, int indent) {
    update(index, type, indent, true);
  }

  /**
   * Updates the attributes of the line at the specified index.
   *
   * @param alwaysSetRedundant if true, always set the listyle attribute even if it
   *        is not necessary. For example, if the listyle attribute was
   *        "decimal", but the type is "HEADING", the listyle attribute should
   *        normally be ignored and has no meaning. It won't make a difference
   *        if it is set or not. We want to test both scenarios.
   */
  void update(int index, Type type, int indent, boolean alwaysSetRedundant) {
    ContentElement e = getLineElement(index);
    // info("Making @" + ((doc.getLocation(e) - 1)/2) + " " +
    // type + " " + indent + " " + alwaysSetStyle);
    Map<String, String> updates = attributes(type, indent, alwaysSetRedundant, false);
    for (Map.Entry<String, String> pair : updates.entrySet()) {
      doc.setElementAttribute(e, pair.getKey(), pair.getValue());
    }
  }

  /**
   * Creates the map of element attributes for the given parameters.
   *
   * @param alwaysSetStyle see {@link #update(int, Type, int, boolean)}
   * @param noNulls eliminate keys that would have null values. We want nulls
   *        for updates, but no nulls for creates.
   */
  Map<String, String> attributes(Type type, int indent, boolean alwaysSetStyle, boolean noNulls) {
    Map<String, String> updates = new HashMap<String, String>();
    String levelStr = (indent > 0 ? "" + indent : null);
    maybePut(updates, Paragraph.INDENT_ATTR, levelStr, noNulls);
    String t = null;
    String lt = null;
    switch (type) {
      case HEADING: t = "h1"; break;
      case LIST: t = Paragraph.LIST_TYPE; break;
      case DECIMAL: t = Paragraph.LIST_TYPE; lt = Paragraph.LIST_STYLE_DECIMAL; break;
    }
    maybePut(updates, Paragraph.SUBTYPE_ATTR, t, noNulls);
    if (alwaysSetStyle || type == Type.LIST || type == Type.DECIMAL) {
      maybePut(updates, Paragraph.LIST_STYLE_ATTR, lt, noNulls);
    }
    return updates;
  }

  // Puts key -> val unless val is null and noNull is set.
  void maybePut(Map<String, String> map, String key, String val, boolean noNull) {
    if (val != null || !noNull) {
      map.put(key, val);
    }
  }

  /**
   * Check the current line numbering is consistent with the document state.
   */
  void check() {
    check(-1);
  }

  /**
   * Check the current line numbering is consistent with the document state.
   *
   * Walks all lines, independently recomputing the expected number for each
   * decimal list item and comparing against the renumberer's cached value.
   *
   * @param iter current test iteration, for debugging/logging purposes.
   */
  void check(int iter) {
    runTask();
    // if (iter >= 1740) {
    // info("\n\nCHECKING\n");
    // printInfo(null, "XX");
    // info("---");
    // }
    LevelNumbers numbers = new LevelNumbers(0, 1);
    Line line = getFirstLine();
    while (line != null) {
      int indent = line.getIndent();
      numbers.setLevel(indent);
      if (line.isDecimalListItem()) {
        int num = numbers.getNumberAndIncrement();
        assertFalse(line.getCachedNumberValue() == Line.DIRTY);
        if (num != line.getCachedNumberValue()) {
          String msg = "Expected: " + num + ", got: " + line.getCachedNumberValue();
          printInfo(line, msg);
          fail("Wrong number on iteration " + iter + ". " + msg +
              ". See stdout & stderr for debug details");
        }
      } else {
        // Any non-decimal line resets the numbering at this level.
        numbers.setNumber(1);
      }
      line = line.next();
    }
    // info("^^^");
  }

  // Executes (and clears) the pending batch-render task, if any.
  void runTask() {
    if (scheduledTask != null) {
      scheduledTask.execute();
    }
    scheduledTask = null;
  }

  // Dumps all lines to stdout, switching to stderr after the failing line.
  void printInfo(Line badLine, String msg) {
    Line line = getFirstLine();
    PrintStream stream = System.out;
    int i = 0;
    while (line != null) {
      int indent = line.getIndent();
      stream.println(
          CollectionUtils.repeat('.', line.getIndent()) +
          line.toString() +
          " indent:" + indent +
          CollectionUtils.repeat(' ', 20) + line.getCachedNumberValue() + " (" + i + ")");
      if (line == badLine) {
        stream.println("\n\n\n");
        stream = System.err;
        stream.println(msg);
        stream.println(">>>>>>>>>>>>>>>>>>>>>>>>> DIED ON LINE ABOVE <<<<<<<<<<<<<<<<<<\n\n");
      }
      line = line.next();
      i++;
    }
  }

  void info(Object msg) {
    // Uncomment for debugging
    // System.out.println(msg == null ? "null" : msg.toString());
  }
}
| apache-2.0 |
fjy/druid | server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java | 3297 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.initialization.jetty;
import com.google.inject.Binder;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.Provides;
import com.metamx.common.lifecycle.Lifecycle;
import com.metamx.common.logger.Logger;
import io.druid.guice.JsonConfigProvider;
import io.druid.guice.LazySingleton;
import io.druid.guice.LifecycleModule;
import io.druid.guice.annotations.RemoteChatHandler;
import io.druid.guice.annotations.Self;
import io.druid.server.DruidNode;
import io.druid.server.initialization.ServerConfig;
import org.eclipse.jetty.server.Server;
import java.util.Properties;
/**
 * Guice module that decides whether ingestion (chat handler) traffic gets its
 * own Jetty server or shares the query server's {@link DruidNode}.
 */
public class ChatHandlerServerModule implements Module
{
  private static final Logger log = new Logger(ChatHandlerServerModule.class);

  // Injected runtime properties; drives the separate-server decision below.
  @Inject
  private Properties properties;

  @Override
  public void configure(Binder binder)
  {
    /** If "druid.indexer.task.chathandler.port" property is set then we assume that a
     * separate Jetty Server with it's own {@link ServerConfig} is required for ingestion apart from the query server
     * otherwise we bind {@link DruidNode} annotated with {@link RemoteChatHandler} to {@literal @}{@link Self} {@link DruidNode}
     * so that same Jetty Server is used for querying as well as ingestion
     */
    if (properties.containsKey("druid.indexer.task.chathandler.port")) {
      log.info("Spawning separate ingestion server at port [%s]", properties.get("druid.indexer.task.chathandler.port"));
      JsonConfigProvider.bind(binder, "druid.indexer.task.chathandler", DruidNode.class, RemoteChatHandler.class);
      JsonConfigProvider.bind(binder, "druid.indexer.server.chathandler.http", ServerConfig.class, RemoteChatHandler.class);
      // Tie the dedicated server's lifecycle to the injector's Lifecycle.
      LifecycleModule.register(binder, Server.class, RemoteChatHandler.class);
    } else {
      // Alias the @RemoteChatHandler keys to the existing (@Self / default)
      // bindings so ingestion reuses the query server.
      binder.bind(DruidNode.class).annotatedWith(RemoteChatHandler.class).to(Key.get(DruidNode.class, Self.class));
      binder.bind(ServerConfig.class).annotatedWith(RemoteChatHandler.class).to(Key.get(ServerConfig.class));
    }
  }

  /**
   * Provides the dedicated ingestion Jetty server (only instantiated when the
   * separate-port branch above registered it with the lifecycle).
   */
  @Provides
  @LazySingleton
  @RemoteChatHandler
  public Server getServer(Injector injector, Lifecycle lifecycle, @RemoteChatHandler DruidNode node, @RemoteChatHandler ServerConfig config)
  {
    final Server server = JettyServerModule.makeJettyServer(node, config);
    JettyServerModule.initializeServer(injector, lifecycle, server);
    return server;
  }
}
| apache-2.0 |
masaki-yamakawa/geode | geode-lucene/src/distributedTest/java/org/apache/geode/cache/lucene/EvictionDUnitTest.java | 6321 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.lucene;
import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.INDEX_NAME;
import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.REGION_NAME;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.stream.IntStream;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.Region;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.control.HeapMemoryMonitor;
import org.apache.geode.test.dunit.SerializableRunnableIF;
import org.apache.geode.test.junit.categories.LuceneTest;
@Category({LuceneTest.class})
@RunWith(JUnitParamsRunner.class)
public class EvictionDUnitTest extends LuceneQueriesAccessorBase {

  // Eviction threshold installed on the resource manager before the fake
  // heap notification is raised (see raiseFakeNotification()).
  protected static final float INITIAL_EVICTION_HEAP_PERCENTAGE = 50.9f;
  // Threshold used while faking the notification.
  protected static final float EVICTION_HEAP_PERCENTAGE_FAKE_NOTIFICATION = 85.0f;
  // Fake "max memory" fed to the heap monitor for the test.
  protected static final int TEST_MAX_MEMORY = 100;
  // Fake "used memory" reported to the heap monitor; relative to
  // TEST_MAX_MEMORY this exceeds the eviction threshold, so eviction fires.
  protected static final int MEMORY_USED_FAKE_NOTIFICATION = 90;

  /** JUnitParams source: region types whose eviction action is overflow-to-disk. */
  protected RegionTestableType[] getPartitionRedundantOverflowEvictionRegionType() {
    return new RegionTestableType[] {
        RegionTestableType.PARTITION_PERSISTENT_REDUNDANT_EVICTION_OVERFLOW};
  }

  /** JUnitParams source: region types whose eviction action is local destroy. */
  protected RegionTestableType[] getPartitionRedundantLocalDestroyEvictionRegionType() {
    return new RegionTestableType[] {RegionTestableType.PARTITION_REDUNDANT_EVICTION_LOCAL_DESTROY,
        RegionTestableType.PARTITION_REDUNDANT_PERSISTENT_EVICTION_LOCAL_DESTROY,
        RegionTestableType.PARTITION_EVICTION_LOCAL_DESTROY,
        RegionTestableType.PARTITION_PERSISTENT_EVICTION_LOCAL_DESTROY};
  }

  /**
   * Creating a Lucene index on a region with eviction action "local destroy"
   * must be rejected with an UnsupportedOperationException, and the region
   * must not exist afterwards.
   */
  @Test
  @Parameters(method = "getPartitionRedundantLocalDestroyEvictionRegionType")
  public void regionWithEvictionWithLocalDestroyMustNotbeAbleToCreateLuceneIndexes(
      RegionTestableType regionTestType) {
    SerializableRunnableIF createIndex = getSerializableRunnableIFCreateIndex();
    dataStore1.invoke(() -> {
      try {
        initDataStore(createIndex, regionTestType);
        // NOTE(review): if no exception is thrown the test passes silently;
        // an explicit fail() after initDataStore would catch that regression.
      } catch (UnsupportedOperationException e) {
        assertEquals(
            "Lucene indexes on regions with eviction and action local destroy are not supported",
            e.getMessage());
        assertNull(getCache().getRegion(REGION_NAME));
      }
    });
  }

  /** Builds the serializable closure that creates the "text" Lucene index. */
  private SerializableRunnableIF getSerializableRunnableIFCreateIndex() {
    return () -> {
      LuceneService luceneService = LuceneServiceProvider.get(getCache());
      luceneService.createIndexFactory().setFields("text").create(INDEX_NAME, REGION_NAME);
    };
  }

  /**
   * Regions with overflow eviction may carry Lucene indexes: populate the
   * region, force entries to overflow to disk via a faked heap notification,
   * then verify a query still finds every entry.
   */
  @Test
  @Parameters(method = "getPartitionRedundantOverflowEvictionRegionType")
  public void regionsWithEvictionWithOverflowMustBeAbleToCreateLuceneIndexes(
      RegionTestableType regionTestType) {
    SerializableRunnableIF createIndex = () -> {
      LuceneService luceneService = LuceneServiceProvider.get(getCache());
      luceneService.createIndexFactory().setFields("text").create(INDEX_NAME, REGION_NAME);
    };
    dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
    accessor.invoke(() -> initDataStore(createIndex, regionTestType));
    // Put one entry per bucket so every bucket has data to overflow.
    accessor.invoke(() -> {
      Cache cache = getCache();
      Region region = cache.getRegion(REGION_NAME);
      IntStream.range(0, NUM_BUCKETS).forEach(i -> region.put(i, new TestObject("hello world")));
    });
    // Let the async event queue flush into the index before evicting.
    waitForFlushBeforeExecuteTextSearch(accessor, 60000);
    dataStore1.invoke(() -> {
      try {
        getCache().getResourceManager().setEvictionHeapPercentage(INITIAL_EVICTION_HEAP_PERCENTAGE);
        final PartitionedRegion partitionedRegion = (PartitionedRegion) getRootRegion(REGION_NAME);
        raiseFakeNotification();
        // Eviction runs asynchronously; wait until something overflowed.
        await().untilAsserted(() -> {
          assertTrue(partitionedRegion.getDiskRegionStats().getNumOverflowOnDisk() > 0);
        });
      } finally {
        cleanUpAfterFakeNotification();
      }
    });
    // Overflowed values must still be searchable through the index.
    accessor.invoke(() -> {
      LuceneService luceneService = LuceneServiceProvider.get(getCache());
      LuceneQuery<Integer, TestObject> query = luceneService.createLuceneQueryFactory()
          .setLimit(100).create(INDEX_NAME, REGION_NAME, "world", "text");
      List<LuceneResultStruct<Integer, TestObject>> resultList = query.findResults();
      assertEquals(NUM_BUCKETS, resultList.size());
    });
  }

  /**
   * Simulates a heap-critical event: disables real memory sampling, lowers the
   * fake max memory and reports fake usage above the eviction threshold so the
   * heap evictor starts evicting without actually filling the heap.
   */
  protected void raiseFakeNotification() {
    ((GemFireCacheImpl) getCache()).getHeapEvictor().setTestAbortAfterLoopCount(1);
    HeapMemoryMonitor.setTestDisableMemoryUpdates(true);
    getCache().getResourceManager()
        .setEvictionHeapPercentage(EVICTION_HEAP_PERCENTAGE_FAKE_NOTIFICATION);
    HeapMemoryMonitor heapMemoryMonitor =
        ((GemFireCacheImpl) getCache()).getInternalResourceManager().getHeapMonitor();
    heapMemoryMonitor.setTestMaxMemoryBytes(TEST_MAX_MEMORY);
    heapMemoryMonitor.updateStateAndSendEvent(MEMORY_USED_FAKE_NOTIFICATION, "test");
  }

  /** Undoes raiseFakeNotification() so later tests see real memory updates. */
  protected void cleanUpAfterFakeNotification() {
    ((GemFireCacheImpl) getCache()).getHeapEvictor().setTestAbortAfterLoopCount(Integer.MAX_VALUE);
    HeapMemoryMonitor.setTestDisableMemoryUpdates(false);
  }
}
| apache-2.0 |
mashuai/Open-Source-Research | Javac2007/流程/jvm/16Gen/makeNewArray.java | 914 | //where
/** Generate code to create an array with given element type and number
 *  of dimensions.
 *
 *  @param pos   source position, used for error reporting.
 *  @param type  the array type to be created.
 *  @param ndims the number of dimensions allocated by this expression.
 *  @return a stack item representing the newly created array.
 */
Item makeNewArray(DiagnosticPosition pos, Type type, int ndims) {
    try {// added by me (debug tracing)
        DEBUG.P(this,"makeNewArray(3)");
        DEBUG.P("type="+type);
        DEBUG.P("ndims="+ndims);
        Type elemtype = types.elemtype(type);
        // The class-file format caps total array dimensions; report
        // "limit.dimensions" and count the error if exceeded.
        if (types.dimensions(elemtype) + ndims > ClassFile.MAX_DIMENSIONS) {
            log.error(pos, "limit.dimensions");
            nerrs++;
        }
        int elemcode = Code.arraycode(elemtype);
        DEBUG.P("elemcode="+elemcode);
        // Pick the bytecode by element kind: anewarray, multianewarray or
        // newarray. elemcode comes from Code.arraycode; presumably 0/1
        // denote reference/array element types -- confirm against Code.
        if (elemcode == 0 || (elemcode == 1 && ndims == 1)) {
            code.emitAnewarray(makeRef(pos, elemtype), type);
        } else if (elemcode == 1) {
            code.emitMultianewarray(ndims, makeRef(pos, type), type);
        } else {
            code.emitNewarray(elemcode, type);
        }
        return items.makeStackItem(type);
    }finally{// added by me (debug tracing)
        DEBUG.P(0,this,"makeNewArray(3)");
    }
}
xhoong/incubator-calcite | core/src/main/java/org/apache/calcite/util/TimeString.java | 7356 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.util;
import org.apache.calcite.avatica.util.DateTimeUtils;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import java.util.Calendar;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
/**
 * Time literal.
 *
 * <p>Immutable, internally represented as a string (in ISO format),
 * and can support unlimited precision (milliseconds, nanoseconds).
 */
public class TimeString implements Comparable<TimeString> {
  /** Matches "hh:mm:ss" plus an optional fraction that must not end in '0'. */
  private static final Pattern PATTERN =
      Pattern.compile("[0-9][0-9]:[0-9][0-9]:[0-9][0-9](\\.[0-9]*[1-9])?");

  /** Canonical value, e.g. "12:34:56.789" (never has trailing zeros). */
  final String v;

  /** Internal constructor, no validation. */
  private TimeString(String v, @SuppressWarnings("unused") boolean ignore) {
    this.v = v;
  }

  /**
   * Creates a TimeString.
   *
   * @param v time in "hh:mm:ss" form, optionally with a fraction
   * @throws IllegalArgumentException if the format is invalid or a field is
   *     out of range
   */
  public TimeString(String v) {
    this(v, false);
    // Use Guava "%s" templates so the offending value is interpolated into
    // the message (extra args without a placeholder are only appended in
    // brackets, which made the old messages awkward).
    Preconditions.checkArgument(PATTERN.matcher(v).matches(),
        "Invalid time format: %s", v);
    Preconditions.checkArgument(getHour() >= 0 && getHour() < 24,
        "Hour out of range: %s", getHour());
    Preconditions.checkArgument(getMinute() >= 0 && getMinute() < 60,
        "Minute out of range: %s", getMinute());
    Preconditions.checkArgument(getSecond() >= 0 && getSecond() < 60,
        "Second out of range: %s", getSecond());
  }

  /** Creates a TimeString for hour, minute, second and millisecond values. */
  public TimeString(int h, int m, int s) {
    this(hms(h, m, s), false);
  }

  /** Validates an hour-minute-second value and converts to a string. */
  private static String hms(int h, int m, int s) {
    Preconditions.checkArgument(h >= 0 && h < 24, "Hour out of range: %s", h);
    Preconditions.checkArgument(m >= 0 && m < 60, "Minute out of range: %s", m);
    Preconditions.checkArgument(s >= 0 && s < 60, "Second out of range: %s", s);
    final StringBuilder b = new StringBuilder();
    DateTimeStringUtils.hms(b, h, m, s);
    return b.toString();
  }

  /** Sets the fraction field of a {@code TimeString} to a given number
   * of milliseconds. Nukes the value set via {@link #withNanos}.
   *
   * <p>For example,
   * {@code new TimeString(1970, 1, 1, 2, 3, 4).withMillis(56)}
   * yields {@code TIME '1970-01-01 02:03:04.056'}. */
  public TimeString withMillis(int millis) {
    Preconditions.checkArgument(millis >= 0 && millis < 1000);
    return withFraction(DateTimeStringUtils.pad(3, millis));
  }

  /** Sets the fraction field of a {@code TimeString} to a given number
   * of nanoseconds. Nukes the value set via {@link #withMillis(int)}.
   *
   * <p>For example,
   * {@code new TimeString(1970, 1, 1, 2, 3, 4).withNanos(56789)}
   * yields {@code TIME '1970-01-01 02:03:04.000056789'}. */
  public TimeString withNanos(int nanos) {
    Preconditions.checkArgument(nanos >= 0 && nanos < 1000000000);
    return withFraction(DateTimeStringUtils.pad(9, nanos));
  }

  /** Sets the fraction field of a {@code TimeString}.
   * The precision is determined by the number of leading zeros.
   * Trailing zeros are stripped.
   *
   * <p>For example,
   * {@code new TimeString(1970, 1, 1, 2, 3, 4).withFraction("00506000")}
   * yields {@code TIME '1970-01-01 02:03:04.00506'}. */
  public TimeString withFraction(String fraction) {
    String v = this.v;
    // Drop any existing fraction before appending the new one.
    int i = v.indexOf('.');
    if (i >= 0) {
      v = v.substring(0, i);
    }
    // Strip trailing zeros to keep the value canonical.
    while (fraction.endsWith("0")) {
      fraction = fraction.substring(0, fraction.length() - 1);
    }
    if (fraction.length() > 0) {
      v = v + "." + fraction;
    }
    return new TimeString(v);
  }

  @Override public String toString() {
    return v;
  }

  @Override public boolean equals(Object o) {
    // The value is in canonical form (no trailing zeros), so string equality
    // is value equality.
    return o == this
        || (o instanceof TimeString
            && ((TimeString) o).v.equals(v));
  }

  @Override public int hashCode() {
    return v.hashCode();
  }

  @Override public int compareTo(@Nonnull TimeString o) {
    // Lexicographic order on the canonical "hh:mm:ss[.f]" form is
    // chronological order.
    return v.compareTo(o.v);
  }

  /** Creates a TimeString from a Calendar. */
  public static TimeString fromCalendarFields(Calendar calendar) {
    return new TimeString(
        calendar.get(Calendar.HOUR_OF_DAY),
        calendar.get(Calendar.MINUTE),
        calendar.get(Calendar.SECOND))
        .withMillis(calendar.get(Calendar.MILLISECOND));
  }

  /** Creates a TimeString from a count of milliseconds since midnight. */
  public static TimeString fromMillisOfDay(int i) {
    return new TimeString(DateTimeUtils.unixTimeToString(i))
        .withMillis((int) DateTimeUtils.floorMod(i, 1000));
  }

  /** Truncates the fraction to at most {@code precision} digits,
   * re-canonicalizing (no trailing zeros, no dangling '.'). */
  public TimeString round(int precision) {
    Preconditions.checkArgument(precision >= 0);
    // "hh:mm:ss." is 9 characters, so 9 + precision bounds the length.
    int targetLength = 9 + precision;
    if (v.length() <= targetLength) {
      return this;
    }
    String v = this.v.substring(0, targetLength);
    while (v.length() >= 9 && (v.endsWith("0") || v.endsWith("."))) {
      v = v.substring(0, v.length() - 1);
    }
    return new TimeString(v);
  }

  /** Returns this time as the number of milliseconds since midnight. */
  public int getMillisOfDay() {
    // parseInt avoids the boxing Integer.valueOf incurred here.
    int h = Integer.parseInt(v.substring(0, 2));
    int m = Integer.parseInt(v.substring(3, 5));
    int s = Integer.parseInt(v.substring(6, 8));
    int ms = getMillisInSecond();
    return (int) (h * DateTimeUtils.MILLIS_PER_HOUR
        + m * DateTimeUtils.MILLIS_PER_MINUTE
        + s * DateTimeUtils.MILLIS_PER_SECOND
        + ms);
  }

  /** Returns the fraction, rounded down to milliseconds. */
  private int getMillisInSecond() {
    switch (v.length()) {
    case 8: // "12:34:56"
      return 0;
    case 10: // "12:34:56.7"
      return Integer.parseInt(v.substring(9)) * 100;
    case 11: // "12:34:56.78"
      return Integer.parseInt(v.substring(9)) * 10;
    case 12: // "12:34:56.789"
    default: // "12:34:56.7890000012345"
      return Integer.parseInt(v.substring(9, 12));
    }
  }

  private int getHour() {
    return Integer.parseInt(v.substring(0, 2));
  }

  private int getMinute() {
    return Integer.parseInt(this.v.substring(3, 5));
  }

  private int getSecond() {
    return Integer.parseInt(this.v.substring(6, 8));
  }

  /** Converts this time to a Calendar on the epoch date. */
  public Calendar toCalendar() {
    return Util.calendar(getMillisOfDay());
  }

  /** Converts this TimeString to a string, truncated or padded with
   * zeroes to a given precision. */
  public String toString(int precision) {
    Preconditions.checkArgument(precision >= 0);
    final int p = precision();
    if (precision < p) {
      return round(precision).toString(precision);
    }
    if (precision > p) {
      String s = v;
      if (p == 0) {
        s += ".";
      }
      return s + Strings.repeat("0", precision - p);
    }
    return v;
  }

  /** Number of fractional digits currently present. */
  private int precision() {
    return v.length() < 9 ? 0 : (v.length() - 9);
  }
}
// End TimeString.java
| apache-2.0 |
nleite/sling | bundles/extensions/models/integration-tests/src/main/java/org/apache/sling/models/it/ModelFactorySimpleTest.java | 4933 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.models.it;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import javax.jcr.Node;
import javax.jcr.Session;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.junit.annotations.SlingAnnotationsTestRunner;
import org.apache.sling.junit.annotations.TestReference;
import org.apache.sling.models.factory.ModelClassException;
import org.apache.sling.models.factory.ModelFactory;
import org.apache.sling.models.it.models.ConstructorInjectionTestModel;
import org.apache.sling.models.it.models.FieldInjectionTestModel;
import org.apache.sling.models.it.models.InterfaceInjectionTestModel;
import org.apache.sling.models.it.models.implextend.InvalidImplementsInterfacePropertyModel;
import org.apache.sling.models.it.models.implextend.SampleServiceInterface;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(SlingAnnotationsTestRunner.class)
public class ModelFactorySimpleTest {

    @TestReference
    private ResourceResolverFactory rrFactory;

    @TestReference
    private ModelFactory modelFactory;

    // Random value written to "testProperty" in setUp(); asserted against in
    // testCreateModel().
    private String value;
    private ResourceResolver resolver;
    private Resource resource;
    private Node createdNode;

    /**
     * Creates a throwaway JCR node with a random name, sets "testProperty" to
     * a random value, saves the session and resolves the node to a Resource.
     */
    @Before
    public void setUp() throws Exception {
        value = RandomStringUtils.randomAlphanumeric(10);
        resolver = rrFactory.getAdministrativeResourceResolver(null);
        Session session = resolver.adaptTo(Session.class);
        Node rootNode = session.getRootNode();
        createdNode = rootNode.addNode("test_" + RandomStringUtils.randomAlphanumeric(10));
        createdNode.setProperty("testProperty", value);
        session.save();
        resource = resolver.getResource(createdNode.getPath());
    }

    /** Removes the test node and closes the resolver. */
    @After
    public void tearDown() throws Exception {
        if (createdNode != null) {
            createdNode.remove();
            // NOTE(review): the removal is never persisted via session.save();
            // presumably the test repository is discarded between runs - confirm.
        }
        if (resolver != null) {
            resolver.close();
        }
    }

    /** A field-injected model adapts from the resource with all injections set. */
    @Test
    public void testCreateModel() {
        FieldInjectionTestModel model = modelFactory.createModel(resource, FieldInjectionTestModel.class);

        assertNotNull("Model is null", model);
        assertEquals("Test Property is not set correctly", value, model.getTestProperty());
        assertNotNull("Filters is null", model.getFilters());
        assertSame("Adaptable is not injected", resource, model.getResource());
    }

    /** A class that is deliberately NOT annotated as a Sling Model. */
    private static final class DummyClass {
    }

    /** isModelClass() recognizes model classes and rejects non-models. */
    @Test
    public void testIsModelClass() {
        assertTrue("Model is not detected as such", modelFactory.isModelClass(ConstructorInjectionTestModel.class));
        assertFalse("Dummy class incorrectly detected as model class", modelFactory.isModelClass(DummyClass.class));
        assertFalse("Model with invalid adaptable incorrectly detected as model class" , modelFactory.isModelClass(InvalidImplementsInterfacePropertyModel.class));
        assertTrue("Model is not detected as such", modelFactory.isModelClass(SampleServiceInterface.class)); // being provided by two adapters
    }

    /** canCreateFromAdaptable() honors both the model class and the adaptable type. */
    @Test
    public void testCanCreateFromAdaptable() {
        assertTrue("Model is not detected as such", modelFactory.canCreateFromAdaptable(resource, ConstructorInjectionTestModel.class));
        assertTrue("Model is not detected as such", modelFactory.canCreateFromAdaptable(resource, SampleServiceInterface.class));
        // "" is an invalid adaptable for this model; the old message claimed the
        // opposite of what the assertion checks.
        assertFalse("Model creation should not be possible from an invalid adaptable",
                modelFactory.canCreateFromAdaptable("", ConstructorInjectionTestModel.class)); // invalid adaptable
    }

    /** Asking about a non-model class must raise ModelClassException. */
    // (Method name retains its historical typo so test filters keep matching.)
    @Test(expected=ModelClassException.class)
    public void testCanCreateFromAdaptableWithModelExceptin() {
        modelFactory.canCreateFromAdaptable(resource, DummyClass.class); // no model class
    }
}
| apache-2.0 |
chanakaudaya/developer-studio | bps/org.eclipse.bpel.ui/src/org/eclipse/bpel/ui/adapters/MessageExchangesAdapter.java | 2792 | /**
* <copyright>
* Copyright (c) 2008 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* </copyright>
*/
package org.eclipse.bpel.ui.adapters;
import java.util.List;
import org.eclipse.bpel.model.BPELPackage;
import org.eclipse.bpel.model.MessageExchanges;
import org.eclipse.bpel.ui.BPELUIPlugin;
import org.eclipse.bpel.ui.IBPELUIConstants;
import org.eclipse.bpel.ui.adapters.delegates.ReferenceContainer;
import org.eclipse.bpel.ui.editparts.MessageExchangesEditPart;
import org.eclipse.bpel.ui.editparts.OutlineTreeEditPart;
import org.eclipse.bpel.ui.properties.PropertiesLabelProvider;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPartFactory;
import org.eclipse.swt.graphics.Image;
import org.eclipse.bpel.ui.Messages;
/**
 * Adapter for {@link MessageExchanges} model objects: provides container
 * behavior, graph/tray/outline edit-part factories and label/icon
 * information for the BPEL editor.
 *
 * @author Miriam Grundig (MGrundig@de.ibm.com)
 */
public class MessageExchangesAdapter extends ContainerAdapter implements EditPartFactory,
    ILabeledElement, IOutlineEditPartFactory, ITrayEditPartFactory
{
    public MessageExchangesAdapter() {
        super();
    }

    /* IContainer delegate */
    // Child handling is delegated to the MessageExchanges_Children EMF reference.
    public IContainer createContainerDelegate() {
        return new ReferenceContainer(BPELPackage.eINSTANCE.getMessageExchanges_Children());
    }

    /* EditPartFactory */
    // Builds the graphical edit part; labels come from the shared
    // PropertiesLabelProvider singleton.
    public EditPart createEditPart(EditPart context, Object model) {
        MessageExchangesEditPart result = new MessageExchangesEditPart();
        result.setLabelProvider(PropertiesLabelProvider.getInstance());
        result.setModel(model);
        return result;
    }

    /* ITrayEditPartFactory */
    // The tray view reuses the same edit part as the graphical view.
    public EditPart createTrayEditPart(EditPart context, Object model) {
        return createEditPart(context, model);
    }

    /* ILabeledElement */
    public Image getSmallImage(Object object) {
        return BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_MESSAGEEXCHANGE_16);
    }

    public Image getLargeImage(Object object) {
        return BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_MESSAGEEXCHANGE_32);
    }

    public String getTypeLabel(Object object) {
        return Messages.MessageExchangesAdapter_TypeLabel;
    }

    public String getLabel(Object object) {
        return Messages.MessageExchangesAdapter_Label;
    }

    /* IOutlineEditPartFactory */
    // Outline tree part whose children are the model's child message
    // exchanges. (Raw List matches the pre-generics Eclipse GEF API.)
    public EditPart createOutlineEditPart(EditPart context, final Object model) {
        EditPart result = new OutlineTreeEditPart(){
            protected List getModelChildren() {
                MessageExchanges messageExchanges = (MessageExchanges) model;
                List list = messageExchanges.getChildren();
                return list;
            }
        };
        result.setModel(model);
        return result;
    }
}
| apache-2.0 |
mgillian/uPortal | uportal-war/src/main/java/org/jasig/portal/layout/dlm/DeleteManager.java | 8567 | /**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.layout.dlm;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.PortalException;
import org.jasig.portal.layout.IUserLayoutStore;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.spring.locator.UserLayoutStoreLocator;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
 * Looks for, applies against the ilf, and updates accordingly the delete
 * set within a plf.
 *
 * @version $Revision$ $Date$
 * @since uPortal 2.5
 */
public class DeleteManager
{
    private static final Log LOG = LogFactory.getLog(DeleteManager.class);

    // Lazily initialized layout-store singleton; see getDLS().
    private static IUserLayoutStore dls = null;

    /**
     * Hands back the single instance of RDBMDistributedLayoutStore. There is
     * already a method
     * for aquiring a single instance of the configured layout store so we
     * delegate over there so that all references refer to the same instance.
     * This method is solely for convenience so that we don't have to keep
     * calling UserLayoutStoreFactory and casting the resulting class.
     *
     * <p>NOTE(review): lazy init is not synchronized; presumably worst case is
     * a redundant locator lookup - confirm the locator returns a singleton.
     */
    private static IUserLayoutStore getDLS()
    {
        if ( dls == null )
        {
            dls = UserLayoutStoreLocator.getUserLayoutStore();
        }
        return dls;
    }

    /**
       Get the delete set if any from the plf and process each delete command
       removing any that fail from the delete set so that the delete set is
       self cleaning.
    */
    static void applyAndUpdateDeleteSet( Document plf,
                                         Document ilf,
                                         IntegrationResult result )
    {
        Element dSet = null;
        try
        {
            dSet = getDeleteSet( plf, null, false );
        }
        catch( Exception e )
        {
            LOG.error("Exception occurred while getting user's DLM delete-set.",
                    e);
        }

        if ( dSet == null )
            return;

        NodeList deletes = dSet.getChildNodes();

        // Iterate backwards so failed directives can be removed in place
        // without disturbing the indices still to be visited.
        for( int i=deletes.getLength()-1; i>=0; i-- )
        {
            if ( !applyDelete( (Element) deletes.item(i), ilf ) )
            {
                // Directive failed (disallowed or target gone): prune it so
                // the delete set cleans itself.
                dSet.removeChild( deletes.item(i) );
                result.setChangedPLF(true);
            }
            else
            {
                result.setChangedILF(true);
            }
        }

        // Drop the delete set entirely once it is empty.
        if ( dSet.getChildNodes().getLength() == 0 )
        {
            plf.getDocumentElement().removeChild( dSet );
            result.setChangedPLF(true);
        }
    }

    /**
       Attempt to apply a single delete command and return true if it succeeds
       or false otherwise. If the delete is disallowed or the target element
       no longer exists in the document the delete command fails and returns
       false.
    */
    private static boolean applyDelete( Element delete, Document ilf )
    {
        String nodeID = delete.getAttribute( Constants.ATT_NAME );

        Element e = ilf.getElementById( nodeID );
        if ( e == null )
            return false;

        // getAttribute returns "" when absent, but constant-first equals is
        // null-safe regardless.
        String deleteAllowed = e.getAttribute( Constants.ATT_DELETE_ALLOWED );
        if ( "false".equals( deleteAllowed ) )
            return false;

        Element p = (Element) e.getParentNode();
        // Unregister the ID before removal so getElementById stops finding it.
        e.setIdAttribute(Constants.ATT_ID, false);
        p.removeChild( e );
        return true;
    }

    /**
       Get the delete set if any stored in the root of the document or create
       it is passed in create flag is true.
    */
    private static Element getDeleteSet( Document plf,
                                         IPerson person,
                                         boolean create )
        throws PortalException
    {
        Node root = plf.getDocumentElement();
        Node child = root.getFirstChild();

        while( child != null )
        {
            if ( child.getNodeName().equals( Constants.ELM_DELETE_SET ) )
                return (Element) child;
            child = child.getNextSibling();
        }

        if ( !create )
            return null;

        String id = null;

        try
        {
            id = getDLS().getNextStructDirectiveId( person );
        }
        catch (Exception e)
        {
            throw new PortalException( "Exception encountered while " +
                                       "generating new delete set node " +
                                       "Id for userId=" + person.getID(), e );
        }
        Element delSet = plf.createElement( Constants.ELM_DELETE_SET );
        delSet.setAttribute( Constants.ATT_TYPE,
                             Constants.ELM_DELETE_SET );
        delSet.setAttribute( Constants.ATT_ID, id );
        root.appendChild( delSet );
        return delSet;
    }

    /**
       Create and append a delete directive to delete the node identified by
       the passed in element id. If this node contains any incorporated
       elements then they must also have a delete directive added in here to
       prevent incorporated channels originating in another column from
       reappearing in that column because the position set entry that pulled
       them into this column was now removed. (ie: the user moved an inc'd
       channel to this column and then deleted the column means that the inc'd
       channel should be deleted also.) This was designed to add a delete
       directive for each nested element having an ID so as to work for the
       future case of a tree view.
    */
    public static void addDeleteDirective( Element compViewNode,
                                           String elementID,
                                           IPerson person )
        throws PortalException
    {
        Document plf = (Document) person.getAttribute( Constants.PLF );
        Element delSet = getDeleteSet( plf, person, true );
        addDeleteDirective( compViewNode, elementID, person, plf, delSet );
    }

    /**
       This method does the actual work of adding a delete directive and then
       recursively calling itself for any incoporated children that need to be
       deleted as well.
    */
    private static void addDeleteDirective( Element compViewNode,
                                            String elementID,
                                            IPerson person,
                                            Document plf,
                                            Element delSet )
        throws PortalException
    {
        String id = null;

        try
        {
            id = getDLS().getNextStructDirectiveId( person );
        }
        catch (Exception e)
        {
            throw new PortalException( "Exception encountered while " +
                                       "generating new delete node " +
                                       "Id for userId=" + person.getID(), e );
        }
        Element delete = plf.createElement( Constants.ELM_DELETE );
        delete.setAttribute( Constants.ATT_TYPE, Constants.ELM_DELETE );
        delete.setAttribute( Constants.ATT_ID, id );
        delete.setAttributeNS( Constants.NS_URI,
                               Constants.ATT_NAME, elementID );
        delSet.appendChild( delete );

        // now pass through children and add delete directives for those with
        // IDs indicating that they were incorporated. Skip non-element nodes
        // (e.g. whitespace text nodes), which the previous blind (Element)
        // casts would have turned into a ClassCastException.
        Node child = compViewNode.getFirstChild();

        while( child != null )
        {
            if ( child instanceof Element )
            {
                String childID = ((Element) child).getAttribute( "ID" );
                if ( childID.startsWith( Constants.FRAGMENT_ID_USER_PREFIX ) )
                    addDeleteDirective( (Element) child, childID, person, plf,
                                        delSet );
            }
            child = child.getNextSibling();
        }
    }
}
| apache-2.0 |
remiemalik/concourse | concourse-server/src/main/java/com/cinchapi/concourse/server/model/Position.java | 5815 | /*
* Copyright (c) 2013-2016 Cinchapi Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cinchapi.concourse.server.model;
import java.nio.ByteBuffer;
import java.util.Objects;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import com.cinchapi.concourse.server.io.Byteable;
import com.cinchapi.concourse.util.ByteBuffers;
import com.google.common.base.Preconditions;
/**
 * A Position is an abstraction for the association between a
 * relative location and a {@link PrimaryKey} that is used in a
 * {@link SearchRecord} to specify the location of a term in a record.
 *
 * @author Jeff Nelson
 */
@Immutable
public final class Position implements Byteable, Comparable<Position> {

    /**
     * Return the Position encoded in {@code bytes} so long as those bytes
     * adhere to the format specified by the {@link #getBytes()} method. This
     * method assumes that all the bytes in the {@code bytes} belong to the
     * Position. In general, it is necessary to get the appropriate Position
     * slice from the parent ByteBuffer using
     * {@link ByteBuffers#slice(ByteBuffer, int, int)}.
     *
     * @param bytes
     * @return the Position
     */
    public static Position fromByteBuffer(ByteBuffer bytes) {
        // Fixed layout: PrimaryKey.SIZE bytes of key, then a 4-byte int index
        // (mirrors the write order in copyTo()).
        PrimaryKey primaryKey = PrimaryKey.fromByteBuffer(ByteBuffers.get(
                bytes, PrimaryKey.SIZE));
        int index = bytes.getInt();
        return new Position(primaryKey, index);
    }

    /**
     * Return a Position that is backed by {@code primaryKey} and {@code index}.
     *
     * @param primaryKey
     * @param index
     * @return the Position
     */
    public static Position wrap(PrimaryKey primaryKey, int index) {
        return new Position(primaryKey, index);
    }

    /**
     * The total number of bytes used to store each Position
     */
    public static final int SIZE = PrimaryKey.SIZE + 4; // index

    /**
     * A cached copy of the binary representation that is returned from
     * {@link #getBytes()}.
     */
    // transient: the cache is derived state and must not be serialized.
    private transient ByteBuffer bytes;

    /**
     * The index that this Position represents.
     */
    private final int index;

    /**
     * The PrimaryKey of the record that this Position represents.
     */
    private final PrimaryKey primaryKey;

    /**
     * Construct a new instance.
     *
     * @param primaryKey
     * @param index
     */
    private Position(PrimaryKey primaryKey, int index) {
        this(primaryKey, index, null);
    }

    /**
     * Construct a new instance.
     *
     * @param primaryKey
     * @param index
     * @param bytes;
     */
    private Position(PrimaryKey primaryKey, int index,
            @Nullable ByteBuffer bytes) {
        Preconditions
                .checkArgument(index >= 0, "Cannot have an negative index");
        this.primaryKey = primaryKey;
        this.index = index;
        this.bytes = bytes;
    }

    @Override
    public int compareTo(Position other) {
        // Order by record (PrimaryKey) first, then by index within the record.
        int comparison;
        return (comparison = primaryKey.compareTo(other.primaryKey)) != 0 ? comparison
                : Integer.compare(index, other.index);
    }

    @Override
    public boolean equals(Object obj) {
        if(obj instanceof Position) {
            Position other = (Position) obj;
            return primaryKey.equals(other.primaryKey) && index == other.index;
        }
        return false;
    }

    /**
     * Return a byte buffer that represents this Value with the following order:
     * <ol>
     * <li><strong>primaryKey</strong> - position 0</li>
     * <li><strong>index</strong> - position 8</li>
     * </ol>
     *
     * @return the ByteBuffer representation
     */
    @Override
    public ByteBuffer getBytes() {
        // Lazily build and cache the representation; a read-only view is
        // handed out so the cache cannot be mutated by callers.
        // NOTE(review): the lazy init is unsynchronized - presumably a benign
        // race (worst case: redundant allocation); confirm usage is
        // single-threaded or tolerant of that.
        if(bytes == null) {
            bytes = ByteBuffer.allocate(size());
            copyTo(bytes);
            bytes.rewind();
        }
        return ByteBuffers.asReadOnlyBuffer(bytes);
    }

    /**
     * Return the associated {@code index}.
     *
     * @return the index
     */
    public int getIndex() {
        return index;
    }

    /**
     * Return the associated {@code primaryKey}.
     *
     * @return the primaryKey
     */
    public PrimaryKey getPrimaryKey() {
        return primaryKey;
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): both derive from (primaryKey, index).
        return Objects.hash(primaryKey, index);
    }

    @Override
    public int size() {
        return SIZE;
    }

    @Override
    public String toString() {
        return "Position " + index + " in Record " + primaryKey;
    }

    @Override
    public void copyTo(ByteBuffer buffer) {
        // NOTE: Storing the index as an int instead of some size aware
        // variable length is probably overkill since most indexes will be
        // smaller than Byte.MAX_SIZE or Short.MAX_SIZE, but having variable
        // size indexes means that the size of the entire Position (as an
        // int) must be stored before the Position for proper
        // deserialization. By storing the index as an int, the size of each
        // Position is constant so we won't need to store the overall size
        // prior to the Position to deserialize it, which is actually more
        // space efficient.
        primaryKey.copyTo(buffer);
        buffer.putInt(index);
    }
}
| apache-2.0 |
andyj24/googleads-java-lib | modules/ads_lib/src/main/java/com/google/api/ads/common/lib/soap/SoapCallReturn.java | 3736 | // Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.common.lib.soap;
/**
* Used to package a SOAP call's return. Contains the return value, the request
* and response info, and the originating {@link SoapCall}.
*
* @author Adam Rogal
*/
public class SoapCallReturn {

  /** Result produced by the call; {@code null} when the call failed. */
  private Object returnValue;
  /** Request details; initialized to an empty instance by the constructor. */
  private RequestInfo requestInfo;
  /** Response details; initialized to an empty instance by the constructor. */
  private ResponseInfo responseInfo;
  /** Failure cause; {@code null} when the call succeeded. */
  private Throwable exception;

  /** Creates a return object holding empty request and response info. */
  public SoapCallReturn() {
    this.requestInfo = new RequestInfo();
    this.responseInfo = new ResponseInfo();
  }

  /**
   * Gets the return value from the SOAP call that was made.
   *
   * @return the return value from the SOAP call that was made or {@code null}
   *         if there was an exception
   */
  public Object getReturnValue() {
    return returnValue;
  }

  /** Gets the request info from the SOAP call that was made. */
  public RequestInfo getRequestInfo() {
    return requestInfo;
  }

  /** Gets the response info from the SOAP call that was made. */
  public ResponseInfo getResponseInfo() {
    return responseInfo;
  }

  /**
   * Gets the exception from the SOAP call that was made if one occurred.
   *
   * @return the exception from the SOAP call that was made or {@code null}
   *         if there was no exception
   */
  public Throwable getException() {
    return exception;
  }

  /**
   * Fluent builder for {@link SoapCallReturn} objects. Note that the builder
   * populates a single underlying instance, which {@link #build()} returns
   * directly.
   */
  public static class Builder {

    private final SoapCallReturn soapCallReturn = new SoapCallReturn();

    /** Creates a builder wrapping a fresh {@link SoapCallReturn}. */
    public Builder() {
    }

    /**
     * Sets the return value on the SoapCallReturn under construction.
     *
     * @param returnValue the return value to record
     * @return this builder
     */
    public Builder withReturnValue(Object returnValue) {
      this.soapCallReturn.returnValue = returnValue;
      return this;
    }

    /**
     * Sets the response info on the SoapCallReturn under construction.
     *
     * @param responseInfo the response info to record
     * @return this builder
     */
    public Builder withResponseInfo(ResponseInfo responseInfo) {
      this.soapCallReturn.responseInfo = responseInfo;
      return this;
    }

    /**
     * Sets the request info on the SoapCallReturn under construction.
     *
     * @param requestInfo the request info to record
     * @return this builder
     */
    public Builder withRequestInfo(RequestInfo requestInfo) {
      this.soapCallReturn.requestInfo = requestInfo;
      return this;
    }

    /**
     * Sets the exception on the SoapCallReturn under construction.
     *
     * @param exception the exception to record
     * @return this builder
     */
    public Builder withException(Throwable exception) {
      this.soapCallReturn.exception = exception;
      return this;
    }

    /**
     * Returns the SoapCallReturn this Builder has been constructing.
     *
     * @return the built SoapCallReturn object
     */
    public SoapCallReturn build() {
      return soapCallReturn;
    }
  }
}
| apache-2.0 |
prestoncarman/vxquery | vxquery-core/src/main/java/org/apache/vxquery/compiler/rewriter/rules/IntroduceTwoStepAggregateRule.java | 5872 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.vxquery.compiler.rewriter.rules;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.vxquery.functions.BuiltinFunctions;
import org.apache.vxquery.functions.BuiltinOperators;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
/**
* The rule searches for aggregate operators with an aggregate function
* expression that has not been initialized for two step aggregation.
*
* <pre>
* Before
*
* plan__parent
* AGGREGATE( $v : af1( $v1 ) )
* plan__child
*
* Where af1 is a VXquery aggregate function expression configured for single
* step processing and $v1 is defined in plan__child.
*
* After
*
* if (af1 == count) aggregate operating settings:
* Step 1: count
* Step 2: sum
* if (af1 == avg) aggregate operating settings:
* Step 1: avg-local
* Step 2: avg-global
* if (af1 in (max, min, sum)) aggregate operating settings:
* Step 1: af1
* Step 2: af1
* </pre>
*
* @author prestonc
*/
public class IntroduceTwoStepAggregateRule implements IAlgebraicRewriteRule {

    /**
     * Maps a one-step aggregate function identifier to its (step-one,
     * step-two) replacement pair. Effectively constant after construction;
     * kept as an instance field to preserve the original initialization
     * semantics.
     */
    final Map<FunctionIdentifier, Pair<IFunctionInfo, IFunctionInfo>> AGGREGATE_MAP = new HashMap<FunctionIdentifier, Pair<IFunctionInfo, IFunctionInfo>>();

    public IntroduceTwoStepAggregateRule() {
        AGGREGATE_MAP.put(BuiltinFunctions.FN_AVG_1.getFunctionIdentifier(),
                new Pair<IFunctionInfo, IFunctionInfo>(BuiltinOperators.AVG_LOCAL, BuiltinOperators.AVG_GLOBAL));
        AGGREGATE_MAP.put(BuiltinFunctions.FN_COUNT_1.getFunctionIdentifier(),
                new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_COUNT_1, BuiltinFunctions.FN_SUM_1));
        AGGREGATE_MAP.put(BuiltinFunctions.FN_MAX_1.getFunctionIdentifier(),
                new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_MAX_1, BuiltinFunctions.FN_MAX_1));
        AGGREGATE_MAP.put(BuiltinFunctions.FN_MIN_1.getFunctionIdentifier(),
                new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_MIN_1, BuiltinFunctions.FN_MIN_1));
        AGGREGATE_MAP.put(BuiltinFunctions.FN_SUM_1.getFunctionIdentifier(),
                new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_SUM_1, BuiltinFunctions.FN_SUM_1));
    }

    /**
     * Rewrites a single-step aggregate function call into its two-step form.
     *
     * @return {@code true} if the operator was rewritten, {@code false} when
     *         it is not an aggregate, has no expressions, is not a known
     *         aggregate function, or is already two-step
     */
    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        // Check if aggregate function.
        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
        if (op.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
            return false;
        }
        AggregateOperator aggregate = (AggregateOperator) op;
        if (aggregate.getExpressions().isEmpty()) {
            return false;
        }
        // Only the first aggregate expression is inspected, matching the
        // original behavior of this rule.
        Mutable<ILogicalExpression> mutableLogicalExpression = aggregate.getExpressions().get(0);
        ILogicalExpression logicalExpression = mutableLogicalExpression.getValue();
        if (logicalExpression.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
            return false;
        }
        AbstractFunctionCallExpression functionCall = (AbstractFunctionCallExpression) logicalExpression;
        // Single map lookup instead of containsKey + repeated get calls.
        Pair<IFunctionInfo, IFunctionInfo> twoStepPair = AGGREGATE_MAP.get(functionCall.getFunctionIdentifier());
        if (twoStepPair != null) {
            AggregateFunctionCallExpression aggregateFunctionCall = (AggregateFunctionCallExpression) functionCall;
            if (aggregateFunctionCall.isTwoStep()) {
                return false;
            }
            aggregateFunctionCall.setTwoStep(true);
            aggregateFunctionCall.setStepOneAggregate(twoStepPair.first);
            aggregateFunctionCall.setStepTwoAggregate(twoStepPair.second);
            return true;
        }
        return false;
    }

    /** This rule does no post-order rewriting. */
    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
        return false;
    }
}
| apache-2.0 |
ThiagoGarciaAlves/intellij-community | java/idea-ui/src/com/intellij/openapi/roots/ui/configuration/JdkComboBox.java | 15058 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.ui.configuration;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.projectRoots.SdkTypeId;
import com.intellij.openapi.roots.ui.OrderEntryAppearanceService;
import com.intellij.openapi.roots.ui.configuration.projectRoot.JdkListConfigurable;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel;
import com.intellij.openapi.ui.ComboBoxWithWidePopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.ScreenUtil;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.Arrays;
import java.util.Collection;
/**
* @author Eugene Zhuravlev
* @since May 18, 2005
*/
public class JdkComboBox extends ComboBoxWithWidePopup<JdkComboBox.JdkComboBoxItem> {

  /** Placeholder icon used only to keep row height constant in the renderer. */
  private static final Icon EMPTY_ICON = JBUI.scale(EmptyIcon.create(1, 16));

  @Nullable
  private final Condition<Sdk> myFilter;
  @Nullable
  private final Condition<SdkTypeId> myCreationFilter;
  private JButton mySetUpButton;
  private final Condition<SdkTypeId> mySdkTypeFilter;

  public JdkComboBox(@NotNull final ProjectSdksModel jdkModel) {
    this(jdkModel, null);
  }

  public JdkComboBox(@NotNull final ProjectSdksModel jdkModel,
                     @Nullable Condition<SdkTypeId> filter) {
    this(jdkModel, filter, getSdkFilter(filter), filter, false);
  }

  /**
   * @param sdkTypeFilter    restricts which SDK types appear, or null for all
   * @param filter           restricts which SDK instances appear, or null for all
   * @param creationFilter   restricts which SDK types the "set up" actions offer
   * @param addSuggestedItems if true, auto-detected SDK homes are appended
   */
  public JdkComboBox(@NotNull final ProjectSdksModel jdkModel,
                     @Nullable Condition<SdkTypeId> sdkTypeFilter,
                     @Nullable Condition<Sdk> filter,
                     @Nullable Condition<SdkTypeId> creationFilter,
                     boolean addSuggestedItems) {
    super(new JdkComboBoxModel(jdkModel, sdkTypeFilter, filter, addSuggestedItems));
    myFilter = filter;
    mySdkTypeFilter = sdkTypeFilter;
    myCreationFilter = creationFilter;
    setRenderer(new ColoredListCellRenderer<JdkComboBoxItem>() {
      @Override
      protected void customizeCellRenderer(@NotNull JList<? extends JdkComboBoxItem> list,
                                           JdkComboBoxItem value,
                                           int index,
                                           boolean selected,
                                           boolean hasFocus) {
        if (JdkComboBox.this.isEnabled()) {
          setIcon(EMPTY_ICON); // to fix vertical size
          if (value instanceof InvalidJdkComboBoxItem) {
            final String str = value.toString();
            append(str, SimpleTextAttributes.ERROR_ATTRIBUTES);
          }
          else if (value instanceof ProjectJdkComboBoxItem) {
            final Sdk jdk = jdkModel.getProjectSdk();
            if (jdk != null) {
              setIcon(((SdkType)jdk.getSdkType()).getIcon());
              append(ProjectBundle.message("project.roots.project.jdk.inherited"), SimpleTextAttributes.REGULAR_ATTRIBUTES);
              append(" (" + jdk.getName() + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
            }
            else {
              // "use project SDK" is selected but no project SDK is configured
              final String str = value.toString();
              append(str, SimpleTextAttributes.ERROR_ATTRIBUTES);
            }
          }
          else if (value instanceof SuggestedJdkItem) {
            SdkType type = ((SuggestedJdkItem)value).getSdkType();
            String home = ((SuggestedJdkItem)value).getPath();
            setIcon(type.getIconForAddAction());
            String version = type.getVersionString(home);
            append(version == null ? type.getPresentableName() : version);
            append(" (" + home + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
          }
          else if (value != null) {
            OrderEntryAppearanceService.getInstance().forJdk(value.getJdk(), false, selected, true).customize(this);
          }
          else {
            // null item renders the same way as the explicit "none" item
            customizeCellRenderer(list, new NoneJdkComboBoxItem(), index, selected, hasFocus);
          }
        }
      }
    });
  }

  /** Caps the preferred width to a quarter of the screen so long SDK paths don't blow up dialogs. */
  @Override
  public Dimension getPreferredSize() {
    final Rectangle rec = ScreenUtil.getScreenRectangle(0, 0);
    final Dimension size = super.getPreferredSize();
    final int maxWidth = rec.width / 4;
    if (size.width > maxWidth) {
      size.width = maxWidth;
    }
    return size;
  }

  @Override
  public Dimension getMinimumSize() {
    final Dimension minSize = super.getMinimumSize();
    final Dimension prefSize = getPreferredSize();
    if (minSize.width > prefSize.width) {
      minSize.width = prefSize.width;
    }
    return minSize;
  }

  public void setSetupButton(final JButton setUpButton,
                             @Nullable final Project project,
                             final ProjectSdksModel jdksModel,
                             final JdkComboBoxItem firstItem,
                             @Nullable final Condition<Sdk> additionalSetup,
                             final boolean moduleJdkSetup) {
    setSetupButton(setUpButton, project, jdksModel, firstItem, additionalSetup,
                   ProjectBundle.message("project.roots.set.up.jdk.title", moduleJdkSetup ? 1 : 2));
  }

  /**
   * Wires the given button to show the "add SDK" action popup; a newly created
   * SDK is registered, the model is reloaded and the new SDK gets selected.
   */
  public void setSetupButton(final JButton setUpButton,
                             @Nullable final Project project,
                             final ProjectSdksModel jdksModel,
                             final JdkComboBoxItem firstItem,
                             @Nullable final Condition<Sdk> additionalSetup,
                             final String actionGroupTitle) {
    mySetUpButton = setUpButton;
    mySetUpButton.addActionListener(e -> {
      DefaultActionGroup group = new DefaultActionGroup();
      jdksModel.createAddActions(group, this, getSelectedJdk(), jdk -> {
        if (project != null) {
          final JdkListConfigurable configurable = JdkListConfigurable.getInstance(project);
          configurable.addJdkNode(jdk, false);
        }
        reloadModel(new ActualJdkComboBoxItem(jdk), project);
        setSelectedJdk(jdk); //restore selection
        if (additionalSetup != null) {
          if (additionalSetup.value(jdk)) { //leave old selection
            setSelectedJdk(firstItem.getJdk());
          }
        }
      }, myCreationFilter);
      final DataContext dataContext = DataManager.getInstance().getDataContext(this);
      if (group.getChildrenCount() > 1) {
        JBPopupFactory.getInstance()
          .createActionGroupPopup(actionGroupTitle, group, dataContext, JBPopupFactory.ActionSelectionAid.MNEMONICS, false)
          .showUnderneathOf(setUpButton);
      }
      else {
        // single action: run it directly instead of showing a one-item popup
        final AnActionEvent event =
          new AnActionEvent(null, dataContext, ActionPlaces.UNKNOWN, new Presentation(""), ActionManager.getInstance(), 0);
        group.getChildren(event)[0].actionPerformed(event);
      }
    });
  }

  /** Wires the edit button to open the selected SDK in the project structure dialog. */
  public void setEditButton(final JButton editButton, final Project project, final Computable<Sdk> retrieveJDK){
    editButton.addActionListener(e -> {
      final Sdk projectJdk = retrieveJDK.compute();
      if (projectJdk != null) {
        ProjectStructureConfigurable.getInstance(project).select(projectJdk, true);
      }
    });
    addActionListener(e -> {
      final JdkComboBoxItem selectedItem = getSelectedItem();
      if (selectedItem instanceof ProjectJdkComboBoxItem) {
        editButton.setEnabled(ProjectStructureConfigurable.getInstance(project).getProjectJdksModel().getProjectSdk() != null);
      }
      else {
        editButton.setEnabled(!(selectedItem instanceof InvalidJdkComboBoxItem) && selectedItem != null && selectedItem.getJdk() != null);
      }
    });
  }

  public JButton getSetUpButton() {
    return mySetUpButton;
  }

  @Override
  public JdkComboBoxItem getSelectedItem() {
    return (JdkComboBoxItem)super.getSelectedItem();
  }

  /** @return the selected SDK, or null when "none"/"project SDK"/invalid is selected */
  @Nullable
  public Sdk getSelectedJdk() {
    final JdkComboBoxItem selectedItem = getSelectedItem();
    return selectedItem != null? selectedItem.getJdk() : null;
  }

  public void setSelectedJdk(Sdk jdk) {
    final int index = indexOf(jdk);
    if (index >= 0) {
      setSelectedIndex(index);
    }
  }

  /** Replaces any previous invalid entry with one for the given SDK name and selects it. */
  public void setInvalidJdk(String name) {
    removeInvalidElement();
    addItem(new InvalidJdkComboBoxItem(name));
    setSelectedIndex(getModel().getSize() - 1);
  }

  /**
   * @return index of the item matching the SDK (by name), the "none"/"project"
   *         item when {@code jdk} is null, or -1 if absent
   */
  private int indexOf(Sdk jdk) {
    final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
    final int count = model.getSize();
    for (int idx = 0; idx < count; idx++) {
      final JdkComboBoxItem elementAt = model.getElementAt(idx);
      if (jdk == null) {
        if (elementAt instanceof NoneJdkComboBoxItem || elementAt instanceof ProjectJdkComboBoxItem) {
          return idx;
        }
      }
      else {
        Sdk elementAtJdk = elementAt.getJdk();
        if (elementAtJdk != null && jdk.getName().equals(elementAtJdk.getName())) {
          return idx;
        }
      }
    }
    return -1;
  }

  /** Removes the first invalid entry from the model, if any. */
  private void removeInvalidElement() {
    final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
    final int count = model.getSize();
    for (int idx = 0; idx < count; idx++) {
      final JdkComboBoxItem elementAt = model.getElementAt(idx);
      if (elementAt instanceof InvalidJdkComboBoxItem) {
        removeItemAt(idx);
        break;
      }
    }
  }

  public void reloadModel(JdkComboBoxItem firstItem, @Nullable Project project) {
    final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
    if (project == null) {
      model.addElement(firstItem);
      return;
    }
    model.reload(firstItem, ProjectStructureConfigurable.getInstance(project).getProjectJdksModel(), mySdkTypeFilter, myFilter, false);
  }

  private static class JdkComboBoxModel extends DefaultComboBoxModel<JdkComboBoxItem> {
    JdkComboBoxModel(@NotNull final ProjectSdksModel jdksModel, @Nullable Condition<SdkTypeId> sdkTypeFilter,
                     @Nullable Condition<Sdk> sdkFilter, boolean addSuggested) {
      reload(null, jdksModel, sdkTypeFilter, sdkFilter, addSuggested);
    }

    void reload(@Nullable final JdkComboBoxItem firstItem,
                @NotNull final ProjectSdksModel jdksModel,
                @Nullable Condition<SdkTypeId> sdkTypeFilter,
                @Nullable Condition<Sdk> sdkFilter,
                boolean addSuggested) {
      removeAllElements();
      if (firstItem != null) addElement(firstItem);
      Sdk[] jdks = sortSdks(jdksModel.getSdks());
      for (Sdk jdk : jdks) {
        if (sdkFilter == null || sdkFilter.value(jdk)) {
          addElement(new ActualJdkComboBoxItem(jdk));
        }
      }
      if (addSuggested) {
        addSuggestedItems(sdkTypeFilter, jdks);
      }
    }

    /** Sorts SDKs by type (alphabetically by presentable name), then by the type's own comparator. */
    @NotNull
    private static Sdk[] sortSdks(@NotNull final Sdk[] sdks) {
      Sdk[] clone = sdks.clone();
      Arrays.sort(clone, (sdk1, sdk2) -> {
        SdkType sdkType1 = (SdkType)sdk1.getSdkType();
        SdkType sdkType2 = (SdkType)sdk2.getSdkType();
        if (!sdkType1.getComparator().equals(sdkType2.getComparator())) return StringUtil.compare(sdkType1.getPresentableName(), sdkType2.getPresentableName(), true);
        return sdkType1.getComparator().compare(sdk1, sdk2);
      });
      return clone;
    }

    /**
     * Appends auto-detected SDK homes for types that pass the filter and have
     * no SDK configured yet.
     */
    void addSuggestedItems(@Nullable Condition<SdkTypeId> sdkTypeFilter, Sdk[] jdks) {
      SdkType[] types = SdkType.getAllTypes();
      for (SdkType type : types) {
        // NOTE(fix): parentheses added around the filter check. Previously
        // '&&' bound tighter than '||', so a null sdkTypeFilter suggested
        // homes even for types that already had a configured SDK.
        if ((sdkTypeFilter == null || sdkTypeFilter.value(type)) && ContainerUtil.find(jdks, sdk -> sdk.getSdkType() == type) == null) {
          Collection<String> paths = type.suggestHomePaths();
          for (String path : paths) {
            if (path != null && type.isValidSdkHome(path)) {
              addElement(new SuggestedJdkItem(type, path));
            }
          }
        }
      }
    }
  }

  public static Condition<Sdk> getSdkFilter(@Nullable final Condition<SdkTypeId> filter) {
    return filter == null ? Conditions.alwaysTrue() : sdk -> filter.value(sdk.getSdkType());
  }

  /** Base type for all combo entries; subclasses override the SDK accessors where meaningful. */
  public abstract static class JdkComboBoxItem {
    @Nullable
    public Sdk getJdk() {
      return null;
    }

    @Nullable
    public String getSdkName() {
      return null;
    }
  }

  /** An entry backed by a real, configured SDK. */
  public static class ActualJdkComboBoxItem extends JdkComboBoxItem {
    private final Sdk myJdk;

    public ActualJdkComboBoxItem(@NotNull Sdk jdk) {
      myJdk = jdk;
    }

    @Override
    public String toString() {
      return myJdk.getName();
    }

    @Nullable
    @Override
    public Sdk getJdk() {
      return myJdk;
    }

    @Nullable
    @Override
    public String getSdkName() {
      return myJdk.getName();
    }
  }

  /** The "inherit project SDK" entry. */
  public static class ProjectJdkComboBoxItem extends JdkComboBoxItem {
    public String toString() {
      return ProjectBundle.message("jdk.combo.box.project.item");
    }
  }

  /** The explicit "no SDK" entry. */
  public static class NoneJdkComboBoxItem extends JdkComboBoxItem {
    public String toString() {
      return ProjectBundle.message("jdk.combo.box.none.item");
    }
  }

  /** An entry for an SDK name that no longer resolves to a configured SDK. */
  private static class InvalidJdkComboBoxItem extends JdkComboBoxItem {
    private final String mySdkName;

    InvalidJdkComboBoxItem(String name) {
      mySdkName = name;
    }

    @Override
    public String getSdkName() {
      return mySdkName;
    }

    public String toString() {
      return ProjectBundle.message("jdk.combo.box.invalid.item", mySdkName);
    }
  }

  /** An auto-detected SDK home that is not configured yet. */
  public static class SuggestedJdkItem extends JdkComboBoxItem {
    private final SdkType mySdkType;
    private final String myPath;

    SuggestedJdkItem(@NotNull SdkType sdkType, @NotNull String path) {
      mySdkType = sdkType;
      myPath = path;
    }

    @NotNull
    public SdkType getSdkType() {
      return mySdkType;
    }

    @NotNull
    public String getPath() {
      return myPath;
    }

    @Override
    public String toString() {
      return myPath;
    }
  }
}
| apache-2.0 |
tseen/Federated-HDFS | tseenliu/FedHDFS-hadoop-src/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java | 10600 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt;
import org.apache.hadoop.yarn.util.resource.Resources;
@Private
@Unstable
/**
 * A leaf queue in the fair-scheduler queue hierarchy. Holds the runnable and
 * non-runnable application schedulables, tracks demand, AM resource usage and
 * the preemption timestamps for this queue.
 */
public class FSLeafQueue extends FSQueue {
  private static final Log LOG = LogFactory.getLog(
      FSLeafQueue.class.getName());

  private final List<AppSchedulable> runnableAppScheds = // apps that are runnable
      new ArrayList<AppSchedulable>();
  private final List<AppSchedulable> nonRunnableAppScheds =
      new ArrayList<AppSchedulable>();

  // Aggregate resource demand of this queue's apps, capped at maxResources.
  private Resource demand = Resources.createResource(0);

  // Variables used for preemption
  private long lastTimeAtMinShare;
  private long lastTimeAtHalfFairShare;

  // Track the AM resource usage for this queue
  private Resource amResourceUsage;

  private final ActiveUsersManager activeUsersManager;

  public FSLeafQueue(String name, FairScheduler scheduler,
      FSParentQueue parent) {
    super(name, scheduler, parent);
    this.lastTimeAtMinShare = scheduler.getClock().getTime();
    this.lastTimeAtHalfFairShare = scheduler.getClock().getTime();
    activeUsersManager = new ActiveUsersManager(getMetrics());
    amResourceUsage = Resource.newInstance(0, 0);
  }

  /**
   * Wraps the app in an AppSchedulable and adds it to the runnable or
   * non-runnable list depending on {@code runnable}.
   */
  public void addApp(FSSchedulerApp app, boolean runnable) {
    AppSchedulable appSchedulable = new AppSchedulable(scheduler, app, this);
    app.setAppSchedulable(appSchedulable);
    if (runnable) {
      runnableAppScheds.add(appSchedulable);
    } else {
      nonRunnableAppScheds.add(appSchedulable);
    }
  }

  // for testing
  void addAppSchedulable(AppSchedulable appSched) {
    runnableAppScheds.add(appSched);
  }

  /**
   * Removes the given app from this queue.
   * @return whether or not the app was runnable
   */
  public boolean removeApp(FSSchedulerApp app) {
    if (runnableAppScheds.remove(app.getAppSchedulable())) {
      // Update AM resource usage
      if (app.isAmRunning() && app.getAMResource() != null) {
        Resources.subtractFrom(amResourceUsage, app.getAMResource());
      }
      return true;
    } else if (nonRunnableAppScheds.remove(app.getAppSchedulable())) {
      return false;
    } else {
      throw new IllegalStateException("Given app to remove " + app +
          " does not exist in queue " + this);
    }
  }

  public Collection<AppSchedulable> getRunnableAppSchedulables() {
    return runnableAppScheds;
  }

  public List<AppSchedulable> getNonRunnableAppSchedulables() {
    return nonRunnableAppScheds;
  }

  /** Collects the attempt ids of all apps (runnable and not) into {@code apps}. */
  @Override
  public void collectSchedulerApplications(
      Collection<ApplicationAttemptId> apps) {
    for (AppSchedulable appSched : runnableAppScheds) {
      apps.add(appSched.getApp().getApplicationAttemptId());
    }
    for (AppSchedulable appSched : nonRunnableAppScheds) {
      apps.add(appSched.getApp().getApplicationAttemptId());
    }
  }

  @Override
  public void setPolicy(SchedulingPolicy policy)
      throws AllocationConfigurationException {
    // Leaf queues only accept policies applicable at leaf depth.
    if (!SchedulingPolicy.isApplicableTo(policy, SchedulingPolicy.DEPTH_LEAF)) {
      throwPolicyDoesnotApplyException(policy);
    }
    super.policy = policy;
  }

  @Override
  public void recomputeShares() {
    policy.computeShares(getRunnableAppSchedulables(), getFairShare());
  }

  @Override
  public Resource getDemand() {
    return demand;
  }

  /** Sums the resource usage of all apps, runnable and non-runnable. */
  @Override
  public Resource getResourceUsage() {
    Resource usage = Resources.createResource(0);
    for (AppSchedulable app : runnableAppScheds) {
      Resources.addTo(usage, app.getResourceUsage());
    }
    for (AppSchedulable app : nonRunnableAppScheds) {
      Resources.addTo(usage, app.getResourceUsage());
    }
    return usage;
  }

  public Resource getAmResourceUsage() {
    return amResourceUsage;
  }

  @Override
  public void updateDemand() {
    // Compute demand by iterating through apps in the queue
    // Limit demand to maxResources
    Resource maxRes = scheduler.getAllocationConfiguration()
        .getMaxResources(getName());
    demand = Resources.createResource(0);
    for (AppSchedulable sched : runnableAppScheds) {
      if (Resources.equals(demand, maxRes)) {
        break;
      }
      updateDemandForApp(sched, maxRes);
    }
    for (AppSchedulable sched : nonRunnableAppScheds) {
      if (Resources.equals(demand, maxRes)) {
        break;
      }
      updateDemandForApp(sched, maxRes);
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("The updated demand for " + getName() + " is " + demand
          + "; the max is " + maxRes);
    }
  }

  /** Adds one app's demand to the queue demand, clamping at {@code maxRes}. */
  private void updateDemandForApp(AppSchedulable sched, Resource maxRes) {
    sched.updateDemand();
    Resource toAdd = sched.getDemand();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Counting resource from " + sched.getName() + " " + toAdd
          + "; Total resource consumption for " + getName() + " now "
          + demand);
    }
    demand = Resources.add(demand, toAdd);
    demand = Resources.componentwiseMin(demand, maxRes);
  }

  /**
   * Offers the node to this queue's apps, sorted by the scheduling policy,
   * and returns the resources assigned by the first app that accepts
   * (or {@code Resources.none()} if nothing was assigned).
   */
  @Override
  public Resource assignContainer(FSSchedulerNode node) {
    Resource assigned = Resources.none();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Node " + node.getNodeName() + " offered to queue: " + getName());
    }

    if (!assignContainerPreCheck(node)) {
      return assigned;
    }

    Comparator<Schedulable> comparator = policy.getComparator();
    Collections.sort(runnableAppScheds, comparator);
    for (AppSchedulable sched : runnableAppScheds) {
      if (SchedulerAppUtils.isBlacklisted(sched.getApp(), node, LOG)) {
        continue;
      }

      assigned = sched.assignContainer(node);
      if (!assigned.equals(Resources.none())) {
        break;
      }
    }
    return assigned;
  }

  /**
   * Picks a container to preempt from the app that is most over its fair
   * share (per the policy comparator), or returns null if this queue is not
   * over its fair share.
   */
  @Override
  public RMContainer preemptContainer() {
    RMContainer toBePreempted = null;
    if (LOG.isDebugEnabled()) {
      LOG.debug("Queue " + getName() + " is going to preempt a container " +
          "from its applications.");
    }

    // If this queue is not over its fair share, reject
    if (!preemptContainerPreCheck()) {
      return toBePreempted;
    }

    // Choose the app that is most over fair share
    Comparator<Schedulable> comparator = policy.getComparator();
    AppSchedulable candidateSched = null;
    for (AppSchedulable sched : runnableAppScheds) {
      if (candidateSched == null ||
          comparator.compare(sched, candidateSched) > 0) {
        candidateSched = sched;
      }
    }

    // Preempt from the selected app
    if (candidateSched != null) {
      toBePreempted = candidateSched.preemptContainer();
    }
    return toBePreempted;
  }

  @Override
  public List<FSQueue> getChildQueues() {
    // Leaf queues never have children; return a fresh empty list.
    return new ArrayList<FSQueue>(1);
  }

  @Override
  public List<QueueUserACLInfo> getQueueUserAclInfo(UserGroupInformation user) {
    QueueUserACLInfo userAclInfo =
        recordFactory.newRecordInstance(QueueUserACLInfo.class);
    List<QueueACL> operations = new ArrayList<QueueACL>();
    for (QueueACL operation : QueueACL.values()) {
      if (hasAccess(operation, user)) {
        operations.add(operation);
      }
    }

    userAclInfo.setQueueName(getQueueName());
    userAclInfo.setUserAcls(operations);
    return Collections.singletonList(userAclInfo);
  }

  public long getLastTimeAtMinShare() {
    return lastTimeAtMinShare;
  }

  public void setLastTimeAtMinShare(long lastTimeAtMinShare) {
    this.lastTimeAtMinShare = lastTimeAtMinShare;
  }

  public long getLastTimeAtHalfFairShare() {
    return lastTimeAtHalfFairShare;
  }

  public void setLastTimeAtHalfFairShare(long lastTimeAtHalfFairShare) {
    this.lastTimeAtHalfFairShare = lastTimeAtHalfFairShare;
  }

  @Override
  public int getNumRunnableApps() {
    return runnableAppScheds.size();
  }

  @Override
  public ActiveUsersManager getActiveUsersManager() {
    return activeUsersManager;
  }

  /**
   * Check whether this queue can run this application master under the
   * maxAMShare limit
   *
   * @param amResource
   * @return true if this queue can run
   */
  public boolean canRunAppAM(Resource amResource) {
    float maxAMShare =
        scheduler.getAllocationConfiguration().getQueueMaxAMShare(getName());
    // -1 (within float tolerance) means the limit is disabled for this queue.
    if (Math.abs(maxAMShare - -1.0f) < 0.0001) {
      return true;
    }
    Resource maxAMResource = Resources.multiply(getFairShare(), maxAMShare);
    Resource ifRunAMResource = Resources.add(amResourceUsage, amResource);
    return !policy
        .checkIfAMResourceUsageOverLimit(ifRunAMResource, maxAMResource);
  }

  public void addAMResourceUsage(Resource amResource) {
    if (amResource != null) {
      Resources.addTo(amResourceUsage, amResource);
    }
  }

  @Override
  public void recoverContainer(Resource clusterResource,
      SchedulerApplicationAttempt schedulerAttempt, RMContainer rmContainer) {
    // TODO Auto-generated method stub
  }
}
| apache-2.0 |
skinzer/governator | governator-legacy/src/test/java/com/netflix/governator/lifecycle/warmup/Flat.java | 1291 | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.governator.lifecycle.warmup;
import com.google.inject.Singleton;
import com.netflix.governator.annotations.WarmUp;
/**
 * Warm-up test fixture: two independent singletons with no dependencies on
 * each other, each recording its name when warmed up.
 */
public class Flat
{
    /*
        Root classes without dependencies
     */
    @Singleton
    public static class A
    {
        // Presumably assigned by the test before warm-up runs (no injection
        // annotation is visible here) -- volatile so the warm-up thread sees
        // the assignment. TODO confirm against the test that uses this class.
        public volatile Recorder recorder;

        @WarmUp
        public void warmUp() throws InterruptedException
        {
            // Record that A's warm-up executed.
            recorder.record("A");
        }
    }

    @Singleton
    public static class B
    {
        // Same contract as A.recorder.
        public volatile Recorder recorder;

        @WarmUp
        public void warmUp() throws InterruptedException
        {
            // Record that B's warm-up executed.
            recorder.record("B");
        }
    }
}
| apache-2.0 |
zwets/flowable-engine | modules/flowable-idm-engine/src/main/java/org/flowable/idm/engine/impl/cmd/SetUserPictureCmd.java | 2022 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.idm.engine.impl.cmd;
import java.io.Serializable;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.engine.common.api.FlowableObjectNotFoundException;
import org.flowable.engine.common.impl.interceptor.Command;
import org.flowable.engine.common.impl.interceptor.CommandContext;
import org.flowable.idm.api.Picture;
import org.flowable.idm.api.User;
import org.flowable.idm.engine.impl.util.CommandContextUtil;
/**
* @author Tom Baeyens
*/
/**
 * Command that attaches a {@link Picture} to an existing {@link User}.
 * Fails fast when no user id is supplied or when the user cannot be found.
 *
 * @author Tom Baeyens
 */
public class SetUserPictureCmd implements Command<Object>, Serializable {

    private static final long serialVersionUID = 1L;

    protected String userId;
    protected Picture picture;

    public SetUserPictureCmd(String userId, Picture picture) {
        this.userId = userId;
        this.picture = picture;
    }

    @Override
    public Object execute(CommandContext commandContext) {
        if (userId == null) {
            throw new FlowableIllegalArgumentException("userId is null");
        }
        User existingUser = findUserOrThrow();
        CommandContextUtil.getUserEntityManager(commandContext).setUserPicture(existingUser, picture);
        return null;
    }

    /** Looks up the user by id, failing with a not-found error when absent. */
    private User findUserOrThrow() {
        User user = CommandContextUtil.getIdmEngineConfiguration().getIdmIdentityService()
                .createUserQuery().userId(userId)
                .singleResult();
        if (user == null) {
            throw new FlowableObjectNotFoundException("user " + userId + " doesn't exist", User.class);
        }
        return user;
    }
}
| apache-2.0 |
goldmansachs/reladomo | reladomo/src/test/java/com/gs/fw/common/mithra/test/tax/FormRoleList.java | 1001 | /*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.test.tax;
import com.gs.fw.finder.Operation;
import java.util.*;
/**
 * Concrete list type for {@code FormRole} objects; all behaviour lives in the
 * generated {@code FormRoleListAbstract} superclass. The constructors simply
 * mirror the superclass constructors.
 */
public class FormRoleList extends FormRoleListAbstract
{
    /** Creates an empty list. */
    public FormRoleList()
    {
        super();
    }
    /** Creates an empty list with the given initial capacity. */
    public FormRoleList(int initialSize)
    {
        super(initialSize);
    }
    /** Creates a list populated with the elements of {@code c}. */
    public FormRoleList(Collection c)
    {
        super(c);
    }
    /** Creates a deferred list defined by the given Reladomo {@code operation}. */
    public FormRoleList(Operation operation)
    {
        super(operation);
    }
}
| apache-2.0 |
kuznetsovsergeyymailcom/homework | chapter_001/maximumFromTwoNumbers/src/test/java/ru/skuznetsov/package-info.java | 145 | /**
 * Purpose of package: find the largest of two or three numbers.
* @since 1.0
* @author skuznetsov
* @version 2.0
*/
package ru.skuznetsov;
| apache-2.0 |
dumitru-petrusca/gosu-lang | gosu-core/src/main/java/gw/internal/gosu/parser/ReducedDelegateFunctionSymbol.java | 511 | /*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.lang.reflect.IMethodInfo;
/**
 * Reduced form of a {@link DelegateFunctionSymbol}: retains only the target
 * method info, captured eagerly at construction time so the original (heavier)
 * symbol can be discarded.
 */
public class ReducedDelegateFunctionSymbol extends ReducedDynamicFunctionSymbol implements IReducedDelegateFunctionSymbol {
  // Method the delegate forwards to; snapshotted from the source symbol's getMi().
  private IMethodInfo _targetMethodInfo;
  ReducedDelegateFunctionSymbol(DelegateFunctionSymbol dfs) {
    super( dfs );
    _targetMethodInfo = dfs.getMi();
  }
  @Override
  public IMethodInfo getTargetMethodInfo() {
    return _targetMethodInfo;
  }
}
| apache-2.0 |
petracvv/cas | support/cas-server-support-infinispan-ticket-registry/src/main/java/org/apereo/cas/ticket/registry/InfinispanTicketRegistry.java | 2994 | package org.apereo.cas.ticket.registry;
import org.apereo.cas.ticket.Ticket;
import org.infinispan.Cache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
/**
* This is {@link InfinispanTicketRegistry}. Infinispan is a distributed in-memory
* key/value data store with optional schema.
* It offers advanced functionality such as transactions, events, querying and distributed processing.
* See <a href="http://infinispan.org/features/">http://infinispan.org/features/</a> for more info.
*
* @author Misagh Moayyed
* @since 4.2.0
*/
public class InfinispanTicketRegistry extends AbstractTicketRegistry {
    private static final Logger LOGGER = LoggerFactory.getLogger(InfinispanTicketRegistry.class);

    /** Backing Infinispan cache, keyed by the (possibly encoded) ticket id. */
    private final Cache<String, Ticket> cache;

    /**
     * Instantiates a new Infinispan ticket registry.
     *
     * @param cache the cache backing this registry
     */
    public InfinispanTicketRegistry(final Cache<String, Ticket> cache) {
        this.cache = cache;
        LOGGER.info("Setting up Infinispan Ticket Registry...");
    }

    @Override
    public Ticket updateTicket(final Ticket ticket) {
        // Fix: delegate to addTicket so the updated ticket is encoded and stored with
        // the same expiration settings as on initial insertion. Previously the raw
        // ticket was stored under its raw id, which diverged from the encoded entry
        // written by addTicket whenever ticket encryption was enabled.
        addTicket(ticket);
        return ticket;
    }

    @Override
    public void addTicket(final Ticket ticketToAdd) {
        final Ticket ticket = encodeTicket(ticketToAdd);

        // Prefer the idle timeout when configured; otherwise fall back to time-to-live.
        final long idleTime = ticket.getExpirationPolicy().getTimeToIdle() <= 0
                ? ticket.getExpirationPolicy().getTimeToLive()
                : ticket.getExpirationPolicy().getTimeToIdle();

        LOGGER.debug("Adding ticket [{}] to cache store to live [{}] seconds and stay idle for [{}]",
                ticket.getId(), ticket.getExpirationPolicy().getTimeToLive(), idleTime);

        this.cache.put(ticket.getId(), ticket,
                ticket.getExpirationPolicy().getTimeToLive(), TimeUnit.SECONDS,
                idleTime, TimeUnit.SECONDS);
    }

    @Override
    public Ticket getTicket(final String ticketId) {
        // Fix: null-check before encoding (the previous code encoded first and only
        // then tested the raw id, doing pointless work for null input).
        if (ticketId == null) {
            return null;
        }
        final String encTicketId = encodeTicketId(ticketId);
        final Ticket ticket = this.cache.get(encTicketId);
        // Fix: decode the stored ticket; addTicket stores the encoded form, so
        // returning it verbatim would hand callers an encrypted ticket.
        return ticket == null ? null : decodeTicket(ticket);
    }

    @Override
    public boolean deleteSingleTicket(final String ticketId) {
        // NOTE(review): removal uses the id as given, while the verification via
        // getTicket re-encodes it — confirm whether callers pass raw or encoded ids.
        this.cache.remove(ticketId);
        return getTicket(ticketId) == null;
    }

    @Override
    public long deleteAll() {
        final int size = this.cache.size();
        this.cache.clear();
        return size;
    }

    /**
     * Retrieve all tickets from the registry.
     * <p>
     * Note! Usage of this method can be computational and I/O intensive and should not be used for other than
     * debugging.
     *
     * @return collection of tickets currently stored in the registry. Tickets
     * might or might not be valid i.e. expired.
     */
    @Override
    public Collection<Ticket> getTickets() {
        return decodeTickets(this.cache.values());
    }
}
| apache-2.0 |
XidongHuang/aws-sdk-for-java | src/main/java/com/amazonaws/services/ec2/model/transform/DescribeTagsResultStaxUnmarshaller.java | 2423 | /*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
 * StAX unmarshaller for {@code DescribeTags} results. Walks the event stream
 * and accumulates each {@code tagSet/item} element into the result's tag list,
 * stopping at end-of-document or when the parser leaves the original depth.
 */
public class DescribeTagsResultStaxUnmarshaller implements Unmarshaller<DescribeTagsResult, StaxUnmarshallerContext> {

    public DescribeTagsResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        final DescribeTagsResult result = new DescribeTagsResult();
        final int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;
        // At the document root the payload sits one level deeper.
        if (context.isStartOfDocument()) {
            targetDepth += 1;
        }

        while (true) {
            XMLEvent event = context.nextEvent();
            if (event.isEndDocument()) {
                return result;
            }

            if (event.isAttribute() || event.isStartElement()) {
                if (context.testExpression("tagSet/item", targetDepth)) {
                    result.getTags().add(TagDescriptionStaxUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (event.isEndElement() && context.getCurrentDepth() < originalDepth) {
                // Closed the element that enclosed this result — we're done.
                return result;
            }
        }
    }

    private static DescribeTagsResultStaxUnmarshaller instance;

    /** Returns the shared (lazily created) unmarshaller instance. */
    public static DescribeTagsResultStaxUnmarshaller getInstance() {
        if (instance == null) {
            instance = new DescribeTagsResultStaxUnmarshaller();
        }
        return instance;
    }
}
| apache-2.0 |
howepeng/isis | core/metamodel/src/main/java/org/apache/isis/core/metamodel/facets/properties/property/disabled/DisabledFacetForPropertyAnnotation.java | 2470 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.facets.properties.property.disabled;
import org.apache.isis.applib.annotation.Editing;
import org.apache.isis.applib.annotation.Property;
import org.apache.isis.applib.annotation.When;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.core.metamodel.facetapi.FacetHolder;
import org.apache.isis.core.metamodel.facets.members.disabled.DisabledFacet;
import org.apache.isis.core.metamodel.facets.members.disabled.DisabledFacetAbstractImpl;
public class DisabledFacetForPropertyAnnotation extends DisabledFacetAbstractImpl {

    /**
     * Creates a disabled facet from the {@link Property} annotation's editing
     * settings, or returns {@code null} when no facet should be installed here.
     */
    public static DisabledFacet create(final Property property, final FacetHolder holder) {
        if (property == null) {
            return null;
        }

        final Editing editing = property.editing();
        final String disabledReason = property.editingDisabledReason();

        if (editing == Editing.DISABLED) {
            return new DisabledFacetForPropertyAnnotation(disabledReason, holder);
        }

        // Editing.ENABLED needs no facet. Editing.AS_CONFIGURED is handled elsewhere:
        // the DomainObjectFactory (processing @DomainObject) installs an
        // ImmutableFacetForDomainObjectAnnotation, from which a
        // DisabledFacetOnPropertyDerivedFromImmutable facet is then derived.
        return null;
    }

    private DisabledFacetForPropertyAnnotation(final String reason, final FacetHolder holder) {
        super(When.ALWAYS, Where.EVERYWHERE, reason, holder);
    }
}
| apache-2.0 |
facebook/buck | src/com/facebook/buck/infer/InferConfig.java | 4497 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.infer;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.config.ConfigView;
import com.facebook.buck.core.model.TargetConfiguration;
import com.facebook.buck.core.model.UnconfiguredBuildTarget;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.toolchain.toolprovider.ToolProvider;
import com.facebook.buck.core.toolchain.toolprovider.impl.ConstantToolProvider;
import com.facebook.buck.core.util.immutables.BuckStyleValue;
import com.facebook.buck.rules.tool.config.ToolConfig;
import com.google.common.collect.ImmutableList;
import java.nio.file.Paths;
import java.util.Optional;
import org.immutables.value.Value;
/** Infer specific buck config */
@BuckStyleValue
public abstract class InferConfig implements ConfigView<BuckConfig> {
  // TODO(arr): change to just "infer" when cxx and java configs are consolidated
  private static final String SECTION = "infer_java";
  private static final String DIST_FIELD = "dist";
  private static final String DEFAULT_DIST_BINARY = "infer";

  @Override
  public abstract BuckConfig getDelegate();

  public static InferConfig of(BuckConfig delegate) {
    return ImmutableInferConfig.of(delegate);
  }

  /** Tool provider for the infer binary itself, when one is configured. */
  @Value.Lazy
  public Optional<ToolProvider> getBinary() {
    return getDelegate().getView(ToolConfig.class).getToolProvider(SECTION, "binary");
  }

  /**
   * Depending on the type of dist (plain path vs target) either return a {@link
   * ConstantToolProvider} or {@link InferDistFromTargetProvider} with properly set up parse time
   * deps.
   */
  @Value.Lazy
  public Optional<ToolProvider> getDist() {
    return getDelegate()
        .getValue(SECTION, DIST_FIELD)
        .map(
            configuredValue ->
                getDelegate()
                    .getMaybeUnconfiguredBuildTarget(SECTION, DIST_FIELD)
                    .map(this::mkDistProviderFromTarget)
                    .orElseGet(() -> mkDistProviderFromPath(configuredValue)));
  }

  /** Name of the binary inside the dist; defaults to {@code infer}. */
  @Value.Lazy
  public String getDistBinary() {
    return getDelegate().getValue(SECTION, "dist_binary").orElse(DEFAULT_DIST_BINARY);
  }

  @Value.Lazy
  public Optional<String> getVersion() {
    return getDelegate().getValue(SECTION, "version");
  }

  @Value.Lazy
  public Optional<SourcePath> getConfigFile(TargetConfiguration targetConfiguration) {
    return getDelegate().getSourcePath(SECTION, "config_file", targetConfiguration);
  }

  @Value.Lazy
  public ImmutableList<String> getNullsafeArgs() {
    return getDelegate().getListWithoutComments(SECTION, "nullsafe_args");
  }

  /** Directory with third party signatures for nullsafe. */
  @Value.Lazy
  public Optional<SourcePath> getNullsafeThirdPartySignatures(
      TargetConfiguration targetConfiguration) {
    return getDelegate()
        .getSourcePath(SECTION, "nullsafe_third_party_signatures", targetConfiguration);
  }

  @Value.Lazy
  public Boolean getPrettyPrint() {
    return getDelegate().getBooleanValue(SECTION, "pretty_print", false);
  }

  @Value.Lazy
  public Boolean executeRemotely() {
    return getDelegate().getBooleanValue(SECTION, "execute_remotely", false);
  }

  /** Builds a dist provider whose tool comes from a build target. */
  private ToolProvider mkDistProviderFromTarget(UnconfiguredBuildTarget target) {
    String source = String.format("[%s] %s", SECTION, DIST_FIELD);
    return new InferDistFromTargetProvider(target, getDistBinary(), source);
  }

  /** Builds a dist provider whose tool comes from a filesystem path. */
  private ToolProvider mkDistProviderFromPath(String path) {
    String errorMessage = String.format("%s:%s path not found", SECTION, DIST_FIELD);
    return new ConstantToolProvider(
        new InferDistTool(
            () -> getDelegate().getPathSourcePath(Paths.get(path), errorMessage), getDistBinary()));
  }
}
| apache-2.0 |