repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
mayonghui2112/helloWorld | sourceCode/testMaven/onjava8/src/main/java/generics/Shape.java | 557 | package generics;// generics/Shape.java
// (c)2017 MindView LLC: see Copyright.txt
// We make no guarantees that this code is fit for any purpose.
// Visit http://OnJava8.com for more book information.
public class Shape {
    // Monotonically increasing source of unique ids, shared by every Shape
    // created in this JVM (not thread-safe; fine for the book's demo code).
    private static long counter = 0;

    // Identifier captured at construction time; immutable thereafter.
    private final long id = counter++;

    /** Reports the concrete subclass name followed by this shape's unique id. */
    @Override
    public String toString() {
        return String.format("%s %d", getClass().getSimpleName(), id);
    }

    /** Announces a rotation of this shape on standard output. */
    public void rotate() {
        System.out.println(this + " rotate");
    }

    /** Announces a resize of this shape to {@code newSize} on standard output. */
    public void resize(int newSize) {
        System.out.println(this + " resize " + newSize);
    }
}
| apache-2.0 |
StephanEwen/incubator-flink | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointOptions.java | 9824 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.checkpoint;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.state.CheckpointStorageLocationReference;
import java.io.Serializable;
import java.util.Objects;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;
/**
* Options for performing the checkpoint. Note that different {@link
* org.apache.flink.runtime.io.network.api.CheckpointBarrier barriers} may have different options.
*
* <p>The {@link CheckpointProperties} are related and cover properties that are only relevant at
* the {@link CheckpointCoordinator}. These options are relevant at the {@link AbstractInvokable}
* instances running on task managers.
*/
public class CheckpointOptions implements Serializable {

    /** How a checkpoint should be aligned. */
    public enum AlignmentType {
        AT_LEAST_ONCE,
        ALIGNED,
        UNALIGNED,
        FORCED_ALIGNED
    }

    // Sentinel meaning "no alignment timeout"; also the only timeout value an
    // UNALIGNED option set is allowed to carry (see the constructor check).
    public static final long NO_ALIGNED_CHECKPOINT_TIME_OUT = Long.MAX_VALUE;

    private static final long serialVersionUID = 5010126558083292915L;

    /** Type of the checkpoint. */
    private final CheckpointType checkpointType;

    /** Target location for the checkpoint. */
    private final CheckpointStorageLocationReference targetLocation;

    /** How barriers for this checkpoint must be handled on the tasks. */
    private final AlignmentType alignmentType;

    /** Timeout after which an aligned checkpoint may switch to unaligned (if timeoutable). */
    private final long alignedCheckpointTimeout;

    /** Options for an at-least-once checkpoint: no barrier alignment, no timeout. */
    public static CheckpointOptions notExactlyOnce(
            CheckpointType type, CheckpointStorageLocationReference location) {
        return new CheckpointOptions(
                type, location, AlignmentType.AT_LEAST_ONCE, NO_ALIGNED_CHECKPOINT_TIME_OUT);
    }

    /** Options for an exactly-once aligned checkpoint that never times out its alignment. */
    public static CheckpointOptions alignedNoTimeout(
            CheckpointType type, CheckpointStorageLocationReference location) {
        return new CheckpointOptions(
                type, location, AlignmentType.ALIGNED, NO_ALIGNED_CHECKPOINT_TIME_OUT);
    }

    /** Options for an unaligned checkpoint; only plain CHECKPOINTs may be unaligned. */
    public static CheckpointOptions unaligned(CheckpointStorageLocationReference location) {
        return new CheckpointOptions(
                CheckpointType.CHECKPOINT,
                location,
                AlignmentType.UNALIGNED,
                NO_ALIGNED_CHECKPOINT_TIME_OUT);
    }

    /** Options for an aligned checkpoint that may switch to unaligned after the timeout. */
    public static CheckpointOptions alignedWithTimeout(
            CheckpointStorageLocationReference location, long alignedCheckpointTimeout) {
        return new CheckpointOptions(
                CheckpointType.CHECKPOINT,
                location,
                AlignmentType.ALIGNED,
                alignedCheckpointTimeout);
    }

    // Internal: alignment is forced (unaligned currently unsupported downstream).
    // The original timeout is preserved so withUnalignedSupported() can restore it.
    private static CheckpointOptions forceAligned(
            CheckpointStorageLocationReference location, long alignedCheckpointTimeout) {
        return new CheckpointOptions(
                CheckpointType.CHECKPOINT,
                location,
                AlignmentType.FORCED_ALIGNED,
                alignedCheckpointTimeout);
    }

    /**
     * Chooses the options matching the job configuration. Precedence:
     * not exactly-once > savepoint (always aligned) > unaligned disabled >
     * immediate/no timeout (fully unaligned) > aligned with timeout.
     */
    public static CheckpointOptions forConfig(
            CheckpointType checkpointType,
            CheckpointStorageLocationReference locationReference,
            boolean isExactlyOnceMode,
            boolean isUnalignedEnabled,
            long alignedCheckpointTimeout) {
        if (!isExactlyOnceMode) {
            return notExactlyOnce(checkpointType, locationReference);
        } else if (checkpointType.isSavepoint()) {
            return alignedNoTimeout(checkpointType, locationReference);
        } else if (!isUnalignedEnabled) {
            return alignedNoTimeout(checkpointType, locationReference);
        } else if (alignedCheckpointTimeout == 0
                || alignedCheckpointTimeout == NO_ALIGNED_CHECKPOINT_TIME_OUT) {
            // A timeout of 0 means "go unaligned immediately".
            return unaligned(locationReference);
        } else {
            return alignedWithTimeout(locationReference, alignedCheckpointTimeout);
        }
    }

    /** Convenience constructor: aligned, no timeout. */
    @VisibleForTesting
    public CheckpointOptions(
            CheckpointType checkpointType, CheckpointStorageLocationReference targetLocation) {
        this(checkpointType, targetLocation, AlignmentType.ALIGNED, NO_ALIGNED_CHECKPOINT_TIME_OUT);
    }

    /**
     * @throws IllegalArgumentException if a savepoint is requested unaligned, or if an
     *     unaligned checkpoint carries a real (non-sentinel) timeout
     */
    public CheckpointOptions(
            CheckpointType checkpointType,
            CheckpointStorageLocationReference targetLocation,
            AlignmentType alignmentType,
            long alignedCheckpointTimeout) {
        checkArgument(
                alignmentType != AlignmentType.UNALIGNED || !checkpointType.isSavepoint(),
                "Savepoint can't be unaligned");
        checkArgument(
                alignedCheckpointTimeout == NO_ALIGNED_CHECKPOINT_TIME_OUT
                        || alignmentType != AlignmentType.UNALIGNED,
                "Unaligned checkpoint can't have timeout (%s)",
                alignedCheckpointTimeout);
        this.checkpointType = checkNotNull(checkpointType);
        this.targetLocation = checkNotNull(targetLocation);
        this.alignmentType = checkNotNull(alignmentType);
        this.alignedCheckpointTimeout = alignedCheckpointTimeout;
    }

    /** Barriers need aligning when exactly-once and (savepoint or not unaligned). */
    public boolean needsAlignment() {
        return isExactlyOnceMode()
                && (getCheckpointType().isSavepoint() || !isUnalignedCheckpoint());
    }

    public long getAlignedCheckpointTimeout() {
        return alignedCheckpointTimeout;
    }

    public AlignmentType getAlignment() {
        return alignmentType;
    }

    /** True only for ALIGNED options carrying a finite, positive timeout. */
    public boolean isTimeoutable() {
        if (alignmentType == AlignmentType.FORCED_ALIGNED) {
            return false;
        }
        return alignmentType == AlignmentType.ALIGNED
                && (alignedCheckpointTimeout > 0
                        && alignedCheckpointTimeout != NO_ALIGNED_CHECKPOINT_TIME_OUT);
    }

    // ------------------------------------------------------------------------

    /** Returns the type of checkpoint to perform. */
    public CheckpointType getCheckpointType() {
        return checkpointType;
    }

    /** Returns the target location for the checkpoint. */
    public CheckpointStorageLocationReference getTargetLocation() {
        return targetLocation;
    }

    /** Everything except AT_LEAST_ONCE counts as exactly-once. */
    public boolean isExactlyOnceMode() {
        return alignmentType != AlignmentType.AT_LEAST_ONCE;
    }

    public boolean isUnalignedCheckpoint() {
        return alignmentType == AlignmentType.UNALIGNED;
    }

    /**
     * Undoes a previous {@code withUnalignedUnsupported()}: FORCED_ALIGNED reverts
     * to aligned-with-timeout (if a timeout was preserved) or fully unaligned.
     */
    public CheckpointOptions withUnalignedSupported() {
        if (alignmentType == AlignmentType.FORCED_ALIGNED) {
            return alignedCheckpointTimeout != NO_ALIGNED_CHECKPOINT_TIME_OUT
                    ? alignedWithTimeout(targetLocation, alignedCheckpointTimeout)
                    : unaligned(targetLocation);
        }
        return this;
    }

    /** Forces alignment for options that are (or could time out into) unaligned. */
    public CheckpointOptions withUnalignedUnsupported() {
        if (isUnalignedCheckpoint() || isTimeoutable()) {
            return forceAligned(targetLocation, alignedCheckpointTimeout);
        }
        return this;
    }

    // ------------------------------------------------------------------------

    @Override
    public int hashCode() {
        return Objects.hash(
                targetLocation, checkpointType, alignmentType, alignedCheckpointTimeout);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        } else if (obj != null && obj.getClass() == CheckpointOptions.class) {
            final CheckpointOptions that = (CheckpointOptions) obj;
            return this.checkpointType == that.checkpointType
                    && this.targetLocation.equals(that.targetLocation)
                    && this.alignmentType == that.alignmentType
                    && this.alignedCheckpointTimeout == that.alignedCheckpointTimeout;
        } else {
            return false;
        }
    }

    @Override
    public String toString() {
        return "CheckpointOptions {"
                + "checkpointType = "
                + checkpointType
                + ", targetLocation = "
                + targetLocation
                + ", alignment = "
                + alignmentType
                + ", alignedCheckpointTimeout = "
                + alignedCheckpointTimeout
                + "}";
    }

    // ------------------------------------------------------------------------
    //  Factory methods
    // ------------------------------------------------------------------------

    // Shared singleton; safe to reuse because CheckpointOptions is immutable.
    private static final CheckpointOptions CHECKPOINT_AT_DEFAULT_LOCATION =
            new CheckpointOptions(
                    CheckpointType.CHECKPOINT, CheckpointStorageLocationReference.getDefault());

    @VisibleForTesting
    public static CheckpointOptions forCheckpointWithDefaultLocation() {
        return CHECKPOINT_AT_DEFAULT_LOCATION;
    }

    /**
     * Converts an ALIGNED option set to UNALIGNED at the same location.
     * Note: any alignment timeout is dropped (unaligned can't carry one).
     */
    public CheckpointOptions toUnaligned() {
        checkState(alignmentType == AlignmentType.ALIGNED);
        return unaligned(targetLocation);
    }
}
| apache-2.0 |
antoinesd/weld-core | tests-arquillian/src/test/java/org/jboss/weld/tests/interceptors/cache/Foo.java | 1097 | /*
* JBoss, Home of Professional Open Source
* Copyright 2014, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.interceptors.cache;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
public class Foo {

    // Programmatic lookup handle for Bar; each get() resolves a contextual instance.
    @Inject
    private Instance<Bar> bar;

    /**
     * Resolves a {@code Bar}, invokes it once, and explicitly destroys the
     * resolved instance afterwards via the {@link Instance} handle.
     */
    @FooBinding
    public void foo() {
        final Bar resolved = bar.get();
        resolved.bar();
        bar.destroy(resolved);
    }
}
| apache-2.0 |
shuliangtao/apache-camel-2.13.0-src | components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvUnmarshalPipeDelimiterSpringTest.java | 2342 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.csv;
import java.util.List;
import org.apache.camel.EndpointInject;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.spring.CamelSpringTestSupport;
import org.junit.Test;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Spring based integration test for the <code>CsvDataFormat</code>
* @version
*/
public class CsvUnmarshalPipeDelimiterSpringTest extends CamelSpringTestSupport {

    // Mock endpoint that collects the exchanges produced by the route under test.
    @EndpointInject(uri = "mock:result")
    private MockEndpoint result;

    /**
     * Sends two pipe-delimited CSV records through the route and verifies the
     * unmarshalled result: a list of rows, each row a list of field strings.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testCsvMarshal() throws Exception {
        result.expectedMessageCount(1);
        // Two records, '|'-separated, newline-delimited.
        template.sendBody("direct:start", "123|Camel in Action|1\n124|ActiveMQ in Action|2");
        assertMockEndpointsSatisfied();
        List<List<String>> body = result.getReceivedExchanges().get(0).getIn().getBody(List.class);
        assertEquals(2, body.size());
        assertEquals("123", body.get(0).get(0));
        assertEquals("Camel in Action", body.get(0).get(1));
        assertEquals("1", body.get(0).get(2));
        assertEquals("124", body.get(1).get(0));
        assertEquals("ActiveMQ in Action", body.get(1).get(1));
        assertEquals("2", body.get(1).get(2));
    }

    @Override
    protected ClassPathXmlApplicationContext createApplicationContext() {
        // Route wiring (presumably direct:start -> csv unmarshal -> mock:result)
        // lives in this Spring XML file — see the context file for details.
        return new ClassPathXmlApplicationContext("org/apache/camel/dataformat/csv/CsvUnmarshalPipeDelimiterSpringTest-context.xml");
    }
}
dejunhuang/privacyCleaning | src/main/java/data/cleaning/core/service/dataset/impl/DatasetType.java | 109 | package data.cleaning.core.service.dataset.impl;
public enum DatasetType {
    // Roles a dataset can play in the cleaning pipeline. Names suggest:
    // TARGET = data being cleaned, MASTER = trusted reference data,
    // GROUND_TRUTH = known-correct data for evaluation — confirm against callers.
    TARGET, MASTER, GROUND_TRUTH;
}
| apache-2.0 |
mklew/mmp | src/java/org/apache/cassandra/db/index/keys/KeysIndex.java | 5713 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.index.keys;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.ConfigurationException;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.index.PerColumnSecondaryIndex;
import org.apache.cassandra.db.index.SecondaryIndexSearcher;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.cassandra.db.marshal.LocalByPartionerType;
import org.apache.cassandra.dht.*;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implements a secondary index for a column family using a second column family
* in which the row keys are indexed values, and column names are base row keys.
*/
public class KeysIndex extends PerColumnSecondaryIndex
{
    private static final Logger logger = LoggerFactory.getLogger(KeysIndex.class);

    // Backing column family holding the index rows; created lazily in init().
    private ColumnFamilyStore indexCfs;

    public KeysIndex()
    {
    }

    /**
     * Creates the backing index column family. Relies on baseCfs and columnDefs
     * having been set by the secondary-index framework before this is called.
     */
    public void init()
    {
        assert baseCfs != null && columnDefs != null;
        ColumnDefinition columnDef = columnDefs.iterator().next();
        CFMetaData indexedCfMetadata = CFMetaData.newIndexMetadata(baseCfs.metadata, columnDef, indexComparator());
        indexCfs = ColumnFamilyStore.createColumnFamilyStore(baseCfs.table,
                                                             indexedCfMetadata.cfName,
                                                             new LocalPartitioner(columnDef.getValidator()),
                                                             indexedCfMetadata);
    }

    /**
     * Comparator for index-row column names (which are base-row keys): raw bytes
     * when the partitioner already preserves key order, otherwise order by the
     * partitioner's token so iteration matches base-table scan order.
     */
    public static AbstractType indexComparator()
    {
        IPartitioner rowPartitioner = StorageService.getPartitioner();
        return (rowPartitioner instanceof OrderPreservingPartitioner || rowPartitioner instanceof ByteOrderedPartitioner)
               ? BytesType.instance
               : new LocalByPartionerType(StorageService.getPartitioner());
    }

    /** Removes the index entry for the given value/row pair by writing a tombstone. */
    public void deleteColumn(DecoratedKey<?> valueKey, ByteBuffer rowKey, IColumn column)
    {
        // Already-deleted base columns were never indexed (or are being cleaned elsewhere).
        if (column.isMarkedForDelete())
            return;
        int localDeletionTime = (int) (System.currentTimeMillis() / 1000);
        ColumnFamily cfi = ColumnFamily.create(indexCfs.metadata);
        cfi.addTombstone(rowKey, localDeletionTime, column.timestamp());
        indexCfs.apply(valueKey, cfi);
        if (logger.isDebugEnabled())
            logger.debug("removed index entry for cleaned-up value {}:{}", valueKey, cfi);
    }

    /**
     * Adds an index entry mapping the indexed value (valueKey) to the base row key.
     * TTL and expiry metadata of expiring base columns are carried over so the
     * index entry dies with the data it points to.
     */
    public void insertColumn(DecoratedKey<?> valueKey, ByteBuffer rowKey, IColumn column)
    {
        ColumnFamily cfi = ColumnFamily.create(indexCfs.metadata);
        if (column instanceof ExpiringColumn)
        {
            ExpiringColumn ec = (ExpiringColumn)column;
            cfi.addColumn(new ExpiringColumn(rowKey, ByteBufferUtil.EMPTY_BYTE_BUFFER, ec.timestamp(), ec.getTimeToLive(), ec.getLocalDeletionTime()));
        }
        else
        {
            cfi.addColumn(new Column(rowKey, ByteBufferUtil.EMPTY_BYTE_BUFFER, column.timestamp()));
        }
        if (logger.isDebugEnabled())
            logger.debug("applying index row {} in {}", indexCfs.metadata.getKeyValidator().getString(valueKey.key), cfi);
        indexCfs.apply(valueKey, cfi);
    }

    /** Updates are handled as plain inserts; older entries lose by timestamp. */
    public void updateColumn(DecoratedKey<?> valueKey, ByteBuffer rowKey, IColumn col)
    {
        insertColumn(valueKey, rowKey, col);
    }

    /** Drops the whole index (columnName is unused for a per-column keys index). */
    public void removeIndex(ByteBuffer columnName) throws IOException
    {
        indexCfs.invalidate();
    }

    /** Blocks until the index memtable has been flushed to disk. */
    public void forceBlockingFlush() throws IOException
    {
        try
        {
            indexCfs.forceBlockingFlush();
        }
        catch (ExecutionException e)
        {
            // Wrap to honor the IOException-only signature.
            throw new IOException(e);
        }
        catch (InterruptedException e)
        {
            throw new IOException(e);
        }
    }

    public void invalidate()
    {
        indexCfs.invalidate();
    }

    /** Discards index SSTables older than the base table's truncation point. */
    public void truncate(long truncatedAt)
    {
        indexCfs.discardSSTables(truncatedAt);
    }

    public ColumnFamilyStore getIndexCfs()
    {
        return indexCfs;
    }

    public SecondaryIndexSearcher createSecondaryIndexSearcher(Set<ByteBuffer> columns)
    {
        return new KeysSearcher(baseCfs.indexManager, columns);
    }

    public String getIndexName()
    {
        return indexCfs.columnFamily;
    }

    public void validateOptions() throws ConfigurationException
    {
        // no options used
    }

    /** Live size = current memtable footprint of the index column family. */
    public long getLiveSize()
    {
        return indexCfs.getMemtableDataSize();
    }
}
| apache-2.0 |
rasika90/carbon-uuf | components/uuf-core/src/main/java/org/wso2/carbon/uuf/exception/UUFException.java | 1020 | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.uuf.exception;
/**
 * Generic unchecked exception thrown by the UUF framework. Mirrors the four
 * standard {@link RuntimeException} constructors so callers can attach a
 * message, a cause, or both.
 */
public class UUFException extends RuntimeException {

    // Explicit id: RuntimeException is Serializable, and without this the JVM
    // derives a brittle default that changes whenever the class is edited.
    private static final long serialVersionUID = 1L;

    /** Creates an exception with no message and no cause. */
    public UUFException() {
    }

    /** @param message detail message describing the failure */
    public UUFException(String message) {
        super(message);
    }

    /** @param cause underlying throwable that triggered this exception */
    public UUFException(Throwable cause) {
        super(cause);
    }

    /**
     * @param message detail message describing the failure
     * @param cause   underlying throwable that triggered this exception
     */
    public UUFException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
AdamBien/enhydrator | enhydrator/src/main/java/com/airhacks/enhydrator/transform/SkipFirstRow.java | 1892 | package com.airhacks.enhydrator.transform;
/*
* #%L
* enhydrator
* %%
* Copyright (C) 2014 Adam Bien
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.airhacks.enhydrator.flexpipe.RowTransformation;
import com.airhacks.enhydrator.in.Row;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
*
* @author airhacks.com
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlRootElement(name = "skip-first-row")
public class SkipFirstRow extends RowTransformation {

    // Records whether the first row has already been consumed.
    // Excluded from XML (runtime state, not configuration).
    @XmlTransient
    private boolean skipped = false;

    /**
     * Drops the very first row handed to this transformation (returns null)
     * and passes every subsequent row through unchanged.
     */
    @Override
    public Row execute(Row input) {
        if (!skipped) {
            skipped = true;
            return null;
        }
        return input;
    }

    @Override
    public int hashCode() {
        // Equivalent to the classic 7/67 seed-and-multiply recipe: 67 * 7 + bit.
        return 469 + (this.skipped ? 1 : 0);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        // Two instances are equal exactly when their skip state matches.
        return this.skipped == ((SkipFirstRow) obj).skipped;
    }
}
| apache-2.0 |
liurl3/carbon-identity | components/user-mgt/org.wso2.carbon.user.mgt.workflow/src/main/java/org/wso2/carbon/user/mgt/workflow/userstore/AddRoleWFRequestHandler.java | 11436 | /*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.user.mgt.workflow.userstore;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.identity.workflow.mgt.WorkflowManagementService;
import org.wso2.carbon.identity.workflow.mgt.bean.Entity;
import org.wso2.carbon.identity.workflow.mgt.exception.InternalWorkflowException;
import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowException;
import org.wso2.carbon.identity.workflow.mgt.extension.AbstractWorkflowRequestHandler;
import org.wso2.carbon.identity.workflow.mgt.util.WorkflowDataType;
import org.wso2.carbon.identity.workflow.mgt.util.WorkflowRequestStatus;
import org.wso2.carbon.user.api.Permission;
import org.wso2.carbon.user.api.UserRealm;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.common.AbstractUserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.user.mgt.workflow.internal.IdentityWorkflowDataHolder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class AddRoleWFRequestHandler extends AbstractWorkflowRequestHandler {

    private static final String FRIENDLY_NAME = "Add Role";
    private static final String FRIENDLY_DESCRIPTION = "Triggered when a user create a new role.";

    // Keys of the parameters passed to (and read back from) the workflow engine.
    private static final String ROLENAME = "Role Name";
    private static final String USER_STORE_DOMAIN = "User Store Domain";
    private static final String PERMISSIONS = "Permissions";
    private static final String USER_LIST = "Users";

    // Separator used to flatten a Permission (resourceId + action) into one string.
    private static final String SEPARATOR = "->";

    private static final Map<String, String> PARAM_DEFINITION;

    private static Log log = LogFactory.getLog(AddRoleWFRequestHandler.class);

    static {
        // LinkedHashMap preserves declaration order when the definition is listed.
        PARAM_DEFINITION = new LinkedHashMap<>();
        PARAM_DEFINITION.put(ROLENAME, WorkflowDataType.STRING_TYPE);
        PARAM_DEFINITION.put(USER_STORE_DOMAIN, WorkflowDataType.STRING_TYPE);
        PARAM_DEFINITION.put(USER_LIST, WorkflowDataType.STRING_LIST_TYPE);
        PARAM_DEFINITION.put(PERMISSIONS, WorkflowDataType.STRING_LIST_TYPE);
    }

    /**
     * Kicks off the add-role workflow for the given role, users and permissions.
     *
     * @param userStoreDomain domain of the user store the role belongs to
     * @param role            name of the role to create (without domain prefix)
     * @param userList        users to assign to the new role; may be null
     * @param permissions     permissions to grant the role; may be null
     * @return result of {@code startWorkFlow} (engine-defined completion state)
     * @throws WorkflowException if the operation is invalid (e.g. role exists)
     *                           or the workflow engine fails
     */
    public boolean startAddRoleFlow(String userStoreDomain, String role, String[] userList, Permission[] permissions)
            throws WorkflowException {
        WorkflowManagementService workflowService = IdentityWorkflowDataHolder.getInstance().getWorkflowService();
        // Normalize nulls to empty arrays so the rest of the method is null-safe.
        if (permissions == null) {
            permissions = new Permission[0];
        }
        if (userList == null) {
            userList = new String[0];
        }
        int tenant = CarbonContext.getThreadLocalCarbonContext().getTenantId();
        String fullyQualifiedName = UserCoreUtil.addDomainToName(role, userStoreDomain);
        // Flatten each permission to "resourceId->action" for transport.
        List<String> permissionList = new ArrayList<>(permissions.length);
        for (int i = 0; i < permissions.length; i++) {
            permissionList.add(permissions[i].getResourceId() + SEPARATOR + permissions[i].getAction());
        }
        Map<String, Object> wfParams = new HashMap<>();
        Map<String, Object> nonWfParams = new HashMap<>();
        wfParams.put(ROLENAME, role);
        wfParams.put(USER_STORE_DOMAIN, userStoreDomain);
        wfParams.put(PERMISSIONS, permissionList);
        wfParams.put(USER_LIST, Arrays.asList(userList));
        String uuid = UUID.randomUUID().toString();
        // Entity 0 is the role itself; the rest are the users being assigned to it.
        Entity[] entities = new Entity[userList.length + 1];
        entities[0] = new Entity(fullyQualifiedName, UserStoreWFConstants.ENTITY_TYPE_ROLE, tenant);
        for (int i = 0; i < userList.length; i++) {
            fullyQualifiedName = UserCoreUtil.addDomainToName(userList[i], userStoreDomain);
            entities[i + 1] = new Entity(fullyQualifiedName, UserStoreWFConstants.ENTITY_TYPE_USER, tenant);
        }
        // Validation is skipped on the callback leg (workflow already completed).
        if (!Boolean.TRUE.equals(getWorkFlowCompleted()) && !isValidOperation(entities)) {
            throw new WorkflowException("Operation is not valid");
        }
        boolean state = startWorkFlow(wfParams, nonWfParams, uuid);
        //WF_REQUEST_ENTITY_RELATIONSHIP table has foreign key to WF_REQUEST, so need to run this after WF_REQUEST is
        // updated
        if (!Boolean.TRUE.equals(getWorkFlowCompleted()) && !state) {
            try {
                workflowService.addRequestEntityRelationships(uuid, entities);
            } catch (InternalWorkflowException e) {
                //debug exception which occurs at DB level since no workflows associated with event
                if (log.isDebugEnabled()) {
                    log.debug("No workflow associated with the operation.", e);
                }
            }
        }
        return state;
    }

    @Override
    public String getEventId() {
        return UserStoreWFConstants.ADD_ROLE_EVENT;
    }

    @Override
    public Map<String, String> getParamDefinitions() {
        return PARAM_DEFINITION;
    }

    @Override
    public String getFriendlyName() {
        return FRIENDLY_NAME;
    }

    @Override
    public String getDescription() {
        return FRIENDLY_DESCRIPTION;
    }

    @Override
    public String getCategory() {
        return UserStoreWFConstants.CATEGORY_USERSTORE_OPERATIONS;
    }

    @Override
    public boolean retryNeedAtCallback() {
        return true;
    }

    /**
     * Completion callback: if the workflow was approved (or skipped), actually
     * creates the role via the user store manager; otherwise only logs the
     * rejection. Parameters are the ones serialized in startAddRoleFlow.
     */
    @Override
    public void onWorkflowCompletion(String status, Map<String, Object> requestParams, Map<String, Object>
            responseAdditionalParams, int tenantId) throws WorkflowException {
        String roleName = (String) requestParams.get(ROLENAME);
        if (roleName == null) {
            throw new WorkflowException("Callback request for Add role received without the mandatory " +
                    "parameter 'roleName'");
        }
        String userStoreDomain = (String) requestParams.get(USER_STORE_DOMAIN);
        if (StringUtils.isNotBlank(userStoreDomain)) {
            // Re-qualify the role name with its domain for the user store call.
            roleName = userStoreDomain + "/" + roleName;
        }
        List<String> userList = (List<String>) requestParams.get(USER_LIST);
        String[] users;
        if (userList != null) {
            users = new String[userList.size()];
            users = userList.toArray(users);
        } else {
            users = new String[0];
        }
        // Rebuild Permission objects from their "resourceId->action" encoding.
        List<String> permissionList = (List<String>) requestParams.get(PERMISSIONS);
        Permission[] permissions;
        if (permissionList != null) {
            permissions = new Permission[permissionList.size()];
            int i = 0;
            for (String permissionString : permissionList) {
                String[] splittedString = permissionString.split(SEPARATOR);
                if (splittedString.length == 2) {
                    permissions[i] = new Permission(splittedString[0], splittedString[1]);
                }
                i++;
            }
        } else {
            permissions = new Permission[0];
        }
        if (WorkflowRequestStatus.APPROVED.toString().equals(status) ||
                WorkflowRequestStatus.SKIPPED.toString().equals(status)) {
            try {
                RealmService realmService = IdentityWorkflowDataHolder.getInstance().getRealmService();
                UserRealm userRealm = realmService.getTenantUserRealm(tenantId);
                userRealm.getUserStoreManager().addRole(roleName, users, permissions);
            } catch (UserStoreException e) {
                // Sending e.getMessage() since it is required to give error message to end user.
                throw new WorkflowException(e.getMessage(), e);
            }
        } else {
            if (retryNeedAtCallback()) {
                //unset threadlocal variable
                unsetWorkFlowCompleted();
            }
            if (log.isDebugEnabled()) {
                log.debug(
                        "Adding role is aborted for role '" + roleName + "', Reason: Workflow response was " + status);
            }
        }
    }

    /**
     * Validates the operation against current state: the role must not already
     * exist (nor be pending in an add/rename workflow), assigned users must
     * exist and must not be pending deletion.
     *
     * @throws WorkflowException describing the first violation found
     */
    @Override
    public boolean isValidOperation(Entity[] entities) throws WorkflowException {
        WorkflowManagementService workflowService = IdentityWorkflowDataHolder.getInstance().getWorkflowService();
        boolean eventEngaged = workflowService.eventEngagedWithWorkflows(UserStoreWFConstants.ADD_ROLE_EVENT);
        RealmService realmService = IdentityWorkflowDataHolder.getInstance().getRealmService();
        UserRealm userRealm;
        AbstractUserStoreManager userStoreManager;
        try {
            userRealm = realmService.getTenantUserRealm(PrivilegedCarbonContext.getThreadLocalCarbonContext()
                    .getTenantId());
            userStoreManager = (AbstractUserStoreManager) userRealm.getUserStoreManager();
        } catch (UserStoreException e) {
            throw new WorkflowException("Error while retrieving user realm.", e);
        }
        for (int i = 0; i < entities.length; i++) {
            try {
                if (entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_ROLE && (workflowService
                        .entityHasPendingWorkflowsOfType(entities[i], UserStoreWFConstants.ADD_ROLE_EVENT) ||
                        workflowService.entityHasPendingWorkflowsOfType(entities[i], UserStoreWFConstants
                                .UPDATE_ROLE_NAME_EVENT) || userStoreManager.isExistingRole(entities[i].getEntityId()
                        ))) {
                    throw new WorkflowException("Role name already exists in the system. Please pick another role " +
                            "name.");
                } else if (workflowService.eventEngagedWithWorkflows(UserStoreWFConstants.ADD_USER_EVENT) &&
                        entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_USER && workflowService
                        .entityHasPendingWorkflowsOfType(entities[i], UserStoreWFConstants.DELETE_USER_EVENT)) {
                    throw new WorkflowException("One or more assigned users are pending in delete workflow.");
                } else if (entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_USER && !userStoreManager
                        .isExistingUser(entities[i].getEntityId())) {
                    throw new WorkflowException("User " + entities[i].getEntityId() + " does not exist.");
                }
            } catch (InternalWorkflowException | org.wso2.carbon.user.core.UserStoreException e) {
                throw new WorkflowException(e.getMessage(), e);
            }
        }
        return true;
    }
}
| apache-2.0 |
objectiser/camel | components/camel-rest/src/main/java/org/apache/camel/component/rest/DefaultRestRegistry.java | 9606 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rest;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Producer;
import org.apache.camel.Route;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.Service;
import org.apache.camel.ServiceStatus;
import org.apache.camel.StatefulService;
import org.apache.camel.StaticService;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.spi.RestRegistry;
import org.apache.camel.support.LifecycleStrategySupport;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
/**
 * Default {@link RestRegistry} implementation.
 * <p>
 * Keeps one {@link RestServiceEntry} per registered {@link Consumer} (in
 * registration order) and can render the REST api documentation as JSON by
 * delegating to a lazily created and cached rest-api {@link Producer}.
 */
public class DefaultRestRegistry extends ServiceSupport implements StaticService, RestRegistry, CamelContextAware {
    private CamelContext camelContext;
    // registered Rest services keyed by their consumer, kept in registration order
    private final Map<Consumer, RestService> registry = new LinkedHashMap<>();
    // cached producer for the rest-api endpoint used to render the api-doc; created on first use
    private transient Producer apiProducer;
    /**
     * Registers a Rest service for the given consumer, capturing all of its
     * route/contract metadata in a {@link RestServiceEntry}.
     */
    @Override
    public void addRestService(Consumer consumer, String url, String baseUrl, String basePath, String uriTemplate, String method,
                               String consumes, String produces, String inType, String outType, String routeId, String description) {
        RestServiceEntry entry = new RestServiceEntry(consumer, url, baseUrl, basePath, uriTemplate, method, consumes, produces, inType, outType, routeId, description);
        registry.put(consumer, entry);
    }
    /** Unregisters the Rest service associated with the given consumer (no-op if unknown). */
    @Override
    public void removeRestService(Consumer consumer) {
        registry.remove(consumer);
    }
    /** Returns a defensive copy, so callers cannot mutate the internal registry. */
    @Override
    public List<RestRegistry.RestService> listAllRestServices() {
        return new ArrayList<>(registry.values());
    }
    @Override
    public int size() {
        return registry.size();
    }
    /**
     * Renders the api documentation as a JSON string, or returns {@code null}
     * when no rest-api endpoint exists and none could be created.
     * <p>
     * On first call this locates (or creates) a rest-api endpoint and caches a
     * producer for it; subsequent calls reuse the cached producer.
     */
    @Override
    public String apiDocAsJson() {
        // see if there is a rest-api endpoint which would be the case if rest api-doc has been explicit enabled
        if (apiProducer == null) {
            Endpoint restApiEndpoint = null;
            Endpoint restEndpoint = null;
            // prefer an existing rest-api endpoint; remember the first rest endpoint as fallback
            for (Map.Entry<String, Endpoint> entry : camelContext.getEndpointMap().entrySet()) {
                String uri = entry.getKey();
                if (uri.startsWith("rest-api:")) {
                    restApiEndpoint = entry.getValue();
                    break;
                } else if (restEndpoint == null && uri.startsWith("rest:")) {
                    restEndpoint = entry.getValue();
                }
            }
            if (restApiEndpoint == null && restEndpoint != null) {
                // no rest-api has been explicit enabled, then we need to create it first
                RestEndpoint rest = (RestEndpoint) restEndpoint;
                String componentName = rest.getProducerComponentName();
                if (componentName != null) {
                    RestConfiguration config = camelContext.getRestConfiguration(componentName, true);
                    String apiComponent = config.getApiComponent() != null ? config.getApiComponent() : RestApiEndpoint.DEFAULT_API_COMPONENT_NAME;
                    String path = config.getApiContextPath() != null ? config.getApiContextPath() : "api-doc";
                    restApiEndpoint = camelContext.getEndpoint(String.format("rest-api:%s/%s?componentName=%s&apiComponentName=%s&contextIdPattern=#name#",
                            path, camelContext.getName(), componentName, apiComponent));
                }
            }
            if (restApiEndpoint != null) {
                // reuse the producer to avoid creating it
                try {
                    apiProducer = restApiEndpoint.createProducer();
                    // register with the context so the producer is managed/stopped with it
                    camelContext.addService(apiProducer, true);
                } catch (Exception e) {
                    throw RuntimeCamelException.wrapRuntimeCamelException(e);
                }
            }
        }
        if (apiProducer != null) {
            try {
                // let the rest-api producer render the api-doc into a scratch exchange body
                Exchange dummy = apiProducer.getEndpoint().createExchange();
                apiProducer.process(dummy);
                String json = dummy.getMessage().getBody(String.class);
                return json;
            } catch (Exception e) {
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }
        // no rest-api endpoint available, so there is no api-doc to render
        return null;
    }
    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }
    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }
    @Override
    protected void doStart() throws Exception {
        ObjectHelper.notNull(camelContext, "camelContext", this);
        // add a lifecycle so we can keep track when consumers is being removed, so we can unregister them from our registry
        camelContext.addLifecycleStrategy(new RemoveRestServiceLifecycleStrategy());
    }
    @Override
    protected void doStop() throws Exception {
        registry.clear();
    }
    /**
     * Represents a rest service
     * <p>
     * Immutable snapshot of the metadata captured at registration time; only
     * {@link #getState()} is computed dynamically from the consumer.
     */
    private final class RestServiceEntry implements RestService {
        private final Consumer consumer;
        private final String url;
        private final String baseUrl;
        private final String basePath;
        private final String uriTemplate;
        private final String method;
        private final String consumes;
        private final String produces;
        private final String inType;
        private final String outType;
        private final String routeId;
        private final String description;
        private RestServiceEntry(Consumer consumer, String url, String baseUrl, String basePath, String uriTemplate, String method,
                                 String consumes, String produces, String inType, String outType, String routeId, String description) {
            this.consumer = consumer;
            this.url = url;
            this.baseUrl = baseUrl;
            this.basePath = basePath;
            this.uriTemplate = uriTemplate;
            this.method = method;
            this.consumes = consumes;
            this.produces = produces;
            this.inType = inType;
            this.outType = outType;
            this.routeId = routeId;
            this.description = description;
        }
        @Override
        public Consumer getConsumer() {
            return consumer;
        }
        @Override
        public String getUrl() {
            return url;
        }
        @Override
        public String getBaseUrl() {
            return baseUrl;
        }
        @Override
        public String getBasePath() {
            return basePath;
        }
        @Override
        public String getUriTemplate() {
            return uriTemplate;
        }
        @Override
        public String getMethod() {
            return method;
        }
        @Override
        public String getConsumes() {
            return consumes;
        }
        @Override
        public String getProduces() {
            return produces;
        }
        @Override
        public String getInType() {
            return inType;
        }
        @Override
        public String getOutType() {
            return outType;
        }
        @Override
        public String getState() {
            // must use String type to be sure remote JMX can read the attribute without requiring Camel classes.
            ServiceStatus status = null;
            if (consumer instanceof StatefulService) {
                status = ((StatefulService) consumer).getStatus();
            }
            // if no status exists then its stopped
            if (status == null) {
                status = ServiceStatus.Stopped;
            }
            return status.name();
        }
        @Override
        public String getRouteId() {
            return routeId;
        }
        @Override
        public String getDescription() {
            return description;
        }
    }
    /**
     * A {@link org.apache.camel.spi.LifecycleStrategy} that keeps track when a {@link Consumer} is removed
     * and automatic un-register it from this REST registry.
     */
    private final class RemoveRestServiceLifecycleStrategy extends LifecycleStrategySupport {
        @Override
        public void onServiceRemove(CamelContext context, Service service, Route route) {
            super.onServiceRemove(context, service, route);
            // if its a consumer then de-register it from the rest registry
            if (service instanceof Consumer) {
                removeRestService((Consumer) service);
            }
        }
    }
}
| apache-2.0 |
michalkurka/h2o-3 | h2o-extensions/xgboost/src/main/java/hex/tree/xgboost/rabit/RabitWorker.java | 5089 | package hex.tree.xgboost.rabit;
import hex.tree.xgboost.rabit.communication.XGBoostAutoBuffer;
import hex.tree.xgboost.rabit.util.LinkMap;
import org.apache.log4j.Logger;
import water.AutoBuffer;
import java.io.IOException;
import java.nio.channels.SocketChannel;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
 * Represents one Rabit (XGBoost) worker connected to the H2O-hosted Rabit tracker.
 * <p>
 * The constructor performs the magic-number handshake over the supplied socket
 * channel and reads the worker's rank, world size, job id and command string.
 * {@link #assignRank} then pushes the tree/ring topology to the worker and
 * negotiates which peer connections still need to be established.
 */
public class RabitWorker implements Comparable<RabitWorker> {
    private static final Logger LOG = Logger.getLogger(RabitWorker.class);
    final String host;
    final int workerPort;
    private SocketChannel socket;
    int rank;
    int worldSize;
    String jobId;
    public String cmd;
    // number of inbound peer connections this worker still waits to accept
    int waitAccept;
    // port the worker listens on for peer connections; -1 until reported by the worker
    private int port;
    private XGBoostAutoBuffer ab;
    private XGBoostAutoBuffer writerAB;
    /**
     * Performs the initial handshake with a freshly connected worker.
     *
     * @param channel connected socket channel to the worker
     * @throws IOException if reading from / writing to the channel fails
     * @throws IllegalStateException if the worker sends the wrong magic number
     */
    RabitWorker(SocketChannel channel) throws IOException {
        this.ab = new XGBoostAutoBuffer(channel);
        this.socket = channel;
        this.host = channel.socket().getInetAddress().getHostAddress();
        this.workerPort = channel.socket().getPort();
        int magicReceived = ab.get4();
        if(RabitTrackerH2O.MAGIC != magicReceived) {
            throw new IllegalStateException(
                    "Tracker received wrong magic number ["
                            + magicReceived +
                            "] from host " + this.host
            );
        }
        // echo the magic number back to confirm the handshake
        writerAB = new XGBoostAutoBuffer();
        writerAB.put4(RabitTrackerH2O.MAGIC);
        AutoBuffer.writeToChannel(writerAB.buffer(), socket);
        this.rank = ab.get4();
        this.worldSize = ab.get4();
        this.jobId = safeLowercase(ab.getStr());
        this.cmd = safeLowercase(ab.getStr());
        this.waitAccept = 0;
        this.port = -1;
        LOG.debug("Initialized worker " + this.host + " with rank " + this.rank + " and command [" + this.cmd + "].");
    }
    /**
     * Lower-cases protocol strings in a locale-independent way; returns null for null input.
     * Uses Locale.ROOT so comparisons such as {@code "null".equals(jobId)} behave the same
     * regardless of the JVM's default locale (e.g. the Turkish dotless-i problem).
     */
    private String safeLowercase(String str) {
        return null == str ? null : str.toLowerCase(Locale.ROOT);
    }
    /**
     * Decides the rank of this worker: an explicitly requested rank wins,
     * otherwise a previously recorded rank for the same job id is reused;
     * -1 means no rank could be decided yet.
     */
    int decideRank(Map<String, Integer> jobToRankMap) {
        if (rank >= 0) {
            return rank;
        }
        if (!"null".equals(jobId) && jobToRankMap.containsKey(jobId)) {
            return jobToRankMap.get(jobId);
        }
        return -1;
    }
    /** Buffer for reading protocol messages from this worker. */
    public XGBoostAutoBuffer receiver() {
        return ab;
    }
    /**
     * Assigns the given rank to this worker, sends its tree/ring neighborhood,
     * and negotiates which peer links the worker must connect to vs. accept.
     *
     * @param rank the rank to assign
     * @param waitConn workers (by rank) that are waiting for inbound connections
     * @param linkMap tree/ring/parent topology for the whole job
     * @throws IOException if the socket exchange fails
     */
    public void assignRank(int rank, Map<Integer, RabitWorker> waitConn, LinkMap linkMap) throws IOException {
        this.rank = rank;
        List<Integer> nnset = linkMap.treeMap.get(rank);
        Integer rprev = linkMap.ringMap.get(rank)._1();
        Integer rnext = linkMap.ringMap.get(rank)._2();
        // send rank, parent, world size and the tree neighbor set
        writerAB.put4(rank);
        writerAB.put4(linkMap.parentMap.get(rank));
        writerAB.put4(linkMap.treeMap.size());
        writerAB.put4(nnset.size());
        for (Integer r : nnset) {
            writerAB.put4(r);
        }
        // ring predecessor/successor are added to the neighbor set unless they are self/absent
        if (rprev != -1 && rprev != rank) {
            nnset.add(rprev);
            writerAB.put4(rprev);
        } else {
            writerAB.put4(-1);
        }
        if (rnext != -1 && rnext != rank) {
            nnset.add(rnext);
            writerAB.put4(rnext);
        } else {
            writerAB.put4(-1);
        }
        AutoBuffer.writeToChannel(writerAB.buffer(), socket);
        while (true) {
            // the worker reports which neighbor links it already has
            int ngood = ab.get4();
            Set<Integer> goodSet = new LinkedHashSet<>();
            for(int i = 0; i < ngood; i++) {
                int got = ab.get4();
                goodSet.add(got);
            }
            assert nnset.containsAll(goodSet);
            // badSet = neighbors still missing; conset = those we can tell the worker to dial now
            Set<Integer> badSet = new LinkedHashSet<>(nnset);
            badSet.removeAll(goodSet);
            Set<Integer> conset = new LinkedHashSet<>();
            for (Integer r : badSet) {
                if(waitConn.containsKey(r)) {
                    conset.add(r);
                }
            }
            writerAB.put4(conset.size());
            AutoBuffer.writeToChannel(writerAB.buffer(), socket);
            // remaining links the worker must accept instead of dial
            writerAB.put4(badSet.size() - conset.size());
            AutoBuffer.writeToChannel(writerAB.buffer(), socket);
            for (Integer r : conset) {
                writerAB.putStr(waitConn.get(r).host);
                writerAB.put4(waitConn.get(r).port);
                writerAB.put4(r);
                AutoBuffer.writeToChannel(writerAB.buffer(), socket);
            }
            int nerr = ab.get4();
            if(nerr != 0) {
                // worker reported connection errors; retry the negotiation round
                continue;
            }
            this.port = ab.get4();
            Set<Integer> rmset = new LinkedHashSet<>();
            // All connections were successfully setup
            for (Integer r : conset) {
                waitConn.get(r).waitAccept -= 1;
                if(waitConn.get(r).waitAccept == 0) {
                    rmset.add(r);
                }
            }
            for (Integer r : rmset) {
                waitConn.remove(r);
            }
            this.waitAccept = badSet.size() - conset.size();
            return;
        }
    }
    /**
     * Orders workers by job id.
     * NOTE(review): jobId may be null when the worker did not send one; this
     * would throw NPE here — callers appear to rely on jobId being set.
     */
    @Override
    public int compareTo(RabitWorker o) {
        return jobId.compareTo(o.jobId);
    }
}
| apache-2.0 |
kagix/infotranspub-backend | modules/onebusaway-gtfs/src/main/java/org/onebusaway/gtfs/serialization/mappings/RouteAgencyFieldMappingFactory.java | 3782 | /**
* Copyright (C) 2011 Brian Ferris <bdferris@onebusaway.org>
* Copyright (C) 2012 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onebusaway.gtfs.serialization.mappings;
import java.util.List;
import java.util.Map;
import org.onebusaway.csv_entities.CsvEntityContext;
import org.onebusaway.csv_entities.exceptions.MissingRequiredFieldException;
import org.onebusaway.csv_entities.schema.AbstractFieldMapping;
import org.onebusaway.csv_entities.schema.BeanWrapper;
import org.onebusaway.csv_entities.schema.EntitySchemaFactory;
import org.onebusaway.csv_entities.schema.FieldMapping;
import org.onebusaway.csv_entities.schema.FieldMappingFactory;
import org.onebusaway.gtfs.model.Agency;
import org.onebusaway.gtfs.model.Route;
import org.onebusaway.gtfs.serialization.GtfsReader;
import org.onebusaway.gtfs.serialization.GtfsReaderContext;
/**
* Responsible for setting the {@link Route#setAgency(Agency)} from a csv
* "agency_id" field in "routes.txt" and vice-versa.
*
* @author bdferris
* @see Route#setAgency(Agency)
*/
/**
 * Responsible for setting the {@link Route#setAgency(Agency)} from a csv
 * "agency_id" field in "routes.txt" and vice-versa.
 *
 * @author bdferris
 * @see Route#setAgency(Agency)
 */
public class RouteAgencyFieldMappingFactory implements FieldMappingFactory {

  public FieldMapping createFieldMapping(EntitySchemaFactory schemaFactory,
      Class<?> entityType, String csvFieldName, String objFieldName,
      Class<?> objFieldType, boolean required) {
    return new RouteAgencyFieldMapping(entityType, csvFieldName, objFieldName,
        Agency.class, required);
  }

  private class RouteAgencyFieldMapping extends AbstractFieldMapping {

    public RouteAgencyFieldMapping(Class<?> entityType, String csvFieldName,
        String objFieldName, Class<?> objFieldType, boolean required) {
      super(entityType, csvFieldName, objFieldName, required);
    }

    public void translateFromCSVToObject(CsvEntityContext context,
        Map<String, Object> csvValues, BeanWrapper object) {
      GtfsReaderContext ctx = (GtfsReaderContext) context.get(GtfsReader.KEY_CONTEXT);
      Agency agency = isMissing(csvValues)
          ? resolveDefaultAgency(ctx, object)
          : resolveAgencyById(ctx, (String) csvValues.get(_csvFieldName), object);
      object.setPropertyValue(_objFieldName, agency);
    }

    /** With no agency_id value present, the feed must have exactly one agency. */
    private Agency resolveDefaultAgency(GtfsReaderContext ctx, BeanWrapper object) {
      List<Agency> agencies = ctx.getAgencies();
      if (agencies.isEmpty()) {
        throw new AgencyNotFoundForRouteException(Route.class,
            object.getWrappedInstance(Route.class));
      }
      if (agencies.size() > 1) {
        throw new MissingRequiredFieldException(_entityType, _csvFieldName);
      }
      return agencies.get(0);
    }

    /** Looks the (translated) agency id up among the agencies read so far. */
    private Agency resolveAgencyById(GtfsReaderContext ctx, String rawAgencyId,
        BeanWrapper object) {
      String agencyId = ctx.getTranslatedAgencyId(rawAgencyId);
      for (Agency candidate : ctx.getAgencies()) {
        if (candidate.getId().equals(agencyId)) {
          return candidate;
        }
      }
      throw new AgencyNotFoundForRouteException(Route.class,
          object.getWrappedInstance(Route.class), agencyId);
    }

    public void translateFromObjectToCSV(CsvEntityContext context,
        BeanWrapper object, Map<String, Object> csvValues) {
      Agency agency = (Agency) object.getPropertyValue(_objFieldName);
      if (agency == null && isOptional()) {
        return;
      }
      csvValues.put(_csvFieldName, agency.getId());
    }
  }
}
| apache-2.0 |
Skarlso/gocd | server/src/main/java/com/thoughtworks/go/listener/TimelineUpdateListener.java | 867 | /*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.listener;
import java.util.TreeSet;
import com.thoughtworks.go.domain.PipelineTimelineEntry;
/**
 * Listener that is notified when a {@link PipelineTimelineEntry} is added to
 * the pipeline timeline.
 */
public interface TimelineUpdateListener {
    /**
     * Notification callback for a newly added timeline entry.
     *
     * @param newlyAddedEntry the entry that was added
     * @param timeline the ordered set of timeline entries
     */
    void added(PipelineTimelineEntry newlyAddedEntry, TreeSet<PipelineTimelineEntry> timeline);
}
gf53520/kafka | streams/src/main/java/org/apache/kafka/streams/processor/internals/StoreChangelogReader.java | 14999 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.InvalidOffsetException;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.StateRestoreListener;
import org.slf4j.Logger;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Restores state stores for active stream tasks by replaying their changelog
 * topics with a dedicated restore consumer.
 * <p>
 * Partitions move through three phases: registered ({@code needsInitializing}),
 * actively restoring ({@code needsRestoring}), and done
 * ({@code completedRestorers}). {@link #restore(RestoringTasks)} is called
 * repeatedly from the stream thread loop and advances each partition until it
 * reaches its end offset, checkpoint, or offset limit.
 */
public class StoreChangelogReader implements ChangelogReader {

    private final Logger log;
    private final Consumer<byte[], byte[]> restoreConsumer;
    private final StateRestoreListener userStateRestoreListener;
    // target end offset per changelog partition still being restored
    private final Map<TopicPartition, Long> endOffsets = new HashMap<>();
    // cached topic metadata (topic -> partitions) from the restore consumer
    private final Map<String, List<PartitionInfo>> partitionInfo = new HashMap<>();
    private final Map<TopicPartition, StateRestorer> stateRestorers = new HashMap<>();
    // partitions currently having records replayed into their stores
    private final Set<TopicPartition> needsRestoring = new HashSet<>();
    // partitions registered but not yet initialized (metadata/end offsets unknown)
    private final Set<TopicPartition> needsInitializing = new HashSet<>();
    private final Set<TopicPartition> completedRestorers = new HashSet<>();
    private final Duration pollTime;

    public StoreChangelogReader(final Consumer<byte[], byte[]> restoreConsumer,
                                final Duration pollTime,
                                final StateRestoreListener userStateRestoreListener,
                                final LogContext logContext) {
        this.restoreConsumer = restoreConsumer;
        this.pollTime = pollTime;
        this.log = logContext.logger(getClass());
        this.userStateRestoreListener = userStateRestoreListener;
    }

    /**
     * Registers a restorer for its changelog partition; the partition is queued
     * for initialization on the next {@link #restore(RestoringTasks)} call.
     */
    @Override
    public void register(final StateRestorer restorer) {
        if (!stateRestorers.containsKey(restorer.partition())) {
            restorer.setUserRestoreListener(userStateRestoreListener);
            stateRestorers.put(restorer.partition(), restorer);
            log.trace("Added restorer for changelog {}", restorer.partition());
        }
        needsInitializing.add(restorer.partition());
    }

    /**
     * Performs one round of restoration: initializes newly registered
     * partitions, polls one batch of changelog records, applies them, and
     * returns the set of partitions whose restoration has completed.
     * On an {@link InvalidOffsetException} the affected tasks' stores are wiped
     * and restoration restarts from the beginning of the changelog.
     */
    public Collection<TopicPartition> restore(final RestoringTasks active) {
        if (!needsInitializing.isEmpty()) {
            initialize(active);
        }
        if (needsRestoring.isEmpty()) {
            restoreConsumer.unsubscribe();
            return completed();
        }
        try {
            final ConsumerRecords<byte[], byte[]> records = restoreConsumer.poll(pollTime);
            for (final TopicPartition partition : needsRestoring) {
                final StateRestorer restorer = stateRestorers.get(partition);
                final long pos = processNext(records.records(partition), restorer, endOffsets.get(partition));
                restorer.setRestoredOffset(pos);
                if (restorer.hasCompleted(pos, endOffsets.get(partition))) {
                    restorer.restoreDone();
                    endOffsets.remove(partition);
                    completedRestorers.add(partition);
                }
            }
        } catch (final InvalidOffsetException recoverableException) {
            log.warn("Restoring StreamTasks failed. Deleting StreamTasks stores to recreate from scratch.", recoverableException);
            final Set<TopicPartition> partitions = recoverableException.partitions();
            for (final TopicPartition partition : partitions) {
                final StreamTask task = active.restoringTaskFor(partition);
                log.info("Reinitializing StreamTask {} for changelog {}", task, partition);
                needsInitializing.remove(partition);
                needsRestoring.remove(partition);
                final StateRestorer restorer = stateRestorers.get(partition);
                restorer.setCheckpointOffset(StateRestorer.NO_CHECKPOINT);
                task.reinitializeStateStoresForPartitions(recoverableException.partitions());
            }
            restoreConsumer.seekToBeginning(partitions);
        }
        needsRestoring.removeAll(completedRestorers);
        if (needsRestoring.isEmpty()) {
            restoreConsumer.unsubscribe();
        }
        return completed();
    }

    /**
     * Resolves metadata and end offsets for registered partitions, marks those
     * that already have all of their data as completed, and starts restoration
     * for the rest.
     */
    private void initialize(final RestoringTasks active) {
        if (!restoreConsumer.subscription().isEmpty()) {
            throw new StreamsException("Restore consumer should not be subscribed to any topics (" + restoreConsumer.subscription() + ")");
        }
        // first refresh the changelog partition information from brokers, since initialize is only called when
        // the needsInitializing map is not empty, meaning we do not know the metadata for some of them yet
        refreshChangelogInfo();
        final Set<TopicPartition> initializable = new HashSet<>();
        for (final TopicPartition topicPartition : needsInitializing) {
            if (hasPartition(topicPartition)) {
                initializable.add(topicPartition);
            }
        }
        // try to fetch end offsets for the initializable restorers and remove any partitions
        // where we already have all of the data
        try {
            endOffsets.putAll(restoreConsumer.endOffsets(initializable));
        } catch (final TimeoutException e) {
            // if timeout exception gets thrown we just give up this time and retry in the next run loop
            log.debug("Could not fetch end offset for {}; will fall back to partition by partition fetching", initializable);
            return;
        }
        final Iterator<TopicPartition> iter = initializable.iterator();
        while (iter.hasNext()) {
            final TopicPartition topicPartition = iter.next();
            final Long endOffset = endOffsets.get(topicPartition);
            // offset should not be null; but since the consumer API does not guarantee it
            // we add this check just in case
            if (endOffset != null) {
                final StateRestorer restorer = stateRestorers.get(topicPartition);
                if (restorer.checkpoint() >= endOffset) {
                    // local state is already at (or beyond) the changelog end; nothing to replay
                    restorer.setRestoredOffset(restorer.checkpoint());
                    iter.remove();
                    completedRestorers.add(topicPartition);
                } else if (restorer.offsetLimit() == 0 || endOffset == 0) {
                    // nothing may be restored (limit 0) or the changelog is empty
                    restorer.setRestoredOffset(0);
                    iter.remove();
                    completedRestorers.add(topicPartition);
                } else {
                    restorer.setEndingOffset(endOffset);
                }
                needsInitializing.remove(topicPartition);
            } else {
                log.info("End offset cannot be found form the returned metadata; removing this partition from the current run loop");
                iter.remove();
            }
        }
        // set up restorer for those initializable
        if (!initializable.isEmpty()) {
            startRestoration(initializable, active);
        }
    }

    /**
     * Assigns the given partitions to the restore consumer and seeks each one:
     * to its checkpoint when available, otherwise to the beginning (wiping the
     * task state first when EOS is enabled, since an absent checkpoint then
     * implies an unclean shutdown).
     */
    private void startRestoration(final Set<TopicPartition> initialized,
                                  final RestoringTasks active) {
        log.debug("Start restoring state stores from changelog topics {}", initialized);
        final Set<TopicPartition> assignment = new HashSet<>(restoreConsumer.assignment());
        assignment.addAll(initialized);
        restoreConsumer.assign(assignment);
        final List<StateRestorer> needsPositionUpdate = new ArrayList<>();
        for (final TopicPartition partition : initialized) {
            final StateRestorer restorer = stateRestorers.get(partition);
            if (restorer.checkpoint() != StateRestorer.NO_CHECKPOINT) {
                log.trace("Found checkpoint {} from changelog {} for store {}.", restorer.checkpoint(), partition, restorer.storeName());
                restoreConsumer.seek(partition, restorer.checkpoint());
                logRestoreOffsets(partition,
                        restorer.checkpoint(),
                        endOffsets.get(partition));
                restorer.setStartingOffset(restoreConsumer.position(partition));
                restorer.restoreStarted();
            } else {
                log.trace("Did not find checkpoint from changelog {} for store {}, rewinding to beginning.", partition, restorer.storeName());
                restoreConsumer.seekToBeginning(Collections.singletonList(partition));
                needsPositionUpdate.add(restorer);
            }
        }
        for (final StateRestorer restorer : needsPositionUpdate) {
            final TopicPartition partition = restorer.partition();
            // If checkpoint does not exist it means the task was not shutdown gracefully before;
            // and in this case if EOS is turned on we should wipe out the state and re-initialize the task
            final StreamTask task = active.restoringTaskFor(partition);
            if (task.isEosEnabled()) {
                log.info("No checkpoint found for task {} state store {} changelog {} with EOS turned on. " +
                        "Reinitializing the task and restore its state from the beginning.", task.id, restorer.storeName(), partition);
                needsInitializing.remove(partition);
                initialized.remove(partition);
                restorer.setCheckpointOffset(restoreConsumer.position(partition));
                task.reinitializeStateStoresForPartitions(Collections.singleton(partition));
            } else {
                log.info("Restoring task {}'s state store {} from beginning of the changelog {} ", task.id, restorer.storeName(), partition);
                final long position = restoreConsumer.position(restorer.partition());
                logRestoreOffsets(restorer.partition(),
                        position,
                        endOffsets.get(restorer.partition()));
                restorer.setStartingOffset(position);
                restorer.restoreStarted();
            }
        }
        needsRestoring.addAll(initialized);
    }

    private void logRestoreOffsets(final TopicPartition partition,
                                   final long startingOffset,
                                   final Long endOffset) {
        log.debug("Restoring partition {} from offset {} to endOffset {}",
                partition,
                startingOffset,
                endOffset);
    }

    private Collection<TopicPartition> completed() {
        return completedRestorers;
    }

    /** Refreshes the topic metadata cache; a timeout is tolerated and retried next loop. */
    private void refreshChangelogInfo() {
        try {
            partitionInfo.putAll(restoreConsumer.listTopics());
        } catch (final TimeoutException e) {
            log.debug("Could not fetch topic metadata within the timeout, will retry in the next run loop");
        }
    }

    /** Restored offsets for persistent stores only (in-memory stores are excluded). */
    @Override
    public Map<TopicPartition, Long> restoredOffsets() {
        final Map<TopicPartition, Long> restoredOffsets = new HashMap<>();
        for (final Map.Entry<TopicPartition, StateRestorer> entry : stateRestorers.entrySet()) {
            final StateRestorer restorer = entry.getValue();
            if (restorer.isPersistent()) {
                restoredOffsets.put(entry.getKey(), restorer.restoredOffset());
            }
        }
        return restoredOffsets;
    }

    /** Clears all bookkeeping state so the reader can be reused from scratch. */
    @Override
    public void reset() {
        partitionInfo.clear();
        stateRestorers.clear();
        needsRestoring.clear();
        endOffsets.clear();
        needsInitializing.clear();
        completedRestorers.clear();
    }

    /**
     * Applies one polled batch to the restorer, stopping at the end offset /
     * offset limit; returns the next position to continue restoring from.
     * Records with null keys (tombstone remnants) are skipped.
     */
    private long processNext(final List<ConsumerRecord<byte[], byte[]>> records,
                             final StateRestorer restorer,
                             final Long endOffset) {
        final List<ConsumerRecord<byte[], byte[]>> restoreRecords = new ArrayList<>();
        long nextPosition = -1;
        final int numberRecords = records.size();
        int numberRestored = 0;
        long lastRestoredOffset = -1;
        for (final ConsumerRecord<byte[], byte[]> record : records) {
            final long offset = record.offset();
            if (restorer.hasCompleted(offset, endOffset)) {
                nextPosition = record.offset();
                break;
            }
            lastRestoredOffset = offset;
            numberRestored++;
            if (record.key() != null) {
                restoreRecords.add(record);
            }
        }
        // if we have changelog topic then we should have restored all records in the list
        // otherwise if we did not fully restore to that point we need to set nextPosition
        // to the position of the restoreConsumer and we'll cause a TaskMigratedException exception
        if (nextPosition == -1 || (restorer.offsetLimit() == Long.MAX_VALUE && numberRecords != numberRestored)) {
            nextPosition = restoreConsumer.position(restorer.partition());
        }
        if (!restoreRecords.isEmpty()) {
            restorer.restore(restoreRecords);
            restorer.restoreBatchCompleted(lastRestoredOffset, records.size());
            log.trace("Restored from {} to {} with {} records, ending offset is {}, next starting position is {}",
                    restorer.partition(), restorer.storeName(), records.size(), lastRestoredOffset, nextPosition);
        }
        return nextPosition;
    }

    /** Whether the cached metadata contains the given topic-partition. */
    private boolean hasPartition(final TopicPartition topicPartition) {
        final List<PartitionInfo> partitions = partitionInfo.get(topicPartition.topic());
        if (partitions == null) {
            return false;
        }
        for (final PartitionInfo partition : partitions) {
            if (partition.partition() == topicPartition.partition()) {
                return true;
            }
        }
        return false;
    }
}
| apache-2.0 |
odiszapc/stem | components/clustermanager/src/main/java/org/stem/domain/topology/TopologyChangesListener.java | 1666 | /*
* Copyright 2014 Alexey Plotnik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.stem.domain.topology;
/**
 * Convenience {@link TopologyEventListener} base class that funnels every
 * add/remove topology notification into the single
 * {@link #onTopologyUpdated(Topology.Node)} callback.
 */
public abstract class TopologyChangesListener implements TopologyEventListener {

    /** Invoked once for each topology element that was added or removed. */
    public abstract void onTopologyUpdated(Topology.Node node);

    @Override
    public void onDatacenterAdded(Topology.Datacenter datacenter) {
        onTopologyUpdated(datacenter);
    }

    @Override
    public void onDatacenterRemoved(Topology.Datacenter datacenter) {
        onTopologyUpdated(datacenter);
    }

    @Override
    public void onRackAdded(Topology.Rack addedRack) {
        onTopologyUpdated(addedRack);
    }

    @Override
    public void onRackRemoved(Topology.Rack removedRack) {
        onTopologyUpdated(removedRack);
    }

    @Override
    public void onStorageNodeAdded(Topology.StorageNode storageNode) {
        onTopologyUpdated(storageNode);
    }

    @Override
    public void onStorageNodeRemoved(Topology.StorageNode storageNode) {
        onTopologyUpdated(storageNode);
    }

    @Override
    public void onDiskAdded(Topology.Disk addedDisk) {
        onTopologyUpdated(addedDisk);
    }

    @Override
    public void onDiskRemoved(Topology.Disk removedDisk) {
        onTopologyUpdated(removedDisk);
    }
}
| apache-2.0 |
data-integrations/anaplan | src/main/java/com/anaplan/client/ex/UserNotFoundException.java | 248 | package com.anaplan.client.ex;
/**
* Thrown when user does not exist in the system
*/
/**
 * Thrown when a user does not exist in the system.
 */
public class UserNotFoundException extends RuntimeException {

    /** Message used when callers do not supply their own detail text. */
    private static final String DEFAULT_MESSAGE = "User not recognized!";

    /**
     * Creates the exception with the default message.
     *
     * @param t the underlying cause
     */
    public UserNotFoundException(Throwable t) {
        super(DEFAULT_MESSAGE, t);
    }

    /**
     * Creates the exception with a custom detail message, allowing callers to
     * identify which user lookup failed.
     *
     * @param message detail message (e.g. including the unknown user id)
     * @param t the underlying cause
     */
    public UserNotFoundException(String message, Throwable t) {
        super(message, t);
    }
}
paragp/GTS-PreUAT | src/org/opengts/war/report/field/MotionReport.java | 98096 | // ----------------------------------------------------------------------------
// Copyright 2007-2015, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2007/03/25 Martin D. Flynn
// -Initial release
// 2007/06/03 Martin D. Flynn
// -Added PrivateLabel to constructor
// 2007/11/28 Martin D. Flynn
// -Added start 'address' to go with start geoPoint
// -Added stop geopoint/address to available report fields
// 2008/03/28 Martin D. Flynn
// -Added limited reporting support for devices that do not support OpenDMTP.
// 2008/05/14 Martin D. Flynn
// -Integrated Device DataTransport interface
// 2008/06/20 Martin D. Flynn
// -Added support for displaying a report 'map'.
// 2009/01/01 Martin D. Flynn
// -Added totals for drive/idle time and distance driven.
// -Added 'minimumStoppedTime' property (for simulated start/stop events only).
// -Added 'hasStartStopCodes' property to force simulated start/stop events.
// 2009/05/01 Martin D. Flynn
// -Added support for "idle" elapsed time (ignition on and not moving).
// 2009/08/07 Martin D. Flynn
// -Changed 'hasStartStopCode' to 'tripStartType'
// 2009/11/01 Martin D. Flynn
// -Added property 'stopOnIgnitionOff'
// 2010/05/24 Martin D. Flynn
// -Added idle accumulation to TRIP_ON_SPEED
// 2012/04/03 Martin D. Flynn
// -Added check for valid odometer (use previous valid odometer if current
// odometer is not valid). See "lastValidOdometerKM"
// -Added TRIP_ON_ENGINE (still being tested)
// 2013/08/06 Martin D. Flynn
// -Added check for Device specified WorkHours.
// -Fixed idle-time stop when ignition-off and "stopOnIgnitionOff" is true
// 2014/01/01 Martin D. Flynn
// -Added Fleet Detail group report support.
// 2014/09/16 Martin D. Flynn
// -Added column sort feature (see PROP_fleetSortByField)
// -Apply offset to start/stop odometer values (in "_addRecord" method). [2.5.7-B28]
// 2015/02/03 Martin D. Flynn
// -Changed definition of "Idle" to 0-speed during "trip" (see LEGACY_IDLE_DEF)
// 2015/08/16 Martin D. Flynn
// -Added init "this.pendingStopEvent" per device [2.6.0-B62]
// -Modified to fix XML output issues
// ----------------------------------------------------------------------------
package org.opengts.war.report.field;
import java.io.*;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.*;
import org.opengts.db.tables.*;
import org.opengts.war.tools.*;
import org.opengts.war.report.*;
public class MotionReport
extends ReportData
implements DBRecordHandler<EventData>
{
// ------------------------------------------------------------------------
// Detail report
// Multiple FieldData records per device
// 'From'/'To' date
// ------------------------------------------------------------------------
// Columns:
// index startDateTime movingElapse stopDateTime idleElapse
// ------------------------------------------------------------------------
// It would be helpful if the following items were available from the device:
// - "minimumStoppedTime"
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
// Properties
    // -- report property keys (read from the report definition in 'postInitialize')
    private static final String PROP_legacyIdleDefinition = "legacyIdleDefinition"; // boolean: pre-2015 idle definition
    private static final String PROP_alwaysReadAllEvents = "alwaysReadAllEvents"; // boolean: debug - ignore status-code filtering
    private static final String PROP_isFleetSummaryReport = "isFleetSummaryReport"; // boolean: summary (fleet) vs detail (device)
    private static final String PROP_fleetSortByField = "fleetSortByField"; // String: "field[,asc|desc[,LIMIT[,trim]]]"
    private static final String PROP_showMapLink = "showMapLink"; // boolean: allow map display link
    private static final String PROP_tripStartType = "tripStartType"; // String: see MOTION_xxxx values below
    private static final String PROP_minimumStoppedTime = "minimumStoppedTime"; // long: seconds (TRIP_ON_SPEED only)
    private static final String PROP_minimumSpeedKPH = "minimumSpeedKPH"; // double: km/h (TRIP_ON_SPEED only)
    private static final String PROP_stopOnIgnitionOff = "stopOnIgnitionOff"; // boolean: ignition-off delimits a stop
    private static final String PROP_tabulateByWorkHours = "tabulateByWorkHours"; // boolean: also accumulate WorkHours totals
    private static final String PROP_WorkHours_ = "WorkHours."; // prefix for WorkHours sub-properties
    // ------------------------------------------------------------------------
    // Trip start types
    // -- recognized values for the "tripStartType" property (case-insensitive)
    private static final String MOTION_DEFAULT[] = new String[] { "default" };
    private static final String MOTION_SPEED[] = new String[] { "speed" , "motion" };
    private static final String MOTION_IGNITION[] = new String[] { "ignition" };
    private static final String MOTION_ENGINE[] = new String[] { "engine" };
    private static final String MOTION_STARTSTOP[] = new String[] { "start" , "startstop" };
    // -- internal trip delimiter type codes (see 'tripStartType' field)
    private static final int TRIP_ON_SPEED = 0; // idle time if ignition present
    private static final int TRIP_ON_IGNITION = 1; // no idle time
    private static final int TRIP_ON_ENGINE = 2; // no idle time
    private static final int TRIP_ON_START = 3; // idle time if ignition present
private static String TripTypeName(int type)
{
switch (type) {
case TRIP_ON_SPEED : return "Speed";
case TRIP_ON_IGNITION : return "Ignition";
case TRIP_ON_ENGINE : return "Engine";
case TRIP_ON_START : return "Start/Stop";
default : return "Unknown";
}
}
// ------------------------------------------------------------------------
    /**
    *** True to show map link, false otherwise
    **/
    private static final boolean SHOW_MAP_LINK = true;
    /** TRIP_ON_SPEED only
    *** Minimum speed used for determining in-motion when the device does not
    *** support start/stop events
    **/
    private static final double MIN_SPEED_KPH = 0.0;
    /** TRIP_ON_SPEED only
    *** Default minimum stopped elapsed time to be considered stopped
    **/
    private static final long MIN_STOPPED_TIME_SEC = DateTime.MinuteSeconds(5);
    /**
    *** Default to delimit stop with ignition off (if this occurs before the minimum stopped time)
    **/
    private static final boolean STOP_ON_IGNITION_OFF = false;
    /**
    *** Default to tabulate driving time/distance by work hours
    **/
    private static final boolean TABULATE_BY_WORK_HOURS = false;
    /**
    *** Legacy idle time accumulation (pre-2015 definition of "idle")
    **/
    private static final boolean LEGACY_IDLE_DEF = false;
    // ------------------------------------------------------------------------
    // During TRIP_ON_SPEED trip delimiters, set this value to 'true' to reset the
    // elapsed stop time accumulation to start at the point of the defined 'stop'
    // which is after the minimum elapsed stopped time has passed. This does cause
    // some user confusion, so if the above is unclear, leave this value 'false'.
    private static final boolean SPEED_RESET_STOP_TIME = false;
    // ------------------------------------------------------------------------
    // -- per-device start/stop state-machine states (see 'lastStateChange')
    private static final int STATE_UNKNOWN = 0;
    private static final int STATE_START = 1;
    private static final int STATE_STOP = 2;
// ------------------------------------------------------------------------
public static class DeviceDetailComparator
implements Comparator<FieldData>
{
private String fieldName = "";
public DeviceDetailComparator(String fieldName) {
this.fieldName = StringTools.trim(fieldName);
}
public int compare(FieldData fd1, FieldData fd2) {
Object v1 = (fd1 != null)? fd1.getValue(this.fieldName,null) : null;
Object v2 = (fd2 != null)? fd2.getValue(this.fieldName,null) : null;
if ((v1 == null) && (v2 == null)) {
// -- both v1/v2 are null
return 0; // equal
} else
if (v1 == null) {
// -- v1==null, v2 is non-null
return -1; // null < non-null
} else
if (v2 == null) {
// -- v1 is non-null, v2==null
return 1; // non-null > null
} else {
// -- both v1/v2 are non-null
if (v1 instanceof String) {
// -- compare as Strings
return ((String)v1).compareTo(v2.toString());
} else
if (v1 instanceof Number) {
// -- compare as Doubles
double d1 = StringTools.parseDouble(v1,0.0);
double d2 = StringTools.parseDouble(v2,0.0);
return (d1 < d2)? -1 : 1;
} else
if (v1 instanceof Boolean) {
// -- compare as Booleans
boolean b1 = StringTools.parseBoolean(v1,false);
boolean b2 = StringTools.parseBoolean(v2,false);
if (b1 == b2) {
return 0;
} else {
return b1? 1 : -1; // true > false
}
} else
if (v1 instanceof GeoPoint) {
// -- all GeoPoints are equal
return 0;
} else {
// -- unrecognized type
return 0;
}
}
}
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
    // -- report configuration (set in constructor / 'postInitialize')
    private int deviceCount = 0; // init on new report
    private long deviceEventIndex = 0L; // auto-reset per device
    private boolean isFleetReport = false; // property
    private String fleetSortByField = null; // property
    private boolean fleetSortAscending = true; // property
    private int fleetSortLimit = -1; // property
    private boolean fleetSortTrim = false; // property
    private boolean alwaysReadAllEvents = false; // property
    private boolean legacyIdleDefinition = false; // property
    private boolean showMapLink = true; // property
    private int tripStartType = TRIP_ON_SPEED; // property
    private boolean tripTypeDefault = true; // property
    // -- per-device ignition state
    private int ignitionCodes[] = null; // auto-reset per device
    private boolean isIgnitionOn = false; // auto-reset per device
    private EventData lastIgnitionEvent = null; // auto-reset per device
    // -- per-device idle state (ignition-on while not moving)
    private EventData idleStartEvent = null; // auto-reset per device
    private EventData idleStopEvent = null; // auto-reset per device
    private long idleAccumulator = 0L; // [seconds] auto-reset per device
    // -- per-device motion state
    private boolean isInMotion = false; // auto-reset per device
    private EventData lastMotionEvent = null; // auto-reset per device
    private EventData pendingStopEvent = null; // [TRIP_ON_SPEED only] auto-reset per device
    private double minSpeedKPH = MIN_SPEED_KPH; // [TRIP_ON_SPEED only] property
    private long minStoppedTimeSec = MIN_STOPPED_TIME_SEC; // [TRIP_ON_SPEED only] property
    private boolean stopOnIgnitionOff = STOP_ON_IGNITION_OFF; // property
    private boolean tabulateByWorkHours = TABULATE_BY_WORK_HOURS; // property
    private TimeZone timeZone = null; // reset per report
    private WorkHours workHours = null; // reset per report / property
    // -- last valid odometer (used when the current event's odometer is invalid)
    private double lastValidOdometerKM = 0.0; // auto-reset per device
    private double lastValidOdomOfsKM = 0.0; // auto-reset per device
    private int lastStateChange = STATE_UNKNOWN; // auto-reset per device
    // -- most recent trip "start" snapshot
    private long lastStartTime = 0L; // auto-reset per device
    private GeoPoint lastStartPoint = null; // auto-reset per device
    private String lastStartAddress = ""; // auto-reset per device
    private double lastStartOdometer = 0.0; // auto-reset per device
    private double lastStartOdomOfs = 0.0; // auto-reset per device
    private double lastStartFuelUsed = 0.0; // auto-reset per device
    private double lastStartFuelLevel = 0.0; // auto-reset per device
    private double lastStartFuelRemain = 0.0; // auto-reset per device
    // -- most recent trip "stop" snapshot
    private long lastStopTime = 0L; // auto-reset per device
    private GeoPoint lastStopPoint = null; // auto-reset per device
    private String lastStopAddress = ""; // auto-reset per device
    private double lastStopOdometer = 0.0; // auto-reset per device
    private double lastStopOdomOfs = 0.0; // auto-reset per device
    private double lastStopFuelUsed = 0.0; // auto-reset per device
    private double lastStopFuelLevel = 0.0; // auto-reset per device
    private double lastStopFuelRemain = 0.0; // auto-reset per device
    // -- accumulated report rows
    private Vector<FieldData> deviceDetailData = null; // auto-reset per device
    private Vector<FieldData> deviceTotalData = null; // reset per "getBodyDataIterator()"
    private Vector<FieldData> fleetTotalData = null; // single instance for all devices
    /* device totals */
    private double totalOdomKM = 0.0; // auto-reset per device
    private long totalDriveSec = 0L; // auto-reset per device
    private double totalDriveFuel = 0.0; // auto-reset per device
    private int totalStopCount = 0; // auto-reset per device
    private long totalStopSec = 0L; // auto-reset per device
    private long totalIdleSec = 0L; // auto-reset per device
    private double totalIdleFuel = 0.0; // auto-reset per device
    /* workhour totals */
    private double tworkOdomKM = 0.0; // auto-reset per device
    private long tworkDriveSec = 0L; // auto-reset per device
    private double tworkDriveFuel = 0.0; // auto-reset per device
    private int tworkStopCount = 0; // auto-reset per device
    private double tworkIdleFuel = 0.0; // auto-reset per device
// ------------------------------------------------------------------------
/**
*** Motion Report Constructor
*** @param rptEntry The ReportEntry that generated this report
*** @param reqState The session RequestProperties instance
*** @param devList The list of devices
**/
public MotionReport(ReportEntry rptEntry, RequestProperties reqState, ReportDeviceList devList)
throws ReportException
{
super(rptEntry, reqState, devList);
/* Account check */
if (this.getAccount() == null) {
throw new ReportException("Account-ID not specified");
}
/* Device check */
this.deviceCount = this.getDeviceCount();
if (this.deviceCount <= 0) {
throw new ReportException("No Devices specified");
}
// Detail report if "isFleetSummaryReport" is false (device count == 1)
// Summary report if "isFleetSummaryReport" is true (device count > 1)
/* Timezone */
this.timeZone = reqState.getTimeZone(); // not null
}
// ------------------------------------------------------------------------
/**
*** Post report initialization
**/
public void postInitialize()
{
/* properties */
RTProperties rtp = this.getProperties();
this.legacyIdleDefinition = rtp.getBoolean(PROP_legacyIdleDefinition, LEGACY_IDLE_DEF);
this.alwaysReadAllEvents = rtp.getBoolean(PROP_alwaysReadAllEvents , false);
this.isFleetReport = rtp.getBoolean(PROP_isFleetSummaryReport, false);
this.showMapLink = rtp.getBoolean(PROP_showMapLink , SHOW_MAP_LINK);
this.minSpeedKPH = rtp.getDouble( PROP_minimumSpeedKPH , MIN_SPEED_KPH);
this.minStoppedTimeSec = rtp.getLong( PROP_minimumStoppedTime , MIN_STOPPED_TIME_SEC);
this.stopOnIgnitionOff = rtp.getBoolean(PROP_stopOnIgnitionOff , STOP_ON_IGNITION_OFF);
/* fieldSortByField ascending/descending */
String _fleetSortByField = rtp.getString( PROP_fleetSortByField , "");
if (this.isFleetReport && !StringTools.isBlank(_fleetSortByField)) {
// -- "fleetSortByField" specified:
// - "idleElapse"
// - "idleElapse,[desc|asc],{LIMIT}"
String sbf[] = StringTools.split(_fleetSortByField.trim(),',');
// -- sortOn: "idleElapse"
this.fleetSortByField = (sbf.length > 0)? StringTools.trim(sbf[0]) : null;
// -- sortOrder: "ascending"|"descending"
this.fleetSortAscending = (sbf.length > 1)? !sbf[1].toLowerCase().startsWith("desc") : true;
// -- sortLimit: top X in list
this.fleetSortLimit = (sbf.length > 2)? StringTools.parseInt(sbf[2],-1) : -1;
// -- sortTrim: "trim" (to remove any trailing zero values)
this.fleetSortTrim = (sbf.length > 3)? sbf[3].toLowerCase().startsWith("trim") : false;
}
/* default work hours */
this.tabulateByWorkHours = rtp.getBoolean(PROP_tabulateByWorkHours , TABULATE_BY_WORK_HOURS);
if (this.tabulateByWorkHours) {
this.workHours = new WorkHours(this.getProperties(), PROP_WorkHours_);
//Print.logInfo("WorkHours:\n" + this.workHours);
} else {
//Print.logInfo("Not tabulating by work hours");
}
/* debug */
//Print.logInfo("Is Fleet Report : " + this.isFleetReport);
//Print.logInfo("Legacy Idle Definitiont: " + this.legacyIdleDefinition);
//Print.logInfo("Minimum Speed km/h : " + this.minSpeedKPH);
}
// ------------------------------------------------------------------------
/**
*** Returns true if this report handles only a single device at a time
*** @return True If this report handles only a single device at a time
**/
public boolean isSingleDeviceOnly()
{
return this.isFleetReport? false : true;
}
// ------------------------------------------------------------------------
/**
*** Override 'getEventData' to reset selected status codes
*** @param device The Device for which EventData records will be selected
*** @param rcdHandler The DBRecordHandler
*** @return An array of EventData records for the device
**/
@Override
protected EventData[] getEventData_Device(Device device, DBRecordHandler<EventData> rcdHandler)
{
/* Device */
if (device == null) {
return EventData.EMPTY_ARRAY;
}
/* report constraints */
ReportConstraints rc = this.getReportConstraints();
/* adjust report constraints */
if (this.alwaysReadAllEvents) {
// -- debug purposes, should be "false" for production
// - return all status codes
rc.setStatusCodes(null);
rc.setValidGPSRequired(false);
} else
if (this.tripStartType == TRIP_ON_START) {
// -- return only start/stop events
if (this.ignitionCodes != null) {
if (this.legacyIdleDefinition) {
rc.setStatusCodes(new int[] {
StatusCodes.STATUS_MOTION_START,
StatusCodes.STATUS_MOTION_STOP,
this.ignitionCodes[0], // ignition OFF
this.ignitionCodes[1] // ignition ON
});
rc.setValidGPSRequired(false); // don't need just valid gps events
} else {
// -- read all events
rc.setStatusCodes(null);
rc.setValidGPSRequired(false); // don't need just valid gps events
}
} else {
if (this.legacyIdleDefinition) {
rc.setStatusCodes(new int[] {
StatusCodes.STATUS_MOTION_START,
StatusCodes.STATUS_MOTION_STOP
});
rc.setValidGPSRequired(false); // don't need just valid gps events
} else {
rc.setStatusCodes(null);
rc.setValidGPSRequired(false); // don't need just valid gps events
}
}
} else
if (this.tripStartType == TRIP_ON_IGNITION) {
// -- return only IgnitionOn/IgnitionOff events (this.ignitionCodes is non-null)
if (this.ignitionCodes != null) {
if (this.legacyIdleDefinition) {
// -- read only ignition on/off events
rc.setStatusCodes(new int[] {
this.ignitionCodes[0], // ignition OFF
this.ignitionCodes[1] // ignition ON
});
rc.setValidGPSRequired(false); // don't need just valid gps events
} else {
// -- read all events
rc.setStatusCodes(null);
rc.setValidGPSRequired(false); // don't need just valid gps events
}
} else {
// -- no ignition codes, read all events
Print.logInfo("(No Ignition Codes) Reading all events ...");
rc.setStatusCodes(null);
rc.setValidGPSRequired(false); // don't need just valid gps events
}
} else
if (this.tripStartType == TRIP_ON_ENGINE) {
// -- return only EngineStart/EngineStop events
if (this.ignitionCodes != null) {
if (this.legacyIdleDefinition) {
rc.setStatusCodes(new int[] {
StatusCodes.STATUS_ENGINE_STOP,
StatusCodes.STATUS_ENGINE_START,
this.ignitionCodes[0], // ignition OFF
this.ignitionCodes[1] // ignition ON
});
rc.setValidGPSRequired(false); // don't need just valid gps events
} else {
// -- read all events
rc.setStatusCodes(null);
rc.setValidGPSRequired(false); // don't need just valid gps events
}
} else {
if (this.legacyIdleDefinition) {
rc.setStatusCodes(new int[] {
StatusCodes.STATUS_ENGINE_STOP,
StatusCodes.STATUS_ENGINE_START
});
rc.setValidGPSRequired(false); // don't need just valid gps events
} else {
// -- read all events
rc.setStatusCodes(null);
rc.setValidGPSRequired(false); // don't need just valid gps events
}
}
} else {
// -- default: TRIP_ON_SPEED
// - return all status codes
rc.setStatusCodes(null);
rc.setValidGPSRequired((this.ignitionCodes == null)? true : false); // GPS only if no ignition codes
}
/* report selection limits */
long rptLimit = rc.getReportLimit();
if (rptLimit > 0L) {
rc.setSelectionLimit(Math.max(rc.getSelectionLimit(), (rptLimit * 4L)));
}
/* get data */
return super.getEventData_Device(device, rcdHandler);
}
// ------------------------------------------------------------------------
    /**
    *** Returns true if this report supports displaying a map
    *** @return True if this report supports displaying a map, false otherwise
    **/
    public boolean getSupportsMapDisplay() // prop "showMapLink"
    {
        // -- controlled by the "showMapLink" report property (default SHOW_MAP_LINK)
        return this.showMapLink;
    }
    /**
    *** Returns true if this report supports displaying KML
    *** @return True if this report supports displaying KML, false otherwise
    **/
    public boolean getSupportsKmlDisplay()
    {
        // -- KML display requires that the stop-GeoPoint column be present
        return this.hasReportColumn(FieldLayout.DATA_STOP_GEOPOINT);
    }
// ------------------------------------------------------------------------
    /**
    *** Gets the bound ReportLayout singleton instance for this report
    *** @return The bound ReportLayout
    **/
    public static ReportLayout GetReportLayout()
    {
        // bind the report format to this data
        return FieldLayout.getReportLayout();
    }
    /**
    *** Gets the bound ReportLayout singleton instance for this report
    *** (instance-level accessor delegating to the static singleton)
    *** @return The bound ReportLayout
    **/
    public ReportLayout getReportLayout()
    {
        // bind the report format to this data
        return GetReportLayout();
    }
// ------------------------------------------------------------------------
    /**
    *** Creates and returns an iterator for the row data displayed in the body of this report.
    *** Iterates over every device in the report device list, replays its events
    *** (via the 'handleDBRecord' callback) to build per-trip detail rows and
    *** per-device total rows, then returns the detail rows (device report) or
    *** the per-device total rows (fleet report).
    *** @return The body row data iterator
    **/
    public DBDataIterator getBodyDataIterator()
    {
        /* total totals (accumulated across all devices, for the fleet-total row) */
        double grandTotalOdomKM = 0.0;
        long grandTotalDriveSec = 0L;
        double grandTotalDriveFuel = 0.0;
        int grandTotalStopCount = 0;
        long grandTotalStopSec = 0L;
        long grandTotalIdleSec = 0L;
        double grandTotalIdleFuel = 0.0;
        // -- Work Hours
        double gworkTotalOdomKM = 0.0;
        long gworkTotalDriveSec = 0L;
        // -- After Hours
        double gafterTotalOdomKM = 0.0;
        long gafterTotalDriveSec = 0L;
        /* device total data list */
        this.deviceTotalData = new Vector<FieldData>();
        Vector<FieldData> allDevicesDetailData = new Vector<FieldData>();
        /* device list */
        Account account = this.getAccount();
        String accountID = account.getAccountID();
        ReportDeviceList devList = this.getReportDeviceList();
        /* loop through devices */
        deviceListIterator:
        for (Iterator i = devList.iterator(); i.hasNext();) {
            String devID = (String)i.next();
            //Print.logInfo("Processing events for device: " + devID);
            /* new device detail data iterator */
            this.deviceDetailData = new Vector<FieldData>();
            /* reset per-device totals */
            this.totalOdomKM = 0.0;
            this.totalDriveSec = 0L ;
            this.totalDriveFuel = 0.0;
            this.totalStopCount = 0;
            this.totalStopSec = 0L ;
            this.totalIdleSec = 0L ;
            this.totalIdleFuel = 0.0;
            this.tworkOdomKM = 0.0;
            this.tworkDriveSec = 0L;
            this.tworkDriveFuel = 0.0;
            this.tworkStopCount = 0;
            this.tworkIdleFuel = 0.0;
            // reset ignition state
            this.isIgnitionOn = false;
            this.lastIgnitionEvent = null;
            this.ignitionCodes = null;
            // reset idle state
            this.idleStartEvent = null;
            this.idleStopEvent = null;
            this.idleAccumulator = 0L;
            // reset motion
            this.isInMotion = false;
            this.lastMotionEvent = null;
            this.pendingStopEvent = null;
            // reset start
            this.lastStartTime = 0L;
            this.lastStartPoint = null;
            this.lastStartAddress = "";
            this.lastStartOdometer = 0.0;
            this.lastStartOdomOfs = 0.0;
            this.lastStartFuelUsed = 0.0;
            this.lastStartFuelLevel = 0.0;
            this.lastStartFuelRemain = 0.0;
            // reset stop
            this.lastStopTime = 0L;
            this.lastStopPoint = null;
            this.lastStopAddress = "";
            this.lastStopOdometer = 0.0;
            this.lastStopOdomOfs = 0.0;
            this.lastStopFuelUsed = 0.0;
            this.lastStopFuelLevel = 0.0;
            this.lastStopFuelRemain = 0.0;
            // reset state
            this.lastStateChange = STATE_UNKNOWN;
            // reset last valid odometer
            this.lastValidOdometerKM = 0.0;
            this.lastValidOdomOfsKM = 0.0;
            try {
                /* get device */
                Device device = devList.getDevice(devID);
                if (device == null) {
                    Print.logWarn("Device not found: " + devID);
                    continue; // deviceListIterator
                }
                //Print.logDebug("Analyzing Device: " + devID);
                // Device ignition statusCodes
                this.ignitionCodes = device.getIgnitionStatusCodes();
                boolean hasIgnition = (this.ignitionCodes != null);
                // -- trip start/stop type (resolved per-device from the property value)
                RTProperties rtp = this.getProperties();
                String tt = rtp.getString(PROP_tripStartType,MOTION_SPEED[0]).toLowerCase();
                //Print.logInfo("Trip type: " + tt);
                if (ListTools.contains(MOTION_DEFAULT,tt)) {
                    // -- "default" (TRIP_ON_ENGINE not selected when using "default")
                    String devCode = device.getDeviceCode();
                    DCServerConfig dcs = DCServerFactory.getServerConfig(devCode);
                    if ((dcs == null) && StringTools.isBlank(devCode) && Account.IsDemoAccount(accountID)) {
                        // -- special case for "demo" account when 'deviceCode' is blank
                        dcs = DCServerFactory.getServerConfig(DCServerFactory.OPENDMTP_NAME);
                        if (dcs == null) {
                            Print.logWarn("Account 'demo' DCServerConfig not found: " + DCServerFactory.OPENDMTP_NAME);
                        }
                    }
                    if (dcs != null) {
                        // -- DCServerConfig found
                        if (dcs.getStartStopSupported(false)) {
                            // Device supports start/stop
                            this.tripStartType = TRIP_ON_START;
                        } else
                        if (hasIgnition) {
                            // -- Device supports ignition state
                            this.tripStartType = TRIP_ON_IGNITION;
                        } else {
                            // -- Default to speed
                            this.tripStartType = TRIP_ON_SPEED;
                        }
                    } else {
                        // -- DCServerConfig not found ('deviceCode' is either blank or invalid)
                        if (hasIgnition) {
                            // -- Device supports ignition state
                            this.tripStartType = TRIP_ON_IGNITION;
                        } else {
                            // -- Default
                            this.tripStartType = TRIP_ON_SPEED;
                        }
                    }
                    this.tripTypeDefault = true;
                } else
                if (ListTools.contains(MOTION_STARTSTOP,tt)) {
                    // "startstop"
                    this.tripStartType = TRIP_ON_START;
                    this.tripTypeDefault = false;
                } else
                if (ListTools.contains(MOTION_IGNITION,tt)/* && hasIgnition */) {
                    // "ignition" (synthesize default ignition codes if the device has none)
                    this.tripStartType = TRIP_ON_IGNITION;
                    this.tripTypeDefault = false;
                    if (!hasIgnition) {
                        this.ignitionCodes = new int[] { StatusCodes.STATUS_IGNITION_OFF, StatusCodes.STATUS_IGNITION_ON };
                        hasIgnition = true;
                    }
                } else
                if (ListTools.contains(MOTION_ENGINE,tt)) {
                    // "engine"
                    this.tripStartType = TRIP_ON_ENGINE;
                    this.tripTypeDefault = false;
                } else {
                    // "speed", "motion"
                    this.tripStartType = TRIP_ON_SPEED;
                    this.tripTypeDefault = true;
                }
                /* debug */
                if (RTConfig.isDebugMode()) {
                    Print.logDebug("Trip Start Type: [" + this.tripStartType + "] " + TripTypeName(this.tripStartType));
                    if (hasIgnition) {
                        String ignOff = StatusCodes.GetHex(this.ignitionCodes[0]);
                        String ignOn  = StatusCodes.GetHex(this.ignitionCodes[1]);
                        Print.logDebug("Device Ignition Codes "+ignOff+":"+ignOn+" [" + accountID + "/" + devID + "]");
                    } else {
                        Print.logDebug("No defined Device ignition codes [" + accountID + "/" + devID + "]");
                    }
                }
                // get events (each event is delivered to the 'handleDBRecord' callback)
                // this.lastValidOdometerKM = 0.0; <-- already reset above
                this.deviceEventIndex = 0L; // provide an index to all events read
                this.getEventData_Device(device, this); // <== callback to 'handleDBRecord'
                //Print.logInfo("Total Accumulated Idle Time: " + this.totalIdleSec + " seconds");
                // -- handle final record here (emit the trailing partial trip, if any)
                if (this.lastStopTime > 0) {
                    // -- we are stopped
                    long driveTime = (this.lastStartTime > 0L)? (this.lastStopTime - this.lastStartTime ) : -1L;
                    double driveDist = (this.lastStartTime > 0L)? (this.lastStopOdometer - this.lastStartOdometer) : -1.0; // kilometers
                    double fuelTrip = (this.lastStartTime > 0L)? (this.lastStopFuelUsed - this.lastStartFuelUsed) : -1.0; // liter
                    double driveEcon = (fuelTrip > 0.0)? (driveDist / fuelTrip) : 0.0; // kilometers per liter
                    Device.FuelEconomyType driveEconType = Device.FuelEconomyType.FUEL_CONSUMED;
                    long stopElaps = -1L;
                    long idleElaps = (this.idleAccumulator > 0L)? this.idleAccumulator : -1L;
                    double fuelIdle = -1.0;
                    this._addRecord( // getBodyDataIterator
                        this.deviceDetailData,
                        accountID, devID, device,
                        this.lastStartTime , this.lastStartPoint, this.lastStartAddress, this.lastStartOdometer, this.lastStartOdomOfs, this.lastStartFuelUsed,
                        this.lastStopTime , this.lastStopPoint , this.lastStopAddress , this.lastStopOdometer , this.lastStopOdomOfs , this.lastStopFuelUsed ,
                        driveTime, driveDist, fuelTrip, driveEcon, driveEconType,
                        stopElaps, idleElaps, fuelIdle);
                } else
                if (this.lastStartTime > 0) {
                    // -- we haven't stopped during the range of this report
                    long driveTime = -1L;
                    double driveDist = -1.0; // kilometers
                    double fuelTrip = -1.0; // liters
                    double driveEcon = -1.0; // kilometers per liter
                    Device.FuelEconomyType driveEconType = Device.FuelEconomyType.UNKNOWN;
                    long stopElaps = -1L;
                    long idleElaps = -1L;
                    double fuelIdle = -1.0;
                    this._addRecord( // getBodyDataIterator
                        this.deviceDetailData,
                        accountID, devID, device,
                        this.lastStartTime , this.lastStartPoint, this.lastStartAddress, this.lastStartOdometer, this.lastStartOdomOfs, this.lastStartFuelUsed,
                        -1L , null , "" , -1.0 , 0.0 , -1.0 ,
                        driveTime, driveDist, fuelTrip, driveEcon, driveEconType,
                        stopElaps, idleElaps, fuelIdle);
                } else {
                    // -- no start/stop seen within the report range; nothing to emit
                }
                /* fuel economy */
                double driveEcon = (this.totalDriveFuel > 0.0)? (this.totalOdomKM / this.totalDriveFuel) : 0.0;
                Device.FuelEconomyType driveEconType = Device.FuelEconomyType.FUEL_CONSUMED;
                /* device total record */
                FieldData fd = new FieldData();
                fd.setRowType(DBDataRow.RowType.TOTAL);
                long idleElaps = (this.totalIdleSec > 0L)? this.totalIdleSec : -1L;
                fd.setAccount(account);
                fd.setDevice(device);
                fd.setString(FieldLayout.DATA_ACCOUNT_ID , this.getAccountID());
                fd.setString(FieldLayout.DATA_DEVICE_ID , devID);
                fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA , this.totalOdomKM); // odomDelta
                fd.setLong( FieldLayout.DATA_DRIVING_ELAPSED , this.totalDriveSec);
                fd.setDouble(FieldLayout.DATA_FUEL_TRIP , this.totalDriveFuel);
                fd.setDouble(FieldLayout.DATA_FUEL_ECONOMY , driveEcon);
                fd.setValue( FieldLayout.DATA_FUEL_ECONOMY_TYPE , driveEconType);
                fd.setLong( FieldLayout.DATA_STOP_COUNT , this.totalStopCount);
                fd.setLong( FieldLayout.DATA_STOP_ELAPSED , this.totalStopSec);
                fd.setLong( FieldLayout.DATA_IDLE_ELAPSED , idleElaps);
                fd.setDouble(FieldLayout.DATA_FUEL_IDLE , this.totalIdleFuel);
                // -- Work Hours
                fd.setLong( FieldLayout.DATA_DRIVING_ELAPSED_WH, this.tworkDriveSec);
                fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA_WH , this.tworkOdomKM);
                fd.setDouble(FieldLayout.DATA_FUEL_TRIP_WH , this.tworkDriveFuel);
                fd.setDouble(FieldLayout.DATA_FUEL_IDLE_WH , this.tworkIdleFuel);
                fd.setLong( FieldLayout.DATA_STOP_COUNT_WH , this.tworkStopCount);
                // -- After Hours (derived: total minus work-hours)
                fd.setLong( FieldLayout.DATA_DRIVING_ELAPSED_AH, (this.totalDriveSec - this.tworkDriveSec));
                fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA_AH , (this.totalOdomKM - this.tworkOdomKM));
                this.deviceTotalData.add(fd);
                /* grand totals */
                grandTotalOdomKM += this.totalOdomKM;
                grandTotalDriveSec += this.totalDriveSec;
                grandTotalDriveFuel += this.totalDriveFuel;
                grandTotalStopCount += this.totalStopCount;
                grandTotalStopSec += this.totalStopSec;
                grandTotalIdleSec += this.totalIdleSec;
                grandTotalIdleFuel += this.totalIdleFuel;
                // -- Work Hours
                gworkTotalOdomKM += this.tworkOdomKM;
                gworkTotalDriveSec += this.tworkDriveSec;
                // -- After Hours
                gafterTotalOdomKM += (this.totalOdomKM - this.tworkOdomKM);
                gafterTotalDriveSec += (this.totalDriveSec - this.tworkDriveSec);
            } catch (DBException dbe) {
                Print.logError("Error retrieving EventData for Device: " + devID);
            }
            /* save device detail data */
            allDevicesDetailData.addAll(this.deviceDetailData);
            this.deviceDetailData.clear();
            this.deviceDetailData = null;
        } // Device list iterator
        /* return row iterator */
        if (this.isFleetReport) {
            // -- prepare fleet-total data
            double avgEcon = (grandTotalDriveFuel > 0.0)? (grandTotalOdomKM / grandTotalDriveFuel) : 0.0;
            FieldData fd = new FieldData();
            fd.setRowType(DBDataRow.RowType.TOTAL);
            fd.setAccount(account);
            fd.setString(FieldLayout.DATA_ACCOUNT_ID , this.getAccountID());
            fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA , grandTotalOdomKM); // odomDelta
            fd.setLong( FieldLayout.DATA_DRIVING_ELAPSED , grandTotalDriveSec);
            fd.setDouble(FieldLayout.DATA_FUEL_TRIP , grandTotalDriveFuel);
            //fd.setDouble(FieldLayout.DATA_FUEL_ECONOMY , avgEcon);
            //fd.setValue( FieldLayout.DATA_FUEL_ECONOMY_TYPE , avgEconType);
            fd.setLong( FieldLayout.DATA_STOP_COUNT , grandTotalStopCount);
            fd.setLong( FieldLayout.DATA_STOP_ELAPSED , grandTotalStopSec);
            fd.setLong( FieldLayout.DATA_IDLE_ELAPSED , grandTotalIdleSec);
            fd.setDouble(FieldLayout.DATA_FUEL_IDLE , grandTotalIdleFuel);
            // -- Work Hours
            fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA_WH , gworkTotalOdomKM);
            fd.setLong( FieldLayout.DATA_DRIVING_ELAPSED_WH , gworkTotalDriveSec);
            // -- After Hours
            fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA_AH , gafterTotalOdomKM);
            fd.setLong( FieldLayout.DATA_DRIVING_ELAPSED_AH , gafterTotalDriveSec);
            // -- totals list
            this.fleetTotalData = new Vector<FieldData>();
            this.fleetTotalData.add(fd);
            // -- sort device-TOTAL data?
            if (!StringTools.isBlank(this.fleetSortByField)) {
                // -- sort data by field column
                DeviceDetailComparator ddc = new DeviceDetailComparator(this.fleetSortByField);
                ListTools.sort(this.deviceTotalData, ddc, this.fleetSortAscending);
                // -- trim excess items beyond limit
                if ((this.fleetSortLimit > 0) && (this.deviceTotalData.size() > this.fleetSortLimit)) {
                    // -- remove all entries above limit
                    this.deviceTotalData.setSize(this.fleetSortLimit);
                }
                // -- if descending, trim trailing items which are zero
                if (this.fleetSortTrim && !this.fleetSortAscending && (this.deviceTotalData.size() > 1)) {
                    //Print.logInfo("Trimming descending device total data ...");
                    for (int i = this.deviceTotalData.size() - 1; i > 0; i--) {
                        FieldData dfd = this.deviceTotalData.get(i);
                        Object fvl = (dfd != null)? dfd.getValue(this.fleetSortByField,null) : null;
                        //Print.logInfo("Testing value: " + fvl);
                        if ( (fvl == null ) ||
                             ((fvl instanceof String) && StringTools.isBlank((String)fvl) ) ||
                             ((fvl instanceof Long ) && (((Long)fvl).longValue() <= 0L) ) ||
                             ((fvl instanceof Number) && (((Number)fvl).doubleValue() == 0.0)) ) {
                            this.deviceTotalData.remove(i);
                        }
                    }
                }
            }
            // -- return device-TOTAL data (fleet report)
            //Print.logStackTrace("Device Total Data ...");
            return new ListDataIterator(this.deviceTotalData);
        } else {
            // -- return device-DETAIL data (device report)
            //Print.logStackTrace("Device Detail Data ...");
            return new ListDataIterator(allDevicesDetailData);
        }
    }
/**
*** Creates and returns an iterator for the row data displayed in the total rows of this report.
*** @return The total row data iterator
**/
public DBDataIterator getTotalsDataIterator()
{
if (this.isFleetReport) {
if (this.fleetTotalData != null) {
//Print.logStackTrace("Fleet Total Data ...");
return new ListDataIterator(this.fleetTotalData);
} else {
//Print.logStackTrace("Fleet Total Data 'null' ...");
return null;
}
} else {
//Print.logStackTrace("Device Total Data ...");
return new ListDataIterator(this.deviceTotalData);
}
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/**
*** Adds one trip record to the supplied device detail data list, tabulating
*** the work-hours (WH) vs after-hours (AH) portions of the trip when
*** work-hour tabulation is enabled, and accumulating the per-device totals.
*** @param _deviceDetailData The list to which the new record is appended
*** @param acctID The account ID
*** @param devID The device ID
*** @param device The Device instance (used to resolve device-specific WorkHours; may be null)
*** @param startTime The motion start time (may be 0)
*** @param startGP The motion start GeoPoint (may be null)
*** @param startAddress The motion start Address (may be null/blank)
*** @param startOdom The motion start Odometer
*** @param startOdomOfs The motion start Odometer offset
*** @param startFuel The motion start Fuel Usage
*** @param stopTime The motion stop time (may be 0)
*** @param stopGP The motion stop GeoPoint (may be null)
*** @param stopAddress The motion stop Address (may be null/blank)
*** @param stopOdom The motion stop Odometer
*** @param stopOdomOfs The motion stop Odometer offset
*** @param stopFuel The motion stop Fuel Usage
*** @param driveTime The driving elapsed time
*** @param driveDist The distance driven
*** @param fuelTrip The fuel used
*** @param driveEcon The fuel economy
*** @param driveEconType The fuel economy type
*** @param stopElapse The elapsed stop time
*** @param idleElapse The elapsed idle time (ignition-on, not moving)
*** @param fuelIdle The fuel used while idling (<='0.0' if unavailable)
**/
private void _addRecord(
    Vector<FieldData> _deviceDetailData,
    String acctID , String devID , Device device ,
    long startTime , GeoPoint startGP, String startAddress, double startOdom, double startOdomOfs, double startFuel,
    long stopTime , GeoPoint stopGP , String stopAddress , double stopOdom , double stopOdomOfs , double stopFuel ,
    long driveTime , double driveDist, double fuelTrip , double driveEcon, Device.FuelEconomyType driveEconType,
    long stopElapse, long idleElapse , double fuelIdle
    )
{

    /* standard fields */
    FieldData fd = new MotionFieldData();
    fd.setString(  FieldLayout.DATA_ACCOUNT_ID       , acctID);
    fd.setString(  FieldLayout.DATA_DEVICE_ID        , devID);
    fd.setGeoPoint(FieldLayout.DATA_GEOPOINT         , startGP);       // may be null
    fd.setString(  FieldLayout.DATA_ADDRESS          , startAddress);  // may be null/blank
    fd.setLong(    FieldLayout.DATA_START_TIMESTAMP  , startTime);     // may be 0L
    fd.setLong(    FieldLayout.DATA_DRIVING_ELAPSED  , driveTime);
    fd.setDouble(  FieldLayout.DATA_ODOMETER         , startOdom);
    fd.setDouble(  FieldLayout.DATA_ODOMETER_DELTA   , driveDist);     // odomDelta
    fd.setLong(    FieldLayout.DATA_STOP_TIMESTAMP   , stopTime);
    fd.setGeoPoint(FieldLayout.DATA_STOP_GEOPOINT    , stopGP);        // may be null
    fd.setString(  FieldLayout.DATA_STOP_ADDRESS     , stopAddress);   // may be null/blank
    fd.setDouble(  FieldLayout.DATA_STOP_ODOMETER    , stopOdom);
    fd.setDouble(  FieldLayout.DATA_ODOMETER_OFFSET  , stopOdomOfs);   // use stop odometer offset
    fd.setDouble(  FieldLayout.DATA_FUEL_TOTAL       , startFuel);
    fd.setDouble(  FieldLayout.DATA_FUEL_TRIP        , fuelTrip);      // stopFuel - startFuel
    fd.setDouble(  FieldLayout.DATA_FUEL_ECONOMY     , driveEcon);     // driveDist / fuelTrip
    fd.setValue(   FieldLayout.DATA_FUEL_ECONOMY_TYPE, driveEconType);
    fd.setLong(    FieldLayout.DATA_STOP_ELAPSED     , stopElapse);
    fd.setLong(    FieldLayout.DATA_IDLE_ELAPSED     , idleElapse);
    fd.setDouble(  FieldLayout.DATA_FUEL_IDLE        , fuelIdle);

    /* work hours fields */
    boolean whStart     = false;
    boolean whStop      = false;
    long    whDriveTime = 0L;
    double  whDriveDist = 0.0;
    double  whFuelTrip  = 0.0;
    double  whFuelIdle  = 0.0;
    if (this.tabulateByWorkHours && (this.workHours != null)) {
        WorkHours WH = (device != null)? device.getWorkHours(this.workHours) : this.workHours; // not null
        whStart = (startTime > 0L)? WH.isMatch(startTime, this.timeZone) : false;
        whStop  = (stopTime  > 0L)? WH.isMatch(stopTime , this.timeZone) : false;
        //Print.logInfo("Checking WorkHours: " + (new DateTime(startTime)) + " - to - " + (new DateTime(stopTime)));
        if ((startTime <= 0L) && (stopTime <= 0L)) {
            // -- unlikely, ignore
        } else
        if (startTime <= 0L) {
            // -- no start time (stopTime > 0 here): attribute per the stop time.
            // -  (uses the precomputed 'whStop' rather than recomputing WH.isMatch)
            if (whStop) {
                // -- all time/distance attributed to WorkHours
                whDriveTime = driveTime;
                whDriveDist = driveDist;
                whFuelTrip  = fuelTrip;
                whFuelIdle  = fuelIdle;
            } else {
                // -- all time/distance attributed to non-WorkHours
            }
        } else
        if (stopTime <= 0L) {
            // -- no stop time (startTime > 0 here): attribute per the start time.
            // -  (uses the precomputed 'whStart' rather than recomputing WH.isMatch)
            if (whStart) {
                // -- all time/distance attributed to WorkHours
                whDriveTime = driveTime;
                whDriveDist = driveDist;
                whFuelTrip  = fuelTrip;
                whFuelIdle  = fuelIdle;
            } else {
                // -- all time/distance attributed to non-WorkHours
            }
        } else
        if (startTime < stopTime) {
            // -- both times available: prorate by the fraction of the trip
            // -  that falls within the configured work hours
            double totalHR   = (double)(stopTime - startTime) / 3600.0;
            DateTime startDT = new DateTime(startTime, this.timeZone);
            DateTime stopDT  = new DateTime(stopTime , this.timeZone);
            double accumHR   = WH.countWorkHours(startDT, stopDT, this.timeZone);
            if (accumHR > 0.0) {
                double fracHR = accumHR / totalHR;
                if (fracHR > 1.0) {
                    // -- all time/distance attributed to WorkHours
                    whDriveTime = driveTime;
                    whDriveDist = driveDist;
                    whFuelTrip  = fuelTrip;
                    whFuelIdle  = fuelIdle;
                } else {
                    whDriveTime = (long)(fracHR * (double)driveTime);
                    whDriveDist = fracHR * driveDist;
                    whFuelTrip  = fracHR * fuelTrip;
                    whFuelIdle  = fracHR * fuelIdle;
                }
            }
        }
    }
    // -- Work Hours
    fd.setLong(  FieldLayout.DATA_DRIVING_ELAPSED_WH, whDriveTime);
    fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA_WH , whDriveDist);
    fd.setDouble(FieldLayout.DATA_FUEL_TRIP_WH      , whFuelTrip);
    fd.setDouble(FieldLayout.DATA_FUEL_IDLE_WH      , whFuelIdle);
    // -- After Hours (remainder of the trip totals)
    fd.setLong(  FieldLayout.DATA_DRIVING_ELAPSED_AH, (driveTime - whDriveTime));
    fd.setDouble(FieldLayout.DATA_ODOMETER_DELTA_AH , (driveDist - whDriveDist));
    fd.setDouble(FieldLayout.DATA_FUEL_TRIP_AH      , (fuelTrip  - whFuelTrip ));
    fd.setDouble(FieldLayout.DATA_FUEL_IDLE_AH      , (fuelIdle  - whFuelIdle ));

    /* add to device detail data iterator */
    _deviceDetailData.add(fd); // this.deviceDetailData

    /* accumulate device totals */
    if (driveTime  > 0L ) { this.totalDriveSec  += driveTime  ; }
    if (driveDist  > 0.0) { this.totalOdomKM    += driveDist  ; }
    if (fuelTrip   > 0.0) { this.totalDriveFuel += fuelTrip   ; }
    if (stopTime   > 0L ) { this.totalStopCount += 1          ; }
    if (stopElapse > 0L ) { this.totalStopSec   += stopElapse ; }
    if (idleElapse > 0L ) { this.totalIdleSec   += idleElapse ; }
    if (fuelIdle   > 0.0) { this.totalIdleFuel  += fuelIdle   ; }

    /* accumulate device workhours */
    if (whDriveTime > 0L ) { this.tworkDriveSec  += whDriveTime ; }
    if (whDriveDist > 0.0) { this.tworkOdomKM    += whDriveDist ; }
    if (whFuelTrip  > 0.0) { this.tworkDriveFuel += whFuelTrip  ; }
    if (whStop           ) { this.tworkStopCount += 1           ; }
    if (whFuelIdle  > 0.0) { this.tworkIdleFuel  += whFuelIdle  ; }

}
/**
*** Custom DBRecord callback handler: processes one EventData record at a time,
*** maintaining the motion/ignition/idle state machine for the configured
*** trip-start type (TRIP_ON_IGNITION, TRIP_ON_ENGINE, TRIP_ON_START, or
*** TRIP_ON_SPEED), and emitting one report row per completed trip via
*** "_addRecord".
*** @param rcd The EventData record
*** @return The returned status indicating whether to continue (DBRH_SKIP),
***         or stop (DBRH_STOP, once the report record limit is reached)
*** @throws DBException If a database error occurs
**/
public int handleDBRecord(EventData rcd)
    throws DBException
{
    EventData evRcd      = rcd;
    Device    device     = evRcd.getDevice(); // should be non-null
    int       statusCode = evRcd.getStatusCode();
    double    speedKPH   = evRcd.getSpeedKPH();

    /* copy of deviceDetailData */
    // -- local alias (same list instance) used for appending rows and for the
    // -  record-limit check at the bottom of this method
    Vector<FieldData> _deviceDetailData = this.deviceDetailData;

    /* count event */
    this.deviceEventIndex++;
    //Print.logInfo("EventData["+this.deviceEventIndex+"]: " + evRcd.getTimestamp() + " 0x" + StringTools.toHexString(evRcd.getStatusCode(),16));

    /* ignition state change for non-ignition trips */
    // -- for trip types other than TRIP_ON_IGNITION, ignition state is tracked
    // -  independently of motion state (used for idle accounting below)
    boolean ignitionChange = false;
    if (this.tripStartType != TRIP_ON_IGNITION) {
        if (this.ignitionCodes != null) {
            // -- has ignition codes
            if (this.isIgnitionOff(statusCode)) {
                // -- ignition OFF
                if ((this.lastIgnitionEvent == null) || this.isIgnitionOn) {
                    ignitionChange = true;
                    this.isIgnitionOn = false;
                    //this.lastIgnOffEvent = evRcd;
                    this.lastIgnitionEvent = evRcd;
                } else {
                    // -- ignition is already off
                }
            } else
            if (this.isIgnitionOn(statusCode)) {
                // ignition ON
                if ((this.lastIgnitionEvent == null) || !this.isIgnitionOn) {
                    ignitionChange = true;
                    this.isIgnitionOn = true;
                    //this.lastIgnOnEvent = evRcd;
                    this.lastIgnitionEvent = evRcd;
                } else {
                    // -- ignition is already on
                }
            } else {
                // -- leave ignition state as-is
            }
        } else {
            // -- no ignition codes
        }
    }

    /* trip delimiter */
    // -- determine whether this event starts/stops a trip (and, for the
    // -  "legacy" idle definition, starts/stops an idle interval), according
    // -  to the configured trip-start type
    boolean isMotionStart = false;
    boolean isMotionStop  = false;
    boolean isIdleStart   = false;
    boolean isIdleStop    = false;
    if (this.tripStartType == TRIP_ON_IGNITION) {
        // -- TRIP_ON_IGNITION: the ignition state itself delimits the trip
        if (this.isIgnitionOn(statusCode)) {
            // -- I've started moving/trip
            if ((this.lastIgnitionEvent == null) || !this.isIgnitionOn) {
                // -- ignition was off, ignition state changed to on
                ignitionChange = true;
                this.isIgnitionOn = true;
                //this.lastIgnOnEvent = evRcd;
                this.lastIgnitionEvent = evRcd;
                isMotionStart = true;
                this.isInMotion = true;
                //this.lastStartEvent = evRcd;
                this.lastMotionEvent = evRcd;
                if (this.legacyIdleDefinition) {
                    // -- legacy idle definition
                    //isIdleStop = true;  <== no idle for TRIP_ON_IGNITION
                    this.idleStopEvent = null;
                }
            } else {
                // -- ignition was already on
            }
        } else
        if (this.isIgnitionOff(statusCode)) {
            // -- I've stopped moving/trip
            if ((this.lastIgnitionEvent == null) || this.isIgnitionOn) {
                ignitionChange = true;
                this.isIgnitionOn = false;
                //this.lastIgnOffEvent = evRcd;
                this.lastIgnitionEvent = evRcd;
                isMotionStop = true;
                this.isInMotion = false;
                //this.lastStopEvent = evRcd;
                this.lastMotionEvent = evRcd;
                if (this.legacyIdleDefinition) {
                    //isIdleStart = true;  <== no idle for TRIP_ON_IGNITION
                    this.idleStartEvent = null;
                }
            } else {
                // -- ignition is already off
            }
        } else {
            // -- not a ignition-motion state change event, check for idle
            if (this.legacyIdleDefinition) {
                // -- do nothing
            }
        }
    } else
    if (this.tripStartType == TRIP_ON_ENGINE) {
        // -- TRIP_ON_ENGINE: engine start/stop status codes delimit the trip
        if (this.isEngineStart(statusCode)) {
            // -- engine started
            if (!this.isInMotion) {
                // -- I was stopped, I've now started moving
                isMotionStart = true;
                this.isInMotion = true;
                //this.lastStartEvent = evRcd;
                this.lastMotionEvent = evRcd;
                if (!this.isIgnitionOn) {
                    // -- force ignition ON when engine on
                    this.isIgnitionOn = true;
                    //this.lastIgnOnEvent = evRcd;
                    this.lastIgnitionEvent = evRcd;
                }
                if (this.legacyIdleDefinition) {
                    // -- legacy idle definition
                    isIdleStop = true; // in TRIP_ON_ENGINE
                    this.idleStopEvent = evRcd;
                }
            } else {
                // -- I'm already moving
            }
        } else
        if (this.isEngineStop(statusCode)) {
            // -- engine stopped
            if (this.isInMotion) {
                // -- I've stopped moving (start idle clock)
                isMotionStop = true;
                this.isInMotion = false;
                //this.lastStopEvent = evRcd;
                this.lastMotionEvent = evRcd;
                if (this.isIgnitionOn && (this.ignitionCodes == null)) {
                    // -- force ignition off if device does not have ignition codes
                    this.isIgnitionOn = false;
                    //this.lastIgnOffEvent = evRcd
                    this.lastIgnitionEvent = evRcd;
                }
                if (this.legacyIdleDefinition) {
                    isIdleStart = true; // in TRIP_ON_ENGINE
                    this.idleStartEvent = evRcd;
                }
            } else {
                // -- I'm already stopped
            }
        } else
        if (this.isIgnitionOff(statusCode) && this.stopOnIgnitionOff) {
            // -- ignition turned off
            if (this.isInMotion) {
                // -- Likely a "Stop" event was not found, force stop
                // -  I've stopped moving (start idle clock)
                isMotionStop = true;
                this.isInMotion = false;
                //this.lastStopEvent = evRcd;
                this.lastMotionEvent = evRcd;
            } else {
                // -- I'm already stopped
                //Print.logInfo("(Ignition Off) I'm already stopped");
            }
            // -- stop idle clock
            if (this.legacyIdleDefinition) {
                // -- idling, stop idle
                isIdleStop = true; // in TRIP_ON_ENGINE
                this.idleStopEvent = evRcd;
            }
        } else {
            // -- not a motion state change event
            // -  check for idle change events while not moving
            if (!this.isInMotion) {
                if (this.isIgnitionOn(statusCode)) {
                    // -- ignition on while not moving, start idle clock
                    if (this.legacyIdleDefinition) {
                        isIdleStart = true; // in TRIP_ON_ENGINE
                        this.idleStartEvent = evRcd;
                    }
                } else
                if (this.isIgnitionOff(statusCode)) {
                    if (this.legacyIdleDefinition) {
                        isIdleStop = true; // in TRIP_ON_ENGINE
                        this.idleStopEvent = evRcd;
                    }
                }
            }
        }
    } else
    if (this.tripStartType == TRIP_ON_START) {
        // -- TRIP_ON_START: explicit motion start/stop status codes delimit the trip
        if (this.isMotionStart(statusCode)) {
            if (!this.isInMotion) {
                // -- I was stopped, I've now started moving (stop idle clock)
                isMotionStart = true;
                this.isInMotion = true;
                //this.lastStartEvent = evRcd;
                this.lastMotionEvent = evRcd;
                if (!this.isIgnitionOn) {
                    // -- force ignition ON when moving
                    this.isIgnitionOn = true;
                    //this.lastIgnOnEvent = evRcd;
                    this.lastIgnitionEvent = evRcd;
                }
                if (this.legacyIdleDefinition) {
                    isIdleStop = true; // in TRIP_ON_START
                    this.idleStopEvent = evRcd;
                }
            } else {
                // I'm already moving
                //Print.logInfo("(Start) I'm already moving");
            }
        } else
        if (this.isMotionStop(statusCode)) {
            if (this.isInMotion) {
                // -- I've stopped moving (start idle clock)
                isMotionStop = true;
                this.isInMotion = false;
                //this.lastStopEvent = evRcd;
                this.lastMotionEvent = evRcd;
                if (this.isIgnitionOn && (this.ignitionCodes == null)) {
                    // -- force ignition off if device does not have ignition codes
                    this.isIgnitionOn = false;
                    //this.lastIgnOffEvent = evRcd
                    this.lastIgnitionEvent = evRcd;
                }
                if (this.legacyIdleDefinition) {
                    isIdleStart = true; // in TRIP_ON_START
                    this.idleStartEvent = evRcd;
                }
            } else {
                // -- I'm already stopped
                //Print.logInfo("(Stop) I'm already stopped");
            }
        } else
        if (this.isIgnitionOff(statusCode) && this.stopOnIgnitionOff) {
            // -- ignition turned off
            if (this.isInMotion) {
                // -- Likely a "Stop" event was not found, force stop
                // -  I was moving, I've now stopped moving
                isMotionStop = true;
                this.isInMotion = false;
                //this.lastStopEvent = evRcd;
                this.lastMotionEvent = evRcd;
            } else {
                // -- I'm already stopped
                //Print.logInfo("(Ignition Off) I'm already stopped");
            }
            // -- stop idle clock
            if (this.legacyIdleDefinition) {
                // -- idling, stop idle
                isIdleStop = true; // in TRIP_ON_START
                this.idleStopEvent = evRcd;
            }
        } else {
            // -- not a motion state change event
            // -  check for idle change events while not moving
            if (!this.isInMotion) {
                // -- not moving
                if (this.isIgnitionOn(statusCode)) {
                    // -- ignition on while not moving, start idle clock
                    if (this.legacyIdleDefinition) {
                        isIdleStart = true; // in TRIP_ON_START
                        this.idleStartEvent = evRcd;
                    }
                } else
                if (this.isIgnitionOff(statusCode)) {
                    // -- ignition off while not moving, stop idle clock
                    if (this.legacyIdleDefinition) {
                        isIdleStop = true; // in TRIP_ON_START
                        this.idleStopEvent = evRcd;
                    }
                } else {
                    // -- neither ignition on nor off?
                }
            } else {
                // -- I'm still moving
            }
        }
    } else
    if (this.tripStartType == TRIP_ON_SPEED) {
        // -- TRIP_ON_SPEED: trips are delimited by the reported speed crossing
        // -  'minSpeedKPH', with an optional minimum-stopped-time debounce
        // -  ('pendingStopEvent' holds the candidate stop until it qualifies)
        if ((speedKPH > 0.0) && (speedKPH >= this.minSpeedKPH)) {
            // -- I am moving
            this.pendingStopEvent = null; // always reset (for min stop time below)
            if (!this.isInMotion) {
                // -- I wasn't moving before, now I've started moving
                isMotionStart = true;
                this.isInMotion = true;
                //this.lastStartEvent = evRcd;
                this.lastMotionEvent = evRcd; // start of motion
                if (this.isIgnitionOn) {
                    // -- ignition is already on.
                    //Print.logInfo("Start of motion (ignition is ON)");
                } else {
                    // -- force ignition on (since were now moving)
                    //Print.logInfo("Start of motion (force ignition ON)");
                    this.isIgnitionOn = true;
                    //this.lastIgnOnEvent = evRcd;
                    this.lastIgnitionEvent = evRcd;
                }
                if (this.legacyIdleDefinition) {
                    isIdleStop = true; // in TRIP_ON_SPEED
                    this.idleStopEvent = evRcd;
                }
            } else {
                // -- I'm still moving
                if (ignitionChange) {
                    // -- ignition on/off while moving?
                }
            }
        } else {
            // -- I am not moving
            if (this.isInMotion) {
                // -- I was moving, now I've stopped moving - maybe
                if (this.minStoppedTimeSec <= 0L) {
                    // no minimum stopped-time, and we haven't already stopped
                    //Print.logInfo("Stopped motion (no minimum stopped time)");
                    isMotionStop = true;
                    this.isInMotion = false;
                    //this.lastStopEvent = evRcd;
                    this.lastMotionEvent = evRcd; // stop motion
                    this.pendingStopEvent = null;
                    if (this.legacyIdleDefinition) {
                        isIdleStart = true; // in TRIP_ON_SPEED
                        this.idleStartEvent = evRcd;
                    }
                } else
                if (ignitionChange && !this.isIgnitionOn && this.stopOnIgnitionOff) {
                    // -- ignition off while not moving, and we want to consider this as a stop
                    //Print.logInfo("Stopped motion (forced by ignition OFF)");
                    isMotionStop = true;
                    this.isInMotion = false;
                    //this.lastStopEvent = evRcd;
                    // -- prefer the pending (earlier) stop candidate as the trip stop event
                    this.lastMotionEvent = (this.pendingStopEvent != null)? this.pendingStopEvent : evRcd; // stop motion
                    this.pendingStopEvent = null;
                    if (this.legacyIdleDefinition) {
                        isIdleStop = true; // in TRIP_ON_SPEED
                        this.idleStopEvent = evRcd;
                    }
                } else {
                    // -- minimum stopped time in effect
                    if (this.pendingStopEvent == null) {
                        // -- start the stopped-time clock
                        this.pendingStopEvent = evRcd;
                    } else {
                        // -- check to see if we've met the minimum stopped time
                        long deltaTimeSec = evRcd.getTimestamp() - this.pendingStopEvent.getTimestamp();
                        if (deltaTimeSec >= this.minStoppedTimeSec) {
                            // -- elapsed stop time exceeded limit
                            //Print.logInfo("Stopped motion (elapsed minimum stop time)");
                            isMotionStop = true;
                            this.isInMotion = false;
                            //this.lastStopEvent = evRcd;
                            if (SPEED_RESET_STOP_TIME) {
                                // -- if we reset the stop event here, then the minimum stopped time will
                                // -  not be counted. (this does cause some user confusion, so this reset
                                // -  should not occur).
                                this.lastMotionEvent = evRcd; // stop motion
                            } else {
                                this.lastMotionEvent = this.pendingStopEvent;
                            }
                            this.pendingStopEvent = null;
                            if (this.legacyIdleDefinition) {
                                isIdleStart = true; // in TRIP_ON_SPEED
                                this.idleStartEvent = evRcd;
                            }
                        } else {
                            // -- assume I'm still moving (ie. temporarily stopped)
                        }
                    }
                }
            } else {
                // -- I'm still not moving
                // -  check for idle change events while not moving
                if (this.isIgnitionOn(statusCode)) {
                    // -- ignition on while not moving, start idle clock
                    if (this.legacyIdleDefinition) {
                        isIdleStart = true; // in TRIP_ON_SPEED
                        this.idleStartEvent = evRcd;
                    }
                } else
                if (this.isIgnitionOff(statusCode)) {
                    // -- stopped, ignition off, stop idle
                    if (this.legacyIdleDefinition) {
                        isIdleStop = true; // in TRIP_ON_SPEED
                        this.idleStopEvent = evRcd;
                    }
                }
            }
        }
    }
    // isMotionStart          - true if motion changed from stop==>start
    // isMotionStop           - true if motion changed from start==>stop
    // this.isInMotion        - current motion state
    // this.lastMotionEvent   - last motion delimiter event
    // this.lastIgnitionEvent - last ignition delimiter event
    // ignitionChange         - true if ignition changed state
    // this.isIgnitionOn      - current ignition state

    /* new idle */
    // -- defined as 0-speed elapsed time between trip delimiters
    // -  (this section only applies when 'legacyIdleDefinition' is false)
    if (this.legacyIdleDefinition) {
        // -- skip for legacy idle accumulation
    } else
    if (isMotionStart) {
        if (!evRcd.isValidGeoPoint()) {
            // -- no valid GPS, assume stopped, start idle
            //Print.logInfo("["+evRcd.getTimestampString()+"] Idle Start (no GPS) ...");
            isIdleStart = true;
            this.idleStartEvent = evRcd;
        } else
        if ((speedKPH > 0.0) && (speedKPH >= this.minSpeedKPH)) {
            // -- I am moving as motion occurred, stop idle
            if (this.idleStartEvent != null) {
                // -- currently idling, stop idle
                //Print.logInfo("["+evRcd.getTimestampString()+"] Idle Stop (speed "+speedKPH+") ...");
                isIdleStop = true;
                this.idleStopEvent = evRcd;
            } else {
                // -- already not idling
            }
        } else {
            // -- stopped as motion occurred, start idle
            //Print.logInfo("["+evRcd.getTimestampString()+"] Idle Start (no speed) ...");
            isIdleStart = true;
            this.idleStartEvent = evRcd;
        }
    } else
    if (this.isInMotion) {
        // -- we are currently in motion
        if (this.isIgnitionOn) {
            // -- ignition is on, start idle (if not moving)
            if (!evRcd.isValidGeoPoint()) {
                // -- no valid GPS, disregard this record
                //Print.logInfo("["+evRcd.getTimestampString()+"] No valid GPS (disregard) ...");
            } else
            if ((speedKPH > 0.0) && (speedKPH >= this.minSpeedKPH)) {
                // -- I am moving, not idle
                if (this.idleStartEvent != null) {
                    // -- we are currently idling, stop idle
                    //Print.logInfo("["+evRcd.getTimestampString()+"] Idle Stop (speed="+speedKPH+") ...");
                    isIdleStop = true;
                    this.idleStopEvent = evRcd;
                } else {
                    // -- we are not already idling
                }
            } else {
                // -- stopped, ignition on, is idle
                if (this.idleStartEvent != null) {
                    // -- we are already idling
                } else {
                    // -- start idle
                    //Print.logInfo("["+evRcd.getTimestampString()+"] Idle Start (no speed) ...");
                    isIdleStart = true;
                    this.idleStartEvent = evRcd;
                }
            }
        }
    } else
    if (isMotionStop) {
        // -- we've just stopped from motion
        if (this.idleStartEvent != null) {
            //Print.logInfo("["+evRcd.getTimestampString()+"] Idle Stop (ignition off) ...");
            isIdleStop = true;
            this.idleStopEvent = evRcd;
        } else {
            // -- already not idling
        }
    }
    // isIdleStart         - true if idle changed from stop==>start
    // isIdleStop          - true if idle changed from start==>stop
    // this.idleStartEvent - last idle start event
    // this.idleStopEvent  - last idle stop event

    /* accrue idle time */
    // -- accumulates the elapsed seconds between each idle-start/idle-stop pair
    if (this.ignitionCodes != null) {
        // -- 'idle' only valid if we have ignition codes
        if (isIdleStart) {
            // -- just wait for 'stop'
        } else
        if (isIdleStop) {
            // -- 'this.idleStopEvent' is non-null
            if (this.idleStartEvent != null) {
                long idleTime = this.idleStopEvent.getTimestamp() - this.idleStartEvent.getTimestamp();
                this.idleAccumulator += idleTime;
                //Print.logInfo("["+this.idleStartEvent.getTimestampString()+" ==> "+this.idleStopEvent.getTimestampString()+"] Added Idle Time: " + idleTime + " sec [total "+this.idleAccumulator+"]");
            } else {
                // -- 'this.idleStartEvent' not yet initialized (likely first occurance in report)
            }
            //Print.logInfo("Accumulated Idle time: " + this.idleAccumulator);
            this.idleStartEvent = null;
            this.idleStopEvent  = null;
        }
    }

    // -- lastStart -> lastStop -> start
    // -- trip-record emission: a completed start/stop pair is written out when
    // -  the next trip 'start' is seen (or when an abnormal sequence is detected)
    if (isMotionStart) {
        EventData ev = this.lastMotionEvent; // start of motion
        // -- 'this.isIgnitionOn' is 'true'
        if (this.lastStateChange == STATE_START) {
            // -- abnormal start ==> start
            // -  we already have a 'start', we're missing an interleaving 'stop'
            // -  the driving-time is not valid
            // -  ('this.lastStopTime' will already be '0' here, since we didn't get an interleaving 'stop')
            // -  ('this.lastStartTime' will be > 0 here, since we did get a previous 'start')
            // -  We treat this START event as a STOP event
            long     stopTime    = ev.getTimestamp();
            GeoPoint stopPoint   = ev.getGeoPoint();
            String   stopAddr    = ev.getAddress();
            double   stopOdom    = ev.getOdometerKM();
            double   stopOdomOfs = ev.getOdometerOffsetKM(null);
            if (stopOdom <= 0.0) {
                stopOdom = ev.getDistanceKM();
                if (stopOdom <= 0.0) {
                    // -- we do not have a valid stop odometer, use last valid odometer
                    stopOdom    = this.lastValidOdometerKM;
                    stopOdomOfs = this.lastValidOdomOfsKM;
                    if (stopOdom <= 0.0) {
                        // -- we still do not have a valid odometer
                    }
                }
            }
            double stopFuel  = ev.getFieldValue(EventData.FLD_fuelTotal, 0.0);
            long   driveTime = (this.lastStartTime > 0L)? (stopTime - this.lastStartTime)     :  0L;
            double driveDist = (this.lastStartTime > 0L)? (stopOdom - this.lastStartOdometer) :  0.0; // kilometers
            double fuelTrip  = (this.lastStartTime > 0L)? (stopFuel - this.lastStartFuelUsed) :  0.0; // liters
            double driveEcon = (fuelTrip > 0.0)? (driveDist / fuelTrip) : -1.0; // kilometers per liter
            Device.FuelEconomyType driveEconType = Device.FuelEconomyType.FUEL_CONSUMED;
            long   stopElaps = 0L;
            long   idleElaps = 0L;
            double fuelIdle  = -1.0;
            this._addRecord( // handleDBRecord
                _deviceDetailData,
                ev.getAccountID(), ev.getDeviceID(), device,
                this.lastStartTime , this.lastStartPoint, this.lastStartAddress, this.lastStartOdometer, this.lastStartOdomOfs, this.lastStartFuelUsed,
                stopTime           , stopPoint          , stopAddr             , stopOdom              , stopOdomOfs          , stopFuel             ,
                driveTime, driveDist, fuelTrip, driveEcon, driveEconType,
                stopElaps, idleElaps, fuelIdle);
            // -- continue with 'START'
        } else
        if (this.lastStopTime > 0) {
            // -- normal start --> stop ==> start
            // -  emit the completed trip from 'lastStart' to 'lastStop'
            long   driveTime = (this.lastStartTime > 0L)? (this.lastStopTime     - this.lastStartTime)     :  0L;
            double driveDist = (this.lastStartTime > 0L)? (this.lastStopOdometer - this.lastStartOdometer) : -1.0; // kilometers
            double fuelTrip  = (this.lastStartTime > 0L)? (this.lastStopFuelUsed - this.lastStartFuelUsed) : -1.0; // liters
            double driveEcon = (fuelTrip > 0.0)? (driveDist / fuelTrip) : 0.0; // kilometers per liter
            // NOTE(review): the 'unavailable' defaults here (-1.0/0.0) differ from
            // the abnormal start==>start case above (0.0/-1.0) -- confirm intended
            Device.FuelEconomyType driveEconType = Device.FuelEconomyType.FUEL_CONSUMED;
            long   stopElaps = ev.getTimestamp() - this.lastStopTime;
            long   idleElaps = (this.idleAccumulator > 0L)? this.idleAccumulator : -1L;
            double fuelIdle  = -1.0;
            this._addRecord( // handleDBRecord
                _deviceDetailData,
                ev.getAccountID(), ev.getDeviceID(), device,
                this.lastStartTime , this.lastStartPoint, this.lastStartAddress, this.lastStartOdometer, this.lastStartOdomOfs, this.lastStartFuelUsed,
                this.lastStopTime  , this.lastStopPoint , this.lastStopAddress , this.lastStopOdometer , this.lastStopOdomOfs , this.lastStopFuelUsed ,
                driveTime, driveDist, fuelTrip, driveEcon, driveEconType,
                stopElaps, idleElaps, fuelIdle);
        }
        // -- latch the new trip-start state from the start-of-motion event
        this.lastStartTime       = ev.getTimestamp();
        this.lastStartPoint      = ev.getGeoPoint();
        this.lastStartAddress    = ev.getAddress();
        this.lastStartOdometer   = ev.getOdometerKM();
        this.lastStartOdomOfs    = ev.getOdometerOffsetKM(null);
        if (this.lastStartOdometer <= 0.0) {
            this.lastStartOdometer = ev.getDistanceKM();
            if (this.lastStartOdometer <= 0.0) {
                // we do not have a valid stop odometer, use last valid odometer
                this.lastStartOdometer = this.lastValidOdometerKM;
                this.lastStartOdomOfs  = this.lastValidOdomOfsKM;
                if (this.lastStartOdometer <= 0.0) {
                    // we still do not have a valid odometer
                }
            }
        }
        this.lastStartFuelUsed   = ev.getFieldValue(EventData.FLD_fuelTotal , 0.0);
        this.lastStartFuelLevel  = ev.getFieldValue(EventData.FLD_fuelLevel , 0.0);
        this.lastStartFuelRemain = ev.getFieldValue(EventData.FLD_fuelRemain, 0.0);
        // -- clear the stop-side state until the next 'stop' is seen
        this.lastStopTime        = 0L;
        this.lastStopPoint       = null;
        this.lastStopAddress     = null;
        this.lastStopOdometer    = 0.0;
        this.lastStopOdomOfs     = 0.0;
        this.lastStopFuelUsed    = 0.0;
        this.lastStopFuelLevel   = 0.0;
        this.lastStopFuelRemain  = 0.0;
        this.lastStateChange     = STATE_START;
        /* clear idle accrual */
        this.idleAccumulator = 0L;
    } else
    if (isMotionStop) {
        EventData ev = this.lastMotionEvent; // stop motion
        if (this.lastStateChange == STATE_STOP) {
            // abnormal start --> stop ==> stop
            // we already have a 'stop', we're missing a 'start'.
            // this condition can only occur for TRIP_ON_START or TRIP_ON_IGNITION
            if ((this.lastStopTime > 0) && (this.lastIgnitionEvent != null) && (this.lastIgnitionEvent.getTimestamp() > this.lastStopTime)) {
                // inject a START at the last ignition event (no additional idle accural calculations)
                long     startTime    = this.lastIgnitionEvent.getTimestamp();
                GeoPoint startPoint   = this.lastIgnitionEvent.getGeoPoint();
                String   startAddr    = this.lastIgnitionEvent.getAddress();
                double   startOdom    = this.lastIgnitionEvent.getOdometerKM();
                double   startOdomOfs = this.lastIgnitionEvent.getOdometerOffsetKM(null);
                if (startOdom <= 0.0) {
                    startOdom = this.lastIgnitionEvent.getDistanceKM();
                    if (startOdom <= 0.0) {
                        // we do not have a valid stop odometer, use last valid odometer
                        startOdom    = this.lastValidOdometerKM;
                        startOdomOfs = this.lastValidOdomOfsKM;
                        if (startOdom <= 0.0) {
                            // we still do not have a valid odometer
                        }
                    }
                }
                double startFuel = this.lastIgnitionEvent.getFieldValue(EventData.FLD_fuelTotal, 0.0);
                // NOTE(review): the guard above requires startTime > this.lastStopTime,
                // so 'driveTime' computed below is negative -- verify intended ordering
                // of the injected START relative to the previous STOP
                long   driveTime = this.lastStopTime     - startTime;
                double driveDist = this.lastStopOdometer - startOdom;  // kilometers
                double fuelTrip  = this.lastStopFuelUsed - startFuel;  // liters
                double driveEcon = (fuelTrip > 0.0)? (driveDist / fuelTrip) : 0.0; // kilometers per liter
                Device.FuelEconomyType driveEconType = Device.FuelEconomyType.FUEL_CONSUMED;
                long   stopElaps = this.lastIgnitionEvent.getTimestamp() - this.lastStopTime;
                long   idleElaps = (this.idleAccumulator > 0L)? this.idleAccumulator : -1L;
                double fuelIdle  = -1.0;
                this._addRecord( // handleDBRecord
                    _deviceDetailData,
                    ev.getAccountID(), ev.getDeviceID(), device,
                    startTime          , startPoint         , startAddr            , startOdom            , startOdomOfs        , startFuel             ,
                    this.lastStopTime  , this.lastStopPoint , this.lastStopAddress , this.lastStopOdometer, this.lastStopOdomOfs, this.lastStopFuelUsed ,
                    driveTime, driveDist, fuelTrip, driveEcon, driveEconType,
                    stopElaps, idleElaps, fuelIdle);
                this.isIgnitionOn = true; // force to true, since we simulated a 'START'
                // 'this.lastIgnitionEvent' stays as-is
                // Continue with STOP
            } else {
                // no interleaving ignition events
                // ignore the previous 'STOP'
            }
        }
        // -- latch the new trip-stop state from the stop-of-motion event
        this.lastStopTime       = ev.getTimestamp();
        this.lastStopPoint      = ev.getGeoPoint();
        this.lastStopAddress    = ev.getAddress();
        this.lastStopOdometer   = ev.getOdometerKM();
        this.lastStopOdomOfs    = ev.getOdometerOffsetKM(null);
        if (this.lastStopOdometer <= 0.0) {
            this.lastStopOdometer = ev.getDistanceKM();
            if (this.lastStopOdometer <= 0.0) {
                // we do not have a valid stop odometer, use last valid odometer
                this.lastStopOdometer = this.lastValidOdometerKM;
                this.lastStopOdomOfs  = this.lastValidOdomOfsKM;
                if (this.lastStopOdometer <= 0.0) {
                    // we still do not have a valid odometer
                }
            }
        }
        this.lastStopFuelUsed   = ev.getFieldValue(EventData.FLD_fuelTotal , 0.0);
        this.lastStopFuelLevel  = ev.getFieldValue(EventData.FLD_fuelLevel , 0.0);
        this.lastStopFuelRemain = ev.getFieldValue(EventData.FLD_fuelRemain, 0.0);
        this.lastStateChange    = STATE_STOP;
        /* start idle accumulation */
        // -- NOTE: clearing this here prevents this trip value from being added to the device total
        //this.idleAccumulator = 0L;
        //Print.logInfo("this.idleAccumulator cleared ...");
    }

    /* cache previous valid odometer */
    // -- remembered so later records with a missing odometer can fall back to it
    double thisEventOdometerKM = evRcd.getOdometerKM();
    double thisEventOdomOfsKM  = evRcd.getOdometerOffsetKM(null);
    if (thisEventOdometerKM > 0.0) {
        this.lastValidOdometerKM = thisEventOdometerKM;
        this.lastValidOdomOfsKM  = thisEventOdomOfsKM;
    } else {
        thisEventOdometerKM = evRcd.getDistanceKM();
        if (thisEventOdometerKM > 0.0) {
            this.lastValidOdometerKM = thisEventOdometerKM;
            this.lastValidOdomOfsKM  = thisEventOdomOfsKM;
        }
    }

    /* return record limit status */
    // -- DBRH_SKIP continues processing; DBRH_STOP halts once the report limit is reached
    return (_deviceDetailData.size() < this.getReportLimit())? DBRH_SKIP : DBRH_STOP;

} // handleDBRecord
// ------------------------------------------------------------------------
/**
*** Returns true if the specified status code is the configured ignition-ON code.
*** Always returns false when no ignition codes are configured for this report.
**/
private boolean isIgnitionOn(int statusCode)
{
    // -- ignitionCodes[1] holds the ignition-ON status code
    return (this.ignitionCodes != null) && (statusCode == this.ignitionCodes[1]);
}
/**
*** Returns true if the specified status code is the configured ignition-OFF code.
*** Always returns false when no ignition codes are configured for this report.
**/
private boolean isIgnitionOff(int statusCode)
{
    // -- ignitionCodes[0] holds the ignition-OFF status code
    return (this.ignitionCodes != null) && (statusCode == this.ignitionCodes[0]);
}
/**
*** Returns true if the specified status code indicates an engine-start event
**/
private boolean isEngineStart(int statusCode)
{
    return (statusCode == StatusCodes.STATUS_ENGINE_START);
}
/**
*** Returns true if the specified status code indicates an engine-stop event
**/
private boolean isEngineStop(int statusCode)
{
    return (statusCode == StatusCodes.STATUS_ENGINE_STOP);
}
/**
*** Returns true if the specified status code indicates a motion-start event
**/
private boolean isMotionStart(int statusCode)
{
    return (statusCode == StatusCodes.STATUS_MOTION_START);
}
/**
*** Returns true if the specified status code indicates a motion-stop event
**/
private boolean isMotionStop(int statusCode)
{
    return (statusCode == StatusCodes.STATUS_MOTION_STOP);
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/**
*** Custom MotionFieldData class
**/
private static class MotionFieldData
extends FieldData
implements EventDataProvider
{
// Available fields:
// FieldLayout.DATA_ACCOUNT_ID
// FieldLayout.DATA_DEVICE_ID
// FieldLayout.DATA_GEOPOINT
// FieldLayout.DATA_ADDRESS
// FieldLayout.DATA_START_TIMESTAMP
// FieldLayout.DATA_DRIVING_ELAPSED
// FieldLayout.DATA_ODOMETER
// FieldLayout.DATA_ODOMETER_DELTA (odomDelta)
// FieldLayout.DATA_STOP_TIMESTAMP
// FieldLayout.DATA_STOP_GEOPOINT
// FieldLayout.DATA_STOP_ADDRESS
// FieldLayout.DATA_STOP_ODOMETER
// FieldLayout.DATA_FUEL_TOTAL
// FieldLayout.DATA_FUEL_TRIP
// FieldLayout.DATA_FUEL_ECONOMY
// FieldLayout.DATA_FUEL_ECONOMY_TYPE
// FieldLayout.DATA_IDLE_ELAPSED
public MotionFieldData() {
super();
}
public String getAccountID() {
return super.getString(FieldLayout.DATA_ACCOUNT_ID,"");
}
public String getDeviceID() {
return super.getDeviceID();
}
public String getDeviceDescription() {
return super.getDeviceDescription();
}
public String getDeviceVIN() {
return super.getDeviceVIN();
}
public long getTimestamp() {
return super.getLong(FieldLayout.DATA_STOP_TIMESTAMP, 0L);
}
public int getStatusCode() {
return StatusCodes.STATUS_MOTION_STOP;
}
public String getStatusCodeDescription(BasicPrivateLabel bpl) {
Device dev = null;
int code = this.getStatusCode();
return StatusCode.getDescription(dev, code, bpl, "Stop");
}
public StatusCodeProvider getStatusCodeProvider(BasicPrivateLabel bpl) {
Device dev = null;
int code = this.getStatusCode();
return StatusCode.getStatusCodeProvider(dev, code, bpl, null/*dftSCP*/);
}
public int getPushpinIconIndex(String iconSelector, OrderedSet<String> iconKeys,
boolean isFleet, BasicPrivateLabel bpl) {
return EventData.ICON_PUSHPIN_RED;
}
public boolean isValidGeoPoint() {
return GeoPoint.isValid(this.getLatitude(), this.getLongitude());
}
// Latitude of the stop location; 0.0 when no geopoint was recorded.
public double getLatitude() {
    GeoPoint gp = super.getGeoPoint(FieldLayout.DATA_STOP_GEOPOINT, null);
    return (gp != null)? gp.getLatitude() : 0.0;
}
// Longitude of the stop location; 0.0 when no geopoint was recorded.
public double getLongitude() {
    GeoPoint gp = super.getGeoPoint(FieldLayout.DATA_STOP_GEOPOINT, null);
    return (gp != null)? gp.getLongitude() : 0.0;
}
// Convenience wrapper combining the stored latitude/longitude into a GeoPoint.
public GeoPoint getGeoPoint() {
    return new GeoPoint(this.getLatitude(), this.getLongitude());
}
// The attributes below are not carried by this event type; fixed
// "not available" defaults are returned for each of them.
public long getGpsAge() {
    return 0L; // not available
}
public long getCreationAge() {
    return 0L; // not available
}
public double getHorzAccuracy() {
    return -1.0; // not available
}
// "Best" location is simply the recorded geopoint (no alternate source exists).
public GeoPoint getBestGeoPoint() {
    return this.getGeoPoint();
}
public double getBestAccuracy() {
    return this.getHorzAccuracy();
}
public int getSatelliteCount() {
    return 0;
}
public double getBatteryLevel() {
    return 0.0;
}
public double getSpeedKPH() {
    return 0.0;
}
public double getHeading() {
    return 0.0;
}
public double getAltitude() {
    return 0.0;
}
public double getOdometerKM() {
    return 0.0;
}
public String getGeozoneID() {
    return "";
}
// Reverse-geocoded address stored with the stop event, or "" when absent.
public String getAddress() {
    return super.getString(FieldLayout.DATA_STOP_ADDRESS, "");
}
public long getInputMask() {
    return 0L;
}
// Records this event's position within the batch/report it belongs to.
public void setEventIndex(int ndx)
{
    super.setInt(FieldLayout.DATA_EVENT_INDEX,ndx);
}
// Returns the stored event index, or -1 when it was never set.
public int getEventIndex()
{
    return super.getInt(FieldLayout.DATA_EVENT_INDEX,-1);
}
// First event is identified by index 0 (false when the index is unset/-1).
public boolean getIsFirstEvent()
{
    return (this.getEventIndex() == 0);
}
// Explicit last-event marker; cannot be derived from the index alone.
public void setIsLastEvent(boolean isLast) {
    super.setBoolean(FieldLayout.DATA_LAST_EVENT,isLast);
}
public boolean getIsLastEvent() {
    return super.getBoolean(FieldLayout.DATA_LAST_EVENT,false);
}
}
}
| apache-2.0 |
Skarlso/gocd | server/src/main/java/com/thoughtworks/go/config/update/FullConfigUpdateCommand.java | 1398 | /*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.update;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.NoOverwriteUpdateConfigCommand;
/**
 * Config update command that swaps in a complete, pre-built cruise
 * configuration ("config for edit"), guarded by the MD5 of the configuration
 * the edit was based on so that concurrent edits are not silently overwritten.
 */
public class FullConfigUpdateCommand implements NoOverwriteUpdateConfigCommand {

    /** The complete replacement configuration. */
    private final CruiseConfig replacement;

    /** MD5 of the configuration this edit was derived from. */
    private final String baseMd5;

    public FullConfigUpdateCommand(CruiseConfig configForEdit, String unmodifiedMd5) {
        this.replacement = configForEdit;
        this.baseMd5 = unmodifiedMd5;
    }

    @Override
    public String unmodifiedMd5() {
        return baseMd5;
    }

    @Override
    public CruiseConfig update(CruiseConfig cruiseConfig) {
        // The incoming config is deliberately ignored: this command is a full replace.
        return replacement;
    }

    public CruiseConfig configForEdit() {
        return replacement;
    }
}
| apache-2.0 |
GeLiXin/hadoop | hadoop-hdds/common/src/main/java/org/apache/ratis/RatisHelper.java | 4700 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ratis;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.ratis.client.RaftClient;
import org.apache.ratis.conf.RaftProperties;
import org.apache.ratis.grpc.GrpcConfigKeys;
import org.apache.ratis.protocol.RaftGroup;
import org.apache.ratis.protocol.RaftGroupId;
import org.apache.ratis.protocol.RaftPeer;
import org.apache.ratis.protocol.RaftPeerId;
import org.apache.ratis.rpc.RpcType;
import org.apache.ratis.shaded.com.google.protobuf.ByteString;
import org.apache.ratis.util.SizeInBytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
* Ratis helper methods.
*/
public interface RatisHelper {
  Logger LOG = LoggerFactory.getLogger(RatisHelper.class);

  // Raft peer id string for a datanode: "<uuid>_<ratis-port>".
  static String toRaftPeerIdString(DatanodeDetails id) {
    return id.getUuidString() + "_" +
        id.getPort(DatanodeDetails.Port.Name.RATIS).getValue();
  }

  // Network address of the datanode's Ratis endpoint: "<ip>:<ratis-port>".
  static String toRaftPeerAddressString(DatanodeDetails id) {
    return id.getIpAddress() + ":" +
        id.getPort(DatanodeDetails.Port.Name.RATIS).getValue();
  }

  static RaftPeerId toRaftPeerId(DatanodeDetails id) {
    return RaftPeerId.valueOf(toRaftPeerIdString(id));
  }

  static RaftPeer toRaftPeer(DatanodeDetails id) {
    return new RaftPeer(toRaftPeerId(id), toRaftPeerAddressString(id));
  }

  static List<RaftPeer> toRaftPeers(Pipeline pipeline) {
    return toRaftPeers(pipeline.getMachines());
  }

  static <E extends DatanodeDetails> List<RaftPeer> toRaftPeers(
      List<E> datanodes) {
    return datanodes.stream().map(RatisHelper::toRaftPeer)
        .collect(Collectors.toList());
  }

  /* TODO: use a dummy id for all groups for the moment.
   * It should be changed to a unique id for each group.
   */
  RaftGroupId DUMMY_GROUP_ID =
      RaftGroupId.valueOf(ByteString.copyFromUtf8("AOzoneRatisGroup"));

  // Shared empty group, reused for every empty-peer case below.
  RaftGroup EMPTY_GROUP = new RaftGroup(DUMMY_GROUP_ID,
      Collections.emptyList());

  static RaftGroup emptyRaftGroup() {
    return EMPTY_GROUP;
  }

  static RaftGroup newRaftGroup(Collection<RaftPeer> peers) {
    return peers.isEmpty()? emptyRaftGroup()
        : new RaftGroup(DUMMY_GROUP_ID, peers);
  }

  // NOTE(review): newPeers is computed even when 'peers' is empty and then
  // discarded; harmless, but wasted work on the empty path.
  static RaftGroup newRaftGroup(RaftGroupId groupId,
      Collection<DatanodeDetails> peers) {
    final List<RaftPeer> newPeers = peers.stream()
        .map(RatisHelper::toRaftPeer)
        .collect(Collectors.toList());
    return peers.isEmpty() ? new RaftGroup(groupId, Collections.emptyList())
        : new RaftGroup(groupId, newPeers);
  }

  static RaftGroup newRaftGroup(Pipeline pipeline) {
    return newRaftGroup(toRaftPeers(pipeline));
  }

  // Client addressed at the pipeline's leader within the pipeline's raft group.
  static RaftClient newRaftClient(RpcType rpcType, Pipeline pipeline) {
    return newRaftClient(rpcType, toRaftPeerId(pipeline.getLeader()),
        newRaftGroup(pipeline.getId().getRaftGroupID(),
            pipeline.getMachines()));
  }

  // Single-peer client: the leader is the only member of the group.
  static RaftClient newRaftClient(RpcType rpcType, RaftPeer leader) {
    return newRaftClient(rpcType, leader.getId(),
        newRaftGroup(new ArrayList<>(Arrays.asList(leader))));
  }

  // Builds a RaftClient with the requested RPC type and the Ozone container
  // chunk-size cap applied as the gRPC maximum message size.
  static RaftClient newRaftClient(
      RpcType rpcType, RaftPeerId leader, RaftGroup group) {
    LOG.trace("newRaftClient: {}, leader={}, group={}", rpcType, leader, group);
    final RaftProperties properties = new RaftProperties();
    RaftConfigKeys.Rpc.setType(properties, rpcType);
    GrpcConfigKeys.setMessageSizeMax(properties,
        SizeInBytes.valueOf(OzoneConfigKeys.DFS_CONTAINER_CHUNK_MAX_SIZE));
    return RaftClient.newBuilder()
        .setRaftGroup(group)
        .setLeaderId(leader)
        .setProperties(properties)
        .build();
  }
}
| apache-2.0 |
rritoch/gemini.blueprint | core/src/test/java/org/eclipse/gemini/blueprint/compendium/config/ManagedPropertiesTest.java | 7025 | /******************************************************************************
* Copyright (c) 2006, 2010 VMware Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html and the Apache License v2.0
* is available at http://www.opensource.org/licenses/apache2.0.php.
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
* VMware Inc.
*****************************************************************************/
package org.eclipse.gemini.blueprint.compendium.config;
import org.eclipse.gemini.blueprint.TestUtils;
import org.eclipse.gemini.blueprint.compendium.internal.cm.ManagedServiceInstanceTrackerPostProcessor;
import org.eclipse.gemini.blueprint.context.support.BundleContextAwareProcessor;
import org.eclipse.gemini.blueprint.mock.MockBundleContext;
import org.eclipse.gemini.blueprint.mock.MockServiceRegistration;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ManagedService;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.io.ClassPathResource;
import java.io.IOException;
import java.util.Dictionary;
import java.util.Hashtable;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
* @author Costin Leau
*/
public class ManagedPropertiesTest {

    // Spring context built from managedService.xml against the mock BundleContext below.
    private GenericApplicationContext appContext;
    // Number of ManagedService unregistrations observed (incremented on unregister()).
    private int unregistrationCounter;
    // Number of ManagedService registrations observed during context refresh.
    private int registrationCounter;

    @Before
    public void setUp() throws Exception {
        // Configuration mock always hands back an empty properties table.
        final Configuration cfg = createNiceMock(Configuration.class);
        expect(cfg.getProperties()).andReturn(new Hashtable<String, Object>());
        replay(cfg);
        registrationCounter = 0;
        unregistrationCounter = 0;
        BundleContext bundleContext = new MockBundleContext() {
            // always return a ConfigurationAdmin
            public Object getService(ServiceReference reference) {
                return new MockConfigurationAdmin() {
                    public Configuration getConfiguration(String pid) throws IOException {
                        return cfg;
                    }
                };
            }
            // Count ManagedService (un)registrations; every other service
            // registration falls through to the stock mock behavior.
            public ServiceRegistration registerService(String[] clazzes, Object service, Dictionary properties) {
                if (service instanceof ManagedService) {
                    registrationCounter++;
                    return new MockServiceRegistration(clazzes, properties) {
                        public void unregister() {
                            super.unregister();
                            unregistrationCounter++;
                        }
                    };
                }
                return super.registerService(clazzes, service, properties);
            }
        };
        appContext = new GenericApplicationContext();
        appContext.getBeanFactory().addBeanPostProcessor(new BundleContextAwareProcessor(bundleContext));
        appContext.setClassLoader(getClass().getClassLoader());
        XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(appContext);
        reader.loadBeanDefinitions(new ClassPathResource("managedService.xml", getClass()));
        appContext.refresh();
    }

    @After
    public void tearDown() throws Exception {
        appContext.close();
        appContext = null;
    }

    // Tracker post-processors are registered under "<class name>#0#<bean name>".
    private ManagedServiceInstanceTrackerPostProcessor getTrackerForBean(String beanName) {
        return (ManagedServiceInstanceTrackerPostProcessor) appContext
                .getBean(ManagedServiceInstanceTrackerPostProcessor.class.getName() + "#0#" + beanName);
    }

    @Test
    public void testSimpleBeanTrackingBpp() throws Exception {
        ManagedServiceInstanceTrackerPostProcessor bpp = getTrackerForBean("simple");
        assertEquals("simple", TestUtils.getFieldValue(bpp, "pid"));
        assertNull(TestUtils.getFieldValue(bpp, "updateMethod"));
        assertNull(TestUtils.getFieldValue(bpp, "updateStrategy"));
    }

    @Test
    public void testSimpleBeanWithNoNameTrackingBpp() throws Exception {
        // Anonymous beans get a generated "<class>#<index>" name.
        ManagedServiceInstanceTrackerPostProcessor bpp =
                getTrackerForBean("org.eclipse.gemini.blueprint.compendium.OneSetter#0");
        assertEquals("non-name", TestUtils.getFieldValue(bpp, "pid"));
        assertNull(TestUtils.getFieldValue(bpp, "updateMethod"));
        assertNull(TestUtils.getFieldValue(bpp, "updateStrategy"));
    }

    @Test
    public void testSimpleWUpdateBeanTrackingBpp() throws Exception {
        ManagedServiceInstanceTrackerPostProcessor bpp = getTrackerForBean("simpleWUpdate");
        assertEquals("simple", TestUtils.getFieldValue(bpp, "pid"));
        assertNull(TestUtils.getFieldValue(bpp, "updateMethod"));
    }

    @Test
    public void testMultipleWUpdateBeanTrackingBpp() throws Exception {
        ManagedServiceInstanceTrackerPostProcessor bpp = getTrackerForBean("multipleWUpdate");
        assertEquals("multiple", TestUtils.getFieldValue(bpp, "pid"));
        assertNull(TestUtils.getFieldValue(bpp, "updateMethod"));
        assertEquals(true, TestUtils.getFieldValue(bpp, "autowireOnUpdate"));
    }

    @Test
    public void testBeanManagedTrackingBpp() throws Exception {
        // bean-managed: updates are delivered via the configured update method only.
        ManagedServiceInstanceTrackerPostProcessor bpp = getTrackerForBean("beanManaged");
        assertEquals("bean-managed", TestUtils.getFieldValue(bpp, "pid"));
        assertEquals("update", TestUtils.getFieldValue(bpp, "updateMethod"));
        assertEquals(false, TestUtils.getFieldValue(bpp, "autowireOnUpdate"));
    }

    @Test
    public void testMixedManagedTrackingBpp() throws Exception {
        // mixed: both the update method and autowire-on-update are active.
        ManagedServiceInstanceTrackerPostProcessor bpp = getTrackerForBean("mixedManaged");
        assertEquals("bean-managed", TestUtils.getFieldValue(bpp, "pid"));
        assertEquals("update", TestUtils.getFieldValue(bpp, "updateMethod"));
        assertEquals(true, TestUtils.getFieldValue(bpp, "autowireOnUpdate"));
    }

    @Test
    public void testTrackingCleanup() throws Exception {
        // Six managed beans => six ManagedService registrations on refresh,
        // and all six must be unregistered when the context closes.
        assertEquals(6, registrationCounter);
        assertEquals(0, unregistrationCounter);
        appContext.close();
        assertEquals(6, unregistrationCounter);
    }
}
bruceadowns/amza | amza-bot-deployable/src/main/java/com/jivesoftware/os/amzabot/deployable/endpoint/AmzaBotCoalmineEndpoints.java | 1706 | package com.jivesoftware.os.amzabot.deployable.endpoint;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.jivesoftware.os.amzabot.deployable.bot.AmzaBotCoalmineConfig;
import com.jivesoftware.os.amzabot.deployable.bot.AmzaBotCoalmineService;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import io.swagger.annotations.Api;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.inject.Singleton;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
@Api(value = "Amza Bot Coalmine")
@Singleton
@Path("/api/amzabot/v1")
/**
 * JAX-RS endpoint that starts asynchronous "coalmine" runs against the Amza bot
 * service. Each request spawns a dedicated, named worker thread for the run.
 */
public class AmzaBotCoalmineEndpoints {

    private static final MetricLogger LOG = MetricLoggerFactory.getLogger();

    private final AmzaBotCoalmineService service;

    AmzaBotCoalmineEndpoints(@Context AmzaBotCoalmineService service) {
        this.service = service;
    }

    /**
     * Schedules a new coalmine run.
     *
     * @param request mining parameters, converted to an {@code AmzaBotCoalmineConfig}
     * @return 202 Accepted when the miner was scheduled, 500 on any failure
     */
    @POST
    @Consumes("application/json")
    @Path("/newminer")
    public Response newCoalminer(AmzaBotCoalmineRequest request) {
        try {
            AmzaBotCoalmineConfig config = AmzaBotCoalmineRequest.genConfig(request);
            ExecutorService executor = Executors.newSingleThreadExecutor(
                new ThreadFactoryBuilder().setNameFormat("amzabot-coalmine-%d").build());
            executor.submit(service.newMinerWithConfig(config));
            // BUGFIX: shut the executor down so its worker thread terminates once
            // the mining task completes; previously every request leaked a live
            // non-daemon thread (the executor was created and never shut down).
            executor.shutdown();
            return Response.accepted().build();
        } catch (Exception e) {
            LOG.error("Failed to start new coalminer", e);
            return Response.serverError().build();
        }
    }
}
| apache-2.0 |
cdegroot/river | qa/src/com/sun/jini/test/spec/security/proxytrust/util/Interface2Impl.java | 1004 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.test.spec.security.proxytrust.util;
import java.util.logging.Level;
/**
 * Trivial concrete implementation of test interface #2 ({@code TestInterface2}).
 * It adds no members of its own; it exists so the proxy-trust tests have a
 * class whose only declared interface is TestInterface2.
 */
public class Interface2Impl implements TestInterface2 {}
| apache-2.0 |
machaval/mule-intellij-plugins | data-weave-plugin/src/main/java/org/mule/tooling/lang/dw/parser/psi/WeaveFindUsagesProvider.java | 2063 | package org.mule.tooling.lang.dw.parser.psi;
import com.intellij.lang.cacheBuilder.DefaultWordsScanner;
import com.intellij.lang.cacheBuilder.WordsScanner;
import com.intellij.lang.findUsages.FindUsagesProvider;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.tree.TokenSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mule.tooling.lang.dw.lexer.WeaveLexer;
/**
 * Find Usages support for the DataWeave language: word scanning over
 * identifiers, and element kind/name presentation for the usages dialog.
 */
public class WeaveFindUsagesProvider implements FindUsagesProvider {

    // Scans identifiers as code words and line comments as comment words.
    private static final DefaultWordsScanner WORDS_SCANNER =
            new DefaultWordsScanner(new WeaveLexer(),
                    TokenSet.create(WeaveTypes.ID), TokenSet.create(WeaveTypes.LINE_COMMENT), TokenSet.EMPTY);

    @Nullable
    @Override
    public WordsScanner getWordsScanner() {
        return WORDS_SCANNER;
    }

    // Usages can be searched for any named element (functions, variables, params).
    @Override
    public boolean canFindUsagesFor(@NotNull PsiElement psiElement) {
        return psiElement instanceof PsiNamedElement;
    }

    @Nullable
    @Override
    public String getHelpId(@NotNull PsiElement psiElement) {
        return com.intellij.lang.HelpID.FIND_OTHER_USAGES;
    }

    // Human-readable element kind shown in the Find Usages UI.
    @NotNull
    @Override
    public String getType(@NotNull PsiElement psiElement) {
        if (psiElement instanceof WeaveFunctionDefinition || psiElement instanceof WeaveFunctionCallExpression) {
            return "Function";
        } else if (psiElement instanceof WeaveFunctionParameter) {
            return "Parameter";
        } else if (psiElement instanceof WeaveVariableDefinition || psiElement instanceof WeaveVariableReferenceExpression) {
            return "Variable";
        }
        return psiElement.getClass().getSimpleName();
    }

    @NotNull
    @Override
    public String getDescriptiveName(@NotNull PsiElement psiElement) {
        return nameOf(psiElement);
    }

    @NotNull
    @Override
    public String getNodeText(@NotNull PsiElement psiElement, boolean useFullName) {
        return nameOf(psiElement);
    }

    // BUGFIX: PsiNamedElement.getName() is @Nullable, but getDescriptiveName()
    // and getNodeText() are declared @NotNull; the original returned the name
    // unchecked and could violate the contract (and the blind cast could throw
    // ClassCastException). Fall back to "" when no name is available.
    @NotNull
    private static String nameOf(@NotNull PsiElement psiElement) {
        String name = psiElement instanceof PsiNamedElement ? ((PsiNamedElement) psiElement).getName() : null;
        return name != null ? name : "";
    }
}
| apache-2.0 |
delkyd/hawtjms | hawtjms-amqp/src/main/java/io/hawtjms/provider/amqp/AmqpProviderFactory.java | 2322 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hawtjms.provider.amqp;
import io.hawtjms.provider.AsyncProvider;
import io.hawtjms.provider.BlockingProvider;
import io.hawtjms.provider.DefaultBlockingProvider;
import io.hawtjms.provider.ProviderFactory;
import io.hawtjms.util.PropertyUtil;
import java.net.URI;
import java.util.Map;
/**
* Factory for creating the AMQP provider.
*/
/**
 * Factory for creating the AMQP provider.
 *
 * Query parameters prefixed with "provider." are stripped from the URI and
 * applied as bean properties on the created provider instance.
 */
public class AmqpProviderFactory extends ProviderFactory {

    @Override
    public BlockingProvider createProvider(URI remoteURI) throws Exception {
        // Blocking facade over the async provider.
        return new DefaultBlockingProvider(createAsyncProvider(remoteURI));
    }

    @Override
    public AsyncProvider createAsyncProvider(URI remoteURI) throws Exception {
        Map<String, String> query = PropertyUtil.parseQuery(remoteURI.getQuery());
        Map<String, String> providerOptions = PropertyUtil.filterProperties(query, "provider.");
        // Rebuild the URI without the consumed "provider." options.
        URI cleanedURI = PropertyUtil.replaceQuery(remoteURI, query);

        AsyncProvider provider = new AmqpProvider(cleanedURI);
        boolean allOptionsApplied = PropertyUtil.setProperties(provider, providerOptions);
        if (!allOptionsApplied) {
            throw new IllegalArgumentException(""
                + " Not all provider options could be set on the AMQP Provider."
                + " Check the options are spelled correctly."
                + " Given parameters=[" + providerOptions + "]."
                + " This provider instance cannot be started.");
        }
        return provider;
    }

    @Override
    public String getName() {
        return "AMQP";
    }
}
| apache-2.0 |
jamesagnew/hapi-fhir | hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CreateMethodBinding.java | 2645 | package ca.uhn.fhir.rest.server.method;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Set;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.rest.annotation.Create;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import javax.annotation.Nonnull;
/**
 * Server method binding for FHIR type-level {@code create} operations
 * (HTTP POST to the resource type). Enforces the version-specific rules on
 * resource ids supplied with a create.
 */
public class CreateMethodBinding extends BaseOutcomeReturningMethodBindingWithResourceParam {

    public CreateMethodBinding(Method theMethod, FhirContext theContext, Object theProvider) {
        super(theMethod, theContext, Create.class, theProvider);
    }

    // Create is not a named ($-style) operation, so there is nothing to match.
    @Override
    protected String getMatchingOperation() {
        return null;
    }

    @Nonnull
    @Override
    public RestOperationTypeEnum getRestOperationType() {
        return RestOperationTypeEnum.CREATE;
    }

    // Create is only ever invoked via HTTP POST.
    @Override
    protected Set<RequestTypeEnum> provideAllowableRequestTypes() {
        return Collections.singleton(RequestTypeEnum.POST);
    }

    // A non-conditional create must not carry an id in the request URL. For
    // versions older than DSTU3 an id in the resource body is also rejected;
    // from DSTU3 on, any body id is silently cleared instead.
    @Override
    protected void validateResourceIdAndUrlIdForNonConditionalOperation(IBaseResource theResource, String theResourceId,
            String theUrlId, String theMatchUrl) {
        if (isNotBlank(theUrlId)) {
            String msg = getContext().getLocalizer()
                    .getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "idInUrlForCreate", theUrlId);
            throw new InvalidRequestException(msg);
        }
        if (getContext().getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) {
            if (isNotBlank(theResourceId)) {
                String msg = getContext().getLocalizer().getMessage(
                        BaseOutcomeReturningMethodBindingWithResourceParam.class, "idInBodyForCreate", theResourceId);
                throw new InvalidRequestException(msg);
            }
        } else {
            theResource.setId((IIdType) null);
        }
    }
}
| apache-2.0 |
timzam/jetpad-mapper | mapper/src/test/java/jetbrains/jetpad/mapper/ItemMapper.java | 1929 | /*
* Copyright 2012-2016 JetBrains s.r.o
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.jetpad.mapper;
import jetbrains.jetpad.model.transform.Transformers;
/**
 * Test mapper from a source {@link Item} to a freshly created target Item,
 * wiring one synchronizer of each flavour (observable role, transformed role,
 * single role, simple role, two-way property).
 */
class ItemMapper extends Mapper<Item, Item> {
    // Kept as a field so tests can trigger a manual refresh of the simple role.
    private SimpleRoleSynchronizer<Item, Item> mySimpleRole;

    ItemMapper(Item item) {
        super(item, new Item());
    }

    @Override
    protected void registerSynchronizers(SynchronizersConfiguration conf) {
        conf.add(Synchronizers.forObservableRole(this, getSource().observableChildren, getTarget().observableChildren, createMapperFactory()));
        conf.add(Synchronizers.forObservableRole(this, getSource().transformedChildren, Transformers.<Item>identityList(), getTarget().transformedChildren, createMapperFactory()));
        conf.add(Synchronizers.forSingleRole(this, getSource().singleChild, getTarget().singleChild, createMapperFactory()));
        conf.add(mySimpleRole = Synchronizers.forSimpleRole(this, getSource().children, getTarget().children, createMapperFactory()));
        conf.add(Synchronizers.forPropsTwoWay(getSource().name, getTarget().name));
    }

    // Simple roles are not change-driven; this forces a re-synchronization.
    public void refreshSimpleRole() {
        mySimpleRole.refresh();
    }

    // Child items are mapped recursively with the same mapper type.
    protected MapperFactory<Item, Item> createMapperFactory() {
        return new MapperFactory<Item, Item>() {
            @Override
            public Mapper<? extends Item, ? extends Item> createMapper(Item source) {
                return new ItemMapper(source);
            }
        };
    }
}
jyemin/mongo-java-driver | bson/src/test/unit/org/bson/codecs/pojo/entities/conventions/CreatorConstructorModel.java | 3221 | /*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bson.codecs.pojo.entities.conventions;
import org.bson.codecs.pojo.annotations.BsonCreator;
import org.bson.codecs.pojo.annotations.BsonProperty;

import java.util.List;
import java.util.Objects;
/**
 * POJO test entity whose {@code @BsonCreator} constructor takes only a subset
 * of the properties; {@code stringField} is populated via its setter instead.
 */
public final class CreatorConstructorModel {
    private final List<Integer> integersField;
    private String stringField;
    public long longField;

    // Codec creator: intentionally omits stringField.
    @BsonCreator
    public CreatorConstructorModel(@BsonProperty("integersField") final List<Integer> integerField,
                                   @BsonProperty("longField") final long longField) {
        this.integersField = integerField;
        this.longField = longField;
    }

    // Convenience all-args constructor for tests.
    public CreatorConstructorModel(final List<Integer> integersField, final String stringField, final long longField) {
        this.integersField = integersField;
        this.stringField = stringField;
        this.longField = longField;
    }

    public List<Integer> getIntegersField() {
        return integersField;
    }

    public String getStringField() {
        return stringField;
    }

    public void setStringField(final String stringField) {
        this.stringField = stringField;
    }

    public long getLongField() {
        return longField;
    }

    public void setLongField(final long longField) {
        this.longField = longField;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        CreatorConstructorModel that = (CreatorConstructorModel) o;
        // Objects.equals collapses the original's hand-rolled null checks;
        // the comparison semantics are unchanged.
        return longField == that.longField
                && Objects.equals(integersField, that.integersField)
                && Objects.equals(stringField, that.stringField);
    }

    @Override
    public int hashCode() {
        // Produces exactly the same values as the original hand-rolled hash:
        // Objects.hashCode(null) == 0, and Long.hashCode(v) == (int) (v ^ (v >>> 32)).
        int result = Objects.hashCode(integersField);
        result = 31 * result + Objects.hashCode(stringField);
        result = 31 * result + Long.hashCode(longField);
        return result;
    }

    @Override
    public String toString() {
        return "CreatorConstructorModel{"
                + "integersField=" + integersField
                + ", stringField='" + stringField + "'"
                + ", longField=" + longField
                + "}";
    }
}
| apache-2.0 |
aemay2/hapi-fhir | hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/ExtensionMatcher.java | 1779 | package ca.uhn.fhir.mdm.rules.matcher;
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ExtensionUtil;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
import java.util.List;
/**
 * MDM field matcher that compares the extensions of two elements: the pair
 * matches when any extension of the left element equals any extension of the
 * right element (per {@code ExtensionUtil.equals}). Elements that cannot carry
 * extensions never match. {@code theExact} and {@code theIdentifierSystem} are
 * ignored by this matcher.
 */
public class ExtensionMatcher implements IMdmFieldMatcher {
    @Override
    public boolean matches(FhirContext theFhirContext, IBase theLeftBase, IBase theRightBase, boolean theExact, String theIdentifierSystem) {
        if (!(theLeftBase instanceof IBaseHasExtensions && theRightBase instanceof IBaseHasExtensions)) {
            return false;
        }
        List<? extends IBaseExtension<?, ?>> leftExtensions = ((IBaseHasExtensions) theLeftBase).getExtension();
        List<? extends IBaseExtension<?, ?>> rightExtensions = ((IBaseHasExtensions) theRightBase).getExtension();
        for (IBaseExtension<?, ?> leftExtensionValue : leftExtensions) {
            for (IBaseExtension<?, ?> rightExtensionValue : rightExtensions) {
                if (ExtensionUtil.equals(leftExtensionValue, rightExtensionValue)) {
                    // Short-circuit: the original kept OR-ing every remaining
                    // pair into a flag after a match was already found.
                    return true;
                }
            }
        }
        return false;
    }
}
| apache-2.0 |
danielpassos/welcome-android | src/com/feedhenry/android/utilities/MyLocation.java | 4665 | /**
* Copyright 2015 Red Hat, Inc., and individual contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.feedhenry.android.utilities;
import java.util.Timer;
import java.util.TimerTask;
import android.content.Context;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.util.Log;
public class MyLocation {
Timer timer1;
LocationManager lm;
LocationResult locationResult;
boolean gps_enabled=false;
boolean network_enabled=false;
public boolean getLocation(Context context, LocationResult result) {
// LocationResult callback class to pass location value from MyLocation to location fragment.
locationResult=result;
if(lm==null)
lm = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
// Exceptions will be thrown if provider is not permitted.
try{
gps_enabled=lm.isProviderEnabled(LocationManager.GPS_PROVIDER);
} catch(Exception ex){
Log.i("FEEDHENRY", "Failed in GPS");
}
try{
network_enabled=lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
} catch(Exception ex){
Log.i("FEEDHENRY", "Failed in NETWORK");
}
// Don't start listeners if no provider is enabled
if(!gps_enabled && !network_enabled)
return false;
if(gps_enabled)
lm.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, locationListenerGps);
if(network_enabled)
lm.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 0, 0, locationListenerNetwork);
timer1=new Timer();
timer1.schedule(new GetLastLocation(), 10000);
return true;
}
LocationListener locationListenerGps = new LocationListener() {
public void onLocationChanged(Location location) {
timer1.cancel();
locationResult.gotLocation(location);
lm.removeUpdates(this);
lm.removeUpdates(locationListenerNetwork);
}
public void onProviderDisabled(String provider) {
Log.i("FEEDHENRY", "Diasabled");
}
public void onProviderEnabled(String provider) {}
public void onStatusChanged(String provider, int status, Bundle extras) {}
};
LocationListener locationListenerNetwork = new LocationListener() {
public void onLocationChanged(Location location) {
timer1.cancel();
locationResult.gotLocation(location);
lm.removeUpdates(this);
lm.removeUpdates(locationListenerGps);
}
public void onProviderDisabled(String provider) {}
public void onProviderEnabled(String provider) {}
public void onStatusChanged(String provider, int status, Bundle extras) {}
};
class GetLastLocation extends TimerTask {
@Override
public void run() {
lm.removeUpdates(locationListenerGps);
lm.removeUpdates(locationListenerNetwork);
Location net_loc=null, gps_loc=null;
if(gps_enabled)
gps_loc=lm.getLastKnownLocation(LocationManager.GPS_PROVIDER);
if(network_enabled)
net_loc=lm.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
//if there are both values use the latest one
if(gps_loc!=null && net_loc!=null){
if(gps_loc.getTime()>net_loc.getTime())
locationResult.gotLocation(gps_loc);
else
locationResult.gotLocation(net_loc);
return;
}
if(gps_loc!=null){
locationResult.gotLocation(gps_loc);
return;
}
if(net_loc!=null){
locationResult.gotLocation(net_loc);
return;
}
locationResult.gotLocation(null);
}
}
    /**
     * Callback handed to getLocation(); receives the single resolved
     * Location, or null when no provider could supply one.
     */
    public static abstract class LocationResult{
        // Invoked exactly once with the best available fix (may be null).
        public abstract void gotLocation(Location location);
    }
}
| apache-2.0 |
githubKNB/public | JavaEightStudies/src/org/javaeightstudies/languageenchancements/PreBuildFunctionsTest.java | 5580 | /**
*
*/
package org.javaeightstudies.languageenchancements;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.javaeightstudies.common.BaseJavaTest;
/**
 * Demonstrates the four core pre-built functional interfaces in
 * {@code java.util.function}: {@link Predicate}, {@link Function},
 * {@link Consumer} and {@link Supplier}.
 *
 * @author niranjanb
 * https://www.oreilly.com/learning/java-8-functional-interfaces
 */
public class PreBuildFunctionsTest extends BaseJavaTest {

    /** Instances are obtained via {@link #getInstance()}. */
    private PreBuildFunctionsTest(){}

    /** Factory method; the class carries no state. */
    public static PreBuildFunctionsTest getInstance(){
        PreBuildFunctionsTest preBuildFunctionsTest = new PreBuildFunctionsTest();
        return preBuildFunctionsTest;
    }

    /** Runs each demo in sequence, separated by banner lines. */
    public void letsGo(){
        predicateTest();
        System.out.println("");
        System.out.println("=========================================");
        System.out.println("");
        functionTest();
        System.out.println("");
        System.out.println("=========================================");
        consumerTest();
        System.out.println("");
        System.out.println("=========================================");
        supplierTest();
    }

    /**
     * {@link Predicate}: a boolean-valued function of one argument, evaluated
     * via {@code test(T)}. Used here to keep only strictly positive values.
     */
    public static void predicateTest(){
        Predicate<Integer> positive = i -> i > 0;
        List<Integer> integerList = Arrays.asList(1, 10, 200, 101, -10, 0);
        // A Predicate can be passed to filter() directly; wrapping it in
        // another lambda (i -> positive.test(i)) was redundant.
        List<Integer> filteredList = integerList.stream()
                .filter(positive).collect(Collectors.toList());
        filteredList.forEach(System.out::println);
    }

    /**
     * {@link Function}: transforms a single input (T) into a result (R) via
     * {@code apply(T)} -- e.g. Centigrade to Fahrenheit, String to Integer.
     */
    public static void functionTest(){
        // Use floating-point arithmetic: the previous integer form (x * 9/5)
        // truncated the fraction (e.g. 37C gave 98.0 instead of 98.6).
        // Also avoids the deprecated new Double(...) constructor.
        Function<Integer, Double> centigradeToFahrenheit = x -> x * 9.0 / 5 + 32;
        // Method reference is the idiomatic form of x -> Integer.valueOf(x).
        Function<String, Integer> stringToInteger = Integer::valueOf;
        // tests
        System.out.println("Centigrade 40 to Fahrenheit : " + centigradeToFahrenheit.apply(40));
        System.out.println("String to Integer : " + stringToInteger.apply("29"));
    }

    /**
     * {@link Consumer}: accepts one argument and returns nothing; used purely
     * for side effects such as printing, persisting or sending notifications.
     */
    public static void consumerTest(){
        // Example 1: single-expression consumer.
        Consumer<String> consumer1 = x -> System.out.println(x.toLowerCase());
        consumer1.accept("Consumer interface represents an operation");
        // Example 2: block-bodied consumer capturing an effectively-final local.
        int x = 99;
        Consumer<String> consumer2 = y -> {
            System.out.println("x : " + x);
            System.out.println("y : " + y);
            System.out.println("Some other lines...");
        };
        consumer2.accept("The following code shows how to create consumer with block statement.");
    }

    /**
     * {@link Supplier}: takes no arguments and produces a value on demand via
     * {@code get()} -- e.g. lazy defaults, factories, sequence generation.
     */
    public static void supplierTest(){
        // Example 1: constant supplier.
        Supplier<String> i = () -> "java 8 supplier.";
        System.out.println(i.get());
        // Example 2: supplier as a factory; each get() builds a new object.
        class SunPower{
            public SunPower(){
                System.out.println("Sun Powder initialized.");
            }
        }
        Supplier<SunPower> sunPowerSupplier = () -> new SunPower();
        SunPower sp1 = sunPowerSupplier.get();
        SunPower sp2 = sunPowerSupplier.get();
        System.out.println("Are s1 and s2 objects equal ? " + Objects.equals(sp1, sp2));
        System.out.println("----------- Sequance generation with Supplier ----------------");
        // Fibonacci numbers below two billion. The accumulators are long to
        // fix the int overflow that previously produced negative "Fibonacci"
        // terms once the sum exceeded Integer.MAX_VALUE; every returned value
        // is < 2e9 and therefore fits safely in an int.
        long[] fibs = {0, 1};
        int maxResult = 2000000000;
        Stream<Integer> fibonacci = Stream.generate(() -> {
            long result = fibs[1];
            if (result < maxResult) {
                long next = fibs[0] + fibs[1];
                fibs[0] = fibs[1];
                fibs[1] = next;
                return (int) result;
            } else {
                throw new IllegalStateException("Number over " + maxResult);
            }
        });
        // Stream.generate() is infinite; the supplier signals exhaustion by
        // throwing, so catch it here instead of letting it crash the demo.
        try {
            fibonacci.forEach(s -> System.out.println(s));
        } catch (IllegalStateException expectedEnd) {
            // Deliberate terminator for the infinite stream; nothing to do.
        }
        System.out.println("--------------------------------------------------------------");
    }
}
| apache-2.0 |
kamransaleem/waltz | waltz-model/src/main/java/com/khartec/waltz/model/orgunit/OrganisationalUnitHierarchy.java | 1235 | /*
* Waltz - Enterprise Architecture
* Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
* See README.md for more information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific
*
*/
package com.khartec.waltz.model.orgunit;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.immutables.value.Value;
import java.util.List;
/**
 * Immutable view of an organisational unit together with its position in the
 * unit tree: the unit itself, its parent chain and its children.
 * The concrete implementation is generated by the Immutables processor and
 * (de)serialized via Jackson as ImmutableOrganisationalUnitHierarchy.
 */
@Value.Immutable
@JsonSerialize(as = ImmutableOrganisationalUnitHierarchy.class)
@JsonDeserialize(as = ImmutableOrganisationalUnitHierarchy.class)
public abstract class OrganisationalUnitHierarchy {
    // Ancestor units of unit(); ordering convention not shown here -- see callers.
    public abstract List<OrganisationalUnit> parents();
    // Direct child units of unit().
    public abstract List<OrganisationalUnit> children();
    // The unit this hierarchy is centred on.
    public abstract OrganisationalUnit unit();
}
| apache-2.0 |
SmarterApp/ItemAuthoring | sbac-iaip/java/src/main/java/com/pacificmetrics/orca/cts/model/Category.java | 1342 | package com.pacificmetrics.orca.cts.model;
import java.io.Serializable;
import java.util.List;
/**
 * Serializable data-transfer object for a category node in the CTS model.
 * Carries identifying attributes plus the list of {@link Standard}s that
 * belong to this category.
 */
public class Category implements Serializable {
    private static final long serialVersionUID = 1L;
    private String name;
    private String treeLevel;
    private String fkPublication;
    private String level;
    private List<Standard> standardList;
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getTreeLevel() {
        return treeLevel;
    }
    public void setTreeLevel(String treeLevel) {
        this.treeLevel = treeLevel;
    }
    public String getFkPublication() {
        return fkPublication;
    }
    public void setFkPublication(String fkPublication) {
        this.fkPublication = fkPublication;
    }
    /**
     * @return the level
     */
    public String getLevel() {
        return level;
    }
    /**
     * @param level the level to set
     */
    public void setLevel(String level) {
        this.level = level;
    }
    /**
     * @return the standardList
     */
    public List<Standard> getStandardList() {
        return standardList;
    }
    /**
     * @param standardList the standardList to set
     */
    public void setStandardList(List<Standard> standardList) {
        this.standardList = standardList;
    }
    /**
     * Renders a JSON-like summary of the identifying fields.
     * NOTE(review): treeLevel is emitted unquoted and level/standardList are
     * omitted; existing consumers may depend on this exact shape, so the
     * format is intentionally left unchanged.
     */
    @Override
    public String toString() {
        return "{\"name\":\""+name+"\",\"treeLevel\":"+treeLevel+",\"fkPublication\":\""+fkPublication+"\"}";
    }
}
| apache-2.0 |
tikue/jcs2-snapshot | src/java/org/apache/commons/jcs/auxiliary/lateral/socket/tcp/LateralTCPSender.java | 11484 | package org.apache.commons.jcs.auxiliary.lateral.socket.tcp;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import org.apache.commons.jcs.auxiliary.lateral.LateralElementDescriptor;
import org.apache.commons.jcs.auxiliary.lateral.socket.tcp.behavior.ITCPLateralCacheAttributes;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Sends {@link LateralElementDescriptor}s to a lateral cache listener over a
 * TCP socket. This class is based on the log4j SocketAppender class. I'm
 * using a different repair structure, so it is significantly different.
 * <p>
 * Not thread-safe except where noted: sends are serialized per instance via
 * {@code getLock} / instance synchronization.
 */
public class LateralTCPSender
{
    /** The logger */
    private final static Log log = LogFactory.getLog( LateralTCPSender.class );

    /** Config */
    private ITCPLateralCacheAttributes tcpLateralCacheAttributes;

    /** The hostname of the remote server. */
    private String remoteHost;

    /** The address of the server */
    private InetAddress address;

    /** The port the server is listening to. */
    int port = 1111;

    /** The stream from the server connection. Null until init() succeeds. */
    private ObjectOutputStream oos;

    /** The socket connection with the server. */
    private Socket socket;

    /** how many messages sent */
    private int sendCnt = 0;

    /** Use to synchronize multiple threads that may be trying to get. */
    private final Object getLock = new int[0];

    /**
     * Constructor for the LateralTCPSender object. Parses the configured
     * "host:port" server string and opens the connection.
     * <p>
     * @param lca configuration; getTcpServer() must be of the form host:port
     * @exception IOException if the address is invalid or the connection fails
     */
    public LateralTCPSender( ITCPLateralCacheAttributes lca )
        throws IOException
    {
        this.setTcpLateralCacheAttributes( lca );
        String p1 = lca.getTcpServer();
        if ( p1 != null )
        {
            String h2 = p1.substring( 0, p1.indexOf( ":" ) );
            int po = Integer.parseInt( p1.substring( p1.indexOf( ":" ) + 1 ) );
            if ( log.isDebugEnabled() )
            {
                log.debug( "h2 = " + h2 );
                log.debug( "po = " + po );
            }
            // substring() never returns null, so the old "h2 == null" check was
            // dead code; an empty host is the actual invalid-input case.
            if ( h2.length() == 0 )
            {
                throw new IOException( "Cannot connect to invalid address [" + h2 + ":" + po + "]" );
            }
            init( h2, po );
        }
    }

    /**
     * Creates a connection to a TCP server and opens the object output stream.
     * <p>
     * @param host remote host name
     * @param port remote port
     * @throws IOException if the socket cannot be opened within the configured
     *         open timeout, or stream creation fails
     */
    protected void init( String host, int port )
        throws IOException
    {
        this.port = port;
        this.address = getAddressByName( host );
        this.setRemoteHost( host );
        try
        {
            if ( log.isInfoEnabled() )
            {
                log.info( "Attempting connection to [" + address.getHostName() + "]" );
            }
            // have time out socket open do this for us
            try
            {
                InetSocketAddress address = new InetSocketAddress( host, port );
                socket = new Socket();
                socket.connect( address, tcpLateralCacheAttributes.getOpenTimeOut() );
            }
            catch ( IOException ioe )
            {
                if (socket != null)
                {
                    socket.close();
                }
                // Java 1.6+ only throw new IOException( "Cannot connect to " + host + ":" + port, ioe );
                final IOException ioException = new IOException( "Cannot connect to " + host + ":" + port);
                ioException.initCause(ioe);
                throw ioException;
            }
            socket.setSoTimeout( tcpLateralCacheAttributes.getSocketTimeOut() );
            synchronized ( this )
            {
                oos = new ObjectOutputStream( socket.getOutputStream() );
            }
        }
        catch ( java.net.ConnectException e )
        {
            log.debug( "Remote host [" + address.getHostName() + "] refused connection." );
            throw e;
        }
        catch ( IOException e )
        {
            log.debug( "Could not connect to [" + address.getHostName() + "]. Exception is " + e );
            throw e;
        }
    }

    /**
     * Gets the addressByName attribute of the LateralTCPSender object.
     * <p>
     * @param host host name to resolve
     * @return The addressByName value
     * @throws IOException if the host cannot be resolved
     */
    private InetAddress getAddressByName( String host )
        throws IOException
    {
        try
        {
            return InetAddress.getByName( host );
        }
        catch ( Exception e )
        {
            log.error( "Could not find address of [" + host + "] ", e );
            // Preserve the original cause (same Java-1.5-compatible pattern as
            // in init()); previously only the message survived.
            final IOException ioException = new IOException( "Could not find address of [" + host + "] " + e.getMessage() );
            ioException.initCause( e );
            throw ioException;
        }
    }

    /**
     * Sends commands to the lateral cache listener.
     * <p>
     * @param led descriptor to send; a null argument is silently ignored
     * @throws IOException if writing to the stream fails (the stream is then
     *         nulled so subsequent sends fail fast)
     */
    public <K extends Serializable, V extends Serializable> void send( LateralElementDescriptor<K, V> led )
        throws IOException
    {
        sendCnt++;
        if ( log.isInfoEnabled() )
        {
            if ( sendCnt % 100 == 0 )
            {
                log.info( "Send Count (port " + port + ") = " + sendCnt );
            }
        }
        if ( log.isDebugEnabled() )
        {
            log.debug( "sending LateralElementDescriptor" );
        }
        if ( led == null )
        {
            return;
        }
        if ( address == null )
        {
            throw new IOException( "No remote host is set for LateralTCPSender." );
        }
        if ( oos != null )
        {
            synchronized ( this.getLock )
            {
                try
                {
                    oos.writeUnshared( led );
                    oos.flush();
                }
                catch ( IOException e )
                {
                    oos = null;
                    log.error( "Detected problem with connection: " + e );
                    throw e;
                }
            }
        }
    }

    /**
     * Sends commands to the lateral cache listener and gets a response. I'm afraid that we could
     * get into a pretty bad blocking situation here. This needs work. I just wanted to get some
     * form of get working. However, get is not recommended for performance reasons. If you have 10
     * laterals, then you have to make 10 failed gets to find out none of the caches have the item.
     * <p>
     * @param led descriptor to send
     * @return the deserialized response object, or null if none could be read
     * @throws IOException on any stream failure
     */
    public <K extends Serializable, V extends Serializable> Object sendAndReceive( LateralElementDescriptor<K, V> led )
        throws IOException
    {
        if ( led == null )
        {
            return null;
        }
        if ( address == null )
        {
            throw new IOException( "No remote host is set for LateralTCPSender." );
        }
        Object response = null;
        if ( oos != null )
        {
            // Synchronized to insure that the get requests to server from this
            // sender and the responses are processed in order, else you could
            // return the wrong item from the cache.
            // This is a big block of code. May need to re-think this strategy.
            // This may not be necessary.
            // Normal puts, etc to laterals do not have to be synchronized.
            synchronized ( this.getLock )
            {
                try
                {
                    try
                    {
                        // clean up input stream, nothing should be there yet.
                        if ( socket.getInputStream().available() > 0 )
                        {
                            socket.getInputStream().read( new byte[socket.getInputStream().available()] );
                        }
                    }
                    catch ( IOException ioe )
                    {
                        log.error( "Problem cleaning socket before send " + socket, ioe );
                        throw ioe;
                    }
                    // write object to listener
                    oos.writeUnshared( led );
                    oos.flush();
                    try
                    {
                        // TODO make configurable
                        // socket.setSoTimeout( 2000 );
                        ObjectInputStream ois = new ObjectInputStream( socket.getInputStream() );
                        response = ois.readObject();
                    }
                    catch ( IOException ioe )
                    {
                        String message = "Could not open ObjectInputStream to " + socket;
                        message += " SoTimeout [" + socket.getSoTimeout() + "] Connected [" + socket.isConnected() + "]";
                        log.error( message, ioe );
                        throw ioe;
                    }
                    catch ( Exception e )
                    {
                        // e.g. ClassNotFoundException while deserializing; a
                        // null response is returned in that case.
                        log.error( "Could not read response from " + socket, e );
                    }
                }
                catch ( IOException e )
                {
                    oos = null;
                    log.error( "Detected problem with connection: " + e );
                    throw e;
                }
            }
        }
        return response;
    }

    /**
     * Closes connection used by all LateralTCPSenders for this lateral connection. Dispose request
     * should come into the facade and be sent to all lateral cache services. The lateral cache
     * service will then call this method.
     * <p>
     * @param cache cache name, used only for logging
     * @throws IOException if closing the stream fails
     */
    public void dispose( String cache )
        throws IOException
    {
        if ( log.isInfoEnabled() )
        {
            log.info( "Dispose called for cache [" + cache + "]" );
        }
        // WILL CLOSE CONNECTION USED BY ALL
        // Guard against NPE when the connection was never established or was
        // already invalidated after a send failure.
        if ( oos != null )
        {
            oos.close();
        }
    }

    /**
     * @param tcpLateralCacheAttributes The tcpLateralCacheAttributes to set.
     */
    public void setTcpLateralCacheAttributes( ITCPLateralCacheAttributes tcpLateralCacheAttributes )
    {
        this.tcpLateralCacheAttributes = tcpLateralCacheAttributes;
    }

    /**
     * @return Returns the tcpLateralCacheAttributes.
     */
    public ITCPLateralCacheAttributes getTcpLateralCacheAttributes()
    {
        return tcpLateralCacheAttributes;
    }

    /**
     * @param remoteHost The remoteHost to set.
     */
    public void setRemoteHost( String remoteHost )
    {
        this.remoteHost = remoteHost;
    }

    /**
     * @return Returns the remoteHost.
     */
    public String getRemoteHost()
    {
        return remoteHost;
    }
}
| apache-2.0 |
jianbingfang/xhf | src/main/java/com/xthena/common/manager/XhfCommonPlainManager.java | 5519 | package com.xthena.common.manager;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.activiti.engine.IdentityService;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.delegate.DelegateExecution;
import org.activiti.engine.delegate.DelegateTask;
import org.activiti.engine.delegate.TaskListener;
import org.activiti.engine.runtime.ProcessInstance;
import org.springframework.transaction.annotation.Transactional;
import org.activiti.engine.ActivitiException;
import com.xthena.api.user.UserConnector;
import com.xthena.bpm.persistence.domain.BpmProcess;
import com.xthena.bpm.persistence.manager.BpmProcessManager;
import com.xthena.common.domain.XhfCommonPlain;
import com.xthena.core.hibernate.HibernateEntityDao;
import com.xthena.core.hibernate.PropertyFilter;
import com.xthena.core.mapper.BeanMapper;
import com.xthena.core.page.Page;
import com.xthena.security.util.SpringSecurityUtils;
import com.xthena.util.ConstValue;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.stereotype.Service;
/**
 * Spring service managing XhfCommonPlain (work plan) entities and their
 * Activiti approval workflow: starting the process on save, loading the plan
 * for a task, and completing the review task.
 * Also registered as an Activiti TaskListener (see {@link #notify}).
 */
@Service
public class XhfCommonPlainManager extends HibernateEntityDao<XhfCommonPlain> implements TaskListener{
    @Autowired
    private UserConnector userConnector;
    @Autowired
    private BpmProcessManager bpmProcessManager;
    @Autowired
    private ProcessEngine processEngine;

    /**
     * Pages through plans and enriches each row with the owner's display
     * name (stored transiently in fmemo5 for the UI).
     */
    @Transactional(readOnly = true)
    @Override
    public Page pagedQuery(Page page,List<PropertyFilter> propertyFilters ){
        Page page1=super.pagedQuery(page, propertyFilters);
        List<XhfCommonPlain> xhfCommonPlains= (List<XhfCommonPlain>) page1.getResult();
        for(XhfCommonPlain xhfCommonPlain:xhfCommonPlains){
            // fuserid holds the owner's user id; resolve it to a display name.
            xhfCommonPlain.setFmemo5(userConnector.findById(String.valueOf(xhfCommonPlain.getFuserid())).getDisplayName());
        }
        page1.setResult(xhfCommonPlains);
        return page1;
    }

    // Save the work plan and start the approval workflow.
    @Transactional
    public void savePlain(XhfCommonPlain xhfCommonPlain){
        // Look up the process definition for the plan-submission workflow.
        // (The "name" lookup value below is Chinese for "plan submission".)
        BpmProcess bpmProcess = bpmProcessManager.findUniqueBy("name", "计划申报");
        String processDefinitionId = bpmProcess.getBpmConfBase()
                .getProcessDefinitionId();
        String processDefinitionKey=bpmProcess.getBpmConfBase().getProcessDefinitionKey();
        IdentityService identityService = processEngine.getIdentityService();
        String userid=SpringSecurityUtils.getCurrentUserId();
        // Set the ID of the user starting the process; the engine records it
        // automatically in the activiti:initiator variable.
        try {
            identityService.setAuthenticatedUserId(userid);
        }
        catch (Exception e){
            // NOTE(review): failure here is deliberately swallowed; the
            // message is captured but unused — confirm this is intentional.
            String a=e.getMessage();
        }
        Map<String, Object> processParameters = new HashMap<String, Object>();
        // Stamp ownership and initial status ("未审阅" = not yet reviewed).
        xhfCommonPlain.setFuserid(Long.valueOf(SpringSecurityUtils.getCurrentUserId()));
        xhfCommonPlain.setFstatus("未审阅");
        save(xhfCommonPlain);
        try {
            // Start the process with the plan's id as business key.
            ProcessInstance processInstance = processEngine.getRuntimeService()
                    .startProcessInstanceById(processDefinitionId, xhfCommonPlain.getFid().toString(),
                            processParameters);
            // Debug output describing the new process instance.
            System.out.println("流程实例Id:"+processInstance.getId());
            System.out.println("流程定义Id:"+processInstance.getProcessDefinitionId());
            System.out.println("流程定义BusinessKey"+processInstance.getBusinessKey());
            // NOTE(review): despite the original labels, these calls return the
            // tenant id / ended flag, not the current node.
            System.out.println("是否位于state节点:"+processInstance.getTenantId());
            System.out.println("判断流程是否结束:"+processInstance.isEnded());
            System.out.println("------------------------>使流程继续向下执行");
            // Advance the process (signal call left commented out):
            //ProcessInstance instanceState=executionService.signalExecutionById(processInstance.getId());
            // Remember the process instance id on the plan for later lookups.
            xhfCommonPlain.setFtaskid(processInstance.getProcessInstanceId());
        }
        catch(Exception e)
        {
            // NOTE(review): a failed process start is only printed, and the
            // plan is still re-saved below — confirm this is intentional.
            String msg=e.getMessage();
            System.out.print(msg);
        }
        // Earlier variant that started the process by key instead of id:
        // try {
        // ProcessInstance processInstance1 = processEngine.getRuntimeService().startProcessInstanceByKey
        // (processDefinitionKey, xhfCommonPlain.getFid().toString(), processParameters);
        // xhfCommonPlain.setFtaskid(processInstance1.getProcessDefinitionId());
        // }
        // catch (ActivitiException e)
        // {
        // String msg=e.getMessage();
        // }
        save(xhfCommonPlain);
    }

    /**
     * Loads the plan associated with an Activiti task, by resolving the
     * task's process-instance id back to the plan's ftaskid.
     */
    @Transactional
    public XhfCommonPlain loadPlain(String taskId){
        String processInstanceId=processEngine.getTaskService().createTaskQuery().taskId(taskId).singleResult().getProcessInstanceId();
        XhfCommonPlain xhfCommonPlain=findUniqueBy("ftaskid", processInstanceId);
        return xhfCommonPlain;
    }

    /**
     * Applies the reviewer's edits to the persisted plan, marks it reviewed
     * ("已审阅"), and completes the workflow task.
     */
    @Transactional
    public void savePlain(XhfCommonPlain xhfCommonPlain,String taskId){
        BeanMapper beanMapper=new BeanMapper();
        XhfCommonPlain dest = get(xhfCommonPlain.getFid());
        beanMapper.copy(xhfCommonPlain, dest);
        dest.setFstatus("已审阅");
        save(dest);
        processEngine.getTaskService().complete(taskId);
    }

    /* // Save the work plan and start the process (unfinished variant).
    @Transactional
    public XhfCommonPlain newPlain(DelegateExecution execution){
    }*/

    /**
     * TaskListener callback: flags the plan belonging to the task's process
     * instance by setting fmemo1 to "2".
     * NOTE(review): the lookup uses "fmemo", whereas savePlain stores the
     * process-instance id in "ftaskid" — verify the intended field.
     */
    @Override
    public void notify(DelegateTask delegateTask) {
        XhfCommonPlain xhfCommonPlain=findUniqueBy("fmemo", delegateTask.getProcessInstanceId());
        xhfCommonPlain.setFmemo1("2");
        save(xhfCommonPlain);
    }
}
| apache-2.0 |
davidkarlsen/camel | components/camel-netty4/src/test/java/org/apache/camel/component/netty4/NettySSLClasspathTest.java | 2488 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty4;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Test;
public class NettySSLClasspathTest extends BaseNettyTest {

    @Override
    public boolean isUseRouteBuilder() {
        // Routes are registered inside the test itself, after SSL setup.
        return false;
    }

    @Test
    public void testSSLInOutWithNettyConsumer() throws Exception {
        if (isJavaVendor("ibm")) {
            // IBM JDKs do not ship the Sun security algorithms this relies on.
            return;
        }
        final String expectedReply = "When You Go Home, Tell Them Of Us And Say, For Your Tomorrow, We Gave Our Today.";
        RouteBuilder sslRoute = new RouteBuilder() {
            public void configure() {
                from("netty4:tcp://localhost:{{port}}?sync=true&ssl=true&passphrase=changeit&keyStoreResource=classpath:keystore.jks&trustStoreResource=classpath:keystore.jks")
                    .process(new Processor() {
                        public void process(Exchange exchange) throws Exception {
                            exchange.getOut().setBody(expectedReply);
                        }
                    });
            }
        };
        context.addRoutes(sslRoute);
        context.start();

        String request = "Epitaph in Kohima, India marking the WWII Battle of Kohima and Imphal, Burma Campaign - Attributed to John Maxwell Edmonds";
        String response = template.requestBody(
            "netty4:tcp://localhost:{{port}}?sync=true&ssl=true&passphrase=changeit&keyStoreResource=classpath:keystore.jks&trustStoreResource=classpath:keystore.jks",
            request, String.class);
        assertEquals(expectedReply, response);
    }
}
| apache-2.0 |
FITeagle/ft1 | interactors/sfa/src/main/java/org/fiteagle/interactors/sfa/rspec/manifest/LoginServiceContents.java | 6328 | //
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.09.16 at 10:18:13 AM CEST
//
package org.fiteagle.interactors.sfa.rspec.manifest;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import org.w3c.dom.Element;
/**
* <p>Java class for LoginServiceContents complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="LoginServiceContents">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <choice maxOccurs="unbounded" minOccurs="0">
* <group ref="{http://www.geni.net/resources/rspec/3}AnyExtension"/>
* <element ref="{http://www.geni.net/resources/rspec/3}user"/>
* </choice>
* <attGroup ref="{http://www.geni.net/resources/rspec/3}AnyExtension"/>
* <attribute name="authentication" use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="hostname" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="port" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <attribute name="username" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* <anyAttribute processContents='lax' namespace='##other'/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated class (see file header) — hand edits will be lost on
// regeneration, so the code below is left exactly as generated.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LoginServiceContents", propOrder = {
    "anyOrUser"
})
public class LoginServiceContents {

    // Mixed content: <user> elements plus arbitrary extension elements.
    @XmlElementRef(name = "user", namespace = "http://www.geni.net/resources/rspec/3", type = User.class, required = false)
    @XmlAnyElement(lax = true)
    protected List<Object> anyOrUser;
    @XmlAttribute(name = "authentication", required = true)
    @XmlSchemaType(name = "anySimpleType")
    protected String authentication;
    @XmlAttribute(name = "hostname")
    @XmlSchemaType(name = "anySimpleType")
    protected String hostname;
    @XmlAttribute(name = "port")
    @XmlSchemaType(name = "anySimpleType")
    protected String port;
    @XmlAttribute(name = "username")
    @XmlSchemaType(name = "anySimpleType")
    protected String username;
    // Catch-all for attributes from other namespaces.
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Gets the value of the anyOrUser property.
     *
     * <p>
     * Returns the live backing list (lazily created), not a snapshot; any
     * modification to the returned list is reflected in this JAXB object,
     * which is why there is no <CODE>set</CODE> method for this property.
     *
     * <p>
     * Objects of the following type(s) are allowed in the list:
     * {@link Object }
     * {@link User }
     * {@link Element }
     */
    public List<Object> getAnyOrUser() {
        if (anyOrUser == null) {
            anyOrUser = new ArrayList<Object>();
        }
        return this.anyOrUser;
    }

    /**
     * Gets the value of the authentication property (required attribute).
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getAuthentication() {
        return authentication;
    }

    /**
     * Sets the value of the authentication property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setAuthentication(String value) {
        this.authentication = value;
    }

    /**
     * Gets the value of the hostname property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getHostname() {
        return hostname;
    }

    /**
     * Sets the value of the hostname property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setHostname(String value) {
        this.hostname = value;
    }

    /**
     * Gets the value of the port property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getPort() {
        return port;
    }

    /**
     * Sets the value of the port property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setPort(String value) {
        this.port = value;
    }

    /**
     * Gets the value of the username property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getUsername() {
        return username;
    }

    /**
     * Sets the value of the username property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setUsername(String value) {
        this.username = value;
    }

    /**
     * Gets a map that contains attributes that aren't bound to any typed
     * property on this class.
     *
     * <p>
     * The map is keyed by attribute name with the attribute's string value.
     * It is live: new attributes may be added by updating the map directly,
     * which is why there is no setter.
     *
     * @return
     *     always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
| apache-2.0 |
nabilzhang/enunciate | obj-c-xml-client/src/main/java/com/webcohesion/enunciate/modules/objc_client/FunctionIdentifierForMethod.java | 4841 | /**
* Copyright © 2006-2016 Web Cohesion (info@webcohesion.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webcohesion.enunciate.modules.objc_client;
import com.webcohesion.enunciate.modules.jaxb.EnunciateJaxbContext;
import com.webcohesion.enunciate.modules.jaxb.model.Accessor;
import com.webcohesion.enunciate.modules.jaxb.model.EnumTypeDefinition;
import com.webcohesion.enunciate.modules.jaxb.model.TypeDefinition;
import freemarker.ext.beans.BeansWrapperBuilder;
import freemarker.template.Configuration;
import freemarker.template.TemplateMethodModelEx;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.PrimitiveType;
import javax.lang.model.type.TypeMirror;
import java.util.List;
/**
* Template method used to lookup the function identifier for a given type. Only Objective C primitive types
* have a function identifier. Returns null if no function identifier is provided.
*
* @author Ryan Heaton
*/
public class FunctionIdentifierForMethod implements TemplateMethodModelEx {
private final NameForTypeDefinitionMethod typeDefName;
private final EnunciateJaxbContext context;
  /**
   * Creates the template method with its collaborators.
   *
   * @param typeDefName strategy used to compute the generated name of a type
   *                    definition (used for enum function identifiers)
   * @param context     the JAXB context used to resolve adapted and declared
   *                    accessor types
   */
  public FunctionIdentifierForMethod(NameForTypeDefinitionMethod typeDefName, EnunciateJaxbContext context) {
    this.typeDefName = typeDefName;
    this.context = context;
  }
public Object exec(List list) throws TemplateModelException {
if (list.size() < 1) {
throw new TemplateModelException("The functionIdentifierFor method must have an accessor or type mirror as a parameter.");
}
TemplateModel from = (TemplateModel) list.get(0);
Object unwrapped = new BeansWrapperBuilder(Configuration.DEFAULT_INCOMPATIBLE_IMPROVEMENTS).build().unwrap(from);
TypeMirror typeMirror;
if (unwrapped instanceof Accessor) {
Accessor accessor = (Accessor) unwrapped;
if (accessor.isAdapted()) {
typeMirror = accessor.getAdapterType().getAdaptingType(accessor.getAccessorType(), this.context.getContext());
}
else {
typeMirror = accessor.getAccessorType();
}
}
else if (unwrapped instanceof TypeMirror) {
typeMirror = (TypeMirror) unwrapped;
}
else {
throw new TemplateModelException("The functionIdentifierFor method must have an accessor or type mirror as a parameter.");
}
if (typeMirror instanceof PrimitiveType) {
switch (typeMirror.getKind()) {
case BOOLEAN:
return "Boolean";
case BYTE:
return "Byte";
case CHAR:
return "Character";
case DOUBLE:
return "Double";
case FLOAT:
return "Float";
case INT:
return "Int";
case LONG:
return "Long";
case SHORT:
return "Short";
default:
return (typeMirror.getKind()).toString();
}
}
else if (typeMirror instanceof DeclaredType) {
TypeElement declaration = (TypeElement) ((DeclaredType) typeMirror).asElement();
TypeDefinition typeDefinition = this.context.findTypeDefinition(declaration);
if (typeDefinition != null) {
if (typeDefinition instanceof EnumTypeDefinition) {
return typeDefName.calculateName(typeDefinition);
}
}
else {
String classname = declaration.getQualifiedName().toString();
if (Boolean.class.getName().equals(classname)) {
return "Boolean";
}
else if (Byte.class.getName().equals(classname)) {
return "Byte";
}
else if (Character.class.getName().equals(classname)) {
return "UnsignedShort";
}
else if (Double.class.getName().equals(classname)) {
return "Double";
}
else if (Float.class.getName().equals(classname)) {
return "Float";
}
else if (Integer.class.getName().equals(classname)) {
return "Int";
}
else if (Long.class.getName().equals(classname)) {
return "Long";
}
else if (Short.class.getName().equals(classname)) {
return "Short";
}
}
}
return null;
}
} | apache-2.0 |
subutai-io/base | management/server/subutai-common/src/main/java/io/subutai/common/metric/AlertType.java | 118 | package io.subutai.common.metric;
/**
 * Alert type: identifies the scope of a raised alert — an alert concerning a
 * whole environment versus an alert concerning an individual peer.
 */
public enum AlertType
{
    ENVIRONMENT_ALERT, PEER_ALERT
}
| apache-2.0 |
SmarterApp/TechnologyReadinessTool | readiness/src/main/java/net/techreadiness/customer/action/task/org/create/AddAction.java | 2396 | package net.techreadiness.customer.action.task.org.create;
import static net.techreadiness.security.CorePermissionCodes.CORE_CUSTOMER_ORG_CREATE;
import java.util.AbstractMap.SimpleEntry;
import java.util.List;
import javax.inject.Inject;
import net.techreadiness.annotation.CoreSecured;
import net.techreadiness.service.ConfigService;
import net.techreadiness.service.common.ViewDef;
import net.techreadiness.service.common.ViewDef.ViewDefTypeCode;
import net.techreadiness.service.object.Org;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.Result;
/**
 * Struts action backing the "create organization" task screen. Populates the
 * organization-type choices and the org view definition for the add form.
 */
public class AddAction extends BaseOrgAction {
    private static final long serialVersionUID = 1L;

    @Inject
    private ConfigService configService;

    /** View definition describing the org form fields. */
    private ViewDef viewDef;
    /** (orgTypeId, name) pairs selectable as the new org's type. */
    private List<SimpleEntry<Long, String>> orgTypes;
    private List<Org> orgs;

    /**
     * Renders the add-org form, registering the org-create filter selection handler
     * in the conversation scope.
     */
    @Override
    @CoreSecured({ CORE_CUSTOMER_ORG_CREATE })
    @Action(results = { @Result(name = "success", location = "/task/org/add.jsp"),@Result(name = "input", location = "/task/org/add.jsp") })
    public String execute() {
        conversation.put("orgFilterSelectionHandler", "orgFilterSelectionHandlerForOrgCreate");
        populateForm();
        return SUCCESS;
    }

    /** AJAX refresh of the form after the parent organization selection changes. */
    @Action(value = "updateForm", results = { @Result(name = "success", location = "/task/org/add.jsp", params = {
            "parentOrganizationId", "%{parentOrganizationId}", "ajax", "true" }) })
    public String updateForm() {
        populateForm();
        return SUCCESS;
    }

    /**
     * Derives the parent org (first selection, if any) and loads the child org types
     * and the org view definition used by the form.
     */
    private void populateForm() {
        Long parentOrgTypeId = null;
        List<Org> orgSelection = getOrgSelectionHandler().getSelection();
        if (!orgSelection.isEmpty()) {
            Org parentOrg = orgSelection.get(0);
            if (parentOrg != null) {
                parentOrgTypeId = parentOrg.getOrgTypeId();
                parentOrganizationId = parentOrg.getOrgId();
            }
        }
        orgTypes = organizationService.findChildOrgTypesByParentOrgType(getServiceContext(), parentOrgTypeId);
        viewDef = configService.getViewDefinition(getServiceContext(), ViewDefTypeCode.ORG);
    }

    public ViewDef getViewDef() {
        return viewDef;
    }

    public List<SimpleEntry<Long, String>> getOrgTypes() {
        return orgTypes;
    }

    public void setOrgTypes(List<SimpleEntry<Long, String>> orgTypes) {
        this.orgTypes = orgTypes;
    }

    public void setOrgs(List<Org> orgs) {
        this.orgs = orgs;
    }

    public List<Org> getOrgs() {
        return orgs;
    }
}
| apache-2.0 |
apache/portals-pluto | pluto-portal-driver/src/main/java/org/apache/pluto/driver/util/RenderData.java | 1191 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pluto.driver.util;
/**
 * Immutable holder pairing a piece of rendered content with its MIME content type.
 *
 * @author Neil Griffin
 */
public class RenderData {

    /** The rendered body. */
    private final String content;

    /** The MIME type describing {@link #content}. */
    private final String contentType;

    /**
     * @param content the rendered body
     * @param contentType the MIME content type of {@code content}
     */
    public RenderData(String content, String contentType) {
        this.content = content;
        this.contentType = contentType;
    }

    public String getContent() {
        return content;
    }

    public String getContentType() {
        return contentType;
    }
}
| apache-2.0 |
jwren/intellij-community | platform/platform-impl/src/com/intellij/ui/popup/WizardPopup.java | 15024 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.popup;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.ui.PopupBorder;
import com.intellij.ui.ScreenUtil;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.UiInterceptors;
import com.intellij.ui.popup.async.AsyncPopupImpl;
import com.intellij.ui.popup.async.AsyncPopupStep;
import com.intellij.ui.popup.list.ComboBoxPopup;
import com.intellij.ui.popup.list.ListPopupImpl;
import com.intellij.ui.popup.tree.TreePopupImpl;
import com.intellij.ui.popup.util.MnemonicsSearch;
import com.intellij.ui.speedSearch.ElementFilter;
import com.intellij.ui.speedSearch.SpeedSearch;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.TimerUtil;
import org.intellij.lang.annotations.JdkConstants;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.Collections;
/**
 * Base class for step-based ("wizard") popups such as list and tree popups: each popup
 * wraps a {@link PopupStep} and may spawn a child popup for a sub-step. Handles key
 * dispatch (including speed search and mnemonics), auto-selection timing, and keeping
 * the popup positioned relative to its owner window.
 */
public abstract class WizardPopup extends AbstractPopup implements ActionListener, ElementFilter {
  private static final Logger LOG = Logger.getInstance(WizardPopup.class);
  // Upper bound on popup content size; width is effectively unbounded, height capped at 600.
  private static final Dimension MAX_SIZE = new Dimension(Integer.MAX_VALUE, 600);

  // Horizontal inset used when positioning a child step popup next to its parent.
  protected static final int STEP_X_PADDING = 2;

  // Parent popup in the wizard chain, or null for the root popup.
  private final WizardPopup myParent;

  protected final PopupStep<Object> myStep;
  protected WizardPopup myChild;

  // Fires actionPerformed() after a delay (registry "ide.popup.auto.delay", default 500ms)
  // to drive auto-selection of submenu items.
  private final Timer myAutoSelectionTimer =
    TimerUtil.createNamedTimer("Wizard auto-selection", Registry.intValue("ide.popup.auto.delay", 500), this);

  private final MnemonicsSearch myMnemonicsSearch;
  // The value in the parent step that this popup was opened for.
  private Object myParentValue;

  // State for following the owner window when it moves (see registerAutoMove()).
  private Point myLastOwnerPoint;
  private Window myOwnerWindow;
  private MyComponentAdapter myOwnerListener;

  private final ActionMap myActionMap = new ActionMap();
  private final InputMap myInputMap = new InputMap();
  // True once a KEY_PRESSED was seen by this popup; used to ignore stray KEY_RELEASED
  // events for keys pressed before the popup became active.
  private boolean myKeyPressedReceived;

  /**
   * @deprecated use {@link #WizardPopup(Project, JBPopup, PopupStep)}
   */
  @Deprecated(forRemoval = true)
  public WizardPopup(@NotNull PopupStep<Object> aStep) {
    this(CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext()), null, aStep);
  }

  public WizardPopup(@Nullable Project project, @Nullable JBPopup aParent, @NotNull PopupStep<Object> aStep) {
    myParent = (WizardPopup) aParent;
    myStep = aStep;

    mySpeedSearch.setEnabled(myStep.isSpeedSearchEnabled());

    final JComponent content = createContent();
    content.putClientProperty(KEY, this);

    JComponent popupComponent = createPopupComponent(content);

    init(project, popupComponent, getPreferredFocusableComponent(), true, true, true, null,
         isResizable(), aStep.getTitle(), null, true, Collections.emptySet(), false, null, null, null, false, null, true, false, true, null, 0f,
         null, true, false, new Component[0], null, SwingConstants.LEFT, true, Collections.emptyList(),
         null, null, false, true, true, null, true, null);

    // Shift+Escape: close the whole popup chain (or just reset an active speed-search filter).
    registerAction("disposeAll", KeyEvent.VK_ESCAPE, InputEvent.SHIFT_MASK, new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        if (mySpeedSearch.isHoldingFilter()) {
          mySpeedSearch.reset();
        }
        else {
          disposeAll();
        }
      }
    });

    // Escape: go back one step (or close everything when at the root).
    AbstractAction goBackAction = new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        goBack();
      }
    };

    registerAction("goBack3", KeyEvent.VK_ESCAPE, 0, goBackAction);

    myMnemonicsSearch = new MnemonicsSearch(this) {
      @Override
      protected void select(Object value) {
        onSelectByMnemonic(value);
      }
    };
  }

  /** Wraps the step content into a scroll pane with horizontal scrolling disabled. */
  @NotNull
  protected JComponent createPopupComponent(JComponent content) {
    JScrollPane scrollPane = createScrollPane(content);
    scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED);
    scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);

    scrollPane.getHorizontalScrollBar().setBorder(null);

    scrollPane.getActionMap().get("unitScrollLeft").setEnabled(false);
    scrollPane.getActionMap().get("unitScrollRight").setEnabled(false);

    scrollPane.setBorder(JBUI.Borders.empty());
    return scrollPane;
  }

  @NotNull
  protected JScrollPane createScrollPane(JComponent content) {
    return ScrollPaneFactory.createScrollPane(content);
  }

  // Disposes the whole popup chain starting from the active root and notifies
  // the root step that it was canceled.
  private void disposeAll() {
    WizardPopup root = PopupDispatcher.getActiveRoot();
    disposeAllParents(null);
    root.getStep().canceled();
  }

  /** Closes this popup's children (or the whole chain at the root); resets speed search first. */
  public void goBack() {
    if (mySpeedSearch.isHoldingFilter()) {
      mySpeedSearch.reset();
      return;
    }

    if (myParent != null) {
      myParent.disposeChildren();
    }
    else {
      disposeAll();
    }
  }

  protected abstract JComponent createContent();

  @Override
  public void dispose() {
    myAutoSelectionTimer.stop();
    super.dispose();
    PopupDispatcher.unsetShowing(this);
    PopupDispatcher.clearRootIfNeeded(this);

    // Stop following the owner window once disposed.
    if (myOwnerWindow != null && myOwnerListener != null) {
      myOwnerWindow.removeComponentListener(myOwnerListener);
    }
  }


  public void disposeChildren() {
    if (myChild != null) {
      myChild.disposeChildren();
      Disposer.dispose(myChild);
      myChild = null;
    }
  }

  @Override
  public void show(@NotNull final Component owner, final int aScreenX, final int aScreenY, final boolean considerForcedXY) {
    if (UiInterceptors.tryIntercept(this)) return;
    LOG.assertTrue (!isDisposed());

    Rectangle targetBounds = new Rectangle(new Point(aScreenX, aScreenY), getContent().getPreferredSize());

    if (getParent() != null) {
      final Rectangle parentBounds = getParent().getBounds();
      parentBounds.x += STEP_X_PADDING;
      parentBounds.width -= STEP_X_PADDING * 2;
      ScreenUtil.moveToFit(targetBounds, ScreenUtil.getScreenRectangle(
        parentBounds.x + parentBounds.width / 2,
        parentBounds.y + parentBounds.height / 2), null);
      // If the child would overlap its parent after fitting to the screen,
      // place it to the left of the parent instead.
      if (parentBounds.intersects(targetBounds)) {
        targetBounds.x = getParent().getBounds().x - targetBounds.width - STEP_X_PADDING;
      }
    } else {
      ScreenUtil.moveToFit(targetBounds, ScreenUtil.getScreenRectangle(aScreenX + 1, aScreenY + 1), null);
    }

    if (getParent() == null) {
      PopupDispatcher.setActiveRoot(this);
    }
    else {
      PopupDispatcher.setShowing(this);
    }

    LOG.assertTrue (!isDisposed(), "Disposed popup, parent="+getParent());
    super.show(owner, targetBounds.x, targetBounds.y, true);
  }

  @Override
  protected void afterShow() {
    super.afterShow();
    registerAutoMove();
  }

  // Listens for the owner window moving so the popup can follow it on screen.
  private void registerAutoMove() {
    if (myOwner != null) {
      myOwnerWindow = SwingUtilities.getWindowAncestor(myOwner);
      if (myOwnerWindow != null) {
        myLastOwnerPoint = myOwnerWindow.getLocationOnScreen();
        myOwnerListener = new MyComponentAdapter();
        myOwnerWindow.addComponentListener(myOwnerListener);
      }
    }
  }

  // Shifts this popup by the same delta the owner window just moved by.
  private void processParentWindowMoved() {
    if (isDisposed()) return;

    final Point newOwnerPoint = myOwnerWindow.getLocationOnScreen();

    int deltaX = myLastOwnerPoint.x - newOwnerPoint.x;
    int deltaY = myLastOwnerPoint.y - newOwnerPoint.y;

    myLastOwnerPoint = newOwnerPoint;

    final Window wnd = SwingUtilities.getWindowAncestor(getContent());
    if (!wnd.isShowing()) return;
    final Point current = wnd.getLocationOnScreen();

    setLocation(new Point(current.x - deltaX, current.y - deltaY));
  }

  protected abstract JComponent getPreferredFocusableComponent();

  @Override
  public void cancel(InputEvent e) {
    super.cancel(e);
    disposeChildren();
    Disposer.dispose(this);
    getStep().canceled();
  }

  @Override
  public boolean isCancelKeyEnabled() {
    // While a speed-search filter is active, Escape must reset the filter instead of canceling.
    return super.isCancelKeyEnabled() && !mySpeedSearch.isHoldingFilter();
  }

  protected void disposeAllParents(InputEvent e) {
    myDisposeEvent = e;
    Disposer.dispose(this);
    if (myParent != null) {
      myParent.disposeAllParents(null);
    }
  }

  public final void registerAction(@NonNls String aActionName, int aKeyCode, @JdkConstants.InputEventMask int aModifier, Action aAction) {
    myInputMap.put(KeyStroke.getKeyStroke(aKeyCode, aModifier), aActionName);
    myActionMap.put(aActionName, aAction);
  }

  protected String getActionForKeyStroke(final KeyStroke keyStroke) {
    return (String) myInputMap.get(keyStroke);
  }

  public final void registerAction(@NonNls String aActionName, KeyStroke keyStroke, Action aAction) {
    myInputMap.put(keyStroke, aActionName);
    myActionMap.put(aActionName, aAction);
  }

  protected abstract InputMap getInputMap();

  protected abstract ActionMap getActionMap();

  protected final void setParentValue(Object parentValue) {
    myParentValue = parentValue;
  }

  @Override
  @NotNull
  protected MyContentPanel createContentPanel(final boolean resizable, final @NotNull PopupBorder border, final boolean isToDrawMacCorner) {
    return new MyContainer(border);
  }

  protected boolean isResizable() {
    return false;
  }

  // Content panel that clamps its preferred size to MAX_SIZE and the current screen.
  private static final class MyContainer extends MyContentPanel {
    private MyContainer(@NotNull PopupBorder border) {
      super(border);
      setOpaque(true);
      setFocusCycleRoot(true);
    }

    @Override
    public Dimension getPreferredSize() {
      if (isPreferredSizeSet()) {
        return super.getPreferredSize();
      }
      final Component focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
      Point p = null;
      if (focusOwner != null && focusOwner.isShowing()) {
        p = focusOwner.getLocationOnScreen();
      }

      return computeNotBiggerDimension(super.getPreferredSize().getSize(), p);
    }

    // Caps the content size: height to MAX_SIZE.height (or 3/4 of the screen at
    // locationOnScreen), width to MAX_SIZE.width plus scrollbar width when scrolling kicks in.
    private static Dimension computeNotBiggerDimension(Dimension ofContent, final Point locationOnScreen) {
      int resultHeight = ofContent.height > MAX_SIZE.height + 50 ? MAX_SIZE.height : ofContent.height;
      if (locationOnScreen != null) {
        final Rectangle r = ScreenUtil.getScreenRectangle(locationOnScreen);
        resultHeight = Math.min(ofContent.height, r.height - (r.height / 4));
      }

      int resultWidth = Math.min(ofContent.width, MAX_SIZE.width);

      if (ofContent.height > MAX_SIZE.height) {
        resultWidth += ScrollPaneFactory.createScrollPane().getVerticalScrollBar().getPreferredSize().getWidth();
      }

      return new Dimension(resultWidth, resultHeight);
    }
  }

  public WizardPopup getParent() {
    return myParent;
  }

  public PopupStep getStep() {
    return myStep;
  }

  /**
   * Routes a key event through registered keystroke actions, mnemonics and speed search.
   * Returns true when the event was handled/consumed.
   */
  public final boolean dispatch(KeyEvent event) {
    if (event.getID() == KeyEvent.KEY_PRESSED) {
      myKeyPressedReceived = true;
      final KeyStroke stroke = KeyStroke.getKeyStroke(event.getKeyCode(), event.getModifiers(), false);
      if (proceedKeyEvent(event, stroke)) return true;
    }
    else if (!myKeyPressedReceived && !(this instanceof ComboBoxPopup)) {
      // key was pressed while this popup wasn't active, ignore the event
      return false;
    }

    if (event.getID() == KeyEvent.KEY_RELEASED) {
      final KeyStroke stroke = KeyStroke.getKeyStroke(event.getKeyCode(), event.getModifiers(), true);
      return proceedKeyEvent(event, stroke);
    }

    myMnemonicsSearch.processKeyEvent(event);
    mySpeedSearch.processKeyEvent(event);

    if (event.isConsumed()) return true;
    process(event);
    return event.isConsumed();
  }

  // Runs the enabled action bound to the keystroke, if any; consumes the event on success.
  private boolean proceedKeyEvent(KeyEvent event, KeyStroke stroke) {
    if (myInputMap.get(stroke) != null) {
      final Action action = myActionMap.get(myInputMap.get(stroke));
      if (action != null && action.isEnabled()) {
        action.actionPerformed(new ActionEvent(getContent(), event.getID(), "", event.getWhen(), event.getModifiers()));
        event.consume();
        return true;
      }
    }
    return false;
  }

  // Hook for subclasses to handle key events not consumed by actions/search.
  protected void process(KeyEvent aEvent) {
  }

  public Rectangle getBounds() {
    JComponent content = isDisposed() ? null : getContent();
    return content == null ? null : new Rectangle(content.getLocationOnScreen(), content.getSize());
  }

  // Factory for child step popups; AsyncPopupStep must be checked before ListPopupStep.
  protected WizardPopup createPopup(WizardPopup parent, PopupStep step, Object parentValue) {
    if (step instanceof AsyncPopupStep) {
      return new AsyncPopupImpl(getProject(), parent, (AsyncPopupStep)step, parentValue);
    }

    if (step instanceof ListPopupStep) {
      return new ListPopupImpl(getProject(), parent, (ListPopupStep)step, parentValue);
    }
    else if (step instanceof TreePopupStep) {
      return new TreePopupImpl(getProject(), parent, (TreePopupStep)step, parentValue);
    }
    else {
      throw new IllegalArgumentException(step.getClass().toString());
    }
  }

  // Invoked by myAutoSelectionTimer when the auto-selection delay elapses.
  @Override
  public final void actionPerformed(ActionEvent e) {
    myAutoSelectionTimer.stop();
    if (getStep().isAutoSelectionEnabled()) {
      onAutoSelectionTimer();
    }
  }

  protected final void restartTimer() {
    if (!myAutoSelectionTimer.isRunning()) {
      myAutoSelectionTimer.start();
    }
    else {
      myAutoSelectionTimer.restart();
    }
  }

  protected final void stopTimer() {
    myAutoSelectionTimer.stop();
  }

  protected void onAutoSelectionTimer() {

  }

  // ElementFilter: decides whether a step value passes the active speed-search filter.
  @Override
  public boolean shouldBeShowing(Object value) {
    if (!myStep.isSpeedSearchEnabled()) return true;
    SpeedSearchFilter<Object> filter = myStep.getSpeedSearchFilter();
    if (filter == null) return true;
    if (!filter.canBeHidden(value)) return true;
    if (!mySpeedSearch.isHoldingFilter()) return true;
    String text = filter.getIndexedString(value);
    return mySpeedSearch.shouldBeShowing(text);
  }

  public SpeedSearch getSpeedSearch() {
    return mySpeedSearch;
  }

  protected void onSelectByMnemonic(Object value) {

  }

  protected abstract void onChildSelectedFor(Object value);

  protected final void notifyParentOnChildSelection() {
    if (myParent == null || myParentValue == null) return;
    myParent.onChildSelectedFor(myParentValue);
  }

  private class MyComponentAdapter extends ComponentAdapter {
    @Override
    public void componentMoved(final ComponentEvent e) {
      processParentWindowMoved();
    }
  }

  // Final runnable and "ok" state are always delegated to the root of the popup chain.
  @Override
  public final void setFinalRunnable(Runnable runnable) {
    if (getParent() == null) {
      super.setFinalRunnable(runnable);
    } else {
      getParent().setFinalRunnable(runnable);
    }
  }

  @Override
  public void setOk(boolean ok) {
    if (getParent() == null) {
      super.setOk(ok);
    } else {
      getParent().setOk(ok);
    }
  }
}
| apache-2.0 |
dropbox/bazel | src/main/java/com/google/devtools/build/lib/packages/SkylarkProvider.java | 10417 | // Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.packages.SkylarkInfo.Layout;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec;
import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter;
import com.google.devtools.build.lib.syntax.Environment;
import com.google.devtools.build.lib.syntax.FunctionSignature;
import com.google.devtools.build.lib.syntax.SkylarkType;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Nullable;
/**
* A provider defined in Skylark rather than in native code.
*
* <p>This is a result of calling the {@code provider()} function from Skylark ({@link
* com.google.devtools.build.lib.analysis.skylark.SkylarkRuleClassFunctions#provider}).
*
* <p>{@code SkylarkProvider}s may be either schemaless or schemaful. Instances of schemaless
* providers can have any set of fields on them, whereas instances of schemaful providers may have
* only the fields that are named in the schema. Schemaful provider instances are more space
* efficient since they do not use maps; see {@link SkylarkInfo}.
*
* <p>Exporting a {@code SkylarkProvider} creates a key that is used to uniquely identify it.
* Usually a provider is exported by calling {@link #export}, but a test may wish to just create a
* pre-exported provider directly. Exported providers use only their key for {@link #equals} and
* {@link #hashCode}.
*/
public class SkylarkProvider extends ProviderFromFunction implements SkylarkExportable {

  // Signature for schemaless providers: accepts arbitrary keyword arguments (**kwargs).
  private static final FunctionSignature.WithValues<Object, SkylarkType> SCHEMALESS_SIGNATURE =
      FunctionSignature.WithValues.create(FunctionSignature.KWARGS);

  /** Default value for {@link #errorMessageFormatForUnknownField}. */
  private static final String DEFAULT_ERROR_MESSAGE_FORMAT = "Object has no '%s' attribute.";

  /**
   * For schemaful providers, a layout describing the allowed fields and their order in an
   * array-based representation. For schemaless providers, null.
   */
  @Nullable
  private final Layout layout;

  /** Null iff this provider has not yet been exported. */
  @Nullable
  private SkylarkKey key;

  /** Error message format. Reassigned upon exporting. */
  private String errorMessageFormatForUnknownField;

  /**
   * Creates an unexported {@link SkylarkProvider} with no schema.
   *
   * <p>The resulting object needs to be exported later (via {@link #export}).
   *
   * @param location the location of the Skylark definition for this provider (tests may use {@link
   *     Location#BUILTIN})
   */
  public static SkylarkProvider createUnexportedSchemaless(Location location) {
    return new SkylarkProvider(/*key=*/ null, /*fields=*/ null, location);
  }

  /**
   * Creates an unexported {@link SkylarkProvider} with a schema.
   *
   * <p>The resulting object needs to be exported later (via {@link #export}).
   *
   * @param fields a list of allowed field names for instances of this provider, in some canonical
   *     order
   * @param location the location of the Skylark definition for this provider (tests may use {@link
   *     Location#BUILTIN})
   */
  public static SkylarkProvider createUnexportedSchemaful(
      Iterable<String> fields, Location location) {
    return new SkylarkProvider(
        /*key=*/ null, fields == null ? null : ImmutableList.copyOf(fields), location);
  }

  /**
   * Creates an exported {@link SkylarkProvider} with no schema.
   *
   * @param key the key that identifies this provider
   * @param location the location of the Skylark definition for this provider (tests may use {@link
   *     Location#BUILTIN})
   */
  public static SkylarkProvider createExportedSchemaless(SkylarkKey key, Location location) {
    return new SkylarkProvider(key, /*fields=*/ null, location);
  }

  /**
   * Creates an exported {@link SkylarkProvider} with a schema.
   *
   * @param key the key that identifies this provider
   * @param fields a list of allowed field names for instances of this provider, in some canonical
   *     order
   * @param location the location of the Skylark definition for this provider (tests may use {@link
   *     Location#BUILTIN})
   */
  public static SkylarkProvider createExportedSchemaful(
      SkylarkKey key, Iterable<String> fields, Location location) {
    return new SkylarkProvider(key, fields == null ? null : ImmutableList.copyOf(fields), location);
  }

  /**
   * Constructs the provider.
   *
   * <p>If {@code key} is null, the provider is unexported. If {@code fields} is null, the provider
   * is schemaless.
   */
  private SkylarkProvider(
      @Nullable SkylarkKey key, @Nullable ImmutableList<String> fields, Location location) {
    // We override getName() in order to use the name that is assigned when export() is called.
    // Hence BaseFunction's constructor gets a null name.
    super(/*name=*/ null, buildSignature(fields), location);
    this.layout = fields == null ? null : new Layout(fields);
    this.key = key;  // possibly null
    this.errorMessageFormatForUnknownField =
        key == null ? DEFAULT_ERROR_MESSAGE_FORMAT
            : makeErrorMessageFormatForUnknownField(key.getExportedName());
  }

  // Schemaless providers take **kwargs; schemaful providers take named-only params, one per field.
  private static FunctionSignature.WithValues<Object, SkylarkType> buildSignature(
      @Nullable  Iterable<String> fields) {
    if (fields == null) {
      return SCHEMALESS_SIGNATURE;
    }
    return FunctionSignature.WithValues.create(
        FunctionSignature.namedOnly(0, ImmutableList.copyOf(fields).toArray(new String[0])));
  }

  @Override
  protected SkylarkInfo createInstanceFromSkylark(Object[] args, Environment env, Location loc) {
    if (layout == null) {
      // Schemaless: the sole argument is the **kwargs map.
      @SuppressWarnings("unchecked")
      Map<String, Object> kwargs = (Map<String, Object>) args[0];
      return SkylarkInfo.createSchemaless(this, kwargs, loc);
    } else {
      // Note: This depends on the layout map using the same ordering as args.
      return SkylarkInfo.createSchemaful(this, layout, args, loc);
    }
  }

  @Override
  public boolean isExported() {
    return key != null;
  }

  @Override
  public SkylarkKey getKey() {
    Preconditions.checkState(isExported());
    return key;
  }

  @Override
  public String getName() {
    return key != null ? key.getExportedName() : "<no name>";
  }

  @Override
  public String getPrintableName() {
    return getName();
  }

  /**
   * Returns the list of fields used to define this provider, or null if the provider is schemaless.
   *
   * <p>Note: In the future, this method may be replaced by one that returns more detailed schema
   * information (if/when the allowed schemas for structs become more complex).
   */
  @Nullable
  public ImmutableList<String> getFields() {
    if (layout == null) {
      return null;
    }
    return ImmutableList.copyOf(layout.getFields());
  }

  /** Returns the layout, or null if the provider is schemaless. */
  @VisibleForTesting
  @Nullable
  Layout getLayout() {
    return layout;
  }

  @Override
  public String getErrorMessageFormatForUnknownField() {
    return errorMessageFormatForUnknownField;
  }

  // Exporting is one-way: assigns the identifying key and a name-specific error message.
  @Override
  public void export(Label extensionLabel, String exportedName) {
    Preconditions.checkState(!isExported());
    this.key = new SkylarkKey(extensionLabel, exportedName);
    this.errorMessageFormatForUnknownField = makeErrorMessageFormatForUnknownField(exportedName);
  }

  private static String makeErrorMessageFormatForUnknownField(String exportedName) {
    return String.format("'%s' object has no attribute '%%s'", exportedName);
  }

  // Exported providers hash/compare by key; unexported ones by object identity.
  @Override
  public int hashCode() {
    if (isExported()) {
      return getKey().hashCode();
    }
    return System.identityHashCode(this);
  }

  @Override
  public boolean equals(@Nullable Object otherObject) {
    if (!(otherObject instanceof SkylarkProvider)) {
      return false;
    }
    SkylarkProvider other = (SkylarkProvider) otherObject;

    if (this.isExported() && other.isExported()) {
      return this.getKey().equals(other.getKey());
    } else {
      return this == other;
    }
  }

  @Override
  public boolean isImmutable() {
    // Hash code for non exported constructors may be changed
    return isExported();
  }

  @Override
  public void repr(SkylarkPrinter printer) {
    printer.append("<provider>");
  }

  /**
   * A serializable representation of Skylark-defined {@link SkylarkProvider} that uniquely
   * identifies all {@link SkylarkProvider}s that are exposed to SkyFrame.
   */
  @AutoCodec
  public static class SkylarkKey extends Key {
    private final Label extensionLabel;
    private final String exportedName;

    public SkylarkKey(Label extensionLabel, String exportedName) {
      this.extensionLabel = Preconditions.checkNotNull(extensionLabel);
      this.exportedName = Preconditions.checkNotNull(exportedName);
    }

    public Label getExtensionLabel() {
      return extensionLabel;
    }

    public String getExportedName() {
      return exportedName;
    }

    @Override
    public String toString() {
      return exportedName;
    }

    @Override
    public int hashCode() {
      return Objects.hash(extensionLabel, exportedName);
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (!(obj instanceof SkylarkKey)) {
        return false;
      }
      SkylarkKey other = (SkylarkKey) obj;
      return Objects.equals(this.extensionLabel, other.extensionLabel)
          && Objects.equals(this.exportedName, other.exportedName);
    }
  }
}
| apache-2.0 |
streamsets/datacollector | container/src/main/java/com/streamsets/datacollector/util/SysInfoModule.java | 1033 | /*
* Copyright 2020 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.util;
import com.streamsets.datacollector.main.RuntimeInfo;
import dagger.Module;
import dagger.Provides;
import javax.inject.Singleton;
// Dagger module supplying the singleton SysInfo instance.
// "complete = false" because RuntimeInfo is provided by another module.
@Module(
    library = true,
    complete = false,
    injects = { SysInfo.class }
)
public class SysInfoModule {

  /** Builds the singleton {@link SysInfo} backed by the given runtime info. */
  @Provides
  @Singleton
  public SysInfo provideSysInfo(
    RuntimeInfo runtimeInfo
  ) {
    return new SysInfo(runtimeInfo);
  }
}
| apache-2.0 |
galpha/gradoop | gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/tostring/functions/ConcatGraphHeadStrings.java | 1842 | /*
* Copyright © 2014 - 2021 Leipzig University (Database Research Group)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradoop.flink.model.impl.operators.tostring.functions;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.util.Collector;
import org.gradoop.flink.model.impl.operators.tostring.tuples.GraphHeadString;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.gradoop.flink.model.impl.operators.tostring.CanonicalAdjacencyMatrixBuilder.LINE_SEPARATOR;
/**
* concatenates the sorted string representations of graph heads to represent a
* collection
*/
public class ConcatGraphHeadStrings
  implements GroupReduceFunction<GraphHeadString, String> {

  /**
   * Collects the non-empty graph head labels, sorts them, and emits them as a
   * single line-separated string representing the collection.
   */
  @Override
  public void reduce(Iterable<GraphHeadString> graphHeadLabels,
    Collector<String> collector) throws Exception {

    List<String> sortedLabels = new ArrayList<>();
    for (GraphHeadString headString : graphHeadLabels) {
      String label = headString.getLabel();
      // drop unlabeled graph heads
      if (!label.isEmpty()) {
        sortedLabels.add(label);
      }
    }
    Collections.sort(sortedLabels);

    collector.collect(StringUtils.join(sortedLabels, LINE_SEPARATOR));
  }
}
| apache-2.0 |
msebire/intellij-community | platform/vcs-impl/src/com/intellij/ide/todo/CustomChangelistTodosTreeBuilder.java | 7011 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.todo;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.LocalChangeList;
import com.intellij.openapi.vcs.checkin.TodoCheckinHandlerWorker;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.PsiTodoSearchHelper;
import com.intellij.psi.search.TodoItem;
import com.intellij.psi.search.TodoPattern;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.*;
/**
 * A {@link TodoTreeBuilder} that is restricted to an explicit set of TODO items,
 * e.g. the TODOs found in the changes of one changelist. Only files that were
 * present in the initial item set are ever shown; their items are re-scanned
 * from the current changes when files become dirty.
 *
 * @author irengrig
 */
public class CustomChangelistTodosTreeBuilder extends TodoTreeBuilder {
  public static final TodoItem[] EMPTY_ITEMS = new TodoItem[0];
  private final Project myProject;
  private final String myTitle;
  // all currently known TODO items, grouped by the file they occur in
  private final MultiMap<PsiFile, TodoItem> myMap;
  // files captured from the initial item set; items in other files are ignored
  private final Set<PsiFile> myIncludedFiles;
  private PsiTodoSearchHelper myPsiTodoSearchHelper;
  private final ChangeListManager myChangeListManager;
  public CustomChangelistTodosTreeBuilder(JTree tree, Project project, final String title,
                                          final Collection<? extends TodoItem> todoItems) {
    super(tree, project);
    myProject = project;
    myTitle = title;
    myMap = new MultiMap<>();
    myIncludedFiles = new HashSet<>();
    myChangeListManager = ChangeListManager.getInstance(myProject);
    initMap(todoItems);
    initHelper();
  }
  // seeds the item map and remembers which files participate in this view
  private void initMap(Collection<? extends TodoItem> todoItems) {
    buildMap(todoItems);
    myIncludedFiles.addAll(myMap.keySet());
  }
  // replaces the whole file->items map with the given items
  private void buildMap(Collection<? extends TodoItem> todoItems) {
    myMap.clear();
    for (TodoItem todoItem : todoItems) {
      myMap.putValue(todoItem.getFile(), todoItem);
    }
  }
  // installs a PsiTodoSearchHelper that answers from myMap instead of a global index
  private void initHelper() {
    myPsiTodoSearchHelper = new PsiTodoSearchHelper() {
      @NotNull
      @Override
      public PsiFile[] findFilesWithTodoItems() {
        // collect the current Change for every included file, then re-run the
        // TODO scan over those changes and rebuild the map from the result
        final List<Change> changes = new ArrayList<>();
        final List<LocalChangeList> changeLists = myChangeListManager.getChangeLists();
        final Map<VirtualFile, Change> allChanges = new HashMap<>();
        for (LocalChangeList changeList : changeLists) {
          final Collection<Change> currChanges = changeList.getChanges();
          for (Change currChange : currChanges) {
            if (currChange.getAfterRevision() != null && currChange.getAfterRevision().getFile().getVirtualFile() != null) {
              allChanges.put(currChange.getAfterRevision().getFile().getVirtualFile(), currChange);
            }
          }
        }
        for (final PsiFile next : myIncludedFiles) {
          final Change change = allChanges.get(next.getVirtualFile());
          if (change != null) {
            changes.add(change);
          }
        }
        // a hack here with _todo filter
        final TodoCheckinHandlerWorker worker = new TodoCheckinHandlerWorker(myProject, changes, getTodoTreeStructure().getTodoFilter());
        worker.execute();
        buildMap(worker.inOneList());
        final Set<PsiFile> files = myMap.keySet();
        return files.toArray(PsiFile.EMPTY_ARRAY);
      }
      @NotNull
      @Override
      public TodoItem[] findTodoItems(@NotNull PsiFile file) {
        return findPatternedTodoItems(file, getTodoTreeStructure().getTodoFilter());
      }
      @NotNull
      @Override
      public TodoItem[] findTodoItemsLight(@NotNull PsiFile file) {
        // "light" variants delegate to the full lookup; myMap is already in memory
        return findTodoItems(file);
      }
      @NotNull
      @Override
      public TodoItem[] findTodoItemsLight(@NotNull PsiFile file, int startOffset, int endOffset) {
        return findTodoItems(file, startOffset, endOffset);
      }
      @NotNull
      @Override
      public TodoItem[] findTodoItems(@NotNull PsiFile file, int startOffset, int endOffset) {
        // keep only items whose range fully contains [startOffset, endOffset)
        final TodoItem[] todoItems = findTodoItems(file);
        if (todoItems.length == 0) {
          return todoItems;
        }
        final TextRange textRange = new TextRange(startOffset, endOffset);
        final List<TodoItem> result = new ArrayList<>();
        for (TodoItem todoItem : todoItems) {
          if (todoItem.getTextRange().contains(textRange)) {
            result.add(todoItem);
          }
        }
        return result.isEmpty() ? EMPTY_ITEMS : result.toArray(new TodoItem[0]);
      }
      @Override
      public int getTodoItemsCount(@NotNull PsiFile file) {
        return findTodoItems(file).length;
      }
      @Override
      public int getTodoItemsCount(@NotNull PsiFile file, @NotNull TodoPattern pattern) {
        final TodoFilter filter = new TodoFilter();
        filter.addTodoPattern(pattern);
        return findPatternedTodoItems(file, filter).length;
      }
    };
  }
  // returns cached items for the file, re-scanning its current Change first
  // when the file is marked dirty; files outside the view always yield nothing
  private TodoItem[] findPatternedTodoItems(PsiFile file, final TodoFilter todoFilter) {
    if (! myIncludedFiles.contains(file)) return EMPTY_ITEMS;
    if (myDirtyFileSet.contains(file.getVirtualFile())) {
      myMap.remove(file);
      final Change change = myChangeListManager.getChange(file.getVirtualFile());
      if (change != null) {
        final TodoCheckinHandlerWorker
          worker = new TodoCheckinHandlerWorker(myProject, Collections.singletonList(change), todoFilter);
        worker.execute();
        final Collection<TodoItem> todoItems = worker.inOneList();
        if (todoItems != null && ! todoItems.isEmpty()) {
          for (TodoItem todoItem : todoItems) {
            myMap.putValue(file, todoItem);
          }
        }
      }
    }
    final Collection<TodoItem> todoItems = myMap.get(file);
    return todoItems.isEmpty() ? EMPTY_ITEMS : todoItems.toArray(new TodoItem[0]);
  }
  @NotNull
  @Override
  protected TodoTreeStructure createTreeStructure() {
    return new CustomChangelistTodoTreeStructure(myProject, myPsiTodoSearchHelper);
  }
  @Override
  void rebuildCache() {
    // recompute the set of files that still contain accepted TODO items
    Set<VirtualFile> files = new HashSet<>();
    TodoTreeStructure treeStructure=getTodoTreeStructure();
    PsiFile[] psiFiles= myPsiTodoSearchHelper.findFilesWithTodoItems();
    for (PsiFile psiFile : psiFiles) {
      if (myPsiTodoSearchHelper.getTodoItemsCount(psiFile) > 0 && treeStructure.accept(psiFile)) {
        files.add(psiFile.getVirtualFile());
      }
    }
    super.rebuildCache(files);
  }
}
wjacl/webbase | webbase/src/com/wja/base/system/service/ParamService.java | 1220 | package com.wja.base.system.service;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.wja.base.system.dao.ParamDao;
import com.wja.base.system.entity.Param;
/**
 * Service for reading and updating system parameters.
 */
@Service
public class ParamService
{
    @Autowired
    private ParamDao dao;

    /** @return all parameters */
    public List<Param> findAll()
    {
        return this.dao.findAll();
    }

    /**
     * @param id the parameter id
     * @return the parameter with the given id, or null if it does not exist
     */
    public Param get(String id)
    {
        return this.dao.findOne(id);
    }

    /**
     * Updates the stored value of the parameter identified by {@code p.getId()}
     * to {@code p.getValue()}. Does nothing when {@code p} is null, its id is
     * blank, or no parameter with that id exists.
     */
    public void save(Param p)
    {
        if (p == null || StringUtils.isBlank(p.getId()))
        {
            return;
        }
        this.updateValue(p.getId(), p.getValue());
    }

    /**
     * Updates the stored value of the parameter with the given id. Does nothing
     * when the id or the value is blank, or no parameter with that id exists.
     */
    public void saveValue(String id, String value)
    {
        if (StringUtils.isBlank(id) || StringUtils.isBlank(value))
        {
            return;
        }
        this.updateValue(id, value);
    }

    /** Loads the parameter, sets the new value and persists it; no-op for an unknown id. */
    private void updateValue(String id, String value)
    {
        Param p1 = this.get(id);
        if (p1 != null)
        {
            p1.setValue(value);
            this.dao.save(p1);
        }
    }
}
| apache-2.0 |
Ant-Droid/android_packages_apps_Settings_OLD | src/com/android/settings/TrustedCredentialsSettings.java | 32204 | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.UserInfo;
import android.net.http.SslCertificate;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.RemoteException;
import android.os.UserHandle;
import android.os.UserManager;
import android.security.IKeyChainService;
import android.security.KeyChain;
import android.security.KeyChain.KeyChainConnection;
import android.util.SparseArray;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.BaseExpandableListAdapter;
import android.widget.Button;
import android.widget.ExpandableListView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.Spinner;
import android.widget.Switch;
import android.widget.TabHost;
import android.widget.TextView;
import com.android.internal.logging.MetricsLogger;
import com.android.internal.util.ParcelableString;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.HashMap;
/**
 * Settings fragment that shows the trusted CA certificates, split into a
 * "system" and a "user" tab. Each tab lists certificates per user profile
 * (flat list for a single profile, expandable list for multiple profiles)
 * and lets the user inspect, enable/disable (system) or remove (user) them
 * via the per-profile KeyChain service.
 */
public class TrustedCredentialsSettings extends InstrumentedFragment {
    private static final String TAG = "TrustedCredentialsSettings";
    private UserManager mUserManager;
    private static final String USER_ACTION = "com.android.settings.TRUSTED_CREDENTIALS_USER";
    @Override
    protected int getMetricsCategory() {
        return MetricsLogger.TRUSTED_CREDENTIALS;
    }
    /**
     * The two certificate tabs. Each constant carries its UI resource ids and
     * encapsulates the tab-specific KeyChain operations (alias listing,
     * deleted-state check, button labels and post-operation UI update).
     */
    private enum Tab {
        SYSTEM("system",
               R.string.trusted_credentials_system_tab,
               R.id.system_tab,
               R.id.system_progress,
               R.id.system_list,
               R.id.system_expandable_list,
               true),
        USER("user",
             R.string.trusted_credentials_user_tab,
             R.id.user_tab,
             R.id.user_progress,
             R.id.user_list,
             R.id.user_expandable_list,
             false);
        private final String mTag;
        private final int mLabel;
        private final int mView;
        private final int mProgress;
        private final int mList;
        private final int mExpandableList;
        // true when rows show an enable/disable switch (system tab only)
        private final boolean mSwitch;
        private Tab(String tag, int label, int view, int progress, int list, int expandableList,
                boolean withSwitch) {
            mTag = tag;
            mLabel = label;
            mView = view;
            mProgress = progress;
            mList = list;
            mExpandableList = expandableList;
            mSwitch = withSwitch;
        }
        private List<ParcelableString> getAliases(IKeyChainService service) throws RemoteException {
            switch (this) {
                case SYSTEM: {
                    return service.getSystemCaAliases().getList();
                }
                case USER:
                    return service.getUserCaAliases().getList();
            }
            throw new AssertionError();
        }
        // a system cert counts as "deleted" (disabled) when its alias is absent
        private boolean deleted(IKeyChainService service, String alias) throws RemoteException {
            switch (this) {
                case SYSTEM:
                    return !service.containsCaAlias(alias);
                case USER:
                    return false;
            }
            throw new AssertionError();
        }
        private int getButtonLabel(CertHolder certHolder) {
            switch (this) {
                case SYSTEM:
                    if (certHolder.mDeleted) {
                        return R.string.trusted_credentials_enable_label;
                    }
                    return R.string.trusted_credentials_disable_label;
                case USER:
                    return R.string.trusted_credentials_remove_label;
            }
            throw new AssertionError();
        }
        private int getButtonConfirmation(CertHolder certHolder) {
            switch (this) {
                case SYSTEM:
                    if (certHolder.mDeleted) {
                        return R.string.trusted_credentials_enable_confirmation;
                    }
                    return R.string.trusted_credentials_disable_confirmation;
                case USER:
                    return R.string.trusted_credentials_remove_confirmation;
            }
            throw new AssertionError();
        }
        // reflects a finished enable/disable/remove operation in the adapter
        private void postOperationUpdate(boolean ok, CertHolder certHolder) {
            if (ok) {
                if (certHolder.mTab.mSwitch) {
                    certHolder.mDeleted = !certHolder.mDeleted;
                } else {
                    certHolder.mAdapter.remove(certHolder);
                }
                certHolder.mAdapter.notifyDataSetChanged();
            } else {
                // bail, reload to reset to known state
                certHolder.mAdapter.load();
            }
        }
    }
    private TabHost mTabHost;
    // the single in-flight enable/disable/remove task, if any
    private AliasOperation mAliasOperation;
    private HashMap<Tab, AdapterData.AliasLoader>
            mAliasLoaders = new HashMap<Tab, AdapterData.AliasLoader>(2);
    // open KeyChain connections per profile id; closed in onDestroy
    private final SparseArray<KeyChainConnection>
            mKeyChainConnectionByProfileId = new SparseArray<KeyChainConnection>();
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mUserManager = (UserManager) getActivity().getSystemService(Context.USER_SERVICE);
    }
    @Override public View onCreateView(
            LayoutInflater inflater, ViewGroup parent, Bundle savedInstanceState) {
        mTabHost = (TabHost) inflater.inflate(R.layout.trusted_credentials, parent, false);
        mTabHost.setup();
        addTab(Tab.SYSTEM);
        // TODO add Install button on Tab.USER to go to CertInstaller like KeyChainActivity
        addTab(Tab.USER);
        if (getActivity().getIntent() != null &&
                USER_ACTION.equals(getActivity().getIntent().getAction())) {
            mTabHost.setCurrentTabByTag(Tab.USER.mTag);
        }
        return mTabHost;
    }
    @Override
    public void onDestroy() {
        // cancel background work and release KeyChain bindings
        for (AdapterData.AliasLoader aliasLoader : mAliasLoaders.values()) {
            aliasLoader.cancel(true);
        }
        if (mAliasOperation != null) {
            mAliasOperation.cancel(true);
            mAliasOperation = null;
        }
        closeKeyChainConnections();
        super.onDestroy();
    }
    private void closeKeyChainConnections() {
        final int n = mKeyChainConnectionByProfileId.size();
        for (int i = 0; i < n; ++i) {
            mKeyChainConnectionByProfileId.valueAt(i).close();
        }
        mKeyChainConnectionByProfileId.clear();
    }
    // creates the tab and wires the list (expandable when there are multiple profiles)
    private void addTab(Tab tab) {
        TabHost.TabSpec systemSpec = mTabHost.newTabSpec(tab.mTag)
                .setIndicator(getActivity().getString(tab.mLabel))
                .setContent(tab.mView);
        mTabHost.addTab(systemSpec);
        if (mUserManager.getUserProfiles().size() > 1) {
            ExpandableListView lv = (ExpandableListView) mTabHost.findViewById(tab.mExpandableList);
            final TrustedCertificateExpandableAdapter adapter =
                    new TrustedCertificateExpandableAdapter(tab);
            lv.setAdapter(adapter);
            lv.setOnChildClickListener(new ExpandableListView.OnChildClickListener() {
                @Override
                public boolean onChildClick(ExpandableListView parent, View v,
                        int groupPosition, int childPosition, long id) {
                    showCertDialog(adapter.getChild(groupPosition, childPosition));
                    return true;
                }
            });
        } else {
            ListView lv = (ListView) mTabHost.findViewById(tab.mList);
            final TrustedCertificateAdapter adapter = new TrustedCertificateAdapter(tab);
            lv.setAdapter(adapter);
            lv.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                @Override public void onItemClick(AdapterView<?> parent, View view,
                        int pos, long id) {
                    showCertDialog(adapter.getItem(pos));
                }
            });
        }
    }
    /**
     * Common interface for adapters of both expandable and non-expandable certificate lists.
     */
    private interface TrustedCertificateAdapterCommons {
        /**
         * Remove a certificate from the list.
         * @param certHolder the certificate to be removed.
         */
        void remove(CertHolder certHolder);
        /**
         * Notify the adapter that the underlying data set has changed.
         */
        void notifyDataSetChanged();
        /**
         * Load the certificates.
         */
        void load();
        /**
         * Gets the identifier of the list view the adapter is connected to.
         * @param tab the tab on which the list view resides.
         * @return identifier of the list view.
         */
        int getListViewId(Tab tab);
    }
    /**
     * Adapter for expandable list view of certificates. Groups in the view correspond to profiles
     * whereas children correspond to certificates.
     */
    private class TrustedCertificateExpandableAdapter extends BaseExpandableListAdapter implements
            TrustedCertificateAdapterCommons {
        private AdapterData mData;
        private TrustedCertificateExpandableAdapter(Tab tab) {
            mData = new AdapterData(tab, this);
            load();
        }
        @Override
        public void remove(CertHolder certHolder) {
            mData.remove(certHolder);
        }
        @Override
        public int getGroupCount() {
            return mData.mCertHoldersByUserId.size();
        }
        @Override
        public int getChildrenCount(int groupPosition) {
            List<CertHolder> certHolders = mData.mCertHoldersByUserId.valueAt(groupPosition);
            if (certHolders != null) {
                return certHolders.size();
            }
            return 0;
        }
        @Override
        public UserHandle getGroup(int groupPosition) {
            return new UserHandle(mData.mCertHoldersByUserId.keyAt(groupPosition));
        }
        @Override
        public CertHolder getChild(int groupPosition, int childPosition) {
            return mData.mCertHoldersByUserId.valueAt(groupPosition).get(childPosition);
        }
        @Override
        public long getGroupId(int groupPosition) {
            return mData.mCertHoldersByUserId.keyAt(groupPosition);
        }
        @Override
        public long getChildId(int groupPosition, int childPosition) {
            return childPosition;
        }
        @Override
        public boolean hasStableIds() {
            return false;
        }
        @Override
        public View getGroupView(int groupPosition, boolean isExpanded, View convertView,
                ViewGroup parent) {
            // group header shows "work" vs "personal" depending on the profile
            if (convertView == null) {
                LayoutInflater inflater = (LayoutInflater) getActivity()
                        .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                convertView = Utils.inflateCategoryHeader(inflater, parent);
            }
            final TextView title = (TextView) convertView.findViewById(android.R.id.title);
            final UserHandle profile = getGroup(groupPosition);
            final UserInfo userInfo = mUserManager.getUserInfo(profile.getIdentifier());
            if (userInfo.isManagedProfile()) {
                title.setText(R.string.category_work);
            } else {
                title.setText(R.string.category_personal);
            }
            title.setTextAlignment(View.TEXT_ALIGNMENT_VIEW_END);
            return convertView;
        }
        @Override
        public View getChildView(int groupPosition, int childPosition, boolean isLastChild,
                View convertView, ViewGroup parent) {
            return getViewForCertificate(getChild(groupPosition, childPosition), mData.mTab,
                    convertView, parent);
        }
        @Override
        public boolean isChildSelectable(int groupPosition, int childPosition) {
            return true;
        }
        @Override
        public void load() {
            mData.new AliasLoader().execute();
        }
        @Override
        public int getListViewId(Tab tab) {
            return tab.mExpandableList;
        }
    }
    /** Flat-list adapter used when only a single user profile exists. */
    private class TrustedCertificateAdapter extends BaseAdapter implements
            TrustedCertificateAdapterCommons {
        private final AdapterData mData;
        private TrustedCertificateAdapter(Tab tab) {
            mData = new AdapterData(tab, this);
            load();
        }
        @Override
        public void remove(CertHolder certHolder) {
            mData.remove(certHolder);
        }
        @Override
        public int getListViewId(Tab tab) {
            return tab.mList;
        }
        @Override
        public void load() {
            mData.new AliasLoader().execute();
        }
        @Override public int getCount() {
            List<CertHolder> certHolders = mData.mCertHoldersByUserId.valueAt(0);
            if (certHolders != null) {
                return certHolders.size();
            }
            return 0;
        }
        @Override public CertHolder getItem(int position) {
            return mData.mCertHoldersByUserId.valueAt(0).get(position);
        }
        @Override public long getItemId(int position) {
            return position;
        }
        @Override public View getView(int position, View view, ViewGroup parent) {
            return getViewForCertificate(getItem(position), mData.mTab, view, parent);
        }
    }
    /** Backing data for one tab: the certificates of each profile, plus the loader. */
    private class AdapterData {
        private final SparseArray<List<CertHolder>> mCertHoldersByUserId =
                new SparseArray<List<CertHolder>>();
        private final Tab mTab;
        private final TrustedCertificateAdapterCommons mAdapter;
        private AdapterData(Tab tab, TrustedCertificateAdapterCommons adapter) {
            mAdapter = adapter;
            mTab = tab;
        }
        // loads all aliases and certificates for all profiles off the UI thread,
        // reporting progress; also caches KeyChain connections for later dialogs
        private class AliasLoader extends AsyncTask<Void, Integer, SparseArray<List<CertHolder>>> {
            private ProgressBar mProgressBar;
            private View mList;
            private Context mContext;
            public AliasLoader() {
                mContext = getActivity();
                mAliasLoaders.put(mTab, this);
            }
            @Override protected void onPreExecute() {
                View content = mTabHost.getTabContentView();
                mProgressBar = (ProgressBar) content.findViewById(mTab.mProgress);
                mList = content.findViewById(mAdapter.getListViewId(mTab));
                mProgressBar.setVisibility(View.VISIBLE);
                mList.setVisibility(View.GONE);
            }
            @Override protected SparseArray<List<CertHolder>> doInBackground(Void... params) {
                SparseArray<List<CertHolder>> certHoldersByProfile =
                        new SparseArray<List<CertHolder>>();
                try {
                    List<UserHandle> profiles = mUserManager.getUserProfiles();
                    final int n = profiles.size();
                    // First we get all aliases for all profiles in order to show progress
                    // correctly. Otherwise this could all be in a single loop.
                    SparseArray<List<ParcelableString>> aliasesByProfileId = new SparseArray<
                            List<ParcelableString>>(n);
                    int max = 0;
                    int progress = 0;
                    for (int i = 0; i < n; ++i) {
                        UserHandle profile = profiles.get(i);
                        int profileId = profile.getIdentifier();
                        KeyChainConnection keyChainConnection = KeyChain.bindAsUser(mContext,
                                profile);
                        // Saving the connection for later use on the certificate dialog.
                        mKeyChainConnectionByProfileId.put(profileId, keyChainConnection);
                        IKeyChainService service = keyChainConnection.getService();
                        List<ParcelableString> aliases = mTab.getAliases(service);
                        if (isCancelled()) {
                            return new SparseArray<List<CertHolder>>();
                        }
                        max += aliases.size();
                        aliasesByProfileId.put(profileId, aliases);
                    }
                    for (int i = 0; i < n; ++i) {
                        UserHandle profile = profiles.get(i);
                        int profileId = profile.getIdentifier();
                        List<ParcelableString> aliases = aliasesByProfileId.get(profileId);
                        if (isCancelled()) {
                            return new SparseArray<List<CertHolder>>();
                        }
                        IKeyChainService service = mKeyChainConnectionByProfileId.get(profileId)
                                .getService();
                        List<CertHolder> certHolders = new ArrayList<CertHolder>(max);
                        final int aliasMax = aliases.size();
                        for (int j = 0; j < aliasMax; ++j) {
                            String alias = aliases.get(j).string;
                            byte[] encodedCertificate = service.getEncodedCaCertificate(alias,
                                    true);
                            X509Certificate cert = KeyChain.toCertificate(encodedCertificate);
                            certHolders.add(new CertHolder(service, mAdapter,
                                    mTab, alias, cert, profileId));
                            publishProgress(++progress, max);
                        }
                        Collections.sort(certHolders);
                        certHoldersByProfile.put(profileId, certHolders);
                    }
                    return certHoldersByProfile;
                } catch (RemoteException e) {
                    Log.e(TAG, "Remote exception while loading aliases.", e);
                    return new SparseArray<List<CertHolder>>();
                } catch (InterruptedException e) {
                    Log.e(TAG, "InterruptedException while loading aliases.", e);
                    return new SparseArray<List<CertHolder>>();
                }
            }
            @Override protected void onProgressUpdate(Integer... progressAndMax) {
                int progress = progressAndMax[0];
                int max = progressAndMax[1];
                if (max != mProgressBar.getMax()) {
                    mProgressBar.setMax(max);
                }
                mProgressBar.setProgress(progress);
            }
            @Override protected void onPostExecute(SparseArray<List<CertHolder>> certHolders) {
                mCertHoldersByUserId.clear();
                final int n = certHolders.size();
                for (int i = 0; i < n; ++i) {
                    mCertHoldersByUserId.put(certHolders.keyAt(i), certHolders.valueAt(i));
                }
                mAdapter.notifyDataSetChanged();
                mProgressBar.setVisibility(View.GONE);
                mList.setVisibility(View.VISIBLE);
                mProgressBar.setProgress(0);
                mAliasLoaders.remove(mTab);
            }
        }
        public void remove(CertHolder certHolder) {
            if (mCertHoldersByUserId != null) {
                final List<CertHolder> certs = mCertHoldersByUserId.get(certHolder.mProfileId);
                if (certs != null) {
                    certs.remove(certHolder);
                }
            }
        }
    }
    /**
     * One certificate row: the parsed certificate plus display strings and its
     * per-tab deleted/disabled state. Ordered by subject for the sorted list.
     */
    private static class CertHolder implements Comparable<CertHolder> {
        public int mProfileId;
        private final IKeyChainService mService;
        private final TrustedCertificateAdapterCommons mAdapter;
        private final Tab mTab;
        private final String mAlias;
        private final X509Certificate mX509Cert;
        private final SslCertificate mSslCert;
        private final String mSubjectPrimary;
        private final String mSubjectSecondary;
        private boolean mDeleted;
        private CertHolder(IKeyChainService service,
                           TrustedCertificateAdapterCommons adapter,
                           Tab tab,
                           String alias,
                           X509Certificate x509Cert,
                           int profileId) {
            mProfileId = profileId;
            mService = service;
            mAdapter = adapter;
            mTab = tab;
            mAlias = alias;
            mX509Cert = x509Cert;
            mSslCert = new SslCertificate(x509Cert);
            String cn = mSslCert.getIssuedTo().getCName();
            String o = mSslCert.getIssuedTo().getOName();
            String ou = mSslCert.getIssuedTo().getUName();
            // if we have a O, use O as primary subject, secondary prefer CN over OU
            // if we don't have an O, use CN as primary, empty secondary
            // if we don't have O or CN, use DName as primary, empty secondary
            if (!o.isEmpty()) {
                if (!cn.isEmpty()) {
                    mSubjectPrimary = o;
                    mSubjectSecondary = cn;
                } else {
                    mSubjectPrimary = o;
                    mSubjectSecondary = ou;
                }
            } else {
                if (!cn.isEmpty()) {
                    mSubjectPrimary = cn;
                    mSubjectSecondary = "";
                } else {
                    mSubjectPrimary = mSslCert.getIssuedTo().getDName();
                    mSubjectSecondary = "";
                }
            }
            try {
                mDeleted = mTab.deleted(mService, mAlias);
            } catch (RemoteException e) {
                Log.e(TAG, "Remote exception while checking if alias " + mAlias + " is deleted.",
                        e);
                mDeleted = false;
            }
        }
        @Override public int compareTo(CertHolder o) {
            int primary = this.mSubjectPrimary.compareToIgnoreCase(o.mSubjectPrimary);
            if (primary != 0) {
                return primary;
            }
            return this.mSubjectSecondary.compareToIgnoreCase(o.mSubjectSecondary);
        }
        @Override public boolean equals(Object o) {
            // equality is by alias only, matching hashCode below
            if (!(o instanceof CertHolder)) {
                return false;
            }
            CertHolder other = (CertHolder) o;
            return mAlias.equals(other.mAlias);
        }
        @Override public int hashCode() {
            return mAlias.hashCode();
        }
    }
    // binds one certificate row, recycling convertView via the ViewHolder pattern
    private View getViewForCertificate(CertHolder certHolder, Tab mTab, View convertView,
            ViewGroup parent) {
        ViewHolder holder;
        if (convertView == null) {
            LayoutInflater inflater = LayoutInflater.from(getActivity());
            convertView = inflater.inflate(R.layout.trusted_credential, parent, false);
            holder = new ViewHolder();
            holder.mSubjectPrimaryView = (TextView)
                    convertView.findViewById(R.id.trusted_credential_subject_primary);
            holder.mSubjectSecondaryView = (TextView)
                    convertView.findViewById(R.id.trusted_credential_subject_secondary);
            holder.mSwitch = (Switch) convertView.findViewById(
                    R.id.trusted_credential_status);
            convertView.setTag(holder);
        } else {
            holder = (ViewHolder) convertView.getTag();
        }
        holder.mSubjectPrimaryView.setText(certHolder.mSubjectPrimary);
        holder.mSubjectSecondaryView.setText(certHolder.mSubjectSecondary);
        if (mTab.mSwitch) {
            holder.mSwitch.setChecked(!certHolder.mDeleted);
            holder.mSwitch.setEnabled(!mUserManager.hasUserRestriction(
                    UserManager.DISALLOW_CONFIG_CREDENTIALS,
                    new UserHandle(certHolder.mProfileId)));
            holder.mSwitch.setVisibility(View.VISIBLE);
        }
        return convertView;
    }
    private static class ViewHolder {
        private TextView mSubjectPrimaryView;
        private TextView mSubjectSecondaryView;
        private Switch mSwitch;
    }
    // shows the certificate-details dialog with a spinner to pick a chain element
    // and (when permitted) a button to enable/disable/remove the certificate
    private void showCertDialog(final CertHolder certHolder) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setTitle(com.android.internal.R.string.ssl_certificate);
        final ArrayList<View> views = new ArrayList<View>();
        final ArrayList<String> titles = new ArrayList<String>();
        addCertChain(certHolder, views, titles);
        ArrayAdapter<String> arrayAdapter = new ArrayAdapter<String>(getActivity(),
                android.R.layout.simple_spinner_item,
                titles);
        arrayAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        Spinner spinner = new Spinner(getActivity());
        spinner.setAdapter(arrayAdapter);
        spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position,
                    long id) {
                for(int i = 0; i < views.size(); i++) {
                    views.get(i).setVisibility(i == position ? View.VISIBLE : View.GONE);
                }
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) { }
        });
        LinearLayout container = new LinearLayout(getActivity());
        container.setOrientation(LinearLayout.VERTICAL);
        container.addView(spinner);
        for (int i = 0; i < views.size(); ++i) {
            View certificateView = views.get(i);
            if (i != 0) {
                certificateView.setVisibility(View.GONE);
            }
            container.addView(certificateView);
        }
        builder.setView(container);
        builder.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
            @Override public void onClick(DialogInterface dialog, int id) {
                dialog.dismiss();
            }
        });
        final Dialog certDialog = builder.create();
        ViewGroup body = (ViewGroup) container.findViewById(com.android.internal.R.id.body);
        LayoutInflater inflater = LayoutInflater.from(getActivity());
        Button removeButton = (Button) inflater.inflate(R.layout.trusted_credential_details,
                                                        body,
                                                        false);
        if (!mUserManager.hasUserRestriction(UserManager.DISALLOW_CONFIG_CREDENTIALS,
                new UserHandle(certHolder.mProfileId))) {
            body.addView(removeButton);
        }
        removeButton.setText(certHolder.mTab.getButtonLabel(certHolder));
        removeButton.setOnClickListener(new View.OnClickListener() {
            @Override public void onClick(View v) {
                // confirm before kicking off the background AliasOperation
                AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
                builder.setMessage(certHolder.mTab.getButtonConfirmation(certHolder));
                builder.setPositiveButton(
                        android.R.string.yes, new DialogInterface.OnClickListener() {
                            @Override public void onClick(DialogInterface dialog, int id) {
                                new AliasOperation(certHolder).execute();
                                dialog.dismiss();
                                certDialog.dismiss();
                            }
                        });
                builder.setNegativeButton(
                        android.R.string.no, new DialogInterface.OnClickListener() {
                            @Override public void onClick(DialogInterface dialog, int id) {
                                dialog.cancel();
                            }
                        });
                AlertDialog alert = builder.create();
                alert.show();
            }
        });
        certDialog.show();
    }
    // resolves the full certificate chain for the holder's alias and inflates
    // one detail view (plus title) per chain element
    private void addCertChain(final CertHolder certHolder,
            final ArrayList<View> views, final ArrayList<String> titles) {
        List<X509Certificate> certificates = null;
        try {
            KeyChainConnection keyChainConnection = mKeyChainConnectionByProfileId.get(
                    certHolder.mProfileId);
            IKeyChainService service = keyChainConnection.getService();
            List<String> chain = service.getCaCertificateChainAliases(certHolder.mAlias, true);
            final int n = chain.size();
            certificates = new ArrayList<X509Certificate>(n);
            for (int i = 0; i < n; ++i) {
                byte[] encodedCertificate = service.getEncodedCaCertificate(chain.get(i), true);
                X509Certificate certificate = KeyChain.toCertificate(encodedCertificate);
                certificates.add(certificate);
            }
        } catch (RemoteException ex) {
            Log.e(TAG, "RemoteException while retrieving certificate chain for root "
                    + certHolder.mAlias, ex);
            return;
        }
        for (X509Certificate certificate : certificates) {
            addCertDetails(certificate, views, titles);
        }
    }
    private void addCertDetails(X509Certificate certificate, final ArrayList<View> views,
            final ArrayList<String> titles) {
        SslCertificate sslCert = new SslCertificate(certificate);
        views.add(sslCert.inflateCertificateView(getActivity()));
        titles.add(sslCert.getIssuedTo().getCName());
    }
    // background task that enables (re-installs) or disables/removes one certificate
    private class AliasOperation extends AsyncTask<Void, Void, Boolean> {
        private final CertHolder mCertHolder;
        private AliasOperation(CertHolder certHolder) {
            mCertHolder = certHolder;
            mAliasOperation = this;
        }
        @Override
        protected Boolean doInBackground(Void... params) {
            try {
                KeyChainConnection keyChainConnection = mKeyChainConnectionByProfileId.get(
                        mCertHolder.mProfileId);
                IKeyChainService service = keyChainConnection.getService();
                if (mCertHolder.mDeleted) {
                    // currently disabled: re-install to enable
                    byte[] bytes = mCertHolder.mX509Cert.getEncoded();
                    service.installCaCertificate(bytes);
                    return true;
                } else {
                    return service.deleteCaCertificate(mCertHolder.mAlias);
                }
            } catch (CertificateEncodingException | SecurityException | IllegalStateException
                    | RemoteException e) {
                Log.w(TAG, "Error while toggling alias " + mCertHolder.mAlias,
                        e);
                return false;
            }
        }
        @Override
        protected void onPostExecute(Boolean ok) {
            mCertHolder.mTab.postOperationUpdate(ok, mCertHolder);
            mAliasOperation = null;
        }
    }
}
| apache-2.0 |
jwren/intellij-community | plugins/git4idea/src/git4idea/config/CachingFileTester.java | 6007 | /*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package git4idea.config;
import com.intellij.execution.configurations.PathEnvironmentVariableUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorUtils;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.SystemInfo;
import git4idea.i18n.GitBundle;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Caches the outcome of probing a {@link GitExecutable} (see
 * {@link #testExecutable}), keyed by executable.  A cached entry is
 * invalidated when the executable file's last-modification time changes on
 * disk.  Probes run on a pooled thread under a hard timeout.
 */
abstract class CachingFileTester {
  private static final Logger LOG = Logger.getInstance(CachingFileTester.class);
  // Upper bound for a single probe run (30 s).
  private static final int FILE_TEST_TIMEOUT_MS = 30000;
  // Guards cache lookup and (re)probing; acquired with cancellation-aware 50 ms polling.
  private final ReentrantLock LOCK = new ReentrantLock();
  @NotNull private final ConcurrentMap<GitExecutable, TestResult> myTestMap = new ConcurrentHashMap<>();

  /**
   * Returns the cached result for {@code executable}, re-running the probe when
   * there is no cached entry or the file changed since the cached probe.
   * Probe failures (other than cancellation) are cached as well, so a
   * known-bad executable is not probed again until it changes on disk.
   */
  @NotNull
  final TestResult getResultFor(@NotNull GitExecutable executable) {
    return ProgressIndicatorUtils.computeWithLockAndCheckingCanceled(LOCK, 50, TimeUnit.MILLISECONDS, () -> {
      TestResult result = myTestMap.get(executable);
      long currentLastModificationDate = 0L;
      try {
        currentLastModificationDate = getModificationTime(executable);
        // Re-probe when nothing is cached or the executable file was modified since.
        if (result == null || result.getFileLastModifiedTimestamp() != currentLastModificationDate) {
          result = new TestResult(testOrAbort(executable), currentLastModificationDate);
          myTestMap.put(executable, result);
        }
      }
      catch (ProcessCanceledException pce) {
        // Cancellation is not a probe failure and must propagate.
        throw pce;
      }
      catch (Exception e) {
        // Cache the failure too (timestamp may still be 0 if reading it failed).
        result = new TestResult(e, currentLastModificationDate);
        myTestMap.put(executable, result);
      }
      return result;
    });
  }

  /**
   * Last-modification time of the executable file in milliseconds, or 0 when it
   * cannot be determined (unknown and WSL executables).  A bare command name
   * without a path separator is resolved through PATH first.
   */
  private static long getModificationTime(@NotNull GitExecutable executable) throws IOException {
    if (executable instanceof GitExecutable.Unknown) {
      return 0;
    }
    if (executable instanceof GitExecutable.Local) {
      String filePath = executable.getExePath();
      if (!filePath.contains(File.separator)) {
        File exeFile = PathEnvironmentVariableUtil.findInPath(filePath);
        if (exeFile != null) filePath = exeFile.getPath();
      }
      return Files.getLastModifiedTime(Paths.get(filePath)).toMillis();
    }
    if (executable instanceof GitExecutable.Wsl) {
      return 0;
    }
    LOG.error("Can't get modification time for " + executable);
    return 0;
  }

  /**
   * Probes the executable, retrying on timeout (up to 3 attempts for the macOS
   * system Git at /usr/bin/git), and throws when no attempt produced a result.
   */
  @NotNull
  private GitVersion testOrAbort(@NotNull GitExecutable executable) throws Exception {
    int maxAttempts = 1;
    // IDEA-248193 Apple Git might hang with timeout after hibernation. Do several attempts.
    if (SystemInfo.isMac && "/usr/bin/git".equals(executable.getExePath())) {
      maxAttempts = 3;
    }
    int attempt = 0;
    while (attempt < maxAttempts) {
      GitVersion result = runTestWithTimeout(executable);
      if (result != null) return result;
      attempt++;
    }
    throw new GitVersionIdentificationException(
      GitBundle.message("git.executable.validation.error.no.response.in.n.attempts.message", maxAttempts), null);
  }

  /**
   * Runs {@link #testExecutable} on a pooled thread and waits for it, polling
   * for cancellation every 50 ms.  Returns null on timeout
   * ({@link #FILE_TEST_TIMEOUT_MS}); rethrows any exception from the probe.
   */
  @Nullable
  private GitVersion runTestWithTimeout(@NotNull GitExecutable executable) throws Exception {
    EmptyProgressIndicator indicator = new EmptyProgressIndicator();
    Ref<Exception> exceptionRef = new Ref<>();
    Ref<GitVersion> resultRef = new Ref<>();
    Semaphore semaphore = new Semaphore(0);

    ApplicationManager.getApplication().executeOnPooledThread(
      () -> ProgressManager.getInstance().executeProcessUnderProgress(() -> {
        try {
          resultRef.set(testExecutable(executable));
        }
        catch (Exception e) {
          exceptionRef.set(e);
        }
        finally {
          // Signal completion whether the probe succeeded or failed.
          semaphore.release();
        }
      }, indicator));

    try {
      long start = System.currentTimeMillis();
      while (true) {
        ProgressManager.checkCanceled();
        if (semaphore.tryAcquire(50, TimeUnit.MILLISECONDS)) break;
        if (System.currentTimeMillis() - start > FILE_TEST_TIMEOUT_MS) break;
      }
      if (!resultRef.isNull()) return resultRef.get();
      if (!exceptionRef.isNull()) throw exceptionRef.get();
      return null; // timeout
    }
    finally {
      // Cancel the background probe whether it finished, failed, or timed out.
      indicator.cancel();
    }
  }

  /** Returns the cached result, if any, without triggering a new probe. */
  @Nullable
  public TestResult getCachedResultFor(@NotNull GitExecutable executable) {
    return myTestMap.get(executable);
  }

  /** Forgets the cached result so the next {@link #getResultFor} re-probes. */
  public void dropCache(@NotNull GitExecutable executable) {
    myTestMap.remove(executable);
  }

  /** Performs the actual probe of the executable, producing its detected version. */
  @NotNull
  protected abstract GitVersion testExecutable(@NotNull GitExecutable executable) throws Exception;

  /**
   * Outcome of one probe: either a {@link GitVersion} or the failure exception,
   * plus the executable file's modification timestamp at probe time (used for
   * cache invalidation).
   */
  public static class TestResult {
    @Nullable private final GitVersion myResult;
    @Nullable private final Exception myException;
    private final long myFileLastModifiedTimestamp;

    TestResult(@NotNull GitVersion result, long timestamp) {
      myResult = result;
      myFileLastModifiedTimestamp = timestamp;
      myException = null;
    }

    TestResult(@NotNull Exception exception, long timestamp) {
      myFileLastModifiedTimestamp = timestamp;
      myResult = null;
      myException = exception;
    }

    @Nullable
    public GitVersion getResult() {
      return myResult;
    }

    @Nullable
    public Exception getException() {
      return myException;
    }

    private long getFileLastModifiedTimestamp() {
      return myFileLastModifiedTimestamp;
    }
  }
} | apache-2.0 |
sekikn/ambari | ambari-server/src/test/java/org/apache/ambari/server/security/encryption/CredentialProviderTest.java | 4615 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.security.encryption;
import java.io.File;
import java.io.IOException;
import java.util.Properties;
import org.apache.ambari.server.configuration.Configuration;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import junit.framework.Assert;
public class CredentialProviderTest {
@Rule
public TemporaryFolder tmpFolder = new TemporaryFolder();
@Before
public void setUp() throws Exception {
tmpFolder.create();
}
private void createMasterKey() throws IOException {
File f = tmpFolder.newFile(Configuration.MASTER_KEY_FILENAME_DEFAULT);
Assert.assertTrue(new MasterKeyServiceImpl("dummyKey").initializeMasterKeyFile(f, "blahblah!"));
MasterKeyService ms = new MasterKeyServiceImpl(f);
if (!ms.isMasterKeyInitialized()) {
throw new ExceptionInInitializerError("Cannot create master key.");
}
}
@Test
public void testInitialization() throws Exception {
CredentialProvider cr;
File msFile = tmpFolder.newFile(Configuration.MASTER_KEY_FILENAME_DEFAULT);
File mksFile = tmpFolder.newFile(Configuration.MASTER_KEYSTORE_FILENAME_DEFAULT);
Configuration configuration = new Configuration(new Properties());
configuration.setProperty(Configuration.MASTER_KEY_LOCATION, msFile.getParent());
configuration.setProperty(Configuration.MASTER_KEYSTORE_LOCATION, mksFile.getParent());
// With master key persisted
createMasterKey();
cr = new CredentialProvider(null, configuration);
Assert.assertNotNull(cr);
Assert.assertNotNull(cr.getKeystoreService());
msFile.delete();
mksFile.delete();
// Without master key persisted
cr = new CredentialProvider("blahblah!", configuration);
Assert.assertNotNull(cr);
Assert.assertNotNull(cr.getKeystoreService());
}
@Test
public void testIsAliasString() {
String test = "cassablanca";
Assert.assertFalse(CredentialProvider.isAliasString(test));
test = "${}";
Assert.assertFalse(CredentialProvider.isAliasString(test));
test = "{}";
Assert.assertFalse(CredentialProvider.isAliasString(test));
test = "{cassablanca}";
Assert.assertFalse(CredentialProvider.isAliasString(test));
test = "${cassablanca}";
Assert.assertFalse(CredentialProvider.isAliasString(test));
test = "${alias=cassablanca}";
Assert.assertTrue(CredentialProvider.isAliasString(test));
}
@Test
public void testCredentialStore() throws Exception {
File msFile = tmpFolder.newFile(Configuration.MASTER_KEY_FILENAME_DEFAULT);
File mksFile = tmpFolder.newFile(Configuration.MASTER_KEYSTORE_FILENAME_DEFAULT);
Configuration configuration = new Configuration(new Properties());
configuration.setProperty(Configuration.MASTER_KEY_LOCATION, msFile.getParent());
configuration.setProperty(Configuration.MASTER_KEYSTORE_LOCATION, mksFile.getParent());
// With master key persisted
createMasterKey();
CredentialProvider cr = new CredentialProvider(null, configuration);
Assert.assertNotNull(cr);
Assert.assertNotNull(cr.getKeystoreService());
try {
cr.addAliasToCredentialStore("", "xyz");
Assert.fail("Expected an exception");
} catch (Throwable t) {
Assert.assertTrue(t instanceof IllegalArgumentException);
}
try {
cr.addAliasToCredentialStore("xyz", null);
Assert.fail("Expected an exception");
} catch (Throwable t) {
Assert.assertTrue(t instanceof IllegalArgumentException);
}
cr.addAliasToCredentialStore("myalias", "mypassword");
Assert.assertEquals("mypassword", new String(cr.getPasswordForAlias
("myalias")));
}
@After
public void tearDown() throws Exception {
tmpFolder.delete();
}
}
| apache-2.0 |
archord/svom | src/main/java/com/gwac/action/OTLookBack.java | 3655 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.gwac.action;
/**
*
* @author xy
*/
import com.gwac.dao.OtLevel2Dao;
import com.gwac.model.OtLevel2;
import static com.opensymphony.xwork2.Action.ERROR;
import static com.opensymphony.xwork2.Action.INPUT;
import static com.opensymphony.xwork2.Action.SUCCESS;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionSupport;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.Result;
/*parameter:currentDirectory, configFile, [fileUpload], [fileUpload].*/
/* curl command example: */
/* curl http://localhost/otLookBack.action -F ot2name=M151017_C00020 -F flag=1 */
/**
 * Struts2 action that records the manual "look back" (visual re-inspection)
 * result for a level-2 optical transient (OT2) candidate identified by name.
 *
 * @author xy
 */
//@InterceptorRef("jsonValidationWorkflowStack")
// NOTE: with the interceptor line above enabled, file upload stops working.
//@ParentPackage(value="struts-default")
//@Controller()
//@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class OTLookBack extends ActionSupport {
    private static final Log log = LogFactory.getLog(OTLookBack.class);
    // Name of the OT2 candidate to update, e.g. "M151017_C00020".
    private String ot2name;
    // Look-back result: 1 = target present in subtracted image,
    // 2 = no target in subtracted image, 0 = unprocessed or processing error.
    private Short flag;
    private String echo = "";
    private OtLevel2Dao ot2Dao;

    @Action(value = "otLookBack", results = {
            @Result(location = "manage/result.jsp", name = SUCCESS),
            @Result(location = "manage/result.jsp", name = INPUT),
            @Result(location = "manage/result.jsp", name = ERROR)})
    public String upload() {
        String result = SUCCESS;
        setEcho("");
        // The OT2 name is mandatory.
        if (null == ot2name || ot2name.isEmpty()) {
            setEcho(getEcho() + "Error, must set tspname.\n");
        } else {
            OtLevel2 ot2 = new OtLevel2();
            ot2.setName(ot2name.trim());
            ot2.setLookBackResult(flag);
            int trst = ot2Dao.updateLookBackResult(ot2);
            log.debug("1 update, ot2name=" + ot2name + ", flag=" + flag + ", result=" + trst);
            // Retry up to 5 times, 500 ms apart, until the update is visible,
            // i.e. the stored look-back result is no longer 0.
            for (int i = 0; i < 5; i++) {
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    log.error("sleep error", e);
                }
                OtLevel2 tot2 = ot2Dao.getOtLevel2ByName(ot2name, false);
                if (tot2.getLookBackResult() == 0) {
                    trst = ot2Dao.updateLookBackResult(ot2);
                    log.debug((i + 2) + " update, ot2name=" + ot2name + ", flag=" + flag + ", result=" + trst);
                } else {
                    log.debug((i + 2) + " update sucess, ot2name=" + ot2name + ", flag=" + flag + ", result=" + trst);
                    break;
                }
            }
            echo = "success.\n";
        }
        log.debug(getEcho());
        /* When Struts2 tags are used to render the result, two extra blank
         * lines appear, which looks bad on the command line; JSP's out does
         * not add them.  So the result message is stored in the session here
         * and read back by the JSP page.
         */
        ActionContext ctx = ActionContext.getContext();
        ctx.getSession().put("echo", getEcho());
        return result;
    }

    /**
     * @param ot2name the ot2name to set
     */
    public void setOt2name(String ot2name) {
        this.ot2name = ot2name;
    }

    /**
     * @param flag the flag to set
     */
    public void setFlag(Short flag) {
        this.flag = flag;
    }

    /**
     * @return the echo
     */
    public String getEcho() {
        return echo;
    }

    /**
     * @param echo the echo to set
     */
    public void setEcho(String echo) {
        this.echo = echo;
    }

    /**
     * @param ot2Dao the ot2Dao to set
     */
    public void setOt2Dao(OtLevel2Dao ot2Dao) {
        this.ot2Dao = ot2Dao;
    }
}
| apache-2.0 |
cgtz/ambry | ambry-messageformat/src/test/java/com/github/ambry/messageformat/MessageMetadataTest.java | 1555 | /*
* Copyright 2017 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.messageformat;
import com.github.ambry.utils.TestUtils;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.Unpooled;
import java.io.DataInputStream;
import java.nio.ByteBuffer;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit test for {@code MessageMetadata} construction and serialization.
 */
public class MessageMetadataTest {
  /**
   * Serializes a {@code MessageMetadata} holding a random 256-byte encryption
   * key and verifies that deserialization yields the same key.
   * @throws Exception on serialization or deserialization failure
   */
  @Test
  public void testInstantiationAndSerDe() throws Exception {
    ByteBuffer originalKey = ByteBuffer.wrap(TestUtils.getRandomBytes(256));
    MessageMetadata original = new MessageMetadata(originalKey.duplicate());

    // Write the metadata into a buffer sized exactly to its serialized form.
    ByteBuf buffer = Unpooled.buffer(original.sizeInBytes());
    original.serializeMessageMetadata(buffer);

    // Read it back through a stream view of the same buffer.
    DataInputStream input = new DataInputStream(new ByteBufInputStream(buffer));
    MessageMetadata roundTripped = MessageMetadata.deserializeMessageMetadata(input);
    Assert.assertEquals(originalKey, roundTripped.getEncryptionKey());
  }
}
| apache-2.0 |
unbounce/raml-tester | src/main/java/guru/nidi/ramltester/spring/RamlRestTemplate.java | 3350 | /*
* Copyright © 2014 Stefan Niederhauser (nidin@gmx.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package guru.nidi.ramltester.spring;
import guru.nidi.ramltester.core.*;
import org.springframework.http.client.*;
import org.springframework.web.client.RestTemplate;
import java.util.Collections;
/**
 * A {@code RestTemplate} that checks every request/response pair against a RAML
 * definition via a {@link RamlRequestInterceptor}.  The interceptor is
 * installed around a buffering wrapper of the underlying request factory so
 * bodies can be read both by the checker and the caller; check reports are
 * kept in the configured {@link ReportStore}.
 */
public class RamlRestTemplate extends RestTemplate {
    private final RamlChecker ramlChecker;
    // When true, requests are checked but not actually sent.
    private final boolean notSending;
    private final ReportStore reportStore;
    // The raw (unwrapped) factory, kept so derived instances can re-wrap it
    // without stacking interceptors.
    private final ClientHttpRequestFactory originalRequestFactory;

    private RamlRestTemplate(RamlChecker ramlChecker, boolean notSending, ReportStore reportStore, ClientHttpRequestFactory requestFactory) {
        this.ramlChecker = ramlChecker;
        this.notSending = notSending;
        this.reportStore = reportStore;
        this.originalRequestFactory = requestFactory;
        final RamlRequestInterceptor interceptor = new RamlRequestInterceptor(ramlChecker, notSending, reportStore);
        setRequestFactory(new InterceptingClientHttpRequestFactory(
                new BufferingClientHttpRequestFactory(requestFactory), Collections.<ClientHttpRequestInterceptor>singletonList(interceptor)));
    }

    private RamlRestTemplate(RamlChecker ramlChecker, boolean notSending, ReportStore reportStore, RamlRestTemplate restTemplate) {
        // Reuse the source template's raw factory to avoid double interception.
        this(ramlChecker, notSending, reportStore, restTemplate.originalRequestFactory);
        init(restTemplate);
    }

    private RamlRestTemplate(RamlChecker ramlChecker, boolean notSending, ReportStore reportStore, RestTemplate restTemplate) {
        this(ramlChecker, notSending, reportStore, restTemplate.getRequestFactory());
        init(restTemplate);
    }

    /** Creates a checking template around a raw request factory. */
    public RamlRestTemplate(RamlChecker ramlChecker, ClientHttpRequestFactory requestFactory) {
        this(ramlChecker, false, new ThreadLocalReportStore(), requestFactory);
    }

    /** Creates a checking template that reuses the given template's factory and settings. */
    public RamlRestTemplate(RamlChecker ramlChecker, RestTemplate restTemplate) {
        this(ramlChecker, false, new ThreadLocalReportStore(), restTemplate);
    }

    /** Creates a checking template derived from another checking template. */
    public RamlRestTemplate(RamlChecker ramlChecker, RamlRestTemplate restTemplate) {
        this(ramlChecker, false, new ThreadLocalReportStore(), restTemplate);
    }

    /** Returns a copy that checks requests without actually sending them. */
    public RamlRestTemplate notSending() {
        return new RamlRestTemplate(ramlChecker, true, reportStore, this);
    }

    /** Returns a copy whose reports are additionally fed into the given aggregator. */
    public RamlRestTemplate aggregating(ReportAggregator aggregator) {
        return new RamlRestTemplate(ramlChecker, notSending, new AggregatingReportStore(reportStore, aggregator), this);
    }

    // Copies error handler and message converters from the source template.
    private void init(RestTemplate restTemplate) {
        setErrorHandler(restTemplate.getErrorHandler());
        setMessageConverters(restTemplate.getMessageConverters());
    }

    /** The report of the most recent check, as recorded by the configured report store. */
    public RamlReport getLastReport() {
        return reportStore.getLastReport();
    }
}
| apache-2.0 |
250203726/ams | src/com/ams/assetmanage/assetchange/model/AssetChangeModel.java | 2138 | package com.ams.assetmanage.assetchange.model;
import java.sql.Timestamp;
/**
 * View model for an asset transfer (reallocation) record.
 * @author simon
 * @date 2016-11-01 10:44:05 AM
 */
public class AssetChangeModel {
    private String aciId; // asset-transfer record id, primary key
    private String assetId; // asset id
    private String originalDepartment; // department that previously used the asset
    private String originalPrincipal; // previous user/custodian
    private String nowDepartment; // current department
    private String nowPrincipal; // current custodian
    private Timestamp changeTmie; // transfer time (field keeps the original "Tmie" typo; getters are public API)
    private String assetStatus; // asset status
    private String assetAttach; // attachment
    private String remark; // remark

    public String getAciId() {
        return aciId;
    }
    public void setAciId(String aciId) {
        this.aciId = aciId;
    }
    public String getAssetId() {
        return assetId;
    }
    public void setAssetId(String assetId) {
        this.assetId = assetId;
    }
    public String getOriginalDepartment() {
        return originalDepartment;
    }
    public void setOriginalDepartment(String originalDepartment) {
        this.originalDepartment = originalDepartment;
    }
    public String getOriginalPrincipal() {
        return originalPrincipal;
    }
    public void setOriginalPrincipal(String originalPrincipal) {
        this.originalPrincipal = originalPrincipal;
    }
    public String getNowDepartment() {
        return nowDepartment;
    }
    public void setNowDepartment(String nowDepartment) {
        this.nowDepartment = nowDepartment;
    }
    public String getNowPrincipal() {
        return nowPrincipal;
    }
    public void setNowPrincipal(String nowPrincipal) {
        this.nowPrincipal = nowPrincipal;
    }
    public Timestamp getChangeTmie() {
        return changeTmie;
    }
    public void setChangeTmie(Timestamp changeTmie) {
        this.changeTmie = changeTmie;
    }
    public String getAssetStatus() {
        return assetStatus;
    }
    public void setAssetStatus(String assetStatus) {
        this.assetStatus = assetStatus;
    }
    public String getAssetAttach() {
        return assetAttach;
    }
    public void setAssetAttach(String assetAttach) {
        this.assetAttach = assetAttach;
    }
    public String getRemark() {
        return remark;
    }
    public void setRemark(String remark) {
        this.remark = remark;
    }
}
| apache-2.0 |
NicholasAzar/light | rule/src/main/java/com/networknt/light/rule/user/RevokeRefreshTokenEvRule.java | 1119 | /*
* Copyright 2015 Network New Technologies Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.networknt.light.rule.user;
import com.networknt.light.rule.Rule;
import java.util.Map;
/**
 * Created by steve on 20/01/15.
 *
 * Event rule that revokes a user's refresh token: the first argument is the
 * event map, whose "data" payload is handed to the inherited revocation logic.
 */
public class RevokeRefreshTokenEvRule extends AbstractUserRule implements Rule {
    @SuppressWarnings("unchecked")
    public boolean execute (Object ...objects) throws Exception {
        final Map<String, Object> event = (Map<String, Object>) objects[0];
        final Map<String, Object> payload = (Map<String, Object>) event.get("data");
        revokeRefreshToken(payload);
        return true;
    }
}
| apache-2.0 |
mdogan/hazelcast | hazelcast/src/main/java/com/hazelcast/collection/impl/collection/CollectionContainerCollector.java | 2009 | /*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.collection.impl.collection;
import com.hazelcast.config.MergePolicyConfig;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.spi.impl.merge.AbstractNamedContainerCollector;
import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
/**
 * Collects {@code CollectionContainer}s for split-brain merging.  Owned data
 * lives in the container's backing collection, backup data in its backing map.
 */
class CollectionContainerCollector extends AbstractNamedContainerCollector<CollectionContainer> {

    CollectionContainerCollector(NodeEngine nodeEngine, ConcurrentMap<String, CollectionContainer> containers) {
        super(nodeEngine, containers);
    }

    @Override
    protected MergePolicyConfig getMergePolicyConfig(CollectionContainer container) {
        return container.getConfig().getMergePolicyConfig();
    }

    @Override
    protected void destroy(CollectionContainer container) {
        // Owned data is stored in the collection.
        container.getCollection().clear();
    }

    @Override
    protected void destroyBackup(CollectionContainer container) {
        // Backup data is stored in the map.
        container.getMap().clear();
    }

    @Override
    protected int getMergingValueCount() {
        // Total size across every collected container.
        return getCollectedContainers().values().stream()
                .flatMap(Collection::stream)
                .mapToInt(CollectionContainer::size)
                .sum();
    }
}
| apache-2.0 |
Nasdaq/presto | presto-main/src/test/java/com/facebook/presto/transaction/TestTransactionManager.java | 9252 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.transaction;
import com.facebook.presto.connector.ConnectorId;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.connector.Connector;
import com.facebook.presto.spi.connector.ConnectorMetadata;
import com.facebook.presto.testing.TestingConnectorContext;
import com.facebook.presto.tpch.TpchConnectorFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.units.Duration;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.spi.StandardErrorCode.TRANSACTION_ALREADY_ABORTED;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
/**
 * Tests the {@link TransactionManager} lifecycle — commit, abort, explicit
 * failure, and idle-timeout expiration — checking the bookkeeping exposed
 * via {@link TransactionInfo} at each step.  Each test registers a TPCH
 * connector under {@code test_catalog}.
 */
public class TestTransactionManager
{
    private static final String CATALOG_NAME = "test_catalog";
    private static final ConnectorId CONNECTOR_ID = new ConnectorId(CATALOG_NAME);
    // Executor the manager uses to complete (commit/abort) transactions.
    private final ExecutorService finishingExecutor = newCachedThreadPool(daemonThreadsNamed("transaction-%s"));

    @AfterClass
    public void tearDown()
            throws Exception
    {
        finishingExecutor.shutdownNow();
    }

    /** Begin, touch connector metadata, commit: no transactions must remain. */
    @Test
    public void testTransactionWorkflow()
            throws Exception
    {
        try (IdleCheckExecutor executor = new IdleCheckExecutor()) {
            TransactionManager transactionManager = TransactionManager.create(new TransactionManagerConfig(), executor.getExecutor(), finishingExecutor);
            Connector c1 = new TpchConnectorFactory().create(CATALOG_NAME, ImmutableMap.of(), new TestingConnectorContext());
            transactionManager.addConnector(CONNECTOR_ID, c1);

            TransactionId transactionId = transactionManager.beginTransaction(false);
            assertEquals(transactionManager.getAllTransactionInfos().size(), 1);
            // A fresh transaction has touched no connectors and written nothing.
            TransactionInfo transactionInfo = transactionManager.getTransactionInfo(transactionId);
            assertFalse(transactionInfo.isAutoCommitContext());
            assertTrue(transactionInfo.getConnectorIds().isEmpty());
            assertFalse(transactionInfo.getWrittenConnectorId().isPresent());

            // Accessing connector metadata registers the connector with the transaction.
            ConnectorMetadata metadata = transactionManager.getMetadata(transactionId, CONNECTOR_ID);
            metadata.listSchemaNames(TEST_SESSION.toConnectorSession(CONNECTOR_ID));
            transactionInfo = transactionManager.getTransactionInfo(transactionId);
            assertEquals(transactionInfo.getConnectorIds(), ImmutableList.of(CONNECTOR_ID));
            assertFalse(transactionInfo.getWrittenConnectorId().isPresent());

            transactionManager.asyncCommit(transactionId).join();

            assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        }
    }

    /** Same as the commit workflow, but finishing with an abort. */
    @Test
    public void testAbortedTransactionWorkflow()
            throws Exception
    {
        try (IdleCheckExecutor executor = new IdleCheckExecutor()) {
            TransactionManager transactionManager = TransactionManager.create(new TransactionManagerConfig(), executor.getExecutor(), finishingExecutor);
            Connector c1 = new TpchConnectorFactory().create(CATALOG_NAME, ImmutableMap.of(), new TestingConnectorContext());
            transactionManager.addConnector(CONNECTOR_ID, c1);

            TransactionId transactionId = transactionManager.beginTransaction(false);
            assertEquals(transactionManager.getAllTransactionInfos().size(), 1);
            TransactionInfo transactionInfo = transactionManager.getTransactionInfo(transactionId);
            assertFalse(transactionInfo.isAutoCommitContext());
            assertTrue(transactionInfo.getConnectorIds().isEmpty());
            assertFalse(transactionInfo.getWrittenConnectorId().isPresent());

            ConnectorMetadata metadata = transactionManager.getMetadata(transactionId, CONNECTOR_ID);
            metadata.listSchemaNames(TEST_SESSION.toConnectorSession(CONNECTOR_ID));
            transactionInfo = transactionManager.getTransactionInfo(transactionId);
            assertEquals(transactionInfo.getConnectorIds(), ImmutableList.of(CONNECTOR_ID));
            assertFalse(transactionInfo.getWrittenConnectorId().isPresent());

            transactionManager.asyncAbort(transactionId).join();

            assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        }
    }

    /**
     * fail() marks the transaction aborted: further use is rejected with
     * TRANSACTION_ALREADY_ABORTED, but the transaction stays registered
     * until it is explicitly aborted.
     */
    @Test
    public void testFailedTransactionWorkflow()
            throws Exception
    {
        try (IdleCheckExecutor executor = new IdleCheckExecutor()) {
            TransactionManager transactionManager = TransactionManager.create(new TransactionManagerConfig(), executor.getExecutor(), finishingExecutor);
            Connector c1 = new TpchConnectorFactory().create(CATALOG_NAME, ImmutableMap.of(), new TestingConnectorContext());
            transactionManager.addConnector(CONNECTOR_ID, c1);

            TransactionId transactionId = transactionManager.beginTransaction(false);
            assertEquals(transactionManager.getAllTransactionInfos().size(), 1);
            TransactionInfo transactionInfo = transactionManager.getTransactionInfo(transactionId);
            assertFalse(transactionInfo.isAutoCommitContext());
            assertTrue(transactionInfo.getConnectorIds().isEmpty());
            assertFalse(transactionInfo.getWrittenConnectorId().isPresent());

            ConnectorMetadata metadata = transactionManager.getMetadata(transactionId, CONNECTOR_ID);
            metadata.listSchemaNames(TEST_SESSION.toConnectorSession(CONNECTOR_ID));
            transactionInfo = transactionManager.getTransactionInfo(transactionId);
            assertEquals(transactionInfo.getConnectorIds(), ImmutableList.of(CONNECTOR_ID));
            assertFalse(transactionInfo.getWrittenConnectorId().isPresent());

            transactionManager.fail(transactionId);
            assertEquals(transactionManager.getAllTransactionInfos().size(), 1);

            try {
                transactionManager.getMetadata(transactionId, CONNECTOR_ID);
                fail();
            }
            catch (PrestoException e) {
                assertEquals(e.getErrorCode(), TRANSACTION_ALREADY_ABORTED.toErrorCode());
            }
            assertEquals(transactionManager.getAllTransactionInfos().size(), 1);

            transactionManager.asyncAbort(transactionId).join();

            assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        }
    }

    /**
     * With a 1 ms idle timeout checked every 5 ms, a transaction marked
     * inactive must be expired and removed shortly afterwards.
     */
    @Test
    public void testExpiration()
            throws Exception
    {
        try (IdleCheckExecutor executor = new IdleCheckExecutor()) {
            TransactionManager transactionManager = TransactionManager.create(
                    new TransactionManagerConfig()
                            .setIdleTimeout(new Duration(1, TimeUnit.MILLISECONDS))
                            .setIdleCheckInterval(new Duration(5, TimeUnit.MILLISECONDS)),
                    executor.getExecutor(),
                    finishingExecutor);

            TransactionId transactionId = transactionManager.beginTransaction(false);

            assertEquals(transactionManager.getAllTransactionInfos().size(), 1);
            TransactionInfo transactionInfo = transactionManager.getTransactionInfo(transactionId);
            assertFalse(transactionInfo.isAutoCommitContext());
            assertTrue(transactionInfo.getConnectorIds().isEmpty());
            assertFalse(transactionInfo.getWrittenConnectorId().isPresent());

            transactionManager.trySetInactive(transactionId);
            // Allow ample time for the idle check to run and expire the transaction.
            TimeUnit.MILLISECONDS.sleep(100);

            assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        }
    }

    /** Scoped holder for the scheduled executor driving idle-transaction checks. */
    private static class IdleCheckExecutor
            implements Closeable
    {
        private final ScheduledExecutorService executorService = newSingleThreadScheduledExecutor(daemonThreadsNamed("idle-check"));

        public ScheduledExecutorService getExecutor()
        {
            return executorService;
        }

        @Override
        public void close()
                throws IOException
        {
            executorService.shutdownNow();
        }
    }
}
| apache-2.0 |
apucher/pinot | pinot-broker/src/main/java/com/linkedin/pinot/broker/routing/builder/GeneratorBasedRoutingTableBuilder.java | 13781 | /**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.broker.routing.builder;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
/**
 * Routing table builder that uses a random routing table generator to create multiple candidate
 * routing tables and keeps the best-balanced ones. See a more detailed explanation of the
 * algorithm in {@link LowLevelConsumerRoutingTableBuilder} and
 * {@link LargeClusterRoutingTableBuilder}.
 */
public abstract class GeneratorBasedRoutingTableBuilder extends BaseRoutingTableBuilder {

  /** Number of routing tables to keep after the optimization phase. */
  private static final int ROUTING_TABLE_COUNT = 500;

  /** Total number of candidate routing tables generated during the optimization phase. */
  private static final int ROUTING_TABLE_GENERATION_COUNT = 1000;

  /**
   * Generates a routing table, decorated with a metric used to rank candidates.
   *
   * <p>The metric is the unnormalized variance (sum of squared differences from the mean) of the
   * number of segments assigned per server; lower values mean a more evenly balanced table. It is
   * only ever compared between candidates, so normalization is unnecessary.
   *
   * @param segmentToServersMap mapping from segment name to the servers able to serve it
   * @return a pair of a routing table and its associated metric
   */
  private Pair<Map<String, List<String>>, Float> generateRoutingTableWithMetric(
      Map<String, List<String>> segmentToServersMap) {
    Map<String, List<String>> routingTable = generateRoutingTable(segmentToServersMap);

    // Compute the number of segments and servers (for the average part of the variance)
    int segmentCount = 0;
    int serverCount = 0;
    for (List<String> segmentsForServer : routingTable.values()) {
      segmentCount += segmentsForServer.size();
      serverCount++;
    }

    // Compute the variance of the number of segments allocated per server
    float averageSegmentCount = ((float) segmentCount) / serverCount;
    float variance = 0.0f;
    for (List<String> segmentsForServer : routingTable.values()) {
      float difference = segmentsForServer.size() - averageSegmentCount;
      variance += difference * difference;
    }

    return new ImmutablePair<>(routingTable, variance);
  }

  /**
   * Generates one candidate routing table: picks a random server subset of size
   * {@link #getTargetNumServersPerQuery()}, extends it until every segment is covered, then
   * assigns each segment (hardest first) to the eligible server with the fewest segments.
   *
   * @param segmentToServersMap mapping from segment name to the servers able to serve it
   * @return mapping from server to the segments routed to it
   */
  Map<String, List<String>> generateRoutingTable(Map<String, List<String>> segmentToServersMap) {
    Map<String, List<String>> routingTable = new HashMap<>();
    if (segmentToServersMap.isEmpty()) {
      return routingTable;
    }

    // Construct the inverse map from server to the list of segments it can serve
    Map<String, List<String>> serverToSegmentsMap = new HashMap<>();
    for (Map.Entry<String, List<String>> entry : segmentToServersMap.entrySet()) {
      for (String serverName : entry.getValue()) {
        serverToSegmentsMap.computeIfAbsent(serverName, key -> new ArrayList<>()).add(entry.getKey());
      }
    }

    int numSegments = segmentToServersMap.size();
    List<String> servers = new ArrayList<>(serverToSegmentsMap.keySet());
    int numServers = servers.size();

    // Set of segments that have no instance serving them yet
    Set<String> segmentsNotHandledByServers = new HashSet<>(segmentToServersMap.keySet());

    // Set of servers in this routing table
    int targetNumServersPerQuery = getTargetNumServersPerQuery();
    Set<String> serversInRoutingTable = new HashSet<>(targetNumServersPerQuery);

    if (numServers <= targetNumServersPerQuery) {
      // If there are not enough instances, add them all
      serversInRoutingTable.addAll(servers);
      segmentsNotHandledByServers.clear();
    } else {
      // Otherwise randomly pick targetNumServersPerQuery distinct instances
      while (serversInRoutingTable.size() < targetNumServersPerQuery) {
        String randomServer = servers.get(_random.nextInt(numServers));
        // Set.add returns true only for a newly added server, avoiding a separate contains() check
        if (serversInRoutingTable.add(randomServer)) {
          segmentsNotHandledByServers.removeAll(serverToSegmentsMap.get(randomServer));
        }
      }
    }

    // If there are segments that no picked instance can serve, add servers until all are covered
    while (!segmentsNotHandledByServers.isEmpty()) {
      String segmentNotHandledByServers = segmentsNotHandledByServers.iterator().next();
      // Pick a random server that can serve this segment
      List<String> serversForSegment = segmentToServersMap.get(segmentNotHandledByServers);
      String randomServer = serversForSegment.get(_random.nextInt(serversForSegment.size()));
      serversInRoutingTable.add(randomServer);
      segmentsNotHandledByServers.removeAll(serverToSegmentsMap.get(randomServer));
    }

    // Sort all the segments in ascending order of number of eligible replicas, so that the
    // hardest-to-place segments (fewest replicas) are assigned first
    PriorityQueue<Pair<String, List<String>>> segmentToReplicaSetQueue = new PriorityQueue<>(numSegments,
        Comparator.comparingInt((Pair<String, List<String>> pair) -> pair.getRight().size()));
    for (Map.Entry<String, List<String>> entry : segmentToServersMap.entrySet()) {
      // Servers for the segment is the intersection of all servers for this segment and the
      // servers that we have in this routing table
      List<String> serversForSegment = new ArrayList<>(entry.getValue());
      serversForSegment.retainAll(serversInRoutingTable);
      segmentToReplicaSetQueue.add(new ImmutablePair<>(entry.getKey(), serversForSegment));
    }

    // Assign each segment to the eligible server with the fewest segments assigned so far,
    // which keeps the routing table as even as possible
    Pair<String, List<String>> segmentServersPair;
    while ((segmentServersPair = segmentToReplicaSetQueue.poll()) != null) {
      String segmentName = segmentServersPair.getLeft();
      List<String> serversForSegment = segmentServersPair.getRight();
      String serverWithLeastSegmentsAssigned = getServerWithLeastSegmentsAssigned(serversForSegment, routingTable);
      routingTable.computeIfAbsent(serverWithLeastSegmentsAssigned, key -> new ArrayList<>()).add(segmentName);
    }

    return routingTable;
  }

  /*
  The weighted random selection logic for reference
  This can be used to replace getServerWithLeastSegmentsAssigned()

  private String pickWeightedRandomReplica(Set<String> validReplicaSet,
      Map<String, Set<String>> instanceToSegmentSetMap, Random random) {
    // No replicas?
    if (validReplicaSet.isEmpty()) {
      return null;
    }
    // Only one valid replica?
    if (validReplicaSet.size() == 1) {
      return validReplicaSet.iterator().next();
    }
    // Find maximum segment count assigned to a replica
    String[] replicas = validReplicaSet.toArray(new String[validReplicaSet.size()]);
    int[] replicaSegmentCounts = new int[validReplicaSet.size()];
    int maxSegmentCount = 0;
    for (int i = 0; i < replicas.length; i++) {
      String replica = replicas[i];
      int replicaSegmentCount = 0;
      if (instanceToSegmentSetMap.containsKey(replica)) {
        replicaSegmentCount = instanceToSegmentSetMap.get(replica).size();
      }
      replicaSegmentCounts[i] = replicaSegmentCount;
      if (maxSegmentCount < replicaSegmentCount) {
        maxSegmentCount = replicaSegmentCount;
      }
    }
    // Compute replica weights
    int[] replicaWeights = new int[validReplicaSet.size()];
    int totalReplicaWeights = 0;
    for (int i = 0; i < replicas.length; i++) {
      int replicaWeight = maxSegmentCount - replicaSegmentCounts[i];
      replicaWeights[i] = replicaWeight;
      totalReplicaWeights += replicaWeight;
    }
    // If all replicas are equal, just pick a random replica
    if (totalReplicaWeights == 0) {
      return replicas[random.nextInt(replicas.length)];
    }
    // Pick the proper replica given their respective weights
    int randomValue = random.nextInt(totalReplicaWeights);
    int i = 0;
    while(replicaWeights[i] == 0 || replicaWeights[i] <= randomValue) {
      randomValue -= replicaWeights[i];
      ++i;
    }
    return replicas[i];
  }
  */

  /**
   * Generates many candidate routing tables and keeps the {@link #ROUTING_TABLE_COUNT} with the
   * lowest per-server segment-count variance.
   *
   * <p>The default routing table algorithm tries to balance all available segments across all
   * servers, so that each server is hit on every query. This works fine for small clusters but
   * adds significant per-query overhead for large ones (one request enqueued, processed,
   * deserialized and aggregated per server) and worsens tail latency, since a query cannot return
   * before the slowest server responds. Instead, each generated routing table uses only a random
   * subset of servers (extended until all segments are covered) with segments balanced within it;
   * generating many such tables and discarding the worst approximately balances workload both
   * within each table and across tables.
   */
  @Override
  protected List<Map<String, List<String>>> computeRoutingTablesFromSegmentToServersMap(
      Map<String, List<String>> segmentToServersMap) {
    // Max-heap on the metric, so the worst of the kept routing tables is always at the head.
    // Comparing right-to-left instead of negating the comparator result is the idiomatic way to
    // reverse an ordering.
    PriorityQueue<Pair<Map<String, List<String>>, Float>> topRoutingTables = new PriorityQueue<>(ROUTING_TABLE_COUNT,
        (left, right) -> Float.compare(right.getValue(), left.getValue()));

    for (int i = 0; i < ROUTING_TABLE_COUNT; i++) {
      topRoutingTables.add(generateRoutingTableWithMetric(segmentToServersMap));
    }

    // Generate more routing tables and keep the ROUTING_TABLE_COUNT best ones
    for (int i = 0; i < (ROUTING_TABLE_GENERATION_COUNT - ROUTING_TABLE_COUNT); ++i) {
      Pair<Map<String, List<String>>, Float> newRoutingTable = generateRoutingTableWithMetric(segmentToServersMap);
      Pair<Map<String, List<String>>, Float> worstRoutingTable = topRoutingTables.peek();
      // If the new routing table is better than the worst one, keep it
      if (newRoutingTable.getRight() < worstRoutingTable.getRight()) {
        topRoutingTables.poll();
        topRoutingTables.add(newRoutingTable);
      }
    }

    // Return the best routing tables
    List<Map<String, List<String>>> routingTables = new ArrayList<>(topRoutingTables.size());
    while (!topRoutingTables.isEmpty()) {
      routingTables.add(topRoutingTables.poll().getKey());
    }
    return routingTables;
  }

  /**
   * Returns the target number of servers to hit per query.
   */
  abstract int getTargetNumServersPerQuery();
}
| apache-2.0 |
alibaba/fastjson | src/test/java/com/alibaba/json/test/benchmark/encode/ArrayObjectEmptyMap1000Encode.java | 685 | package com.alibaba.json.test.benchmark.encode;
import java.util.Collections;
import com.alibaba.json.test.benchmark.BenchmarkCase;
import com.alibaba.json.test.codec.Codec;
public class ArrayObjectEmptyMap1000Encode extends BenchmarkCase {

    /** Payload encoded on every benchmark run: an array of 1000 empty maps. */
    private Object object;

    public ArrayObjectEmptyMap1000Encode() {
        super("ArrayObjectEmptyMap1000Encode");
        // Build the payload once up front so each execution measures encoding only
        Object[] payload = new Object[1000];
        int index = 0;
        while (index < payload.length) {
            payload[index] = Collections.emptyMap();
            index++;
        }
        this.object = payload;
    }

    /** Encodes the pre-built payload with the codec under test. */
    @Override
    public void execute(Codec codec) throws Exception {
        codec.encode(object);
    }
}
| apache-2.0 |
yawkat/dagger | core/src/main/java/dagger/Subcomponent.java | 1478 | /*
* Copyright (C) 2015 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger;
import java.lang.annotation.Documented;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
/**
 * A component that inherits the bindings from a parent {@link Component} or {@link Subcomponent}.
 *
 * <p>Subcomponent implementations only exist in the context of a parent and are associated with
 * parents using factory methods on the component. Simply add a method that returns the
 * subcomponent on the parent.
 *
 * <p>Bindings contributed by the {@linkplain #modules() modules} listed here are added on top of
 * the bindings inherited from the parent.
 *
 * @author Gregory Kick
 * @since 2.0
 */
// TODO(gak): add missing spec for @Scope, validation, etc.
// NOTE(review): no @Retention is declared, so the default CLASS retention applies — confirm
// that this is intentional (the annotation would not be visible via runtime reflection).
@Target(TYPE)
@Documented
public @interface Subcomponent {
  /**
   * A list of classes annotated with {@link Module} whose bindings are used to generate the
   * component implementation.
   *
   * <p>At the moment, only modules without arguments are supported.
   */
  Class<?>[] modules() default {};
}
| apache-2.0 |
mrprona92/DOTAMOBILE | app/src/main/java/com/badr/infodota/hero/task/HeroLoadRequest.java | 806 | package com.badr.infodota.hero.task;
import android.content.Context;
import com.badr.infodota.BeanContainer;
import com.badr.infodota.base.service.TaskRequest;
import com.badr.infodota.hero.api.Hero;
import com.badr.infodota.hero.service.HeroService;
/**
 * Task request that loads the list of heroes matching a free-text name filter.
 *
 * Created by ABadretdinov
 * 20.08.2015
 * 14:44
 */
public class HeroLoadRequest extends TaskRequest<Hero.List> {

    // Both fields are assigned exactly once in the constructor, so they are declared final to
    // make the request immutable (safe to hand off to a background executor).

    /** Context forwarded to the hero service. */
    private final Context mContext;

    /** Filter string forwarded to the hero service; its matching semantics are defined there. */
    private final String mFilter;

    public HeroLoadRequest(Context context, String filter) {
        super(Hero.List.class);
        mContext = context;
        mFilter = filter;
    }

    /**
     * Loads the filtered hero list via the shared {@link HeroService}.
     *
     * @return heroes matching the filter, as produced by the service
     * @throws Exception propagated from the underlying service call
     */
    @Override
    public Hero.List loadData() throws Exception {
        HeroService heroService = BeanContainer.getInstance().getHeroService();
        return heroService.getFilteredHeroes(mContext, mFilter);
    }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-healthcare/v1/1.31.0/com/google/api/services/healthcare/v1/model/FhirFilter.java | 2336 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.healthcare.v1.model;
/**
* Filter configuration.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Healthcare API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
// NOTE: generated by the Google API code generator (see file header); manual edits will be lost
// on regeneration.
public final class FhirFilter extends com.google.api.client.json.GenericJson {

  /**
   * List of resources to include in the output. If this list is empty or not specified, all
   * resources are included in the output.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Resources resources;

  /**
   * List of resources to include in the output. If this list is empty or not specified, all
   * resources are included in the output.
   * @return value or {@code null} for none
   */
  public Resources getResources() {
    return resources;
  }

  /**
   * List of resources to include in the output. If this list is empty or not specified, all
   * resources are included in the output.
   * @param resources resources or {@code null} for none
   */
  public FhirFilter setResources(Resources resources) {
    this.resources = resources;
    return this;
  }

  /**
   * Stores an arbitrary key/value pair via the GenericJson superclass, narrowing the return type
   * so calls can be chained on {@code FhirFilter}.
   */
  @Override
  public FhirFilter set(String fieldName, Object value) {
    return (FhirFilter) super.set(fieldName, value);
  }

  /**
   * Returns a copy of this object via the GenericJson superclass, narrowed to {@code FhirFilter}.
   */
  @Override
  public FhirFilter clone() {
    return (FhirFilter) super.clone();
  }
}
| apache-2.0 |
consulo/consulo-android | tools-base/layoutlib-api/src/main/java/com/android/util/Pair.java | 3862 | /*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.util;
/**
* A Pair class is simply a 2-tuple for use in this package. We might want to
* think about adding something like this to a more central utility place, or
* replace it by a common tuple class if one exists, or even rewrite the layout
* classes using this Pair by a more dedicated data structure (so we don't have
* to pass around generic signatures as is currently done, though at least the
* construction is helped a bit by the {@link #of} factory method.
*
* =================================================================================================
* WARNING
* This copy of the class is to be used only by layoutlib and is not to be changed, EVER.
* To use Pair outside of layoutlib, use com.android.utils.Pair, found in common.jar instead.
* =================================================================================================
*
* @param <S> The type of the first value
* @param <T> The type of the second value
*
* @deprecated This is used for backward compatibility with layoutlib-api. Use com.android.utils.Pair instead
*/
@Deprecated
public class Pair<S,T> {
    private final S mFirst;
    private final T mSecond;

    // Use {@link Pair#of} factory instead since it infers generic types
    private Pair(S first, T second) {
        this.mFirst = first;
        this.mSecond = second;
    }

    /**
     * Return the first item in the pair
     *
     * @return the first item in the pair
     */
    public S getFirst() {
        return mFirst;
    }

    /**
     * Return the second item in the pair
     *
     * @return the second item in the pair
     */
    public T getSecond() {
        return mSecond;
    }

    /**
     * Constructs a new pair of the given two objects, inferring generic types.
     *
     * @param first the first item to store in the pair
     * @param second the second item to store in the pair
     * @param <S> the type of the first item
     * @param <T> the type of the second item
     * @return a new pair wrapping the two items
     */
    public static <S,T> Pair<S,T> of(S first, T second) {
        return new Pair<S,T>(first,second);
    }

    @Override
    public String toString() {
        return "Pair [first=" + mFirst + ", second=" + mSecond + "]";
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((mFirst == null) ? 0 : mFirst.hashCode());
        result = prime * result + ((mSecond == null) ? 0 : mSecond.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        // Wildcard cast instead of the raw type: equality below only relies on Object.equals,
        // so no unchecked conversion (and no @SuppressWarnings("unchecked")) is needed.
        Pair<?, ?> other = (Pair<?, ?>) obj;
        if (mFirst == null ? other.mFirst != null : !mFirst.equals(other.mFirst)) {
            return false;
        }
        return mSecond == null ? other.mSecond == null : mSecond.equals(other.mSecond);
    }
}
| apache-2.0 |
wildfly-extras/wildfly-camel | itests/standalone/basic/src/test/java/org/wildfly/camel/test/ftp/SftpIntegrationTest.java | 4065 | /*
* #%L
* Wildfly Camel :: Testsuite
* %%
* Copyright (C) 2013 - 2015 RedHat
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wildfly.camel.test.ftp;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.DefaultCamelContext;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.as.arquillian.api.ServerSetup;
import org.jboss.as.arquillian.api.ServerSetupTask;
import org.jboss.as.arquillian.container.ManagementClient;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.camel.test.common.ssh.EmbeddedSSHServer;
import org.wildfly.camel.test.common.utils.FileUtils;
import org.wildfly.camel.test.common.utils.TestUtils;
import org.wildfly.extension.camel.CamelAware;
// Arquillian in-container test: verifies that a Camel sftp producer endpoint can upload a file
// to an embedded SSH server started by the @ServerSetup task below.
@CamelAware
@RunWith(Arquillian.class)
@ServerSetup({ SftpIntegrationTest.SSHServerSetupTask.class })
public class SftpIntegrationTest {

    // Name of a deployed resource holding the build's basedir (written in createDeployment,
    // read back via TestUtils.getResourceValue to resolve absolute paths in-container)
    private static final String FILE_BASEDIR = "basedir.txt";

    // Directory (relative to basedir) used as the sftp upload target
    private static final Path FTP_ROOT_DIR = Paths.get("target/sftp");

    // known_hosts file referenced by the sftp endpoint URI
    private static final Path KNOWN_HOSTS = FTP_ROOT_DIR.resolve("known_hosts");

    // Starts/stops the embedded SSH server around the test container lifecycle
    static class SSHServerSetupTask implements ServerSetupTask {

        static final EmbeddedSSHServer sshServer = new EmbeddedSSHServer(Paths.get("target/sshd"));

        @Override
        public void setup(ManagementClient managementClient, String containerId) throws Exception {
            sshServer.setupSftp();
            sshServer.start();
        }

        @Override
        public void tearDown(ManagementClient managementClient, String containerId) throws Exception {
            sshServer.stop();
        }
    }

    // Builds the archive deployed into the container; bundles the SSH connection string and
    // basedir as resources so the test can read them at runtime
    @Deployment
    public static JavaArchive createDeployment() throws IOException {
        return ShrinkWrap.create(JavaArchive.class, "camel-ftp-tests.jar")
                .addAsResource(new StringAsset(SftpIntegrationTest.SSHServerSetupTask.sshServer.getConnection()), "sftp-connection")
                .addAsResource(new StringAsset(System.getProperty("basedir")), FILE_BASEDIR)
                .addClasses(TestUtils.class, FileUtils.class);
    }

    // Sends a message body to the sftp endpoint and asserts the file appears on disk
    @Test
    public void testSendFile() throws Exception {
        File testFile = resolvePath(FTP_ROOT_DIR).resolve("test.txt").toFile();
        CamelContext camelctx = new DefaultCamelContext();
        camelctx.start();
        try {
            Endpoint endpoint = camelctx.getEndpoint(getSftpEndpointUri());
            // File must not exist before the send, so its presence afterwards proves the upload
            Assert.assertFalse(testFile.exists());
            camelctx.createProducerTemplate().sendBodyAndHeader(endpoint, "Hello", "CamelFileName", "test.txt");
            Assert.assertTrue(testFile.exists());
        } finally {
            camelctx.close();
            FileUtils.deleteDirectory(resolvePath(FTP_ROOT_DIR));
        }
    }

    // Resolves a path relative to the build basedir recorded in the deployed resource
    private Path resolvePath(Path other) throws IOException {
        return Paths.get(TestUtils.getResourceValue(getClass(), "/" + FILE_BASEDIR)).resolve(other);
    }

    // Builds the sftp endpoint URI from the deployed connection-string resource
    private String getSftpEndpointUri() throws IOException {
        String conUrl = TestUtils.getResourceValue(getClass(), "/sftp-connection");
        return String.format("sftp://%s/target/sftp?username=admin&password=admin&knownHostsFile=%s", conUrl, KNOWN_HOSTS);
    }
}
| apache-2.0 |
apache/geronimo | testsuite/webservices-testsuite/jaxws-catalog-tests/catalog-ejb/src/main/java/org/apache/geronimo/jaxws/test/GreeterBean.java | 1798 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.jaxws.test;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.Resource;
import javax.ejb.Stateless;
import javax.jws.WebService;
import javax.xml.ws.WebServiceContext;
@WebService(serviceName = "GreeterService",
            portName = "GreeterPort",
            name = "Greeter",
            targetNamespace = "http://apache.org/greeter_control",
            wsdlLocation = "META-INF/wsdl/greeter_service.wsdl")
@Stateless(name="GreeterBean")
public class GreeterBean {

    /** Injected JAX-WS context for the current invocation; not used by the business methods. */
    @Resource
    private WebServiceContext context;

    /** Returns a personalized greeting for the given name. */
    public String greetMe(String me) {
        String greeting = "Hello " + me;
        return greeting;
    }

    /** Returns a fixed greeting. */
    public String sayHi() {
        return "Hi!";
    }

    /** Lifecycle callback: logs when the container constructs this bean. */
    @PostConstruct
    private void onPostConstruct() {
        System.out.println(this + " PostConstruct");
    }

    /** Lifecycle callback: logs before the container destroys this bean. */
    @PreDestroy()
    private void onPreDestroy() {
        System.out.println(this + " PreDestroy");
    }
}
| apache-2.0 |
apache/incubator-asterixdb | asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/InternalRequestResponse.java | 1989 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.app.message;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.messaging.api.MessageFuture;
import org.apache.asterix.messaging.NCMessageBroker;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public final class InternalRequestResponse implements INcAddressedMessage {

    private static final long serialVersionUID = 1L;

    // Field names are kept as-is: this message is serialized and renaming non-transient
    // fields would change its serialized form.

    /** Id of the request message this response answers; keys the waiting future on the NC. */
    private final long requestMessageId;

    /** Error raised while servicing the request; remains null unless setError is called. */
    private Throwable error;

    public InternalRequestResponse(long requestMessageId) {
        this.requestMessageId = requestMessageId;
    }

    public void setError(Throwable error) {
        this.error = error;
    }

    public Throwable getError() {
        return error;
    }

    /**
     * Delivers this response on the NC: looks up and completes the future registered under
     * {@code requestMessageId}. A missing future is ignored.
     */
    @Override
    public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
        NCMessageBroker broker = (NCMessageBroker) appCtx.getServiceContext().getMessageBroker();
        MessageFuture pendingFuture = broker.deregisterMessageFuture(requestMessageId);
        if (pendingFuture == null) {
            return;
        }
        pendingFuture.complete(this);
    }
}
| apache-2.0 |
ltsopensource/light-task-scheduler | lts-core/src/main/java/com/github/ltsopensource/autoconfigure/resolver/PrimitiveTypeResolver.java | 1086 | package com.github.ltsopensource.autoconfigure.resolver;
import com.github.ltsopensource.autoconfigure.AutoConfigContext;
import com.github.ltsopensource.core.commons.utils.PrimitiveTypeUtils;
import java.beans.PropertyDescriptor;
/**
 * Resolver that populates a bean property by converting the configuration value whose key
 * exactly matches the property name to the property's (primitive) type.
 *
 * @author Robert HG (254963746@qq.com) on 4/20/16.
 */
public class PrimitiveTypeResolver extends AbstractResolver {

    /** Shared stateless instance. */
    public static final PrimitiveTypeResolver INSTANCE = new PrimitiveTypeResolver();

    @Override
    public void resolve(final AutoConfigContext context, final PropertyDescriptor descriptor, final Class<?> propertyType) {
        Filter filter = new Filter() {
            @Override
            public boolean onCondition(String name, String key, String value) {
                // Only an exact key/property-name match selects this property
                return key.equals(name);
            }

            @Override
            public boolean call(String name, String key, String value) {
                // Convert the raw string to the target type and write it onto the bean
                writeProperty(context, descriptor, PrimitiveTypeUtils.convert(value, propertyType));
                // NOTE(review): returning false presumably stops further filtering — confirm
                // against AbstractResolver.doFilter's contract
                return false;
            }
        };
        doFilter(context, descriptor, filter);
    }
}
| apache-2.0 |
amaembo/huntbugs | huntbugs/src/main/java/one/util/huntbugs/analysis/AnalysisListener.java | 1034 | /*
* Copyright 2016 HuntBugs contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package one.util.huntbugs.analysis;
/**
 * Progress callback invoked during analysis; its return value provides a cooperative
 * cancellation mechanism.
 *
 * @author Tagir Valeev
 */
@FunctionalInterface
public interface AnalysisListener {
    /**
     * Called to report analysis progress.
     *
     * @param stepName name of the analysis step being executed
     * @param className name of the class currently being processed
     *                  (presumably may be null between classes — TODO confirm at call sites)
     * @param count number of classes processed
     * @param total number of classes to process
     * @return false if cancel is requested
     */
    public boolean eventOccurred(String stepName, String className, int count, int total);
}
| apache-2.0 |
jakubkrolewski/Android-RTEditor | RTEditor/src/main/java/com/onegravity/rteditor/api/RTMediaFactoryImpl.java | 5115 | /*
* Copyright (C) 2015 Emanuel Moecklin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.onegravity.rteditor.api;
import android.content.Context;
import android.util.Log;
import com.onegravity.rteditor.api.media.RTAudio;
import com.onegravity.rteditor.api.media.RTAudioImpl;
import com.onegravity.rteditor.api.media.RTImage;
import com.onegravity.rteditor.api.media.RTImageImpl;
import com.onegravity.rteditor.api.media.RTMediaSource;
import com.onegravity.rteditor.api.media.RTMediaType;
import com.onegravity.rteditor.api.media.RTVideo;
import com.onegravity.rteditor.api.media.RTVideoImpl;
import com.onegravity.rteditor.media.MediaUtils;
import com.onegravity.rteditor.utils.Helper;
import org.apache.commons.io.IOUtils;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* This is a basic implementation of the RTMediaFactory using either the
* internal (as in Context.context.getFilesDir() or the primary external
* file system (as in Context.getExternalFilesDir(String).
*/
public class RTMediaFactoryImpl implements RTMediaFactory<RTImage, RTAudio, RTVideo> {
private static final long serialVersionUID = 6970361368051595063L;
private File mStoragePath;
public RTMediaFactoryImpl(Context context) {
this(context, true); // use external storage as default
}
public RTMediaFactoryImpl(Context context, boolean externalStorage) {
mStoragePath = externalStorage ?
context.getExternalFilesDir(null) :
context.getFilesDir();
}
/**
* Returns the absolute file path for a certain RTMediaType.
* <p>
* The media type specific path as provided by RTMediaType is appended to
* the storage path (e.g. <storage area>/images for image files).
*/
protected String getAbsolutePath(RTMediaType mediaType) {
File mediaPath = new File(mStoragePath.getAbsolutePath(), mediaType.mediaPath());
if (!mediaPath.exists()) {
mediaPath.mkdirs();
}
return mediaPath.getAbsolutePath();
}
/*
* Use case 1: Inserting media objects into the rich text editor.
*
* This default implementation copies all files into the dedicated media
* storage area.
*/
@Override
/* @inheritDoc */
public RTImage createImage(RTMediaSource mediaSource) {
File targetFile = loadMedia(mediaSource);
return targetFile == null ? null :
new RTImageImpl(targetFile.getAbsolutePath());
}
@Override
/* @inheritDoc */
public RTAudio createAudio(RTMediaSource mediaSource) {
File targetFile = loadMedia(mediaSource);
return targetFile == null ? null :
new RTAudioImpl(targetFile.getAbsolutePath());
}
@Override
/* @inheritDoc */
public RTVideo createVideo(RTMediaSource mediaSource) {
File targetFile = loadMedia(mediaSource);
return targetFile == null ? null :
new RTVideoImpl(targetFile.getAbsolutePath());
}
private File loadMedia(RTMediaSource mediaSource) {
File targetPath = new File(getAbsolutePath(mediaSource.getMediaType()));
File targetFile = MediaUtils.createUniqueFile(targetPath,
mediaSource.getName(),
mediaSource.getMimeType(),
false);
copyFile(mediaSource.getInputStream(), targetFile);
return targetFile;
}
private void copyFile(InputStream in, File targetFile) {
OutputStream out = null;
try {
out = new FileOutputStream(targetFile);
IOUtils.copy(in, out);
} catch (IOException ioe) {
Log.e(getClass().getSimpleName(), ioe.getMessage(), ioe);
} finally {
Helper.closeQuietly(out);
Helper.closeQuietly(in);
}
}
/*
* Use case 2: Load a rich text with referenced media objects into the rich
* text editor.
*
* This default implementation doesn't apply any transformations to the path
* because the files are stored in the file system where they can be
* accessed directly by the rich text editor (via ImageSpan).
*/
@Override
/* @inheritDoc */
public RTImage createImage(String path) {
return new RTImageImpl(path);
}
@Override
/* @inheritDoc */
public RTAudio createAudio(String path) {
return new RTAudioImpl(path);
}
@Override
/* @inheritDoc */
public RTVideo createVideo(String path) {
return new RTVideoImpl(path);
}
} | apache-2.0 |
apache/solr | solr/test-framework/src/java/org/apache/solr/util/BadZookeeperThreadsFilter.java | 1314 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.util;
import com.carrotsearch.randomizedtesting.ThreadFilter;
/**
 * Rejects leaked ZooKeeper login renewal threads (see ZOOKEEPER-2100):
 * anonymous "Thread-N" threads whose stack originates in
 * org.apache.zookeeper.Login$1.
 */
public class BadZookeeperThreadsFilter implements ThreadFilter {

  @Override
  public boolean reject(Thread t) {
    if (!t.getName().startsWith("Thread-")) {
      return false;
    }
    StackTraceElement[] frames = t.getStackTrace();
    return frames.length > 1
        && "org.apache.zookeeper.Login$1".equals(frames[frames.length - 2].getClassName());
  }
}
| apache-2.0 |
cdapio/tigon | tigon-sql/src/main/java/co/cask/tigon/sql/internal/DefaultStreamSchema.java | 1161 | /*
* Copyright © 2014 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.tigon.sql.internal;
import co.cask.tigon.sql.flowlet.GDATField;
import co.cask.tigon.sql.flowlet.StreamSchema;
import java.util.List;
/**
 * Default immutable implementation of {@link StreamSchema}: a named schema
 * backed by an ordered list of GDAT fields.
 */
public class DefaultStreamSchema implements StreamSchema {
  // Both fields are assigned once at construction time and never change,
  // so they are declared final.
  private final List<GDATField> fields;
  private final String name;

  /**
   * @param name the schema (stream) name
   * @param fields the ordered list of fields composing the schema
   */
  public DefaultStreamSchema(String name, List<GDATField> fields) {
    this.name = name;
    this.fields = fields;
  }

  @Override
  public List<GDATField> getFields() {
    return fields;
  }

  @Override
  public String getName() {
    return name;
  }
}
| apache-2.0 |
alien4cloud/alien4cloud | alien4cloud-core/src/main/java/alien4cloud/application/TopologyCompositionService.java | 21252 | package alien4cloud.application;
import static alien4cloud.paas.function.FunctionEvaluator.isGetInput;
import static org.alien4cloud.tosca.normative.constants.NormativeWorkflowNameConstants.INSTALL;
import static org.alien4cloud.tosca.normative.constants.NormativeWorkflowNameConstants.START;
import static org.alien4cloud.tosca.normative.constants.NormativeWorkflowNameConstants.STOP;
import static org.alien4cloud.tosca.normative.constants.NormativeWorkflowNameConstants.UNINSTALL;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.annotation.Resource;
import org.alien4cloud.tosca.model.definitions.AbstractPropertyValue;
import org.alien4cloud.tosca.model.definitions.FunctionPropertyValue;
import org.alien4cloud.tosca.model.templates.Capability;
import org.alien4cloud.tosca.model.templates.NodeGroup;
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.templates.RelationshipTemplate;
import org.alien4cloud.tosca.model.templates.SubstitutionTarget;
import org.alien4cloud.tosca.model.templates.Topology;
import org.alien4cloud.tosca.model.types.NodeType;
import org.springframework.stereotype.Service;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import alien4cloud.component.ICSARRepositorySearchService;
import alien4cloud.exception.AlreadyExistException;
import alien4cloud.exception.CyclicReferenceException;
import alien4cloud.paas.wf.TopologyContext;
import alien4cloud.paas.wf.WorkflowsBuilderService;
import alien4cloud.topology.TopologyServiceCore;
import alien4cloud.utils.MapUtil;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
public class TopologyCompositionService {
@Resource
private ICSARRepositorySearchService csarRepoSearchService;
@Resource
private TopologyServiceCore topologyServiceCore;
@Resource
private WorkflowsBuilderService workflowBuilderService;
    /**
     * Replaces every node that proxies a topology template by the content of that
     * template, processing the deepest compositions first, then re-initializes the
     * standard workflows since the topology structure has changed.
     *
     * @param topology the topology to process (modified in place)
     */
    public void processTopologyComposition(Topology topology) {
        Deque<CompositionCouple> stack = new ArrayDeque<CompositionCouple>();
        recursivelyBuildSubstitutionStack(topology, stack, "");
        // now this stack contains all the embedded topology templates
        if (!stack.isEmpty()) {
            // iterate over the stack in descending order (manage the deepest topologies at a first time).
            Iterator<CompositionCouple> compositionIterator = stack.descendingIterator();
            while (compositionIterator.hasNext()) {
                processComposition(compositionIterator.next());
            }
            if (log.isDebugEnabled()) {
                log.debug(String.format("Topology composition has been processed for topology <%s> substituting %d embeded topologies", topology.getId(),
                        stack.size()));
            }
            // std workflows are reinitialized when some composition is processed
            // TODO: find a better way to manage this
            TopologyContext topologyContext = workflowBuilderService.buildTopologyContext(topology);
            workflowBuilderService.reinitWorkflow(INSTALL, topologyContext, false);
            workflowBuilderService.reinitWorkflow(START, topologyContext, false);
            workflowBuilderService.reinitWorkflow(STOP, topologyContext, false);
            workflowBuilderService.reinitWorkflow(UNINSTALL, topologyContext, false);
            workflowBuilderService.postProcessTopologyWorkflows(topologyContext);
        }
    }
    /**
     * Performs one substitution step:
     * <ul>
     * <li>removes the 'proxy' node from the parent topology.</li>
     * <li>feeds the child's get_input property values from the proxy's properties.</li>
     * <li>re-wires relationships from/to the proxy onto the mapped child nodes.</li>
     * <li>moves the proxy's output attributes and the parent's substitution targets
     * onto the corresponding child nodes.</li>
     * <li>merges the child's node templates into the parent.</li>
     * </ul>
     *
     * @param compositionCouple the parent/child couple to process (both topologies
     *            are modified in place)
     */
    private void processComposition(CompositionCouple compositionCouple) {
        // first of all, remove the proxy node from the parent
        NodeTemplate proxyNodeTemplate = compositionCouple.parent.getNodeTemplates().remove(compositionCouple.nodeName);
        // properties of the proxy are used to feed the property values of child node that use get_input
        // NOTE(review): assumes every get_input in the child refers to an input exposed
        // as a property of the proxy node -- confirm against the substitution contract.
        for (NodeTemplate childNodeTemplate : compositionCouple.child.getNodeTemplates().values()) {
            for (Entry<String, AbstractPropertyValue> propertyEntry : childNodeTemplate.getProperties().entrySet()) {
                AbstractPropertyValue pValue = propertyEntry.getValue();
                if (isGetInput(pValue)) {
                    String inputName = ((FunctionPropertyValue) pValue).getTemplateName();
                    propertyEntry.setValue(proxyNodeTemplate.getProperties().get(inputName));
                }
            }
            // same substitution for get_input used in capability properties
            for (Entry<String, Capability> capabilityEntry : childNodeTemplate.getCapabilities().entrySet()) {
                if (capabilityEntry.getValue().getProperties() != null) {
                    for (Entry<String, AbstractPropertyValue> propertyEntry : capabilityEntry.getValue().getProperties().entrySet()) {
                        AbstractPropertyValue pValue = propertyEntry.getValue();
                        if (isGetInput(pValue)) {
                            String inputName = ((FunctionPropertyValue) pValue).getTemplateName();
                            propertyEntry.setValue(proxyNodeTemplate.getProperties().get(inputName));
                        }
                    }
                }
            }
        }
        // all relations from the proxy must now start from the corresponding node
        // (resolved through the child's substitution requirement mapping)
        if (proxyNodeTemplate.getRelationships() != null) {
            for (Entry<String, RelationshipTemplate> e : proxyNodeTemplate.getRelationships().entrySet()) {
                String relationShipKey = e.getKey();
                RelationshipTemplate proxyRelationShip = e.getValue();
                String requirementName = proxyRelationShip.getRequirementName();
                SubstitutionTarget substitutionTarget = compositionCouple.child.getSubstitutionMapping().getRequirements().get(requirementName);
                NodeTemplate nodeTemplate = compositionCouple.child.getNodeTemplates().get(substitutionTarget.getNodeTemplateName());
                if (nodeTemplate.getRelationships() == null) {
                    Map<String, RelationshipTemplate> relationships = Maps.newHashMap();
                    nodeTemplate.setRelationships(relationships);
                }
                nodeTemplate.getRelationships().put(relationShipKey, proxyRelationShip);
                proxyRelationShip.setRequirementName(substitutionTarget.getTargetId());
            }
        }
        // all relations that target the proxy must be redirected to the corresponding child node
        // (resolved through the child's substitution capability mapping)
        for (NodeTemplate otherNodes : compositionCouple.parent.getNodeTemplates().values()) {
            if (otherNodes.getRelationships() != null) {
                for (RelationshipTemplate relationshipTemplate : otherNodes.getRelationships().values()) {
                    if (relationshipTemplate.getTarget().equals(compositionCouple.nodeName)) {
                        SubstitutionTarget st = compositionCouple.child.getSubstitutionMapping().getCapabilities()
                                .get(relationshipTemplate.getTargetedCapabilityName());
                        relationshipTemplate.setTarget(st.getNodeTemplateName());
                        relationshipTemplate.setTargetedCapabilityName(st.getTargetId());
                    }
                }
            }
        }
        // output attributes exposed through the proxy are moved onto the child nodes
        if (compositionCouple.parent.getOutputAttributes() != null) {
            Set<String> outputAttributes = compositionCouple.parent.getOutputAttributes().remove(compositionCouple.nodeName);
            if (outputAttributes != null) {
                for (String proxyAttributeName : outputAttributes) {
                    sustituteGetAttribute(compositionCouple.child, compositionCouple.parent, proxyAttributeName);
                }
            }
        }
        // the parent itself expose stuffs, we eventually need to replace substitution targets
        if (compositionCouple.parent.getSubstitutionMapping() != null) {
            if (compositionCouple.parent.getSubstitutionMapping().getCapabilities() != null) {
                for (Entry<String, SubstitutionTarget> substitutionCapabilityEntry : compositionCouple.parent.getSubstitutionMapping().getCapabilities()
                        .entrySet()) {
                    if (substitutionCapabilityEntry.getValue().getNodeTemplateName().equals(compositionCouple.nodeName)) {
                        String targetCapability = substitutionCapabilityEntry.getValue().getTargetId();
                        // just substitute the substitution target
                        substitutionCapabilityEntry.setValue(compositionCouple.child.getSubstitutionMapping().getCapabilities().get(targetCapability));
                    }
                }
            }
            if (compositionCouple.parent.getSubstitutionMapping().getRequirements() != null) {
                for (Entry<String, SubstitutionTarget> e : compositionCouple.parent.getSubstitutionMapping().getRequirements().entrySet()) {
                    if (e.getValue().getNodeTemplateName().equals(compositionCouple.nodeName)) {
                        String targetCapability = e.getValue().getTargetId();
                        // just substitute the substitution target
                        e.setValue(compositionCouple.child.getSubstitutionMapping().getRequirements().get(targetCapability));
                    }
                }
            }
        }
        // merge each child nodes into the parent
        compositionCouple.parent.getNodeTemplates().putAll(compositionCouple.child.getNodeTemplates());
    }
    /**
     * Ugly code: since outputs are not named in the alien topology, we cannot tell
     * whether an output relates to a property, an attribute or a capability
     * property. Lookups are therefore done in the same order as
     * alien4cloud.topology.TopologyServiceCore.updateSubstitutionType(Topology)
     * processes substitution outputs: attributes first, then properties, then
     * capability properties; the first match wins and the method returns.
     */
    private void sustituteGetAttribute(Topology child, Topology parent, String proxyAttributeName) {
        // 1) try to match an output attribute of the child
        if (child.getOutputAttributes() != null) {
            for (Entry<String, Set<String>> oae : child.getOutputAttributes().entrySet()) {
                String nodeName = oae.getKey();
                for (String oa : oae.getValue()) {
                    if (oa.equals(proxyAttributeName)) {
                        // ok the proxy attribute name matches the embedded node attribute
                        Map<String, Set<String>> parentOas = parent.getOutputAttributes();
                        if (parentOas == null) {
                            parentOas = Maps.newHashMap();
                            parent.setOutputAttributes(parentOas);
                        }
                        Set<String> parentNodeOas = parentOas.get(nodeName);
                        if (parentNodeOas == null) {
                            parentNodeOas = Sets.newHashSet();
                            parentOas.put(nodeName, parentNodeOas);
                        }
                        parentNodeOas.add(proxyAttributeName);
                        return;
                    }
                }
            }
        }
        // 2) try to match an output property of the child
        if (child.getOutputProperties() != null) {
            for (Entry<String, Set<String>> ope : child.getOutputProperties().entrySet()) {
                String nodeName = ope.getKey();
                for (String op : ope.getValue()) {
                    if (op.equals(proxyAttributeName)) {
                        // ok the proxy attribute name matches the embedded node property
                        Map<String, Set<String>> parentOps = parent.getOutputProperties();
                        if (parentOps == null) {
                            parentOps = Maps.newHashMap();
                            parent.setOutputProperties(parentOps);
                        }
                        Set<String> parentNodeOps = parentOps.get(nodeName);
                        if (parentNodeOps == null) {
                            parentNodeOps = Sets.newHashSet();
                            parentOps.put(nodeName, parentNodeOps);
                        }
                        parentNodeOps.add(proxyAttributeName);
                        return;
                    }
                }
            }
        }
        // 3) finally try to match an output capability property of the child
        if (child.getOutputCapabilityProperties() != null) {
            for (Entry<String, Map<String, Set<String>>> ocpe : child.getOutputCapabilityProperties().entrySet()) {
                String nodeName = ocpe.getKey();
                for (Entry<String, Set<String>> cpes : ocpe.getValue().entrySet()) {
                    String embededCapabilityName = cpes.getKey();
                    for (String op : cpes.getValue()) {
                        if (op.equals(proxyAttributeName)) {
                            // ok the embedded output capability property matches the proxy type output attribute
                            Map<String, Map<String, Set<String>>> parentOcps = parent.getOutputCapabilityProperties();
                            if (parentOcps == null) {
                                parentOcps = Maps.newHashMap();
                                parent.setOutputCapabilityProperties(parentOcps);
                            }
                            Map<String, Set<String>> parentNodeOcps = parentOcps.get(nodeName);
                            if (parentNodeOcps == null) {
                                parentNodeOcps = Maps.newHashMap();
                                parentOcps.put(nodeName, parentNodeOcps);
                            }
                            Set<String> parentCapabilityOps = parentNodeOcps.get(embededCapabilityName);
                            if (parentCapabilityOps == null) {
                                parentCapabilityOps = Sets.newHashSet();
                                parentNodeOcps.put(embededCapabilityName, parentCapabilityOps);
                            }
                            parentCapabilityOps.add(proxyAttributeName);
                            return;
                        }
                    }
                }
            }
        }
    }
    /**
     * Deeply explore this topology to detect if some type must be substituted by the
     * corresponding topology template content and feed the {@link Deque}. <br>
     * BTW, rename the nodes by prefixing all the node names.
     *
     * @param topology the topology to explore (may be null)
     * @param stack collects one CompositionCouple per proxy node, deepest last
     * @param prefix NOTE(review): this parameter appears to be unused -- the
     *            recursive call builds its own "nodeName_" prefix instead; confirm
     *            whether this is intentional.
     */
    private void recursivelyBuildSubstitutionStack(Topology topology, Deque<CompositionCouple> stack, String prefix) {
        if (topology == null || topology.getNodeTemplates() == null || topology.getNodeTemplates().isEmpty()) {
            return;
        }
        for (Entry<String, NodeTemplate> nodeEntry : topology.getNodeTemplates().entrySet()) {
            String nodeName = nodeEntry.getKey();
            String type = nodeEntry.getValue().getType();
            // FIXME use tosca context, beware of child topologies (dependencies to use ? conflicts ?)
            NodeType nodeType = csarRepoSearchService.getRequiredElementInDependencies(NodeType.class, type, topology.getDependencies());
            if (nodeType.getSubstitutionTopologyId() != null) {
                // this node type is a proxy for a topology template
                Topology child = topologyServiceCore.getOrFail(nodeType.getSubstitutionTopologyId());
                CompositionCouple couple = new CompositionCouple(topology, child, nodeName, nodeName + "_");
                renameNodes(couple);
                stack.offer(couple);
                recursivelyBuildSubstitutionStack(child, stack, nodeName + "_");
            }
        }
    }
private void renameNodes(CompositionCouple compositionCouple) {
Topology topology = compositionCouple.child;
String[] nodeNames = new String[topology.getNodeTemplates().size()];
nodeNames = topology.getNodeTemplates().keySet().toArray(nodeNames);
for (String nodeName : nodeNames) {
String newName = ensureNodeNameIsUnique(topology.getNodeTemplates().keySet(), compositionCouple.nodeNamePrefix + nodeName, 0);
renameNodeTemplate(topology, nodeName, newName);
}
}
// TODO ALIEN-2589: move elsewhere
@Deprecated
public static String ensureNodeNameIsUnique(Set<String> keys, String prefix, int suffixeNumber) {
String name = (suffixeNumber > 0) ? prefix + suffixeNumber : prefix;
if (keys.contains(name)) {
return ensureNodeNameIsUnique(keys, prefix, ++suffixeNumber);
} else {
return name;
}
}
    /**
     * Renames a node template and updates every reference to it in the topology:
     * incoming relationships, output properties/attributes, group memberships and
     * substitution targets.
     *
     * @throws AlreadyExistException if {@code newName} is already used by another node
     */
    private void renameNodeTemplate(Topology topology, String oldName, String newName) {
        // if the prefixed name is already used by another node ?
        // quite improbable but ...
        if (topology.getNodeTemplates().containsKey(newName)) {
            throw new AlreadyExistException(String.format("A node with name '%s' already exists in this topology", newName));
        }
        // remove first, re-insert under the new name at the very end
        NodeTemplate nodeTemplate = topology.getNodeTemplates().remove(oldName);
        // manage relationships that target this node
        for (NodeTemplate otherNodes : topology.getNodeTemplates().values()) {
            if (otherNodes.getRelationships() == null || otherNodes.getRelationships().isEmpty()) {
                continue;
            }
            for (RelationshipTemplate relationshipTemplate : otherNodes.getRelationships().values()) {
                if (relationshipTemplate.getTarget().equals(oldName)) {
                    relationshipTemplate.setTarget(newName);
                }
            }
        }
        // all output stuffs
        MapUtil.replaceKey(topology.getOutputProperties(), oldName, newName);
        MapUtil.replaceKey(topology.getOutputCapabilityProperties(), oldName, newName);
        MapUtil.replaceKey(topology.getOutputAttributes(), oldName, newName);
        // group members must be updated
        if (topology.getGroups() != null) {
            for (NodeGroup nodeGroup : topology.getGroups().values()) {
                Set<String> members = nodeGroup.getMembers();
                if (members != null && members.remove(oldName)) {
                    members.add(newName);
                }
            }
        }
        // substitutions
        if (topology.getSubstitutionMapping() != null) {
            renameNodeTemplateInSubstitutionTargets(topology.getSubstitutionMapping().getCapabilities(), oldName, newName);
            renameNodeTemplateInSubstitutionTargets(topology.getSubstitutionMapping().getRequirements(), oldName, newName);
        }
        // finally the node itself
        topology.getNodeTemplates().put(newName, nodeTemplate);
    }
private void renameNodeTemplateInSubstitutionTargets(Map<String, SubstitutionTarget> substitutionTargets, String oldName, String newName) {
if (substitutionTargets != null) {
for (SubstitutionTarget s : substitutionTargets.values()) {
if (s.getNodeTemplateName().equals(oldName)) {
s.setNodeTemplateName(newName);
}
}
}
}
/**
* Deeply explore composition in order to detect cyclic reference: if a descendant references the mainTopologyId.
*/
public void recursivelyDetectTopologyCompositionCyclicReference(String mainTopologyId, String substitutionTopologyId) {
Topology child = topologyServiceCore.getOrFail(substitutionTopologyId);
if (child == null || child.getNodeTemplates() == null || child.getNodeTemplates().isEmpty()) {
return;
}
for (Entry<String, NodeTemplate> nodeEntry : child.getNodeTemplates().entrySet()) {
String type = nodeEntry.getValue().getType();
NodeType nodeType = csarRepoSearchService.getElementInDependencies(NodeType.class, type, child.getDependencies());
if (nodeType.getSubstitutionTopologyId() != null) {
if (nodeType.getSubstitutionTopologyId().equals(mainTopologyId)) {
throw new CyclicReferenceException("Cyclic reference : a topology template can not reference itself (even indirectly)");
}
recursivelyDetectTopologyCompositionCyclicReference(mainTopologyId, nodeType.getSubstitutionTopologyId());
}
}
}
private static class CompositionCouple {
/** The topology that embeds another one. */
private final Topology parent;
/** The topology template that will substitute the type. */
private final Topology child;
/** The node name referencing the child in the parent. */
private final String nodeName;
/** The prefix that will be used to rename nodes. */
private final String nodeNamePrefix;
public CompositionCouple(Topology parent, Topology child, String nodeName, String nodeNamePrefix) {
super();
this.parent = parent;
this.child = child;
this.nodeName = nodeName;
this.nodeNamePrefix = nodeNamePrefix;
}
}
}
| apache-2.0 |
apache/helix | zookeeper-api/src/main/java/org/apache/helix/zookeeper/zkclient/ZkClient.java | 98695 | package org.apache.helix.zookeeper.zkclient;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.OptionalLong;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.JMException;
import org.apache.helix.zookeeper.api.client.ChildrenSubscribeResult;
import org.apache.helix.zookeeper.constant.ZkSystemPropertyKeys;
import org.apache.helix.zookeeper.datamodel.SessionAwareZNRecord;
import org.apache.helix.zookeeper.datamodel.ZNRecord;
import org.apache.helix.zookeeper.exception.ZkClientException;
import org.apache.helix.zookeeper.util.GZipCompressionUtil;
import org.apache.helix.zookeeper.util.ZNRecordUtil;
import org.apache.helix.zookeeper.zkclient.annotation.PreFetchChangedData;
import org.apache.helix.zookeeper.zkclient.callback.ZkAsyncCallMonitorContext;
import org.apache.helix.zookeeper.zkclient.callback.ZkAsyncCallbacks;
import org.apache.helix.zookeeper.zkclient.callback.ZkAsyncRetryCallContext;
import org.apache.helix.zookeeper.zkclient.callback.ZkAsyncRetryThread;
import org.apache.helix.zookeeper.zkclient.exception.ZkBadVersionException;
import org.apache.helix.zookeeper.zkclient.exception.ZkException;
import org.apache.helix.zookeeper.zkclient.exception.ZkInterruptedException;
import org.apache.helix.zookeeper.zkclient.exception.ZkMarshallingError;
import org.apache.helix.zookeeper.zkclient.exception.ZkNoNodeException;
import org.apache.helix.zookeeper.zkclient.exception.ZkNodeExistsException;
import org.apache.helix.zookeeper.zkclient.exception.ZkSessionMismatchedException;
import org.apache.helix.zookeeper.zkclient.exception.ZkTimeoutException;
import org.apache.helix.zookeeper.zkclient.metric.ZkClientMonitor;
import org.apache.helix.zookeeper.zkclient.serialize.BasicZkSerializer;
import org.apache.helix.zookeeper.zkclient.serialize.PathBasedZkSerializer;
import org.apache.helix.zookeeper.zkclient.serialize.ZkSerializer;
import org.apache.helix.zookeeper.zkclient.util.ExponentialBackoffStrategy;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.ConnectionLossException;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
import org.apache.zookeeper.Op;
import org.apache.zookeeper.OpResult;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.Watcher.Event.EventType;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* "Native ZkClient": not to be used directly.
*
* Abstracts the interaction with zookeeper and allows permanent (not just one time) watches on
* nodes in ZooKeeper.
* WARN: Do not use this class directly, use {@link org.apache.helix.zookeeper.impl.client.ZkClient} instead.
*/
public class ZkClient implements Watcher {
private static final Logger LOG = LoggerFactory.getLogger(ZkClient.class);
private static final long MAX_RECONNECT_INTERVAL_MS = 30000; // 30 seconds
// If number of children exceeds this limit, getChildren() should not retry on connection loss.
// This is a workaround for exiting retry on connection loss because of large number of children.
// 100K is specific for helix messages which use UUID, making packet length just below 4 MB.
// TODO: remove it once we have a better way to exit retry for this case
private static final int NUM_CHILDREN_LIMIT = 100 * 1000;
private static final boolean SYNC_ON_SESSION = Boolean.parseBoolean(
System.getProperty(ZkSystemPropertyKeys.ZK_AUTOSYNC_ENABLED, "true"));
private static final String SYNC_PATH = "/";
private static AtomicLong UID = new AtomicLong(0);
public final long _uid;
// ZNode write size limit in bytes.
// TODO: use ZKConfig#JUTE_MAXBUFFER once bumping up ZK to 3.5.2+
private static final int WRITE_SIZE_LIMIT =
Integer.getInteger(ZkSystemPropertyKeys.JUTE_MAXBUFFER, ZNRecord.SIZE_LIMIT);
private final IZkConnection _connection;
private final long _operationRetryTimeoutInMillis;
private final Map<String, Set<IZkChildListener>> _childListener = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, Set<IZkDataListenerEntry>> _dataListener =
new ConcurrentHashMap<>();
private final Set<IZkStateListener> _stateListener = new CopyOnWriteArraySet<>();
private KeeperState _currentState;
private final ZkLock _zkEventLock = new ZkLock();
// When a new zookeeper instance is created in reconnect, its session id is not yet valid before
// the zookeeper session is established(SyncConnected). To avoid session race condition in
// handling new session, the new session event is only fired after SyncConnected. Meanwhile,
// SyncConnected state is also received when re-opening the zk connection. So to avoid firing
// new session event more than once, this flag is used to check.
// It is set to false when once existing expires. And set it to true once the new session event
// is fired the first time.
private boolean _isNewSessionEventFired;
private boolean _shutdownTriggered;
private ZkEventThread _eventThread;
// TODO PVo remove this later
private Thread _zookeeperEventThread;
private volatile boolean _closed;
private PathBasedZkSerializer _pathBasedZkSerializer;
private ZkClientMonitor _monitor;
// To automatically retry the async operation, we need a separate thread other than the
// ZkEventThread. Otherwise the retry request might block the normal event processing.
protected final ZkAsyncRetryThread _asyncCallRetryThread;
private class IZkDataListenerEntry {
final IZkDataListener _dataListener;
final boolean _prefetchData;
public IZkDataListenerEntry(IZkDataListener dataListener, boolean prefetchData) {
_dataListener = dataListener;
_prefetchData = prefetchData;
}
public IZkDataListenerEntry(IZkDataListener dataListener) {
_dataListener = dataListener;
_prefetchData = false;
}
public IZkDataListener getDataListener() {
return _dataListener;
}
public boolean isPrefetchData() {
return _prefetchData;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof IZkDataListenerEntry)) {
return false;
}
IZkDataListenerEntry that = (IZkDataListenerEntry) o;
return _dataListener.equals(that._dataListener);
}
@Override
public int hashCode() {
return _dataListener.hashCode();
}
}
private class ZkPathStatRecord {
private final String _path;
private Stat _stat = null;
private boolean _checked = false;
public ZkPathStatRecord(String path) {
_path = path;
}
public boolean pathExists() {
return _stat != null;
}
public boolean pathChecked() {
return _checked;
}
/*
* Note this method is not thread safe.
*/
public void recordPathStat(Stat stat, OptionalLong notificationTime) {
_checked = true;
_stat = stat;
if (_monitor != null && stat != null && notificationTime.isPresent()) {
long updateTime = Math.max(stat.getCtime(), stat.getMtime());
if (notificationTime.getAsLong() > updateTime) {
_monitor.recordDataPropagationLatency(_path, notificationTime.getAsLong() - updateTime);
} // else, the node was updated again after the notification. Propagation latency is
// unavailable.
}
}
}
  /**
   * Creates a ZkClient, starts its async-retry thread, optionally registers a
   * JMX monitor, and connects to ZooKeeper.
   *
   * @param zkConnection the underlying connection, must not be null
   * @param connectionTimeout max time in ms to wait for the connection
   * @param operationRetryTimeout max time in ms to keep retrying failed operations
   * @param zkSerializer serializer used for reading/writing ZNode data
   * @param monitorType / monitorKey both must be non-empty for monitoring to be enabled
   * @throws NullPointerException if zkConnection is null
   */
  protected ZkClient(IZkConnection zkConnection, int connectionTimeout, long operationRetryTimeout,
      PathBasedZkSerializer zkSerializer, String monitorType, String monitorKey,
      String monitorInstanceName, boolean monitorRootPathOnly) {
    if (zkConnection == null) {
      throw new NullPointerException("Zookeeper connection is null!");
    }
    _uid = UID.getAndIncrement();
    validateWriteSizeLimitConfig();
    _connection = zkConnection;
    _pathBasedZkSerializer = zkSerializer;
    _operationRetryTimeoutInMillis = operationRetryTimeout;
    _isNewSessionEventFired = false;
    // dedicated thread so async retries never block the ZkEventThread
    _asyncCallRetryThread = new ZkAsyncRetryThread(zkConnection.getServers());
    _asyncCallRetryThread.start();
    LOG.debug("ZkClient created with uid {}, _asyncCallRetryThread id {}", _uid, _asyncCallRetryThread.getId());
    if (monitorKey != null && !monitorKey.isEmpty() && monitorType != null && !monitorType
        .isEmpty()) {
      // NOTE(review): _eventThread has not been assigned at this point, so the
      // monitor is constructed before the event thread exists -- confirm the
      // monitor tolerates/re-reads this later.
      _monitor =
          new ZkClientMonitor(monitorType, monitorKey, monitorInstanceName, monitorRootPathOnly,
              _eventThread);
    } else {
      LOG.info("ZkClient monitor key or type is not provided. Skip monitoring.");
    }
    connect(connectionTimeout, this);
    // register the monitor only after the connection attempt
    try {
      if (_monitor != null) {
        _monitor.register();
      }
    } catch (JMException e){
      LOG.error("Error in creating ZkClientMonitor", e);
    }
  }
public List<String> subscribeChildChanges(String path, IZkChildListener listener) {
ChildrenSubscribeResult result = subscribeChildChanges(path, listener, false);
return result.getChildren();
}
  /**
   * Subscribes the listener to child changes of the path.
   *
   * @param skipWatchingNonExistNode if true and the path does not exist, no watch
   *            is installed and the listener registration is rolled back
   * @return the current children plus a flag telling whether the subscription
   *         (watch installation) succeeded
   */
  public ChildrenSubscribeResult subscribeChildChanges(String path, IZkChildListener listener, boolean skipWatchingNonExistNode) {
    // register the listener first; _childListener is the lock shared with unsubscribe
    synchronized (_childListener) {
      Set<IZkChildListener> listeners = _childListener.get(path);
      if (listeners == null) {
        listeners = new CopyOnWriteArraySet<>();
        _childListener.put(path, listeners);
      }
      listeners.add(listener);
    }
    List<String> children = watchForChilds(path, skipWatchingNonExistNode);
    if (children == null && skipWatchingNonExistNode) {
      // the node does not exist and we were told not to watch for it: undo the registration
      unsubscribeChildChanges(path, listener);
      LOG.info("zkclient{}, watchForChilds failed to install no-existing watch and add listener. Path: {}", _uid, path);
      return new ChildrenSubscribeResult(children, false);
    }
    return new ChildrenSubscribeResult(children, true);
  }
public void unsubscribeChildChanges(String path, IZkChildListener childListener) {
synchronized (_childListener) {
final Set<IZkChildListener> listeners = _childListener.get(path);
if (listeners != null) {
listeners.remove(childListener);
}
}
}
public boolean subscribeDataChanges(String path, IZkDataListener listener, boolean skipWatchingNonExistNode) {
Set<IZkDataListenerEntry> listenerEntries;
synchronized (_dataListener) {
listenerEntries = _dataListener.get(path);
if (listenerEntries == null) {
listenerEntries = new CopyOnWriteArraySet<>();
_dataListener.put(path, listenerEntries);
}
boolean prefetchEnabled = isPrefetchEnabled(listener);
IZkDataListenerEntry listenerEntry = new IZkDataListenerEntry(listener, prefetchEnabled);
listenerEntries.add(listenerEntry);
if (prefetchEnabled) {
if (LOG.isDebugEnabled()) {
LOG.debug("zkclient {} subscribed data changes for {}, listener {}, prefetch data {}",
_uid, path, listener, prefetchEnabled);
}
}
}
boolean watchInstalled = watchForData(path, skipWatchingNonExistNode);
if (!watchInstalled) {
// Now let us remove this handler.
unsubscribeDataChanges(path, listener);
LOG.info("zkclient {} watchForData failed to install no-existing path and thus add listener. Path: {}",
_uid, path);
return false;
}
if (LOG.isDebugEnabled()) {
LOG.debug("zkclient {}, Subscribed data changes for {}", _uid, path);
}
return true;
}
/**
* Subscribe the path and the listener will handle data events of the path
* WARNING: if the path is created after deletion, users need to re-subscribe the path
* @param path The zookeeper path
* @param listener Instance of {@link IZkDataListener}
*/
public void subscribeDataChanges(String path, IZkDataListener listener) {
subscribeDataChanges(path, listener, false);
}
private boolean isPrefetchEnabled(IZkDataListener dataListener) {
PreFetchChangedData preFetch = dataListener.getClass().getAnnotation(PreFetchChangedData.class);
if (preFetch != null) {
return preFetch.enabled();
}
Method callbackMethod = IZkDataListener.class.getMethods()[0];
try {
Method method = dataListener.getClass()
.getMethod(callbackMethod.getName(), callbackMethod.getParameterTypes());
PreFetchChangedData preFetchInMethod = method.getAnnotation(PreFetchChangedData.class);
if (preFetchInMethod != null) {
return preFetchInMethod.enabled();
}
} catch (NoSuchMethodException e) {
LOG.warn("Zkclient {}, No method {} defined in listener {}",
_uid, callbackMethod.getName(), dataListener.getClass().getCanonicalName());
}
return true;
}
public void unsubscribeDataChanges(String path, IZkDataListener dataListener) {
synchronized (_dataListener) {
final Set<IZkDataListenerEntry> listeners = _dataListener.get(path);
if (listeners != null) {
IZkDataListenerEntry listenerEntry = new IZkDataListenerEntry(dataListener);
listeners.remove(listenerEntry);
}
if (listeners == null || listeners.isEmpty()) {
_dataListener.remove(path);
}
}
}
public void subscribeStateChanges(final IZkStateListener listener) {
synchronized (_stateListener) {
_stateListener.add(listener);
}
}
/**
* Subscribes state changes for a {@link IZkStateListener} listener.
*
* @deprecated
* This is deprecated. It is kept for backwards compatibility. Please use
* {@link #subscribeStateChanges(IZkStateListener)}.
*
* @param listener {@link IZkStateListener} listener
*/
@Deprecated
public void subscribeStateChanges(
final org.apache.helix.zookeeper.zkclient.deprecated.IZkStateListener listener) {
subscribeStateChanges(new IZkStateListenerI0ItecImpl(listener));
}
public void unsubscribeStateChanges(IZkStateListener stateListener) {
synchronized (_stateListener) {
_stateListener.remove(stateListener);
}
}
/**
* Unsubscribes state changes for a {@link IZkStateListener} listener.
*
* @deprecated
* This is deprecated. It is kept for backwards compatibility. Please use
* {@link #unsubscribeStateChanges(IZkStateListener)}.
*
* @param stateListener {@link IZkStateListener} listener
*/
@Deprecated
public void unsubscribeStateChanges(
org.apache.helix.zookeeper.zkclient.deprecated.IZkStateListener stateListener) {
unsubscribeStateChanges(new IZkStateListenerI0ItecImpl(stateListener));
}
public void unsubscribeAll() {
synchronized (_childListener) {
_childListener.clear();
}
synchronized (_dataListener) {
_dataListener.clear();
}
synchronized (_stateListener) {
_stateListener.clear();
}
}
// </listeners>
/**
* Create a persistent node.
* @param path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createPersistent(String path)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
createPersistent(path, false);
}
/**
* Create a persistent node and set its ACLs.
* @param path
* @param createParents
* if true all parent dirs are created as well and no {@link ZkNodeExistsException} is
* thrown in case the
* path already exists
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createPersistent(String path, boolean createParents)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
createPersistent(path, createParents, ZooDefs.Ids.OPEN_ACL_UNSAFE);
}
/**
* Create a persistent node and set its ACLs.
* @param path
* @param acl
* List of ACL permissions to assign to the node
* @param createParents
* if true all parent dirs are created as well and no {@link ZkNodeExistsException} is
* thrown in case the
* path already exists
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createPersistent(String path, boolean createParents, List<ACL> acl)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
try {
create(path, null, acl, CreateMode.PERSISTENT);
} catch (ZkNodeExistsException e) {
if (!createParents) {
throw e;
}
} catch (ZkNoNodeException e) {
if (!createParents) {
throw e;
}
String parentDir = path.substring(0, path.lastIndexOf('/'));
createPersistent(parentDir, createParents, acl);
createPersistent(path, createParents, acl);
}
}
/**
* Create a persistent node.
* @param path
* @param data
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createPersistent(String path, Object data)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, data, CreateMode.PERSISTENT);
}
/**
* Create a persistent node.
* @param path
* @param data
* @param acl
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createPersistent(String path, Object data, List<ACL> acl) {
create(path, data, acl, CreateMode.PERSISTENT);
}
/**
* Create a persistent, sequental node.
* @param path
* @param data
* @return create node's path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String createPersistentSequential(String path, Object data)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, data, CreateMode.PERSISTENT_SEQUENTIAL);
}
/**
* Create a persistent, sequential node and set its ACL.
* @param path
* @param acl
* @param data
* @return create node's path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String createPersistentSequential(String path, Object data, List<ACL> acl)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, data, acl, CreateMode.PERSISTENT_SEQUENTIAL);
}
/**
* Create an ephemeral node.
* @param path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createEphemeral(final String path)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, null, CreateMode.EPHEMERAL);
}
/**
* Creates an ephemeral node. This ephemeral node is created by the expected(passed-in) ZK session.
* If the expected session does not match the current ZK session, the node will not be created.
*
* @param path path of the node
* @param sessionId expected session id of the ZK connection. If the session id of current ZK
* connection does not match the expected session id, ephemeral creation will
* fail
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createEphemeral(final String path, final String sessionId)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
createEphemeral(path, null, sessionId);
}
/**
* Create an ephemeral node and set its ACL.
* @param path
* @param acl
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createEphemeral(final String path, final List<ACL> acl)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, null, acl, CreateMode.EPHEMERAL);
}
/**
* Creates an ephemeral node and set its ACL. This ephemeral node is created by the
* expected(passed-in) ZK session. If the expected session does not match the current ZK session,
* the node will not be created.
*
* @param path path of the ephemeral node
* @param acl a list of ACL for the ephemeral node.
* @param sessionId expected session id of the ZK connection. If the session id of current ZK
* connection does not match the expected session id, ephemeral creation will
* fail.
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createEphemeral(final String path, final List<ACL> acl, final String sessionId)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, null, acl, CreateMode.EPHEMERAL, sessionId);
}
/**
* Create a node.
* @param path
* @param data
* @param mode
* @return create node's path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String create(final String path, Object data, final CreateMode mode)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, data, ZooDefs.Ids.OPEN_ACL_UNSAFE, mode);
}
/**
* Create a node with ACL.
* @param path
* @param datat
* @param acl
* @param mode
* @return create node's path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String create(final String path, Object datat, final List<ACL> acl, final CreateMode mode)
throws IllegalArgumentException, ZkException {
return create(path, datat, acl, mode, null);
}
  /**
   * Creates a node and returns the actual path of the created node.
   *
   * Given an expected non-null session id, if the node is successfully created, it is guaranteed to
   * be created in the expected(passed-in) session.
   *
   * If the expected session is expired, which means the expected session does not match the current
   * session of ZK connection, the node will not be created.
   *
   * @param path the path where you want the node to be created
   * @param dataObject data of the node
   * @param acl list of ACL for the node
   * @param mode {@link CreateMode} of the node
   * @param expectedSessionId the expected session ID of the ZK connection. It is not necessarily the
   *                          session ID of current ZK Connection. If the expected session ID is NOT null,
   *                          the node is guaranteed to be created in the expected session, or creation is
   *                          failed if the expected session id doesn't match current connected zk session.
   *                          If the session id is null, it means the create operation is NOT session aware.
   * @return path of the node created
   * @throws IllegalArgumentException if called from anything else except the ZooKeeper event thread
   * @throws ZkException if any zookeeper exception occurs
   */
  private String create(final String path, final Object dataObject, final List<ACL> acl,
      final CreateMode mode, final String expectedSessionId)
      throws IllegalArgumentException, ZkException {
    if (path == null) {
      throw new NullPointerException("Path must not be null.");
    }
    if (acl == null || acl.size() == 0) {
      throw new NullPointerException("Missing value for ACL");
    }
    // startT is sampled before serialization so the recorded latency covers the full operation.
    long startT = System.currentTimeMillis();
    try {
      // null data is passed through unserialized; serialize() may enforce per-path rules.
      final byte[] dataBytes = dataObject == null ? null : serialize(dataObject, path);
      checkDataSizeLimit(path, dataBytes);
      // getExpectedZookeeper() resolves the session-aware connection; retryUntilConnected
      // re-runs the lambda across connection losses until the create succeeds or fails hard.
      final String actualPath = retryUntilConnected(
          () -> getExpectedZookeeper(expectedSessionId).create(path, dataBytes, acl, mode));
      // Record the successful write (with payload size) for the client monitor.
      record(path, dataBytes, startT, ZkClientMonitor.AccessType.WRITE);
      return actualPath;
    } catch (Exception e) {
      // Any failure (serialization, size limit, ZK error) is counted as a failed write.
      recordFailure(path, ZkClientMonitor.AccessType.WRITE);
      throw e;
    } finally {
      long endT = System.currentTimeMillis();
      if (LOG.isTraceEnabled()) {
        LOG.trace("zkclient {} create, path {}, time {} ms", _uid, path, (endT - startT));
      }
    }
  }
/**
* Create an ephemeral node.
* @param path
* @param data
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createEphemeral(final String path, final Object data)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, data, CreateMode.EPHEMERAL);
}
/**
* Creates an ephemeral node. Given an expected non-null session id, if the ephemeral
* node is successfully created, it is guaranteed to be in the expected(passed-in) session.
*
* If the expected session is expired, which means the expected session does not match the session
* of current ZK connection, the ephemeral node will not be created.
* If connection is timed out or interrupted, exception is thrown.
*
* @param path path of the ephemeral node being created
* @param data data of the ephemeral node being created
* @param sessionId the expected session ID of the ZK connection. It is not necessarily the
* session ID of current ZK Connection. If the expected session ID is NOT null,
* the node is guaranteed to be created in the expected session, or creation is
* failed if the expected session id doesn't match current connected zk session.
* If the session id is null, it means the operation is NOT session aware
* and the node will be created by current ZK session.
* @throws ZkInterruptedException if operation is interrupted, or a required reconnection gets
* interrupted
* @throws IllegalArgumentException if called from anything except the ZooKeeper event thread
* @throws ZkException if any ZooKeeper exception occurs
* @throws RuntimeException if any other exception occurs
*/
public void createEphemeral(final String path, final Object data, final String sessionId)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, data, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, sessionId);
}
/**
* Create an ephemeral node.
* @param path
* @param data
* @param acl
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createEphemeral(final String path, final Object data, final List<ACL> acl)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, data, acl, CreateMode.EPHEMERAL);
}
/**
* Creates an ephemeral node in an expected ZK session. Given an expected non-null session id,
* if the ephemeral node is successfully created, it is guaranteed to be in the expected session.
* If the expected session is expired, which means the expected session does not match the session
* of current ZK connection, the ephemeral node will not be created.
* If connection is timed out or interrupted, exception is thrown.
*
* @param path path of the ephemeral node being created
* @param data data of the ephemeral node being created
* @param acl list of ACL for the ephemeral node
* @param sessionId the expected session ID of the ZK connection. It is not necessarily the
* session ID of current ZK Connection. If the expected session ID is NOT null,
* the node is guaranteed to be created in the expected session, or creation is
* failed if the expected session id doesn't match current connected zk session.
* If the session id is null, it means the create operation is NOT session aware
* and the node will be created by current ZK session.
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public void createEphemeral(final String path, final Object data, final List<ACL> acl,
final String sessionId)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
create(path, data, acl, CreateMode.EPHEMERAL, sessionId);
}
/**
* Create an ephemeral, sequential node.
* @param path
* @param data
* @return created path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String createEphemeralSequential(final String path, final Object data)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, data, CreateMode.EPHEMERAL_SEQUENTIAL);
}
/**
* Creates an ephemeral, sequential node with ACL in an expected ZK session.
* Given an expected non-null session id, if the ephemeral node is successfully created,
* it is guaranteed to be in the expected session.
* If the expected session is expired, which means the expected session does not match the session
* of current ZK connection, the ephemeral node will not be created.
* If connection is timed out or interrupted, exception is thrown.
*
* @param path path of the node
* @param data data of the node
* @param acl list of ACL for the node
* @return created path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String createEphemeralSequential(final String path, final Object data, final List<ACL> acl,
final String sessionId)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, data, acl, CreateMode.EPHEMERAL_SEQUENTIAL, sessionId);
}
/**
* Creates an ephemeral, sequential node. Given an expected non-null session id,
* if the ephemeral node is successfully created, it is guaranteed to be in the expected session.
* If the expected session is expired, which means the expected session does not match the session
* of current ZK connection, the ephemeral node will not be created.
* If connection is timed out or interrupted, exception is thrown.
*
* @param path path of the node
* @param data data of the node
* @param sessionId the expected session ID of the ZK connection. It is not necessarily the
* session ID of current ZK Connection. If the expected session ID is NOT null,
* the node is guaranteed to be created in the expected session, or creation is
* failed if the expected session id doesn't match current connected zk session.
* If the session id is null, it means the create operation is NOT session aware
* and the node will be created by current ZK session.
* @return created path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String createEphemeralSequential(final String path, final Object data,
final String sessionId)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, data, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL,
sessionId);
}
/**
* Create an ephemeral, sequential node with ACL.
* @param path
* @param data
* @param acl
* @return created path
* @throws ZkInterruptedException
* if operation was interrupted, or a required reconnection got interrupted
* @throws IllegalArgumentException
* if called from anything except the ZooKeeper event thread
* @throws ZkException
* if any ZooKeeper exception occurred
* @throws RuntimeException
* if any other exception occurs
*/
public String createEphemeralSequential(final String path, final Object data, final List<ACL> acl)
throws ZkInterruptedException, IllegalArgumentException, ZkException, RuntimeException {
return create(path, data, acl, CreateMode.EPHEMERAL_SEQUENTIAL);
}
  /**
   * ZooKeeper {@link Watcher} callback: classifies the incoming event and dispatches it to
   * state/data/child handling under the event lock, then signals the matching conditions.
   * The signal-then-unlock ordering in the finally block is deliberate; do not reorder.
   */
  @Override
  public void process(WatchedEvent event) {
    // Captured up front so downstream handlers can measure notification latency.
    long notificationTime = System.currentTimeMillis();
    if (LOG.isDebugEnabled()) {
      LOG.debug("zkclient {}, Received event: {} ", _uid, event);
    }
    _zookeeperEventThread = Thread.currentThread();
    // Events with no path are connection-state events; events with a path are znode events.
    boolean stateChanged = event.getPath() == null;
    boolean sessionExpired = stateChanged && event.getState() == KeeperState.Expired;
    boolean znodeChanged = event.getPath() != null;
    // "dataChanged" here covers every znode event type, including child changes.
    boolean dataChanged =
        event.getType() == EventType.NodeDataChanged || event.getType() == EventType.NodeDeleted
            || event.getType() == EventType.NodeCreated
            || event.getType() == EventType.NodeChildrenChanged;
    if (event.getType() == EventType.NodeDeleted) {
      LOG.debug("zkclient {}, Path {} is deleted", _uid, event.getPath());
    }
    getEventLock().lock();
    try {
      // We might have to install child change event listener if a new node was created
      if (getShutdownTrigger()) {
        // Client is shutting down: drop the event without dispatching.
        if (LOG.isDebugEnabled()) {
          LOG.debug("zkclient {} ignoring event {}|{} since shutdown triggered",
              _uid, event.getType(), event.getPath());
        }
        return;
      }
      if (stateChanged) {
        processStateChanged(event);
      }
      if (dataChanged) {
        processDataOrChildChange(event, notificationTime);
      }
    } finally {
      // Conditions are signaled while still holding the lock, then the lock is released.
      if (stateChanged) {
        getEventLock().getStateChangedCondition().signalAll();
        // If the session expired we have to signal all conditions, because watches might have been
        // removed and
        // there is no guarantee that those
        // conditions will be signaled at all after an Expired event
        // TODO PVo write a test for this
        if (event.getState() == KeeperState.Expired) {
          getEventLock().getZNodeEventCondition().signalAll();
          getEventLock().getDataChangedCondition().signalAll();
        }
      }
      if (znodeChanged) {
        getEventLock().getZNodeEventCondition().signalAll();
      }
      if (dataChanged) {
        getEventLock().getDataChangedCondition().signalAll();
      }
      getEventLock().unlock();
      // update state change counter.
      recordStateChange(stateChanged, dataChanged, sessionExpired);
      if (LOG.isDebugEnabled()) {
        LOG.debug("zkclient {} Leaving process event", _uid);
      }
    }
  }
private void fireAllEvents() {
//TODO: During handling new session, if the path is deleted, watcher leakage could still happen
for (Entry<String, Set<IZkChildListener>> entry : _childListener.entrySet()) {
fireChildChangedEvents(entry.getKey(), entry.getValue(), true);
}
for (Entry<String, Set<IZkDataListenerEntry>> entry : _dataListener.entrySet()) {
fireDataChangedEvents(entry.getKey(), entry.getValue(), OptionalLong.empty(), true);
}
}
/**
* Returns a list of children of the given path.
* <p>
* NOTE: if the given path has too many children which causes the network packet length to exceed
* {@code jute.maxbuffer}, there are 2 cases, depending on whether or not the native
* zk supports paginated getChildren API and the config
* {@link ZkSystemPropertyKeys#ZK_GETCHILDREN_PAGINATION_DISABLED}:
* <p>1) pagination is disabled by {@link ZkSystemPropertyKeys#ZK_GETCHILDREN_PAGINATION_DISABLED}
* set to true or zk does not support pagination: the operation will fail.
* <p>2) config is false and zk supports pagination. A list of all children will be fetched using
* pagination and returned. But please note that the final children list is NOT strongly
* consistent with server - the list might contain some deleted children if some children
* are deleted before the last page is fetched. The upstream caller should be able to handle this.
*/
public List<String> getChildren(String path) {
return getChildren(path, hasListeners(path));
}
  /**
   * Fetches the children of {@code path}, optionally installing a child watch, with
   * connection-loss retry and client-monitor accounting.
   * The callable keeps a retry counter across invocations by retryUntilConnected; every
   * third consecutive ConnectionLossException triggers a numChildren check so an oversized
   * child list can break out of an otherwise infinite retry loop.
   */
  protected List<String> getChildren(final String path, final boolean watch) {
    long startT = System.currentTimeMillis();
    try {
      List<String> children = retryUntilConnected(new Callable<List<String>>() {
        // Survives across retries because retryUntilConnected re-invokes this same instance.
        private int connectionLossRetryCount = 0;
        @Override
        public List<String> call() throws Exception {
          try {
            return getConnection().getChildren(path, watch);
          } catch (ConnectionLossException e) {
            // Issue: https://github.com/apache/helix/issues/962
            // Connection loss might be caused by an excessive number of children.
            // Infinitely retrying connecting may cause high GC in ZK server and kill ZK server.
            // This is a workaround to check numChildren to have a chance to exit retry loop.
            // Check numChildren stat every other 3 connection loss, because there is a higher
            // possibility that connection loss is caused by other factors such as network
            // connectivity, session expired, etc.
            // TODO: remove this check once we have a better way to exit infinite retry
            ++connectionLossRetryCount;
            if (connectionLossRetryCount >= 3) {
              checkNumChildrenLimit(path);
              connectionLossRetryCount = 0;
            }
            // Re-throw the ConnectionLossException for retryUntilConnected() to catch and retry.
            throw e;
          }
        }
      });
      record(path, null, startT, ZkClientMonitor.AccessType.READ);
      return children;
    } catch (ZkNoNodeException e) {
      // A missing node still counts as a successful read for monitoring, then propagates.
      record(path, null, startT, ZkClientMonitor.AccessType.READ);
      throw e;
    } catch (Exception e) {
      recordFailure(path, ZkClientMonitor.AccessType.READ);
      throw e;
    } finally {
      long endT = System.currentTimeMillis();
      if (LOG.isTraceEnabled()) {
        LOG.trace("zkclient {} getChildren, path {} time: {} ms", _uid, path, (endT - startT) );
      }
    }
  }
/**
* Counts number of children for the given path.
* @param path
* @return number of children or 0 if path does not exist.
*/
public int countChildren(String path) {
try {
return getChildren(path).size();
} catch (ZkNoNodeException e) {
return 0;
}
}
public boolean exists(final String path) {
return exists(path, hasListeners(path));
}
protected boolean exists(final String path, final boolean watch) {
long startT = System.currentTimeMillis();
try {
boolean exists = retryUntilConnected(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
return getConnection().exists(path, watch);
}
});
record(path, null, startT, ZkClientMonitor.AccessType.READ);
return exists;
} catch (ZkNoNodeException e) {
record(path, null, startT, ZkClientMonitor.AccessType.READ);
throw e;
} catch (Exception e) {
recordFailure(path, ZkClientMonitor.AccessType.READ);
throw e;
} finally {
long endT = System.currentTimeMillis();
if (LOG.isTraceEnabled()) {
LOG.trace("zkclient exists, path: {}, time: {} ms", _uid, path, (endT - startT));
}
}
}
public Stat getStat(final String path) {
return getStat(path, false);
}
private Stat getStat(final String path, final boolean watch) {
long startT = System.currentTimeMillis();
final Stat stat;
try {
stat = retryUntilConnected(
() -> ((ZkConnection) getConnection()).getZookeeper().exists(path, watch));
record(path, null, startT, ZkClientMonitor.AccessType.READ);
return stat;
} catch (Exception e) {
recordFailure(path, ZkClientMonitor.AccessType.READ);
throw e;
} finally {
long endT = System.currentTimeMillis();
if (LOG.isTraceEnabled()) {
LOG.trace("zkclient exists, path: {}, time: {} ms", _uid, path, (endT - startT));
}
}
}
/*
* This one installs watch only if path is there. Meant to avoid leaking watch in Zk server.
*/
private Stat installWatchOnlyPathExist(final String path) {
long startT = System.currentTimeMillis();
final Stat stat;
try {
stat = new Stat();
try {
LOG.debug("installWatchOnlyPathExist with path: {} ", path);
retryUntilConnected(() -> ((ZkConnection) getConnection()).getZookeeper().getData(path, true, stat));
} catch (ZkNoNodeException e) {
LOG.debug("installWatchOnlyPathExist path not existing: {}", path);
record(path, null, startT, ZkClientMonitor.AccessType.READ);
return null;
}
record(path, null, startT, ZkClientMonitor.AccessType.READ);
return stat;
} catch (Exception e) {
recordFailure(path, ZkClientMonitor.AccessType.READ);
throw e;
} finally {
long endT = System.currentTimeMillis();
if (LOG.isTraceEnabled()) {
LOG.trace("zkclient getData (installWatchOnlyPathExist), path: {}, time: {} ms",
_uid, path, (endT - startT));
}
}
}
  /**
   * Handles a ZooKeeper connection-state change: records the new state, notifies state
   * listeners, fires new-session events on the first SyncConnected of a fresh session,
   * and triggers reconnection when the session expires.
   * @param event the state-change WatchedEvent from the ZooKeeper event thread
   */
  protected void processStateChanged(WatchedEvent event) {
    LOG.info("zkclient {}, zookeeper state changed ( {} )", _uid, event.getState());
    setCurrentState(event.getState());
    // During shutdown, suppress all further event propagation.
    if (getShutdownTrigger()) {
      return;
    }
    fireStateChangedEvent(event.getState());
    /*
     * Note, the intention is that only the ZkClient managing the session would do auto reconnect
     * and fireNewSessionEvents and fireAllEvent.
     * Other ZkClient not managing the session would only fireAllEvent upon a new session.
     */
    if (event.getState() == KeeperState.SyncConnected) {
      if (!_isNewSessionEventFired && !"0".equals(getHexSessionId())) {
        /*
         * Before the new zookeeper instance is connected to the zookeeper service and its session
         * is established, its session id is 0.
         * New session event is not fired until the new zookeeper session receives the first
         * SyncConnected state(the zookeeper session is established).
         * Now the session id is available and non-zero, and we can fire new session events.
         */
        fireNewSessionEvents();
        /*
         * Set it true to avoid firing events again for the same session next time
         * when SyncConnected events are received.
         */
        _isNewSessionEventFired = true;
        /*
         * With this first SyncConnected state, we just get connected to zookeeper service after
         * reconnecting when the session expired. Because previous session expired, we also have to
         * notify all listeners that something might have changed.
         */
        fireAllEvents();
      }
    } else if (event.getState() == KeeperState.Expired) {
      _isNewSessionEventFired = false;
      reconnectOnExpiring();
    }
  }
private void reconnectOnExpiring() {
// only managing zkclient reconnect
if (!isManagingZkConnection()) {
return;
}
int retryCount = 0;
ExponentialBackoffStrategy retryStrategy =
new ExponentialBackoffStrategy(MAX_RECONNECT_INTERVAL_MS, true);
Exception reconnectException = new ZkException("Shutdown triggered.");
while (!isClosed()) {
try {
reconnect();
return;
} catch (ZkInterruptedException interrupt) {
reconnectException = interrupt;
break;
} catch (Exception e) {
reconnectException = e;
long waitInterval = retryStrategy.getNextWaitInterval(retryCount++);
LOG.warn("ZkClient {}, reconnect on expiring failed. Will retry after {} ms",
_uid, waitInterval, e);
try {
Thread.sleep(waitInterval);
} catch (InterruptedException ex) {
reconnectException = ex;
break;
}
}
}
LOG.info("Zkclient {} unable to re-establish connection. Notifying consumer of the following exception:{}",
_uid, reconnectException);
fireSessionEstablishmentError(reconnectException);
}
private void reconnect() {
getEventLock().lock();
try {
ZkConnection connection = ((ZkConnection) getConnection());
connection.reconnect(this);
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} finally {
getEventLock().unlock();
}
}
  /**
   * Issues an asynchronous sync() to the server with retry support; on submission failure
   * the callback is completed with APIERROR so the waiting caller is released.
   */
  private void doAsyncSync(final ZooKeeper zk, final String path, final long startT,
      final ZkAsyncCallbacks.SyncCallbackHandler cb) {
    try {
      zk.sync(path, cb,
          new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, _monitor, startT, 0, true) {
            @Override
            protected void doRetry() throws Exception {
              doAsyncSync(zk, path, System.currentTimeMillis(), cb);
            }
          });
    } catch (RuntimeException e) {
      // Process callback to release caller from waiting
      cb.processResult(KeeperException.Code.APIERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, true));
      throw e;
    }
  }
  /*
   * Note, issueSync takes a ZooKeeper (client) object and pass it to doAsyncSync().
   * The reason we do this is that we want to ensure each new session event is preceded with exactly
   * one sync() to server. The sync() is to make sure the server would not see stale data.
   *
   * ZooKeeper client object has an invariant of each object has one session. With this invariant
   * we can achieve each one sync() to server upon new session establishment. The reasoning is:
   * issueSync() is called when fireNewSessionEvents() which in under eventLock of ZkClient. Thus
   * we are guaranteed the ZooKeeper object passed in would have the new incoming sessionId. If by
   * the time sync() is invoked, the session expires. The sync() would fail with a stale session.
   * This is exactly what we want. The newer session would ensure another fireNewSessionEvents.
   */
  private boolean issueSync(ZooKeeper zk) {
    String sessionId = Long.toHexString(zk.getSessionId());
    ZkAsyncCallbacks.SyncCallbackHandler callbackHandler =
        new ZkAsyncCallbacks.SyncCallbackHandler(sessionId);
    final long startT = System.currentTimeMillis();
    doAsyncSync(zk, SYNC_PATH, startT, callbackHandler);
    // Blocks until the callback completes (success or a non-retryable error).
    callbackHandler.waitForSuccess();
    KeeperException.Code code = KeeperException.Code.get(callbackHandler.getRc());
    if (code == KeeperException.Code.OK) {
      LOG.info("zkclient {}, sycnOnNewSession with sessionID {} async return code: {} and proceeds",
          _uid, sessionId, code);
      return true;
    }
    // Not retryable error, including session expiration; return false.
    return false;
  }
private void fireNewSessionEvents() {
// only managing zkclient fire handleNewSession event
if (!isManagingZkConnection()) {
return;
}
final String sessionId = getHexSessionId();
if (SYNC_ON_SESSION) {
final ZooKeeper zk = ((ZkConnection) getConnection()).getZookeeper();
_eventThread.send(new ZkEventThread.ZkEvent("Sync call before new session event of session " + sessionId,
sessionId) {
@Override
public void run() throws Exception {
if (issueSync(zk) == false) {
LOG.warn("zkclient{}, Failed to call sync() on new session {}", _uid, sessionId);
}
}
});
}
for (final IZkStateListener stateListener : _stateListener) {
_eventThread
.send(new ZkEventThread.ZkEvent("New session event sent to " + stateListener, sessionId) {
@Override
public void run() throws Exception {
stateListener.handleNewSession(sessionId);
}
});
}
}
  /**
   * Queues a state-changed notification for every registered state listener on the
   * event thread.
   * @param state the new ZooKeeper KeeperState
   */
  protected void fireStateChangedEvent(final KeeperState state) {
    final String sessionId = getHexSessionId();
    for (final IZkStateListener stateListener : _stateListener) {
      final String description = "State changed to " + state + " sent to " + stateListener;
      _eventThread.send(new ZkEventThread.ZkEvent(description, sessionId) {
        @Override
        public void run() throws Exception {
          stateListener.handleStateChanged(state);
        }
      });
    }
  }
  /**
   * Queues a session-establishment-error notification for every registered state
   * listener on the event thread.
   * @param error the terminal error encountered while re-establishing the session
   */
  private void fireSessionEstablishmentError(final Throwable error) {
    for (final IZkStateListener stateListener : _stateListener) {
      _eventThread
          .send(new ZkEventThread.ZkEvent("Session establishment error(" + error + ") sent to " + stateListener) {
            @Override
            public void run() throws Exception {
              stateListener.handleSessionEstablishmentError(error);
            }
          });
    }
  }
private boolean hasListeners(String path) {
Set<IZkDataListenerEntry> dataListeners = _dataListener.get(path);
if (dataListeners != null && dataListeners.size() > 0) {
return true;
}
Set<IZkChildListener> childListeners = _childListener.get(path);
if (childListeners != null && childListeners.size() > 0) {
return true;
}
return false;
}
/**
* Delete the path as well as all its children.
* This method is deprecated, please use {@link #deleteRecursively(String)}} instead
* @param path ZK path
* @return true if successfully deleted all children, and the given path, else false
*/
@Deprecated
public boolean deleteRecursive(String path) {
try {
deleteRecursively(path);
return true;
} catch (ZkClientException e) {
LOG.error("zkcient {}, Failed to recursively delete path {}, exception {}",
_uid, path, e);
return false;
}
}
/**
* Delete the path as well as all its children.
* @param path
* @throws ZkClientException
*/
public void deleteRecursively(String path) throws ZkClientException {
List<String> children;
try {
children = getChildren(path, false);
} catch (ZkNoNodeException e) {
// if the node to be deleted does not exist, treat it as success.
return;
}
for (String subPath : children) {
deleteRecursively(path + "/" + subPath);
}
// delete() function call will return true if successful, false if the path does not
// exist (in this context, it should be treated as successful), and throw exception
// if there is any other failure case.
try {
delete(path);
} catch (Exception e) {
LOG.error("zkclient {}, Failed to delete {}, exception {}", _uid, path, e);
throw new ZkClientException("Failed to delete " + path, e);
}
}
  /**
   * Dispatches a node data/child watch event to the registered listeners for the path.
   * Create/delete events affect both child and data listeners; NodeChildrenChanged only
   * child listeners, NodeDataChanged only data listeners.
   * @param event the watched event (NodeCreated/NodeDeleted/NodeDataChanged/NodeChildrenChanged)
   * @param notificationTime wall-clock time the notification was received, for latency tracking
   */
  private void processDataOrChildChange(WatchedEvent event, long notificationTime) {
    final String path = event.getPath();
    // Deletion is the only event type implying the path is now gone.
    final boolean pathExists = event.getType() != EventType.NodeDeleted;
    if (EventType.NodeDeleted == event.getType()) {
      LOG.debug("zkclient{}, Event NodeDeleted: {}", _uid, event.getPath());
    }
    if (event.getType() == EventType.NodeChildrenChanged || event.getType() == EventType.NodeCreated
        || event.getType() == EventType.NodeDeleted) {
      Set<IZkChildListener> childListeners = _childListener.get(path);
      if (childListeners != null && !childListeners.isEmpty()) {
        // TODO recording child changed event propagation latency as well. Note this change will
        // introduce additional ZK access.
        fireChildChangedEvents(path, childListeners, pathExists);
      }
    }
    if (event.getType() == EventType.NodeDataChanged || event.getType() == EventType.NodeDeleted
        || event.getType() == EventType.NodeCreated) {
      Set<IZkDataListenerEntry> listeners = _dataListener.get(path);
      if (listeners != null && !listeners.isEmpty()) {
        fireDataChangedEvents(event.getPath(), listeners, OptionalLong.of(notificationTime),
            pathExists);
      }
    }
  }
  /**
   * Queues data-changed (or data-deleted) callbacks for each data listener of the path.
   * The path's stat is checked once per event (shared via ZkPathStatRecord) and the watch
   * is re-installed in a way that avoids leaking watches for non-existent nodes.
   * @param path the changed ZK path
   * @param listeners data listeners registered on the path
   * @param notificationTime original notification time, used for latency recording
   * @param pathExists whether the triggering event implies the path still exists
   */
  private void fireDataChangedEvents(final String path, Set<IZkDataListenerEntry> listeners,
      final OptionalLong notificationTime, boolean pathExists) {
    try {
      final ZkPathStatRecord pathStatRecord = new ZkPathStatRecord(path);
      // Trigger listener callbacks
      for (final IZkDataListenerEntry listener : listeners) {
        _eventThread.send(new ZkEventThread.ZkEvent(
            "Data of " + path + " changed sent to " + listener.getDataListener()
                + " prefetch data: " + listener.isPrefetchData()) {
          @Override
          public void run() throws Exception {
            if (!pathStatRecord.pathChecked()) {
              // getStat() wraps two ways to install a data watch: exists() or getData().
              // getData() aka useGetData (true) would not install the watch if the node is not
              // existing. exists() aka useGetData (false) would install (leak) the watch if the
              // node is not existing.
              // Here the goal is to avoid leaking watches. Thus, if we know the path does not
              // exist, we use the exists() useGetData (false) route to check stat. Otherwise, we
              // use getData() to install the watch.
              Stat stat = null;
              if (!pathExists) {
                stat = getStat(path, false);
              } else {
                stat = installWatchOnlyPathExist(path);
              }
              pathStatRecord.recordPathStat(stat, notificationTime);
            }
            if (!pathStatRecord.pathExists()) {
              listener.getDataListener().handleDataDeleted(path);
            } else {
              Object data = null;
              if (listener.isPrefetchData()) {
                if (LOG.isDebugEnabled()) {
                  LOG.debug("zkclient {} Prefetch data for path: {}", _uid, path);
                }
                try {
                  // TODO: the data is redundantly read multiple times when multiple listeners exist
                  data = readData(path, null, true);
                } catch (ZkNoNodeException e) {
                  // Node vanished between stat check and read: report deletion instead.
                  LOG.warn("zkclient {} Prefetch data for path: {} failed.", _uid, path, e);
                  listener.getDataListener().handleDataDeleted(path);
                  return;
                }
              }
              listener.getDataListener().handleDataChange(path, data);
            }
          }
        });
      }
    } catch (Exception e) {
      LOG.error("zkclient {} Failed to fire data changed event for path: {}", _uid, path, e);
    }
  }
  /**
   * Queues child-changed callbacks for each child listener of the path. The path's stat is
   * checked once per event (shared via ZkPathStatRecord); if the path no longer exists the
   * listener is invoked with null children.
   * @param path the changed ZK path
   * @param childListeners child listeners registered on the path
   * @param pathExists whether the triggering event implies the path still exists
   */
  private void fireChildChangedEvents(final String path, Set<IZkChildListener> childListeners, boolean pathExists) {
    try {
      final ZkPathStatRecord pathStatRecord = new ZkPathStatRecord(path);
      for (final IZkChildListener listener : childListeners) {
        _eventThread.send(new ZkEventThread.ZkEvent("Children of " + path + " changed sent to " + listener) {
          @Override
          public void run() throws Exception {
            if (!pathStatRecord.pathChecked()) {
              Stat stat = null;
              if (!pathExists || !hasListeners(path)) {
                // will not install listener using exists call
                stat = getStat(path, false);
              } else {
                // will install listener using getData() call; if node not there, install nothing
                stat = installWatchOnlyPathExist(path);
              }
              pathStatRecord.recordPathStat(stat, OptionalLong.empty());
            }
            List<String> children = null;
            if (pathStatRecord.pathExists()) {
              try {
                children = getChildren(path);
              } catch (ZkNoNodeException e) {
                LOG.warn("zkclient {} Get children under path: {} failed.", _uid, path, e);
                // Continue trigger the change handler
              }
            }
            listener.handleChildChange(path, children);
          }
        });
      }
    } catch (Exception e) {
      LOG.error("zkclient {} Failed to fire child changed event. Unable to getChildren.", _uid, e);
    }
  }
public boolean waitUntilExists(String path, TimeUnit timeUnit, long time)
throws ZkInterruptedException {
Date timeout = new Date(System.currentTimeMillis() + timeUnit.toMillis(time));
if (LOG.isDebugEnabled()) {
LOG.debug("Waiting until znode {} becomes available.", _uid, path);
}
if (exists(path)) {
return true;
}
acquireEventLock();
try {
while (!exists(path, true)) {
boolean gotSignal = getEventLock().getZNodeEventCondition().awaitUntil(timeout);
if (!gotSignal) {
return false;
}
}
return true;
} catch (InterruptedException e) {
throw new ZkInterruptedException(e);
} finally {
getEventLock().unlock();
}
}
  /** Returns the underlying IZkConnection used by this client. */
  public IZkConnection getConnection() {
    return _connection;
  }
  /**
   * Waits for the session to reach SyncConnected and returns its session id, read while
   * still holding the event lock so the id cannot change underneath the caller.
   * @throws ZkTimeoutException if the connection is not established within the timeout
   */
  public long waitForEstablishedSession(long timeout, TimeUnit timeUnit) {
    validateCurrentThread();
    acquireEventLock();
    try {
      if (!waitForKeeperState(KeeperState.SyncConnected, timeout, timeUnit)) {
        throw new ZkTimeoutException("Waiting to be connected to ZK server has timed out.");
      }
      // Reading session ID before unlocking event lock is critical to guarantee the established
      // session's ID won't change.
      return getSessionId();
    } finally {
      getEventLock().unlock();
    }
  }
  /** Waits until the client reaches SyncConnected, up to the given timeout. */
  public boolean waitUntilConnected(long time, TimeUnit timeUnit) throws ZkInterruptedException {
    return waitForKeeperState(KeeperState.SyncConnected, time, timeUnit);
  }
  /**
   * Blocks until the client's KeeperState equals the requested state or the timeout elapses.
   * @return true if the state was reached, false on timeout
   * @throws ZkInterruptedException if the wait is interrupted
   */
  public boolean waitForKeeperState(KeeperState keeperState, long time, TimeUnit timeUnit)
      throws ZkInterruptedException {
    validateCurrentThread();
    Date timeout = new Date(System.currentTimeMillis() + timeUnit.toMillis(time));
    LOG.debug("zkclient {}, Waiting for keeper state {} ", _uid, keeperState);
    acquireEventLock();
    try {
      boolean stillWaiting = true;
      // stillWaiting is false after awaitUntil() times out; one more state check is done
      // before giving up in case the state changed right at the deadline.
      while (_currentState != keeperState) {
        if (!stillWaiting) {
          return false;
        }
        stillWaiting = getEventLock().getStateChangedCondition().awaitUntil(timeout);
      }
      LOG.debug("zkclient {} State is {}",
          _uid, (_currentState == null ? "CLOSED" : _currentState));
      return true;
    } catch (InterruptedException e) {
      throw new ZkInterruptedException(e);
    } finally {
      getEventLock().unlock();
    }
  }
  /** Acquires the event lock interruptibly, converting interruption to ZkInterruptedException. */
  private void acquireEventLock() {
    try {
      getEventLock().lockInterruptibly();
    } catch (InterruptedException e) {
      throw new ZkInterruptedException(e);
    }
  }
  /**
   * Runs the given callable, transparently retrying on connection loss or session expiration
   * until it succeeds or the operation retry timeout elapses. Must not be invoked from the
   * ZooKeeper event thread (that would deadlock event processing).
   * @param <T>
   * @param callable
   * @return result of Callable
   * @throws ZkInterruptedException
   *           if operation was interrupted, or a required reconnection got interrupted
   * @throws IllegalArgumentException
   *           if called from anything except the ZooKeeper event thread
   * @throws ZkException
   *           if any ZooKeeper exception occurred
   * @throws RuntimeException
   *           if any other exception occurs from invoking the Callable
   */
  public <T> T retryUntilConnected(final Callable<T> callable)
      throws IllegalArgumentException, ZkException {
    if (_zookeeperEventThread != null && Thread.currentThread() == _zookeeperEventThread) {
      throw new IllegalArgumentException("Must not be done in the zookeeper event thread.");
    }
    final long operationStartTime = System.currentTimeMillis();
    if (_monitor != null) {
      _monitor.increaseOutstandingRequestGauge();
    }
    try {
      while (true) {
        // Because ConnectionLossException and SessionExpiredException are caught but not thrown,
        // we don't know what causes retry. This is used to record which one of the two exceptions
        // causes retry in ZkTimeoutException.
        // This also helps the test testConnectionLossWhileCreateEphemeral.
        KeeperException.Code retryCauseCode;
        if (isClosed()) {
          throw new IllegalStateException("ZkClient already closed!");
        }
        try {
          final ZkConnection zkConnection = (ZkConnection) getConnection();
          // Validate that the connection is not null before trigger callback
          if (zkConnection == null || zkConnection.getZookeeper() == null) {
            throw new IllegalStateException(
                "ZkConnection is in invalid state! Please close this ZkClient and create new client.");
          }
          return callable.call();
        } catch (ConnectionLossException e) {
          retryCauseCode = e.code();
          // we give the event thread some time to update the status to 'Disconnected'
          Thread.yield();
          waitForRetry();
        } catch (SessionExpiredException e) {
          retryCauseCode = e.code();
          // we give the event thread some time to update the status to 'Expired'
          Thread.yield();
          waitForRetry();
        } catch (ZkSessionMismatchedException e) {
          throw e;
        } catch (KeeperException e) {
          throw ZkException.create(e);
        } catch (InterruptedException e) {
          throw new ZkInterruptedException(e);
        } catch (Exception e) {
          throw ExceptionUtil.convertToRuntimeException(e);
        }
        LOG.debug("zkclient {}, Retrying operation, caused by {}", _uid,retryCauseCode);
        // before attempting a retry, check whether retry timeout has elapsed
        if (System.currentTimeMillis() - operationStartTime > _operationRetryTimeoutInMillis) {
          throw new ZkTimeoutException("Operation cannot be retried because of retry timeout ("
              + _operationRetryTimeoutInMillis + " milli seconds). Retry was caused by "
              + retryCauseCode);
        }
      }
    } finally {
      if (_monitor != null) {
        _monitor.decreaseOutstandingRequestGauge();
      }
    }
  }
  /** Blocks (up to the operation retry timeout) for the connection to come back before retrying. */
  private void waitForRetry() {
    waitUntilConnected(_operationRetryTimeoutInMillis, TimeUnit.MILLISECONDS);
  }
  /** Updates the cached KeeperState under the event lock so readers see a consistent value. */
  public void setCurrentState(KeeperState currentState) {
    getEventLock().lock();
    try {
      _currentState = currentState;
    } finally {
      getEventLock().unlock();
    }
  }
  /**
   * Returns a mutex all zookeeper events are synchronized against. So in case you need to do
   * something without getting
   * any zookeeper event interruption synchronize against this mutex. Also all threads waiting on
   * this mutex object
   * will be notified on an event.
   * @return the mutex.
   */
  public ZkLock getEventLock() {
    return _zkEventLock;
  }
  /**
   * Delete the given path. Path should not have any children or the deletion will fail.
   * This function will throw exception if we fail to delete an existing path
   * @param path
   * @return true if path is successfully deleted, false if path does not exist
   */
  public boolean delete(final String path) {
    long startT = System.currentTimeMillis();
    boolean success;
    try {
      try {
        retryUntilConnected(new Callable<Object>() {
          @Override
          public Object call() throws Exception {
            getConnection().delete(path);
            return null;
          }
        });
        success = true;
      } catch (ZkNoNodeException e) {
        // A missing node is not an error for delete(); report false instead of throwing.
        success = false;
        if (LOG.isDebugEnabled()) {
          LOG.debug("zkclient {}, Failed to delete path {}, znode does not exist!", _uid, path);
        }
      }
      record(path, null, startT, ZkClientMonitor.AccessType.WRITE);
    } catch (Exception e) {
      recordFailure(path, ZkClientMonitor.AccessType.WRITE);
      LOG.warn("zkclient {}, Failed to delete path {}! ", _uid, path, e);
      throw e;
    } finally {
      long endT = System.currentTimeMillis();
      if (LOG.isTraceEnabled()) {
        LOG.trace("zkclient {} delete, path: {}, time {} ms", _uid, path, (endT - startT));
      }
    }
    return success;
  }
  /** Sets the serializer, wrapping it so the same serializer applies to every path. */
  public void setZkSerializer(ZkSerializer zkSerializer) {
    _pathBasedZkSerializer = new BasicZkSerializer(zkSerializer);
  }
  /** Sets a serializer that may choose its strategy per ZK path. */
  public void setZkSerializer(PathBasedZkSerializer zkSerializer) {
    _pathBasedZkSerializer = zkSerializer;
  }
  /** Returns the current path-based serializer. */
  public PathBasedZkSerializer getZkSerializer() {
    return _pathBasedZkSerializer;
  }
  /** Serializes the given object into bytes for the given path. */
  public byte[] serialize(Object data, String path) {
    return _pathBasedZkSerializer.serialize(data, path);
  }
@SuppressWarnings("unchecked")
public <T extends Object> T deserialize(byte[] data, String path) {
if (data == null) {
return null;
}
return (T) _pathBasedZkSerializer.deserialize(data, path);
}
  /** Reads and deserializes the data at the given path; throws ZkNoNodeException if absent. */
  @SuppressWarnings("unchecked")
  public <T extends Object> T readData(String path) {
    return (T) readData(path, false);
  }
  /**
   * Reads and deserializes the data at the given path.
   * @param returnNullIfPathNotExists when true, a missing node yields null instead of
   *          ZkNoNodeException
   */
  @SuppressWarnings("unchecked")
  public <T extends Object> T readData(String path, boolean returnNullIfPathNotExists) {
    T data = null;
    try {
      data = (T) readData(path, null);
    } catch (ZkNoNodeException e) {
      if (!returnNullIfPathNotExists) {
        throw e;
      }
    }
    return data;
  }
  /**
   * Reads data and fills the caller-supplied Stat. A watch is installed only if listeners
   * are registered on the path.
   */
  @SuppressWarnings("unchecked")
  public <T extends Object> T readData(String path, Stat stat) {
    return (T) readData(path, stat, hasListeners(path));
  }
  /**
   * Core read: fetches bytes (with retry on connection loss), records metrics, and
   * deserializes via the path-based serializer.
   * @param watch whether to install a watch on the path
   * @throws ZkNoNodeException if the node does not exist
   */
  @SuppressWarnings("unchecked")
  public <T extends Object> T readData(final String path, final Stat stat, final boolean watch) {
    long startT = System.currentTimeMillis();
    byte[] data = null;
    try {
      data = retryUntilConnected(new Callable<byte[]>() {
        @Override
        public byte[] call() throws Exception {
          return getConnection().readData(path, stat, watch);
        }
      });
      record(path, data, startT, ZkClientMonitor.AccessType.READ);
      return (T) deserialize(data, path);
    } catch (ZkNoNodeException e) {
      // Missing node is still a completed read for monitoring purposes.
      record(path, data, startT, ZkClientMonitor.AccessType.READ);
      throw e;
    } catch (Exception e) {
      recordFailure(path, ZkClientMonitor.AccessType.READ);
      throw e;
    } finally {
      long endT = System.currentTimeMillis();
      if (LOG.isTraceEnabled()) {
        LOG.trace("zkclient {}, getData, path {}, time {} ms", _uid, path, (endT - startT));
      }
    }
  }
  /**
   * Reads data and fills the caller-supplied Stat, optionally returning null for a
   * missing node instead of throwing ZkNoNodeException.
   */
  @SuppressWarnings("unchecked")
  public <T extends Object> T readDataAndStat(String path, Stat stat,
      boolean returnNullIfPathNotExists) {
    T data = null;
    try {
      data = readData(path, stat);
    } catch (ZkNoNodeException e) {
      if (!returnNullIfPathNotExists) {
        throw e;
      }
    }
    return data;
  }
  /** Writes data to the path unconditionally (expected version -1 matches any version). */
  public void writeData(String path, Object object) {
    writeData(path, object, -1);
  }
/**
* Updates data of an existing znode. The current content of the znode is passed to the
* {@link DataUpdater} that is
* passed into this method, which returns the new content. The new content is only written back to
* ZooKeeper if
* nobody has modified the given znode in between. If a concurrent change has been detected the
* new data of the
* znode is passed to the updater once again until the new contents can be successfully written
* back to ZooKeeper.
* @param <T>
* @param path
* The path of the znode.
* @param updater
* Updater that creates the new contents.
*/
@SuppressWarnings("unchecked")
public <T extends Object> void updateDataSerialized(String path, DataUpdater<T> updater) {
Stat stat = new Stat();
boolean retry;
do {
retry = false;
try {
T oldData = (T) readData(path, stat);
T newData = updater.update(oldData);
writeData(path, newData, stat.getVersion());
} catch (ZkBadVersionException e) {
retry = true;
}
} while (retry);
}
  /** Writes data to the path if its version matches expectedVersion (-1 matches any). */
  public void writeData(final String path, Object datat, final int expectedVersion) {
    writeDataReturnStat(path, datat, expectedVersion);
  }
public Stat writeDataReturnStat(final String path, Object datat, final int expectedVersion) {
long startT = System.currentTimeMillis();
try {
final byte[] data = serialize(datat, path);
checkDataSizeLimit(path, data);
final Stat stat = (Stat) retryUntilConnected(
(Callable<Object>) () -> getConnection().writeDataReturnStat(path, data, expectedVersion));
record(path, data, startT, ZkClientMonitor.AccessType.WRITE);
return stat;
} catch (Exception e) {
recordFailure(path, ZkClientMonitor.AccessType.WRITE);
throw e;
} finally {
long endT = System.currentTimeMillis();
if (LOG.isTraceEnabled()) {
LOG.trace("zkclient {}, setData, path {}, time {} ms", _uid, path, (endT - startT));
}
}
}
  /** Alias for {@link #writeDataReturnStat(String, Object, int)}. */
  public Stat writeDataGetStat(final String path, Object datat, final int expectedVersion) {
    return writeDataReturnStat(path, datat, expectedVersion);
  }
  /**
   * Asynchronously creates a node at the path with the given mode. Serialization errors
   * complete the callback immediately with MARSHALLINGERROR instead of contacting the server.
   */
  public void asyncCreate(final String path, Object datat, final CreateMode mode,
      final ZkAsyncCallbacks.CreateCallbackHandler cb) {
    final long startT = System.currentTimeMillis();
    final byte[] data;
    try {
      data = (datat == null ? null : serialize(datat, path));
    } catch (ZkMarshallingError e) {
      cb.processResult(KeeperException.Code.MARSHALLINGERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, false), null);
      return;
    }
    doAsyncCreate(path, data, mode, startT, cb, parseExpectedSessionId(datat));
  }
  /**
   * Issues the async create with retry support, bound to the expected session if one was
   * embedded in the payload. On submission failure the callback is completed with APIERROR
   * so the waiting caller is released.
   */
  private void doAsyncCreate(final String path, final byte[] data, final CreateMode mode,
      final long startT, final ZkAsyncCallbacks.CreateCallbackHandler cb, final String expectedSessionId) {
    try {
      retryUntilConnected(() -> {
        getExpectedZookeeper(expectedSessionId).create(path, data, ZooDefs.Ids.OPEN_ACL_UNSAFE, mode, cb,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, _monitor, startT, 0, false,
                GZipCompressionUtil.isCompressed(data)) {
              @Override
              protected void doRetry() {
                doAsyncCreate(path, data, mode, System.currentTimeMillis(), cb, expectedSessionId);
              }
            });
        return null;
      });
    } catch (RuntimeException e) {
      // Process callback to release caller from waiting
      cb.processResult(KeeperException.Code.APIERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, false), null);
      throw e;
    }
  }
  // Async Data Accessors
  /**
   * Asynchronously sets data on the path at the given version. Serialization errors
   * complete the callback immediately with MARSHALLINGERROR instead of contacting the server.
   */
  public void asyncSetData(final String path, Object datat, final int version,
      final ZkAsyncCallbacks.SetDataCallbackHandler cb) {
    final long startT = System.currentTimeMillis();
    final byte[] data;
    try {
      data = serialize(datat, path);
    } catch (ZkMarshallingError e) {
      cb.processResult(KeeperException.Code.MARSHALLINGERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, false), null);
      return;
    }
    doAsyncSetData(path, data, version, startT, cb, parseExpectedSessionId(datat));
  }
  /**
   * Issues the async setData with retry support, bound to the expected session if one was
   * embedded in the payload. On submission failure the callback is completed with APIERROR
   * so the waiting caller is released.
   */
  private void doAsyncSetData(final String path, byte[] data, final int version, final long startT,
      final ZkAsyncCallbacks.SetDataCallbackHandler cb, final String expectedSessionId) {
    try {
      retryUntilConnected(() -> {
        getExpectedZookeeper(expectedSessionId).setData(path, data, version, cb,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, _monitor, startT, data == null ? 0 : data.length,
                false, GZipCompressionUtil.isCompressed(data)) {
              @Override
              protected void doRetry() {
                doAsyncSetData(path, data, version, System.currentTimeMillis(), cb, expectedSessionId);
              }
            });
        return null;
      });
    } catch (RuntimeException e) {
      // Process callback to release caller from waiting
      cb.processResult(KeeperException.Code.APIERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, false), null);
      throw e;
    }
  }
  /**
   * Asynchronously reads data at the path with retry support. On submission failure the
   * callback is completed with APIERROR so the waiting caller is released.
   */
  public void asyncGetData(final String path, final ZkAsyncCallbacks.GetDataCallbackHandler cb) {
    final long startT = System.currentTimeMillis();
    try {
      retryUntilConnected(() -> {
        ((ZkConnection) getConnection()).getZookeeper().getData(path, null, cb,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, _monitor, startT, 0, true) {
              @Override
              protected void doRetry() {
                asyncGetData(path, cb);
              }
            });
        return null;
      });
    } catch (RuntimeException e) {
      // Process callback to release caller from waiting
      cb.processResult(KeeperException.Code.APIERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, true), null, null);
      throw e;
    }
  }
  /**
   * Asynchronously checks existence of the path with retry support. On submission failure
   * the callback is completed with APIERROR so the waiting caller is released.
   */
  public void asyncExists(final String path, final ZkAsyncCallbacks.ExistsCallbackHandler cb) {
    final long startT = System.currentTimeMillis();
    try {
      retryUntilConnected(() -> {
        ((ZkConnection) getConnection()).getZookeeper().exists(path, null, cb,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, _monitor, startT, 0, true) {
              @Override
              protected void doRetry() {
                asyncExists(path, cb);
              }
            });
        return null;
      });
    } catch (RuntimeException e) {
      // Process callback to release caller from waiting
      cb.processResult(KeeperException.Code.APIERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, true), null);
      throw e;
    }
  }
  /**
   * Asynchronously deletes the path (any version) with retry support. On submission failure
   * the callback is completed with APIERROR so the waiting caller is released.
   */
  public void asyncDelete(final String path, final ZkAsyncCallbacks.DeleteCallbackHandler cb) {
    final long startT = System.currentTimeMillis();
    try {
      retryUntilConnected(() -> {
        ((ZkConnection) getConnection()).getZookeeper().delete(path, -1, cb,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, _monitor, startT, 0, false) {
              @Override
              protected void doRetry() {
                asyncDelete(path, cb);
              }
            });
        return null;
      });
    } catch (RuntimeException e) {
      // Process callback to release caller from waiting
      cb.processResult(KeeperException.Code.APIERROR.intValue(), path,
          new ZkAsyncCallMonitorContext(_monitor, startT, 0, false));
      throw e;
    }
  }
private void checkDataSizeLimit(String path, byte[] data) {
if (data == null) {
return;
}
if (data.length > WRITE_SIZE_LIMIT) {
throw new ZkClientException("Data size of path " + path
+ " is greater than write size limit "
+ WRITE_SIZE_LIMIT + " bytes");
}
}
  /** Installs a data watch on the path via exists(), even if the node does not exist yet. */
  public void watchForData(final String path) {
    watchForData(path, false);
  }
  /**
   * Installs a data watch on the path.
   * @param skipWatchingNonExistNode when true, uses getData() so no watch is left (leaked)
   *          on the server if the node does not exist
   * @return true if the watch was installed, false if skipping and the node is absent
   */
  private boolean watchForData(final String path, boolean skipWatchingNonExistNode) {
    try {
      if (skipWatchingNonExistNode) {
        retryUntilConnected(() -> (((ZkConnection) getConnection()).getZookeeper().getData(path, true, new Stat())));
      } else {
        retryUntilConnected(() -> (((ZkConnection) getConnection()).getZookeeper().exists(path, true)));
      }
    } catch (ZkNoNodeException e) {
      // Do nothing, this is what we want as this is not going to leak watch in ZooKeeepr server.
      LOG.info("zkclient {}, watchForData path not existing: {} ", _uid, path);
      return false;
    }
    return true;
  }
  /**
   * Installs a child watch for the given path.
   * @param path
   * @return the current children of the path or null if the zk node with the given path doesn't
   *         exist.
   */
  public List<String> watchForChilds(final String path) {
    return watchForChilds(path, false);
  }
  /**
   * The following captures how we reason about the Zookeeper watch leakage issue based on various
   * comments in review:
   * 1. Removal of a parent zk path (such as currentstate/sessionid) is async to all threads in
   * a Helix router or controller which watches the path. Thus, if we install a watch to a path
   * expected to be created, we always have the risk of leaking if the path changed.
   *
   * 2. Currently the CallbackHandler life cycle is like this:
   * A CallbackHandler for currentstate and some others can be created before the parent path is
   * created. Thus, we still need the exists() call. This corresponds to the INIT change type of
   * CallbackHandler. This is the time eventually watchForChilds() will be called with
   * skipWatchingNonExistNode as false.
   * Aside from creation time, the CallbackHandler normal cycle would see the CALLBACK change type.
   * This time we should normally expect the parent path is created. Thus, the subscription from
   * CallbackHandler would use skipWatchingNonExistNode false. Avoid leaking path.
   * Note, if the path is removed, CallbackHandler would see children of parent path as null. This
   * would end the CallbackHandler's life.
   *
   * From the above life cycle of CallbackHandler, we know the only place that can leak is at
   * INIT change type time, when the participant expires the session more than twice in a row
   * before the watchForChilds(skipWatchingNonExistNode=false) issues the exists() call.
   *
   * The chance of this sequence is slim though.
   *
   */
  private List<String> watchForChilds(final String path, boolean skipWatchingNonExistNode) {
    // Must not run on the ZooKeeper event thread: retryUntilConnected may block on reconnect.
    if (_zookeeperEventThread != null && Thread.currentThread() == _zookeeperEventThread) {
      throw new IllegalArgumentException("Must not be done in the zookeeper event thread.");
    }
    return retryUntilConnected(new Callable<List<String>>() {
      @Override
      public List<String> call() throws Exception {
        if (!skipWatchingNonExistNode) {
          exists(path, true);
        }
        try {
          return getChildren(path, true);
        } catch (ZkNoNodeException e) {
          // ignore, the "exists" watch will listen for the parent node to appear
          LOG.info("zkclient{} watchForChilds path not existing:{} skipWatchingNodeNoteExist: {}",
              _uid, path, skipWatchingNonExistNode);
        }
        return null;
      }
    });
  }
/**
* Add authentication information to the connection. This will be used to identify the user and
* check access to
* nodes protected by ACLs
* @param scheme
* @param auth
*/
public void addAuthInfo(final String scheme, final byte[] auth) {
retryUntilConnected(new Callable<Object>() {
@Override
public Object call() throws Exception {
getConnection().addAuthInfo(scheme, auth);
return null;
}
});
}
  /**
   * Connect to ZooKeeper: starts the event thread, then (when this client manages the
   * connection) opens the connection and blocks until SyncConnected; otherwise just
   * validates the externally managed connection. On any failure the client is closed so
   * the ZooKeeper instance does not keep retrying in the background.
   * @param maxMsToWaitUntilConnected
   * @param watcher
   * @throws ZkInterruptedException
   *           if the connection timed out due to thread interruption
   * @throws ZkTimeoutException
   *           if the connection timed out
   * @throws IllegalStateException
   *           if the connection timed out due to thread interruption
   */
  public void connect(final long maxMsToWaitUntilConnected, Watcher watcher)
      throws ZkInterruptedException, ZkTimeoutException, IllegalStateException {
    if (isClosed()) {
      throw new IllegalStateException("ZkClient already closed!");
    }
    boolean started = false;
    acquireEventLock();
    try {
      setShutdownTrigger(false);
      IZkConnection zkConnection = getConnection();
      _eventThread = new ZkEventThread(zkConnection.getServers());
      if (_monitor != null) {
        boolean result = _monitor.setAndInitZkEventThreadMonitor(_eventThread);
        if (!result) {
          LOG.error("register _eventThread monitor failed due to an existing one");
        }
      }
      _eventThread.start();
      LOG.debug("ZkClient {}, _eventThread {}", _uid, _eventThread.getId());
      if (isManagingZkConnection()) {
        zkConnection.connect(watcher);
        LOG.debug("zkclient{} Awaiting connection to Zookeeper server", _uid);
        if (!waitUntilConnected(maxMsToWaitUntilConnected, TimeUnit.MILLISECONDS)) {
          throw new ZkTimeoutException(
              "Unable to connect to zookeeper server within timeout: " + maxMsToWaitUntilConnected);
        }
      } else {
        // if the client is not managing connection, the input connection is supposed to connect.
        if (isConnectionClosed()) {
          throw new ZkClientException(
              "Unable to connect to zookeeper server with the specified ZkConnection");
        }
        // TODO Refine the init state here. Here we pre-config it to be connected. This may not be
        // the case, if the connection is connecting or recovering. -- JJ
        // For shared client, the event notification will not be forwarded before wather add to the
        // connection manager.
        setCurrentState(KeeperState.SyncConnected);
      }
      started = true;
    } finally {
      getEventLock().unlock();
      // we should close the zookeeper instance, otherwise it would keep
      // on trying to connect
      if (!started) {
        close();
      }
    }
  }
  /**
   * Returns the creation time (czxid-based create time) of the znode at {@code path},
   * translating ZooKeeper exceptions into their unchecked Zk* equivalents.
   * Runs under the event lock to serialize with connect/close.
   */
  public long getCreationTime(String path) {
    acquireEventLock();
    try {
      return getConnection().getCreateTime(path);
    } catch (KeeperException e) {
      throw ZkException.create(e);
    } catch (InterruptedException e) {
      throw new ZkInterruptedException(e);
    } finally {
      getEventLock().unlock();
    }
  }
  /** Returns the ZooKeeper server connection string of the underlying connection. */
  public String getServers() {
    return getConnection().getServers();
  }
  /**
   * Close the client: stop the retry and event threads, close the managed ZooKeeper
   * connection (if this client manages it), and wake up any thread blocked waiting for a
   * state change. Idempotent — subsequent calls return immediately.
   * @throws ZkInterruptedException if interrupted while closing (see HELIX-264 workaround below)
   */
  public void close() throws ZkInterruptedException {
    if (LOG.isTraceEnabled()) {
      StackTraceElement[] calls = Thread.currentThread().getStackTrace();
      LOG.trace("Closing a zkclient uid:{}, callStack: {} ", _uid, Arrays.asList(calls));
    }
    getEventLock().lock();
    IZkConnection connection = getConnection();
    try {
      if (connection == null || _closed) {
        return;
      }
      setShutdownTrigger(true);
      if (_asyncCallRetryThread != null) {
        _asyncCallRetryThread.interrupt();
        _asyncCallRetryThread.join(2000);
      }
      _eventThread.interrupt();
      _eventThread.join(2000);
      if (isManagingZkConnection()) {
        LOG.info("Closing zkclient uid:{}, zk:{}", _uid, ((ZkConnection) connection).getZookeeper());
        connection.close();
      }
      _closed = true;
      // send state change notification to unlock any wait
      setCurrentState(null);
      getEventLock().getStateChangedCondition().signalAll();
    } catch (InterruptedException e) {
      /**
       * Workaround for HELIX-264: calling ZkClient#close() in its own eventThread context will
       * throw ZkInterruptedException and skip ZkConnection#close()
       */
      if (connection != null) {
        try {
          /**
           * ZkInterruptedException#construct() honors InterruptedException by calling
           * Thread.currentThread().interrupt(); clear it first, so we can safely close the
           * zk-connection
           */
          Thread.interrupted();
          if (isManagingZkConnection()) {
            connection.close();
          }
          /**
           * restore interrupted status of current thread
           */
          Thread.currentThread().interrupt();
        } catch (InterruptedException e1) {
          throw new ZkInterruptedException(e1);
        }
      }
    } finally {
      getEventLock().unlock();
      if (_monitor != null) {
        _monitor.unregister();
      }
      LOG.info("Closed zkclient with uid:{}", _uid);
    }
  }
public boolean isClosed() {
try {
getEventLock().lock();
return _closed;
} finally {
getEventLock().unlock();
}
}
  /**
   * Returns true when the underlying ZooKeeper connection is absent, has no state yet,
   * or is in a non-alive (closed/expired) state.
   */
  public boolean isConnectionClosed() {
    IZkConnection connection = getConnection();
    return (connection == null || connection.getZookeeperState() == null || !connection
        .getZookeeperState().isAlive());
  }
  /** Sets the shutdown flag consulted by retry/event loops to stop work during close. */
  public void setShutdownTrigger(boolean triggerState) {
    _shutdownTriggered = triggerState;
  }
  /** Returns whether shutdown has been triggered on this client. */
  public boolean getShutdownTrigger() {
    return _shutdownTriggered;
  }
public int numberOfListeners() {
int listeners = 0;
for (Set<IZkChildListener> childListeners : _childListener.values()) {
listeners += childListeners.size();
}
for (Set<IZkDataListenerEntry> dataListeners : _dataListener.values()) {
listeners += dataListeners.size();
}
listeners += _stateListener.size();
return listeners;
}
public List<OpResult> multi(final Iterable<Op> ops) throws ZkException {
if (ops == null) {
throw new NullPointerException("ops must not be null.");
}
return retryUntilConnected(new Callable<List<OpResult>>() {
@Override
public List<OpResult> call() throws Exception {
return getConnection().multi(ops);
}
});
}
  /**
   * @return true if this ZkClient is managing the ZkConnection.
   */
  // Subclasses that share an externally-owned connection override this to return false,
  // which skips connection open/close in connect()/close().
  protected boolean isManagingZkConnection() {
    return true;
  }
public long getSessionId() {
ZkConnection zkConnection = ((ZkConnection) getConnection());
ZooKeeper zk = zkConnection.getZookeeper();
if (zk == null) {
throw new ZkClientException(
"ZooKeeper connection information is not available now. ZkClient might be disconnected.");
} else {
return zkConnection.getZookeeper().getSessionId();
}
}
  /*
   * Gets a session id in hexadecimal notation.
   * Ex. 1000a5ceb930004 is returned.
   */
  private String getHexSessionId() {
    return Long.toHexString(getSessionId());
  }
  /*
   * Gets the zookeeper instance that ensures its session ID matches the expected session ID.
   * It is used for write operations that suppose the znode to be created by the expected session.
   * Returns the cached ZooKeeper handle when no expected session is given; otherwise throws
   * ZkSessionMismatchedException when the live session differs from the expected one.
   */
  private ZooKeeper getExpectedZookeeper(final String expectedSessionId) {
    /*
     * Cache the zookeeper reference and make sure later zooKeeper.create() is being run
     * under this zookeeper connection. This is to avoid zk session change after expected
     * session check.
     */
    ZooKeeper zk = ((ZkConnection) getConnection()).getZookeeper();
    /*
     * The operation is NOT session aware, we will use the actual zookeeper session without
     * checking expected session.
     */
    if (expectedSessionId == null || expectedSessionId.isEmpty()) {
      return zk;
    }
    /*
     * If operation is session aware (expectedSession is valid),
     * we have to check whether or not the passed-in(expected) session id
     * matches actual session's id.
     * If not, we should not return a zk object for the zk operation.
     */
    final String actualSessionId = Long.toHexString(zk.getSessionId());
    if (!actualSessionId.equals(expectedSessionId)) {
      throw new ZkSessionMismatchedException(
          "Failed to get expected zookeeper instance! There is a session id mismatch. Expected: "
              + expectedSessionId + ". Actual: " + actualSessionId);
    }
    return zk;
  }
private String parseExpectedSessionId(Object data) {
if (!(data instanceof SessionAwareZNRecord)) {
return null;
}
return ((SessionAwareZNRecord) data).getExpectedSessionId();
}
  // operations to update monitor's counters
  /**
   * Records a successful access (path, payload size, latency) on the monitor, and bumps the
   * compressed-znode counter when the payload is GZip-compressed. No-op when monitoring is off.
   */
  private void record(String path, byte[] data, long startTimeMilliSec,
      ZkClientMonitor.AccessType accessType) {
    if (_monitor != null) {
      int dataSize = (data != null) ? data.length : 0;
      _monitor.record(path, dataSize, startTimeMilliSec, accessType);
      if (GZipCompressionUtil.isCompressed(data)) {
        _monitor.increaseZnodeCompressCounter();
      }
    }
  }
  /** Records a failed access on the monitor. No-op when monitoring is off. */
  private void recordFailure(String path, ZkClientMonitor.AccessType accessType) {
    if (_monitor != null) {
      _monitor.recordFailure(path, accessType);
    }
  }
  /**
   * Bumps the monitor counters corresponding to the event categories that occurred.
   * Each flag is independent; any combination may be set. No-op when monitoring is off.
   */
  private void recordStateChange(boolean stateChanged, boolean dataChanged, boolean sessionExpired) {
    // update state change counter.
    if (_monitor != null) {
      if (stateChanged) {
        _monitor.increaseStateChangeEventCounter();
      }
      if (dataChanged) {
        _monitor.increaseDataChangeEventCounter();
      }
      if (sessionExpired) {
        // NOTE: method name ("increas...") comes from the monitor's public API; not fixable here.
        _monitor.increasExpiredSessionCounter();
      }
    }
  }
  /**
   * Creates a {@link IZkStateListener} that wraps a default
   * implementation of {@link org.apache.helix.zookeeper.zkclient.deprecated.IZkStateListener}, which means the returned
   * listener runs the methods of {@link org.apache.helix.zookeeper.zkclient.deprecated.IZkStateListener}.
   * This is for backward compatibility with {@link org.apache.helix.zookeeper.zkclient.deprecated.IZkStateListener}.
   */
  private static class IZkStateListenerI0ItecImpl implements IZkStateListener {
    // The wrapped deprecated-interface listener; all callbacks delegate to it.
    private org.apache.helix.zookeeper.zkclient.deprecated.IZkStateListener _listener;
    IZkStateListenerI0ItecImpl(
        org.apache.helix.zookeeper.zkclient.deprecated.IZkStateListener listener) {
      _listener = listener;
    }
    @Override
    public void handleStateChanged(KeeperState keeperState) throws Exception {
      _listener.handleStateChanged(keeperState);
    }
    @Override
    public void handleNewSession(final String sessionId) throws Exception {
      /*
       * org.I0Itec.zkclient.IZkStateListener does not have handleNewSession(sessionId),
       * so just call handleNewSession() by default.
       */
      _listener.handleNewSession();
    }
    @Override
    public void handleSessionEstablishmentError(Throwable error) throws Exception {
      _listener.handleSessionEstablishmentError(error);
    }
    @Override
    public boolean equals(Object obj) {
      // Two wrappers are equal iff their wrapped listeners are equal. This lets
      // unsubscribe calls locate the wrapper created for a given original listener.
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof IZkStateListenerI0ItecImpl)) {
        return false;
      }
      if (_listener == null) {
        return false;
      }
      IZkStateListenerI0ItecImpl defaultListener = (IZkStateListenerI0ItecImpl) obj;
      return _listener.equals(defaultListener._listener);
    }
    @Override
    public int hashCode() {
      /*
       * The original listener's hashcode helps find the wrapped listener with the same original
       * listener. This is helpful in unsubscribeStateChanges(listener) when finding the listener
       * to remove.
       */
      return _listener.hashCode();
    }
  }
  /**
   * Rejects blocking operations invoked from the ZooKeeper event thread, which would
   * deadlock the retry loops that wait on events delivered by that same thread.
   */
  private void validateCurrentThread() {
    if (_zookeeperEventThread != null && Thread.currentThread() == _zookeeperEventThread) {
      throw new IllegalArgumentException("Must not be done in the zookeeper event thread.");
    }
  }
  /**
   * Aborts retrying getChildren() on a path whose child count exceeds the transport limit:
   * such a response cannot fit in the Jute buffer, so retrying would loop forever.
   * @throws KeeperException.MarshallingErrorException when the child count exceeds the limit
   */
  private void checkNumChildrenLimit(String path) throws KeeperException {
    Stat stat = getStat(path);
    if (stat == null) {
      // Path is gone; nothing to check — let the normal retry logic proceed.
      return;
    }
    if (stat.getNumChildren() > NUM_CHILDREN_LIMIT) {
      LOG.error("Failed to get children for path {} because of connection loss. "
          + "Number of children {} exceeds limit {}, aborting retry.", path, stat.getNumChildren(),
          NUM_CHILDREN_LIMIT);
      // MarshallingErrorException could represent transport error: exceeding the
      // Jute buffer size. So use it to exit retry loop and tell that zk is not able to
      // transport the data because packet length is too large.
      throw new KeeperException.MarshallingErrorException();
    } else {
      LOG.debug("Number of children {} is less than limit {}, not exiting retry.",
          stat.getNumChildren(), NUM_CHILDREN_LIMIT);
    }
  }
  /**
   * Sanity-checks at startup that the ZNRecord serializer's write-size limit does not exceed
   * this client's limit; otherwise the serializer could emit payloads the client rejects.
   * @throws IllegalStateException when the serializer limit exceeds the client limit
   */
  private void validateWriteSizeLimitConfig() {
    int serializerSize = ZNRecordUtil.getSerializerWriteSizeLimit();
    LOG.info("ZNRecord serializer write size limit: {}; ZkClient write size limit: {}",
        serializerSize, WRITE_SIZE_LIMIT);
    if (serializerSize > WRITE_SIZE_LIMIT) {
      throw new IllegalStateException("ZNRecord serializer write size limit " + serializerSize
          + " is greater than ZkClient size limit " + WRITE_SIZE_LIMIT);
    }
  }
}
| apache-2.0 |
jkacer/pac4j | pac4j-http/src/main/java/org/pac4j/http/client/indirect/IndirectBasicAuthClient.java | 4030 | package org.pac4j.http.client.indirect;
import org.pac4j.core.client.IndirectClient;
import org.pac4j.core.context.HttpConstants;
import org.pac4j.core.context.Pac4jConstants;
import org.pac4j.core.redirect.RedirectAction;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.credentials.authenticator.Authenticator;
import org.pac4j.core.exception.CredentialsException;
import org.pac4j.core.exception.HttpAction;
import org.pac4j.core.profile.CommonProfile;
import org.pac4j.core.profile.creator.ProfileCreator;
import org.pac4j.core.credentials.UsernamePasswordCredentials;
import org.pac4j.core.credentials.extractor.BasicAuthExtractor;
import static org.pac4j.core.util.CommonHelper.*;
/**
 * <p>This class is the client to authenticate users through HTTP basic auth. It was previously named: <code>BasicAuthClient</code>.</p>
 * <p>For authentication, the user is redirected to the callback url. If the user is not authenticated by basic auth, a
 * specific exception : {@link HttpAction} is returned which must be handled by the application to force
 * authentication.</p>
 *
 * @author Jerome Leleu
 * @since 1.8.0
 */
public class IndirectBasicAuthClient extends IndirectClient<UsernamePasswordCredentials, CommonProfile> {

    // Realm advertised in the WWW-Authenticate challenge header.
    private String realmName = Pac4jConstants.DEFAULT_REALM_NAME;

    public IndirectBasicAuthClient() {}

    public IndirectBasicAuthClient(final Authenticator usernamePasswordAuthenticator) {
        defaultAuthenticator(usernamePasswordAuthenticator);
    }

    public IndirectBasicAuthClient(final String realmName, final Authenticator usernamePasswordAuthenticator) {
        this.realmName = realmName;
        defaultAuthenticator(usernamePasswordAuthenticator);
    }

    public IndirectBasicAuthClient(final Authenticator usernamePasswordAuthenticator, final ProfileCreator profileCreator) {
        defaultAuthenticator(usernamePasswordAuthenticator);
        defaultProfileCreator(profileCreator);
    }

    /**
     * Wires the defaults: redirect to the callback URL and extract credentials from the
     * Basic-Auth header. A non-blank realm name is required.
     */
    @Override
    protected void clientInit() {
        assertNotBlank("realmName", this.realmName);
        defaultRedirectActionBuilder(webContext -> RedirectAction.redirect(computeFinalCallbackUrl(webContext)));
        defaultCredentialsExtractor(new BasicAuthExtractor());
    }

    /**
     * Extracts and validates Basic-Auth credentials from the request.
     * Always sets the WWW-Authenticate challenge header first, so a 401 response
     * prompts the browser for credentials. Missing or invalid credentials result
     * in an unauthorized {@link HttpAction}.
     */
    @Override
    protected UsernamePasswordCredentials retrieveCredentials(final WebContext context) {
        assertNotNull("credentialsExtractor", getCredentialsExtractor());
        assertNotNull("authenticator", getAuthenticator());

        // set the www-authenticate in case of error
        context.setResponseHeader(HttpConstants.AUTHENTICATE_HEADER, "Basic realm=\"" + realmName + "\"");

        final UsernamePasswordCredentials credentials;
        try {
            // retrieve credentials
            credentials = getCredentialsExtractor().extract(context);
            logger.debug("credentials : {}", credentials);
            if (credentials == null) {
                throw HttpAction.unauthorized(context);
            }
            // validate credentials
            getAuthenticator().validate(credentials, context);
        } catch (final CredentialsException e) {
            // invalid credentials: force the browser to re-prompt via 401
            throw HttpAction.unauthorized(context);
        }

        return credentials;
    }

    public String getRealmName() {
        return realmName;
    }

    public void setRealmName(String realmName) {
        this.realmName = realmName;
    }

    @Override
    public String toString() {
        return toNiceString(this.getClass(), "name", getName(), "callbackUrl", this.callbackUrl,
            "callbackUrlResolver", this.callbackUrlResolver, "ajaxRequestResolver", getAjaxRequestResolver(),
            "redirectActionBuilder", getRedirectActionBuilder(), "credentialsExtractor", getCredentialsExtractor(),
            "authenticator", getAuthenticator(), "profileCreator", getProfileCreator(),
            "logoutActionBuilder", getLogoutActionBuilder(), "authorizationGenerators", getAuthorizationGenerators(),
            "realmName", this.realmName);
    }
}
| apache-2.0 |
michaelschiff/druid | server/src/main/java/org/apache/druid/segment/realtime/firehose/TimedShutoffFirehoseFactory.java | 5006 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.realtime.firehose;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.FirehoseFactory;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.InputRowPlusRaw;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.utils.CloseableUtils;
import org.joda.time.DateTime;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Creates firehoses that shut off at a particular time. Useful for limiting the lifespan of a realtime job.
 *
 * Each {@link Firehose} created by this factory spins up and manages one thread for calling {@link Firehose#close()}
 * asynchronously at the specified {@link #shutoffTime}.
 */
public class TimedShutoffFirehoseFactory implements FirehoseFactory<InputRowParser>
{
  // Fix: log under this concrete class rather than the FirehoseFactory interface,
  // so log events are attributed to the timed-shutoff implementation.
  private static final EmittingLogger log = new EmittingLogger(TimedShutoffFirehoseFactory.class);

  // Factory that produces the wrapped firehose.
  private final FirehoseFactory delegateFactory;
  // Wall-clock instant at which created firehoses close themselves.
  private final DateTime shutoffTime;

  @JsonCreator
  public TimedShutoffFirehoseFactory(
      @JsonProperty("delegate") FirehoseFactory delegateFactory,
      @JsonProperty("shutoffTime") DateTime shutoffTime
  )
  {
    this.delegateFactory = delegateFactory;
    this.shutoffTime = shutoffTime;
  }

  @Override
  public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException
  {
    return new TimedShutoffFirehose(parser, temporaryDirectory, false);
  }

  @Override
  public Firehose connectForSampler(InputRowParser parser, File temporaryDirectory) throws IOException
  {
    return new TimedShutoffFirehose(parser, temporaryDirectory, true);
  }

  /**
   * Firehose that delegates all reads and schedules its own {@link #close()} at the factory's
   * shutoff time via a single-threaded scheduled executor.
   */
  class TimedShutoffFirehose implements Firehose
  {
    private final Firehose firehose;
    private final ScheduledExecutorService shutdownExec;
    @GuardedBy("this")
    private boolean closed = false;

    TimedShutoffFirehose(InputRowParser parser, File temporaryDirectory, boolean sampling) throws IOException
    {
      firehose = sampling
                 ? delegateFactory.connectForSampler(parser, temporaryDirectory)
                 : delegateFactory.connect(parser, temporaryDirectory);

      shutdownExec = Execs.scheduledSingleThreaded("timed-shutoff-firehose-%d");

      shutdownExec.schedule(
          () -> {
            log.info("Closing delegate firehose.");

            try {
              TimedShutoffFirehose.this.close();
            }
            catch (IOException e) {
              // Best effort: closing is advisory; log and move on.
              log.warn(e, "Failed to close delegate firehose, ignoring.");
            }
          },
          shutoffTime.getMillis() - System.currentTimeMillis(),
          TimeUnit.MILLISECONDS
      );

      log.info("Firehose created, will shut down at: %s", shutoffTime);
    }

    @Override
    public boolean hasMore() throws IOException
    {
      return firehose.hasMore();
    }

    @Nullable
    @Override
    public InputRow nextRow() throws IOException
    {
      return firehose.nextRow();
    }

    @Override
    public InputRowPlusRaw nextRowWithRaw() throws IOException
    {
      return firehose.nextRowWithRaw();
    }

    @Override
    public Runnable commit()
    {
      return firehose.commit();
    }

    /**
     * This method is synchronized because it might be called concurrently from multiple threads: from {@link
     * #shutdownExec}, and explicitly on this Firehose object.
     */
    @Override
    public synchronized void close() throws IOException
    {
      if (!closed) {
        closed = true;
        CloseableUtils.closeBoth(firehose, shutdownExec::shutdownNow);
      }
    }
  }

  @JsonProperty("delegate")
  public FirehoseFactory getDelegateFactory()
  {
    return delegateFactory;
  }

  @JsonProperty("shutoffTime")
  public DateTime getShutoffTime()
  {
    return shutoffTime;
  }
}
| apache-2.0 |
apache/camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/NettySingleCodecTest.java | 2176 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty;
import java.util.concurrent.TimeUnit;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import org.apache.camel.BindToRegistry;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.jupiter.api.Test;
/**
 * Verifies that a single encoder/decoder (rather than a list) can be supplied to the
 * Netty endpoint pipeline via registry lookups ({@code #encoder} / {@code #decoder}).
 */
public class NettySingleCodecTest extends BaseNettyTest {

    // Registered under "decoder"/"encoder" and referenced from the endpoint URIs below.
    @BindToRegistry("decoder")
    private StringDecoder stringDecoder = new StringDecoder();

    @BindToRegistry("encoder")
    private StringEncoder stringEncoder = new StringEncoder();

    @Test
    public void canSupplySingleCodecToEndpointPipeline() throws Exception {
        String poem = new Poetry().getPoem();

        MockEndpoint mock = getMockEndpoint("mock:single-codec");
        mock.expectedBodiesReceived(poem);
        sendBody("direct:single-codec", poem);
        // sync=false producer: allow up to a second for the async delivery before asserting.
        mock.await(1, TimeUnit.SECONDS);
        mock.assertIsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                from("direct:single-codec").to("netty:tcp://localhost:{{port}}?encoders=#encoder&sync=false");

                from("netty:tcp://localhost:{{port}}?decoders=#decoder&sync=false").to("mock:single-codec");
            }
        };
    }
}
| apache-2.0 |
NuwanSameera/syncope | fit/core-reference/src/test/java/org/apache/syncope/fit/core/UserSelfITCase.java | 17603 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.fit.core;
import org.apache.syncope.fit.ActivitiDetector;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.security.AccessControlException;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.core.GenericType;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.syncope.client.lib.SyncopeClient;
import org.apache.syncope.common.lib.SyncopeClientException;
import org.apache.syncope.common.lib.patch.BooleanReplacePatchItem;
import org.apache.syncope.common.lib.patch.MembershipPatch;
import org.apache.syncope.common.lib.patch.PasswordPatch;
import org.apache.syncope.common.lib.patch.StringPatchItem;
import org.apache.syncope.common.lib.patch.StringReplacePatchItem;
import org.apache.syncope.common.lib.patch.UserPatch;
import org.apache.syncope.common.lib.to.MembershipTO;
import org.apache.syncope.common.lib.to.ProvisioningResult;
import org.apache.syncope.common.lib.to.UserTO;
import org.apache.syncope.common.lib.to.WorkflowFormPropertyTO;
import org.apache.syncope.common.lib.to.WorkflowFormTO;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.ClientExceptionType;
import org.apache.syncope.common.lib.types.PatchOperation;
import org.apache.syncope.common.rest.api.service.ResourceService;
import org.apache.syncope.common.rest.api.service.UserSelfService;
import org.apache.syncope.common.rest.api.service.UserService;
import org.apache.syncope.fit.AbstractITCase;
import org.junit.Assume;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.jdbc.core.JdbcTemplate;
@FixMethodOrder(MethodSorters.JVM)
public class UserSelfITCase extends AbstractITCase {
    /** Platform must advertise that anonymous self-registration is enabled. */
    @Test
    public void selfRegistrationAllowed() {
        assertTrue(syncopeService.platform().isSelfRegAllowed());
    }
    /**
     * Self-registration: rejected when invoked as admin, accepted when anonymous,
     * leaving the new user pending in the 'createApproval' workflow state.
     * Requires the Activiti user workflow.
     */
    @Test
    public void create() {
        Assume.assumeTrue(ActivitiDetector.isActivitiEnabledForUsers(syncopeService));

        // 1. self-registration as admin: failure
        try {
            userSelfService.create(UserITCase.getUniqueSampleTO("anonymous@syncope.apache.org"), true);
            fail();
        } catch (AccessControlException e) {
            assertNotNull(e);
        }

        // 2. self-registration as anonymous: works
        SyncopeClient anonClient = clientFactory.create();
        UserTO self = anonClient.getService(UserSelfService.class).
                create(UserITCase.getUniqueSampleTO("anonymous@syncope.apache.org"), true).
                readEntity(new GenericType<ProvisioningResult<UserTO>>() {
                }).getEntity();
        assertNotNull(self);
        assertEquals("createApproval", self.getStatus());
    }
    /**
     * Self-registration with membership and an external resource: nothing is propagated
     * while the user sits in 'createApproval'; after admin approval the user becomes
     * active and the DB resource is provisioned. Requires the Activiti user workflow.
     */
    @Test
    public void createAndApprove() {
        Assume.assumeTrue(ActivitiDetector.isActivitiEnabledForUsers(syncopeService));

        // self-create user with membership: goes 'createApproval' with resources and membership but no propagation
        UserTO userTO = UserITCase.getUniqueSampleTO("anonymous@syncope.apache.org");
        userTO.getMemberships().add(
                new MembershipTO.Builder().group("29f96485-729e-4d31-88a1-6fc60e4677f3").build());
        userTO.getResources().add(RESOURCE_NAME_TESTDB);

        SyncopeClient anonClient = clientFactory.create();
        userTO = anonClient.getService(UserSelfService.class).
                create(userTO, true).
                readEntity(new GenericType<ProvisioningResult<UserTO>>() {
                }).getEntity();
        assertNotNull(userTO);
        assertEquals("createApproval", userTO.getStatus());
        assertFalse(userTO.getMemberships().isEmpty());
        assertFalse(userTO.getResources().isEmpty());

        // no connector object yet: NotFound proves propagation did not happen before approval
        try {
            resourceService.readConnObject(RESOURCE_NAME_TESTDB, AnyTypeKind.USER.name(), userTO.getKey());
            fail();
        } catch (SyncopeClientException e) {
            assertEquals(ClientExceptionType.NotFound, e.getType());
        }

        // now approve and verify that propagation has happened
        WorkflowFormTO form = userWorkflowService.getFormForUser(userTO.getKey());
        form = userWorkflowService.claimForm(form.getTaskId());
        Map<String, WorkflowFormPropertyTO> props = form.getPropertyMap();
        props.get("approve").setValue(Boolean.TRUE.toString());
        form.getProperties().clear();
        form.getProperties().addAll(props.values());
        userTO = userWorkflowService.submitForm(form);
        assertNotNull(userTO);
        assertEquals("active", userTO.getStatus());
        assertNotNull(resourceService.readConnObject(RESOURCE_NAME_TESTDB, AnyTypeKind.USER.name(), userTO.getKey()));
    }
    /**
     * A plain user cannot read another user by key, but can always read their own profile
     * through the self endpoint.
     */
    @Test
    public void read() {
        UserService userService2 = clientFactory.create("rossini", ADMIN_PWD).getService(UserService.class);

        try {
            userService2.read("1417acbe-cbf6-4277-9372-e75e04f97000");
            fail();
        } catch (AccessControlException e) {
            assertNotNull(e);
        }

        Pair<Map<String, Set<String>>, UserTO> self = clientFactory.create("rossini", ADMIN_PWD).self();
        assertEquals("rossini", self.getValue().getUsername());
    }
    /**
     * Self-updating only the username requires no approval: the change is applied
     * immediately regardless of workflow engine.
     */
    @Test
    public void updateWithoutApproval() {
        // 1. create user as admin
        UserTO created = createUser(UserITCase.getUniqueSampleTO("anonymous@syncope.apache.org")).getEntity();
        assertNotNull(created);
        assertFalse(created.getUsername().endsWith("XX"));

        // 2. self-update (username) - works
        UserPatch userPatch = new UserPatch();
        userPatch.setKey(created.getKey());
        userPatch.setUsername(new StringReplacePatchItem.Builder().value(created.getUsername() + "XX").build());

        SyncopeClient authClient = clientFactory.create(created.getUsername(), "password123");
        UserTO updated = authClient.getService(UserSelfService.class).update(userPatch).
                readEntity(new GenericType<ProvisioningResult<UserTO>>() {
                }).getEntity();
        assertNotNull(updated);
        // expected status depends on whether the Activiti workflow is active
        assertEquals(ActivitiDetector.isActivitiEnabledForUsers(syncopeService)
                ? "active" : "created", updated.getStatus());
        assertTrue(updated.getUsername().endsWith("XX"));
    }
    /**
     * Self-update touching memberships and resources goes through 'updateApproval':
     * changes (and propagation) are held until an admin approves the workflow form.
     * Requires the Activiti user workflow.
     */
    @Test
    public void updateWithApproval() {
        Assume.assumeTrue(ActivitiDetector.isActivitiEnabledForUsers(syncopeService));

        // 1. create user as admin
        UserTO created = createUser(UserITCase.getUniqueSampleTO("anonymous@syncope.apache.org")).getEntity();
        assertNotNull(created);
        assertFalse(created.getUsername().endsWith("XX"));

        // 2. self-update (username + memberships + resource) - works but needs approval
        UserPatch userPatch = new UserPatch();
        userPatch.setKey(created.getKey());
        userPatch.setUsername(new StringReplacePatchItem.Builder().value(created.getUsername() + "XX").build());
        userPatch.getMemberships().add(new MembershipPatch.Builder().
                operation(PatchOperation.ADD_REPLACE).
                group("bf825fe1-7320-4a54-bd64-143b5c18ab97").
                build());
        userPatch.getResources().add(new StringPatchItem.Builder().
                operation(PatchOperation.ADD_REPLACE).value(RESOURCE_NAME_TESTDB).build());
        userPatch.setPassword(new PasswordPatch.Builder().
                value("newPassword123").onSyncope(false).resource(RESOURCE_NAME_TESTDB).build());

        SyncopeClient authClient = clientFactory.create(created.getUsername(), "password123");
        UserTO updated = authClient.getService(UserSelfService.class).update(userPatch).
                readEntity(new GenericType<ProvisioningResult<UserTO>>() {
                }).getEntity();
        assertNotNull(updated);
        assertEquals("updateApproval", updated.getStatus());
        // pending approval: requested changes are not yet visible on the user
        assertFalse(updated.getUsername().endsWith("XX"));
        assertTrue(updated.getMemberships().isEmpty());

        // no propagation happened
        assertTrue(updated.getResources().isEmpty());
        try {
            resourceService.readConnObject(RESOURCE_NAME_TESTDB, AnyTypeKind.USER.name(), updated.getKey());
            fail();
        } catch (SyncopeClientException e) {
            assertEquals(ClientExceptionType.NotFound, e.getType());
        }

        // 3. approve self-update as admin
        WorkflowFormTO form = userWorkflowService.getFormForUser(updated.getKey());
        form = userWorkflowService.claimForm(form.getTaskId());
        Map<String, WorkflowFormPropertyTO> props = form.getPropertyMap();
        props.get("approve").setValue(Boolean.TRUE.toString());
        form.getProperties().clear();
        form.getProperties().addAll(props.values());
        updated = userWorkflowService.submitForm(form);
        assertNotNull(updated);
        assertEquals("active", updated.getStatus());
        assertTrue(updated.getUsername().endsWith("XX"));
        assertEquals(1, updated.getMemberships().size());

        // check that propagation also happened
        assertTrue(updated.getResources().contains(RESOURCE_NAME_TESTDB));
        assertNotNull(resourceService.readConnObject(RESOURCE_NAME_TESTDB, AnyTypeKind.USER.name(), updated.getKey()));
    }
    /**
     * Self-deletion: with the Activiti workflow the user moves to 'deleteApproval';
     * without it, the returned status is null (user removed outright).
     */
    @Test
    public void delete() {
        UserTO created = createUser(UserITCase.getUniqueSampleTO("anonymous@syncope.apache.org")).getEntity();
        assertNotNull(created);

        SyncopeClient authClient = clientFactory.create(created.getUsername(), "password123");
        UserTO deleted = authClient.getService(UserSelfService.class).delete().readEntity(
                new GenericType<ProvisioningResult<UserTO>>() {
                }).getEntity();
        assertNotNull(deleted);
        assertEquals(ActivitiDetector.isActivitiEnabledForUsers(syncopeService)
                ? "deleteApproval" : null, deleted.getStatus());
    }
    /** Regression for SYNCOPE-373: admin's self() must return the admin username. */
    @Test
    public void issueSYNCOPE373() {
        UserTO userTO = adminClient.self().getValue();
        assertEquals(ADMIN_UNAME, userTO.getUsername());
    }
    /**
     * End-to-end password reset with a security question: wrong answer and wrong token
     * are rejected; a successful reset changes the password both in Syncope and on the
     * propagated DB resource, and clears the one-time token.
     */
    @Test
    public void passwordReset() {
        // 0. ensure that password request DOES require security question
        configurationService.set(attrTO("passwordReset.securityQuestion", "true"));

        // 1. create an user with security question and answer
        UserTO user = UserITCase.getUniqueSampleTO("pwdReset@syncope.apache.org");
        user.setSecurityQuestion("887028ea-66fc-41e7-b397-620d7ea6dfbb");
        user.setSecurityAnswer("Rossi");
        user.getResources().add(RESOURCE_NAME_TESTDB);
        createUser(user);

        // verify propagation (including password) on external db
        JdbcTemplate jdbcTemplate = new JdbcTemplate(testDataSource);
        String pwdOnResource = jdbcTemplate.queryForObject("SELECT password FROM test WHERE id=?", String.class,
                user.getUsername());
        assertTrue(StringUtils.isNotBlank(pwdOnResource));

        // 2. verify that new user is able to authenticate
        SyncopeClient authClient = clientFactory.create(user.getUsername(), "password123");
        UserTO read = authClient.self().getValue();
        assertNotNull(read);

        // 3. request password reset (as anonymous) providing the expected security answer
        SyncopeClient anonClient = clientFactory.create();
        try {
            anonClient.getService(UserSelfService.class).requestPasswordReset(user.getUsername(), "WRONG");
            fail();
        } catch (SyncopeClientException e) {
            assertEquals(ClientExceptionType.InvalidSecurityAnswer, e.getType());
        }
        anonClient.getService(UserSelfService.class).requestPasswordReset(user.getUsername(), "Rossi");

        // 4. get token (normally sent via e-mail, now reading as admin)
        String token = userService.read(read.getKey()).getToken();
        assertNotNull(token);

        // 5. confirm password reset
        try {
            anonClient.getService(UserSelfService.class).confirmPasswordReset("WRONG TOKEN", "newPassword");
            fail();
        } catch (SyncopeClientException e) {
            assertEquals(ClientExceptionType.NotFound, e.getType());
            assertTrue(e.getMessage().contains("WRONG TOKEN"));
        }
        anonClient.getService(UserSelfService.class).confirmPasswordReset(token, "newPassword123");

        // 6. verify that password was reset and token removed
        authClient = clientFactory.create(user.getUsername(), "newPassword123");
        read = authClient.self().getValue();
        assertNotNull(read);
        assertNull(read.getToken());

        // 7. verify that password was changed on external resource
        String newPwdOnResource = jdbcTemplate.queryForObject("SELECT password FROM test WHERE id=?", String.class,
                user.getUsername());
        assertTrue(StringUtils.isNotBlank(newPwdOnResource));
        assertNotEquals(pwdOnResource, newPwdOnResource);
    }
    /**
     * Password reset flow with the security question disabled: the anonymous
     * reset request succeeds with a null answer. The security-question
     * requirement is re-enabled at the end so later tests see the default.
     */
    @Test
    public void passwordResetWithoutSecurityQuestion() {
        // 0. disable security question for password reset
        configurationService.set(attrTO("passwordReset.securityQuestion", "false"));
        // 1. create an user with security question and answer
        UserTO user = UserITCase.getUniqueSampleTO("pwdResetNoSecurityQuestion@syncope.apache.org");
        createUser(user);
        // 2. verify that new user is able to authenticate
        SyncopeClient authClient = clientFactory.create(user.getUsername(), "password123");
        UserTO read = authClient.self().getValue();
        assertNotNull(read);
        // 3. request password reset (as anonymous) with no security answer
        SyncopeClient anonClient = clientFactory.create();
        anonClient.getService(UserSelfService.class).requestPasswordReset(user.getUsername(), null);
        // 4. get token (normally sent via e-mail, now reading as admin)
        String token = userService.read(read.getKey()).getToken();
        assertNotNull(token);
        // 5. confirm password reset; a wrong token must be rejected with NotFound
        try {
            anonClient.getService(UserSelfService.class).confirmPasswordReset("WRONG TOKEN", "newPassword");
            fail();
        } catch (SyncopeClientException e) {
            assertEquals(ClientExceptionType.NotFound, e.getType());
            assertTrue(e.getMessage().contains("WRONG TOKEN"));
        }
        anonClient.getService(UserSelfService.class).confirmPasswordReset(token, "newPassword123");
        // 6. verify that password was reset and token removed
        authClient = clientFactory.create(user.getUsername(), "newPassword123");
        read = authClient.self().getValue();
        assertNotNull(read);
        assertNull(read.getToken());
        // 7. re-enable security question for password reset
        configurationService.set(attrTO("passwordReset.securityQuestion", "true"));
    }
    /**
     * Verifies the mustChangePassword flow: once the flag is set on a user,
     * regular service calls are rejected until the user changes the password
     * through the self-service API.
     */
    @Test
    public void mustChangePassword() {
        // PRE: reset vivaldi's password
        UserPatch userPatch = new UserPatch();
        userPatch.setKey("b3cbc78d-32e6-4bd4-92e0-bbe07566a2ee");
        userPatch.setPassword(new PasswordPatch.Builder().value("password321").build());
        userService.update(userPatch);
        // 0. access as vivaldi -> succeed
        SyncopeClient vivaldiClient = clientFactory.create("vivaldi", "password321");
        Pair<Map<String, Set<String>>, UserTO> self = vivaldiClient.self();
        assertFalse(self.getRight().isMustChangePassword());
        // 1. update user vivaldi (3) requiring password update
        userPatch = new UserPatch();
        userPatch.setKey("b3cbc78d-32e6-4bd4-92e0-bbe07566a2ee");
        userPatch.setMustChangePassword(new BooleanReplacePatchItem.Builder().value(true).build());
        UserTO vivaldi = updateUser(userPatch).getEntity();
        assertTrue(vivaldi.isMustChangePassword());
        // 2. attempt to access -> fail
        try {
            vivaldiClient.getService(ResourceService.class).list();
            fail();
        } catch (AccessControlException e) {
            assertNotNull(e);
            assertEquals("Please change your password first", e.getMessage());
        }
        // 3. change password
        vivaldiClient.getService(UserSelfService.class).changePassword("password123");
        // 4. verify it worked
        self = clientFactory.create("vivaldi", "password123").self();
        assertFalse(self.getRight().isMustChangePassword());
    }
}
| apache-2.0 |
szenzaro/SeaCloudsPlatform | deployer/src/main/java/org/apache/brooklyn/entity/cloudfoundry/webapp/PaasWebAppCloudFoundryDriver.java | 10463 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.brooklyn.entity.cloudfoundry.webapp;
import org.apache.brooklyn.entity.cloudfoundry.PaasEntityCloudFoundryDriver;
import org.apache.brooklyn.entity.cloudfoundry.services.CloudFoundryService;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import org.apache.brooklyn.api.entity.Entity;
import org.apache.brooklyn.core.entity.Attributes;
import org.apache.brooklyn.core.entity.BrooklynConfigKeys;
import org.apache.brooklyn.core.entity.Entities;
import org.apache.brooklyn.location.cloudfoundry.CloudFoundryPaasLocation;
import org.apache.brooklyn.util.text.Strings;
import org.cloudfoundry.client.lib.domain.CloudApplication;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
/**
 * Abstract PaaS driver for web applications deployed on a CloudFoundry-based
 * location. Implements the common lifecycle (deploy, bind services, set
 * environment variables, launch, publish sensors, stop/delete); concrete
 * subclasses provide the artifact upload in {@link #deploy()} and the
 * buildpack via {@link #getBuildpack()}.
 */
public abstract class PaasWebAppCloudFoundryDriver extends PaasEntityCloudFoundryDriver
        implements PaasWebAppDriver {
    public static final Logger log = LoggerFactory.getLogger(PaasWebAppCloudFoundryDriver.class);
    // Application URL taken from entity config; may be null (see subclasses).
    private String applicationUrl;
    // Name under which the application is registered in CloudFoundry.
    private String applicationName;
    public PaasWebAppCloudFoundryDriver(CloudFoundryWebAppImpl entity,
                                        CloudFoundryPaasLocation location) {
        super(entity, location);
    }
    @Override
    protected void init() {
        super.init();
        // cache name/url from the entity configuration once at init time
        initApplicationParameters();
    }
    @SuppressWarnings("unchecked")
    private void initApplicationParameters() {
        applicationName = getEntity().getConfig(CloudFoundryWebApp.APPLICATION_NAME);
        applicationUrl = getEntity().getConfig(CloudFoundryWebApp.APPLICATION_URL);
    }
    @Override
    public CloudFoundryWebAppImpl getEntity() {
        return (CloudFoundryWebAppImpl) super.getEntity();
    }
    protected String getApplicationUrl(){
        return applicationUrl;
    }
    protected String getApplicationName(){
        return applicationName;
    }
    /** Buildpack name/URL used by the concrete driver to stage the app. */
    public abstract String getBuildpack();
    /**
     * The application counts as running once CloudFoundry reports it in
     * STARTED state; any client exception (e.g. the app is not known yet)
     * is treated as "not running".
     */
    @Override
    public boolean isRunning() {
        try {
            CloudApplication app = getClient().getApplication(applicationName);
            return (app != null)
                    && app.getState().equals(CloudApplication.AppState.STARTED);
        } catch (Exception e) {
            //log.debug("Application - {} is not available for now", applicationName);
            return false;
        }
    }
    @Override
    public int getInstancesNumber(){
        CloudApplication app = getClient().getApplication(getApplicationName());
        return app.getInstances();
    }
    /** Disk quota (MB, per CloudFoundry convention) currently assigned to the app. */
    @Override
    public int getDisk(){
        CloudApplication app = getClient().getApplication(getApplicationName());
        return app.getDiskQuota();
    }
    /** Memory (MB, per CloudFoundry convention) currently assigned to the app. */
    @Override
    public int getMemory(){
        CloudApplication app = getClient().getApplication(getApplicationName());
        return app.getMemory();
    }
    // Lifecycle: deploy artifact, bind services + env, start, publish sensors.
    @Override
    public void start() {
        super.start();
        preDeploy();
        deploy();
        preLaunch();
        launch();
        postLaunch();
    }
    public void preDeploy() {}
    public abstract void deploy();
    public void preLaunch() {
        // bind configured services first so env/credentials are in place at launch
        manageServices();
        configureEnv();
    }
    private void manageServices() {
        List<Entity> config = getEntity().getConfig(CloudFoundryWebApp.NAMED_SERVICES);
        if (config != null) {
            for (Entity serviceEntityId : config) {
                manageService(serviceEntityId);
            }
        }
    }
    /*TODO RENAME Method. It could be represent that the service is bound and the service
    operation is called*/
    /**
     * Waits for the given service entity to be up, binds it to this
     * application, pushes binding credentials back to the service entity and
     * finally triggers the service's operation on this entity.
     * Throws if the entity is not a {@link CloudFoundryService}.
     */
    private void manageService(Entity rawEntity){
        CloudFoundryService cloudFoundryService;
        if (rawEntity instanceof CloudFoundryService){
            cloudFoundryService = (CloudFoundryService) rawEntity;
            String serviceName = cloudFoundryService
                    .getConfig(CloudFoundryService.SERVICE_INSTANCE_NAME);
            if (!Strings.isEmpty(serviceName)){
                Entities.waitForServiceUp(cloudFoundryService,
                        cloudFoundryService.getConfig(BrooklynConfigKeys.START_TIMEOUT));
                bindingServiceToEntity(serviceName);
                setCredentialsOnService(cloudFoundryService);
                cloudFoundryService.operation(getEntity());
            } else {
                log.error("Trying to get service instance name from {}, but getting null",
                        cloudFoundryService);
            }
        } else {
            log.error("The service entity {} is not available from the application {}",
                    new Object[]{rawEntity, getEntity()});
            throw new NoSuchElementException("No entity matching id " + rawEntity.getId() +
                    " in Management Context "+getEntity().getManagementContext()+
                    " during entity service binding "+getEntity().getId());
        }
    }
    private void bindingServiceToEntity(String serviceId) {
        getClient().bindService(applicationName, serviceId);
        log.info("The service {} was bound correctly to the application {}",
                new Object[]{serviceId, applicationName});
        // refresh VCAP_SERVICES and the bound-services list after binding
        updateVariableEnvironmentSensors();
        updateBoundServiceSensor(serviceId);
    }
    private void updateVariableEnvironmentSensors(){
        // NOTE(review): assumes the client env map contains a "system_env_json"
        // entry holding VCAP_SERVICES (CloudFoundry v2 env endpoint) -- confirm
        Map<String, Object> env = getClient().getApplicationEnvironment(applicationName);
        JsonObject envTree = new Gson().toJsonTree(env).getAsJsonObject();
        getEntity().setAttribute(CloudFoundryWebApp.VCAP_SERVICES,
                envTree.getAsJsonObject("system_env_json")
                        .getAsJsonObject("VCAP_SERVICES").toString());
    }
    protected void setCredentialsOnService(CloudFoundryService service) {
        service.setBindingCredentialsFromApp(getEntity());
    }
    //TODO it may be renamed to updateBoundServicesSensor
    /** Appends serviceId to the BOUND_SERVICES sensor once the cloud confirms the binding. */
    protected void updateBoundServiceSensor(String serviceId){
        if(serviceIsBoundToCloudApplication(serviceId)){
            List<String> currentBoundServices = getEntity()
                    .getAttribute(CloudFoundryWebApp.BOUND_SERVICES);
            currentBoundServices.add(serviceId);
            getEntity().setAttribute(CloudFoundryWebApp.BOUND_SERVICES, currentBoundServices);
        }
    }
    /**
     * Return if a service is bound to the cloud application. Checks the running application in
     * the cloud.
     * @param serviceId name of the service instance to look for
     * @return true if the running application reports the service among its bindings
     */
    public boolean serviceIsBoundToCloudApplication(String serviceId){
        return getClient().getApplication(applicationName)
                .getServices().contains(serviceId);
    }
    public void launch() {
        getClient().startApplication(applicationName);
    }
    /**
     * Waits for the entity to report started, then publishes URI/memory/disk
     * sensors from the (now running) cloud application.
     */
    public void postLaunch() {
        //TODO: we should use TASK for avoid wait methods.
        getEntity().waitForEntityStart();
        CloudApplication application = getClient().getApplication(applicationName);
        String domainUri = "http://"+application.getUris().get(0);
        getEntity().setAttribute(Attributes.MAIN_URI, URI.create(domainUri));
        getEntity().setAttribute(CloudFoundryWebApp.ROOT_URL, domainUri);
        /*getEntity().setAttribute(CloudFoundryWebApp.INSTANCES_NUM,
                application.getInstances());*/
        getEntity().setAttribute(CloudFoundryWebApp.MEMORY,
                application.getMemory());
        getEntity().setAttribute(CloudFoundryWebApp.DISK,
                application.getDiskQuota());
    }
    @Override
    public void restart() {
        // TODO: complete
    }
    /** Stops the application and deletes it from CloudFoundry. */
    @Override
    public void stop() {
        getClient().stopApplication(applicationName);
        deleteApplication();
    }
    @Override
    public void deleteApplication() {
        getClient().deleteApplication(applicationName);
    }
    /** Derives "name-domain.&lt;default-domain&gt;" from the target's default domain. */
    protected String inferApplicationDomainUri(String name) {
        String defaultDomainName = getClient().getDefaultDomain().getName();
        return name + "-domain." + defaultDomainName;
    }
    /**
     * Sets a single environment variable, preserving the variables already
     * set on the application (the client replaces the whole set at once).
     */
    @Override
    @SuppressWarnings("unchecked")
    public void setEnv(String key, String value) {
        CloudApplication app = getClient().getApplication(applicationName);
        // app.setEnv() replaces the entire set of variables, so we need to add it externally.
        Map envAsMap = app.getEnvAsMap();
        envAsMap.put(key, value);
        // NOTE(review): unlike setEnv(Map), this also mutates the local
        // CloudApplication copy before pushing -- confirm both are needed
        app.setEnv(envAsMap);
        getClient().updateApplicationEnv(applicationName, envAsMap);
    }
    protected void configureEnv() {
        //TODO a sensor with the custom-environment variables?
        setEnv(getEntity().getConfig(CloudFoundryWebApp.ENV));
    }
    /** Merges the given variables into the application's current environment. */
    @SuppressWarnings("unchecked")
    void setEnv(Map<String, String> envs) {
        CloudApplication app = getClient().getApplication(applicationName);
        // app.setEnv() replaces the entire set of variables, so we need to add it externally.
        Map oldEnv = app.getEnvAsMap();
        oldEnv.putAll(envs);
        getClient().updateApplicationEnv(applicationName, oldEnv);
    }
    @Override
    public void changeInstancesNumber(int instancesNumber){
        getClient().updateApplicationInstances(
                getApplicationName(), instancesNumber);
    }
    @Override
    public void updateApplicationDiskQuota(int diskQuota){
        getClient().updateApplicationDiskQuota(getApplicationName(), diskQuota);
    }
    @Override
    public void updateApplicationMemory(int memory){
        getClient().updateApplicationMemory(getApplicationName(), memory);
    }
}
| apache-2.0 |
pcadabam/gobblin | gobblin-runtime/src/main/java/org/apache/gobblin/runtime/spec_catalog/TopologyCatalog.java | 9451 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.spec_catalog;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import javax.annotation.Nonnull;
import lombok.Getter;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.reflect.ConstructorUtils;
import org.apache.gobblin.runtime.api.FlowSpec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.AbstractIdleService;
import com.google.common.util.concurrent.Service;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alpha;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.instrumented.Instrumented;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.Tag;
import org.apache.gobblin.runtime.api.GobblinInstanceEnvironment;
import org.apache.gobblin.runtime.api.MutableSpecCatalog;
import org.apache.gobblin.runtime.api.Spec;
import org.apache.gobblin.runtime.api.SpecCatalog;
import org.apache.gobblin.runtime.api.SpecCatalogListener;
import org.apache.gobblin.runtime.api.SpecNotFoundException;
import org.apache.gobblin.runtime.api.SpecSerDe;
import org.apache.gobblin.runtime.api.SpecStore;
import org.apache.gobblin.runtime.api.TopologySpec;
import org.apache.gobblin.runtime.spec_store.FSSpecStore;
import org.apache.gobblin.util.ClassAliasResolver;
/**
 * Catalog of {@link TopologySpec}s backed by a pluggable {@link SpecStore}
 * (filesystem-based {@link FSSpecStore} by default). Registered
 * {@link SpecCatalogListener}s are notified on add/delete, and standard
 * catalog metrics are emitted when instrumentation is enabled.
 */
@Alpha
public class TopologyCatalog extends AbstractIdleService implements SpecCatalog, MutableSpecCatalog, SpecSerDe {
  public static final String DEFAULT_TOPOLOGYSPEC_STORE_CLASS = FSSpecStore.class.getCanonicalName();
  protected final SpecCatalogListenersList listeners;
  protected final Logger log;
  // null when instrumentation is disabled
  protected final MetricContext metricContext;
  // null when instrumentation is disabled
  protected final TopologyCatalog.StandardMetrics metrics;
  protected final SpecStore specStore;
  // Latch for callers to await initial catalog population.
  // NOTE(review): not counted down within this class -- presumably done by
  // an external component; confirm usage before relying on it.
  @Getter
  protected CountDownLatch initComplete = new CountDownLatch(1);
  private final ClassAliasResolver<SpecStore> aliasResolver;
  public TopologyCatalog(Config config) {
    this(config, Optional.<Logger>absent());
  }
  public TopologyCatalog(Config config, Optional<Logger> log) {
    this(config, log, Optional.<MetricContext>absent(), true);
  }
  public TopologyCatalog(Config config, GobblinInstanceEnvironment env) {
    this(config, Optional.of(env.getLog()), Optional.of(env.getMetricContext()),
        env.isInstrumentationEnabled());
  }
  /**
   * Main constructor: sets up metrics (when instrumentation is enabled) and
   * instantiates the {@link SpecStore} implementation named (class name or
   * alias) by {@code ConfigurationKeys.TOPOLOGYSPEC_STORE_CLASS_KEY}.
   *
   * @throws RuntimeException if the spec store cannot be instantiated reflectively
   */
  public TopologyCatalog(Config config, Optional<Logger> log, Optional<MetricContext> parentMetricContext,
      boolean instrumentationEnabled) {
    this.log = log.isPresent() ? log.get() : LoggerFactory.getLogger(getClass());
    this.listeners = new SpecCatalogListenersList(log);
    if (instrumentationEnabled) {
      MetricContext realParentCtx =
          parentMetricContext.or(Instrumented.getMetricContext(new org.apache.gobblin.configuration.State(), getClass()));
      this.metricContext = realParentCtx.childBuilder(TopologyCatalog.class.getSimpleName()).build();
      this.metrics = new SpecCatalog.StandardMetrics(this);
      this.addListener(this.metrics);
    }
    else {
      this.metricContext = null;
      this.metrics = null;
    }
    this.aliasResolver = new ClassAliasResolver<>(SpecStore.class);
    try {
      Config newConfig = config;
      // map the topology-specific store dir onto the generic FS spec-store key
      if (config.hasPath(ConfigurationKeys.TOPOLOGYSPEC_STORE_DIR_KEY)) {
        newConfig = config.withValue(ConfigurationKeys.SPECSTORE_FS_DIR_KEY,
            config.getValue(ConfigurationKeys.TOPOLOGYSPEC_STORE_DIR_KEY));
      }
      String specStoreClassName = DEFAULT_TOPOLOGYSPEC_STORE_CLASS;
      if (config.hasPath(ConfigurationKeys.TOPOLOGYSPEC_STORE_CLASS_KEY)) {
        specStoreClassName = config.getString(ConfigurationKeys.TOPOLOGYSPEC_STORE_CLASS_KEY);
      }
      this.log.info("Using SpecStore class name/alias " + specStoreClassName);
      // resolve alias -> class name, then invoke the (Config, SpecSerDe) constructor
      this.specStore = (SpecStore) ConstructorUtils.invokeConstructor(Class.forName(this.aliasResolver.resolve(
          specStoreClassName)), newConfig, this);
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
        | ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
  /***************************************************
  /* Catalog init and shutdown handlers *
  /**************************************************/
  @Override
  protected void startUp() throws Exception {
    // replay all stored specs to listeners registered before startup
    notifyAllListeners();
  }
  @Override
  protected void shutDown() throws Exception {
    this.listeners.close();
  }
  /***************************************************
  /* Catalog listeners *
  /**************************************************/
  /** Fires onAddSpec for every spec currently in the store. */
  protected void notifyAllListeners() {
    for (Spec spec : getSpecs()) {
      this.listeners.onAddSpec(spec);
    }
  }
  /**
   * Registers a listener; if the catalog is already running, replays all
   * existing specs to the new listener so it does not miss earlier adds.
   */
  @Override
  public void addListener(SpecCatalogListener specListener) {
    Preconditions.checkNotNull(specListener);
    this.listeners.addListener(specListener);
    if (state() == Service.State.RUNNING) {
      for (Spec spec : getSpecs()) {
        SpecCatalogListener.AddSpecCallback addJobCallback = new SpecCatalogListener.AddSpecCallback(spec);
        this.listeners.callbackOneListener(addJobCallback, specListener);
      }
    }
  }
  @Override
  public void removeListener(SpecCatalogListener specCatalogListener) {
    this.listeners.removeListener(specCatalogListener);
  }
  @Override
  public void registerWeakSpecCatalogListener(SpecCatalogListener specCatalogListener) {
    this.listeners.registerWeakSpecCatalogListener(specCatalogListener);
  }
  /***************************************************
  /* Catalog metrics *
  /**************************************************/
  @Nonnull
  @Override
  public MetricContext getMetricContext() {
    return this.metricContext;
  }
  @Override
  public boolean isInstrumentationEnabled() {
    return null != this.metricContext;
  }
  @Override
  public List<Tag<?>> generateTags(org.apache.gobblin.configuration.State state) {
    return Collections.emptyList();
  }
  // Switching metric contexts is not supported by this catalog.
  @Override
  public void switchMetricContext(List<Tag<?>> tags) {
    throw new UnsupportedOperationException();
  }
  @Override
  public void switchMetricContext(MetricContext context) {
    throw new UnsupportedOperationException();
  }
  @Override
  public SpecCatalog.StandardMetrics getMetrics() {
    return this.metrics;
  }
  /**************************************************
  /* Catalog core functionality *
  /**************************************************/
  @Override
  public Collection<Spec> getSpecs() {
    try {
      return specStore.getSpecs();
    } catch (IOException e) {
      throw new RuntimeException("Cannot retrieve Specs from Spec store", e);
    }
  }
  @Override
  public Spec getSpec(URI uri) throws SpecNotFoundException {
    try {
      return specStore.getSpec(uri);
    } catch (IOException e) {
      throw new RuntimeException("Cannot retrieve Spec from Spec store for URI: " + uri, e);
    }
  }
  /**
   * Persists the given spec and notifies listeners. The catalog must be
   * RUNNING. NOTE(review): the log statement casts to {@link TopologySpec},
   * so non-topology specs would fail here with a ClassCastException.
   */
  @Override
  public void put(Spec spec) {
    try {
      Preconditions.checkState(state() == Service.State.RUNNING, String.format("%s is not running.", this.getClass().getName()));
      Preconditions.checkNotNull(spec);
      log.info(String.format("Adding TopologySpec with URI: %s and Config: %s", spec.getUri(),
          ((TopologySpec) spec).getConfigAsProperties()));
      specStore.addSpec(spec);
      this.listeners.onAddSpec(spec);
    } catch (IOException e) {
      throw new RuntimeException("Cannot add Spec to Spec store: " + spec, e);
    }
  }
  /**
   * Deletes the spec at the given URI. Listeners are notified BEFORE the
   * store delete; if the delete then fails, listeners have already seen the
   * removal -- NOTE(review): confirm this ordering is intentional.
   */
  @Override
  public void remove(URI uri) {
    try {
      Preconditions.checkState(state() == Service.State.RUNNING, String.format("%s is not running.", this.getClass().getName()));
      Preconditions.checkNotNull(uri);
      log.info(String.format("Removing TopologySpec with URI: %s", uri));
      this.listeners.onDeleteSpec(uri, FlowSpec.Builder.DEFAULT_VERSION);
      specStore.deleteSpec(uri);
    } catch (IOException e) {
      throw new RuntimeException("Cannot delete Spec from Spec store for URI: " + uri, e);
    }
  }
  // SpecSerDe: Java serialization of the whole Spec object.
  @Override
  public byte[] serialize(Spec spec) {
    return SerializationUtils.serialize(spec);
  }
  @Override
  public Spec deserialize(byte[] spec) {
    return SerializationUtils.deserialize(spec);
  }
}
| apache-2.0 |
ccjeremiahlin/fse-F14-SA5-SSNoC-Java-REST | src/main/java/edu/cmu/sv/ws/ssnoc/rest/StatusListService.java | 1385 | package edu.cmu.sv.ws.ssnoc.rest;
import edu.cmu.sv.ws.ssnoc.common.logging.Log;
import edu.cmu.sv.ws.ssnoc.common.utils.ConverterUtils;
import edu.cmu.sv.ws.ssnoc.data.dao.DAOFactory;
import edu.cmu.sv.ws.ssnoc.data.po.StatusPO;
import edu.cmu.sv.ws.ssnoc.dto.Status;
import edu.cmu.sv.ws.ssnoc.dto.User;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.annotation.XmlElementWrapper;
import java.util.ArrayList;
import java.util.List;
@Path("/statuscrumbs")
public class StatusListService extends BaseService {
    /**
     * Loads all status crumbs recorded for the given user.
     * Exceptions from the DAO layer are routed through
     * {@code handleException}; on failure the returned list may be null or
     * partially filled.
     *
     * @param userName user whose status crumbs are requested (path parameter)
     * @return list of statuses for that user
     */
    @GET
    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
    @Path("/{userName}")
    // NOTE(review): wrapper element is named "users" although it wraps
    // statuses -- looks like a copy/paste leftover; confirm before changing
    // since clients may depend on the current XML shape.
    @XmlElementWrapper(name = "users")
    public List<Status> loadStatusList(@PathParam("userName")String userName) {
        Log.enter();
        List<Status> statusList = null;
        try {
            // fetch persistence objects and convert each to its DTO form
            List<StatusPO> statusPOs = DAOFactory.getInstance().getStatusDAO().findStatusByUserName(userName);
            statusList = new ArrayList<Status>();
            for (StatusPO po : statusPOs) {
                Status dto = ConverterUtils.convert(po);
                statusList.add(dto);
            }
        } catch (Exception e) {
            handleException(e);
        } finally {
            Log.exit(statusList);
        }
        return statusList;
    }
}
| apache-2.0 |
apache/geronimo-yoko | yoko-core/src/test/java/ORBTest_Basic/ExLong.java | 1337 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ORBTest_Basic;
//
// IDL:ORBTest_Basic/ExLong:1.0
//
/***/
/**
 * CORBA user exception carrying a single IDL {@code long} (Java {@code int})
 * payload; corresponds to repository id {@code IDL:ORBTest_Basic/ExLong:1.0}.
 */
public final class ExLong extends org.omg.CORBA.UserException
{
    private static final String REPOSITORY_ID = "IDL:ORBTest_Basic/ExLong:1.0";

    /** Exception payload; defaults to 0 when not supplied. */
    public int value;

    /** Creates the exception with the repository id only. */
    public ExLong()
    {
        super(REPOSITORY_ID);
    }

    /** Creates the exception carrying the given payload. */
    public ExLong(int value)
    {
        super(REPOSITORY_ID);
        this.value = value;
    }

    /** Creates the exception with a reason appended to the repository id. */
    public ExLong(String _reason,
                  int value)
    {
        super(REPOSITORY_ID + " " + _reason);
        this.value = value;
    }
}
| apache-2.0 |
dusenberrymw/systemml | src/main/java/org/apache/sysml/runtime/instructions/cpfile/ParameterizedBuiltinCPFileInstruction.java | 36238 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.instructions.cpfile;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map.Entry;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.caching.CacheException;
import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.controlprogram.parfor.util.Cell;
import org.apache.sysml.runtime.controlprogram.parfor.util.IDHandler;
import org.apache.sysml.runtime.controlprogram.parfor.util.StagingFileUtils;
import org.apache.sysml.runtime.functionobjects.ParameterizedBuiltin;
import org.apache.sysml.runtime.functionobjects.ValueFunction;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.instructions.cp.CPOperand;
import org.apache.sysml.runtime.instructions.cp.ParameterizedBuiltinCPInstruction;
import org.apache.sysml.runtime.io.IOUtilFunctions;
import org.apache.sysml.runtime.io.MatrixWriter;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.MetaDataFormat;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixCell;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
import org.apache.sysml.runtime.matrix.data.OutputInfo;
import org.apache.sysml.runtime.matrix.operators.Operator;
import org.apache.sysml.runtime.matrix.operators.SimpleOperator;
import org.apache.sysml.runtime.util.FastStringTokenizer;
import org.apache.sysml.runtime.util.LocalFileUtils;
import org.apache.sysml.runtime.util.MapReduceTool;
/**
* File-based (out-of-core) realization of remove empty for robustness because there is no
* parallel version due to data-dependent row- and column dependencies.
*
*/
public class ParameterizedBuiltinCPFileInstruction extends ParameterizedBuiltinCPInstruction {
	/**
	 * Private constructor; instances are created via
	 * {@link #parseInstruction(String)} only.
	 *
	 * @param op operator wrapping the builtin value function
	 * @param paramsMap name/value instruction parameters (e.g. target, margin)
	 * @param out output operand
	 * @param opcode instruction opcode (currently only "rmempty")
	 * @param istr original serialized instruction string
	 */
	private ParameterizedBuiltinCPFileInstruction(Operator op, HashMap<String, String> paramsMap, CPOperand out,
			String opcode, String istr) {
		super(op, paramsMap, out, opcode, istr);
	}
public static ParameterizedBuiltinCPFileInstruction parseInstruction( String str )
throws DMLRuntimeException
{
String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
// first part is always the opcode
String opcode = parts[0];
// last part is always the output
CPOperand out = new CPOperand( parts[parts.length-1] );
// process remaining parts and build a hash map
HashMap<String,String> paramsMap = constructParameterMap(parts);
// determine the appropriate value function
ValueFunction func = null;
if ( opcode.equalsIgnoreCase("rmempty") ) {
func = ParameterizedBuiltin.getParameterizedBuiltinFnObject(opcode);
return new ParameterizedBuiltinCPFileInstruction(new SimpleOperator(func), paramsMap, out, opcode, str);
}
else {
throw new DMLRuntimeException("Unknown opcode (" + opcode + ") for ParameterizedBuiltin Instruction.");
}
}
	/**
	 * Executes the file-based rmempty: reads the "target" matrix variable,
	 * ensures its data is exported to its file, runs the out-of-core
	 * {@link RemoveEmpty} pass and binds the resulting matrix object to the
	 * output variable.
	 *
	 * @param ec execution context holding the variable map
	 * @throws DMLRuntimeException for unknown opcodes or execution failures
	 */
	@Override
	public void processInstruction(ExecutionContext ec)
		throws DMLRuntimeException
	{
		String opcode = getOpcode();
		if ( opcode.equalsIgnoreCase("rmempty") )
		{
			// get inputs
			MatrixObject src = ec.getMatrixObject( params.get("target") );
			MatrixObject out = ec.getMatrixObject( output.getName() );
			// margin selects the dimension to compact: "rows" or "cols"
			String margin = params.get("margin");
			// export input matrix (if necessary) so the file-based pass can read it
			src.exportData();
			//core execution
			RemoveEmpty rm = new RemoveEmpty( margin, src, out );
			out = rm.execute();
			//put output
			ec.setVariable(output.getName(), out);
		}
		else {
			throw new DMLRuntimeException("Unknown opcode : " + opcode);
		}
	}
/**
* Remove empty rows as a inner class in order to allow testing independent of the
* overall SystemML instruction framework.
*
*/
public static class RemoveEmpty
{
		// "rows" or "cols": dimension along which empty entries are removed
		private String _margin = null;
		// input matrix (already exported to its file)
		private MatrixObject _src = null;
		// target output matrix object (provides the output file name)
		private MatrixObject _out = null;
		/**
		 * @param margin "rows" or "cols"
		 * @param src input matrix object
		 * @param out output matrix object
		 */
		public RemoveEmpty( String margin, MatrixObject src, MatrixObject out )
		{
			_margin = margin;
			_src = src;
			_out = out;
		}
		/**
		 * Runs the out-of-core removeEmpty in three phases: (1) rewrite the
		 * input file into a local staging directory, (2) scan for non-empty
		 * rows/columns and build the key mapping, (3) write the compacted
		 * result back to the output file.
		 *
		 * @return new MatrixObject over the output file, with row count
		 *         (margin="rows") or column count (otherwise) replaced by the
		 *         number of remaining entries
		 * @throws DMLRuntimeException on any I/O failure
		 */
		public MatrixObject execute()
			throws DMLRuntimeException
		{
			//Timing time = new Timing();
			//time.start();
			//initial setup
			String fnameOld = _src.getFileName();
			String fnameNew = _out.getFileName();
			InputInfo ii = ((MetaDataFormat)_src.getMetaData()).getInputInfo();
			MatrixCharacteristics mc = _src.getMatrixCharacteristics();
			String stagingDir = LocalFileUtils.getUniqueWorkingDir(LocalFileUtils.CATEGORY_WORK);
			LocalFileUtils.createLocalFileIfNotExist(stagingDir);
			// number of surviving rows/cols, filled in by phase 2
			long ret = -1;
			try
			{
				// NOTE(review): diagBlocks presumably flags a block-diagonal
				// layout detected while staging binary blocks -- confirm
				boolean diagBlocks = false;
				//Phase 1: write file to staging
				if( ii == InputInfo.TextCellInputInfo )
					createTextCellStagingFile( fnameOld, stagingDir );
				else if( ii == InputInfo.BinaryCellInputInfo )
					createBinaryCellStagingFile( fnameOld, stagingDir );
				else if( ii == InputInfo.BinaryBlockInputInfo )
					diagBlocks = createBinaryBlockStagingFile( fnameOld, stagingDir );
				//Phase 2: scan empty rows/cols
				if( diagBlocks )
					ret = createKeyMappingDiag(stagingDir, mc.getRows(), mc.getCols(), mc.getRowsPerBlock(), mc.getColsPerBlock(), ii);
				else
					ret = createKeyMapping(stagingDir, mc.getRows(), mc.getCols(), mc.getRowsPerBlock(), mc.getColsPerBlock(), ii);
				//Phase 3: create output files
				MapReduceTool.deleteFileIfExistOnHDFS(fnameNew);
				if( ii == InputInfo.TextCellInputInfo
					|| ii == InputInfo.BinaryCellInputInfo )
				{
					createCellResultFile( fnameNew, stagingDir, mc.getRows(), mc.getCols(), mc.getRowsPerBlock(), mc.getColsPerBlock(), ii );
				}
				else if( ii == InputInfo.BinaryBlockInputInfo )
				{
					if( diagBlocks )
						createBlockResultFileDiag( fnameNew, stagingDir, mc.getRows(), mc.getCols(), ret, mc.getNonZeros(), mc.getRowsPerBlock(), mc.getColsPerBlock(), ii );
					else
						createBlockResultFile( fnameNew, stagingDir, mc.getRows(), mc.getCols(), ret, mc.getNonZeros(), mc.getRowsPerBlock(), mc.getColsPerBlock(), ii );
				}
			}
			catch( IOException ioe ) {
				throw new DMLRuntimeException( ioe );
			}
			//final cleanup
			LocalFileUtils.cleanupWorkingDirectory(stagingDir);
			//create and return new output object
			if( _margin.equals("rows") )
				return createNewOutputObject(_src, _out, ret, mc.getCols());
			else
				return createNewOutputObject(_src, _out, mc.getRows(), ret );
		}
/**
 * Builds the result matrix object for the given dimensions, reusing the
 * source's metadata (block sizes, formats) and the output's file name.
 * Dimensions of 0 are bumped to 1 and an empty block is materialized,
 * because 0x0 matrices are not valid.
 *
 * @param src source matrix object (metadata template)
 * @param out output matrix object (file name)
 * @param rows result row count
 * @param cols result column count
 * @return new matrix object with deep-copied metadata
 * @throws DMLRuntimeException on cache failures
 */
private static MatrixObject createNewOutputObject( MatrixObject src, MatrixObject out, long rows, long cols )
	throws DMLRuntimeException
{
	MatrixObject result = new MatrixObject(src.getValueType(), out.getFileName());
	long outRows = rows;
	long outCols = cols;
	//handle empty output block (ensure valid dimensions)
	if( outRows == 0 || outCols == 0 ) {
		outRows = Math.max(outRows, 1);
		outCols = Math.max(outCols, 1);
		try {
			result.acquireModify(new MatrixBlock((int)outRows, (int)outCols, true));
			result.release();
		}
		catch (CacheException e) {
			throw new DMLRuntimeException(e);
		}
	}
	//create deep copy of metadata obj with updated dimensions
	MetaDataFormat srcMeta = (MetaDataFormat) src.getMetaData();
	MatrixCharacteristics srcMc = srcMeta.getMatrixCharacteristics();
	MatrixCharacteristics newMc = new MatrixCharacteristics( outRows, outCols,
		srcMc.getRowsPerBlock(), srcMc.getColsPerBlock(), srcMc.getNonZeros());
	result.setMetaData( new MetaDataFormat(newMc, srcMeta.getOutputInfo(), srcMeta.getInputInfo()) );
	return result;
}
/**
 * Reads a text-cell input file from HDFS and partitions its cells into
 * block-local staging files, flushing a bounded in-memory cell buffer.
 *
 * @param fnameOld HDFS file name of the text-cell input
 * @param stagingDir local staging directory
 * @throws IOException if the input does not exist or reading fails
 * @throws DMLRuntimeException on staging failures
 */
public void createTextCellStagingFile( String fnameOld, String stagingDir )
	throws IOException, DMLRuntimeException
{
	//prepare text input for split-wise reading
	JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
	Path inPath = new Path(fnameOld);
	FileSystem fs = IOUtilFunctions.getFileSystem(inPath, job);
	if( !fs.exists(inPath) )
		throw new IOException("File "+fnameOld+" does not exist on HDFS.");
	FileInputFormat.addInputPath(job, inPath);
	TextInputFormat inFormat = new TextInputFormat();
	inFormat.configure(job);
	InputSplit[] splits = inFormat.getSplits(job, 1);
	//reusable read state
	LongWritable offset = new LongWritable();
	Text line = new Text();
	FastStringTokenizer tokenizer = new FastStringTokenizer(' ');
	LinkedList<Cell> cells = new LinkedList<>();
	for( InputSplit split : splits )
	{
		RecordReader<LongWritable,Text> reader = inFormat.getRecordReader(split, job, Reporter.NULL);
		try {
			while( reader.next(offset, line) ) {
				tokenizer.reset( line.toString() ); //reset tokenizer
				//parse "row col value" triple (evaluated left-to-right)
				Cell c = new Cell(tokenizer.nextLong(), tokenizer.nextLong(), tokenizer.nextDouble());
				cells.add(c);
				//flush when the buffer exceeds its capacity
				if( cells.size() > StagingFileUtils.CELL_BUFFER_SIZE ) {
					appendCellBufferToStagingArea(stagingDir, cells, ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
					cells.clear();
				}
			}
			//flush remaining cells of this split
			if( !cells.isEmpty() ) {
				appendCellBufferToStagingArea(stagingDir, cells, ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
				cells.clear();
			}
		}
		finally {
			IOUtilFunctions.closeSilently(reader);
		}
	}
}
/**
 * Reads a binary-cell input (sequence files of MatrixIndexes/MatrixCell)
 * from HDFS and partitions its cells into block-local staging files,
 * flushing a bounded in-memory cell buffer.
 *
 * @param fnameOld HDFS file name of the binary-cell input
 * @param stagingDir local staging directory
 * @throws IOException if the input does not exist or reading fails
 * @throws DMLRuntimeException on staging failures
 */
@SuppressWarnings("deprecation")
public void createBinaryCellStagingFile( String fnameOld, String stagingDir )
	throws IOException, DMLRuntimeException
{
	//prepare input
	JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
	Path path = new Path(fnameOld);
	FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
	if( !fs.exists(path) )
		throw new IOException("File "+fnameOld+" does not exist on HDFS.");
	LinkedList<Cell> buffer = new LinkedList<>();
	//reusable key/value objects (overwritten by reader.next)
	MatrixIndexes key = new MatrixIndexes();
	MatrixCell value = new MatrixCell();
	for(Path lpath: IOUtilFunctions.getSequenceFilePaths(fs, path))
	{
		SequenceFile.Reader reader = new SequenceFile.Reader(fs,lpath,job);
		try
		{
			while(reader.next(key, value))
			{
				long row = key.getRowIndex();
				long col = key.getColumnIndex();
				double lvalue = value.getValue();
				buffer.add(new Cell(row,col,lvalue));
				//flush when the buffer exceeds its capacity
				if( buffer.size() > StagingFileUtils.CELL_BUFFER_SIZE )
				{
					appendCellBufferToStagingArea(stagingDir, buffer, ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
					buffer.clear();
				}
			}
			//flush remaining cells of this sequence file
			if( !buffer.isEmpty() )
			{
				appendCellBufferToStagingArea(stagingDir, buffer, ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
				buffer.clear();
			}
		}
		finally {
			IOUtilFunctions.closeSilently(reader);
		}
	}
}
/**
 * Creates a binary block staging file and returns if the input matrix is a diag,
 * because diag is the primary usecase and there is lots of optimization potential.
 * Non-empty blocks are copied verbatim to staging files named "row_col"
 * (1-based block indexes); empty blocks are skipped.
 *
 * @param fnameOld old filename
 * @param stagingDir staging directory
 * @return true if diag (all non-empty blocks lie on the block diagonal)
 * @throws IOException if IOException occurs
 * @throws DMLRuntimeException if DMLRuntimeException occurs
 */
@SuppressWarnings("deprecation")
public boolean createBinaryBlockStagingFile( String fnameOld, String stagingDir )
	throws IOException, DMLRuntimeException
{
	//prepare input
	JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
	Path path = new Path(fnameOld);
	FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
	if( !fs.exists(path) )
		throw new IOException("File "+fnameOld+" does not exist on HDFS.");
	//reusable key/value objects (overwritten by reader.next)
	MatrixIndexes key = new MatrixIndexes();
	MatrixBlock value = new MatrixBlock();
	boolean diagBlocks = true; //falsified by any off-diagonal non-empty block
	for(Path lpath : IOUtilFunctions.getSequenceFilePaths(fs, path))
	{
		SequenceFile.Reader reader = new SequenceFile.Reader(fs,lpath,job);
		try
		{
			while( reader.next(key, value) )
			{
				if( !value.isEmptyBlock() ) //skip empty blocks (important for diag)
				{
					String fname = stagingDir +"/"+key.getRowIndex()+"_"+key.getColumnIndex();
					LocalFileUtils.writeMatrixBlockToLocal(fname, value);
					diagBlocks &= (key.getRowIndex()==key.getColumnIndex());
				}
			}
		}
		finally {
			IOUtilFunctions.closeSilently(reader);
		}
	}
	return diagBlocks;
}
/**
 * Groups the buffered cells by their target block and appends each group
 * to the corresponding block-local staging file.
 *
 * @param dir staging directory
 * @param buffer cells to partition (not modified)
 * @param brlen rows per block
 * @param bclen columns per block
 * @throws DMLRuntimeException on staging failures
 * @throws IOException on write failures
 */
private static void appendCellBufferToStagingArea( String dir, LinkedList<Cell> buffer, int brlen, int bclen )
	throws DMLRuntimeException, IOException
{
	//group cells by their block id "<blockRow>_<blockCol>" (1-based)
	HashMap<String,LinkedList<Cell>> blockLists = new HashMap<>();
	for( Cell c : buffer ) {
		String blockID = (c.getRow()/brlen+1) +"_"+(c.getCol()/bclen+1);
		LinkedList<Cell> list = blockLists.get(blockID);
		if( list == null ) {
			list = new LinkedList<>();
			blockLists.put(blockID, list);
		}
		list.addLast(c);
	}
	//append each group to its block-local staging file
	for( Entry<String,LinkedList<Cell>> e : blockLists.entrySet() ) {
		String pfname = dir + "/" + e.getKey();
		StagingFileUtils.writeCellListToLocal(pfname, e.getValue());
	}
}
/**
 * Scans the staged blocks for empty rows (margin=rows) or columns
 * (margin=cols) and appends an old-index-to-new-index mapping for every
 * non-empty row/column to the local "meta" file.
 *
 * @param stagingDir local staging directory (input blocks, meta output)
 * @param rlen number of rows of the input matrix
 * @param clen number of columns of the input matrix
 * @param brlen rows per block
 * @param bclen columns per block
 * @param ii input format (block staging vs cell staging layout)
 * @return number of non-empty rows/columns (new dimension)
 * @throws FileNotFoundException if a staging file is missing
 * @throws IOException on read/write failures
 * @throws DMLRuntimeException if the result would be a 0x0 matrix
 */
private long createKeyMapping( String stagingDir, long rlen, long clen, int brlen, int bclen, InputInfo ii)
	throws FileNotFoundException, IOException, DMLRuntimeException
{
	String metaOut = stagingDir+"/meta";
	long len = 0; //total number of non-empty rows/cols
	long lastKey = 0; //next new (compacted) index
	if(_margin.equals("rows"))
	{
		for(int blockRow = 0; blockRow < (int)Math.ceil(rlen/(double)brlen); blockRow++)
		{
			//flags[i] == true means row i of this block row is (still) empty
			boolean[] flags = new boolean[brlen];
			for( int k=0; k<brlen; k++ )
				flags[k] = true;
			//scan for empty rows
			for(int blockCol = 0; blockCol < (int)Math.ceil(clen/(double)bclen); blockCol++)
			{
				String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
				if( ii == InputInfo.BinaryBlockInputInfo ){
					if( !LocalFileUtils.isExisting(fname) )
						continue; //missing staging file == empty block
					MatrixBlock buffer = LocalFileUtils.readMatrixBlockFromLocal(fname);
					for( int i=0; i<buffer.getNumRows(); i++ )
						for( int j=0; j<buffer.getNumColumns(); j++ )
						{
							double lvalue = buffer.quickGetValue(i, j);
							if( lvalue != 0 )
								flags[ i ] = false;
						}
				}
				else{
					//cell staging: any cell marks its (block-local, 0-based) row non-empty
					LinkedList<Cell> buffer = StagingFileUtils.readCellListFromLocal(fname);
					for( Cell c : buffer )
						flags[ (int)c.getRow()-blockRow*brlen-1 ] = false;
				}
			}
			//create and append key mapping (old row index -> new row index)
			//NOTE(review): blockRow*brlen is an int product and can overflow
			//for matrices with more than 2^31 rows — verify against callers
			LinkedList<long[]> keyMapping = new LinkedList<>();
			for( int i = 0; i<flags.length; i++ )
				if( !flags[i] )
					keyMapping.add(new long[]{blockRow*brlen+i, lastKey++});
			len += keyMapping.size();
			StagingFileUtils.writeKeyMappingToLocal(metaOut, keyMapping.toArray(new long[0][0]));
		}
	}
	else
	{
		for(int blockCol = 0; blockCol < (int)Math.ceil(clen/(double)bclen); blockCol++)
		{
			//flags[j] == true means column j of this block column is (still) empty
			boolean[] flags = new boolean[bclen];
			for( int k=0; k<bclen; k++ )
				flags[k] = true;
			//scan for empty rows
			for(int blockRow = 0; blockRow < (int)Math.ceil(rlen/(double)brlen); blockRow++)
			{
				String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
				if( ii == InputInfo.BinaryBlockInputInfo ){
					if( !LocalFileUtils.isExisting(fname) )
						continue; //missing staging file == empty block
					MatrixBlock buffer = LocalFileUtils.readMatrixBlockFromLocal(fname);
					for( int i=0; i<buffer.getNumRows(); i++ )
						for( int j=0; j<buffer.getNumColumns(); j++ )
						{
							double lvalue = buffer.quickGetValue(i, j);
							if( lvalue != 0 )
								flags[ j ] = false;
						}
				}
				else{
					//cell staging: any cell marks its (block-local, 0-based) column non-empty
					LinkedList<Cell> buffer = StagingFileUtils.readCellListFromLocal(fname);
					for( Cell c : buffer )
						flags[ (int)c.getCol()-blockCol*bclen-1 ] = false;
				}
			}
			//create and append key mapping (old col index -> new col index)
			//NOTE(review): blockCol*bclen is an int product, see overflow note above
			LinkedList<long[]> keyMapping = new LinkedList<>();
			for( int i = 0; i<flags.length; i++ )
				if( !flags[i] )
					keyMapping.add(new long[]{blockCol*bclen+i, lastKey++});
			len += keyMapping.size();
			StagingFileUtils.writeKeyMappingToLocal(metaOut, keyMapping.toArray(new long[0][0]));
		}
	}
	//final validation (matrices with dimensions 0x0 not allowed)
	if( len <= 0 )
		throw new DMLRuntimeException("Matrices with dimensions [0,0] not supported.");
	return len;
}
/**
 * Diag-specialized variant of createKeyMapping: because all non-empty
 * blocks lie on the block diagonal, only the diagonal block of each block
 * row/column needs to be scanned for empty rows/columns.
 *
 * @param stagingDir local staging directory (input blocks, meta output)
 * @param rlen number of rows of the input matrix
 * @param clen number of columns of the input matrix
 * @param brlen rows per block
 * @param bclen columns per block
 * @param ii input format (block staging vs cell staging layout)
 * @return number of non-empty rows/columns (new dimension)
 * @throws FileNotFoundException if a staging file is missing
 * @throws IOException on read/write failures
 * @throws DMLRuntimeException if the result would be a 0x0 matrix
 */
private long createKeyMappingDiag( String stagingDir, long rlen, long clen, int brlen, int bclen, InputInfo ii)
	throws FileNotFoundException, IOException, DMLRuntimeException
{
	String metaOut = stagingDir+"/meta";
	long len = 0; //total number of non-empty rows/cols
	long lastKey = 0; //next new (compacted) index
	if(_margin.equals("rows"))
	{
		for(int blockRow = 0; blockRow < (int)Math.ceil(rlen/(double)brlen); blockRow++)
		{
			//flags[i] == true means row i of this block row is (still) empty
			boolean[] flags = new boolean[brlen];
			for( int k=0; k<brlen; k++ )
				flags[k] = true;
			//scan for empty rows (diag: only the diagonal block can be non-empty)
			String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockRow+1);
			if( ii == InputInfo.BinaryBlockInputInfo ){
				//NOTE(review): this continue also skips the key-mapping append for
				//an entirely-empty block row; since all its rows are empty no
				//mapping entries would be written anyway — confirm intended
				if( !LocalFileUtils.isExisting(fname) )
					continue;
				MatrixBlock buffer = LocalFileUtils.readMatrixBlockFromLocal(fname);
				for( int i=0; i<buffer.getNumRows(); i++ )
					for( int j=0; j<buffer.getNumColumns(); j++ )
					{
						double lvalue = buffer.quickGetValue(i, j);
						if( lvalue != 0 )
							flags[ i ] = false;
					}
			}
			else{
				//cell staging: any cell marks its (block-local, 0-based) row non-empty
				LinkedList<Cell> buffer = StagingFileUtils.readCellListFromLocal(fname);
				for( Cell c : buffer )
					flags[ (int)c.getRow()-blockRow*brlen-1 ] = false;
			}
			//create and append key mapping (old row index -> new row index)
			LinkedList<long[]> keyMapping = new LinkedList<>();
			for( int i = 0; i<flags.length; i++ )
				if( !flags[i] )
					keyMapping.add(new long[]{blockRow*brlen+i, lastKey++});
			len += keyMapping.size();
			StagingFileUtils.writeKeyMappingToLocal(metaOut, keyMapping.toArray(new long[0][0]));
		}
	}
	else
	{
		for(int blockCol = 0; blockCol < (int)Math.ceil(clen/(double)bclen); blockCol++)
		{
			//flags[j] == true means column j of this block column is (still) empty
			boolean[] flags = new boolean[bclen];
			for( int k=0; k<bclen; k++ )
				flags[k] = true;
			//scan for empty rows (diag: only the diagonal block can be non-empty)
			String fname = stagingDir+"/"+(blockCol+1)+"_"+(blockCol+1);
			if( ii == InputInfo.BinaryBlockInputInfo ){
				//see NOTE(review) in the rows branch about this continue
				if( !LocalFileUtils.isExisting(fname) )
					continue;
				MatrixBlock buffer = LocalFileUtils.readMatrixBlockFromLocal(fname);
				for( int i=0; i<buffer.getNumRows(); i++ )
					for( int j=0; j<buffer.getNumColumns(); j++ )
					{
						double lvalue = buffer.quickGetValue(i, j);
						if( lvalue != 0 )
							flags[ j ] = false;
					}
			}
			else{
				//cell staging: any cell marks its (block-local, 0-based) column non-empty
				LinkedList<Cell> buffer = StagingFileUtils.readCellListFromLocal(fname);
				for( Cell c : buffer )
					flags[ (int)c.getCol()-blockCol*bclen-1 ] = false;
			}
			//create and append key mapping (old col index -> new col index)
			LinkedList<long[]> keyMapping = new LinkedList<>();
			for( int i = 0; i<flags.length; i++ )
				if( !flags[i] )
					keyMapping.add(new long[]{blockCol*bclen+i, lastKey++});
			len += keyMapping.size();
			StagingFileUtils.writeKeyMappingToLocal(metaOut, keyMapping.toArray(new long[0][0]));
		}
	}
	//final validation (matrices with dimensions 0x0 not allowed)
	if( len <= 0 )
		throw new DMLRuntimeException("Matrices with dimensions [0,0] not supported.");
	return len;
}
/**
 * Writes the compacted result in cell format (text or binary) to HDFS,
 * translating each staged cell's row (margin=rows) or column (margin=cols)
 * index through the key mapping produced by createKeyMapping.
 *
 * @param fnameNew HDFS output file name
 * @param stagingDir local staging directory (blocks + "meta" key mapping)
 * @param rlen number of rows of the input matrix
 * @param clen number of columns of the input matrix
 * @param brlen rows per block
 * @param bclen columns per block
 * @param ii cell format to write (text-cell or binary-cell)
 * @throws IOException on read/write failures
 * @throws DMLRuntimeException for unsupported formats
 */
@SuppressWarnings("deprecation")
public void createCellResultFile( String fnameNew, String stagingDir, long rlen, long clen, int brlen, int bclen, InputInfo ii )
	throws IOException, DMLRuntimeException
{
	//prepare input
	JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
	Path path = new Path(fnameNew);
	FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
	String metaOut = stagingDir+"/meta";
	//prepare output (exactly one of twriter/bwriter is non-null)
	BufferedWriter twriter = null;
	SequenceFile.Writer bwriter = null;
	if( ii == InputInfo.TextCellInputInfo )
		twriter = new BufferedWriter(new OutputStreamWriter(fs.create(path,true)));
	else if( ii == InputInfo.BinaryCellInputInfo )
		bwriter = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixCell.class);
	else
		throw new DMLRuntimeException("Unsupported cell input info: "+InputInfo.inputInfoToString(ii));
	StringBuilder sb = new StringBuilder();
	//reusable key/value objects for binary-cell output
	MatrixIndexes key = new MatrixIndexes();
	MatrixCell value = new MatrixCell();
	//key mapping per block row/col: old 0-based index -> new 0-based index
	HashMap<Integer,HashMap<Long,Long>> keyMap = new HashMap<>();
	BufferedReader fkeyMap = StagingFileUtils.openKeyMap(metaOut);
	try
	{
		if( _margin.equals("rows") )
		{
			for(int blockRow = 0; blockRow < (int)Math.ceil(rlen/(double)brlen); blockRow++)
			{
				//load the key mapping for this block row
				StagingFileUtils.nextKeyMap(fkeyMap, keyMap, blockRow, brlen);
				for(int blockCol = 0; blockCol < (int)Math.ceil(clen/(double)bclen); blockCol++)
				{
					String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
					LinkedList<Cell> buffer = StagingFileUtils.readCellListFromLocal(fname);
					if( ii == InputInfo.TextCellInputInfo )
						for( Cell c : buffer )
						{
							//remap row index (1-based on disk, 0-based in keyMap)
							sb.append(keyMap.get(blockRow).get(c.getRow()-1)+1);
							sb.append(' ');
							sb.append(c.getCol());
							sb.append(' ');
							sb.append(c.getValue());
							sb.append('\n');
							twriter.write( sb.toString() );
							sb.setLength(0);
						}
					else if( ii == InputInfo.BinaryCellInputInfo )
						for( Cell c : buffer )
						{
							key.setIndexes(keyMap.get(blockRow).get(c.getRow()-1)+1, c.getCol());
							value.setValue(c.getValue());
							bwriter.append(key, value);
						}
				}
				keyMap.remove(blockRow); //free mapping of processed block row
			}
		}
		else
		{
			for(int blockCol = 0; blockCol < (int)Math.ceil(clen/(double)bclen); blockCol++)
			{
				//load the key mapping for this block column
				StagingFileUtils.nextKeyMap(fkeyMap, keyMap, blockCol, bclen);
				for(int blockRow = 0; blockRow < (int)Math.ceil(rlen/(double)brlen); blockRow++)
				{
					String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
					LinkedList<Cell> buffer = StagingFileUtils.readCellListFromLocal(fname);
					if( ii == InputInfo.TextCellInputInfo )
						for( Cell c : buffer )
						{
							sb.append(c.getRow());
							sb.append(' ');
							//remap column index (1-based on disk, 0-based in keyMap)
							sb.append(keyMap.get(blockCol).get(c.getCol()-1)+1);
							sb.append(' ');
							sb.append(c.getValue());
							sb.append('\n');
							twriter.write( sb.toString() );
							sb.setLength(0);
						}
					else if( ii == InputInfo.BinaryCellInputInfo )
						for( Cell c : buffer )
						{
							key.setIndexes(c.getRow(), keyMap.get(blockCol).get(c.getCol()-1)+1);
							value.setValue(c.getValue());
							bwriter.append(key, value);
						}
				}
				keyMap.remove(blockCol); //free mapping of processed block column
			}
		}
		//Note: no need to handle empty result
	}
	finally {
		IOUtilFunctions.closeSilently(fkeyMap);
		IOUtilFunctions.closeSilently(twriter);
		IOUtilFunctions.closeSilently(bwriter);
	}
}
/**
 * Writes the compacted result in binary-block format to HDFS by merging,
 * per output block, the non-empty rows (margin=rows) or columns
 * (margin=cols) of the staged input blocks according to the key mapping.
 *
 * Fix vs. original: col_offset is now computed with a (long) cast before
 * the int multiplication, matching row_offset in the rows branch; the old
 * int product could overflow for very large matrices.
 *
 * @param fnameNew HDFS output file name
 * @param stagingDir local staging directory (blocks + "meta" key mapping)
 * @param rlen number of rows of the input matrix
 * @param clen number of columns of the input matrix
 * @param newlen compacted dimension (rows or cols, depending on margin)
 * @param nnz number of non-zeros (sparsity decision)
 * @param brlen rows per block
 * @param bclen columns per block
 * @param ii input format info (unused here, kept for symmetry)
 * @throws IOException on read/write failures
 * @throws DMLRuntimeException on local staging failures
 */
@SuppressWarnings("deprecation")
public void createBlockResultFile( String fnameNew, String stagingDir, long rlen, long clen, long newlen, long nnz, int brlen, int bclen, InputInfo ii )
	throws IOException, DMLRuntimeException
{
	//prepare input
	JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
	Path path = new Path(fnameNew);
	FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
	String metaOut = stagingDir+"/meta";
	//prepare output
	SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixBlock.class);
	MatrixIndexes key = new MatrixIndexes();
	try
	{
		if( _margin.equals("rows") )
		{
			MatrixBlock[] blocks = MatrixWriter.createMatrixBlocksForReuse(newlen, clen, brlen, bclen,
				MatrixBlock.evalSparseFormatInMemory(rlen, clen, nnz), nnz);
			for(int blockCol = 0; blockCol < (int)Math.ceil(clen/(double)bclen); blockCol++)
			{
				HashMap<Integer,HashMap<Long,Long>> keyMap = new HashMap<>();
				BufferedReader fkeyMap = StagingFileUtils.openKeyMap(metaOut);
				int maxCol = (int)(((long)blockCol*bclen + bclen < clen) ? bclen : clen - (long)blockCol*bclen);
				int blockRowOut = 0;
				int currentSize = -1;
				//consume up to brlen mapped rows per output block
				while( (currentSize = StagingFileUtils.nextSizedKeyMap(fkeyMap, keyMap, brlen, brlen)) > 0 )
				{
					int maxRow = currentSize;
					//get reuse matrix block
					MatrixBlock block = MatrixWriter.getMatrixBlockForReuse(blocks, maxRow, maxCol, brlen, bclen);
					block.reset(maxRow, maxCol);
					int rowPos = 0;
					int blockRow = Collections.min(keyMap.keySet());
					for( ; blockRow < (int)Math.ceil(rlen/(double)brlen) && rowPos<brlen ; blockRow++)
					{
						if( keyMap.containsKey(blockRow) )
						{
							String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
							if( LocalFileUtils.isExisting(fname) )
							{
								MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(fname);
								HashMap<Long,Long> lkeyMap = keyMap.get(blockRow);
								long row_offset = (long)blockRow*brlen;
								for( int i=0; i<tmp.getNumRows(); i++ )
									if( lkeyMap.containsKey(row_offset+i) ) {
										//copy row
										for( int j=0; j<tmp.getNumColumns(); j++ ) {
											double lvalue = tmp.quickGetValue(i, j);
											if( lvalue != 0 )
												block.quickSetValue(rowPos, j, lvalue);
										}
										rowPos++;
									}
							}
							else
							{
								//missing staging file: rows are mapped but all-zero
								HashMap<Long,Long> lkeyMap = keyMap.get(blockRow);
								rowPos+=lkeyMap.size();
							}
						}
						keyMap.remove(blockRow);
					}
					key.setIndexes(blockRowOut+1, blockCol+1);
					writer.append(key, block);
					blockRowOut++;
				}
				IOUtilFunctions.closeSilently(fkeyMap);
			}
		}
		else
		{
			MatrixBlock[] blocks = MatrixWriter.createMatrixBlocksForReuse(rlen, newlen, brlen, bclen,
				MatrixBlock.evalSparseFormatInMemory(rlen, clen, nnz), nnz);
			for(int blockRow = 0; blockRow < (int)Math.ceil(rlen/(double)brlen); blockRow++)
			{
				HashMap<Integer,HashMap<Long,Long>> keyMap = new HashMap<>();
				BufferedReader fkeyMap = StagingFileUtils.openKeyMap(metaOut);
				int maxRow = (int)(((long)blockRow*brlen + brlen < rlen) ? brlen : rlen - (long)blockRow*brlen);
				int blockColOut = 0;
				int currentSize = -1;
				//consume up to bclen mapped columns per output block
				while( (currentSize = StagingFileUtils.nextSizedKeyMap(fkeyMap, keyMap, bclen, bclen)) > 0 )
				{
					int maxCol = currentSize;
					//get reuse matrix block
					MatrixBlock block = MatrixWriter.getMatrixBlockForReuse(blocks, maxRow, maxCol, brlen, bclen);
					block.reset(maxRow, maxCol);
					int colPos = 0;
					int blockCol = Collections.min(keyMap.keySet());
					for( ; blockCol < (int)Math.ceil(clen/(double)bclen) && colPos<bclen ; blockCol++)
					{
						if( keyMap.containsKey(blockCol) )
						{
							String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
							if( LocalFileUtils.isExisting(fname) )
							{
								MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(fname);
								HashMap<Long,Long> lkeyMap = keyMap.get(blockCol);
								//FIX: widen before multiply to avoid int overflow (was blockCol*bclen)
								long col_offset = (long)blockCol*bclen;
								for( int j=0; j<tmp.getNumColumns(); j++ )
									if( lkeyMap.containsKey(col_offset+j) ) {
										//copy column
										for( int i=0; i<tmp.getNumRows(); i++ ){
											double lvalue = tmp.quickGetValue(i, j);
											if( lvalue != 0 )
												block.quickSetValue(i, colPos, lvalue);
										}
										colPos++;
									}
							}
							else
							{
								//missing staging file: columns are mapped but all-zero
								HashMap<Long,Long> lkeyMap = keyMap.get(blockCol);
								colPos+=lkeyMap.size();
							}
						}
						keyMap.remove(blockCol);
					}
					key.setIndexes(blockRow+1, blockColOut+1);
					writer.append(key, block);
					blockColOut++;
				}
				IOUtilFunctions.closeSilently(fkeyMap);
			}
		}
		//Note: no handling of empty matrices necessary
	}
	finally {
		IOUtilFunctions.closeSilently(writer);
	}
}
/**
 * Diag-specialized variant of createBlockResultFile: only diagonal blocks
 * of the input can be non-empty, so each output block is assembled from a
 * single staged block; all remaining output blocks are written as empty.
 *
 * Fix vs. original: the int products blockRow*brlen, blockCol*bclen,
 * i*brlen and j*bclen are now widened to long before multiplication
 * (matching the casts already used in the non-diag variant and at the
 * rows-branch maxCol computation), avoiding overflow for very large
 * matrices.
 *
 * @param fnameNew HDFS output file name
 * @param stagingDir local staging directory (blocks + "meta" key mapping)
 * @param rlen number of rows of the input matrix
 * @param clen number of columns of the input matrix
 * @param newlen compacted dimension (rows or cols, depending on margin)
 * @param nnz number of non-zeros (sparsity decision)
 * @param brlen rows per block
 * @param bclen columns per block
 * @param ii input format info (unused here, kept for symmetry)
 * @throws IOException on read/write failures
 * @throws DMLRuntimeException on staging failures or block-count mismatch
 */
@SuppressWarnings("deprecation")
public void createBlockResultFileDiag( String fnameNew, String stagingDir, long rlen, long clen, long newlen, long nnz, int brlen, int bclen, InputInfo ii )
	throws IOException, DMLRuntimeException
{
	//prepare input
	JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
	Path path = new Path(fnameNew);
	FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
	String metaOut = stagingDir+"/meta";
	//prepare output
	SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixBlock.class);
	MatrixIndexes key = new MatrixIndexes();
	//track written block ids so missing ones can be filled with empty blocks
	HashSet<Long> writtenBlocks = new HashSet<>();
	try
	{
		if( _margin.equals("rows") )
		{
			MatrixBlock[] blocks = MatrixWriter.createMatrixBlocksForReuse(newlen, clen, brlen, bclen,
				MatrixBlock.evalSparseFormatInMemory(rlen, clen, nnz), nnz);
			HashMap<Integer,HashMap<Long,Long>> keyMap = new HashMap<>();
			BufferedReader fkeyMap = StagingFileUtils.openKeyMap(metaOut);
			int currentSize = -1;
			int blockRowOut = 0;
			while( (currentSize = StagingFileUtils.nextSizedKeyMap(fkeyMap, keyMap, brlen, brlen)) > 0 )
			{
				int rowPos = 0;
				int blockRow = Collections.min(keyMap.keySet());
				int maxRow = currentSize;
				for( ; blockRow < (int)Math.ceil(rlen/(double)brlen); blockRow++)
				{
					int blockCol = blockRow; // for diag known to be equivalent
					int maxCol = (int)(((long)blockCol*bclen + bclen < clen) ? bclen : clen - (long)blockCol*bclen);
					//get reuse matrix block
					MatrixBlock block = MatrixWriter.getMatrixBlockForReuse(blocks, maxRow, maxCol, brlen, bclen);
					block.reset(maxRow, maxCol);
					if( keyMap.containsKey(blockRow) )
					{
						String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
						MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(fname);
						HashMap<Long,Long> lkeyMap = keyMap.get(blockRow);
						//FIX: widen before multiply to avoid int overflow (was blockRow*brlen)
						long row_offset = (long)blockRow*brlen;
						for( int i=0; i<tmp.getNumRows(); i++ )
							if( lkeyMap.containsKey(row_offset+i) ) {
								//copy row
								for( int j=0; j<tmp.getNumColumns(); j++ ) {
									double lvalue = tmp.quickGetValue(i, j);
									if( lvalue != 0 )
										block.quickSetValue(rowPos, j, lvalue);
								}
								rowPos++;
							}
					}
					//output current block (by def of diagBlocks, no additional rows)
					key.setIndexes(blockRowOut+1, blockCol+1);
					writer.append(key, block);
					writtenBlocks.add(IDHandler.concatIntIDsToLong(blockRowOut+1, blockCol+1));
					//finished block
					if( rowPos == maxRow )
					{
						keyMap.remove(blockRow);
						blockRowOut++;
						break;
					}
				}
			}
			IOUtilFunctions.closeSilently(fkeyMap);
		}
		else //cols
		{
			MatrixBlock[] blocks = MatrixWriter.createMatrixBlocksForReuse(rlen, newlen, brlen, bclen,
				MatrixBlock.evalSparseFormatInMemory(rlen, clen, nnz), nnz);
			HashMap<Integer,HashMap<Long,Long>> keyMap = new HashMap<>();
			BufferedReader fkeyMap = StagingFileUtils.openKeyMap(metaOut);
			int currentSize = -1;
			int blockColOut = 0;
			while( (currentSize = StagingFileUtils.nextSizedKeyMap(fkeyMap, keyMap, bclen, bclen)) > 0 )
			{
				int colPos = 0;
				int blockCol = Collections.min(keyMap.keySet());
				int maxCol = currentSize;
				for( ; blockCol < (int)Math.ceil(clen/(double)bclen); blockCol++)
				{
					int blockRow = blockCol; // for diag known to be equivalent
					//FIX: widen before multiply to avoid int overflow (was blockRow*brlen)
					int maxRow = (int)(((long)blockRow*brlen + brlen < rlen) ? brlen : rlen - (long)blockRow*brlen);
					//get reuse matrix block
					MatrixBlock block = MatrixWriter.getMatrixBlockForReuse(blocks, maxRow, maxCol, brlen, bclen);
					block.reset(maxRow, maxCol);
					if( keyMap.containsKey(blockCol) )
					{
						String fname = stagingDir+"/"+(blockRow+1)+"_"+(blockCol+1);
						MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(fname);
						HashMap<Long,Long> lkeyMap = keyMap.get(blockCol);
						//FIX: widen before multiply to avoid int overflow (was blockCol*bclen)
						long col_offset = (long)blockCol*bclen;
						for( int j=0; j<tmp.getNumColumns(); j++ )
							if( lkeyMap.containsKey(col_offset+j) ) {
								//copy column
								for( int i=0; i<tmp.getNumRows(); i++ ){
									double lvalue = tmp.quickGetValue(i, j);
									if( lvalue != 0 )
										block.quickSetValue(i, colPos, lvalue);
								}
								colPos++;
							}
					}
					//output current block (by def of diagBlocks, no additional cols)
					key.setIndexes(blockRow+1, blockColOut+1);
					writer.append(key, block);
					writtenBlocks.add(IDHandler.concatIntIDsToLong(blockRow+1, blockColOut+1));
					//finished block
					if( colPos == maxCol )
					{
						keyMap.remove(blockCol);
						blockColOut++;
						break;
					}
				}
			}
			IOUtilFunctions.closeSilently(fkeyMap);
		}
		//write remaining empty blocks
		MatrixBlock empty = new MatrixBlock(1,1,true);
		long rows = _margin.equals("rows") ? newlen : rlen;
		long cols = _margin.equals("cols") ? newlen : clen;
		int countBlk1 = (int)Math.ceil(rows/(double)brlen)*(int)Math.ceil(cols/(double)bclen);
		int countBlk2 = writtenBlocks.size();
		for( int i=0; i<(int)Math.ceil(rows/(double)brlen); i++)
			for(int j=0; j<(int)Math.ceil(cols/(double)bclen); j++ )
				if( !writtenBlocks.contains(IDHandler.concatIntIDsToLong(i+1, j+1)) )
				{
					//FIX: widen before multiply to avoid int overflow (was i*brlen / j*bclen)
					int maxRow = (int)(((long)i*brlen + brlen < rows) ? brlen : rows - (long)i*brlen);
					int maxCol = (int)(((long)j*bclen + bclen < cols) ? bclen : cols - (long)j*bclen);
					empty.reset(maxRow, maxCol);
					key.setIndexes(i+1, j+1);
					writer.append(key, empty);
					countBlk2++;
				}
		//sanity check: every output block written exactly once
		if( countBlk1 != countBlk2 )
			throw new DMLRuntimeException("Wrong number of written result blocks: "+countBlk1+" vs "+countBlk2+".");
	}
	finally {
		IOUtilFunctions.closeSilently(writer);
	}
}
}
}
| apache-2.0 |
lukhnos/j2objc | jre_emul/android/platform/libcore/ojluni/src/main/java/java/util/List.java | 35165 | /*
* Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util;
import java.util.function.UnaryOperator;
/**
* An ordered collection (also known as a <i>sequence</i>). The user of this
* interface has precise control over where in the list each element is
* inserted. The user can access elements by their integer index (position in
* the list), and search for elements in the list.<p>
*
* Unlike sets, lists typically allow duplicate elements. More formally,
* lists typically allow pairs of elements <tt>e1</tt> and <tt>e2</tt>
* such that <tt>e1.equals(e2)</tt>, and they typically allow multiple
* null elements if they allow null elements at all. It is not inconceivable
* that someone might wish to implement a list that prohibits duplicates, by
* throwing runtime exceptions when the user attempts to insert them, but we
* expect this usage to be rare.<p>
*
* The <tt>List</tt> interface places additional stipulations, beyond those
* specified in the <tt>Collection</tt> interface, on the contracts of the
* <tt>iterator</tt>, <tt>add</tt>, <tt>remove</tt>, <tt>equals</tt>, and
* <tt>hashCode</tt> methods. Declarations for other inherited methods are
* also included here for convenience.<p>
*
* The <tt>List</tt> interface provides four methods for positional (indexed)
* access to list elements. Lists (like Java arrays) are zero based. Note
* that these operations may execute in time proportional to the index value
* for some implementations (the <tt>LinkedList</tt> class, for
* example). Thus, iterating over the elements in a list is typically
* preferable to indexing through it if the caller does not know the
* implementation.<p>
*
* The <tt>List</tt> interface provides a special iterator, called a
* <tt>ListIterator</tt>, that allows element insertion and replacement, and
* bidirectional access in addition to the normal operations that the
* <tt>Iterator</tt> interface provides. A method is provided to obtain a
* list iterator that starts at a specified position in the list.<p>
*
* The <tt>List</tt> interface provides two methods to search for a specified
* object. From a performance standpoint, these methods should be used with
* caution. In many implementations they will perform costly linear
* searches.<p>
*
* The <tt>List</tt> interface provides two methods to efficiently insert and
* remove multiple elements at an arbitrary point in the list.<p>
*
* Note: While it is permissible for lists to contain themselves as elements,
* extreme caution is advised: the <tt>equals</tt> and <tt>hashCode</tt>
* methods are no longer well defined on such a list.
*
* <p>Some list implementations have restrictions on the elements that
* they may contain. For example, some implementations prohibit null elements,
* and some have restrictions on the types of their elements. Attempting to
* add an ineligible element throws an unchecked exception, typically
* <tt>NullPointerException</tt> or <tt>ClassCastException</tt>. Attempting
* to query the presence of an ineligible element may throw an exception,
* or it may simply return false; some implementations will exhibit the former
* behavior and some will exhibit the latter. More generally, attempting an
* operation on an ineligible element whose completion would not result in
* the insertion of an ineligible element into the list may throw an
* exception or it may succeed, at the option of the implementation.
* Such exceptions are marked as "optional" in the specification for this
* interface.
*
* <p>This interface is a member of the
* <a href="{@docRoot}openjdk-redirect.html?v=8&path=/technotes/guides/collections/index.html">
* Java Collections Framework</a>.
*
* @param <E> the type of elements in this list
*
* @author Josh Bloch
* @author Neal Gafter
* @see Collection
* @see Set
* @see ArrayList
* @see LinkedList
* @see Vector
* @see Arrays#asList(Object[])
* @see Collections#nCopies(int, Object)
* @see Collections#EMPTY_LIST
* @see AbstractList
* @see AbstractSequentialList
* @since 1.2
*/
public interface List<E> extends Collection<E> {
// Query Operations
/**
* Returns the number of elements in this list. If this list contains
* more than <tt>Integer.MAX_VALUE</tt> elements, returns
* <tt>Integer.MAX_VALUE</tt>.
*
* @return the number of elements in this list
*/
int size();
/**
* Returns <tt>true</tt> if this list contains no elements.
*
* @return <tt>true</tt> if this list contains no elements
*/
boolean isEmpty();
/**
* Returns <tt>true</tt> if this list contains the specified element.
* More formally, returns <tt>true</tt> if and only if this list contains
* at least one element <tt>e</tt> such that
* <tt>(o==null ? e==null : o.equals(e))</tt>.
*
* @param o element whose presence in this list is to be tested
* @return <tt>true</tt> if this list contains the specified element
* @throws ClassCastException if the type of the specified element
* is incompatible with this list
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws NullPointerException if the specified element is null and this
* list does not permit null elements
* (<a href="Collection.html#optional-restrictions">optional</a>)
*/
boolean contains(Object o);
/**
* Returns an iterator over the elements in this list in proper sequence.
*
* @return an iterator over the elements in this list in proper sequence
*/
Iterator<E> iterator();
/**
* Returns an array containing all of the elements in this list in proper
* sequence (from first to last element).
*
* <p>The returned array will be "safe" in that no references to it are
* maintained by this list. (In other words, this method must
* allocate a new array even if this list is backed by an array).
* The caller is thus free to modify the returned array.
*
* <p>This method acts as bridge between array-based and collection-based
* APIs.
*
* @return an array containing all of the elements in this list in proper
* sequence
* @see Arrays#asList(Object[])
*/
Object[] toArray();
/**
* Returns an array containing all of the elements in this list in
* proper sequence (from first to last element); the runtime type of
* the returned array is that of the specified array. If the list fits
* in the specified array, it is returned therein. Otherwise, a new
* array is allocated with the runtime type of the specified array and
* the size of this list.
*
* <p>If the list fits in the specified array with room to spare (i.e.,
* the array has more elements than the list), the element in the array
* immediately following the end of the list is set to <tt>null</tt>.
* (This is useful in determining the length of the list <i>only</i> if
* the caller knows that the list does not contain any null elements.)
*
* <p>Like the {@link #toArray()} method, this method acts as bridge between
* array-based and collection-based APIs. Further, this method allows
* precise control over the runtime type of the output array, and may,
* under certain circumstances, be used to save allocation costs.
*
* <p>Suppose <tt>x</tt> is a list known to contain only strings.
* The following code can be used to dump the list into a newly
* allocated array of <tt>String</tt>:
*
* <pre>{@code
* String[] y = x.toArray(new String[0]);
* }</pre>
*
* Note that <tt>toArray(new Object[0])</tt> is identical in function to
* <tt>toArray()</tt>.
*
* @param a the array into which the elements of this list are to
* be stored, if it is big enough; otherwise, a new array of the
* same runtime type is allocated for this purpose.
* @return an array containing the elements of this list
* @throws ArrayStoreException if the runtime type of the specified array
* is not a supertype of the runtime type of every element in
* this list
* @throws NullPointerException if the specified array is null
*/
<T> T[] toArray(T[] a);
// Modification Operations
/**
* Appends the specified element to the end of this list (optional
* operation).
*
* <p>Lists that support this operation may place limitations on what
* elements may be added to this list. In particular, some
* lists will refuse to add null elements, and others will impose
* restrictions on the type of elements that may be added. List
* classes should clearly specify in their documentation any restrictions
* on what elements may be added.
*
* @param e element to be appended to this list
* @return <tt>true</tt> (as specified by {@link Collection#add})
* @throws UnsupportedOperationException if the <tt>add</tt> operation
* is not supported by this list
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this list
* @throws NullPointerException if the specified element is null and this
* list does not permit null elements
* @throws IllegalArgumentException if some property of this element
* prevents it from being added to this list
*/
boolean add(E e);
/**
* Removes the first occurrence of the specified element from this list,
* if it is present (optional operation). If this list does not contain
* the element, it is unchanged. More formally, removes the element with
* the lowest index <tt>i</tt> such that
* <tt>(o==null ? get(i)==null : o.equals(get(i)))</tt>
* (if such an element exists). Returns <tt>true</tt> if this list
* contained the specified element (or equivalently, if this list changed
* as a result of the call).
*
* @param o element to be removed from this list, if present
* @return <tt>true</tt> if this list contained the specified element
* @throws ClassCastException if the type of the specified element
* is incompatible with this list
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws NullPointerException if the specified element is null and this
* list does not permit null elements
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws UnsupportedOperationException if the <tt>remove</tt> operation
* is not supported by this list
*/
boolean remove(Object o);
// Bulk Modification Operations
/**
* Returns <tt>true</tt> if this list contains all of the elements of the
* specified collection.
*
* @param c collection to be checked for containment in this list
* @return <tt>true</tt> if this list contains all of the elements of the
* specified collection
* @throws ClassCastException if the types of one or more elements
* in the specified collection are incompatible with this
* list
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws NullPointerException if the specified collection contains one
* or more null elements and this list does not permit null
* elements
* (<a href="Collection.html#optional-restrictions">optional</a>),
* or if the specified collection is null
* @see #contains(Object)
*/
boolean containsAll(Collection<?> c);
/**
* Appends all of the elements in the specified collection to the end of
* this list, in the order that they are returned by the specified
* collection's iterator (optional operation). The behavior of this
* operation is undefined if the specified collection is modified while
* the operation is in progress. (Note that this will occur if the
* specified collection is this list, and it's nonempty.)
*
* @param c collection containing elements to be added to this list
* @return <tt>true</tt> if this list changed as a result of the call
* @throws UnsupportedOperationException if the <tt>addAll</tt> operation
* is not supported by this list
* @throws ClassCastException if the class of an element of the specified
* collection prevents it from being added to this list
* @throws NullPointerException if the specified collection contains one
* or more null elements and this list does not permit null
* elements, or if the specified collection is null
* @throws IllegalArgumentException if some property of an element of the
* specified collection prevents it from being added to this list
* @see #add(Object)
*/
boolean addAll(Collection<? extends E> c);
/**
* Inserts all of the elements in the specified collection into this
* list at the specified position (optional operation). Shifts the
* element currently at that position (if any) and any subsequent
* elements to the right (increases their indices). The new elements
* will appear in this list in the order that they are returned by the
* specified collection's iterator. The behavior of this operation is
* undefined if the specified collection is modified while the
* operation is in progress. (Note that this will occur if the specified
* collection is this list, and it's nonempty.)
*
* @param index index at which to insert the first element from the
* specified collection
* @param c collection containing elements to be added to this list
* @return <tt>true</tt> if this list changed as a result of the call
* @throws UnsupportedOperationException if the <tt>addAll</tt> operation
* is not supported by this list
* @throws ClassCastException if the class of an element of the specified
* collection prevents it from being added to this list
* @throws NullPointerException if the specified collection contains one
* or more null elements and this list does not permit null
* elements, or if the specified collection is null
* @throws IllegalArgumentException if some property of an element of the
* specified collection prevents it from being added to this list
     * @throws IndexOutOfBoundsException if the index is out of range
     *         ({@code index < 0 || index > size()})
*/
boolean addAll(int index, Collection<? extends E> c);
/**
* Removes from this list all of its elements that are contained in the
* specified collection (optional operation).
*
* @param c collection containing elements to be removed from this list
* @return <tt>true</tt> if this list changed as a result of the call
* @throws UnsupportedOperationException if the <tt>removeAll</tt> operation
* is not supported by this list
* @throws ClassCastException if the class of an element of this list
* is incompatible with the specified collection
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws NullPointerException if this list contains a null element and the
* specified collection does not permit null elements
* (<a href="Collection.html#optional-restrictions">optional</a>),
* or if the specified collection is null
* @see #remove(Object)
* @see #contains(Object)
*/
boolean removeAll(Collection<?> c);
/**
* Retains only the elements in this list that are contained in the
* specified collection (optional operation). In other words, removes
* from this list all of its elements that are not contained in the
* specified collection.
*
* @param c collection containing elements to be retained in this list
* @return <tt>true</tt> if this list changed as a result of the call
* @throws UnsupportedOperationException if the <tt>retainAll</tt> operation
* is not supported by this list
* @throws ClassCastException if the class of an element of this list
* is incompatible with the specified collection
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws NullPointerException if this list contains a null element and the
* specified collection does not permit null elements
* (<a href="Collection.html#optional-restrictions">optional</a>),
* or if the specified collection is null
* @see #remove(Object)
* @see #contains(Object)
*/
boolean retainAll(Collection<?> c);
/**
* Replaces each element of this list with the result of applying the
* operator to that element. Errors or runtime exceptions thrown by
* the operator are relayed to the caller.
*
* @implSpec
* The default implementation is equivalent to, for this {@code list}:
* <pre>{@code
* final ListIterator<E> li = list.listIterator();
* while (li.hasNext()) {
* li.set(operator.apply(li.next()));
* }
* }</pre>
*
* If the list's list-iterator does not support the {@code set} operation
* then an {@code UnsupportedOperationException} will be thrown when
* replacing the first element.
*
* @param operator the operator to apply to each element
* @throws UnsupportedOperationException if this list is unmodifiable.
* Implementations may throw this exception if an element
* cannot be replaced or if, in general, modification is not
* supported
* @throws NullPointerException if the specified operator is null or
* if the operator result is a null value and this list does
* not permit null elements
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @since 1.8
*/
default void replaceAll(UnaryOperator<E> operator) {
Objects.requireNonNull(operator);
final ListIterator<E> li = this.listIterator();
while (li.hasNext()) {
li.set(operator.apply(li.next()));
}
}
// Android-changed: Warn about Collections.sort() being built on top
// of List.sort() rather than the other way round when targeting an
// API version > 25.
/**
* Sorts this list according to the order induced by the specified
* {@link Comparator}.
*
* <p>All elements in this list must be <i>mutually comparable</i> using the
* specified comparator (that is, {@code c.compare(e1, e2)} must not throw
* a {@code ClassCastException} for any elements {@code e1} and {@code e2}
* in the list).
*
* <p>If the specified comparator is {@code null} then all elements in this
* list must implement the {@link Comparable} interface and the elements'
* {@linkplain Comparable natural ordering} should be used.
*
* <p>This list must be modifiable, but need not be resizable.
*
* <p>For apps running on and targeting Android versions greater than
* Nougat (API level {@code > 25}), {@link Collections#sort(List)}
* delegates to this method. Such apps must not call
* {@link Collections#sort(List)} from this method. Instead, prefer
* not overriding this method at all. If you must override it, consider
* this implementation:
     * <pre>{@code
     * public void sort(Comparator<? super E> c) {
     *     Object[] elements = toArray();
     *     Arrays.sort(elements, c);
     *     ListIterator<E> iterator = listIterator();
     *     for (Object element : elements) {
     *         iterator.next();
     *         iterator.set((E) element);
     *     }
     * }
     * }</pre>
*
* @implSpec
* The default implementation obtains an array containing all elements in
* this list, sorts the array, and iterates over this list resetting each
* element from the corresponding position in the array. (This avoids the
* n<sup>2</sup> log(n) performance that would result from attempting
* to sort a linked list in place.)
*
* @implNote
* This implementation is a stable, adaptive, iterative mergesort that
* requires far fewer than n lg(n) comparisons when the input array is
* partially sorted, while offering the performance of a traditional
* mergesort when the input array is randomly ordered. If the input array
* is nearly sorted, the implementation requires approximately n
* comparisons. Temporary storage requirements vary from a small constant
* for nearly sorted input arrays to n/2 object references for randomly
* ordered input arrays.
*
* <p>The implementation takes equal advantage of ascending and
* descending order in its input array, and can take advantage of
* ascending and descending order in different parts of the same
* input array. It is well-suited to merging two or more sorted arrays:
* simply concatenate the arrays and sort the resulting array.
*
* <p>The implementation was adapted from Tim Peters's list sort for Python
* (<a href="http://svn.python.org/projects/python/trunk/Objects/listsort.txt">
* TimSort</a>). It uses techniques from Peter McIlroy's "Optimistic
* Sorting and Information Theoretic Complexity", in Proceedings of the
* Fourth Annual ACM-SIAM Symposium on Discrete Algorithms, pp 467-474,
* January 1993.
*
* @param c the {@code Comparator} used to compare list elements.
* A {@code null} value indicates that the elements'
* {@linkplain Comparable natural ordering} should be used
* @throws ClassCastException if the list contains elements that are not
* <i>mutually comparable</i> using the specified comparator
* @throws UnsupportedOperationException if the list's list-iterator does
* not support the {@code set} operation
* @throws IllegalArgumentException
* (<a href="Collection.html#optional-restrictions">optional</a>)
* if the comparator is found to violate the {@link Comparator}
* contract
* @since 1.8
*/
@SuppressWarnings({"unchecked", "rawtypes"})
default void sort(Comparator<? super E> c) {
Object[] a = this.toArray();
Arrays.sort(a, (Comparator) c);
ListIterator<E> i = this.listIterator();
for (Object e : a) {
i.next();
i.set((E) e);
}
}
/**
* Removes all of the elements from this list (optional operation).
* The list will be empty after this call returns.
*
* @throws UnsupportedOperationException if the <tt>clear</tt> operation
* is not supported by this list
*/
void clear();
// Comparison and hashing
/**
* Compares the specified object with this list for equality. Returns
* <tt>true</tt> if and only if the specified object is also a list, both
* lists have the same size, and all corresponding pairs of elements in
* the two lists are <i>equal</i>. (Two elements <tt>e1</tt> and
* <tt>e2</tt> are <i>equal</i> if <tt>(e1==null ? e2==null :
* e1.equals(e2))</tt>.) In other words, two lists are defined to be
* equal if they contain the same elements in the same order. This
* definition ensures that the equals method works properly across
* different implementations of the <tt>List</tt> interface.
*
* @param o the object to be compared for equality with this list
* @return <tt>true</tt> if the specified object is equal to this list
*/
boolean equals(Object o);
/**
* Returns the hash code value for this list. The hash code of a list
* is defined to be the result of the following calculation:
* <pre>{@code
* int hashCode = 1;
* for (E e : list)
* hashCode = 31*hashCode + (e==null ? 0 : e.hashCode());
* }</pre>
* This ensures that <tt>list1.equals(list2)</tt> implies that
* <tt>list1.hashCode()==list2.hashCode()</tt> for any two lists,
* <tt>list1</tt> and <tt>list2</tt>, as required by the general
* contract of {@link Object#hashCode}.
*
* @return the hash code value for this list
* @see Object#equals(Object)
* @see #equals(Object)
*/
int hashCode();
// Positional Access Operations
/**
* Returns the element at the specified position in this list.
*
* @param index index of the element to return
* @return the element at the specified position in this list
     * @throws IndexOutOfBoundsException if the index is out of range
     *         ({@code index < 0 || index >= size()})
*/
E get(int index);
/**
* Replaces the element at the specified position in this list with the
* specified element (optional operation).
*
* @param index index of the element to replace
* @param element element to be stored at the specified position
* @return the element previously at the specified position
* @throws UnsupportedOperationException if the <tt>set</tt> operation
* is not supported by this list
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this list
* @throws NullPointerException if the specified element is null and
* this list does not permit null elements
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this list
     * @throws IndexOutOfBoundsException if the index is out of range
     *         ({@code index < 0 || index >= size()})
*/
E set(int index, E element);
/**
* Inserts the specified element at the specified position in this list
* (optional operation). Shifts the element currently at that position
* (if any) and any subsequent elements to the right (adds one to their
* indices).
*
* @param index index at which the specified element is to be inserted
* @param element element to be inserted
* @throws UnsupportedOperationException if the <tt>add</tt> operation
* is not supported by this list
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this list
* @throws NullPointerException if the specified element is null and
* this list does not permit null elements
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this list
     * @throws IndexOutOfBoundsException if the index is out of range
     *         ({@code index < 0 || index > size()})
*/
void add(int index, E element);
/**
* Removes the element at the specified position in this list (optional
* operation). Shifts any subsequent elements to the left (subtracts one
* from their indices). Returns the element that was removed from the
* list.
*
* @param index the index of the element to be removed
* @return the element previously at the specified position
* @throws UnsupportedOperationException if the <tt>remove</tt> operation
* is not supported by this list
     * @throws IndexOutOfBoundsException if the index is out of range
     *         ({@code index < 0 || index >= size()})
*/
E remove(int index);
// Search Operations
/**
* Returns the index of the first occurrence of the specified element
* in this list, or -1 if this list does not contain the element.
* More formally, returns the lowest index <tt>i</tt> such that
* <tt>(o==null ? get(i)==null : o.equals(get(i)))</tt>,
* or -1 if there is no such index.
*
* @param o element to search for
* @return the index of the first occurrence of the specified element in
* this list, or -1 if this list does not contain the element
* @throws ClassCastException if the type of the specified element
* is incompatible with this list
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws NullPointerException if the specified element is null and this
* list does not permit null elements
* (<a href="Collection.html#optional-restrictions">optional</a>)
*/
int indexOf(Object o);
/**
* Returns the index of the last occurrence of the specified element
* in this list, or -1 if this list does not contain the element.
* More formally, returns the highest index <tt>i</tt> such that
* <tt>(o==null ? get(i)==null : o.equals(get(i)))</tt>,
* or -1 if there is no such index.
*
* @param o element to search for
* @return the index of the last occurrence of the specified element in
* this list, or -1 if this list does not contain the element
* @throws ClassCastException if the type of the specified element
* is incompatible with this list
* (<a href="Collection.html#optional-restrictions">optional</a>)
* @throws NullPointerException if the specified element is null and this
* list does not permit null elements
* (<a href="Collection.html#optional-restrictions">optional</a>)
*/
int lastIndexOf(Object o);
// List Iterators
/**
* Returns a list iterator over the elements in this list (in proper
* sequence).
*
* @return a list iterator over the elements in this list (in proper
* sequence)
*/
ListIterator<E> listIterator();
/**
* Returns a list iterator over the elements in this list (in proper
* sequence), starting at the specified position in the list.
* The specified index indicates the first element that would be
* returned by an initial call to {@link ListIterator#next next}.
* An initial call to {@link ListIterator#previous previous} would
* return the element with the specified index minus one.
*
* @param index index of the first element to be returned from the
* list iterator (by a call to {@link ListIterator#next next})
* @return a list iterator over the elements in this list (in proper
* sequence), starting at the specified position in the list
* @throws IndexOutOfBoundsException if the index is out of range
* ({@code index < 0 || index > size()})
*/
ListIterator<E> listIterator(int index);
// View
/**
* Returns a view of the portion of this list between the specified
* <tt>fromIndex</tt>, inclusive, and <tt>toIndex</tt>, exclusive. (If
* <tt>fromIndex</tt> and <tt>toIndex</tt> are equal, the returned list is
* empty.) The returned list is backed by this list, so non-structural
* changes in the returned list are reflected in this list, and vice-versa.
* The returned list supports all of the optional list operations supported
* by this list.<p>
*
* This method eliminates the need for explicit range operations (of
* the sort that commonly exist for arrays). Any operation that expects
* a list can be used as a range operation by passing a subList view
* instead of a whole list. For example, the following idiom
* removes a range of elements from a list:
* <pre>{@code
* list.subList(from, to).clear();
* }</pre>
* Similar idioms may be constructed for <tt>indexOf</tt> and
* <tt>lastIndexOf</tt>, and all of the algorithms in the
* <tt>Collections</tt> class can be applied to a subList.<p>
*
* The semantics of the list returned by this method become undefined if
* the backing list (i.e., this list) is <i>structurally modified</i> in
* any way other than via the returned list. (Structural modifications are
* those that change the size of this list, or otherwise perturb it in such
* a fashion that iterations in progress may yield incorrect results.)
*
* @param fromIndex low endpoint (inclusive) of the subList
* @param toIndex high endpoint (exclusive) of the subList
* @return a view of the specified range within this list
     * @throws IndexOutOfBoundsException for an illegal endpoint index value
     *         ({@code fromIndex < 0 || toIndex > size ||
     *         fromIndex > toIndex})
*/
List<E> subList(int fromIndex, int toIndex);
/**
* Creates a {@link Spliterator} over the elements in this list.
*
* <p>The {@code Spliterator} reports {@link Spliterator#SIZED} and
* {@link Spliterator#ORDERED}. Implementations should document the
* reporting of additional characteristic values.
*
* @implSpec
* The default implementation creates a
* <em><a href="Spliterator.html#binding">late-binding</a></em> spliterator
* from the list's {@code Iterator}. The spliterator inherits the
* <em>fail-fast</em> properties of the list's iterator.
*
* @implNote
* The created {@code Spliterator} additionally reports
* {@link Spliterator#SUBSIZED}.
*
* @return a {@code Spliterator} over the elements in this list
* @since 1.8
*/
    @Override
    default Spliterator<E> spliterator() {
        // Late-binding spliterator built from this list's iterator; reports
        // ORDERED here (the factory adds SIZED/SUBSIZED), and inherits the
        // iterator's fail-fast behaviour.
        return Spliterators.spliterator(this, Spliterator.ORDERED);
    }
}
| apache-2.0 |
ChinaQuants/Strata | modules/product/src/main/java/com/opengamma/strata/product/PositionInfoBuilder.java | 2330 | /**
* Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product;
import java.util.HashMap;
import java.util.Map;
import com.opengamma.strata.basics.StandardId;
import com.opengamma.strata.collect.ArgChecker;
/**
 * Builder used to create {@code PositionInfo} instances.
 * <p>
 * The builder is mutable; populate it and call {@link #build()}.
 */
public final class PositionInfoBuilder {

  /**
   * The primary identifier for the position.
   * <p>
   * The identifier is used to identify the position.
   */
  private StandardId id;
  /**
   * The position attributes, a key-value map allowing arbitrary information
   * to be associated with the position.
   */
  private final Map<PositionAttributeType<?>, Object> attributeMap = new HashMap<>();

  // creates an empty instance
  PositionInfoBuilder() {
  }

  // creates a populated instance
  PositionInfoBuilder(
      StandardId id,
      Map<PositionAttributeType<?>, Object> attributes) {
    this.id = id;
    this.attributeMap.putAll(attributes);
  }

  //-----------------------------------------------------------------------
  /**
   * Sets the primary identifier for the position, optional.
   * <p>
   * The identifier is used to identify the position.
   *
   * @param id  the identifier
   * @return this, for chaining
   */
  public PositionInfoBuilder id(StandardId id) {
    this.id = id;
    return this;
  }

  /**
   * Adds a position attribute, replacing any value already stored for the
   * same type ({@code Map.put} semantics).
   *
   * @param <T>  the type of the value
   * @param type  the type providing meaning to the value
   * @param value  the value
   * @return this, for chaining
   */
  @SuppressWarnings("unchecked")
  public <T> PositionInfoBuilder addAttribute(PositionAttributeType<T> type, T value) {
    ArgChecker.notNull(type, "type");
    ArgChecker.notNull(value, "value");
    // plain Map.put is required so that later values replace earlier ones;
    // ImmutableMap.Builder would not provide that behaviour
    attributeMap.put(type, value);
    return this;
  }

  /**
   * Builds the position information from the state of this builder.
   *
   * @return the position information
   */
  public PositionInfo build() {
    return new PositionInfo(id, attributeMap);
  }
}
| apache-2.0 |
Samuel789/MediPi | Clinician/MediPiClinical/src/main/java/org/medipi/clinical/utilities/Utilities.java | 3846 | /*
Copyright 2016 Richard Robinson @ NHS Digital <rrobinson@nhs.net>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.medipi.clinical.utilities;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Locale;
import java.util.Properties;
/**
 * Utilities singleton providing shared date formats, configuration keys and
 * typed accessors over the application {@link Properties}.
 *
 * <p>NOTE(review): the exposed {@link SimpleDateFormat} constants are mutable
 * and not thread-safe; confirm they are only used from a single thread, or
 * clone/synchronize them at the call sites.
 *
 * @author rick@robinsonhq.com
 */
public class Utilities {

    // Application properties; must be supplied via setProperties() before
    // the property accessors below are used.
    private Properties properties;

    public static final DateFormat DISPLAY_FORMAT = new SimpleDateFormat("EEE d MMM yyyy HH:mm:ss z");
    public static final DateFormat INTERNAL_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss");
    public static final DateFormat DISPLAY_DOB_FORMAT = new SimpleDateFormat("dd-MMM-yyyy");
    public static final DateFormat INTERNAL_DOB_FORMAT = new SimpleDateFormat("yyyyMMdd");
    public static final DateFormat DISPLAY_TABLE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    public static final DateFormat INTERNAL_DEVICE_FORMAT = new SimpleDateFormat("yyyy-MM-dd':'HH:mm");
    public static final DateFormat DISPLAY_SCALE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
    public static final DateFormat DISPLAY_SCHEDULE_FORMAT = new SimpleDateFormat("HH:mm:ss dd/MM/yyyy");
    public static final DateFormat INTERNAL_SPINE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss.SSS");
    public static final DateFormat DISPLAY_OXIMETER_TIME_FORMAT = new SimpleDateFormat("HH:mm:ss");
    public static final String CREATEPATIENTFORUNASSOCIATEDDEVICES = "medipi.concentrator.db.createpatientforunassociateddevices";
    public static final String SAVEMESSAGESTOFILE = "medipi.concentrator.savemessagestofile";
    public static final String MEDIPIINBOUNDSAVEDMESSAGEDIR = "medipi.concentrator.inboundsavedmessagedir";
    public static final DateFormat ISO8601FORMATDATEMILLI = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    public static final DateFormat ISO8601FORMATDATESECONDS = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    public static final DateFormat ISO8601FORMATDATEMINUTES = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");

    // Singleton: obtain via getInstance().
    private Utilities() {
    }

    /**
     * Returns the singleton instance (lazily and thread-safely initialised
     * via the class-holder idiom).
     *
     * @return the shared Utilities instance
     */
    public static Utilities getInstance() {
        return UtilitiesNHolder.INSTANCE;
    }

    // Lazy holder: INSTANCE is created on first access of this class.
    private static class UtilitiesNHolder {
        private static final Utilities INSTANCE = new Utilities();
    }

    /**
     * Stores the application properties used by the accessors below.
     *
     * @param mp the properties to use
     */
    public void setProperties(Properties mp){
        properties = mp;
    }

    /**
     * Returns the previously supplied application properties (may be null
     * if {@link #setProperties(Properties)} has not been called).
     *
     * @return the application properties
     */
    public Properties getProperties() {
        return properties;
    }

    /**
     * Reads a yes/no style boolean property. Values starting with "y"/"Y"
     * yield true, values starting with "n"/"N" yield false; anything else
     * (including a missing property) yields the supplied default.
     *
     * @param property the property to be returned
     * @param dfault - the default state of the property should it not be found
     * @return the boolean state of the property
     */
    public boolean getBooleanProperty(String property, boolean dfault) {
        String value = properties.getProperty(property);
        if (value == null) {
            return dfault;
        }
        // Locale.ROOT keeps the comparison stable regardless of the JVM's
        // default locale (e.g. Turkish casing rules).
        String normalised = value.toLowerCase(Locale.ROOT);
        if (normalised.startsWith("y")) {
            return true;
        }
        if (normalised.startsWith("n")) {
            return false;
        }
        return dfault;
    }

    /**
     * Returns the raw string value of the given property, or null if absent.
     *
     * @param property the property to be returned
     * @return the String representation of the property
     */
    public String getStringProperty(String property) {
        return properties.getProperty(property);
    }
}
| apache-2.0 |
joney000/Competitive-Programming-Java-Implementation | direci/E.java | 9242 | //pakage joney_000[let_me_start]
import java.util.*;
import java.lang.*;
import java.io.*;
import java.math.*;
/**
 * Hand-rolled buffered token reader over a raw {@link InputStream}.
 * Parses primitives directly from a byte buffer, avoiding the overhead of
 * {@code java.util.Scanner} — a standard competitive-programming utility.
 * Malformed input is reported as {@link InputMismatchException}.
 * Not thread-safe.
 */
class FastReader{
    // NOTE(review): 'finished' is never written or read in this class as shown.
    private boolean finished = false;
    private InputStream stream;          // underlying byte source
    private byte[] buf = new byte[1024]; // read-ahead buffer
    private int curChar;                 // index of the next unread byte in buf
    private int numChars;                // valid bytes in buf; -1 once EOF has been consumed
    private SpaceCharFilter filter;      // optional custom definition of whitespace
    public FastReader(InputStream stream){
        this.stream = stream;
    }
    // Returns the next raw byte (as int), refilling the buffer on demand.
    // Returns -1 at end of input; a further call after EOF throws.
    public int read(){
        if (numChars == -1){
            throw new InputMismatchException ();
        }
        if (curChar >= numChars){
            curChar = 0;
            try{
                numChars = stream.read (buf);
            } catch (IOException e){
                throw new InputMismatchException ();
            }
            if (numChars <= 0){
                return -1;
            }
        }
        return buf[curChar++];
    }
    // Like read(), but does not consume the byte; returns -1 instead of
    // throwing at end of input.
    public int peek(){
        if (numChars == -1){
            return -1;
        }
        if (curChar >= numChars){
            curChar = 0;
            try{
                numChars = stream.read (buf);
            } catch (IOException e){
                return -1;
            }
            if (numChars <= 0){
                return -1;
            }
        }
        return buf[curChar];
    }
    // Parses a (possibly negative) decimal int token.
    // NOTE(review): silently skips ',' characters inside the number (so
    // "1,234" parses as 1234) — apparently deliberate; confirm callers rely on it.
    public int nextInt(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        int sgn = 1;
        if (c == '-'){
            sgn = -1;
            c = read ();
        }
        int res = 0;
        do{
            if(c==','){
                c = read();
            }
            if (c < '0' || c > '9'){
                throw new InputMismatchException ();
            }
            res *= 10;
            res += c - '0';
            c = read ();
        } while (!isSpaceChar (c));
        return res * sgn;
    }
    // Parses a (possibly negative) decimal long token (no comma skipping here).
    public long nextLong(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        int sgn = 1;
        if (c == '-'){
            sgn = -1;
            c = read ();
        }
        long res = 0;
        do{
            if (c < '0' || c > '9'){
                throw new InputMismatchException ();
            }
            res *= 10;
            res += c - '0';
            c = read ();
        } while (!isSpaceChar (c));
        return res * sgn;
    }
    // Returns the next whitespace-delimited token.
    public String nextString(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        StringBuilder res = new StringBuilder ();
        do{
            res.appendCodePoint (c);
            c = read ();
        } while (!isSpaceChar (c));
        return res.toString ();
    }
    // Delegates to the custom filter when one is set; otherwise uses the
    // built-in whitespace definition below.
    public boolean isSpaceChar(int c){
        if (filter != null){
            return filter.isSpaceChar (c);
        }
        return isWhitespace (c);
    }
    // Space, newline, carriage return, tab, or end-of-input all count as whitespace.
    public static boolean isWhitespace(int c){
        return c == ' ' || c == '\n' || c == '\r' || c == '\t' || c == -1;
    }
    // Reads bytes up to (but not including) '\n' or EOF, dropping '\r'.
    // NOTE(review): the local 'buf' shadows the byte-buffer field of the same name.
    private String readLine0(){
        StringBuilder buf = new StringBuilder ();
        int c = read ();
        while (c != '\n' && c != -1){
            if (c != '\r'){
                buf.appendCodePoint (c);
            }
            c = read ();
        }
        return buf.toString ();
    }
    // Returns the next line, skipping lines that are blank after trimming.
    public String nextLine(){
        String s = readLine0 ();
        while (s.trim ().length () == 0)
            s = readLine0 ();
        return s;
    }
    // ignoreEmptyLines=true behaves like nextLine(); false returns the raw
    // next line even when it is blank.
    public String nextLine(boolean ignoreEmptyLines){
        if (ignoreEmptyLines){
            return nextLine ();
        }else{
            return readLine0 ();
        }
    }
    // Parses the next token as an arbitrary-precision integer.
    public BigInteger nextBigInteger(){
        try{
            return new BigInteger (nextString ());
        } catch (NumberFormatException e){
            throw new InputMismatchException ();
        }
    }
    // Returns the first non-whitespace character.
    public char nextCharacter(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        return (char) c;
    }
    // Parses a decimal token with optional sign, fraction and scientific
    // notation; the exponent after 'e'/'E' is parsed via nextInt().
    public double nextDouble(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        int sgn = 1;
        if (c == '-'){
            sgn = -1;
            c = read ();
        }
        double res = 0;
        while (!isSpaceChar (c) && c != '.'){
            if (c == 'e' || c == 'E'){
                return res * Math.pow (10, nextInt ());
            }
            if (c < '0' || c > '9'){
                throw new InputMismatchException ();
            }
            res *= 10;
            res += c - '0';
            c = read ();
        }
        if (c == '.'){
            c = read ();
            double m = 1;
            while (!isSpaceChar (c)){
                if (c == 'e' || c == 'E'){
                    return res * Math.pow (10, nextInt ());
                }
                if (c < '0' || c > '9'){
                    throw new InputMismatchException ();
                }
                m /= 10;
                res += (c - '0') * m;
                c = read ();
            }
        }
        return res * sgn;
    }
    // True when only whitespace remains before end of input.
    public boolean isExhausted(){
        int value;
        while (isSpaceChar (value = peek ()) && value != -1)
            read ();
        return value == -1;
    }
    // Alias for nextString().
    public String next(){
        return nextString ();
    }
    public SpaceCharFilter getFilter(){
        return filter;
    }
    public void setFilter(SpaceCharFilter filter){
        this.filter = filter;
    }
    // Pluggable whitespace definition used by isSpaceChar(int).
    public interface SpaceCharFilter{
        public boolean isSpaceChar(int ch);
    }
}
/******************** Pair class ***********************/
/**
 * Simple mutable pair of ints, ordered by {@code a} first and then {@code b}.
 * <p>
 * Note: {@code equals}/{@code hashCode} are deliberately not overridden, so
 * instances compare by identity in hash-based collections.
 */
class Pair implements Comparable<Pair>{
  public int a;
  public int b;
  public Pair(){
    this.a = 0;
    this.b = 0;
  }
  public Pair(int a,int b){
    this.a = a;
    this.b = b;
  }
  @Override
  public int compareTo(Pair p){
    // Integer.compare avoids the overflow of the original subtraction-based
    // comparison (e.g. Integer.MIN_VALUE - 1 wraps to a positive value,
    // which corrupted sort orderings).
    if(this.a==p.a){
      return Integer.compare(this.b, p.b);
    }
    return Integer.compare(this.a, p.a);
  }
  @Override
  public String toString(){
    return "a="+this.a+" b="+this.b;
  }
}
/******************** Main Class ***********************/
class E
{
public static InputStream inputStream = System.in;
public static OutputStream outputStream = System.out;
public static FastReader in = new FastReader(inputStream);;
public static PrintWriter out = new PrintWriter(outputStream);;
/*
Overhead
*/
public static int tempints[] = new int[100005];
public static long templongs[] = new long[100005];
//public static double tempdoubles[] = new double[100005];
public static double tempdoubles[] = new double[11];
public static char tempchars[] = new char[100005];
public static void main(String[] args) throws java.lang.Exception
{
//let_me_start
int tests = i();
for(int t=1;t<=tests;t++){
int n = i();//int m = i();
int[] arr = is(n);
long ans =0 ;
for(int i=1;i<=n;i++){
ans=arr[i];
}
out.write(""+ans+"\n");
}
//out.write(""+ans+"\n");
out.flush();
}
//****************************** Utilities ***********************//
public static boolean isPrime(long n)throws Exception{
if(n==1)return false;
if(n<=3)return true;
if(n%2==0)return false;
for(int i=2 ;i <= Math.sqrt(n); i++){
if(n%i==0)return false;
}
return true;
}
// sieve
public static int[] primes(int n)throws Exception{ // for(int i=1;i<=arr.length-1;i++)out.write(""+arr[i]+" ");
boolean arr[] = new boolean[n+1];
Arrays.fill(arr,true);
for(int i=1;i<=Math.sqrt(n);i++){
if(!arr[i])continue;
for(int j = 2*i ;j<=n;j+=i){
arr[i]=false;
}
}
LinkedList<Integer> ll = new LinkedList<Integer>();
for(int i=1;i<=n;i++){
if(arr[i])ll.add(i);
}
n = ll.size();
int primes[] = new int[n+1];
for(int i=1;i<=n;i++){
primes[i]=ll.removeFirst();
}
return primes;
}
public static long gcd (long a , long b)throws Exception{
if(b==0)return a;
return gcd(b , a%b);
}
public static long lcm (long a , long b)throws Exception{
if(a==0||b==0)return 0;
return (a*b)/gcd(a,b);
}
public static long mulmod(long a , long b ,long mod)throws Exception{
if(a==0||b==0)return 0;
if(b==1)return a;
long ans = mulmod(a,b/2,mod);
ans = (ans*2)% mod;
if(b%2==1)ans = (a + ans)% mod;
return ans;
}
public static long pow(long a , long b ,long mod)throws Exception{
if(b==0)return 1;
if(b==1)return a;
long ans = pow(a,b/2,mod);
ans = (ans * ans)% mod;
if(b%2==1)ans = (a * ans)% mod;
return ans;
}
// 20*20 nCr Pascal Table
public static long[][] ncrTable()throws Exception{
long ncr[][] = new long[21][21];
for(int i=0 ;i<=20 ;i++){ncr[i][0]=1;ncr[i][i]=1;}
for(int j=0;j<=20 ;j++){
for(int i=j+1;i<= 20 ;i++){
ncr[i][j] = ncr[i-1][j]+ncr[i-1][j-1];
}
}
return ncr;
}
//*******************************I/O******************************//
public static int i()throws Exception{
//return Integer.parseInt(br.readLine().trim());
return in.nextInt();
}
public static int[] is(int n)throws Exception{
//int arr[] = new int[n+1];
for(int i=1 ; i <= n ;i++)tempints[i] = in.nextInt();
return tempints;
}
public static long l()throws Exception{
return in.nextLong();
}
public static long[] ls(int n)throws Exception{
//long arr[] = new long[n+1];
for(int i=1 ; i <= n ;i++)templongs[i] = in.nextLong();
return templongs;
}
public static double d()throws Exception{
return in.nextDouble();
}
public static double[] ds(int n)throws Exception{
//double arr[] = new double[n+1];
for(int i=1 ; i <= n ;i++)tempdoubles[i] = in.nextDouble();
return tempdoubles;
}
public static char c()throws Exception{
return in.nextCharacter();
}
public static char[] cs(int n)throws Exception{
//double arr[] = new double[n+1];
for(int i=1 ; i <= n ;i++)tempchars[i] = in.nextCharacter();
return tempchars;
}
public static String s()throws Exception{
return in.nextLine();
}
public static BigInteger bi()throws Exception{
return in.nextBigInteger();
}
//***********************I/O ENDS ***********************//
//*********************** 0.3%f [precision]***********************//
/* roundoff upto 2 digits
double roundOff = Math.round(a * 100.0) / 100.0;
or
System.out.printf("%.2f", val);
*/
/* print upto 2 digits after decimal
val = ((long)(val * 100.0))/100.0;
*/
} | apache-2.0 |
apache/openwebbeans | webbeans-impl/src/test/java/org/apache/webbeans/test/annotation/binding/AnnotationWithArrayOfStringMember.java | 1267 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.webbeans.test.annotation.binding;
import javax.inject.Qualifier;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * CDI qualifier used by the test suite to exercise binding annotations whose
 * sole member is an array of strings; retained at runtime and applicable to
 * methods, fields, parameters and types.
 */
@Qualifier
@Target( { ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
public @interface AnnotationWithArrayOfStringMember
{
    String[] value();
}
| apache-2.0 |
code4craft/hello-design-pattern | src/test/java/helloworld/behavioral/visitor/HelloWorldVisitorTest.java | 681 | package helloworld.behavioral.visitor;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
/**
* @author yihua.huang@dianping.com
*/
public class HelloWorldVisitorTest {

    /** Walks the visitor over each character element and checks the text it collected. */
    @Test
    public void testHelloWorldVisitor(){
        final HelloWorldCharacterElements elements =
                new HelloWorldCharacterElements("Hello Visitor!".toCharArray());
        final HelloWorldCharacterVisitor visitor = new HelloWorldCharacterVisitor();
        elements.accept(visitor);
        assertThat(visitor.helloWorld(), is("Hello Visitor!"));
    }
}
| apache-2.0 |
wu-sheng/sky-walking | test/plugin/scenarios/spring-cloud-feign-1.2.x-scenario/src/main/java/org/apache/skywalking/apm/testcase/feign/controller/CaseController.java | 1730 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.testcase.feign.controller;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
@Controller
@RequestMapping("/case")
public class CaseController {

    private static final Logger LOGGER = LogManager.getLogger(CaseController.class);

    // Feign-backed client used to issue the instrumented downstream call.
    @Autowired
    private RestRequest restRequest;

    /** Health-check endpoint polled by the test harness; always answers "Success". */
    @ResponseBody
    @RequestMapping("/healthCheck")
    public String healthcheck() {
        return "Success";
    }

    /**
     * Scenario endpoint: triggers a Feign call via restRequest.getById() (so
     * the agent can trace it), logs the response and answers "success".
     */
    @ResponseBody
    @RequestMapping("/spring-cloud-feign-1.2.x-scenario")
    public String feignCase() {
        String user = restRequest.getById();
        LOGGER.info(user);
        return "success";
    }
}
| apache-2.0 |
0359xiaodong/GotyeSDK-Android | GotyeSDK/src/com/gotye/sdk/handmark/pulltorefresh/library/PullToRefreshLinearLayout.java | 4030 | package com.gotye.sdk.handmark.pulltorefresh.library;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.gotye.sdk.R;
import com.gotye.sdk.handmark.pulltorefresh.library.internal.LoadingLayout;
public class PullToRefreshLinearLayout extends PullToRefreshBase<LinearLayout> {
private LoadingLayout mHeaderLoadingView;
private LoadingLayout mFooterLoadingView;
private FrameLayout mLvFooterLoadingFrame;
private boolean mListViewExtrasEnabled;
public PullToRefreshLinearLayout(Context context) {
super(context);
}
public PullToRefreshLinearLayout(Context context, AttributeSet attrs) {
super(context, attrs);
}
public PullToRefreshLinearLayout(Context context, Mode mode) {
super(context, mode);
}
public PullToRefreshLinearLayout(Context context, Mode mode, AnimationStyle style) {
super(context, mode, style);
}
@Override
public com.gotye.sdk.handmark.pulltorefresh.library.PullToRefreshBase.Orientation getPullToRefreshScrollDirection() {
return Orientation.VERTICAL;
}
@Override
protected LinearLayout createRefreshableView(Context context,
AttributeSet attrs) {
LinearLayout lv = createLinearLayout(context, attrs);
// Set it to this so it can be used in ListActivity/ListFragment
lv.setId(android.R.id.list);
return null;
}
protected LinearLayout createLinearLayout(Context context, AttributeSet attrs) {
final LinearLayout lv;
if (VERSION.SDK_INT >= VERSION_CODES.GINGERBREAD) {
lv = new InternalLinearLayoutSDK9(context, attrs);
} else {
lv = new LinearLayout(context, attrs);
}
return lv;
}
@Override
protected boolean isReadyForPullEnd() {
return false;
}
@Override
protected boolean isReadyForPullStart() {
return true;
}
@Override
protected void handleStyledAttributes(TypedArray a) {
super.handleStyledAttributes(a);
mListViewExtrasEnabled = a.getBoolean(R.styleable.PullToRefresh_ptrListViewExtrasEnabled, true);
if (mListViewExtrasEnabled) {
final FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL);
// Create Loading Views ready for use later
FrameLayout frame = new FrameLayout(getContext());
mHeaderLoadingView = createLoadingLayout(getContext(), Mode.PULL_FROM_START, a);
mHeaderLoadingView.setVisibility(View.GONE);
frame.addView(mHeaderLoadingView, lp);
// mRefreshableView.addHeaderView(frame, null, false);
mLvFooterLoadingFrame = new FrameLayout(getContext());
mFooterLoadingView = createLoadingLayout(getContext(), Mode.PULL_FROM_END, a);
mFooterLoadingView.setVisibility(View.GONE);
mLvFooterLoadingFrame.addView(mFooterLoadingView, lp);
/**
* If the value for Scrolling While Refreshing hasn't been
* explicitly set via XML, enable Scrolling While Refreshing.
*/
if (!a.hasValue(R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled)) {
setScrollingWhileRefreshingEnabled(true);
}
}
}
@TargetApi(9)
final class InternalLinearLayoutSDK9 extends LinearLayout {
public InternalLinearLayoutSDK9(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
protected boolean overScrollBy(int deltaX, int deltaY, int scrollX, int scrollY, int scrollRangeX,
int scrollRangeY, int maxOverScrollX, int maxOverScrollY, boolean isTouchEvent) {
final boolean returnValue = super.overScrollBy(deltaX, deltaY, scrollX, scrollY, scrollRangeX,
scrollRangeY, maxOverScrollX, maxOverScrollY, isTouchEvent);
// Does all of the hard work...
OverscrollHelper.overScrollBy(PullToRefreshLinearLayout.this, deltaX, scrollX, deltaY, scrollY, isTouchEvent);
return returnValue;
}
}
}
| apache-2.0 |
nileshpatelksy/hello-pod-cast | archive/FILE/Compiler/tringle/src/Triangle/SyntacticAnalyzer/Scanner.java | 4886 | /*
* @(#)Scanner.java 2.1 2003/10/07
*
* Copyright (C) 1999, 2003 D.A. Watt and D.F. Brown
* Dept. of Computing Science, University of Glasgow, Glasgow G12 8QQ Scotland
* and School of Computer and Math Sciences, The Robert Gordon University,
* St. Andrew Street, Aberdeen AB25 1HG, Scotland.
* All rights reserved.
*
* This software is provided free for educational use only. It may
* not be used for commercial purposes without the prior written permission
* of the authors.
*/
package Triangle.SyntacticAnalyzer;
/**
 * Lexical analyzer for the Triangle compiler: turns the character stream of a
 * {@link SourceFile} into a stream of {@link Token}s via {@link #scan()}.
 */
public final class Scanner {

    // Source program being scanned.
    private SourceFile sourceFile;
    // When true, scan() prints each token it produces.
    private boolean debug;

    // One-character lookahead into the source.
    private char currentChar;
    // Spelling of the token currently being assembled by takeIt().
    private StringBuffer currentSpelling;
    // True while the characters being consumed belong to a token (so takeIt()
    // records them); false while skipping separators and comments.
    private boolean currentlyScanningToken;

    // True iff c is an ASCII letter. (original comment translated: "is a letter")
    private boolean isLetter(char c) {
        return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
    }

    // True iff c is a decimal digit. (original comment translated: "is a digit")
    private boolean isDigit(char c) {
        return (c >= '0' && c <= '9');
    }

    // isOperator returns true iff the given character is an operator character.
    // (original comment translated: "is an operator")
    private boolean isOperator(char c) {
        return (c == '+' || c == '-' || c == '*' || c == '/' || c == '='
                || c == '<' || c == '>' || c == '\\' || c == '&' || c == '@'
                || c == '%' || c == '^' || c == '?');
    }

    // /////////////////////////////////////////////////////////////////////////////

    /** Creates a scanner over the given source and primes the one-character lookahead. */
    public Scanner(SourceFile source) {
        sourceFile = source;
        currentChar = sourceFile.getSource();
        debug = false;
    }

    /** Turns on per-token tracing in scan(). */
    public void enableDebugging() {
        debug = true;
    }

    // takeIt appends the current character to the current token, and gets
    // the next character from the source program.
    private void takeIt() {
        if (currentlyScanningToken)
            currentSpelling.append(currentChar);
        currentChar = sourceFile.getSource();
    }

    // scanSeparator skips a single separator: a '!' line comment (through the
    // end of the line) or one whitespace character.
    private void scanSeparator() {
        switch (currentChar) {
        case '!': {
            takeIt();
            while ((currentChar != SourceFile.EOL)
                    && (currentChar != SourceFile.EOT))
                takeIt();
            if (currentChar == SourceFile.EOL)
                takeIt();
        }
            break;
        case ' ':
        case '\n':
        case '\r':
        case '\t':
            takeIt();
            break;
        }
    }

    // Consumes the characters of one token and returns its kind (a Token
    // constant), dispatching on the first character.
    private int scanToken() {
        switch (currentChar) {
        // A letter starts an identifier (or keyword, resolved later):
        // letter (letter | digit)*
        case 'a':
        case 'b':
        case 'c':
        case 'd':
        case 'e':
        case 'f':
        case 'g':
        case 'h':
        case 'i':
        case 'j':
        case 'k':
        case 'l':
        case 'm':
        case 'n':
        case 'o':
        case 'p':
        case 'q':
        case 'r':
        case 's':
        case 't':
        case 'u':
        case 'v':
        case 'w':
        case 'x':
        case 'y':
        case 'z':
        case 'A':
        case 'B':
        case 'C':
        case 'D':
        case 'E':
        case 'F':
        case 'G':
        case 'H':
        case 'I':
        case 'J':
        case 'K':
        case 'L':
        case 'M':
        case 'N':
        case 'O':
        case 'P':
        case 'Q':
        case 'R':
        case 'S':
        case 'T':
        case 'U':
        case 'V':
        case 'W':
        case 'X':
        case 'Y':
        case 'Z':
            takeIt();
            while (isLetter(currentChar) || isDigit(currentChar))
                takeIt();
            return Token.IDENTIFIER;

        // A digit starts an integer literal: digit digit*
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
            takeIt();
            while (isDigit(currentChar))
                takeIt();
            return Token.INTLITERAL;

        // An operator character starts an operator token: op-char op-char*
        case '+':
        case '-':
        case '*':
        case '/':
        case '=':
        case '<':
        case '>':
        case '\\':
        case '&':
        case '@':
        case '%':
        case '^':
        case '?':
            takeIt();
            while (isOperator(currentChar))
                takeIt();
            return Token.OPERATOR;

        // Character literal: opening quote, exactly one character, closing
        // quote. (original comment translated: "quotes — escaping handled here")
        case '\'':
            takeIt();
            takeIt(); // the quoted character
            if (currentChar == '\'') {
                takeIt();
                return Token.CHARLITERAL;
            } else
                return Token.ERROR;

        // Single-character punctuation; ':' may combine with '=' into ':='.
        case '.':
            takeIt();
            return Token.DOT;
        case ':':
            takeIt();
            if (currentChar == '=') {
                takeIt();
                return Token.BECOMES;
            } else
                return Token.COLON;
        case ';':
            takeIt();
            return Token.SEMICOLON;
        case ',':
            takeIt();
            return Token.COMMA;
        case '~':
            takeIt();
            return Token.IS;
        case '(':
            takeIt();
            return Token.LPAREN;
        case ')':
            takeIt();
            return Token.RPAREN;
        case '[':
            takeIt();
            return Token.LBRACKET;
        case ']':
            takeIt();
            return Token.RBRACKET;
        case '{':
            takeIt();
            return Token.LCURLY;
        case '}':
            takeIt();
            return Token.RCURLY;
        case SourceFile.EOT:
            return Token.EOT;
        default:
            takeIt();
            return Token.ERROR;
        }
    }

    /**
     * Skips any separators, then scans one token and returns it together with
     * its spelling and its start/finish line positions.
     */
    public Token scan() {
        Token tok;
        SourcePosition pos;
        int kind;
        // Separator characters are consumed without recording their spelling.
        currentlyScanningToken = false;
        while (currentChar == '!' || currentChar == ' ' || currentChar == '\n'
                || currentChar == '\r' || currentChar == '\t')
            scanSeparator();
        // From here on every consumed character belongs to the token.
        currentlyScanningToken = true;
        currentSpelling = new StringBuffer("");
        pos = new SourcePosition();
        pos.start = sourceFile.getCurrentLine();
        kind = scanToken();
        pos.finish = sourceFile.getCurrentLine();
        tok = new Token(kind, currentSpelling.toString(), pos);
        if (debug)
            System.out.println(tok);
        return tok;
    }
}
| apache-2.0 |
pivotal-amurmann/geode | geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ShutdownCommand.java | 8894 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.logging.log4j.Logger;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.GfshParseResult;
import org.apache.geode.management.internal.cli.functions.ShutDownFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;
/**
 * Implements the gfsh {@code shutdown} command: stops all data members, then
 * (optionally) every locator, and finally the member hosting the JMX manager,
 * spending a single user-supplied time budget across the successive phases.
 */
public class ShutdownCommand implements GfshCommand {
    // Minimum and default time budget for the whole shutdown, in seconds.
    private static final String DEFAULT_TIME_OUT = "10";
    private static final Logger logger = LogService.getLogger();

    /**
     * Command entry point.
     *
     * @param userSpecifiedTimeout total time budget in seconds; values below the
     *        10-second default are rejected with an informational result
     * @param shutdownLocators when true, locators (including the manager, if it
     *        is a locator) are shut down after the data members
     */
    @CliCommand(value = CliStrings.SHUTDOWN, help = CliStrings.SHUTDOWN__HELP)
    @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_LIFECYCLE},
        interceptor = "org.apache.geode.management.internal.cli.commands.ShutdownCommand$ShutdownCommandInterceptor")
    @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
        operation = ResourcePermission.Operation.MANAGE)
    public Result shutdown(
        @CliOption(key = CliStrings.SHUTDOWN__TIMEOUT, unspecifiedDefaultValue = DEFAULT_TIME_OUT,
            help = CliStrings.SHUTDOWN__TIMEOUT__HELP) int userSpecifiedTimeout,
        @CliOption(key = CliStrings.INCLUDE_LOCATORS, unspecifiedDefaultValue = "false",
            help = CliStrings.INCLUDE_LOCATORS_HELP) boolean shutdownLocators) {
    try {
      if (userSpecifiedTimeout < Integer.parseInt(DEFAULT_TIME_OUT)) {
        return ResultBuilder.createInfoResult(CliStrings.SHUTDOWN__MSG__IMPROPER_TIMEOUT);
      }

      // convert to milliseconds
      // NOTE(review): int * 1000 is evaluated in int arithmetic; extremely
      // large user values would overflow before widening to long.
      long timeout = userSpecifiedTimeout * 1000;

      InternalCache cache = getCache();

      // Split the membership into locators and "normal" (data) members.
      int numDataNodes = CliUtil.getAllNormalMembers(cache).size();

      Set<DistributedMember> locators = CliUtil.getAllMembers(cache);

      Set<DistributedMember> dataNodes = CliUtil.getAllNormalMembers(cache);

      locators.removeAll(dataNodes);

      if (!shutdownLocators && numDataNodes == 0) {
        return ResultBuilder.createInfoResult(CliStrings.SHUTDOWN__MSG__NO_DATA_NODE_FOUND);
      }

      // The manager (this JMX host) is always stopped last, so exclude it from
      // the data-node phase.
      String managerName = cache.getJmxManagerAdvisor().getDistributionManager().getId().getId();

      final DistributedMember manager = CliUtil.getDistributedMemberByNameOrId(managerName);

      dataNodes.remove(manager);

      // shut down all data members excluding this manager if manager is a data node
      long timeElapsed = shutDownNodeWithTimeOut(timeout, dataNodes);
      timeout = timeout - timeElapsed;

      // shut down locators one by one
      if (shutdownLocators) {
        if (manager == null) {
          return ResultBuilder.createUserErrorResult(CliStrings.SHUTDOWN__MSG__MANAGER_NOT_FOUND);
        }

        // remove current locator as that would get shutdown last
        if (locators.contains(manager)) {
          locators.remove(manager);
        }

        for (DistributedMember locator : locators) {
          Set<DistributedMember> lsSet = new HashSet<>();
          lsSet.add(locator);
          long elapsedTime = shutDownNodeWithTimeOut(timeout, lsSet);
          timeout = timeout - elapsedTime;
        }
      }

      if (locators.contains(manager) && !shutdownLocators) { // This means manager is a locator and
        // shutdownLocators is false. Hence we
        // should not stop the manager
        return ResultBuilder.createInfoResult("Shutdown is triggered");
      }
      // now shut down this manager
      Set<DistributedMember> mgrSet = new HashSet<>();
      mgrSet.add(manager);
      // No need to check further timeout as this is the last node we will be
      // shutting down
      shutDownNodeWithTimeOut(timeout, mgrSet);
    } catch (TimeoutException tex) {
      return ResultBuilder.createInfoResult(CliStrings.SHUTDOWN_TIMEDOUT);
    } catch (Exception ex) {
      // NOTE(review): consider logging instead of printStackTrace here.
      ex.printStackTrace();
      return ResultBuilder.createUserErrorResult(ex.getMessage());
    }
    // @TODO. List all the nodes which could be successfully shutdown
    return ResultBuilder.createInfoResult("Shutdown is triggered");
  }

  /**
   * @param timeout user specified timeout
   * @param nodesToBeStopped list of nodes to be stopped
   * @return Elapsed time to shutdown the given nodes;
   */
  private long shutDownNodeWithTimeOut(long timeout, Set<DistributedMember> nodesToBeStopped)
      throws TimeoutException, InterruptedException, ExecutionException {

    long shutDownTimeStart = System.currentTimeMillis();
    shutdownNode(timeout, nodesToBeStopped);

    long shutDownTimeEnd = System.currentTimeMillis();

    long timeElapsed = shutDownTimeEnd - shutDownTimeStart;

    if (timeElapsed > timeout || Boolean.getBoolean("ThrowTimeoutException")) {
      // The second check for ThrowTimeoutException is a test hook
      throw new TimeoutException();
    }
    return timeElapsed;
  }

  /**
   * Executes the ShutDownFunction on the given members from a dedicated
   * single-thread executor, bounded by the remaining timeout.
   */
  private void shutdownNode(final long timeout, final Set<DistributedMember> includeMembers)
      throws TimeoutException, InterruptedException, ExecutionException {
    ExecutorService exec = Executors.newSingleThreadExecutor();
    try {
      final Function shutDownFunction = new ShutDownFunction();
      logger.info("Gfsh executing shutdown on members " + includeMembers);

      Callable<String> shutdownNodes = () -> {
        try {
          Execution execution = FunctionService.onMembers(includeMembers);
          execution.execute(shutDownFunction);
        } catch (FunctionException functionEx) {
          // Expected Exception as the function is shutting down the target members and the result
          // collector will get member departed exception
        }
        return "SUCCESS";
      };

      Future<String> result = exec.submit(shutdownNodes);
      result.get(timeout, TimeUnit.MILLISECONDS);
    } catch (TimeoutException te) {
      logger.error("TimeoutException in shutting down members." + includeMembers);
      throw te;
    } catch (InterruptedException e) {
      logger.error("InterruptedException in shutting down members." + includeMembers);
      throw e;
    } catch (ExecutionException e) {
      logger.error("ExecutionException in shutting down members." + includeMembers);
      throw e;
    } finally {
      exec.shutdownNow();
    }
  }

  /** Asks the user to confirm the shutdown before the command runs (skipped under the test flag). */
  public static class ShutdownCommandInterceptor extends AbstractCliAroundInterceptor {

    @Override
    public Result preExecution(GfshParseResult parseResult) {

      // This hook is for testing purpose only.
      if (Boolean.getBoolean(CliStrings.IGNORE_INTERCEPTORS)) {
        return ResultBuilder.createInfoResult(CliStrings.SHUTDOWN__MSG__SHUTDOWN_ENTIRE_DS);
      }

      Response response = readYesNo(CliStrings.SHUTDOWN__MSG__WARN_USER, Response.YES);
      if (response == Response.NO) {
        return ResultBuilder
            .createShellClientAbortOperationResult(CliStrings.SHUTDOWN__MSG__ABORTING_SHUTDOWN);
      } else {
        return ResultBuilder.createInfoResult(CliStrings.SHUTDOWN__MSG__SHUTDOWN_ENTIRE_DS);
      }
    }
  }
}
| apache-2.0 |
SammysHP/cgeo | main/src/cgeo/geocaching/filter/AttributeFilter.java | 2316 | package cgeo.geocaching.filter;
import cgeo.geocaching.CgeoApplication;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.R;
import org.eclipse.jdt.annotation.NonNull;
import android.content.res.Resources;
import android.os.Parcel;
import android.os.Parcelable;
import java.util.LinkedList;
import java.util.List;
/** Cache-list filter that keeps only geocaches carrying a specific attribute. */
class AttributeFilter extends AbstractFilter {

    // Internal id of the cache attribute this filter matches.
    private final String attribute;

    public AttributeFilter(@NonNull final String name, final String attribute) {
        super(name);
        this.attribute = attribute;
    }

    // Parcelable round-trip constructor: the superclass state is restored
    // first, then the attribute id (matching the order in writeToParcel).
    protected AttributeFilter(final Parcel in) {
        super(in);
        attribute = in.readString();
    }

    // Resolves a localized display name for an attribute id; falls back to
    // the raw id when no matching string resource exists.
    private static String getName(final String attribute, final Resources res, final String packageName) {
        // dynamically search for a translation of the attribute
        final int id = res.getIdentifier(attribute, "string", packageName);
        return id > 0 ? res.getString(id) : attribute;
    }

    // A cache is accepted when its attribute list contains this attribute id.
    @Override
    public boolean accepts(@NonNull final Geocache cache) {
        return cache.getAttributes().contains(attribute);
    }

    /** Builds one AttributeFilter per attribute id declared in the app resources. */
    public static class Factory implements IFilterFactory {

        @Override
        @NonNull
        public List<IFilter> getFilters() {
            final String packageName = CgeoApplication.getInstance().getBaseContext().getPackageName();
            final Resources res = CgeoApplication.getInstance().getResources();
            final List<IFilter> filters = new LinkedList<>();
            for (final String id: res.getStringArray(R.array.attribute_ids)) {
                // Display names use the "attribute_<id>" string resources.
                filters.add(new AttributeFilter(getName("attribute_" + id, res, packageName), id));
            }
            return filters;
        }

    }

    @Override
    public void writeToParcel(final Parcel dest, final int flags) {
        super.writeToParcel(dest, flags);
        dest.writeString(attribute);
    }

    // Parcelable boilerplate.
    public static final Creator<AttributeFilter> CREATOR
            = new Parcelable.Creator<AttributeFilter>() {

        @Override
        public AttributeFilter createFromParcel(final Parcel in) {
            return new AttributeFilter(in);
        }

        @Override
        public AttributeFilter[] newArray(final int size) {
            return new AttributeFilter[size];
        }
    };
}
| apache-2.0 |
beckje01/devoxx-hands-on-ratpack-java | lab-06/src/main/java/lab06/Lab06.java | 523 | package lab06;
import ratpack.guice.Guice;
import ratpack.server.BaseDir;
import ratpack.server.RatpackServer;
public class Lab06 {

    /**
     * Boots a Ratpack server for the lab skeleton: the base dir is located on
     * the classpath, the registry is Guice-backed (no modules bound yet) and
     * the handler chain is empty — both are meant to be filled in by the lab.
     */
    public static void main(String[] args) throws Exception {
        RatpackServer.start(ratpackServerSpec -> {
            ratpackServerSpec
                .serverConfig(s -> s.baseDir(BaseDir.find()))
                .registry(Guice.registry(bindingsSpec -> {
                    //Add any needed modules
                }))
                .handlers(chain -> {
                    //Add handlers
                });
            }
        );
    }
}
| apache-2.0 |
huihoo/olat | olat7.8/src/main/legacy/de/bps/olat/modules/cl/ChecklistFilter.java | 673 | /**
*
* BPS Bildungsportal Sachsen GmbH<br>
* Bahnhofstrasse 6<br>
* 09111 Chemnitz<br>
* Germany<br>
*
* Copyright (c) 2005-2009 by BPS Bildungsportal Sachsen GmbH<br>
* http://www.bps-system.de<br>
*
* All rights reserved.
*/
package de.bps.olat.modules.cl;
import java.util.List;
/**
* Description:<br>
* TODO: bja Class Description for ChecklistFilter
*
* <P>
* Initial Date: 23.07.2009 <br>
* @author bja <bja@bps-system.de>
*/
public class ChecklistFilter {

    // Title the checklist entries are filtered by.
    private String title;
    // Identity keys the result set is restricted to.
    private List<Long> identityIds;

    /**
     * Creates a checklist filter.
     *
     * @param title title to filter by
     * @param identityIds identity keys to restrict the result to
     */
    public ChecklistFilter(String title, List<Long> identityIds) {
        this.title = title;
        this.identityIds = identityIds;
    }

    /**
     * Accessor added: the fields were previously write-only, making the
     * filter's state unreachable after construction.
     *
     * @return the title this filter matches
     */
    public String getTitle() {
        return title;
    }

    /** @return the identity keys this filter restricts the result to. */
    public List<Long> getIdentityIds() {
        return identityIds;
    }
}
| apache-2.0 |
gamerson/blade | test-resources/projects/tasks-plugins-sdk/portlets/tasks-portlet/docroot/WEB-INF/service/com/liferay/tasks/service/persistence/TasksEntryFinderUtil.java | 2278 | /**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.tasks.service.persistence;
import aQute.bnd.annotation.ProviderType;
import com.liferay.portal.kernel.bean.PortletBeanLocatorUtil;
import com.liferay.portal.kernel.util.ReferenceRegistry;
/**
* @author Ryan Park
* @generated
*/
@ProviderType
public class TasksEntryFinderUtil {
public static int countByG_P_A_R_S_T_N(long groupId, int priority,
long assigneeUserId, long reporterUserId, int status,
long[] assetTagIds, long[] notAssetTagIds)
throws com.liferay.portal.kernel.exception.SystemException {
return getFinder()
.countByG_P_A_R_S_T_N(groupId, priority, assigneeUserId,
reporterUserId, status, assetTagIds, notAssetTagIds);
}
public static java.util.List<com.liferay.tasks.model.TasksEntry> findByG_P_A_R_S_T_N(
long groupId, int priority, long assigneeUserId, long reporterUserId,
int status, long[] assetTagIds, long[] notAssetTagIds, int start,
int end) throws com.liferay.portal.kernel.exception.SystemException {
return getFinder()
.findByG_P_A_R_S_T_N(groupId, priority, assigneeUserId,
reporterUserId, status, assetTagIds, notAssetTagIds, start, end);
}
public static TasksEntryFinder getFinder() {
if (_finder == null) {
_finder = (TasksEntryFinder)PortletBeanLocatorUtil.locate(com.liferay.tasks.service.ClpSerializer.getServletContextName(),
TasksEntryFinder.class.getName());
ReferenceRegistry.registerReference(TasksEntryFinderUtil.class,
"_finder");
}
return _finder;
}
public void setFinder(TasksEntryFinder finder) {
_finder = finder;
ReferenceRegistry.registerReference(TasksEntryFinderUtil.class,
"_finder");
}
private static TasksEntryFinder _finder;
} | apache-2.0 |
msebire/intellij-community | jps/model-api/src/org/jetbrains/jps/model/module/JpsModuleSourceRootType.java | 1462 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.model.module;
import org.jetbrains.jps.model.JpsElement;
import org.jetbrains.jps.model.JpsElementType;
import org.jetbrains.jps.model.JpsElementTypeWithDefaultProperties;
/**
 * Represents a type of source roots of modules in JPS model.
 *
 * <p>
 * Use {@link org.jetbrains.jps.model.ex.JpsElementTypeBase} as a base class for implementations of this interface
 * </p>
 * @author nik
 */
public interface JpsModuleSourceRootType<P extends JpsElement> extends JpsElementType<P>, JpsElementTypeWithDefaultProperties<P> {
  /**
   * Returns {@code true} if roots of this type are supposed to contain test sources only. This information is used by the IDE to show files
   * accordingly, process them during analysis only if 'Include test source' option is enabled, etc.
   *
   * <p>The default implementation returns {@code false}, i.e. roots of this type are treated as production sources.</p>
   */
  default boolean isForTests() {
    return false;
  }
}
| apache-2.0 |
vivantech/kc_fixes | src/main/java/org/kuali/kra/iacuc/correspondence/IacucBatchCorrespondenceDetailService.java | 850 | /*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.iacuc.correspondence;
import org.kuali.kra.protocol.correspondence.BatchCorrespondenceDetailService;
/**
 * IACUC-specific marker extension of {@link BatchCorrespondenceDetailService}. Declares no
 * additional operations; it exists so an IACUC-specific implementation can be bound by the
 * container separately from other protocol types.
 */
public interface IacucBatchCorrespondenceDetailService extends BatchCorrespondenceDetailService {

}
| apache-2.0 |
bazelbuild/bazel | src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewTestCase.java | 106468 | // Copyright 2019 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.util;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.MoreCollectors.onlyElement;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.devtools.build.lib.actions.util.ActionsTestUtil.getFirstArtifactEndingWith;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.fail;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionExecutionContext.LostInputsCheck;
import com.google.devtools.build.lib.actions.ActionGraph;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionKeyContext;
import com.google.devtools.build.lib.actions.ActionLogBufferPathGenerator;
import com.google.devtools.build.lib.actions.ActionLookupKey;
import com.google.devtools.build.lib.actions.ActionLookupValue;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
import com.google.devtools.build.lib.actions.Artifact.DerivedArtifact;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact;
import com.google.devtools.build.lib.actions.ArtifactOwner;
import com.google.devtools.build.lib.actions.ArtifactPathResolver;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.actions.CommandAction;
import com.google.devtools.build.lib.actions.CommandLine;
import com.google.devtools.build.lib.actions.CommandLineExpansionException;
import com.google.devtools.build.lib.actions.CommandLines;
import com.google.devtools.build.lib.actions.CommandLines.CommandLineAndParamFileInfo;
import com.google.devtools.build.lib.actions.DiscoveredModulesPruner;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.actions.MapBasedActionGraph;
import com.google.devtools.build.lib.actions.MetadataProvider;
import com.google.devtools.build.lib.actions.MiddlemanFactory;
import com.google.devtools.build.lib.actions.MutableActionGraph;
import com.google.devtools.build.lib.actions.ParameterFile;
import com.google.devtools.build.lib.actions.ThreadStateReceiver;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.actions.util.DummyExecutor;
import com.google.devtools.build.lib.analysis.AnalysisEnvironment;
import com.google.devtools.build.lib.analysis.AnalysisOptions;
import com.google.devtools.build.lib.analysis.AnalysisResult;
import com.google.devtools.build.lib.analysis.AnalysisUtils;
import com.google.devtools.build.lib.analysis.AspectValue;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.CachingAnalysisEnvironment;
import com.google.devtools.build.lib.analysis.ConfiguredAspect;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.DependencyResolver.Failure;
import com.google.devtools.build.lib.analysis.ExtraActionArtifactsProvider;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.InconsistentAspectOrderException;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.PseudoAction;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.RunfilesSupport;
import com.google.devtools.build.lib.analysis.ServerDirectories;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.TransitiveInfoProvider;
import com.google.devtools.build.lib.analysis.WorkspaceStatusAction;
import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoKey;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.BuildOptionsView;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.analysis.config.transitions.NoTransition;
import com.google.devtools.build.lib.analysis.config.transitions.NullTransition;
import com.google.devtools.build.lib.analysis.config.transitions.PatchTransition;
import com.google.devtools.build.lib.analysis.configuredtargets.FileConfiguredTarget;
import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget;
import com.google.devtools.build.lib.analysis.extra.ExtraAction;
import com.google.devtools.build.lib.analysis.starlark.StarlarkTransition;
import com.google.devtools.build.lib.analysis.starlark.StarlarkTransition.TransitionException;
import com.google.devtools.build.lib.analysis.test.BaselineCoverageAction;
import com.google.devtools.build.lib.analysis.test.InstrumentedFilesInfo;
import com.google.devtools.build.lib.buildtool.BuildRequestOptions;
import com.google.devtools.build.lib.clock.BlazeClock;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelConstants;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.events.StoredEventHandler;
import com.google.devtools.build.lib.exec.ExecutionOptions;
import com.google.devtools.build.lib.packages.AspectClass;
import com.google.devtools.build.lib.packages.AspectDescriptor;
import com.google.devtools.build.lib.packages.AspectParameters;
import com.google.devtools.build.lib.packages.AttributeMap;
import com.google.devtools.build.lib.packages.ConfiguredAttributeMapper;
import com.google.devtools.build.lib.packages.ConstantRuleVisibility;
import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SafeImplicitOutputsFunction;
import com.google.devtools.build.lib.packages.NativeAspectClass;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.OutputFile;
import com.google.devtools.build.lib.packages.PackageFactory;
import com.google.devtools.build.lib.packages.PackageFactory.EnvironmentExtension;
import com.google.devtools.build.lib.packages.PackageOverheadEstimator;
import com.google.devtools.build.lib.packages.PackageValidator;
import com.google.devtools.build.lib.packages.RawAttributeMapper;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions;
import com.google.devtools.build.lib.packages.util.MockToolsConfig;
import com.google.devtools.build.lib.pkgcache.LoadingOptions;
import com.google.devtools.build.lib.pkgcache.PackageManager;
import com.google.devtools.build.lib.pkgcache.PackageOptions;
import com.google.devtools.build.lib.pkgcache.PathPackageLocator;
import com.google.devtools.build.lib.rules.repository.RepositoryDelegatorFunction;
import com.google.devtools.build.lib.rules.repository.RepositoryDirectoryDirtinessChecker;
import com.google.devtools.build.lib.skyframe.AspectKeyCreator;
import com.google.devtools.build.lib.skyframe.AspectKeyCreator.AspectKey;
import com.google.devtools.build.lib.skyframe.BazelSkyframeExecutorConstants;
import com.google.devtools.build.lib.skyframe.BuildConfigurationKey;
import com.google.devtools.build.lib.skyframe.BuildInfoCollectionFunction;
import com.google.devtools.build.lib.skyframe.BzlLoadFunction;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.skyframe.DiffAwareness;
import com.google.devtools.build.lib.skyframe.ManagedDirectoriesKnowledge;
import com.google.devtools.build.lib.skyframe.PackageFunction;
import com.google.devtools.build.lib.skyframe.PackageRootsNoSymlinkCreation;
import com.google.devtools.build.lib.skyframe.PrecomputedValue;
import com.google.devtools.build.lib.skyframe.SequencedSkyframeExecutor;
import com.google.devtools.build.lib.skyframe.SkyFunctions;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.skyframe.SkyframeExecutorRepositoryHelpersHolder;
import com.google.devtools.build.lib.skyframe.StarlarkBuiltinsValue;
import com.google.devtools.build.lib.skyframe.TargetPatternPhaseValue;
import com.google.devtools.build.lib.testutil.FoundationTestCase;
import com.google.devtools.build.lib.testutil.SkyframeExecutorTestHelper;
import com.google.devtools.build.lib.testutil.TestConstants;
import com.google.devtools.build.lib.util.AbruptExitException;
import com.google.devtools.build.lib.util.StringUtil;
import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.vfs.SyscallCache;
import com.google.devtools.build.skyframe.ErrorInfo;
import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator;
import com.google.devtools.build.skyframe.MemoizingEvaluator;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionName;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.common.options.Options;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.OptionsParsingException;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import net.starlark.java.eval.StarlarkSemantics;
import org.junit.Before;
/** Common test code that creates a BuildView instance. */
public abstract class BuildViewTestCase extends FoundationTestCase {
  protected static final int LOADING_PHASE_THREADS = 20;

  // Product-specific analysis mock (rules, mock tools, package-factory helpers).
  protected AnalysisMock analysisMock;
  protected ConfiguredRuleClassProvider ruleClassProvider;
  protected BuildViewForTesting view;
  protected SequencedSkyframeExecutor skyframeExecutor;
  protected TimestampGranularityMonitor tsgm;
  protected BlazeDirectories directories;
  protected ActionKeyContext actionKeyContext;

  // Note that these configurations are virtual (they use only VFS)
  protected BuildConfigurationCollection masterConfig;
  protected BuildConfigurationValue targetConfig; // "target" or "build" config
  // Arguments used to build the current configurations; replayed by invalidatePackages().
  private List<String> configurationArgs;

  protected OptionsParser optionsParser;
  private PackageOptions packageOptions;
  private BuildLanguageOptions buildLanguageOptions;
  protected PackageFactory pkgFactory;
  protected MockToolsConfig mockToolsConfig;
  protected WorkspaceStatusAction.Factory workspaceStatusActionFactory;
  private MutableActionGraph mutableActionGraph;
  private LoadingOptions customLoadingOptions = null;
  protected BuildConfigurationKey targetConfigKey;

  private ActionLogBufferPathGenerator actionLogBufferPathGenerator;
/** JUnit setUp entry point; runs the full initialization with package-loading checks on. */
@Before
public final void initializeSkyframeExecutor() throws Exception {
  initializeSkyframeExecutor(/*doPackageLoadingChecks=*/ true);
}

/** Variant without any {@link DiffAwareness} factories. */
public void initializeSkyframeExecutor(boolean doPackageLoadingChecks) throws Exception {
  initializeSkyframeExecutor(
      /*doPackageLoadingChecks=*/ doPackageLoadingChecks,
      /*diffAwarenessFactories=*/ ImmutableList.of());
}
/**
 * Initializes the Skyframe executor, package factory, mock tools, and default build
 * configuration used by this test. Statement order matters: mocks and options are set up
 * before the package factory, which is needed to build the executor, which in turn must exist
 * before {@code useConfiguration()}/{@code setUpSkyframe()} run.
 *
 * @param doPackageLoadingChecks whether the package factory performs its package-loading
 *     sanity checks (some loading-phase tests disable them)
 * @param diffAwarenessFactories factories Skyframe uses to detect file-system changes
 */
public void initializeSkyframeExecutor(
    boolean doPackageLoadingChecks, ImmutableList<DiffAwareness.Factory> diffAwarenessFactories)
    throws Exception {
  analysisMock = getAnalysisMock();
  directories =
      new BlazeDirectories(
          new ServerDirectories(rootDirectory, outputBase, outputBase),
          rootDirectory,
          /* defaultSystemJavabase= */ null,
          analysisMock.getProductName());
  actionKeyContext = new ActionKeyContext();
  mockToolsConfig = new MockToolsConfig(rootDirectory, false);
  analysisMock.setupMockToolsRepository(mockToolsConfig);
  initializeMockClient();

  packageOptions = parsePackageOptions();
  buildLanguageOptions = parseBuildLanguageOptions();
  workspaceStatusActionFactory = new AnalysisTestUtil.DummyWorkspaceStatusActionFactory();
  mutableActionGraph = new MapBasedActionGraph(actionKeyContext);
  ruleClassProvider = createRuleClassProvider();
  getOutputPath().createDirectoryAndParents();

  // Injected both into Skyframe (below) and into the testing package factory, so both see the
  // same precomputed values.
  ImmutableList<PrecomputedValue.Injected> extraPrecomputedValues =
      ImmutableList.<PrecomputedValue.Injected>builder()
          .add(
              PrecomputedValue.injected(
                  PrecomputedValue.STARLARK_SEMANTICS, StarlarkSemantics.DEFAULT))
          .add(PrecomputedValue.injected(PrecomputedValue.REPO_ENV, ImmutableMap.of()))
          .add(
              PrecomputedValue.injected(
                  RepositoryDelegatorFunction.REPOSITORY_OVERRIDES, ImmutableMap.of()))
          .add(
              PrecomputedValue.injected(
                  RepositoryDelegatorFunction.RESOLVED_FILE_INSTEAD_OF_WORKSPACE,
                  Optional.empty()))
          .add(
              PrecomputedValue.injected(
                  RepositoryDelegatorFunction.DEPENDENCY_FOR_UNCONDITIONAL_FETCHING,
                  RepositoryDelegatorFunction.DONT_FETCH_UNCONDITIONALLY))
          .add(
              PrecomputedValue.injected(
                  RepositoryDelegatorFunction.ENABLE_BZLMOD, enableBzlmod()))
          .add(
              PrecomputedValue.injected(
                  BuildInfoCollectionFunction.BUILD_INFO_FACTORIES,
                  ruleClassProvider.getBuildInfoFactoriesAsMap()))
          .addAll(extraPrecomputedValues())
          .build();
  PackageFactory.BuilderForTesting pkgFactoryBuilder =
      analysisMock
          .getPackageFactoryBuilderForTesting(directories)
          .setExtraPrecomputeValues(extraPrecomputedValues)
          .setEnvironmentExtensions(getEnvironmentExtensions())
          .setPackageValidator(getPackageValidator())
          .setPackageOverheadEstimator(getPackageOverheadEstimator());
  if (!doPackageLoadingChecks) {
    pkgFactoryBuilder.disableChecks();
  }
  pkgFactory = pkgFactoryBuilder.build(ruleClassProvider, fileSystem);

  tsgm = new TimestampGranularityMonitor(BlazeClock.instance());

  SequencedSkyframeExecutor.Builder builder =
      BazelSkyframeExecutorConstants.newBazelSkyframeExecutorBuilder()
          .setPkgFactory(pkgFactory)
          .setFileSystem(fileSystem)
          .setDirectories(directories)
          .setActionKeyContext(actionKeyContext)
          .setWorkspaceStatusActionFactory(workspaceStatusActionFactory)
          .setExtraSkyFunctions(analysisMock.getSkyFunctions(directories))
          .setPerCommandSyscallCache(SyscallCache.NO_CACHE)
          .setDiffAwarenessFactories(diffAwarenessFactories);
  // Only install repository helpers when a subclass actually provides managed-directory
  // knowledge (the default implementation returns null).
  ManagedDirectoriesKnowledge managedDirectoriesKnowledge = getManagedDirectoriesKnowledge();
  if (managedDirectoriesKnowledge != null) {
    builder.setRepositoryHelpersHolder(
        SkyframeExecutorRepositoryHelpersHolder.create(
            managedDirectoriesKnowledge,
            new RepositoryDirectoryDirtinessChecker(
                directories.getWorkspace(), managedDirectoriesKnowledge)));
  }
  skyframeExecutor = builder.build();
  if (usesInliningBzlLoadFunction()) {
    injectInliningBzlLoadFunction(skyframeExecutor, pkgFactory, directories);
  }
  SkyframeExecutorTestHelper.process(skyframeExecutor);
  skyframeExecutor.injectExtraPrecomputedValues(extraPrecomputedValues);
  packageOptions.defaultVisibility = ConstantRuleVisibility.PUBLIC;
  packageOptions.showLoadingProgress = true;
  packageOptions.globbingThreads = 7;
  skyframeExecutor.preparePackageLoading(
      new PathPackageLocator(
          outputBase,
          ImmutableList.of(root),
          BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY),
      packageOptions,
      buildLanguageOptions,
      UUID.randomUUID(),
      ImmutableMap.of(),
      tsgm);
  skyframeExecutor.setActionEnv(ImmutableMap.of());
  useConfiguration();
  setUpSkyframe();
  this.actionLogBufferPathGenerator =
      new ActionLogBufferPathGenerator(
          directories.getActionTempsDirectory(getExecRoot()),
          directories.getPersistentActionOutsDirectory(getExecRoot()));
}
/**
 * Replaces PackageFunction's bzl loading with an inlining {@link BzlLoadFunction}, for tests
 * that exercise the inlined code path (see {@link #usesInliningBzlLoadFunction}).
 */
private static void injectInliningBzlLoadFunction(
    SkyframeExecutor skyframeExecutor,
    PackageFactory packageFactory,
    BlazeDirectories directories) {
  ImmutableMap<SkyFunctionName, SkyFunction> skyFunctions =
      ((InMemoryMemoizingEvaluator) skyframeExecutor.getEvaluator()).getSkyFunctionsForTesting();
  BzlLoadFunction bzlLoadFunction =
      BzlLoadFunction.createForInlining(
          packageFactory,
          directories,
          // Use a cache size of 2 for testing to balance coverage for where loads are present and
          // aren't present in the cache.
          /*bzlLoadValueCacheSize=*/ 2);
  bzlLoadFunction.resetInliningCache();
  // This doesn't override the BZL_LOAD -> BzlLoadFunction mapping, but nothing besides
  // PackageFunction should be requesting that key while using the inlining code path.
  ((PackageFunction) skyFunctions.get(SkyFunctions.PACKAGE))
      .setBzlLoadFunctionForInliningForTesting(bzlLoadFunction);
}
/**
 * Returns whether or not to use the inlined version of BzlLoadFunction in this test.
 *
 * @see BzlLoadFunction#computeInline
 */
protected boolean usesInliningBzlLoadFunction() {
  return false;
}

/** Returns whether or not to enable Bzlmod in this test. */
protected boolean enableBzlmod() {
  return false;
}

/**
 * Returns extra precomputed values to inject, both into Skyframe and the testing package loaders.
 */
protected ImmutableList<PrecomputedValue.Injected> extraPrecomputedValues() {
  return ImmutableList.of();
}

/** Sets up the mock client workspace and embedded binaries; overridable by subclasses. */
protected void initializeMockClient() throws IOException {
  analysisMock.setupMockClient(mockToolsConfig);
  analysisMock.setupMockWorkspaceFiles(directories.getEmbeddedBinariesRoot());
}

/** Returns the product-specific {@link AnalysisMock}; overridable by subclasses. */
protected AnalysisMock getAnalysisMock() {
  return AnalysisMock.get();
}
/**
 * Called to create the rule class provider used in this test.
 *
 * <p>This function is called only once. (Multiple calls could lead to subtle identity bugs
 * between native objects.)
 */
protected ConfiguredRuleClassProvider createRuleClassProvider() {
  return getAnalysisMock().createRuleClassProvider();
}

/** Returns the rule class provider created during setUp. */
protected final ConfiguredRuleClassProvider getRuleClassProvider() {
  return ruleClassProvider;
}

/** Returns managed-directory knowledge, or null (the default) to disable repository helpers. */
protected ManagedDirectoriesKnowledge getManagedDirectoriesKnowledge() {
  return null;
}

/** Returns the package factory created during setUp. */
protected final PackageFactory getPackageFactory() {
  return pkgFactory;
}

/** Returns extra package environment extensions; empty by default. */
protected Iterable<EnvironmentExtension> getEnvironmentExtensions() {
  return ImmutableList.of();
}

/** Returns the Starlark semantics derived from the current build language options. */
protected StarlarkSemantics getStarlarkSemantics() {
  return buildLanguageOptions.toStarlarkSemantics();
}

/** Returns the package validator to install; a no-op by default. */
protected PackageValidator getPackageValidator() {
  return PackageValidator.NOOP_VALIDATOR;
}

/** Returns the package overhead estimator to install; a no-op by default. */
protected PackageOverheadEstimator getPackageOverheadEstimator() {
  return PackageOverheadEstimator.NOOP_ESTIMATOR;
}
/**
 * Parses the given native and Starlark options (on top of test-wide defaults such as
 * {@code --cpu=k8}) and asks Skyframe to build the configuration collection.
 *
 * @param starlarkOptions Starlark-defined options, keyed by label-like option names
 * @param args native options in command-line form (e.g. "--compilation_mode=opt")
 */
protected final BuildConfigurationCollection createConfigurations(
    ImmutableMap<String, Object> starlarkOptions, String... args) throws Exception {
  optionsParser =
      OptionsParser.builder()
          .optionsClasses(
              Iterables.concat(
                  Arrays.asList(ExecutionOptions.class, BuildRequestOptions.class),
                  ruleClassProvider.getFragmentRegistry().getOptionsClasses()))
          .build();
  List<String> allArgs = new ArrayList<>();
  // TODO(dmarting): Add --stamp option only to test that requires it.
  allArgs.add("--stamp"); // Stamp is now defaulted to false.
  allArgs.add("--experimental_extended_sanity_checks");
  // Always default to k8, even on mac and windows. Tests that need different cpu should set it
  // using {@link useConfiguration()} explicitly.
  allArgs.add("--cpu=k8");
  allArgs.add("--host_cpu=k8");

  optionsParser.parse(allArgs);
  optionsParser.parse(args);

  // TODO(blaze-configurability): It would be nice to be able to do some starlark options loading
  // to ensure that the values given in this map are the right types for their keys.
  optionsParser.setStarlarkOptions(starlarkOptions);

  BuildOptions buildOptions =
      BuildOptions.of(ruleClassProvider.getFragmentRegistry().getOptionsClasses(), optionsParser);
  return skyframeExecutor.createConfigurations(reporter, buildOptions, ImmutableSet.of(), false);
}
/** Loads the target named by the given absolute label string. */
protected Target getTarget(String label)
    throws NoSuchPackageException, NoSuchTargetException, LabelSyntaxException,
        InterruptedException {
  return getTarget(Label.parseAbsolute(label, ImmutableMap.of()));
}

/** Loads the target for the given label via the package manager. */
protected Target getTarget(Label label)
    throws NoSuchPackageException, NoSuchTargetException, InterruptedException {
  return getPackageManager().getTarget(reporter, label);
}
/**
 * Checks that loading the given target fails with the expected error message.
 *
 * <p>Fails with an assertion error if this doesn't happen.
 *
 * <p>This method is useful for checking loading phase errors. Analysis phase errors can be
 * checked with {@link #getConfiguredTarget} and related methods.
 */
protected void assertTargetError(String label, String expectedError) throws InterruptedException {
  try {
    getTarget(label);
    fail("Expected loading phase failure for target " + label);
  } catch (NoSuchPackageException | NoSuchTargetException | LabelSyntaxException e) {
    // Target loading failed as expected; the exception itself is deliberately ignored — the
    // error message is verified against the event stream below.
  }
  assertContainsEvent(expectedError);
}
/**
 * (Re)applies the current package and build-language options to the Skyframe executor, and
 * re-injects the repository-related precomputed values. Called after any option change.
 */
private void setUpSkyframe() {
  PathPackageLocator pkgLocator =
      PathPackageLocator.create(
          outputBase,
          packageOptions.packagePath,
          reporter,
          rootDirectory.asFragment(),
          rootDirectory,
          BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY);
  packageOptions.showLoadingProgress = true;
  packageOptions.globbingThreads = 7;
  skyframeExecutor.preparePackageLoading(
      pkgLocator,
      packageOptions,
      buildLanguageOptions,
      UUID.randomUUID(),
      ImmutableMap.of(),
      tsgm);
  skyframeExecutor.setActionEnv(ImmutableMap.of());
  skyframeExecutor.setDeletedPackages(ImmutableSet.copyOf(packageOptions.getDeletedPackages()));
  skyframeExecutor.injectExtraPrecomputedValues(
      ImmutableList.of(
          PrecomputedValue.injected(
              RepositoryDelegatorFunction.RESOLVED_FILE_INSTEAD_OF_WORKSPACE, Optional.empty()),
          PrecomputedValue.injected(RepositoryDelegatorFunction.ENABLE_BZLMOD, enableBzlmod()),
          PrecomputedValue.injected(
              RepositoryDelegatorFunction.OUTPUT_VERIFICATION_REPOSITORY_RULES,
              ImmutableSet.of()),
          PrecomputedValue.injected(
              RepositoryDelegatorFunction.RESOLVED_FILE_FOR_VERIFICATION, Optional.empty())));
}
/** Re-parses package options from the given flags and reapplies them to Skyframe. */
protected void setPackageOptions(String... options) throws Exception {
  packageOptions = parsePackageOptions(options);
  setUpSkyframe();
}

/** Re-parses build-language options from the given flags and reapplies them to Skyframe. */
protected void setBuildLanguageOptions(String... options) throws Exception {
  buildLanguageOptions = parseBuildLanguageOptions(options);
  setUpSkyframe();
}

/** Parses {@link PackageOptions}, defaulting visibility to public for tests. */
private static PackageOptions parsePackageOptions(String... options) throws Exception {
  OptionsParser parser = OptionsParser.builder().optionsClasses(PackageOptions.class).build();
  parser.parse("--default_visibility=public");
  parser.parse(options);
  return parser.getOptions(PackageOptions.class);
}

/** Parses {@link BuildLanguageOptions} from the given flags. */
private static BuildLanguageOptions parseBuildLanguageOptions(String... options)
    throws Exception {
  OptionsParser parser =
      OptionsParser.builder().optionsClasses(BuildLanguageOptions.class).build();
  parser.parse(options);
  return parser.getOptions(BuildLanguageOptions.class);
}
/** Used by skyframe-only tests. */
protected SequencedSkyframeExecutor getSkyframeExecutor() {
  return Preconditions.checkNotNull(skyframeExecutor);
}

/** Returns the package manager backing the Skyframe executor. */
protected PackageManager getPackageManager() {
  return skyframeExecutor.getPackageManager();
}

/** Invalidates all packages and configurations; see {@link #invalidatePackages(boolean)}. */
protected void invalidatePackages() throws InterruptedException, AbruptExitException {
  invalidatePackages(true);
}
/**
 * Invalidates all existing packages. Optionally invalidates configurations too.
 *
 * <p>Tests should invalidate both unless they have specific reason not to.
 */
protected void invalidatePackages(boolean alsoConfigs)
    throws InterruptedException, AbruptExitException {
  skyframeExecutor.invalidateFilesUnderPathForTesting(
      reporter, ModifiedFileSet.EVERYTHING_MODIFIED, Root.fromPath(rootDirectory));
  if (alsoConfigs) {
    try {
      // Also invalidate all configurations. This is important: by invalidating all files we
      // invalidate CROSSTOOL, which invalidates CppConfiguration (and a few other fragments). So
      // we need to invalidate the {@link SkyframeBuildView#hostConfigurationCache} as well.
      // Otherwise we end up with old CppConfiguration instances. Even though they're logically
      // equal to the new ones, CppConfiguration has no .equals() method and some production code
      // expects equality.
      useConfiguration(configurationArgs.toArray(new String[0]));
    } catch (Exception e) {
      // There are enough dependers on this method that don't handle Exception that just passing
      // through the Exception would result in a huge refactoring. As it stands this shouldn't
      // fail anyway because this method only gets called after a successful useConfiguration()
      // call anyway.
      throw new RuntimeException(e);
    }
  }
}
/**
 * Returns options that will be implicitly prepended to any options passed to {@link
 * #useConfiguration}.
 */
protected Iterable<String> getDefaultsForConfiguration() {
  return TestConstants.PRODUCT_SPECIFIC_FLAGS;
}

/**
 * Sets host and target configuration using the specified options, falling back to the default
 * options for unspecified ones, and recreates the build view.
 *
 * <p>TODO(juliexxia): when Starlark option parsing exists, find a way to combine these parameters
 * into a single parameter so Starlark/native options don't have to be specified separately.
 *
 * <p>NOTE: Build language options are not support by this method, for example
 * --experimental_google_legacy_api. Use {@link #setBuildLanguageOptions} instead.
 *
 * @param starlarkOptions map of Starlark-defined options where the keys are option names (in the
 *     form of label-like strings) and the values are option values
 * @param args native option name/pair descriptions in command line form (e.g. "--cpu=k8")
 */
protected void useConfiguration(ImmutableMap<String, Object> starlarkOptions, String... args)
    throws Exception {
  ImmutableList<String> actualArgs =
      ImmutableList.<String>builder().addAll(getDefaultsForConfiguration()).add(args).build();

  masterConfig = createConfigurations(starlarkOptions, actualArgs.toArray(new String[0]));

  targetConfig = getTargetConfiguration();
  targetConfigKey = targetConfig.getKey();
  // Remember the args so invalidatePackages() can rebuild the same configurations.
  configurationArgs = actualArgs;
  createBuildView();
}

/** Convenience overload of {@link #useConfiguration(ImmutableMap, String...)} with no Starlark options. */
protected void useConfiguration(String... args) throws Exception {
  useConfiguration(ImmutableMap.of(), args);
}
/**
 * Creates BuildView using current hostConfig/targetConfig values. Ensures that hostConfig is
 * either identical to the targetConfig or has 'host' short name.
 */
protected final void createBuildView() {
  Preconditions.checkNotNull(masterConfig);
  Preconditions.checkState(
      getHostConfiguration().equals(getTargetConfiguration())
          || getHostConfiguration().isHostConfiguration(),
      "Host configuration %s is not a host configuration' "
          + "and does not match target configuration %s",
      getHostConfiguration(),
      getTargetConfiguration());

  skyframeExecutor.handleAnalysisInvalidatingChange();

  view = new BuildViewForTesting(directories, ruleClassProvider, skyframeExecutor, null);
  view.setConfigurationsForTesting(event -> {}, masterConfig);

  view.setArtifactRoots(new PackageRootsNoSymlinkCreation(Root.fromPath(rootDirectory)));
}
/**
 * Builds a {@link CachingAnalysisEnvironment} suitable for tests, backed by a throwaway
 * {@link ActionLookupKey} whose label/configuration/function-name are all null.
 */
protected CachingAnalysisEnvironment getTestAnalysisEnvironment() throws InterruptedException {
  SkyFunction.Environment env = skyframeExecutor.getSkyFunctionEnvironmentForTesting(reporter);
  StarlarkBuiltinsValue starlarkBuiltinsValue =
      (StarlarkBuiltinsValue)
          Preconditions.checkNotNull(env.getValue(StarlarkBuiltinsValue.key()));
  return new CachingAnalysisEnvironment(
      view.getArtifactFactory(),
      actionKeyContext,
      // Dummy owner key: tests do not need a real label or configuration here.
      new ActionLookupKey() {
        @Nullable
        @Override
        public Label getLabel() {
          return null;
        }

        @Nullable
        @Override
        public BuildConfigurationKey getConfigurationKey() {
          return null;
        }

        @Override
        public SkyFunctionName functionName() {
          return null;
        }
      },
      /*extendedSanityChecks=*/ false,
      /*allowAnalysisFailures=*/ false,
      reporter,
      env,
      starlarkBuiltinsValue);
}
/**
 * Allows access to the prerequisites of a configured target. This is currently used in some tests
 * to reach into the internals of RuleCT for white box testing. In principle, this should not be
 * used; instead tests should only assert on properties of the exposed provider instances and / or
 * the action graph.
 */
protected final Collection<ConfiguredTarget> getDirectPrerequisites(ConfiguredTarget target)
    throws TransitionException, InvalidConfigurationException, InconsistentAspectOrderException,
        Failure {
  return view.getDirectPrerequisitesForTesting(reporter, target, masterConfig);
}
/**
 * Returns the direct prerequisite of {@code target} whose original label equals {@code label},
 * or null if no prerequisite matches.
 */
protected final ConfiguredTarget getDirectPrerequisite(ConfiguredTarget target, String label)
    throws Exception {
  Label wantedLabel = Label.parseAbsolute(label, ImmutableMap.of());
  for (ConfiguredTarget candidate : getDirectPrerequisites(target)) {
    if (candidate.getOriginalLabel().equals(wantedLabel)) {
      return candidate;
    }
  }
  return null;
}
/**
 * Returns the direct prerequisite of {@code ctad} whose configured-target label equals
 * {@code label}, or null if there is none.
 */
protected final ConfiguredTargetAndData getConfiguredTargetAndDataDirectPrerequisite(
    ConfiguredTargetAndData ctad, String label) throws Exception {
  Label wantedLabel = Label.parseAbsolute(label, ImmutableMap.of());
  ConfiguredTargetAndData match = null;
  for (ConfiguredTargetAndData prereq :
      view.getConfiguredTargetAndDataDirectPrerequisitesForTesting(
          reporter, ctad.getConfiguredTarget(), masterConfig)) {
    if (prereq.getConfiguredTarget().getLabel().equals(wantedLabel)) {
      match = prereq;
      break; // First match wins, same as the original early return.
    }
  }
  return match;
}
/** Returns a {@link BuildOptions} with options in {@code exclude} trimmed away. */
private static BuildOptions trimConfiguration(
    BuildOptions original, Set<Class<? extends FragmentOptions>> exclude) {
  BuildOptions.Builder builder = original.toBuilder();
  for (Class<? extends FragmentOptions> fragmentOptionsClass : exclude) {
    builder.removeFragmentOptions(fragmentOptionsClass);
  }
  return builder.build();
}
/**
 * Asserts that two configurations are the same, with exclusions.
 *
 * <p>Any fragments options of type specified in excludeFragmentOptions are excluded from the
 * comparison.
 *
 * <p>Generally, this means they share the same checksum, which is computed by iterating over all
 * the individual @Option annotated values contained within the {@link FragmentOptions} classes
 * contained within the {@link BuildOptions} inside the given configurations.
 */
protected static void assertConfigurationsEqual(
    BuildConfigurationValue config1,
    BuildConfigurationValue config2,
    Set<Class<? extends FragmentOptions>> excludeFragmentOptions) {
  // BuildOptions and crosstool files determine a configuration's content. Within the context
  // of these tests only the former actually change.
  assertThat(trimConfiguration(config2.cloneOptions(), excludeFragmentOptions))
      .isEqualTo(trimConfiguration(config1.cloneOptions(), excludeFragmentOptions));
}

/** Asserts that two configurations are the same, excluding no fragment options. */
protected static void assertConfigurationsEqual(
    BuildConfigurationValue config1, BuildConfigurationValue config2) {
  assertConfigurationsEqual(config1, config2, /*excludeFragmentOptions=*/ ImmutableSet.of());
}
/**
 * Creates and returns a rule context that is equivalent to the one that was used to create the
 * given configured target.
 *
 * <p>Uses a stub analysis environment; use the two-argument overload to supply a real one.
 */
protected RuleContext getRuleContext(ConfiguredTarget target) throws Exception {
  return view.getRuleContextForTesting(
      reporter, target, new StubAnalysisEnvironment(), masterConfig);
}

/** Like {@link #getRuleContext(ConfiguredTarget)}, but with a caller-supplied environment. */
protected RuleContext getRuleContext(
    ConfiguredTarget target, AnalysisEnvironment analysisEnvironment) throws Exception {
  return view.getRuleContextForTesting(reporter, target, analysisEnvironment, masterConfig);
}
/**
 * Creates and returns a rule context to use for Starlark tests that is equivalent to the one that
 * was used to create the given configured target.
 */
protected RuleContext getRuleContextForStarlark(ConfiguredTarget target) throws Exception {
  // TODO(bazel-team): we need this horrible workaround because CachingAnalysisEnvironment
  // only works with StoredErrorEventListener despite the fact it accepts the interface
  // ErrorEventListener, so it's not possible to create it with reporter.
  // See BuildView.getRuleContextForTesting().
  StoredEventHandler eventHandler =
      new StoredEventHandler() {
        // Mirror every stored event to the test's reporter as well, so assertions on
        // eventCollector still see them.
        @Override
        public synchronized void handle(Event e) {
          super.handle(e);
          reporter.handle(e);
        }
      };
  return view.getRuleContextForTesting(target, eventHandler, masterConfig);
}
/**
 * Allows access to the prerequisites of a configured target. This is currently used in some tests
 * to reach into the internals of RuleCT for white box testing. In principle, this should not be
 * used; instead tests should only assert on properties of the exposed provider instances and / or
 * the action graph.
 *
 * @param attributeName the attribute whose prerequisite targets are returned
 */
protected List<? extends TransitiveInfoCollection> getPrerequisites(
    ConfiguredTarget target, String attributeName) throws Exception {
  return getRuleContext(target).getConfiguredTargetMap().get(attributeName);
}

/**
 * Allows access to the prerequisites of a configured target. This is currently used in some tests
 * to reach into the internals of RuleCT for white box testing. In principle, this should not be
 * used; instead tests should only assert on properties of the exposed provider instances and / or
 * the action graph.
 *
 * @param classType the provider class to project each prerequisite onto
 */
protected <C extends TransitiveInfoProvider> Iterable<C> getPrerequisites(
    ConfiguredTarget target, String attributeName, Class<C> classType) throws Exception {
  return AnalysisUtils.getProviders(getPrerequisites(target, attributeName), classType);
}
/**
 * Allows access to the prerequisites of a configured target. This is currently used in some tests
 * to reach into the internals of RuleCT for white box testing. In principle, this should not be
 * used; instead tests should only assert on properties of the exposed provider instances and / or
 * the action graph.
 *
 * <p>Collects the files-to-build of every FileProvider prerequisite under {@code attributeName},
 * de-duplicated while preserving first-occurrence order.
 */
protected ImmutableList<Artifact> getPrerequisiteArtifacts(
    ConfiguredTarget target, String attributeName) throws Exception {
  // ImmutableSet keeps insertion order and drops duplicates, matching the LinkedHashSet idiom.
  ImmutableSet.Builder<Artifact> unique = ImmutableSet.builder();
  for (FileProvider provider : getPrerequisites(target, attributeName, FileProvider.class)) {
    unique.addAll(provider.getFilesToBuild().toList());
  }
  return unique.build().asList();
}
/** Returns the action graph, obtained from the Skyframe executor. */
protected ActionGraph getActionGraph() {
  return skyframeExecutor.getActionGraph(reporter);
}

/**
 * Returns all arguments used by the action.
 *
 * <p>NOTE(review): this concatenates every command line EXCEPT the first one
 * ({@code subList(1, ...)}); presumably the first entry is the executable's own command line —
 * confirm against the CommandLines contract before relying on this.
 */
protected final ImmutableList<String> allArgsForAction(SpawnAction action) throws Exception {
  ImmutableList.Builder<String> args = new ImmutableList.Builder<>();
  List<CommandLineAndParamFileInfo> commandLines = action.getCommandLines().getCommandLines();
  for (CommandLineAndParamFileInfo pair : commandLines.subList(1, commandLines.size())) {
    args.addAll(pair.commandLine.arguments());
  }
  return args.build();
}
/** Locates the first parameter file used by the action and returns its command line. */
@Nullable
protected final CommandLine paramFileCommandLineForAction(Action action) {
  // First scan the SpawnAction's own command lines for one backed by a param file ...
  if (action instanceof SpawnAction) {
    CommandLines commandLines = ((SpawnAction) action).getCommandLines();
    for (CommandLineAndParamFileInfo pair : commandLines.getCommandLines()) {
      if (pair.paramFileInfo != null) {
        return pair.commandLine;
      }
    }
  }
  // ... otherwise fall back to a ParameterFileWriteAction among the action's inputs.
  ParameterFileWriteAction parameterFileWriteAction = paramFileWriteActionForAction(action);
  return parameterFileWriteAction != null ? parameterFileWriteAction.getCommandLine() : null;
}

/** Locates the first parameter file used by the action and returns its args, or null if none. */
@Nullable
protected final Iterable<String> paramFileArgsForAction(Action action)
    throws CommandLineExpansionException, InterruptedException {
  CommandLine commandLine = paramFileCommandLineForAction(action);
  return commandLine != null ? commandLine.arguments() : null;
}
/**
 * Locates the first parameter file used by the action and returns its args.
 *
 * <p>If no param file is used, return the action's arguments.
 *
 * @return the param file's arguments if a param file exists, otherwise the action's own arguments
 */
@Nullable
protected final Iterable<String> paramFileArgsOrActionArgs(CommandAction action)
    throws CommandLineExpansionException, InterruptedException {
  CommandLine commandLine = paramFileCommandLineForAction(action);
  return commandLine != null ? commandLine.arguments() : action.getArguments();
}
/** Locates the first parameter file used by the action and returns its contents. */
@Nullable
protected final String paramFileStringContentsForAction(Action action)
    throws CommandLineExpansionException, InterruptedException, IOException {
  if (action instanceof SpawnAction) {
    CommandLines commandLines = ((SpawnAction) action).getCommandLines();
    for (CommandLineAndParamFileInfo pair : commandLines.getCommandLines()) {
      if (pair.paramFileInfo != null) {
        // Materialize the param file in memory, honoring its declared file type and charset.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ParameterFile.writeParameterFile(
            out,
            pair.commandLine.arguments(),
            pair.paramFileInfo.getFileType(),
            pair.paramFileInfo.getCharset());
        return out.toString(pair.paramFileInfo.getCharset());
      }
    }
  }
  // Fallback: a ParameterFileWriteAction among the inputs can render its own string contents.
  ParameterFileWriteAction parameterFileWriteAction = paramFileWriteActionForAction(action);
  return parameterFileWriteAction != null ? parameterFileWriteAction.getStringContents() : null;
}
/**
 * Returns the first {@link ParameterFileWriteAction} that generates one of the action's
 * (non-special) inputs, or null if none exists.
 */
@Nullable
protected ParameterFileWriteAction paramFileWriteActionForAction(Action action) {
  for (Artifact input : action.getInputs().toList()) {
    if (input instanceof SpecialArtifact) {
      continue; // Special artifacts (e.g. trees) are skipped, as in the original nested check.
    }
    Action generating = getGeneratingAction(input);
    if (generating instanceof ParameterFileWriteAction) {
      return (ParameterFileWriteAction) generating;
    }
  }
  return null;
}
/**
 * Returns the metadata of the action generating {@code artifact}: the local mutable action graph
 * is consulted first, then the Skyframe action graph. Returns null for source artifacts and for
 * derived artifacts that have no generating-action key.
 */
protected final ActionAnalysisMetadata getGeneratingActionAnalysisMetadata(Artifact artifact) {
  Preconditions.checkNotNull(artifact);
  ActionAnalysisMetadata actionAnalysisMetadata =
      mutableActionGraph.getGeneratingAction(artifact);
  if (actionAnalysisMetadata == null) {
    if (artifact.isSourceArtifact() || !((DerivedArtifact) artifact).hasGeneratingActionKey()) {
      return null;
    }
    actionAnalysisMetadata = getActionGraph().getGeneratingAction(artifact);
  }
  return actionAnalysisMetadata;
}
/** Returns the action generating the named output within {@code target}'s files-to-build. */
protected Action getGeneratingAction(ConfiguredTarget target, String outputName) {
  NestedSet<Artifact> filesToBuild = getFilesToBuild(target);
  return getGeneratingAction(outputName, filesToBuild, "filesToBuild");
}

/**
 * Looks up the artifact named {@code outputName} in {@code filesToBuild} and returns its
 * generating action; {@code providerName} only labels the error message on lookup failure.
 */
private Action getGeneratingAction(
    String outputName, NestedSet<Artifact> filesToBuild, String providerName) {
  return getGeneratingAction(findArtifactNamed(outputName, filesToBuild, providerName));
}
/**
 * Returns the generating {@link Action} of {@code artifact}, or null if it has none; fails if
 * the generating metadata is not a proper Action.
 */
protected final Action getGeneratingAction(Artifact artifact) {
  ActionAnalysisMetadata metadata = getGeneratingActionAnalysisMetadata(artifact);
  if (metadata == null) {
    return null;
  }
  Preconditions.checkState(
      metadata instanceof Action, "%s is not a proper Action object", metadata.prettyPrint());
  return (Action) metadata;
}
/**
 * Returns the first artifact in {@code artifacts} matching {@code name}; throws
 * NoSuchElementException (mentioning {@code context}) if no artifact matches.
 */
private static Artifact findArtifactNamed(
    String name, NestedSet<Artifact> artifacts, Object context) {
  return artifacts.toList().stream()
      .filter(artifactNamed(name))
      .findFirst()
      .orElseThrow(
          () ->
              new NoSuchElementException(
                  String.format(
                      "Artifact named '%s' not found in %s (%s)", name, context, artifacts)));
}
/** Returns the action generating the named output within the given output group of the target. */
protected Action getGeneratingActionInOutputGroup(
    ConfiguredTarget target, String outputName, String outputGroupName) {
  NestedSet<Artifact> outputGroup = OutputGroupInfo.get(target).getOutputGroup(outputGroupName);
  return getGeneratingAction(outputName, outputGroup, "outputGroup/" + outputGroupName);
}

/**
 * Returns the SpawnAction that generates an artifact. Implicitly assumes the action is a
 * SpawnAction.
 */
protected final SpawnAction getGeneratingSpawnAction(Artifact artifact) {
  return (SpawnAction) getGeneratingAction(artifact);
}

/** Variant of {@link #getGeneratingSpawnAction(Artifact)} keyed by output name. */
protected SpawnAction getGeneratingSpawnAction(ConfiguredTarget target, String outputName) {
  return getGeneratingSpawnAction(
      findArtifactNamed(outputName, getFilesToBuild(target), target.getLabel()));
}
/** Returns the argument list of the SpawnAction that generates {@code artifact}. */
protected final List<String> getGeneratingSpawnActionArgs(Artifact artifact)
    throws CommandLineExpansionException, InterruptedException {
  SpawnAction a = getGeneratingSpawnAction(artifact);
  return a.getArguments();
}

/** Returns a fresh {@link ActionsTestUtil} wrapping the current action graph. */
protected ActionsTestUtil actionsTestUtil() {
  return new ActionsTestUtil(getActionGraph());
}

/** Gets a MutableActionGraph for testing purposes. */
protected MutableActionGraph getMutableActionGraph() {
  return mutableActionGraph;
}
/**
 * Returns the ConfiguredTarget for the specified label, configured for the "build" (aka "target")
 * configuration. If the label corresponds to a target with a top-level configuration transition,
 * that transition is applied to the given config in the returned ConfiguredTarget.
 *
 * <p>May return null on error; see {@link #getConfiguredTarget(Label, BuildConfigurationValue)}.
 */
@Nullable
public ConfiguredTarget getConfiguredTarget(String label) throws LabelSyntaxException {
  return getConfiguredTarget(label, targetConfig);
}

/**
 * Returns the ConfiguredTarget for the specified label, using the given build configuration. If
 * the label corresponds to a target with a top-level configuration transition, that transition is
 * applied to the given config in the returned ConfiguredTarget.
 *
 * <p>May return null on error; see {@link #getConfiguredTarget(Label, BuildConfigurationValue)}.
 */
@Nullable
protected ConfiguredTarget getConfiguredTarget(String label, BuildConfigurationValue config)
    throws LabelSyntaxException {
  // Parse with an empty repository mapping; the label must be absolute (e.g. "//foo:bar").
  return getConfiguredTarget(Label.parseAbsolute(label, ImmutableMap.of()), config);
}
/**
 * Returns the ConfiguredTarget for the specified label, using the given build configuration. If
 * the label corresponds to a target with a top-level configuration transition, that transition is
 * applied to the given config in the returned ConfiguredTarget.
 *
 * <p>If the evaluation of the SkyKey corresponding to the configured target fails, this method
 * may return null. In that case, use a debugger to inspect the {@link ErrorInfo} for the
 * evaluation, which is produced by the {@link MemoizingEvaluator#getExistingValue} call in {@link
 * SkyframeExecutor#getConfiguredTargetForTesting}. See also b/26382502.
 *
 * @throws AssertionError if the target cannot be transitioned into with the given configuration
 */
// TODO(bazel-team): Should we work around b/26382502 by asserting here that the result is not
// null?
@Nullable
protected ConfiguredTarget getConfiguredTarget(Label label, BuildConfigurationValue config) {
  try {
    return view.getConfiguredTargetForTesting(reporter, label, config);
  } catch (InvalidConfigurationException
      | StarlarkTransition.TransitionException
      | InterruptedException e) {
    // Checked exceptions are converted to test failures so callers don't have to declare them.
    throw new AssertionError(e);
  }
}
/**
 * Returns a ConfiguredTargetAndData for the specified label, using the given build configuration.
 */
protected ConfiguredTargetAndData getConfiguredTargetAndData(
    Label label, BuildConfigurationValue config)
    throws StarlarkTransition.TransitionException, InvalidConfigurationException,
        InterruptedException {
  return view.getConfiguredTargetAndDataForTesting(reporter, label, config);
}

/**
 * Returns the ConfiguredTargetAndData for the specified label. If the label corresponds to a
 * target with a top-level configuration transition, that transition is applied to the given
 * config in the ConfiguredTargetAndData's ConfiguredTarget.
 */
public ConfiguredTargetAndData getConfiguredTargetAndData(String label)
    throws LabelSyntaxException, StarlarkTransition.TransitionException,
        InvalidConfigurationException, InterruptedException {
  return getConfiguredTargetAndData(Label.parseAbsolute(label, ImmutableMap.of()), targetConfig);
}

/**
 * Returns the ConfiguredTarget for the specified file label, configured for the "build" (aka
 * "target") configuration. The result is cast, so the label must denote a file target.
 */
protected FileConfiguredTarget getFileConfiguredTarget(String label) throws LabelSyntaxException {
  return (FileConfiguredTarget) getConfiguredTarget(label, targetConfig);
}
/**
 * Returns the Artifact for the specified label, configured for the "build" (aka "target")
 * configuration.
 */
protected Artifact getArtifact(String label) throws LabelSyntaxException {
  ConfiguredTarget target = getConfiguredTarget(label, targetConfig);
  // A file target exposes its artifact directly; rule targets must build exactly one file.
  return target instanceof FileConfiguredTarget
      ? ((FileConfiguredTarget) target).getArtifact()
      : getFilesToBuild(target).getSingleton();
}
/**
 * Returns the ConfiguredTarget for the specified label, configured for the "host" configuration.
 */
protected ConfiguredTarget getHostConfiguredTarget(String label) throws LabelSyntaxException {
  return getConfiguredTarget(label, getHostConfiguration());
}

/**
 * Returns the ConfiguredTarget for the specified file label, configured for the "host"
 * configuration. The result is cast, so the label must denote a file target.
 */
protected FileConfiguredTarget getHostFileConfiguredTarget(String label)
    throws LabelSyntaxException {
  return (FileConfiguredTarget) getHostConfiguredTarget(label);
}
/**
 * Returns the {@link ConfiguredAspect} with the given label. For example: {@code
 * //my:base_target%my_aspect}.
 *
 * <p>Assumes only one configured aspect exists for this label. If this isn't true, or you need
 * finer grained selection for different configurations, you'll need to expand this method.
 */
protected ConfiguredAspect getAspect(String label) throws Exception {
  // Scans every done Skyframe value; onlyElement() throws if zero or multiple AspectKeys match.
  AspectValue aspect =
      (AspectValue)
          skyframeExecutor.getEvaluator().getDoneValues().entrySet().stream()
              .filter(
                  entry ->
                      entry.getKey() instanceof AspectKey
                          && ((AspectKey) entry.getKey()).getAspectName().equals(label))
              .map(Map.Entry::getValue)
              .collect(onlyElement());
  return aspect.getConfiguredAspect();
}
/**
 * Rewrites the WORKSPACE to have the required boilerplate and the given lines of content.
 *
 * <p>Triggers Skyframe to reinitialize everything.
 *
 * @param lines WORKSPACE content appended after the mock boilerplate
 */
public void rewriteWorkspace(String... lines) throws Exception {
  scratch.overwriteFile(
      "WORKSPACE",
      new ImmutableList.Builder<String>()
          .addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
          .addAll(ImmutableList.copyOf(lines))
          .build());
  // Force package reloading so the new WORKSPACE contents take effect.
  invalidatePackages();
}
/**
 * Create and return a configured scratch rule.
 *
 * @param packageName the package name of the rule.
 * @param ruleName the name of the rule.
 * @param lines the text of the rule.
 * @return the configured target instance for the created rule.
 */
protected ConfiguredTarget scratchConfiguredTarget(
    String packageName, String ruleName, String... lines) throws Exception {
  return scratchConfiguredTarget(packageName, ruleName, targetConfig, lines);
}

/**
 * Create and return a configured scratch rule.
 *
 * @param packageName the package name of the rule.
 * @param ruleName the name of the rule.
 * @param config the configuration to use to construct the configured rule.
 * @param lines the text of the rule.
 * @return the configured target instance for the created rule, or null on analysis error.
 */
protected ConfiguredTarget scratchConfiguredTarget(
    String packageName, String ruleName, BuildConfigurationValue config, String... lines)
    throws Exception {
  ConfiguredTargetAndData ctad =
      scratchConfiguredTargetAndData(packageName, ruleName, config, lines);
  return ctad == null ? null : ctad.getConfiguredTarget();
}
/**
 * Creates and returns a configured scratch rule and its data.
 *
 * @param packageName the package name of the rule.
 * @param rulename the name of the rule.
 * @param lines the text of the rule.
 * @return the ConfiguredTargetAndData instance for the created rule.
 */
protected ConfiguredTargetAndData scratchConfiguredTargetAndData(
    String packageName, String rulename, String... lines) throws Exception {
  return scratchConfiguredTargetAndData(packageName, rulename, targetConfig, lines);
}

/**
 * Creates and returns a configured scratch rule and its data.
 *
 * @param packageName the package name of the rule.
 * @param ruleName the name of the rule.
 * @param config the configuration to use to construct the configured rule.
 * @param lines the text of the rule.
 * @return the ConfiguredTargetAndData instance for the created rule.
 */
protected ConfiguredTargetAndData scratchConfiguredTargetAndData(
    String packageName, String ruleName, BuildConfigurationValue config, String... lines)
    throws Exception {
  Target rule = scratchRule(packageName, ruleName, lines);
  return view.getConfiguredTargetAndDataForTesting(reporter, rule.getLabel(), config);
}
/**
 * Create and return a scratch rule.
 *
 * @param packageName the package name of the rule.
 * @param ruleName the name of the rule.
 * @param lines the text of the rule.
 * @return the rule instance for the created rule.
 */
protected Rule scratchRule(String packageName, String ruleName, String... lines)
    throws Exception {
  // Allow to create the BUILD file also in the top package.
  String buildFilePathString = packageName.isEmpty() ? "BUILD" : packageName + "/BUILD";
  // The external package lives in WORKSPACE rather than a BUILD file, and must be overwritten
  // (the mock workspace boilerplate already exists) instead of created fresh.
  if (packageName.equals(LabelConstants.EXTERNAL_PACKAGE_NAME.getPathString())) {
    buildFilePathString = "WORKSPACE";
    scratch.overwriteFile(buildFilePathString, lines);
  } else {
    scratch.file(buildFilePathString, lines);
  }
  // Tell Skyframe the file changed so the next target lookup re-reads it.
  skyframeExecutor.invalidateFilesUnderPathForTesting(
      reporter,
      new ModifiedFileSet.Builder().modify(PathFragment.create(buildFilePathString)).build(),
      Root.fromPath(rootDirectory));
  return (Rule) getTarget("//" + packageName + ":" + ruleName);
}
/**
 * Check that configuration of the target named 'ruleName' in the specified BUILD file fails with
 * an error message containing 'expectedErrorMessage'.
 *
 * @param packageName the package name of the generated BUILD file
 * @param ruleName the rule name for the rule in the generated BUILD file
 * @param expectedErrorMessage the expected error message.
 * @param lines the text of the rule.
 * @return the found error.
 */
protected Event checkError(
    String packageName, String ruleName, String expectedErrorMessage, String... lines)
    throws Exception {
  eventCollector.clear();
  reporter.removeHandler(failFastHandler); // expect errors
  ConfiguredTarget target = scratchConfiguredTarget(packageName, ruleName, lines);
  // The target may be null when analysis fails outright; otherwise it must carry errors.
  if (target != null) {
    assertWithMessage(
            "Rule '" + "//" + packageName + ":" + ruleName + "' did not contain an error")
        .that(view.hasErrors(target))
        .isTrue();
  }
  return assertContainsEvent(expectedErrorMessage);
}

/**
 * Check that configuration of the target named 'ruleName' in the specified BUILD file fails with
 * an error message matching 'expectedErrorPattern'.
 *
 * @param packageName the package name of the generated BUILD file
 * @param ruleName the rule name for the rule in the generated BUILD file
 * @param expectedErrorPattern a regex that matches the expected error.
 * @param lines the text of the rule.
 * @return the found error.
 */
protected Event checkError(
    String packageName, String ruleName, Pattern expectedErrorPattern, String... lines)
    throws Exception {
  eventCollector.clear();
  reporter.removeHandler(failFastHandler); // expect errors
  ConfiguredTarget target = scratchConfiguredTarget(packageName, ruleName, lines);
  // The target may be null when analysis fails outright; otherwise it must carry errors.
  if (target != null) {
    assertWithMessage(
            "Rule '" + "//" + packageName + ":" + ruleName + "' did not contain an error")
        .that(view.hasErrors(target))
        .isTrue();
  }
  return assertContainsEvent(expectedErrorPattern);
}
/**
 * Check that configuration of the target named 'label' fails with an error message containing
 * 'expectedErrorMessage'.
 *
 * @param label the target name to test
 * @param expectedErrorMessage the expected error message.
 * @return the found error.
 */
protected Event checkError(String label, String expectedErrorMessage) throws Exception {
  eventCollector.clear();
  reporter.removeHandler(failFastHandler); // expect errors
  ConfiguredTarget target = getConfiguredTarget(label);
  // The target may be null when analysis fails outright; otherwise it must carry errors.
  if (target != null) {
    assertWithMessage("Rule '" + label + "' did not contain an error")
        .that(view.hasErrors(target))
        .isTrue();
  }
  return assertContainsEvent(expectedErrorMessage);
}

/**
 * Checks whether loading the given target results in the specified error message.
 *
 * @param target the name of the target.
 * @param expectedErrorMessage the expected error message.
 */
protected void checkLoadingPhaseError(String target, String expectedErrorMessage) {
  reporter.removeHandler(failFastHandler);
  // The error happens during the loading of the Starlark file so checkError doesn't work here
  assertThrows(Exception.class, () -> getTarget(target));
  assertContainsEvent(expectedErrorMessage);
}
/**
 * Check that configuration of the target named 'ruleName' in the specified BUILD file reports a
 * warning message ending in 'expectedWarningMessage', and that no errors were reported.
 *
 * <p>Note: unlike checkError, failFastHandler stays installed here, since only warnings are
 * expected.
 *
 * @param packageName the package name of the generated BUILD file
 * @param ruleName the rule name for the rule in the generated BUILD file
 * @param expectedWarningMessage the expected warning message.
 * @param lines the text of the rule.
 * @return the found error.
 */
protected Event checkWarning(
    String packageName, String ruleName, String expectedWarningMessage, String... lines)
    throws Exception {
  eventCollector.clear();
  ConfiguredTarget target = scratchConfiguredTarget(packageName, ruleName, lines);
  assertWithMessage("Rule '" + "//" + packageName + ":" + ruleName + "' did contain an error")
      .that(view.hasErrors(target))
      .isFalse();
  return assertContainsEvent(expectedWarningMessage);
}
/**
 * Given a collection of Artifacts, returns a corresponding set of strings of the form "[root]
 * [relpath]", such as "bin x/libx.a". Such strings make assertions easier to write.
 *
 * <p>The returned set preserves the order of the input.
 */
protected Set<String> artifactsToStrings(NestedSet<? extends Artifact> artifacts) {
  return artifactsToStrings(artifacts.toList());
}

/**
 * Given a collection of Artifacts, returns a corresponding set of strings of the form "[root]
 * [relpath]", such as "bin x/libx.a". Such strings make assertions easier to write.
 *
 * <p>The returned set preserves the order of the input.
 */
protected Set<String> artifactsToStrings(Iterable<? extends Artifact> artifacts) {
  return AnalysisTestUtil.artifactsToStrings(masterConfig, artifacts);
}
/**
 * Given a list of PathFragments, returns a corresponding list of strings. Such strings make
 * assertions easier to write.
 */
protected static ImmutableList<String> pathfragmentsToStrings(List<PathFragment> pathFragments) {
  ImmutableList.Builder<String> strings = ImmutableList.builder();
  for (PathFragment fragment : pathFragments) {
    strings.add(fragment.toString());
  }
  return strings.build();
}
/**
 * Asserts that targetName's outputs are exactly expectedOuts.
 *
 * <p>Fix: the generating-rule assertion and the action-graph wiring check were previously nested
 * inside the {@code !(outTarget instanceof OutputFile)} branch, after an unconditional
 * {@code fail(...)} — making them unreachable there, and entirely skipped for genuine OutputFile
 * targets. They now run for every expected output.
 *
 * @param targetName The label of a rule.
 * @param expectedOuts The labels of the expected outputs of the rule.
 */
protected void assertOuts(String targetName, String... expectedOuts) throws Exception {
  Rule ruleTarget = (Rule) getTarget(targetName);
  for (String expectedOut : expectedOuts) {
    Target outTarget = getTarget(expectedOut);
    if (!(outTarget instanceof OutputFile)) {
      fail("Target " + outTarget + " is not an output");
    }
    assertThat(((OutputFile) outTarget).getGeneratingRule()).isSameInstanceAs(ruleTarget);
    // This ensures that the output artifact is wired up in the action graph
    getConfiguredTarget(expectedOut);
  }
  Collection<OutputFile> outs = ruleTarget.getOutputFiles();
  assertWithMessage("Mismatched outputs: " + outs)
      .that(outs.size())
      .isEqualTo(expectedOuts.length);
}
/** Asserts that there exists a configured target file for the given label. */
protected void assertConfiguredTargetExists(String label) throws Exception {
  assertThat(getFileConfiguredTarget(label)).isNotNull();
}

/** Assert that the first label and the second label are both generated by the same command. */
protected void assertSameGeneratingAction(String labelA, String labelB) throws Exception {
  assertWithMessage("Action for " + labelA + " did not match " + labelB)
      .that(getGeneratingActionForLabel(labelB))
      .isSameInstanceAs(getGeneratingActionForLabel(labelA));
}

/** Returns the source artifact for {@code rootRelativePath} under {@code root}. */
protected Artifact getSourceArtifact(PathFragment rootRelativePath, Root root) {
  return view.getArtifactFactory().getSourceArtifact(rootRelativePath, root);
}

/** Returns the source artifact for {@code name}, rooted at the test's root directory. */
protected Artifact getSourceArtifact(String name) {
  return getSourceArtifact(PathFragment.create(name), Root.fromPath(rootDirectory));
}
/**
 * Gets a derived artifact, creating it if necessary. {@code ArtifactOwner} should be a genuine
 * {@link ConfiguredTargetKey} corresponding to a {@link ConfiguredTarget}. If called from a test
 * that does not exercise the analysis phase, the convenience methods {@link
 * #getBinArtifactWithNoOwner} or {@link #getGenfilesArtifactWithNoOwner} should be used instead.
 */
protected final Artifact.DerivedArtifact getDerivedArtifact(
    PathFragment rootRelativePath, ArtifactRoot root, ArtifactOwner owner) {
  // Prefer the real artifact already registered under the owner's Skyframe value, if any:
  // scan the owner's actions for an output with matching path and root.
  if ((owner instanceof ActionLookupKey)) {
    SkyValue skyValue;
    try {
      skyValue = skyframeExecutor.getEvaluator().getExistingValue((SkyKey) owner);
    } catch (InterruptedException e) {
      throw new IllegalStateException(e);
    }
    if (skyValue instanceof ActionLookupValue) {
      for (ActionAnalysisMetadata action : ((ActionLookupValue) skyValue).getActions()) {
        for (Artifact output : action.getOutputs()) {
          if (output.getRootRelativePath().equals(rootRelativePath)
              && output.getRoot().equals(root)) {
            return (Artifact.DerivedArtifact) output;
          }
        }
      }
    }
  }
  // Fall back: some tests don't actually need an artifact with an owner.
  // TODO(janakr): the tests that are passing in nonsense here should be changed.
  return view.getArtifactFactory().getDerivedArtifact(rootRelativePath, root, owner);
}
/**
 * Gets a Tree Artifact for testing in the subdirectory of the {@link
 * BuildConfigurationValue#getBinDirectory} corresponding to the package of {@code owner}. So to
 * specify a file foo/foo.o owned by target //foo:foo, {@code packageRelativePath} should just be
 * "foo.o".
 *
 * <p>NOTE(review): this delegates to {@link #getDerivedArtifact}; the "tree-ness" presumably
 * comes from the artifact registered under the owner's actions — confirm.
 */
protected final Artifact getTreeArtifact(String packageRelativePath, ConfiguredTarget owner) {
  ActionLookupKey actionLookupKey =
      ConfiguredTargetKey.builder()
          .setConfiguredTarget(owner)
          .setConfigurationKey(owner.getConfigurationKey())
          .build();
  return getDerivedArtifact(
      owner.getLabel().getPackageFragment().getRelative(packageRelativePath),
      getConfiguration(owner).getBinDirectory(RepositoryName.MAIN),
      actionLookupKey);
}
/**
 * Gets a derived Artifact for testing with path of the form
 * root/owner.getPackageFragment()/packageRelativePath.
 *
 * @see #getDerivedArtifact(PathFragment, ArtifactRoot, ArtifactOwner)
 */
private Artifact getPackageRelativeDerivedArtifact(
    String packageRelativePath, ArtifactRoot root, ArtifactOwner owner) {
  return getDerivedArtifact(
      owner.getLabel().getPackageFragment().getRelative(packageRelativePath), root, owner);
}
/**
 * Returns the input {@link Artifact}s to the given {@link Action} with the given exec paths.
 * Fails the test if any requested exec path is not among the action's inputs.
 */
protected static List<Artifact> getInputs(Action owner, Collection<String> execPaths) {
  Set<String> remaining = new HashSet<>(execPaths);
  List<Artifact> matched = new ArrayList<>();
  // Renamed the loop variable: these are the action's *inputs*, not outputs.
  for (Artifact input : owner.getInputs().toList()) {
    if (remaining.remove(input.getExecPathString())) {
      matched.add(input);
    }
  }
  assertWithMessage("expected paths not found in: %s", Artifact.asExecPaths(owner.getInputs()))
      .that(remaining)
      .isEmpty();
  return matched;
}
/**
 * Gets a derived Artifact for testing in the {@link BuildConfigurationValue#getBinDirectory}.
 * This method should only be used for tests that do no analysis, and so there is no
 * ConfiguredTarget to own this artifact. If the test runs the analysis phase, {@link
 * #getBinArtifact(String, ConfiguredTarget)} or its convenience methods should be used instead.
 */
protected Artifact.DerivedArtifact getBinArtifactWithNoOwner(String rootRelativePath) {
  return getDerivedArtifact(
      PathFragment.create(rootRelativePath),
      targetConfig.getBinDirectory(RepositoryName.MAIN),
      ActionsTestUtil.NULL_ARTIFACT_OWNER);
}

/**
 * Gets a derived Artifact for testing in the subdirectory of the {@link
 * BuildConfigurationValue#getBinDirectory} corresponding to the package of {@code owner}. So to
 * specify a file foo/foo.o owned by target //foo:foo, {@code packageRelativePath} should just be
 * "foo.o".
 */
protected final Artifact getBinArtifact(String packageRelativePath, ConfiguredTarget owner) {
  return getPackageRelativeDerivedArtifact(
      packageRelativePath,
      getConfiguration(owner).getBinDirectory(RepositoryName.MAIN),
      ConfiguredTargetKey.builder()
          .setConfiguredTarget(owner)
          .setConfiguration(
              skyframeExecutor.getConfiguration(reporter, owner.getConfigurationKey()))
          .build());
}
/**
 * Gets a derived Artifact for testing in the subdirectory of the {@link
 * BuildConfigurationValue#getBinDirectory} corresponding to the package of {@code owner}, where
 * the given artifact belongs to the given ConfiguredTarget together with the given Aspect. So to
 * specify a file foo/foo.o owned by target //foo:foo with an aspect from FooAspect, {@code
 * packageRelativePath} should just be "foo.o", and aspectOfOwner should be FooAspect.class. This
 * method is necessary when an Aspect of the target, not the target itself, is creating an
 * Artifact.
 */
protected Artifact getBinArtifact(
    String packageRelativePath, ConfiguredTarget owner, AspectClass creatingAspectFactory) {
  // Convenience overload: no aspect parameters.
  return getBinArtifact(
      packageRelativePath, owner, creatingAspectFactory, AspectParameters.EMPTY);
}

/**
 * Gets a derived Artifact for testing in the subdirectory of the {@link
 * BuildConfigurationValue#getBinDirectory} corresponding to the package of {@code owner}, where
 * the given artifact belongs to the given ConfiguredTarget together with the given Aspect. So to
 * specify a file foo/foo.o owned by target //foo:foo with an aspect from FooAspect, {@code
 * packageRelativePath} should just be "foo.o", and aspectOfOwner should be FooAspect.class. This
 * method is necessary when an Aspect of the target, not the target itself, is creating an
 * Artifact.
 */
protected Artifact getBinArtifact(
    String packageRelativePath,
    ConfiguredTarget owner,
    AspectClass creatingAspectFactory,
    AspectParameters parameters) {
  // The owner here is the aspect key (aspect applied to the target), not the bare target key.
  return getPackageRelativeDerivedArtifact(
      packageRelativePath,
      getConfiguration(owner).getBinDirectory(RepositoryName.MAIN),
      AspectKeyCreator.createAspectKey(
          new AspectDescriptor(creatingAspectFactory, parameters),
          ConfiguredTargetKey.builder()
              .setLabel(owner.getLabel())
              .setConfiguration(getConfiguration(owner))
              .build()));
}
  /**
   * Gets a derived Artifact for testing in the {@link
   * BuildConfigurationValue#getGenfilesDirectory}. This method should only be used for tests that
   * do no analysis, and so there is no ConfiguredTarget to own this artifact. If the test runs the
   * analysis phase, {@link #getGenfilesArtifact(String, ConfiguredTarget)} or its convenience
   * methods should be used instead.
   */
  protected Artifact getGenfilesArtifactWithNoOwner(String rootRelativePath) {
    return getDerivedArtifact(
        PathFragment.create(rootRelativePath),
        targetConfig.getGenfilesDirectory(RepositoryName.MAIN),
        ActionsTestUtil.NULL_ARTIFACT_OWNER);
  }
  /**
   * Gets a derived Artifact for testing in the subdirectory of the {@link
   * BuildConfigurationValue#getGenfilesDirectory} corresponding to the package of {@code owner}. So
   * to specify a file foo/foo.o owned by target //foo:foo, {@code packageRelativePath} should just
   * be "foo.o".
   */
  protected Artifact getGenfilesArtifact(String packageRelativePath, String owner) {
    BuildConfigurationValue config = getConfiguration(owner);
    return getGenfilesArtifact(
        packageRelativePath,
        ConfiguredTargetKey.builder()
            .setLabel(Label.parseAbsoluteUnchecked(owner))
            .setConfiguration(config)
            .build(),
        config);
  }
  /**
   * Gets a derived Artifact for testing in the subdirectory of the {@link
   * BuildConfigurationValue#getGenfilesDirectory} corresponding to the package of {@code owner}. So
   * to specify a file foo/foo.o owned by target //foo:foo, {@code packageRelativePath} should just
   * be "foo.o".
   */
  protected Artifact getGenfilesArtifact(String packageRelativePath, ConfiguredTarget owner) {
    BuildConfigurationValue configuration =
        skyframeExecutor.getConfiguration(reporter, owner.getConfigurationKey());
    ConfiguredTargetKey configKey =
        ConfiguredTargetKey.builder()
            .setConfiguredTarget(owner)
            .setConfiguration(configuration)
            .build();
    return getGenfilesArtifact(packageRelativePath, configKey, configuration);
  }
  /**
   * Gets a derived Artifact for testing in the subdirectory of the {@link
   * BuildConfigurationValue#getGenfilesDirectory} corresponding to the package of {@code owner},
   * where the given artifact belongs to the given ConfiguredTarget together with the given Aspect.
   * So to specify a file foo/foo.o owned by target //foo:foo with an aspect from FooAspect, {@code
   * packageRelativePath} should just be "foo.o", and aspectOfOwner should be FooAspect.class. This
   * method is necessary when an Aspect of the target, not the target itself, is creating an
   * Artifact.
   */
  protected Artifact getGenfilesArtifact(
      String packageRelativePath, ConfiguredTarget owner, NativeAspectClass creatingAspectFactory) {
    // Convenience overload for aspects instantiated without parameters.
    return getGenfilesArtifact(
        packageRelativePath, owner, creatingAspectFactory, AspectParameters.EMPTY);
  }
  /**
   * Same as {@link #getGenfilesArtifact(String, ConfiguredTarget, NativeAspectClass)}, but for an
   * aspect instantiated with the given {@code params}.
   */
  protected Artifact getGenfilesArtifact(
      String packageRelativePath,
      ConfiguredTarget owner,
      NativeAspectClass creatingAspectFactory,
      AspectParameters params) {
    return getPackageRelativeDerivedArtifact(
        packageRelativePath,
        getConfiguration(owner).getGenfilesDirectory(owner.getLabel().getRepository()),
        getOwnerForAspect(owner, creatingAspectFactory, params));
  }
  /**
   * Gets a derived Artifact for testing in the subdirectory of the {@link
   * BuildConfigurationValue#getGenfilesDirectory} corresponding to the package of {@code owner}. So
   * to specify a file foo/foo.o owned by target //foo:foo, {@code packageRelativePath} should just
   * be "foo.o".
   */
  private Artifact getGenfilesArtifact(
      String packageRelativePath, ArtifactOwner owner, BuildConfigurationValue config) {
    return getPackageRelativeDerivedArtifact(
        packageRelativePath, config.getGenfilesDirectory(RepositoryName.MAIN), owner);
  }
  /** Returns the {@link AspectKey} that owns artifacts created by the given aspect on {@code owner}. */
  protected AspectKey getOwnerForAspect(
      ConfiguredTarget owner, NativeAspectClass creatingAspectFactory, AspectParameters params) {
    return AspectKeyCreator.createAspectKey(
        new AspectDescriptor(creatingAspectFactory, params),
        ConfiguredTargetKey.builder()
            .setLabel(owner.getLabel())
            .setConfiguration(getConfiguration(owner))
            .build());
  }
  /**
   * Gets a derived Artifact for testing in the subdirectory of the {@link
   * BuildConfigurationValue#getBuildInfoDirectory} corresponding to the package of {@code owner}.
   * So to specify a file foo/foo.h owned by target //foo:foo, {@code packageRelativePath} should
   * just be "foo.h".
   */
  protected Artifact getIncludeArtifact(String packageRelativePath, String owner) {
    return getIncludeArtifact(packageRelativePath, makeConfiguredTargetKey(owner));
  }
  /**
   * Gets a derived Artifact for testing in the subdirectory of the {@link
   * BuildConfigurationValue#getBuildInfoDirectory} corresponding to the package of {@code owner}.
   * So to specify a file foo/foo.h owned by target //foo:foo, {@code packageRelativePath} should
   * just be "foo.h".
   */
  protected Artifact getIncludeArtifact(String packageRelativePath, ArtifactOwner owner) {
    return getPackageRelativeDerivedArtifact(
        packageRelativePath,
        targetConfig.getBuildInfoDirectory(owner.getLabel().getRepository()),
        owner);
  }
  /**
   * Returns a shared artifact at the binary-root relative path {@code rootRelativePath} owned by
   * {@code owner}.
   *
   * @param rootRelativePath the binary-root relative path of the artifact.
   * @param owner the artifact's owner.
   */
  protected Artifact getSharedArtifact(String rootRelativePath, ConfiguredTarget owner) {
    return getDerivedArtifact(
        PathFragment.create(rootRelativePath),
        targetConfig.getBinDirectory(RepositoryName.MAIN),
        ConfiguredTargetKey.builder()
            .setConfiguredTarget(owner)
            .setConfiguration(
                skyframeExecutor.getConfiguration(reporter, owner.getConfigurationKey()))
            .build());
  }
  /** Returns the action that generates the artifact output by the target at {@code label}. */
  protected Action getGeneratingActionForLabel(String label) throws Exception {
    return getGeneratingAction(getArtifact(label));
  }
  /** Returns the exec path of the given artifact as a string. */
  protected static String fileName(Artifact artifact) {
    return artifact.getExecPathString();
  }
  /** Returns the exec path of the given file target's artifact. */
  protected static String fileName(FileConfiguredTarget target) {
    return fileName(target.getArtifact());
  }
  /** Returns the exec path of the file configured target named by {@code name}. */
  protected String fileName(String name) throws Exception {
    return fileName(getFileConfiguredTarget(name));
  }
  /** Returns the output path computed from the test's directories and runfiles prefix. */
  protected Path getOutputPath() {
    return directories.getOutputPath(ruleClassProvider.getRunfilesPrefix());
  }
  /** Returns the relative output path reported by the test's directories. */
  protected String getRelativeOutputPath() {
    return directories.getRelativeOutputPath();
  }
/**
* Verifies whether the rule checks the 'srcs' attribute validity.
*
* <p>At the call site it expects the {@code packageName} to contain:
*
* <ol>
* <li>{@code :gvalid} - genrule that outputs a valid file
* <li>{@code :ginvalid} - genrule that outputs an invalid file
* <li>{@code :gmix} - genrule that outputs a mix of valid and invalid files
* <li>{@code :valid} - rule of type {@code ruleType} that has a valid file, {@code :gvalid} and
* {@code :gmix} in the srcs
* <li>{@code :invalid} - rule of type {@code ruleType} that has an invalid file, {@code
* :ginvalid} in the srcs
* <li>{@code :mix} - rule of type {@code ruleType} that has a valid and an invalid file in the
* srcs
* </ol>
*
* @param packageName the package where the rules under test are located
* @param ruleType rules under test types
* @param expectedTypes expected file types
*/
protected void assertSrcsValidityForRuleType(
String packageName, String ruleType, String expectedTypes) throws Exception {
reporter.removeHandler(failFastHandler);
String descriptionSingle = ruleType + " srcs file (expected " + expectedTypes + ")";
String descriptionPlural = ruleType + " srcs files (expected " + expectedTypes + ")";
String descriptionPluralFile = "(expected " + expectedTypes + ")";
assertSrcsValidity(
ruleType,
packageName + ":valid",
false,
"need at least one " + descriptionSingle,
"'" + packageName + ":gvalid' does not produce any " + descriptionPlural,
"'" + packageName + ":gmix' does not produce any " + descriptionPlural);
assertSrcsValidity(
ruleType,
packageName + ":invalid",
true,
"source file '" + packageName + ":a.foo' is misplaced here " + descriptionPluralFile,
"'" + packageName + ":ginvalid' does not produce any " + descriptionPlural);
assertSrcsValidity(
ruleType,
packageName + ":mix",
true,
"'" + packageName + ":a.foo' does not produce any " + descriptionPlural);
}
  /**
   * Asserts that analyzing {@code targetName} (a rule of {@code ruleType}) does or does not report
   * srcs-validity errors.
   *
   * @param expectedError if true, each of {@code expectedMessages} must have been reported; if
   *     false, none of them may appear
   */
  protected void assertSrcsValidity(
      String ruleType, String targetName, boolean expectedError, String... expectedMessages)
      throws Exception {
    ConfiguredTarget target = getConfiguredTarget(targetName);
    if (expectedError) {
      assertThat(view.hasErrors(target)).isTrue();
      for (String expectedMessage : expectedMessages) {
        String message =
            "in srcs attribute of " + ruleType + " rule " + targetName + ": " + expectedMessage;
        assertContainsEvent(message);
      }
    } else {
      assertThat(view.hasErrors(target)).isFalse();
      for (String expectedMessage : expectedMessages) {
        String message =
            "in srcs attribute of "
                + ruleType
                + " rule "
                + target.getLabel()
                + ": "
                + expectedMessage;
        assertDoesNotContainEvent(message);
      }
    }
  }
  /** Builds a {@link ConfiguredAttributeMapper} for the rule underlying the given pair. */
  protected static ConfiguredAttributeMapper getMapperFromConfiguredTargetAndTarget(
      ConfiguredTargetAndData ctad) {
    return ConfiguredAttributeMapper.of(
        (Rule) ctad.getTarget(),
        ctad.getConfiguredTarget().getConfigConditions(),
        ctad.getConfiguration());
  }
  /** Returns a configured-target key for {@code label} in its configuration. */
  private ConfiguredTargetKey makeConfiguredTargetKey(String label) {
    return ConfiguredTargetKey.builder()
        .setLabel(Label.parseAbsoluteUnchecked(label))
        .setConfiguration(getConfiguration(label))
        .build();
  }
protected static ImmutableList<String> actionInputsToPaths(
NestedSet<? extends ActionInput> actionInputs) {
return ImmutableList.copyOf(
Lists.transform(actionInputs.toList(), ActionInput::getExecPathString));
}
  /**
   * Utility method for asserting that the contents of one collection are the same as those in a
   * second plus some set of common elements.
   *
   * @param artifacts the actual contents under test
   * @param expectedInputs the expected contents, excluding the common elements
   * @param common the elements expected in addition to {@code expectedInputs}
   */
  protected void assertSameContentsWithCommonElements(
      Iterable<String> artifacts, String[] expectedInputs, Iterable<String> common) {
    assertThat(artifacts)
        .containsExactlyElementsIn(Iterables.concat(Lists.newArrayList(expectedInputs), common));
  }
  /**
   * Utility method for asserting that a list contains the elements of a sublist. This is useful for
   * checking that a list of arguments contains a particular set of arguments.
   */
  protected static void assertContainsSublist(List<String> list, List<String> sublist) {
    assertContainsSublist(null, list, sublist);
  }
  /**
   * Utility method for asserting that a list contains the elements of a sublist, contiguously and
   * in order. This is useful for checking that a list of arguments contains a particular set of
   * arguments.
   *
   * @param message optional prefix for the failure message; may be null
   */
  protected static void assertContainsSublist(
      String message, List<String> list, List<String> sublist) {
    if (Collections.indexOfSubList(list, sublist) == -1) {
      fail(
          String.format(
              "%sexpected: <%s> to contain sublist: <%s>",
              message == null ? "" : (message + ' '), list, sublist));
    }
  }
  /** Asserts that an event reporting a dependency self-edge on {@code label} was emitted. */
  protected void assertContainsSelfEdgeEvent(String label) {
    assertContainsEvent(Pattern.compile(label + " \\([a-f0-9]+\\) \\[self-edge]"));
  }
protected static NestedSet<Artifact> collectRunfiles(ConfiguredTarget target) {
RunfilesProvider runfilesProvider = target.getProvider(RunfilesProvider.class);
if (runfilesProvider != null) {
return runfilesProvider.getDefaultRunfiles().getAllArtifacts();
} else {
return Runfiles.EMPTY.getAllArtifacts();
}
}
  /** Returns the files-to-build of the given target, from its {@link FileProvider}. */
  protected static NestedSet<Artifact> getFilesToBuild(TransitiveInfoCollection target) {
    return target.getProvider(FileProvider.class).getFilesToBuild();
  }
  /** Returns all extra actions for that target (no transitive actions), no duplicate actions. */
  protected ImmutableList<Action> getExtraActionActions(ConfiguredTarget target) {
    // LinkedHashSet deduplicates while preserving first-occurrence order.
    LinkedHashSet<Action> result = new LinkedHashSet<>();
    for (Artifact artifact : getExtraActionArtifacts(target).toList()) {
      result.add(getGeneratingAction(artifact));
    }
    return ImmutableList.copyOf(result);
  }
  /** Returns all extra actions for that target (including transitive actions). */
  protected ImmutableList<ExtraAction> getTransitiveExtraActionActions(ConfiguredTarget target) {
    ImmutableList.Builder<ExtraAction> result = new ImmutableList.Builder<>();
    for (Artifact artifact :
        target
            .getProvider(ExtraActionArtifactsProvider.class)
            .getTransitiveExtraActionArtifacts()
            .toList()) {
      Action action = getGeneratingAction(artifact);
      // Generating actions that are not ExtraActions are silently skipped.
      if (action instanceof ExtraAction) {
        result.add((ExtraAction) action);
      }
    }
    return result.build();
  }
protected ImmutableList<Action> getFilesToBuildActions(ConfiguredTarget target) {
List<Action> result = new ArrayList<>();
for (Artifact artifact : getFilesToBuild(target).toList()) {
Action action = getGeneratingAction(artifact);
if (action != null) {
result.add(action);
}
}
return ImmutableList.copyOf(result);
}
  /**
   * Returns the artifacts in the named output group of {@code target}, or an empty set when the
   * target carries no {@link OutputGroupInfo}.
   */
  protected static NestedSet<Artifact> getOutputGroup(
      TransitiveInfoCollection target, String outputGroup) {
    OutputGroupInfo provider = OutputGroupInfo.get(target);
    return provider == null
        ? NestedSetBuilder.emptySet(Order.STABLE_ORDER)
        : provider.getOutputGroup(outputGroup);
  }
  /** Returns the (non-transitive) extra-action artifacts of {@code target}. */
  protected static NestedSet<Artifact.DerivedArtifact> getExtraActionArtifacts(
      ConfiguredTarget target) {
    return target.getProvider(ExtraActionArtifactsProvider.class).getExtraActionArtifacts();
  }
  /** Returns the executable artifact of the target at {@code label}. */
  protected Artifact getExecutable(String label) throws Exception {
    return getConfiguredTarget(label).getProvider(FilesToRunProvider.class).getExecutable();
  }
  /** Returns the executable artifact of the given target. */
  protected static Artifact getExecutable(TransitiveInfoCollection target) {
    return target.getProvider(FilesToRunProvider.class).getExecutable();
  }
  /** Returns the files-to-run of the given target. */
  protected static NestedSet<Artifact> getFilesToRun(TransitiveInfoCollection target) {
    return target.getProvider(FilesToRunProvider.class).getFilesToRun();
  }
  /** Returns the files-to-run of the target at {@code label} in the target configuration. */
  protected NestedSet<Artifact> getFilesToRun(Label label) {
    return getConfiguredTarget(label, targetConfig)
        .getProvider(FilesToRunProvider.class)
        .getFilesToRun();
  }
  /** Returns the files-to-run of the target at {@code label}. */
  protected NestedSet<Artifact> getFilesToRun(String label) throws Exception {
    return getConfiguredTarget(label).getProvider(FilesToRunProvider.class).getFilesToRun();
  }
  /** Returns the runfiles support of the target at {@code label}. */
  protected RunfilesSupport getRunfilesSupport(String label) throws Exception {
    return getConfiguredTarget(label).getProvider(FilesToRunProvider.class).getRunfilesSupport();
  }
  /** Returns the runfiles support of the given target. */
  protected static RunfilesSupport getRunfilesSupport(TransitiveInfoCollection target) {
    return target.getProvider(FilesToRunProvider.class).getRunfilesSupport();
  }
  /** Returns the default runfiles of the given target. */
  protected static Runfiles getDefaultRunfiles(ConfiguredTarget target) {
    return target.getProvider(RunfilesProvider.class).getDefaultRunfiles();
  }
  /** Returns the data runfiles of the given target. */
  protected static Runfiles getDataRunfiles(ConfiguredTarget target) {
    return target.getProvider(RunfilesProvider.class).getDataRunfiles();
  }
  /** Returns the single target configuration of this test's configuration collection. */
  protected BuildConfigurationValue getTargetConfiguration() {
    return Iterables.getOnlyElement(masterConfig.getTargetConfigurations());
  }
  /** Returns the host configuration of this test's configuration collection. */
  protected BuildConfigurationValue getHostConfiguration() {
    return masterConfig.getHostConfiguration();
  }
  /**
   * Returns the configuration created by applying the given transition to the source configuration.
   *
   * @throws AssertionError if the transition couldn't be evaluated
   */
  protected BuildConfigurationValue getConfiguration(
      BuildConfigurationValue fromConfig, PatchTransition transition) throws InterruptedException {
    if (transition == NoTransition.INSTANCE) {
      // No-op transition: the input configuration is returned unchanged.
      return fromConfig;
    } else if (transition == NullTransition.INSTANCE) {
      // The null transition maps to a null configuration.
      return null;
    } else {
      try {
        return skyframeExecutor.getConfigurationForTesting(
            reporter,
            transition.patch(
                new BuildOptionsView(fromConfig.getOptions(), transition.requiresOptionFragments()),
                eventCollector));
      } catch (OptionsParsingException | InvalidConfigurationException e) {
        throw new AssertionError(e);
      }
    }
  }
  /** Returns the configuration of the configured target at {@code label}, for testing. */
  private BuildConfigurationValue getConfiguration(String label) {
    BuildConfigurationValue config;
    try {
      config = getConfiguration(getConfiguredTarget(label));
      config = view.getConfigurationForTesting(getTarget(label), config, reporter);
    } catch (LabelSyntaxException e) {
      throw new IllegalArgumentException(e);
    } catch (Exception e) {
      // TODO(b/36585204): Clean this up
      throw new RuntimeException(e);
    }
    return config;
  }
  /** Resolves a {@link BuildConfigurationKey} to its configuration value. */
  protected final BuildConfigurationValue getConfiguration(BuildConfigurationKey configurationKey) {
    return skyframeExecutor.getConfiguration(reporter, configurationKey);
  }
  /** Returns the configuration the given configured target was built in. */
  protected final BuildConfigurationValue getConfiguration(ConfiguredTarget ct) {
    return skyframeExecutor.getConfiguration(reporter, ct.getConfigurationKey());
  }
  /** Returns an attribute value retriever for the given rule for the target configuration. */
  protected AttributeMap attributes(RuleConfiguredTarget ct) {
    ConfiguredTargetAndData ctad;
    try {
      ctad = getConfiguredTargetAndData(ct.getLabel().toString());
    } catch (LabelSyntaxException
        | StarlarkTransition.TransitionException
        | InvalidConfigurationException
        | InterruptedException e) {
      throw new RuntimeException(e);
    }
    return getMapperFromConfiguredTargetAndTarget(ctad);
  }
  /** Convenience overload of {@link #attributes(RuleConfiguredTarget)}; {@code rule} must be one. */
  protected AttributeMap attributes(ConfiguredTarget rule) {
    return attributes((RuleConfiguredTarget) rule);
  }
  /** Parses and installs custom loading-phase options used by subsequent update calls. */
  protected void useLoadingOptions(String... options) throws OptionsParsingException {
    customLoadingOptions = Options.parse(LoadingOptions.class, options).getOptions();
  }
  /**
   * Loads and optionally analyzes the given target patterns; convenience overload of {@link
   * #update(List, List, boolean, int, boolean, EventBus)} with no aspects.
   */
  protected AnalysisResult update(
      List<String> targets,
      boolean keepGoing,
      int loadingPhaseThreads,
      boolean doAnalysis,
      EventBus eventBus)
      throws Exception {
    return update(
        targets, ImmutableList.of(), keepGoing, loadingPhaseThreads, doAnalysis, eventBus);
  }
  /**
   * Runs the loading phase for the given target patterns and, when {@code doAnalysis} is set, the
   * analysis phase with the given aspects.
   *
   * <p>Uses loading options previously installed via {@link #useLoadingOptions}, if any; otherwise
   * the defaults. Returns null when {@code doAnalysis} is false.
   */
  protected AnalysisResult update(
      List<String> targets,
      List<String> aspects,
      boolean keepGoing,
      int loadingPhaseThreads,
      boolean doAnalysis,
      EventBus eventBus)
      throws Exception {
    LoadingOptions loadingOptions =
        customLoadingOptions == null
            ? Options.getDefaults(LoadingOptions.class)
            : customLoadingOptions;
    AnalysisOptions viewOptions = Options.getDefaults(AnalysisOptions.class);
    TargetPatternPhaseValue loadingResult =
        skyframeExecutor.loadTargetPatternsWithFilters(
            reporter,
            targets,
            PathFragment.EMPTY_FRAGMENT,
            loadingOptions,
            loadingPhaseThreads,
            keepGoing,
            /*determineTests=*/ false);
    if (!doAnalysis) {
      // TODO(bazel-team): What's supposed to happen in this case?
      return null;
    }
    return view.update(
        loadingResult,
        targetConfig.getOptions(),
        /* multiCpu= */ ImmutableSet.of(),
        /*explicitTargetPatterns=*/ ImmutableSet.of(),
        aspects,
        /*aspectsParameters=*/ ImmutableMap.of(),
        viewOptions,
        keepGoing,
        loadingPhaseThreads,
        AnalysisTestUtil.TOP_LEVEL_ARTIFACT_CONTEXT,
        reporter,
        eventBus);
  }
  /** Returns a predicate matching artifacts whose pretty-printed name equals {@code name}. */
  protected static Predicate<Artifact> artifactNamed(String name) {
    return artifact -> name.equals(artifact.prettyPrint());
  }
  /**
   * Utility method for tests. Converts an array of strings into a set of labels.
   *
   * @param strings the set of strings to be converted to labels.
   * @throws LabelSyntaxException if there are any syntax errors in the strings.
   */
  public static Set<Label> asLabelSet(String... strings) throws LabelSyntaxException {
    return asLabelSet(ImmutableList.copyOf(strings));
  }
  /**
   * Utility method for tests. Converts an iterable of strings into a set of labels.
   *
   * @param strings the set of strings to be converted to labels.
   * @throws LabelSyntaxException if there are any syntax errors in the strings.
   */
  public static Set<Label> asLabelSet(Iterable<String> strings) throws LabelSyntaxException {
    // A TreeSet keeps the labels in their natural (sorted) order.
    Set<Label> result = Sets.newTreeSet();
    for (String s : strings) {
      result.add(Label.parseAbsolute(s, ImmutableMap.of()));
    }
    return result;
  }
protected static String getErrorMsgNoGoodFiles(
String attrName, String ruleType, String ruleName, String depRuleName) {
return String.format(
"in %s attribute of %s rule %s: '%s' does not produce any %s %s files",
attrName, ruleType, ruleName, depRuleName, ruleType, attrName);
}
protected static String getErrorMsgMisplacedFiles(
String attrName, String ruleType, String ruleName, String fileName) {
return String.format(
"in %s attribute of %s rule %s: source file '%s' is misplaced here",
attrName, ruleType, ruleName, fileName);
}
protected static String getErrorNonExistingTarget(
String attrName, String ruleType, String ruleName, String targetName) {
return String.format(
"in %s attribute of %s rule %s: target '%s' does not exist",
attrName, ruleType, ruleName, targetName);
}
protected static String getErrorNonExistingRule(
String attrName, String ruleType, String ruleName, String targetName) {
return String.format(
"in %s attribute of %s rule %s: rule '%s' does not exist",
attrName, ruleType, ruleName, targetName);
}
protected static String getErrorMsgMisplacedRules(
String attrName, String ruleType, String ruleName, String depRuleType, String depRuleName) {
return String.format(
"in %s attribute of %s rule %s: %s rule '%s' is misplaced here",
attrName, ruleType, ruleName, depRuleType, depRuleName);
}
protected static String getErrorMsgNonEmptyList(
String attrName, String ruleType, String ruleName) {
return String.format(
"in %s attribute of %s rule %s: attribute must be non empty", attrName, ruleType, ruleName);
}
protected static String getErrorMsgMandatoryMissing(String attrName, String ruleType) {
return String.format(
"missing value for mandatory attribute '%s' in '%s' rule", attrName, ruleType);
}
  /** Returns the expected "has to be one of ..." error for a bad attribute value. */
  protected static String getErrorMsgWrongAttributeValue(String value, String... expected) {
    return String.format(
        "has to be one of %s instead of '%s'",
        StringUtil.joinEnglishList(ImmutableSet.copyOf(expected), "or", "'"), value);
  }
protected static String getErrorMsgMandatoryProviderMissing(
String offendingRule, String providerName) {
return String.format(
"'%s' does not have mandatory providers: '%s'", offendingRule, providerName);
}
  /**
   * Utility method for tests that result in errors early during package loading. Given the name of
   * the package for the test, and the rules for the build file, create a scratch file, load the
   * build file, and produce the package.
   *
   * @param packageName the name of the package for the build file
   * @param lines the rules for the build file as an array of strings
   * @return the loaded package from the populated package cache
   * @throws Exception if there is an error creating the temporary files for the test.
   */
  protected com.google.devtools.build.lib.packages.Package createScratchPackageForImplicitCycle(
      String packageName, String... lines) throws Exception {
    eventCollector.clear();
    // Loading is expected to fail; don't let the fail-fast handler abort the test on first error.
    reporter.removeHandler(failFastHandler);
    scratch.file(packageName + "/BUILD", lines);
    return getPackageManager()
        .getPackage(reporter, PackageIdentifier.createInMainRepo(packageName));
  }
  /**
   * A stub analysis environment.
   *
   * <p>Most operations throw {@link UnsupportedOperationException}; only the event handler,
   * Starlark semantics, and action key context are wired to the enclosing test fixture.
   */
  protected class StubAnalysisEnvironment implements AnalysisEnvironment {
    @Override
    public void registerAction(ActionAnalysisMetadata action) {
      throw new UnsupportedOperationException();
    }
    @Override
    public boolean hasErrors() {
      // The stub never accumulates errors.
      return false;
    }
    @Override
    public Artifact getConstantMetadataArtifact(PathFragment rootRelativePath, ArtifactRoot root) {
      throw new UnsupportedOperationException();
    }
    @Override
    public SpecialArtifact getTreeArtifact(PathFragment rootRelativePath, ArtifactRoot root) {
      throw new UnsupportedOperationException();
    }
    @Override
    public SpecialArtifact getSymlinkArtifact(PathFragment rootRelativePath, ArtifactRoot root) {
      throw new UnsupportedOperationException();
    }
    @Override
    public Artifact getSourceArtifactForNinjaBuild(PathFragment execPath, Root root) {
      throw new UnsupportedOperationException();
    }
    @Override
    public ExtendedEventHandler getEventHandler() {
      // Events go to the test's reporter.
      return reporter;
    }
    @Override
    public MiddlemanFactory getMiddlemanFactory() {
      throw new UnsupportedOperationException();
    }
    @Override
    public Action getLocalGeneratingAction(Artifact artifact) {
      throw new UnsupportedOperationException();
    }
    @Override
    public ImmutableList<ActionAnalysisMetadata> getRegisteredActions() {
      throw new UnsupportedOperationException();
    }
    @Override
    public SkyFunction.Environment getSkyframeEnv() {
      throw new UnsupportedOperationException();
    }
    @Override
    public StarlarkSemantics getStarlarkSemantics() {
      // Semantics are derived from the test's build language options.
      return buildLanguageOptions.toStarlarkSemantics();
    }
    @Override
    public ImmutableMap<String, Object> getStarlarkDefinedBuiltins() {
      throw new UnsupportedOperationException();
    }
    @Override
    public Artifact getFilesetArtifact(PathFragment rootRelativePath, ArtifactRoot root) {
      throw new UnsupportedOperationException();
    }
    @Override
    public Artifact.DerivedArtifact getDerivedArtifact(
        PathFragment rootRelativePath, ArtifactRoot root) {
      throw new UnsupportedOperationException();
    }
    @Override
    public Artifact.DerivedArtifact getDerivedArtifact(
        PathFragment rootRelativePath, ArtifactRoot root, boolean contentBasedPath) {
      throw new UnsupportedOperationException();
    }
    @Override
    public Artifact getStableWorkspaceStatusArtifact() {
      throw new UnsupportedOperationException();
    }
    @Override
    public Artifact getVolatileWorkspaceStatusArtifact() {
      throw new UnsupportedOperationException();
    }
    @Override
    public ImmutableList<Artifact> getBuildInfo(
        boolean stamp, BuildInfoKey key, BuildConfigurationValue config) {
      throw new UnsupportedOperationException();
    }
    @Override
    public ActionLookupKey getOwner() {
      throw new UnsupportedOperationException();
    }
    @Override
    public ImmutableSet<Artifact> getOrphanArtifacts() {
      throw new UnsupportedOperationException();
    }
    @Override
    public ImmutableSet<Artifact> getTreeArtifactsConflictingWithFiles() {
      throw new UnsupportedOperationException();
    }
    @Override
    public ActionKeyContext getActionKeyContext() {
      // The fixture's shared action key context.
      return actionKeyContext;
    }
  }
  /**
   * Returns the basenames of the source files ("SF:" entries) recorded in the target's baseline
   * coverage output.
   */
  protected Iterable<String> baselineCoverageArtifactBasenames(ConfiguredTarget target)
      throws Exception {
    ImmutableList.Builder<String> basenames = ImmutableList.builder();
    for (Artifact baselineCoverage :
        target
            .get(InstrumentedFilesInfo.STARLARK_CONSTRUCTOR)
            .getBaselineCoverageArtifacts()
            .toList()) {
      BaselineCoverageAction baselineAction =
          (BaselineCoverageAction) getGeneratingAction(baselineCoverage);
      // Write the action's output into memory rather than onto disk.
      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      baselineAction
          .newDeterministicWriter(ActionsTestUtil.createContext(reporter))
          .writeOutputFile(bytes);
      // Each "SF:" line names a source file; keep only its basename.
      for (String line : Splitter.on('\n').split(bytes.toString(UTF_8))) {
        if (line.startsWith("SF:")) {
          String basename = line.substring(line.lastIndexOf('/') + 1);
          basenames.add(basename);
        }
      }
    }
    return basenames.build();
  }
  /**
   * Finds an artifact in the transitive closure of a set of other artifacts by following a path
   * based on artifact name suffixes.
   *
   * <p>This selects the first artifact in the input set that matches the first suffix, then selects
   * the first artifact in the inputs of its generating action that matches the second suffix etc.,
   * and repeats this until the supplied suffixes run out.
   */
  protected Artifact artifactByPath(NestedSet<Artifact> artifacts, String... suffixes) {
    return artifactByPath(artifacts.toList(), suffixes);
  }
/**
* Finds an artifact in the transitive closure of a set of other artifacts by following a path
* based on artifact name suffixes.
*
* <p>This selects the first artifact in the input set that matches the first suffix, then selects
* the first artifact in the inputs of its generating action that matches the second suffix etc.,
* and repeats this until the supplied suffixes run out.
*/
protected Artifact artifactByPath(Iterable<Artifact> artifacts, String... suffixes) {
Artifact artifact = getFirstArtifactEndingWith(artifacts, suffixes[0]);
Action action = null;
for (int i = 1; i < suffixes.length; i++) {
if (artifact == null) {
if (action == null) {
throw new IllegalStateException(
String.format(
"No suffix %s among artifacts: %s",
suffixes[0], ActionsTestUtil.baseArtifactNames(artifacts)));
} else {
throw new IllegalStateException(
String.format(
"No suffix %s among inputs of action %s: %s",
suffixes[i], action.describe(), ActionsTestUtil.baseArtifactNames(artifacts)));
}
}
action = getGeneratingAction(artifact);
artifacts = action.getInputs().toList();
artifact = getFirstArtifactEndingWith(artifacts, suffixes[i]);
}
return artifact;
}
  /**
   * Retrieves an instance of {@code PseudoAction} that is shadowed by an extra action
   *
   * @param targetLabel Label of the target with an extra action
   * @param actionListenerLabel Label of the action listener
   */
  protected PseudoAction<?> getPseudoActionViaExtraAction(
      String targetLabel, String actionListenerLabel) throws Exception {
    useConfiguration(String.format("--experimental_action_listener=%s", actionListenerLabel));
    ConfiguredTarget target = getConfiguredTarget(targetLabel);
    List<Action> actions = getExtraActionActions(target);
    assertThat(actions).isNotNull();
    assertThat(actions).hasSize(2);
    // Pick out the ExtraAction among the two expected actions.
    ExtraAction extraAction = null;
    for (Action action : actions) {
      if (action instanceof ExtraAction) {
        extraAction = (ExtraAction) action;
        break;
      }
    }
    assertWithMessage(actions.toString()).that(extraAction).isNotNull();
    Action pseudoAction = extraAction.getShadowedAction();
    assertThat(pseudoAction).isInstanceOf(PseudoAction.class);
    // The pseudo action's primary output is expected at a fixed genfiles-relative dummy path.
    assertThat(pseudoAction.getPrimaryOutput().getExecPathString())
        .isEqualTo(
            String.format(
                "%s%s.extra_action_dummy",
                targetConfig.getGenfilesFragment(RepositoryName.MAIN),
                convertLabelToPath(targetLabel)));
    return (PseudoAction<?>) pseudoAction;
  }
  /**
   * Converts a label such as {@code //foo:bar} to an output path such as {@code /foo/bar}: the
   * colon becomes a slash and the leading character is dropped.
   */
  private static String convertLabelToPath(String label) {
    return label.replace(':', '/').substring(1);
  }
  /**
   * Returns the single implicit output path that {@code outputFunction} derives from the rule
   * underlying {@code target}.
   */
  protected final String getImplicitOutputPath(
      ConfiguredTarget target, SafeImplicitOutputsFunction outputFunction) {
    Rule rule;
    try {
      rule = (Rule) skyframeExecutor.getPackageManager().getTarget(reporter, target.getLabel());
    } catch (NoSuchPackageException | NoSuchTargetException | InterruptedException e) {
      throw new IllegalStateException(e);
    }
    RawAttributeMapper attr = RawAttributeMapper.of(rule.getAssociatedRule());
    return Iterables.getOnlyElement(outputFunction.getImplicitOutputs(eventCollector, attr));
  }
  /**
   * Gets the artifact whose name is derived from {@code outputFunction}. Despite the name, this can
   * be called for artifacts that are not declared as implicit outputs: it just finds the artifact
   * inside the configured target by calling {@link #getBinArtifact(String, ConfiguredTarget)} on
   * the result of the {@code outputFunction}.
   */
  protected final Artifact getImplicitOutputArtifact(
      ConfiguredTarget target, SafeImplicitOutputsFunction outputFunction) {
    return getBinArtifact(getImplicitOutputPath(target, outputFunction), target);
  }
  /** Returns the exec root computed from the test's directories and runfiles prefix. */
  public Path getExecRoot() {
    return directories.getExecRoot(ruleClassProvider.getRunfilesPrefix());
  }
/** Returns true iff commandLine contains the option --flagName followed by arg. */
protected static boolean containsFlag(String flagName, String arg, Iterable<String> commandLine) {
Iterator<String> iterator = commandLine.iterator();
while (iterator.hasNext()) {
if (flagName.equals(iterator.next()) && iterator.hasNext() && arg.equals(iterator.next())) {
return true;
}
}
return false;
}
  /**
   * Returns the list of arguments in commandLine that follow after --flagName, stopping at the
   * first subsequent token that starts with "--".
   *
   * @throws IllegalArgumentException if {@code flagName} does not occur in {@code commandLine}
   */
  protected static ImmutableList<String> flagValue(String flagName, Iterable<String> commandLine) {
    ImmutableList.Builder<String> resultBuilder = ImmutableList.builder();
    Iterator<String> iterator = commandLine.iterator();
    boolean found = false;
    while (iterator.hasNext()) {
      String val = iterator.next();
      if (found) {
        // Collect values until the next option-like token.
        if (val.startsWith("--")) {
          break;
        }
        resultBuilder.add(val);
      } else if (flagName.equals(val)) {
        found = true;
      }
    }
    Preconditions.checkArgument(found);
    return resultBuilder.build();
  }
/**
 * Creates instances of {@link ActionExecutionContext} consistent with test case.
 *
 * <p>Anything not configured through the setters below is filled with inert defaults (nulls,
 * empty maps, no-op caches), which suffices for most action-execution tests.
 */
public class ActionExecutionContextBuilder {
// Passed through to the context as its MetadataProvider; null unless a test supplies one.
private MetadataProvider actionInputFileCache = null;
// Client environment handed to the context; TreeMap keeps entries sorted for determinism.
private final TreeMap<String, String> clientEnv = new TreeMap<>();
// Passed through to the context as its ArtifactExpander; null unless a test supplies one.
private ArtifactExpander artifactExpander = null;
// Defaults to a dummy executor rooted at this test's exec root.
private Executor executor = new DummyExecutor(fileSystem, getExecRoot());
/** Sets the {@link MetadataProvider} the built context will use; returns {@code this}. */
public ActionExecutionContextBuilder setMetadataProvider(
MetadataProvider actionInputFileCache) {
this.actionInputFileCache = actionInputFileCache;
return this;
}
/** Sets the {@link ArtifactExpander} the built context will use; returns {@code this}. */
public ActionExecutionContextBuilder setArtifactExpander(ArtifactExpander artifactExpander) {
this.artifactExpander = artifactExpander;
return this;
}
/** Replaces the default dummy {@link Executor}; returns {@code this}. */
public ActionExecutionContextBuilder setExecutor(Executor executor) {
this.executor = executor;
return this;
}
/**
 * Builds the {@link ActionExecutionContext}. The long positional argument list mirrors the
 * production constructor; the inline comments name each defaulted parameter.
 */
public ActionExecutionContext build() {
return new ActionExecutionContext(
executor,
actionInputFileCache,
/*actionInputPrefetcher=*/ null,
actionKeyContext,
/*metadataHandler=*/ null,
/*rewindingEnabled=*/ false,
LostInputsCheck.NONE,
actionLogBufferPathGenerator.generate(ArtifactPathResolver.IDENTITY),
reporter,
clientEnv,
/*topLevelFilesets=*/ ImmutableMap.of(),
artifactExpander,
/*actionFileSystem=*/ null,
/*skyframeDepsResult*/ null,
DiscoveredModulesPruner.DEFAULT,
SyscallCache.NO_CACHE,
ThreadStateReceiver.NULL_INSTANCE);
}
}
}
| apache-2.0 |
wendal/alipay-sdk | src/main/java/com/alipay/api/request/AlipayOpenServicemarketOrderRejectRequest.java | 3035 | package com.alipay.api.request;
import com.alipay.api.domain.AlipayOpenServicemarketOrderRejectModel;
import java.util.Map;
import com.alipay.api.AlipayRequest;
import com.alipay.api.internal.util.AlipayHashMap;
import com.alipay.api.response.AlipayOpenServicemarketOrderRejectResponse;
import com.alipay.api.AlipayObject;
/**
 * ALIPAY API: alipay.open.servicemarket.order.reject request.
 *
 * <p>Carries the business payload for a service provider rejecting an order, plus the common
 * request plumbing (API version, terminal info, notify/return URLs, encryption flag) required by
 * the {@link AlipayRequest} contract.
 *
 * @author auto create
 * @since 1.0, 2016-08-25 11:11:47
 */
public class AlipayOpenServicemarketOrderRejectRequest
    implements AlipayRequest<AlipayOpenServicemarketOrderRejectResponse> {

  /** User-defined extra text parameters; created lazily by {@link #putOtherTextParam}. */
  private AlipayHashMap udfParams;

  /** API protocol version. */
  private String apiVersion = "1.0";

  /** Business payload (biz_content): the service provider rejects the order. */
  private String bizContent;

  // Common request attributes, each exposed through a plain getter/setter pair below.
  private String terminalType;
  private String terminalInfo;
  private String prodCode;
  private String notifyUrl;
  private String returnUrl;
  private boolean needEncrypt = false;
  private AlipayObject bizModel = null;

  public void setBizContent(String bizContent) {
    this.bizContent = bizContent;
  }

  public String getBizContent() {
    return this.bizContent;
  }

  public String getNotifyUrl() {
    return this.notifyUrl;
  }

  public void setNotifyUrl(String notifyUrl) {
    this.notifyUrl = notifyUrl;
  }

  public String getReturnUrl() {
    return this.returnUrl;
  }

  public void setReturnUrl(String returnUrl) {
    this.returnUrl = returnUrl;
  }

  public String getApiVersion() {
    return this.apiVersion;
  }

  public void setApiVersion(String apiVersion) {
    this.apiVersion = apiVersion;
  }

  public void setTerminalType(String terminalType) {
    this.terminalType = terminalType;
  }

  public String getTerminalType() {
    return this.terminalType;
  }

  public void setTerminalInfo(String terminalInfo) {
    this.terminalInfo = terminalInfo;
  }

  public String getTerminalInfo() {
    return this.terminalInfo;
  }

  public void setProdCode(String prodCode) {
    this.prodCode = prodCode;
  }

  public String getProdCode() {
    return this.prodCode;
  }

  /** Returns the fixed gateway method name identifying this request type. */
  public String getApiMethodName() {
    return "alipay.open.servicemarket.order.reject";
  }

  /** Assembles the text parameters to send: {@code biz_content} plus any user-defined extras. */
  public Map<String, String> getTextParams() {
    AlipayHashMap txtParams = new AlipayHashMap();
    txtParams.put("biz_content", this.bizContent);
    if (udfParams != null) {
      txtParams.putAll(this.udfParams);
    }
    return txtParams;
  }

  /** Adds a user-defined text parameter, lazily creating the backing map on first use. */
  public void putOtherTextParam(String key, String value) {
    if (this.udfParams == null) {
      this.udfParams = new AlipayHashMap();
    }
    this.udfParams.put(key, value);
  }

  public Class<AlipayOpenServicemarketOrderRejectResponse> getResponseClass() {
    return AlipayOpenServicemarketOrderRejectResponse.class;
  }

  public boolean isNeedEncrypt() {
    return this.needEncrypt;
  }

  public void setNeedEncrypt(boolean needEncrypt) {
    this.needEncrypt = needEncrypt;
  }

  public AlipayObject getBizModel() {
    return this.bizModel;
  }

  public void setBizModel(AlipayObject bizModel) {
    this.bizModel = bizModel;
  }
}
| apache-2.0 |
stefanhoth/got2048 | wear/src/main/java/de/stefanhoth/android/got2048/logic/model/MOVE_DIRECTION.java | 254 | package de.stefanhoth.android.got2048.logic.model;
/**
 * The four directions in which a move can be made in the game grid.
 *
 * <p>NOTE(review): Java convention would name this {@code MoveDirection}; renaming is not done
 * here because callers reference the current name.
 *
 * @author Stefan Hoth <sh@jnamic.com>
 * date: 20.03.14 23:12
 * @since 1.0
 */
public enum MOVE_DIRECTION {
UP,
DOWN,
LEFT,
RIGHT
}
| apache-2.0 |